| code | repo_name | path | language | license | size |
|---|---|---|---|---|---|
| string (2-1.05M chars) | string (5-104 chars) | string (4-251 chars) | string (1 value) | string (15 values) | int32 (2-1.05M) |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class RedisAccessKeys(Model):
"""Redis cache access keys.
Variables are only populated by the server, and will be ignored when
sending a request.
:ivar primary_key: The current primary key that clients can use to
authenticate with Redis cache.
:vartype primary_key: str
:ivar secondary_key: The current secondary key that clients can use to
authenticate with Redis cache.
:vartype secondary_key: str
"""
_validation = {
'primary_key': {'readonly': True},
'secondary_key': {'readonly': True},
}
_attribute_map = {
'primary_key': {'key': 'primaryKey', 'type': 'str'},
'secondary_key': {'key': 'secondaryKey', 'type': 'str'},
}
def __init__(self):
self.primary_key = None
self.secondary_key = None
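# A hedged usage sketch (not part of the generated file): with the
# azure-mgmt-redis SDK of this era the model was typically returned by the
# management client's list_keys operation. The client construction details
# below are assumptions for illustration only.
#
#     from azure.mgmt.redis import RedisManagementClient
#     client = RedisManagementClient(credentials, subscription_id)
#     keys = client.redis.list_keys('my_resource_group', 'my_cache')
#     print(keys.primary_key)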
| v-iam/azure-sdk-for-python | azure-mgmt-redis/azure/mgmt/redis/models/redis_access_keys.py | Python | mit | 1,325 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# thumbor imaging service
# https://github.com/thumbor/thumbor/wiki
# Licensed under the MIT license:
# http://www.opensource.org/licenses/mit-license
# Copyright (c) 2011 globo.com thumbor@googlegroups.com
from os.path import abspath
LOADER = "thumbor.loaders.file_loader"
FILE_LOADER_ROOT_PATH = abspath("./tests/fixtures/images/")
STORAGE = "thumbor.storages.no_storage"
MAX_AGE = 2
MAX_AGE_TEMP_IMAGE = 1
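# Both lifetimes are in seconds: MAX_AGE is the cache lifetime for normal
# responses and MAX_AGE_TEMP_IMAGE the one for temporary/error images
# (assumed standard thumbor semantics; the tiny values just keep tests fast).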
| gi11es/thumbor | tests/fixtures/max_age_conf.py | Python | mit | 455 |
#!/usr/bin/python
__author__ = "raphtee@google.com (Travis Miller)"
import mock, mock_demo_MUT
class MyError(Exception):
pass
class A(object):
var = 8
def __init__(self):
self.x = 0
def method1(self):
self.x += 1
return self.x
def method2(self, y):
return y * self.x
class B(A):
def method3(self, z):
return self.x + z
def method4(self, z, w):
return self.x * z + w
class C(B):
def method5(self):
self.method1()
t = self.method2(4)
u = self.method3(t)
return u
class D(C):
def method6(self, error):
if error:
raise MyError("woops")
else:
return 10
class E(D):
def __init__(self, val):
self.val = val
# say we want to test that do_stuff is doing what we think it is doing
def do_stuff(a, b, func):
print b.method1()
print b.method3(10)
print func("how many")
print a.method2(5)
print b.method1()
print b.method4(1, 4)
print b.method2(3)
print b.method2("hello")
def do_more_stuff(d):
print d.method6(False)
try:
d.method6(True)
except:
print "caught error"
def main():
god = mock.mock_god()
m1 = god.create_mock_class(A, "A")
print m1.var
m2 = god.create_mock_class(B, "B")
f = god.create_mock_function("func")
print dir(m1)
print dir(m2)
# sets up the "recording"
m2.method1.expect_call().and_return(1)
m2.method3.expect_call(10).and_return(10)
f.expect_call("how many").and_return(42)
m1.method2.expect_call(5).and_return(0)
m2.method1.expect_call().and_return(2)
m2.method4.expect_call(1, 4).and_return(6)
m2.method2.expect_call(3).and_return(6)
m2.method2.expect_call(mock.is_string_comparator()).and_return("foo")
# check the recording order
for func_call in god.recording:
print func_call
# once we start making calls into the methods we are in
# playback mode
do_stuff(m1, m2, f)
# we can now check that playback succeeded
god.check_playback()
# now test the ability to mock out all methods of an object
# except those under test
c = C()
god.mock_up(c, "c")
# setup recording
c.method1.expect_call()
c.method2.expect_call(4).and_return(4)
c.method3.expect_call(4).and_return(5)
# perform the test
answer = c.method5.run_original_function()
# check playback
print "answer = %s" % (answer)
god.check_playback()
# check exception returns too
m3 = god.create_mock_class(D, "D")
m3.method6.expect_call(False).and_return(10)
m3.method6.expect_call(True).and_raises(MyError("woops"))
do_more_stuff(m3)
god.check_playback()
# now check we can mock out a whole class (rather than just an instance)
mockE = god.create_mock_class_obj(E, "E")
oldE = mock_demo_MUT.E
mock_demo_MUT.E = mockE
m4 = mockE.expect_new(val=7)
m4.method1.expect_call().and_return(1)
mock_demo_MUT.do_create_stuff()
god.check_playback()
mock_demo_MUT.E = oldE
if __name__ == "__main__":
main()
| wuzhy/autotest | client/common_lib/test_utils/mock_demo.py | Python | gpl-2.0 | 3,145 |
#!/usr/bin/python
#
# Copyright (C) 2011 Google Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.
"""Script for testing ganeti.utils.retry"""
import unittest
from ganeti import constants
from ganeti import errors
from ganeti import utils
import testutils
class TestRetry(testutils.GanetiTestCase):
def setUp(self):
testutils.GanetiTestCase.setUp(self)
self.retries = 0
self.called = 0
self.time = 1379601882.0
self.time_for_time_fn = 0
self.time_for_retry_and_succeed = 0
def _time_fn(self):
self.time += self.time_for_time_fn
return self.time
def _wait_fn(self, delay):
self.time += delay
@staticmethod
def _RaiseRetryAgain():
raise utils.RetryAgain()
@staticmethod
def _RaiseRetryAgainWithArg(args):
raise utils.RetryAgain(*args)
def _WrongNestedLoop(self):
return utils.Retry(self._RaiseRetryAgain, 0.01, 0.02)
def _RetryAndSucceed(self, retries):
self.time += self.time_for_retry_and_succeed
if self.retries < retries:
self.retries += 1
raise utils.RetryAgain()
else:
return True
def _SimpleRetryAndSucceed(self, retries):
self.called += 1
if self.retries < retries:
self.retries += 1
return False
else:
return True
def testRaiseTimeout(self):
self.failUnlessRaises(utils.RetryTimeout, utils.Retry,
self._RaiseRetryAgain, 0.01, 0.02,
wait_fn = self._wait_fn, _time_fn = self._time_fn)
self.failUnlessRaises(utils.RetryTimeout, utils.Retry,
self._RetryAndSucceed, 0.01, 0, args=[1],
wait_fn = self._wait_fn, _time_fn = self._time_fn)
self.failUnlessEqual(self.retries, 1)
def testComplete(self):
self.failUnlessEqual(utils.Retry(lambda: True, 0, 1,
wait_fn = self._wait_fn,
_time_fn = self._time_fn),
True)
self.failUnlessEqual(utils.Retry(self._RetryAndSucceed, 0, 1, args=[2],
wait_fn = self._wait_fn,
_time_fn = self._time_fn),
True)
self.failUnlessEqual(self.retries, 2)
def testCompleteNontrivialTimes(self):
self.time_for_time_fn = 0.01
self.time_for_retry_and_succeed = 0.1
self.failUnlessEqual(utils.Retry(self._RetryAndSucceed, 0, 1, args=[2],
wait_fn = self._wait_fn,
_time_fn = self._time_fn),
True)
self.failUnlessEqual(self.retries, 2)
def testNestedLoop(self):
try:
self.failUnlessRaises(errors.ProgrammerError, utils.Retry,
self._WrongNestedLoop, 0, 1,
wait_fn = self._wait_fn, _time_fn = self._time_fn)
except utils.RetryTimeout:
self.fail("Didn't detect inner loop's exception")
def testTimeoutArgument(self):
retry_arg="my_important_debugging_message"
try:
utils.Retry(self._RaiseRetryAgainWithArg, 0.01, 0.02, args=[[retry_arg]],
wait_fn = self._wait_fn, _time_fn = self._time_fn)
except utils.RetryTimeout, err:
self.failUnlessEqual(err.args, (retry_arg, ))
else:
self.fail("Expected timeout didn't happen")
def testTimeout(self):
self.time_for_time_fn = 0.01
self.time_for_retry_and_succeed = 10
try:
utils.Retry(self._RetryAndSucceed, 1, 18, args=[2],
wait_fn = self._wait_fn, _time_fn = self._time_fn)
except utils.RetryTimeout, err:
self.failUnlessEqual(err.args, ())
else:
self.fail("Expected timeout didn't happen")
def testNoTimeout(self):
self.time_for_time_fn = 0.01
self.time_for_retry_and_succeed = 8
self.failUnlessEqual(
utils.Retry(self._RetryAndSucceed, 1, 18, args=[2],
wait_fn = self._wait_fn, _time_fn = self._time_fn),
True)
def testRaiseInnerWithExc(self):
retry_arg="my_important_debugging_message"
try:
try:
utils.Retry(self._RaiseRetryAgainWithArg, 0.01, 0.02,
args=[[errors.GenericError(retry_arg, retry_arg)]],
wait_fn = self._wait_fn, _time_fn = self._time_fn)
except utils.RetryTimeout, err:
err.RaiseInner()
else:
self.fail("Expected timeout didn't happen")
except errors.GenericError, err:
self.failUnlessEqual(err.args, (retry_arg, retry_arg))
else:
self.fail("Expected GenericError didn't happen")
def testRaiseInnerWithMsg(self):
retry_arg="my_important_debugging_message"
try:
try:
utils.Retry(self._RaiseRetryAgainWithArg, 0.01, 0.02,
args=[[retry_arg, retry_arg]],
wait_fn = self._wait_fn, _time_fn = self._time_fn)
except utils.RetryTimeout, err:
err.RaiseInner()
else:
self.fail("Expected timeout didn't happen")
except utils.RetryTimeout, err:
self.failUnlessEqual(err.args, (retry_arg, retry_arg))
else:
self.fail("Expected RetryTimeout didn't happen")
def testSimpleRetry(self):
self.assertFalse(utils.SimpleRetry(True, lambda: False, 0.01, 0.02,
wait_fn = self._wait_fn,
_time_fn = self._time_fn))
self.assertFalse(utils.SimpleRetry(lambda x: x, lambda: False, 0.01, 0.02,
wait_fn = self._wait_fn,
_time_fn = self._time_fn))
self.assertTrue(utils.SimpleRetry(True, lambda: True, 0, 1,
wait_fn = self._wait_fn,
_time_fn = self._time_fn))
self.assertTrue(utils.SimpleRetry(lambda x: x, lambda: True, 0, 1,
wait_fn = self._wait_fn,
_time_fn = self._time_fn))
self.assertTrue(utils.SimpleRetry(True, self._SimpleRetryAndSucceed, 0, 1,
args=[1], wait_fn = self._wait_fn,
_time_fn = self._time_fn))
self.assertEqual(self.retries, 1)
self.assertEqual(self.called, 2)
self.called = self.retries = 0
self.assertTrue(utils.SimpleRetry(True, self._SimpleRetryAndSucceed, 0, 1,
args=[2], wait_fn = self._wait_fn,
_time_fn = self._time_fn))
self.assertEqual(self.called, 3)
if __name__ == "__main__":
testutils.GanetiTestProgram()
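# A minimal, self-contained sketch of the retry contract exercised above,
# assuming only what the tests show: fn() either returns a value or raises
# RetryAgain; Retry waits between attempts and raises RetryTimeout (carrying
# the last RetryAgain's args) once the timeout has elapsed. This is an
# illustration, not ganeti's implementation.
import time

class RetryAgain(Exception):
    pass

class RetryTimeout(Exception):
    pass

def simple_retry(fn, delay, timeout, args=(), wait_fn=time.sleep, time_fn=time.time):
    deadline = time_fn() + timeout
    while True:
        try:
            return fn(*args)
        except RetryAgain as err:
            if time_fn() >= deadline:
                # mirror testTimeoutArgument: forward the RetryAgain args
                raise RetryTimeout(*err.args)
            wait_fn(delay)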
| kawamuray/ganeti | test/py/ganeti.utils.retry_unittest.py | Python | gpl-2.0 | 7,310 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# || ____ _ __
# +------+ / __ )(_) /_______________ _____ ___
# | 0xBC | / __ / / __/ ___/ ___/ __ `/_ / / _ \
# +------+ / /_/ / / /_/ /__/ / / /_/ / / /_/ __/
# || || /_____/_/\__/\___/_/ \__,_/ /___/\___/
#
# Copyright (C) 2011-2014 Bitcraze AB
#
# Crazyflie Nano Quadcopter Client
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
"""
Enables flash access to the Crazyflie.
"""
__author__ = 'Bitcraze AB'
__all__ = ['Memory', 'MemoryElement']
import struct
import errno
from Queue import Queue
from threading import Lock
from cflib.crtp.crtpstack import CRTPPacket, CRTPPort
from cflib.utils.callbacks import Caller
from binascii import crc32
import binascii
# Channels used for the logging port
CHAN_INFO = 0
CHAN_READ = 1
CHAN_WRITE = 2
# Commands used when accessing the Settings port
CMD_INFO_VER = 0
CMD_INFO_NBR = 1
CMD_INFO_DETAILS = 2
# The max size of a CRTP packet payload
MAX_LOG_DATA_PACKET_SIZE = 30
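# Layout of a CMD_INFO_DETAILS reply payload as decoded in _new_packet_cb
# below (inferred from the unpacking code, not from a separate spec):
#   byte 0      memory id
#   byte 1      memory type
#   bytes 2-5   memory size (uint32)
#   bytes 6-13  8-byte 1-wire address (only meaningful for 1-wire memories)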
import logging
logger = logging.getLogger(__name__)
class MemoryElement(object):
"""A memory """
TYPE_I2C = 0
TYPE_1W = 1
TYPE_DRIVER_LED = 0x10
def __init__(self, id, type, size, mem_handler):
"""Initialize the element with default values"""
self.id = id
self.type = type
self.size = size
self.mem_handler = mem_handler
@staticmethod
def type_to_string(t):
"""Get string representation of memory type"""
if t == MemoryElement.TYPE_I2C:
return "I2C"
if t == MemoryElement.TYPE_1W:
return "1-wire"
if t == MemoryElement.TYPE_DRIVER_LED:
return "LED driver"
return "Unknown"
def new_data(self, mem, addr, data):
logger.info("New data, but not OW mem")
def __str__(self):
"""Generate debug string for memory"""
return ("Memory: id={}, type={}, size={}".format(
self.id, MemoryElement.type_to_string(self.type), self.size))
class LED:
"""Used to set color/intensity of one LED in the LED-ring"""
def __init__(self):
"""Initialize to off"""
self.r = 0
self.g = 0
self.b = 0
self.intensity = 100
def set(self, r, g, b, intensity=None):
"""Set the R/G/B and optionally intensity in one call"""
self.r = r
self.g = g
self.b = b
        if intensity is not None:
self.intensity = intensity
class LEDDriverMemory(MemoryElement):
"""Memory interface for using the LED-ring mapped memory for setting RGB
values for all the LEDs in the ring"""
def __init__(self, id, type, size, mem_handler):
"""Initialize with 12 LEDs"""
super(LEDDriverMemory, self).__init__(id=id, type=type, size=size,
mem_handler=mem_handler)
self._update_finished_cb = None
self._write_finished_cb = None
self.leds = []
for i in range(12):
self.leds.append(LED())
def new_data(self, mem, addr, data):
"""Callback for when new memory data has been fetched"""
if mem.id == self.id:
logger.info("Got new data from the LED driver, but we don't care.")
def write_data(self, write_finished_cb):
"""Write the saved LED-ring data to the Crazyflie"""
self._write_finished_cb = write_finished_cb
data = ()
for led in self.leds:
# In order to fit all the LEDs in one radio packet RGB565 is used
# to compress the colors. The calculations below converts 3 bytes
# RGB into 2 bytes RGB565. Then shifts the value of each color to
# LSB, applies the intensity and shifts them back for correct
# alignment on 2 bytes.
            R5 = int((((int(led.r) & 0xFF) * 249 + 1014) >> 11) & 0x1F) * led.intensity / 100
            G6 = int((((int(led.g) & 0xFF) * 253 + 505) >> 10) & 0x3F) * led.intensity / 100
            B5 = int((((int(led.b) & 0xFF) * 249 + 1014) >> 11) & 0x1F) * led.intensity / 100
tmp = (R5 << 11) | (G6 << 5) | (B5 << 0)
data += (tmp >> 8, tmp & 0xFF)
self.mem_handler.write(self, 0x00, data, flush_queue=True)
def update(self, update_finished_cb):
"""Request an update of the memory content"""
if not self._update_finished_cb:
self._update_finished_cb = update_finished_cb
self.valid = False
logger.info("Updating content of memory {}".format(self.id))
# Start reading the header
self.mem_handler.read(self, 0, 16)
def write_done(self, mem, addr):
if self._write_finished_cb and mem.id == self.id:
logger.info("Write to LED driver done")
self._write_finished_cb(self, addr)
self._write_finished_cb = None
def disconnect(self):
self._update_finished_cb = None
self._write_finished_cb = None
class I2CElement(MemoryElement):
def __init__(self, id, type, size, mem_handler):
super(I2CElement, self).__init__(id=id, type=type, size=size, mem_handler=mem_handler)
self._update_finished_cb = None
self._write_finished_cb = None
self.elements = {}
self.valid = False
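    # EEPROM image handled by this class (inferred from new_data/write_data
    # below): "0xBC" magic (4 ASCII bytes), then <BBBff>: version, radio
    # channel, radio speed, pitch trim, roll trim; version 1 appends the
    # radio address (upper byte + lower uint32), and a modulo-256 checksum
    # byte ends the image.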
def new_data(self, mem, addr, data):
"""Callback for when new memory data has been fetched"""
if mem.id == self.id:
if addr == 0:
done = False
# Check for header
if data[0:4] == "0xBC":
logger.info("Got new data: {}".format(data))
[self.elements["version"],
self.elements["radio_channel"],
self.elements["radio_speed"],
self.elements["pitch_trim"],
self.elements["roll_trim"]] = struct.unpack("<BBBff", data[4:15])
if self.elements["version"] == 0:
done = True
elif self.elements["version"] == 1:
self.datav0 = data
self.mem_handler.read(self, 16, 5)
if addr == 16:
[radio_address_upper,
radio_address_lower] = struct.unpack("<BI", self.datav0[15] + data[0:4])
self.elements["radio_address"] = int(radio_address_upper) << 32 | radio_address_lower
logger.info(self.elements)
data = self.datav0 + data
done = True
if done:
if self._checksum256(data[:len(data)-1]) == ord(data[len(data)-1]):
self.valid = True
if self._update_finished_cb:
self._update_finished_cb(self)
self._update_finished_cb = None
def _checksum256(self, st):
return reduce(lambda x, y: x + y, map(ord, st)) % 256
def write_data(self, write_finished_cb):
if self.elements["version"] == 0:
data = (0x00, self.elements["radio_channel"], self.elements["radio_speed"],
self.elements["pitch_trim"], self.elements["roll_trim"])
image = struct.pack("<BBBff", *data)
elif self.elements["version"] == 1:
data = (0x01, self.elements["radio_channel"], self.elements["radio_speed"],
self.elements["pitch_trim"], self.elements["roll_trim"],
self.elements["radio_address"] >> 32, self.elements["radio_address"] & 0xFFFFFFFF)
image = struct.pack("<BBBffBI", *data)
# Adding some magic:
image = "0xBC" + image
image += struct.pack("B", self._checksum256(image))
self._write_finished_cb = write_finished_cb
self.mem_handler.write(self, 0x00, struct.unpack("B"*len(image), image))
def update(self, update_finished_cb):
"""Request an update of the memory content"""
if not self._update_finished_cb:
self._update_finished_cb = update_finished_cb
self.valid = False
logger.info("Updating content of memory {}".format(self.id))
# Start reading the header
self.mem_handler.read(self, 0, 16)
def write_done(self, mem, addr):
if self._write_finished_cb and mem.id == self.id:
self._write_finished_cb(self, addr)
self._write_finished_cb = None
def disconnect(self):
self._update_finished_cb = None
self._write_finished_cb = None
class OWElement(MemoryElement):
"""Memory class with extra functionality for 1-wire memories"""
element_mapping = {
1: "Board name",
2: "Board revision",
3: "Custom"
}
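    # 1-wire memory layout handled by this class (inferred from
    # _parse_and_check_header and write_data below):
    #   header, 8 bytes: 0xEB magic, pins (uint32), vid, pid, CRC32 low byte
    #   elements: version byte, length byte, (id, len, payload) records,
    #   then a CRC32 low byte over everything before it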
def __init__(self, id, type, size, addr, mem_handler):
"""Initialize the memory with good defaults"""
super(OWElement, self).__init__(id=id, type=type, size=size, mem_handler=mem_handler)
self.addr = addr
self.valid = False
self.vid = None
self.pid = None
self.name = None
self.pins = None
self.elements = {}
self._update_finished_cb = None
self._write_finished_cb = None
self._rev_element_mapping = {}
for key in OWElement.element_mapping.keys():
self._rev_element_mapping[OWElement.element_mapping[key]] = key
def new_data(self, mem, addr, data):
"""Callback for when new memory data has been fetched"""
if mem.id == self.id:
if addr == 0:
if self._parse_and_check_header(data[0:8]):
logger.info("--> HEADER OK")
if self._parse_and_check_elements(data[9:11]):
self.valid = True
self._update_finished_cb(self)
self._update_finished_cb = None
else:
# We need to fetch the elements, find out the length
(elem_ver, elem_len) = struct.unpack("BB", data[8:10])
self.mem_handler.read(self, 8, elem_len + 3)
else:
logger.info("--> HEADER NOT OK")
                    # Even if the CRC check of the header fails we call the
                    # update callback; we're done here
if self._update_finished_cb:
self._update_finished_cb(self)
self._update_finished_cb = None
elif addr == 0x08:
if self._parse_and_check_elements(data):
logger.info("--> ELEMENT OK")
self.valid = True
else:
logger.info("--> ELEMENT NOT OK")
if self._update_finished_cb:
self._update_finished_cb(self)
self._update_finished_cb = None
def _parse_and_check_elements(self, data):
"""Parse and check the CRC and length of the elements part of the memory"""
(elem_ver, elem_len, crc) = struct.unpack("<BBB", data[0] + data[1] + data[-1])
test_crc = crc32(data[:-1]) & 0x0ff
elem_data = data[2:-1]
if test_crc == crc:
while len(elem_data) > 0:
(eid, elen) = struct.unpack("BB", elem_data[:2])
self.elements[self.element_mapping[eid]] = elem_data[2:2+elen]
elem_data = elem_data[2+elen:]
return True
return False
def write_done(self, mem, addr):
if self._write_finished_cb:
self._write_finished_cb(self, addr)
self._write_finished_cb = None
def write_data(self, write_finished_cb):
# First generate the header part
header_data = struct.pack("<BIBB", 0xEB, self.pins, self.vid, self.pid)
header_crc = crc32(header_data) & 0x0ff
header_data += struct.pack("B", header_crc)
# Now generate the elements part
elem = ""
logger.info(self.elements.keys())
for element in reversed(self.elements.keys()):
elem_string = self.elements[element]
#logger.info(">>>> {}".format(elem_string))
key_encoding = self._rev_element_mapping[element]
elem += struct.pack("BB", key_encoding, len(elem_string))
elem += elem_string
elem_data = struct.pack("BB", 0x00, len(elem))
elem_data += elem
elem_crc = crc32(elem_data) & 0x0ff
elem_data += struct.pack("B", elem_crc)
data = header_data + elem_data
# Write data
p = ""
for s in data:
p += "0x{:02X} ".format(ord(s))
logger.info(p)
self.mem_handler.write(self, 0x00, struct.unpack("B"*len(data), data))
self._write_finished_cb = write_finished_cb
def update(self, update_finished_cb):
"""Request an update of the memory content"""
if not self._update_finished_cb:
self._update_finished_cb = update_finished_cb
self.valid = False
logger.info("Updating content of memory {}".format(self.id))
# Start reading the header
self.mem_handler.read(self, 0, 11)
#else:
# logger.warning("Already in progress of updating memory {}".format(self.id))
def _parse_and_check_header(self, data):
"""Parse and check the CRC of the header part of the memory"""
#logger.info("Should parse header: {}".format(data))
(start, self.pins, self.vid, self.pid, crc) = struct.unpack("<BIBBB", data)
test_crc = crc32(data[:-1]) & 0x0ff
if start == 0xEB and crc == test_crc:
return True
return False
def __str__(self):
"""Generate debug string for memory"""
return ("OW {} ({:02X}:{:02X}): {}".format(
self.addr, self.vid, self.pid, self.elements))
def disconnect(self):
self._update_finished_cb = None
self._write_finished_cb = None
class _ReadRequest:
"""Class used to handle memory reads that will split up the read in multiple packets in necessary"""
MAX_DATA_LENGTH = 20
def __init__(self, mem, addr, length, cf):
"""Initialize the object with good defaults"""
self.mem = mem
self.addr = addr
self._bytes_left = length
self.data = ""
self.cf = cf
self._current_addr = addr
def start(self):
"""Start the fetching of the data"""
self._request_new_chunk()
def resend(self):
logger.info("Sending write again...")
self._request_new_chunk()
def _request_new_chunk(self):
"""Called to request a new chunk of data to be read from the Crazyflie"""
# Figure out the length of the next request
new_len = self._bytes_left
if new_len > _ReadRequest.MAX_DATA_LENGTH:
new_len = _ReadRequest.MAX_DATA_LENGTH
logger.info("Requesting new chunk of {}bytes at 0x{:X}".format(new_len, self._current_addr))
# Request the data for the next address
pk = CRTPPacket()
pk.set_header(CRTPPort.MEM, CHAN_READ)
pk.data = struct.pack("<BIB", self.mem.id, self._current_addr, new_len)
reply = struct.unpack("<BBBBB", pk.data[:-1])
self.cf.send_packet(pk, expected_reply=reply, timeout=1)
def add_data(self, addr, data):
"""Callback when data is received from the Crazyflie"""
data_len = len(data)
if not addr == self._current_addr:
logger.warning("Address did not match when adding data to read request!")
return
# Add the data and calculate the next address to fetch
self.data += data
self._bytes_left -= data_len
self._current_addr += data_len
if self._bytes_left > 0:
self._request_new_chunk()
return False
else:
return True
class _WriteRequest:
"""Class used to handle memory reads that will split up the read in multiple packets in necessary"""
MAX_DATA_LENGTH = 25
def __init__(self, mem, addr, data, cf):
"""Initialize the object with good defaults"""
self.mem = mem
self.addr = addr
self._bytes_left = len(data)
self._data = data
self.data = ""
self.cf = cf
self._current_addr = addr
self._sent_packet = None
self._sent_reply = None
self._addr_add = 0
def start(self):
"""Start the fetching of the data"""
self._write_new_chunk()
def resend(self):
logger.info("Sending write again...")
self.cf.send_packet(self._sent_packet, expected_reply=self._sent_reply, timeout=1)
def _write_new_chunk(self):
"""Called to request a new chunk of data to be read from the Crazyflie"""
# Figure out the length of the next request
new_len = len(self._data)
if new_len > _WriteRequest.MAX_DATA_LENGTH:
new_len = _WriteRequest.MAX_DATA_LENGTH
logger.info("Writing new chunk of {}bytes at 0x{:X}".format(new_len, self._current_addr))
data = self._data[:new_len]
self._data = self._data[new_len:]
pk = CRTPPacket()
pk.set_header(CRTPPort.MEM, CHAN_WRITE)
pk.data = struct.pack("<BI", self.mem.id, self._current_addr)
# Create a tuple used for matching the reply using id and address
reply = struct.unpack("<BBBBB", pk.data)
self._sent_reply = reply
# Add the data
pk.data += struct.pack("B"*len(data), *data)
self._sent_packet = pk
self.cf.send_packet(pk, expected_reply=reply, timeout=1)
self._addr_add = len(data)
def write_done(self, addr):
"""Callback when data is received from the Crazyflie"""
if not addr == self._current_addr:
logger.warning("Address did not match when adding data to read request!")
return
if len(self._data) > 0:
self._current_addr += self._addr_add
self._write_new_chunk()
return False
else:
logger.info("This write request is done")
return True
class Memory():
"""Access memories on the Crazyflie"""
    # These codes can be decoded using os.strerror, but
    # some of the text messages will look very strange
    # in the UI, so they are redefined here
_err_codes = {
errno.ENOMEM: "No more memory available",
errno.ENOEXEC: "Command not found",
errno.ENOENT: "No such block id",
errno.E2BIG: "Block too large",
errno.EEXIST: "Block already exists"
}
def __init__(self, crazyflie=None):
"""Instantiate class and connect callbacks"""
self.mems = []
# Called when new memories have been added
self.mem_added_cb = Caller()
# Called when new data has been read
self.mem_read_cb = Caller()
self.mem_write_cb = Caller()
self.cf = crazyflie
self.cf.add_port_callback(CRTPPort.MEM, self._new_packet_cb)
self._refresh_callback = None
self._fetch_id = 0
self.nbr_of_mems = 0
self._ow_mem_fetch_index = 0
self._elem_data = ()
self._read_requests = {}
self._read_requests_lock = Lock()
self._write_requests = {}
self._write_requests_lock = Lock()
self._ow_mems_left_to_update = []
self._getting_count = False
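
    # Refresh flow (as implemented below): CMD_INFO_NBR fetches the memory
    # count, then one CMD_INFO_DETAILS request per id builds the list of
    # MemoryElement objects, and finally every 1-wire memory is update()d
    # before _refresh_callback fires.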
def _mem_update_done(self, mem):
"""Callback from each individual memory (only 1-wire) when reading of header/elements are done"""
if mem.id in self._ow_mems_left_to_update:
self._ow_mems_left_to_update.remove(mem.id)
logger.info(mem)
if len(self._ow_mems_left_to_update) == 0:
if self._refresh_callback:
self._refresh_callback()
self._refresh_callback = None
def get_mem(self, id):
"""Fetch the memory with the supplied id"""
for m in self.mems:
if m.id == id:
return m
return None
def get_mems(self, type):
"""Fetch all the memories of the supplied type"""
ret = ()
for m in self.mems:
if m.type == type:
ret += (m, )
return ret
def ow_search(self, vid=0xBC, pid=None, name=None):
"""Search for specific memory id/name and return it"""
for m in self.get_mems(MemoryElement.TYPE_1W):
if pid and m.pid == pid or name and m.name == name:
return m
return None
def write(self, memory, addr, data, flush_queue=False):
"""Write the specified data to the given memory at the given address"""
wreq = _WriteRequest(memory, addr, data, self.cf)
        if memory.id not in self._write_requests:
            self._write_requests[memory.id] = []
        # Workaround until we secure the uplink and change messages for
        # mems to non-blocking
        self._write_requests_lock.acquire()
        if flush_queue:
            self._write_requests[memory.id] = self._write_requests[memory.id][:1]
        self._write_requests[memory.id].append(wreq)
if len(self._write_requests[memory.id]) == 1:
wreq.start()
self._write_requests_lock.release()
return True
def read(self, memory, addr, length):
"""Read the specified amount of bytes from the given memory at the given address"""
if memory.id in self._read_requests:
logger.warning("There is already a read operation ongoing for memory id {}".format(memory.id))
return False
rreq = _ReadRequest(memory, addr, length, self.cf)
self._read_requests[memory.id] = rreq
rreq.start()
return True
def refresh(self, refresh_done_callback):
"""Start fetching all the detected memories"""
self._refresh_callback = refresh_done_callback
self._fetch_id = 0
for m in self.mems:
try:
self.mem_read_cb.remove_callback(m.new_data)
m.disconnect()
except Exception as e:
logger.info("Error when removing memory after update: {}".format(e))
self.mems = []
self.nbr_of_mems = 0
self._getting_count = False
logger.info("Requesting number of memories")
pk = CRTPPacket()
pk.set_header(CRTPPort.MEM, CHAN_INFO)
pk.data = (CMD_INFO_NBR, )
self.cf.send_packet(pk, expected_reply=(CMD_INFO_NBR,))
def _new_packet_cb(self, packet):
"""Callback for newly arrived packets for the memory port"""
chan = packet.channel
cmd = packet.datal[0]
payload = struct.pack("B" * (len(packet.datal) - 1), *packet.datal[1:])
#logger.info("--------------->CHAN:{}=>{}".format(chan, struct.unpack("B"*len(payload), payload)))
if chan == CHAN_INFO:
if cmd == CMD_INFO_NBR:
self.nbr_of_mems = ord(payload[0])
logger.info("{} memories found".format(self.nbr_of_mems))
# Start requesting information about the memories, if there are any...
if self.nbr_of_mems > 0:
if not self._getting_count:
self._getting_count = True
logger.info("Requesting first id")
pk = CRTPPacket()
pk.set_header(CRTPPort.MEM, CHAN_INFO)
pk.data = (CMD_INFO_DETAILS, 0)
self.cf.send_packet(pk, expected_reply=(CMD_INFO_DETAILS, 0))
else:
self._refresh_callback()
if cmd == CMD_INFO_DETAILS:
# Did we get a good reply, otherwise try again:
if len(payload) < 5:
# Workaround for 1-wire bug when memory is detected
# but updating the info crashes the communication with
# the 1-wire. Fail by saying we only found 1 memory (the I2C).
logger.error("-------->Got good count, but no info on mem!")
self.nbr_of_mems = 1
if self._refresh_callback:
self._refresh_callback()
self._refresh_callback = None
return
# Create information about a new memory
# Id - 1 byte
mem_id = ord(payload[0])
# Type - 1 byte
mem_type = ord(payload[1])
# Size 4 bytes (as addr)
mem_size = struct.unpack("I", payload[2:6])[0]
# Addr (only valid for 1-wire?)
mem_addr_raw = struct.unpack("B"*8, payload[6:14])
mem_addr = ""
for m in mem_addr_raw:
mem_addr += "{:02X}".format(m)
if (not self.get_mem(mem_id)):
if mem_type == MemoryElement.TYPE_1W:
mem = OWElement(id=mem_id, type=mem_type, size=mem_size,
addr=mem_addr, mem_handler=self)
self.mem_read_cb.add_callback(mem.new_data)
self.mem_write_cb.add_callback(mem.write_done)
self._ow_mems_left_to_update.append(mem.id)
elif mem_type == MemoryElement.TYPE_I2C:
mem = I2CElement(id=mem_id, type=mem_type, size=mem_size,
mem_handler=self)
self.mem_read_cb.add_callback(mem.new_data)
self.mem_write_cb.add_callback(mem.write_done)
elif mem_type == MemoryElement.TYPE_DRIVER_LED:
mem = LEDDriverMemory(id=mem_id, type=mem_type,
size=mem_size, mem_handler=self)
logger.info(mem)
self.mem_read_cb.add_callback(mem.new_data)
self.mem_write_cb.add_callback(mem.write_done)
else:
mem = MemoryElement(id=mem_id, type=mem_type, size=mem_size, mem_handler=self)
logger.info(mem)
self.mems.append(mem)
self.mem_added_cb.call(mem)
#logger.info(mem)
self._fetch_id = mem_id + 1
if self.nbr_of_mems - 1 >= self._fetch_id:
logger.info("Requesting information about memory {}".format(self._fetch_id))
pk = CRTPPacket()
pk.set_header(CRTPPort.MEM, CHAN_INFO)
pk.data = (CMD_INFO_DETAILS, self._fetch_id)
self.cf.send_packet(pk, expected_reply=(CMD_INFO_DETAILS, self._fetch_id))
else:
logger.info("Done getting all the memories, start reading the OWs")
ows = self.get_mems(MemoryElement.TYPE_1W)
# If there are any OW mems start reading them, otherwise we are done
for ow_mem in self.get_mems(MemoryElement.TYPE_1W):
ow_mem.update(self._mem_update_done)
if len (self.get_mems(MemoryElement.TYPE_1W)) == 0:
if self._refresh_callback:
self._refresh_callback()
self._refresh_callback = None
if chan == CHAN_WRITE:
id = cmd
(addr, status) = struct.unpack("<IB", payload[0:5])
logger.info("WRITE: Mem={}, addr=0x{:X}, status=0x{}".format(id, addr, status))
# Find the read request
if id in self._write_requests:
self._write_requests_lock.acquire()
wreq = self._write_requests[id][0]
if status == 0:
if wreq.write_done(addr):
#self._write_requests.pop(id, None)
# Remove the first item
self._write_requests[id].pop(0)
self.mem_write_cb.call(wreq.mem, wreq.addr)
# Get a new one to start (if there are any)
if len(self._write_requests[id]) > 0:
self._write_requests[id][0].start()
else:
logger.info("Status {}: write resending...".format(status))
wreq.resend()
self._write_requests_lock.release()
if chan == CHAN_READ:
id = cmd
(addr, status) = struct.unpack("<IB", payload[0:5])
data = struct.unpack("B"*len(payload[5:]), payload[5:])
logger.info("READ: Mem={}, addr=0x{:X}, status=0x{}, data={}".format(id, addr, status, data))
# Find the read request
if id in self._read_requests:
logger.info("READING: We are still interested in request for mem {}".format(id))
rreq = self._read_requests[id]
if status == 0:
if rreq.add_data(addr, payload[5:]):
self._read_requests.pop(id, None)
self.mem_read_cb.call(rreq.mem, rreq.addr, rreq.data)
else:
logger.info("Status {}: resending...".format(status))
rreq.resend()
| hajjboy95/crazyflie-clients-python | lib/cflib/crazyflie/mem.py | Python | gpl-2.0 | 30,173 |
# PyDia Code Generation from UML Diagram
# Copyright (c) 2005 Hans Breuer <hans@breuer.org>
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
import sys, dia
class Klass :
def __init__ (self, name) :
self.name = name
# use a list to preserve the order
self.attributes = []
# a list, as java/c++ support multiple methods with the same name
self.operations = []
self.comment = ""
self.parents = []
self.templates = []
self.inheritance_type = ""
def AddAttribute(self, name, type, visibility, value, comment) :
self.attributes.append ((name, (type, visibility, value, comment)))
def AddOperation(self, name, type, visibility, params, inheritance_type, comment, class_scope) :
self.operations.append((name,(type, visibility, params, inheritance_type, comment, class_scope)))
def SetComment(self, s) :
self.comment = s
def AddParrent(self, parrent):
self.parents.append(parrent)
def AddTemplate(self, template):
self.templates.append(template)
def SetInheritance_type(self, inheritance_type):
self.inheritance_type = inheritance_type
class ObjRenderer :
"Implements the Object Renderer Interface and transforms diagram into its internal representation"
def __init__ (self) :
# an empty dictionary of classes
self.klasses = {}
self.arrows = []
self.filename = ""
def begin_render (self, data, filename) :
self.filename = filename
# not only reset the filename but also the other state, otherwise we would accumulate information through every export
self.klasses = {}
self.arrows = []
for layer in data.layers :
            # for the moment ignore layer info. But we could use this to spread across different files
for o in layer.objects :
if o.type.name == "UML - Class" :
#print o.properties["name"].value
k = Klass (o.properties["name"].value)
k.SetComment(o.properties["comment"].value)
if o.properties["abstract"].value:
k.SetInheritance_type("abstract")
if o.properties["template"].value:
k.SetInheritance_type("template")
for op in o.properties["operations"].value :
# op : a tuple with fixed placing, see: objects/UML/umloperations.c:umloperation_props
# (name, type, comment, stereotype, visibility, inheritance_type, class_scope, params)
params = []
for par in op[8] :
# par : again fixed placement, see objects/UML/umlparameter.c:umlparameter_props
# (name, type, value, comment, kind)
params.append((par[0], par[1], par[2], par[3], par[4]))
k.AddOperation (op[0], op[1], op[4], params, op[5], op[2], op[7])
#print o.properties["attributes"].value
for attr in o.properties["attributes"].value :
# see objects/UML/umlattributes.c:umlattribute_props
#print "\t", attr[0], attr[1], attr[4]
# name, type, value, comment, visibility, abstract, class_scope
k.AddAttribute(attr[0], attr[1], attr[4], attr[2], attr[3])
self.klasses[o.properties["name"].value] = k
#Connections
elif o.type.name == "UML - Association" :
# should already have got attributes relation by names
pass
# other UML objects which may be interesting
# UML - Note, UML - LargePackage, UML - SmallPackage, UML - Dependency, ...
edges = {}
for layer in data.layers :
for o in layer.objects :
for c in o.connections:
for n in c.connected:
if not n.type.name in ("UML - Generalization", "UML - Realizes"):
continue
if str(n) in edges:
continue
edges[str(n)] = None
if not (n.handles[0].connected_to and n.handles[1].connected_to):
continue
par = n.handles[0].connected_to.object
chi = n.handles[1].connected_to.object
                    if not (par.type.name == "UML - Class" and chi.type.name == "UML - Class"):
continue
par_name = par.properties["name"].value
chi_name = chi.properties["name"].value
if n.type.name == "UML - Generalization":
self.klasses[chi_name].AddParrent(par_name)
else: self.klasses[chi_name].AddTemplate(par_name)
def end_render(self) :
# without this we would accumulate info from every pass
self.attributes = []
self.operations = []
class PyRenderer(ObjRenderer) :
def __init__(self) :
ObjRenderer.__init__(self)
def end_render(self) :
f = open(self.filename, "w")
for sk in self.klasses.keys() :
parents = self.klasses[sk].parents + self.klasses[sk].templates
if not parents:
f.write ("class %s :\n" % (sk,))
else:
f.write ("class %s (%s) :\n" % (sk,", ".join(parents)))
k = self.klasses[sk]
if len(k.comment) > 0 :
f.write ("\t'''" + k.comment + "'''\n")
f.write ("\tdef __init__(self) :\n")
            for sa, attr in k.attributes :
                value = attr[2] == "" and "None" or attr[2]
                f.write("\t\tself.%s = %s # %s\n" % (sa, value, attr[0]))
            if not k.attributes :
                f.write("\t\tpass\n")
for so, op in k.operations :
# we only need the parameter names
pars = "self"
for p in op[2] :
pars = pars + ", " + p[0]
f.write("\tdef %s (%s) :\n" % (so, pars))
if op[4]: f.write("\t\t\"\"\" %s \"\"\"\n" % op[4])
f.write("\t\t# returns %s\n" % (op[0], ))
f.write("\t\tpass\n")
f.close()
ObjRenderer.end_render(self)
class CxxRenderer(ObjRenderer) :
def __init__(self) :
ObjRenderer.__init__(self)
def end_render(self) :
f = open(self.filename, "w")
f.write("/* generated by dia/codegen.py */\n")
# declaration
for sk in self.klasses.keys() :
k = self.klasses[sk]
if len(k.comment) > 0 :
f.write ("/*" + k.comment + "*/\n")
if len(k.parents) > 0 :
f.write ("class %s : %s \n{\n" % (sk, ", ".join(k.parents)))
else :
f.write ("class %s \n{\n" % (sk,))
# first sort by visibility
ops = [[], [], [], []]
for so, (t, v, p, i, c, s) in k.operations :
ops[v].append((t,so,p))
vars = [[], [], [], []]
for sa, (t, vi, va, vc) in k.attributes :
#TODO: use 'va'=value 'vc'=comment
vars[vi].append((t, sa))
visibilities = ("public:", "private:", "protected:", "/* implementation: */")
for v in [0,2,1,3] :
if len(ops[v]) == 0 and len(vars[v]) == 0 :
continue
f.write ("%s\n" % visibilities[v])
for op in ops[v] :
# detect ctor/dtor
so = ""
if sk == op[1] or ("~" + sk) == op[1] :
so = "\t%s (" % (op[1])
else :
so = "\t%s %s (" % (op[0], op[1])
f.write (so)
# align parameters with the opening brace
n = len(so)
i = 0
m = len(op[2]) - 1
for p in op[2] :
linefeed = ",\n\t" + " " * (n - 1)
if i == m :
linefeed = ""
f.write ("%s %s%s" % (p[1], p[0], linefeed))
i = i + 1
f.write(");\n")
for var in vars[v] :
f.write("\t%s %s;\n" % (var[0], var[1]))
f.write ("};\n\n")
# implementation
# ...
f.close()
ObjRenderer.end_render(self)
# #############################################################################
# PascalRenderer: export Dia UML diagram to Object Pascal (Free Pascal, Delphi)
#
# Please follow some "drawing guidelines" and "naming conventions" so that the
# exporter can do its job.
# - Use "UML - Generalization" arrows for class inheritance.
# - Use "UML - Realizes" arrows when implementing an interface.
# - Set a class to be "abstract" to denote it is an interface definition.
# - Set Inheritance Type to "abstract" for 'virtual; abstract;' methods, set
# it to "virtual" for 'virtual;' methods.
# - Array fields are automatically recognized. If the name ends with "[]"
# an 'Array of' is used. If the name uses a number with "[1234]", an
# 'Array[0..1233] of' is used. If the name uses a constant with
# "[MaxEntries]" an 'Array[0..MaxEntries-1] of' is written.
# - To inherit from classes which are not drawn (e.g. LCL/VCL classes),
#   name the class with the parent class in parentheses (e.g.
#   "TMainWin(TForm)").
#
# Features
# - Inheriting from one class and implementing multiple interfaces is
# supported.
# - Comments for classes, attributes and operations are supported. They are
# put in the line before the method declaration with a '///' style comment
# (Doxygen-like).
# - Method parameter directions are supported (-> 'Var', 'Out').
# - Method parameter default values are supported.
# - 'Function' and 'Procedure' are automatically recognized by whether a
# return type exists or not.
# - The order of classes is alphabetical
# - The order of attributes and operations is preserved
# - Prints a list of forward declarations of all classes at the beginning
# to avoid declaration order problems.
#
# TODO:
# - Automatically use the keyword "override" instead of "virtual" in
# descendant classes.
# - Automatically define 'Properties'. Unfortunately the UML standard
# doesn't support this and so the Dia dialog has no option to specify
# this. So a "code" has to be used.
# - Mark/recognize Constructors and Destructors
# - Write comments for method parameters (e.g. by using a big doxygen
# comment '(** ... *)' before the method)
# - Use "Packages" to split the classes in separate 'Unit's.
# - Beautify and comment the export code. Using arrays with "magic number
# indexes" for certain fields is bad and tedious to work with.
# - Support defining global constants.
# - Support defining global types (especially for enums, arrays,
# records, ...).
# - Apply some sanity checks to the UML diagram:
# - multiple inheritance is forbidden
# - if implementing an interface, all required methods must be
# implemented; alternative: just put all methods there, so the UML
# drawer doesn't have to write them twice
# - visibility for all methods of an interfaces must be "public"
# - don't write the visibility specifier 'public' for interfaces
# - no "Attributes" for interface definitions, but properties are
# allowed
# - default values for method parameters must be the last parameters
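# Illustrative example of the array naming rules above (hypothetical
# attribute names, matching the code in end_render below):
#   "Entries[MaxEntries]" of type Integer  ->  Entries : Array[0..MaxEntries-1] of Integer;
#   "Buf[16]" of type Byte                 ->  Buf : Array[0..15] of Byte;
#   "Values[]" of type Real                ->  Values : Array of Real;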
class PascalRenderer(ObjRenderer) :
def __init__(self) :
ObjRenderer.__init__(self)
def end_render(self) :
f = open(self.filename, "w")
f.write("/* generated by dia/codegen.py */\n")
f.write("Type\n")
# classes
class_names = self.klasses.keys()
class_names.sort()
# forward declarations of all classes
for sk in class_names :
k = self.klasses[sk]
# class declaration
if k.inheritance_type == "abstract" :
f.write (" %s = interface;\n" % (sk))
else :
f.write (" %s = class;\n" % (sk))
f.write("\n");
# class declarations
for sk in class_names :
k = self.klasses[sk]
# comment
if len(k.comment) > 0 :
f.write(" /// %s\n" % (k.comment))
# class declaration
if k.inheritance_type == "abstract" :
f.write (" %s = interface" % (sk))
else :
f.write (" %s = class %s" % (sk, k.inheritance_type))
# inherited classes / implemented interfaces
p = []
if k.parents :
p.append(k.parents[0])
if k.templates :
p.append(",".join(k.templates))
if len(p) > 0 :
f.write("(%s)" % ",".join(p))
f.write ("\n")
# first sort by visibility
ops = [[], [], [], [], [], []]
for op_name, (op_type, op_visibility, op_params, op_inheritance, op_comment, op_class_scope) in k.operations :
ops[op_visibility].append((op_type, op_name, op_params, op_comment, op_inheritance))
vars = [[], [], [], []]
for var_name, (var_type, var_visibility, var_value, var_comment) in k.attributes : # name, type, visibility, value, comment
vars[var_visibility].append((var_type, var_name, var_value, var_comment))
visibilities = ("public", "private", "protected", "/* implementation */")
for v in [1,2,0,3] :
if len(ops[v]) == 0 and len(vars[v]) == 0 :
continue
# visibility prefix
f.write (" %s\n" % visibilities[v])
# variables
for var in vars[v] :
# comment
if len(var[3]) > 0 :
f.write (" /// %s\n" % var[3])
if var[1].endswith("]") :
# array; check if this is dynamic or with defined size
i = var[1].find("[")
varname = var[1]
arraysize = varname[i+1:-1]
varname = varname[:i]
if len(arraysize) > 0 :
# array with defined size
if arraysize.find("..") > 0 :
f.write(" %s : Array[%s] of %s;\n" % (varname, arraysize, var[0]))
elif arraysize.isdigit() :
arraysize = int(arraysize)-1
f.write(" %s : Array[0..%d] of %s;\n" % (varname, arraysize, var[0]))
else :
f.write(" %s : Array[0..%s-1] of %s;\n" % (varname, arraysize, var[0]))
else :
# dynamic size
f.write(" %s : Array of %s;\n" % (varname, var[0]))
else :
# normal variable
f.write(" %s : %s;\n" % (var[1], var[0]))
# operations
for op in ops[v] :
if len(op[3]) > 0 :
f.write (" /// %s\n" % op[3])
if len(op[0]) == 0 :
f.write (" Procedure %s" % op[1])
else :
f.write (" Function %s" % op[1])
if len(op[2]) > 0 :
f.write ("(")
i = 0
m = len(op[2]) - 1
for p in op[2] :
if p[4] == 2 :
f.write ("Out ")
elif p[4] == 3 :
f.write ("Var ")
f.write ("%s:%s" % (p[0], p[1]))
if len(p[2]) > 0 :
f.write (":=%s" % p[2])
if i != m :
f.write(";")
i = i + 1
f.write (")")
if len(op[0]) == 0 :
f.write(";")
else :
f.write (" : %s;" % op[0])
# inheritance type
if op[4] == 0 :
f.write (" virtual; abstract;");
elif op[4] == 1 :
f.write (" virtual;");
f.write ("\n")
f.write (" End;\n\n")
# implementation
# ...
f.close()
ObjRenderer.end_render(self)
class JavaRenderer(ObjRenderer) :
def __init__(self) :
ObjRenderer.__init__(self)
def end_render(self) :
f = open(self.filename, "w")
visibilities = {0:"public", 2:"private", 1:"protected"}
for name, klass in self.klasses.iteritems() :
if len(klass.comment) > 0 :
f.write ("/*" + klass.comment + "*/\n")
if klass.inheritance_type == "template": classtype = "interface"
elif klass.inheritance_type == "abstract": classtype = "abstract class"
else: classtype = "class"
f.write ("%s %s" % (classtype, name))
if klass.parents:
f.write (" extends %s" % klass.parents[0])
if klass.templates:
f.write (" implements %s" % ", ".join(klass.templates))
f.write(" {\n")
for attrname, (type, visibility, value, comment) in klass.attributes :
#TODO: use comment
if visibility in visibilities:
vis = visibilities[visibility]+" "
else: vis = ""
f.write("\t%s%s %s" % (vis, type, attrname))
if value != "": f.write(" = %s" % value)
f.write(";\n")
if not klass.inheritance_type == "template":
f.write ("\n\tpublic %s() {\n\t\t\n\t}\n\n" % name)
            # We should automatically implement abstract parent and interface methods
parmethods = []
if klass.parents:
parmethods = [(n,m[:3]+(1,)+m[4:]) for n,m in \
self.klasses[klass.parents[0]].operations if m[3] == 0]
for template in klass.templates:
parmethods.extend(self.klasses[template].operations)
for pName, pMethod in parmethods:
pTypes = [p[1] for p in pMethod[2]]
for name, pars in [(n,m[2]) for n,m in klass.operations]:
types = [p[1] for p in pars]
if pars == pMethod[2] and types == pTypes:
break
else: klass.operations.append((pName,pMethod))
for methodname, method in klass.operations :
if method[4]: f.write("\t/** %s */\n" % method[4])
# if there are no parameter names, something else should appear
pars = []
v = ord("a")
for name, type, value, comment, kind in method[2]:
#TODO: also use: value, comment, kind
if not name:
pars.append((type,chr(v)))
v += 1
else: pars.append((type,name))
pars = ", ".join([type+" "+name for type, name in pars])
vis = method[1] in visibilities and visibilities[method[1]] or ""
returntype = method[0]=="" and "void" or method[0]
inheritance_type = method[3]==0 and "abstract " or ""
static = method[4] and "static " or ""
f.write("\t%s %s%s%s %s (%s)" % (vis, static, inheritance_type, returntype, methodname, pars))
if klass.inheritance_type == "template" or method[3]==0:
f.write(";\n\n")
else: f.write(" {\n\t\t\n\t}\n\n")
f.write ("}\n\n")
f.close()
ObjRenderer.end_render(self)
# dia-python keeps a reference to the renderer class and uses it on demand
dia.register_export ("PyDia Code Generation (Python)", "py", PyRenderer())
dia.register_export ("PyDia Code Generation (C++)", "cxx", CxxRenderer())
dia.register_export ("PyDia Code Generation (Pascal)", "pas", PascalRenderer())
dia.register_export ("PyDia Code Generation (Java)", "java", JavaRenderer())
| montsuqi/monpe | plug-ins/python/codegen.py | Python | gpl-2.0 | 17,391 |
'''OpenGL extension EXT.vertex_weighting
This module customises the behaviour of the
OpenGL.raw.GL.EXT.vertex_weighting to provide a more
Python-friendly API
Overview (from the spec)
The intent of this extension is to provide a means for blending
geometry based on two slightly differing modelview matrices.
The blending is based on a vertex weighting that can change on a
per-vertex basis. This provides a primitive form of skinning.
A second modelview matrix transform is introduced. When vertex
weighting is enabled, the incoming vertex object coordinates are
transformed by both the primary and secondary modelview matrices;
likewise, the incoming normal coordinates are transformed by the
inverses of both the primary and secondary modelview matrices.
The resulting two position coordinates and two normal coordinates
are blended based on the per-vertex vertex weight and then combined
by addition. The transformed, weighted, and combined vertex position
and normal are then used by OpenGL as the eye-space position and
normal for lighting, texture coordinate generation, clipping,
and further vertex transformation.
The official definition of this extension is available here:
http://www.opengl.org/registry/specs/EXT/vertex_weighting.txt
'''
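# An illustrative restatement of the blend described above (math only, not
# an API of this wrapper): with per-vertex weight w, primary modelview M1
# and secondary M2, the eye-space position for object coordinates v is
#     p_eye = w * (M1 * v) + (1 - w) * (M2 * v)
# and the two transformed normals are blended with the same weights before
# being combined and used for lighting.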
from OpenGL import platform, constants, constant, arrays
from OpenGL import extensions, wrapper
from OpenGL.GL import glget
import ctypes
from OpenGL.raw.GL.EXT.vertex_weighting import *
### END AUTOGENERATED SECTION
| D4wN/brickv | src/build_data/windows/OpenGL/GL/EXT/vertex_weighting.py | Python | gpl-2.0 | 1,493 |
### Author: Dag Wieers <dag$wieers,com>
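# Note: dstat plugins are executed inside dstat's own namespace, which is
# why there are no imports here; dstat, dpopen, greppipe, op, elapsed and
# the os module are all provided by the host process.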
global mysql_options
mysql_options = os.getenv('DSTAT_MYSQL')
class dstat_plugin(dstat):
def __init__(self):
self.name = 'innodb ops'
self.nick = ('ins', 'upd', 'del', 'rea')
self.vars = ('inserted', 'updated', 'deleted', 'read')
self.type = 'f'
self.width = 3
self.scale = 1000
def check(self):
if os.access('/usr/bin/mysql', os.X_OK):
try:
self.stdin, self.stdout, self.stderr = dpopen('/usr/bin/mysql -n %s' % mysql_options)
except IOError:
raise Exception, 'Cannot interface with MySQL binary'
return True
raise Exception, 'Needs MySQL binary'
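    # The status line grepped in extract() normally looks like (typical
    # InnoDB status output, values illustrative):
    #   Number of rows inserted 50678311, updated 66425915, deleted 20605903, read 454561562
    # which is why the token positions l[4], l[6], l[8] and l[10] are used.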
def extract(self):
try:
self.stdin.write('show engine innodb status\G\n')
line = greppipe(self.stdout, 'Number of rows inserted')
if line:
l = line.split()
self.set2['inserted'] = int(l[4].rstrip(','))
self.set2['updated'] = int(l[6].rstrip(','))
self.set2['deleted'] = int(l[8].rstrip(','))
self.set2['read'] = int(l[10])
for name in self.vars:
self.val[name] = (self.set2[name] - self.set1[name]) * 1.0 / elapsed
if step == op.delay:
self.set1.update(self.set2)
except IOError, e:
if op.debug > 1: print '%s: lost pipe to mysql, %s' % (self.filename, e)
for name in self.vars: self.val[name] = -1
except Exception, e:
            if op.debug > 1: print '%s: exception %s' % (self.filename, e)
for name in self.vars: self.val[name] = -1
# vim:ts=4:sw=4:et
| nckx/dstat | plugins/dstat_innodb_ops.py | Python | gpl-2.0 | 1,728 |
#
# The abstract class for annotation module.
#
class Module:
def __init__(self):
'''To instantiate an annotation module'''
pass
def transform(self, leader_info, annot_body_code, trailer_code, lang):
'''
The transformation procedure that is used to transform the annotated code
region.
The input parameters:
leader_info a list that contains information about the leader annotation
= (code, indent, line_no, module_name, module_body)
where:
code the code of the leader annotation
        indent the indentation preceding the leader annotation
line_no the line number of the leader annotation is located in
the source file (for debugging purposes)
module_name the name of the annotation module
        module_body the body code of the annotation module
annot_body_code the code of the annotation body
trailer_code the code of the trailer annotation
lang the language of the source code (see "src/main")
The returned value: the transformed code (in string)
'''
raise NotImplementedError('%s: unimplemented abstract function "transform"' %
(self.__class__.__name__))
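# A minimal concrete module sketching the contract above; the class name and
# the pass-through behaviour are illustrative only.
class IdentityModule(Module):
    def transform(self, leader_info, annot_body_code, trailer_code, lang):
        '''Return the annotated region unchanged, leader and trailer included'''
        (code, indent, line_no, module_name, module_body) = leader_info
        return code + annot_body_code + trailer_code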
| tajkhan/pluto-pocc | annotations/module/module.py | Python | gpl-3.0 | 1,420 |
#! /usr/bin/env python
import sys
from aubio import pvoc, source
from numpy import zeros, hstack
def get_waveform_plot(filename, samplerate = 0, block_size = 4096, ax = None):
import matplotlib.pyplot as plt
if not ax:
fig = plt.figure()
ax = fig.add_subplot(111)
hop_s = block_size
allsamples_max = zeros(0,)
downsample = 2**4 # to plot n samples / hop_s
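    # each hop of hop_s samples is collapsed to `downsample` peak values
    # below (reshape + column-wise max), keeping the waveform envelope
    # without plotting every sample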
a = source(filename, samplerate, hop_s) # source file
if samplerate == 0: samplerate = a.samplerate
total_frames = 0
while True:
samples, read = a()
# keep some data to plot it later
new_maxes = (abs(samples.reshape(hop_s/downsample, downsample))).max(axis=0)
allsamples_max = hstack([allsamples_max, new_maxes])
total_frames += read
if read < hop_s: break
allsamples_max = (allsamples_max > 0) * allsamples_max
allsamples_max_times = [ ( float (t) / downsample ) * hop_s for t in range(len(allsamples_max)) ]
ax.plot(allsamples_max_times, allsamples_max, '-b')
ax.plot(allsamples_max_times, -allsamples_max, '-b')
ax.axis(xmin = allsamples_max_times[0], xmax = allsamples_max_times[-1])
set_xlabels_sample2time(ax, allsamples_max_times[-1], samplerate)
return ax
def set_xlabels_sample2time(ax, latest_sample, samplerate):
ax.axis(xmin = 0, xmax = latest_sample)
if latest_sample / float(samplerate) > 60:
ax.set_xlabel('time (mm:ss)')
ax.set_xticklabels([ "%02d:%02d" % (t/float(samplerate)/60, (t/float(samplerate))%60) for t in ax.get_xticks()[:-1]], rotation = 50)
else:
ax.set_xlabel('time (ss.mm)')
ax.set_xticklabels([ "%02d.%02d" % (t/float(samplerate), 100*((t/float(samplerate))%1) ) for t in ax.get_xticks()[:-1]], rotation = 50)
if __name__ == '__main__':
import matplotlib.pyplot as plt
if len(sys.argv) < 2:
print "Usage: %s <filename>" % sys.argv[0]
else:
for soundfile in sys.argv[1:]:
get_waveform_plot(soundfile)
# display graph
plt.show()
| owenwuef/aubio | python/demos/demo_waveform_plot.py | Python | gpl-3.0 | 2,074 |
import unittest
import warnings
import sys
from test import test_support
class TestSpecifics(unittest.TestCase):
def test_debug_assignment(self):
# catch assignments to __debug__
self.assertRaises(SyntaxError, compile, '__debug__ = 1', '?', 'single')
import __builtin__
prev = __builtin__.__debug__
setattr(__builtin__, '__debug__', 'sure')
setattr(__builtin__, '__debug__', prev)
def test_argument_handling(self):
# detect duplicate positional and keyword arguments
self.assertRaises(SyntaxError, eval, 'lambda a,a:0')
self.assertRaises(SyntaxError, eval, 'lambda a,a=1:0')
self.assertRaises(SyntaxError, eval, 'lambda a=1,a=1:0')
try:
exec 'def f(a, a): pass'
self.fail("duplicate arguments")
except SyntaxError:
pass
try:
exec 'def f(a = 0, a = 1): pass'
self.fail("duplicate keyword arguments")
except SyntaxError:
pass
try:
exec 'def f(a): global a; a = 1'
self.fail("variable is global and local")
except SyntaxError:
pass
def test_syntax_error(self):
self.assertRaises(SyntaxError, compile, "1+*3", "filename", "exec")
def test_none_keyword_arg(self):
self.assertRaises(SyntaxError, compile, "f(None=1)", "<string>", "exec")
def test_duplicate_global_local(self):
try:
exec 'def f(a): global a; a = 1'
self.fail("variable is global and local")
except SyntaxError:
pass
def test_exec_with_general_mapping_for_locals(self):
class M:
"Test mapping interface versus possible calls from eval()."
def __getitem__(self, key):
if key == 'a':
return 12
raise KeyError
def __setitem__(self, key, value):
self.results = (key, value)
def keys(self):
return list('xyz')
m = M()
g = globals()
exec 'z = a' in g, m
self.assertEqual(m.results, ('z', 12))
try:
exec 'z = b' in g, m
except NameError:
pass
else:
self.fail('Did not detect a KeyError')
exec 'z = dir()' in g, m
self.assertEqual(m.results, ('z', list('xyz')))
exec 'z = globals()' in g, m
self.assertEqual(m.results, ('z', g))
exec 'z = locals()' in g, m
self.assertEqual(m.results, ('z', m))
try:
exec 'z = b' in m
except TypeError:
pass
else:
self.fail('Did not validate globals as a real dict')
class A:
"Non-mapping"
pass
m = A()
try:
exec 'z = a' in g, m
except TypeError:
pass
else:
self.fail('Did not validate locals as a mapping')
# Verify that dict subclasses work as well
class D(dict):
def __getitem__(self, key):
if key == 'a':
return 12
return dict.__getitem__(self, key)
d = D()
exec 'z = a' in g, d
self.assertEqual(d['z'], 12)
def test_extended_arg(self):
longexpr = 'x = x or ' + '-x' * 2500
code = '''
def f(x):
%s
%s
%s
%s
%s
%s
%s
%s
%s
%s
# the expressions above have no effect, x == argument
while x:
x -= 1
# EXTENDED_ARG/JUMP_ABSOLUTE here
return x
''' % ((longexpr,)*10)
exec code
self.assertEqual(f(5), 0)
def test_complex_args(self):
def comp_args((a, b)):
return a,b
self.assertEqual(comp_args((1, 2)), (1, 2))
def comp_args((a, b)=(3, 4)):
return a, b
self.assertEqual(comp_args((1, 2)), (1, 2))
self.assertEqual(comp_args(), (3, 4))
def comp_args(a, (b, c)):
return a, b, c
self.assertEqual(comp_args(1, (2, 3)), (1, 2, 3))
def comp_args(a=2, (b, c)=(3, 4)):
return a, b, c
self.assertEqual(comp_args(1, (2, 3)), (1, 2, 3))
self.assertEqual(comp_args(), (2, 3, 4))
def test_argument_order(self):
try:
exec 'def f(a=1, (b, c)): pass'
self.fail("non-default args after default")
except SyntaxError:
pass
def test_float_literals(self):
# testing bad float literals
self.assertRaises(SyntaxError, eval, "2e")
self.assertRaises(SyntaxError, eval, "2.0e+")
self.assertRaises(SyntaxError, eval, "1e-")
self.assertRaises(SyntaxError, eval, "3-4e/21")
def test_indentation(self):
        # testing compile() of indented block w/o trailing newline
s = """
if 1:
if 2:
pass"""
compile(s, "<string>", "exec")
# This test is probably specific to CPython and may not generalize
# to other implementations. We are trying to ensure that when
# the first line of code starts after 256, correct line numbers
# in tracebacks are still produced.
def test_leading_newlines(self):
s256 = "".join(["\n"] * 256 + ["spam"])
co = compile(s256, 'fn', 'exec')
self.assertEqual(co.co_firstlineno, 257)
self.assertEqual(co.co_lnotab, '')
def test_literals_with_leading_zeroes(self):
for arg in ["077787", "0xj", "0x.", "0e", "090000000000000",
"080000000000000", "000000000000009", "000000000000008"]:
self.assertRaises(SyntaxError, eval, arg)
self.assertEqual(eval("0777"), 511)
self.assertEqual(eval("0777L"), 511)
self.assertEqual(eval("000777"), 511)
self.assertEqual(eval("0xff"), 255)
self.assertEqual(eval("0xffL"), 255)
self.assertEqual(eval("0XfF"), 255)
self.assertEqual(eval("0777."), 777)
self.assertEqual(eval("0777.0"), 777)
self.assertEqual(eval("000000000000000000000000000000000000000000000000000777e0"), 777)
self.assertEqual(eval("0777e1"), 7770)
self.assertEqual(eval("0e0"), 0)
self.assertEqual(eval("0000E-012"), 0)
self.assertEqual(eval("09.5"), 9.5)
self.assertEqual(eval("0777j"), 777j)
self.assertEqual(eval("00j"), 0j)
self.assertEqual(eval("00.0"), 0)
self.assertEqual(eval("0e3"), 0)
self.assertEqual(eval("090000000000000."), 90000000000000.)
self.assertEqual(eval("090000000000000.0000000000000000000000"), 90000000000000.)
self.assertEqual(eval("090000000000000e0"), 90000000000000.)
self.assertEqual(eval("090000000000000e-0"), 90000000000000.)
self.assertEqual(eval("090000000000000j"), 90000000000000j)
self.assertEqual(eval("000000000000007"), 7)
self.assertEqual(eval("000000000000008."), 8.)
self.assertEqual(eval("000000000000009."), 9.)
def test_unary_minus(self):
# Verify treatment of unary minus on negative numbers SF bug #660455
if sys.maxint == 2147483647:
# 32-bit machine
all_one_bits = '0xffffffff'
self.assertEqual(eval(all_one_bits), 4294967295L)
self.assertEqual(eval("-" + all_one_bits), -4294967295L)
elif sys.maxint == 9223372036854775807:
# 64-bit machine
all_one_bits = '0xffffffffffffffff'
self.assertEqual(eval(all_one_bits), 18446744073709551615L)
self.assertEqual(eval("-" + all_one_bits), -18446744073709551615L)
else:
self.fail("How many bits *does* this machine have???")
        # Verify treatment of constant folding on -(sys.maxint+1)
# i.e. -2147483648 on 32 bit platforms. Should return int, not long.
self.assertTrue(isinstance(eval("%s" % (-sys.maxint - 1)), int))
self.assertTrue(isinstance(eval("%s" % (-sys.maxint - 2)), long))
if sys.maxint == 9223372036854775807:
def test_32_63_bit_values(self):
a = +4294967296 # 1 << 32
b = -4294967296 # 1 << 32
c = +281474976710656 # 1 << 48
d = -281474976710656 # 1 << 48
e = +4611686018427387904 # 1 << 62
f = -4611686018427387904 # 1 << 62
g = +9223372036854775807 # 1 << 63 - 1
h = -9223372036854775807 # 1 << 63 - 1
for variable in self.test_32_63_bit_values.func_code.co_consts:
if variable is not None:
self.assertTrue(isinstance(variable, int))
def test_sequence_unpacking_error(self):
# Verify sequence packing/unpacking with "or". SF bug #757818
i,j = (1, -1) or (-1, 1)
self.assertEqual(i, 1)
self.assertEqual(j, -1)
def test_none_assignment(self):
stmts = [
'None = 0',
'None += 0',
'__builtins__.None = 0',
'def None(): pass',
'class None: pass',
'(a, None) = 0, 0',
'for None in range(10): pass',
'def f(None): pass',
]
for stmt in stmts:
stmt += "\n"
self.assertRaises(SyntaxError, compile, stmt, 'tmp', 'single')
self.assertRaises(SyntaxError, compile, stmt, 'tmp', 'exec')
def test_import(self):
succeed = [
'import sys',
'import os, sys',
'import os as bar',
'import os.path as bar',
'from __future__ import nested_scopes, generators',
'from __future__ import (nested_scopes,\ngenerators)',
'from __future__ import (nested_scopes,\ngenerators,)',
'from sys import stdin, stderr, stdout',
'from sys import (stdin, stderr,\nstdout)',
'from sys import (stdin, stderr,\nstdout,)',
'from sys import (stdin\n, stderr, stdout)',
'from sys import (stdin\n, stderr, stdout,)',
'from sys import stdin as si, stdout as so, stderr as se',
'from sys import (stdin as si, stdout as so, stderr as se)',
'from sys import (stdin as si, stdout as so, stderr as se,)',
]
fail = [
'import (os, sys)',
'import (os), (sys)',
'import ((os), (sys))',
'import (sys',
'import sys)',
'import (os,)',
'import os As bar',
'import os.path a bar',
'from sys import stdin As stdout',
'from sys import stdin a stdout',
'from (sys) import stdin',
'from __future__ import (nested_scopes',
'from __future__ import nested_scopes)',
'from __future__ import nested_scopes,\ngenerators',
'from sys import (stdin',
'from sys import stdin)',
'from sys import stdin, stdout,\nstderr',
'from sys import stdin si',
            'from sys import stdin,',
'from sys import (*)',
'from sys import (stdin,, stdout, stderr)',
'from sys import (stdin, stdout),',
]
for stmt in succeed:
compile(stmt, 'tmp', 'exec')
for stmt in fail:
self.assertRaises(SyntaxError, compile, stmt, 'tmp', 'exec')
def test_for_distinct_code_objects(self):
# SF bug 1048870
def f():
f1 = lambda x=1: x
f2 = lambda x=2: x
return f1, f2
f1, f2 = f()
self.assertNotEqual(id(f1.func_code), id(f2.func_code))
def test_unicode_encoding(self):
code = u"# -*- coding: utf-8 -*-\npass\n"
self.assertRaises(SyntaxError, compile, code, "tmp", "exec")
def test_subscripts(self):
# SF bug 1448804
# Class to make testing subscript results easy
class str_map(object):
def __init__(self):
self.data = {}
def __getitem__(self, key):
return self.data[str(key)]
def __setitem__(self, key, value):
self.data[str(key)] = value
def __delitem__(self, key):
del self.data[str(key)]
def __contains__(self, key):
return str(key) in self.data
d = str_map()
# Index
d[1] = 1
self.assertEqual(d[1], 1)
d[1] += 1
self.assertEqual(d[1], 2)
del d[1]
self.assertEqual(1 in d, False)
# Tuple of indices
d[1, 1] = 1
self.assertEqual(d[1, 1], 1)
d[1, 1] += 1
self.assertEqual(d[1, 1], 2)
del d[1, 1]
self.assertEqual((1, 1) in d, False)
# Simple slice
d[1:2] = 1
self.assertEqual(d[1:2], 1)
d[1:2] += 1
self.assertEqual(d[1:2], 2)
del d[1:2]
self.assertEqual(slice(1, 2) in d, False)
# Tuple of simple slices
d[1:2, 1:2] = 1
self.assertEqual(d[1:2, 1:2], 1)
d[1:2, 1:2] += 1
self.assertEqual(d[1:2, 1:2], 2)
del d[1:2, 1:2]
self.assertEqual((slice(1, 2), slice(1, 2)) in d, False)
# Extended slice
d[1:2:3] = 1
self.assertEqual(d[1:2:3], 1)
d[1:2:3] += 1
self.assertEqual(d[1:2:3], 2)
del d[1:2:3]
self.assertEqual(slice(1, 2, 3) in d, False)
# Tuple of extended slices
d[1:2:3, 1:2:3] = 1
self.assertEqual(d[1:2:3, 1:2:3], 1)
d[1:2:3, 1:2:3] += 1
self.assertEqual(d[1:2:3, 1:2:3], 2)
del d[1:2:3, 1:2:3]
self.assertEqual((slice(1, 2, 3), slice(1, 2, 3)) in d, False)
# Ellipsis
d[...] = 1
self.assertEqual(d[...], 1)
d[...] += 1
self.assertEqual(d[...], 2)
del d[...]
self.assertEqual(Ellipsis in d, False)
# Tuple of Ellipses
d[..., ...] = 1
self.assertEqual(d[..., ...], 1)
d[..., ...] += 1
self.assertEqual(d[..., ...], 2)
del d[..., ...]
self.assertEqual((Ellipsis, Ellipsis) in d, False)
def test_main():
test_support.run_unittest(TestSpecifics)
if __name__ == "__main__":
test_main()
|
mancoast/CPythonPyc_test
|
cpython/252_test_compile.py
|
Python
|
gpl-3.0
| 14,209
|
"""Python script to trigger a dump on a MUCK and wait for it.
Uses pyfuzzball repo here: https://github.com/tanabi/pyfuzzball
Change the password, please :)
Returns status code 0 on success, or 1 on failure. Failure occurs only
if another dump happens to be triggered at the exact moment you're
triggering your own dump.
"""
#
# CONFIGURATION - Change these
#
HOST = 'localhost'
PORT = 4201
SSL = False
AUTHTOKEN = 'change-me-please'
#
# LEAVE THIS STUFF ALONE
#
from pyfuzzball.mcp import MCP
import sys
# Open MCP connection
m = MCP(HOST, PORT, SSL, True)
# Negotiate
m.negotiate(['org-fuzzball-dump'])
# Call dump with auth token
m.call('org-fuzzball-dump', 'dump', {
'auth': AUTHTOKEN
})
# Process results
results = m.process()
# results[1] will have the dump output messages most likely. You
# could process these instead of waiting for the MCP event if you
# preferred, but I wrote an MCP parser, so by golly I'm going to
# use it :)
# Keep waiting while results[0] is empty or does not yet contain the
# dump event key.
while not results[0] or 'org-fuzzball-dump' not in results[0]:
results = m.process()
# 'success' should be in the parameters if it worked.
if 'success' in results[0]['org-fuzzball-dump'][0]['parameters']:
sys.exit(0)
else:
sys.exit(1)
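# Example usage (illustrative sketch): after editing HOST/PORT/AUTHTOKEN above,
# run from a shell and branch on the exit status:
#   python trigger-dump.py && echo "dump completed" || echo "dump failed"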
|
revarbat/fuzzball
|
scripts/trigger-dump.py
|
Python
|
gpl-3.0
| 1,212
|
from __future__ import print_function
import os, sys, json, json5, re
import collections
script_directory = os.path.dirname(os.path.abspath(__file__))
template_directory = os.path.abspath(
os.path.join(script_directory, 'template'))
test_root_directory = os.path.abspath(
os.path.join(script_directory, '..', '..', '..'))
def get_template(basename):
with open(os.path.join(template_directory, basename), "r") as f:
return f.read()
def write_file(filename, contents):
with open(filename, "w") as f:
f.write(contents)
def read_nth_line(fp, line_number):
fp.seek(0)
for i, line in enumerate(fp):
if (i + 1) == line_number:
return line
def load_spec_json(path_to_spec):
re_error_location = re.compile('line ([0-9]+) column ([0-9]+)')
with open(path_to_spec, "r") as f:
try:
return json5.load(f, object_pairs_hook=collections.OrderedDict)
except ValueError as ex:
print(ex.message)
match = re_error_location.search(ex.message)
if match:
line_number, column = int(match.group(1)), int(match.group(2))
print(read_nth_line(f, line_number).rstrip())
print(" " * (column - 1) + "^")
sys.exit(1)
class ShouldSkip(Exception):
'''
Raised when the given combination of subresource type, source context type,
delivery type etc. are not supported and we should skip that configuration.
ShouldSkip is expected in normal generator execution (and thus subsequent
generation continues): we first enumerate a broad range of configurations,
and later raise ShouldSkip to filter out unsupported combinations.
ShouldSkip is distinguished from other general errors that cause immediate
termination of the generator and require a fix.
'''
def __init__(self):
pass
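# Illustrative sketch (not part of the original file): callers are expected
# to treat ShouldSkip as a filter rather than an error, e.g.:
#
#   for config in candidate_configs:  # hypothetical enumeration of configs
#       try:
#           delivery = PolicyDelivery.from_json(config, target, supported)
#       except ShouldSkip:
#           continue  # unsupported combination; move on to the next config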
class PolicyDelivery(object):
'''
See `@typedef PolicyDelivery` comments in
`common/security-features/resources/common.sub.js`.
'''
def __init__(self, delivery_type, key, value):
self.delivery_type = delivery_type
self.key = key
self.value = value
def __eq__(self, other):
return type(self) is type(other) and self.__dict__ == other.__dict__
@classmethod
def list_from_json(cls, list, target_policy_delivery,
supported_delivery_types):
# type: (dict, PolicyDelivery, typing.List[str]) -> typing.List[PolicyDelivery]
'''
Parses a JSON object `list` that represents a list of `PolicyDelivery`
and returns a list of `PolicyDelivery` objects; placeholder entries
are also supported (see the `from_json()` comments below or
`common/security-features/README.md`).
Can raise `ShouldSkip`.
'''
if list is None:
return []
out = []
for obj in list:
policy_delivery = PolicyDelivery.from_json(
obj, target_policy_delivery, supported_delivery_types)
# Drop entries with null values.
if policy_delivery.value is None:
continue
out.append(policy_delivery)
return out
@classmethod
def from_json(cls, obj, target_policy_delivery, supported_delivery_types):
# type: (dict, PolicyDelivery, typing.List[str]) -> PolicyDelivery
'''
Parses a JSON object `obj` and returns a `PolicyDelivery` object.
In addition to dicts (in the same format as to_json() outputs),
this method accepts the following placeholders:
"policy":
`target_policy_delivery`
"policyIfNonNull":
`target_policy_delivery` if its value is not None.
"anotherPolicy":
A PolicyDelivery that has the same key as
`target_policy_delivery` but a different value.
The delivery type is selected from `supported_delivery_types`.
Can raise `ShouldSkip`.
'''
if obj == "policy":
policy_delivery = target_policy_delivery
elif obj == "nonNullPolicy":
if target_policy_delivery.value is None:
raise ShouldSkip()
policy_delivery = target_policy_delivery
elif obj == "anotherPolicy":
policy_delivery = target_policy_delivery.get_another_policy(
supported_delivery_types[0])
elif isinstance(obj, dict):
policy_delivery = PolicyDelivery(obj['deliveryType'], obj['key'],
obj['value'])
else:
raise Exception('policy delivery is invalid: ' + obj)
# Omit unsupported combinations of source contexts and delivery type.
if policy_delivery.delivery_type not in supported_delivery_types:
raise ShouldSkip()
return policy_delivery
def to_json(self):
# type: () -> dict
return {
"deliveryType": self.delivery_type,
"key": self.key,
"value": self.value
}
def get_another_policy(self, delivery_type):
# type: (str) -> PolicyDelivery
if self.key == 'referrerPolicy':
if self.value == 'no-referrer':
return PolicyDelivery(delivery_type, self.key, 'unsafe-url')
else:
return PolicyDelivery(delivery_type, self.key, 'no-referrer')
elif self.key == 'mixedContent':
if self.value == 'opt-in':
return PolicyDelivery(delivery_type, self.key, None)
else:
return PolicyDelivery(delivery_type, self.key, 'opt-in')
elif self.key == 'upgradeInsecureRequests':
if self.value == 'upgrade':
return PolicyDelivery(delivery_type, self.key, None)
else:
return PolicyDelivery(delivery_type, self.key, 'upgrade')
else:
raise Exception('delivery key is invalid: ' + self.key)
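# Example (illustrative sketch) of get_another_policy():
#   PolicyDelivery('meta', 'referrerPolicy', 'no-referrer')
#       .get_another_policy('http-rp')
# yields PolicyDelivery('http-rp', 'referrerPolicy', 'unsafe-url'), i.e. the
# same key with a deliberately different value under the requested delivery
# type ('meta' and 'http-rp' are used here only as example delivery types).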
class SourceContext(object):
def __init__(self, source_context_type, policy_deliveries):
# type: (unicode, typing.List[PolicyDelivery]) -> None
self.source_context_type = source_context_type
self.policy_deliveries = policy_deliveries
def __eq__(self, other):
return type(self) is type(other) and self.__dict__ == other.__dict__
@classmethod
def from_json(cls, obj, target_policy_delivery, source_context_schema):
'''
Parses a JSON object `obj` and returns a `SourceContext` object.
`target_policy_delivery` and `source_context_schema` are used for
policy delivery placeholders and filtering out unsupported
delivery types.
Can raise `ShouldSkip`.
'''
source_context_type = obj.get('sourceContextType')
policy_deliveries = PolicyDelivery.list_from_json(
obj.get('policyDeliveries'), target_policy_delivery,
source_context_schema['supported_delivery_type']
[source_context_type])
return SourceContext(source_context_type, policy_deliveries)
def to_json(self):
return {
"sourceContextType": self.source_context_type,
"policyDeliveries": [x.to_json() for x in self.policy_deliveries]
}
class CustomEncoder(json.JSONEncoder):
'''
Used to dump dicts containing `SourceContext`/`PolicyDelivery` into JSON.
'''
def default(self, obj):
if isinstance(obj, SourceContext):
return obj.to_json()
if isinstance(obj, PolicyDelivery):
return obj.to_json()
return json.JSONEncoder.default(self, obj)
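# Example usage (illustrative sketch; 'worker' is a hypothetical source
# context type): serializing a structure that mixes plain dicts with
# SourceContext/PolicyDelivery instances:
#   json.dumps({'sourceContext': SourceContext('worker', [])},
#              cls=CustomEncoder)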
|
asajeffrey/servo
|
tests/wpt/web-platform-tests/common/security-features/tools/util.py
|
Python
|
mpl-2.0
| 7,698
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright 2015 Vauxoo
# Author: Osval Reyes, Yanina Aular
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import api, models
class CrmClaim(models.Model):
_inherit = 'crm.claim'
@api.model
def _get_stock_moves_with_code(self, code='incoming'):
"""
@code: Type of operation code.
        Returns all stock_move records filtered by the given
        operation type.
"""
stockmove = self.env['stock.move']
receipts = self.env['stock.picking.type']
spt_receipts = receipts.search([('code',
'=',
code)])
spt_receipts = [spt.id for spt in spt_receipts]
sm_receipts = stockmove.search([('picking_type_id',
'in',
spt_receipts)])
return sm_receipts
@api.multi
def render_metasearch_view(self):
context = self._context.copy()
context.update({
'active_model': self._name,
'active_ids': self.ids,
'active_id': self.id or False,
})
wizard = self.env['returned.lines.from.serial.wizard'].\
with_context(context).create({})
return wizard.render_metasearch_view()
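# Example (illustrative sketch; 'claim' stands for any crm.claim recordset):
# fetching outgoing moves instead of the default incoming ones:
#   moves = claim._get_stock_moves_with_code(code='outgoing')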
|
Endika/rma
|
crm_rma_lot_mass_return/models/crm_claim.py
|
Python
|
agpl-3.0
| 2,135
|
import factory
from student.tests.factories import UserFactory
from survey.models import SurveyAnswer, SurveyForm
class SurveyFormFactory(factory.DjangoModelFactory):
class Meta(object):
model = SurveyForm
name = 'Test Survey Form'
form = '<form>First name:<input type="text" name="firstname"/></form>'
class SurveyAnswerFactory(factory.DjangoModelFactory):
class Meta(object):
model = SurveyAnswer
user = factory.SubFactory(UserFactory)
form = factory.SubFactory(SurveyFormFactory)
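# Example usage (illustrative sketch): instantiating the factory also builds
# the SubFactory chain, creating a fresh User and SurveyForm for the answer:
#   answer = SurveyAnswerFactory()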
|
edx-solutions/edx-platform
|
lms/djangoapps/survey/tests/factories.py
|
Python
|
agpl-3.0
| 530
|
# -*- coding: utf-8 -*-
#***************************************************************************
#* *
#* Copyright (c) 2014 Yorik van Havre <yorik@uncreated.net> *
#* *
#* This program is free software; you can redistribute it and/or modify *
#* it under the terms of the GNU Lesser General Public License (LGPL) *
#* as published by the Free Software Foundation; either version 2 of *
#* the License, or (at your option) any later version. *
#* for detail see the LICENCE text file. *
#* *
#* This program is distributed in the hope that it will be useful, *
#* but WITHOUT ANY WARRANTY; without even the implied warranty of *
#* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
#* GNU Library General Public License for more details. *
#* *
#* You should have received a copy of the GNU Library General Public *
#* License along with this program; if not, write to the Free Software *
#* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 *
#* USA *
#* *
#***************************************************************************
import FreeCAD,FreeCADGui,Path,PathGui, PathUtils
from PySide import QtCore,QtGui
"""Path Compound Extended object and FreeCAD command"""
# Qt translation handling
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def translate(context, text, disambig=None):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def translate(context, text, disambig=None):
return QtGui.QApplication.translate(context, text, disambig)
class ObjectCompoundExtended:
def __init__(self,obj):
obj.addProperty("App::PropertyString","Description", "Path","An optional description of this compounded operation")
# obj.addProperty("App::PropertySpeed", "FeedRate", "Path","The feed rate of the paths in these compounded operations")
# obj.addProperty("App::PropertyFloat", "SpindleSpeed", "Path","The spindle speed, in revolutions per minute, of the tool used in these compounded operations")
obj.addProperty("App::PropertyLength","SafeHeight", "Path","The safe height for this operation")
obj.addProperty("App::PropertyLength","RetractHeight","Path","The retract height, above top surface of part, between compounded operations inside clamping area")
obj.Proxy = self
def __getstate__(self):
return None
def __setstate__(self,state):
return None
def onChanged(self,obj,prop):
if prop == "Group":
print 'check order'
for child in obj.Group:
if child.isDerivedFrom("Path::Feature"):
child.touch()
def execute(self,obj):
cmds = []
for child in obj.Group:
if child.isDerivedFrom("Path::Feature"):
if obj.UsePlacements:
for c in child.Path.Commands:
cmds.append(c.transform(child.Placement))
else:
cmds.extend(child.Path.Commands)
if cmds:
path = Path.Path(cmds)
obj.Path = path
class ViewProviderCompoundExtended:
def __init__(self,vobj):
vobj.Proxy = self
def attach(self,vobj):
self.Object = vobj.Object
return
def getIcon(self):
return ":/icons/Path-Compound.svg"
def __getstate__(self):
return None
def __setstate__(self,state):
return None
class CommandCompoundExtended:
def GetResources(self):
return {'Pixmap' : 'Path-Compound',
'MenuText': QtCore.QT_TRANSLATE_NOOP("Path_CompoundExtended","Compound"),
'Accel': "P, C",
'ToolTip': QtCore.QT_TRANSLATE_NOOP("Path_CompoundExtended","Creates a Path Compound object")}
def IsActive(self):
        return FreeCAD.ActiveDocument is not None
def Activated(self):
FreeCAD.ActiveDocument.openTransaction(translate("Path_CompoundExtended","Create Compound"))
FreeCADGui.addModule("PathScripts.PathCompoundExtended")
snippet = '''
import Path
import PathScripts
from PathScripts import PathUtils
incl = []
prjexists = False
sel = FreeCADGui.Selection.getSelection()
for s in sel:
if s.isDerivedFrom("Path::Feature"):
incl.append(s)
obj = FreeCAD.ActiveDocument.addObject("Path::FeatureCompoundPython","Compound")
PathScripts.PathCompoundExtended.ObjectCompoundExtended(obj)
PathScripts.PathCompoundExtended.ViewProviderCompoundExtended(obj.ViewObject)
project = PathUtils.addToProject(obj)
if incl:
children = []
p = project.Group
g = obj.Group
for child in incl:
p.remove(child)
children.append(FreeCAD.ActiveDocument.getObject(child.Name))
project.Group = p
g.append(children)
obj.Group = children
'''
FreeCADGui.doCommand(snippet)
FreeCAD.ActiveDocument.commitTransaction()
FreeCAD.ActiveDocument.recompute()
if FreeCAD.GuiUp:
# register the FreeCAD command
FreeCADGui.addCommand('Path_CompoundExtended',CommandCompoundExtended())
FreeCAD.Console.PrintLog("Loading PathCompoundExtended... done\n")
|
timthelion/FreeCAD
|
src/Mod/Path/PathScripts/PathCompoundExtended.py
|
Python
|
lgpl-2.1
| 5,730
|
# (c) Copyright 2014 Brocade Communications Systems Inc.
# All Rights Reserved.
#
# Copyright 2014 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""Unit tests for brcd fc san lookup service."""
import mock
from oslo.config import cfg
import paramiko
from cinder import exception
from cinder.openstack.common import log as logging
from cinder import test
from cinder.volume import configuration as conf
import cinder.zonemanager.drivers.brocade.brcd_fc_san_lookup_service \
as brcd_lookup
from cinder.zonemanager.drivers.brocade import fc_zone_constants
LOG = logging.getLogger(__name__)
nsshow = '20:1a:00:05:1e:e8:e3:29'
switch_data = [' N 011a00;2,3;20:1a:00:05:1e:e8:e3:29;\
20:1a:00:05:1e:e8:e3:29;na']
nsshow_data = ['10:00:8c:7c:ff:52:3b:01', '20:24:00:02:ac:00:0a:50']
_device_map_to_verify = {
'BRCD_FAB_2': {
'initiator_port_wwn_list': ['10008c7cff523b01'],
'target_port_wwn_list': ['20240002ac000a50']}}
class TestBrcdFCSanLookupService(brcd_lookup.BrcdFCSanLookupService,
test.TestCase):
def setUp(self):
super(TestBrcdFCSanLookupService, self).setUp()
self.client = paramiko.SSHClient()
self.configuration = conf.Configuration(None)
self.configuration.set_default('fc_fabric_names', 'BRCD_FAB_2',
'fc-zone-manager')
self.configuration.fc_fabric_names = 'BRCD_FAB_2'
self.create_configuration()
# override some of the functions
def __init__(self, *args, **kwargs):
test.TestCase.__init__(self, *args, **kwargs)
def create_configuration(self):
fc_fabric_opts = []
fc_fabric_opts.append(cfg.StrOpt('fc_fabric_address',
default='10.24.49.100', help=''))
fc_fabric_opts.append(cfg.StrOpt('fc_fabric_user',
default='admin', help=''))
fc_fabric_opts.append(cfg.StrOpt('fc_fabric_password',
default='password', help='',
secret=True))
fc_fabric_opts.append(cfg.IntOpt('fc_fabric_port',
default=22, help=''))
fc_fabric_opts.append(cfg.StrOpt('principal_switch_wwn',
default='100000051e55a100', help=''))
config = conf.Configuration(fc_fabric_opts, 'BRCD_FAB_2')
self.fabric_configs = {'BRCD_FAB_2': config}
@mock.patch.object(paramiko.hostkeys.HostKeys, 'load')
def test_create_ssh_client(self, load_mock):
mock_args = {}
mock_args['known_hosts_file'] = 'dummy_host_key_file'
mock_args['missing_key_policy'] = paramiko.RejectPolicy()
ssh_client = self.create_ssh_client(**mock_args)
self.assertEqual(ssh_client._host_keys_filename, 'dummy_host_key_file')
self.assertTrue(isinstance(ssh_client._policy, paramiko.RejectPolicy))
mock_args = {}
ssh_client = self.create_ssh_client(**mock_args)
self.assertIsNone(ssh_client._host_keys_filename)
self.assertTrue(isinstance(ssh_client._policy, paramiko.WarningPolicy))
@mock.patch.object(brcd_lookup.BrcdFCSanLookupService,
'get_nameserver_info')
def test_get_device_mapping_from_network(self, get_nameserver_info_mock):
initiator_list = ['10008c7cff523b01']
target_list = ['20240002ac000a50', '20240002ac000a40']
with mock.patch.object(self.client, 'connect'):
get_nameserver_info_mock.return_value = (nsshow_data)
device_map = self.get_device_mapping_from_network(
initiator_list, target_list)
self.assertDictMatch(device_map, _device_map_to_verify)
@mock.patch.object(brcd_lookup.BrcdFCSanLookupService, '_get_switch_data')
def test_get_nameserver_info(self, get_switch_data_mock):
ns_info_list = []
ns_info_list_expected = ['20:1a:00:05:1e:e8:e3:29',
'20:1a:00:05:1e:e8:e3:29']
get_switch_data_mock.return_value = (switch_data)
ns_info_list = self.get_nameserver_info()
self.assertEqual(ns_info_list, ns_info_list_expected)
def test__get_switch_data(self):
cmd = fc_zone_constants.NS_SHOW
with mock.patch.object(self.client, 'exec_command') \
as exec_command_mock:
exec_command_mock.return_value = (Stream(),
Stream(nsshow),
Stream())
switch_data = self._get_switch_data(cmd)
self.assertEqual(switch_data, nsshow)
exec_command_mock.assert_called_once_with(cmd)
def test__parse_ns_output(self):
invalid_switch_data = [' N 011a00;20:1a:00:05:1e:e8:e3:29']
return_wwn_list = []
expected_wwn_list = ['20:1a:00:05:1e:e8:e3:29']
return_wwn_list = self._parse_ns_output(switch_data)
self.assertEqual(return_wwn_list, expected_wwn_list)
self.assertRaises(exception.InvalidParameterValue,
self._parse_ns_output, invalid_switch_data)
def test_get_formatted_wwn(self):
wwn_list = ['10008c7cff523b01']
return_wwn_list = []
expected_wwn_list = ['10:00:8c:7c:ff:52:3b:01']
return_wwn_list.append(self.get_formatted_wwn(wwn_list[0]))
self.assertEqual(return_wwn_list, expected_wwn_list)
class Channel(object):
def recv_exit_status(self):
return 0
class Stream(object):
def __init__(self, buffer=''):
self.buffer = buffer
self.channel = Channel()
def readlines(self):
return self.buffer
def close(self):
pass
def flush(self):
self.buffer = ''
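# Channel and Stream above are minimal stand-ins for the (stdin, stdout,
# stderr) triple returned by paramiko's exec_command(), providing just
# enough behavior (readlines/close/flush and a zero exit status) for the
# tests in this module.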
|
e0ne/cinder
|
cinder/tests/zonemanager/test_brcd_fc_san_lookup_service.py
|
Python
|
apache-2.0
| 6,398
|
# Licensed to the Software Freedom Conservancy (SFC) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# module for getting the lan ip address of the computer
import os
import socket
if os.name != "nt":
import fcntl
import struct
def get_interface_ip(ifname):
def _bytes(value, encoding):
try:
return bytes(value, encoding) # Python 3
except TypeError:
return value # Python 2
sckt = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
return socket.inet_ntoa(fcntl.ioctl(
sckt.fileno(),
0x8915, # SIOCGIFADDR
struct.pack('256s', _bytes(ifname[:15], 'utf-8'))
)[20:24])
def get_lan_ip():
if os.environ.get('CI') == 'true':
return '0.0.0.0'
try:
ip = socket.gethostbyname(socket.gethostname())
except Exception:
return '0.0.0.0'
if ip.startswith("127.") and os.name != "nt":
interfaces = ["eth0", "eth1", "eth2", "en0", "en1", "en2", "en3",
"en4", "wlan0", "wlan1", "wifi0", "ath0", "ath1", "ppp0"]
for ifname in interfaces:
try:
ip = get_interface_ip(ifname)
break
except IOError:
pass
return ip
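# Example usage (illustrative sketch; the address shown is hypothetical):
#   print(get_lan_ip())  # e.g. '192.168.1.10' on a typical LAN host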
|
titusfortner/selenium
|
py/test/selenium/webdriver/common/network.py
|
Python
|
apache-2.0
| 1,977
|
# Copyright 2013 OpenStack Foundation
# Copyright 2013 Rackspace Hosting
# Copyright 2013 Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
from mock import MagicMock
import testtools
from testtools import matchers
import swiftclient.client
from trove.tests.fakes.swift import SwiftClientStub
from trove.common.context import TroveContext
from trove.common import remote
from trove.common import exception
from trove.common import cfg
class TestRemote(testtools.TestCase):
def setUp(self):
super(TestRemote, self).setUp()
def tearDown(self):
super(TestRemote, self).tearDown()
def test_creation(self):
swiftclient.client.Connection.get_auth = MagicMock(return_value=None)
conn = swiftclient.client.Connection()
self.assertIsNone(conn.get_auth())
def test_create_swift_client(self):
mock_resp = MagicMock()
swiftclient.client.Connection.get_container = MagicMock(
return_value=["text", mock_resp])
service_catalog = [{'endpoints': [{'region': 'RegionOne',
'publicURL': 'example.com'}],
'type': 'object-store'}]
client = remote.create_swift_client(TroveContext(
tenant='123',
service_catalog=service_catalog))
headers, container = client.get_container('bob')
self.assertIs(headers, "text")
self.assertIs(container, mock_resp)
def test_empty_account(self):
"""
        This is an account with no containers and no objects.
"""
# setup expectation
swift_stub = SwiftClientStub()
swift_stub.with_account('123223')
# interact
conn = swiftclient.client.Connection()
account_info = conn.get_account()
self.assertThat(account_info, matchers.Not(matchers.Is(None)))
self.assertThat(len(account_info), matchers.Is(2))
self.assertThat(account_info, matchers.IsInstance(tuple))
self.assertThat(account_info[0], matchers.IsInstance(dict))
self.assertThat(account_info[0],
matchers.KeysEqual('content-length', 'accept-ranges',
'x-timestamp', 'x-trans-id', 'date',
'x-account-bytes-used',
'x-account-container-count',
'content-type',
'x-account-object-count'))
self.assertThat(account_info[1], matchers.IsInstance(list))
self.assertThat(len(account_info[1]), matchers.Is(0))
def test_one_container(self):
"""
        Tests that behavior is normal with a single container.
"""
# setup expectation
swift_stub = SwiftClientStub()
swift_stub.with_account('123223')
cont_name = 'a-container-name'
swift_stub.with_container(cont_name)
# interact
conn = swiftclient.client.Connection()
conn.get_auth()
conn.put_container(cont_name)
# get headers plus container metadata
self.assertThat(len(conn.get_account()), matchers.Is(2))
# verify container details
account_containers = conn.get_account()[1]
self.assertThat(len(account_containers), matchers.Is(1))
self.assertThat(account_containers[0],
matchers.KeysEqual('count', 'bytes', 'name'))
self.assertThat(account_containers[0]['name'], matchers.Is(cont_name))
# get container details
cont_info = conn.get_container(cont_name)
self.assertIsNotNone(cont_info)
self.assertThat(cont_info[0], matchers.KeysEqual('content-length',
'x-container-object-count', 'accept-ranges',
'x-container-bytes-used', 'x-timestamp',
'x-trans-id', 'date', 'content-type'))
self.assertThat(len(cont_info[1]), matchers.Equals(0))
# remove container
swift_stub.without_container(cont_name)
with testtools.ExpectedException(swiftclient.ClientException):
conn.get_container(cont_name)
# ensure there are no more containers in account
self.assertThat(len(conn.get_account()[1]), matchers.Is(0))
def test_one_object(self):
swift_stub = SwiftClientStub()
swift_stub.with_account('123223')
swift_stub.with_container('bob')
swift_stub.with_object('bob', 'test', 'test_contents')
# create connection
conn = swiftclient.client.Connection()
# test container lightly
cont_info = conn.get_container('bob')
self.assertIsNotNone(cont_info)
self.assertThat(cont_info[0],
matchers.KeysEqual('content-length',
'x-container-object-count',
'accept-ranges',
'x-container-bytes-used',
'x-timestamp', 'x-trans-id', 'date',
'content-type'))
cont_objects = cont_info[1]
self.assertThat(len(cont_objects), matchers.Equals(1))
obj_1 = cont_objects[0]
self.assertThat(obj_1, matchers.Equals(
{'bytes': 13, 'last_modified': '2013-03-15T22:10:49.361950',
'hash': 'ccc55aefbf92aa66f42b638802c5e7f6', 'name': 'test',
'content_type': 'application/octet-stream',
'contents': 'test_contents'}))
# test object api - not much to do here
self.assertThat(conn.get_object('bob', 'test')[1],
matchers.Is('test_contents'))
# test remove object
swift_stub.without_object('bob', 'test')
# interact
with testtools.ExpectedException(swiftclient.ClientException):
conn.delete_object('bob', 'test')
self.assertThat(len(conn.get_container('bob')[1]), matchers.Is(0))
def test_two_objects(self):
swift_stub = SwiftClientStub()
swift_stub.with_account('123223')
swift_stub.with_container('bob')
swift_stub.with_container('bob2')
swift_stub.with_object('bob', 'test', 'test_contents')
swift_stub.with_object('bob', 'test2', 'test_contents2')
conn = swiftclient.client.Connection()
self.assertIs(len(conn.get_account()), 2)
cont_info = conn.get_container('bob')
self.assertIsNotNone(cont_info)
self.assertThat(cont_info[0],
matchers.KeysEqual('content-length',
'x-container-object-count',
'accept-ranges',
'x-container-bytes-used',
'x-timestamp', 'x-trans-id', 'date',
'content-type'))
self.assertThat(len(cont_info[1]), matchers.Equals(2))
self.assertThat(cont_info[1][0], matchers.Equals(
{'bytes': 13, 'last_modified': '2013-03-15T22:10:49.361950',
'hash': 'ccc55aefbf92aa66f42b638802c5e7f6', 'name': 'test',
'content_type': 'application/octet-stream',
'contents': 'test_contents'}))
self.assertThat(conn.get_object('bob', 'test')[1],
matchers.Is('test_contents'))
self.assertThat(conn.get_object('bob', 'test2')[1],
matchers.Is('test_contents2'))
swift_stub.without_object('bob', 'test')
with testtools.ExpectedException(swiftclient.ClientException):
conn.delete_object('bob', 'test')
self.assertThat(len(conn.get_container('bob')[1]), matchers.Is(1))
swift_stub.without_container('bob')
with testtools.ExpectedException(swiftclient.ClientException):
conn.get_container('bob')
self.assertThat(len(conn.get_account()), matchers.Is(2))
def test_nonexisting_container(self):
"""
        When a container that does not exist is accessed, a 404 is returned.
"""
swift_stub = SwiftClientStub()
swift_stub.with_account('123223')
swift_stub.with_container('existing')
conn = swiftclient.client.Connection()
with testtools.ExpectedException(swiftclient.ClientException):
conn.get_container('nonexisting')
def test_replace_object(self):
"""
        Test that updating an object leaves the container object
        count unchanged while the contents of the object are updated.
"""
swift_stub = SwiftClientStub()
swift_stub.with_account('1223df2')
swift_stub.with_container('new-container')
swift_stub.with_object('new-container', 'new-object',
'new-object-contents')
conn = swiftclient.client.Connection()
conn.put_object('new-container', 'new-object', 'new-object-contents')
obj_resp = conn.get_object('new-container', 'new-object')
self.assertThat(obj_resp, matchers.Not(matchers.Is(None)))
self.assertThat(len(obj_resp), matchers.Is(2))
self.assertThat(obj_resp[1], matchers.Is('new-object-contents'))
        # set expected behavior - trivial here since it is the intended
        # behavior; however, keep in mind this is just to support testing
        # of trove components
swift_stub.with_object('new-container', 'new-object',
'updated-object-contents')
conn.put_object('new-container', 'new-object',
'updated-object-contents')
obj_resp = conn.get_object('new-container', 'new-object')
self.assertThat(obj_resp, matchers.Not(matchers.Is(None)))
self.assertThat(len(obj_resp), matchers.Is(2))
self.assertThat(obj_resp[1], matchers.Is('updated-object-contents'))
# ensure object count has not increased
self.assertThat(len(conn.get_container('new-container')[1]),
matchers.Is(1))
class TestCreateCinderClient(testtools.TestCase):
def setUp(self):
super(TestCreateCinderClient, self).setUp()
self.volumev2_public_url = 'http://publicURL/v2'
self.volume_public_url_region_two = 'http://publicURL-r2/v1'
self.service_catalog = [
{
'endpoints': [
{
'region': 'RegionOne',
'publicURL': self.volumev2_public_url,
}
],
'type': 'volumev2'
},
{
'endpoints': [
{
'region': 'RegionOne',
'publicURL': 'http://publicURL-r1/v1',
},
{
'region': 'RegionTwo',
'publicURL': self.volume_public_url_region_two,
}
],
'type': 'volume'
}
]
def tearDown(self):
super(TestCreateCinderClient, self).tearDown()
cfg.CONF.clear_override('cinder_url')
cfg.CONF.clear_override('cinder_service_type')
cfg.CONF.clear_override('os_region_name')
def test_create_with_no_conf_no_catalog(self):
self.assertRaises(exception.EmptyCatalog,
remote.create_cinder_client,
TroveContext())
def test_create_with_conf_override(self):
cinder_url_from_conf = 'http://example.com'
tenant_from_ctx = 'abc'
cfg.CONF.set_override('cinder_url', cinder_url_from_conf)
client = remote.create_cinder_client(
TroveContext(tenant=tenant_from_ctx))
self.assertEqual('%s/%s' % (cinder_url_from_conf, tenant_from_ctx),
client.client.management_url)
def test_create_with_conf_override_trailing_slash(self):
cinder_url_from_conf = 'http://example.com/'
tenant_from_ctx = 'abc'
cfg.CONF.set_override('cinder_url', cinder_url_from_conf)
client = remote.create_cinder_client(
TroveContext(tenant=tenant_from_ctx))
self.assertEqual('%s%s' % (cinder_url_from_conf, tenant_from_ctx),
client.client.management_url)
def test_create_with_catalog_and_default_service_type(self):
client = remote.create_cinder_client(
TroveContext(service_catalog=self.service_catalog))
self.assertEqual(self.volumev2_public_url,
client.client.management_url)
def test_create_with_catalog_all_opts(self):
cfg.CONF.set_override('cinder_service_type', 'volume')
cfg.CONF.set_override('os_region_name', 'RegionTwo')
client = remote.create_cinder_client(
TroveContext(service_catalog=self.service_catalog))
self.assertEqual(self.volume_public_url_region_two,
client.client.management_url)
class TestCreateNovaClient(testtools.TestCase):
def setUp(self):
super(TestCreateNovaClient, self).setUp()
self.compute_public_url = 'http://publicURL/v2'
self.computev3_public_url_region_two = 'http://publicURL-r2/v3'
self.service_catalog = [
{
'endpoints': [
{
'region': 'RegionOne',
'publicURL': self.compute_public_url,
}
],
'type': 'compute'
},
{
'endpoints': [
{
'region': 'RegionOne',
'publicURL': 'http://publicURL-r1/v1',
},
{
'region': 'RegionTwo',
'publicURL': self.computev3_public_url_region_two,
}
],
'type': 'computev3'
}
]
def tearDown(self):
super(TestCreateNovaClient, self).tearDown()
cfg.CONF.clear_override('nova_compute_url')
cfg.CONF.clear_override('nova_compute_service_type')
cfg.CONF.clear_override('os_region_name')
def test_create_with_no_conf_no_catalog(self):
self.assertRaises(exception.EmptyCatalog,
remote.create_nova_client,
TroveContext())
def test_create_with_conf_override(self):
nova_url_from_conf = 'http://example.com'
tenant_from_ctx = 'abc'
cfg.CONF.set_override('nova_compute_url', nova_url_from_conf)
client = remote.create_nova_client(
TroveContext(tenant=tenant_from_ctx))
self.assertEqual('%s/%s' % (nova_url_from_conf, tenant_from_ctx),
client.client.management_url)
def test_create_with_conf_override_trailing_slash(self):
nova_url_from_conf = 'http://example.com/'
tenant_from_ctx = 'abc'
cfg.CONF.set_override('nova_compute_url', nova_url_from_conf)
client = remote.create_nova_client(
TroveContext(tenant=tenant_from_ctx))
self.assertEqual('%s%s' % (nova_url_from_conf, tenant_from_ctx),
client.client.management_url)
def test_create_with_catalog_and_default_service_type(self):
client = remote.create_nova_client(
TroveContext(service_catalog=self.service_catalog))
self.assertEqual(self.compute_public_url,
client.client.management_url)
def test_create_with_catalog_all_opts(self):
cfg.CONF.set_override('nova_compute_service_type', 'computev3')
cfg.CONF.set_override('os_region_name', 'RegionTwo')
client = remote.create_nova_client(
TroveContext(service_catalog=self.service_catalog))
self.assertEqual(self.computev3_public_url_region_two,
client.client.management_url)
class TestCreateHeatClient(testtools.TestCase):
def setUp(self):
super(TestCreateHeatClient, self).setUp()
self.heat_public_url = 'http://publicURL/v2'
self.heatv3_public_url_region_two = 'http://publicURL-r2/v3'
self.service_catalog = [
{
'endpoints': [
{
'region': 'RegionOne',
'publicURL': self.heat_public_url,
}
],
'type': 'orchestration'
},
{
'endpoints': [
{
'region': 'RegionOne',
'publicURL': 'http://publicURL-r1/v1',
},
{
'region': 'RegionTwo',
'publicURL': self.heatv3_public_url_region_two,
}
],
'type': 'orchestrationv3'
}
]
def tearDown(self):
super(TestCreateHeatClient, self).tearDown()
cfg.CONF.clear_override('heat_url')
cfg.CONF.clear_override('heat_service_type')
cfg.CONF.clear_override('os_region_name')
def test_create_with_no_conf_no_catalog(self):
self.assertRaises(exception.EmptyCatalog,
remote.create_heat_client,
TroveContext())
def test_create_with_conf_override(self):
heat_url_from_conf = 'http://example.com'
tenant_from_ctx = 'abc'
cfg.CONF.set_override('heat_url', heat_url_from_conf)
client = remote.create_heat_client(
TroveContext(tenant=tenant_from_ctx))
self.assertEqual('%s/%s' % (heat_url_from_conf, tenant_from_ctx),
client.http_client.endpoint)
def test_create_with_conf_override_trailing_slash(self):
heat_url_from_conf = 'http://example.com/'
tenant_from_ctx = 'abc'
cfg.CONF.set_override('heat_url', heat_url_from_conf)
client = remote.create_heat_client(
TroveContext(tenant=tenant_from_ctx))
self.assertEqual('%s%s' % (heat_url_from_conf, tenant_from_ctx),
client.http_client.endpoint)
def test_create_with_catalog_and_default_service_type(self):
client = remote.create_heat_client(
TroveContext(service_catalog=self.service_catalog))
self.assertEqual(self.heat_public_url,
client.http_client.endpoint)
def test_create_with_catalog_all_opts(self):
cfg.CONF.set_override('heat_service_type', 'orchestrationv3')
cfg.CONF.set_override('os_region_name', 'RegionTwo')
client = remote.create_heat_client(
TroveContext(service_catalog=self.service_catalog))
self.assertEqual(self.heatv3_public_url_region_two,
client.http_client.endpoint)
class TestCreateSwiftClient(testtools.TestCase):
def setUp(self):
super(TestCreateSwiftClient, self).setUp()
self.swift_public_url = 'http://publicURL/v2'
self.swiftv3_public_url_region_two = 'http://publicURL-r2/v3'
self.service_catalog = [
{
'endpoints': [
{
'region': 'RegionOne',
'publicURL': self.swift_public_url,
}
],
'type': 'object-store'
},
{
'endpoints': [
{
'region': 'RegionOne',
'publicURL': 'http://publicURL-r1/v1',
},
{
'region': 'RegionTwo',
'publicURL': self.swiftv3_public_url_region_two,
}
],
'type': 'object-storev3'
}
]
def tearDown(self):
super(TestCreateSwiftClient, self).tearDown()
cfg.CONF.clear_override('swift_url')
cfg.CONF.clear_override('swift_service_type')
cfg.CONF.clear_override('os_region_name')
def test_create_with_no_conf_no_catalog(self):
self.assertRaises(exception.EmptyCatalog,
remote.create_swift_client,
TroveContext())
def test_create_with_conf_override(self):
swift_url_from_conf = 'http://example.com/AUTH_'
tenant_from_ctx = 'abc'
cfg.CONF.set_override('swift_url', swift_url_from_conf)
client = remote.create_swift_client(
TroveContext(tenant=tenant_from_ctx))
self.assertEqual('%s%s' % (swift_url_from_conf, tenant_from_ctx),
client.url)
def test_create_with_catalog_and_default_service_type(self):
client = remote.create_swift_client(
TroveContext(service_catalog=self.service_catalog))
self.assertEqual(self.swift_public_url,
client.url)
def test_create_with_catalog_all_opts(self):
cfg.CONF.set_override('swift_service_type', 'object-storev3')
cfg.CONF.set_override('os_region_name', 'RegionTwo')
client = remote.create_swift_client(
TroveContext(service_catalog=self.service_catalog))
self.assertEqual(self.swiftv3_public_url_region_two,
client.url)
class TestEndpoints(testtools.TestCase):
"""
Copied from glance/tests/unit/test_auth.py.
"""
def setUp(self):
super(TestEndpoints, self).setUp()
self.service_catalog = [
{
'endpoint_links': [],
'endpoints': [
{
'adminURL': 'http://localhost:8080/',
'region': 'RegionOne',
'internalURL': 'http://internalURL/',
'publicURL': 'http://publicURL/',
},
{
'adminURL': 'http://localhost:8081/',
'region': 'RegionTwo',
'internalURL': 'http://internalURL2/',
'publicURL': 'http://publicURL2/',
},
],
'type': 'object-store',
'name': 'Object Storage Service',
}
]
def test_get_endpoint_empty_catalog(self):
self.assertRaises(exception.EmptyCatalog,
remote.get_endpoint,
None)
def test_get_endpoint_with_custom_server_type(self):
endpoint = remote.get_endpoint(self.service_catalog,
service_type='object-store',
endpoint_region='RegionOne')
self.assertEqual('http://publicURL/', endpoint)
def test_get_endpoint_with_custom_endpoint_type(self):
endpoint = remote.get_endpoint(self.service_catalog,
service_type='object-store',
endpoint_type='internalURL',
endpoint_region='RegionOne')
self.assertEqual('http://internalURL/', endpoint)
def test_get_endpoint_raises_with_invalid_service_type(self):
self.assertRaises(exception.NoServiceEndpoint,
remote.get_endpoint,
self.service_catalog,
service_type='foo')
def test_get_endpoint_raises_with_invalid_endpoint_type(self):
self.assertRaises(exception.NoServiceEndpoint,
remote.get_endpoint,
self.service_catalog,
service_type='object-store',
endpoint_type='foo',
endpoint_region='RegionOne')
def test_get_endpoint_raises_with_invalid_endpoint_region(self):
self.assertRaises(exception.NoServiceEndpoint,
remote.get_endpoint,
self.service_catalog,
service_type='object-store',
endpoint_region='foo',
endpoint_type='internalURL')
def test_get_endpoint_ignores_missing_type(self):
service_catalog = [
{
'name': 'Other Service',
},
{
'endpoint_links': [],
'endpoints': [
{
'adminURL': 'http://localhost:8080/',
'region': 'RegionOne',
'internalURL': 'http://internalURL/',
'publicURL': 'http://publicURL/',
},
{
'adminURL': 'http://localhost:8081/',
'region': 'RegionTwo',
'internalURL': 'http://internalURL2/',
'publicURL': 'http://publicURL2/',
},
],
'type': 'object-store',
'name': 'Object Storage Service',
}
]
endpoint = remote.get_endpoint(service_catalog,
service_type='object-store',
endpoint_region='RegionOne')
self.assertEqual('http://publicURL/', endpoint)
|
CMSS-BCRDB/RDS
|
trove/tests/unittests/common/test_remote.py
|
Python
|
apache-2.0
| 26,195
|
# Copyright 2016 Intel Corporation
# Copyright 2014 International Business Machines Corporation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from ironic.common.i18n import _
opts = [
cfg.StrOpt('terminal',
default='shellinaboxd',
help=_('Path to serial console terminal program. Used only '
'by Shell In A Box console.')),
cfg.StrOpt('terminal_cert_dir',
help=_('Directory containing the terminal SSL cert (PEM) for '
'serial console access. Used only by Shell In A Box '
'console.')),
cfg.StrOpt('terminal_pid_dir',
help=_('Directory for holding terminal pid files. '
'If not specified, the temporary directory '
'will be used.')),
cfg.IntOpt('terminal_timeout',
default=600,
min=0,
help=_('Timeout (in seconds) for the terminal session to be '
'closed on inactivity. Set to 0 to disable timeout. '
'Used only by Socat console.')),
cfg.IntOpt('subprocess_checking_interval',
default=1,
help=_('Time interval (in seconds) for checking the status of '
'console subprocess.')),
cfg.IntOpt('subprocess_timeout',
default=10,
help=_('Time (in seconds) to wait for the console subprocess '
'to start.')),
cfg.IPOpt('socat_address',
default='$my_ip',
help=_('IP address of Socat service running on the host of '
'ironic conductor. Used only by Socat console.')),
]
def register_opts(conf):
conf.register_opts(opts, group='console')
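# Sample configuration (illustrative sketch; values shown are the defaults
# defined above, in ironic.conf INI syntax):
#
#   [console]
#   terminal = shellinaboxd
#   terminal_timeout = 600
#   subprocess_timeout = 10
#   socat_address = $my_ip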
|
SauloAislan/ironic
|
ironic/conf/console.py
|
Python
|
apache-2.0
| 2,354
|
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from pyspark import since, keyword_only
from pyspark.ml.util import *
from pyspark.ml.wrapper import JavaEstimator, JavaModel, JavaWrapper
from pyspark.ml.param.shared import *
from pyspark.ml.common import inherit_doc
__all__ = ['BisectingKMeans', 'BisectingKMeansModel', 'BisectingKMeansSummary',
'KMeans', 'KMeansModel',
'GaussianMixture', 'GaussianMixtureModel', 'GaussianMixtureSummary',
'LDA', 'LDAModel', 'LocalLDAModel', 'DistributedLDAModel']
class ClusteringSummary(JavaWrapper):
"""
.. note:: Experimental
Clustering results for a given model.
.. versionadded:: 2.1.0
"""
@property
@since("2.1.0")
def predictionCol(self):
"""
Name for column of predicted clusters in `predictions`.
"""
return self._call_java("predictionCol")
@property
@since("2.1.0")
def predictions(self):
"""
DataFrame produced by the model's `transform` method.
"""
return self._call_java("predictions")
@property
@since("2.1.0")
def featuresCol(self):
"""
Name for column of features in `predictions`.
"""
return self._call_java("featuresCol")
@property
@since("2.1.0")
def k(self):
"""
The number of clusters the model was trained with.
"""
return self._call_java("k")
@property
@since("2.1.0")
def cluster(self):
"""
DataFrame of predicted cluster centers for each training data point.
"""
return self._call_java("cluster")
@property
@since("2.1.0")
def clusterSizes(self):
"""
Size of (number of data points in) each cluster.
"""
return self._call_java("clusterSizes")
class GaussianMixtureModel(JavaModel, JavaMLWritable, JavaMLReadable):
"""
Model fitted by GaussianMixture.
.. versionadded:: 2.0.0
"""
@property
@since("2.0.0")
def weights(self):
"""
Weight for each Gaussian distribution in the mixture.
This is a multinomial probability distribution over the k Gaussians,
where weights[i] is the weight for Gaussian i, and weights sum to 1.
"""
return self._call_java("weights")
@property
@since("2.0.0")
def gaussiansDF(self):
"""
Retrieve Gaussian distributions as a DataFrame.
Each row represents a Gaussian Distribution.
The DataFrame has two columns: mean (Vector) and cov (Matrix).
"""
return self._call_java("gaussiansDF")
@property
@since("2.1.0")
def hasSummary(self):
"""
Indicates whether a training summary exists for this model
instance.
"""
return self._call_java("hasSummary")
@property
@since("2.1.0")
def summary(self):
"""
Gets summary (e.g. cluster assignments, cluster sizes) of the model trained on the
training set. An exception is thrown if no summary exists.
"""
if self.hasSummary:
return GaussianMixtureSummary(self._call_java("summary"))
else:
raise RuntimeError("No training summary available for this %s" %
self.__class__.__name__)
@inherit_doc
class GaussianMixture(JavaEstimator, HasFeaturesCol, HasPredictionCol, HasMaxIter, HasTol, HasSeed,
HasProbabilityCol, JavaMLWritable, JavaMLReadable):
"""
GaussianMixture clustering.
This class performs expectation maximization for multivariate Gaussian
Mixture Models (GMMs). A GMM represents a composite distribution of
independent Gaussian distributions with associated "mixing" weights
specifying each's contribution to the composite.
Given a set of sample points, this class will maximize the log-likelihood
for a mixture of k Gaussians, iterating until the log-likelihood changes by
less than convergenceTol, or until it has reached the max number of iterations.
While this process is generally guaranteed to converge, it is not guaranteed
to find a global optimum.
.. note:: For high-dimensional data (with many features), this algorithm may perform poorly.
This is due to high-dimensional data (a) making it difficult to cluster at all
(based on statistical/theoretical arguments) and (b) numerical issues with
Gaussian distributions.
>>> from pyspark.ml.linalg import Vectors
>>> data = [(Vectors.dense([-0.1, -0.05 ]),),
... (Vectors.dense([-0.01, -0.1]),),
... (Vectors.dense([0.9, 0.8]),),
... (Vectors.dense([0.75, 0.935]),),
... (Vectors.dense([-0.83, -0.68]),),
... (Vectors.dense([-0.91, -0.76]),)]
>>> df = spark.createDataFrame(data, ["features"])
>>> gm = GaussianMixture(k=3, tol=0.0001,
... maxIter=10, seed=10)
>>> model = gm.fit(df)
>>> model.hasSummary
True
>>> summary = model.summary
>>> summary.k
3
>>> summary.clusterSizes
[2, 2, 2]
>>> weights = model.weights
>>> len(weights)
3
>>> model.gaussiansDF.show()
+--------------------+--------------------+
| mean| cov|
+--------------------+--------------------+
|[0.82500000140229...|0.005625000000006...|
|[-0.4777098016092...|0.167969502720916...|
|[-0.4472625243352...|0.167304119758233...|
+--------------------+--------------------+
...
>>> transformed = model.transform(df).select("features", "prediction")
>>> rows = transformed.collect()
>>> rows[4].prediction == rows[5].prediction
True
>>> rows[2].prediction == rows[3].prediction
True
>>> gmm_path = temp_path + "/gmm"
>>> gm.save(gmm_path)
>>> gm2 = GaussianMixture.load(gmm_path)
>>> gm2.getK()
3
>>> model_path = temp_path + "/gmm_model"
>>> model.save(model_path)
>>> model2 = GaussianMixtureModel.load(model_path)
>>> model2.hasSummary
False
>>> model2.weights == model.weights
True
>>> model2.gaussiansDF.show()
+--------------------+--------------------+
| mean| cov|
+--------------------+--------------------+
|[0.82500000140229...|0.005625000000006...|
|[-0.4777098016092...|0.167969502720916...|
|[-0.4472625243352...|0.167304119758233...|
+--------------------+--------------------+
...
.. versionadded:: 2.0.0
"""
k = Param(Params._dummy(), "k", "Number of independent Gaussians in the mixture model. " +
"Must be > 1.", typeConverter=TypeConverters.toInt)
@keyword_only
def __init__(self, featuresCol="features", predictionCol="prediction", k=2,
probabilityCol="probability", tol=0.01, maxIter=100, seed=None):
"""
__init__(self, featuresCol="features", predictionCol="prediction", k=2, \
probabilityCol="probability", tol=0.01, maxIter=100, seed=None)
"""
super(GaussianMixture, self).__init__()
self._java_obj = self._new_java_obj("org.apache.spark.ml.clustering.GaussianMixture",
self.uid)
self._setDefault(k=2, tol=0.01, maxIter=100)
kwargs = self._input_kwargs
self.setParams(**kwargs)
def _create_model(self, java_model):
return GaussianMixtureModel(java_model)
@keyword_only
@since("2.0.0")
def setParams(self, featuresCol="features", predictionCol="prediction", k=2,
probabilityCol="probability", tol=0.01, maxIter=100, seed=None):
"""
setParams(self, featuresCol="features", predictionCol="prediction", k=2, \
probabilityCol="probability", tol=0.01, maxIter=100, seed=None)
Sets params for GaussianMixture.
"""
kwargs = self._input_kwargs
return self._set(**kwargs)
@since("2.0.0")
def setK(self, value):
"""
Sets the value of :py:attr:`k`.
"""
return self._set(k=value)
@since("2.0.0")
def getK(self):
"""
        Gets the value of `k` or its default value.
"""
return self.getOrDefault(self.k)
class GaussianMixtureSummary(ClusteringSummary):
"""
.. note:: Experimental
Gaussian mixture clustering results for a given model.
.. versionadded:: 2.1.0
"""
@property
@since("2.1.0")
def probabilityCol(self):
"""
Name for column of predicted probability of each cluster in `predictions`.
"""
return self._call_java("probabilityCol")
@property
@since("2.1.0")
def probability(self):
"""
DataFrame of probabilities of each cluster for each training data point.
"""
return self._call_java("probability")
class KMeansSummary(ClusteringSummary):
"""
.. note:: Experimental
Summary of KMeans.
.. versionadded:: 2.1.0
"""
pass
class KMeansModel(JavaModel, JavaMLWritable, JavaMLReadable):
"""
Model fitted by KMeans.
.. versionadded:: 1.5.0
"""
@since("1.5.0")
def clusterCenters(self):
"""Get the cluster centers, represented as a list of NumPy arrays."""
return [c.toArray() for c in self._call_java("clusterCenters")]
@since("2.0.0")
def computeCost(self, dataset):
"""
Return the K-means cost (sum of squared distances of points to their nearest center)
for this model on the given data.
"""
return self._call_java("computeCost", dataset)
@property
@since("2.1.0")
def hasSummary(self):
"""
Indicates whether a training summary exists for this model instance.
"""
return self._call_java("hasSummary")
@property
@since("2.1.0")
def summary(self):
"""
Gets summary (e.g. cluster assignments, cluster sizes) of the model trained on the
training set. An exception is thrown if no summary exists.
"""
if self.hasSummary:
return KMeansSummary(self._call_java("summary"))
else:
raise RuntimeError("No training summary available for this %s" %
self.__class__.__name__)
@inherit_doc
class KMeans(JavaEstimator, HasFeaturesCol, HasPredictionCol, HasMaxIter, HasTol, HasSeed,
JavaMLWritable, JavaMLReadable):
"""
K-means clustering with a k-means++ like initialization mode
(the k-means|| algorithm by Bahmani et al).
>>> from pyspark.ml.linalg import Vectors
>>> data = [(Vectors.dense([0.0, 0.0]),), (Vectors.dense([1.0, 1.0]),),
... (Vectors.dense([9.0, 8.0]),), (Vectors.dense([8.0, 9.0]),)]
>>> df = spark.createDataFrame(data, ["features"])
>>> kmeans = KMeans(k=2, seed=1)
>>> model = kmeans.fit(df)
>>> centers = model.clusterCenters()
>>> len(centers)
2
>>> model.computeCost(df)
2.000...
>>> transformed = model.transform(df).select("features", "prediction")
>>> rows = transformed.collect()
>>> rows[0].prediction == rows[1].prediction
True
>>> rows[2].prediction == rows[3].prediction
True
>>> model.hasSummary
True
>>> summary = model.summary
>>> summary.k
2
>>> summary.clusterSizes
[2, 2]
>>> kmeans_path = temp_path + "/kmeans"
>>> kmeans.save(kmeans_path)
>>> kmeans2 = KMeans.load(kmeans_path)
>>> kmeans2.getK()
2
>>> model_path = temp_path + "/kmeans_model"
>>> model.save(model_path)
>>> model2 = KMeansModel.load(model_path)
>>> model2.hasSummary
False
>>> model.clusterCenters()[0] == model2.clusterCenters()[0]
array([ True, True], dtype=bool)
>>> model.clusterCenters()[1] == model2.clusterCenters()[1]
array([ True, True], dtype=bool)
.. versionadded:: 1.5.0
"""
k = Param(Params._dummy(), "k", "The number of clusters to create. Must be > 1.",
typeConverter=TypeConverters.toInt)
initMode = Param(Params._dummy(), "initMode",
"The initialization algorithm. This can be either \"random\" to " +
"choose random points as initial cluster centers, or \"k-means||\" " +
"to use a parallel variant of k-means++",
typeConverter=TypeConverters.toString)
initSteps = Param(Params._dummy(), "initSteps", "The number of steps for k-means|| " +
"initialization mode. Must be > 0.", typeConverter=TypeConverters.toInt)
@keyword_only
def __init__(self, featuresCol="features", predictionCol="prediction", k=2,
initMode="k-means||", initSteps=2, tol=1e-4, maxIter=20, seed=None):
"""
__init__(self, featuresCol="features", predictionCol="prediction", k=2, \
initMode="k-means||", initSteps=2, tol=1e-4, maxIter=20, seed=None)
"""
super(KMeans, self).__init__()
self._java_obj = self._new_java_obj("org.apache.spark.ml.clustering.KMeans", self.uid)
self._setDefault(k=2, initMode="k-means||", initSteps=2, tol=1e-4, maxIter=20)
kwargs = self._input_kwargs
self.setParams(**kwargs)
def _create_model(self, java_model):
return KMeansModel(java_model)
@keyword_only
@since("1.5.0")
def setParams(self, featuresCol="features", predictionCol="prediction", k=2,
initMode="k-means||", initSteps=2, tol=1e-4, maxIter=20, seed=None):
"""
setParams(self, featuresCol="features", predictionCol="prediction", k=2, \
initMode="k-means||", initSteps=2, tol=1e-4, maxIter=20, seed=None)
Sets params for KMeans.
"""
kwargs = self._input_kwargs
return self._set(**kwargs)
@since("1.5.0")
def setK(self, value):
"""
Sets the value of :py:attr:`k`.
"""
return self._set(k=value)
@since("1.5.0")
def getK(self):
"""
        Gets the value of `k` or its default value.
"""
return self.getOrDefault(self.k)
@since("1.5.0")
def setInitMode(self, value):
"""
Sets the value of :py:attr:`initMode`.
"""
return self._set(initMode=value)
@since("1.5.0")
def getInitMode(self):
"""
        Gets the value of `initMode` or its default value.
"""
return self.getOrDefault(self.initMode)
@since("1.5.0")
def setInitSteps(self, value):
"""
Sets the value of :py:attr:`initSteps`.
"""
return self._set(initSteps=value)
@since("1.5.0")
def getInitSteps(self):
"""
        Gets the value of `initSteps` or its default value.
"""
return self.getOrDefault(self.initSteps)
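# Sketch (an assumption based on the initMode Param doc above, not part of
# the original module): switching the initialization strategy before fitting.
#
#   kmeans = KMeans(k=2, seed=1).setInitMode("random")
#   kmeans.getInitMode()   # -> 'random'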
class BisectingKMeansModel(JavaModel, JavaMLWritable, JavaMLReadable):
"""
Model fitted by BisectingKMeans.
.. versionadded:: 2.0.0
"""
@since("2.0.0")
def clusterCenters(self):
"""Get the cluster centers, represented as a list of NumPy arrays."""
return [c.toArray() for c in self._call_java("clusterCenters")]
@since("2.0.0")
def computeCost(self, dataset):
"""
Computes the sum of squared distances between the input points
and their corresponding cluster centers.
"""
return self._call_java("computeCost", dataset)
@property
@since("2.1.0")
def hasSummary(self):
"""
Indicates whether a training summary exists for this model instance.
"""
return self._call_java("hasSummary")
@property
@since("2.1.0")
def summary(self):
"""
Gets summary (e.g. cluster assignments, cluster sizes) of the model trained on the
training set. An exception is thrown if no summary exists.
"""
if self.hasSummary:
return BisectingKMeansSummary(self._call_java("summary"))
else:
raise RuntimeError("No training summary available for this %s" %
self.__class__.__name__)
@inherit_doc
class BisectingKMeans(JavaEstimator, HasFeaturesCol, HasPredictionCol, HasMaxIter, HasSeed,
JavaMLWritable, JavaMLReadable):
"""
A bisecting k-means algorithm based on the paper "A comparison of document clustering
techniques" by Steinbach, Karypis, and Kumar, with modification to fit Spark.
The algorithm starts from a single cluster that contains all points.
Iteratively it finds divisible clusters on the bottom level and bisects each of them using
k-means, until there are `k` leaf clusters in total or no leaf clusters are divisible.
The bisecting steps of clusters on the same level are grouped together to increase parallelism.
    If bisecting all divisible clusters on the bottom level would result in more than `k` leaf
clusters, larger clusters get higher priority.
>>> from pyspark.ml.linalg import Vectors
>>> data = [(Vectors.dense([0.0, 0.0]),), (Vectors.dense([1.0, 1.0]),),
... (Vectors.dense([9.0, 8.0]),), (Vectors.dense([8.0, 9.0]),)]
>>> df = spark.createDataFrame(data, ["features"])
>>> bkm = BisectingKMeans(k=2, minDivisibleClusterSize=1.0)
>>> model = bkm.fit(df)
>>> centers = model.clusterCenters()
>>> len(centers)
2
>>> model.computeCost(df)
2.000...
>>> model.hasSummary
True
>>> summary = model.summary
>>> summary.k
2
>>> summary.clusterSizes
[2, 2]
>>> transformed = model.transform(df).select("features", "prediction")
>>> rows = transformed.collect()
>>> rows[0].prediction == rows[1].prediction
True
>>> rows[2].prediction == rows[3].prediction
True
>>> bkm_path = temp_path + "/bkm"
>>> bkm.save(bkm_path)
>>> bkm2 = BisectingKMeans.load(bkm_path)
>>> bkm2.getK()
2
>>> model_path = temp_path + "/bkm_model"
>>> model.save(model_path)
>>> model2 = BisectingKMeansModel.load(model_path)
>>> model2.hasSummary
False
>>> model.clusterCenters()[0] == model2.clusterCenters()[0]
array([ True, True], dtype=bool)
>>> model.clusterCenters()[1] == model2.clusterCenters()[1]
array([ True, True], dtype=bool)
.. versionadded:: 2.0.0
"""
k = Param(Params._dummy(), "k", "The desired number of leaf clusters. Must be > 1.",
typeConverter=TypeConverters.toInt)
minDivisibleClusterSize = Param(Params._dummy(), "minDivisibleClusterSize",
"The minimum number of points (if >= 1.0) or the minimum " +
"proportion of points (if < 1.0) of a divisible cluster.",
typeConverter=TypeConverters.toFloat)
@keyword_only
def __init__(self, featuresCol="features", predictionCol="prediction", maxIter=20,
seed=None, k=4, minDivisibleClusterSize=1.0):
"""
__init__(self, featuresCol="features", predictionCol="prediction", maxIter=20, \
seed=None, k=4, minDivisibleClusterSize=1.0)
"""
super(BisectingKMeans, self).__init__()
self._java_obj = self._new_java_obj("org.apache.spark.ml.clustering.BisectingKMeans",
self.uid)
self._setDefault(maxIter=20, k=4, minDivisibleClusterSize=1.0)
kwargs = self._input_kwargs
self.setParams(**kwargs)
@keyword_only
@since("2.0.0")
def setParams(self, featuresCol="features", predictionCol="prediction", maxIter=20,
seed=None, k=4, minDivisibleClusterSize=1.0):
"""
setParams(self, featuresCol="features", predictionCol="prediction", maxIter=20, \
seed=None, k=4, minDivisibleClusterSize=1.0)
Sets params for BisectingKMeans.
"""
kwargs = self._input_kwargs
return self._set(**kwargs)
@since("2.0.0")
def setK(self, value):
"""
Sets the value of :py:attr:`k`.
"""
return self._set(k=value)
@since("2.0.0")
def getK(self):
"""
Gets the value of `k` or its default value.
"""
return self.getOrDefault(self.k)
@since("2.0.0")
def setMinDivisibleClusterSize(self, value):
"""
Sets the value of :py:attr:`minDivisibleClusterSize`.
"""
return self._set(minDivisibleClusterSize=value)
@since("2.0.0")
def getMinDivisibleClusterSize(self):
"""
Gets the value of `minDivisibleClusterSize` or its default value.
"""
return self.getOrDefault(self.minDivisibleClusterSize)
def _create_model(self, java_model):
return BisectingKMeansModel(java_model)
class BisectingKMeansSummary(ClusteringSummary):
"""
.. note:: Experimental
Bisecting KMeans clustering results for a given model.
.. versionadded:: 2.1.0
"""
pass
@inherit_doc
class LDAModel(JavaModel):
"""
Latent Dirichlet Allocation (LDA) model.
    This abstraction permits different underlying representations,
including local and distributed data structures.
.. versionadded:: 2.0.0
"""
@since("2.0.0")
def isDistributed(self):
"""
Indicates whether this instance is of type DistributedLDAModel
"""
return self._call_java("isDistributed")
@since("2.0.0")
def vocabSize(self):
"""Vocabulary size (number of terms or words in the vocabulary)"""
return self._call_java("vocabSize")
@since("2.0.0")
def topicsMatrix(self):
"""
Inferred topics, where each topic is represented by a distribution over terms.
This is a matrix of size vocabSize x k, where each column is a topic.
No guarantees are given about the ordering of the topics.
WARNING: If this model is actually a :py:class:`DistributedLDAModel` instance produced by
the Expectation-Maximization ("em") `optimizer`, then this method could involve
collecting a large amount of data to the driver (on the order of vocabSize x k).
"""
return self._call_java("topicsMatrix")
@since("2.0.0")
def logLikelihood(self, dataset):
"""
Calculates a lower bound on the log likelihood of the entire corpus.
See Equation (16) in the Online LDA paper (Hoffman et al., 2010).
WARNING: If this model is an instance of :py:class:`DistributedLDAModel` (produced when
:py:attr:`optimizer` is set to "em"), this involves collecting a large
:py:func:`topicsMatrix` to the driver. This implementation may be changed in the future.
"""
return self._call_java("logLikelihood", dataset)
@since("2.0.0")
def logPerplexity(self, dataset):
"""
        Calculate an upper bound on perplexity. (Lower is better.)
See Equation (16) in the Online LDA paper (Hoffman et al., 2010).
WARNING: If this model is an instance of :py:class:`DistributedLDAModel` (produced when
:py:attr:`optimizer` is set to "em"), this involves collecting a large
:py:func:`topicsMatrix` to the driver. This implementation may be changed in the future.
"""
return self._call_java("logPerplexity", dataset)
@since("2.0.0")
def describeTopics(self, maxTermsPerTopic=10):
"""
Return the topics described by their top-weighted terms.
"""
return self._call_java("describeTopics", maxTermsPerTopic)
@since("2.0.0")
def estimatedDocConcentration(self):
"""
Value for :py:attr:`LDA.docConcentration` estimated from data.
If Online LDA was used and :py:attr:`LDA.optimizeDocConcentration` was set to false,
then this returns the fixed (given) value for the :py:attr:`LDA.docConcentration` parameter.
"""
return self._call_java("estimatedDocConcentration")
@inherit_doc
class DistributedLDAModel(LDAModel, JavaMLReadable, JavaMLWritable):
"""
Distributed model fitted by :py:class:`LDA`.
This type of model is currently only produced by Expectation-Maximization (EM).
This model stores the inferred topics, the full training dataset, and the topic distribution
for each training document.
.. versionadded:: 2.0.0
"""
@since("2.0.0")
def toLocal(self):
"""
Convert this distributed model to a local representation. This discards info about the
training dataset.
WARNING: This involves collecting a large :py:func:`topicsMatrix` to the driver.
"""
return LocalLDAModel(self._call_java("toLocal"))
@since("2.0.0")
def trainingLogLikelihood(self):
"""
Log likelihood of the observed tokens in the training set,
given the current parameter estimates:
log P(docs | topics, topic distributions for docs, Dirichlet hyperparameters)
Notes:
- This excludes the prior; for that, use :py:func:`logPrior`.
- Even with :py:func:`logPrior`, this is NOT the same as the data log likelihood given
the hyperparameters.
- This is computed from the topic distributions computed during training. If you call
:py:func:`logLikelihood` on the same training dataset, the topic distributions
will be computed again, possibly giving different results.
"""
return self._call_java("trainingLogLikelihood")
@since("2.0.0")
def logPrior(self):
"""
Log probability of the current parameter estimate:
log P(topics, topic distributions for docs | alpha, eta)
"""
return self._call_java("logPrior")
@since("2.0.0")
def getCheckpointFiles(self):
"""
If using checkpointing and :py:attr:`LDA.keepLastCheckpoint` is set to true, then there may
be saved checkpoint files. This method is provided so that users can manage those files.
.. note:: Removing the checkpoints can cause failures if a partition is lost and is needed
by certain :py:class:`DistributedLDAModel` methods. Reference counting will clean up
the checkpoints when this model and derivative data go out of scope.
        :return: List of checkpoint files from training
"""
return self._call_java("getCheckpointFiles")
@inherit_doc
class LocalLDAModel(LDAModel, JavaMLReadable, JavaMLWritable):
"""
Local (non-distributed) model fitted by :py:class:`LDA`.
This model stores the inferred topics only; it does not store info about the training dataset.
.. versionadded:: 2.0.0
"""
pass
@inherit_doc
class LDA(JavaEstimator, HasFeaturesCol, HasMaxIter, HasSeed, HasCheckpointInterval,
JavaMLReadable, JavaMLWritable):
"""
Latent Dirichlet Allocation (LDA), a topic model designed for text documents.
Terminology:
- "term" = "word": an el
- "token": instance of a term appearing in a document
- "topic": multinomial distribution over terms representing some concept
- "document": one piece of text, corresponding to one row in the input data
Original LDA paper (journal version):
Blei, Ng, and Jordan. "Latent Dirichlet Allocation." JMLR, 2003.
Input data (featuresCol):
LDA is given a collection of documents as input data, via the featuresCol parameter.
Each document is specified as a :py:class:`Vector` of length vocabSize, where each entry is the
count for the corresponding term (word) in the document. Feature transformers such as
:py:class:`pyspark.ml.feature.Tokenizer` and :py:class:`pyspark.ml.feature.CountVectorizer`
can be useful for converting text to word count vectors.
>>> from pyspark.ml.linalg import Vectors, SparseVector
>>> from pyspark.ml.clustering import LDA
>>> df = spark.createDataFrame([[1, Vectors.dense([0.0, 1.0])],
... [2, SparseVector(2, {0: 1.0})],], ["id", "features"])
>>> lda = LDA(k=2, seed=1, optimizer="em")
>>> model = lda.fit(df)
>>> model.isDistributed()
True
>>> localModel = model.toLocal()
>>> localModel.isDistributed()
False
>>> model.vocabSize()
2
>>> model.describeTopics().show()
+-----+-----------+--------------------+
|topic|termIndices| termWeights|
+-----+-----------+--------------------+
| 0| [1, 0]|[0.50401530077160...|
| 1| [0, 1]|[0.50401530077160...|
+-----+-----------+--------------------+
...
>>> model.topicsMatrix()
DenseMatrix(2, 2, [0.496, 0.504, 0.504, 0.496], 0)
>>> lda_path = temp_path + "/lda"
>>> lda.save(lda_path)
>>> sameLDA = LDA.load(lda_path)
>>> distributed_model_path = temp_path + "/lda_distributed_model"
>>> model.save(distributed_model_path)
>>> sameModel = DistributedLDAModel.load(distributed_model_path)
>>> local_model_path = temp_path + "/lda_local_model"
>>> localModel.save(local_model_path)
>>> sameLocalModel = LocalLDAModel.load(local_model_path)
.. versionadded:: 2.0.0
"""
k = Param(Params._dummy(), "k", "The number of topics (clusters) to infer. Must be > 1.",
typeConverter=TypeConverters.toInt)
optimizer = Param(Params._dummy(), "optimizer",
"Optimizer or inference algorithm used to estimate the LDA model. "
"Supported: online, em", typeConverter=TypeConverters.toString)
learningOffset = Param(Params._dummy(), "learningOffset",
"A (positive) learning parameter that downweights early iterations."
" Larger values make early iterations count less",
typeConverter=TypeConverters.toFloat)
    learningDecay = Param(Params._dummy(), "learningDecay", "Learning rate, set as an "
"exponential decay rate. This should be between (0.5, 1.0] to "
"guarantee asymptotic convergence.", typeConverter=TypeConverters.toFloat)
subsamplingRate = Param(Params._dummy(), "subsamplingRate",
"Fraction of the corpus to be sampled and used in each iteration "
"of mini-batch gradient descent, in range (0, 1].",
typeConverter=TypeConverters.toFloat)
optimizeDocConcentration = Param(Params._dummy(), "optimizeDocConcentration",
"Indicates whether the docConcentration (Dirichlet parameter "
"for document-topic distribution) will be optimized during "
"training.", typeConverter=TypeConverters.toBoolean)
docConcentration = Param(Params._dummy(), "docConcentration",
"Concentration parameter (commonly named \"alpha\") for the "
"prior placed on documents' distributions over topics (\"theta\").",
typeConverter=TypeConverters.toListFloat)
topicConcentration = Param(Params._dummy(), "topicConcentration",
"Concentration parameter (commonly named \"beta\" or \"eta\") for "
"the prior placed on topic' distributions over terms.",
typeConverter=TypeConverters.toFloat)
topicDistributionCol = Param(Params._dummy(), "topicDistributionCol",
"Output column with estimates of the topic mixture distribution "
"for each document (often called \"theta\" in the literature). "
"Returns a vector of zeros for an empty document.",
typeConverter=TypeConverters.toString)
keepLastCheckpoint = Param(Params._dummy(), "keepLastCheckpoint",
"(For EM optimizer) If using checkpointing, this indicates whether"
" to keep the last checkpoint. If false, then the checkpoint will be"
" deleted. Deleting the checkpoint can cause failures if a data"
" partition is lost, so set this bit with care.",
                               typeConverter=TypeConverters.toBoolean)
@keyword_only
def __init__(self, featuresCol="features", maxIter=20, seed=None, checkpointInterval=10,
k=10, optimizer="online", learningOffset=1024.0, learningDecay=0.51,
subsamplingRate=0.05, optimizeDocConcentration=True,
docConcentration=None, topicConcentration=None,
topicDistributionCol="topicDistribution", keepLastCheckpoint=True):
"""
__init__(self, featuresCol="features", maxIter=20, seed=None, checkpointInterval=10,\
k=10, optimizer="online", learningOffset=1024.0, learningDecay=0.51,\
subsamplingRate=0.05, optimizeDocConcentration=True,\
docConcentration=None, topicConcentration=None,\
                 topicDistributionCol="topicDistribution", keepLastCheckpoint=True)
"""
super(LDA, self).__init__()
self._java_obj = self._new_java_obj("org.apache.spark.ml.clustering.LDA", self.uid)
self._setDefault(maxIter=20, checkpointInterval=10,
k=10, optimizer="online", learningOffset=1024.0, learningDecay=0.51,
subsamplingRate=0.05, optimizeDocConcentration=True,
topicDistributionCol="topicDistribution", keepLastCheckpoint=True)
kwargs = self._input_kwargs
self.setParams(**kwargs)
def _create_model(self, java_model):
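        # Only the EM optimizer produces a distributed model (see the
        # DistributedLDAModel docstring above); "online" yields a local one.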
if self.getOptimizer() == "em":
return DistributedLDAModel(java_model)
else:
return LocalLDAModel(java_model)
@keyword_only
@since("2.0.0")
def setParams(self, featuresCol="features", maxIter=20, seed=None, checkpointInterval=10,
k=10, optimizer="online", learningOffset=1024.0, learningDecay=0.51,
subsamplingRate=0.05, optimizeDocConcentration=True,
docConcentration=None, topicConcentration=None,
topicDistributionCol="topicDistribution", keepLastCheckpoint=True):
"""
setParams(self, featuresCol="features", maxIter=20, seed=None, checkpointInterval=10,\
k=10, optimizer="online", learningOffset=1024.0, learningDecay=0.51,\
subsamplingRate=0.05, optimizeDocConcentration=True,\
docConcentration=None, topicConcentration=None,\
                 topicDistributionCol="topicDistribution", keepLastCheckpoint=True)
Sets params for LDA.
"""
kwargs = self._input_kwargs
return self._set(**kwargs)
@since("2.0.0")
def setK(self, value):
"""
Sets the value of :py:attr:`k`.
>>> algo = LDA().setK(10)
>>> algo.getK()
10
"""
return self._set(k=value)
@since("2.0.0")
def getK(self):
"""
Gets the value of :py:attr:`k` or its default value.
"""
return self.getOrDefault(self.k)
@since("2.0.0")
def setOptimizer(self, value):
"""
Sets the value of :py:attr:`optimizer`.
        Currently only 'em' and 'online' are supported.
>>> algo = LDA().setOptimizer("em")
>>> algo.getOptimizer()
'em'
"""
return self._set(optimizer=value)
@since("2.0.0")
def getOptimizer(self):
"""
Gets the value of :py:attr:`optimizer` or its default value.
"""
return self.getOrDefault(self.optimizer)
@since("2.0.0")
def setLearningOffset(self, value):
"""
Sets the value of :py:attr:`learningOffset`.
>>> algo = LDA().setLearningOffset(100)
>>> algo.getLearningOffset()
100.0
"""
return self._set(learningOffset=value)
@since("2.0.0")
def getLearningOffset(self):
"""
Gets the value of :py:attr:`learningOffset` or its default value.
"""
return self.getOrDefault(self.learningOffset)
@since("2.0.0")
def setLearningDecay(self, value):
"""
Sets the value of :py:attr:`learningDecay`.
>>> algo = LDA().setLearningDecay(0.1)
>>> algo.getLearningDecay()
0.1...
"""
return self._set(learningDecay=value)
@since("2.0.0")
def getLearningDecay(self):
"""
Gets the value of :py:attr:`learningDecay` or its default value.
"""
return self.getOrDefault(self.learningDecay)
@since("2.0.0")
def setSubsamplingRate(self, value):
"""
Sets the value of :py:attr:`subsamplingRate`.
>>> algo = LDA().setSubsamplingRate(0.1)
>>> algo.getSubsamplingRate()
0.1...
"""
return self._set(subsamplingRate=value)
@since("2.0.0")
def getSubsamplingRate(self):
"""
Gets the value of :py:attr:`subsamplingRate` or its default value.
"""
return self.getOrDefault(self.subsamplingRate)
@since("2.0.0")
def setOptimizeDocConcentration(self, value):
"""
Sets the value of :py:attr:`optimizeDocConcentration`.
>>> algo = LDA().setOptimizeDocConcentration(True)
>>> algo.getOptimizeDocConcentration()
True
"""
return self._set(optimizeDocConcentration=value)
@since("2.0.0")
def getOptimizeDocConcentration(self):
"""
Gets the value of :py:attr:`optimizeDocConcentration` or its default value.
"""
return self.getOrDefault(self.optimizeDocConcentration)
@since("2.0.0")
def setDocConcentration(self, value):
"""
Sets the value of :py:attr:`docConcentration`.
>>> algo = LDA().setDocConcentration([0.1, 0.2])
>>> algo.getDocConcentration()
[0.1..., 0.2...]
"""
return self._set(docConcentration=value)
@since("2.0.0")
def getDocConcentration(self):
"""
Gets the value of :py:attr:`docConcentration` or its default value.
"""
return self.getOrDefault(self.docConcentration)
@since("2.0.0")
def setTopicConcentration(self, value):
"""
Sets the value of :py:attr:`topicConcentration`.
>>> algo = LDA().setTopicConcentration(0.5)
>>> algo.getTopicConcentration()
0.5...
"""
return self._set(topicConcentration=value)
@since("2.0.0")
def getTopicConcentration(self):
"""
Gets the value of :py:attr:`topicConcentration` or its default value.
"""
return self.getOrDefault(self.topicConcentration)
@since("2.0.0")
def setTopicDistributionCol(self, value):
"""
Sets the value of :py:attr:`topicDistributionCol`.
>>> algo = LDA().setTopicDistributionCol("topicDistributionCol")
>>> algo.getTopicDistributionCol()
'topicDistributionCol'
"""
return self._set(topicDistributionCol=value)
@since("2.0.0")
def getTopicDistributionCol(self):
"""
Gets the value of :py:attr:`topicDistributionCol` or its default value.
"""
return self.getOrDefault(self.topicDistributionCol)
@since("2.0.0")
def setKeepLastCheckpoint(self, value):
"""
Sets the value of :py:attr:`keepLastCheckpoint`.
>>> algo = LDA().setKeepLastCheckpoint(False)
>>> algo.getKeepLastCheckpoint()
False
"""
return self._set(keepLastCheckpoint=value)
@since("2.0.0")
def getKeepLastCheckpoint(self):
"""
Gets the value of :py:attr:`keepLastCheckpoint` or its default value.
"""
return self.getOrDefault(self.keepLastCheckpoint)
if __name__ == "__main__":
import doctest
import pyspark.ml.clustering
from pyspark.sql import SparkSession
globs = pyspark.ml.clustering.__dict__.copy()
# The small batch size here ensures that we see multiple batches,
# even in these small test examples:
spark = SparkSession.builder\
.master("local[2]")\
.appName("ml.clustering tests")\
.getOrCreate()
sc = spark.sparkContext
globs['sc'] = sc
globs['spark'] = spark
import tempfile
temp_path = tempfile.mkdtemp()
globs['temp_path'] = temp_path
try:
(failure_count, test_count) = doctest.testmod(globs=globs, optionflags=doctest.ELLIPSIS)
spark.stop()
finally:
from shutil import rmtree
try:
rmtree(temp_path)
except OSError:
pass
if failure_count:
exit(-1)
|
spark0001/spark2.1.1
|
python/pyspark/ml/clustering.py
|
Python
|
apache-2.0
| 41,639
|
"""
ASV benchmarks for detect clear sky function.
"""
import pandas as pd
from pvlib import clearsky, solarposition
import numpy as np
class DetectClear:
params = [1, 10, 100] # number of days
param_names = ['ndays']
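    # ASV conventions (an assumption from the asv docs, not stated here):
    # ``setup`` is re-run for each value in ``params``, and every method
    # named ``time_*`` is timed once per parameter value.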
def setup(self, ndays):
self.times = pd.date_range(start='20180601', freq='1min',
periods=1440*ndays)
self.lat = 35.1
self.lon = -106.6
self.solar_position = solarposition.get_solarposition(
self.times, self.lat, self.lon)
clearsky_df = clearsky.simplified_solis(
self.solar_position['apparent_elevation'])
self.clearsky = clearsky_df['ghi']
measured_dni = clearsky_df['dni'].where(
(self.times.hour % 2).astype(bool), 0)
cos_zen = np.cos(np.deg2rad(self.solar_position['apparent_zenith']))
self.measured = measured_dni * cos_zen + clearsky_df['dhi']
self.measured *= 0.98
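        # Reading of this setup (not documented upstream): DNI is kept only
        # during odd hours and zeroed otherwise, alternating cloudy and
        # clear periods; the 0.98 factor keeps "clear" samples just below
        # the modeled clear-sky GHI.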
self.window_length = 10
def time_detect_clearsky(self, ndays):
clearsky.detect_clearsky(
self.measured, self.clearsky, self.times, self.window_length
)
|
mikofski/pvlib-python
|
benchmarks/benchmarks/detect_clearsky.py
|
Python
|
bsd-3-clause
| 1,151
|
"""
Cart-pole balancing with continuous / Kernelized iFDD
"""
from rlpy.Domains import InfCartPoleBalance
from rlpy.Agents import SARSA, Q_LEARNING
from rlpy.Representations import *
from rlpy.Policies import eGreedy
from rlpy.Experiments import Experiment
import numpy as np
from hyperopt import hp
from rlpy.Representations import KernelizediFDD
param_space = {
'kernel_resolution':
hp.loguniform("kernel_resolution", np.log(3), np.log(100)),
'discover_threshold':
hp.loguniform(
"discover_threshold",
np.log(1e-2),
np.log(1e1)),
'lambda_': hp.uniform("lambda_", 0., 1.),
'boyan_N0': hp.loguniform("boyan_N0", np.log(1e1), np.log(1e5)),
'initial_learn_rate': hp.loguniform("initial_learn_rate", np.log(1e-3), np.log(1))}
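# Note on the search space (hyperopt semantics, not stated here):
# hp.loguniform(label, low, high) draws exp(Uniform(low, high)), so e.g.
# kernel_resolution is sampled log-uniformly between 3 and 100.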
def make_experiment(
exp_id=1, path="./Results/Temp/{domain}/{agent}/{representation}/",
discover_threshold=.02208,
lambda_=0.6756,
boyan_N0=480.72,
initial_learn_rate=.2911,
kernel_resolution=18.435):
opt = {}
opt["path"] = path
opt["exp_id"] = exp_id
opt["max_steps"] = 10000
opt["num_policy_checks"] = 20
opt["checks_per_policy"] = 10
active_threshold = 0.01
max_base_feat_sim = 0.5
sparsify = 1
domain = InfCartPoleBalance()
opt["domain"] = domain
    kernel_width = (domain.statespace_limits[:, 1]
                    - domain.statespace_limits[:, 0]) / kernel_resolution
representation = KernelizediFDD(domain, sparsify=sparsify,
kernel=linf_triangle_kernel,
kernel_args=[kernel_width],
active_threshold=active_threshold,
discover_threshold=discover_threshold,
normalization=True,
max_active_base_feat=10, max_base_feat_sim=max_base_feat_sim)
policy = eGreedy(representation, epsilon=0.1)
# agent = SARSA(representation,policy,domain,initial_learn_rate=1.,
# lambda_=0., learn_rate_decay_mode="boyan", boyan_N0=100)
opt["agent"] = Q_LEARNING(
policy, representation, discount_factor=domain.discount_factor,
lambda_=lambda_, initial_learn_rate=initial_learn_rate,
learn_rate_decay_mode="boyan", boyan_N0=boyan_N0)
experiment = Experiment(**opt)
return experiment
if __name__ == '__main__':
experiment = make_experiment(1)
experiment.run()
experiment.save()
|
imanolarrieta/RL
|
examples/cartpole2d/kifdd_triangle.py
|
Python
|
bsd-3-clause
| 2,580
|
from __future__ import print_function
from __future__ import absolute_import
import numpy as nm
import sys
from six.moves import range
sys.path.append('.')
from sfepy.base.base import output, assert_
from sfepy.base.ioutils import ensure_path
from sfepy.linalg import cycle
from sfepy.discrete.fem.mesh import Mesh
from sfepy.mesh.mesh_tools import elems_q2t
def get_tensor_product_conn(shape):
"""
Generate vertex connectivity for cells of a tensor-product mesh of the
given shape.
Parameters
----------
shape : array of 2 or 3 ints
Shape (counts of nodes in x, y, z) of the mesh.
Returns
-------
conn : array
The vertex connectivity array.
desc : str
The cell kind.
"""
shape = nm.asarray(shape)
dim = len(shape)
assert_(1 <= dim <= 3)
n_nod = nm.prod(shape)
n_el = nm.prod(shape - 1)
grid = nm.arange(n_nod, dtype=nm.int32)
grid.shape = shape
if dim == 1:
conn = nm.zeros((n_el, 2), dtype=nm.int32)
conn[:, 0] = grid[:-1]
conn[:, 1] = grid[1:]
desc = '1_2'
elif dim == 2:
conn = nm.zeros((n_el, 4), dtype=nm.int32)
conn[:, 0] = grid[:-1, :-1].flat
conn[:, 1] = grid[1:, :-1].flat
conn[:, 2] = grid[1:, 1:].flat
conn[:, 3] = grid[:-1, 1:].flat
desc = '2_4'
else:
conn = nm.zeros((n_el, 8), dtype=nm.int32)
conn[:, 0] = grid[:-1, :-1, :-1].flat
conn[:, 1] = grid[1:, :-1, :-1].flat
conn[:, 2] = grid[1:, 1:, :-1].flat
conn[:, 3] = grid[:-1, 1:, :-1].flat
conn[:, 4] = grid[:-1, :-1, 1:].flat
conn[:, 5] = grid[1:, :-1, 1:].flat
conn[:, 6] = grid[1:, 1:, 1:].flat
conn[:, 7] = grid[:-1, 1:, 1:].flat
desc = '3_8'
return conn, desc
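# Minimal illustration (worked by hand from the code above, not part of the
# original module): a single quad cell on a 2x2 vertex grid.
#
#   >>> get_tensor_product_conn([2, 2])
#   (array([[0, 2, 3, 1]], dtype=int32), '2_4')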
def gen_block_mesh(dims, shape, centre, mat_id=0, name='block',
coors=None, verbose=True):
"""
Generate a 2D or 3D block mesh. The dimension is determined by the
    length of the shape argument.
Parameters
----------
dims : array of 2 or 3 floats
Dimensions of the block.
shape : array of 2 or 3 ints
Shape (counts of nodes in x, y, z) of the block mesh.
centre : array of 2 or 3 floats
Centre of the block.
mat_id : int, optional
The material id of all elements.
    name : string
        Mesh name.
    coors : array, optional
        If given, use these vertex coordinates instead of generating them
        from `dims`, `shape` and `centre` (a reading of the code; this
        parameter is otherwise undocumented).
    verbose : bool
        If True, show progress of the mesh generation.
Returns
-------
mesh : Mesh instance
"""
dims = nm.asarray(dims, dtype=nm.float64)
shape = nm.asarray(shape, dtype=nm.int32)
centre = nm.asarray(centre, dtype=nm.float64)
dim = shape.shape[0]
centre = centre[:dim]
dims = dims[:dim]
n_nod = nm.prod(shape)
output('generating %d vertices...' % n_nod, verbose=verbose)
x0 = centre - 0.5 * dims
dd = dims / (shape - 1)
ngrid = nm.mgrid[[slice(ii) for ii in shape]]
ngrid.shape = (dim, n_nod)
if coors is None:
coors = x0 + ngrid.T * dd
output('...done', verbose=verbose)
n_el = nm.prod(shape - 1)
output('generating %d cells...' % n_el, verbose=verbose)
mat_ids = nm.empty((n_el,), dtype=nm.int32)
mat_ids.fill(mat_id)
conn, desc = get_tensor_product_conn(shape)
output('...done', verbose=verbose)
mesh = Mesh.from_data(name, coors, None, [conn], [mat_ids], [desc])
return mesh
def gen_cylinder_mesh(dims, shape, centre, axis='x', force_hollow=False,
is_open=False, open_angle=0.0, non_uniform=False,
name='cylinder', verbose=True):
"""
Generate a cylindrical mesh along an axis. Its cross-section can be
ellipsoidal.
Parameters
----------
dims : array of 5 floats
Dimensions of the cylinder: inner surface semi-axes a1, b1, outer
surface semi-axes a2, b2, length.
shape : array of 3 ints
Shape (counts of nodes in radial, circumferential and longitudinal
directions) of the cylinder mesh.
centre : array of 3 floats
Centre of the cylinder.
axis: one of 'x', 'y', 'z'
The axis of the cylinder.
force_hollow : boolean
Force hollow mesh even if inner radii a1 = b1 = 0.
is_open : boolean
Generate an open cylinder segment.
open_angle : float
Opening angle in radians.
non_uniform : boolean
If True, space the mesh nodes in radial direction so that the element
        volumes are (approximately) the same, thus making the elements towards
the outer surface thinner.
name : string
Mesh name.
verbose : bool
If True, show progress of the mesh generation.
Returns
-------
mesh : Mesh instance
"""
dims = nm.asarray(dims, dtype=nm.float64)
shape = nm.asarray(shape, dtype=nm.int32)
centre = nm.asarray(centre, dtype=nm.float64)
a1, b1, a2, b2, length = dims
nr, nfi, nl = shape
origin = centre - nm.array([0.5 * length, 0.0, 0.0])
dfi = 2.0 * (nm.pi - open_angle) / nfi
if is_open:
nnfi = nfi + 1
else:
nnfi = nfi
is_hollow = force_hollow or not (max(abs(a1), abs(b1)) < 1e-15)
if is_hollow:
mr = 0
else:
mr = (nnfi - 1) * nl
grid = nm.zeros((nr, nnfi, nl), dtype=nm.int32)
n_nod = nr * nnfi * nl - mr
coors = nm.zeros((n_nod, 3), dtype=nm.float64)
angles = nm.linspace(open_angle, open_angle+(nfi)*dfi, nfi+1)
xs = nm.linspace(0.0, length, nl)
if non_uniform:
ras = nm.zeros((nr,), dtype=nm.float64)
rbs = nm.zeros_like(ras)
advol = (a2**2 - a1**2) / (nr - 1)
bdvol = (b2**2 - b1**2) / (nr - 1)
ras[0], rbs[0] = a1, b1
for ii in range(1, nr):
ras[ii] = nm.sqrt(advol + ras[ii-1]**2)
rbs[ii] = nm.sqrt(bdvol + rbs[ii-1]**2)
else:
ras = nm.linspace(a1, a2, nr)
rbs = nm.linspace(b1, b2, nr)
# This is 3D only...
output('generating %d vertices...' % n_nod, verbose=verbose)
ii = 0
for ix in range(nr):
a, b = ras[ix], rbs[ix]
for iy, fi in enumerate(angles[:nnfi]):
for iz, x in enumerate(xs):
grid[ix,iy,iz] = ii
coors[ii] = origin + [x, a * nm.cos(fi), b * nm.sin(fi)]
ii += 1
if not is_hollow and (ix == 0):
if iy > 0:
grid[ix,iy,iz] = grid[ix,0,iz]
ii -= 1
assert_(ii == n_nod)
output('...done', verbose=verbose)
n_el = (nr - 1) * nfi * (nl - 1)
conn = nm.zeros((n_el, 8), dtype=nm.int32)
output('generating %d cells...' % n_el, verbose=verbose)
ii = 0
for (ix, iy, iz) in cycle([nr-1, nnfi, nl-1]):
if iy < (nnfi - 1):
conn[ii,:] = [grid[ix ,iy ,iz ], grid[ix+1,iy ,iz ],
grid[ix+1,iy+1,iz ], grid[ix ,iy+1,iz ],
grid[ix ,iy ,iz+1], grid[ix+1,iy ,iz+1],
grid[ix+1,iy+1,iz+1], grid[ix ,iy+1,iz+1]]
ii += 1
elif not is_open:
conn[ii,:] = [grid[ix ,iy ,iz ], grid[ix+1,iy ,iz ],
grid[ix+1,0,iz ], grid[ix ,0,iz ],
grid[ix ,iy ,iz+1], grid[ix+1,iy ,iz+1],
grid[ix+1,0,iz+1], grid[ix ,0,iz+1]]
ii += 1
mat_id = nm.zeros((n_el,), dtype = nm.int32)
desc = '3_8'
assert_(n_nod == (conn.max() + 1))
output('...done', verbose=verbose)
if axis == 'z':
coors = coors[:,[1,2,0]]
elif axis == 'y':
coors = coors[:,[2,0,1]]
mesh = Mesh.from_data(name, coors, None, [conn], [mat_id], [desc])
return mesh
def _spread_along_axis(axis, coors, tangents, grading_fun):
"""
Spread the coordinates along the given axis using the grading function, and
the tangents in the other two directions.
"""
oo = list(set([0, 1, 2]).difference([axis]))
c0, c1, c2 = coors[:, axis], coors[:, oo[0]], coors[:, oo[1]]
out = nm.empty_like(coors)
mi, ma = c0.min(), c0.max()
nc0 = (c0 - mi) / (ma - mi)
out[:, axis] = oc0 = grading_fun(nc0) * (ma - mi) + mi
nc = oc0 - oc0.min()
mi, ma = c1.min(), c1.max()
n1 = 2 * (c1 - mi) / (ma - mi) - 1
out[:, oo[0]] = c1 + n1 * nc * tangents[oo[0]]
mi, ma = c2.min(), c2.max()
n2 = 2 * (c2 - mi) / (ma - mi) - 1
out[:, oo[1]] = c2 + n2 * nc * tangents[oo[1]]
return out
def _get_extension_side(side, grading_fun, mat_id,
b_dims, b_shape, e_dims, e_shape, centre):
"""
Get a mesh extending the given side of a block mesh.
"""
# Pure extension dimensions.
pe_dims = 0.5 * (e_dims - b_dims)
coff = 0.5 * (b_dims + pe_dims)
cc = centre + coff * nm.eye(3)[side]
if side == 0: # x axis.
dims = [pe_dims[0], b_dims[1], b_dims[2]]
shape = [e_shape, b_shape[1], b_shape[2]]
tangents = [0, pe_dims[1] / pe_dims[0], pe_dims[2] / pe_dims[0]]
elif side == 1: # y axis.
dims = [b_dims[0], pe_dims[1], b_dims[2]]
shape = [b_shape[0], e_shape, b_shape[2]]
tangents = [pe_dims[0] / pe_dims[1], 0, pe_dims[2] / pe_dims[1]]
elif side == 2: # z axis.
dims = [b_dims[0], b_dims[1], pe_dims[2]]
shape = [b_shape[0], b_shape[1], e_shape]
tangents = [pe_dims[0] / pe_dims[2], pe_dims[1] / pe_dims[2], 0]
e_mesh = gen_block_mesh(dims, shape, cc, mat_id=mat_id, verbose=False)
e_mesh.coors[:] = _spread_along_axis(side, e_mesh.coors, tangents,
grading_fun)
return e_mesh, shape
def gen_extended_block_mesh(b_dims, b_shape, e_dims, e_shape, centre,
grading_fun=None, name=None):
"""
Generate a 3D mesh with a central block and (coarse) extending side meshes.
The resulting mesh is again a block. Each of the components has a different
material id.
Parameters
----------
b_dims : array of 3 floats
The dimensions of the central block.
b_shape : array of 3 ints
The shape (counts of nodes in x, y, z) of the central block mesh.
e_dims : array of 3 floats
The dimensions of the complete block (central block + extensions).
e_shape : int
The count of nodes of extending blocks in the direction from the
central block.
centre : array of 3 floats
The centre of the mesh.
grading_fun : callable, optional
A function of :math:`x \in [0, 1]` that can be used to shift nodes in
the extension axis directions to allow smooth grading of element sizes
from the centre. The default function is :math:`x**p` with :math:`p`
determined so that the element sizes next to the central block have the
size of the shortest edge of the central block.
name : string, optional
The mesh name.
Returns
-------
mesh : Mesh instance
"""
b_dims = nm.asarray(b_dims, dtype=nm.float64)
b_shape = nm.asarray(b_shape, dtype=nm.int32)
e_dims = nm.asarray(e_dims, dtype=nm.float64)
centre = nm.asarray(centre, dtype=nm.float64)
# Pure extension dimensions.
pe_dims = 0.5 * (e_dims - b_dims)
# Central block element sizes.
dd = (b_dims / (b_shape - 1))
# The "first x" going to grading_fun.
nc = 1.0 / (e_shape - 1)
# Grading power and function.
power = nm.log(dd.min() / pe_dims.min()) / nm.log(nc)
grading_fun = (lambda x: x**power) if grading_fun is None else grading_fun
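    # Derivation of the default power (from the two lines above): with
    # grading_fun = x**power, the first extension node at x = nc lands at
    # nc**power * pe_dims.min() == dd.min(), i.e. the first extension
    # element matches the shortest central-block edge.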
# Central block mesh.
b_mesh = gen_block_mesh(b_dims, b_shape, centre, mat_id=0, verbose=False)
# 'x' extension.
e_mesh, xs = _get_extension_side(0, grading_fun, 10,
b_dims, b_shape, e_dims, e_shape, centre)
mesh = b_mesh + e_mesh
# Mirror by 'x'.
e_mesh.coors[:, 0] = (2 * centre[0]) - e_mesh.coors[:, 0]
e_mesh.cmesh.cell_groups.fill(11)
mesh = mesh + e_mesh
# 'y' extension.
e_mesh, ys = _get_extension_side(1, grading_fun, 20,
b_dims, b_shape, e_dims, e_shape, centre)
mesh = mesh + e_mesh
# Mirror by 'y'.
e_mesh.coors[:, 1] = (2 * centre[1]) - e_mesh.coors[:, 1]
e_mesh.cmesh.cell_groups.fill(21)
mesh = mesh + e_mesh
# 'z' extension.
e_mesh, zs = _get_extension_side(2, grading_fun, 30,
b_dims, b_shape, e_dims, e_shape, centre)
mesh = mesh + e_mesh
# Mirror by 'z'.
e_mesh.coors[:, 2] = (2 * centre[2]) - e_mesh.coors[:, 2]
e_mesh.cmesh.cell_groups.fill(31)
mesh = mesh + e_mesh
if name is not None:
mesh.name = name
# Verify merging by checking the number of nodes.
n_nod = (nm.prod(nm.maximum(b_shape - 2, 0)) + 2 * nm.prod(xs)
+ 2 * (max(ys[0] - 2, 0) * ys[1] * ys[2])
+ 2 * (max(zs[0] - 2, 0) * max(zs[1] - 2, 0) * zs[2]))
if n_nod != mesh.n_nod:
raise ValueError('Merge of meshes failed! (%d == %d)'
% (n_nod, mesh.n_nod))
return mesh
def tiled_mesh1d(conn, coors, ngrps, idim, n_rep, bb, eps=1e-6, ndmap=False):
from sfepy.discrete.fem.periodic import match_grid_plane
s1 = nm.nonzero(coors[:,idim] < (bb[0] + eps))[0]
s2 = nm.nonzero(coors[:,idim] > (bb[1] - eps))[0]
if s1.shape != s2.shape:
raise ValueError('incompatible shapes: %s == %s'\
% (s1.shape, s2.shape))
(nnod0, dim) = coors.shape
nnod = nnod0 * n_rep - s1.shape[0] * (n_rep - 1)
(nel0, nnel) = conn.shape
nel = nel0 * n_rep
dd = nm.zeros((dim,), dtype=nm.float64)
dd[idim] = bb[1] - bb[0]
m1, m2 = match_grid_plane(coors[s1], coors[s2], idim)
oconn = nm.zeros((nel, nnel), dtype=nm.int32)
ocoors = nm.zeros((nnod, dim), dtype=nm.float64)
ongrps = nm.zeros((nnod,), dtype=nm.int32)
if type(ndmap) is bool:
ret_ndmap = ndmap
else:
ret_ndmap= True
ndmap_out = nm.zeros((nnod,), dtype=nm.int32)
el_off = 0
nd_off = 0
for ii in range(n_rep):
if ii == 0:
oconn[0:nel0,:] = conn
ocoors[0:nnod0,:] = coors
ongrps[0:nnod0] = ngrps.squeeze()
nd_off += nnod0
mapto = s2[m2]
mask = nm.ones((nnod0,), dtype=nm.int32)
mask[s1] = 0
remap0 = nm.cumsum(mask) - 1
nnod0r = nnod0 - s1.shape[0]
cidx = nm.where(mask)
if ret_ndmap:
ndmap_out[0:nnod0] = nm.arange(nnod0)
else:
remap = remap0 + nd_off
remap[s1[m1]] = mapto
mapto = remap[s2[m2]]
ocoors[nd_off:(nd_off + nnod0r),:] =\
(coors[cidx,:] + ii * dd)
ongrps[nd_off:(nd_off + nnod0r)] = ngrps[cidx].squeeze()
oconn[el_off:(el_off + nel0),:] = remap[conn]
if ret_ndmap:
ndmap_out[nd_off:(nd_off + nnod0r)] = cidx[0]
nd_off += nnod0r
el_off += nel0
if ret_ndmap:
if ndmap is not None:
max_nd_ref = nm.max(ndmap)
idxs = nm.where(ndmap_out > max_nd_ref)
ndmap_out[idxs] = ndmap[ndmap_out[idxs]]
return oconn, ocoors, ongrps, ndmap_out
else:
return oconn, ocoors, ongrps
def gen_tiled_mesh(mesh, grid=None, scale=1.0, eps=1e-6, ret_ndmap=False):
"""
Generate a new mesh by repeating a given periodic element
along each axis.
Parameters
----------
mesh : Mesh instance
The input periodic FE mesh.
grid : array
Number of repetition along each axis.
scale : float, optional
Scaling factor.
eps : float, optional
Tolerance for boundary detection.
ret_ndmap : bool, optional
If True, return global node map.
Returns
-------
mesh_out : Mesh instance
FE mesh.
ndmap : array
Maps: actual node id --> node id in the reference cell.
"""
bbox = mesh.get_bounding_box()
if grid is None:
iscale = max(int(1.0 / scale), 1)
grid = [iscale] * mesh.dim
conn = mesh.get_conn(mesh.descs[0])
mat_ids = mesh.cmesh.cell_groups
coors = mesh.coors
ngrps = mesh.cmesh.vertex_groups
nrep = nm.prod(grid)
ndmap = None
output('repeating %s ...' % grid)
nblk = 1
for ii, gr in enumerate(grid):
if ret_ndmap:
(conn, coors,
ngrps, ndmap0) = tiled_mesh1d(conn, coors, ngrps,
ii, gr, bbox.transpose()[ii],
eps=eps, ndmap=ndmap)
ndmap = ndmap0
else:
conn, coors, ngrps = tiled_mesh1d(conn, coors, ngrps,
ii, gr, bbox.transpose()[ii],
eps=eps)
nblk *= gr
output('...done')
mat_ids = nm.tile(mat_ids, (nrep,))
mesh_out = Mesh.from_data('tiled mesh', coors * scale, ngrps,
[conn], [mat_ids], [mesh.descs[0]])
if ret_ndmap:
return mesh_out, ndmap
else:
return mesh_out
def gen_misc_mesh(mesh_dir, force_create, kind, args, suffix='.mesh',
verbose=False):
"""
Create sphere or cube mesh according to `kind` in the given
directory if it does not exist and return path to it.
"""
import os
from sfepy import data_dir
defdir = os.path.join(data_dir, 'meshes')
if mesh_dir is None:
mesh_dir = defdir
    def retype(args, types, defaults):
        args = list(args)
        args.extend(defaults[len(args):len(defaults)])
        return tuple(tp(value) for tp, value in zip(types, args))
if kind == 'sphere':
default = [5, 41, args[0]]
args = retype(args, [float, int, float], default)
mesh_pattern = os.path.join(mesh_dir, 'sphere-%.2f-%.2f-%i')
else:
assert_(kind == 'cube')
args = retype(args,
(int, float, int, float, int, float),
(args[0], args[1], args[0], args[1], args[0], args[1]))
mesh_pattern = os.path.join(mesh_dir, 'cube-%i_%.2f-%i_%.2f-%i_%.2f')
if verbose:
output(args)
filename = mesh_pattern % args
if not force_create:
if os.path.exists(filename): return filename
        if os.path.exists(filename + '.mesh'): return filename + '.mesh'
if os.path.exists(filename + '.vtk'): return filename + '.vtk'
if kind == 'cube':
filename = filename + suffix
ensure_path(filename)
output('creating new cube mesh')
output('(%i nodes in %.2f) x (%i nodes in %.2f) x (%i nodes in %.2f)'
% args)
output('to file %s...' % filename)
mesh = gen_block_mesh(args[1::2], args[0::2],
(0.0, 0.0, 0.0), name=filename)
mesh.write(filename, io='auto')
output('...done')
else:
import subprocess, shutil, tempfile
filename = filename + '.mesh'
ensure_path(filename)
output('creating new sphere mesh (%i nodes, r=%.2f) and gradation %d'
% args)
output('to file %s...' % filename)
f = open(os.path.join(defdir, 'quantum', 'sphere.geo'))
tmp_dir = tempfile.mkdtemp()
tmpfile = os.path.join(tmp_dir, 'sphere.geo.temp')
ff = open(tmpfile, "w")
ff.write("""
R = %i.0;
n = %i.0;
dens = %f;
""" % args)
ff.write(f.read())
f.close()
ff.close()
subprocess.call(['gmsh', '-3', tmpfile, '-format', 'mesh',
'-o', filename])
shutil.rmtree(tmp_dir)
output('...done')
return filename
def gen_mesh_from_string(mesh_name, mesh_dir):
import re
result = re.match('^\\s*([a-zA-Z]+)[:\\(]([^\\):]*)[:\\)](\\*)?\\s*$',
mesh_name)
if result is None:
return mesh_name
else:
args = re.split(',', result.group(2))
kind = result.group(1)
return gen_misc_mesh(mesh_dir, result.group(3)=='*', kind, args)
def gen_mesh_from_geom(geo, a=None, verbose=False, refine=False):
"""
Runs mesh generator - tetgen for 3D or triangle for 2D meshes.
Parameters
----------
geo : geometry
geometry description
a : int, optional
a maximum area/volume constraint
verbose : bool, optional
detailed information
refine : bool, optional
refines mesh
Returns
-------
mesh : Mesh instance
triangular or tetrahedral mesh
"""
import os.path as op
import pexpect
import tempfile
import shutil
tmp_dir = tempfile.mkdtemp()
polyfilename = op.join(tmp_dir, 'meshgen.poly')
# write geometry to poly file
geo.to_poly_file(polyfilename)
meshgen_call = {2: ('triangle', ''), 3: ('tetgen', 'BFENk')}
params = "-ACp"
params += "q" if refine else ''
params += "V" if verbose else "Q"
params += meshgen_call[geo.dim][1]
if a is not None:
params += "a%f" % (a)
params += " %s" % (polyfilename)
cmd = "%s %s" % (meshgen_call[geo.dim][0], params)
if verbose: print("Generating mesh using", cmd)
    p = pexpect.run(cmd, timeout=None)
bname, ext = op.splitext(polyfilename)
if geo.dim == 2:
mesh = Mesh.from_file(bname + '.1.node')
if geo.dim == 3:
mesh = Mesh.from_file(bname + '.1.vtk')
shutil.rmtree(tmp_dir)
return mesh
def gen_mesh_from_voxels(voxels, dims, etype='q'):
"""
Generate FE mesh from voxels (volumetric data).
Parameters
----------
voxels : array
Voxel matrix, 1=material.
dims : array
Size of one voxel.
etype : integer, optional
'q' - quadrilateral or hexahedral elements
't' - triangular or tetrahedral elements
Returns
-------
mesh : Mesh instance
Finite element mesh.
"""
dims = nm.array(dims).squeeze()
dim = len(dims)
nddims = nm.array(voxels.shape) + 2
nodemtx = nm.zeros(nddims, dtype=nm.int32)
if dim == 2:
#iy, ix = nm.where(voxels.transpose())
iy, ix = nm.where(voxels)
nel = ix.shape[0]
if etype == 'q':
nodemtx[ix,iy] += 1
nodemtx[ix + 1,iy] += 1
nodemtx[ix + 1,iy + 1] += 1
nodemtx[ix,iy + 1] += 1
elif etype == 't':
nodemtx[ix,iy] += 2
nodemtx[ix + 1,iy] += 1
nodemtx[ix + 1,iy + 1] += 2
nodemtx[ix,iy + 1] += 1
nel *= 2
elif dim == 3:
#iy, ix, iz = nm.where(voxels.transpose(1, 0, 2))
iy, ix, iz = nm.where(voxels)
nel = ix.shape[0]
if etype == 'q':
nodemtx[ix,iy,iz] += 1
nodemtx[ix + 1,iy,iz] += 1
nodemtx[ix + 1,iy + 1,iz] += 1
nodemtx[ix,iy + 1,iz] += 1
nodemtx[ix,iy,iz + 1] += 1
nodemtx[ix + 1,iy,iz + 1] += 1
nodemtx[ix + 1,iy + 1,iz + 1] += 1
nodemtx[ix,iy + 1,iz + 1] += 1
elif etype == 't':
nodemtx[ix,iy,iz] += 6
nodemtx[ix + 1,iy,iz] += 2
nodemtx[ix + 1,iy + 1,iz] += 2
nodemtx[ix,iy + 1,iz] += 2
nodemtx[ix,iy,iz + 1] += 2
nodemtx[ix + 1,iy,iz + 1] += 2
nodemtx[ix + 1,iy + 1,iz + 1] += 6
nodemtx[ix,iy + 1,iz + 1] += 2
nel *= 6
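            # The weights mirror how many of the 6 tetrahedra produced by
            # elems_q2t share each hexahedron corner (the two corners on the
            # split diagonal touch all 6); only nonzero-ness matters for the
            # nm.where(nodemtx) lookup below (a reading of the code).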
else:
msg = 'incorrect voxel dimension! (%d)' % dim
raise ValueError(msg)
ndidx = nm.where(nodemtx)
coors = nm.array(ndidx).transpose() * dims
nnod = coors.shape[0]
nodeid = -nm.ones(nddims, dtype=nm.int32)
nodeid[ndidx] = nm.arange(nnod)
# generate elements
if dim == 2:
elems = nm.array([nodeid[ix,iy],
nodeid[ix + 1,iy],
nodeid[ix + 1,iy + 1],
nodeid[ix,iy + 1]]).transpose()
elif dim == 3:
elems = nm.array([nodeid[ix,iy,iz],
nodeid[ix + 1,iy,iz],
nodeid[ix + 1,iy + 1,iz],
nodeid[ix,iy + 1,iz],
nodeid[ix,iy,iz + 1],
nodeid[ix + 1,iy,iz + 1],
nodeid[ix + 1,iy + 1,iz + 1],
nodeid[ix,iy + 1,iz + 1]]).transpose()
if etype == 't':
elems = elems_q2t(elems)
eid = etype + str(dim)
eltab = {'q2': 4, 'q3': 8, 't2': 3, 't3': 4}
mesh = Mesh.from_data('voxel_data',
coors, nm.ones((nnod,), dtype=nm.int32),
[nm.ascontiguousarray(elems)],
[nm.ones((nel,), dtype=nm.int32)],
['%d_%d' % (dim, eltab[eid])])
return mesh
def main():
mesh = gen_block_mesh(nm.array((1.0, 2.0, 3.0)),
nm.array((10,10,10)), nm.array((1.0, 2.0, 3.0)),
name='')
mesh.write('0.mesh', io = 'auto')
mesh = gen_cylinder_mesh(nm.array((1.0, 1.0, 2.0, 2.0, 3)),
nm.array((10,10,10)), nm.array((1.0, 2.0, 3.0)),
is_open=False, open_angle = 0.0,
name='')
mesh.write('1.mesh', io = 'auto')
mesh = gen_cylinder_mesh(nm.array((1.0, 1.0, 2.0, 2.0, 3)),
nm.array((10,10,10)), nm.array((1.0, 2.0, 3.0)),
is_open=True, open_angle = 0.0,
name='')
mesh.write('2.mesh', io = 'auto')
mesh = gen_cylinder_mesh(nm.array((1.0, 1.0, 2.0, 2.0, 3)),
nm.array((10,10,10)), nm.array((1.0, 2.0, 3.0)),
is_open=True, open_angle = 0.5,
name='')
mesh.write('3.mesh', io = 'auto')
mesh = gen_cylinder_mesh(nm.array((0.0, 0.0, 2.0, 2.0, 3)),
nm.array((10,10,10)), nm.array((1.0, 2.0, 3.0)),
is_open=False, open_angle = 0.0,
name='')
mesh.write('4.mesh', io = 'auto')
mesh = gen_cylinder_mesh(nm.array((0.0, 0.0, 1.0, 2.0, 3)),
nm.array((10,10,10)), nm.array((1.0, 2.0, 3.0)),
is_open=True, open_angle = 0.5,
name='')
mesh.write('5.mesh', io = 'auto')
mesh = gen_cylinder_mesh(nm.array((0.0, 0.0, 1.0, 2.0, 3)),
nm.array((10,10,10)), nm.array((1.0, 2.0, 3.0)),
is_open=True, open_angle = 0.5, non_uniform=True,
name='')
mesh.write('6.mesh', io = 'auto')
mesh = gen_cylinder_mesh(nm.array((0.5, 0.5, 1.0, 2.0, 3)),
nm.array((10,10,10)), nm.array((1.0, 2.0, 3.0)),
is_open=True, open_angle = 0.5, non_uniform=True,
name='')
mesh.write('7.mesh', io = 'auto')
if __name__ == '__main__':
main()
|
vlukes/sfepy
|
sfepy/mesh/mesh_generators.py
|
Python
|
bsd-3-clause
| 27,209
|
from __future__ import division, print_function
def configuration(parent_package='',top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('fft', parent_package, top_path)
config.add_data_dir('tests')
# Configure pocketfft_internal
config.add_extension('_pocketfft_internal',
sources=['_pocketfft.c']
)
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(configuration=configuration)
|
MSeifert04/numpy
|
numpy/fft/setup.py
|
Python
|
bsd-3-clause
| 542
|
import pytest
import numpy as np
from sklearn.utils.testing import assert_true
from sklearn.utils.testing import assert_false
from sklearn.utils.testing import assert_equal
from sklearn.utils.testing import assert_less
from sklearn.utils.testing import assert_greater
from sklearn.utils.testing import assert_array_almost_equal
from sklearn.utils.testing import assert_array_equal
from sklearn.utils.testing import assert_allclose
from sklearn.utils.testing import assert_raises
from sklearn.utils.testing import skip_if_32bit
from sklearn import datasets
from sklearn.linear_model import LogisticRegression, SGDClassifier, Lasso
from sklearn.svm import LinearSVC
from sklearn.feature_selection import SelectFromModel
from sklearn.ensemble import RandomForestClassifier
from sklearn.linear_model import PassiveAggressiveClassifier
from sklearn.base import BaseEstimator
iris = datasets.load_iris()
data, y = iris.data, iris.target
rng = np.random.RandomState(0)
def test_invalid_input():
clf = SGDClassifier(alpha=0.1, max_iter=10, shuffle=True,
random_state=None, tol=None)
for threshold in ["gobbledigook", ".5 * gobbledigook"]:
model = SelectFromModel(clf, threshold=threshold)
model.fit(data, y)
assert_raises(ValueError, model.transform, data)
@pytest.mark.filterwarnings('ignore:The default value of n_estimators')
def test_input_estimator_unchanged():
# Test that SelectFromModel fits on a clone of the estimator.
est = RandomForestClassifier()
transformer = SelectFromModel(estimator=est)
transformer.fit(data, y)
assert_true(transformer.estimator is est)
@pytest.mark.parametrize(
"max_features, err_type, err_msg",
[(-1, ValueError, "'max_features' should be 0 and"),
(data.shape[1] + 1, ValueError, "'max_features' should be 0 and"),
('gobbledigook', TypeError, "should be an integer"),
('all', TypeError, "should be an integer")]
)
def test_max_features_error(max_features, err_type, err_msg):
clf = RandomForestClassifier(n_estimators=50, random_state=0)
transformer = SelectFromModel(estimator=clf,
max_features=max_features,
threshold=-np.inf)
with pytest.raises(err_type, match=err_msg):
transformer.fit(data, y)
@pytest.mark.parametrize("max_features", [0, 2, data.shape[1]])
def test_max_features_dim(max_features):
clf = RandomForestClassifier(n_estimators=50, random_state=0)
transformer = SelectFromModel(estimator=clf,
max_features=max_features,
threshold=-np.inf)
X_trans = transformer.fit_transform(data, y)
assert X_trans.shape[1] == max_features
class FixedImportanceEstimator(BaseEstimator):
def __init__(self, importances):
self.importances = importances
def fit(self, X, y=None):
self.feature_importances_ = np.array(self.importances)
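# A minimal sketch of how such a stub plugs into SelectFromModel (illustrative,
# not part of the original suite; the importances and data below are made up):
#
#     est = FixedImportanceEstimator([1, 2, 3])
#     sel = SelectFromModel(est, threshold='mean').fit(np.ones((4, 3)), [0, 1, 0, 1])
#     sel.get_support()  # keeps the features whose importance clears the mean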
def test_max_features():
# Test max_features parameter using various values
X, y = datasets.make_classification(
n_samples=1000, n_features=10, n_informative=3, n_redundant=0,
n_repeated=0, shuffle=False, random_state=0)
max_features = X.shape[1]
est = RandomForestClassifier(n_estimators=50, random_state=0)
transformer1 = SelectFromModel(estimator=est,
threshold=-np.inf)
transformer2 = SelectFromModel(estimator=est,
max_features=max_features,
threshold=-np.inf)
X_new1 = transformer1.fit_transform(X, y)
X_new2 = transformer2.fit_transform(X, y)
assert_allclose(X_new1, X_new2)
# Test max_features against actual model.
transformer1 = SelectFromModel(estimator=Lasso(alpha=0.025,
random_state=42))
X_new1 = transformer1.fit_transform(X, y)
scores1 = np.abs(transformer1.estimator_.coef_)
candidate_indices1 = np.argsort(-scores1, kind='mergesort')
for n_features in range(1, X_new1.shape[1] + 1):
transformer2 = SelectFromModel(estimator=Lasso(alpha=0.025,
random_state=42),
max_features=n_features,
threshold=-np.inf)
X_new2 = transformer2.fit_transform(X, y)
scores2 = np.abs(transformer2.estimator_.coef_)
candidate_indices2 = np.argsort(-scores2, kind='mergesort')
assert_allclose(X[:, candidate_indices1[:n_features]],
X[:, candidate_indices2[:n_features]])
assert_allclose(transformer1.estimator_.coef_,
transformer2.estimator_.coef_)
def test_max_features_tiebreak():
# Test that max_features can break ties among equal feature importances
X, y = datasets.make_classification(
n_samples=1000, n_features=10, n_informative=3, n_redundant=0,
n_repeated=0, shuffle=False, random_state=0)
max_features = X.shape[1]
feature_importances = np.array([4, 4, 4, 4, 3, 3, 3, 2, 2, 1])
for n_features in range(1, max_features + 1):
transformer = SelectFromModel(
FixedImportanceEstimator(feature_importances),
max_features=n_features,
threshold=-np.inf)
X_new = transformer.fit_transform(X, y)
selected_feature_indices = np.where(transformer._get_support_mask())[0]
assert_array_equal(selected_feature_indices, np.arange(n_features))
assert X_new.shape[1] == n_features
def test_threshold_and_max_features():
X, y = datasets.make_classification(
n_samples=1000, n_features=10, n_informative=3, n_redundant=0,
n_repeated=0, shuffle=False, random_state=0)
est = RandomForestClassifier(n_estimators=50, random_state=0)
transformer1 = SelectFromModel(estimator=est, max_features=3,
threshold=-np.inf)
X_new1 = transformer1.fit_transform(X, y)
transformer2 = SelectFromModel(estimator=est, threshold=0.04)
X_new2 = transformer2.fit_transform(X, y)
transformer3 = SelectFromModel(estimator=est, max_features=3,
threshold=0.04)
X_new3 = transformer3.fit_transform(X, y)
assert X_new3.shape[1] == min(X_new1.shape[1], X_new2.shape[1])
selected_indices = transformer3.transform(
np.arange(X.shape[1])[np.newaxis, :])
assert_allclose(X_new3, X[:, selected_indices[0]])
@skip_if_32bit
def test_feature_importances():
X, y = datasets.make_classification(
n_samples=1000, n_features=10, n_informative=3, n_redundant=0,
n_repeated=0, shuffle=False, random_state=0)
est = RandomForestClassifier(n_estimators=50, random_state=0)
for threshold, func in zip(["mean", "median"], [np.mean, np.median]):
transformer = SelectFromModel(estimator=est, threshold=threshold)
transformer.fit(X, y)
assert_true(hasattr(transformer.estimator_, 'feature_importances_'))
X_new = transformer.transform(X)
assert_less(X_new.shape[1], X.shape[1])
importances = transformer.estimator_.feature_importances_
feature_mask = np.abs(importances) > func(importances)
assert_array_almost_equal(X_new, X[:, feature_mask])
@pytest.mark.filterwarnings('ignore: Default solver will be changed') # 0.22
@pytest.mark.filterwarnings('ignore: Default multi_class will') # 0.22
def test_sample_weight():
# Ensure sample weights are passed to underlying estimator
X, y = datasets.make_classification(
n_samples=100, n_features=10, n_informative=3, n_redundant=0,
n_repeated=0, shuffle=False, random_state=0)
# Check with sample weights
sample_weight = np.ones(y.shape)
sample_weight[y == 1] *= 100
est = LogisticRegression(random_state=0, fit_intercept=False)
transformer = SelectFromModel(estimator=est)
transformer.fit(X, y, sample_weight=None)
mask = transformer._get_support_mask()
transformer.fit(X, y, sample_weight=sample_weight)
weighted_mask = transformer._get_support_mask()
assert not np.all(weighted_mask == mask)
transformer.fit(X, y, sample_weight=3 * sample_weight)
reweighted_mask = transformer._get_support_mask()
assert np.all(weighted_mask == reweighted_mask)
def test_coef_default_threshold():
X, y = datasets.make_classification(
n_samples=100, n_features=10, n_informative=3, n_redundant=0,
n_repeated=0, shuffle=False, random_state=0)
# For the Lasso and related models, the threshold defaults to 1e-5
transformer = SelectFromModel(estimator=Lasso(alpha=0.1,
random_state=42))
transformer.fit(X, y)
X_new = transformer.transform(X)
mask = np.abs(transformer.estimator_.coef_) > 1e-5
assert_array_almost_equal(X_new, X[:, mask])
@pytest.mark.filterwarnings('ignore: Default solver will be changed') # 0.22
@pytest.mark.filterwarnings('ignore: Default multi_class will') # 0.22
@skip_if_32bit
def test_2d_coef():
X, y = datasets.make_classification(
n_samples=1000, n_features=10, n_informative=3, n_redundant=0,
n_repeated=0, shuffle=False, random_state=0, n_classes=4)
est = LogisticRegression()
for threshold, func in zip(["mean", "median"], [np.mean, np.median]):
for order in [1, 2, np.inf]:
# Fit SelectFromModel a multi-class problem
transformer = SelectFromModel(estimator=LogisticRegression(),
threshold=threshold,
norm_order=order)
transformer.fit(X, y)
assert_true(hasattr(transformer.estimator_, 'coef_'))
X_new = transformer.transform(X)
assert_less(X_new.shape[1], X.shape[1])
# Manually check that the norm is correctly performed
est.fit(X, y)
importances = np.linalg.norm(est.coef_, axis=0, ord=order)
feature_mask = importances > func(importances)
assert_array_almost_equal(X_new, X[:, feature_mask])
@pytest.mark.filterwarnings('ignore:The default value of n_estimators')
def test_partial_fit():
est = PassiveAggressiveClassifier(random_state=0, shuffle=False,
max_iter=5, tol=None)
transformer = SelectFromModel(estimator=est)
transformer.partial_fit(data, y,
classes=np.unique(y))
old_model = transformer.estimator_
transformer.partial_fit(data, y,
classes=np.unique(y))
new_model = transformer.estimator_
assert_true(old_model is new_model)
X_transform = transformer.transform(data)
transformer.fit(np.vstack((data, data)), np.concatenate((y, y)))
assert_array_almost_equal(X_transform, transformer.transform(data))
# check that if est doesn't have partial_fit, neither does SelectFromModel
transformer = SelectFromModel(estimator=RandomForestClassifier())
assert_false(hasattr(transformer, "partial_fit"))
def test_calling_fit_reinitializes():
est = LinearSVC(random_state=0)
transformer = SelectFromModel(estimator=est)
transformer.fit(data, y)
transformer.set_params(estimator__C=100)
transformer.fit(data, y)
assert_equal(transformer.estimator_.C, 100)
def test_prefit():
# Test all possible combinations of the prefit parameter.
# Passing an already fitted estimator with prefit=True and fitting an
# unfitted model with prefit=False should give the same results.
clf = SGDClassifier(alpha=0.1, max_iter=10, shuffle=True,
random_state=0, tol=None)
model = SelectFromModel(clf)
model.fit(data, y)
X_transform = model.transform(data)
clf.fit(data, y)
model = SelectFromModel(clf, prefit=True)
assert_array_almost_equal(model.transform(data), X_transform)
# Check that the model is rewritten if prefit=False and a fitted model is
# passed
model = SelectFromModel(clf, prefit=False)
model.fit(data, y)
assert_array_almost_equal(model.transform(data), X_transform)
# Check that prefit=True and calling fit raises a ValueError
model = SelectFromModel(clf, prefit=True)
assert_raises(ValueError, model.fit, data, y)
def test_threshold_string():
est = RandomForestClassifier(n_estimators=50, random_state=0)
model = SelectFromModel(est, threshold="0.5*mean")
model.fit(data, y)
X_transform = model.transform(data)
# Calculate the threshold from the estimator directly.
est.fit(data, y)
threshold = 0.5 * np.mean(est.feature_importances_)
mask = est.feature_importances_ > threshold
assert_array_almost_equal(X_transform, data[:, mask])
def test_threshold_without_refitting():
# Test that the threshold can be set without refitting the model.
clf = SGDClassifier(alpha=0.1, max_iter=10, shuffle=True,
random_state=0, tol=None)
model = SelectFromModel(clf, threshold="0.1 * mean")
model.fit(data, y)
X_transform = model.transform(data)
# Set a higher threshold to filter out more features.
model.threshold = "1.0 * mean"
assert_greater(X_transform.shape[1], model.transform(data).shape[1])
|
vortex-ape/scikit-learn
|
sklearn/feature_selection/tests/test_from_model.py
|
Python
|
bsd-3-clause
| 13,321
|
import warnings
from django.template import TemplateSyntaxError
from django.test import ignore_warnings, SimpleTestCase
from django.test.utils import reset_warning_registry
from django.utils.deprecation import RemovedInDjango20Warning
from ..utils import setup, TestObj
class IfTagTests(SimpleTestCase):
@setup({'if-tag01': '{% if foo %}yes{% else %}no{% endif %}'})
def test_if_tag01(self):
output = self.engine.render_to_string('if-tag01', {'foo': True})
self.assertEqual(output, 'yes')
@setup({'if-tag02': '{% if foo %}yes{% else %}no{% endif %}'})
def test_if_tag02(self):
output = self.engine.render_to_string('if-tag02', {'foo': False})
self.assertEqual(output, 'no')
@setup({'if-tag03': '{% if foo %}yes{% else %}no{% endif %}'})
def test_if_tag03(self):
output = self.engine.render_to_string('if-tag03')
self.assertEqual(output, 'no')
@setup({'if-tag04': '{% if foo %}foo{% elif bar %}bar{% endif %}'})
def test_if_tag04(self):
output = self.engine.render_to_string('if-tag04', {'foo': True})
self.assertEqual(output, 'foo')
@setup({'if-tag05': '{% if foo %}foo{% elif bar %}bar{% endif %}'})
def test_if_tag05(self):
output = self.engine.render_to_string('if-tag05', {'bar': True})
self.assertEqual(output, 'bar')
@setup({'if-tag06': '{% if foo %}foo{% elif bar %}bar{% endif %}'})
def test_if_tag06(self):
output = self.engine.render_to_string('if-tag06')
self.assertEqual(output, '')
@setup({'if-tag07': '{% if foo %}foo{% elif bar %}bar{% else %}nothing{% endif %}'})
def test_if_tag07(self):
output = self.engine.render_to_string('if-tag07', {'foo': True})
self.assertEqual(output, 'foo')
@setup({'if-tag08': '{% if foo %}foo{% elif bar %}bar{% else %}nothing{% endif %}'})
def test_if_tag08(self):
output = self.engine.render_to_string('if-tag08', {'bar': True})
self.assertEqual(output, 'bar')
@setup({'if-tag09': '{% if foo %}foo{% elif bar %}bar{% else %}nothing{% endif %}'})
def test_if_tag09(self):
output = self.engine.render_to_string('if-tag09')
self.assertEqual(output, 'nothing')
@setup({'if-tag10': '{% if foo %}foo{% elif bar %}bar{% elif baz %}baz{% else %}nothing{% endif %}'})
def test_if_tag10(self):
output = self.engine.render_to_string('if-tag10', {'foo': True})
self.assertEqual(output, 'foo')
@setup({'if-tag11': '{% if foo %}foo{% elif bar %}bar{% elif baz %}baz{% else %}nothing{% endif %}'})
def test_if_tag11(self):
output = self.engine.render_to_string('if-tag11', {'bar': True})
self.assertEqual(output, 'bar')
@setup({'if-tag12': '{% if foo %}foo{% elif bar %}bar{% elif baz %}baz{% else %}nothing{% endif %}'})
def test_if_tag12(self):
output = self.engine.render_to_string('if-tag12', {'baz': True})
self.assertEqual(output, 'baz')
@setup({'if-tag13': '{% if foo %}foo{% elif bar %}bar{% elif baz %}baz{% else %}nothing{% endif %}'})
def test_if_tag13(self):
output = self.engine.render_to_string('if-tag13')
self.assertEqual(output, 'nothing')
# Filters
@setup({'if-tag-filter01': '{% if foo|length == 5 %}yes{% else %}no{% endif %}'})
def test_if_tag_filter01(self):
output = self.engine.render_to_string('if-tag-filter01', {'foo': 'abcde'})
self.assertEqual(output, 'yes')
@setup({'if-tag-filter02': '{% if foo|upper == \'ABC\' %}yes{% else %}no{% endif %}'})
def test_if_tag_filter02(self):
output = self.engine.render_to_string('if-tag-filter02')
self.assertEqual(output, 'no')
# Equality
@setup({'if-tag-eq01': '{% if foo == bar %}yes{% else %}no{% endif %}'})
def test_if_tag_eq01(self):
output = self.engine.render_to_string('if-tag-eq01')
self.assertEqual(output, 'yes')
@setup({'if-tag-eq02': '{% if foo == bar %}yes{% else %}no{% endif %}'})
def test_if_tag_eq02(self):
output = self.engine.render_to_string('if-tag-eq02', {'foo': 1})
self.assertEqual(output, 'no')
@setup({'if-tag-eq03': '{% if foo == bar %}yes{% else %}no{% endif %}'})
def test_if_tag_eq03(self):
output = self.engine.render_to_string('if-tag-eq03', {'foo': 1, 'bar': 1})
self.assertEqual(output, 'yes')
@setup({'if-tag-eq04': '{% if foo == bar %}yes{% else %}no{% endif %}'})
def test_if_tag_eq04(self):
output = self.engine.render_to_string('if-tag-eq04', {'foo': 1, 'bar': 2})
self.assertEqual(output, 'no')
@setup({'if-tag-eq05': '{% if foo == \'\' %}yes{% else %}no{% endif %}'})
def test_if_tag_eq05(self):
output = self.engine.render_to_string('if-tag-eq05')
self.assertEqual(output, 'no')
# Comparison
@setup({'if-tag-gt-01': '{% if 2 > 1 %}yes{% else %}no{% endif %}'})
def test_if_tag_gt_01(self):
output = self.engine.render_to_string('if-tag-gt-01')
self.assertEqual(output, 'yes')
@setup({'if-tag-gt-02': '{% if 1 > 1 %}yes{% else %}no{% endif %}'})
def test_if_tag_gt_02(self):
output = self.engine.render_to_string('if-tag-gt-02')
self.assertEqual(output, 'no')
@setup({'if-tag-gte-01': '{% if 1 >= 1 %}yes{% else %}no{% endif %}'})
def test_if_tag_gte_01(self):
output = self.engine.render_to_string('if-tag-gte-01')
self.assertEqual(output, 'yes')
@setup({'if-tag-gte-02': '{% if 1 >= 2 %}yes{% else %}no{% endif %}'})
def test_if_tag_gte_02(self):
output = self.engine.render_to_string('if-tag-gte-02')
self.assertEqual(output, 'no')
@setup({'if-tag-lt-01': '{% if 1 < 2 %}yes{% else %}no{% endif %}'})
def test_if_tag_lt_01(self):
output = self.engine.render_to_string('if-tag-lt-01')
self.assertEqual(output, 'yes')
@setup({'if-tag-lt-02': '{% if 1 < 1 %}yes{% else %}no{% endif %}'})
def test_if_tag_lt_02(self):
output = self.engine.render_to_string('if-tag-lt-02')
self.assertEqual(output, 'no')
@setup({'if-tag-lte-01': '{% if 1 <= 1 %}yes{% else %}no{% endif %}'})
def test_if_tag_lte_01(self):
output = self.engine.render_to_string('if-tag-lte-01')
self.assertEqual(output, 'yes')
@setup({'if-tag-lte-02': '{% if 2 <= 1 %}yes{% else %}no{% endif %}'})
def test_if_tag_lte_02(self):
output = self.engine.render_to_string('if-tag-lte-02')
self.assertEqual(output, 'no')
# Contains
@setup({'if-tag-in-01': '{% if 1 in x %}yes{% else %}no{% endif %}'})
def test_if_tag_in_01(self):
output = self.engine.render_to_string('if-tag-in-01', {'x': [1]})
self.assertEqual(output, 'yes')
@setup({'if-tag-in-02': '{% if 2 in x %}yes{% else %}no{% endif %}'})
def test_if_tag_in_02(self):
output = self.engine.render_to_string('if-tag-in-02', {'x': [1]})
self.assertEqual(output, 'no')
@setup({'if-tag-not-in-01': '{% if 1 not in x %}yes{% else %}no{% endif %}'})
def test_if_tag_not_in_01(self):
output = self.engine.render_to_string('if-tag-not-in-01', {'x': [1]})
self.assertEqual(output, 'no')
@setup({'if-tag-not-in-02': '{% if 2 not in x %}yes{% else %}no{% endif %}'})
def test_if_tag_not_in_02(self):
output = self.engine.render_to_string('if-tag-not-in-02', {'x': [1]})
self.assertEqual(output, 'yes')
# AND
@setup({'if-tag-and01': '{% if foo and bar %}yes{% else %}no{% endif %}'})
def test_if_tag_and01(self):
output = self.engine.render_to_string('if-tag-and01', {'foo': True, 'bar': True})
self.assertEqual(output, 'yes')
@setup({'if-tag-and02': '{% if foo and bar %}yes{% else %}no{% endif %}'})
def test_if_tag_and02(self):
output = self.engine.render_to_string('if-tag-and02', {'foo': True, 'bar': False})
self.assertEqual(output, 'no')
@setup({'if-tag-and03': '{% if foo and bar %}yes{% else %}no{% endif %}'})
def test_if_tag_and03(self):
output = self.engine.render_to_string('if-tag-and03', {'foo': False, 'bar': True})
self.assertEqual(output, 'no')
@setup({'if-tag-and04': '{% if foo and bar %}yes{% else %}no{% endif %}'})
def test_if_tag_and04(self):
output = self.engine.render_to_string('if-tag-and04', {'foo': False, 'bar': False})
self.assertEqual(output, 'no')
@setup({'if-tag-and05': '{% if foo and bar %}yes{% else %}no{% endif %}'})
def test_if_tag_and05(self):
output = self.engine.render_to_string('if-tag-and05', {'foo': False})
self.assertEqual(output, 'no')
@setup({'if-tag-and06': '{% if foo and bar %}yes{% else %}no{% endif %}'})
def test_if_tag_and06(self):
output = self.engine.render_to_string('if-tag-and06', {'bar': False})
self.assertEqual(output, 'no')
@setup({'if-tag-and07': '{% if foo and bar %}yes{% else %}no{% endif %}'})
def test_if_tag_and07(self):
output = self.engine.render_to_string('if-tag-and07', {'foo': True})
self.assertEqual(output, 'no')
@setup({'if-tag-and08': '{% if foo and bar %}yes{% else %}no{% endif %}'})
def test_if_tag_and08(self):
output = self.engine.render_to_string('if-tag-and08', {'bar': True})
self.assertEqual(output, 'no')
# OR
@setup({'if-tag-or01': '{% if foo or bar %}yes{% else %}no{% endif %}'})
def test_if_tag_or01(self):
output = self.engine.render_to_string('if-tag-or01', {'foo': True, 'bar': True})
self.assertEqual(output, 'yes')
@setup({'if-tag-or02': '{% if foo or bar %}yes{% else %}no{% endif %}'})
def test_if_tag_or02(self):
output = self.engine.render_to_string('if-tag-or02', {'foo': True, 'bar': False})
self.assertEqual(output, 'yes')
@setup({'if-tag-or03': '{% if foo or bar %}yes{% else %}no{% endif %}'})
def test_if_tag_or03(self):
output = self.engine.render_to_string('if-tag-or03', {'foo': False, 'bar': True})
self.assertEqual(output, 'yes')
@setup({'if-tag-or04': '{% if foo or bar %}yes{% else %}no{% endif %}'})
def test_if_tag_or04(self):
output = self.engine.render_to_string('if-tag-or04', {'foo': False, 'bar': False})
self.assertEqual(output, 'no')
@setup({'if-tag-or05': '{% if foo or bar %}yes{% else %}no{% endif %}'})
def test_if_tag_or05(self):
output = self.engine.render_to_string('if-tag-or05', {'foo': False})
self.assertEqual(output, 'no')
@setup({'if-tag-or06': '{% if foo or bar %}yes{% else %}no{% endif %}'})
def test_if_tag_or06(self):
output = self.engine.render_to_string('if-tag-or06', {'bar': False})
self.assertEqual(output, 'no')
@setup({'if-tag-or07': '{% if foo or bar %}yes{% else %}no{% endif %}'})
def test_if_tag_or07(self):
output = self.engine.render_to_string('if-tag-or07', {'foo': True})
self.assertEqual(output, 'yes')
@setup({'if-tag-or08': '{% if foo or bar %}yes{% else %}no{% endif %}'})
def test_if_tag_or08(self):
output = self.engine.render_to_string('if-tag-or08', {'bar': True})
self.assertEqual(output, 'yes')
@setup({'if-tag-or09': '{% if foo or bar or baz %}yes{% else %}no{% endif %}'})
def test_if_tag_or09(self):
"""
multiple ORs
"""
output = self.engine.render_to_string('if-tag-or09', {'baz': True})
self.assertEqual(output, 'yes')
# NOT
@setup({'if-tag-not01': '{% if not foo %}no{% else %}yes{% endif %}'})
def test_if_tag_not01(self):
output = self.engine.render_to_string('if-tag-not01', {'foo': True})
self.assertEqual(output, 'yes')
@setup({'if-tag-not02': '{% if not not foo %}no{% else %}yes{% endif %}'})
def test_if_tag_not02(self):
output = self.engine.render_to_string('if-tag-not02', {'foo': True})
self.assertEqual(output, 'no')
@setup({'if-tag-not06': '{% if foo and not bar %}yes{% else %}no{% endif %}'})
def test_if_tag_not06(self):
output = self.engine.render_to_string('if-tag-not06')
self.assertEqual(output, 'no')
@setup({'if-tag-not07': '{% if foo and not bar %}yes{% else %}no{% endif %}'})
def test_if_tag_not07(self):
output = self.engine.render_to_string('if-tag-not07', {'foo': True, 'bar': True})
self.assertEqual(output, 'no')
@setup({'if-tag-not08': '{% if foo and not bar %}yes{% else %}no{% endif %}'})
def test_if_tag_not08(self):
output = self.engine.render_to_string('if-tag-not08', {'foo': True, 'bar': False})
self.assertEqual(output, 'yes')
@setup({'if-tag-not09': '{% if foo and not bar %}yes{% else %}no{% endif %}'})
def test_if_tag_not09(self):
output = self.engine.render_to_string('if-tag-not09', {'foo': False, 'bar': True})
self.assertEqual(output, 'no')
@setup({'if-tag-not10': '{% if foo and not bar %}yes{% else %}no{% endif %}'})
def test_if_tag_not10(self):
output = self.engine.render_to_string('if-tag-not10', {'foo': False, 'bar': False})
self.assertEqual(output, 'no')
@setup({'if-tag-not11': '{% if not foo and bar %}yes{% else %}no{% endif %}'})
def test_if_tag_not11(self):
output = self.engine.render_to_string('if-tag-not11')
self.assertEqual(output, 'no')
@setup({'if-tag-not12': '{% if not foo and bar %}yes{% else %}no{% endif %}'})
def test_if_tag_not12(self):
output = self.engine.render_to_string('if-tag-not12', {'foo': True, 'bar': True})
self.assertEqual(output, 'no')
@setup({'if-tag-not13': '{% if not foo and bar %}yes{% else %}no{% endif %}'})
def test_if_tag_not13(self):
output = self.engine.render_to_string('if-tag-not13', {'foo': True, 'bar': False})
self.assertEqual(output, 'no')
@setup({'if-tag-not14': '{% if not foo and bar %}yes{% else %}no{% endif %}'})
def test_if_tag_not14(self):
output = self.engine.render_to_string('if-tag-not14', {'foo': False, 'bar': True})
self.assertEqual(output, 'yes')
@setup({'if-tag-not15': '{% if not foo and bar %}yes{% else %}no{% endif %}'})
def test_if_tag_not15(self):
output = self.engine.render_to_string('if-tag-not15', {'foo': False, 'bar': False})
self.assertEqual(output, 'no')
@setup({'if-tag-not16': '{% if foo or not bar %}yes{% else %}no{% endif %}'})
def test_if_tag_not16(self):
output = self.engine.render_to_string('if-tag-not16')
self.assertEqual(output, 'yes')
@setup({'if-tag-not17': '{% if foo or not bar %}yes{% else %}no{% endif %}'})
def test_if_tag_not17(self):
output = self.engine.render_to_string('if-tag-not17', {'foo': True, 'bar': True})
self.assertEqual(output, 'yes')
@setup({'if-tag-not18': '{% if foo or not bar %}yes{% else %}no{% endif %}'})
def test_if_tag_not18(self):
output = self.engine.render_to_string('if-tag-not18', {'foo': True, 'bar': False})
self.assertEqual(output, 'yes')
@setup({'if-tag-not19': '{% if foo or not bar %}yes{% else %}no{% endif %}'})
def test_if_tag_not19(self):
output = self.engine.render_to_string('if-tag-not19', {'foo': False, 'bar': True})
self.assertEqual(output, 'no')
@setup({'if-tag-not20': '{% if foo or not bar %}yes{% else %}no{% endif %}'})
def test_if_tag_not20(self):
output = self.engine.render_to_string('if-tag-not20', {'foo': False, 'bar': False})
self.assertEqual(output, 'yes')
@setup({'if-tag-not21': '{% if not foo or bar %}yes{% else %}no{% endif %}'})
def test_if_tag_not21(self):
output = self.engine.render_to_string('if-tag-not21')
self.assertEqual(output, 'yes')
@setup({'if-tag-not22': '{% if not foo or bar %}yes{% else %}no{% endif %}'})
def test_if_tag_not22(self):
output = self.engine.render_to_string('if-tag-not22', {'foo': True, 'bar': True})
self.assertEqual(output, 'yes')
@setup({'if-tag-not23': '{% if not foo or bar %}yes{% else %}no{% endif %}'})
def test_if_tag_not23(self):
output = self.engine.render_to_string('if-tag-not23', {'foo': True, 'bar': False})
self.assertEqual(output, 'no')
@setup({'if-tag-not24': '{% if not foo or bar %}yes{% else %}no{% endif %}'})
def test_if_tag_not24(self):
output = self.engine.render_to_string('if-tag-not24', {'foo': False, 'bar': True})
self.assertEqual(output, 'yes')
@setup({'if-tag-not25': '{% if not foo or bar %}yes{% else %}no{% endif %}'})
def test_if_tag_not25(self):
output = self.engine.render_to_string('if-tag-not25', {'foo': False, 'bar': False})
self.assertEqual(output, 'yes')
@setup({'if-tag-not26': '{% if not foo and not bar %}yes{% else %}no{% endif %}'})
def test_if_tag_not26(self):
output = self.engine.render_to_string('if-tag-not26')
self.assertEqual(output, 'yes')
@setup({'if-tag-not27': '{% if not foo and not bar %}yes{% else %}no{% endif %}'})
def test_if_tag_not27(self):
output = self.engine.render_to_string('if-tag-not27', {'foo': True, 'bar': True})
self.assertEqual(output, 'no')
@setup({'if-tag-not28': '{% if not foo and not bar %}yes{% else %}no{% endif %}'})
def test_if_tag_not28(self):
output = self.engine.render_to_string('if-tag-not28', {'foo': True, 'bar': False})
self.assertEqual(output, 'no')
@setup({'if-tag-not29': '{% if not foo and not bar %}yes{% else %}no{% endif %}'})
def test_if_tag_not29(self):
output = self.engine.render_to_string('if-tag-not29', {'foo': False, 'bar': True})
self.assertEqual(output, 'no')
@setup({'if-tag-not30': '{% if not foo and not bar %}yes{% else %}no{% endif %}'})
def test_if_tag_not30(self):
output = self.engine.render_to_string('if-tag-not30', {'foo': False, 'bar': False})
self.assertEqual(output, 'yes')
@setup({'if-tag-not31': '{% if not foo or not bar %}yes{% else %}no{% endif %}'})
def test_if_tag_not31(self):
output = self.engine.render_to_string('if-tag-not31')
self.assertEqual(output, 'yes')
@setup({'if-tag-not32': '{% if not foo or not bar %}yes{% else %}no{% endif %}'})
def test_if_tag_not32(self):
output = self.engine.render_to_string('if-tag-not32', {'foo': True, 'bar': True})
self.assertEqual(output, 'no')
@setup({'if-tag-not33': '{% if not foo or not bar %}yes{% else %}no{% endif %}'})
def test_if_tag_not33(self):
output = self.engine.render_to_string('if-tag-not33', {'foo': True, 'bar': False})
self.assertEqual(output, 'yes')
@setup({'if-tag-not34': '{% if not foo or not bar %}yes{% else %}no{% endif %}'})
def test_if_tag_not34(self):
output = self.engine.render_to_string('if-tag-not34', {'foo': False, 'bar': True})
self.assertEqual(output, 'yes')
@setup({'if-tag-not35': '{% if not foo or not bar %}yes{% else %}no{% endif %}'})
def test_if_tag_not35(self):
output = self.engine.render_to_string('if-tag-not35', {'foo': False, 'bar': False})
self.assertEqual(output, 'yes')
# Various syntax errors
@setup({'if-tag-error01': '{% if %}yes{% endif %}'})
def test_if_tag_error01(self):
with self.assertRaises(TemplateSyntaxError):
self.engine.get_template('if-tag-error01')
@setup({'if-tag-error02': '{% if foo and %}yes{% else %}no{% endif %}'})
def test_if_tag_error02(self):
with self.assertRaises(TemplateSyntaxError):
self.engine.render_to_string('if-tag-error02', {'foo': True})
@setup({'if-tag-error03': '{% if foo or %}yes{% else %}no{% endif %}'})
def test_if_tag_error03(self):
with self.assertRaises(TemplateSyntaxError):
self.engine.render_to_string('if-tag-error03', {'foo': True})
@setup({'if-tag-error04': '{% if not foo and %}yes{% else %}no{% endif %}'})
def test_if_tag_error04(self):
with self.assertRaises(TemplateSyntaxError):
self.engine.render_to_string('if-tag-error04', {'foo': True})
@setup({'if-tag-error05': '{% if not foo or %}yes{% else %}no{% endif %}'})
def test_if_tag_error05(self):
with self.assertRaises(TemplateSyntaxError):
self.engine.render_to_string('if-tag-error05', {'foo': True})
@setup({'if-tag-error06': '{% if abc def %}yes{% endif %}'})
def test_if_tag_error06(self):
with self.assertRaises(TemplateSyntaxError):
self.engine.get_template('if-tag-error06')
@setup({'if-tag-error07': '{% if not %}yes{% endif %}'})
def test_if_tag_error07(self):
with self.assertRaises(TemplateSyntaxError):
self.engine.get_template('if-tag-error07')
@setup({'if-tag-error08': '{% if and %}yes{% endif %}'})
def test_if_tag_error08(self):
with self.assertRaises(TemplateSyntaxError):
self.engine.get_template('if-tag-error08')
@setup({'if-tag-error09': '{% if or %}yes{% endif %}'})
def test_if_tag_error09(self):
with self.assertRaises(TemplateSyntaxError):
self.engine.get_template('if-tag-error09')
@setup({'if-tag-error10': '{% if == %}yes{% endif %}'})
def test_if_tag_error10(self):
with self.assertRaises(TemplateSyntaxError):
self.engine.get_template('if-tag-error10')
@setup({'if-tag-error11': '{% if 1 == %}yes{% endif %}'})
def test_if_tag_error11(self):
with self.assertRaises(TemplateSyntaxError):
self.engine.get_template('if-tag-error11')
@setup({'if-tag-error12': '{% if a not b %}yes{% endif %}'})
def test_if_tag_error12(self):
with self.assertRaises(TemplateSyntaxError):
self.engine.get_template('if-tag-error12')
@setup({'if-tag-shortcircuit01': '{% if x.is_true or x.is_bad %}yes{% else %}no{% endif %}'})
def test_if_tag_shortcircuit01(self):
"""
If-expression evaluations are short-circuited where possible
"""
output = self.engine.render_to_string('if-tag-shortcircuit01', {'x': TestObj()})
self.assertEqual(output, 'yes')
@setup({'if-tag-shortcircuit02': '{% if x.is_false and x.is_bad %}yes{% else %}no{% endif %}'})
def test_if_tag_shortcircuit02(self):
"""
The is_bad() function should not be evaluated. If it is, an
exception is raised.
"""
output = self.engine.render_to_string('if-tag-shortcircuit02', {'x': TestObj()})
self.assertEqual(output, 'no')
@setup({'if-tag-badarg01': '{% if x|default_if_none:y %}yes{% endif %}'})
def test_if_tag_badarg01(self):
"""
Non-existent args
"""
output = self.engine.render_to_string('if-tag-badarg01')
self.assertEqual(output, '')
@setup({'if-tag-badarg02': '{% if x|default_if_none:y %}yes{% endif %}'})
def test_if_tag_badarg02(self):
output = self.engine.render_to_string('if-tag-badarg02', {'y': 0})
self.assertEqual(output, '')
@setup({'if-tag-badarg03': '{% if x|default_if_none:y %}yes{% endif %}'})
def test_if_tag_badarg03(self):
output = self.engine.render_to_string('if-tag-badarg03', {'y': 1})
self.assertEqual(output, 'yes')
@setup({'if-tag-badarg04': '{% if x|default_if_none:y %}yes{% else %}no{% endif %}'})
def test_if_tag_badarg04(self):
output = self.engine.render_to_string('if-tag-badarg04')
self.assertEqual(output, 'no')
@setup({'if-tag-eq-deprecated': '{% if foo = bar %}yes{% else %}no{% endif %}'},
test_once=True)
def test_if_tag_eq_deprecated(self):
reset_warning_registry()
with warnings.catch_warnings(record=True) as warns:
warnings.simplefilter('always')
output = self.engine.render_to_string('if-tag-eq-deprecated')
self.assertEqual(output, 'yes')
self.assertEqual(len(warns), 1)
self.assertEqual(
str(warns[0].message),
"Operator '=' is deprecated and will be removed in Django 2.0. "
"Use '==' instead."
)
@ignore_warnings(category=RemovedInDjango20Warning)
class TestEqualitySingleEqualsSign(SimpleTestCase):
# The following tests should be changed to template.TemplateSyntaxError
# (or simply removed) when the deprecation path ends in Django 2.0.
@setup({'if-tag-eq01': '{% if foo = bar %}yes{% else %}no{% endif %}'})
def test_if_tag_eq01(self):
output = self.engine.render_to_string('if-tag-eq01', {'foo': 1})
self.assertEqual(output, 'no')
@setup({'if-tag-eq02': '{% if foo = bar %}yes{% else %}no{% endif %}'})
def test_if_tag_eq02(self):
output = self.engine.render_to_string('if-tag-eq02', {'foo': 1, 'bar': 1})
self.assertEqual(output, 'yes')
@setup({'if-tag-eq03': '{% if foo = bar %}yes{% else %}no{% endif %}'})
def test_if_tag_eq03(self):
output = self.engine.render_to_string('if-tag-eq03', {'foo': 1, 'bar': 2})
self.assertEqual(output, 'no')
@setup({'if-tag-eq04': '{% if foo == \'\' %}yes{% else %}no{% endif %}'})
def test_if_tag_eq04(self):
output = self.engine.render_to_string('if-tag-eq04')
self.assertEqual(output, 'no')
|
Sonicbids/django
|
tests/template_tests/syntax_tests/test_if.py
|
Python
|
bsd-3-clause
| 25,371
|
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'voicex_dev', # Or path to database file if using sqlite3.
'USER': 'postgres', # Not used with sqlite3.
'PASSWORD': 'postgres', # Not used with sqlite3.
'HOST': 'localhost', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
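# A sqlite3 fallback for quick local development might look like this
# (illustrative alternative, not part of the original settings; the file path
# is made up):
#
#     DATABASES = {
#         'default': {
#             'ENGINE': 'django.db.backends.sqlite3',
#             'NAME': '/workspace/voicex/voicex.db',
#         }
#     }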
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': True,
'formatters': {
'standard': {
'format' : "[%(asctime)s] %(levelname)s [%(name)s:%(lineno)s] %(message)s",
'datefmt' : "%d/%b/%Y %H:%M:%S"
},
},
'handlers': {
'null': {
'level':'DEBUG',
'class':'django.utils.log.NullHandler',
},
'logfile': {
'level':'DEBUG',
'class':'logging.handlers.RotatingFileHandler',
'filename':"/workspace/voicex/logs/voicex.log",
'maxBytes': 50000,
'backupCount': 2,
'formatter': 'standard',
},
'console':{
'level':'DEBUG',
'class':'logging.StreamHandler',
'formatter': 'standard'
},
},
'loggers': {
'django': {
'handlers':['console'],
'propagate': True,
'level':'WARN',
},
'django.request': {
'handlers': ['console'],
'level': 'ERROR',
'propagate': True,
},
'django.db.backends': {
'handlers': ['console'],
'level': 'DEBUG',
'propagate': False,
},
'voicex': {
'handlers': ['console', 'logfile'],
'level': 'DEBUG',
},
'transport': {
'handlers': ['console', 'logfile'],
'level': 'DEBUG',
},
}
}
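# Application code picks up the loggers configured above by name (illustrative
# usage, not part of the original settings; the variable names are made up):
#
#     import logging
#     logger = logging.getLogger('voicex')  # handled by 'console' and 'logfile'
#     logger.debug('incoming message from %s', sender)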
|
imclab/voicex
|
http_handler/rename_to_local_settings.py
|
Python
|
mit
| 2,387
|
from django.conf.urls.defaults import *
urlpatterns = patterns('',
url(r'^login/$', 'authsub.views.login'),
)
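# patterns() and django.conf.urls.defaults were removed in later Django
# releases; a rough modern equivalent (illustrative, assuming the same view
# module) would be:
#
#     from django.urls import path
#     from authsub.views import login
#     urlpatterns = [path('login/', login)]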
|
pombreda/django-hotclub
|
apps/local_apps/authsub/urls.py
|
Python
|
mit
| 114
|
# -*- coding:utf-8 -*-
"""
/***************************************************************************
Plugin Installer module
-------------------
Date : May 2013
Copyright : (C) 2013 by Borys Jurgiel
Email : info at borysjurgiel dot pl
This module is based on former plugin_installer plugin:
Copyright (C) 2007-2008 Matthew Perry
Copyright (C) 2008-2013 Borys Jurgiel
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
import os
import json
import zipfile
from qgis.PyQt.QtCore import Qt, QObject, QDir, QUrl, QFileInfo, QFile
from qgis.PyQt.QtWidgets import QApplication, QDialog, QDialogButtonBox, QFrame, QMessageBox, QLabel, QVBoxLayout
from qgis.PyQt.QtNetwork import QNetworkRequest
import qgis
from qgis.core import Qgis, QgsApplication, QgsNetworkAccessManager, QgsSettings, QgsNetworkRequestParameters
from qgis.gui import QgsMessageBar, QgsPasswordLineEdit, QgsHelp
from qgis.utils import (iface, startPlugin, unloadPlugin, loadPlugin,
reloadPlugin, updateAvailablePlugins, plugins_metadata_parser)
from .installer_data import (repositories, plugins, officialRepo,
settingsGroup, reposGroup, removeDir)
from .qgsplugininstallerinstallingdialog import QgsPluginInstallerInstallingDialog
from .qgsplugininstallerpluginerrordialog import QgsPluginInstallerPluginErrorDialog
from .qgsplugininstallerfetchingdialog import QgsPluginInstallerFetchingDialog
from .qgsplugininstallerrepositorydialog import QgsPluginInstallerRepositoryDialog
from .unzip import unzip
from .plugindependencies import find_dependencies
from .qgsplugindependenciesdialog import QgsPluginDependenciesDialog
# public instances:
pluginInstaller = None
def initPluginInstaller():
global pluginInstaller
pluginInstaller = QgsPluginInstaller()
# -------------------------------------------------------- #
class QgsPluginInstaller(QObject):
""" The main class for managing the plugin installer stuff"""
statusLabel = None
# ----------------------------------------- #
def __init__(self):
""" Initialize data objects, starts fetching if appropriate, and warn about/removes obsolete plugins """
QObject.__init__(self) # initialize QObject in order to to use self.tr()
repositories.load()
plugins.getAllInstalled()
if repositories.checkingOnStart() and repositories.timeForChecking() and repositories.allEnabled():
# start fetching repositories
self.statusLabel = QLabel(iface.mainWindow().statusBar())
iface.mainWindow().statusBar().addPermanentWidget(self.statusLabel)
self.statusLabel.linkActivated.connect(self.showPluginManagerWhenReady)
repositories.checkingDone.connect(self.checkingDone)
for key in repositories.allEnabled():
repositories.requestFetching(key)
else:
# no fetching at start, so mark all enabled repositories as requesting to be fetched.
for key in repositories.allEnabled():
repositories.setRepositoryData(key, "state", 3)
# look for obsolete plugin updates (the user-installed copy is older than the core one)
for key in plugins.obsoletePlugins:
plugin = plugins.localCache[key]
msg = QMessageBox()
msg.setIcon(QMessageBox.Warning)
msg.setWindowTitle(self.tr("QGIS Python Plugin Installer"))
msg.addButton(self.tr("Uninstall (recommended)"), QMessageBox.AcceptRole)
msg.addButton(self.tr("I will uninstall it later"), QMessageBox.RejectRole)
msg.setText("%s <b>%s</b><br/><br/>%s" % (self.tr("Obsolete plugin:"), plugin["name"], self.tr("QGIS has detected an obsolete plugin that masks its more recent version shipped with this copy of QGIS. This is likely due to files associated with a previous installation of QGIS. Do you want to remove the old plugin right now and unmask the more recent version?")))
msg.exec_()
if not msg.result():
# uninstall the obsolete plugin, update utils and reload it if enabled
self.uninstallPlugin(key, quiet=True)
updateAvailablePlugins()
settings = QgsSettings()
if settings.value("/PythonPlugins/" + key, False, type=bool):
settings.setValue("/PythonPlugins/watchDog/" + key, True)
loadPlugin(key)
startPlugin(key)
settings.remove("/PythonPlugins/watchDog/" + key)
# ----------------------------------------- #
def fetchAvailablePlugins(self, reloadMode):
""" Fetch plugins from all enabled repositories."""
""" reloadMode = true: Fully refresh data from QgsSettings to mRepositories """
""" reloadMode = false: Fetch unready repositories only """
QApplication.setOverrideCursor(Qt.WaitCursor)
if reloadMode:
repositories.load()
plugins.clearRepoCache()
plugins.getAllInstalled()
for key in repositories.allEnabled():
if reloadMode or repositories.all()[key]["state"] == 3: # if state = 3 (error or not fetched yet), try to fetch once again
repositories.requestFetching(key, force_reload=reloadMode)
if repositories.fetchingInProgress():
fetchDlg = QgsPluginInstallerFetchingDialog(iface.mainWindow())
fetchDlg.exec_()
del fetchDlg
for key in repositories.all():
repositories.killConnection(key)
QApplication.restoreOverrideCursor()
# display an error message for every unavailable repository, unless Shift is pressed or all repositories are unavailable
keepQuiet = QgsApplication.keyboardModifiers() == Qt.KeyboardModifiers(Qt.ShiftModifier)
if repositories.allUnavailable() and repositories.allUnavailable() != repositories.allEnabled():
for key in repositories.allUnavailable():
if not keepQuiet:
QMessageBox.warning(iface.mainWindow(), self.tr("QGIS Python Plugin Installer"), self.tr("Error reading repository:") + " " + key + "\n\n" + repositories.all()[key]["error"])
if QgsApplication.keyboardModifiers() == Qt.KeyboardModifiers(Qt.ShiftModifier):
keepQuiet = True
# finally, rebuild plugins from the caches
plugins.rebuild()
# ----------------------------------------- #
def checkingDone(self):
""" Remove the "Looking for new plugins..." label and display a notification instead if any updates or news available """
if not self.statusLabel:
# only proceed if the label is present
return
# rebuild plugins cache
plugins.rebuild()
# look for news in the repositories
plugins.markNews()
status = ""
icon = ""
# first check for news
for key in plugins.all():
if plugins.all()[key]["status"] == "new":
status = self.tr("There is a new plugin available")
icon = "pluginNew.svg"
tabIndex = 4 # PLUGMAN_TAB_NEW
# then check for updates (and eventually overwrite status)
for key in plugins.all():
if plugins.all()[key]["status"] == "upgradeable":
status = self.tr("There is a plugin update available")
icon = "pluginUpgrade.svg"
tabIndex = 3 # PLUGMAN_TAB_UPGRADEABLE
# finally set the notify label
if status:
self.statusLabel.setText(u'<a href="%d"><img src="qrc:/images/themes/default/%s"></a>' % (tabIndex, icon))
self.statusLabel.setToolTip(status)
else:
iface.mainWindow().statusBar().removeWidget(self.statusLabel)
self.statusLabel = None
# ----------------------------------------- #
def exportRepositoriesToManager(self):
""" Update manager's repository tree widget with current data """
iface.pluginManagerInterface().clearRepositoryList()
for key in repositories.all():
url = repositories.all()[key]["url"] + repositories.urlParams()
if repositories.inspectionFilter():
enabled = (key == repositories.inspectionFilter())
else:
enabled = repositories.all()[key]["enabled"]
iface.pluginManagerInterface().addToRepositoryList({
"name": key,
"url": url,
"enabled": enabled and "true" or "false",
"valid": repositories.all()[key]["valid"] and "true" or "false",
"state": str(repositories.all()[key]["state"]),
"error": repositories.all()[key]["error"],
"inspection_filter": repositories.inspectionFilter() and "true" or "false"
})
# ----------------------------------------- #
def exportPluginsToManager(self):
""" Insert plugins metadata to QgsMetadataRegistry """
iface.pluginManagerInterface().clearPythonPluginMetadata()
for key in plugins.all():
plugin = plugins.all()[key]
iface.pluginManagerInterface().addPluginMetadata({
"id": key,
"plugin_id": plugin["plugin_id"] or "",
"name": plugin["name"],
"description": plugin["description"],
"about": plugin["about"],
"category": plugin["category"],
"tags": plugin["tags"],
"changelog": plugin["changelog"],
"author_name": plugin["author_name"],
"author_email": plugin["author_email"],
"homepage": plugin["homepage"],
"tracker": plugin["tracker"],
"code_repository": plugin["code_repository"],
"version_installed": plugin["version_installed"],
"library": plugin["library"],
"icon": plugin["icon"],
"readonly": plugin["readonly"] and "true" or "false",
"installed": plugin["installed"] and "true" or "false",
"available": plugin["available"] and "true" or "false",
"status": plugin["status"],
"status_exp": plugin["status_exp"],
"error": plugin["error"],
"error_details": plugin["error_details"],
"create_date": plugin["create_date"],
"update_date": plugin["update_date"],
"create_date_stable": plugin["create_date_stable"],
"update_date_stable": plugin["update_date_stable"],
"create_date_experimental": plugin["create_date_experimental"],
"update_date_experimental": plugin["update_date_experimental"],
"experimental": plugin["experimental"] and "true" or "false",
"deprecated": plugin["deprecated"] and "true" or "false",
"trusted": plugin["trusted"] and "true" or "false",
"version_available": plugin["version_available"],
"version_available_stable": plugin["version_available_stable"] or "",
"version_available_experimental": plugin["version_available_experimental"] or "",
"zip_repository": plugin["zip_repository"],
"download_url": plugin["download_url"],
"download_url_stable": plugin["download_url_stable"],
"download_url_experimental": plugin["download_url_experimental"],
"filename": plugin["filename"],
"downloads": plugin["downloads"],
"average_vote": plugin["average_vote"],
"rating_votes": plugin["rating_votes"],
"plugin_dependencies": plugin.get("plugin_dependencies", None),
"pythonic": "true"
})
iface.pluginManagerInterface().reloadModel()
# ----------------------------------------- #
def reloadAndExportData(self):
""" Reload All repositories and export data to the Plugin Manager """
self.fetchAvailablePlugins(reloadMode=True)
self.exportRepositoriesToManager()
self.exportPluginsToManager()
# ----------------------------------------- #
def showPluginManagerWhenReady(self, * params):
""" Open the plugin manager window. If fetching is still in progress, it shows the progress window first """
""" Optionally pass the index of tab to be opened in params """
if self.statusLabel:
iface.mainWindow().statusBar().removeWidget(self.statusLabel)
self.statusLabel = None
self.fetchAvailablePlugins(reloadMode=False)
self.exportRepositoriesToManager()
self.exportPluginsToManager()
# finally, show the plugin manager window
tabIndex = -1
if len(params) == 1:
indx = str(params[0])
if indx.isdigit() and int(indx) > -1 and int(indx) < 7:
tabIndex = int(indx)
iface.pluginManagerInterface().showPluginManager(tabIndex)
# ----------------------------------------- #
def onManagerClose(self):
""" Call this method when closing manager window - it resets last-use-dependent values. """
plugins.updateSeenPluginsList()
repositories.saveCheckingOnStartLastDate()
# ----------------------------------------- #
def exportSettingsGroup(self):
""" Return QgsSettings settingsGroup value """
return settingsGroup
# ----------------------------------------- #
def upgradeAllUpgradeable(self):
""" Reinstall all upgradeable plugins """
for key in plugins.allUpgradeable():
self.installPlugin(key, quiet=True)
# ----------------------------------------- #
def installPlugin(self, key, quiet=False, stable=True):
""" Install given plugin """
error = False
status_key = 'status' if stable else 'status_exp'
infoString = ('', '')
plugin = plugins.all()[key]
previousStatus = plugin[status_key]
if not plugin:
return
if plugin[status_key] == "newer" and not plugin["error"]: # ask for confirmation if user downgrades an usable plugin
if QMessageBox.warning(iface.mainWindow(), self.tr("QGIS Python Plugin Installer"), self.tr("Are you sure you want to downgrade the plugin to the latest available version? The installed one is newer!"), QMessageBox.Yes, QMessageBox.No) == QMessageBox.No:
return
dlg = QgsPluginInstallerInstallingDialog(iface.mainWindow(), plugin, stable=stable)
dlg.exec_()
plugin_path = qgis.utils.home_plugin_path + "/" + key
if dlg.result():
error = True
infoString = (self.tr("Plugin installation failed"), dlg.result())
elif not QDir(plugin_path).exists():
error = True
infoString = (
self.tr("Plugin has disappeared"),
self.tr(
"The plugin seems to have been installed but it's not possible to know where. The directory \"{}\" "
"has not been found. Probably the plugin package contained a wrong named directory.\nPlease search "
"the list of installed plugins. You should find the plugin there, but it's not possible to "
"determine which of them it is and it's also not possible to inform you about available updates. "
"Please contact the plugin author and submit this issue.").format(plugin_path))
QApplication.setOverrideCursor(Qt.WaitCursor)
plugins.getAllInstalled()
plugins.rebuild()
self.exportPluginsToManager()
QApplication.restoreOverrideCursor()
else:
QApplication.setOverrideCursor(Qt.WaitCursor)
# update the list of plugins in plugin handling routines
updateAvailablePlugins()
self.processDependencies(plugin["id"])
# try to load the plugin
loadPlugin(plugin["id"])
plugins.getAllInstalled()
plugins.rebuild()
plugin = plugins.all()[key]
if not plugin["error"]:
if previousStatus in ["not installed", "new"]:
infoString = (self.tr("Plugin installed successfully"), "")
if startPlugin(plugin["id"]):
settings = QgsSettings()
settings.setValue("/PythonPlugins/" + plugin["id"], True)
else:
settings = QgsSettings()
if settings.value("/PythonPlugins/" + key, False, type=bool): # plugin will be reloaded on the fly only if currently loaded
reloadPlugin(key) # unloadPlugin + loadPlugin + startPlugin
infoString = (self.tr("Plugin reinstalled successfully"), "")
else:
unloadPlugin(key) # Just for a case. Will exit quietly if really not loaded
loadPlugin(key)
infoString = (self.tr("Plugin reinstalled successfully"), self.tr("Python plugin reinstalled.\nYou need to restart QGIS in order to reload it."))
if quiet:
infoString = (None, None)
QApplication.restoreOverrideCursor()
else:
QApplication.restoreOverrideCursor()
if plugin["error"] == "incompatible":
message = self.tr("The plugin is not compatible with this version of QGIS. It's designed for QGIS versions:")
message += " <b>" + plugin["error_details"] + "</b>"
elif plugin["error"] == "dependent":
message = self.tr("The plugin depends on some components missing on your system. You need to install the following Python module in order to enable it:")
message += "<b> " + plugin["error_details"] + "</b>"
else:
message = self.tr("The plugin is broken. Python said:")
message += "<br><b>" + plugin["error_details"] + "</b>"
dlg = QgsPluginInstallerPluginErrorDialog(iface.mainWindow(), message)
dlg.exec_()
if dlg.result():
# revert installation
pluginDir = qgis.utils.home_plugin_path + "/" + plugin["id"]
result = removeDir(pluginDir)
if QDir(pluginDir).exists():
error = True
infoString = (self.tr("Plugin uninstall failed"), result)
try:
exec("sys.path_importer_cache.clear()")
exec("import %s" % plugin["id"])
exec("reload (%s)" % plugin["id"])
except:
pass
else:
try:
exec("del sys.modules[%s]" % plugin["id"])
except:
pass
plugins.getAllInstalled()
plugins.rebuild()
self.exportPluginsToManager()
if infoString[0]:
level = error and Qgis.Critical or Qgis.Info
msg = "<b>%s</b>" % infoString[0]
if infoString[1]:
msg += "<b>:</b> %s" % infoString[1]
iface.pluginManagerInterface().pushMessage(msg, level)
# ----------------------------------------- #
def uninstallPlugin(self, key, quiet=False):
""" Uninstall given plugin """
if key in plugins.all():
plugin = plugins.all()[key]
else:
plugin = plugins.localCache[key]
if not plugin:
return
if not quiet:
warning = self.tr("Are you sure you want to uninstall the following plugin?") + "\n(" + plugin["name"] + ")"
if plugin["status"] == "orphan" and not plugin["error"]:
warning += "\n\n" + self.tr("Warning: this plugin isn't available in any accessible repository!")
if QMessageBox.warning(iface.mainWindow(), self.tr("QGIS Python Plugin Installer"), warning, QMessageBox.Yes, QMessageBox.No) == QMessageBox.No:
return
# unload the plugin
QApplication.setOverrideCursor(Qt.WaitCursor)
try:
unloadPlugin(key)
except:
pass
pluginDir = qgis.utils.home_plugin_path + "/" + plugin["id"]
result = removeDir(pluginDir)
if result:
QApplication.restoreOverrideCursor()
msg = "<b>%s:</b>%s" % (self.tr("Plugin uninstall failed"), result)
iface.pluginManagerInterface().pushMessage(msg, Qgis.Critical)
else:
# safe remove
try:
unloadPlugin(plugin["id"])
except:
pass
try:
exec("plugins[%s].unload()" % plugin["id"])
exec("del plugins[%s]" % plugin["id"])
except:
pass
try:
exec("del sys.modules[%s]" % plugin["id"])
except:
pass
try:
exec("del plugins_metadata_parser[%s]" % plugin["id"])
except:
pass
plugins.getAllInstalled()
plugins.rebuild()
self.exportPluginsToManager()
QApplication.restoreOverrideCursor()
iface.pluginManagerInterface().pushMessage(self.tr("Plugin uninstalled successfully"), Qgis.Info)
# ----------------------------------------- #
def addRepository(self):
""" add new repository connection """
dlg = QgsPluginInstallerRepositoryDialog(iface.mainWindow())
dlg.editParams.setText(repositories.urlParams())
dlg.checkBoxEnabled.setCheckState(Qt.Checked)
if not dlg.exec_():
return
for i in list(repositories.all().values()):
if dlg.editURL.text().strip() == i["url"]:
iface.pluginManagerInterface().pushMessage(self.tr("Unable to add another repository with the same URL!"), Qgis.Warning)
return
settings = QgsSettings()
settings.beginGroup(reposGroup)
reposName = dlg.editName.text()
reposURL = dlg.editURL.text().strip()
if reposName in repositories.all():
reposName = reposName + "(2)"
# add to settings
settings.setValue(reposName + "/url", reposURL)
settings.setValue(reposName + "/authcfg", dlg.editAuthCfg.text().strip())
settings.setValue(reposName + "/enabled", bool(dlg.checkBoxEnabled.checkState()))
# refresh lists and populate widgets
plugins.removeRepository(reposName)
self.reloadAndExportData()
# ----------------------------------------- #
def editRepository(self, reposName):
""" edit repository connection """
if not reposName:
return
checkState = {False: Qt.Unchecked, True: Qt.Checked}
dlg = QgsPluginInstallerRepositoryDialog(iface.mainWindow())
dlg.editName.setText(reposName)
dlg.editURL.setText(repositories.all()[reposName]["url"])
dlg.editAuthCfg.setText(repositories.all()[reposName]["authcfg"])
dlg.editParams.setText(repositories.urlParams())
dlg.checkBoxEnabled.setCheckState(checkState[repositories.all()[reposName]["enabled"]])
if repositories.all()[reposName]["valid"]:
dlg.checkBoxEnabled.setEnabled(True)
dlg.labelInfo.setText("")
else:
dlg.checkBoxEnabled.setEnabled(False)
dlg.labelInfo.setText(self.tr("This repository is blocked due to incompatibility with your QGIS version"))
dlg.labelInfo.setFrameShape(QFrame.Box)
if not dlg.exec_():
return # nothing to do if canceled
for i in list(repositories.all().values()):
if dlg.editURL.text().strip() == i["url"] and dlg.editURL.text().strip() != repositories.all()[reposName]["url"]:
iface.pluginManagerInterface().pushMessage(self.tr("Unable to add another repository with the same URL!"), Qgis.Warning)
return
# delete old repo from QgsSettings and create new one
settings = QgsSettings()
settings.beginGroup(reposGroup)
settings.remove(reposName)
newName = dlg.editName.text()
if newName in repositories.all() and newName != reposName:
newName = newName + "(2)"
settings.setValue(newName + "/url", dlg.editURL.text().strip())
settings.setValue(newName + "/authcfg", dlg.editAuthCfg.text().strip())
settings.setValue(newName + "/enabled", bool(dlg.checkBoxEnabled.checkState()))
if dlg.editAuthCfg.text().strip() != repositories.all()[reposName]["authcfg"]:
repositories.all()[reposName]["authcfg"] = dlg.editAuthCfg.text().strip()
if dlg.editURL.text().strip() == repositories.all()[reposName]["url"] and dlg.checkBoxEnabled.checkState() == checkState[repositories.all()[reposName]["enabled"]]:
repositories.rename(reposName, newName)
self.exportRepositoriesToManager()
return # nothing else to do if only repository name was changed
plugins.removeRepository(reposName)
self.reloadAndExportData()
# ----------------------------------------- #
def deleteRepository(self, reposName):
""" delete repository connection """
if not reposName:
return
settings = QgsSettings()
settings.beginGroup(reposGroup)
if settings.value(reposName + "/url", "", type=str) == officialRepo[1]:
iface.pluginManagerInterface().pushMessage(self.tr("You can't remove the official QGIS Plugin Repository. You can disable it if needed."), Qgis.Warning)
return
warning = self.tr("Are you sure you want to remove the following repository?") + "\n" + reposName
if QMessageBox.warning(iface.mainWindow(), self.tr("QGIS Python Plugin Installer"), warning, QMessageBox.Yes, QMessageBox.No) == QMessageBox.No:
return
# delete from the settings, refresh data and repopulate all the widgets
settings.remove(reposName)
repositories.remove(reposName)
plugins.removeRepository(reposName)
self.reloadAndExportData()
# ----------------------------------------- #
def setRepositoryInspectionFilter(self, reposName=None):
""" temporarily block another repositories to fetch only one for inspection """
repositories.setInspectionFilter(reposName)
self.reloadAndExportData()
# ----------------------------------------- #
def sendVote(self, plugin_id, vote):
""" send vote via the RPC """
if not plugin_id or not vote:
return False
url = "http://plugins.qgis.org/plugins/RPC2/"
params = {"id": "djangorpc", "method": "plugin.vote", "params": [str(plugin_id), str(vote)]}
req = QNetworkRequest(QUrl(url))
req.setAttribute(QNetworkRequest.Attribute(QgsNetworkRequestParameters.AttributeInitiatorClass), "QgsPluginInstaller")
req.setAttribute(QNetworkRequest.Attribute(QgsNetworkRequestParameters.AttributeInitiatorRequestId), "sendVote")
req.setRawHeader(b"Content-Type", b"application/json")
QgsNetworkAccessManager.instance().post(req, bytes(json.dumps(params), "utf-8"))
return True
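# For example (an illustrative call with a made-up plugin id), sendVote(1906, 5)
# posts this JSON-RPC style payload to http://plugins.qgis.org/plugins/RPC2/:
#
#     {"id": "djangorpc", "method": "plugin.vote", "params": ["1906", "5"]}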
def installFromZipFile(self, filePath):
if not os.path.isfile(filePath):
return
settings = QgsSettings()
settings.setValue(settingsGroup + '/lastZipDirectory',
QFileInfo(filePath).absoluteDir().absolutePath())
pluginName = None
with zipfile.ZipFile(filePath, 'r') as zf:
# search for metadata.txt. In case of multiple files, assume that the
# shortest path corresponds to <pluginname>/metadata.txt
metadatafiles = sorted(f for f in zf.namelist() if f.endswith('metadata.txt'))
if len(metadatafiles) > 0:
pluginName = os.path.split(metadatafiles[0])[0]
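# Illustrative example of the heuristic above (made-up archive layout):
#
#     zf.namelist()  -> ['myplugin/test/metadata.txt', 'myplugin/metadata.txt']
#     sorted(...)    -> ['myplugin/metadata.txt', 'myplugin/test/metadata.txt']
#     pluginName     -> 'myplugin'
#
# Note the sort is lexicographic, which matches the "shortest path" intent for
# typical layouts where extra copies live in subdirectories sorting after
# 'metadata.txt'.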
pluginFileName = os.path.splitext(os.path.basename(filePath))[0]
if not pluginName:
msg_box = QMessageBox()
msg_box.setIcon(QMessageBox.Warning)
msg_box.setWindowTitle(self.tr("QGIS Python Install from ZIP Plugin Installer"))
msg_box.setText(self.tr("The Zip file is not a valid QGIS python plugin. No root folder was found inside."))
msg_box.setStandardButtons(QMessageBox.Ok)
more_info_btn = msg_box.addButton(self.tr("More Information"), QMessageBox.HelpRole)
msg_box.exec()
if msg_box.clickedButton() == more_info_btn:
QgsHelp.openHelp("plugins/plugins.html#the-install-from-zip-tab")
return
pluginsDirectory = qgis.utils.home_plugin_path
if not QDir(pluginsDirectory).exists():
QDir().mkpath(pluginsDirectory)
pluginDirectory = QDir.cleanPath(os.path.join(pluginsDirectory, pluginName))
# If the target directory already exists as a link,
# remove the link without resolving
QFile(pluginDirectory).remove()
password = None
infoString = None
success = False
keepTrying = True
while keepTrying:
try:
# Test extraction. If it fails, an exception is raised and nothing is removed
unzip(filePath, pluginsDirectory, password)
# Remove old plugin files if they exist
removeDir(pluginDirectory)
# Extract new files
unzip(filePath, pluginsDirectory, password)
keepTrying = False
success = True
except Exception as e:
success = False
if 'password' in str(e):
infoString = self.tr('Aborted by user')
if 'Bad password' in str(e):
msg = self.tr('Wrong password. Please enter a correct password for the zip file.')
else:
msg = self.tr('The zip file is encrypted. Please enter the password.')
# Display a password dialog with QgsPasswordLineEdit
dlg = QDialog()
dlg.setWindowTitle(self.tr('Enter password'))
buttonBox = QDialogButtonBox(QDialogButtonBox.Ok | QDialogButtonBox.Cancel, Qt.Horizontal)
buttonBox.rejected.connect(dlg.reject)
buttonBox.accepted.connect(dlg.accept)
lePass = QgsPasswordLineEdit()
layout = QVBoxLayout()
layout.addWidget(QLabel(msg))
layout.addWidget(lePass)
layout.addWidget(buttonBox)
dlg.setLayout(layout)
keepTrying = dlg.exec_()
password = lePass.text()
else:
infoString = self.tr("Failed to unzip the plugin package\n{}.\nProbably it is broken".format(filePath))
keepTrying = False
if success:
updateAvailablePlugins()
self.processDependencies(pluginName)
loadPlugin(pluginName)
plugins.getAllInstalled()
plugins.rebuild()
if settings.contains('/PythonPlugins/' + pluginName):
if settings.value('/PythonPlugins/' + pluginName, False, bool):
startPlugin(pluginName)
reloadPlugin(pluginName)
else:
unloadPlugin(pluginName)
loadPlugin(pluginName)
else:
if startPlugin(pluginName):
settings.setValue('/PythonPlugins/' + pluginName, True)
self.exportPluginsToManager()
msg = "<b>%s</b>" % self.tr("Plugin installed successfully")
else:
msg = "<b>%s:</b> %s" % (self.tr("Plugin installation failed"), infoString)
level = Qgis.Info if success else Qgis.Critical
iface.pluginManagerInterface().pushMessage(msg, level)
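# A hypothetical call would pass an absolute path to a plugin archive, e.g.
# self.installFromZipFile('/home/user/Downloads/my_plugin.zip'); the method
# silently returns for paths that do not point to an existing file.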
def processDependencies(self, plugin_id):
"""Processes plugin dependencies
:param plugin_id: plugin id
:type plugin_id: str
"""
to_install, to_upgrade, not_found = find_dependencies(plugin_id)
if to_install or to_upgrade or not_found:
dlg = QgsPluginDependenciesDialog(plugin_id, to_install, to_upgrade, not_found)
if dlg.exec_() == QgsPluginDependenciesDialog.Accepted:
actions = dlg.actions()
for dependency_plugin_id, action in actions.items():
try:
self.installPlugin(dependency_plugin_id)
if action == 'install':
iface.pluginManagerInterface().pushMessage(self.tr("Plugin dependency <b>%s</b> successfully installed") %
dependency_plugin_id, Qgis.Info)
else:
iface.pluginManagerInterface().pushMessage(self.tr("Plugin dependency <b>%s</b> successfully upgraded") %
dependency_plugin_id, Qgis.Info)
except Exception as ex:
if action == 'install':
iface.pluginManagerInterface().pushMessage(self.tr("Error installing plugin dependency <b>%s</b>: %s") %
(dependency_plugin_id, ex), Qgis.Warning)
else:
iface.pluginManagerInterface().pushMessage(self.tr("Error upgrading plugin dependency <b>%s</b>: %s") %
(dependency_plugin_id, ex), Qgis.Warning)
|
DelazJ/QGIS
|
python/pyplugin_installer/installer.py
|
Python
|
gpl-2.0
| 35,174
|
from django.conf.urls import patterns, url
from .views import ContactFormView, ContactCompleted
form_urls = patterns('',
url('^$', ContactFormView.as_view(), name='form'),
)
success_urls = patterns('',
url('^success/$', ContactCompleted.as_view(), name='completed'),
)
urlpatterns = form_urls + success_urls
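# Note: django.conf.urls.patterns() was deprecated in Django 1.8 and removed
# in 1.10; on newer Django versions the equivalent is a plain list of url()
# entries, e.g. form_urls = [url('^$', ContactFormView.as_view(), name='form')].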
|
596acres/django-livinglots-template
|
project_name/contact/urls.py
|
Python
|
gpl-3.0
| 321
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2012, Mark Theunissen <mark.theunissen@gmail.com>
# Sponsored by Four Kitchens http://fourkitchens.com.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: mysql_db
short_description: Add or remove MySQL databases from a remote host.
description:
- Add or remove MySQL databases from a remote host.
version_added: "0.6"
options:
name:
description:
- name of the database to add or remove
- C(name=all) may only be provided if I(state) is C(dump) or C(import).
- If C(name=all), works like the C(--all-databases) option for mysqldump (Added in 2.0)
required: true
aliases: [ db ]
state:
description:
- The database state
default: present
choices: [ "present", "absent", "dump", "import" ]
collation:
description:
- Collation mode (sorting). This only applies to new table/databases and does not update existing ones, this is a limitation of MySQL.
encoding:
description:
- Encoding mode to use, examples include C(utf8) or C(latin1)
target:
description:
- Location, on the remote host, of the dump file to read from or write to. Uncompressed SQL
files (C(.sql)) as well as bzip2 (C(.bz2)), gzip (C(.gz)) and xz (Added in 2.0) compressed files are supported.
single_transaction:
description:
- Execute the dump in a single transaction
type: bool
default: 'no'
version_added: "2.1"
quick:
description:
- Option used for dumping large tables
type: bool
default: 'yes'
version_added: "2.1"
ignore_tables:
description:
- A list of table names that will be ignored in the dump, in the form database_name.table_name
required: false
default: []
version_added: "2.7"
author: "Ansible Core Team"
requirements:
- mysql (command line binary)
- mysqldump (command line binary)
notes:
- Requires the mysql and mysqldump binaries on the remote host.
- This module is B(not idempotent) when I(state) is C(import), and will import the dump file each time if run more than once.
extends_documentation_fragment: mysql
'''
EXAMPLES = '''
- name: Create a new database with name 'bobdata'
mysql_db:
name: bobdata
state: present
# Copy database dump file to remote host and restore it to database 'my_db'
- name: Copy database dump file
copy:
src: dump.sql.bz2
dest: /tmp
- name: Restore database
mysql_db:
name: my_db
state: import
target: /tmp/dump.sql.bz2
- name: Dump all databases to hostname.sql
mysql_db:
state: dump
name: all
target: /tmp/{{ inventory_hostname }}.sql
- name: Import file.sql similar to mysql -u <username> -p <password> < hostname.sql
mysql_db:
state: import
name: all
target: /tmp/{{ inventory_hostname }}.sql
'''
import os
import pipes
import subprocess
import traceback
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.database import mysql_quote_identifier
from ansible.module_utils.mysql import mysql_connect, mysql_driver, mysql_driver_fail_msg
from ansible.module_utils._text import to_native
# ===========================================
# MySQL module specific support methods.
#
def db_exists(cursor, db):
res = cursor.execute("SHOW DATABASES LIKE %s", (db.replace("_", r"\_"),))
return bool(res)
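# Note: the replace() above escapes '_' because it is a single-character
# wildcard in SQL LIKE patterns; a hypothetical db_exists(cursor, "my_db")
# therefore runs SHOW DATABASES LIKE 'my\_db' and does not also match a
# database named, say, "my1db".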
def db_delete(cursor, db):
query = "DROP DATABASE %s" % mysql_quote_identifier(db, 'database')
cursor.execute(query)
return True
def db_dump(module, host, user, password, db_name, target, all_databases, port, config_file, socket=None, ssl_cert=None, ssl_key=None, ssl_ca=None,
single_transaction=None, quick=None, ignore_tables=None):
cmd = module.get_bin_path('mysqldump', True)
# If defined, mysqldump demands --defaults-extra-file be the first option
if config_file:
cmd += " --defaults-extra-file=%s" % pipes.quote(config_file)
if user is not None:
cmd += " --user=%s" % pipes.quote(user)
if password is not None:
cmd += " --password=%s" % pipes.quote(password)
if ssl_cert is not None:
cmd += " --ssl-cert=%s" % pipes.quote(ssl_cert)
if ssl_key is not None:
cmd += " --ssl-key=%s" % pipes.quote(ssl_key)
if ssl_ca is not None:
cmd += " --ssl-ca=%s" % pipes.quote(ssl_ca)
if socket is not None:
cmd += " --socket=%s" % pipes.quote(socket)
else:
cmd += " --host=%s --port=%i" % (pipes.quote(host), port)
if all_databases:
cmd += " --all-databases"
else:
cmd += " %s" % pipes.quote(db_name)
if single_transaction:
cmd += " --single-transaction=true"
if quick:
cmd += " --quick"
if ignore_tables:
for an_ignored_table in ignore_tables:
cmd += " --ignore-table={0}".format(an_ignored_table)
path = None
if os.path.splitext(target)[-1] == '.gz':
path = module.get_bin_path('gzip', True)
elif os.path.splitext(target)[-1] == '.bz2':
path = module.get_bin_path('bzip2', True)
elif os.path.splitext(target)[-1] == '.xz':
path = module.get_bin_path('xz', True)
if path:
cmd = '%s | %s > %s' % (cmd, path, pipes.quote(target))
else:
cmd += " > %s" % pipes.quote(target)
rc, stdout, stderr = module.run_command(cmd, use_unsafe_shell=True)
return rc, stdout, stderr
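# Illustrative shape of the shell command assembled above for a gzip target
# (all names hypothetical):
#   mysqldump --user=bob --password=secret --host=localhost --port=3306 mydb \
#       | gzip > /tmp/mydb.sql.gz
# The pipe and redirection are why run_command() is invoked with
# use_unsafe_shell=True.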
def db_import(module, host, user, password, db_name, target, all_databases, port, config_file, socket=None, ssl_cert=None, ssl_key=None, ssl_ca=None):
if not os.path.exists(target):
return module.fail_json(msg="target %s does not exist on the host" % target)
cmd = [module.get_bin_path('mysql', True)]
# --defaults-extra-file must go first, or mysql errors out
if config_file:
cmd.append("--defaults-extra-file=%s" % pipes.quote(config_file))
if user:
cmd.append("--user=%s" % pipes.quote(user))
if password:
cmd.append("--password=%s" % pipes.quote(password))
if socket is not None:
cmd.append("--socket=%s" % pipes.quote(socket))
else:
cmd.append("--host=%s" % pipes.quote(host))
cmd.append("--port=%i" % port)
if ssl_cert is not None:
cmd.append("--ssl-cert=%s" % pipes.quote(ssl_cert))
if ssl_key is not None:
cmd.append("--ssl-key=%s" % pipes.quote(ssl_key))
if ssl_ca is not None:
cmd.append("--ssl-ca=%s" % pipes.quote(ssl_ca))
if not all_databases:
cmd.append("-D")
cmd.append(pipes.quote(db_name))
comp_prog_path = None
if os.path.splitext(target)[-1] == '.gz':
comp_prog_path = module.get_bin_path('gzip', required=True)
elif os.path.splitext(target)[-1] == '.bz2':
comp_prog_path = module.get_bin_path('bzip2', required=True)
elif os.path.splitext(target)[-1] == '.xz':
comp_prog_path = module.get_bin_path('xz', required=True)
if comp_prog_path:
p1 = subprocess.Popen([comp_prog_path, '-dc', target], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
p2 = subprocess.Popen(cmd, stdin=p1.stdout, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
(stdout2, stderr2) = p2.communicate()
p1.stdout.close()
p1.wait()
if p1.returncode != 0:
stderr1 = p1.stderr.read()
return p1.returncode, '', stderr1
else:
return p2.returncode, stdout2, stderr2
else:
cmd = ' '.join(cmd)
cmd += " < %s" % pipes.quote(target)
rc, stdout, stderr = module.run_command(cmd, use_unsafe_shell=True)
return rc, stdout, stderr
def db_create(cursor, db, encoding, collation):
query_params = dict(enc=encoding, collate=collation)
query = ['CREATE DATABASE %s' % mysql_quote_identifier(db, 'database')]
if encoding:
query.append("CHARACTER SET %(enc)s")
if collation:
query.append("COLLATE %(collate)s")
query = ' '.join(query)
cursor.execute(query, query_params)
return True
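# Illustrative statement produced above by db_create(cursor, 'bobdata',
# 'utf8', 'utf8_general_ci'); the identifier is quoted by
# mysql_quote_identifier and the charset/collation values are bound as
# query parameters by the driver, giving roughly:
#   CREATE DATABASE `bobdata` CHARACTER SET 'utf8' COLLATE 'utf8_general_ci'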
# ===========================================
# Module execution.
#
def main():
module = AnsibleModule(
argument_spec=dict(
login_user=dict(default=None),
login_password=dict(default=None, no_log=True),
login_host=dict(default="localhost"),
login_port=dict(default=3306, type='int'),
login_unix_socket=dict(default=None),
name=dict(required=True, aliases=['db']),
encoding=dict(default=""),
collation=dict(default=""),
target=dict(default=None, type='path'),
state=dict(default="present", choices=["absent", "present", "dump", "import"]),
ssl_cert=dict(default=None, type='path'),
ssl_key=dict(default=None, type='path'),
ssl_ca=dict(default=None, type='path'),
connect_timeout=dict(default=30, type='int'),
config_file=dict(default="~/.my.cnf", type='path'),
single_transaction=dict(default=False, type='bool'),
quick=dict(default=True, type='bool'),
ignore_tables=dict(default=[], type='list')
),
supports_check_mode=True
)
if mysql_driver is None:
module.fail_json(msg=mysql_driver_fail_msg)
db = module.params["name"]
encoding = module.params["encoding"]
collation = module.params["collation"]
state = module.params["state"]
target = module.params["target"]
socket = module.params["login_unix_socket"]
login_port = module.params["login_port"]
if login_port < 0 or login_port > 65535:
module.fail_json(msg="login_port must be a valid unix port number (0-65535)")
ssl_cert = module.params["ssl_cert"]
ssl_key = module.params["ssl_key"]
ssl_ca = module.params["ssl_ca"]
connect_timeout = module.params['connect_timeout']
config_file = module.params['config_file']
login_password = module.params["login_password"]
login_user = module.params["login_user"]
login_host = module.params["login_host"]
ignore_tables = module.params["ignore_tables"]
for a_table in ignore_tables:
if a_table == "":
module.fail_json(msg="Name of ignored table cannot be empty")
single_transaction = module.params["single_transaction"]
quick = module.params["quick"]
if state in ['dump', 'import']:
if target is None:
module.fail_json(msg="with state=%s target is required" % state)
if db == 'all':
db = 'mysql'
all_databases = True
else:
all_databases = False
else:
if db == 'all':
module.fail_json(msg="name is not allowed to equal 'all' unless state equals import, or dump.")
try:
cursor = mysql_connect(module, login_user, login_password, config_file, ssl_cert, ssl_key, ssl_ca,
connect_timeout=connect_timeout)
except Exception as e:
if os.path.exists(config_file):
module.fail_json(msg="unable to connect to database, check login_user and login_password are correct or %s has the credentials. "
"Exception message: %s" % (config_file, to_native(e)))
else:
module.fail_json(msg="unable to find %s. Exception message: %s" % (config_file, to_native(e)))
changed = False
if not os.path.exists(config_file):
config_file = None
if db_exists(cursor, db):
if state == "absent":
if module.check_mode:
module.exit_json(changed=True, db=db)
else:
try:
changed = db_delete(cursor, db)
except Exception as e:
module.fail_json(msg="error deleting database: %s" % to_native(e))
module.exit_json(changed=changed, db=db)
elif state == "dump":
if module.check_mode:
module.exit_json(changed=True, db=db)
else:
rc, stdout, stderr = db_dump(module, login_host, login_user,
login_password, db, target, all_databases,
login_port, config_file, socket, ssl_cert, ssl_key,
ssl_ca, single_transaction, quick, ignore_tables)
if rc != 0:
module.fail_json(msg="%s" % stderr)
else:
module.exit_json(changed=True, db=db, msg=stdout)
elif state == "import":
if module.check_mode:
module.exit_json(changed=True, db=db)
else:
rc, stdout, stderr = db_import(module, login_host, login_user,
login_password, db, target,
all_databases,
login_port, config_file,
socket, ssl_cert, ssl_key, ssl_ca)
if rc != 0:
module.fail_json(msg="%s" % stderr)
else:
module.exit_json(changed=True, db=db, msg=stdout)
elif state == "present":
if module.check_mode:
module.exit_json(changed=False, db=db)
module.exit_json(changed=False, db=db)
else:
if state == "present":
if module.check_mode:
changed = True
else:
try:
changed = db_create(cursor, db, encoding, collation)
except Exception as e:
module.fail_json(msg="error creating database: %s" % to_native(e),
exception=traceback.format_exc())
module.exit_json(changed=changed, db=db)
elif state == "import":
if module.check_mode:
module.exit_json(changed=True, db=db)
else:
try:
changed = db_create(cursor, db, encoding, collation)
if changed:
rc, stdout, stderr = db_import(module, login_host, login_user,
login_password, db, target, all_databases,
login_port, config_file, socket, ssl_cert, ssl_key, ssl_ca)
if rc != 0:
module.fail_json(msg="%s" % stderr)
else:
module.exit_json(changed=True, db=db, msg=stdout)
except Exception as e:
module.fail_json(msg="error creating database: %s" % to_native(e),
exception=traceback.format_exc())
elif state == "absent":
if module.check_mode:
module.exit_json(changed=False, db=db)
module.exit_json(changed=False, db=db)
elif state == "dump":
if module.check_mode:
module.exit_json(changed=False, db=db)
module.fail_json(msg="Cannot dump database %s - not found" % (db))
if __name__ == '__main__':
main()
|
valentin-krasontovitsch/ansible
|
lib/ansible/modules/database/mysql/mysql_db.py
|
Python
|
gpl-3.0
| 15,512
|
# coding=utf-8
from __future__ import absolute_import, unicode_literals # noqa
import os
import numpy as np
import oslotest.base
import lda
import lda.utils
class TestLDANewsReuters(oslotest.base.BaseTestCase):
@classmethod
def setUpClass(cls):
test_dir = os.path.dirname(__file__)
reuters_ldac_fn = os.path.join(test_dir, 'reuters.ldac')
cls.dtm = dtm = lda.utils.ldac2dtm(open(reuters_ldac_fn), offset=0)
cls.n_iter = n_iter = 1
cls.n_topics = n_topics = 10
cls.random_seed = random_seed = 1
cls.model = model = lda.LDA(n_topics=n_topics, n_iter=n_iter, random_state=random_seed)
cls.doc_topic = model.fit_transform(dtm)
def test_lda_news(self):
dtm = self.dtm
doc_topic = self.doc_topic
self.assertEqual(len(doc_topic), len(dtm))
def test_lda_attributes(self):
dtm = self.dtm
doc_topic = self.doc_topic
model = self.model
# check dims
N = dtm.sum()
D, V = dtm.shape
_, K = doc_topic.shape
self.assertEqual(model.doc_topic_.shape, doc_topic.shape)
np.testing.assert_array_equal(model.doc_topic_, doc_topic)
self.assertEqual(model.doc_topic_.shape, (D, K))
self.assertEqual(model.ndz_.shape, (D, K))
self.assertEqual(model.topic_word_.shape, (K, V))
self.assertEqual(model.nzw_.shape, (K, V))
# check contents
self.assertAlmostEqual(model.nzw_.sum(), N)
self.assertAlmostEqual(model.ndz_.sum(), N)
self.assertAlmostEqual(model.nz_.sum(), N)
self.assertAlmostEqual(model.doc_topic_.sum(), D)
self.assertAlmostEqual(model.topic_word_.sum(), K)
np.testing.assert_array_equal(model.ndz_.sum(axis=0), model.nz_)
# check distributions sum to one
np.testing.assert_array_almost_equal(model.doc_topic_.sum(axis=1), np.ones(D))
np.testing.assert_array_almost_equal(model.topic_word_.sum(axis=1), np.ones(K))
def test_lda_random_seed(self):
dtm = self.dtm
doc_topic = self.doc_topic
n_iter = self.n_iter
n_topics = self.n_topics
random_seed = self.random_seed
random_state = self.model.random_state
# refit model with same random seed and verify results identical
model_new = lda.LDA(n_topics=n_topics, n_iter=n_iter, random_state=random_seed)
rands_init = model_new._rands.copy()
doc_topic_new = model_new.fit_transform(dtm)
rands_fit = model_new._rands.copy()
random_state_new = model_new.random_state
np.testing.assert_array_equal(doc_topic_new, doc_topic)
np.testing.assert_array_equal(random_state_new, random_state)
# verify random variates are not changed
np.testing.assert_array_equal(rands_init, rands_fit)
def test_lda_monotone(self):
dtm = self.dtm
model = self.model
n_topics = self.n_topics
random_seed = self.random_seed
# fit model with additional iterations, verify improvement in log likelihood
n_iter = self.n_iter * 2
model_new = lda.LDA(n_topics=n_topics, n_iter=n_iter, random_state=random_seed)
model_new.fit(dtm)
self.assertGreater(model_new.loglikelihood(), model.loglikelihood())
def test_lda_zero_iter(self):
dtm = self.dtm
model = self.model
doc_topic = self.doc_topic
n_topics = self.n_topics
random_seed = self.random_seed
# fit a new model with 0 iterations
n_iter = 0
model_new = lda.LDA(n_topics=n_topics, n_iter=n_iter, random_state=random_seed)
doc_topic_new = model_new.fit_transform(dtm)
self.assertIsNotNone(model_new)
self.assertIsNotNone(doc_topic_new)
self.assertLess(model_new.loglikelihood(), model.loglikelihood())
self.assertFalse((doc_topic_new == doc_topic).all())
|
ariddell/lda-debian
|
lda/tests/test_lda_reuters.py
|
Python
|
mpl-2.0
| 3,924
|
# -*- coding: utf-8 -*-
# Copyright(C) 2010-2011 Nicolas Duhamel
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
from weboob.deprecated.browser import Page
import urllib
class LoginPage(Page):
def on_loaded(self):
pass
def login(self, user, pwd):
post_data = {"credential" : str(user),
"password" : str(pwd),
"save_user": "false",
"save_pwd" : "false",
"save_TC" : "true",
"action" : "valider",
"usertype" : "",
"service" : "",
"url" : "http://www.orange.fr",
"case" : "",
"origin" : "", }
post_data = urllib.urlencode(post_data)
self.browser.addheaders = [('Referer', 'http://id.orange.fr/auth_user/template/auth0user/htm/vide.html'),
("Content-Type" , 'application/x-www-form-urlencoded') ]
self.browser.open(self.browser.geturl(), data=post_data)
#~ print "LOGIN!!!"
#~ self.browser.select_form(predicate=lambda form: "id" in form.attrs and form.attrs["id"] == "authentication_form" )
#~ user_control = self.browser.find_control(id="user_credential")
#~ user_control.value = user
#~ pwd_control = self.browser.find_control(id="user_password")
#~ pwd_control.value = pwd
#~ self.browser.submit()
|
sputnick-dev/weboob
|
modules/orange/pages/login.py
|
Python
|
agpl-3.0
| 2,090
|
from django.conf import settings
# Dashboard
JET_INDEX_DASHBOARD = getattr(settings, 'JET_INDEX_DASHBOARD', 'jet.dashboard.dashboard.DefaultIndexDashboard')
JET_APP_INDEX_DASHBOARD = getattr(settings, 'JET_APP_INDEX_DASHBOARD', 'jet.dashboard.dashboard.DefaultAppIndexDashboard')
|
pombredanne/django-jet
|
jet/dashboard/settings.py
|
Python
|
agpl-3.0
| 280
|
# -*- coding: utf-8 -*-
from odoo import models, fields, api
from math import fabs
import calendar
class BalanceSheet(models.Model):
"""资产负债表模板
模板用来定义最终输出的 资产负债表的格式,
每行的 科目的顺序 科目的大分类的所属的子科目的顺序
-- 本模板适合中国会计使用.
"""
_name = "balance.sheet"
_order = "sequence,id"
_description = u'资产负债表模板'
sequence = fields.Integer(u'序号')
line = fields.Integer(u'序号', required=True, help=u'资产负债表的行次')
balance = fields.Char(u'资产')
line_num = fields.Char(u'行次', help=u'此处行次并不是出报表的实际的行数,只是显示用的用来符合国人习惯')
ending_balance = fields.Float(u'期末数')
balance_formula = fields.Text(
u'科目范围', help=u'设定本行的资产负债表的科目范围,例如1001~1012999999 结束科目尽可能大一些方便以后扩展')
beginning_balance = fields.Float(u'年初数')
balance_two = fields.Char(u'负债和所有者权益')
line_num_two = fields.Char(u'行次', help=u'此处行次并不是出报表的实际的行数,只是显示用的用来符合国人习惯')
ending_balance_two = fields.Float(u'期末数')
balance_two_formula = fields.Text(
u'科目范围', help=u'设定本行的资产负债表的科目范围,例如1001~1012999999 结束科目尽可能大一些方便以后扩展')
beginning_balance_two = fields.Float(u'年初数', help=u'报表行本年的年余额')
company_id = fields.Many2one(
'res.company',
string=u'公司',
change_default=True,
default=lambda self: self.env['res.company']._company_default_get())
class CreateBalanceSheetWizard(models.TransientModel):
"""创建资产负债 和利润表的 wizard"""
_name = "create.balance.sheet.wizard"
_description = u'资产负债表和利润表的向导'
company_id = fields.Many2one(
'res.company',
string=u'公司',
change_default=True,
default=lambda self: self.env['res.company']._company_default_get())
@api.model
def _default_period_domain(self):
"""
Define the selectable range of accounting periods (this is a range).
:return: domain condition
"""
period_domain_setting = self.env['ir.values'].get_default(
'finance.config.settings', 'default_period_domain')
return [('is_closed', '!=', False)] if period_domain_setting == 'cannot' else []
@api.model
def _default_period_id(self):
return self._default_period_id_impl()
def _default_period_id_impl(self):
"""
Default to the current accounting period.
:return: the current accounting period record
"""
return self.env['finance.period'].get_date_now_period_id()
period_id = fields.Many2one('finance.period', string=u'会计期间', domain=_default_period_domain,
default=_default_period_id, help=u'用来设定报表的期间')
@api.multi
def compute_balance(self, parameter_str, period_id, compute_field_list):
"""根据所填写的 科目的code 和计算的字段 进行计算对应的资产值"""
if parameter_str:
parameter_str_list = parameter_str.split('~')
subject_vals = []
if len(parameter_str_list) == 1:
subject_ids = self.env['finance.account'].search(
[('code', '=', parameter_str_list[0]), ('account_type', '!=', 'view')])
else:
subject_ids = self.env['finance.account'].search(
[('code', '>=', parameter_str_list[0]), ('code', '<=', parameter_str_list[1]),
('account_type', '!=', 'view')])
trial_balances = self.env['trial.balance'].search([('subject_name_id', 'in', [
subject.id for subject in subject_ids]), ('period_id', '=', period_id.id)])
for trial_balance in trial_balances:
# Apply a different operation depending on the balance direction of the account matched by the code parameter
# trial_balance.subject_name_id.costs_types == 'assets' handles accumulated depreciation, whose balance sits on the credit side
if trial_balance.subject_name_id.costs_types == 'assets' or trial_balance.subject_name_id.costs_types == 'cost':
subject_vals.append(
trial_balance[compute_field_list[0]] - trial_balance[compute_field_list[1]])
elif trial_balance.subject_name_id.costs_types == 'debt' or trial_balance.subject_name_id.costs_types == 'equity':
subject_vals.append(
trial_balance[compute_field_list[1]] - trial_balance[compute_field_list[0]])
return sum(subject_vals)
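# The parameter_str format mirrors the (Chinese) help text on balance_formula:
# either a single account code such as '1001', or an inclusive range
# 'start~end' such as '1001~1012999999'; deal_with_balance_formula() below
# additionally sums several such segments separated by ';'.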
def deal_with_balance_formula(self, balance_formula, period_id, year_begain_field):
if balance_formula:
return_vals = sum([self.compute_balance(one_formula, period_id, year_begain_field)
for one_formula in balance_formula.split(';')])
else:
return_vals = 0
return return_vals
def balance_sheet_create(self, balance_sheet_obj, year_begain_field, current_period_field):
balance_sheet_obj.write(
{'beginning_balance': self.deal_with_balance_formula(balance_sheet_obj.balance_formula,
self.period_id, year_begain_field),
'ending_balance': self.deal_with_balance_formula(balance_sheet_obj.balance_formula,
self.period_id, current_period_field),
'beginning_balance_two': self.deal_with_balance_formula(balance_sheet_obj.balance_two_formula,
self.period_id, year_begain_field),
'ending_balance_two': self.deal_with_balance_formula(balance_sheet_obj.balance_two_formula,
self.period_id, current_period_field)})
@api.multi
def create_balance_sheet(self):
""" 资产负债表的创建 """
balance_wizard = self.env['create.trial.balance.wizard'].create(
{'period_id': self.period_id.id})
balance_wizard.create_trial_balance()
view_id = self.env.ref('finance.balance_sheet_tree_wizard').id
balance_sheet_objs = self.env['balance.sheet'].search([])
year_begain_field = ['year_init_debit', 'year_init_credit']
current_period_field = [
'ending_balance_debit', 'ending_balance_credit']
for balance_sheet_obj in balance_sheet_objs:
self.balance_sheet_create(
balance_sheet_obj, year_begain_field, current_period_field)
force_company = self._context.get('force_company')
if not force_company:
force_company = self.env.user.company_id.id
company_row = self.env['res.company'].browse(force_company)
days = calendar.monthrange(
int(self.period_id.year), int(self.period_id.month))[1]
# TODO: the cells do not line up correctly
attachment_information = u'编制单位:' + company_row.name + u',,,,' + self.period_id.year \
+ u'年' + self.period_id.month + u'月' + \
str(days) + u'日' + u',' + u'单位:元'
domain = [('id', 'in', [balance_sheet_obj.id for balance_sheet_obj in balance_sheet_objs])]
return { # return the action listing the generated balance sheet data
'type': 'ir.actions.act_window',
'name': u'资产负债表:' + self.period_id.name,
'view_type': 'form',
'view_mode': 'tree',
'res_model': 'balance.sheet',
'target': 'current',
'view_id': False,
'views': [(view_id, 'tree')],
'context': {'period_id': self.period_id.id, 'attachment_information': attachment_information},
'domain': domain,
'limit': 65535,
}
def deal_with_profit_formula(self, occurrence_balance_formula, period_id, year_begain_field):
if occurrence_balance_formula:
return_vals = sum([self.compute_profit(balance_formula, period_id, year_begain_field)
for balance_formula in occurrence_balance_formula.split(";")
])
else:
return_vals = 0
return return_vals
@api.multi
def create_profit_statement(self):
"""生成利润表"""
balance_wizard = self.env['create.trial.balance.wizard'].create(
{'period_id': self.period_id.id})
balance_wizard.create_trial_balance()
view_id = self.env.ref('finance.profit_statement_tree').id
balance_sheet_objs = self.env['profit.statement'].search([])
year_begain_field = ['cumulative_occurrence_debit',
'cumulative_occurrence_credit']
current_period_field = [
'current_occurrence_debit', 'current_occurrence_credit']
for balance_sheet_obj in balance_sheet_objs:
balance_sheet_obj.write({'cumulative_occurrence_balance': self.deal_with_profit_formula(
balance_sheet_obj.occurrence_balance_formula, self.period_id, year_begain_field),
'current_occurrence_balance': self.compute_profit(
balance_sheet_obj.occurrence_balance_formula, self.period_id,
current_period_field)})
force_company = self._context.get('force_company')
if not force_company:
force_company = self.env.user.company_id.id
company_row = self.env['res.company'].browse(force_company)
days = calendar.monthrange(
int(self.period_id.year), int(self.period_id.month))[1]
attachment_information = u'编制单位:' + company_row.name + u',,' + self.period_id.year \
+ u'年' + self.period_id.month + u'月' + u',' + u'单位:元'
domain = [('id', 'in', [balance_sheet_obj.id for balance_sheet_obj in balance_sheet_objs])]
return { # return the action listing the generated income statement data
'type': 'ir.actions.act_window',
'name': u'利润表:' + self.period_id.name,
'view_type': 'form',
'view_mode': 'tree',
'res_model': 'profit.statement',
'target': 'current',
'view_id': False,
'views': [(view_id, 'tree')],
'context': {'period_id': self.period_id.id, 'attachment_information': attachment_information},
'domain': domain,
'limit': 65535,
}
@api.multi
def compute_profit(self, parameter_str, period_id, compute_field_list):
""" 根据传进来的 的科目的code 进行利润表的计算 """
if parameter_str:
parameter_str_list = parameter_str.split('~')
subject_vals_in = []
subject_vals_out = []
total_sum = 0
sign_in = False
sign_out = False
if len(parameter_str_list) == 1:
subject_ids = self.env['finance.account'].search(
[('code', '=', parameter_str_list[0]), ('account_type', '!=', 'view')])
else:
subject_ids = self.env['finance.account'].search(
[('code', '>=', parameter_str_list[0]), ('code', '<=', parameter_str_list[1]),
('account_type', '!=', 'view')])
if subject_ids: # determine the debit/credit direction of the accounts on this line
for line in subject_ids:
if line.balance_directions == 'in':
sign_in = True
if line.balance_directions == 'out':
sign_out = True
trial_balances = self.env['trial.balance'].search([('subject_name_id', 'in', [
subject.id for subject in subject_ids]), ('period_id', '=', period_id.id)])
for trial_balance in trial_balances:
if trial_balance.subject_name_id.balance_directions == 'in':
subject_vals_in.append(trial_balance[compute_field_list[0]])
elif trial_balance.subject_name_id.balance_directions == 'out':
subject_vals_out.append(trial_balance[compute_field_list[1]])
if sign_out and sign_in: # both debit and credit directions are present
total_sum = sum(subject_vals_out) - sum(subject_vals_in)
else:
if subject_vals_in:
total_sum = sum(subject_vals_in)
else:
total_sum = sum(subject_vals_out)
return total_sum
class ProfitStatement(models.Model):
"""利润表模板
模板主要用来定义项目的 科目范围,
然后根据科目的范围得到科目范围内的科目 的利润
"""
_name = "profit.statement"
_order = "sequence,id"
_description = u'利润表模板'
sequence = fields.Integer(u'序号')
balance = fields.Char(u'项目', help=u'报表的行次的总一个名称')
line_num = fields.Char(u'行次', help=u'生成报表的行次')
cumulative_occurrence_balance = fields.Float(u'本年累计金额', help=u'本年利润金额')
occurrence_balance_formula = fields.Text(
u'科目范围', help=u'设定本行的利润的科目范围,例如1001~1012999999 结束科目尽可能大一些方便以后扩展')
current_occurrence_balance = fields.Float(u'本月金额', help=u'本月的利润的金额')
company_id = fields.Many2one(
'res.company',
string=u'公司',
change_default=True,
default=lambda self: self.env['res.company']._company_default_get())
|
floraXiao/gooderp_addons
|
finance/models/balance_sheet.py
|
Python
|
agpl-3.0
| 13,962
|
# -*- coding: UTF-8 -*-
import time
from odoorpc.tests import LoginTestCase
from odoorpc import error
from odoorpc.models import Model
from odoorpc.env import Environment
class TestModel(LoginTestCase):
def setUp(self):
LoginTestCase.setUp(self)
self.partner_obj = self.odoo.env['res.partner']
self.p0_id = self.partner_obj.create({'name': "Parent"})
self.p1_id = self.partner_obj.create({'name': "Child 1"})
self.p2_id = self.partner_obj.create({'name': "Child 2"})
self.group_obj = self.odoo.env['res.groups']
self.u0_id = self.user_obj.create(
{'name': "TestOdooRPC", 'login': 'test_%s' % time.time()})
self.g1_id = self.group_obj.create({'name': "Group 1"})
self.g2_id = self.group_obj.create({'name': "Group 2"})
def test_create_model_class(self):
partner_obj = self.odoo.env['res.partner']
self.assertEqual(partner_obj._name, 'res.partner')
self.assertIn('name', partner_obj._columns)
self.assertIsInstance(partner_obj.env, Environment)
def test_model_browse(self):
partner = self.partner_obj.browse(1)
self.assertIsInstance(partner, Model)
self.assertEqual(partner.id, 1)
self.assertEqual(partner.ids, [1])
self.assertEqual(partner.env, self.partner_obj.env)
partners = self.partner_obj.browse([1])
self.assertIsInstance(partners, Model)
self.assertEqual(partners.id, 1)
self.assertEqual(partners.ids, [1])
self.assertEqual(partners.env, self.partner_obj.env)
self.assertEqual(partners.ids, partner.ids)
def test_model_browse_false(self):
partner = self.partner_obj.browse(False)
self.assertEqual(len(partner), 0)
def test_model_browse_wrong_id(self):
self.assertRaises(
ValueError,
self.partner_obj.browse,
9999999) # Wrong ID
self.assertRaises(
error.RPCError,
self.partner_obj.browse,
"1") # Wrong ID type
def test_model_browse_without_arg(self):
self.assertRaises(TypeError, self.partner_obj.browse)
def test_model_rpc_method(self):
user_obj = self.odoo.env['res.users']
user_obj.name_get(self.odoo.env.uid)
self.odoo.env['ir.sequence'].get('fake.code') # Return False
def test_model_rpc_method_error_no_arg(self):
# Handle exception (execute a 'name_get' with without args)
user_obj = self.odoo.env['res.users']
self.assertRaises(
error.RPCError,
user_obj.name_get) # No arg
def test_model_rpc_method_error_wrong_args(self):
# Handle exception (execute a 'search' with wrong args)
user_obj = self.odoo.env['res.users']
self.assertRaises(
error.RPCError,
user_obj.search,
False) # Wrong arg
def test_record_getitem_field(self):
partner = self.partner_obj.browse(1)
self.assertEqual(partner['id'], 1)
self.assertEqual(partner['name'], partner.name)
def test_record_getitem_integer(self):
partner = self.partner_obj.browse(1)
self.assertEqual(partner[0], partner)
def test_record_getitem_slice(self):
partner = self.partner_obj.browse(1)
self.assertEqual([record.id for record in partner[:]], [1])
def test_record_iter(self):
ids = self.partner_obj.search([])[:5]
partners = self.partner_obj.browse(ids)
self.assertEqual(set([partner.id for partner in partners]), set(ids))
partner = partners[0]
self.assertIn(partner.id, partners.ids)
self.assertEqual(id(partner._values), id(partners._values))
def test_record_with_context(self):
user = self.odoo.env.user
self.assertEqual(user.env.lang, 'en_US')
user_fr = user.with_context(lang='fr_FR')
self.assertEqual(user_fr.env.lang, 'fr_FR')
# Install 'fr_FR' and test the use of context with it
Wizard = self.odoo.env['base.language.install']
wiz_id = Wizard.create({'lang': 'fr_FR'})
Wizard.lang_install([wiz_id])
# Read data with two languages
Country = self.odoo.env['res.country']
de_id = Country.search([('code', '=', 'DE')])[0]
de = Country.browse(de_id)
self.assertEqual(de.name, 'Germany')
self.assertEqual(de.with_context(lang='fr_FR').name, 'Allemagne')
# Write data with two languages
Product = self.odoo.env['product.product']
self.assertEqual(Product.env.lang, 'en_US')
name_en = "Product en_US"
product_id = Product.create({'name': name_en})
product_en = Product.browse(product_id)
self.assertEqual(product_en.name, name_en)
product_fr = product_en.with_context(lang='fr_FR')
self.assertEqual(product_fr.env.lang, 'fr_FR')
name_fr = "Produit fr_FR"
product_fr.write({'name': name_fr})
product_fr = product_fr.with_context() # Refresh the recordset
self.assertEqual(product_fr.name, name_fr)
self.assertEqual(Product.env.lang, 'en_US')
product_en = Product.browse(product_id)
self.assertEqual(product_en.name, name_en)
new_name_fr = "%s (nouveau)" % name_fr
product_fr.name = new_name_fr
product_fr = product_fr.with_context() # Refresh the recordset
self.assertEqual(product_fr.name, new_name_fr)
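# The argument-less with_context() calls above are the refresh idiom this
# test relies on: odoorpc caches field values per recordset, so re-deriving
# the recordset drops the cached name and forces a fresh read on the next
# attribute access.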
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
Danisan/odoorpc
|
odoorpc/tests/test_model.py
|
Python
|
lgpl-3.0
| 5,538
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
import datetime
from oslo.utils import timeutils
import six
from keystone.common import cache
from keystone.common import dependency
from keystone.common import extension
from keystone.common import manager
from keystone import config
from keystone.contrib.revoke import model
from keystone import exception
from keystone.i18n import _
from keystone import notifications
from keystone.openstack.common import log
from keystone.openstack.common import versionutils
CONF = config.CONF
LOG = log.getLogger(__name__)
EXTENSION_DATA = {
'name': 'OpenStack Revoke API',
'namespace': 'http://docs.openstack.org/identity/api/ext/'
'OS-REVOKE/v1.0',
'alias': 'OS-REVOKE',
'updated': '2014-02-24T20:51:0-00:00',
'description': 'OpenStack revoked token reporting mechanism.',
'links': [
{
'rel': 'describedby',
'type': 'text/html',
'href': ('https://github.com/openstack/identity-api/blob/master/'
'openstack-identity-api/v3/src/markdown/'
'identity-api-v3-os-revoke-ext.md'),
}
]}
extension.register_admin_extension(EXTENSION_DATA['alias'], EXTENSION_DATA)
extension.register_public_extension(EXTENSION_DATA['alias'], EXTENSION_DATA)
SHOULD_CACHE = cache.should_cache_fn('revoke')
# TODO(ayoung): migrate from the token section
REVOCATION_CACHE_EXPIRATION_TIME = lambda: CONF.token.revocation_cache_time
def revoked_before_cutoff_time():
expire_delta = datetime.timedelta(
seconds=CONF.token.expiration + CONF.revoke.expiration_buffer)
oldest = timeutils.utcnow() - expire_delta
return oldest
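# Illustrative arithmetic for the cutoff above, with hypothetical settings
# CONF.token.expiration = 3600 and CONF.revoke.expiration_buffer = 1800:
# expire_delta is 5400 seconds, so any revocation event older than
# utcnow() - 90 minutes only concerns tokens that have already expired
# and can safely be pruned.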
@dependency.provider('revoke_api')
class Manager(manager.Manager):
"""Revoke API Manager.
Performs common logic for recording revocations.
"""
def __init__(self):
super(Manager, self).__init__(CONF.revoke.driver)
self._register_listeners()
self.model = model
def _user_callback(self, service, resource_type, operation,
payload):
self.revoke_by_user(payload['resource_info'])
def _role_callback(self, service, resource_type, operation,
payload):
self.revoke(
model.RevokeEvent(role_id=payload['resource_info']))
def _project_callback(self, service, resource_type, operation,
payload):
self.revoke(
model.RevokeEvent(project_id=payload['resource_info']))
def _domain_callback(self, service, resource_type, operation,
payload):
self.revoke(
model.RevokeEvent(domain_id=payload['resource_info']))
def _trust_callback(self, service, resource_type, operation,
payload):
self.revoke(
model.RevokeEvent(trust_id=payload['resource_info']))
def _consumer_callback(self, service, resource_type, operation,
payload):
self.revoke(
model.RevokeEvent(consumer_id=payload['resource_info']))
def _access_token_callback(self, service, resource_type, operation,
payload):
self.revoke(
model.RevokeEvent(access_token_id=payload['resource_info']))
def _group_callback(self, service, resource_type, operation, payload):
user_ids = (u['id'] for u in self.identity_api.list_users_in_group(
payload['resource_info']))
for uid in user_ids:
self.revoke(model.RevokeEvent(user_id=uid))
def _register_listeners(self):
callbacks = {
notifications.ACTIONS.deleted: [
['OS-TRUST:trust', self._trust_callback],
['OS-OAUTH1:consumer', self._consumer_callback],
['OS-OAUTH1:access_token', self._access_token_callback],
['role', self._role_callback],
['user', self._user_callback],
['project', self._project_callback],
],
notifications.ACTIONS.disabled: [
['user', self._user_callback],
['project', self._project_callback],
['domain', self._domain_callback],
],
notifications.ACTIONS.internal: [
[notifications.INVALIDATE_USER_TOKEN_PERSISTENCE,
self._user_callback],
]
}
for event, cb_info in six.iteritems(callbacks):
for resource_type, callback_fns in cb_info:
notifications.register_event_callback(event, resource_type,
callback_fns)
def revoke_by_user(self, user_id):
return self.revoke(model.RevokeEvent(user_id=user_id))
def _assert_not_domain_and_project_scoped(self, domain_id=None,
project_id=None):
if domain_id is not None and project_id is not None:
msg = _('The revoke call must not have both domain_id and '
'project_id. This is a bug in the Keystone server. The '
'current request is aborted.')
raise exception.UnexpectedError(exception=msg)
@versionutils.deprecated(as_of=versionutils.deprecated.JUNO,
remove_in=0)
def revoke_by_expiration(self, user_id, expires_at,
domain_id=None, project_id=None):
self._assert_not_domain_and_project_scoped(domain_id=domain_id,
project_id=project_id)
self.revoke(
model.RevokeEvent(user_id=user_id,
expires_at=expires_at,
domain_id=domain_id,
project_id=project_id))
def revoke_by_audit_id(self, audit_id):
self.revoke(model.RevokeEvent(audit_id=audit_id))
def revoke_by_audit_chain_id(self, audit_chain_id, project_id=None,
domain_id=None):
self._assert_not_domain_and_project_scoped(domain_id=domain_id,
project_id=project_id)
self.revoke(model.RevokeEvent(audit_chain_id=audit_chain_id,
domain_id=domain_id,
project_id=project_id))
def revoke_by_grant(self, role_id, user_id=None,
domain_id=None, project_id=None):
self.revoke(
model.RevokeEvent(user_id=user_id,
role_id=role_id,
domain_id=domain_id,
project_id=project_id))
def revoke_by_user_and_project(self, user_id, project_id):
self.revoke(
model.RevokeEvent(project_id=project_id, user_id=user_id))
def revoke_by_project_role_assignment(self, project_id, role_id):
self.revoke(model.RevokeEvent(project_id=project_id, role_id=role_id))
def revoke_by_domain_role_assignment(self, domain_id, role_id):
self.revoke(model.RevokeEvent(domain_id=domain_id, role_id=role_id))
@cache.on_arguments(should_cache_fn=SHOULD_CACHE,
expiration_time=REVOCATION_CACHE_EXPIRATION_TIME)
def _get_revoke_tree(self):
events = self.driver.get_events()
revoke_tree = model.RevokeTree(revoke_events=events)
return revoke_tree
def check_token(self, token_values):
"""Checks the values from a token against the revocation list
:param token_values: dictionary of values from a token,
normalized for differences between v2 and v3. The checked values are a
subset of the attributes of model.TokenEvent
:raises exception.TokenNotFound: if the token is invalid
"""
if self._get_revoke_tree().is_revoked(token_values):
raise exception.TokenNotFound(_('Failed to validate token'))
def revoke(self, event):
self.driver.revoke(event)
self._get_revoke_tree.invalidate(self)
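# Note the pairing above: revoke() persists the event through the driver and
# immediately invalidates the memoized _get_revoke_tree() result, so the next
# check_token() call rebuilds the revocation tree instead of consulting a
# stale cached copy.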
@six.add_metaclass(abc.ABCMeta)
class Driver(object):
"""Interface for recording and reporting revocation events."""
@abc.abstractmethod
def get_events(self, last_fetch=None):
"""return the revocation events, as a list of objects
:param last_fetch: Time of last fetch. Return all events newer.
:returns: A list of keystone.contrib.revoke.model.RevokeEvent
newer than `last_fetch`.
If no last_fetch is specified, returns all events
for tokens issued after the expiration cutoff.
"""
raise exception.NotImplemented() # pragma: no cover
@abc.abstractmethod
def revoke(self, event):
"""register a revocation event
:param event: An instance of
keystone.contrib.revoke.model.RevocationEvent
"""
raise exception.NotImplemented() # pragma: no cover
|
hughsaunders/keystone
|
keystone/contrib/revoke/core.py
|
Python
|
apache-2.0
| 9,549
|
"""Support for WUnderground weather service."""
import asyncio
from datetime import timedelta
import logging
import re
import aiohttp
import async_timeout
import voluptuous as vol
from homeassistant.helpers.typing import HomeAssistantType, ConfigType
from homeassistant.components import sensor
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
CONF_MONITORED_CONDITIONS, CONF_API_KEY, CONF_LATITUDE, CONF_LONGITUDE,
TEMP_FAHRENHEIT, TEMP_CELSIUS, LENGTH_INCHES, LENGTH_KILOMETERS,
LENGTH_MILES, LENGTH_FEET, ATTR_ATTRIBUTION)
from homeassistant.exceptions import PlatformNotReady
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.util import Throttle
import homeassistant.helpers.config_validation as cv
_RESOURCE = 'http://api.wunderground.com/api/{}/{}/{}/q/'
_LOGGER = logging.getLogger(__name__)
ATTRIBUTION = "Data provided by the WUnderground weather service"
CONF_PWS_ID = 'pws_id'
CONF_LANG = 'lang'
DEFAULT_LANG = 'EN'
MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=5)
# Helper classes for declaring sensor configurations
class WUSensorConfig:
"""WU Sensor Configuration.
defines basic HA properties of the weather sensor and
stores callbacks that can parse sensor values out of
the json data received by WU API.
"""
def __init__(self, friendly_name, feature, value,
unit_of_measurement=None, entity_picture=None,
icon="mdi:gauge", device_state_attributes=None,
device_class=None):
"""Constructor.
Args:
friendly_name (string|func): Friendly name
feature (string): WU feature. See:
https://www.wunderground.com/weather/api/d/docs?d=data/index
value (function(WUndergroundData)): callback that
extracts desired value from WUndergroundData object
unit_of_measurement (string): unit of measurement
entity_picture (string): value or callback returning
URL of entity picture
icon (string): icon name or URL
device_state_attributes (dict): dictionary of attributes,
or callable that returns it
"""
self.friendly_name = friendly_name
self.unit_of_measurement = unit_of_measurement
self.feature = feature
self.value = value
self.entity_picture = entity_picture
self.icon = icon
self.device_state_attributes = device_state_attributes or {}
self.device_class = device_class
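# Minimal illustrative use of WUSensorConfig (hypothetical, not part of
# SENSOR_TYPES below): a sensor that reads the station's time zone string
# out of the "conditions" feature payload, assuming the WU response carries
# a 'local_tz_long' key:
#   _tz_example = WUSensorConfig(
#       friendly_name='Local Time Zone',
#       feature='conditions',
#       value=lambda wu: wu.data['current_observation']['local_tz_long'],
#       icon='mdi:clock')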
class WUCurrentConditionsSensorConfig(WUSensorConfig):
"""Helper for defining sensor configurations for current conditions."""
def __init__(self, friendly_name, field, icon="mdi:gauge",
unit_of_measurement=None, device_class=None):
"""Constructor.
Args:
friendly_name (string|func): Friendly name of sensor
field (string): Field name in the "current_observation"
dictionary.
icon (string): icon name or URL, if None sensor
will use current weather symbol
unit_of_measurement (string): unit of measurement
"""
super().__init__(
friendly_name,
"conditions",
value=lambda wu: wu.data['current_observation'][field],
icon=icon,
unit_of_measurement=unit_of_measurement,
entity_picture=lambda wu: wu.data['current_observation'][
'icon_url'] if icon is None else None,
device_state_attributes={
'date': lambda wu: wu.data['current_observation'][
'observation_time']
},
device_class=device_class
)
class WUDailyTextForecastSensorConfig(WUSensorConfig):
"""Helper for defining sensor configurations for daily text forecasts."""
def __init__(self, period, field, unit_of_measurement=None):
"""Constructor.
Args:
period (int): forecast period number
field (string): field name to use as value
unit_of_measurement(string): unit of measurement
"""
super().__init__(
friendly_name=lambda wu: wu.data['forecast']['txt_forecast'][
'forecastday'][period]['title'],
feature='forecast',
value=lambda wu: wu.data['forecast']['txt_forecast'][
'forecastday'][period][field],
entity_picture=lambda wu: wu.data['forecast']['txt_forecast'][
'forecastday'][period]['icon_url'],
unit_of_measurement=unit_of_measurement,
device_state_attributes={
'date': lambda wu: wu.data['forecast']['txt_forecast']['date']
}
)
class WUDailySimpleForecastSensorConfig(WUSensorConfig):
"""Helper for defining sensor configurations for daily simpleforecasts."""
def __init__(self, friendly_name, period, field, wu_unit=None,
ha_unit=None, icon=None, device_class=None):
"""Constructor.
Args:
period (int): forecast period number
field (string): field name to use as value
wu_unit (string): "fahrenheit", "celsius", "degrees" etc.
see the example json at:
https://www.wunderground.com/weather/api/d/docs?d=data/forecast&MR=1
ha_unit (string): corresponding unit in home assistant
friendly_name (string): friendly name of the sensor
"""
super().__init__(
friendly_name=friendly_name,
feature='forecast',
value=(lambda wu: wu.data['forecast']['simpleforecast'][
'forecastday'][period][field][wu_unit])
if wu_unit else
(lambda wu: wu.data['forecast']['simpleforecast'][
'forecastday'][period][field]),
unit_of_measurement=ha_unit,
entity_picture=lambda wu: wu.data['forecast']['simpleforecast'][
'forecastday'][period]['icon_url'] if not icon else None,
icon=icon,
device_state_attributes={
'date': lambda wu: wu.data['forecast']['simpleforecast'][
'forecastday'][period]['date']['pretty']
},
device_class=device_class
)
class WUHourlyForecastSensorConfig(WUSensorConfig):
"""Helper for defining sensor configurations for hourly text forecasts."""
def __init__(self, period, field):
"""Constructor.
Args:
period (int): forecast period number
field (string): field name to use as value
"""
super().__init__(
friendly_name=lambda wu: "{} {}".format(
wu.data['hourly_forecast'][period]['FCTTIME'][
'weekday_name_abbrev'],
wu.data['hourly_forecast'][period]['FCTTIME'][
'civil']),
feature='hourly',
value=lambda wu: wu.data['hourly_forecast'][period][
field],
entity_picture=lambda wu: wu.data['hourly_forecast'][
period]["icon_url"],
device_state_attributes={
'temp_c': lambda wu: wu.data['hourly_forecast'][
period]['temp']['metric'],
'temp_f': lambda wu: wu.data['hourly_forecast'][
period]['temp']['english'],
'dewpoint_c': lambda wu: wu.data['hourly_forecast'][
period]['dewpoint']['metric'],
'dewpoint_f': lambda wu: wu.data['hourly_forecast'][
period]['dewpoint']['english'],
'precip_prop': lambda wu: wu.data['hourly_forecast'][
period]['pop'],
'sky': lambda wu: wu.data['hourly_forecast'][
period]['sky'],
'precip_mm': lambda wu: wu.data['hourly_forecast'][
period]['qpf']['metric'],
'precip_in': lambda wu: wu.data['hourly_forecast'][
period]['qpf']['english'],
'humidity': lambda wu: wu.data['hourly_forecast'][
period]['humidity'],
'wind_kph': lambda wu: wu.data['hourly_forecast'][
period]['wspd']['metric'],
'wind_mph': lambda wu: wu.data['hourly_forecast'][
period]['wspd']['english'],
'pressure_mb': lambda wu: wu.data['hourly_forecast'][
period]['mslp']['metric'],
'pressure_inHg': lambda wu: wu.data['hourly_forecast'][
period]['mslp']['english'],
'date': lambda wu: wu.data['hourly_forecast'][
period]['FCTTIME']['pretty'],
}
)
class WUAlmanacSensorConfig(WUSensorConfig):
"""Helper for defining field configurations for almanac sensors."""
def __init__(self, friendly_name, field, value_type, wu_unit,
unit_of_measurement, icon, device_class="temperature"):
"""Constructor.
Args:
friendly_name (string|func): Friendly name
field (string): value name returned in 'almanac' dict
as returned by the WU API
value_type (string): "record" or "normal"
wu_unit (string): unit name in WU API
icon (string): icon name or URL
unit_of_measurement (string): unit of measurement
device_class (string): home assistant device class
(defaults to "temperature")
"""
super().__init__(
friendly_name=friendly_name,
feature="almanac",
value=lambda wu: wu.data['almanac'][field][value_type][wu_unit],
unit_of_measurement=unit_of_measurement,
icon=icon,
device_class="temperature"
)
class WUAlertsSensorConfig(WUSensorConfig):
"""Helper for defining field configuration for alerts."""
def __init__(self, friendly_name):
"""Constructor.
Args:
friendly_name (string|func): Friendly name
"""
super().__init__(
friendly_name=friendly_name,
feature="alerts",
value=lambda wu: len(wu.data['alerts']),
icon=lambda wu: "mdi:alert-circle-outline"
if wu.data['alerts'] else "mdi:check-circle-outline",
device_state_attributes=self._get_attributes
)
@staticmethod
def _get_attributes(rest):
attrs = {}
if 'alerts' not in rest.data:
return attrs
alerts = rest.data['alerts']
multiple_alerts = len(alerts) > 1
for data in alerts:
for alert in ALERTS_ATTRS:
if data[alert]:
if multiple_alerts:
dkey = alert.capitalize() + '_' + data['type']
else:
dkey = alert.capitalize()
attrs[dkey] = data[alert]
return attrs
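# Illustrative shape of _get_attributes() output when two alerts are active
# (attribute keys gain a '_<type>' suffix only when more than one alert is
# present; the field names come from the ALERTS_ATTRS list defined elsewhere
# in this module):
#   {'Description_HEA': 'Heat advisory ...', 'Description_WIN': 'Winter ...'}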
# Declaration of supported WU sensors
# (see above helper classes for argument explanation)
SENSOR_TYPES = {
'alerts': WUAlertsSensorConfig('Alerts'),
'dewpoint_c': WUCurrentConditionsSensorConfig(
'Dewpoint', 'dewpoint_c', 'mdi:water', TEMP_CELSIUS),
'dewpoint_f': WUCurrentConditionsSensorConfig(
'Dewpoint', 'dewpoint_f', 'mdi:water', TEMP_FAHRENHEIT),
'dewpoint_string': WUCurrentConditionsSensorConfig(
'Dewpoint Summary', 'dewpoint_string', 'mdi:water'),
'feelslike_c': WUCurrentConditionsSensorConfig(
'Feels Like', 'feelslike_c', 'mdi:thermometer', TEMP_CELSIUS),
'feelslike_f': WUCurrentConditionsSensorConfig(
'Feels Like', 'feelslike_f', 'mdi:thermometer', TEMP_FAHRENHEIT),
'feelslike_string': WUCurrentConditionsSensorConfig(
'Feels Like', 'feelslike_string', "mdi:thermometer"),
'heat_index_c': WUCurrentConditionsSensorConfig(
'Heat index', 'heat_index_c', "mdi:thermometer", TEMP_CELSIUS),
'heat_index_f': WUCurrentConditionsSensorConfig(
'Heat index', 'heat_index_f', "mdi:thermometer", TEMP_FAHRENHEIT),
'heat_index_string': WUCurrentConditionsSensorConfig(
'Heat Index Summary', 'heat_index_string', "mdi:thermometer"),
'elevation': WUSensorConfig(
'Elevation',
'conditions',
value=lambda wu: wu.data['current_observation'][
'observation_location']['elevation'].split()[0],
unit_of_measurement=LENGTH_FEET,
icon="mdi:elevation-rise"),
'location': WUSensorConfig(
'Location',
'conditions',
value=lambda wu: wu.data['current_observation'][
'display_location']['full'],
icon="mdi:map-marker"),
'observation_time': WUCurrentConditionsSensorConfig(
'Observation Time', 'observation_time', "mdi:clock"),
'precip_1hr_in': WUCurrentConditionsSensorConfig(
'Precipitation 1hr', 'precip_1hr_in', "mdi:umbrella", LENGTH_INCHES),
'precip_1hr_metric': WUCurrentConditionsSensorConfig(
'Precipitation 1hr', 'precip_1hr_metric', "mdi:umbrella", 'mm'),
'precip_1hr_string': WUCurrentConditionsSensorConfig(
'Precipitation 1hr', 'precip_1hr_string', "mdi:umbrella"),
'precip_today_in': WUCurrentConditionsSensorConfig(
'Precipitation Today', 'precip_today_in', "mdi:umbrella",
LENGTH_INCHES),
'precip_today_metric': WUCurrentConditionsSensorConfig(
'Precipitation Today', 'precip_today_metric', "mdi:umbrella", 'mm'),
'precip_today_string': WUCurrentConditionsSensorConfig(
'Precipitation Today', 'precip_today_string', "mdi:umbrella"),
'pressure_in': WUCurrentConditionsSensorConfig(
'Pressure', 'pressure_in', "mdi:gauge", 'inHg',
device_class="pressure"),
'pressure_mb': WUCurrentConditionsSensorConfig(
'Pressure', 'pressure_mb', "mdi:gauge", 'mb',
device_class="pressure"),
'pressure_trend': WUCurrentConditionsSensorConfig(
'Pressure Trend', 'pressure_trend', "mdi:gauge",
device_class="pressure"),
'relative_humidity': WUSensorConfig(
'Relative Humidity',
'conditions',
value=lambda wu: int(wu.data['current_observation'][
'relative_humidity'][:-1]),
unit_of_measurement='%',
icon="mdi:water-percent",
device_class="humidity"),
'station_id': WUCurrentConditionsSensorConfig(
'Station ID', 'station_id', "mdi:home"),
'solarradiation': WUCurrentConditionsSensorConfig(
'Solar Radiation', 'solarradiation', "mdi:weather-sunny", "w/m2"),
'temperature_string': WUCurrentConditionsSensorConfig(
'Temperature Summary', 'temperature_string', "mdi:thermometer"),
'temp_c': WUCurrentConditionsSensorConfig(
'Temperature', 'temp_c', "mdi:thermometer", TEMP_CELSIUS,
device_class="temperature"),
'temp_f': WUCurrentConditionsSensorConfig(
'Temperature', 'temp_f', "mdi:thermometer", TEMP_FAHRENHEIT,
device_class="temperature"),
'UV': WUCurrentConditionsSensorConfig(
'UV', 'UV', "mdi:sunglasses"),
'visibility_km': WUCurrentConditionsSensorConfig(
'Visibility (km)', 'visibility_km', "mdi:eye", LENGTH_KILOMETERS),
'visibility_mi': WUCurrentConditionsSensorConfig(
'Visibility (miles)', 'visibility_mi', "mdi:eye", LENGTH_MILES),
'weather': WUCurrentConditionsSensorConfig(
'Weather Summary', 'weather', None),
'wind_degrees': WUCurrentConditionsSensorConfig(
'Wind Degrees', 'wind_degrees', "mdi:weather-windy", "°"),
'wind_dir': WUCurrentConditionsSensorConfig(
'Wind Direction', 'wind_dir', "mdi:weather-windy"),
'wind_gust_kph': WUCurrentConditionsSensorConfig(
'Wind Gust', 'wind_gust_kph', "mdi:weather-windy", 'kph'),
'wind_gust_mph': WUCurrentConditionsSensorConfig(
'Wind Gust', 'wind_gust_mph', "mdi:weather-windy", 'mph'),
'wind_kph': WUCurrentConditionsSensorConfig(
'Wind Speed', 'wind_kph', "mdi:weather-windy", 'kph'),
'wind_mph': WUCurrentConditionsSensorConfig(
'Wind Speed', 'wind_mph', "mdi:weather-windy", 'mph'),
'wind_string': WUCurrentConditionsSensorConfig(
'Wind Summary', 'wind_string', "mdi:weather-windy"),
'temp_high_record_c': WUAlmanacSensorConfig(
lambda wu: 'High Temperature Record ({})'.format(
wu.data['almanac']['temp_high']['recordyear']),
'temp_high', 'record', 'C', TEMP_CELSIUS, 'mdi:thermometer'),
'temp_high_record_f': WUAlmanacSensorConfig(
lambda wu: 'High Temperature Record ({})'.format(
wu.data['almanac']['temp_high']['recordyear']),
'temp_high', 'record', 'F', TEMP_FAHRENHEIT, 'mdi:thermometer'),
'temp_low_record_c': WUAlmanacSensorConfig(
lambda wu: 'Low Temperature Record ({})'.format(
wu.data['almanac']['temp_low']['recordyear']),
'temp_low', 'record', 'C', TEMP_CELSIUS, 'mdi:thermometer'),
'temp_low_record_f': WUAlmanacSensorConfig(
lambda wu: 'Low Temperature Record ({})'.format(
wu.data['almanac']['temp_low']['recordyear']),
'temp_low', 'record', 'F', TEMP_FAHRENHEIT, 'mdi:thermometer'),
'temp_low_avg_c': WUAlmanacSensorConfig(
'Historic Average of Low Temperatures for Today',
'temp_low', 'normal', 'C', TEMP_CELSIUS, 'mdi:thermometer'),
'temp_low_avg_f': WUAlmanacSensorConfig(
'Historic Average of Low Temperatures for Today',
'temp_low', 'normal', 'F', TEMP_FAHRENHEIT, 'mdi:thermometer'),
'temp_high_avg_c': WUAlmanacSensorConfig(
'Historic Average of High Temperatures for Today',
'temp_high', 'normal', 'C', TEMP_CELSIUS, "mdi:thermometer"),
'temp_high_avg_f': WUAlmanacSensorConfig(
'Historic Average of High Temperatures for Today',
'temp_high', 'normal', 'F', TEMP_FAHRENHEIT, "mdi:thermometer"),
'weather_1d': WUDailyTextForecastSensorConfig(0, "fcttext"),
'weather_1d_metric': WUDailyTextForecastSensorConfig(0, "fcttext_metric"),
'weather_1n': WUDailyTextForecastSensorConfig(1, "fcttext"),
'weather_1n_metric': WUDailyTextForecastSensorConfig(1, "fcttext_metric"),
'weather_2d': WUDailyTextForecastSensorConfig(2, "fcttext"),
'weather_2d_metric': WUDailyTextForecastSensorConfig(2, "fcttext_metric"),
'weather_2n': WUDailyTextForecastSensorConfig(3, "fcttext"),
'weather_2n_metric': WUDailyTextForecastSensorConfig(3, "fcttext_metric"),
'weather_3d': WUDailyTextForecastSensorConfig(4, "fcttext"),
'weather_3d_metric': WUDailyTextForecastSensorConfig(4, "fcttext_metric"),
'weather_3n': WUDailyTextForecastSensorConfig(5, "fcttext"),
'weather_3n_metric': WUDailyTextForecastSensorConfig(5, "fcttext_metric"),
'weather_4d': WUDailyTextForecastSensorConfig(6, "fcttext"),
'weather_4d_metric': WUDailyTextForecastSensorConfig(6, "fcttext_metric"),
'weather_4n': WUDailyTextForecastSensorConfig(7, "fcttext"),
'weather_4n_metric': WUDailyTextForecastSensorConfig(7, "fcttext_metric"),
'weather_1h': WUHourlyForecastSensorConfig(0, "condition"),
'weather_2h': WUHourlyForecastSensorConfig(1, "condition"),
'weather_3h': WUHourlyForecastSensorConfig(2, "condition"),
'weather_4h': WUHourlyForecastSensorConfig(3, "condition"),
'weather_5h': WUHourlyForecastSensorConfig(4, "condition"),
'weather_6h': WUHourlyForecastSensorConfig(5, "condition"),
'weather_7h': WUHourlyForecastSensorConfig(6, "condition"),
'weather_8h': WUHourlyForecastSensorConfig(7, "condition"),
'weather_9h': WUHourlyForecastSensorConfig(8, "condition"),
'weather_10h': WUHourlyForecastSensorConfig(9, "condition"),
'weather_11h': WUHourlyForecastSensorConfig(10, "condition"),
'weather_12h': WUHourlyForecastSensorConfig(11, "condition"),
'weather_13h': WUHourlyForecastSensorConfig(12, "condition"),
'weather_14h': WUHourlyForecastSensorConfig(13, "condition"),
'weather_15h': WUHourlyForecastSensorConfig(14, "condition"),
'weather_16h': WUHourlyForecastSensorConfig(15, "condition"),
'weather_17h': WUHourlyForecastSensorConfig(16, "condition"),
'weather_18h': WUHourlyForecastSensorConfig(17, "condition"),
'weather_19h': WUHourlyForecastSensorConfig(18, "condition"),
'weather_20h': WUHourlyForecastSensorConfig(19, "condition"),
'weather_21h': WUHourlyForecastSensorConfig(20, "condition"),
'weather_22h': WUHourlyForecastSensorConfig(21, "condition"),
'weather_23h': WUHourlyForecastSensorConfig(22, "condition"),
'weather_24h': WUHourlyForecastSensorConfig(23, "condition"),
'weather_25h': WUHourlyForecastSensorConfig(24, "condition"),
'weather_26h': WUHourlyForecastSensorConfig(25, "condition"),
'weather_27h': WUHourlyForecastSensorConfig(26, "condition"),
'weather_28h': WUHourlyForecastSensorConfig(27, "condition"),
'weather_29h': WUHourlyForecastSensorConfig(28, "condition"),
'weather_30h': WUHourlyForecastSensorConfig(29, "condition"),
'weather_31h': WUHourlyForecastSensorConfig(30, "condition"),
'weather_32h': WUHourlyForecastSensorConfig(31, "condition"),
'weather_33h': WUHourlyForecastSensorConfig(32, "condition"),
'weather_34h': WUHourlyForecastSensorConfig(33, "condition"),
'weather_35h': WUHourlyForecastSensorConfig(34, "condition"),
'weather_36h': WUHourlyForecastSensorConfig(35, "condition"),
'temp_high_1d_c': WUDailySimpleForecastSensorConfig(
"High Temperature Today", 0, "high", "celsius", TEMP_CELSIUS,
"mdi:thermometer", device_class="temperature"),
'temp_high_2d_c': WUDailySimpleForecastSensorConfig(
"High Temperature Tomorrow", 1, "high", "celsius", TEMP_CELSIUS,
"mdi:thermometer", device_class="temperature"),
'temp_high_3d_c': WUDailySimpleForecastSensorConfig(
"High Temperature in 3 Days", 2, "high", "celsius", TEMP_CELSIUS,
"mdi:thermometer", device_class="temperature"),
'temp_high_4d_c': WUDailySimpleForecastSensorConfig(
"High Temperature in 4 Days", 3, "high", "celsius", TEMP_CELSIUS,
"mdi:thermometer", device_class="temperature"),
'temp_high_1d_f': WUDailySimpleForecastSensorConfig(
"High Temperature Today", 0, "high", "fahrenheit", TEMP_FAHRENHEIT,
"mdi:thermometer", device_class="temperature"),
'temp_high_2d_f': WUDailySimpleForecastSensorConfig(
"High Temperature Tomorrow", 1, "high", "fahrenheit", TEMP_FAHRENHEIT,
"mdi:thermometer", device_class="temperature"),
'temp_high_3d_f': WUDailySimpleForecastSensorConfig(
"High Temperature in 3 Days", 2, "high", "fahrenheit", TEMP_FAHRENHEIT,
"mdi:thermometer", device_class="temperature"),
'temp_high_4d_f': WUDailySimpleForecastSensorConfig(
"High Temperature in 4 Days", 3, "high", "fahrenheit", TEMP_FAHRENHEIT,
"mdi:thermometer", device_class="temperature"),
'temp_low_1d_c': WUDailySimpleForecastSensorConfig(
"Low Temperature Today", 0, "low", "celsius", TEMP_CELSIUS,
"mdi:thermometer", device_class="temperature"),
'temp_low_2d_c': WUDailySimpleForecastSensorConfig(
"Low Temperature Tomorrow", 1, "low", "celsius", TEMP_CELSIUS,
"mdi:thermometer", device_class="temperature"),
'temp_low_3d_c': WUDailySimpleForecastSensorConfig(
"Low Temperature in 3 Days", 2, "low", "celsius", TEMP_CELSIUS,
"mdi:thermometer", device_class="temperature"),
'temp_low_4d_c': WUDailySimpleForecastSensorConfig(
"Low Temperature in 4 Days", 3, "low", "celsius", TEMP_CELSIUS,
"mdi:thermometer", device_class="temperature"),
'temp_low_1d_f': WUDailySimpleForecastSensorConfig(
"Low Temperature Today", 0, "low", "fahrenheit", TEMP_FAHRENHEIT,
"mdi:thermometer", device_class="temperature"),
'temp_low_2d_f': WUDailySimpleForecastSensorConfig(
"Low Temperature Tomorrow", 1, "low", "fahrenheit", TEMP_FAHRENHEIT,
"mdi:thermometer", device_class="temperature"),
'temp_low_3d_f': WUDailySimpleForecastSensorConfig(
"Low Temperature in 3 Days", 2, "low", "fahrenheit", TEMP_FAHRENHEIT,
"mdi:thermometer", device_class="temperature"),
'temp_low_4d_f': WUDailySimpleForecastSensorConfig(
"Low Temperature in 4 Days", 3, "low", "fahrenheit", TEMP_FAHRENHEIT,
"mdi:thermometer", device_class="temperature"),
'wind_gust_1d_kph': WUDailySimpleForecastSensorConfig(
"Max. Wind Today", 0, "maxwind", "kph", "kph", "mdi:weather-windy"),
'wind_gust_2d_kph': WUDailySimpleForecastSensorConfig(
"Max. Wind Tomorrow", 1, "maxwind", "kph", "kph", "mdi:weather-windy"),
'wind_gust_3d_kph': WUDailySimpleForecastSensorConfig(
"Max. Wind in 3 Days", 2, "maxwind", "kph", "kph",
"mdi:weather-windy"),
'wind_gust_4d_kph': WUDailySimpleForecastSensorConfig(
"Max. Wind in 4 Days", 3, "maxwind", "kph", "kph",
"mdi:weather-windy"),
'wind_gust_1d_mph': WUDailySimpleForecastSensorConfig(
"Max. Wind Today", 0, "maxwind", "mph", "mph",
"mdi:weather-windy"),
'wind_gust_2d_mph': WUDailySimpleForecastSensorConfig(
"Max. Wind Tomorrow", 1, "maxwind", "mph", "mph",
"mdi:weather-windy"),
'wind_gust_3d_mph': WUDailySimpleForecastSensorConfig(
"Max. Wind in 3 Days", 2, "maxwind", "mph", "mph",
"mdi:weather-windy"),
'wind_gust_4d_mph': WUDailySimpleForecastSensorConfig(
"Max. Wind in 4 Days", 3, "maxwind", "mph", "mph",
"mdi:weather-windy"),
'wind_1d_kph': WUDailySimpleForecastSensorConfig(
"Avg. Wind Today", 0, "avewind", "kph", "kph",
"mdi:weather-windy"),
'wind_2d_kph': WUDailySimpleForecastSensorConfig(
"Avg. Wind Tomorrow", 1, "avewind", "kph", "kph",
"mdi:weather-windy"),
'wind_3d_kph': WUDailySimpleForecastSensorConfig(
"Avg. Wind in 3 Days", 2, "avewind", "kph", "kph",
"mdi:weather-windy"),
'wind_4d_kph': WUDailySimpleForecastSensorConfig(
"Avg. Wind in 4 Days", 3, "avewind", "kph", "kph",
"mdi:weather-windy"),
'wind_1d_mph': WUDailySimpleForecastSensorConfig(
"Avg. Wind Today", 0, "avewind", "mph", "mph",
"mdi:weather-windy"),
'wind_2d_mph': WUDailySimpleForecastSensorConfig(
"Avg. Wind Tomorrow", 1, "avewind", "mph", "mph",
"mdi:weather-windy"),
'wind_3d_mph': WUDailySimpleForecastSensorConfig(
"Avg. Wind in 3 Days", 2, "avewind", "mph", "mph",
"mdi:weather-windy"),
'wind_4d_mph': WUDailySimpleForecastSensorConfig(
"Avg. Wind in 4 Days", 3, "avewind", "mph", "mph",
"mdi:weather-windy"),
'precip_1d_mm': WUDailySimpleForecastSensorConfig(
"Precipitation Intensity Today", 0, 'qpf_allday', 'mm', 'mm',
"mdi:umbrella"),
'precip_2d_mm': WUDailySimpleForecastSensorConfig(
"Precipitation Intensity Tomorrow", 1, 'qpf_allday', 'mm', 'mm',
"mdi:umbrella"),
'precip_3d_mm': WUDailySimpleForecastSensorConfig(
"Precipitation Intensity in 3 Days", 2, 'qpf_allday', 'mm', 'mm',
"mdi:umbrella"),
'precip_4d_mm': WUDailySimpleForecastSensorConfig(
"Precipitation Intensity in 4 Days", 3, 'qpf_allday', 'mm', 'mm',
"mdi:umbrella"),
'precip_1d_in': WUDailySimpleForecastSensorConfig(
"Precipitation Intensity Today", 0, 'qpf_allday', 'in',
LENGTH_INCHES, "mdi:umbrella"),
'precip_2d_in': WUDailySimpleForecastSensorConfig(
"Precipitation Intensity Tomorrow", 1, 'qpf_allday', 'in',
LENGTH_INCHES, "mdi:umbrella"),
'precip_3d_in': WUDailySimpleForecastSensorConfig(
"Precipitation Intensity in 3 Days", 2, 'qpf_allday', 'in',
LENGTH_INCHES, "mdi:umbrella"),
'precip_4d_in': WUDailySimpleForecastSensorConfig(
"Precipitation Intensity in 4 Days", 3, 'qpf_allday', 'in',
LENGTH_INCHES, "mdi:umbrella"),
'precip_1d': WUDailySimpleForecastSensorConfig(
"Precipitation Probability Today", 0, "pop", None, "%",
"mdi:umbrella"),
'precip_2d': WUDailySimpleForecastSensorConfig(
"Precipitation Probability Tomorrow", 1, "pop", None, "%",
"mdi:umbrella"),
'precip_3d': WUDailySimpleForecastSensorConfig(
"Precipitation Probability in 3 Days", 2, "pop", None, "%",
"mdi:umbrella"),
'precip_4d': WUDailySimpleForecastSensorConfig(
"Precipitation Probability in 4 Days", 3, "pop", None, "%",
"mdi:umbrella"),
}
# Alert Attributes
ALERTS_ATTRS = [
'date',
'description',
'expires',
'message',
]
# Supported language codes
LANG_CODES = [
'AF', 'AL', 'AR', 'HY', 'AZ', 'EU',
'BY', 'BU', 'LI', 'MY', 'CA', 'CN',
'TW', 'CR', 'CZ', 'DK', 'DV', 'NL',
'EN', 'EO', 'ET', 'FA', 'FI', 'FR',
'FC', 'GZ', 'DL', 'KA', 'GR', 'GU',
'HT', 'IL', 'HI', 'HU', 'IS', 'IO',
'ID', 'IR', 'IT', 'JP', 'JW', 'KM',
'KR', 'KU', 'LA', 'LV', 'LT', 'ND',
'MK', 'MT', 'GM', 'MI', 'MR', 'MN',
'NO', 'OC', 'PS', 'GN', 'PL', 'BR',
'PA', 'RO', 'RU', 'SR', 'SK', 'SL',
'SP', 'SI', 'SW', 'CH', 'TL', 'TT',
'TH', 'TR', 'TK', 'UA', 'UZ', 'VU',
'CY', 'SN', 'JI', 'YI',
]
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Required(CONF_API_KEY): cv.string,
vol.Optional(CONF_PWS_ID): cv.string,
vol.Optional(CONF_LANG, default=DEFAULT_LANG): vol.All(vol.In(LANG_CODES)),
vol.Inclusive(CONF_LATITUDE, 'coordinates',
'Latitude and longitude must exist together'): cv.latitude,
vol.Inclusive(CONF_LONGITUDE, 'coordinates',
'Latitude and longitude must exist together'): cv.longitude,
vol.Required(CONF_MONITORED_CONDITIONS):
vol.All(cv.ensure_list, vol.Length(min=1), [vol.In(SENSOR_TYPES)])
})
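# Example configuration (illustrative only; the YAML keys below follow the
# CONF_* constants in the schema above and standard Home Assistant naming —
# treat the station id and values as placeholders):
#
# sensor:
#   - platform: wunderground
#     api_key: YOUR_API_KEY
#     pws_id: KCASANFR70        # optional; omit to fall back to lat/long
#     lang: EN
#     monitored_conditions:
#       - weather
#       - temp_c
#       - precip_1d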
async def async_setup_platform(hass: HomeAssistantType, config: ConfigType,
async_add_entities, discovery_info=None):
"""Set up the WUnderground sensor."""
latitude = config.get(CONF_LATITUDE, hass.config.latitude)
longitude = config.get(CONF_LONGITUDE, hass.config.longitude)
pws_id = config.get(CONF_PWS_ID)
rest = WUndergroundData(
hass, config.get(CONF_API_KEY), pws_id,
config.get(CONF_LANG), latitude, longitude)
if pws_id is None:
unique_id_base = "@{:06f},{:06f}".format(longitude, latitude)
else:
# Manually specified weather station, use that for unique_id
unique_id_base = pws_id
sensors = []
for variable in config[CONF_MONITORED_CONDITIONS]:
sensors.append(WUndergroundSensor(hass, rest, variable,
unique_id_base))
await rest.async_update()
if not rest.data:
raise PlatformNotReady
async_add_entities(sensors, True)
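# Note on unique IDs: with no PWS id configured, each sensor's unique_id takes
# the form "@<longitude>,<latitude>,<condition>"
# (e.g. "@-122.420000,37.770000,temp_c"); with a station configured it becomes
# "<pws_id>,<condition>" (e.g. "KCASANFR70,temp_c"). The station id here is a
# hypothetical example.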
class WUndergroundSensor(Entity):
"""Implementing the WUnderground sensor."""
def __init__(self, hass: HomeAssistantType, rest, condition,
unique_id_base: str):
"""Initialize the sensor."""
self.rest = rest
self._condition = condition
self._state = None
self._attributes = {ATTR_ATTRIBUTION: ATTRIBUTION}
self._icon = None
self._entity_picture = None
self._unit_of_measurement = self._cfg_expand("unit_of_measurement")
self.rest.request_feature(SENSOR_TYPES[condition].feature)
        # This is only the suggested entity id; the entity registry may
        # change it later.
self.entity_id = sensor.ENTITY_ID_FORMAT.format('pws_' + condition)
self._unique_id = "{},{}".format(unique_id_base, condition)
self._device_class = self._cfg_expand("device_class")
def _cfg_expand(self, what, default=None):
"""Parse and return sensor data."""
cfg = SENSOR_TYPES[self._condition]
val = getattr(cfg, what)
if not callable(val):
return val
try:
val = val(self.rest)
except (KeyError, IndexError, TypeError, ValueError) as err:
_LOGGER.warning("Failed to expand cfg from WU API."
" Condition: %s Attr: %s Error: %s",
self._condition, what, repr(err))
val = default
return val
def _update_attrs(self):
"""Parse and update device state attributes."""
attrs = self._cfg_expand("device_state_attributes", {})
for (attr, callback) in attrs.items():
if callable(callback):
try:
self._attributes[attr] = callback(self.rest)
except (KeyError, IndexError, TypeError, ValueError) as err:
_LOGGER.warning("Failed to update attrs from WU API."
" Condition: %s Attr: %s Error: %s",
self._condition, attr, repr(err))
else:
self._attributes[attr] = callback
@property
def name(self):
"""Return the name of the sensor."""
return self._cfg_expand("friendly_name")
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def device_state_attributes(self):
"""Return the state attributes."""
return self._attributes
@property
def icon(self):
"""Return icon."""
return self._icon
@property
def entity_picture(self):
"""Return the entity picture."""
return self._entity_picture
@property
def unit_of_measurement(self):
"""Return the units of measurement."""
return self._unit_of_measurement
@property
def device_class(self):
"""Return the units of measurement."""
return self._device_class
async def async_update(self):
"""Update current conditions."""
await self.rest.async_update()
if not self.rest.data:
# no data, return
return
self._state = self._cfg_expand("value")
self._update_attrs()
self._icon = self._cfg_expand("icon", super().icon)
url = self._cfg_expand("entity_picture")
if isinstance(url, str):
self._entity_picture = re.sub(r'^http://', 'https://',
url, flags=re.IGNORECASE)
@property
def unique_id(self) -> str:
"""Return a unique ID."""
return self._unique_id
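# The SENSOR_TYPES table mixes static values and callables that take the
# shared WUndergroundData instance; _cfg_expand above resolves both forms.
# A minimal standalone sketch of that pattern (hypothetical helper, not used
# by the component itself):
def _example_cfg_expand(cfg_value, rest, default=None):
    """Resolve a config field that may be a constant or a callable."""
    if not callable(cfg_value):
        return cfg_value
    try:
        # Callables receive the shared data object and may fail while the
        # API response is incomplete; fall back to the default in that case.
        return cfg_value(rest)
    except (KeyError, IndexError, TypeError, ValueError):
        return default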
class WUndergroundData:
"""Get data from WUnderground."""
def __init__(self, hass, api_key, pws_id, lang, latitude, longitude):
"""Initialize the data object."""
self._hass = hass
self._api_key = api_key
self._pws_id = pws_id
self._lang = 'lang:{}'.format(lang)
self._latitude = latitude
self._longitude = longitude
self._features = set()
self.data = None
self._session = async_get_clientsession(self._hass)
def request_feature(self, feature):
"""Register feature to be fetched from WU API."""
self._features.add(feature)
def _build_url(self, baseurl=_RESOURCE):
url = baseurl.format(
self._api_key, '/'.join(sorted(self._features)), self._lang)
if self._pws_id:
url = url + 'pws:{}'.format(self._pws_id)
else:
url = url + '{},{}'.format(self._latitude, self._longitude)
return url + '.json'
@Throttle(MIN_TIME_BETWEEN_UPDATES)
async def async_update(self):
"""Get the latest data from WUnderground."""
try:
with async_timeout.timeout(10, loop=self._hass.loop):
response = await self._session.get(self._build_url())
result = await response.json()
if "error" in result['response']:
raise ValueError(result['response']["error"]["description"])
self.data = result
except ValueError as err:
_LOGGER.error("Check WUnderground API %s", err.args)
except (asyncio.TimeoutError, aiohttp.ClientError) as err:
_LOGGER.error("Error fetching WUnderground data: %s", repr(err))
|
auduny/home-assistant
|
homeassistant/components/wunderground/sensor.py
|
Python
|
apache-2.0
| 36,257
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
'''
Copyright (C) 2012 Diego Torres Milano
Created on Feb 5, 2012
@author: diego
'''
import sys
import os
import time
import StringIO
import unittest
import exceptions
import platform
# PyDev sets PYTHONPATH, use it
try:
for p in os.environ['PYTHONPATH'].split(':'):
        if p not in sys.path:
sys.path.append(p)
except:
pass
try:
sys.path.insert(0, os.path.join(os.environ['ANDROID_VIEW_CLIENT_HOME'], 'src'))
except:
pass
from com.dtmilano.android.viewclient import *
from mocks import MockDevice, MockViewServer
from mocks import DUMP, DUMP_SAMPLE_UI, VIEW_MAP, VIEW_MAP_API_8, VIEW_MAP_API_17, RUNNING, STOPPED, WINDOWS
os_name = platform.system()
if os_name.startswith('Linux'):
TRUE = '/bin/true'
else:
TRUE = '/usr/bin/true'
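# A more defensive variant would probe the known locations instead of keying
# off the platform name (sketch only; the platform check above remains the
# actual behavior):
#
#     for candidate in ('/bin/true', '/usr/bin/true'):
#         if os.path.exists(candidate):
#             TRUE = candidate
#             break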
class ViewTest(unittest.TestCase):
def setUp(self):
self.view = View(VIEW_MAP, None, -1)
def tearDown(self):
try:
del os.environ['ANDROID_SERIAL']
except:
pass
def testViewFactory_View(self):
attrs = {'class': 'android.widget.AnyView', 'text:mText': 'Button with ID'}
view = View.factory(attrs, None, -1)
self.assertTrue(isinstance(view, View))
def testViewFactory_TextView(self):
attrs = {'class': 'android.widget.TextView', 'text:mText': 'Button with ID'}
view = View.factory(attrs, None, -1)
self.assertTrue(isinstance(view, TextView))
    def testViewFactory_EditText(self):
attrs = {'class': 'android.widget.EditText', 'text:mText': 'Button with ID'}
view = View.factory(attrs, None, -1)
self.assertTrue(isinstance(view, EditText))
def testView_notSpecifiedSdkVersion(self):
device = MockDevice()
view = View(VIEW_MAP, device, -1)
self.assertEqual(device.version, view.build[VERSION_SDK_PROPERTY])
def testView_specifiedSdkVersion_8(self):
view = View(VIEW_MAP_API_8, MockDevice(), 8)
self.assertEqual(8, view.build[VERSION_SDK_PROPERTY])
def testView_specifiedSdkVersion_10(self):
view = View(VIEW_MAP, MockDevice(), 10)
self.assertEqual(10, view.build[VERSION_SDK_PROPERTY])
def testView_specifiedSdkVersion_16(self):
view = View(VIEW_MAP, MockDevice(), 16)
self.assertEqual(16, view.build[VERSION_SDK_PROPERTY])
def testInnerMethod(self):
v = View({'isChecked()':'true'}, None)
self.assertTrue(v.isChecked())
v.map['isChecked()'] = 'false'
self.assertFalse(v.isChecked(), "Expected False but is %s {%s}" % (v.isChecked(), v.map['isChecked()']))
self.assertFalse(v.isChecked())
v.map['other'] = 1
self.assertEqual(1, v.other())
v.map['evenMore'] = "ABC"
self.assertEqual("ABC", v.evenMore())
v.map['more'] = "abc"
v.map['more'] = v.evenMore()
self.assertEqual("ABC", v.more())
v.map['isMore()'] = 'true'
self.assertTrue(v.isMore())
def testGetClass(self):
self.assertEqual('android.widget.ToggleButton', self.view.getClass())
def testGetId(self):
self.assertEqual('id/button_with_id', self.view.getId())
def testTextPropertyForDifferentSdkVersions(self):
VP = { -1:TEXT_PROPERTY, 8:TEXT_PROPERTY_API_10, 10:TEXT_PROPERTY_API_10, 15:TEXT_PROPERTY, 16:TEXT_PROPERTY_UI_AUTOMATOR, 17:TEXT_PROPERTY_UI_AUTOMATOR}
for version, textProperty in VP.items():
view = View(None, None, version)
self.assertEqual(textProperty, view.textProperty, msg='version %d: expected: %s actual: %s' % (version, textProperty, view.textProperty))
def testTextPropertyForDifferentSdkVersions_device(self):
VP = { -1:TEXT_PROPERTY, 8:TEXT_PROPERTY_API_10, 10:TEXT_PROPERTY_API_10, 15:TEXT_PROPERTY, 16:TEXT_PROPERTY_UI_AUTOMATOR, 17:TEXT_PROPERTY_UI_AUTOMATOR}
for version, textProperty in VP.items():
device = MockDevice(version=version)
view = View(None, device, -1)
self.assertEqual(textProperty, view.textProperty, msg='version %d' % version)
def testLeftPropertyForDifferentSdkVersions(self):
VP = { -1:LEFT_PROPERTY, 8:LEFT_PROPERTY_API_8, 10:LEFT_PROPERTY, 15:LEFT_PROPERTY, 16:LEFT_PROPERTY, 17:LEFT_PROPERTY}
for version, leftProperty in VP.items():
view = View(None, None, version)
self.assertEqual(leftProperty, view.leftProperty, msg='version %d' % version)
def testLeftPropertyForDifferentSdkVersions_device(self):
VP = { -1:LEFT_PROPERTY, 8:LEFT_PROPERTY_API_8, 10:LEFT_PROPERTY, 15:LEFT_PROPERTY, 16:LEFT_PROPERTY, 17:LEFT_PROPERTY}
for version, leftProperty in VP.items():
device = MockDevice(version=version)
view = View(None, device, -1)
self.assertEqual(leftProperty, view.leftProperty, msg='version %d' % version)
def testTopPropertyForDifferentSdkVersions(self):
VP = { -1:TOP_PROPERTY, 8:TOP_PROPERTY_API_8, 10:TOP_PROPERTY, 15:TOP_PROPERTY, 16:TOP_PROPERTY, 17:TOP_PROPERTY}
for version, topProperty in VP.items():
view = View(None, None, version)
self.assertEqual(topProperty, view.topProperty, msg='version %d' % version)
def testTopPropertyForDifferentSdkVersions_device(self):
VP = { -1:TOP_PROPERTY, 8:TOP_PROPERTY_API_8, 10:TOP_PROPERTY, 15:TOP_PROPERTY, 16:TOP_PROPERTY, 17:TOP_PROPERTY}
for version, topProperty in VP.items():
device = MockDevice(version=version)
view = View(None, device, -1)
self.assertEqual(topProperty, view.topProperty, msg='version %d' % version)
def testWidthPropertyForDifferentSdkVersions(self):
VP = { -1:WIDTH_PROPERTY, 8:WIDTH_PROPERTY_API_8, 10:WIDTH_PROPERTY, 15:WIDTH_PROPERTY, 16:WIDTH_PROPERTY, 17:WIDTH_PROPERTY}
for version, widthProperty in VP.items():
view = View(None, None, version)
self.assertEqual(widthProperty, view.widthProperty, msg='version %d' % version)
def testWidthPropertyForDifferentSdkVersions_device(self):
VP = { -1:WIDTH_PROPERTY, 8:WIDTH_PROPERTY_API_8, 10:WIDTH_PROPERTY, 15:WIDTH_PROPERTY, 16:WIDTH_PROPERTY, 17:WIDTH_PROPERTY}
for version, widthProperty in VP.items():
device = MockDevice(version=version)
view = View(None, device, -1)
self.assertEqual(widthProperty, view.widthProperty, msg='version %d' % version)
def testHeightPropertyForDifferentSdkVersions(self):
VP = { -1:HEIGHT_PROPERTY, 8:HEIGHT_PROPERTY_API_8, 10:HEIGHT_PROPERTY, 15:HEIGHT_PROPERTY, 16:HEIGHT_PROPERTY, 17:HEIGHT_PROPERTY}
for version, heightProperty in VP.items():
view = View(None, None, version)
self.assertEqual(heightProperty, view.heightProperty, msg='version %d' % version)
def testHeightPropertyForDifferentSdkVersions_device(self):
VP = { -1:HEIGHT_PROPERTY, 8:HEIGHT_PROPERTY_API_8, 10:HEIGHT_PROPERTY, 15:HEIGHT_PROPERTY, 16:HEIGHT_PROPERTY, 17:HEIGHT_PROPERTY}
for version, heightProperty in VP.items():
device = MockDevice(version=version)
view = View(None, device, -1)
self.assertEqual(heightProperty, view.heightProperty, msg='version %d' % version)
def testGetText(self):
self.assertTrue(self.view.map.has_key('text:mText'))
self.assertEqual('Button with ID', self.view.getText())
self.assertEqual('Button with ID', self.view['text:mText'])
def testGetX_specifiedSdkVersion_8(self):
view = View(VIEW_MAP_API_8, MockDevice(), 8)
self.assertEqual(8, view.build[VERSION_SDK_PROPERTY])
self.assertEqual(50, view.getX())
def testGetX_specifiedSdkVersion_10(self):
view = View(VIEW_MAP, MockDevice(), 10)
self.assertEqual(10, view.build[VERSION_SDK_PROPERTY])
self.assertEqual(50, view.getX())
def testGetY_specifiedSdkVersion_8(self):
view = View(VIEW_MAP_API_8, MockDevice(), 8)
self.assertEqual(8, view.build[VERSION_SDK_PROPERTY])
self.assertEqual(316, view.getY())
def testGetY_specifiedSdkVersion_10(self):
view = View(VIEW_MAP, MockDevice(), 10)
self.assertEqual(10, view.build[VERSION_SDK_PROPERTY])
self.assertEqual(316, view.getY())
def testGetWidth(self):
self.assertEqual(1140, self.view.getWidth())
def testGetHeight(self):
self.assertEqual(48, self.view.getHeight())
def testGetUniqueId(self):
self.assertEqual('id/button_with_id', self.view.getUniqueId())
def testGetUniqueIdEqualsToIdWhenIdIsSpecified(self):
self.assertEqual(self.view.getId(), self.view.getUniqueId())
def testName_Layout_mLeft(self):
v = View({'layout:mLeft':200}, None)
self.assertEqual(200, v.layout_mLeft())
def testNameWithColon_this_is_a_fake_name(self):
v = View({'this:is_a_fake_name':1}, None)
self.assertEqual(1, v.this_is_a_fake_name())
def testNameWith2Colons_this_is_another_fake_name(self):
v = View({'this:is:another_fake_name':1}, None)
self.assertEqual(1, v.this_is_another_fake_name())
def testViewWithoutId(self):
v = View({'mID':'id/NO_ID', 'text:mText':'Some text'}, None)
self.assertEqual('id/NO_ID', v.getId())
def testInexistentMethodName(self):
v = View({'foo':1}, None)
try:
v.bar()
raise Exception("AttributeError not raised")
except AttributeError:
pass
def testViewTreeRoot(self):
root = View({'root':1}, None)
self.assertTrue(root.parent == None)
def testViewTree(self):
root = View({'root':1}, None)
children = ["A", "B", "C"]
for s in children:
root.add(View({s:1}, None))
self.assertEquals(len(children), len(root.children))
def testViewTreeParent(self):
root = View({'root':1}, None)
children = ["A", "B", "C"]
for s in children:
root.add(View({s:1}, None))
for ch in root.children:
self.assertTrue(ch.parent == root)
def testContainsPoint_api15(self):
v = View(VIEW_MAP, MockDevice(), 15)
(X, Y, W, H) = v.getPositionAndSize()
self.assertEqual(X, v.getX())
self.assertEqual(Y, v.getY())
self.assertEqual(W, v.getWidth())
self.assertEqual(H, v.getHeight())
self.assertTrue(v.containsPoint((v.getCenter())))
def testContainsPoint_api17(self):
v = View(VIEW_MAP_API_17, MockDevice(), 17)
(X, Y, W, H) = v.getPositionAndSize()
self.assertEqual(X, v.getX())
self.assertEqual(Y, v.getY())
self.assertEqual(W, v.getWidth())
self.assertEqual(H, v.getHeight())
self.assertTrue(v.containsPoint((v.getCenter())))
def testIsClickable_api15(self):
v = View(VIEW_MAP, MockDevice(), 15)
self.assertTrue(v.isClickable())
def testIsClickable_api17(self):
v = View(VIEW_MAP_API_17, MockDevice(), 17)
self.assertTrue(v.isClickable())
class ViewClientTest(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def testInit_adb(self):
device = MockDevice()
vc = ViewClient(device, device.serialno, adb=TRUE, autodump=False)
self.assertNotEqual(None, vc)
def testInit_adbNone(self):
        # FIXME: there's a problem here when the mock device is created:
        # it's intended to be API=15, the mock ViewServer is started, and
        # adb then tries (unsuccessfully) to forward the ports, which fails
        # because adb does not know anything about mock devices. As a
        # result,
        #     error: device not found
        # appears in the console.
device = MockDevice()
try:
vc = ViewClient(device, device.serialno, adb=None, autodump=False)
self.assertIsNotNone(vc)
except subprocess.CalledProcessError:
# This is needed because the ports cannot be forwarded if there is no device connected
pass
def testExceptionDeviceNotConnected(self):
try:
vc = ViewClient(None, None)
except Exception, e:
self.assertEqual('Device is not connected', e.message)
def testConnectToDeviceOrExit_environ(self):
sys.argv = ['']
os.environ['ANDROID_SERIAL'] = 'ABC123'
try:
ViewClient.connectToDeviceOrExit(timeout=1, verbose=True)
except RuntimeError, e:
msg = str(e)
            if re.search(r'Is adb running on your computer\?', msg):
# This test required adb running
self.fail(msg)
elif re.search("There are no connected devices", msg):
# special case, when there are no devices connected then the
# serialno specified doesn't matter
pass
elif not re.search("couldn't find device that matches 'ABC123'", msg):
self.fail(msg)
except exceptions.SystemExit, e:
self.assertEquals(3, e.code)
except Exception, e: #FIXME: java.lang.NullPointerException:
            self.fail('Serialno was not taken from environment: ' + str(e))
def testConnectToDeviceOrExit_serialno(self):
sys.argv = ['']
try:
ViewClient.connectToDeviceOrExit(timeout=1, verbose=True, serialno='ABC123')
except RuntimeError, e:
msg = str(e)
            if re.search(r'Is adb running on your computer\?', msg):
# This test required adb running
self.fail(msg)
elif re.search("There are no connected devices", msg):
# special case, when there are no devices connected then the
# serialno specified doesn't matter
pass
elif not re.search("couldn't find device that matches 'ABC123'", msg):
self.fail(msg)
except exceptions.SystemExit, e:
self.assertEquals(3, e.code)
except Exception, e: #FIXME: java.lang.NullPointerException:
self.fail('Serialno was not taken from argument: ' + str(e))
def testConstructor(self):
device = MockDevice()
vc = ViewClient(device, device.serialno, adb=TRUE, autodump=False)
self.assertNotEquals(None, vc)
def testMapSerialNo_noPortSpecified(self):
vc = ViewClient(MockDevice(), serialno='192.168.1.100', adb=TRUE, autodump=False)
self.assertEqual('192.168.1.100:5555', vc.serialno)
def testMapSerialNo_portSpecified(self):
vc = ViewClient(MockDevice(), serialno='192.168.1.100:5555', adb=TRUE, autodump=False)
self.assertEqual('192.168.1.100:5555', vc.serialno)
def testMapSerialNo_emulator(self):
vc = ViewClient(MockDevice(), serialno='emulator-5556', adb=TRUE, autodump=False)
self.assertEqual('emulator-5556', vc.serialno)
def testMapSerialNo_regex(self):
        # This is an edge case. A regex should not be used as the serialno in ViewClient as its
        # behavior is not well defined.
# MonkeyRunner.waitForConnection() accepts a regexp as serialno but adb -s doesn't
try:
ViewClient(MockDevice(), serialno='.*', adb=TRUE, autodump=False)
self.fail()
except ValueError:
pass
def testMapSerialNo_None(self):
device = MockDevice()
try:
ViewClient(device, None, adb=TRUE, autodump=False)
self.fail()
except ValueError:
pass
def testGetProperty_displayWidth(self):
device = MockDevice()
self.assertEqual(768, device.getProperty('display.width'))
def testGetProperty_displayHeight(self):
device = MockDevice()
self.assertEqual(1184, device.getProperty('display.height'))
def __mockTree(self, dump=DUMP, version=15, language='en'):
device = MockDevice(version=version, language=language)
vc = ViewClient(device, serialno=device.serialno, adb=TRUE, autodump=False)
self.assertNotEquals(None, vc)
if version <= 15:
# We don't want to invoke the ViewServer or MockViewServer for this
vc.setViews(dump)
else:
vc.dump()
return vc
def __mockWindows(self, windows=WINDOWS):
device = MockDevice()
vc = ViewClient(device, serialno=device.serialno, adb=TRUE, autodump=False)
self.assertNotEquals(None, vc)
vc.windows = windows
return vc
def testRoot(self):
vc = self.__mockTree()
root = vc.root
self.assertTrue(root != None)
self.assertTrue(root.parent == None)
self.assertTrue(root.getClass() == 'com.android.internal.policy.impl.PhoneWindow$DecorView')
def testParseTree(self):
vc = self.__mockTree()
# eat all the output
vc.traverse(vc.root, transform=self.__eatIt)
# We know there are 23 views in ViewServer mock tree
self.assertEqual(23, len(vc.getViewIds()))
def testParsetree_api17(self):
vc = self.__mockTree(version=17)
# eat all the output
vc.traverse(vc.root, transform=self.__eatIt)
# We know there are 9 views in UiAutomator mock tree
self.assertEqual(9, len(vc.getViewIds()))
def testParsetree_api17_zh(self):
vc = self.__mockTree(version=17, language='zh')
# eat all the output
vc.traverse(vc.root, transform=self.__eatIt)
# We know there are 21 views in UiAutomator mock tree
self.assertEqual(21, len(vc.getViewIds()))
def __testViewByIds_apiIndependent(self, vc):
viewsbyId = vc.getViewsById()
self.assertNotEquals(None, viewsbyId)
for k, v in viewsbyId.items():
self.assertTrue(isinstance(k, str) or isinstance(k, unicode))
self.assertTrue(isinstance(v, View), "v=" + unicode(v) + " is not a View")
self.assertTrue(re.match("id/.*", v.getUniqueId()) != None)
self.assertEquals(k, v.getUniqueId())
def testGetViewsById(self):
vc = self.__mockTree()
self.__testViewByIds_apiIndependent(vc)
def testGetViewsById_api17(self):
vc = self.__mockTree(version=17)
self.__testViewByIds_apiIndependent(vc)
def testGetViewsById_api17_zh(self):
vc = self.__mockTree(version=17, language='zh')
self.__testViewByIds_apiIndependent(vc)
def testNewViewClientInstancesDontDuplicateTree(self):
vc = {}
n = {}
for i in range(10):
vc[i] = self.__mockTree()
n[i] = len(vc[i].getViewIds())
for i in range(1, 10):
self.assertEquals(n[0], n[i])
def testTraverseShowClassIdAndText(self):
device = MockDevice()
root = View({'text:mText':'0', 'class': 'android.widget.View', 'mID': 0}, device)
root.add(View({'text:mText':'1', 'class': 'android.widget.View', 'mID': 1}, device))
root.add(View({'text:mText':'2', 'class': 'android.widget.View', 'mID': 2}, device))
v3 = View({'text:mText':'3', 'class': 'android.widget.View', 'mID':3}, device)
root.add(v3)
v35 = View({'text:mText':'5', 'getTag()':'v35', 'class': 'android.widget.View', 'mID': 35}, device)
v3.add(v35)
vc = ViewClient(device, device.serialno, adb=TRUE, autodump=False)
self.assertNotEquals(None, vc)
treeStr = StringIO.StringIO()
vc.traverse(root=root, transform=ViewClient.TRAVERSE_CIT, stream=treeStr)
self.assertNotEquals(None, treeStr.getvalue())
lines = treeStr.getvalue().splitlines()
self.assertEqual(5, len(lines), "lines=%s" % lines)
self.assertEqual('android.widget.View 0 0', lines[0])
citRE = re.compile(' +android.widget.View \d+ \d+')
for l in lines[1:]:
self.assertTrue(citRE.match(l), 'l=%s' % l)
def testTraverseShowClassIdTextAndCenter(self):
device = MockDevice()
root = View({'mID':'0', 'text:mText':'0', 'layout:mLeft':0, 'layout:mTop':0}, device)
root.add(View({'mID':'1', 'text:mText':'1', 'layout:mLeft':1, 'layout:mTop':1}, device))
root.add(View({'mID':'2', 'text:mText':'2', 'layout:mLeft':2, 'layout:mTop':2}, device))
v3 = View({'mID':'3', 'text:mText':'3', 'layout:mLeft':3, 'layout:mTop':3}, device)
root.add(v3)
v35 = View({'mID':'5', 'text:mText':'5', 'getTag()':'v35', 'layout:mLeft':5, 'layout:mTop':5}, device)
v3.add(v35)
vc = ViewClient(device, device.serialno, adb=TRUE, autodump=False)
self.assertNotEquals(None, vc)
treeStr = StringIO.StringIO()
vc.traverse(root=root, transform=ViewClient.TRAVERSE_CITC, stream=treeStr)
self.assertNotEquals(None, treeStr.getvalue())
lines = treeStr.getvalue().splitlines()
self.assertEqual(5, len(lines))
self.assertEqual('None 0 0 (0, 0)', lines[0])
citRE = re.compile(' +None \d+ \d+ \(\d+, \d+\)')
for l in lines[1:]:
self.assertTrue(citRE.match(l))
def __getClassAndId(self, view):
try:
return "%s %s %s %s" % (view.getClass(), view.getId(), view.getUniqueId(), view.getCoords())
except Exception, e:
return "Exception in view=%s: %s" % (view.__smallStr__(), e)
def __eatIt(self, view):
return ""
def testViewWithNoIdReceivesUniqueId(self):
vc = self.__mockTree()
# We know there are 6 views without id in the mock tree
for i in range(1, 6):
self.assertNotEquals(None, vc.findViewById("id/no_id/%d" % i))
def testTextWithSpaces(self):
vc = self.__mockTree()
self.assertNotEqual(None, vc.findViewWithText('Medium Text'))
def testTextWithVeryLargeContent(self):
TEXT = """\
MOCK@412a9d08 mID=7,id/test drawing:mForeground=4,null padding:mForegroundPaddingBottom=1,0 text:mText=319,[! " # $ % & ' ( ) * + , - . / 0 1 2 3 4 5 6 7 8 9 : ; < = > ? @ A B C D E F G H I J K L M N O P Q R S T U V W X Y Z [ \ ] ^ _ ` a b c d e f g h i j k l m n o p] mViewFlags=11,-1744830336\
"""
vc = self.__mockTree(TEXT)
test = vc.findViewById('id/test')
text = test.getText()
self.assertEqual(319, len(text))
self.assertEqual('[', text[0])
self.assertEqual(']', text[318])
self.assertEqual('-1744830336', test['mViewFlags'])
def testActionBarSubtitleCoordinates(self):
vc = self.__mockTree(dump=DUMP_SAMPLE_UI)
toggleButton = vc.findViewById('id/button_with_id')
self.assertNotEqual(None, toggleButton)
textView3 = vc.findViewById('id/textView3')
self.assertNotEqual(None, textView3)
x = toggleButton.getX()
y = toggleButton.getY()
w = toggleButton.getWidth()
h = toggleButton.getHeight()
xy = toggleButton.getXY()
coords = toggleButton.getCoords()
self.assertNotEqual(None, textView3.getText())
self.assertNotEqual("", textView3.getText().strip())
lv = textView3.getText().strip().split()
_list = [ eval(str(v)) for v in lv ]
tx = _list[1][0]
ty = _list[1][1]
tsx = _list[1][0]
tsy = _list[1][1]
self.assertEqual(tx, x)
self.assertEqual(ty, y)
self.assertEqual((tsx, tsy), xy)
self.assertEqual(((tsx, tsy), (xy[0] + w, xy[1] + h)), coords)
def testServiceStoppedAfterDestructor(self):
device = MockDevice()
self.assertTrue(device.service == STOPPED)
if True:
vc = ViewClient(device, device.serialno, adb=TRUE, autodump=False)
self.assertTrue(device.service == RUNNING)
vc.__del__()
        # Perhaps there are other ViewClients using the same server, so we cannot expect it to stop
#self.assertTrue(device.service == STOPPED)
def testList(self):
vc = self.__mockWindows()
self.assertNotEqual(None, vc.windows)
def testFindViewByIdOrRaise(self):
vc = self.__mockTree(dump=DUMP_SAMPLE_UI)
vc.findViewByIdOrRaise('id/up')
def testFindViewByIdOrRaise_api17(self):
vc = self.__mockTree(version=17)
vc.traverse(stream=self.openDevNull())
vc.findViewByIdOrRaise('id/no_id/9')
def testFindViewByIdOrRaise_api17_zh(self):
vc = self.__mockTree(version=17, language='zh')
vc.traverse(stream=self.openDevNull())
vc.findViewByIdOrRaise('id/no_id/21')
def testFindViewByIdOrRaise_nonExistentView(self):
vc = self.__mockTree(dump=DUMP_SAMPLE_UI)
try:
vc.findViewByIdOrRaise('id/nonexistent')
self.fail()
except ViewNotFoundException:
pass
def testFindViewByIdOrRaise_nonExistentView_api17(self):
vc = self.__mockTree(version=17)
try:
vc.findViewByIdOrRaise('id/nonexistent')
self.fail()
except ViewNotFoundException:
pass
def testFindViewByIdOrRaise_nonExistentView_api17_zh(self):
vc = self.__mockTree(version=17, language='zh')
try:
vc.findViewByIdOrRaise('id/nonexistent')
self.fail()
except ViewNotFoundException:
pass
def testFindViewById_root(self):
device = None
root = View({'mID':'0'}, device)
root.add(View({'mID':'1'}, device))
root.add(View({'mID':'2'}, device))
v3 = View({'mID':'3'}, device)
root.add(v3)
v35 = View({'mID':'5', 'getTag()':'v35'}, device)
v3.add(v35)
v4 = View({'mID':'4'}, device)
root.add(v4)
v45 = View({'mID':'5', 'getTag()':'v45'}, device)
v4.add(v45)
device = MockDevice()
vc = ViewClient(device, device.serialno, adb=TRUE, autodump=False)
self.assertNotEquals(None, vc)
vc.root = root
v5 = vc.findViewById('5')
self.assertNotEqual(v5, None)
self.assertEqual('v35', v5.getTag())
v5 = vc.findViewById('5', root=v4)
self.assertNotEqual(v5, None)
self.assertEqual('v45', v5.getTag())
v5 = vc.findViewById('5', root=v3)
self.assertNotEqual(v5, None)
self.assertEqual('v35', v5.getTag())
def testFindViewById_viewFilter(self):
vc = self.__mockTree(dump=DUMP_SAMPLE_UI)
def vf(view):
return view.getClass() == 'android.widget.ImageView'
view = vc.findViewById('id/up', viewFilter=vf)
self.assertNotEqual(view, None)
def testFindViewById_viewFilterUnmatched(self):
vc = self.__mockTree(dump=DUMP_SAMPLE_UI)
def vf(view):
return view.getClass() == 'android.widget.TextView'
view = vc.findViewById('id/up', viewFilter=vf)
self.assertEqual(view, None)
def testFindViewByIdOrRaise_root(self):
device = None
root = View({'mID':'0'}, device)
root.add(View({'mID':'1'}, device))
root.add(View({'mID':'2'}, device))
v3 = View({'mID':'3'}, device)
root.add(v3)
v35 = View({'mID':'5', 'getTag()':'v35'}, device)
v3.add(v35)
v4 = View({'mID':'4'}, device)
root.add(v4)
v45 = View({'mID':'5', 'getTag()':'v45'}, device)
v4.add(v45)
device = MockDevice()
vc = ViewClient(device, device.serialno, adb=TRUE, autodump=False)
self.assertNotEquals(None, vc)
vc.root = root
v5 = vc.findViewByIdOrRaise('5')
self.assertEqual('v35', v5.getTag())
v5 = vc.findViewByIdOrRaise('5', root=v4)
self.assertEqual('v45', v5.getTag())
v5 = vc.findViewByIdOrRaise('5', root=v3)
self.assertEqual('v35', v5.getTag())
def testFindViewByIdOrRaise_viewFilter(self):
vc = self.__mockTree(dump=DUMP_SAMPLE_UI)
def vf(view):
return view.getClass() == 'android.widget.ImageView'
view = vc.findViewByIdOrRaise('id/up', viewFilter=vf)
def testFindViewByIdOrRaise_viewFilterUnmatched(self):
vc = self.__mockTree(dump=DUMP_SAMPLE_UI)
def vf(view):
return view.getClass() == 'android.widget.TextView'
try:
view = vc.findViewByIdOrRaise('id/up', viewFilter=vf)
except ViewNotFoundException:
pass
def testFindViewWithText_root(self):
device = None
root = View({'text:mText':'0'}, device)
root.add(View({'text:mText':'1'}, device))
root.add(View({'text:mText':'2'}, device))
v3 = View({'text:mText':'3'}, device)
root.add(v3)
v35 = View({'text:mText':'5', 'getTag()':'v35'}, device)
v3.add(v35)
v4 = View({'text:mText':'4'}, device)
root.add(v4)
v45 = View({'text:mText':'5', 'getTag()':'v45'}, device)
v4.add(v45)
device = MockDevice()
vc = ViewClient(device, device.serialno, adb=TRUE, autodump=False)
self.assertNotEquals(None, vc)
vc.root = root
v5 = vc.findViewWithText('5')
self.assertNotEqual(v5, None)
self.assertEqual('v35', v5.getTag())
v5 = vc.findViewWithText('5', root=v4)
self.assertNotEqual(v5, None)
self.assertEqual('v45', v5.getTag())
v5 = vc.findViewWithText('5', root=v3)
self.assertNotEqual(v5, None)
self.assertEqual('v35', v5.getTag())
def testFindViewWithText_regexRoot(self):
device = None
root = View({'text:mText':'0'}, device)
root.add(View({'text:mText':'1'}, device))
root.add(View({'text:mText':'2'}, device))
v3 = View({'text:mText':'3'}, device)
root.add(v3)
v35 = View({'text:mText':'5', 'getTag()':'v35'}, device)
v3.add(v35)
v4 = View({'text:mText':'4'}, device)
root.add(v4)
v45 = View({'text:mText':'5', 'getTag()':'v45'}, device)
v4.add(v45)
device = MockDevice()
vc = ViewClient(device, device.serialno, adb=TRUE, autodump=False)
self.assertNotEquals(None, vc)
vc.root = root
v5 = vc.findViewWithText(re.compile('[5]'))
self.assertNotEqual(v5, None)
self.assertEqual('v35', v5.getTag())
v5 = vc.findViewWithText(re.compile('[5]'), root=v4)
self.assertNotEqual(v5, None)
self.assertEqual('v45', v5.getTag())
v5 = vc.findViewWithText(re.compile('[5]'), root=v3)
self.assertNotEqual(v5, None)
self.assertEqual('v35', v5.getTag())
def testFindViewWithTextOrRaise_root(self):
device = None
root = View({'text:mText':'0'}, device)
root.add(View({'text:mText':'1'}, device))
root.add(View({'text:mText':'2'}, device))
v3 = View({'text:mText':'3'}, device)
root.add(v3)
v35 = View({'text:mText':'5', 'getTag()':'v35'}, device)
v3.add(v35)
v4 = View({'text:mText':'4'}, device)
root.add(v4)
v45 = View({'text:mText':'5', 'getTag()':'v45'}, device)
v4.add(v45)
device = MockDevice()
vc = ViewClient(device, device.serialno, adb=TRUE, autodump=False)
self.assertNotEquals(None, vc)
vc.root = root
v5 = vc.findViewWithTextOrRaise('5')
self.assertEqual('v35', v5.getTag())
v5 = vc.findViewWithTextOrRaise('5', root=v4)
self.assertEqual('v45', v5.getTag())
v5 = vc.findViewWithTextOrRaise('5', root=v3)
self.assertEqual('v35', v5.getTag())
def testFindViewWithTextOrRaise_root_disappearingView(self):
device = None
root = View({'text:mText':'0'}, device)
root.add(View({'text:mText':'1'}, device))
root.add(View({'text:mText':'2'}, device))
v3 = View({'text:mText':'3'}, device)
root.add(v3)
v35 = View({'text:mText':'5', 'getTag()':'v35'}, device)
v3.add(v35)
v4 = View({'text:mText':'4'}, device)
root.add(v4)
v45 = View({'text:mText':'5', 'getTag()':'v45'}, device)
v4.add(v45)
device = MockDevice()
vc = ViewClient(device, device.serialno, adb=TRUE, autodump=False)
self.assertNotEquals(None, vc)
vc.root = root
v5 = vc.findViewWithTextOrRaise('5')
self.assertEqual('v35', v5.getTag())
v5 = vc.findViewWithTextOrRaise('5', root=v4)
self.assertEqual('v45', v5.getTag())
v5 = vc.findViewWithTextOrRaise('5', root=v3)
self.assertEqual('v35', v5.getTag())
# Then remove v4 and its children
root.children.remove(v4)
#vc.dump()
v4 = vc.findViewWithText('4')
self.assertEqual(v4, None, "v4 has not disappeared")
def testFindViewWithTextOrRaise_rootNonExistent(self):
device = None
root = View({'text:mText':'0'}, device)
root.add(View({'text:mText':'1'}, device))
root.add(View({'text:mText':'2'}, device))
v3 = View({'text:mText':'3'}, device)
root.add(v3)
v35 = View({'text:mText':'5', 'getTag()':'v35'}, device)
v3.add(v35)
v4 = View({'text:mText':'4'}, device)
root.add(v4)
v45 = View({'text:mText':'5', 'getTag()':'v45'}, device)
v4.add(v45)
device = MockDevice()
vc = ViewClient(device, device.serialno, adb=TRUE, autodump=False)
self.assertNotEquals(None, vc)
vc.root = root
try:
vc.findViewWithTextOrRaise('Non Existent', root=v4)
self.fail()
except ViewNotFoundException:
pass
def testFindViewWithTextOrRaise_api17(self):
vc = self.__mockTree(version=17)
vc.findViewWithTextOrRaise("Apps")
def openDevNull(self):
return open('/dev/null', 'a+')
def testFindViewWithTextOrRaise_api17_zh(self):
vc = self.__mockTree(version=17, language='zh')
vc.traverse(transform=ViewClient.TRAVERSE_CIT, stream=self.openDevNull())
vc.findViewWithTextOrRaise(u'语言')
def testFindViewWithTextOrRaise_nonExistent_api17(self):
vc = self.__mockTree(version=17)
try:
vc.findViewWithTextOrRaise('nonexistent text')
self.fail()
except ViewNotFoundException:
pass
def testFindViewWithTextOrRaise_nonExistent_api17_zh(self):
vc = self.__mockTree(version=17, language='zh')
try:
vc.findViewWithTextOrRaise(u'不存在的文本')
self.fail()
except ViewNotFoundException:
pass
def testFindViewWithContentDescription_root(self):
device = None
root = View({'text:mText':'0', 'content-desc':'CD0'}, device)
root.add(View({'text:mText':'1', 'content-desc':'CD1'}, device))
root.add(View({'text:mText':'2', 'content-desc':'CD2'}, device))
v3 = View({'text:mText':'3', 'content-desc':'CD3'}, device)
root.add(v3)
v35 = View({'text:mText':'35', 'content-desc':'CD35'}, device)
v3.add(v35)
        v4 = View({'text:mText':'4', 'content-desc':'CD4'}, device)
root.add(v4)
v45 = View({'text:mText':'45', 'content-desc':'CD45'}, device)
v4.add(v45)
device = MockDevice()
vc = ViewClient(device, device.serialno, adb=TRUE, autodump=False)
self.assertNotEquals(None, vc)
vc.root = root
v45 = vc.findViewWithContentDescription('CD45')
self.assertNotEqual(v45, None)
self.assertEqual('45', v45.getText())
v45 = vc.findViewWithContentDescription('CD45', root=v4)
self.assertNotEqual(v45, None)
self.assertEqual('45', v45.getText())
v35 = vc.findViewWithContentDescription('CD35', root=v3)
self.assertNotEqual(v35, None)
self.assertEqual('35', v35.getText())
def testFindViewWithContentDescription_regexRoot(self):
device = None
root = View({'text:mText':'0', 'content-desc':'CD0'}, device)
root.add(View({'text:mText':'1', 'content-desc':'CD1'}, device))
root.add(View({'text:mText':'2', 'content-desc':'CD2'}, device))
v3 = View({'text:mText':'3', 'content-desc':'CD3'}, device)
root.add(v3)
v35 = View({'text:mText':'35', 'content-desc':'CD35'}, device)
v3.add(v35)
        v4 = View({'text:mText':'4', 'content-desc':'CD4'}, device)
root.add(v4)
v45 = View({'text:mText':'45', 'content-desc':'CD45'}, device)
v4.add(v45)
device = MockDevice()
vc = ViewClient(device, device.serialno, adb=TRUE, autodump=False)
self.assertNotEquals(None, vc)
vc.root = root
v45 = vc.findViewWithContentDescription(re.compile('CD4\d'))
self.assertNotEqual(v45, None)
self.assertEqual('45', v45.getText())
v45 = vc.findViewWithContentDescription(re.compile('CD4\d'), root=v4)
self.assertNotEqual(v45, None)
self.assertEqual('45', v45.getText())
v35 = vc.findViewWithContentDescription(re.compile('CD3\d'), root=v3)
self.assertNotEqual(v35, None)
self.assertEqual('35', v35.getText())
def testFindViewWithContentDescriptionOrRaise_root(self):
device = None
root = View({'text:mText':'0', 'content-desc':'CD0'}, device)
root.add(View({'text:mText':'1', 'content-desc':'CD1'}, device))
root.add(View({'text:mText':'2', 'content-desc':'CD2'}, device))
v3 = View({'text:mText':'3', 'content-desc':'CD3'}, device)
root.add(v3)
v35 = View({'text:mText':'35', 'content-desc':'CD35'}, device)
v3.add(v35)
        v4 = View({'text:mText':'4', 'content-desc':'CD4'}, device)
root.add(v4)
v45 = View({'text:mText':'45', 'content-desc':'CD45'}, device)
v4.add(v45)
device = MockDevice()
vc = ViewClient(device, device.serialno, adb=TRUE, autodump=False)
self.assertNotEquals(None, vc)
vc.root = root
v45 = vc.findViewWithContentDescriptionOrRaise('CD45')
self.assertEqual('45', v45.getText())
v45 = vc.findViewWithContentDescriptionOrRaise('CD45', root=v4)
self.assertEqual('45', v45.getText())
v35 = vc.findViewWithContentDescriptionOrRaise('CD35', root=v3)
self.assertEqual('35', v35.getText())
def testFindViewWithContentDescriptionOrRaise_rootNonExistent(self):
device = None
root = View({'text:mText':'0', 'content-desc':'CD0'}, device)
root.add(View({'text:mText':'1', 'content-desc':'CD1'}, device))
root.add(View({'text:mText':'2', 'content-desc':'CD2'}, device))
v3 = View({'text:mText':'3', 'content-desc':'CD3'}, device)
root.add(v3)
v35 = View({'text:mText':'35', 'content-desc':'CD35'}, device)
v3.add(v35)
        v4 = View({'text:mText':'4', 'content-desc':'CD4'}, device)
root.add(v4)
v45 = View({'text:mText':'45', 'content-desc':'CD45'}, device)
v4.add(v45)
device = MockDevice()
vc = ViewClient(device, device.serialno, adb=TRUE, autodump=False)
self.assertNotEquals(None, vc)
vc.root = root
try:
vc.findViewWithContentDescriptionOrRaise('Non Existent', root=v4)
self.fail()
except ViewNotFoundException:
pass
def testFindViewWithContentDescriptionOrRaiseExceptionMessage_regexpRoot(self):
device = None
root = View({'text:mText':'0', 'content-desc':'CD0'}, device)
root.add(View({'text:mText':'1', 'content-desc':'CD1'}, device))
root.add(View({'text:mText':'2', 'content-desc':'CD2'}, device))
v3 = View({'text:mText':'3', 'content-desc':'CD3'}, device)
root.add(v3)
v35 = View({'text:mText':'35', 'content-desc':'CD35'}, device)
v3.add(v35)
        v4 = View({'text:mText':'4', 'content-desc':'CD4'}, device)
root.add(v4)
v45 = View({'text:mText':'45', 'content-desc':'CD45'}, device)
v4.add(v45)
device = MockDevice()
vc = ViewClient(device, device.serialno, adb=TRUE, autodump=False)
self.assertNotEquals(None, vc)
vc.root = root
try:
vc.findViewWithContentDescriptionOrRaise(re.compile('Non Existent'), root=v4)
self.fail()
except ViewNotFoundException, e:
self.assertNotEquals(None, re.search("that matches 'Non Existent'", e.message))
def testUiAutomatorDump(self):
device = MockDevice(version=16)
vc = ViewClient(device, device.serialno, adb=TRUE, autodump=True)
def testUiAutomatorKilled(self):
device = MockDevice(version=16, uiautomatorkilled=True)
try:
vc = ViewClient(device, device.serialno, adb=TRUE, autodump=True, ignoreuiautomatorkilled=True)
except Exception, e:
self.assertIsNotNone(re.search('''ERROR: UiAutomator output contains no valid information. UiAutomator was killed, no reason given.''', str(e)))
def testUiViewServerDump(self):
device = None
try:
device = MockDevice(version=15, startviewserver=True)
vc = ViewClient(device, device.serialno, adb=TRUE, autodump=False)
vc.dump()
vc.findViewByIdOrRaise('id/home')
finally:
if device:
device.shutdownMockViewServer()
def testUiViewServerDump_windowStr(self):
device = None
try:
device = MockDevice(version=15, startviewserver=True)
vc = ViewClient(device, device.serialno, adb=TRUE, autodump=False)
vc.dump(window='StatusBar')
vc.findViewByIdOrRaise('id/status_bar')
finally:
if device:
device.shutdownMockViewServer()
def testUiViewServerDump_windowInt(self):
device = None
try:
device = MockDevice(version=15, startviewserver=True)
vc = ViewClient(device, device.serialno, adb=TRUE, autodump=False)
vc.dump(window=0xb52f7c88)
vc.findViewByIdOrRaise('id/status_bar')
finally:
if device:
device.shutdownMockViewServer()
def testUiViewServerDump_windowIntStr(self):
device = None
try:
device = MockDevice(version=15, startviewserver=True)
vc = ViewClient(device, device.serialno, adb=TRUE, autodump=False)
vc.dump(window='0xb52f7c88')
vc.findViewByIdOrRaise('id/status_bar')
finally:
if device:
device.shutdownMockViewServer()
def testUiViewServerDump_windowIntM1(self):
device = None
try:
device = MockDevice(version=15, startviewserver=True)
vc = ViewClient(device, device.serialno, adb=TRUE, autodump=False)
vc.dump(window=-1)
vc.findViewByIdOrRaise('id/home')
finally:
if device:
device.shutdownMockViewServer()
def testFindViewsContainingPoint_api15(self):
device = None
try:
device = MockDevice(version=15, startviewserver=True)
vc = ViewClient(device, device.serialno, adb=TRUE)
            views = vc.findViewsContainingPoint((200, 200))
            self.assertNotEquals(None, views)
            self.assertNotEquals(0, len(views))
finally:
if device:
device.shutdownMockViewServer()
def testFindViewsContainingPoint_api17(self):
device = MockDevice(version=17)
vc = ViewClient(device, device.serialno, adb=TRUE)
        views = vc.findViewsContainingPoint((55, 75))
        self.assertNotEquals(None, views)
        self.assertNotEquals(0, len(views))
def testFindViewsContainingPoint_filterApi15(self):
device = None
try:
device = MockDevice(version=15, startviewserver=True)
vc = ViewClient(device, device.serialno, adb=TRUE)
            views = vc.findViewsContainingPoint((200, 200), _filter=View.isClickable)
            self.assertNotEquals(None, views)
            self.assertNotEquals(0, len(views))
finally:
if device:
device.shutdownMockViewServer()
def testFindViewsContainingPoint_filterApi17(self):
device = MockDevice(version=17)
vc = ViewClient(device, device.serialno, adb=TRUE)
        views = vc.findViewsContainingPoint((55, 75), _filter=View.isClickable)
        self.assertNotEquals(None, views)
        self.assertNotEquals(0, len(views))
if __name__ == "__main__":
print >> sys.stderr, "ViewClient.__main__:"
print >> sys.stderr, "argv=", sys.argv
#import sys;sys.argv = ['', 'Test.testName']
#sys.argv.append('ViewClientTest.testFindViewsContainingPoint_filterApi17')
unittest.main()
|
venomJ/AndroidViewClient
|
tests/com/dtmilano/android/viewclienttests.py
|
Python
|
apache-2.0
| 44,976
|
"""
mbed SDK
Copyright (c) 2011-2017 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from os.path import splitext, basename, join
from tools.utils import mkdir
from tools.export.gnuarmeclipse import GNUARMEclipse
from tools.export.gnuarmeclipse import UID
from tools.build_api import prepare_toolchain
from sys import flags, platform
# Global unique-ID generator instance.
u = UID()
class Sw4STM32(GNUARMEclipse):
    """
    Exporter for System Workbench for STM32 (SW4STM32) projects.
    """
NAME = 'Sw4STM32'
TOOLCHAIN = 'GCC_ARM'
BOARDS = {
'B96B_F446VE':
{
'name': 'B96B-F446VE',
'mcuId': 'STM32F446VETx'
},
'DISCO_F051R8':
{
'name': 'STM32F0DISCOVERY',
'mcuId': 'STM32F051R8Tx'
},
'DISCO_F303VC':
{
'name': 'STM32F3DISCOVERY',
'mcuId': 'STM32F303VCTx'
},
'DISCO_F334C8':
{
'name': 'STM32F3348DISCOVERY',
'mcuId': 'STM32F334C8Tx'
},
'DISCO_F401VC':
{
'name': 'STM32F401C-DISCO',
'mcuId': 'STM32F401VCTx'
},
'DISCO_F407VG':
{
'name': 'STM32F4DISCOVERY',
'mcuId': 'STM32F407VGTx'
},
'DISCO_F413ZH':
{
'name': 'DISCO_F413',
'mcuId': 'STM32F413ZHTx'
},
'DISCO_F429ZI':
{
'name': 'STM32F429I-DISCO',
'mcuId': 'STM32F429ZITx'
},
'DISCO_F469NI':
{
'name': 'DISCO-F469NI',
'mcuId': 'STM32F469NIHx'
},
'DISCO_F746NG':
{
'name': 'STM32F746G-DISCO',
'mcuId': 'STM32F746NGHx'
},
'DISCO_F769NI':
{
'name': 'DISCO-F769NI',
'mcuId': 'STM32F769NIHx'
},
'DISCO_L053C8':
{
'name': 'STM32L0538DISCOVERY',
'mcuId': 'STM32L053C8Tx'
},
'DISCO_L072CZ_LRWAN1':
{
'name': 'DISCO-L072CZ-LRWAN1',
'mcuId': 'STM32L072CZTx'
},
'DISCO_L475VG_IOT01A':
{
'name': 'STM32L475G-DISCO',
'mcuId': 'STM32L475VGTx'
},
'DISCO_L476VG':
{
'name': 'STM32L476G-DISCO',
'mcuId': 'STM32L476VGTx'
},
'NUCLEO_F030R8':
{
'name': 'NUCLEO-F030R8',
'mcuId': 'STM32F030R8Tx'
},
'NUCLEO_F031K6':
{
'name': 'NUCLEO-F031K6',
'mcuId': 'STM32F031K6Tx'
},
'NUCLEO_F042K6':
{
'name': 'NUCLEO-F042K6',
'mcuId': 'STM32F042K6Tx'
},
'NUCLEO_F070RB':
{
'name': 'NUCLEO-F070RB',
'mcuId': 'STM32F070RBTx'
},
'NUCLEO_F072RB':
{
'name': 'NUCLEO-F072RB',
'mcuId': 'STM32F072RBTx'
},
'NUCLEO_F091RC':
{
'name': 'NUCLEO-F091RC',
'mcuId': 'STM32F091RCTx'
},
'NUCLEO_F103RB':
{
'name': 'NUCLEO-F103RB',
'mcuId': 'STM32F103RBTx'
},
'NUCLEO_F207ZG':
{
'name': 'NUCLEO-F207ZG',
'mcuId': 'STM32F207ZGTx'
},
'NUCLEO_F302R8':
{
'name': 'NUCLEO-F302R8',
'mcuId': 'STM32F302R8Tx'
},
'NUCLEO_F303K8':
{
'name': 'NUCLEO-F303K8',
'mcuId': 'STM32F303K8Tx'
},
'NUCLEO_F303RE':
{
'name': 'NUCLEO-F303RE',
'mcuId': 'STM32F303RETx'
},
'NUCLEO_F303ZE':
{
'name': 'NUCLEO-F303ZE',
'mcuId': 'STM32F303ZETx'
},
'NUCLEO_F334R8':
{
'name': 'NUCLEO-F334R8',
'mcuId': 'STM32F334R8Tx'
},
'NUCLEO_F401RE':
{
'name': 'NUCLEO-F401RE',
'mcuId': 'STM32F401RETx'
},
'NUCLEO_F410RB':
{
'name': 'NUCLEO-F410RB',
'mcuId': 'STM32F410RBTx'
},
'NUCLEO_F411RE':
{
'name': 'NUCLEO-F411RE',
'mcuId': 'STM32F411RETx'
},
'NUCLEO_F429ZI':
{
'name': 'NUCLEO-F429ZI',
'mcuId': 'STM32F429ZITx'
},
'NUCLEO_F446RE':
{
'name': 'NUCLEO-F446RE',
'mcuId': 'STM32F446RETx'
},
'NUCLEO_F446ZE':
{
'name': 'NUCLEO-F446ZE',
'mcuId': 'STM32F446ZETx'
},
'NUCLEO_F746ZG':
{
'name': 'NUCLEO-F746ZG',
'mcuId': 'STM32F746ZGTx'
},
'NUCLEO_F767ZI':
{
'name': 'NUCLEO-F767ZI',
'mcuId': 'STM32F767ZITx'
},
'NUCLEO_L011K4':
{
'name': 'NUCLEO-L011K4',
'mcuId': 'STM32L011K4Tx'
},
'NUCLEO_L031K6':
{
'name': 'NUCLEO-L031K6',
'mcuId': 'STM32L031K6Tx'
},
'NUCLEO_L053R8':
{
'name': 'NUCLEO-L053R8',
'mcuId': 'STM32L053R8Tx'
},
'NUCLEO_L073RZ':
{
'name': 'NUCLEO-L073RZ',
'mcuId': 'STM32L073RZTx'
},
'NUCLEO_L152RE':
{
'name': 'NUCLEO-L152RE',
'mcuId': 'STM32L152RETx'
},
'NUCLEO_L432KC':
{
'name': 'NUCLEO-L432KC',
'mcuId': 'STM32L432KCUx'
},
'NUCLEO_L476RG':
{
'name': 'NUCLEO-L476RG',
'mcuId': 'STM32L476RGTx'
},
'NUCLEO_L486RG':
{
'name': 'NUCLEO-L486RG',
'mcuId': 'STM32L486RGTx'
},
'NUCLEO_L496ZG':
{
'name': 'NUCLEO-L496ZG',
'mcuId': 'STM32L496ZGTx'
},
'NUCLEO_L496ZG_P':
{
'name': 'NUCLEO-L496ZG',
'mcuId': 'STM32L496ZGTx'
},
}
TARGETS = BOARDS.keys()
def __gen_dir(self, dir_name):
"""
        Create the directory dir_name under the export directory.
"""
settings = join(self.export_dir, dir_name)
mkdir(settings)
def get_fpu_hardware(self, fpu_unit):
"""
Convert fpu unit name into hardware name.
"""
hw = ''
fpus = {
'fpv4spd16': 'fpv4-sp-d16',
'fpv5d16': 'fpv5-d16',
'fpv5spd16': 'fpv5-sp-d16'
}
if fpu_unit in fpus:
hw = fpus[fpu_unit]
return hw
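    # For illustration, given the table above: get_fpu_hardware('fpv4spd16')
    # returns 'fpv4-sp-d16', and any unknown unit name falls through to ''.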
def process_sw_options(self, opts, flags_in):
"""
Process System Workbench specific options.
System Workbench for STM32 has some compile options, which are not recognized by the GNUARMEclipse exporter.
Those are handled in this method.
"""
opts['c']['preprocess'] = False
if '-E' in flags_in['c_flags']:
opts['c']['preprocess'] = True
opts['cpp']['preprocess'] = False
if '-E' in flags_in['cxx_flags']:
opts['cpp']['preprocess'] = True
opts['c']['slowflashdata'] = False
if '-mslow-flash-data' in flags_in['c_flags']:
opts['c']['slowflashdata'] = True
opts['cpp']['slowflashdata'] = False
if '-mslow-flash-data' in flags_in['cxx_flags']:
opts['cpp']['slowflashdata'] = True
if opts['common']['optimization.messagelength']:
opts['common']['optimization.other'] += ' -fmessage-length=0'
if opts['common']['optimization.signedchar']:
opts['common']['optimization.other'] += ' -fsigned-char'
if opts['common']['optimization.nocommon']:
opts['common']['optimization.other'] += ' -fno-common'
if opts['common']['optimization.noinlinefunctions']:
opts['common']['optimization.other'] += ' -fno-inline-functions'
if opts['common']['optimization.freestanding']:
opts['common']['optimization.other'] += ' -ffreestanding'
if opts['common']['optimization.nobuiltin']:
opts['common']['optimization.other'] += ' -fno-builtin'
if opts['common']['optimization.spconstant']:
opts['common']['optimization.other'] += ' -fsingle-precision-constant'
if opts['common']['optimization.nomoveloopinvariants']:
opts['common']['optimization.other'] += ' -fno-move-loop-invariants'
if opts['common']['warnings.unused']:
opts['common']['warnings.other'] += ' -Wunused'
if opts['common']['warnings.uninitialized']:
opts['common']['warnings.other'] += ' -Wuninitialized'
if opts['common']['warnings.missingdeclaration']:
opts['common']['warnings.other'] += ' -Wmissing-declarations'
if opts['common']['warnings.pointerarith']:
opts['common']['warnings.other'] += ' -Wpointer-arith'
if opts['common']['warnings.padded']:
opts['common']['warnings.other'] += ' -Wpadded'
if opts['common']['warnings.shadow']:
opts['common']['warnings.other'] += ' -Wshadow'
if opts['common']['warnings.logicalop']:
opts['common']['warnings.other'] += ' -Wlogical-op'
if opts['common']['warnings.agreggatereturn']:
opts['common']['warnings.other'] += ' -Waggregate-return'
if opts['common']['warnings.floatequal']:
opts['common']['warnings.other'] += ' -Wfloat-equal'
opts['ld']['strip'] = False
if '-s' in flags_in['ld_flags']:
opts['ld']['strip'] = True
opts['ld']['shared'] = False
if '-shared' in flags_in['ld_flags']:
opts['ld']['shared'] = True
opts['ld']['soname'] = ''
opts['ld']['implname'] = ''
opts['ld']['defname'] = ''
for item in flags_in['ld_flags']:
if item.startswith('-Wl,-soname='):
opts['ld']['soname'] = item[len('-Wl,-soname='):]
if item.startswith('-Wl,--out-implib='):
opts['ld']['implname'] = item[len('-Wl,--out-implib='):]
if item.startswith('-Wl,--output-def='):
opts['ld']['defname'] = item[len('-Wl,--output-def='):]
opts['common']['arm.target.fpu.hardware'] = self.get_fpu_hardware(
opts['common']['arm.target.fpu.unit'])
opts['common']['debugging.codecov'] = False
if '-fprofile-arcs' in flags_in['common_flags'] and '-ftest-coverage' in flags_in['common_flags']:
opts['common']['debugging.codecov'] = True
# Passing linker options to linker with '-Wl,'-prefix.
for index in range(len(opts['ld']['flags'])):
item = opts['ld']['flags'][index]
if not item.startswith('-Wl,'):
opts['ld']['flags'][index] = '-Wl,' + item
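        # e.g. a bare '--gc-sections' becomes '-Wl,--gc-sections', so GCC
        # forwards it to the linker instead of interpreting it itself.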
# Strange System Workbench feature: If first parameter in Other flags is a
# define (-D...), Other flags will be replaced by defines and other flags
# are completely ignored. Moving -D parameters to defines.
for compiler in ['c', 'cpp', 'as']:
tmpList = opts[compiler]['other'].split(' ')
otherList = []
for item in tmpList:
if item.startswith('-D'):
opts[compiler]['defines'].append(str(item[2:]))
else:
otherList.append(item)
opts[compiler]['other'] = ' '.join(otherList)
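        # e.g. an 'other' value of '-DFOO=1 -g' ends up as defines ['FOO=1']
        # plus other '-g', sidestepping the define-swallowing quirk above.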
# Assembler options
for as_def in opts['as']['defines']:
if '=' in as_def:
opts['as']['other'] += ' --defsym ' + as_def
else:
opts['as']['other'] += ' --defsym ' + as_def + '=1'
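        # e.g. 'DEBUG' becomes ' --defsym DEBUG=1', while 'FOO=2' is passed
        # through unchanged as ' --defsym FOO=2'.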
def generate(self):
"""
Generate the .project and .cproject files.
"""
options = {}
if not self.resources.linker_script:
raise NotSupportedException("No linker script found.")
print ('\nCreate a System Workbench for STM32 managed project')
print ('Project name: {0}'.format(self.project_name))
print ('Target: {0}'.format(self.toolchain.target.name))
print ('Toolchain: {0}'.format(self.TOOLCHAIN) + '\n')
self.resources.win_to_unix()
config_header = self.filter_dot(self.toolchain.get_config_header())
libraries = []
for lib in self.resources.libraries:
library, _ = splitext(basename(lib))
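            # Strip the 'lib' prefix from the archive name.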
libraries.append(library[3:])
self.system_libraries = [
'stdc++', 'supc++', 'm', 'c', 'gcc', 'nosys'
]
profiles = self.get_all_profiles()
        self.as_defines = [s.replace('"', '&quot;')
                           for s in self.toolchain.get_symbols(True)]
        self.c_defines = [s.replace('"', '&quot;')
                          for s in self.toolchain.get_symbols()]
self.cpp_defines = self.c_defines
        print ('Symbols: {0}'.format(len(self.c_defines)))
self.include_path = []
for s in self.resources.inc_dirs:
self.include_path.append("../" + self.filter_dot(s))
print ('Include folders: {0}'.format(len(self.include_path)))
self.compute_exclusions()
print ('Exclude folders: {0}'.format(len(self.excluded_folders)))
ld_script = self.filter_dot(self.resources.linker_script)
print ('Linker script: {0}'.format(ld_script))
lib_dirs = [self.filter_dot(s) for s in self.resources.lib_dirs]
preproc_cmd = basename(self.toolchain.preproc[0]) + " " + " ".join(self.toolchain.preproc[1:])
for id in ['debug', 'release']:
opts = {}
opts['common'] = {}
opts['as'] = {}
opts['c'] = {}
opts['cpp'] = {}
opts['ld'] = {}
opts['id'] = id
opts['name'] = opts['id'].capitalize()
# TODO: Add prints to log or console in verbose mode.
#print ('\nBuild configuration: {0}'.format(opts['name']))
profile = profiles[id]
# A small hack, do not bother with src_path again,
# pass an empty string to avoid crashing.
src_paths = ['']
toolchain = prepare_toolchain(
src_paths, "", self.toolchain.target.name, self.TOOLCHAIN, build_profile=[profile])
# Hack to fill in build_dir
toolchain.build_dir = self.toolchain.build_dir
flags = self.toolchain_flags(toolchain)
# TODO: Add prints to log or console in verbose mode.
# print 'Common flags:', ' '.join(flags['common_flags'])
# print 'C++ flags:', ' '.join(flags['cxx_flags'])
# print 'C flags:', ' '.join(flags['c_flags'])
# print 'ASM flags:', ' '.join(flags['asm_flags'])
# print 'Linker flags:', ' '.join(flags['ld_flags'])
# Most GNU ARM Eclipse options have a parent,
# either debug or release.
if '-O0' in flags['common_flags'] or '-Og' in flags['common_flags']:
opts['parent_id'] = 'debug'
else:
opts['parent_id'] = 'release'
self.process_options(opts, flags)
opts['c']['defines'] = self.c_defines
opts['cpp']['defines'] = self.cpp_defines
opts['as']['defines'] = self.as_defines
self.process_sw_options(opts, flags)
opts['ld']['library_paths'] = [
self.filter_dot(s) for s in self.resources.lib_dirs]
opts['ld']['user_libraries'] = libraries
opts['ld']['system_libraries'] = self.system_libraries
opts['ld']['script'] = "linker-script-" + id + ".ld"
# Unique IDs used in multiple places.
uid = {}
uid['config'] = u.id
uid['tool_c_compiler'] = u.id
uid['tool_c_compiler_input'] = u.id
uid['tool_cpp_compiler'] = u.id
uid['tool_cpp_compiler_input'] = u.id
opts['uid'] = uid
options[id] = opts
ctx = {
'name': self.project_name,
'platform': platform,
'include_paths': self.include_path,
'config_header': config_header,
'exclude_paths': '|'.join(self.excluded_folders),
'ld_script': ld_script,
'library_paths': lib_dirs,
'object_files': self.resources.objects,
'libraries': libraries,
'board_name': self.BOARDS[self.target.upper()]['name'],
'mcu_name': self.BOARDS[self.target.upper()]['mcuId'],
'cpp_cmd': preproc_cmd,
'options': options,
            # The id property of 'u' generates a new random identifier every
            # time it is accessed.
'u': u
}
self.__gen_dir('.settings')
self.gen_file('sw4stm32/language_settings_commom.tmpl',
ctx, '.settings/language.settings.xml')
self.gen_file('sw4stm32/project_common.tmpl', ctx, '.project')
self.gen_file('sw4stm32/cproject_common.tmpl', ctx, '.cproject')
self.gen_file('sw4stm32/makefile.targets.tmpl', ctx,
'makefile.targets', trim_blocks=True, lstrip_blocks=True)
self.gen_file('sw4stm32/launch.tmpl', ctx, self.project_name +
' ' + options['debug']['name'] + '.launch')
|
CalSol/mbed
|
tools/export/sw4stm32/__init__.py
|
Python
|
apache-2.0
| 17,980
|
# Copyright 2013 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Image caching and management.
"""
import os
import re
from os_win import utilsfactory
from oslo_log import log as logging
from oslo_utils import excutils
from oslo_utils import units
from oslo_utils import uuidutils
import nova.conf
from nova import exception
from nova.i18n import _
from nova import utils
from nova.virt.hyperv import pathutils
from nova.virt import imagecache
from nova.virt import images
LOG = logging.getLogger(__name__)
CONF = nova.conf.CONF
class ImageCache(imagecache.ImageCacheManager):
def __init__(self):
super(ImageCache, self).__init__()
self._pathutils = pathutils.PathUtils()
self._vhdutils = utilsfactory.get_vhdutils()
def _get_root_vhd_size_gb(self, instance):
if instance.old_flavor:
return instance.old_flavor.root_gb
else:
return instance.flavor.root_gb
def _resize_and_cache_vhd(self, instance, vhd_path):
vhd_size = self._vhdutils.get_vhd_size(vhd_path)['VirtualSize']
root_vhd_size_gb = self._get_root_vhd_size_gb(instance)
root_vhd_size = root_vhd_size_gb * units.Gi
root_vhd_internal_size = (
self._vhdutils.get_internal_vhd_size_by_file_size(
vhd_path, root_vhd_size))
if root_vhd_internal_size < vhd_size:
raise exception.FlavorDiskSmallerThanImage(
flavor_size=root_vhd_size, image_size=vhd_size)
if root_vhd_internal_size > vhd_size:
path_parts = os.path.splitext(vhd_path)
resized_vhd_path = '%s_%s%s' % (path_parts[0],
root_vhd_size_gb,
path_parts[1])
lock_path = os.path.dirname(resized_vhd_path)
lock_name = "%s-cache.lock" % os.path.basename(resized_vhd_path)
@utils.synchronized(name=lock_name, external=True,
lock_path=lock_path)
def copy_and_resize_vhd():
if not self._pathutils.exists(resized_vhd_path):
try:
LOG.debug("Copying VHD %(vhd_path)s to "
"%(resized_vhd_path)s",
{'vhd_path': vhd_path,
'resized_vhd_path': resized_vhd_path})
self._pathutils.copyfile(vhd_path, resized_vhd_path)
LOG.debug("Resizing VHD %(resized_vhd_path)s to new "
"size %(root_vhd_size)s",
{'resized_vhd_path': resized_vhd_path,
'root_vhd_size': root_vhd_size})
self._vhdutils.resize_vhd(resized_vhd_path,
root_vhd_internal_size,
is_file_max_size=False)
except Exception:
with excutils.save_and_reraise_exception():
if self._pathutils.exists(resized_vhd_path):
self._pathutils.remove(resized_vhd_path)
copy_and_resize_vhd()
return resized_vhd_path
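    # e.g. caching image 'abc.vhd' for a flavor with a 20 GB root disk leaves
    # a resized copy named 'abc_20.vhd' next to the original.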
def get_cached_image(self, context, instance, rescue_image_id=None):
image_id = rescue_image_id or instance.image_ref
base_vhd_dir = self._pathutils.get_base_vhd_dir()
base_vhd_path = os.path.join(base_vhd_dir, image_id)
lock_name = "%s-cache.lock" % image_id
@utils.synchronized(name=lock_name, external=True,
lock_path=base_vhd_dir)
def fetch_image_if_not_existing():
vhd_path = None
for format_ext in ['vhd', 'vhdx']:
test_path = base_vhd_path + '.' + format_ext
if self._pathutils.exists(test_path):
vhd_path = test_path
break
if not vhd_path:
try:
images.fetch(context, image_id, base_vhd_path)
format_ext = self._vhdutils.get_vhd_format(base_vhd_path)
vhd_path = base_vhd_path + '.' + format_ext.lower()
self._pathutils.rename(base_vhd_path, vhd_path)
except Exception:
with excutils.save_and_reraise_exception():
if self._pathutils.exists(base_vhd_path):
self._pathutils.remove(base_vhd_path)
return vhd_path
vhd_path = fetch_image_if_not_existing()
# Note: rescue images are not resized.
is_vhd = vhd_path.split('.')[-1].lower() == 'vhd'
if CONF.use_cow_images and is_vhd and not rescue_image_id:
# Resize the base VHD image as it's not possible to resize a
# differencing VHD. This does not apply to VHDX images.
resized_vhd_path = self._resize_and_cache_vhd(instance, vhd_path)
if resized_vhd_path:
return resized_vhd_path
if rescue_image_id:
self._verify_rescue_image(instance, rescue_image_id,
vhd_path)
return vhd_path
def _verify_rescue_image(self, instance, rescue_image_id,
rescue_image_path):
rescue_image_info = self._vhdutils.get_vhd_info(rescue_image_path)
rescue_image_size = rescue_image_info['VirtualSize']
flavor_disk_size = instance.flavor.root_gb * units.Gi
if rescue_image_size > flavor_disk_size:
err_msg = _('Using a rescue image bigger than the instance '
'flavor disk size is not allowed. '
'Rescue image size: %(rescue_image_size)s. '
'Flavor disk size:%(flavor_disk_size)s.') % dict(
rescue_image_size=rescue_image_size,
flavor_disk_size=flavor_disk_size)
raise exception.ImageUnacceptable(reason=err_msg,
image_id=rescue_image_id)
def get_image_details(self, context, instance):
image_id = instance.image_ref
return images.get_info(context, image_id)
def _age_and_verify_cached_images(self, context, all_instances, base_dir):
for img in self.originals:
if img in self.used_images:
# change the timestamp on the image so as to reflect the last
# time it was used
self._update_image_timestamp(img)
else:
self._remove_if_old_image(img)
def _update_image_timestamp(self, image):
backing_files = self._get_image_backing_files(image)
for img in backing_files:
os.utime(img, None)
def _get_image_backing_files(self, image):
base_file = self._pathutils.get_image_path(image)
if not base_file:
# not vhd or vhdx, ignore.
return []
backing_files = [base_file]
resize_re = re.compile('%s_[0-9]+$' % image)
for img in self.unexplained_images:
match = resize_re.match(img)
if match:
backing_files.append(self._pathutils.get_image_path(img))
return backing_files
def _remove_if_old_image(self, image):
backing_files = self._get_image_backing_files(image)
max_age_seconds = CONF.remove_unused_original_minimum_age_seconds
for img in backing_files:
age_seconds = self._pathutils.get_age_of_file(img)
if age_seconds > max_age_seconds:
LOG.info("Removing old, unused image: %s", img)
self._remove_old_image(img)
def _remove_old_image(self, image_path):
lock_path = os.path.dirname(image_path)
lock_name = "%s-cache.lock" % os.path.basename(image_path)
@utils.synchronized(name=lock_name, external=True,
lock_path=lock_path)
def _image_synchronized_remove():
self._pathutils.remove(image_path)
_image_synchronized_remove()
def update(self, context, all_instances):
base_vhd_dir = self._pathutils.get_base_vhd_dir()
running = self._list_running_instances(context, all_instances)
self.used_images = running['used_images'].keys()
all_files = self._list_base_images(base_vhd_dir)
self.originals = all_files['originals']
self.unexplained_images = all_files['unexplained_images']
self._age_and_verify_cached_images(context, all_instances,
base_vhd_dir)
def _list_base_images(self, base_dir):
unexplained_images = []
originals = []
for entry in os.listdir(base_dir):
file_name, extension = os.path.splitext(entry)
# extension has a leading '.'. E.g.: '.vhdx'
if extension.lstrip('.').lower() not in ['vhd', 'vhdx']:
# File is not an image. Ignore it.
# imagecache will not store images of any other formats.
continue
if uuidutils.is_uuid_like(file_name):
originals.append(file_name)
else:
unexplained_images.append(file_name)
return {'unexplained_images': unexplained_images,
'originals': originals}
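# A minimal usage sketch (not part of the original module; it assumes a Nova
# request context and instance object, as supplied by the compute manager):
def _example_fetch_root_vhd(context, instance):
    # Fetch the instance's base image into the cache; plain VHDs are resized
    # to the flavor's root disk size when CoW images are enabled.
    cache = ImageCache()
    return cache.get_cached_image(context, instance)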
|
mikalstill/nova
|
nova/virt/hyperv/imagecache.py
|
Python
|
apache-2.0
| 10,063
|
#!/usr/bin/env python
# Copyright 2012 Michael Still and Canonical Inc
# Copyright 2014 SoftLayer Technologies, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import print_function
import httplib
import os
import sys
from oslo.config import cfg
from oslo.serialization import jsonutils
from oslo_log import log as logging
import six.moves.urllib.parse as urlparse
from webob import exc
from glance.common import config
from glance.common import exception
from glance.common import utils
from glance import i18n
LOG = logging.getLogger(__name__)
_ = i18n._
_LI = i18n._LI
_LE = i18n._LE
_LW = i18n._LW
# NOTE: positional arguments <args> will be parsed before <command> until
# this bug is corrected https://bugs.launchpad.net/oslo.config/+bug/1392428
cli_opts = [
cfg.IntOpt('chunksize',
short='c',
default=65536,
help="Amount of data to transfer per HTTP write."),
cfg.StrOpt('dontreplicate',
short='D',
default=('created_at date deleted_at location updated_at'),
help="List of fields to not replicate."),
cfg.BoolOpt('metaonly',
short='m',
default=False,
help="Only replicate metadata, not images."),
cfg.StrOpt('token',
short='t',
default='',
help=("Pass in your authentication token if you have "
"one. If you use this option the same token is "
"used for both the master and the slave.")),
cfg.StrOpt('mastertoken',
short='M',
default='',
help=("Pass in your authentication token if you have "
"one. This is the token used for the master.")),
cfg.StrOpt('slavetoken',
short='S',
default='',
help=("Pass in your authentication token if you have "
"one. This is the token used for the slave.")),
cfg.StrOpt('command',
positional=True,
help="Command to be given to replicator"),
cfg.ListOpt('args',
positional=True,
help="Arguments for the command"),
]
CONF = cfg.CONF
CONF.register_cli_opts(cli_opts)
logging.register_options(CONF)
# If ../glance/__init__.py exists, add ../ to Python search path, so that
# it will override what happens to be installed in /usr/(local/)lib/python...
possible_topdir = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]),
os.pardir,
os.pardir))
if os.path.exists(os.path.join(possible_topdir, 'glance', '__init__.py')):
sys.path.insert(0, possible_topdir)
COMMANDS = """Commands:
help <command> Output help for one of the commands below
compare What is missing from the slave glance?
dump Dump the contents of a glance instance to local disk.
livecopy Load the contents of one glance instance into another.
load Load the contents of a local directory into glance.
size Determine the size of a glance instance if dumped to disk.
"""
IMAGE_ALREADY_PRESENT_MESSAGE = _('The image %s is already present on '
'the slave, but our check for it did '
'not find it. This indicates that we '
'do not have permissions to see all '
'the images on the slave server.')
class ImageService(object):
def __init__(self, conn, auth_token):
"""Initialize the ImageService.
conn: a httplib.HTTPConnection to the glance server
auth_token: authentication token to pass in the x-auth-token header
"""
self.auth_token = auth_token
self.conn = conn
def _http_request(self, method, url, headers, body,
ignore_result_body=False):
"""Perform an HTTP request against the server.
method: the HTTP method to use
url: the URL to request (not including server portion)
headers: headers for the request
body: body to send with the request
ignore_result_body: the body of the result will be ignored
Returns: a httplib response object
"""
if self.auth_token:
headers.setdefault('x-auth-token', self.auth_token)
LOG.debug('Request: %(method)s http://%(server)s:%(port)s'
'%(url)s with headers %(headers)s'
% {'method': method,
'server': self.conn.host,
'port': self.conn.port,
'url': url,
'headers': repr(headers)})
self.conn.request(method, url, body, headers)
response = self.conn.getresponse()
headers = self._header_list_to_dict(response.getheaders())
code = response.status
code_description = httplib.responses[code]
LOG.debug('Response: %(code)s %(status)s %(headers)s'
% {'code': code,
'status': code_description,
'headers': repr(headers)})
if code == 400:
raise exc.HTTPBadRequest(
explanation=response.read())
if code == 500:
raise exc.HTTPInternalServerError(
explanation=response.read())
if code == 401:
raise exc.HTTPUnauthorized(
explanation=response.read())
if code == 403:
raise exc.HTTPForbidden(
explanation=response.read())
if code == 409:
raise exc.HTTPConflict(
explanation=response.read())
if ignore_result_body:
# NOTE: because we are pipelining requests through a single HTTP
# connection, httplib requires that we read the response body
# before we can make another request. If the caller knows they
# don't care about the body, they can ask us to do that for them.
response.read()
return response
def get_images(self):
"""Return a detailed list of images.
Yields a series of images as dicts containing metadata.
"""
params = {'is_public': None}
while True:
url = '/v1/images/detail'
query = urlparse.urlencode(params)
if query:
url += '?%s' % query
response = self._http_request('GET', url, {}, '')
result = jsonutils.loads(response.read())
if not result or 'images' not in result or not result['images']:
return
for image in result.get('images', []):
params['marker'] = image['id']
yield image
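        # Pagination sketch: the first request is GET /v1/images/detail with
        # is_public=None; each later iteration repeats it with
        # marker=<last image id> until the server returns no more images.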
def get_image(self, image_uuid):
"""Fetch image data from glance.
image_uuid: the id of an image
Returns: a httplib Response object where the body is the image.
"""
url = '/v1/images/%s' % image_uuid
return self._http_request('GET', url, {}, '')
@staticmethod
def _header_list_to_dict(headers):
"""Expand a list of headers into a dictionary.
headers: a list of [(key, value), (key, value), (key, value)]
Returns: a dictionary representation of the list
"""
d = {}
for (header, value) in headers:
if header.startswith('x-image-meta-property-'):
prop = header.replace('x-image-meta-property-', '')
d.setdefault('properties', {})
d['properties'][prop] = value
else:
d[header.replace('x-image-meta-', '')] = value
return d
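        # e.g. [('x-image-meta-name', 'foo'), ('x-image-meta-property-os', 'linux')]
        # -> {'name': 'foo', 'properties': {'os': 'linux'}}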
def get_image_meta(self, image_uuid):
"""Return the metadata for a single image.
image_uuid: the id of an image
Returns: image metadata as a dictionary
"""
url = '/v1/images/%s' % image_uuid
response = self._http_request('HEAD', url, {}, '',
ignore_result_body=True)
return self._header_list_to_dict(response.getheaders())
@staticmethod
def _dict_to_headers(d):
"""Convert a dictionary into one suitable for a HTTP request.
d: a dictionary
Returns: the same dictionary, with x-image-meta added to every key
"""
h = {}
for key in d:
if key == 'properties':
for subkey in d[key]:
if d[key][subkey] is None:
h['x-image-meta-property-%s' % subkey] = ''
else:
h['x-image-meta-property-%s' % subkey] = d[key][subkey]
else:
h['x-image-meta-%s' % key] = d[key]
return h
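        # e.g. {'name': 'foo', 'properties': {'os': 'linux'}}
        # -> {'x-image-meta-name': 'foo', 'x-image-meta-property-os': 'linux'}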
def add_image(self, image_meta, image_data):
"""Upload an image.
image_meta: image metadata as a dictionary
image_data: image data as a object with a read() method
Returns: a tuple of (http response headers, http response body)
"""
url = '/v1/images'
headers = self._dict_to_headers(image_meta)
headers['Content-Type'] = 'application/octet-stream'
headers['Content-Length'] = int(image_meta['size'])
response = self._http_request('POST', url, headers, image_data)
headers = self._header_list_to_dict(response.getheaders())
LOG.debug('Image post done')
body = response.read()
return headers, body
def add_image_meta(self, image_meta):
"""Update image metadata.
image_meta: image metadata as a dictionary
Returns: a tuple of (http response headers, http response body)
"""
url = '/v1/images/%s' % image_meta['id']
headers = self._dict_to_headers(image_meta)
headers['Content-Type'] = 'application/octet-stream'
response = self._http_request('PUT', url, headers, '')
headers = self._header_list_to_dict(response.getheaders())
LOG.debug('Image post done')
body = response.read()
return headers, body
def get_image_service():
"""Get a copy of the image service.
This is done like this to make it easier to mock out ImageService.
"""
return ImageService
def replication_size(options, args):
"""%(prog)s size <server:port>
Determine the size of a glance instance if dumped to disk.
server:port: the location of the glance instance.
"""
# Make sure server info is provided
if len(args) < 1:
raise TypeError(_("Too few arguments."))
server, port = utils.parse_valid_host_port(args.pop())
total_size = 0
count = 0
imageservice = get_image_service()
client = imageservice(httplib.HTTPConnection(server, port),
options.slavetoken)
for image in client.get_images():
LOG.debug('Considering image: %(image)s' % {'image': image})
if image['status'] == 'active':
total_size += int(image['size'])
count += 1
print(_('Total size is %(size)d bytes across %(img_count)d images') %
{'size': total_size,
'img_count': count})
def replication_dump(options, args):
"""%(prog)s dump <server:port> <path>
Dump the contents of a glance instance to local disk.
server:port: the location of the glance instance.
path: a directory on disk to contain the data.
"""
# Make sure server and path are provided
if len(args) < 2:
raise TypeError(_("Too few arguments."))
path = args.pop()
server, port = utils.parse_valid_host_port(args.pop())
imageservice = get_image_service()
client = imageservice(httplib.HTTPConnection(server, port),
options.mastertoken)
for image in client.get_images():
LOG.debug('Considering: %s' % image['id'])
data_path = os.path.join(path, image['id'])
if not os.path.exists(data_path):
LOG.info(_LI('Storing: %s') % image['id'])
# Dump glance information
with open(data_path, 'w') as f:
f.write(jsonutils.dumps(image))
if image['status'] == 'active' and not options.metaonly:
# Now fetch the image. The metadata returned in headers here
# is the same as that which we got from the detailed images
# request earlier, so we can ignore it here. Note that we also
# only dump active images.
LOG.debug('Image %s is active' % image['id'])
image_response = client.get_image(image['id'])
with open(data_path + '.img', 'wb') as f:
while True:
chunk = image_response.read(options.chunksize)
if not chunk:
break
f.write(chunk)
def _dict_diff(a, b):
"""A one way dictionary diff.
a: a dictionary
b: a dictionary
Returns: True if the dictionaries are different
"""
# Only things the master has which the slave lacks matter
if set(a.keys()) - set(b.keys()):
LOG.debug('metadata diff -- master has extra keys: %(keys)s'
% {'keys': ' '.join(set(a.keys()) - set(b.keys()))})
return True
for key in a:
if str(a[key]) != str(b[key]):
LOG.debug('metadata diff -- value differs for key '
'%(key)s: master "%(master_value)s" vs '
'slave "%(slave_value)s"' %
{'key': key,
'master_value': a[key],
'slave_value': b[key]})
return True
return False
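# e.g. _dict_diff({'id': 1}, {}) is True (master-only key), while
# _dict_diff({'id': 1}, {'id': '1', 'extra': 'x'}) is False: values are
# compared as strings and slave-only keys are ignored.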
def replication_load(options, args):
"""%(prog)s load <server:port> <path>
Load the contents of a local directory into glance.
server:port: the location of the glance instance.
path: a directory on disk containing the data.
"""
# Make sure server and path are provided
if len(args) < 2:
raise TypeError(_("Too few arguments."))
path = args.pop()
server, port = utils.parse_valid_host_port(args.pop())
imageservice = get_image_service()
client = imageservice(httplib.HTTPConnection(server, port),
options.slavetoken)
updated = []
for ent in os.listdir(path):
if utils.is_uuid_like(ent):
image_uuid = ent
LOG.info(_LI('Considering: %s') % image_uuid)
meta_file_name = os.path.join(path, image_uuid)
with open(meta_file_name) as meta_file:
meta = jsonutils.loads(meta_file.read())
# Remove keys which don't make sense for replication
for key in options.dontreplicate.split(' '):
if key in meta:
LOG.debug('Stripping %(header)s from saved '
'metadata', {'header': key})
del meta[key]
if _image_present(client, image_uuid):
# NOTE(mikal): Perhaps we just need to update the metadata?
# Note that we don't attempt to change an image file once it
# has been uploaded.
LOG.debug('Image %s already present', image_uuid)
headers = client.get_image_meta(image_uuid)
for key in options.dontreplicate.split(' '):
if key in headers:
LOG.debug('Stripping %(header)s from slave '
'metadata', {'header': key})
del headers[key]
if _dict_diff(meta, headers):
LOG.info(_LI('Image %s metadata has changed') %
image_uuid)
headers, body = client.add_image_meta(meta)
_check_upload_response_headers(headers, body)
updated.append(meta['id'])
else:
if not os.path.exists(os.path.join(path, image_uuid + '.img')):
LOG.debug('%s dump is missing image data, skipping' %
image_uuid)
continue
# Upload the image itself
with open(os.path.join(path, image_uuid + '.img')) as img_file:
try:
headers, body = client.add_image(meta, img_file)
_check_upload_response_headers(headers, body)
updated.append(meta['id'])
except exc.HTTPConflict:
LOG.error(_LE(IMAGE_ALREADY_PRESENT_MESSAGE)
% image_uuid) # noqa
return updated
def replication_livecopy(options, args):
"""%(prog)s livecopy <fromserver:port> <toserver:port>
Load the contents of one glance instance into another.
fromserver:port: the location of the master glance instance.
toserver:port: the location of the slave glance instance.
"""
# Make sure from-server and to-server are provided
if len(args) < 2:
raise TypeError(_("Too few arguments."))
imageservice = get_image_service()
slave_server, slave_port = utils.parse_valid_host_port(args.pop())
slave_conn = httplib.HTTPConnection(slave_server, slave_port)
slave_client = imageservice(slave_conn, options.slavetoken)
master_server, master_port = utils.parse_valid_host_port(args.pop())
master_conn = httplib.HTTPConnection(master_server, master_port)
master_client = imageservice(master_conn, options.mastertoken)
updated = []
for image in master_client.get_images():
LOG.debug('Considering %(id)s' % {'id': image['id']})
for key in options.dontreplicate.split(' '):
if key in image:
LOG.debug('Stripping %(header)s from master metadata',
{'header': key})
del image[key]
if _image_present(slave_client, image['id']):
# NOTE(mikal): Perhaps we just need to update the metadata?
# Note that we don't attempt to change an image file once it
# has been uploaded.
headers = slave_client.get_image_meta(image['id'])
if headers['status'] == 'active':
for key in options.dontreplicate.split(' '):
if key in image:
LOG.debug('Stripping %(header)s from master '
'metadata', {'header': key})
del image[key]
if key in headers:
LOG.debug('Stripping %(header)s from slave '
'metadata', {'header': key})
del headers[key]
if _dict_diff(image, headers):
LOG.info(_LI('Image %s metadata has changed') %
image['id'])
headers, body = slave_client.add_image_meta(image)
_check_upload_response_headers(headers, body)
updated.append(image['id'])
elif image['status'] == 'active':
LOG.info(_LI('Image %s is being synced') % image['id'])
if not options.metaonly:
image_response = master_client.get_image(image['id'])
try:
headers, body = slave_client.add_image(image,
image_response)
_check_upload_response_headers(headers, body)
updated.append(image['id'])
except exc.HTTPConflict:
LOG.error(_LE(IMAGE_ALREADY_PRESENT_MESSAGE) % image['id']) # noqa
return updated
def replication_compare(options, args):
"""%(prog)s compare <fromserver:port> <toserver:port>
Compare the contents of fromserver with those of toserver.
fromserver:port: the location of the master glance instance.
toserver:port: the location of the slave glance instance.
"""
# Make sure from-server and to-server are provided
if len(args) < 2:
raise TypeError(_("Too few arguments."))
imageservice = get_image_service()
slave_server, slave_port = utils.parse_valid_host_port(args.pop())
slave_conn = httplib.HTTPConnection(slave_server, slave_port)
slave_client = imageservice(slave_conn, options.slavetoken)
master_server, master_port = utils.parse_valid_host_port(args.pop())
master_conn = httplib.HTTPConnection(master_server, master_port)
master_client = imageservice(master_conn, options.mastertoken)
differences = {}
for image in master_client.get_images():
if _image_present(slave_client, image['id']):
headers = slave_client.get_image_meta(image['id'])
for key in options.dontreplicate.split(' '):
if key in image:
LOG.debug('Stripping %(header)s from master metadata',
{'header': key})
del image[key]
if key in headers:
LOG.debug('Stripping %(header)s from slave metadata',
{'header': key})
del headers[key]
for key in image:
if image[key] != headers.get(key, None):
LOG.warn(_LW('%(image_id)s: field %(key)s differs '
'(source is %(master_value)s, destination '
'is %(slave_value)s)')
% {'image_id': image['id'],
'key': key,
'master_value': image[key],
'slave_value': headers.get(key, 'undefined')})
differences[image['id']] = 'diff'
else:
LOG.debug('%(image_id)s is identical'
% {'image_id': image['id']})
elif image['status'] == 'active':
LOG.warn(_LW('Image %s entirely missing from the destination')
% image['id'])
differences[image['id']] = 'missing'
return differences
def _check_upload_response_headers(headers, body):
"""Check that the headers of an upload are reasonable.
headers: the headers from the upload
body: the body from the upload
"""
if 'status' not in headers:
try:
d = jsonutils.loads(body)
if 'image' in d and 'status' in d['image']:
return
except Exception:
raise exception.UploadException(body)
def _image_present(client, image_uuid):
"""Check if an image is present in glance.
client: the ImageService
image_uuid: the image uuid to check
Returns: True if the image is present
"""
headers = client.get_image_meta(image_uuid)
return 'status' in headers
def print_help(options, args):
"""Print help specific to a command.
options: the parsed command line options
args: the command line
"""
if len(args) != 1:
print(COMMANDS)
sys.exit(1)
command_name = args.pop()
command = lookup_command(command_name)
print(command.__doc__ % {'prog': os.path.basename(sys.argv[0])})
def lookup_command(command_name):
"""Lookup a command.
command_name: the command name
Returns: a method which implements that command
"""
BASE_COMMANDS = {'help': print_help}
REPLICATION_COMMANDS = {'compare': replication_compare,
'dump': replication_dump,
'livecopy': replication_livecopy,
'load': replication_load,
'size': replication_size}
commands = {}
for command_set in (BASE_COMMANDS, REPLICATION_COMMANDS):
commands.update(command_set)
try:
command = commands[command_name]
except KeyError:
sys.exit(_("Unknown command: %s") % command_name)
return command
def main():
"""The main function."""
try:
config.parse_args()
except RuntimeError as e:
sys.exit("ERROR: %s" % utils.exception_to_str(e))
# Setup logging
logging.setup('glance')
if CONF.token:
CONF.slavetoken = CONF.token
CONF.mastertoken = CONF.token
command = lookup_command(CONF.command)
try:
command(CONF, CONF.args)
except TypeError as e:
LOG.error(_LE(command.__doc__) % {'prog': command.__name__}) # noqa
sys.exit("ERROR: %s" % utils.exception_to_str(e))
except ValueError as e:
LOG.error(_LE(command.__doc__) % {'prog': command.__name__}) # noqa
sys.exit("ERROR: %s" % utils.exception_to_str(e))
if __name__ == '__main__':
main()
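# Example invocations (a sketch: host:port pairs and tokens are placeholders,
# and the script is assumed to be installed as 'glance-replicator'):
#   glance-replicator size slave.example.com:9292 -S <token>
#   glance-replicator livecopy master.example.com:9292 slave.example.com:9292 -t <token>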
|
yanheven/glance
|
glance/cmd/replicator.py
|
Python
|
apache-2.0
| 25,493
|
# encoding: utf-8
from __future__ import unicode_literals
import os
import datetime
from nose.tools import * # noqa
from framework.auth.core import Auth
from website.addons.osfstorage.tests.utils import (
StorageTestCase, Delta, AssertDeltas,
recursively_create_file,
)
from website.addons.osfstorage.tests import factories
from framework.auth import signing
from website.util import rubeus
from website.addons.osfstorage import model
from website.addons.osfstorage import utils
from website.addons.osfstorage import views
from website.addons.base.views import make_auth
from website.addons.osfstorage import settings as storage_settings
def create_record_with_version(path, node_settings, **kwargs):
version = factories.FileVersionFactory(**kwargs)
    record = node_settings.root_node.append_file(path)
record.versions.append(version)
record.save()
return record
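# e.g. record = create_record_with_version('foo/bar.txt', node_settings)
# returns a file record with a single saved version attached.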
class HookTestCase(StorageTestCase):
def send_hook(self, view_name, view_kwargs, payload, method='get', **kwargs):
method = getattr(self.app, method)
return method(
self.project.api_url_for(view_name, **view_kwargs),
signing.sign_data(signing.default_signer, payload),
**kwargs
)
class TestGetMetadataHook(HookTestCase):
    def test_file_metadata(self):
path = u'kind/of/magíc.mp3'
record = recursively_create_file(self.node_settings, path)
version = factories.FileVersionFactory()
record.versions.append(version)
record.save()
res = self.send_hook(
'osfstorage_get_metadata',
{'fid': record.parent._id},
{},
)
assert_true(isinstance(res.json, dict))
assert_equal(res.json, record.parent.serialized(True))
    def test_children_metadata(self):
path = u'kind/of/magíc.mp3'
record = recursively_create_file(self.node_settings, path)
version = factories.FileVersionFactory()
record.versions.append(version)
record.save()
res = self.send_hook(
'osfstorage_get_children',
{'fid': record.parent._id},
{},
)
assert_equal(len(res.json), 1)
assert_equal(
res.json[0],
record.serialized()
)
def test_osf_storage_root(self):
auth = Auth(self.project.creator)
result = views.osf_storage_root(self.node_settings, auth=auth)
node = self.project
expected = rubeus.build_addon_root(
node_settings=self.node_settings,
name='',
permissions=auth,
user=auth.user,
nodeUrl=node.url,
nodeApiUrl=node.api_url,
)
root = result[0]
assert_equal(root, expected)
def test_root_default(self):
res = self.send_hook('osfstorage_get_metadata', {}, {})
assert_equal(res.json['fullPath'], '/')
assert_equal(res.json['id'], self.node_settings.root_node._id)
def test_metadata_not_found(self):
res = self.send_hook(
'osfstorage_get_metadata',
{'fid': 'somebogusid'}, {},
expect_errors=True,
)
assert_equal(res.status_code, 404)
def test_metadata_not_found_lots_of_slashes(self):
res = self.send_hook(
'osfstorage_get_metadata',
{'fid': '/not/fo/u/nd/'}, {},
expect_errors=True,
)
assert_equal(res.status_code, 404)
class TestUploadFileHook(HookTestCase):
def setUp(self):
super(TestUploadFileHook, self).setUp()
self.name = 'pízza.png'
self.record = recursively_create_file(self.node_settings, self.name)
self.auth = make_auth(self.user)
def send_upload_hook(self, parent, payload=None, **kwargs):
return self.send_hook(
'osfstorage_create_child',
{'fid': parent._id},
payload=payload or {},
method='post_json',
**kwargs
)
def make_payload(self, **kwargs):
payload = {
'user': self.user._id,
'name': self.name,
'hashes': {'base64': '=='},
'worker': {
'uname': 'testmachine'
},
'settings': {
'provider': 'filesystem',
storage_settings.WATERBUTLER_RESOURCE: 'blah',
},
'metadata': {
'size': 123,
'name': 'file',
'provider': 'filesystem',
'modified': 'Mon, 16 Feb 2015 18:45:34 GMT'
},
}
payload.update(kwargs)
return payload
def test_upload_create(self):
name = 'slightly-mad'
res = self.send_upload_hook(self.node_settings.root_node, self.make_payload(name=name))
assert_equal(res.status_code, 201)
assert_equal(res.json['status'], 'success')
record = self.node_settings.root_node.find_child_by_name(name)
version = model.OsfStorageFileVersion.load(res.json['version'])
assert_equal(version.size, 123)
assert_equal(version.location_hash, 'file')
assert_equal(version.location, {
'object': 'file',
'uname': 'testmachine',
'service': 'filesystem',
'provider': 'filesystem',
storage_settings.WATERBUTLER_RESOURCE: 'blah',
})
assert_equal(version.metadata, {
'size': 123,
'name': 'file',
'base64': '==',
'provider': 'filesystem',
'modified': 'Mon, 16 Feb 2015 18:45:34 GMT'
})
assert_is_not(version, None)
assert_equal([version], list(record.versions))
assert_not_in(version, self.record.versions)
assert_equal(record.serialized(), res.json['data'])
assert_equal(res.json['data']['downloads'], self.record.get_download_count())
def test_upload_update(self):
delta = Delta(lambda: len(self.record.versions), lambda value: value + 1)
with AssertDeltas(delta):
res = self.send_upload_hook(self.node_settings.root_node, self.make_payload())
self.record.reload()
assert_equal(res.status_code, 200)
assert_equal(res.json['status'], 'success')
version = model.OsfStorageFileVersion.load(res.json['version'])
assert_is_not(version, None)
assert_in(version, self.record.versions)
def test_upload_duplicate(self):
location = {
'service': 'cloud',
storage_settings.WATERBUTLER_RESOURCE: 'osf',
'object': 'file',
}
version = self.record.create_version(self.user, location)
with AssertDeltas(Delta(lambda: len(self.record.versions))):
res = self.send_upload_hook(self.node_settings.root_node, self.make_payload())
self.record.reload()
assert_equal(res.status_code, 200)
assert_equal(res.json['status'], 'success')
version = model.OsfStorageFileVersion.load(res.json['version'])
assert_is_not(version, None)
assert_in(version, self.record.versions)
def test_upload_create_child(self):
name = 'ლ(ಠ益ಠლ).unicode'
parent = self.node_settings.root_node.append_folder('cheesey')
res = self.send_upload_hook(parent, self.make_payload(name=name))
assert_equal(res.status_code, 201)
assert_equal(res.json['status'], 'success')
assert_equal(res.json['data']['downloads'], self.record.get_download_count())
version = model.OsfStorageFileVersion.load(res.json['version'])
assert_is_not(version, None)
assert_not_in(version, self.record.versions)
record = parent.find_child_by_name(name)
assert_in(version, record.versions)
assert_equals(record.name, name)
assert_equals(record.parent, parent)
def test_upload_create_child_with_same_name(self):
name = 'ლ(ಠ益ಠლ).unicode'
self.node_settings.root_node.append_file(name)
parent = self.node_settings.root_node.append_folder('cheesey')
res = self.send_upload_hook(parent, self.make_payload(name=name))
assert_equal(res.status_code, 201)
assert_equal(res.json['status'], 'success')
assert_equal(res.json['data']['downloads'], self.record.get_download_count())
version = model.OsfStorageFileVersion.load(res.json['version'])
assert_is_not(version, None)
assert_not_in(version, self.record.versions)
record = parent.find_child_by_name(name)
assert_in(version, record.versions)
assert_equals(record.name, name)
assert_equals(record.parent, parent)
def test_update_nested_child(self):
name = 'ლ(ಠ益ಠლ).unicode'
parent = self.node_settings.root_node.append_folder('cheesey')
old_node = parent.append_file(name)
res = self.send_upload_hook(parent, self.make_payload(name=name))
old_node.reload()
new_node = parent.find_child_by_name(name)
assert_equal(res.status_code, 200)
assert_equal(res.json['status'], 'success')
assert_equal(res.json['data']['downloads'], new_node.get_download_count())
assert_equal(old_node, new_node)
version = model.OsfStorageFileVersion.load(res.json['version'])
assert_is_not(version, None)
assert_in(version, new_node.versions)
assert_equals(new_node.name, name)
assert_equals(new_node.parent, parent)
def test_upload_weird_name(self):
name = 'another/dir/carpe.png'
parent = self.node_settings.root_node.append_folder('cheesey')
res = self.send_upload_hook(parent, self.make_payload(name=name), expect_errors=True)
assert_equal(res.status_code, 400)
assert_equal(len(parent.children), 0)
def test_upload_to_file(self):
name = 'carpe.png'
parent = self.node_settings.root_node.append_file('cheesey')
res = self.send_upload_hook(parent, self.make_payload(name=name), expect_errors=True)
assert_true(parent.is_file)
assert_equal(res.status_code, 400)
def test_upload_no_data(self):
res = self.send_upload_hook(self.node_settings.root_node, expect_errors=True)
assert_equal(res.status_code, 400)
def test_archive(self):
name = 'ლ(ಠ益ಠლ).unicode'
parent = self.node_settings.root_node.append_folder('cheesey')
res = self.send_upload_hook(parent, self.make_payload(name=name, hashes={'sha256': 'foo'}))
assert_equal(res.status_code, 201)
assert_equal(res.json['status'], 'success')
assert_is(res.json['archive'], True)
self.send_hook(
'osfstorage_update_metadata',
{},
payload={'metadata': {
'vault': 'Vault 101',
'archive': '101 tluaV',
}, 'version': res.json['version']},
method='put_json',
)
res = self.send_upload_hook(parent, self.make_payload(
name=name,
hashes={'sha256': 'foo'},
metadata={
'name': 'lakdjf',
'provider': 'testing',
}))
assert_equal(res.status_code, 200)
assert_equal(res.json['status'], 'success')
assert_is(res.json['archive'], False)
# def test_upload_update_deleted(self):
# pass
class TestUpdateMetadataHook(HookTestCase):
def setUp(self):
super(TestUpdateMetadataHook, self).setUp()
self.path = 'greasy/pízza.png'
self.record = recursively_create_file(self.node_settings, self.path)
self.version = factories.FileVersionFactory()
self.record.versions = [self.version]
self.record.save()
self.payload = {
'metadata': {
'size': 123,
'modified': 'Mon, 16 Feb 2015 18:45:34 GMT',
'md5': 'askjasdlk;jsadlkjsadf',
'sha256': 'sahduashduahdushaushda',
},
'version': self.version._id,
'size': 321, # Just to make sure the field is ignored
}
def send_metadata_hook(self, payload=None, **kwargs):
return self.send_hook(
'osfstorage_update_metadata',
{},
payload=payload or self.payload,
method='put_json',
**kwargs
)
def test_callback(self):
self.version.date_modified = None
self.version.save()
self.send_metadata_hook()
self.version.reload()
        # Test fields are added
assert_equal(self.version.metadata['size'], 123)
assert_equal(self.version.metadata['md5'], 'askjasdlk;jsadlkjsadf')
assert_equal(self.version.metadata['modified'], 'Mon, 16 Feb 2015 18:45:34 GMT')
        # Test attributes are populated
assert_equal(self.version.size, 123)
assert_true(isinstance(self.version.date_modified, datetime.datetime))
def test_archived(self):
self.send_metadata_hook({
'version': self.version._id,
'metadata': {
'vault': 'osf_storage_prod',
'archive': 'Some really long glacier object id here'
}
})
self.version.reload()
assert_equal(self.version.metadata['vault'], 'osf_storage_prod')
assert_equal(self.version.metadata['archive'], 'Some really long glacier object id here')
def test_archived_record_not_found(self):
res = self.send_metadata_hook(
payload={
'metadata': {'archive': 'glacier'},
'version': self.version._id[::-1],
'size': 123,
'modified': 'Mon, 16 Feb 2015 18:45:34 GMT'
},
expect_errors=True,
)
assert_equal(res.status_code, 404)
self.version.reload()
assert_not_in('archive', self.version.metadata)
class TestGetRevisions(StorageTestCase):
def setUp(self):
super(TestGetRevisions, self).setUp()
self.path = 'tie/your/mother/down.mp3'
self.record = recursively_create_file(self.node_settings, self.path)
self.record.versions = [factories.FileVersionFactory() for __ in range(15)]
self.record.save()
def get_revisions(self, fid=None, **kwargs):
return self.app.get(
self.project.api_url_for(
'osfstorage_get_revisions',
fid=fid or self.record._id,
**signing.sign_data(signing.default_signer, {})
),
auth=self.user.auth,
**kwargs
)
def test_get_revisions(self):
res = self.get_revisions()
expected = [
utils.serialize_revision(
self.project,
self.record,
version,
index=len(self.record.versions) - 1 - idx
)
for idx, version in enumerate(reversed(self.record.versions))
]
assert_equal(len(res.json['revisions']), 15)
assert_equal(res.json['revisions'], [x for x in expected])
assert_equal(res.json['revisions'][0]['index'], 15)
assert_equal(res.json['revisions'][-1]['index'], 1)
def test_get_revisions_path_not_found(self):
res = self.get_revisions(fid='missing', expect_errors=True)
assert_equal(res.status_code, 404)
class TestCreateFolder(HookTestCase):
def setUp(self):
super(TestCreateFolder, self).setUp()
self.root_node = self.node_settings.root_node
def create_folder(self, name, parent=None, **kwargs):
parent = parent or self.node_settings.root_node
return self.send_hook(
'osfstorage_create_child',
{'fid': parent._id},
payload={
'name': name,
'user': self.user._id,
'kind': 'folder'
},
method='post_json',
**kwargs
)
def test_create_folder(self):
resp = self.create_folder('name')
self.root_node.reload()
assert_equal(resp.status_code, 201)
assert_equal(len(self.root_node.children), 1)
assert_equal(self.root_node.children[0].serialized(), resp.json['data'])
def test_no_data(self):
resp = self.send_hook(
'osfstorage_create_child',
{'fid': self.root_node._id},
payload={},
method='post_json',
expect_errors=True
)
assert_equal(resp.status_code, 400)
def test_create_with_parent(self):
resp = self.create_folder('name')
assert_equal(resp.status_code, 201)
assert_equal(len(self.root_node.children), 1)
assert_equal(self.root_node.children[0].serialized(), resp.json['data'])
resp = self.create_folder('name', parent=model.OsfStorageFileNode.load(resp.json['data']['id']))
assert_equal(resp.status_code, 201)
assert_equal(len(self.root_node.children), 1)
assert_true(self.root_node.children[0].is_folder)
assert_equal(len(self.root_node.children[0].children), 1)
assert_true(self.root_node.children[0].children[0].is_folder)
assert_equal(self.root_node.children[0].children[0].serialized(), resp.json['data'])
class TestDeleteHook(HookTestCase):
def setUp(self):
super(TestDeleteHook, self).setUp()
self.root_node = self.node_settings.root_node
def send_hook(self, view_name, view_kwargs, payload, method='get', **kwargs):
method = getattr(self.app, method)
return method(
'{url}?payload={payload}&signature={signature}'.format(
url=self.project.api_url_for(view_name, **view_kwargs),
**signing.sign_data(signing.default_signer, payload)
),
**kwargs
)
def delete(self, file_node, **kwargs):
return self.send_hook(
'osfstorage_delete',
{'fid': file_node._id},
payload={
'user': self.user._id
},
method='delete',
**kwargs
)
def test_delete(self):
file = self.root_node.append_file('Newfile')
resp = self.delete(file)
assert_equal(resp.status_code, 200)
assert_equal(resp.json, {'status': 'success'})
fid = file._id
del file
model.OsfStorageFileNode._clear_object_cache()
assert_is(model.OsfStorageFileNode.load(fid), None)
assert_true(model.OsfStorageTrashedFileNode.load(fid))
def test_delete_deleted(self):
file = self.root_node.append_file('Newfile')
file.delete()
resp = self.delete(file, expect_errors=True)
assert_equal(resp.status_code, 404)
def test_cannot_delete_root(self):
resp = self.delete(self.root_node, expect_errors=True)
assert_equal(resp.status_code, 400)
|
ckc6cz/osf.io
|
website/addons/osfstorage/tests/test_views.py
|
Python
|
apache-2.0
| 19,055
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorflow.ops.histogram_ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import test_util
from tensorflow.python.framework import constant_op
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import histogram_ops
from tensorflow.python.platform import test
class BinValuesFixedWidth(test.TestCase):
def test_empty_input_gives_all_zero_counts(self):
# Bins will be:
# (-inf, 1), [1, 2), [2, 3), [3, 4), [4, inf)
value_range = [0.0, 5.0]
values = []
expected_bins = []
with self.cached_session():
bins = histogram_ops.histogram_fixed_width_bins(
values, value_range, nbins=5)
self.assertEqual(dtypes.int32, bins.dtype)
self.assertAllClose(expected_bins, self.evaluate(bins))
def test_1d_values_int32_output(self):
# Bins will be:
# (-inf, 1), [1, 2), [2, 3), [3, 4), [4, inf)
value_range = [0.0, 5.0]
values = [-1.0, 0.0, 1.5, 2.0, 5.0, 15]
expected_bins = [0, 0, 1, 2, 4, 4]
with self.cached_session():
bins = histogram_ops.histogram_fixed_width_bins(
values, value_range, nbins=5, dtype=dtypes.int64)
self.assertEqual(dtypes.int32, bins.dtype)
self.assertAllClose(expected_bins, self.evaluate(bins))
def test_1d_float64_values_int32_output(self):
# Bins will be:
# (-inf, 1), [1, 2), [2, 3), [3, 4), [4, inf)
value_range = np.float64([0.0, 5.0])
values = np.float64([-1.0, 0.0, 1.5, 2.0, 5.0, 15])
expected_bins = [0, 0, 1, 2, 4, 4]
with self.cached_session():
bins = histogram_ops.histogram_fixed_width_bins(
values, value_range, nbins=5)
self.assertEqual(dtypes.int32, bins.dtype)
self.assertAllClose(expected_bins, self.evaluate(bins))
def test_2d_values(self):
# Bins will be:
# (-inf, 1), [1, 2), [2, 3), [3, 4), [4, inf)
value_range = [0.0, 5.0]
values = constant_op.constant(
[[-1.0, 0.0, 1.5], [2.0, 5.0, 15]], shape=(2, 3))
expected_bins = [[0, 0, 1], [2, 4, 4]]
with self.cached_session():
bins = histogram_ops.histogram_fixed_width_bins(
values, value_range, nbins=5)
self.assertEqual(dtypes.int32, bins.dtype)
self.assertAllClose(expected_bins, self.evaluate(bins))
class HistogramFixedWidthTest(test.TestCase):
def setUp(self):
self.rng = np.random.RandomState(0)
@test_util.run_deprecated_v1
def test_with_invalid_value_range(self):
values = [-1.0, 0.0, 1.5, 2.0, 5.0, 15]
with self.assertRaisesRegex(ValueError,
"Shape must be rank 1 but is rank 0"):
histogram_ops.histogram_fixed_width(values, 1.0)
with self.assertRaisesRegex(ValueError, "Dimension must be 2 but is 3"):
histogram_ops.histogram_fixed_width(values, [1.0, 2.0, 3.0])
@test_util.run_deprecated_v1
def test_with_invalid_nbins(self):
values = [-1.0, 0.0, 1.5, 2.0, 5.0, 15]
with self.assertRaisesRegex(ValueError,
"Shape must be rank 0 but is rank 1"):
histogram_ops.histogram_fixed_width(values, [1.0, 5.0], nbins=[1, 2])
with self.assertRaisesRegex(ValueError, "Requires nbins > 0"):
histogram_ops.histogram_fixed_width(values, [1.0, 5.0], nbins=-5)
def test_empty_input_gives_all_zero_counts(self):
# Bins will be:
# (-inf, 1), [1, 2), [2, 3), [3, 4), [4, inf)
value_range = [0.0, 5.0]
values = []
expected_bin_counts = [0, 0, 0, 0, 0]
with self.session():
hist = histogram_ops.histogram_fixed_width(values, value_range, nbins=5)
self.assertEqual(dtypes.int32, hist.dtype)
self.assertAllClose(expected_bin_counts, self.evaluate(hist))
def test_1d_values_int64_output(self):
# Bins will be:
# (-inf, 1), [1, 2), [2, 3), [3, 4), [4, inf)
value_range = [0.0, 5.0]
values = [-1.0, 0.0, 1.5, 2.0, 5.0, 15]
expected_bin_counts = [2, 1, 1, 0, 2]
with self.session():
hist = histogram_ops.histogram_fixed_width(
values, value_range, nbins=5, dtype=dtypes.int64)
self.assertEqual(dtypes.int64, hist.dtype)
self.assertAllClose(expected_bin_counts, self.evaluate(hist))
def test_1d_float64_values(self):
# Bins will be:
# (-inf, 1), [1, 2), [2, 3), [3, 4), [4, inf)
value_range = np.float64([0.0, 5.0])
values = np.float64([-1.0, 0.0, 1.5, 2.0, 5.0, 15])
expected_bin_counts = [2, 1, 1, 0, 2]
with self.session():
hist = histogram_ops.histogram_fixed_width(values, value_range, nbins=5)
self.assertEqual(dtypes.int32, hist.dtype)
self.assertAllClose(expected_bin_counts, self.evaluate(hist))
def test_2d_values(self):
# Bins will be:
# (-inf, 1), [1, 2), [2, 3), [3, 4), [4, inf)
value_range = [0.0, 5.0]
values = [[-1.0, 0.0, 1.5], [2.0, 5.0, 15]]
expected_bin_counts = [2, 1, 1, 0, 2]
with self.session():
hist = histogram_ops.histogram_fixed_width(values, value_range, nbins=5)
self.assertEqual(dtypes.int32, hist.dtype)
self.assertAllClose(expected_bin_counts, self.evaluate(hist))
@test_util.run_deprecated_v1
def test_shape_inference(self):
value_range = [0.0, 5.0]
values = [[-1.0, 0.0, 1.5], [2.0, 5.0, 15]]
expected_bin_counts = [2, 1, 1, 0, 2]
placeholder = array_ops.placeholder(dtypes.int32)
with self.session():
hist = histogram_ops.histogram_fixed_width(values, value_range, nbins=5)
self.assertAllEqual(hist.shape.as_list(), (5,))
self.assertEqual(dtypes.int32, hist.dtype)
self.assertAllClose(expected_bin_counts, self.evaluate(hist))
hist = histogram_ops.histogram_fixed_width(
values, value_range, nbins=placeholder)
self.assertEqual(hist.shape.ndims, 1)
self.assertIs(hist.shape.dims[0].value, None)
self.assertEqual(dtypes.int32, hist.dtype)
self.assertAllClose(expected_bin_counts, hist.eval({placeholder: 5}))
if __name__ == '__main__':
test.main()
|
annarev/tensorflow
|
tensorflow/python/ops/histogram_ops_test.py
|
Python
|
apache-2.0
| 6,797
|
from __future__ import absolute_import
from sentry.api.serializers import Serializer, register
from sentry.models import Release, TagValue
@register(Release)
class ReleaseSerializer(Serializer):
def get_attrs(self, item_list, user):
tags = {
tk.value: tk
for tk in TagValue.objects.filter(
project=item_list[0].project,
key='sentry:release',
value__in=[o.version for o in item_list],
)
}
result = {}
for item in item_list:
result[item] = {
'tag': tags.get(item.version),
}
return result
def serialize(self, obj, attrs, user):
d = {
'version': obj.version,
'shortVersion': obj.short_version,
'ref': obj.ref,
'url': obj.url,
'dateStarted': obj.date_started,
'dateReleased': obj.date_released,
'dateCreated': obj.date_added,
'data': obj.data,
'newGroups': obj.new_groups,
}
if attrs['tag']:
d.update({
'lastEvent': attrs['tag'].last_seen,
'firstEvent': attrs['tag'].first_seen,
})
else:
d.update({
'lastEvent': None,
'firstEvent': None,
})
return d
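# Illustrative usage sketch (not part of this module): registered serializers
# are normally invoked through sentry's generic serialize() entry point, e.g.
#
#     from sentry.api.serializers import serialize
#     data = serialize(release_instance, request.user)
#
# 'release_instance' and 'request' above are placeholders.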
|
wong2/sentry
|
src/sentry/api/serializers/models/release.py
|
Python
|
bsd-3-clause
| 1,382
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Provides implementations of basic image processing functions.
Implements basic image processing functions, such as reading/writing images,
cropping, finding the bounding box of a color and diffing images.
When numpy is present, image_util_numpy_impl is used for the implementation of
this interface. The old bitmap implementation (image_util_bitmap_impl) is used
as a fallback when numpy is not present."""
import base64
from telemetry.util import external_modules
np = external_modules.ImportOptionalModule('numpy')
if np is None:
from telemetry.image_processing import image_util_bitmap_impl
impl = image_util_bitmap_impl
else:
from telemetry.image_processing import image_util_numpy_impl
impl = image_util_numpy_impl
def Channels(image):
"""Number of color channels in the image."""
return impl.Channels(image)
def Width(image):
"""Width of the image."""
return impl.Width(image)
def Height(image):
"""Height of the image."""
return impl.Height(image)
def Pixels(image):
"""Flat RGB pixel array of the image."""
return impl.Pixels(image)
def GetPixelColor(image, x, y):
"""Returns a RgbaColor for the pixel at (x, y)."""
return impl.GetPixelColor(image, x, y)
def WritePngFile(image, path):
"""Write an image to a PNG file.
Args:
image: an image object.
path: The path to the PNG file. Must end in 'png' or an
AssertionError will be raised."""
assert path.endswith('png')
return impl.WritePngFile(image, path)
def FromRGBPixels(width, height, pixels, bpp=3):
"""Create an image from an array of rgb pixels.
Ignores alpha channel if present.
Args:
width, height: int, the width and height of the image.
pixels: The flat array of pixels in the form of [r,g,b[,a],r,g,b[,a],...]
bpp: 3 for RGB, 4 for RGBA."""
return impl.FromRGBPixels(width, height, pixels, bpp)
def FromPng(png_data):
"""Create an image from raw PNG data."""
return impl.FromPng(png_data)
def FromPngFile(path):
"""Create an image from a PNG file.
Args:
path: The path to the PNG file."""
return impl.FromPngFile(path)
def FromBase64Png(base64_png):
"""Create an image from raw PNG data encoded in base64."""
return FromPng(base64.b64decode(base64_png))
def AreEqual(image1, image2, tolerance=0, likely_equal=True):
"""Determines whether two images are identical within a given tolerance.
Setting likely_equal to False enables short-circuit equality testing, which
is about 2-3x slower for equal images, but can be image height times faster
if the images are not equal."""
return impl.AreEqual(image1, image2, tolerance, likely_equal)
def Diff(image1, image2):
"""Returns a new image that represents the difference between this image
and another image."""
return impl.Diff(image1, image2)
def GetBoundingBox(image, color, tolerance=0):
"""Finds the minimum box surrounding all occurrences of bgr |color|.
Ignores the alpha channel.
Args:
    color: RgbaColor, bounding box color.
tolerance: int, per-channel tolerance for the bounding box color.
Returns:
(top, left, width, height), match_count"""
return impl.GetBoundingBox(image, color, tolerance)
def Crop(image, left, top, width, height):
"""Crops the current image down to the specified box."""
return impl.Crop(image, left, top, width, height)
def GetColorHistogram(image, ignore_color=None, tolerance=0):
"""Computes a histogram of the pixel colors in this image.
Args:
ignore_color: An RgbaColor to exclude from the bucket counts.
tolerance: A tolerance for the ignore_color.
Returns:
A ColorHistogram namedtuple with 256 integers in each field: r, g, and b."""
return impl.GetColorHistogram(image, ignore_color, tolerance)
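# ---------------------------------------------------------------------------
# Illustrative usage sketch (not part of telemetry): build a tiny 2x2 RGB
# image from raw pixel values, then inspect and crop it. Assumes either numpy
# or the bitmap fallback implementation is importable.
if __name__ == '__main__':
  _pixels = [255, 0, 0, 0, 255, 0,
             0, 0, 255, 255, 255, 255]
  _image = FromRGBPixels(2, 2, _pixels, bpp=3)
  assert Channels(_image) >= 3 and Width(_image) == 2 and Height(_image) == 2
  print(GetPixelColor(_image, 0, 0))  # RgbaColor for the top-left red pixel
  _cropped = Crop(_image, 0, 0, 1, 1)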
|
guorendong/iridium-browser-ubuntu
|
tools/telemetry/telemetry/image_processing/image_util.py
|
Python
|
bsd-3-clause
| 3,897
|
from __future__ import unicode_literals
from django.db import models
from django.db.models import Max, F
from django.utils.translation import pgettext_lazy
from versatileimagefield.fields import VersatileImageField, PPOIField
from .base import Product
class ImageManager(models.Manager):
def first(self):
try:
return self.get_queryset()[0]
except IndexError:
pass
class ProductImage(models.Model):
product = models.ForeignKey(Product, related_name='images')
image = VersatileImageField(
upload_to='products', ppoi_field='ppoi', blank=False)
ppoi = PPOIField()
alt = models.CharField(
pgettext_lazy('Product image field', 'short description'),
max_length=128, blank=True)
order = models.PositiveIntegerField(editable=False)
objects = ImageManager()
class Meta:
ordering = ['order']
app_label = 'product'
def get_ordering_queryset(self):
return self.product.images.all()
def save(self, *args, **kwargs):
if self.order is None:
qs = self.get_ordering_queryset()
existing_max = qs.aggregate(Max('order'))
existing_max = existing_max.get('order__max')
self.order = 0 if existing_max is None else existing_max + 1
super(ProductImage, self).save(*args, **kwargs)
def delete(self, *args, **kwargs):
qs = self.get_ordering_queryset()
qs.filter(order__gt=self.order).update(order=F('order') - 1)
super(ProductImage, self).delete(*args, **kwargs)
class VariantImage(models.Model):
variant = models.ForeignKey('ProductVariant',
related_name='variant_images')
image = models.ForeignKey(ProductImage, related_name='variant_images')
|
rodrigozn/CW-Shop
|
saleor/product/models/images.py
|
Python
|
bsd-3-clause
| 1,791
|
#!/usr/bin/env python
from django.conf import settings
from django.test import TransactionTestCase
from django.test.client import Client
from django.utils import simplejson as json
from panda.models import SearchSubscription, UserProxy
from panda.tests import utils
class TestAPISearchSubscriptions(TransactionTestCase):
fixtures = ['init_panda.json', 'test_users.json']
def setUp(self):
settings.CELERY_ALWAYS_EAGER = True
utils.setup_test_solr()
self.user = utils.get_panda_user()
self.dataset = utils.get_test_dataset(self.user)
self.upload = utils.get_test_data_upload(self.user, self.dataset)
self.dataset.import_data(self.user, self.upload, 0)
self.auth_headers = utils.get_auth_headers()
self.client = Client()
def test_get(self):
sub = SearchSubscription.objects.create(
user=self.user,
dataset=self.dataset,
query='*'
)
response = self.client.get('/api/1.0/search_subscription/%i/' % sub.id, **self.auth_headers)
self.assertEqual(response.status_code, 200)
def test_get_not_user(self):
sub = SearchSubscription.objects.create(
user=self.user,
dataset=self.dataset,
query='*'
)
response = self.client.get('/api/1.0/search_subscription/%i/' % sub.id)
self.assertEqual(response.status_code, 401)
def test_get_unauthorized(self):
UserProxy.objects.create_user('nobody@nobody.com', 'nobody@nobody.com', 'password')
sub = SearchSubscription.objects.create(
user=self.user,
dataset=self.dataset,
query='*'
)
response = self.client.get('/api/1.0/search_subscription/%i/' % sub.id, **utils.get_auth_headers('nobody@nobody.com'))
self.assertEqual(response.status_code, 404)
def test_list(self):
SearchSubscription.objects.create(
user=self.user,
dataset=self.dataset,
query='*'
)
response = self.client.get('/api/1.0/search_subscription/', data={ 'limit': 5 }, **self.auth_headers)
self.assertEqual(response.status_code, 200)
body = json.loads(response.content)
self.assertEqual(len(body['objects']), 1)
self.assertEqual(body['meta']['total_count'], 1)
self.assertEqual(body['meta']['limit'], 5)
self.assertEqual(body['meta']['offset'], 0)
self.assertEqual(body['meta']['next'], None)
self.assertEqual(body['meta']['previous'], None)
def test_list_unauthorized(self):
UserProxy.objects.create_user('nobody@nobody.com', 'nobody@nobody.com', 'password')
response = self.client.get('/api/1.0/search_subscription/', data={ 'limit': 5 }, **utils.get_auth_headers('nobody@nobody.com'))
self.assertEqual(response.status_code, 200)
body = json.loads(response.content)
self.assertEqual(len(body['objects']), 0)
self.assertEqual(body['meta']['total_count'], 0)
self.assertEqual(body['meta']['limit'], 5)
self.assertEqual(body['meta']['offset'], 0)
self.assertEqual(body['meta']['next'], None)
self.assertEqual(body['meta']['previous'], None)
def test_update(self):
sub = SearchSubscription.objects.create(
user=self.user,
dataset=self.dataset,
query='*'
)
response = self.client.put('/api/1.0/search_subscription/%i/' % sub.id, data=json.dumps({}), content_type='application/json', **self.auth_headers)
self.assertEqual(response.status_code, 405)
def test_update_unauthorized(self):
UserProxy.objects.create_user('nobody@nobody.com', 'nobody@nobody.com', 'password')
sub = SearchSubscription.objects.create(
user=self.user,
dataset=self.dataset,
query='*'
)
response = self.client.put('/api/1.0/search_subscription/%i/' % sub.id, data=json.dumps({}), content_type='application/json', **utils.get_auth_headers('nobody@nobody.com'))
        # PUT is not an allowed method on this resource, so the update
        # attempt is rejected with a 405 before any authorization check,
        # matching the behaviour verified in test_update above.
        self.assertEqual(response.status_code, 405)
def test_delete(self):
sub = SearchSubscription.objects.create(
user=self.user,
dataset=self.dataset,
query='*'
)
response = self.client.delete('/api/1.0/search_subscription/%i/' % sub.id, **self.auth_headers)
self.assertEqual(response.status_code, 204)
response = self.client.get('/api/1.0/search_subscription/%i/' % sub.id, **self.auth_headers)
self.assertEqual(response.status_code, 404)
with self.assertRaises(SearchSubscription.DoesNotExist):
SearchSubscription.objects.get(id=sub.id)
def test_delete_unauthorized(self):
UserProxy.objects.create_user('nobody@nobody.com', 'nobody@nobody.com', 'password')
sub = SearchSubscription.objects.create(
user=self.user,
dataset=self.dataset,
query='*'
)
response = self.client.delete('/api/1.0/search_subscription/%i/' % sub.id, **utils.get_auth_headers('nobody@nobody.com'))
self.assertEqual(response.status_code, 404)
response = self.client.get('/api/1.0/search_subscription/%i/' % sub.id, **self.auth_headers)
self.assertEqual(response.status_code, 200)
# Verify no exception is raised
SearchSubscription.objects.get(id=sub.id)
|
ibrahimcesar/panda
|
panda/tests/test_api_search_subscriptions.py
|
Python
|
mit
| 5,736
|
from __future__ import print_function
import os
import sys
import subprocess
import traceback
print('Using python: {prefix}'.format(prefix=sys.prefix))
repo_tag = os.environ.get('APPVEYOR_REPO_TAG', 'false')
tag_name = os.environ.get('APPVEYOR_REPO_TAG_NAME', '')
token = os.environ.get('PYPI_PASS', 'NOT_A_TOKEN')
if repo_tag == 'true' and tag_name.startswith('v'):
print('Uploading to PyPI')
try:
cmd = ' '.join(['twine', 'upload', '-u', 'mcflugen', '-p', token,
'dist/*'])
resp = subprocess.check_output(cmd, shell=True)
except subprocess.CalledProcessError:
traceback.print_exc()
else:
print('OK')
else:
print('Not a tagged release. Not deploying to PyPI.')
|
SiccarPoint/landlab
|
.ci/appveyor/pypi_upload.py
|
Python
|
mit
| 756
|
# test builtin print function, using file= argument
import sys
try:
sys.stdout
except AttributeError:
print("SKIP")
raise SystemExit
print(file=sys.stdout)
print("test", file=sys.stdout)
try:
print(file=1)
except (AttributeError, OSError): # CPython and uPy differ in error message
print("Error")
|
tobbad/micropython
|
tests/io/builtin_print_file.py
|
Python
|
mit
| 322
|
"""C and C++ analysis using a clang compiler plugin
This plugin handles structural analysis of C++ code by building the project
under clang while interposing a custom compiler plugin that dumps out
structural data to CSV files during compilation. This is then pulled into
elasticsearch as a post-processing phase.
"""
from dxr.plugins import Plugin, filters_from_namespace, refs_from_namespace
from dxr.plugins.clang import direct, filters, menus
from dxr.plugins.clang.indexers import TreeToIndex, mappings
plugin = Plugin(filters=filters_from_namespace(filters.__dict__),
tree_to_index=TreeToIndex,
mappings=mappings,
direct_searchers=direct.searchers,
refs=refs_from_namespace(menus.__dict__))
|
pombredanne/dxr
|
dxr/plugins/clang/__init__.py
|
Python
|
mit
| 765
|
"""If you have Ned Batchelder's coverage_ module installed, you may activate a
coverage report with the ``--with-coverage`` switch or NOSE_WITH_COVERAGE
environment variable. The coverage report will cover any python source module
imported after the start of the test run, excluding modules that match
testMatch. If you want to include those modules too, use the ``--cover-tests``
switch, or set the NOSE_COVER_TESTS environment variable to a true value. To
restrict the coverage report to modules from a particular package or packages,
use the ``--cover-package`` switch or the NOSE_COVER_PACKAGE environment
variable.
.. _coverage: http://www.nedbatchelder.com/code/modules/coverage.html
"""
import logging
import re
import sys
import io
from nose.plugins.base import Plugin
from nose.util import src, tolist
log = logging.getLogger(__name__)
class Coverage(Plugin):
"""
Activate a coverage report using Ned Batchelder's coverage module.
"""
coverTests = False
coverPackages = None
coverInstance = None
coverErase = False
coverMinPercentage = None
score = 200
status = {}
def options(self, parser, env):
"""
Add options to command line.
"""
super(Coverage, self).options(parser, env)
parser.add_option("--cover-package", action="append",
default=env.get('NOSE_COVER_PACKAGE'),
metavar="PACKAGE",
dest="cover_packages",
help="Restrict coverage output to selected packages "
"[NOSE_COVER_PACKAGE]")
parser.add_option("--cover-erase", action="store_true",
default=env.get('NOSE_COVER_ERASE'),
dest="cover_erase",
help="Erase previously collected coverage "
"statistics before run")
parser.add_option("--cover-tests", action="store_true",
dest="cover_tests",
default=env.get('NOSE_COVER_TESTS'),
help="Include test modules in coverage report "
"[NOSE_COVER_TESTS]")
parser.add_option("--cover-min-percentage", action="store",
dest="cover_min_percentage",
default=env.get('NOSE_COVER_MIN_PERCENTAGE'),
help="Minimum percentage of coverage for tests "
"to pass [NOSE_COVER_MIN_PERCENTAGE]")
parser.add_option("--cover-inclusive", action="store_true",
dest="cover_inclusive",
default=env.get('NOSE_COVER_INCLUSIVE'),
help="Include all python files under working "
"directory in coverage report. Useful for "
"discovering holes in test coverage if not all "
"files are imported by the test suite. "
"[NOSE_COVER_INCLUSIVE]")
parser.add_option("--cover-html", action="store_true",
default=env.get('NOSE_COVER_HTML'),
dest='cover_html',
help="Produce HTML coverage information")
parser.add_option('--cover-html-dir', action='store',
default=env.get('NOSE_COVER_HTML_DIR', 'cover'),
dest='cover_html_dir',
metavar='DIR',
help='Produce HTML coverage information in dir')
parser.add_option("--cover-branches", action="store_true",
default=env.get('NOSE_COVER_BRANCHES'),
dest="cover_branches",
help="Include branch coverage in coverage report "
"[NOSE_COVER_BRANCHES]")
parser.add_option("--cover-xml", action="store_true",
default=env.get('NOSE_COVER_XML'),
dest="cover_xml",
help="Produce XML coverage information")
parser.add_option("--cover-xml-file", action="store",
default=env.get('NOSE_COVER_XML_FILE', 'coverage.xml'),
dest="cover_xml_file",
metavar="FILE",
help="Produce XML coverage information in file")
def configure(self, options, conf):
"""
Configure plugin.
"""
try:
self.status.pop('active')
except KeyError:
pass
super(Coverage, self).configure(options, conf)
if self.enabled:
try:
import coverage
if not hasattr(coverage, 'coverage'):
raise ImportError("Unable to import coverage module")
except ImportError:
log.error("Coverage not available: "
"unable to import coverage module")
self.enabled = False
return
self.conf = conf
self.coverErase = options.cover_erase
self.coverTests = options.cover_tests
self.coverPackages = []
if options.cover_packages:
if isinstance(options.cover_packages, (list, tuple)):
cover_packages = options.cover_packages
else:
cover_packages = [options.cover_packages]
for pkgs in [tolist(x) for x in cover_packages]:
self.coverPackages.extend(pkgs)
self.coverInclusive = options.cover_inclusive
if self.coverPackages:
log.info("Coverage report will include only packages: %s",
self.coverPackages)
self.coverHtmlDir = None
if options.cover_html:
self.coverHtmlDir = options.cover_html_dir
log.debug('Will put HTML coverage report in %s', self.coverHtmlDir)
self.coverBranches = options.cover_branches
self.coverXmlFile = None
if options.cover_min_percentage:
self.coverMinPercentage = int(options.cover_min_percentage.rstrip('%'))
if options.cover_xml:
self.coverXmlFile = options.cover_xml_file
log.debug('Will put XML coverage report in %s', self.coverXmlFile)
if self.enabled:
self.status['active'] = True
self.coverInstance = coverage.coverage(auto_data=False,
branch=self.coverBranches, data_suffix=conf.worker,
source=self.coverPackages)
self.coverInstance._warn_no_data = False
self.coverInstance.is_worker = conf.worker
self.coverInstance.exclude('#pragma[: ]+[nN][oO] [cC][oO][vV][eE][rR]')
log.debug("Coverage begin")
self.skipModules = list(sys.modules.keys())[:]
if self.coverErase:
log.debug("Clearing previously collected coverage statistics")
self.coverInstance.combine()
self.coverInstance.erase()
if not self.coverInstance.is_worker:
self.coverInstance.load()
self.coverInstance.start()
def beforeTest(self, *args, **kwargs):
"""
Begin recording coverage information.
"""
if self.coverInstance.is_worker:
self.coverInstance.load()
self.coverInstance.start()
def afterTest(self, *args, **kwargs):
"""
Stop recording coverage information.
"""
if self.coverInstance.is_worker:
self.coverInstance.stop()
self.coverInstance.save()
def report(self, stream):
"""
Output code coverage report.
"""
log.debug("Coverage report")
self.coverInstance.stop()
self.coverInstance.combine()
self.coverInstance.save()
modules = [module
for name, module in list(sys.modules.items())
if self.wantModuleCoverage(name, module)]
log.debug("Coverage report will cover modules: %s", modules)
self.coverInstance.report(modules, file=stream)
import coverage
if self.coverHtmlDir:
log.debug("Generating HTML coverage report")
try:
self.coverInstance.html_report(modules, self.coverHtmlDir)
except coverage.misc.CoverageException as e:
log.warning("Failed to generate HTML report: %s" % str(e))
if self.coverXmlFile:
log.debug("Generating XML coverage report")
try:
self.coverInstance.xml_report(modules, self.coverXmlFile)
except coverage.misc.CoverageException as e:
log.warning("Failed to generate XML report: %s" % str(e))
# make sure we have minimum required coverage
if self.coverMinPercentage:
f = io.StringIO()
self.coverInstance.report(modules, file=f)
            # Extract the TOTAL percentage from coverage's text report; the
            # two patterns match the summary line of multi-package and
            # single-package reports respectively.
            multiPackageRe = (r'-------\s\w+\s+\d+\s+\d+(?:\s+\d+\s+\d+)?'
                              r'\s+(\d+)%\s+\d*\s{0,1}$')
            singlePackageRe = (r'-------\s[\w./]+\s+\d+\s+\d+(?:\s+\d+\s+\d+)?'
                               r'\s+(\d+)%(?:\s+[-\d, ]+)\s{0,1}$')
m = re.search(multiPackageRe, f.getvalue())
if m is None:
m = re.search(singlePackageRe, f.getvalue())
if m:
percentage = int(m.groups()[0])
if percentage < self.coverMinPercentage:
log.error('TOTAL Coverage did not reach minimum '
'required: %d%%' % self.coverMinPercentage)
sys.exit(1)
else:
log.error("No total percentage was found in coverage output, "
"something went wrong.")
def wantModuleCoverage(self, name, module):
if not hasattr(module, '__file__'):
log.debug("no coverage of %s: no __file__", name)
return False
module_file = src(module.__file__)
if not module_file or not module_file.endswith('.py'):
log.debug("no coverage of %s: not a python file", name)
return False
if self.coverPackages:
for package in self.coverPackages:
if (re.findall(r'^%s\b' % re.escape(package), name)
and (self.coverTests
or not self.conf.testMatch.search(name))):
log.debug("coverage for %s", name)
return True
if name in self.skipModules:
log.debug("no coverage for %s: loaded before coverage start",
name)
return False
if self.conf.testMatch.search(name) and not self.coverTests:
log.debug("no coverage for %s: is a test", name)
return False
# accept any package that passed the previous tests, unless
# coverPackages is on -- in that case, if we wanted this
# module, we would have already returned True
return not self.coverPackages
def wantFile(self, file, package=None):
"""If inclusive coverage enabled, return true for all source files
in wanted packages.
"""
if self.coverInclusive:
if file.endswith(".py"):
if package and self.coverPackages:
for want in self.coverPackages:
if package.startswith(want):
return True
else:
return True
return None
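# ---------------------------------------------------------------------------
# Usage sketch (illustrative only): Coverage is a built-in nose plugin, so it
# is normally activated from the command line; 'mypkg' is a placeholder.
#
#     nosetests --with-coverage --cover-package=mypkg --cover-erase
#
# or programmatically:
#
#     import nose
#     nose.main(argv=['nosetests', '--with-coverage', '--cover-package=mypkg'])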
|
Reagankm/KnockKnock
|
venv/lib/python3.4/site-packages/nose/plugins/cover.py
|
Python
|
gpl-2.0
| 11,677
|
# The contents of this file are subject to the BitTorrent Open Source License
# Version 1.1 (the License). You may not copy or use this file, in either
# source code or executable form, except in compliance with the License. You
# may obtain a copy of the License at http://www.bittorrent.com/license/.
#
# Software distributed under the License is distributed on an AS IS basis,
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
# for the specific language governing rights and limitations under the
# License.
# Written by Bram Cohen and Greg Hazel
import array
import random
import itertools
def resolve_typecode(n):
if n < 32768:
return 'h'
return 'l'
class PieceBuckets(object):
"""A PieceBuckets object is an array of arrays. ith bucket contains
pieces that have i known instances within the network. Pieces
within each bucket are randomly ordered."""
def __init__(self, typecode):
self.typecode = typecode
# [[piece]]
self.buckets = []
# {piece: (bucket, bucketpos)}
self.place_in_buckets = {}
def get_position(self, piece): # returns which bucket piece is in.
return self.place_in_buckets[piece][0]
def __contains__(self, piece):
return piece in self.place_in_buckets
def add(self, piece, bucketindex):
assert not self.place_in_buckets.has_key(piece)
while len(self.buckets) <= bucketindex:
self.buckets.append(array.array(self.typecode))
bucket = self.buckets[bucketindex]
# randomly swap piece with piece already in bucket...
newspot = random.randrange(len(bucket) + 1)
if newspot == len(bucket):
bucket.append(piece)
else:
tomove = bucket[newspot]
self.place_in_buckets[tomove] = (bucketindex, len(bucket))
bucket.append(tomove)
bucket[newspot] = piece
self.place_in_buckets[piece] = (bucketindex, newspot)
def remove(self, piece):
bucketindex, bucketpos = self.place_in_buckets.pop(piece)
bucket = self.buckets[bucketindex]
tomove = bucket[-1]
if tomove != piece:
bucket[bucketpos] = tomove
self.place_in_buckets[tomove] = (bucketindex, bucketpos)
del bucket[-1]
while len(self.buckets) > 0 and len(self.buckets[-1]) == 0:
del self.buckets[-1]
return bucketindex
# to be removed
def bump(self, piece):
bucketindex, bucketpos = self.place_in_buckets[piece]
bucket = self.buckets[bucketindex]
tomove = bucket[-1]
if tomove != piece:
bucket[bucketpos] = tomove
self.place_in_buckets[tomove] = (bucketindex, bucketpos)
bucket[-1] = piece
self.place_in_buckets[piece] = (bucketindex, len(bucket)-1)
def prepend_bucket(self):
        # it's possible we had everything to begin with
if len(self.buckets) == 0:
return
self.buckets.insert(0, array.array(self.typecode))
# bleh.
for piece in self.place_in_buckets:
index, pos = self.place_in_buckets[piece]
self.place_in_buckets[piece] = (index + 1, pos)
def popleft_bucket(self):
        # it's possible we had everything to begin with
if len(self.buckets) == 0:
return
self.buckets.pop(0)
# bleh.
for piece in self.place_in_buckets:
index, pos = self.place_in_buckets[piece]
self.place_in_buckets[piece] = (index - 1, pos)
class PiecePicker(object):
def __init__(self, config, numpieces, not_have):
self.config = config
self.numpieces = numpieces
self.typecode = resolve_typecode(numpieces)
self.piece_bucketss = [PieceBuckets(self.typecode)]
self.scrambled = array.array(self.typecode)
self.numgot = self.numpieces
for i in not_have:
self.scrambled.append(i)
self.piece_bucketss[0].add(i, 0)
self.numgot -= 1
random.shuffle(self.scrambled)
def get_distributed_copies(self):
base = 0
for i, bucket in enumerate(self.piece_bucketss[0].buckets):
l = len(bucket)
if l == 0:
# the whole bucket is full. keep going
continue
base = i + 1
# remove the fractional size of this bucket, and stop
base -= (float(l) / float(self.numpieces))
break
return base
def set_priority(self, pieces, priority):
while len(self.piece_bucketss) <= priority:
self.piece_bucketss.append(PieceBuckets(self.typecode))
for piece in pieces:
for p in self.piece_bucketss:
if piece in p:
self.piece_bucketss[priority].add(piece, p.remove(piece))
break
else:
assert False
def got_have_all(self):
for p in self.piece_bucketss:
p.prepend_bucket()
def got_have(self, piece):
for p in self.piece_bucketss:
if piece in p:
p.add(piece, p.remove(piece) + 1)
return
def lost_have_all(self):
for p in self.piece_bucketss:
p.popleft_bucket()
def lost_have(self, piece):
for p in self.piece_bucketss:
if piece in p:
p.add(piece, p.remove(piece) - 1)
return
def complete(self, piece):
self.numgot += 1
if self.numgot < self.config['rarest_first_cutoff']:
self.scrambled.remove(piece)
else:
self.scrambled = None
for p in self.piece_bucketss:
if piece in p:
p.remove(piece)
break
else:
assert False
def from_behind(self, haves, bans):
for piece_buckets in self.piece_bucketss:
for i in xrange(len(piece_buckets.buckets) - 1, 0, -1):
for j in piece_buckets.buckets[i]:
if haves[j] and j not in bans:
return j
return None
def next(self, haves, tiebreaks, bans, suggests):
"""returns next piece to download.
@param haves: set of pieces the remote peer has.
@param tiebreaks: pieces with active (started) requests
@param bans: pieces not to pick.
@param suggests: set of suggestions made by the remote peer.
"""
        # the first few pieces are picked at random rather than rarest-first
if self.numgot < self.config['rarest_first_cutoff']:
for i in itertools.chain(tiebreaks, self.scrambled):
if haves[i] and i not in bans:
return i
return None
# from highest priority to lowest priority piece buckets...
for k in xrange(len(self.piece_bucketss) - 1, -1, -1):
piece_buckets = self.piece_bucketss[k]
# Of the same priority, a suggestion is taken first.
for s in suggests:
if s not in bans and haves[s] and s in piece_buckets:
return s
bestnum = None
best = None
rarity_of_started = [(piece_buckets.get_position(i), i)
for i in tiebreaks if i not in bans and haves[i] and
i in piece_buckets]
if rarity_of_started:
bestnum = min(rarity_of_started)[0] # smallest bucket index
best = random.choice([j for (i, j) in rarity_of_started
if i == bestnum]) # random pick of those in smallest bkt
for i in xrange(1, len(piece_buckets.buckets)):
if bestnum == i: # if best of started is also rarest...
return best
for j in piece_buckets.buckets[i]:
if haves[j] and j not in bans:
return j # return first found.
return None
# to be removed
def bump(self, piece):
for p in self.piece_bucketss:
if piece in p:
p.bump(piece)
break
else:
assert False
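# ---------------------------------------------------------------------------
# Illustrative sketch (not part of BitTorrent): exercising the PieceBuckets
# invariant that bucket i holds the pieces with i known instances.
if __name__ == '__main__':
    _pb = PieceBuckets(resolve_typecode(100))
    _pb.add(3, 0)                  # piece 3: no known instances yet
    _pb.add(3, _pb.remove(3) + 1)  # a peer announces piece 3 -> bucket 1
    assert 3 in _pb
    assert _pb.get_position(3) == 1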
|
sulaweyo/torrentflux-b4rt-php7
|
html/bin/clients/mainline/BitTorrent/PiecePicker.py
|
Python
|
gpl-2.0
| 8,356
|
'''Parser for gromacs edr'''
from .base import IOHandler
import numpy as np
import xdrlib
import difflib
class QuantityNotAvailable(Exception):
pass
class EdrIO(IOHandler):
'''EDR files store per-frame information for gromacs
trajectories. Examples of properties obtainable from EDR files are::
- temperature
- pressure
- density
- potential energy
- total energy
- etc.
To know which quantities are available in a certain edr file you
can access the feature 'avail quantity'::
>>> datafile('ener.edr').read('avail quantities')
['Temperature', 'Pressure', 'Potential', ...]
To get the per-frame values of a certain quantity, use the
"quantity" feature, passing the quantity as an additional
argument; this returns two arrays, the first holding the times
in ps and the second the corresponding quantity at each time::
>>> time, temp = datafile('ener.edr').read('quantity', 'Temperature')
**Features**
.. method:: read("quantity", quant)
Return an array of times in ps and the corresponding quantities
at that times.
.. method:: read("avail quantities")
Return the available quantities in the file.
.. method:: read("units")
Return a dictionary where the keys are the quantities and
the value are the units in which that quantity is expressed.
.. method:: read("frames")
Return a dictionary where the keys are the quantities and
the value are the units in which that quantity is expressed.
'''
can_read = ['quantity', 'units', 'avail quantities']
can_write = []
def __init__(self, fd):
super(EdrIO, self).__init__(fd)
self.processed = False
def read(self, feature, *args):
self.check_feature(feature, 'read')
if not self.processed:
self.frames = frames = self.process_frames()
self.processed = True
else:
frames = self.frames
if feature == 'quantity':
if not args[0]:
raise Exception('the method read("quantity", arg) requires a quantity to get')
quant = args[0]
if quant not in self.props:
close = difflib.get_close_matches(quant, self.props)
raise QuantityNotAvailable('Quantity %s not available. Close matches: %s'%
(str(quant), str(close)))
i = self.props.index(quant)
ret = []
for f in frames:
ret.append(f[i][0])
return np.array(self.times), np.array(ret)
if feature == 'units':
quant = args[0]
i = self.props.index(quant)
return self.units[i]
if feature == 'avail quantities':
return self.props
def process_frames(self):
f = self.fd.read()
self.up = xdrlib.Unpacker(f)
self.times = []
self.dts = []
self.frames = []
self._unpack_start()
fr = self._unpack_frame()
self.frames.append(fr)
while True:
try:
fr = self._unpack_frame()
except EOFError:
break
self.frames.append(fr)
return self.frames
def _unpack_start(self):
up = self.up
magic = up.unpack_int()
if (magic != -55555):
raise Exception('Format not supported: magic number -55555 not matching')
self.version = up.unpack_int()
# Number of properties
self.nre = up.unpack_int()
self.props = props = []
self.units = units = []
# Strings and units of quantities
for i in range(self.nre):
prop = up.unpack_string()
unit = up.unpack_string()
props.append(prop.decode('utf-8'))
units.append(unit.decode('utf-8'))
def _unpack_eheader(self):
up = self.up
first_real_to_check = -2e10
# Checking the first real for format
first_real = up.unpack_double()
if (first_real != first_real_to_check):
raise Exception('Format not supported, first real not matching.')
magic = up.unpack_int()
if (magic != -7777777):
raise Exception('Format not supported, magic number not matching -7777777')
version = up.unpack_int()
time = up.unpack_double()
self.times.append(time)
        # These two should give us a large int that represents the step number
min = up.unpack_int()
maj = up.unpack_int()
self.nsum = up.unpack_int()
# NSTEPS (again?)
min = up.unpack_int()
maj = up.unpack_int()
# For version 5
dt = up.unpack_double()
self.dts.append(dt)
# Number of properties?
self.nre = up.unpack_int()
dum = up.unpack_int()
nblock = up.unpack_int() + 1
# Block headers:
id = up.unpack_int()
nsubblocks = up.unpack_int()
e_size = up.unpack_int()
#dum = up.unpack_int()
#dum = up.unpack_int()
#up.unpack_int()
def _unpack_frame(self):
# Energies, averages and rmsd
self._unpack_eheader()
frame = []
for i in range(self.nre):
en = self.up.unpack_double()
if self.nsum > 0:
avg = self.up.unpack_double()
rmsd = self.up.unpack_double()
frame.append([en, avg, rmsd])
else:
frame.append([en, en, 0.0])
return frame
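# ---------------------------------------------------------------------------
# Illustrative sketch (not part of chemlab): reading a temperature series
# straight through the handler. 'ener.edr' is a placeholder path; normal use
# goes through chemlab's datafile() as shown in the class docstring.
if __name__ == '__main__':
    with open('ener.edr', 'rb') as fd:
        handler = EdrIO(fd)
        print(handler.read('avail quantities'))
        time, temp = handler.read('quantity', 'Temperature')
        print(time[:5], temp[:5])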
|
chemlab/chemlab
|
chemlab/io/handlers/edr.py
|
Python
|
gpl-3.0
| 5,949
|
# Note: `viewer` is assumed to be provided at runtime by the surrounding
# mviewer API namespace; it is not imported in this module.
def hide_traj_controls():
    """Hide the trajectory playback controls of the current viewer."""
    viewer.traj_controls.hide()
def show_traj_controls():
    """Show the trajectory playback controls of the current viewer."""
    viewer.traj_controls.show()
|
chemlab/chemlab
|
chemlab/mviewer/api/ui.py
|
Python
|
gpl-3.0
| 120
|
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Replica.replication_ip'
db.add_column(u'smart_manager_replica', 'replication_ip',
self.gf('django.db.models.fields.CharField')(max_length=4096, null=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'Replica.replication_ip'
db.delete_column(u'smart_manager_replica', 'replication_ip')
models = {
'smart_manager.cpumetric': {
'Meta': {'object_name': 'CPUMetric'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'idle': ('django.db.models.fields.IntegerField', [], {}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
'smode': ('django.db.models.fields.IntegerField', [], {}),
'ts': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'umode': ('django.db.models.fields.IntegerField', [], {}),
'umode_nice': ('django.db.models.fields.IntegerField', [], {})
},
'smart_manager.diskstat': {
'Meta': {'object_name': 'DiskStat'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ios_progress': ('django.db.models.fields.FloatField', [], {}),
'ms_ios': ('django.db.models.fields.FloatField', [], {}),
'ms_reading': ('django.db.models.fields.FloatField', [], {}),
'ms_writing': ('django.db.models.fields.FloatField', [], {}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'reads_completed': ('django.db.models.fields.FloatField', [], {}),
'reads_merged': ('django.db.models.fields.FloatField', [], {}),
'sectors_read': ('django.db.models.fields.FloatField', [], {}),
'sectors_written': ('django.db.models.fields.FloatField', [], {}),
'ts': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}),
'weighted_ios': ('django.db.models.fields.FloatField', [], {}),
'writes_completed': ('django.db.models.fields.FloatField', [], {}),
'writes_merged': ('django.db.models.fields.FloatField', [], {})
},
'smart_manager.loadavg': {
'Meta': {'object_name': 'LoadAvg'},
'active_threads': ('django.db.models.fields.IntegerField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'idle_seconds': ('django.db.models.fields.IntegerField', [], {}),
'latest_pid': ('django.db.models.fields.IntegerField', [], {}),
'load_1': ('django.db.models.fields.FloatField', [], {}),
'load_15': ('django.db.models.fields.FloatField', [], {}),
'load_5': ('django.db.models.fields.FloatField', [], {}),
'total_threads': ('django.db.models.fields.IntegerField', [], {}),
'ts': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_index': 'True', 'blank': 'True'})
},
'smart_manager.meminfo': {
'Meta': {'object_name': 'MemInfo'},
'active': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'buffers': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'cached': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'dirty': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'free': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'inactive': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'swap_free': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'swap_total': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'total': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'ts': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_index': 'True', 'blank': 'True'})
},
'smart_manager.netstat': {
'Meta': {'object_name': 'NetStat'},
'carrier': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'colls': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'compressed_rx': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'compressed_tx': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'device': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'drop_rx': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'drop_tx': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'errs_rx': ('django.db.models.fields.FloatField', [], {}),
'errs_tx': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'fifo_rx': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'fifo_tx': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'frame': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'kb_rx': ('django.db.models.fields.FloatField', [], {}),
'kb_tx': ('django.db.models.fields.FloatField', [], {}),
'multicast_rx': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'packets_rx': ('django.db.models.fields.FloatField', [], {}),
'packets_tx': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'ts': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'})
},
'smart_manager.nfsdcalldistribution': {
'Meta': {'object_name': 'NFSDCallDistribution'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'num_commit': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'num_create': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'num_lookup': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'num_read': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'num_remove': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'num_write': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'rid': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['smart_manager.SProbe']"}),
'sum_read': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'sum_write': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'ts': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'})
},
'smart_manager.nfsdclientdistribution': {
'Meta': {'object_name': 'NFSDClientDistribution'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip': ('django.db.models.fields.CharField', [], {'max_length': '15'}),
'num_commit': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'num_create': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'num_lookup': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'num_read': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'num_remove': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'num_write': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'rid': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['smart_manager.SProbe']"}),
'sum_read': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'sum_write': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'ts': ('django.db.models.fields.DateTimeField', [], {})
},
'smart_manager.nfsdshareclientdistribution': {
'Meta': {'object_name': 'NFSDShareClientDistribution'},
'client': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'num_commit': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'num_create': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'num_lookup': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'num_read': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'num_remove': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'num_write': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'rid': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['smart_manager.SProbe']"}),
'share': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'sum_read': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'sum_write': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'ts': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'})
},
'smart_manager.nfsdsharedistribution': {
'Meta': {'object_name': 'NFSDShareDistribution'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'num_commit': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'num_create': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'num_lookup': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'num_read': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'num_remove': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'num_write': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'rid': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['smart_manager.SProbe']"}),
'share': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'sum_read': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'sum_write': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'ts': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'})
},
'smart_manager.nfsduidgiddistribution': {
'Meta': {'object_name': 'NFSDUidGidDistribution'},
'client': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'gid': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'num_commit': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'num_create': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'num_lookup': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'num_read': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'num_remove': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'num_write': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'rid': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['smart_manager.SProbe']"}),
'share': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'sum_read': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'sum_write': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'ts': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}),
'uid': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'smart_manager.poolusage': {
'Meta': {'object_name': 'PoolUsage'},
'count': ('django.db.models.fields.BigIntegerField', [], {'default': '1'}),
'free': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'pool': ('django.db.models.fields.CharField', [], {'max_length': '4096'}),
'reclaimable': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'ts': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_index': 'True', 'blank': 'True'})
},
'smart_manager.receivetrail': {
'Meta': {'object_name': 'ReceiveTrail'},
'end_ts': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
'error': ('django.db.models.fields.CharField', [], {'max_length': '4096', 'null': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'kb_received': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'receive_failed': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'receive_pending': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'receive_succeeded': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'rshare': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['smart_manager.ReplicaShare']"}),
'snap_name': ('django.db.models.fields.CharField', [], {'max_length': '1024'}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '10'})
},
'smart_manager.replica': {
'Meta': {'object_name': 'Replica'},
'appliance': ('django.db.models.fields.CharField', [], {'max_length': '4096'}),
'crontab': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'data_port': ('django.db.models.fields.IntegerField', [], {'default': '10002'}),
'dpool': ('django.db.models.fields.CharField', [], {'max_length': '4096'}),
'dshare': ('django.db.models.fields.CharField', [], {'max_length': '4096', 'null': 'True'}),
'enabled': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'meta_port': ('django.db.models.fields.IntegerField', [], {'default': '10003'}),
'pool': ('django.db.models.fields.CharField', [], {'max_length': '4096'}),
'replication_ip': ('django.db.models.fields.CharField', [], {'max_length': '4096', 'null': 'True'}),
'share': ('django.db.models.fields.CharField', [], {'max_length': '4096'}),
'task_name': ('django.db.models.fields.CharField', [], {'max_length': '1024'}),
'ts': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'})
},
'smart_manager.replicashare': {
'Meta': {'object_name': 'ReplicaShare'},
'appliance': ('django.db.models.fields.CharField', [], {'max_length': '4096'}),
'data_port': ('django.db.models.fields.IntegerField', [], {'default': '10002'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'meta_port': ('django.db.models.fields.IntegerField', [], {'default': '10003'}),
'pool': ('django.db.models.fields.CharField', [], {'max_length': '4096'}),
'share': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '4096'}),
'src_share': ('django.db.models.fields.CharField', [], {'max_length': '4096', 'null': 'True'}),
'ts': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'})
},
'smart_manager.replicatrail': {
'Meta': {'object_name': 'ReplicaTrail'},
'end_ts': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
'error': ('django.db.models.fields.CharField', [], {'max_length': '4096', 'null': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'kb_sent': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'replica': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['smart_manager.Replica']"}),
'send_failed': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'send_pending': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'send_succeeded': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'snap_name': ('django.db.models.fields.CharField', [], {'max_length': '1024'}),
'snapshot_created': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'snapshot_failed': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '10'})
},
'smart_manager.service': {
'Meta': {'object_name': 'Service'},
'config': ('django.db.models.fields.CharField', [], {'max_length': '8192', 'null': 'True'}),
'display_name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '24'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '24'})
},
'smart_manager.servicestatus': {
'Meta': {'object_name': 'ServiceStatus'},
'count': ('django.db.models.fields.BigIntegerField', [], {'default': '1'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'service': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['smart_manager.Service']"}),
'status': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'ts': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_index': 'True', 'blank': 'True'})
},
'smart_manager.shareusage': {
'Meta': {'object_name': 'ShareUsage'},
'count': ('django.db.models.fields.BigIntegerField', [], {'default': '1'}),
'e_usage': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '4096'}),
'r_usage': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'ts': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_index': 'True', 'blank': 'True'})
},
'smart_manager.sprobe': {
'Meta': {'object_name': 'SProbe'},
'display_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}),
'end': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'smart': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'start': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'state': ('django.db.models.fields.CharField', [], {'max_length': '7'})
},
'smart_manager.task': {
'Meta': {'object_name': 'Task'},
'end': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'start': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'state': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'task_def': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['smart_manager.TaskDefinition']"})
},
'smart_manager.taskdefinition': {
'Meta': {'object_name': 'TaskDefinition'},
'crontab': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'json_meta': ('django.db.models.fields.CharField', [], {'max_length': '8192'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'task_type': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'smart_manager.vmstat': {
'Meta': {'object_name': 'VmStat'},
'free_pages': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ts': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_index': 'True', 'blank': 'True'})
}
}
complete_apps = ['smart_manager']
|
schakrava/rockstor-core
|
src/rockstor/smart_manager/south_migrations/0007_auto__add_field_replica_replication_ip.py
|
Python
|
gpl-3.0
| 22,078
|
from datetime import datetime, timedelta
from webdriver.transport import Response
from tests.support.asserts import assert_error, assert_success
from tests.support.helpers import clear_all_cookies
def add_cookie(session, cookie):
return session.transport.send(
"POST", "session/{session_id}/cookie".format(**vars(session)),
{"cookie": cookie})
def test_null_parameter_value(session, http):
path = "/session/{session_id}/cookie".format(**vars(session))
with http.post(path, None) as response:
assert_error(Response.from_http(response), "invalid argument")
def test_null_response_value(session, url):
new_cookie = {
"name": "hello",
"value": "world",
}
session.url = url("/common/blank.html")
clear_all_cookies(session)
response = add_cookie(session, new_cookie)
value = assert_success(response)
assert value is None
def test_no_browsing_context(session, closed_window):
new_cookie = {
"name": "hello",
"value": "world",
}
response = add_cookie(session, new_cookie)
assert_error(response, "no such window")
def test_add_domain_cookie(session, url, server_config):
new_cookie = {
"name": "hello",
"value": "world",
"domain": server_config["browser_host"],
"path": "/",
"httpOnly": False,
"secure": False
}
session.url = url("/common/blank.html")
clear_all_cookies(session)
result = add_cookie(session, new_cookie)
assert_success(result)
cookie = session.cookies("hello")
assert "domain" in cookie
assert isinstance(cookie["domain"], basestring)
assert "name" in cookie
assert isinstance(cookie["name"], basestring)
assert "value" in cookie
assert isinstance(cookie["value"], basestring)
assert cookie["name"] == "hello"
assert cookie["value"] == "world"
assert cookie["domain"] == server_config["browser_host"] or \
cookie["domain"] == ".%s" % server_config["browser_host"]
def test_add_cookie_for_ip(session, url, server_config, configuration):
new_cookie = {
"name": "hello",
"value": "world",
"domain": "127.0.0.1",
"path": "/",
"httpOnly": False,
"secure": False
}
session.url = "http://127.0.0.1:%s/common/blank.html" % (server_config["ports"]["http"][0])
clear_all_cookies(session)
result = add_cookie(session, new_cookie)
assert_success(result)
cookie = session.cookies("hello")
assert "name" in cookie
assert isinstance(cookie["name"], basestring)
assert "value" in cookie
assert isinstance(cookie["value"], basestring)
assert "domain" in cookie
assert isinstance(cookie["domain"], basestring)
assert cookie["name"] == "hello"
assert cookie["value"] == "world"
assert cookie["domain"] == "127.0.0.1"
def test_add_non_session_cookie(session, url):
a_year_from_now = int(
(datetime.utcnow() + timedelta(days=365) - datetime.utcfromtimestamp(0)).total_seconds())
new_cookie = {
"name": "hello",
"value": "world",
"expiry": a_year_from_now
}
session.url = url("/common/blank.html")
clear_all_cookies(session)
result = add_cookie(session, new_cookie)
assert_success(result)
cookie = session.cookies("hello")
assert "name" in cookie
assert isinstance(cookie["name"], basestring)
assert "value" in cookie
assert isinstance(cookie["value"], basestring)
assert "expiry" in cookie
assert isinstance(cookie["expiry"], int)
assert cookie["name"] == "hello"
assert cookie["value"] == "world"
assert cookie["expiry"] == a_year_from_now
def test_add_session_cookie(session, url):
new_cookie = {
"name": "hello",
"value": "world"
}
session.url = url("/common/blank.html")
clear_all_cookies(session)
result = add_cookie(session, new_cookie)
assert_success(result)
cookie = session.cookies("hello")
assert "name" in cookie
assert isinstance(cookie["name"], basestring)
assert "value" in cookie
assert isinstance(cookie["value"], basestring)
if "expiry" in cookie:
assert cookie.get("expiry") is None
assert cookie["name"] == "hello"
assert cookie["value"] == "world"
def test_add_session_cookie_with_leading_dot_character_in_domain(session, url, server_config):
new_cookie = {
"name": "hello",
"value": "world",
"domain": ".%s" % server_config["browser_host"]
}
session.url = url("/common/blank.html")
clear_all_cookies(session)
result = add_cookie(session, new_cookie)
assert_success(result)
cookie = session.cookies("hello")
assert "name" in cookie
assert isinstance(cookie["name"], basestring)
assert "value" in cookie
assert isinstance(cookie["value"], basestring)
assert "domain" in cookie
assert isinstance(cookie["domain"], basestring)
assert cookie["name"] == "hello"
assert cookie["value"] == "world"
assert cookie["domain"] == server_config["browser_host"] or \
cookie["domain"] == ".%s" % server_config["browser_host"]
|
peterjoel/servo
|
tests/wpt/web-platform-tests/webdriver/tests/add_cookie/add.py
|
Python
|
mpl-2.0
| 5,180
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (c) 2010-2013 Elico Corp. All Rights Reserved.
# Author: Yannick Gouin <yannick.gouin@elico-corp.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import import_from_tmpl
import import_from_sheet
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
iw3hxn/LibrERP
|
office_automation/gap_analysis/wizard/__init__.py
|
Python
|
agpl-3.0
| 1,141
|
import sys
import os
import json
import hashlib
import logging
import collections
import urlparse
import re
import copy
import pprint
from StringIO import StringIO
from . import validate
from .aslist import aslist
from .flatten import flatten
import requests
from cachecontrol.wrapper import CacheControl
from cachecontrol.caches import FileCache
import ruamel.yaml as yaml
try:
from ruamel.yaml import CSafeLoader as SafeLoader
except ImportError:
from ruamel.yaml import SafeLoader # type: ignore
import rdflib
from rdflib.namespace import RDF, RDFS, OWL
from rdflib.plugins.parsers.notation3 import BadSyntax
import xml.sax
from typing import (Any, AnyStr, Callable, cast, Dict, List, Iterable, Set,
        Tuple, TypeVar, Union)
_logger = logging.getLogger("salad")
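# Dict that canonicalizes every key through a `normalize` callable before each
# lookup/assignment; the Loader below normalizes with urlsplit().geturl() so
# equivalent spellings of a URL collide onto a single entry.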
class NormDict(dict):
def __init__(self, normalize=unicode): # type: (type) -> None
super(NormDict, self).__init__()
self.normalize = normalize
def __getitem__(self, key):
return super(NormDict, self).__getitem__(self.normalize(key))
def __setitem__(self, key, value):
return super(NormDict, self).__setitem__(self.normalize(key), value)
def __delitem__(self, key):
return super(NormDict, self).__delitem__(self.normalize(key))
def __contains__(self, key):
return super(NormDict, self).__contains__(self.normalize(key))
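# Merge two dicts: keys unique to either side are copied through unchanged,
# keys present in both are combined into a list via aslist. Illustrative
# example (assuming aslist wraps scalars in a one-element list):
#   merge_properties({"a": 1, "b": 2}, {"b": 3}) -> {"a": 1, "b": [2, 3]}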
def merge_properties(a, b):
c = {}
for i in a:
if i not in b:
c[i] = a[i]
for i in b:
if i not in a:
c[i] = b[i]
for i in a:
if i in b:
c[i] = aslist(a[i]) + aslist(b[i])
return c
def SubLoader(loader): # type: (Loader) -> Loader
return Loader(loader.ctx, schemagraph=loader.graph,
foreign_properties=loader.foreign_properties, idx=loader.idx,
cache=loader.cache, session=loader.session)
class Loader(object):
ContextType = Dict[unicode, Union[Dict, unicode, Iterable[unicode]]]
DocumentType = TypeVar('DocumentType', List, Dict[unicode, Any])
def __init__(self, ctx, schemagraph=None, foreign_properties=None,
idx=None, cache=None, session=None):
# type: (Loader.ContextType, rdflib.Graph, Set[unicode], Dict[unicode, Union[List, Dict[unicode, Any], unicode]], Dict[unicode, Any], requests.sessions.Session) -> None
normalize = lambda url: urlparse.urlsplit(url).geturl()
if idx is not None:
self.idx = idx
else:
self.idx = NormDict(normalize)
self.ctx = {} # type: Loader.ContextType
if schemagraph is not None:
self.graph = schemagraph
else:
self.graph = rdflib.graph.Graph()
if foreign_properties is not None:
self.foreign_properties = foreign_properties
else:
self.foreign_properties = set()
if cache is not None:
self.cache = cache
else:
self.cache = {}
self.session = None # type: requests.sessions.Session
if session is not None:
self.session = session
else:
self.session = CacheControl(requests.Session(),
cache=FileCache(os.path.join(os.environ["HOME"], ".cache", "salad")))
self.url_fields = None # type: Set[unicode]
self.scoped_ref_fields = None # type: Dict[unicode, int]
self.vocab_fields = None # type: Set[unicode]
self.identifiers = None # type: Set[unicode]
self.identity_links = None # type: Set[unicode]
self.standalone = None # type: Set[unicode]
self.nolinkcheck = None # type: Set[unicode]
self.vocab = {} # type: Dict[unicode, unicode]
self.rvocab = {} # type: Dict[unicode, unicode]
self.idmap = None # type: Dict[unicode, Any]
self.mapPredicate = None # type: Dict[unicode, unicode]
self.type_dsl_fields = None # type: Set[unicode]
self.add_context(ctx)
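    # Resolve `url` against `base_url`: known vocab terms are returned as-is,
    # CURIE-style "prefix:..." references are expanded through self.vocab,
    # scoped_id=True nests a bare fragment under the base document's fragment,
    # and with vocab_term=True a fully expanded URL is mapped back to its
    # short term through the reverse vocabulary.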
def expand_url(self, url, base_url, scoped_id=False, vocab_term=False, scoped_ref=None):
# type: (unicode, unicode, bool, bool, int) -> unicode
if url in (u"@id", u"@type"):
return url
if vocab_term and url in self.vocab:
return url
if self.vocab and u":" in url:
prefix = url.split(u":")[0]
if prefix in self.vocab:
url = self.vocab[prefix] + url[len(prefix) + 1:]
split = urlparse.urlsplit(url)
if split.scheme or url.startswith(u"$(") or url.startswith(u"${"):
pass
elif scoped_id and not split.fragment:
splitbase = urlparse.urlsplit(base_url)
frg = u""
if splitbase.fragment:
frg = splitbase.fragment + u"/" + split.path
else:
frg = split.path
pt = splitbase.path if splitbase.path else "/"
url = urlparse.urlunsplit(
(splitbase.scheme, splitbase.netloc, pt, splitbase.query, frg))
elif scoped_ref is not None and not split.fragment:
pass
else:
url = urlparse.urljoin(base_url, url)
if vocab_term and url in self.rvocab:
return self.rvocab[url]
else:
return url
def _add_properties(self, s): # type: (unicode) -> None
for _, _, rng in self.graph.triples((s, RDFS.range, None)):
literal = ((unicode(rng).startswith(
u"http://www.w3.org/2001/XMLSchema#") and
not unicode(rng) == u"http://www.w3.org/2001/XMLSchema#anyURI")
or unicode(rng) ==
u"http://www.w3.org/2000/01/rdf-schema#Literal")
if not literal:
self.url_fields.add(unicode(s))
self.foreign_properties.add(unicode(s))
def add_namespaces(self, ns): # type: (Dict[unicode, unicode]) -> None
self.vocab.update(ns)
def add_schemas(self, ns, base_url):
# type: (Union[List[unicode], unicode], unicode) -> None
for sch in aslist(ns):
fetchurl = urlparse.urljoin(base_url, sch)
if fetchurl not in self.cache:
_logger.info("Getting external schema %s", fetchurl)
content = self.fetch_text(fetchurl)
self.cache[fetchurl] = rdflib.graph.Graph()
for fmt in ['xml', 'turtle', 'rdfa']:
try:
self.cache[fetchurl].parse(data=content, format=fmt)
self.graph += self.cache[fetchurl]
break
except xml.sax.SAXParseException: # type: ignore
pass
except TypeError:
pass
except BadSyntax:
pass
for s, _, _ in self.graph.triples((None, RDF.type, RDF.Property)):
self._add_properties(s)
for s, _, o in self.graph.triples((None, RDFS.subPropertyOf, None)):
self._add_properties(s)
self._add_properties(o)
for s, _, _ in self.graph.triples((None, RDFS.range, None)):
self._add_properties(s)
for s, _, _ in self.graph.triples((None, RDF.type, OWL.ObjectProperty)):
self._add_properties(s)
for s, _, _ in self.graph.triples((None, None, None)):
self.idx[unicode(s)] = None
def add_context(self, newcontext, baseuri=""):
# type: (Loader.ContextType, unicode) -> None
if self.vocab:
raise validate.ValidationException(
"Refreshing context that already has stuff in it")
self.url_fields = set()
self.scoped_ref_fields = {}
self.vocab_fields = set()
self.identifiers = set()
self.identity_links = set()
self.standalone = set()
self.nolinkcheck = set()
self.idmap = {}
self.mapPredicate = {}
self.vocab = {}
self.rvocab = {}
self.type_dsl_fields = set()
self.ctx.update(_copy_dict_without_key(newcontext, u"@context"))
_logger.debug("ctx is %s", self.ctx)
for key, value in self.ctx.items():
if value == u"@id":
self.identifiers.add(key)
self.identity_links.add(key)
elif isinstance(value, dict) and value.get(u"@type") == u"@id":
self.url_fields.add(key)
if u"refScope" in value:
self.scoped_ref_fields[key] = value[u"refScope"]
if value.get(u"identity", False):
self.identity_links.add(key)
elif isinstance(value, dict) and value.get(u"@type") == u"@vocab":
self.url_fields.add(key)
self.vocab_fields.add(key)
if u"refScope" in value:
self.scoped_ref_fields[key] = value[u"refScope"]
if value.get(u"typeDSL"):
self.type_dsl_fields.add(key)
if isinstance(value, dict) and value.get(u"noLinkCheck"):
self.nolinkcheck.add(key)
if isinstance(value, dict) and value.get(u"mapSubject"):
self.idmap[key] = value[u"mapSubject"]
if isinstance(value, dict) and value.get(u"mapPredicate"):
self.mapPredicate[key] = value[u"mapPredicate"]
if isinstance(value, dict) and u"@id" in value:
self.vocab[key] = value[u"@id"]
elif isinstance(value, basestring):
self.vocab[key] = value
for k, v in self.vocab.items():
self.rvocab[self.expand_url(v, u"", scoped_id=False)] = k
_logger.debug("identifiers is %s", self.identifiers)
_logger.debug("identity_links is %s", self.identity_links)
_logger.debug("url_fields is %s", self.url_fields)
_logger.debug("vocab_fields is %s", self.vocab_fields)
_logger.debug("vocab is %s", self.vocab)
def resolve_ref(self, ref, base_url=None, checklinks=True):
# type: (Union[Dict[unicode, Any], unicode], unicode, bool) -> Tuple[Union[List, Dict[unicode, Any], unicode], Dict[unicode, Any]]
base_url = base_url or u'file://%s/' % os.path.abspath('.')
obj = None # type: Dict[unicode, Any]
inc = False
mixin = None
# If `ref` is a dict, look for special directives.
if isinstance(ref, dict):
obj = ref
if u"$import" in obj:
if len(obj) == 1:
ref = obj[u"$import"]
obj = None
else:
raise ValueError(
u"'$import' must be the only field in %s" % (str(obj)))
elif u"$include" in obj:
if len(obj) == 1:
ref = obj[u"$include"]
inc = True
obj = None
else:
raise ValueError(
u"'$include' must be the only field in %s" % (str(obj)))
elif u"$mixin" in obj:
ref = obj[u"$mixin"]
mixin = obj
obj = None
else:
ref = None
for identifier in self.identifiers:
if identifier in obj:
ref = obj[identifier]
break
if not ref:
raise ValueError(
u"Object `%s` does not have identifier field in %s" % (obj, self.identifiers))
if not isinstance(ref, (str, unicode)):
raise ValueError(u"Must be string: `%s`" % str(ref))
url = self.expand_url(ref, base_url, scoped_id=(obj is not None))
# Has this reference been loaded already?
if url in self.idx and (not mixin):
return self.idx[url], {}
# "$include" directive means load raw text
if inc:
return self.fetch_text(url), {}
doc = None
if obj:
for identifier in self.identifiers:
obj[identifier] = url
doc_url = url
else:
# Load structured document
doc_url, frg = urlparse.urldefrag(url)
if doc_url in self.idx and (not mixin):
# If the base document is in the index, it was already loaded,
# so if we didn't find the reference earlier then it must not
# exist.
raise validate.ValidationException(
u"Reference `#%s` not found in file `%s`." % (frg, doc_url))
doc = self.fetch(doc_url, inject_ids=(not mixin))
# Recursively expand urls and resolve directives
if mixin:
doc = copy.deepcopy(doc)
doc.update(mixin)
del doc["$mixin"]
url = None
resolved_obj, metadata = self.resolve_all(
doc, base_url, file_base=doc_url, checklinks=checklinks)
else:
resolved_obj, metadata = self.resolve_all(
doc if doc else obj, doc_url, checklinks=checklinks)
# Requested reference should be in the index now, otherwise it's a bad
# reference
if url is not None:
if url in self.idx:
resolved_obj = self.idx[url]
else:
raise RuntimeError("Reference `%s` is not in the index. "
"Index contains:\n %s" % (url, "\n ".join(self.idx)))
if isinstance(resolved_obj, (dict)):
if u"$graph" in resolved_obj:
metadata = _copy_dict_without_key(resolved_obj, u"$graph")
return resolved_obj[u"$graph"], metadata
else:
return resolved_obj, metadata
else:
return resolved_obj, metadata
def _resolve_idmap(self, document, loader):
# type: (Dict[unicode, Union[Dict[unicode, Dict[unicode, unicode]], List[Dict[unicode, Any]]]], Loader) -> None
# Convert fields with mapSubject into lists
# use mapPredicate if the mapped value isn't a dict.
for idmapField in loader.idmap:
if (idmapField in document):
idmapFieldValue = document[idmapField]
if (isinstance(idmapFieldValue, dict)
and "$import" not in idmapFieldValue
and "$include" not in idmapFieldValue):
ls = []
for k in sorted(idmapFieldValue.keys()):
val = idmapFieldValue[k]
v = None # type: Dict[unicode, Any]
if not isinstance(val, dict):
if idmapField in loader.mapPredicate:
v = {loader.mapPredicate[idmapField]: val}
else:
                                raise validate.ValidationException(
                                    "mapSubject '%s' value '%s' is not a dict "
                                    "and does not have a mapPredicate" % (k, val))
else:
v = val
v[loader.idmap[idmapField]] = k
ls.append(v)
document[idmapField] = ls
typeDSLregex = re.compile(ur"^([^[?]+)(\[\])?(\?)?$")
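    # Shorthand type DSL handled by _type_dsl below (illustrative mapping):
    #   "Foo"    -> "Foo"
    #   "Foo[]"  -> {"type": "array", "items": "Foo"}
    #   "Foo?"   -> ["null", "Foo"]
    #   "Foo[]?" -> ["null", {"type": "array", "items": "Foo"}]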
def _type_dsl(self, t):
# type: (Union[unicode, Dict, List]) -> Union[unicode, Dict[unicode, unicode], List[Union[unicode, Dict[unicode, unicode]]]]
if not isinstance(t, (str, unicode)):
return t
m = Loader.typeDSLregex.match(t)
if not m:
return t
first = m.group(1)
second = third = None
if m.group(2):
second = {u"type": u"array",
u"items": first}
if m.group(3):
third = [u"null", second or first]
return third or second or first
def _resolve_type_dsl(self, document, loader):
# type: (Dict[unicode, Union[unicode, Dict[unicode, unicode], List]], Loader) -> None
for d in loader.type_dsl_fields:
if d in document:
datum = document[d]
if isinstance(datum, (str, unicode)):
document[d] = self._type_dsl(datum)
elif isinstance(datum, list):
document[d] = [self._type_dsl(t) for t in datum]
datum2 = document[d]
if isinstance(datum2, list):
document[d] = flatten(datum2)
seen = [] # type: List[unicode]
uniq = []
for item in document[d]:
if item not in seen:
uniq.append(item)
seen.append(item)
document[d] = uniq
def _resolve_identifier(self, document, loader, base_url):
# type: (Dict[unicode, unicode], Loader, unicode) -> unicode
# Expand identifier field (usually 'id') to resolve scope
        for identifier in loader.identifiers:
            if identifier in document:
                if isinstance(document[identifier], basestring):
                    document[identifier] = loader.expand_url(
                        document[identifier], base_url, scoped_id=True)
                    if (document[identifier] not in loader.idx
                            or isinstance(
                                loader.idx[document[identifier]], basestring)):
                        loader.idx[document[identifier]] = document
                    base_url = document[identifier]
                else:
                    raise validate.ValidationException(
                        "identifier field '%s' must be a string"
                        % (document[identifier]))
return base_url
def _resolve_identity(self, document, loader, base_url):
# type: (Dict[unicode, List[unicode]], Loader, unicode) -> None
# Resolve scope for identity fields (fields where the value is the
# identity of a standalone node, such as enum symbols)
        for identifier in loader.identity_links:
            if identifier in document and isinstance(document[identifier], list):
                for n, v in enumerate(document[identifier]):
                    if isinstance(document[identifier][n], basestring):
                        document[identifier][n] = loader.expand_url(
                            document[identifier][n], base_url, scoped_id=True)
                        if document[identifier][n] not in loader.idx:
                            loader.idx[document[identifier][
                                n]] = document[identifier][n]
def _normalize_fields(self, document, loader):
# type: (Dict[unicode, unicode], Loader) -> None
        # Normalize fields which are prefixed or full URIs to vocabulary terms
for d in document:
d2 = loader.expand_url(d, u"", scoped_id=False, vocab_term=True)
if d != d2:
document[d2] = document[d]
del document[d]
def _resolve_uris(self, document, loader, base_url):
# type: (Dict[unicode, Union[unicode, List[unicode]]], Loader, unicode) -> None
# Resolve remaining URLs based on document base
for d in loader.url_fields:
if d in document:
datum = document[d]
if isinstance(datum, (str, unicode)):
document[d] = loader.expand_url(
datum, base_url, scoped_id=False,
vocab_term=(d in loader.vocab_fields),
scoped_ref=self.scoped_ref_fields.get(d))
elif isinstance(datum, list):
document[d] = [
loader.expand_url(
url, base_url, scoped_id=False,
vocab_term=(d in loader.vocab_fields),
scoped_ref=self.scoped_ref_fields.get(d))
if isinstance(url, (str, unicode))
else url for url in datum]
def resolve_all(self, document, base_url, file_base=None, checklinks=True):
# type: (DocumentType, unicode, unicode, bool) -> Tuple[Union[List, Dict[unicode, Any], unicode], Dict[unicode, Any]]
loader = self
metadata = {} # type: Dict[unicode, Any]
if file_base is None:
file_base = base_url
if isinstance(document, dict):
# Handle $import and $include
if (u'$import' in document or u'$include' in document):
return self.resolve_ref(document, base_url=file_base, checklinks=checklinks)
elif u'$mixin' in document:
return self.resolve_ref(document, base_url=base_url, checklinks=checklinks)
elif isinstance(document, list):
pass
else:
return (document, metadata)
newctx = None # type: Loader
if isinstance(document, dict):
# Handle $base, $profile, $namespaces, $schemas and $graph
if u"$base" in document:
base_url = document[u"$base"]
if u"$profile" in document:
if not newctx:
newctx = SubLoader(self)
prof = self.fetch(document[u"$profile"])
newctx.add_namespaces(document.get(u"$namespaces", {}))
newctx.add_schemas(document.get(
u"$schemas", []), document[u"$profile"])
if u"$namespaces" in document:
if not newctx:
newctx = SubLoader(self)
newctx.add_namespaces(document[u"$namespaces"])
if u"$schemas" in document:
if not newctx:
newctx = SubLoader(self)
newctx.add_schemas(document[u"$schemas"], file_base)
if newctx:
loader = newctx
if u"$graph" in document:
metadata = _copy_dict_without_key(document, u"$graph")
document = document[u"$graph"]
resolved_metadata = loader.resolve_all(metadata, base_url,
file_base=file_base, checklinks=False)[0]
if isinstance(resolved_metadata, dict):
metadata = resolved_metadata
else:
raise validate.ValidationException(
"Validation error, metadata must be dict: %s"
% (resolved_metadata))
if isinstance(document, dict):
self._normalize_fields(document, loader)
self._resolve_idmap(document, loader)
self._resolve_type_dsl(document, loader)
base_url = self._resolve_identifier(document, loader, base_url)
self._resolve_identity(document, loader, base_url)
self._resolve_uris(document, loader, base_url)
try:
for key, val in document.items():
document[key], _ = loader.resolve_all(
val, base_url, file_base=file_base, checklinks=False)
except validate.ValidationException as v:
_logger.warn("loader is %s", id(loader), exc_info=True)
raise validate.ValidationException("(%s) (%s) Validation error in field %s:\n%s" % (
id(loader), file_base, key, validate.indent(str(v))))
elif isinstance(document, list):
i = 0
try:
while i < len(document):
val = document[i]
if isinstance(val, dict) and (u"$import" in val or u"$mixin" in val):
l, _ = loader.resolve_ref(val, base_url=file_base, checklinks=False)
if isinstance(l, list): # never true?
del document[i]
for item in aslist(l):
document.insert(i, item)
i += 1
else:
document[i] = l
i += 1
else:
document[i], _ = loader.resolve_all(
val, base_url, file_base=file_base, checklinks=False)
i += 1
except validate.ValidationException as v:
_logger.warn("failed", exc_info=True)
raise validate.ValidationException("(%s) (%s) Validation error in position %i:\n%s" % (
id(loader), file_base, i, validate.indent(str(v))))
        for identifier in loader.identity_links:
            if identifier in metadata:
                if isinstance(metadata[identifier], (str, unicode)):
                    metadata[identifier] = loader.expand_url(
                        metadata[identifier], base_url, scoped_id=True)
                    loader.idx[metadata[identifier]] = document
if checklinks:
document = self.validate_links(document, u"")
return document, metadata
def fetch_text(self, url):
# type: (unicode) -> unicode
if url in self.cache:
return self.cache[url]
split = urlparse.urlsplit(url)
scheme, path = split.scheme, split.path
if scheme in [u'http', u'https'] and self.session:
try:
resp = self.session.get(url)
resp.raise_for_status()
except Exception as e:
raise RuntimeError(url, e)
return resp.text
elif scheme == 'file':
try:
with open(path) as fp:
read = fp.read()
if hasattr(read, "decode"):
return read.decode("utf-8")
else:
return read
except (OSError, IOError) as e:
raise RuntimeError('Error reading %s %s' % (url, e))
else:
raise ValueError('Unsupported scheme in url: %s' % url)
def fetch(self, url, inject_ids=True): # type: (unicode, bool) -> Any
if url in self.idx:
return self.idx[url]
try:
text = self.fetch_text(url)
if isinstance(text, bytes):
textIO = StringIO(text.decode('utf-8'))
else:
textIO = StringIO(text)
textIO.name = url # type: ignore
result = yaml.load(textIO, Loader=SafeLoader)
except yaml.parser.ParserError as e:
raise validate.ValidationException("Syntax error %s" % (e))
if isinstance(result, dict) and inject_ids and self.identifiers:
for identifier in self.identifiers:
if identifier not in result:
result[identifier] = url
self.idx[self.expand_url(result[identifier], url)] = result
else:
self.idx[url] = result
return result
def check_file(self, fn): # type: (unicode) -> bool
if fn.startswith("file://"):
u = urlparse.urlsplit(fn)
return os.path.exists(u.path)
else:
return False
FieldType = TypeVar('FieldType', unicode, List[unicode], Dict[unicode, Any])
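    # Resolve a scoped reference: drop `refScope` levels from the document's
    # fragment path, then walk upward one fragment level at a time until the
    # link is found in the index, collecting every URL tried for the error.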
def validate_scoped(self, field, link, docid):
# type: (unicode, unicode, unicode) -> unicode
split = urlparse.urlsplit(docid)
sp = split.fragment.split(u"/")
n = self.scoped_ref_fields[field]
while n > 0 and len(sp) > 0:
sp.pop()
n -= 1
tried = []
while True:
sp.append(link)
url = urlparse.urlunsplit((
split.scheme, split.netloc, split.path, split.query,
u"/".join(sp)))
tried.append(url)
if url in self.idx:
return url
sp.pop()
if len(sp) == 0:
break
sp.pop()
raise validate.ValidationException(
"Field `%s` contains undefined reference to `%s`, tried %s" % (field, link, tried))
def validate_link(self, field, link, docid):
# type: (unicode, FieldType, unicode) -> FieldType
if field in self.nolinkcheck:
return link
if isinstance(link, (str, unicode)):
if field in self.vocab_fields:
if link not in self.vocab and link not in self.idx and link not in self.rvocab:
if field in self.scoped_ref_fields:
return self.validate_scoped(field, link, docid)
elif not self.check_file(link):
raise validate.ValidationException(
"Field `%s` contains undefined reference to `%s`" % (field, link))
elif link not in self.idx and link not in self.rvocab:
if field in self.scoped_ref_fields:
return self.validate_scoped(field, link, docid)
elif not self.check_file(link):
raise validate.ValidationException(
"Field `%s` contains undefined reference to `%s`" % (field, link))
elif isinstance(link, list):
errors = []
for n, i in enumerate(link):
try:
link[n] = self.validate_link(field, i, docid)
except validate.ValidationException as v:
errors.append(v)
if errors:
raise validate.ValidationException(
"\n".join([str(e) for e in errors]))
elif isinstance(link, dict):
self.validate_links(link, docid)
else:
raise validate.ValidationException("Link must be a str, unicode, "
"list, or a dict.")
return link
def getid(self, d): # type: (Any) -> unicode
if isinstance(d, dict):
for i in self.identifiers:
if i in d:
if isinstance(d[i], (str, unicode)):
return d[i]
return None
def validate_links(self, document, base_url):
# type: (DocumentType, unicode) -> DocumentType
docid = self.getid(document)
if not docid:
docid = base_url
errors = []
iterator = None # type: Any
if isinstance(document, list):
iterator = enumerate(document)
elif isinstance(document, dict):
try:
for d in self.url_fields:
if d in document and d not in self.identity_links:
document[d] = self.validate_link(d, document[d], docid)
except validate.ValidationException as v:
errors.append(v)
if hasattr(document, "iteritems"):
iterator = document.iteritems()
else:
iterator = document.items()
else:
return document
for key, val in iterator:
try:
document[key] = self.validate_links(val, docid)
except validate.ValidationException as v:
if key not in self.nolinkcheck:
docid2 = self.getid(val)
if docid2:
errors.append(validate.ValidationException(
"While checking object `%s`\n%s" % (docid2, validate.indent(str(v)))))
else:
if isinstance(key, basestring):
errors.append(validate.ValidationException(
"While checking field `%s`\n%s" % (key, validate.indent(str(v)))))
else:
errors.append(validate.ValidationException(
"While checking position %s\n%s" % (key, validate.indent(str(v)))))
if errors:
if len(errors) > 1:
raise validate.ValidationException(
"\n".join([str(e) for e in errors]))
else:
raise errors[0]
return document
def _copy_dict_without_key(from_dict, filtered_key):
# type: (Dict, Any) -> Dict
new_dict = {}
for key, value in from_dict.items():
if key != filtered_key:
new_dict[key] = value
return new_dict
|
ohsu-computational-biology/common-workflow-language
|
v1.1.0-dev1/salad/schema_salad/ref_resolver.py
|
Python
|
apache-2.0
| 32,490
|
__version__ = '0.3.5'
|
texastribune/the-dp
|
tx_highered/__init__.py
|
Python
|
apache-2.0
| 22
|
# Copyright 2015: Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from rally.common import costilius
from rally.task import atomic
from tests.unit import test
class ActionTimerMixinTestCase(test.TestCase):
def test_atomic_actions(self):
inst = atomic.ActionTimerMixin()
self.assertEqual(inst._atomic_actions, inst.atomic_actions())
class AtomicActionTestCase(test.TestCase):
@mock.patch("time.time", side_effect=[1, 3, 6, 10, 15, 21])
def test_action_timer_context(self, mock_time):
inst = atomic.ActionTimerMixin()
with atomic.ActionTimer(inst, "test"):
with atomic.ActionTimer(inst, "test"):
with atomic.ActionTimer(inst, "some"):
pass
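        # The time.time() side_effect values pair up as: outer "test" 21-1=20,
        # nested "test (2)" 15-3=12, innermost "some" 10-6=4.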
expected = [("test", 20), ("test (2)", 12), ("some", 4)]
self.assertEqual(costilius.OrderedDict(expected),
inst.atomic_actions())
@mock.patch("time.time", side_effect=[1, 3])
def test_action_timer_context_with_exception(self, mock_time):
inst = atomic.ActionTimerMixin()
class TestException(Exception):
pass
try:
with atomic.ActionTimer(inst, "test"):
raise TestException("test")
except TestException:
pass
expected = [("test", 2)]
self.assertEqual(costilius.OrderedDict(expected),
inst.atomic_actions())
@mock.patch("time.time", side_effect=[1, 3])
def test_action_timer_decorator(self, mock_time):
class Some(atomic.ActionTimerMixin):
@atomic.action_timer("some")
def some_func(self, a, b):
return a + b
inst = Some()
self.assertEqual(5, inst.some_func(2, 3))
self.assertEqual(costilius.OrderedDict({"some": 2}),
inst.atomic_actions())
@mock.patch("time.time", side_effect=[1, 3])
def test_action_timer_decorator_with_exception(self, mock_time):
class TestException(Exception):
pass
class TestTimer(atomic.ActionTimerMixin):
@atomic.action_timer("test")
def some_func(self):
raise TestException("test")
inst = TestTimer()
self.assertRaises(TestException, inst.some_func)
self.assertEqual(costilius.OrderedDict({"test": 2}),
inst.atomic_actions())
@mock.patch("time.time", side_effect=[1, 3, 1, 3])
def test_optional_action_timer_decorator(self, mock_time):
class TestAtomicTimer(atomic.ActionTimerMixin):
@atomic.optional_action_timer("some")
def some_func(self, a, b):
return a + b
@atomic.optional_action_timer("some", argument_name="foo",
default=False)
def other_func(self, a, b):
return a + b
inst = TestAtomicTimer()
self.assertEqual(5, inst.some_func(2, 3))
self.assertEqual(costilius.OrderedDict({"some": 2}),
inst.atomic_actions())
inst = TestAtomicTimer()
self.assertEqual(5, inst.some_func(2, 3, atomic_action=False))
self.assertEqual(costilius.OrderedDict(),
inst.atomic_actions())
inst = TestAtomicTimer()
self.assertEqual(5, inst.other_func(2, 3))
self.assertEqual(costilius.OrderedDict(),
inst.atomic_actions())
inst = TestAtomicTimer()
self.assertEqual(5, inst.other_func(2, 3, foo=True))
self.assertEqual(costilius.OrderedDict({"some": 2}),
inst.atomic_actions())
|
vishnu-kumar/PeformanceFramework
|
tests/unit/task/test_atomic.py
|
Python
|
apache-2.0
| 4,257
|
# Importing libraries
import discord
from discord.ext import commands
from sys import argv
class Load:
"""
Load commands.
"""
def __init__(self, bot):
self.bot = bot
print('Addon "{}" loaded'.format(self.__class__.__name__))
# Load test
@commands.has_permissions(ban_members=True)
@commands.command(hidden=True)
    async def load(self, *, module: str):
"""Loads an addon."""
try:
if module[0:7] != "addons.":
module = "addons." + module
self.bot.load_extension(module)
await self.bot.say('✅ Extension loaded.')
except Exception as e:
await self.bot.say('💢 Failed!\n```\n{}: {}\n```'.format(type(e).__name__, e))
@commands.has_permissions(ban_members=True)
@commands.command(hidden=True)
    async def unload(self, *, module: str):
"""Unloads an addon."""
try:
if module[0:7] != "addons.":
module = "addons." + module
if module == "addons.load":
await self.bot.say("❌ I don't think you want to unload that!")
else:
self.bot.unload_extension(module)
await self.bot.say('✅ Extension unloaded.')
except Exception as e:
await self.bot.say('💢 Failed!\n```\n{}: {}\n```'.format(type(e).__name__, e))
@commands.has_permissions(ban_members=True)
@commands.command(name='reload', hidden=True)
    async def _reload(self, *, module: str):
"""Reloads an addon."""
try:
if module[0:7] != "addons.":
module = "addons." + module
self.bot.unload_extension(module)
self.bot.load_extension(module)
await self.bot.say('✅ Extension reloaded.')
except Exception as e:
await self.bot.say('💢 Failed!\n```\n{}: {}\n```'.format(type(e).__name__, e))
def setup(bot):
bot.add_cog(Load(bot))
|
helpyellowsn0w/Kurisu
|
addons/load.py
|
Python
|
apache-2.0
| 1,977
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from absl.testing import parameterized
from tensorflow.core.protobuf import config_pb2
from tensorflow.python.eager import def_function
from tensorflow.python.eager import function
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import resource_variable_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
class DefunCollectionTest(test.TestCase, parameterized.TestCase):
@parameterized.named_parameters(
dict(testcase_name='Defun', function_decorator=function.defun),
dict(
testcase_name='DefFunction',
function_decorator=def_function.function))
def testCollectionValueAccess(self, function_decorator):
"""Read values from graph collections inside of defun."""
with ops.Graph().as_default() as g:
with self.session(graph=g):
x = 2
y = 5
ops.add_to_collection('x', x)
ops.add_to_collection('y', y)
@function_decorator
def fn():
x_const = constant_op.constant(ops.get_collection('x')[0])
y_const = constant_op.constant(ops.get_collection('y')[0])
z = math_ops.add(x_const, y_const)
ops.add_to_collection('z', 7)
return z
self.assertEqual(7, int(self.evaluate(fn())))
self.assertEqual(ops.get_collection('x'), [2])
self.assertEqual(ops.get_collection('y'), [5])
self.assertEqual(ops.get_collection('z'), [])
@parameterized.named_parameters(
dict(testcase_name='Defun', function_decorator=function.defun),
dict(
testcase_name='DefFunction',
function_decorator=def_function.function))
def testCollectionVariableValueAccess(self, function_decorator):
"""Read variable value from graph collections inside of defun."""
with ops.Graph().as_default() as g:
with self.session(graph=g):
v = resource_variable_ops.ResourceVariable(1.0)
@function_decorator
def f():
return v.read_value()
self.evaluate(variables.global_variables_initializer())
self.assertEqual(1.0, float(self.evaluate(f())))
self.assertLen(ops.get_collection(ops.GraphKeys.GLOBAL_VARIABLES), 1)
def testCollectionVariableValueWrite(self):
"""Write variable value inside defun."""
with ops.Graph().as_default() as g:
with self.session(graph=g):
@function.defun
def f():
v = resource_variable_ops.ResourceVariable(2.0)
return v
_ = f.get_concrete_function()
self.evaluate(variables.global_variables_initializer())
self.assertEqual(2.0, float(self.evaluate(f())))
self.assertLen(ops.get_collection(ops.GraphKeys.GLOBAL_VARIABLES), 1)
if __name__ == '__main__':
ops.enable_eager_execution(
config=config_pb2.ConfigProto(device_count={'CPU': 4}))
test.main()
|
tensorflow/tensorflow
|
tensorflow/python/eager/function_defun_collection_test.py
|
Python
|
apache-2.0
| 3,653
|
#
# The Python Imaging Library.
# $Id$
#
# image palette object
#
# History:
# 1996-03-11 fl Rewritten.
# 1997-01-03 fl Up and running.
# 1997-08-23 fl Added load hack
# 2001-04-16 fl Fixed randint shadow bug in random()
#
# Copyright (c) 1997-2001 by Secret Labs AB
# Copyright (c) 1996-1997 by Fredrik Lundh
#
# See the README file for information on usage and redistribution.
#
import array
from PIL import Image, ImageColor
class ImagePalette:
"Color palette for palette mapped images"
    def __init__(self, mode="RGB", palette=None):
self.mode = mode
self.rawmode = None # if set, palette contains raw data
self.palette = palette or list(range(256))*len(self.mode)
self.colors = {}
self.dirty = None
if len(self.mode)*256 != len(self.palette):
raise ValueError("wrong palette size")
def getdata(self):
"""
        Get palette contents in a format suitable for the low-level
        ``im.putpalette`` primitive.
.. warning:: This method is experimental.
"""
if self.rawmode:
return self.rawmode, self.palette
return self.mode + ";L", self.tobytes()
def tobytes(self):
"""Convert palette to bytes.
.. warning:: This method is experimental.
"""
if self.rawmode:
raise ValueError("palette contains raw palette data")
if isinstance(self.palette, bytes):
return self.palette
arr = array.array("B", self.palette)
if hasattr(arr, 'tobytes'):
#py3k has a tobytes, tostring is deprecated.
return arr.tobytes()
return arr.tostring()
# Declare tostring as an alias for tobytes
tostring = tobytes
def getcolor(self, color):
"""Given an rgb tuple, allocate palette entry.
.. warning:: This method is experimental.
"""
if self.rawmode:
raise ValueError("palette contains raw palette data")
if isinstance(color, tuple):
try:
return self.colors[color]
except KeyError:
# allocate new color slot
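                # the palette is a flat 768-entry list: 256 R values, then
                # 256 G, then 256 B, hence index, index+256, index+512 below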
if isinstance(self.palette, bytes):
self.palette = [int(x) for x in self.palette]
index = len(self.colors)
if index >= 256:
raise ValueError("cannot allocate more than 256 colors")
self.colors[color] = index
self.palette[index] = color[0]
self.palette[index+256] = color[1]
self.palette[index+512] = color[2]
self.dirty = 1
return index
else:
raise ValueError("unknown color specifier: %r" % color)
def save(self, fp):
"""Save palette to text file.
.. warning:: This method is experimental.
"""
if self.rawmode:
raise ValueError("palette contains raw palette data")
if isinstance(fp, str):
fp = open(fp, "w")
fp.write("# Palette\n")
fp.write("# Mode: %s\n" % self.mode)
for i in range(256):
fp.write("%d" % i)
for j in range(i, len(self.palette), 256):
fp.write(" %d" % self.palette[j])
fp.write("\n")
fp.close()
# --------------------------------------------------------------------
# Internal
def raw(rawmode, data):
palette = ImagePalette()
palette.rawmode = rawmode
palette.palette = data
palette.dirty = 1
return palette
# --------------------------------------------------------------------
# Factories
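# Build a 256-entry lookup table scaling linearly from black to `white` using
# white*i//255; e.g. _make_linear_lut(0, 128) yields [0, 0, 1, ..., 128].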
def _make_linear_lut(black, white):
lut = []
if black == 0:
for i in range(256):
lut.append(white*i//255)
else:
raise NotImplementedError # FIXME
return lut
def _make_gamma_lut(exp):
lut = []
for i in range(256):
lut.append(int(((i / 255.0) ** exp) * 255.0 + 0.5))
return lut
def new(mode, data):
return Image.core.new_palette(mode, data)
def negative(mode="RGB"):
palette = list(range(256))
palette.reverse()
return ImagePalette(mode, palette * len(mode))
def random(mode="RGB"):
from random import randint
palette = []
for i in range(256*len(mode)):
palette.append(randint(0, 255))
return ImagePalette(mode, palette)
def sepia(white="#fff0c0"):
r, g, b = ImageColor.getrgb(white)
r = _make_linear_lut(0, r)
g = _make_linear_lut(0, g)
b = _make_linear_lut(0, b)
return ImagePalette("RGB", r + g + b)
def wedge(mode="RGB"):
return ImagePalette(mode, list(range(256)) * len(mode))
def load(filename):
# FIXME: supports GIMP gradients only
fp = open(filename, "rb")
lut = None
if not lut:
try:
from PIL import GimpPaletteFile
fp.seek(0)
p = GimpPaletteFile.GimpPaletteFile(fp)
lut = p.getpalette()
except (SyntaxError, ValueError):
#import traceback
#traceback.print_exc()
pass
if not lut:
try:
from PIL import GimpGradientFile
fp.seek(0)
p = GimpGradientFile.GimpGradientFile(fp)
lut = p.getpalette()
except (SyntaxError, ValueError):
#import traceback
#traceback.print_exc()
pass
if not lut:
try:
from PIL import PaletteFile
fp.seek(0)
p = PaletteFile.PaletteFile(fp)
lut = p.getpalette()
except (SyntaxError, ValueError):
import traceback
traceback.print_exc()
pass
if not lut:
raise IOError("cannot load palette")
return lut # data, rawmode
|
Amechi101/concepteur-market-app
|
venv/lib/python2.7/site-packages/PIL/ImagePalette.py
|
Python
|
mit
| 5,792
|
"""
Python Interchangeable Virtual Instrument Library
Copyright (c) 2012-2014 Alex Forencich
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from .tektronixAWG2000 import *
class tektronixAWG2005(tektronixAWG2000):
"Tektronix AWG2005 arbitrary waveform generator driver"
def __init__(self, *args, **kwargs):
self.__dict__.setdefault('_instrument_id', 'AWG2005')
super(tektronixAWG2005, self).__init__(*args, **kwargs)
self._output_count = 2
|
elopezga/ErrorRate
|
ivi/tektronix/tektronixAWG2005.py
|
Python
|
mit
| 1,490
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Stderr built-in backend.
"""
__author__ = "Lluís Vilanova <vilanova@ac.upc.edu>"
__copyright__ = "Copyright 2012-2017, Lluís Vilanova <vilanova@ac.upc.edu>"
__license__ = "GPL version 2 or (at your option) any later version"
__maintainer__ = "Stefan Hajnoczi"
__email__ = "stefanha@linux.vnet.ibm.com"
from tracetool import out
PUBLIC = True
def generate_h_begin(events, group):
out('#include "qemu/log-for-trace.h"',
'')
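# generate_h emits the per-event trace statement; for an event "foo" the
# generated C looks roughly like this (illustrative sketch, not verbatim):
#   if (trace_event_get_state(TRACE_FOO) && qemu_loglevel_mask(LOG_TRACE)) {
#       struct timeval _now;
#       gettimeofday(&_now, NULL);
#       qemu_log("%d@%zu.%06zu:foo ...", qemu_get_thread_id(), ...);
#   }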
def generate_h(event, group):
argnames = ", ".join(event.args.names())
if len(event.args) > 0:
argnames = ", " + argnames
if "vcpu" in event.properties:
# already checked on the generic format code
cond = "true"
else:
cond = "trace_event_get_state(%s)" % ("TRACE_" + event.name.upper())
out(' if (%(cond)s && qemu_loglevel_mask(LOG_TRACE)) {',
' struct timeval _now;',
' gettimeofday(&_now, NULL);',
' qemu_log("%%d@%%zu.%%06zu:%(name)s " %(fmt)s "\\n",',
' qemu_get_thread_id(),',
' (size_t)_now.tv_sec, (size_t)_now.tv_usec',
' %(argnames)s);',
' }',
cond=cond,
name=event.name,
fmt=event.fmt.rstrip("\n"),
argnames=argnames)
def generate_h_backend_dstate(event, group):
out(' trace_event_get_state_dynamic_by_id(%(event_id)s) || \\',
event_id="TRACE_" + event.name.upper())
|
marioli/qemu
|
scripts/tracetool/backend/log.py
|
Python
|
gpl-2.0
| 1,521
|
import docker
import yaml
import sys
yamlFile = sys.argv[1]
client = docker.from_env()
images={}
result = open('test_result.txt', 'w')
## Build Image ##
def build_image(ipath, itag):
try:
        print "Building " + itag
client.images.build(pull=True, path=ipath, tag=itag, rm=True, stream=True)
except docker.errors.BuildError as exc:
return "ERROR " + exc.__str__()
except docker.errors.APIError as exc:
return "ERROR " + exc.__str__()
return "OK"
## remove Image ##
def remove_image(itag):
try:
        print "Removing " + itag
client.images.remove(image=itag, force=True)
except docker.errors.BuildError as exc:
pass
except docker.errors.APIError as exc:
print (exc)
## Open File ##
with open(yamlFile, 'r') as stream:
try:
        images = yaml.safe_load(stream)
except yaml.YAMLError as exc:
print(exc)
for image in images:
if images[image]["run"]:
sol = build_image(images[image]["path"], image)
remove_image(image)
result.write(image + ": " + sol + "\n")
result.close()
|
fqez/JdeRobot
|
test/packages/run_test.py
|
Python
|
gpl-3.0
| 1,058
|
import numpy as np
import pele.potentials.lj as lj
#import potentials.ljcpp as lj
from pele.mc import MonteCarlo
from pele.takestep import RandomDisplacement, AdaptiveStepsize
from ptmc import PTMC, getTemps
import copy
from pele.utils.histogram import EnergyHistogram, PrintHistogram
from pele.optimize import mylbfgs as quench
from pele.accept_tests.spherical_container import SphericalContainer
def runptmc(nsteps_tot = 100000):
natoms = 31
nreplicas = 4
Tmin = 0.2
Tmax = 0.4
nsteps_equil = 10000
histiprint = nsteps_tot / 10
exchange_frq = 100*nreplicas
coords=np.random.random(3*natoms)
#quench the coords so we start from a reasonable location
mypot = lj.LJ()
ret = quench(coords, mypot)
coords = ret.coords
Tlist = getTemps(Tmin, Tmax, nreplicas)
replicas = []
ostreams = []
histograms = []
takesteplist = []
radius = 2.5
# create all the replicas which will be passed to PTMC
for i in range(nreplicas):
T = Tlist[i]
potential = lj.LJ()
takestep = RandomDisplacement( stepsize=0.01)
adaptive = AdaptiveStepsize(takestep, last_step = nsteps_equil)
takesteplist.append( adaptive )
file = "mcout." + str(i+1)
ostream = open(file, "w")
hist = EnergyHistogram( -134., 10., 1000)
histograms.append(hist)
event_after_step=[hist]
radiustest = SphericalContainer(radius)
accept_tests = [radiustest]
mc = MonteCarlo(coords, potential, takeStep=takestep, temperature=T, \
outstream=ostream, event_after_step = event_after_step, \
confCheck = accept_tests)
        mc.histogram = hist  # for convenience
mc.printfrq = 1
replicas.append(mc)
#is it possible to pickle a mc object?
#cp = copy.deepcopy(replicas[0])
#import pickle
#with open("mc.pickle", "w") as fout:
#pickle.dump(takesteplist[0], fout)
#attach an event to print xyz coords
from pele.printing.print_atoms_xyz import PrintEvent
printxyzlist = []
for n, rep in enumerate(replicas):
outf = "dumpstruct.%d.xyz" % (n+1)
printxyz = PrintEvent(outf, frq=500)
printxyzlist.append( printxyz)
rep.addEventAfterStep(printxyz)
#attach an event to print histograms
for n, rep in enumerate(replicas):
outf = "hist.%d" % (n+1)
histprint = PrintHistogram(outf, rep.histogram, histiprint)
rep.addEventAfterStep(histprint)
ptmc = PTMC(replicas)
ptmc.use_independent_exchange = True
ptmc.exchange_frq = exchange_frq
ptmc.run(nsteps_tot)
#do production run
#fix the step sizes
#for takestep in takesteplist:
# takestep.useFixedStep()
#ptmc.run(30000)
if False: #this doesn't work
print "final energies"
for rep in ptmc.replicas:
print rep.temperature, rep.markovE
for rep in ptmc.replicas_par:
print rep.mcsys.markovE
for k in range(nreplicas):
e,T = ptmc.getRepEnergyT(k)
print T, e
if False: #this doesn't work
print "histograms"
for i,hist in enumerate(histograms):
fname = "hist." + str(i)
print fname
with open(fname, "w") as fout:
for (e, visits) in hist:
fout.write( "%g %d\n" % (e, visits) )
ptmc.end() #close the open threads
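# Reconstruct each replica's path through the temperature ladder from the
# "exchanges" log (presumably written by the PTMC run, one permutation of
# replica positions per line).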
def getReplicaPath(fname = "exchanges", nreps = 4):
paths = [ [i] for i in range(nreps)]
positions = np.array(range(nreps))
newpositions = np.array(range(nreps))
oldpositions = np.array(range(nreps))
with open(fname, "r") as fin:
for line in fin:
sline = line.split()
time = int(sline[0])
for newposition in range(nreps):
oldposition = int(sline[3+newposition])
oldpositions[newposition] = oldposition
#replica = position2replica[oldposition]
#newpositions[replica] =
print oldpositions
print positions
#positions[:] = positions[newpositions]
positions[:] = positions[oldpositions]
print positions
#print ""
for i, j in enumerate(positions):
paths[i].append(j)
if True:
import matplotlib.pyplot as plt
nppaths = np.array(paths)
print np.shape(nppaths)
for i in range(4):
plt.subplot(2,2,i+1)
plt.plot( nppaths[i,:])
plt.show()
print paths[0]
if __name__ == "__main__":
if True:
runptmc(
nsteps_tot = 1000000
)
getReplicaPath()
|
smcantab/pele
|
playground/parallel_tempering/run_ptmc.py
|
Python
|
gpl-3.0
| 4,924
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2017, Bruno Calogero <brunocalogero@hotmail.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'certified'}
DOCUMENTATION = r'''
---
module: aci_fabric_node
short_description: Manage Fabric Node Members (fabric:NodeIdentP)
description:
- Manage Fabric Node Members on Cisco ACI fabrics.
version_added: '2.5'
options:
pod_id:
description:
- The pod id of the new Fabric Node Member.
type: int
serial:
description:
- Serial Number for the new Fabric Node Member.
type: str
aliases: [ serial_number ]
node_id:
description:
- Node ID Number for the new Fabric Node Member.
type: int
switch:
description:
- Switch Name for the new Fabric Node Member.
type: str
aliases: [ name, switch_name ]
description:
description:
- Description for the new Fabric Node Member.
type: str
aliases: [ descr ]
role:
description:
- Role for the new Fabric Node Member.
type: str
aliases: [ role_name ]
choices: [ leaf, spine, unspecified ]
state:
description:
- Use C(present) or C(absent) for adding or removing.
- Use C(query) for listing an object or multiple objects.
type: str
choices: [ absent, present, query ]
default: present
extends_documentation_fragment: aci
seealso:
- name: APIC Management Information Model reference
description: More information about the internal APIC class B(fabric:NodeIdentP).
link: https://developer.cisco.com/docs/apic-mim-ref/
author:
- Bruno Calogero (@brunocalogero)
'''
EXAMPLES = r'''
- name: Add fabric node
aci_fabric_node:
host: apic
username: admin
password: SomeSecretPassword
serial: FDO2031124L
node_id: 1011
switch: fab4-sw1011
state: present
delegate_to: localhost
- name: Remove fabric node
aci_fabric_node:
host: apic
username: admin
password: SomeSecretPassword
serial: FDO2031124L
node_id: 1011
state: absent
delegate_to: localhost
- name: Query fabric nodes
aci_fabric_node:
host: apic
username: admin
password: SomeSecretPassword
state: query
delegate_to: localhost
register: query_result
'''
RETURN = r'''
current:
description: The existing configuration from the APIC after the module has finished
returned: success
type: list
sample:
[
{
"fvTenant": {
"attributes": {
"descr": "Production environment",
"dn": "uni/tn-production",
"name": "production",
"nameAlias": "",
"ownerKey": "",
"ownerTag": ""
}
}
}
]
error:
description: The error information as returned from the APIC
returned: failure
type: dict
sample:
{
"code": "122",
"text": "unknown managed object class foo"
}
raw:
description: The raw output returned by the APIC REST API (xml or json)
returned: parse error
type: str
sample: '<?xml version="1.0" encoding="UTF-8"?><imdata totalCount="1"><error code="122" text="unknown managed object class foo"/></imdata>'
sent:
description: The actual/minimal configuration pushed to the APIC
returned: info
type: list
sample:
{
"fvTenant": {
"attributes": {
"descr": "Production environment"
}
}
}
previous:
description: The original configuration from the APIC before the module has started
returned: info
type: list
sample:
[
{
"fvTenant": {
"attributes": {
"descr": "Production",
"dn": "uni/tn-production",
"name": "production",
"nameAlias": "",
"ownerKey": "",
"ownerTag": ""
}
}
}
]
proposed:
description: The assembled configuration from the user-provided parameters
returned: info
type: dict
sample:
{
"fvTenant": {
"attributes": {
"descr": "Production environment",
"name": "production"
}
}
}
filter_string:
description: The filter string used for the request
returned: failure or debug
type: str
sample: '?rsp-prop-include=config-only'
method:
description: The HTTP method used for the request to the APIC
returned: failure or debug
type: str
sample: POST
response:
description: The HTTP response from the APIC
returned: failure or debug
type: str
sample: OK (30 bytes)
status:
description: The HTTP status from the APIC
returned: failure or debug
type: int
sample: 200
url:
description: The HTTP url used for the request to the APIC
returned: failure or debug
type: str
sample: https://10.11.12.13/api/mo/uni/tn-production.json
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.aci.aci import ACIModule, aci_argument_spec
# NOTE: This problem is also present in the APIC GUI.
# NOTE: When specifying a C(role), the new Fabric Node Member is created but the Role shown in the GUI is "unknown"; this is an APIC quirk, not a module problem.
def main():
argument_spec = aci_argument_spec()
argument_spec.update(
description=dict(type='str', aliases=['descr']),
node_id=dict(type='int'), # Not required for querying all objects
pod_id=dict(type='int'),
role=dict(type='str', choices=['leaf', 'spine', 'unspecified'], aliases=['role_name']),
serial=dict(type='str', aliases=['serial_number']), # Not required for querying all objects
switch=dict(type='str', aliases=['name', 'switch_name']),
state=dict(type='str', default='present', choices=['absent', 'present', 'query']),
)
module = AnsibleModule(
argument_spec=argument_spec,
supports_check_mode=True,
required_if=[
['state', 'absent', ['node_id', 'serial']],
['state', 'present', ['node_id', 'serial']],
],
)
pod_id = module.params.get('pod_id')
serial = module.params.get('serial')
node_id = module.params.get('node_id')
switch = module.params.get('switch')
description = module.params.get('description')
role = module.params.get('role')
state = module.params.get('state')
aci = ACIModule(module)
aci.construct_url(
root_class=dict(
aci_class='fabricNodeIdentP',
aci_rn='controller/nodeidentpol/nodep-{0}'.format(serial),
module_object=serial,
target_filter={'serial': serial},
)
)
aci.get_existing()
if state == 'present':
aci.payload(
aci_class='fabricNodeIdentP',
class_config=dict(
descr=description,
name=switch,
nodeId=node_id,
podId=pod_id,
# NOTE: Originally we were sending 'rn', but now we need 'dn' for idempotency
# FIXME: Did this change with ACI version ?
dn='uni/controller/nodeidentpol/nodep-{0}'.format(serial),
# rn='nodep-{0}'.format(serial),
role=role,
serial=serial,
)
)
aci.get_diff(aci_class='fabricNodeIdentP')
aci.post_config()
elif state == 'absent':
aci.delete_config()
aci.exit_json(**aci.result)
if __name__ == "__main__":
main()
|
kustodian/ansible
|
lib/ansible/modules/network/aci/aci_fabric_node.py
|
Python
|
gpl-3.0
| 7,804
|
# Part of Odoo. See LICENSE file for full copyright and licensing details.
# -*- coding: utf-8 -*-
from odoo.tests import tagged
from odoo.tests.common import TransactionCase
@tagged('post_install', '-at_install')
class TestWebsiteEvent(TransactionCase):
def test_event_app_name(self):
website0 = self.env['website'].create({'name': 'Foo'})
self.assertEqual(website0.events_app_name, 'Foo Events')
website1 = self.env['website'].create({'name': 'Foo', 'events_app_name': 'Bar Events'})
self.assertEqual(website1.events_app_name, 'Bar Events')
website2 = self.env['website'].create({'name': 'Foo'})
self.assertEqual(website2.events_app_name, 'Foo Events')
website2.write({'name': 'Bar'})
self.assertEqual(website2.events_app_name, 'Foo Events')
|
jeremiahyan/odoo
|
addons/website_event_track/tests/test_website_event.py
|
Python
|
gpl-3.0
| 818
|
from collections import defaultdict
import mock
from searx.engines import searchcode_code
from searx.testing import SearxTestCase
class TestSearchcodeCodeEngine(SearxTestCase):
def test_request(self):
query = 'test_query'
dicto = defaultdict(dict)
dicto['pageno'] = 0
params = searchcode_code.request(query, dicto)
self.assertIn('url', params)
self.assertIn(query, params['url'])
self.assertIn('searchcode.com', params['url'])
def test_response(self):
self.assertRaises(AttributeError, searchcode_code.response, None)
self.assertRaises(AttributeError, searchcode_code.response, [])
self.assertRaises(AttributeError, searchcode_code.response, '')
self.assertRaises(AttributeError, searchcode_code.response, '[]')
response = mock.Mock(text='{}')
self.assertEqual(searchcode_code.response(response), [])
response = mock.Mock(text='{"data": []}')
self.assertEqual(searchcode_code.response(response), [])
json = """
{
"matchterm": "test",
"previouspage": null,
"searchterm": "test",
"query": "test",
"total": 1000,
"page": 0,
"nextpage": 1,
"results": [
{
"repo": "https://repo",
"linescount": 1044,
"location": "/tests",
"name": "Name",
"url": "https://url",
"md5hash": "ecac6e479edd2b9406c9e08603cec655",
"lines": {
"1": "// Test 011",
"2": "// Source: "
},
"id": 51223527,
"filename": "File.CPP"
}
]
}
"""
response = mock.Mock(text=json)
results = searchcode_code.response(response)
self.assertEqual(type(results), list)
self.assertEqual(len(results), 1)
self.assertEqual(results[0]['title'], 'Name - File.CPP')
self.assertEqual(results[0]['url'], 'https://url')
self.assertEqual(results[0]['repository'], 'https://repo')
self.assertEqual(results[0]['code_language'], 'cpp')
json = r"""
{"toto":[
{"id":200,"name":"Artist Name",
"link":"http:\/\/www.searchcode_code.com\/artist\/1217","type":"artist"}
]}
"""
response = mock.Mock(text=json)
results = searchcode_code.response(response)
self.assertEqual(type(results), list)
self.assertEqual(len(results), 0)
|
misnyo/searx
|
tests/unit/engines/test_searchcode_code.py
|
Python
|
agpl-3.0
| 2,524
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This module is deprecated. Please use :mod:`airflow.providers.google.cloud.hooks.video_intelligence`."""
import warnings
from airflow.providers.google.cloud.hooks.video_intelligence import CloudVideoIntelligenceHook # noqa
warnings.warn(
"This module is deprecated. Please use `airflow.providers.google.cloud.hooks.video_intelligence`",
DeprecationWarning,
stacklevel=2,
)
|
apache/incubator-airflow
|
airflow/contrib/hooks/gcp_video_intelligence_hook.py
|
Python
|
apache-2.0
| 1,179
|
#!/usr/bin/env python
from translate.storage import txt
from translate.storage import test_monolingual
from translate.misc import wStringIO
class TestTxtUnit(test_monolingual.TestMonolingualUnit):
UnitClass = txt.TxtUnit
class TestTxtFile(test_monolingual.TestMonolingualStore):
StoreClass = txt.TxtFile
def txtparse(self, txtsource):
"""helper that parses txt source without requiring files"""
dummyfile = wStringIO.StringIO(txtsource)
txtfile = self.StoreClass(dummyfile)
return txtfile
def txtregen(self, txtsource):
"""helper that converts txt source to txtfile object and back"""
return str(self.txtparse(txtsource))
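    # Illustrative (not part of the original tests): txtregen("one\n\ntwo")
    # parses the source into two units and serialises them back, so the
    # round-trip should reproduce the input string.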
def test_simpleblock(self):
"""checks that a simple txt block is parsed correctly"""
txtsource = 'bananas for sale'
txtfile = self.txtparse(txtsource)
assert len(txtfile.units) == 1
assert txtfile.units[0].source == txtsource
assert self.txtregen(txtsource) == txtsource
def test_multipleblocks(self):
""" check that multiple blocks are parsed correctly"""
txtsource = '''One\nOne\n\nTwo\n---\n\nThree'''
txtfile = self.txtparse(txtsource)
assert len(txtfile.units) == 3
print txtsource
print str(txtfile)
print "*%s*" % txtfile.units[0]
assert str(txtfile) == txtsource
assert self.txtregen(txtsource) == txtsource
|
dbbhattacharya/kitsune
|
vendor/packages/translate-toolkit/translate/storage/test_txt.py
|
Python
|
bsd-3-clause
| 1,437
|
import numpy as np
import pytest
from pandas import (
DataFrame,
Index,
MultiIndex,
)
import pandas._testing as tm
class TestDataFrameRenameAxis:
def test_rename_axis_inplace(self, float_frame):
# GH#15704
expected = float_frame.rename_axis("foo")
result = float_frame.copy()
return_value = no_return = result.rename_axis("foo", inplace=True)
assert return_value is None
assert no_return is None
tm.assert_frame_equal(result, expected)
expected = float_frame.rename_axis("bar", axis=1)
result = float_frame.copy()
return_value = no_return = result.rename_axis("bar", axis=1, inplace=True)
assert return_value is None
assert no_return is None
tm.assert_frame_equal(result, expected)
def test_rename_axis_raises(self):
# GH#17833
df = DataFrame({"A": [1, 2], "B": [1, 2]})
with pytest.raises(ValueError, match="Use `.rename`"):
df.rename_axis(id, axis=0)
with pytest.raises(ValueError, match="Use `.rename`"):
df.rename_axis({0: 10, 1: 20}, axis=0)
with pytest.raises(ValueError, match="Use `.rename`"):
df.rename_axis(id, axis=1)
with pytest.raises(ValueError, match="Use `.rename`"):
df["A"].rename_axis(id)
def test_rename_axis_mapper(self):
# GH#19978
mi = MultiIndex.from_product([["a", "b", "c"], [1, 2]], names=["ll", "nn"])
df = DataFrame(
{"x": list(range(len(mi))), "y": [i * 10 for i in range(len(mi))]}, index=mi
)
# Test for rename of the Index object of columns
result = df.rename_axis("cols", axis=1)
tm.assert_index_equal(result.columns, Index(["x", "y"], name="cols"))
# Test for rename of the Index object of columns using dict
result = result.rename_axis(columns={"cols": "new"}, axis=1)
tm.assert_index_equal(result.columns, Index(["x", "y"], name="new"))
# Test for renaming index using dict
result = df.rename_axis(index={"ll": "foo"})
assert result.index.names == ["foo", "nn"]
# Test for renaming index using a function
result = df.rename_axis(index=str.upper, axis=0)
assert result.index.names == ["LL", "NN"]
# Test for renaming index providing complete list
result = df.rename_axis(index=["foo", "goo"])
assert result.index.names == ["foo", "goo"]
# Test for changing index and columns at same time
sdf = df.reset_index().set_index("nn").drop(columns=["ll", "y"])
result = sdf.rename_axis(index="foo", columns="meh")
assert result.index.name == "foo"
assert result.columns.name == "meh"
# Test different error cases
with pytest.raises(TypeError, match="Must pass"):
df.rename_axis(index="wrong")
with pytest.raises(ValueError, match="Length of names"):
df.rename_axis(index=["wrong"])
with pytest.raises(TypeError, match="bogus"):
df.rename_axis(bogus=None)
@pytest.mark.parametrize(
"kwargs, rename_index, rename_columns",
[
({"mapper": None, "axis": 0}, True, False),
({"mapper": None, "axis": 1}, False, True),
({"index": None}, True, False),
({"columns": None}, False, True),
({"index": None, "columns": None}, True, True),
({}, False, False),
],
)
def test_rename_axis_none(self, kwargs, rename_index, rename_columns):
# GH 25034
index = Index(list("abc"), name="foo")
columns = Index(["col1", "col2"], name="bar")
data = np.arange(6).reshape(3, 2)
df = DataFrame(data, index, columns)
result = df.rename_axis(**kwargs)
expected_index = index.rename(None) if rename_index else index
expected_columns = columns.rename(None) if rename_columns else columns
expected = DataFrame(data, expected_index, expected_columns)
tm.assert_frame_equal(result, expected)
|
rs2/pandas
|
pandas/tests/frame/methods/test_rename_axis.py
|
Python
|
bsd-3-clause
| 4,091
|
# -*- coding: utf-8 -*-
"""
XForms - Controllers
"""
module = request.controller
# -----------------------------------------------------------------------------
def create():
"""
Given a Table, returns an XForms to create an instance:
http://code.javarosa.org/wiki/buildxforms
http://www.w3schools.com/xforms/
http://oreilly.com/catalog/9780596003692/preview.html
Known field requirements that don't work properly:
IS_IN_DB
IS_NOT_ONE_OF
IS_EMAIL
IS_DATE_IN_RANGE
IS_DATETIME_IN_RANGE
"""
try:
tablename = request.args[0]
except:
session.error = T("Need to specify a table!")
redirect(URL(r=request))
title = tablename
table = s3db[tablename]
instance_list = []
bindings_list = []
controllers_list = []
itext_list = [TAG["text"](TAG["value"](s3.crud_strings[tablename].title_list),
_id="title")]
for fieldname in table.fields:
if fieldname in ["id", "created_on", "modified_on", "uuid", "mci",
"deleted", "created_by", "modified_by", "deleted_fk",
"owned_by_group", "owned_by_user"]:
# These will get added server-side
pass
elif table[fieldname].writable == False:
pass
else:
ref = "/" + title + "/" + fieldname
instance_list.append(generate_instance(table, fieldname))
bindings_list.append(generate_bindings(table, fieldname, ref))
controller, _itext_list = generate_controllers(table, fieldname, ref)
controllers_list.append(controller)
itext_list.extend(_itext_list)
#bindings_list.append(TAG["itext"](TAG["translation"](itext_list, _lang="eng")))
instance = TAG[title](instance_list, _xmlns="")
bindings = bindings_list
controllers = TAG["h:body"](controllers_list)
response.headers["Content-Type"] = "text/xml"
response.view = "xforms.xml"
return dict(title=title, instance=instance, bindings=bindings,
controllers=controllers, itext_list=itext_list)
# -----------------------------------------------------------------------------
def uses_requirement(requirement, field):
"""
Check if a given database field uses the specified requirement
(IS_IN_SET, IS_INT_IN_RANGE, etc)
"""
if hasattr(field.requires, "other") or requirement in str(field.requires):
if hasattr(field.requires, "other"):
if requirement in str(field.requires.other):
return True
elif requirement in str(field.requires):
return True
return False
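# Illustrative note (not from the original source): for a field declared with
# e.g. requires=IS_EMPTY_OR(IS_IN_SET(("a", "b"))), web2py exposes the wrapped
# validator as field.requires.other, which is why the check above inspects
# both the validator itself and its "other" attribute.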
# -----------------------------------------------------------------------------
def generate_instance(table, fieldname):
"""
Generates XML for the instance of the specified field.
"""
if table[fieldname].default:
instance = TAG[fieldname](table[fieldname].default)
else:
instance = TAG[fieldname]()
return instance
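# Illustrative: a field "name" with default "John" yields <name>John</name>,
# while a field without a default yields an empty <name></name> element.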
# -----------------------------------------------------------------------------
def generate_bindings(table, fieldname, ref):
"""
Generates the XML for bindings for the specified database field.
"""
field = table[fieldname]
if "IS_NOT_EMPTY" in str(field.requires):
required = "true()"
else:
required = "false()"
if field.type == "string":
_type = "string"
elif field.type == "double":
_type = "decimal"
# Collect doesn't support datetime yet
elif field.type == "date":
_type = "date"
elif field.type == "datetime":
_type = "datetime"
elif field.type == "integer":
_type = "int"
elif field.type == "boolean":
_type = "boolean"
elif field.type == "upload": # For images
_type = "binary"
elif field.type == "text":
_type = "text"
else:
# Unknown type
_type = "string"
if uses_requirement("IS_INT_IN_RANGE", field) \
or uses_requirement("IS_FLOAT_IN_RANGE", field):
if hasattr(field.requires, "other"):
maximum = field.requires.other.maximum
minimum = field.requires.other.minimum
else:
maximum = field.requires.maximum
minimum = field.requires.minimum
if minimum is None:
constraint = "(. < " + str(maximum) + ")"
elif maximum is None:
constraint = "(. > " + str(minimum) + ")"
else:
constraint = "(. > " + str(minimum) + " and . < " + str(maximum) + ")"
binding = TAG["bind"](_nodeset=ref,
_type=_type,
_required=required,
_constraint=constraint)
#elif uses_requirement("IS_DATETIME_IN_RANGE", field):
# pass
#elif uses_requirement("IS_EMAIL", field):
# pass
elif uses_requirement("IS_IN_SET", field):
binding = TAG["bind"](_nodeset=ref, _required=required)
else:
binding = TAG["bind"](_nodeset=ref, _type=_type, _required=required)
return binding
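# Illustrative: a field validated with minimum=1 and maximum=10 produces the
# constraint string "(. > 1 and . < 10)" in its <bind> element; note that the
# string concatenation above makes both bounds exclusive in the XForm.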
# -----------------------------------------------------------------------------
def generate_controllers(table, fieldname, ref):
"""
Generates the controllers XML for the database table field.
"""
itext_list = [] # Internationalization
controllers_list = []
field = table[fieldname]
itext_list.append(TAG["text"](TAG["value"](field.label),
_id=ref + ":label"))
itext_list.append(TAG["text"](TAG["value"](field.comment),
_id=ref + ":hint"))
if hasattr(field.requires, "option"):
items_list = []
for option in field.requires.theset:
items_list.append(TAG["item"](TAG["label"](option), TAG["value"](option)))
controllers_list.append(TAG["select1"](items_list, _ref=fieldname))
#elif uses_requirement("IS_IN_DB", field):
# ToDo (similar to IS_IN_SET)?
#pass
#elif uses_requirement("IS_NOT_ONE_OF", field):
# ToDo
#pass
elif uses_requirement("IS_IN_SET", field): # Defined below
if hasattr(field.requires, "other"):
insetrequires = field.requires.other
else:
insetrequires = field.requires
theset = insetrequires.theset
items_list = []
items_list.append(TAG["label"](_ref="jr:itext('" + ref + ":label')"))
items_list.append(TAG["hint"](_ref="jr:itext('" + ref + ":hint')"))
if theset:
option_num = 0 # for formatting something like "jr:itext('stuff:option0')"
for option in theset:
if field.type == "integer":
option = int(option)
option_ref = ref + ":option" + str(option_num)
items_list.append(TAG["item"](TAG["label"](_ref="jr:itext('" + option_ref + "')"),
TAG["value"](option)))
#itext_list.append(TAG["text"](TAG["value"](field.represent(option)), _id=option_ref))
itext_list.append(TAG["text"](TAG["value"](insetrequires.labels[theset.index(str(option))]),
_id=option_ref))
option_num += 1
if insetrequires.multiple:
controller = TAG["select"](items_list, _ref=ref)
else:
controller = TAG["select1"](items_list, _ref=ref)
elif field.type == "boolean": # Using select1, is there an easier way to do this?
        items_list = []
items_list.append(TAG["label"](_ref="jr:itext('" + ref + ":label')"))
items_list.append(TAG["hint"](_ref="jr:itext('" + ref + ":hint')"))
# True option
items_list.append(TAG["item"](TAG["label"](_ref="jr:itext('" + ref + ":option0')"),
TAG["value"](1)))
itext_list.append(TAG["text"](TAG["value"]("True"),
_id=ref + ":option0"))
# False option
items_list.append(TAG["item"](TAG["label"](_ref="jr:itext('" + ref + ":option1')"),
TAG["value"](0)))
itext_list.append(TAG["text"](TAG["value"]("False"),
_id=ref + ":option1"))
controller = TAG["select1"](items_list, _ref=ref)
elif field.type == "upload": # For uploading images
        items_list = []
items_list.append(TAG["label"](_ref="jr:itext('" + ref + ":label')"))
items_list.append(TAG["hint"](_ref="jr:itext('" + ref + ":hint')"))
controller = TAG["upload"](items_list, _ref=ref, _mediatype="image/*")
elif field.writable == False:
controller = TAG["input"](TAG["label"](field.label), _ref=ref,
_readonly="true",
_default=field.default.upper())
else:
# Normal Input field
controller = TAG["input"](TAG["label"](field.label), _ref=ref)
return controller, itext_list
# -----------------------------------------------------------------------------
def csvdata(nodelist):
"""
Returns the data in the given node as a comma separated string
"""
data = ""
for subnode in nodelist:
if (subnode.nodeType == subnode.ELEMENT_NODE):
try:
data = data + "," + subnode.childNodes[0].data
except:
                data = data + ","
return data[1:] + "\n"
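# Illustrative: a node <row><a>1</a><b>2</b></row> serialises to "1,2\n";
# a child element with no text contributes an empty CSV column.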
# -----------------------------------------------------------------------------
def csvheader(parent, nodelist):
"""
Gives the header for the CSV
"""
header = ""
for subnode in nodelist:
if (subnode.nodeType == subnode.ELEMENT_NODE):
header = header + "," + parent + "." + subnode.tagName
return header[1:] + "\n"
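# Illustrative: parent "person" with child elements <first/> and <last/>
# yields the header "person.first,person.last\n".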
# -----------------------------------------------------------------------------
def importxml(db, xmlinput):
"""
Converts the XML to a CSV compatible with the import_from_csv_file of web2py
@ToDo: rewrite this to go via S3Resource for proper Auth checking, Audit.
"""
import cStringIO
import xml.dom.minidom
try:
doc = xml.dom.minidom.parseString(xmlinput)
except:
raise Exception("XML parse error")
parent = doc.childNodes[0].tagName
csvout = csvheader(parent, doc.childNodes[0].childNodes)
for subnode in doc.childNodes:
csvout = csvout + csvdata(subnode.childNodes)
fh = cStringIO.StringIO()
fh.write(csvout)
fh.seek(0, 0)
db[parent].import_from_csv_file(fh)
# -----------------------------------------------------------------------------
@auth.s3_requires_membership(1)
def post():
data = importxml(db, request.body.read())
return data
# -----------------------------------------------------------------------------
def formList():
"""
Generates a list of Xforms based on database tables for ODK Collect
http://code.google.com/p/opendatakit/
"""
# Test statements
#xml = TAG.forms(*[TAG.form(getName("Name"), _url = "http://" + request.env.http_host + URL(c="static", "current.xml"))])
#xml = TAG.forms(*[TAG.form(getName(t), _url = "http://" + request.env.http_host + URL(f="create", args=t)) for t in db.tables()])
# List of a couple simple tables to avoid a giant list of all the tables
#tables = ["pf_missing_report", "pr_presence"]
tables = ["irs_ireport", "rms_req", "cr_shelter", "pr_person", "pr_image"]
xml = TAG.forms()
for tablename in tables:
xml.append(TAG.form(get_name(tablename),
_url = "http://" + request.env.http_host + URL(f="create", args=tablename)))
response.headers["Content-Type"] = "text/xml"
response.view = "xforms.xml"
return xml
# -----------------------------------------------------------------------------
def submission():
"""
Allows for submission of Xforms by ODK Collect
http://code.google.com/p/opendatakit/
"""
# @ToDo: Something better than this crude check
if not auth.s3_logged_in():
auth.permission.fail()
try:
from cStringIO import StringIO # Faster, where available
except:
from StringIO import StringIO
    import cgi
    import json  # needed below to parse the import result
    import os    # needed below to build the stylesheet path
    from lxml import etree
source = request.post_vars.get("xml_submission_file", None)
if isinstance(source, cgi.FieldStorage):
if source.filename:
xmlinput = source.file
else:
xmlinput = source.value
if isinstance(xmlinput, basestring):
xmlinput = StringIO(xmlinput)
else:
raise HTTP(400, "Invalid Request: Expected an XForm")
tree = etree.parse(xmlinput)
tablename = tree.getroot().tag
resource = s3db.resource(tablename)
stylesheet = os.path.join(request.folder, "static", "formats", "odk",
"import.xsl")
try:
result = resource.import_xml(source=tree, stylesheet=stylesheet)
    except (IOError, SyntaxError):
raise HTTP(500, "Internal server error")
# Parse response
status = json.loads(result)["statuscode"]
if status == 200:
r = HTTP(201, "Saved") # ODK Collect only accepts 201
r.headers["Location"] = request.env.http_host
raise r
else:
raise HTTP(status, result)
# -----------------------------------------------------------------------------
@auth.s3_requires_membership(2)
def submission_old():
"""
Allows for submission of xforms by ODK Collect
http://code.google.com/p/opendatakit/
"""
response.headers["Content-Type"] = "text/xml"
xml = str(request.post_vars.xml_submission_file.value)
if len(xml) == 0:
raise HTTP(400, "Need some xml!")
importxml(db, xml)
r = HTTP(201, "Saved.")
r.headers["Location"] = request.env.http_host
raise r
# -----------------------------------------------------------------------------
def get_name(tablename):
"""
Generates a pretty(er) name from a database table name.
"""
return tablename[tablename.find("_") + 1:].replace("_", " ").capitalize()
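# Illustrative examples (not part of the original source):
#   get_name("pr_person")   -> "Person"
#   get_name("irs_ireport") -> "Ireport"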
# END =========================================================================
|
madhurauti/Map-Polygon
|
controllers/xforms.py
|
Python
|
mit
| 14,374
|
"""
homeassistant.components.keyboard
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Provides functionality to emulate keyboard presses on host machine.
"""
import logging
from homeassistant.const import (
SERVICE_VOLUME_UP, SERVICE_VOLUME_DOWN, SERVICE_VOLUME_MUTE,
SERVICE_MEDIA_NEXT_TRACK, SERVICE_MEDIA_PREVIOUS_TRACK,
SERVICE_MEDIA_PLAY_PAUSE)
DOMAIN = "keyboard"
DEPENDENCIES = []
REQUIREMENTS = ['pyuserinput>=0.1.9']
def volume_up(hass):
""" Press the keyboard button for volume up. """
hass.services.call(DOMAIN, SERVICE_VOLUME_UP)
def volume_down(hass):
""" Press the keyboard button for volume down. """
hass.services.call(DOMAIN, SERVICE_VOLUME_DOWN)
def volume_mute(hass):
""" Press the keyboard button for muting volume. """
hass.services.call(DOMAIN, SERVICE_VOLUME_MUTE)
def media_play_pause(hass):
""" Press the keyboard button for play/pause. """
hass.services.call(DOMAIN, SERVICE_MEDIA_PLAY_PAUSE)
def media_next_track(hass):
""" Press the keyboard button for next track. """
hass.services.call(DOMAIN, SERVICE_MEDIA_NEXT_TRACK)
def media_prev_track(hass):
""" Press the keyboard button for prev track. """
hass.services.call(DOMAIN, SERVICE_MEDIA_PREVIOUS_TRACK)
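# Illustrative: each helper above simply fires the corresponding service, e.g.
# volume_up(hass) invokes the SERVICE_VOLUME_UP handler registered in setup()
# below, which taps the matching media key on the host keyboard.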
def setup(hass, config):
""" Listen for keyboard events. """
try:
import pykeyboard
except ImportError:
logging.getLogger(__name__).exception(
"Error while importing dependency PyUserInput.")
return False
keyboard = pykeyboard.PyKeyboard()
keyboard.special_key_assignment()
hass.services.register(DOMAIN, SERVICE_VOLUME_UP,
lambda service:
keyboard.tap_key(keyboard.volume_up_key))
hass.services.register(DOMAIN, SERVICE_VOLUME_DOWN,
lambda service:
keyboard.tap_key(keyboard.volume_down_key))
hass.services.register(DOMAIN, SERVICE_VOLUME_MUTE,
lambda service:
keyboard.tap_key(keyboard.volume_mute_key))
hass.services.register(DOMAIN, SERVICE_MEDIA_PLAY_PAUSE,
lambda service:
keyboard.tap_key(keyboard.media_play_pause_key))
hass.services.register(DOMAIN, SERVICE_MEDIA_NEXT_TRACK,
lambda service:
keyboard.tap_key(keyboard.media_next_track_key))
hass.services.register(DOMAIN, SERVICE_MEDIA_PREVIOUS_TRACK,
lambda service:
keyboard.tap_key(keyboard.media_prev_track_key))
return True
|
vitorespindola/home-assistant
|
homeassistant/components/keyboard.py
|
Python
|
mit
| 2,654
|
import unittest
from rope.base.oi import objectdb, memorydb
from ropetest import testutils
def _do_for_all_dbs(function):
def called(self):
for db in self.dbs:
function(self, db)
return called
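# Illustrative: used as a decorator, _do_for_all_dbs runs the wrapped test once
# per backend in self.dbs (here only an in-memory MemoryDB-backed ObjectDB).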
class _MockValidation(object):
def is_value_valid(self, value):
return value != -1
def is_more_valid(self, new, old):
return new != -1
def is_file_valid(self, path):
return path != 'invalid'
def is_scope_valid(self, path, key):
return path != 'invalid' and key != 'invalid'
class _MockFileListObserver(object):
log = ''
def added(self, path):
self.log += 'added %s ' % path
def removed(self, path):
self.log += 'removed %s ' % path
class ObjectDBTest(unittest.TestCase):
def setUp(self):
super(ObjectDBTest, self).setUp()
self.project = testutils.sample_project()
validation = _MockValidation()
self.dbs = [
objectdb.ObjectDB(memorydb.MemoryDB(self.project), validation)]
def tearDown(self):
for db in self.dbs:
db.write()
testutils.remove_project(self.project)
super(ObjectDBTest, self).tearDown()
@_do_for_all_dbs
def test_simple_per_name(self, db):
db.add_pername('file', 'key', 'name', 1)
self.assertEqual(1, db.get_pername('file', 'key', 'name'))
@_do_for_all_dbs
def test_simple_per_name_does_not_exist(self, db):
self.assertEquals(None, db.get_pername('file', 'key', 'name'))
@_do_for_all_dbs
def test_simple_per_name_after_syncing(self, db):
db.add_pername('file', 'key', 'name', 1)
db.write()
self.assertEquals(1, db.get_pername('file', 'key', 'name'))
@_do_for_all_dbs
def test_getting_returned(self, db):
db.add_callinfo('file', 'key', (1, 2), 3)
self.assertEquals(3, db.get_returned('file', 'key', (1, 2)))
@_do_for_all_dbs
def test_getting_returned_when_does_not_match(self, db):
db.add_callinfo('file', 'key', (1, 2), 3)
self.assertEquals(None, db.get_returned('file', 'key', (1, 1)))
@_do_for_all_dbs
def test_getting_call_info(self, db):
db.add_callinfo('file', 'key', (1, 2), 3)
call_infos = list(db.get_callinfos('file', 'key'))
self.assertEquals(1, len(call_infos))
self.assertEquals((1, 2), call_infos[0].get_parameters())
self.assertEquals(3, call_infos[0].get_returned())
@_do_for_all_dbs
def test_invalid_per_name(self, db):
db.add_pername('file', 'key', 'name', -1)
self.assertEquals(None, db.get_pername('file', 'key', 'name'))
@_do_for_all_dbs
def test_overwriting_per_name(self, db):
db.add_pername('file', 'key', 'name', 1)
db.add_pername('file', 'key', 'name', 2)
self.assertEquals(2, db.get_pername('file', 'key', 'name'))
@_do_for_all_dbs
def test_not_overwriting_with_invalid_per_name(self, db):
db.add_pername('file', 'key', 'name', 1)
db.add_pername('file', 'key', 'name', -1)
self.assertEquals(1, db.get_pername('file', 'key', 'name'))
@_do_for_all_dbs
def test_getting_invalid_returned(self, db):
db.add_callinfo('file', 'key', (1, 2), -1)
self.assertEquals(None, db.get_returned('file', 'key', (1, 2)))
@_do_for_all_dbs
def test_not_overwriting_with_invalid_returned(self, db):
db.add_callinfo('file', 'key', (1, 2), 3)
db.add_callinfo('file', 'key', (1, 2), -1)
self.assertEquals(3, db.get_returned('file', 'key', (1, 2)))
@_do_for_all_dbs
def test_get_files(self, db):
db.add_callinfo('file1', 'key', (1, 2), 3)
db.add_callinfo('file2', 'key', (1, 2), 3)
self.assertEquals(set(['file1', 'file2']), set(db.get_files()))
@_do_for_all_dbs
def test_validating_files(self, db):
db.add_callinfo('invalid', 'key', (1, 2), 3)
db.validate_files()
self.assertEquals(0, len(db.get_files()))
@_do_for_all_dbs
def test_validating_file_for_scopes(self, db):
db.add_callinfo('file', 'invalid', (1, 2), 3)
db.validate_file('file')
self.assertEquals(1, len(db.get_files()))
self.assertEquals(0, len(list(db.get_callinfos('file', 'invalid'))))
@_do_for_all_dbs
def test_validating_file_moved(self, db):
db.add_callinfo('file', 'key', (1, 2), 3)
db.file_moved('file', 'newfile')
self.assertEquals(1, len(db.get_files()))
self.assertEquals(1, len(list(db.get_callinfos('newfile', 'key'))))
@_do_for_all_dbs
def test_using_file_list_observer(self, db):
db.add_callinfo('invalid', 'key', (1, 2), 3)
observer = _MockFileListObserver()
db.add_file_list_observer(observer)
db.validate_files()
self.assertEquals('removed invalid ', observer.log)
def suite():
result = unittest.TestSuite()
result.addTests(unittest.makeSuite(ObjectDBTest))
return result
if __name__ == '__main__':
unittest.main()
|
timwee/emacs-starter-kit-mr-flip-forked
|
vendor/rope/ropetest/objectdbtest.py
|
Python
|
gpl-3.0
| 5,054
|
"""This file is part of DING0, the DIstribution Network GeneratOr.
DING0 is a tool to generate synthetic medium and low voltage power
distribution grids based on open data.
It is developed in the project open_eGo: https://openegoproject.wordpress.com
DING0 lives at github: https://github.com/openego/ding0/
The documentation is available on RTD: http://ding0.readthedocs.io"""
__copyright__ = "Reiner Lemoine Institut gGmbH"
__license__ = "GNU Affero General Public License Version 3 (AGPL-3.0)"
__url__ = "https://github.com/openego/ding0/blob/master/LICENSE"
__author__ = "nesnoj, gplssm"
|
openego/dingo
|
ding0/grid/mv_grid/util/__init__.py
|
Python
|
agpl-3.0
| 612
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.core.backends import codepen_credentials_backend
from telemetry.core.backends import form_based_credentials_backend_unittest_base
class TestCodePenCredentialsBackend(
form_based_credentials_backend_unittest_base.
FormBasedCredentialsBackendUnitTestBase):
def setUp(self):
self._credentials_type = 'codepen'
def testLoginUsingMock(self):
backend = codepen_credentials_backend.CodePenCredentialsBackend()
self._LoginUsingMock(backend, backend.url, backend.login_input_id,
backend.password_input_id, backend.login_form_id,
backend.logged_in_javascript)
|
guorendong/iridium-browser-ubuntu
|
tools/telemetry/telemetry/core/backends/codepen_credentials_backend_unittest.py
|
Python
|
bsd-3-clause
| 804
|
import unittest
from google.appengine.api import datastore
from google.appengine.api import datastore_errors
from google.appengine.ext.db import non_transactional
from django.db import models
from django.http import HttpRequest
from django.core.signals import request_finished, request_started
from django.core.cache import cache
from djangae.contrib import sleuth
from djangae.test import TestCase
from djangae.db import unique_utils
from djangae.db import transaction
from djangae.db.backends.appengine.context import ContextStack
from djangae.db.backends.appengine import caching
from djangae.db.caching import disable_cache, clear_context_cache
class FakeEntity(dict):
COUNTER = 1
def __init__(self, data, id=0):
self.id = id or FakeEntity.COUNTER
FakeEntity.COUNTER += 1
self.update(data)
def key(self):
return datastore.Key.from_path("auth_user", self.id)
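# Illustrative: FakeEntity({"bananas": 1}) acts as a dict-like stand-in for a
# datastore entity whose key() is of kind "auth_user" with an auto-assigned id.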
class ContextStackTests(TestCase):
def test_push_pop(self):
stack = ContextStack()
self.assertEqual({}, stack.top.cache)
entity = FakeEntity({"bananas": 1})
stack.top.cache_entity(["bananas:1"], entity, caching.CachingSituation.DATASTORE_PUT)
self.assertEqual({"bananas": 1}, stack.top.cache.values()[0])
stack.push()
self.assertEqual([], stack.top.cache.values())
self.assertEqual(2, stack.size)
stack.push()
stack.top.cache_entity(["apples:2"], entity, caching.CachingSituation.DATASTORE_PUT)
self.assertItemsEqual(["apples:2"], stack.top.cache.keys())
stack.pop()
self.assertItemsEqual([], stack.top.cache.keys())
self.assertEqual(2, stack.size)
self.assertEqual(1, stack.staged_count)
updated = FakeEntity({"bananas": 3})
stack.top.cache_entity(["bananas:1"], updated, caching.CachingSituation.DATASTORE_PUT)
stack.pop(apply_staged=True, clear_staged=True)
self.assertEqual(1, stack.size)
self.assertEqual({"bananas": 3}, stack.top.cache["bananas:1"])
self.assertEqual(0, stack.staged_count)
def test_property_deletion(self):
stack = ContextStack()
entity = FakeEntity({"field1": "one", "field2": "two"})
stack.top.cache_entity(["entity"], entity, caching.CachingSituation.DATASTORE_PUT)
stack.push() # Enter transaction
entity["field1"] = "oneone"
del entity["field2"]
stack.top.cache_entity(["entity"], entity, caching.CachingSituation.DATASTORE_PUT)
stack.pop(apply_staged=True, clear_staged=True)
self.assertEqual({"field1": "oneone"}, stack.top.cache["entity"])
class CachingTestModel(models.Model):
field1 = models.CharField(max_length=255, unique=True)
comb1 = models.IntegerField(default=0)
comb2 = models.CharField(max_length=255)
class Meta:
unique_together = [
("comb1", "comb2")
]
app_label = "djangae"
class MemcacheCachingTests(TestCase):
"""
We need to be pretty selective with our caching in memcache, because unlike
the context caching, this stuff is global.
For that reason, we have the following rules:
- save/update caches entities outside transactions
- Inside transactions save/update wipes out the cache for updated entities (a subsequent read by key will populate it again)
- Inside transactions filter/get does not hit memcache (that just breaks transactions)
- filter/get by key caches entities (consistent)
- filter/get by anything else does not (eventually consistent)
"""
@disable_cache(memcache=False, context=True)
def test_save_inside_transaction_evicts_cache(self):
entity_data = {
"field1": "Apple",
"comb1": 1,
"comb2": "Cherry"
}
identifiers = unique_utils.unique_identifiers_from_entity(CachingTestModel, FakeEntity(entity_data, id=222))
instance = CachingTestModel.objects.create(id=222, **entity_data)
for identifier in identifiers:
self.assertEqual(entity_data, cache.get(identifier))
with transaction.atomic():
instance.field1 = "Banana"
instance.save()
# Make sure that altering inside the transaction evicted the item from the cache
# and that a get then hits the datastore (which then in turn caches)
with sleuth.watch("google.appengine.api.datastore.Get") as datastore_get:
for identifier in identifiers:
self.assertIsNone(cache.get(identifier))
self.assertEqual("Banana", CachingTestModel.objects.get(pk=instance.pk).field1)
self.assertTrue(datastore_get.called)
@disable_cache(memcache=False, context=True)
def test_save_caches_outside_transaction_only(self):
entity_data = {
"field1": "Apple",
"comb1": 1,
"comb2": "Cherry"
}
identifiers = unique_utils.unique_identifiers_from_entity(CachingTestModel, FakeEntity(entity_data, id=222))
for identifier in identifiers:
self.assertIsNone(cache.get(identifier))
instance = CachingTestModel.objects.create(id=222, **entity_data)
for identifier in identifiers:
self.assertEqual(entity_data, cache.get(identifier))
instance.delete()
for identifier in identifiers:
self.assertIsNone(cache.get(identifier))
with transaction.atomic():
instance = CachingTestModel.objects.create(**entity_data)
for identifier in identifiers:
self.assertIsNone(cache.get(identifier))
@disable_cache(memcache=False, context=True)
def test_save_wipes_entity_from_cache_inside_transaction(self):
entity_data = {
"field1": "Apple",
"comb1": 1,
"comb2": "Cherry"
}
identifiers = unique_utils.unique_identifiers_from_entity(CachingTestModel, FakeEntity(entity_data, id=222))
for identifier in identifiers:
self.assertIsNone(cache.get(identifier))
instance = CachingTestModel.objects.create(id=222, **entity_data)
for identifier in identifiers:
self.assertEqual(entity_data, cache.get(identifier))
with transaction.atomic():
instance.save()
for identifier in identifiers:
self.assertIsNone(cache.get(identifier))
@disable_cache(memcache=False, context=True)
def test_transactional_save_wipes_the_cache_only_after_its_result_is_consistently_available(self):
entity_data = {
"field1": "old",
}
identifiers = unique_utils.unique_identifiers_from_entity(CachingTestModel, FakeEntity(entity_data, id=222))
for identifier in identifiers:
self.assertIsNone(cache.get(identifier))
instance = CachingTestModel.objects.create(id=222, **entity_data)
for identifier in identifiers:
self.assertEqual("old", cache.get(identifier)["field1"])
@non_transactional
def non_transactional_read(instance_pk):
CachingTestModel.objects.get(pk=instance_pk)
with transaction.atomic():
instance.field1 = "new"
instance.save()
non_transactional_read(instance.pk) # could potentially recache the old object
for identifier in identifiers:
self.assertIsNone(cache.get(identifier))
@disable_cache(memcache=False, context=True)
def test_consistent_read_updates_memcache_outside_transaction(self):
entity_data = {
"field1": "Apple",
"comb1": 1,
"comb2": "Cherry"
}
identifiers = unique_utils.unique_identifiers_from_entity(CachingTestModel, FakeEntity(entity_data, id=222))
for identifier in identifiers:
self.assertIsNone(cache.get(identifier))
CachingTestModel.objects.create(id=222, **entity_data)
for identifier in identifiers:
self.assertEqual(entity_data, cache.get(identifier))
cache.clear()
for identifier in identifiers:
self.assertIsNone(cache.get(identifier))
CachingTestModel.objects.get(id=222) # Consistent read
for identifier in identifiers:
self.assertEqual(entity_data, cache.get(identifier))
@disable_cache(memcache=False, context=True)
def test_eventual_read_doesnt_update_memcache(self):
entity_data = {
"field1": "Apple",
"comb1": 1,
"comb2": "Cherry"
}
identifiers = unique_utils.unique_identifiers_from_entity(CachingTestModel, FakeEntity(entity_data, id=222))
for identifier in identifiers:
self.assertIsNone(cache.get(identifier))
CachingTestModel.objects.create(id=222, **entity_data)
for identifier in identifiers:
self.assertEqual(entity_data, cache.get(identifier))
cache.clear()
for identifier in identifiers:
self.assertIsNone(cache.get(identifier))
CachingTestModel.objects.all()[0] # Inconsistent read
for identifier in identifiers:
self.assertIsNone(cache.get(identifier))
@disable_cache(memcache=False, context=True)
def test_unique_filter_hits_memcache(self):
entity_data = {
"field1": "Apple",
"comb1": 1,
"comb2": "Cherry"
}
original = CachingTestModel.objects.create(**entity_data)
with sleuth.watch("google.appengine.api.datastore.Query.Run") as datastore_query:
instance = CachingTestModel.objects.filter(field1="Apple").all()[0]
self.assertEqual(original, instance)
self.assertFalse(datastore_query.called)
@disable_cache(memcache=False, context=True)
def test_unique_filter_applies_all_filters(self):
entity_data = {
"field1": "Apple",
"comb1": 1,
"comb2": "Cherry"
}
original = CachingTestModel.objects.create(**entity_data)
with sleuth.watch("google.appengine.api.datastore.Query.Run") as datastore_query:
# Expect no matches
num_instances = CachingTestModel.objects.filter(field1="Apple", comb1=0).count()
self.assertEqual(num_instances, 0)
@disable_cache(memcache=False, context=True)
def test_non_unique_filter_hits_datastore(self):
entity_data = {
"field1": "Apple",
"comb1": 1,
"comb2": "Cherry"
}
original = CachingTestModel.objects.create(**entity_data)
with sleuth.watch("google.appengine.api.datastore.Query.Run") as datastore_query:
instance = CachingTestModel.objects.filter(comb1=1).all()[0]
self.assertEqual(original, instance)
self.assertTrue(datastore_query.called)
@disable_cache(memcache=False, context=True)
def test_get_by_key_hits_memcache(self):
entity_data = {
"field1": "Apple",
"comb1": 1,
"comb2": "Cherry"
}
original = CachingTestModel.objects.create(**entity_data)
with sleuth.watch("google.appengine.api.datastore.Get") as datastore_get:
instance = CachingTestModel.objects.get(pk=original.pk)
self.assertEqual(original, instance)
self.assertFalse(datastore_get.called)
@disable_cache(memcache=False, context=True)
def test_get_by_key_hits_datastore_inside_transaction(self):
entity_data = {
"field1": "Apple",
"comb1": 1,
"comb2": "Cherry"
}
original = CachingTestModel.objects.create(**entity_data)
with sleuth.watch("google.appengine.api.datastore.Get") as datastore_get:
with transaction.atomic():
instance = CachingTestModel.objects.get(pk=original.pk)
self.assertEqual(original, instance)
self.assertTrue(datastore_get.called)
@disable_cache(memcache=False, context=True)
def test_unique_get_hits_memcache(self):
entity_data = {
"field1": "Apple",
"comb1": 1,
"comb2": "Cherry"
}
original = CachingTestModel.objects.create(**entity_data)
with sleuth.watch("google.appengine.api.datastore.Get") as datastore_get:
instance = CachingTestModel.objects.get(field1="Apple")
self.assertEqual(original, instance)
self.assertFalse(datastore_get.called)
@disable_cache(memcache=False, context=True)
def test_unique_get_hits_datastore_inside_transaction(self):
entity_data = {
"field1": "Apple",
"comb1": 1,
"comb2": "Cherry"
}
CachingTestModel.objects.create(**entity_data)
with sleuth.watch("google.appengine.api.datastore.Query.Run") as datastore_query:
with transaction.atomic():
try:
CachingTestModel.objects.get(field1="Apple")
except datastore_errors.BadRequestError:
# You can't query in a transaction, but still
pass
self.assertTrue(datastore_query.called)
class ContextCachingTests(TestCase):
"""
We can be a bit more liberal with hitting the context cache as it's
thread-local and request-local
The context cache is actually a stack. When you start a transaction we push a
copy of the current context onto the stack, when we finish a transaction we pop
the current context and apply the changes onto the outer transaction.
The rules are thus:
- Entering a transaction pushes a copy of the current context
- Rolling back a transaction pops the top of the stack
- Committing a transaction pops the top of the stack, and adds it to a queue
- When all transactions exit, the queue is applied to the current context one at a time
- save/update caches entities
- filter/get by key caches entities (consistent)
- filter/get by anything else does not (eventually consistent)
"""
@disable_cache(memcache=True, context=False)
def test_that_transactions_dont_inherit_context_cache(self):
"""
It's fine to hit the context cache inside an independent transaction,
providing that the cache doesn't inherit the outer cache! Otherwise we have
a situation where the transaction never hits the database when reloading an entity
"""
entity_data = {
"field1": u"Apple",
"comb1": 1,
"comb2": u"Cherry"
}
instance = CachingTestModel.objects.create(**entity_data)
with transaction.atomic():
with sleuth.watch("google.appengine.api.datastore.Get") as datastore_get:
instance = CachingTestModel.objects.get(pk=instance.pk)
self.assertEqual(1, datastore_get.call_count) # Shouldn't hit the cache!
instance.save()
with sleuth.watch("google.appengine.api.datastore.Get") as datastore_get:
self.assertEqual(0, datastore_get.call_count) # Should hit the cache
@disable_cache(memcache=True, context=False)
def test_caching_bug(self):
entity_data = {
"field1": u"Apple",
"comb1": 1,
"comb2": u"Cherry"
}
instance = CachingTestModel.objects.create(**entity_data)
expected = entity_data.copy()
expected[u"id"] = instance.pk
# Fetch the object, which causes it to be added to the context cache
self.assertItemsEqual(CachingTestModel.objects.filter(pk=instance.pk).values(), [expected])
# Doing a .values_list() fetches from the cache and wipes out the other fields from the entity
self.assertItemsEqual(CachingTestModel.objects.filter(pk=instance.pk).values_list("field1"), [("Apple",)])
        # Now fetch from the cache again, checking that the previously wiped fields are still intact
self.assertItemsEqual(CachingTestModel.objects.filter(pk=instance.pk).values(), [expected])
@disable_cache(memcache=True, context=False)
def test_transactions_get_their_own_context(self):
with sleuth.watch("djangae.db.backends.appengine.context.ContextStack.push") as context_push:
with transaction.atomic():
pass
self.assertTrue(context_push.called)
@disable_cache(memcache=True, context=False)
def test_independent_transaction_applies_to_outer_context(self):
"""
When a transaction commits successfully, we can apply its cache to the outer stack. This
alters the behaviour of transactions a little but in a positive way. Things that change are:
1. If you run an independent transaction inside another transaction, a subsequent Get for an entity
updated there will return the updated instance from the cache. Due to serialization of transactions
it's possible that this would have happened anyway (the outer transaction wouldn't start until the independent
one had finished). It makes this behaviour consistent even when serialization isn't possible.
2. Due to the fact the context cache is hit within a transaction, you can now Put, then Get an entity and it
will return its current state (as the transaction would see it), rather than the state at the beginning of the
transaction. This behaviour is nicer than the default.
"""
entity_data = {
"field1": "Apple",
"comb1": 1,
"comb2": "Cherry"
}
original = CachingTestModel.objects.create(**entity_data)
with transaction.atomic():
with transaction.atomic(independent=True):
inner = CachingTestModel.objects.get(pk=original.pk)
inner.field1 = "Banana"
inner.save()
outer = CachingTestModel.objects.get(pk=original.pk)
self.assertEqual("Banana", outer.field1)
outer.field1 = "Apple"
outer.save()
original = CachingTestModel.objects.get(pk=original.pk)
self.assertEqual("Apple", original.field1)
@disable_cache(memcache=True, context=False)
def test_nested_transactions_dont_get_their_own_context(self):
"""
The datastore doesn't support nested transactions, so when there is a nested
atomic block which isn't marked as independent, the atomic is a no-op. Therefore
we shouldn't push a context here, and we shouldn't pop it at the end either.
"""
self.assertEqual(1, caching._context.stack.size)
with transaction.atomic():
self.assertEqual(2, caching._context.stack.size)
with transaction.atomic():
self.assertEqual(2, caching._context.stack.size)
with transaction.atomic():
self.assertEqual(2, caching._context.stack.size)
self.assertEqual(2, caching._context.stack.size)
self.assertEqual(2, caching._context.stack.size)
self.assertEqual(1, caching._context.stack.size)
@disable_cache(memcache=True, context=False)
def test_nested_rollback_doesnt_apply_on_outer_commit(self):
entity_data = {
"field1": "Apple",
"comb1": 1,
"comb2": "Cherry"
}
original = CachingTestModel.objects.create(**entity_data)
with transaction.atomic():
try:
with transaction.atomic(independent=True):
inner = CachingTestModel.objects.get(pk=original.pk)
inner.field1 = "Banana"
inner.save()
raise ValueError() # Will rollback the transaction
except ValueError:
pass
outer = CachingTestModel.objects.get(pk=original.pk)
self.assertEqual("Apple", outer.field1)
original = CachingTestModel.objects.get(pk=original.pk)
self.assertEqual("Apple", original.field1) # Shouldn't have changed
@disable_cache(memcache=True, context=False)
def test_save_caches(self):
entity_data = {
"field1": "Apple",
"comb1": 1,
"comb2": "Cherry"
}
original = CachingTestModel.objects.create(**entity_data)
with sleuth.watch("google.appengine.api.datastore.Get") as datastore_get:
with sleuth.watch("django.core.cache.cache.get") as memcache_get:
original = CachingTestModel.objects.get(pk=original.pk)
self.assertFalse(datastore_get.called)
self.assertFalse(memcache_get.called)
@disable_cache(memcache=True, context=False)
def test_consistent_read_updates_cache_outside_transaction(self):
"""
A read inside a transaction shouldn't update the context cache outside that
transaction
"""
entity_data = {
"field1": "Apple",
"comb1": 1,
"comb2": "Cherry"
}
original = CachingTestModel.objects.create(**entity_data)
clear_context_cache()
CachingTestModel.objects.get(pk=original.pk) # Should update the cache
with sleuth.watch("google.appengine.api.datastore.Get") as datastore_get:
CachingTestModel.objects.get(pk=original.pk)
self.assertFalse(datastore_get.called)
clear_context_cache()
with transaction.atomic():
with sleuth.watch("google.appengine.api.datastore.Get") as datastore_get:
CachingTestModel.objects.get(pk=original.pk) # Should *not* update the cache
self.assertTrue(datastore_get.called)
with sleuth.watch("google.appengine.api.datastore.Get") as datastore_get:
CachingTestModel.objects.get(pk=original.pk)
self.assertTrue(datastore_get.called)
@disable_cache(memcache=True, context=False)
def test_inconsistent_read_doesnt_update_cache(self):
entity_data = {
"field1": "Apple",
"comb1": 1,
"comb2": "Cherry"
}
original = CachingTestModel.objects.create(**entity_data)
clear_context_cache()
CachingTestModel.objects.all() # Inconsistent
with sleuth.watch("google.appengine.api.datastore.Get") as datastore_get:
CachingTestModel.objects.get(pk=original.pk)
self.assertTrue(datastore_get.called)
@disable_cache(memcache=True, context=False)
def test_unique_filter_hits_cache(self):
entity_data = {
"field1": "Apple",
"comb1": 1,
"comb2": "Cherry"
}
CachingTestModel.objects.create(**entity_data)
with sleuth.watch("google.appengine.api.datastore.Get") as datastore_get:
list(CachingTestModel.objects.filter(field1="Apple"))
self.assertFalse(datastore_get.called)
@disable_cache(memcache=True, context=False)
def test_unique_filter_applies_all_filters(self):
entity_data = {
"field1": "Apple",
"comb1": 1,
"comb2": "Cherry"
}
original = CachingTestModel.objects.create(**entity_data)
with sleuth.watch("google.appengine.api.datastore.Query.Run") as datastore_query:
# Expect no matches
num_instances = CachingTestModel.objects.filter(field1="Apple", comb1=0).count()
self.assertEqual(num_instances, 0)
@disable_cache(memcache=True, context=False)
def test_get_by_key_hits_cache(self):
entity_data = {
"field1": "Apple",
"comb1": 1,
"comb2": "Cherry"
}
original = CachingTestModel.objects.create(**entity_data)
with sleuth.watch("google.appengine.api.datastore.Get") as datastore_get:
CachingTestModel.objects.get(pk=original.pk)
self.assertFalse(datastore_get.called)
@disable_cache(memcache=True, context=False)
def test_unique_get_hits_cache(self):
entity_data = {
"field1": "Apple",
"comb1": 1,
"comb2": "Cherry"
}
CachingTestModel.objects.create(**entity_data)
with sleuth.watch("google.appengine.api.datastore.Get") as datastore_get:
CachingTestModel.objects.get(field1="Apple")
self.assertFalse(datastore_get.called)
@disable_cache(memcache=True, context=False)
def test_context_cache_cleared_after_request(self):
""" The context cache should be cleared between requests. """
CachingTestModel.objects.create(field1="test")
with sleuth.watch("google.appengine.api.datastore.Query.Run") as query:
CachingTestModel.objects.get(field1="test")
self.assertEqual(query.call_count, 0)
# Now start a new request, which should clear the cache
request_started.send(HttpRequest(), keep_disabled_flags=True)
CachingTestModel.objects.get(field1="test")
self.assertEqual(query.call_count, 1)
# Now do another call, which should use the cache (because it would have been
# populated by the previous call)
CachingTestModel.objects.get(field1="test")
self.assertEqual(query.call_count, 1)
# Now clear the cache again by *finishing* a request
request_finished.send(HttpRequest(), keep_disabled_flags=True)
CachingTestModel.objects.get(field1="test")
self.assertEqual(query.call_count, 2)
|
wangjun/djangae
|
djangae/tests/test_caching.py
|
Python
|
bsd-3-clause
| 25,805
|
#!/bin/env python
# -*- coding: utf-8 -*-
"""
| This file is part of the web2py Web Framework
| Copyrighted by Massimo Di Pierro <mdipierro@cs.depaul.edu>
| License: LGPLv3 (http://www.gnu.org/licenses/lgpl.html)
gluon.rewrite parses incoming URLs and formats outgoing URLs for gluon.html.URL.
In addition, it rewrites both incoming and outgoing URLs based on the (optional) user-supplied routes.py,
which also allows for rewriting of certain error messages.
routes.py supports two styles of URL rewriting, depending on whether 'routers' is defined.
Refer to router.example.py and routes.example.py for additional documentation.
"""
import os
import re
import logging
import traceback
import threading
import urllib
from gluon.storage import Storage, List
from gluon.http import HTTP
from gluon.fileutils import abspath, read_file
from gluon.settings import global_settings
isdir = os.path.isdir
isfile = os.path.isfile
exists = os.path.exists
pjoin = os.path.join
logger = logging.getLogger('web2py.rewrite')
THREAD_LOCAL = threading.local() # thread-local storage for routing params
regex_at = re.compile(r'(?<!\\)\$[a-zA-Z]\w*')
regex_anything = re.compile(r'(?<!\\)\$anything')
regex_redirect = re.compile(r'(\d+)->(.*)')
regex_full_url = re.compile(
r'^(?P<scheme>http|https|HTTP|HTTPS)\://(?P<host>[^/]*)(?P<uri>.*)')
regex_version = re.compile(r'^(_[\d]+\.[\d]+\.[\d]+)$')
# pattern to find valid paths in url /application/controller/...
# this could be:
# for static pages:
# /<b:application>/static/<x:file>
# for dynamic pages:
# /<a:application>[/<c:controller>[/<f:function>[.<e:ext>][/<s:args>]]]
# application, controller, function and ext may only contain [a-zA-Z0-9_]
# file and args may also contain '-', '=', '.' and '/'
# apps in routes_apps_raw must parse raw_args into args
regex_url = re.compile('^/((?P<a>\w+)(/(?P<c>\w+)(/(?P<z>(?P<f>\w+)(\.(?P<e>[\w.]+))?(?P<s>.*)))?)?)?$')
regex_args = re.compile('[^\w/.@=-]')
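# Illustrative parse (not part of the original source): the URL
# '/app/ctl/fn.json/x/y' yields a='app', c='ctl', f='fn', e='json', s='/x/y'.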
def _router_default():
"""Returns new copy of default base router"""
router = Storage(
default_application='init',
applications='ALL',
default_controller='default',
controllers='DEFAULT',
default_function='index',
functions=dict(),
default_language=None,
languages=None,
root_static=['favicon.ico', 'robots.txt'],
map_static=None,
domains=None,
exclusive_domain=False,
map_hyphen=False,
acfe_match=r'\w+$', # legal app/ctlr/fcn/ext
#
# Implementation note:
# The file_match & args_match patterns use look-behind to avoid
# pathological backtracking from nested patterns.
#
        file_match=r'([-+=@$%\w]|(?<=[-+=@$%\w])[./])*$', # legal static subpath
args_match=r'([\w@ -]|(?<=[\w@ -])[.=])*$', # legal arg in args
)
return router
def _params_default(app=None):
"""Returns a new copy of default parameters"""
p = Storage()
p.name = app or "BASE"
p.default_application = app or "init"
p.default_controller = "default"
p.default_function = "index"
p.routes_app = []
p.routes_in = []
p.routes_out = []
p.routes_onerror = []
p.routes_apps_raw = []
p.error_handler = None
p.error_message = '<html><body><h1>%s</h1></body></html>'
p.error_message_ticket = \
'<html><body><h1>Internal error</h1>Ticket issued: <a href="/admin/default/ticket/%(ticket)s" target="_blank">%(ticket)s</a></body><!-- this is junk text else IE does not display the page: ' + ('x' * 512) + ' //--></html>'
p.routers = None
p.logging = 'off'
return p
params_apps = dict()
params = _params_default(app=None) # regex rewrite parameters
THREAD_LOCAL.routes = params # default to base regex rewrite parameters
routers = None
def log_rewrite(string):
"""Log rewrite activity under control of routes.py"""
if params.logging == 'debug': # catch common cases first
logger.debug(string)
elif params.logging == 'off' or not params.logging:
pass
elif params.logging == 'print':
print string
elif params.logging == 'info':
logger.info(string)
elif params.logging == 'warning':
logger.warning(string)
elif params.logging == 'error':
logger.error(string)
elif params.logging == 'critical':
logger.critical(string)
else:
logger.debug(string)
ROUTER_KEYS = set(
('default_application', 'applications',
'default_controller', 'controllers',
'default_function', 'functions',
'default_language', 'languages',
'domain', 'domains', 'root_static', 'path_prefix',
'exclusive_domain', 'map_hyphen', 'map_static',
'acfe_match', 'file_match', 'args_match'))
ROUTER_BASE_KEYS = set(
('applications', 'default_application',
'domains', 'path_prefix'))
# The external interface to rewrite consists of:
#
# load: load routing configuration file(s)
# url_in: parse and rewrite incoming URL
# url_out: assemble and rewrite outgoing URL
#
# THREAD_LOCAL.routes.default_application
# THREAD_LOCAL.routes.error_message
# THREAD_LOCAL.routes.error_message_ticket
# THREAD_LOCAL.routes.try_redirect_on_error
# THREAD_LOCAL.routes.error_handler
#
# filter_url: helper for doctest & unittest
# filter_err: helper for doctest & unittest
# regex_filter_out: doctest
def fixup_missing_path_info(environ):
eget = environ.get
path_info = eget('PATH_INFO')
request_uri = eget('REQUEST_URI')
if not path_info and request_uri:
# for fcgi, get path_info and
# query_string from request_uri
items = request_uri.split('?')
path_info = environ['PATH_INFO'] = items[0]
environ['QUERY_STRING'] = items[1] if len(items) > 1 else ''
elif not request_uri:
query_string = eget('QUERY_STRING')
if query_string:
environ['REQUEST_URI'] = '%s?%s' % (path_info, query_string)
else:
environ['REQUEST_URI'] = path_info
if not eget('HTTP_HOST'):
environ['HTTP_HOST'] = \
'%s:%s' % (eget('SERVER_NAME'), eget('SERVER_PORT'))
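# Illustrative: under fcgi an environ containing only REQUEST_URI='/a/b?x=1'
# gains PATH_INFO='/a/b' and QUERY_STRING='x=1'; a missing HTTP_HOST falls
# back to SERVER_NAME:SERVER_PORT.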
def url_in(request, environ):
"""Parses and rewrites incoming URL"""
if routers:
return map_url_in(request, environ)
return regex_url_in(request, environ)
def url_out(request, environ, application, controller, function,
args, other, scheme, host, port, language=None):
"""Assembles and rewrites outgoing URL"""
if routers:
acf = map_url_out(request, environ, application, controller,
function, args, other, scheme, host, port, language)
url = '%s%s' % (acf, other)
else:
url = '/%s/%s/%s%s' % (application, controller, function, other)
url = regex_filter_out(url, environ)
#
# fill in scheme and host if absolute URL is requested
# scheme can be a string, eg 'http', 'https', 'ws', 'wss'
#
if host is True or (host is None and (scheme or port is not None)):
host = request.env.http_host
if not scheme or scheme is True:
scheme = request.env.get('wsgi_url_scheme', 'http').lower() \
if request else 'http'
if host:
host_port = host if not port else host.split(':', 1)[0] + ':%s' % port
url = '%s://%s%s' % (scheme, host_port, url)
return url
def try_rewrite_on_error(http_response, request, environ, ticket=None):
"""
Called from main.wsgibase to rewrite the http response.
"""
status = int(str(http_response.status).split()[0])
if status >= 399 and THREAD_LOCAL.routes.routes_onerror:
keys = set(('%s/%s' % (request.application, status),
'%s/*' % (request.application),
'*/%s' % (status),
'*/*'))
for (key, uri) in THREAD_LOCAL.routes.routes_onerror:
if key in keys:
if uri == '!':
# do nothing!
return http_response, environ
elif '?' in uri:
path_info, query_string = uri.split('?', 1)
query_string += '&'
else:
path_info, query_string = uri, ''
query_string += \
'code=%s&ticket=%s&requested_uri=%s&request_url=%s' % \
(status, ticket, urllib.quote_plus(
request.env.request_uri), request.url)
if uri.startswith('http://') or uri.startswith('https://'):
# make up a response
url = path_info + '?' + query_string
message = 'You are being redirected <a href="%s">here</a>'
return HTTP(303, message % url, Location=url), environ
elif not environ.get('__ROUTES_ONERROR__', False):
# wsgibase will be called recursively with
# the routes_onerror path.
environ['__ROUTES_ONERROR__'] = True # limit recursion
path_info = '/' + path_info.lstrip('/')
environ['PATH_INFO'] = path_info
environ['QUERY_STRING'] = query_string
environ['WEB2PY_STATUS_CODE'] = status
return None, environ
# do nothing!
return http_response, environ
def try_redirect_on_error(http_object, request, ticket=None):
"""Called from main.wsgibase to rewrite the http response"""
status = int(str(http_object.status).split()[0])
if status > 399 and THREAD_LOCAL.routes.routes_onerror:
keys = set(('%s/%s' % (request.application, status),
'%s/*' % (request.application),
'*/%s' % (status),
'*/*'))
for (key, redir) in THREAD_LOCAL.routes.routes_onerror:
if key in keys:
if redir == '!':
break
elif '?' in redir:
url = '%s&code=%s&ticket=%s&requested_uri=%s&request_url=%s' % \
(redir, status, ticket,
urllib.quote_plus(request.env.request_uri),
request.url)
else:
url = '%s?code=%s&ticket=%s&requested_uri=%s&request_url=%s' % \
(redir, status, ticket,
urllib.quote_plus(request.env.request_uri),
request.url)
return HTTP(303, 'You are being redirected <a href="%s">here</a>' % url, Location=url)
return http_object
def load(routes='routes.py', app=None, data=None, rdict=None):
"""
load: read (if file) and parse routes
store results in params
(called from main.py at web2py initialization time)
If data is present, it's used instead of the routes.py contents.
If rdict is present, it must be a dict to be used for routers (unit test)
"""
global params
global routers
if app is None:
# reinitialize
global params_apps
params_apps = dict()
params = _params_default(app=None) # regex rewrite parameters
THREAD_LOCAL.routes = params # default to base regex rewrite parameters
routers = None
if isinstance(rdict, dict):
symbols = dict(routers=rdict)
path = 'rdict'
else:
if data is not None:
path = 'routes'
else:
if app is None:
path = abspath(routes)
else:
path = abspath('applications', app, routes)
if not exists(path):
return
data = read_file(path).replace('\r\n', '\n')
symbols = dict(app=app)
try:
exec (data + '\n') in symbols
except SyntaxError, e:
logger.error(
'%s has a syntax error and will not be loaded\n' % path
+ traceback.format_exc())
raise e
p = _params_default(app)
for sym in ('routes_app', 'routes_in', 'routes_out'):
if sym in symbols:
for items in symbols[sym]:
p[sym].append(compile_regex(*items))
for sym in ('routes_onerror', 'routes_apps_raw',
'error_handler', 'error_message', 'error_message_ticket',
'default_application', 'default_controller', 'default_function',
'logging'):
if sym in symbols:
p[sym] = symbols[sym]
if 'routers' in symbols:
p.routers = Storage(symbols['routers'])
for key in p.routers:
if isinstance(p.routers[key], dict):
p.routers[key] = Storage(p.routers[key])
if app is None:
params = p # install base rewrite parameters
THREAD_LOCAL.routes = params # install default as current routes
#
# create the BASE router if routers in use
#
routers = params.routers # establish routers if present
if isinstance(routers, dict):
routers = Storage(routers)
if routers is not None:
router = _router_default()
if routers.BASE:
router.update(routers.BASE)
routers.BASE = router
# scan each app in applications/
# create a router, if routers are in use
# parse the app-specific routes.py if present
#
all_apps = []
apppath = abspath('applications')
for appname in os.listdir(apppath):
if not appname.startswith('.') and \
isdir(abspath(apppath, appname)) and \
isdir(abspath(apppath, appname, 'controllers')):
all_apps.append(appname)
if routers:
router = Storage(routers.BASE) # new copy
if appname in routers:
for key in routers[appname].keys():
if key in ROUTER_BASE_KEYS:
raise SyntaxError("BASE-only key '%s' in router '%s'" % (key, appname))
router.update(routers[appname])
routers[appname] = router
if exists(abspath('applications', appname, routes)):
load(routes, appname)
if routers:
load_routers(all_apps)
else: # app
params_apps[app] = p
if routers and p.routers:
if app in p.routers:
routers[app].update(p.routers[app])
log_rewrite('URL rewrite is on. configuration in %s' % path)
def compile_regex(k, v, env=None):
"""
Preprocess and compile the regular expressions in routes_app/in/out
The resulting regex will match a pattern of the form::
[remote address]:[protocol]://[host]:[method] [path]
We allow abbreviated regexes on input; here we try to complete them.
"""
k0 = k # original k for error reporting
# bracket regex in ^...$ if not already done
if not k[0] == '^':
k = '^%s' % k
if not k[-1] == '$':
k = '%s$' % k
# if there are no :-separated parts, prepend a catch-all for the IP address
if k.find(':') < 0:
# k = '^.*?:%s' % k[1:]
k = '^.*?:https?://[^:/]+:[a-z]+ %s' % k[1:]
# if there's no ://, provide a catch-all for the protocol, host & method
if k.find('://') < 0:
i = k.find(':/')
if i < 0:
raise SyntaxError("routes pattern syntax error: path needs leading '/' [%s]" % k0)
k = r'%s:https?://[^:/]+:[a-z]+ %s' % (k[:i], k[i + 1:])
# $anything -> ?P<anything>.*
for item in regex_anything.findall(k):
k = k.replace(item, '(?P<anything>.*)')
# $a (etc) -> ?P<a>\w+
for item in regex_at.findall(k):
k = k.replace(item, r'(?P<%s>\w+)' % item[1:])
# same for replacement pattern, but with \g
for item in regex_at.findall(v):
v = v.replace(item, r'\g<%s>' % item[1:])
return (re.compile(k, re.DOTALL), v, env or {})
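# A minimal sketch (hypothetical helper, not part of web2py) tracing the
# completion rules above: an abbreviated pattern is bracketed in ^...$ and
# given catch-alls for the remote address, protocol, host and method parts.
def _demo_compile_regex():
    regex, value, env = compile_regex('/about', '/init/default/about')
    # regex.pattern is now '^.*?:https?://[^:/]+:[a-z]+ /about$'
    return regex.pattern, value, env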
def load_routers(all_apps):
"""Load-time post-processing of routers"""
for app in routers:
# initialize apps with routers that aren't present,
# on behalf of unit tests
if app not in all_apps:
all_apps.append(app)
router = Storage(routers.BASE) # new copy
if app != 'BASE':
keys = set(routers[app]).intersection(ROUTER_BASE_KEYS)
if keys:
raise SyntaxError("BASE-only key(s) %s in router '%s'" % (
tuple(keys), app))
router.update(routers[app])
routers[app] = router
router = routers[app]
keys = set(router).difference(ROUTER_KEYS)
if keys:
raise SyntaxError("unknown key(s) %s in router '%s'" % (
tuple(keys), app))
if not router.controllers:
router.controllers = set()
elif not isinstance(router.controllers, str):
router.controllers = set(router.controllers)
if router.languages:
router.languages = set(router.languages)
else:
router.languages = set()
if router.functions:
if isinstance(router.functions, (set, tuple, list)):
functions = set(router.functions)
if isinstance(router.default_function, str):
functions.add(
router.default_function) # legacy compatibility
router.functions = {router.default_controller: functions}
for controller in router.functions:
router.functions[controller] = set(
router.functions[controller])
else:
router.functions = dict()
if app != 'BASE':
for base_only in ROUTER_BASE_KEYS:
router.pop(base_only, None)
if 'domain' in router:
routers.BASE.domains[router.domain] = app
if isinstance(router.controllers, str) and router.controllers == 'DEFAULT':
router.controllers = set()
if isdir(abspath('applications', app)):
cpath = abspath('applications', app, 'controllers')
for cname in os.listdir(cpath):
if isfile(abspath(cpath, cname)) and cname.endswith('.py'):
router.controllers.add(cname[:-3])
if router.controllers:
router.controllers.add('static')
router.controllers.add(router.default_controller)
if isinstance(routers.BASE.applications, str) and routers.BASE.applications == 'ALL':
routers.BASE.applications = list(all_apps)
if routers.BASE.applications:
routers.BASE.applications = set(routers.BASE.applications)
else:
routers.BASE.applications = set()
for app in routers.keys():
# set router name
router = routers[app]
router.name = app
# compile URL validation patterns
router._acfe_match = re.compile(router.acfe_match)
router._file_match = re.compile(router.file_match)
if router.args_match:
router._args_match = re.compile(router.args_match)
# convert path_prefix to a list of path elements
if router.path_prefix:
if isinstance(router.path_prefix, str):
router.path_prefix = router.path_prefix.strip('/').split('/')
# rewrite BASE.domains as tuples
#
# key: 'domain[:port]' -> (domain, port)
# value: 'application[/controller] -> (application, controller)
# (port and controller may be None)
#
domains = dict()
if routers.BASE.domains:
for (d, a) in routers.BASE.domains.iteritems():
(domain, app) = (d.strip(':'), a.strip('/'))
if ':' in domain:
(domain, port) = domain.split(':')
else:
port = None
if '/' in app:
(app, ctlr) = app.split('/', 1)
else:
ctlr = None
if ctlr and '/' in ctlr:
(ctlr, fcn) = ctlr.split('/')
else:
fcn = None
if app not in all_apps and app not in routers:
raise SyntaxError("unknown app '%s' in domains" % app)
domains[(domain, port)] = (app, ctlr, fcn)
routers.BASE.domains = domains
def regex_uri(e, regexes, tag, default=None):
"""Filters incoming URI against a list of regexes"""
path = e['PATH_INFO']
host = e.get('HTTP_HOST', e.get('SERVER_NAME', 'localhost')).lower()
i = host.find(':')
if i > 0:
host = host[:i]
key = '%s:%s://%s:%s %s' % \
(e.get('REMOTE_ADDR', 'localhost'),
e.get('wsgi.url_scheme', 'http').lower(), host,
e.get('REQUEST_METHOD', 'get').lower(), path)
for (regex, value, custom_env) in regexes:
if regex.match(key):
e.update(custom_env)
rewritten = regex.sub(value, key)
log_rewrite('%s: [%s] [%s] -> %s' % (tag, key, value, rewritten))
return rewritten
log_rewrite('%s: [%s] -> %s (not rewritten)' % (tag, key, default))
return default
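# For example (a sketch): with REMOTE_ADDR '1.2.3.4', scheme 'http', host
# 'example.com', method GET and PATH_INFO '/app/c/f', the key tested
# against each compiled regex is '1.2.3.4:http://example.com:get /app/c/f'.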
def regex_select(env=None, app=None, request=None):
"""
Selects a set of regex rewrite params for the current request
"""
if app:
THREAD_LOCAL.routes = params_apps.get(app, params)
elif env and params.routes_app:
if routers:
map_url_in(request, env, app=True)
else:
app = regex_uri(env, params.routes_app, "routes_app")
THREAD_LOCAL.routes = params_apps.get(app, params)
else:
THREAD_LOCAL.routes = params # default to base rewrite parameters
log_rewrite("select routing parameters: %s" % THREAD_LOCAL.routes.name)
return app # for doctest
def regex_filter_in(e):
"""Regex rewrite incoming URL"""
routes = THREAD_LOCAL.routes
query = e.get('QUERY_STRING', None)
e['WEB2PY_ORIGINAL_URI'] = e['PATH_INFO'] + (query and ('?' + query) or '')
if routes.routes_in:
path = regex_uri(e, routes.routes_in,
"routes_in", e['PATH_INFO'])
rmatch = regex_redirect.match(path)
if rmatch:
raise HTTP(int(rmatch.group(1)), location=rmatch.group(2))
items = path.split('?', 1)
e['PATH_INFO'] = items[0]
if len(items) > 1:
if query:
query = items[1] + '&' + query
else:
query = items[1]
e['QUERY_STRING'] = query
e['REQUEST_URI'] = e['PATH_INFO'] + (query and ('?' + query) or '')
return e
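# Sketch: with routes_in holding the compiled pair
# ('/about', '/init/default/about'), an incoming PATH_INFO '/about' is
# rewritten to '/init/default/about'; a rewrite target containing '?'
# splits into PATH_INFO and QUERY_STRING, and a target matching
# regex_redirect (an HTTP status plus a location) raises an immediate
# redirect instead.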
def sluggify(key):
return key.lower().replace('.', '_')
def invalid_url(routes):
raise HTTP(400,
routes.error_message % 'invalid request',
web2py_error='invalid path')
def regex_url_in(request, environ):
"""Rewrites and parses incoming URL"""
# ##################################################
# select application
# rewrite URL if routes_in is defined
# update request.env
# ##################################################
regex_select(env=environ, request=request)
routes = THREAD_LOCAL.routes
if routes.routes_in:
environ = regex_filter_in(environ)
request.env.update(
(k.lower().replace('.', '_'), v) for k, v in environ.iteritems())
# ##################################################
# serve if a static file
# ##################################################
path = urllib.unquote(request.env.path_info) or '/'
path = path.replace('\\', '/')
if path.endswith('/') and len(path) > 1:
path = path[:-1]
match = regex_url.match(path)
if not match:
invalid_url(routes)
request.raw_args = (match.group('s') or '')
if request.raw_args.startswith('/'):
request.raw_args = request.raw_args[1:]
if match.group('c') == 'static':
application = match.group('a')
version, filename = None, match.group('z').replace(' ', '_')
if not filename:
raise HTTP(404)
items = filename.split('/', 1)
if regex_version.match(items[0]):
version, filename = items
static_folder = pjoin(global_settings.applications_parent,
'applications', application, 'static')
static_file = os.path.abspath(pjoin(static_folder, filename))
if not static_file.startswith(static_folder):
invalid_url(routes)
return (static_file, version, environ)
else:
# ##################################################
# parse application, controller and function
# ##################################################
request.application = match.group('a') or routes.default_application
request.controller = match.group('c') or routes.default_controller
request.function = match.group('f') or routes.default_function
request.raw_extension = match.group('e')
request.extension = request.raw_extension or 'html'
if request.application in routes.routes_apps_raw:
# application is responsible for parsing args
request.args = None
elif request.raw_args:
args = regex_args.sub('_', request.raw_args)
request.args = List(args.split('/'))
else:
request.args = List([])
return (None, None, environ)
def regex_filter_out(url, e=None):
"""Regex rewrite outgoing URL"""
if not hasattr(THREAD_LOCAL, 'routes'):
regex_select() # ensure routes is set (for application threads)
routes = THREAD_LOCAL.routes
if routers:
return url # already filtered
if routes.routes_out:
items = url.split('?', 1)
if e:
host = e.get('http_host', 'localhost').lower()
i = host.find(':')
if i > 0:
host = host[:i]
items[0] = '%s:%s://%s:%s %s' % \
(e.get('remote_addr', ''),
e.get('wsgi_url_scheme', 'http').lower(), host,
e.get('request_method', 'get').lower(), items[0])
else:
items[0] = ':http://localhost:get %s' % items[0]
for (regex, value, tmp) in routes.routes_out:
if regex.match(items[0]):
rewritten = '?'.join([regex.sub(value, items[0])] + items[1:])
log_rewrite('routes_out: [%s] -> %s' % (url, rewritten))
return rewritten
log_rewrite('routes_out: [%s] not rewritten' % url)
return url
def filter_url(url, method='get', remote='0.0.0.0',
out=False, app=False, lang=None,
domain=(None, None), env=False, scheme=None,
host=None, port=None, language=None):
"""
doctest/unittest interface to regex_filter_in() and regex_filter_out()
"""
match = regex_full_url.match(url)
urlscheme = match.group('scheme').lower()
urlhost = match.group('host').lower()
uri = match.group('uri')
k = uri.find('?')
if k < 0:
k = len(uri)
if isinstance(domain, str):
domain = (domain, None)
(path_info, query_string) = (uri[:k], uri[k + 1:])
path_info = urllib.unquote(path_info) # simulate server
e = {
'REMOTE_ADDR': remote,
'REQUEST_METHOD': method,
'wsgi.url_scheme': urlscheme,
'HTTP_HOST': urlhost,
'REQUEST_URI': uri,
'PATH_INFO': path_info,
'QUERY_STRING': query_string,
        # for filter_out, request.env uses lowercase keys
'remote_addr': remote,
'request_method': method,
'wsgi_url_scheme': urlscheme,
'http_host': urlhost
}
request = Storage()
e["applications_parent"] = global_settings.applications_parent
request.env = Storage(e)
request.uri_language = lang
# determine application only
#
if app:
if routers:
return map_url_in(request, e, app=True)
return regex_select(e)
# rewrite outbound URL
#
if out:
(request.env.domain_application,
request.env.domain_controller) = domain
items = path_info.lstrip('/').split('/')
if items[-1] == '':
items.pop() # adjust trailing empty args
assert len(items) >= 3, "at least /a/c/f is required"
a = items.pop(0)
c = items.pop(0)
f = items.pop(0)
if not routers:
return regex_filter_out(uri, e)
acf = map_url_out(
request, None, a, c, f, items, None, scheme, host, port, language=language)
if items:
url = '%s/%s' % (acf, '/'.join(items))
if items[-1] == '':
url += '/'
else:
url = acf
if query_string:
url += '?' + query_string
return url
# rewrite inbound URL
#
(static, version, e) = url_in(request, e)
if static:
return static
result = "/%s/%s/%s" % (
request.application, request.controller, request.function)
if request.extension and request.extension != 'html':
result += ".%s" % request.extension
if request.args:
result += " %s" % request.args
if e['QUERY_STRING']:
result += " ?%s" % e['QUERY_STRING']
if request.uri_language:
result += " (%s)" % request.uri_language
if env:
return request.env
return result
def filter_err(status, application='app', ticket='tkt'):
"""doctest/unittest interface to routes_onerror"""
routes = THREAD_LOCAL.routes
if status > 399 and routes.routes_onerror:
keys = set(('%s/%s' % (application, status),
'%s/*' % (application),
'*/%s' % (status),
'*/*'))
for (key, redir) in routes.routes_onerror:
if key in keys:
if redir == '!':
break
elif '?' in redir:
url = redir + '&' + 'code=%s&ticket=%s' % (status, ticket)
else:
url = redir + '?' + 'code=%s&ticket=%s' % (status, ticket)
return url # redirection
return status # no action
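# Sketch (assumes routes_onerror was loaded with an entry such as
# ('app/404', '/app/default/missing')):
#   filter_err(404, 'app', 'tkt') -> '/app/default/missing?code=404&ticket=tkt'
#   filter_err(200)               -> 200 (statuses <= 399 pass through)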
# router support
#
class MapUrlIn(object):
"""Logic for mapping incoming URLs"""
def __init__(self, request=None, env=None):
"""Initializes a map-in object"""
self.request = request
self.env = env
self.router = None
self.application = None
self.language = None
self.controller = None
self.function = None
self.extension = 'html'
self.controllers = set()
self.functions = dict()
self.languages = set()
self.default_language = None
self.map_hyphen = False
self.exclusive_domain = False
path = self.env['PATH_INFO']
self.query = self.env.get('QUERY_STRING', None)
path = path.lstrip('/')
self.env['PATH_INFO'] = '/' + path
self.env['WEB2PY_ORIGINAL_URI'] = self.env['PATH_INFO'] + (
self.query and ('?' + self.query) or '')
# to handle empty args, strip exactly one trailing slash, if present
# .../arg1// represents one trailing empty arg
#
if path.endswith('/'):
path = path[:-1]
self.args = List(path and path.split('/') or [])
# see http://www.python.org/dev/peps/pep-3333/#url-reconstruction for URL composition
self.remote_addr = self.env.get('REMOTE_ADDR', 'localhost')
self.scheme = self.env.get('wsgi.url_scheme', 'http').lower()
self.method = self.env.get('REQUEST_METHOD', 'get').lower()
(self.host, self.port) = (self.env.get('HTTP_HOST'), None)
if not self.host:
(self.host, self.port) = (
self.env.get('SERVER_NAME'), self.env.get('SERVER_PORT'))
if not self.host:
(self.host, self.port) = ('localhost', '80')
if ':' in self.host:
(self.host, self.port) = self.host.rsplit(':', 1) # for ipv6 support
if not self.port:
self.port = '443' if self.scheme == 'https' else '80'
def map_prefix(self):
"""Strips path prefix, if present in its entirety"""
prefix = routers.BASE.path_prefix
if prefix:
prefixlen = len(prefix)
if prefixlen > len(self.args):
return
for i in xrange(prefixlen):
if prefix[i] != self.args[i]:
return # prefix didn't match
self.args = List(self.args[prefixlen:]) # strip the prefix
def map_app(self):
"""Determines application name"""
base = routers.BASE # base router
self.domain_application = None
self.domain_controller = None
self.domain_function = None
self.map_hyphen = base.map_hyphen
arg0 = self.harg0
if not base.exclusive_domain and base.applications and arg0 in base.applications:
self.application = arg0
elif not base.exclusive_domain and arg0 and not base.applications:
self.application = arg0
elif (self.host, self.port) in base.domains:
(self.application, self.domain_controller,
self.domain_function) = base.domains[(self.host, self.port)]
self.env['domain_application'] = self.application
self.env['domain_controller'] = self.domain_controller
self.env['domain_function'] = self.domain_function
elif (self.host, None) in base.domains:
(self.application, self.domain_controller,
self.domain_function) = base.domains[(self.host, None)]
self.env['domain_application'] = self.application
self.env['domain_controller'] = self.domain_controller
self.env['domain_function'] = self.domain_function
elif base.applications and arg0 in base.applications:
self.application = arg0
elif arg0 and not base.applications:
self.application = arg0
else:
self.application = base.default_application or ''
self.pop_arg_if(self.application == arg0)
if not base._acfe_match.match(self.application):
raise HTTP(
400, THREAD_LOCAL.routes.error_message % 'invalid request',
web2py_error="invalid application: '%s'" % self.application)
if self.application not in routers and \
(self.application != THREAD_LOCAL.routes.default_application or self.application == 'welcome'):
raise HTTP(
400, THREAD_LOCAL.routes.error_message % 'invalid request',
web2py_error="unknown application: '%s'" % self.application)
# set the application router
#
log_rewrite("select application=%s" % self.application)
self.request.application = self.application
if self.application not in routers:
self.router = routers.BASE # support gluon.main.wsgibase init->welcome
else:
self.router = routers[self.application] # application router
self.controllers = self.router.controllers
self.default_controller = self.domain_controller or self.router.default_controller
self.functions = self.router.functions
self.languages = self.router.languages
self.default_language = self.router.default_language
self.map_hyphen = self.router.map_hyphen
self.exclusive_domain = self.router.exclusive_domain
self._acfe_match = self.router._acfe_match
self.file_match = self.router.file_match
self._file_match = self.router._file_match
self._args_match = self.router._args_match
def map_root_static(self):
"""
Handles root-static files (no hyphen mapping)
a root-static file is one whose incoming URL expects it to be at the root,
typically robots.txt & favicon.ico
"""
if len(self.args) == 1 and self.arg0 in self.router.root_static:
self.controller = self.request.controller = 'static'
root_static_file = pjoin(global_settings.applications_parent,
'applications', self.application,
self.controller, self.arg0)
log_rewrite("route: root static=%s" % root_static_file)
return root_static_file, None
return None, None
def map_language(self):
"""Handles language (no hyphen mapping)"""
arg0 = self.arg0 # no hyphen mapping
if arg0 and self.languages and arg0 in self.languages:
self.language = arg0
else:
self.language = self.default_language
if self.language:
log_rewrite("route: language=%s" % self.language)
self.pop_arg_if(self.language == arg0)
arg0 = self.arg0
def map_controller(self):
"""Identifies controller"""
# handle controller
#
arg0 = self.harg0 # map hyphens
if not arg0 or (self.controllers and arg0 not in self.controllers):
self.controller = self.default_controller or ''
else:
self.controller = arg0
self.pop_arg_if(arg0 == self.controller)
log_rewrite("route: controller=%s" % self.controller)
if not self.router._acfe_match.match(self.controller):
raise HTTP(
400, THREAD_LOCAL.routes.error_message % 'invalid request',
web2py_error='invalid controller')
def map_static(self):
"""
Handles static files
file_match but no hyphen mapping
"""
if self.controller != 'static':
return None, None
        # self.args(0) is None when args is empty; guard before matching
        version = regex_version.match(self.args(0)) if self.args else None
        if version:
file = '/'.join(self.args[1:])
else:
file = '/'.join(self.args)
if len(self.args) == 0:
bad_static = True # require a file name
elif '/' in self.file_match:
# match the path
bad_static = not self.router._file_match.match(file)
else:
# match path elements
bad_static = False
for name in self.args:
bad_static = bad_static or name in (
'', '.', '..') or not self.router._file_match.match(name)
if bad_static:
log_rewrite('bad static path=%s' % file)
raise HTTP(400,
THREAD_LOCAL.routes.error_message % 'invalid request',
web2py_error='invalid static file')
#
# support language-specific static subdirectories,
# eg /appname/en/static/filename => applications/appname/static/en/filename
# if language-specific file doesn't exist, try same file in static
#
if self.language:
static_file = pjoin(global_settings.applications_parent,
'applications', self.application,
'static', self.language, file)
if not self.language or not isfile(static_file):
static_file = pjoin(global_settings.applications_parent,
'applications', self.application,
'static', file)
self.extension = None
log_rewrite("route: static=%s" % static_file)
return static_file, version
def map_function(self):
"""Handles function.extension"""
arg0 = self.harg0 # map hyphens
functions = self.functions.get(self.controller, set())
if isinstance(self.router.default_function, dict):
default_function = self.router.default_function.get(
self.controller, None)
else:
default_function = self.router.default_function # str or None
default_function = self.domain_function or default_function
if not arg0 or functions and arg0 not in functions:
self.function = default_function or ""
self.pop_arg_if(arg0 and self.function == arg0)
else:
func_ext = arg0.split('.')
if len(func_ext) > 1:
self.function = func_ext[0]
self.extension = func_ext[-1]
else:
self.function = arg0
self.pop_arg_if(True)
log_rewrite(
"route: function.ext=%s.%s" % (self.function, self.extension))
if not self.router._acfe_match.match(self.function):
raise HTTP(
400, THREAD_LOCAL.routes.error_message % 'invalid request',
web2py_error='invalid function')
if self.extension and not self.router._acfe_match.match(self.extension):
raise HTTP(
400, THREAD_LOCAL.routes.error_message % 'invalid request',
web2py_error='invalid extension')
def validate_args(self):
"""
Checks args against validation pattern
"""
for arg in self.args:
if not self.router._args_match.match(arg):
raise HTTP(
400, THREAD_LOCAL.routes.error_message % 'invalid request',
web2py_error='invalid arg <%s>' % arg)
def sluggify(self):
self.request.env.update(
(k.lower().replace('.', '_'), v) for k, v in self.env.iteritems())
def update_request(self):
"""
Updates request from self
Builds env.request_uri
Makes lower-case versions of http headers in env
"""
self.request.application = self.application
self.request.controller = self.controller
self.request.function = self.function
self.request.extension = self.extension
self.request.args = self.args
if self.language:
self.request.uri_language = self.language
uri = '/%s/%s' % (self.controller, self.function)
app = self.application
if self.map_hyphen:
uri = uri.replace('_', '-')
app = app.replace('_', '-')
if self.extension and self.extension != 'html':
uri += '.' + self.extension
if self.language:
uri = '/%s%s' % (self.language, uri)
uri = '/%s%s%s%s' % (
app,
uri,
urllib.quote('/' + '/'.join(
str(x) for x in self.args)) if self.args else '',
('?' + self.query) if self.query else '')
self.env['REQUEST_URI'] = uri
self.sluggify()
@property
def arg0(self):
"""Returns first arg"""
return self.args(0)
@property
def harg0(self):
"""Returns first arg with optional hyphen mapping"""
if self.map_hyphen and self.args(0):
return self.args(0).replace('-', '_')
return self.args(0)
def pop_arg_if(self, dopop):
"""Conditionally removes first arg and returns new first arg"""
if dopop:
self.args.pop(0)
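# Sketch of the incoming pipeline (hypothetical router with application
# 'myapp', languages set to {'en'} and default controller/function
# 'default'/'index'): for PATH_INFO '/myapp/en/default/index/arg1',
# map_app pops 'myapp', map_language pops 'en', map_controller pops
# 'default', map_function pops 'index', leaving request.args == ['arg1'].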
class MapUrlOut(object):
"""Logic for mapping outgoing URLs"""
def __init__(self, request, env, application, controller,
function, args, other, scheme, host, port, language):
"""initialize a map-out object"""
self.default_application = routers.BASE.default_application
if application in routers:
self.router = routers[application]
else:
self.router = routers.BASE
self.request = request
self.env = env
self.application = application
self.controller = controller
self.is_static = (
controller == 'static' or controller.startswith('static/'))
self.function = function
self.args = args
self.other = other
self.scheme = scheme
self.host = host
self.port = port
self.language = language
self.applications = routers.BASE.applications
self.controllers = self.router.controllers
self.functions = self.router.functions.get(self.controller, set())
self.languages = self.router.languages
self.default_language = self.router.default_language
self.exclusive_domain = self.router.exclusive_domain
self.map_hyphen = self.router.map_hyphen
self.map_static = self.router.map_static
self.path_prefix = routers.BASE.path_prefix
self.domain_application = request and self.request.env.domain_application
self.domain_controller = request and self.request.env.domain_controller
if isinstance(self.router.default_function, dict):
self.default_function = self.router.default_function.get(
self.controller, None)
else:
self.default_function = self.router.default_function
if (self.router.exclusive_domain
and self.domain_application
and self.domain_application != self.application
and not self.host):
raise SyntaxError('cross-domain conflict: must specify host')
lang = self.language if self.language else request and request.uri_language
if lang and self.languages and lang in self.languages:
self.language = lang
else:
self.language = None
self.omit_application = False
self.omit_language = False
self.omit_controller = False
self.omit_function = False
def omit_lang(self):
"""Omits language if possible"""
if not self.language or self.language == self.default_language:
self.omit_language = True
def omit_acf(self):
"""Omits what we can of a/c/f"""
router = self.router
# Handle the easy no-args case of tail-defaults: /a/c /a /
#
if not self.args and self.function == self.default_function:
self.omit_function = True
if self.controller == router.default_controller:
self.omit_controller = True
if self.application == self.default_application:
self.omit_application = True
# omit default application
# (which might be the domain default application)
#
default_application = self.domain_application or self.default_application
if self.application == default_application:
self.omit_application = True
# omit controller if default controller
#
default_controller = ((self.application == self.domain_application) and self.domain_controller) or router.default_controller or ''
if self.controller == default_controller:
self.omit_controller = True
# omit function if possible
#
if self.functions and self.function in self.functions and self.function == self.default_function:
self.omit_function = True
# prohibit ambiguous cases
#
# because we presume the lang string to be unambiguous, its presence protects application omission
#
if self.exclusive_domain:
applications = [self.domain_application]
else:
applications = self.applications
if self.omit_language:
if not applications or self.controller in applications:
self.omit_application = False
if self.omit_application:
if not applications or self.function in applications:
self.omit_controller = False
if not self.controllers or self.function in self.controllers:
self.omit_controller = False
if self.args:
if self.args[0] in self.functions or self.args[0] in self.controllers or self.args[0] in applications:
self.omit_function = False
if self.omit_controller:
if self.function in self.controllers or self.function in applications:
self.omit_controller = False
if self.omit_application:
if self.controller in applications:
self.omit_application = False
# handle static as a special case
# (easier for external static handling)
#
if self.is_static:
if not self.map_static:
self.omit_application = False
if self.language:
self.omit_language = False
self.omit_controller = False
self.omit_function = False
def build_acf(self):
"Builds a/c/f from components"
acf = ''
if self.map_hyphen:
self.controller = self.controller.replace('_', '-')
if self.controller != 'static' and not self.controller.startswith('static/'):
self.application = self.application.replace('_', '-')
self.function = self.function.replace('_', '-')
if not self.omit_application:
acf += '/' + self.application
# handle case of flipping lang/static/file to static/lang/file for external rewrite
if self.is_static and self.map_static is False and not self.omit_language:
acf += '/' + self.controller + '/' + self.language
else:
if not self.omit_language:
acf += '/' + self.language
if not self.omit_controller:
acf += '/' + self.controller
if not self.omit_function:
acf += '/' + self.function
if self.path_prefix:
acf = '/' + '/'.join(self.path_prefix) + acf
if self.args:
return acf
return acf or '/'
def acf(self):
"""Converts components to /app/lang/controller/function"""
if not routers:
return None # use regex filter
self.omit_lang() # try to omit language
self.omit_acf() # try to omit a/c/f
return self.build_acf() # build and return the /a/lang/c/f string
def map_url_in(request, env, app=False):
"""Routes incoming URL"""
# initialize router-url object
#
THREAD_LOCAL.routes = params # default to base routes
map = MapUrlIn(request=request, env=env)
map.sluggify()
map.map_prefix() # strip prefix if present
map.map_app() # determine application
# configure THREAD_LOCAL.routes for error rewrite
#
if params.routes_app:
THREAD_LOCAL.routes = params_apps.get(app, params)
if app:
return map.application
root_static_file, version = map.map_root_static(
) # handle root-static files
if root_static_file:
map.update_request()
return (root_static_file, version, map.env)
# handle mapping of lang/static to static/lang in externally-rewritten URLs
# in case we have to handle them ourselves
if map.languages and map.map_static is False and map.arg0 == 'static' and map.args(1) in map.languages:
map.map_controller()
map.map_language()
else:
map.map_language()
map.map_controller()
static_file, version = map.map_static()
if static_file:
map.update_request()
return (static_file, version, map.env)
map.map_function()
map.validate_args()
map.update_request()
return (None, None, map.env)
def map_url_out(request, env, application, controller,
function, args, other, scheme, host, port, language=None):
"""
Supply /a/c/f (or /a/lang/c/f) portion of outgoing url
The basic rule is that we can only make transformations
that map_url_in can reverse.
Suppose that the incoming arguments are a,c,f,args,lang
and that the router defaults are da, dc, df, dl.
We can perform these transformations trivially if args=[] and lang=None or dl::
/da/dc/df => /
/a/dc/df => /a
/a/c/df => /a/c
We would also like to be able to strip the default application or application/controller
from URLs with function/args present, thus::
/da/c/f/args => /c/f/args
/da/dc/f/args => /f/args
We use [applications] and [controllers] and {functions} to suppress ambiguous omissions.
We assume that language names do not collide with a/c/f names.
"""
map = MapUrlOut(request, env, application, controller,
function, args, other, scheme, host, port, language)
return map.acf()
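# Worked examples of the rules above (a sketch, assuming router defaults
# da='init', dc='default', df='index', no args, no language, and that
# 'about' does not collide with any application or controller name):
#   /init/default/index  ->  /
#   /init/default/about  ->  /about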
def get_effective_router(appname):
"""Returns a private copy of the effective router for the specified application"""
if not routers or appname not in routers:
return None
return Storage(routers[appname]) # return a copy
|
manuelep/openshift_v3_test
|
wsgi/web2py/gluon/rewrite.py
|
Python
|
mit
| 52,811
|
from cms.utils.compat.type_checks import string_types
from cms.utils.compat.string_io import StringIO
from django.conf import settings
from django.core.handlers.wsgi import WSGIRequest
from django.http import SimpleCookie
from django.test.client import (FakePayload, MULTIPART_CONTENT, encode_multipart,
BOUNDARY, CONTENT_TYPE_RE)
from django.utils.encoding import smart_str
from cms.utils.compat.urls import urlencode, urlparse, unquote
class RequestFactory(object):
"""
Class that lets you create mock Request objects for use in testing.
Usage:
rf = RequestFactory()
get_request = rf.get('/hello/')
post_request = rf.post('/submit/', {'foo': 'bar'})
Once you have a request object you can pass it to any view function,
just as if that view had been hooked up using a URLconf.
"""
def __init__(self, **defaults):
self.defaults = defaults
self.cookies = SimpleCookie()
self.errors = StringIO()
def _base_environ(self, **request):
"""
The base environment for a request.
"""
environ = {
'HTTP_COOKIE': self.cookies.output(header='', sep='; '),
'PATH_INFO': '/',
'QUERY_STRING': '',
'REMOTE_ADDR': '127.0.0.1',
'REQUEST_METHOD': 'GET',
'SCRIPT_NAME': '',
'SERVER_NAME': 'testserver',
'SERVER_PORT': '80',
'SERVER_PROTOCOL': 'HTTP/1.1',
'wsgi.version': (1,0),
'wsgi.url_scheme': 'http',
'wsgi.errors': self.errors,
'wsgi.multiprocess': True,
'wsgi.multithread': False,
'wsgi.run_once': False,
}
environ.update(self.defaults)
environ.update(request)
return environ
def request(self, **request):
"Construct a generic request object."
req = WSGIRequest(self._base_environ(**request))
req.session = {}
return req
def _get_path(self, parsed):
# If there are parameters, add them
if parsed[3]:
return unquote(parsed[2] + ";" + parsed[3])
else:
return unquote(parsed[2])
def get(self, path, data={}, **extra):
"Construct a GET request"
parsed = urlparse(path)
r = {
'CONTENT_TYPE': 'text/html; charset=utf-8',
'PATH_INFO': self._get_path(parsed),
'QUERY_STRING': urlencode(data, doseq=True) or parsed[4],
'REQUEST_METHOD': 'GET',
'wsgi.input': FakePayload('')
}
r.update(extra)
return self.request(**r)
def post(self, path, data={}, content_type=MULTIPART_CONTENT,
**extra):
"Construct a POST request."
if content_type is MULTIPART_CONTENT:
post_data = encode_multipart(BOUNDARY, data)
else:
# Encode the content so that the byte representation is correct.
match = CONTENT_TYPE_RE.match(content_type)
if match:
charset = match.group(1)
else:
charset = settings.DEFAULT_CHARSET
post_data = smart_str(data, encoding=charset)
parsed = urlparse(path)
r = {
'CONTENT_LENGTH': len(post_data),
'CONTENT_TYPE': content_type,
'PATH_INFO': self._get_path(parsed),
'QUERY_STRING': parsed[4],
'REQUEST_METHOD': 'POST',
'wsgi.input': FakePayload(post_data),
}
r.update(extra)
return self.request(**r)
def head(self, path, data={}, **extra):
"Construct a HEAD request."
parsed = urlparse(path)
r = {
'CONTENT_TYPE': 'text/html; charset=utf-8',
'PATH_INFO': self._get_path(parsed),
'QUERY_STRING': urlencode(data, doseq=True) or parsed[4],
'REQUEST_METHOD': 'HEAD',
'wsgi.input': FakePayload('')
}
r.update(extra)
return self.request(**r)
def options(self, path, data={}, **extra):
"Constrict an OPTIONS request"
parsed = urlparse(path)
r = {
'PATH_INFO': self._get_path(parsed),
'QUERY_STRING': urlencode(data, doseq=True) or parsed[4],
'REQUEST_METHOD': 'OPTIONS',
'wsgi.input': FakePayload('')
}
r.update(extra)
return self.request(**r)
def put(self, path, data={}, content_type=MULTIPART_CONTENT,
**extra):
"Construct a PUT request."
if content_type is MULTIPART_CONTENT:
post_data = encode_multipart(BOUNDARY, data)
else:
post_data = data
# Make `data` into a querystring only if it's not already a string. If
# it is a string, we'll assume that the caller has already encoded it.
query_string = None
if not isinstance(data, string_types):
query_string = urlencode(data, doseq=True)
parsed = urlparse(path)
r = {
'CONTENT_LENGTH': len(post_data),
'CONTENT_TYPE': content_type,
'PATH_INFO': self._get_path(parsed),
'QUERY_STRING': query_string or parsed[4],
'REQUEST_METHOD': 'PUT',
'wsgi.input': FakePayload(post_data),
}
r.update(extra)
return self.request(**r)
def delete(self, path, data={}, **extra):
"Construct a DELETE request."
parsed = urlparse(path)
r = {
'PATH_INFO': self._get_path(parsed),
'QUERY_STRING': urlencode(data, doseq=True) or parsed[4],
'REQUEST_METHOD': 'DELETE',
'wsgi.input': FakePayload('')
}
r.update(extra)
return self.request(**r)
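# Usage sketch (my_view is a hypothetical view callable under test):
# requests built by the factory go straight to the view, bypassing the
# URLconf and middleware.
def _demo_request_factory(my_view):
    rf = RequestFactory()
    request = rf.post('/submit/', {'foo': 'bar'})
    return my_view(request)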
|
SurfasJones/djcmsrc3
|
venv/lib/python2.7/site-packages/cms/test_utils/util/request_factory.py
|
Python
|
mit
| 5,923
|
'''tzinfo timezone information for Europe/Tirane.'''
from pytz.tzinfo import DstTzInfo
from pytz.tzinfo import memorized_datetime as d
from pytz.tzinfo import memorized_ttinfo as i
class Tirane(DstTzInfo):
'''Europe/Tirane timezone definition. See datetime.tzinfo for details'''
zone = 'Europe/Tirane'
_utc_transition_times = [
d(1,1,1,0,0,0),
d(1913,12,31,22,40,40),
d(1940,6,15,23,0,0),
d(1942,11,2,1,0,0),
d(1943,3,29,1,0,0),
d(1943,4,10,1,0,0),
d(1974,5,3,23,0,0),
d(1974,10,1,22,0,0),
d(1975,4,30,23,0,0),
d(1975,10,1,22,0,0),
d(1976,5,1,23,0,0),
d(1976,10,2,22,0,0),
d(1977,5,7,23,0,0),
d(1977,10,1,22,0,0),
d(1978,5,5,23,0,0),
d(1978,9,30,22,0,0),
d(1979,5,4,23,0,0),
d(1979,9,29,22,0,0),
d(1980,5,2,23,0,0),
d(1980,10,3,22,0,0),
d(1981,4,25,23,0,0),
d(1981,9,26,22,0,0),
d(1982,5,1,23,0,0),
d(1982,10,2,22,0,0),
d(1983,4,17,23,0,0),
d(1983,9,30,22,0,0),
d(1984,3,31,23,0,0),
d(1984,9,30,1,0,0),
d(1985,3,31,1,0,0),
d(1985,9,29,1,0,0),
d(1986,3,30,1,0,0),
d(1986,9,28,1,0,0),
d(1987,3,29,1,0,0),
d(1987,9,27,1,0,0),
d(1988,3,27,1,0,0),
d(1988,9,25,1,0,0),
d(1989,3,26,1,0,0),
d(1989,9,24,1,0,0),
d(1990,3,25,1,0,0),
d(1990,9,30,1,0,0),
d(1991,3,31,1,0,0),
d(1991,9,29,1,0,0),
d(1992,3,29,1,0,0),
d(1992,9,27,1,0,0),
d(1993,3,28,1,0,0),
d(1993,9,26,1,0,0),
d(1994,3,27,1,0,0),
d(1994,9,25,1,0,0),
d(1995,3,26,1,0,0),
d(1995,9,24,1,0,0),
d(1996,3,31,1,0,0),
d(1996,10,27,1,0,0),
d(1997,3,30,1,0,0),
d(1997,10,26,1,0,0),
d(1998,3,29,1,0,0),
d(1998,10,25,1,0,0),
d(1999,3,28,1,0,0),
d(1999,10,31,1,0,0),
d(2000,3,26,1,0,0),
d(2000,10,29,1,0,0),
d(2001,3,25,1,0,0),
d(2001,10,28,1,0,0),
d(2002,3,31,1,0,0),
d(2002,10,27,1,0,0),
d(2003,3,30,1,0,0),
d(2003,10,26,1,0,0),
d(2004,3,28,1,0,0),
d(2004,10,31,1,0,0),
d(2005,3,27,1,0,0),
d(2005,10,30,1,0,0),
d(2006,3,26,1,0,0),
d(2006,10,29,1,0,0),
d(2007,3,25,1,0,0),
d(2007,10,28,1,0,0),
d(2008,3,30,1,0,0),
d(2008,10,26,1,0,0),
d(2009,3,29,1,0,0),
d(2009,10,25,1,0,0),
d(2010,3,28,1,0,0),
d(2010,10,31,1,0,0),
d(2011,3,27,1,0,0),
d(2011,10,30,1,0,0),
d(2012,3,25,1,0,0),
d(2012,10,28,1,0,0),
d(2013,3,31,1,0,0),
d(2013,10,27,1,0,0),
d(2014,3,30,1,0,0),
d(2014,10,26,1,0,0),
d(2015,3,29,1,0,0),
d(2015,10,25,1,0,0),
d(2016,3,27,1,0,0),
d(2016,10,30,1,0,0),
d(2017,3,26,1,0,0),
d(2017,10,29,1,0,0),
d(2018,3,25,1,0,0),
d(2018,10,28,1,0,0),
d(2019,3,31,1,0,0),
d(2019,10,27,1,0,0),
d(2020,3,29,1,0,0),
d(2020,10,25,1,0,0),
d(2021,3,28,1,0,0),
d(2021,10,31,1,0,0),
d(2022,3,27,1,0,0),
d(2022,10,30,1,0,0),
d(2023,3,26,1,0,0),
d(2023,10,29,1,0,0),
d(2024,3,31,1,0,0),
d(2024,10,27,1,0,0),
d(2025,3,30,1,0,0),
d(2025,10,26,1,0,0),
d(2026,3,29,1,0,0),
d(2026,10,25,1,0,0),
d(2027,3,28,1,0,0),
d(2027,10,31,1,0,0),
d(2028,3,26,1,0,0),
d(2028,10,29,1,0,0),
d(2029,3,25,1,0,0),
d(2029,10,28,1,0,0),
d(2030,3,31,1,0,0),
d(2030,10,27,1,0,0),
d(2031,3,30,1,0,0),
d(2031,10,26,1,0,0),
d(2032,3,28,1,0,0),
d(2032,10,31,1,0,0),
d(2033,3,27,1,0,0),
d(2033,10,30,1,0,0),
d(2034,3,26,1,0,0),
d(2034,10,29,1,0,0),
d(2035,3,25,1,0,0),
d(2035,10,28,1,0,0),
d(2036,3,30,1,0,0),
d(2036,10,26,1,0,0),
d(2037,3,29,1,0,0),
d(2037,10,25,1,0,0),
]
_transition_info = [
i(4740,0,'LMT'),
i(3600,0,'CET'),
i(7200,3600,'CEST'),
i(3600,0,'CET'),
i(7200,3600,'CEST'),
i(3600,0,'CET'),
i(7200,3600,'CEST'),
i(3600,0,'CET'),
i(7200,3600,'CEST'),
i(3600,0,'CET'),
i(7200,3600,'CEST'),
i(3600,0,'CET'),
i(7200,3600,'CEST'),
i(3600,0,'CET'),
i(7200,3600,'CEST'),
i(3600,0,'CET'),
i(7200,3600,'CEST'),
i(3600,0,'CET'),
i(7200,3600,'CEST'),
i(3600,0,'CET'),
i(7200,3600,'CEST'),
i(3600,0,'CET'),
i(7200,3600,'CEST'),
i(3600,0,'CET'),
i(7200,3600,'CEST'),
i(3600,0,'CET'),
i(7200,3600,'CEST'),
i(3600,0,'CET'),
i(7200,3600,'CEST'),
i(3600,0,'CET'),
i(7200,3600,'CEST'),
i(3600,0,'CET'),
i(7200,3600,'CEST'),
i(3600,0,'CET'),
i(7200,3600,'CEST'),
i(3600,0,'CET'),
i(7200,3600,'CEST'),
i(3600,0,'CET'),
i(7200,3600,'CEST'),
i(3600,0,'CET'),
i(7200,3600,'CEST'),
i(3600,0,'CET'),
i(7200,3600,'CEST'),
i(3600,0,'CET'),
i(7200,3600,'CEST'),
i(3600,0,'CET'),
i(7200,3600,'CEST'),
i(3600,0,'CET'),
i(7200,3600,'CEST'),
i(3600,0,'CET'),
i(7200,3600,'CEST'),
i(3600,0,'CET'),
i(7200,3600,'CEST'),
i(3600,0,'CET'),
i(7200,3600,'CEST'),
i(3600,0,'CET'),
i(7200,3600,'CEST'),
i(3600,0,'CET'),
i(7200,3600,'CEST'),
i(3600,0,'CET'),
i(7200,3600,'CEST'),
i(3600,0,'CET'),
i(7200,3600,'CEST'),
i(3600,0,'CET'),
i(7200,3600,'CEST'),
i(3600,0,'CET'),
i(7200,3600,'CEST'),
i(3600,0,'CET'),
i(7200,3600,'CEST'),
i(3600,0,'CET'),
i(7200,3600,'CEST'),
i(3600,0,'CET'),
i(7200,3600,'CEST'),
i(3600,0,'CET'),
i(7200,3600,'CEST'),
i(3600,0,'CET'),
i(7200,3600,'CEST'),
i(3600,0,'CET'),
i(7200,3600,'CEST'),
i(3600,0,'CET'),
i(7200,3600,'CEST'),
i(3600,0,'CET'),
i(7200,3600,'CEST'),
i(3600,0,'CET'),
i(7200,3600,'CEST'),
i(3600,0,'CET'),
i(7200,3600,'CEST'),
i(3600,0,'CET'),
i(7200,3600,'CEST'),
i(3600,0,'CET'),
i(7200,3600,'CEST'),
i(3600,0,'CET'),
i(7200,3600,'CEST'),
i(3600,0,'CET'),
i(7200,3600,'CEST'),
i(3600,0,'CET'),
i(7200,3600,'CEST'),
i(3600,0,'CET'),
i(7200,3600,'CEST'),
i(3600,0,'CET'),
i(7200,3600,'CEST'),
i(3600,0,'CET'),
i(7200,3600,'CEST'),
i(3600,0,'CET'),
i(7200,3600,'CEST'),
i(3600,0,'CET'),
i(7200,3600,'CEST'),
i(3600,0,'CET'),
i(7200,3600,'CEST'),
i(3600,0,'CET'),
i(7200,3600,'CEST'),
i(3600,0,'CET'),
i(7200,3600,'CEST'),
i(3600,0,'CET'),
i(7200,3600,'CEST'),
i(3600,0,'CET'),
i(7200,3600,'CEST'),
i(3600,0,'CET'),
i(7200,3600,'CEST'),
i(3600,0,'CET'),
i(7200,3600,'CEST'),
i(3600,0,'CET'),
i(7200,3600,'CEST'),
i(3600,0,'CET'),
i(7200,3600,'CEST'),
i(3600,0,'CET'),
i(7200,3600,'CEST'),
i(3600,0,'CET'),
i(7200,3600,'CEST'),
i(3600,0,'CET'),
i(7200,3600,'CEST'),
i(3600,0,'CET'),
i(7200,3600,'CEST'),
i(3600,0,'CET'),
]
Tirane = Tirane()
|
newvem/pytz
|
pytz/zoneinfo/Europe/Tirane.py
|
Python
|
mit
| 5,688
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# lv2docgen, a documentation generator for LV2 plugins
# Copyright 2012 David Robillard <d@drobilla.net>
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THIS SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import errno
import os
import sys
__date__ = '2012-03-27'
__version__ = '0.0.0'
__authors__ = 'David Robillard'
__license__ = 'ISC License <http://www.opensource.org/licenses/isc>'
__contact__ = 'devel@lists.lv2plug.in'
try:
import rdflib
except ImportError:
sys.exit('Error importing rdflib')
doap = rdflib.Namespace('http://usefulinc.com/ns/doap#')
lv2 = rdflib.Namespace('http://lv2plug.in/ns/lv2core#')
rdf = rdflib.Namespace('http://www.w3.org/1999/02/22-rdf-syntax-ns#')
rdfs = rdflib.Namespace('http://www.w3.org/2000/01/rdf-schema#')
def uri_to_path(uri):
path = uri[uri.find(':'):]
while not path[0].isalpha():
path = path[1:]
return path
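# e.g. uri_to_path('http://lv2plug.in/ns/ext/midi') -> 'lv2plug.in/ns/ext/midi'
# (everything up to the scheme separator and any leading punctuation is dropped)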
def get_doc(model, subject):
comment = model.value(subject, rdfs.comment, None)
if comment:
return '<p class="content">%s</p>' % comment
return ''
def port_doc(model, port):
name = model.value(port, lv2.name, None)
comment = model.value(port, rdfs.comment, None)
html = '<div class="specterm"><h3>%s</h3>' % name
html += get_doc(model, port)
html += '</div>'
return html
def plugin_doc(model, plugin, style_uri):
uri = str(plugin)
name = model.value(plugin, doap.name, None)
html = '''<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML+RDFa 1.0//EN" "http://www.w3.org/MarkUp/DTD/xhtml-rdfa-1.dtd">
<html about="%s"
xmlns="http://www.w3.org/1999/xhtml"
xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
xmlns:rdfs="http://www.w3.org/2000/01/rdf-schema#"
xmlns:lv2="http://lv2plug.in/ns/lv2core#"
xml:lang="en">''' % uri
html += '''<head>
<title>%s</title>
<meta http-equiv="content-type" content="text/xhtml+xml; charset=utf-8" />
<meta name="generator" content="lv2docgen" />
<link href="%s" rel="stylesheet" type="text/css" />
</head>
<body>''' % (name, style_uri)
html += '''
<!-- HEADER -->
<div id="header">
<h1 id="title">%s</h1>
<table id="meta">
<tr><th>URI</th><td><a href="%s">%s</a></td></tr>
<tr><th>Version</th><td>%s</td></tr>
</table>
</div>
''' % (name, uri, uri, '0.0.0')
html += get_doc(model, plugin)
ports_html = ''
for p in model.triples([plugin, lv2.port, None]):
ports_html += port_doc(model, p[2])
if len(ports_html):
html += '''
<h2 class="sec">Ports</h2>
<div class="content">
%s
</div>''' % ports_html
html += ' </body></html>'
return html
if __name__ == '__main__':
    # LV2 plugin documentation generator
if len(sys.argv) < 2:
print('Usage: %s OUTDIR FILE...' % sys.argv[0])
sys.exit(1)
outdir = sys.argv[1]
files = sys.argv[2:]
model = rdflib.ConjunctiveGraph()
for f in files:
model.parse(f, format='n3')
style_uri = os.path.abspath(os.path.join(outdir, 'style.css'))
for p in model.triples([None, rdf.type, lv2.Plugin]):
plugin = p[0]
html = plugin_doc(model, plugin, style_uri)
path = uri_to_path(plugin)
outpath = os.path.join(outdir, path + '.html')
try:
os.makedirs(os.path.dirname(outpath))
except OSError:
e = sys.exc_info()[1]
if e.errno == errno.EEXIST:
pass
else:
raise
print('Writing <%s> documentation to %s' % (plugin, outpath))
out = open(outpath, 'w')
out.write(html)
out.close()
|
moddevices/mod-sdk
|
lv2specgen/lv2docgen.py
|
Python
|
gpl-3.0
| 4,319
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import tools
from osv import fields,osv
class timesheet_report(osv.osv):
_name = "timesheet.report"
_description = "Timesheet"
_auto = False
_columns = {
'year': fields.char('Year',size=64,required=False, readonly=True),
'month':fields.selection([('01','January'), ('02','February'), ('03','March'), ('04','April'),
('05','May'), ('06','June'), ('07','July'), ('08','August'), ('09','September'),
('10','October'), ('11','November'), ('12','December')], 'Month',readonly=True),
'day': fields.char('Day', size=128, readonly=True),
'name': fields.char('Description', size=64,readonly=True),
'product_id' : fields.many2one('product.product', 'Product'),
'general_account_id' : fields.many2one('account.account', 'General Account', readonly=True),
'user_id': fields.many2one('res.users', 'User',readonly=True),
'to_invoice': fields.many2one('hr_timesheet_invoice.factor', 'Type of Invoicing',readonly=True),
'account_id': fields.many2one('account.analytic.account', 'Analytic Account',readonly=True),
'nbr': fields.integer('#Nbr',readonly=True),
'total_diff': fields.float('#Total Diff',readonly=True),
'total_timesheet': fields.float('#Total Timesheet',readonly=True),
'total_attendance': fields.float('#Total Attendance',readonly=True),
'company_id': fields.many2one('res.company', 'Company',readonly=True),
'department_id':fields.many2one('hr.department','Department',readonly=True),
'date_from': fields.date('Date from',readonly=True,),
'date_to': fields.date('Date to',readonly=True),
'date_current': fields.date('Current date', required=True),
'state' : fields.selection([
('new', 'New'),
('draft','Draft'),
('confirm','Confirmed'),
('done','Done')], 'State', readonly=True),
'quantity': fields.float('#Quantity',readonly=True),
'cost': fields.float('#Cost',readonly=True),
}
def init(self, cr):
tools.drop_view_if_exists(cr, 'timesheet_report')
cr.execute("""
create or replace view timesheet_report as (
select
min(aal.id) as id,
htss.date_current,
htss.name,
htss.date_from,
htss.date_to,
to_char(htss.date_current,'YYYY') as year,
to_char(htss.date_current,'MM') as month,
to_char(htss.date_current, 'YYYY-MM-DD') as day,
count(*) as nbr,
aal.unit_amount as quantity,
aal.amount as cost,
aal.account_id,
aal.product_id,
(SELECT sum(day.total_difference)
FROM hr_timesheet_sheet_sheet AS sheet
LEFT JOIN hr_timesheet_sheet_sheet_day AS day
ON (sheet.id = day.sheet_id
AND day.name = sheet.date_current) where sheet.id=htss.id) as total_diff,
(SELECT sum(day.total_timesheet)
FROM hr_timesheet_sheet_sheet AS sheet
LEFT JOIN hr_timesheet_sheet_sheet_day AS day
ON (sheet.id = day.sheet_id
AND day.name = sheet.date_current) where sheet.id=htss.id) as total_timesheet,
(SELECT sum(day.total_attendance)
FROM hr_timesheet_sheet_sheet AS sheet
LEFT JOIN hr_timesheet_sheet_sheet_day AS day
ON (sheet.id = day.sheet_id
AND day.name = sheet.date_current) where sheet.id=htss.id) as total_attendance,
aal.to_invoice,
aal.general_account_id,
htss.user_id,
htss.company_id,
htss.department_id,
htss.state
from account_analytic_line as aal
left join hr_analytic_timesheet as hat ON (hat.line_id=aal.id)
left join hr_timesheet_sheet_sheet as htss ON (hat.line_id=htss.id)
group by
to_char(htss.date_current,'YYYY'),
to_char(htss.date_current,'MM'),
to_char(htss.date_current, 'YYYY-MM-DD'),
aal.account_id,
htss.date_from,
htss.date_to,
aal.unit_amount,
aal.amount,
htss.date_current,
aal.to_invoice,
aal.product_id,
aal.general_account_id,
htss.name,
htss.company_id,
htss.state,
htss.id,
htss.department_id,
htss.user_id
)
""")
timesheet_report()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
crmccreary/openerp_server
|
openerp/addons/hr_timesheet_sheet/report/timesheet_report.py
|
Python
|
agpl-3.0
| 6,314
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from osv import fields, osv
class account_sequence_installer(osv.osv_memory):
_name = 'account.sequence.installer'
_inherit = 'res.config.installer'
_columns = {
'name': fields.char('Name',size=64, required=True),
'prefix': fields.char('Prefix',size=64, help="Prefix value of the record for the sequence"),
'suffix': fields.char('Suffix',size=64, help="Suffix value of the record for the sequence"),
'number_next': fields.integer('Next Number', required=True, help="Next number of this sequence"),
'number_increment': fields.integer('Increment Number', required=True, help="The next number of the sequence will be incremented by this number"),
        'padding' : fields.integer('Number padding', required=True, help="OpenERP will automatically add some '0's on the left of the 'Next Number' to get the required padding size."),
'company_id': fields.many2one('res.company', 'Company'),
}
_defaults = {
'company_id': lambda s,cr,uid,c: s.pool.get('res.company')._company_default_get(cr, uid, 'ir.sequence', context=c),
'number_increment': 1,
'number_next': 1,
'padding' : 0,
'name': 'Internal Sequence Journal',
}
def execute(self, cr, uid, ids, context=None):
if context is None:
context = {}
record = self.browse(cr, uid, ids, context=context)[0]
j_ids = []
if record.company_id:
            company_id = record.company_id.id  # no trailing comma: a tuple would break the search domain below
search_criteria = [('company_id', '=', company_id)]
else:
company_id = False
search_criteria = []
vals = {
'id': 'internal_sequence_journal',
'code': 'account.journal',
'name': record.name,
'prefix': record.prefix,
'suffix': record.suffix,
'number_next': record.number_next,
'number_increment': record.number_increment,
'padding' : record.padding,
'company_id': company_id,
}
obj_sequence = self.pool.get('ir.sequence')
ir_seq = obj_sequence.create(cr, uid, vals, context)
res = super(account_sequence_installer, self).execute(cr, uid, ids, context=context)
jou_obj = self.pool.get('account.journal')
journal_ids = jou_obj.search(cr, uid, search_criteria, context=context)
for journal in jou_obj.browse(cr, uid, journal_ids, context=context):
if not journal.internal_sequence_id:
j_ids.append(journal.id)
if j_ids:
jou_obj.write(cr, uid, j_ids, {'internal_sequence_id': ir_seq})
ir_values_obj = self.pool.get('ir.values')
        ir_values_obj.set(cr, uid, key='default', key2=False, name='internal_sequence_id', models=[('account.journal', False)], value=ir_seq)
return res
account_sequence_installer()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
Johnzero/erp
|
openerp/addons/account_sequence/account_sequence_installer.py
|
Python
|
agpl-3.0
| 3,925
|
# Natural Language Toolkit: Parser API
#
# Copyright (C) 2001-2008 University of Pennsylvania
# Author: Steven Bird <sb@csse.unimelb.edu.au>
# Edward Loper <edloper@gradient.cis.upenn.edu>
# URL: <http://nltk.sf.net>
# For license information, see LICENSE.TXT
#
import itertools
from nltk.internals import deprecated, Deprecated, overridden
class ParserI(object):
"""
A processing class for deriving trees that represent possible
structures for a sequence of tokens. These tree structures are
known as X{parses}. Typically, parsers are used to derive syntax
trees for sentences. But parsers can also be used to derive other
kinds of tree structure, such as morphological trees and discourse
structures.
Subclasses must define:
- at least one of: L{parse()}, L{nbest_parse()}, L{iter_parse()},
L{batch_parse()}, L{batch_nbest_parse()}, L{batch_iter_parse()}.
Subclasses may define:
- L{grammar()}
- either L{prob_classify()} or L{batch_prob_classify()} (or both)
"""
def grammar(self):
"""
@return: The grammar used by this parser.
"""
raise NotImplementedError()
def parse(self, sent):
"""
@return: A parse tree that represents the structure of the
given sentence, or C{None} if no parse tree is found. If
multiple parses are found, then return the best parse.
@param sent: The sentence to be parsed
@type sent: L{list} of L{string}
@rtype: L{Tree}
"""
if overridden(self.batch_parse):
return self.batch_parse([sent])[0]
else:
trees = self.nbest_parse(sent, 1)
if trees: return trees[0]
else: return None
def nbest_parse(self, sent, n=None):
"""
@return: A list of parse trees that represent possible
structures for the given sentence. When possible, this list is
sorted from most likely to least likely. If C{n} is
specified, then the returned list will contain at most C{n}
parse trees.
@param sent: The sentence to be parsed
@type sent: L{list} of L{string}
@param n: The maximum number of trees to return.
@type n: C{int}
@rtype: C{list} of L{Tree}
"""
if overridden(self.batch_nbest_parse):
return self.batch_nbest_parse([sent],n)[0]
        elif overridden(self.parse) or overridden(self.batch_parse):
tree = self.parse(sent)
if tree: return [tree]
else: return []
else:
return list(itertools.islice(self.iter_parse(sent), n))
def iter_parse(self, sent):
"""
@return: An iterator that generates parse trees that represent
possible structures for the given sentence. When possible,
this list is sorted from most likely to least likely.
@param sent: The sentence to be parsed
@type sent: L{list} of L{string}
@rtype: C{iterator} of L{Tree}
"""
if overridden(self.batch_iter_parse):
return self.batch_iter_parse([sent])[0]
elif overridden(self.nbest_parse) or overridden(self.batch_nbest_parse):
return iter(self.nbest_parse(sent))
        elif overridden(self.parse) or overridden(self.batch_parse):
tree = self.parse(sent)
if tree: return iter([tree])
else: return iter([])
else:
raise NotImplementedError()
def prob_parse(self, sent):
"""
@return: A probability distribution over the possible parse
trees for the given sentence. If there are no possible parse
trees for the given sentence, return a probability distribution
that assigns a probability of 1.0 to C{None}.
@param sent: The sentence to be parsed
@type sent: L{list} of L{string}
@rtype: L{ProbDist} of L{Tree}
"""
if overridden(self.batch_prob_parse):
return self.batch_prob_parse([sent])[0]
else:
            raise NotImplementedError()
def batch_parse(self, sents):
"""
Apply L{self.parse()} to each element of C{sents}. I.e.:
>>> return [self.parse(sent) for sent in sents]
@rtype: C{list} of L{Tree}
"""
return [self.parse(sent) for sent in sents]
def batch_nbest_parse(self, sents, n=None):
"""
Apply L{self.nbest_parse()} to each element of C{sents}. I.e.:
>>> return [self.nbest_parse(sent, n) for sent in sents]
@rtype: C{list} of C{list} of L{Tree}
"""
        return [self.nbest_parse(sent, n) for sent in sents]
def batch_iter_parse(self, sents):
"""
Apply L{self.iter_parse()} to each element of C{sents}. I.e.:
>>> return [self.iter_parse(sent) for sent in sents]
@rtype: C{list} of C{iterator} of L{Tree}
"""
return [self.iter_parse(sent) for sent in sents]
def batch_prob_parse(self, sents):
"""
Apply L{self.prob_parse()} to each element of C{sents}. I.e.:
>>> return [self.prob_parse(sent) for sent in sents]
@rtype: C{list} of L{ProbDist} of L{Tree}
"""
return [self.prob_parse(sent) for sent in sents]
#////////////////////////////////////////////////////////////
#{ Deprecated
@deprecated("Use parse() instead.")
def get_parse(self, sent):
return self.parse(sent)
@deprecated("Use nbest_parse() instead.")
def get_parse_list(self, sent):
return self.nbest_parse(sent)
@deprecated("Use prob_parse() instead.")
def get_parse_prob(self, sent):
return self.prob_parse(sent)
@deprecated("Use prob_parse() instead.")
def get_parse_dict(self, sent):
return self.prob_parse(sent)
@deprecated("No longer supported.")
def batch_test(self, filename):
f = open(filename)
for line in f:
line = line.strip()
if not line: continue
if line.startswith('#'):
print line
continue
print "Sentence:", line
parses = self.nbest_parse(line)
print "%d parses." % len(parses)
for tree in parses: print tree
#}
#////////////////////////////////////////////////////////////
######################################################################
#{ Deprecated
class ParseI(ParserI, Deprecated):
"Use ParserI instead."
class AbstractParser(Deprecated, ParserI):
"""Use ParserI instead."""
@deprecated("Use nltk.cfg.Grammar.check_coverage() instead.")
def _check_coverage(self, tokens):
self._grammar.check_coverage(tokens)
#}
######################################################################
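# Illustrative sketch (hypothetical, not part of this module): ParserI's
# defaults delegate to one another, so overriding a single entry point yields
# the rest of the API. The flat tuple below stands in for a real nltk Tree.
if __name__ == '__main__':
    class _ToyParser(ParserI):
        def nbest_parse(self, sent, n=None):
            # One trivial "parse": the whole sentence under a single S node.
            return [('S',) + tuple(sent)] if sent else []

    _p = _ToyParser()
    print _p.parse(['the', 'dog', 'barks'])    # parse() falls back to nbest_parse()
    print _p.batch_parse([['hi'], ['there']])  # batch_parse() maps parse() over sents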
|
hectormartinez/rougexstem
|
taln2016/icsisumm-primary-sys34_v1/nltk/nltk-0.9.2/nltk/parse/api.py
|
Python
|
apache-2.0
| 6,878
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorflow.python.client.graph_util."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import graph_util
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_util
from tensorflow.python.ops import gen_state_ops
from tensorflow.python.ops import math_ops # pylint: disable=unused-import
# Utility device function to use for testing
def test_device_func_pin_variable_to_cpu(op):
if op.device:
return op.device
return "/cpu:0" if op.node_def.op == "Variable" else op.device
class DeviceFunctionsTest(tf.test.TestCase):
def testTwoDeviceFunctions(self):
with ops.Graph().as_default() as g:
var_0 = gen_state_ops._variable(shape=[1], dtype=dtypes.float32,
name="var_0", container="", shared_name="")
with g.device(test_device_func_pin_variable_to_cpu):
var_1 = gen_state_ops._variable(shape=[1], dtype=dtypes.float32,
name="var_1", container="", shared_name="")
var_2 = gen_state_ops._variable(shape=[1], dtype=dtypes.float32,
name="var_2", container="", shared_name="")
var_3 = gen_state_ops._variable(shape=[1], dtype=dtypes.float32,
name="var_3", container="", shared_name="")
with g.device(test_device_func_pin_variable_to_cpu):
var_4 = gen_state_ops._variable(shape=[1], dtype=dtypes.float32,
name="var_4", container="", shared_name="")
with g.device("/device:GPU:0"):
var_5 = gen_state_ops._variable(shape=[1], dtype=dtypes.float32,
name="var_5", container="", shared_name="")
var_6 = gen_state_ops._variable(shape=[1], dtype=dtypes.float32,
name="var_6", container="", shared_name="")
self.assertDeviceEqual(var_0.device, None)
self.assertDeviceEqual(var_1.device, "/device:CPU:0")
self.assertDeviceEqual(var_2.device, None)
self.assertDeviceEqual(var_3.device, None)
self.assertDeviceEqual(var_4.device, "/device:CPU:0")
self.assertDeviceEqual(var_5.device, "/device:GPU:0")
self.assertDeviceEqual(var_6.device, "/device:CPU:0")
def testNestedDeviceFunctions(self):
with tf.Graph().as_default():
var_0 = tf.Variable(0)
with tf.device(test_device_func_pin_variable_to_cpu):
var_1 = tf.Variable(1)
with tf.device(lambda op: "/gpu:0"):
var_2 = tf.Variable(2)
with tf.device("/gpu:0"): # Implicit merging device function.
var_3 = tf.Variable(3)
self.assertDeviceEqual(var_0.device, None)
self.assertDeviceEqual(var_1.device, "/device:CPU:0")
self.assertDeviceEqual(var_2.device, "/device:GPU:0")
self.assertDeviceEqual(var_3.device, "/device:GPU:0")
def testExplicitDevice(self):
with ops.Graph().as_default() as g:
const_0 = constant_op.constant(5.0)
with g.device("/device:GPU:0"):
const_1 = constant_op.constant(5.0)
with g.device("/device:GPU:1"):
const_2 = constant_op.constant(5.0)
with g.device("/device:CPU:0"):
const_3 = constant_op.constant(5.0)
with g.device("/device:CPU:1"):
const_4 = constant_op.constant(5.0)
with g.device("/job:ps"):
const_5 = constant_op.constant(5.0)
self.assertDeviceEqual(const_0.device, None)
self.assertDeviceEqual(const_1.device, "/device:GPU:0")
self.assertDeviceEqual(const_2.device, "/device:GPU:1")
self.assertDeviceEqual(const_3.device, "/device:CPU:0")
self.assertDeviceEqual(const_4.device, "/device:CPU:1")
self.assertDeviceEqual(const_5.device, "/job:ps")
def testDefaultDevice(self):
with ops.Graph().as_default() as g, g.device(
test_device_func_pin_variable_to_cpu):
with g.device("/job:ps"):
const_0 = constant_op.constant(5.0)
with g.device("/device:GPU:0"):
const_1 = constant_op.constant(5.0)
with g.device("/device:GPU:1"):
const_2 = constant_op.constant(5.0)
with g.device("/device:CPU:0"):
const_3 = constant_op.constant(5.0)
with g.device("/device:CPU:1"):
const_4 = constant_op.constant(5.0)
with g.device("/replica:0"):
const_5 = constant_op.constant(5.0)
self.assertDeviceEqual(const_0.device, "/job:ps")
self.assertDeviceEqual(const_1.device, "/device:GPU:0")
self.assertDeviceEqual(const_2.device, "/device:GPU:1")
self.assertDeviceEqual(const_3.device, "/device:CPU:0")
self.assertDeviceEqual(const_4.device, "/device:CPU:1")
self.assertDeviceEqual(const_5.device, "/replica:0")
def testExtractSubGraph(self):
graph_def = tf.GraphDef()
n1 = graph_def.node.add()
n1.name = "n1"
n1.input.extend(["n5"])
n2 = graph_def.node.add()
n2.name = "n2"
# Take the first output of the n1 node as the input.
n2.input.extend(["n1:0"])
n3 = graph_def.node.add()
n3.name = "n3"
# Add a control input (which isn't really needed by the kernel, but
# rather to enforce execution order between nodes).
n3.input.extend(["^n2"])
n4 = graph_def.node.add()
n4.name = "n4"
    # It is fine to have loops in the graph as well.
n5 = graph_def.node.add()
n5.name = "n5"
n5.input.extend(["n1"])
sub_graph = graph_util.extract_sub_graph(graph_def, ["n3"])
self.assertEqual("n1", sub_graph.node[0].name)
self.assertEqual("n2", sub_graph.node[1].name)
self.assertEqual("n3", sub_graph.node[2].name)
self.assertEqual("n5", sub_graph.node[3].name)
def testConvertVariablesToConsts(self):
with tf.Graph().as_default():
variable_node = tf.Variable(1.0, name="variable_node")
_ = tf.Variable(1.0, name="unused_variable_node")
output_node = tf.mul(variable_node, 2.0, name="output_node")
with tf.Session() as sess:
init = tf.initialize_variables([variable_node])
sess.run(init)
output = sess.run(output_node)
self.assertNear(2.0, output, 0.00001)
variable_graph_def = sess.graph.as_graph_def()
# First get the constant_graph_def when variable_names_whitelist is set,
# note that if variable_names_whitelist is not set an error will be
# thrown because unused_variable_node is not initialized.
constant_graph_def = graph_util.convert_variables_to_constants(
sess, variable_graph_def, ["output_node"],
variable_names_whitelist=set(["variable_node"]))
# Then initialize the unused variable, and get another
# constant_graph_def when variable_names_whitelist is not set.
sess.run(tf.global_variables_initializer())
constant_graph_def_without_variable_whitelist = (
graph_util.convert_variables_to_constants(
sess, variable_graph_def, ["output_node"]))
# The unused variable should be cleared so the two graphs should be
# equivalent.
self.assertEqual(str(constant_graph_def),
str(constant_graph_def_without_variable_whitelist))
# Now we make sure the variable is now a constant, and that the graph still
# produces the expected result.
with tf.Graph().as_default():
_ = tf.import_graph_def(constant_graph_def, name="")
self.assertEqual(4, len(constant_graph_def.node))
for node in constant_graph_def.node:
self.assertNotEqual("Variable", node.op)
with tf.Session() as sess:
output_node = sess.graph.get_tensor_by_name("output_node:0")
output = sess.run(output_node)
self.assertNear(2.0, output, 0.00001)
def create_node_def(self, op, name, inputs):
new_node = tf.NodeDef()
new_node.op = op
new_node.name = name
for input_name in inputs:
new_node.input.extend([input_name])
return new_node
def create_constant_node_def(self, name, value, dtype, shape=None):
node = self.create_node_def("Const", name, [])
self.set_attr_dtype(node, "dtype", dtype)
self.set_attr_tensor(node, "value", value, dtype, shape)
return node
def set_attr_dtype(self, node, key, value):
node.attr[key].CopyFrom(tf.AttrValue(type=value.as_datatype_enum))
def set_attr_tensor(self, node, key, value, dtype, shape=None):
node.attr[key].CopyFrom(tf.AttrValue(
tensor=tensor_util.make_tensor_proto(value,
dtype=dtype,
shape=shape)))
def testRemoveTrainingNodes(self):
a_constant_name = "a_constant"
b_constant_name = "b_constant"
a_check_name = "a_check"
b_check_name = "b_check"
a_identity_name = "a_identity"
b_identity_name = "b_identity"
add_name = "add"
graph_def = tf.GraphDef()
a_constant = self.create_constant_node_def(a_constant_name,
value=1,
dtype=tf.float32,
shape=[])
graph_def.node.extend([a_constant])
a_check_node = self.create_node_def("CheckNumerics", a_check_name,
[a_constant_name])
graph_def.node.extend([a_check_node])
a_identity_node = self.create_node_def("Identity", a_identity_name,
[a_constant_name,
"^" + a_check_name])
graph_def.node.extend([a_identity_node])
b_constant = self.create_constant_node_def(b_constant_name,
value=1,
dtype=tf.float32,
shape=[])
graph_def.node.extend([b_constant])
b_check_node = self.create_node_def("CheckNumerics", b_check_name,
[b_constant_name])
graph_def.node.extend([b_check_node])
b_identity_node = self.create_node_def("Identity", b_identity_name,
[b_constant_name,
"^" + b_check_name])
graph_def.node.extend([b_identity_node])
add_node = self.create_node_def("Add", add_name,
[a_identity_name,
b_identity_name])
self.set_attr_dtype(add_node, "T", tf.float32)
graph_def.node.extend([add_node])
expected_output = tf.GraphDef()
a_constant = self.create_constant_node_def(a_constant_name,
value=1,
dtype=tf.float32,
shape=[])
expected_output.node.extend([a_constant])
b_constant = self.create_constant_node_def(b_constant_name,
value=1,
dtype=tf.float32,
shape=[])
expected_output.node.extend([b_constant])
add_node = self.create_node_def("Add", add_name,
[a_constant_name,
b_constant_name])
self.set_attr_dtype(add_node, "T", tf.float32)
expected_output.node.extend([add_node])
output = graph_util.remove_training_nodes(graph_def)
self.assertProtoEquals(expected_output, output)
if __name__ == "__main__":
tf.test.main()
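# Illustrative sketch (hypothetical; mirrors the freezing pattern exercised in
# testConvertVariablesToConsts above, assuming an initialized session):
#
#   with tf.Session() as sess:
#       sess.run(tf.global_variables_initializer())
#       frozen = graph_util.convert_variables_to_constants(
#           sess, sess.graph.as_graph_def(), ["output_node"])
#   # 'frozen' replaces each referenced Variable with a Const holding its
#   # current value, so the GraphDef can be shipped without checkpoints.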
|
laosiaudi/tensorflow
|
tensorflow/python/framework/graph_util_test.py
|
Python
|
apache-2.0
| 12,229
|
# Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Check to see if the various BadMessage enums in histograms.xml need to be
updated. This can be called from a chromium PRESUBMIT.py to ensure updates to
bad_message.h also include the generated changes to histograms.xml
"""
import update_histogram_enum
def PrecheckBadMessage(input_api,
output_api,
histogram_name,
end_marker='^BAD_MESSAGE_MAX',
strip_k_prefix=False):
source_path = ''
# This function is called once per bad_message.h-containing directory. Check
# for the |bad_message.h| file, and if present, remember its path.
for f in input_api.AffectedFiles():
if f.LocalPath().endswith('bad_message.h'):
source_path = f.LocalPath()
break
  # If |bad_message.h| wasn't found in this change, then there is nothing to
  # do and histograms.xml does not need to be updated.
if source_path == '':
return []
  START_MARKER = '^enum (class )?BadMessageReason {'
presubmit_error = update_histogram_enum.CheckPresubmitErrors(
histogram_enum_name=histogram_name,
update_script_name='update_bad_message_reasons.py',
source_enum_path=source_path,
start_marker=START_MARKER,
end_marker=end_marker,
strip_k_prefix=strip_k_prefix)
if presubmit_error:
return [output_api.PresubmitPromptWarning(presubmit_error,
items=[source_path])]
return []
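# Illustrative hookup (hypothetical enum name; real callers live in a
# chromium PRESUBMIT.py next to the bad_message.h being checked):
#
#   def CheckChangeOnUpload(input_api, output_api):
#       return PrecheckBadMessage(input_api, output_api,
#                                 'BadMessageReasonContent')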
|
scheib/chromium
|
tools/metrics/histograms/presubmit_bad_message_reasons.py
|
Python
|
bsd-3-clause
| 1,619
|
"""
homeassistant.components.sensor.vera
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Support for Vera sensors.
Configuration:
To use the Vera sensors you will need to add something like the following to
your config/configuration.yaml
sensor:
platform: vera
vera_controller_url: http://YOUR_VERA_IP:3480/
device_data:
12:
name: My awesome sensor
exclude: true
13:
name: Another sensor
Variables:
vera_controller_url
*Required
This is the base URL of your Vera controller, including the port number if it
is not running on port 80.
Example: http://192.168.1.21:3480/
device_data
*Optional
This contains an array of additional device info for your Vera devices. It is not
required and if not specified all sensors configured in your Vera controller
will be added with default values. You should use the id of your vera device
as the key for the device within device_data
These are the variables for the device_data array:
name
*Optional
This parameter allows you to override the name of your Vera device in the HA
interface, if not specified the value configured for the device in your Vera
will be used
exclude
*Optional
This parameter allows you to exclude the specified device from homeassistant,
it should be set to "true" if you want this device excluded
"""
import logging
from requests.exceptions import RequestException
import homeassistant.util.dt as dt_util
from homeassistant.helpers.entity import Entity
from homeassistant.const import (
ATTR_BATTERY_LEVEL, ATTR_TRIPPED, ATTR_ARMED, ATTR_LAST_TRIP_TIME,
TEMP_CELCIUS, TEMP_FAHRENHEIT)
# pylint: disable=no-name-in-module, import-error
import homeassistant.external.vera.vera as veraApi
_LOGGER = logging.getLogger(__name__)
# pylint: disable=unused-argument
def get_devices(hass, config):
""" Find and return Vera Sensors. """
base_url = config.get('vera_controller_url')
if not base_url:
_LOGGER.error(
"The required parameter 'vera_controller_url'"
" was not found in config"
)
return False
device_data = config.get('device_data', {})
vera_controller = veraApi.VeraController(base_url)
categories = ['Temperature Sensor', 'Light Sensor', 'Sensor']
devices = []
try:
devices = vera_controller.get_devices(categories)
except RequestException:
# There was a network related error connecting to the vera controller
_LOGGER.exception("Error communicating with Vera API")
return False
vera_sensors = []
for device in devices:
extra_data = device_data.get(device.deviceId, {})
exclude = extra_data.get('exclude', False)
if exclude is not True:
vera_sensors.append(VeraSensor(device, extra_data))
return vera_sensors
def setup_platform(hass, config, add_devices, discovery_info=None):
""" Performs setup for Vera controller devices. """
add_devices(get_devices(hass, config))
class VeraSensor(Entity):
""" Represents a Vera Sensor. """
def __init__(self, vera_device, extra_data=None):
self.vera_device = vera_device
self.extra_data = extra_data
if self.extra_data and self.extra_data.get('name'):
self._name = self.extra_data.get('name')
else:
self._name = self.vera_device.name
self.current_value = ''
self._temperature_units = None
def __str__(self):
return "%s %s %s" % (self.name, self.vera_device.deviceId, self.state)
@property
def state(self):
return self.current_value
@property
def name(self):
""" Get the mame of the sensor. """
return self._name
@property
def unit_of_measurement(self):
""" Unit of measurement of this entity, if any. """
return self._temperature_units
@property
def state_attributes(self):
attr = super().state_attributes
if self.vera_device.has_battery:
attr[ATTR_BATTERY_LEVEL] = self.vera_device.battery_level + '%'
if self.vera_device.is_armable:
armed = self.vera_device.refresh_value('Armed')
attr[ATTR_ARMED] = 'True' if armed == '1' else 'False'
if self.vera_device.is_trippable:
last_tripped = self.vera_device.refresh_value('LastTrip')
if last_tripped is not None:
utc_time = dt_util.utc_from_timestamp(int(last_tripped))
attr[ATTR_LAST_TRIP_TIME] = dt_util.datetime_to_str(
utc_time)
else:
attr[ATTR_LAST_TRIP_TIME] = None
tripped = self.vera_device.refresh_value('Tripped')
attr[ATTR_TRIPPED] = 'True' if tripped == '1' else 'False'
attr['Vera Device Id'] = self.vera_device.vera_device_id
return attr
def update(self):
if self.vera_device.category == "Temperature Sensor":
self.vera_device.refresh_value('CurrentTemperature')
current_temp = self.vera_device.get_value('CurrentTemperature')
vera_temp_units = self.vera_device.veraController.temperature_units
if vera_temp_units == 'F':
self._temperature_units = TEMP_FAHRENHEIT
else:
self._temperature_units = TEMP_CELCIUS
if self.hass:
temp = self.hass.config.temperature(
current_temp,
self._temperature_units)
current_temp, self._temperature_units = temp
self.current_value = current_temp
elif self.vera_device.category == "Light Sensor":
self.vera_device.refresh_value('CurrentLevel')
self.current_value = self.vera_device.get_value('CurrentLevel')
elif self.vera_device.category == "Sensor":
tripped = self.vera_device.refresh_value('Tripped')
self.current_value = 'Tripped' if tripped == '1' else 'Not Tripped'
else:
self.current_value = 'Unknown'
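# Illustrative output of state_attributes for a tripped, battery-powered,
# armable device (keys follow the ATTR_* constants; all values hypothetical):
#
#   {'battery_level': '77%', 'armed': 'True', 'tripped': 'True',
#    'last_trip_time': '2015-06-01 12:34:56', 'Vera Device Id': 12}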
|
michaelarnauts/home-assistant
|
homeassistant/components/sensor/vera.py
|
Python
|
mit
| 6,024
|
#
# Copyright (c) 2008--2016 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public License,
# version 2 (GPLv2). There is NO WARRANTY for this software, express or
# implied, including the implied warranties of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
# along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
#
# Red Hat trademarks are not licensed under GPLv2. No permission is
# granted to use or replicate Red Hat trademarks that are incorporated
# in this software or its documentation.
#
#
# Session management
#
import hashlib
import time
import string
import sys
from spacewalk.common.rhnConfig import CFG
from spacewalk.common.usix import raise_with_tb
import rhnSQL
class InvalidSessionError(Exception):
pass
class ExpiredSessionError(Exception):
pass
class Session:
def __init__(self, session_id=None):
self.session_id = session_id
self.expires = None
self.uid = None
self.duration = None
def generate(self, duration=None, web_user_id=None):
# Grabs a session ID
self.session_id = rhnSQL.Sequence('pxt_id_seq').next()
self.duration = int(duration or CFG.SESSION_LIFETIME)
self.web_user_id(web_user_id)
return self
def _get_secrets(self):
# Reads the four secrets from the config file
return list(map(lambda x, cfg=CFG: getattr(cfg, 'session_secret_%s' % x),
range(1, 5)))
def get_secrets(self):
# Validates the secrets from the config file
secrets = self._get_secrets()
if len(secrets) != len([_f for _f in secrets if _f]):
# the list of secrets has unset items
raise Exception("Secrets not set in the config file")
return secrets
def digest(self):
if self.session_id is None:
raise ValueError("session id not supplied")
secrets = self.get_secrets()
ctx = hashlib.new('sha256')
ctx.update(string.join(secrets[:2] + [str(self.session_id)] +
secrets[2:], ':'))
return string.join(["%02x" % ord(a) for a in ctx.digest()], '')
def get_session(self):
return "%sx%s" % (self.session_id, self.digest())
def web_user_id(self, uid=None):
if uid:
self.uid = uid
return self.uid
def load(self, session):
arr = string.split(session, 'x', 1)
if len(arr) != 2:
raise InvalidSessionError("Invalid session string")
digest = arr[1]
if len(digest) != 64:
raise InvalidSessionError("Invalid session string (wrong length)")
try:
self.session_id = int(arr[0])
except ValueError:
raise_with_tb(InvalidSessionError("Invalid session identifier"), sys.exc_info()[2])
if digest != self.digest():
raise InvalidSessionError("Bad session checksum")
h = rhnSQL.prepare("""
select web_user_id, expires, value
from pxtSessions
where id = :session_id
""")
h.execute(session_id=self.session_id)
row = h.fetchone_dict()
if row:
# Session is stored in the DB
if time.time() < row['expires']:
# And it's not expired yet - good to go
self.expires = row['expires']
self.uid = row['web_user_id']
return self
# Old session - clean it up
h = rhnSQL.prepare("""
delete from pxtSessions where id = :session_id
""")
h.execute(session_id=self.session_id)
rhnSQL.commit()
raise ExpiredSessionError("Session not found")
def save(self):
expires = int(time.time()) + self.duration
h = rhnSQL.prepare("""
insert into PXTSessions (id, web_user_id, expires, value)
values (:id, :web_user_id, :expires, :value)
""")
h.execute(id=self.session_id, web_user_id=self.uid,
expires=expires, value='RHNAPP')
rhnSQL.commit()
return self
def load(session_string):
return Session().load(session_string)
def generate(web_user_id=None, duration=None):
return Session().generate(web_user_id=web_user_id, duration=duration).save()
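# Illustrative round trip (hypothetical values; requires a configured database
# and the four session_secret_N entries in the config file):
#
#   s = generate(web_user_id=42, duration=3600)  # persists a pxtSessions row
#   token = s.get_session()                      # "<id>x<64-hex-sha256-digest>"
#   same = load(token)                           # verifies checksum and expiry
#   assert same.web_user_id() == 42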
|
ogajduse/spacewalk
|
backend/server/rhnSession.py
|
Python
|
gpl-2.0
| 4,431
|
# -*- coding: utf-8 -*-
"""
Tests of the Capa XModule
"""
# pylint: disable=C0111
# pylint: disable=R0904
# pylint: disable=C0103
# pylint: disable=C0302
import datetime
import json
import random
import os
import textwrap
import unittest
from mock import Mock, patch
import webob
from webob.multidict import MultiDict
import xmodule
from xmodule.tests import DATA_DIR
from capa.responsetypes import (StudentInputError, LoncapaProblemError,
ResponseError)
from capa.xqueue_interface import XQueueInterface
from xmodule.capa_module import CapaModule, ComplexEncoder
from xmodule.modulestore import Location
from xblock.field_data import DictFieldData
from xblock.fields import ScopeIds
from . import get_test_system
from pytz import UTC
from capa.correctmap import CorrectMap
class CapaFactory(object):
"""
A helper class to create problem modules with various parameters for testing.
"""
sample_problem_xml = textwrap.dedent("""\
<?xml version="1.0"?>
<problem>
<text>
<p>What is pi, to two decimal places?</p>
</text>
<numericalresponse answer="3.14">
<textline math="1" size="30"/>
</numericalresponse>
</problem>
""")
num = 0
@classmethod
def next_num(cls):
cls.num += 1
return cls.num
@classmethod
def input_key(cls, response_num=2, input_num=1):
"""
Return the input key to use when passing GET parameters
"""
return ("input_" + cls.answer_key(response_num, input_num))
@classmethod
def answer_key(cls, response_num=2, input_num=1):
"""
Return the key stored in the capa problem answer dict
"""
return (
"%s_%d_%d" % (
"-".join(['i4x', 'edX', 'capa_test', 'problem', 'SampleProblem%d' % cls.num]),
response_num,
input_num
)
)
@classmethod
def create(cls,
attempts=None,
problem_state=None,
correct=False,
xml=None,
**kwargs
):
"""
All parameters are optional, and are added to the created problem if specified.
Arguments:
graceperiod:
due:
max_attempts:
showanswer:
force_save_button:
rerandomize: all strings, as specified in the policy for the problem
            problem_state: a dict to be serialized into the instance_state of the
module.
attempts: also added to instance state. Will be converted to an int.
"""
location = Location(["i4x", "edX", "capa_test", "problem",
"SampleProblem{0}".format(cls.next_num())])
if xml is None:
xml = cls.sample_problem_xml
field_data = {'data': xml}
field_data.update(kwargs)
descriptor = Mock(weight="1")
if problem_state is not None:
field_data.update(problem_state)
if attempts is not None:
# converting to int here because I keep putting "0" and "1" in the tests
# since everything else is a string.
field_data['attempts'] = int(attempts)
system = get_test_system()
system.render_template = Mock(return_value="<div>Test Template HTML</div>")
module = CapaModule(
descriptor,
system,
DictFieldData(field_data),
ScopeIds(None, None, location, location),
)
if correct:
# TODO: probably better to actually set the internal state properly, but...
module.get_score = lambda: {'score': 1, 'total': 1}
else:
module.get_score = lambda: {'score': 0, 'total': 1}
return module
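# Illustrative use of the factory (hypothetical values): the keyword arguments
# map straight onto CapaModule fields, so tests can build a module in one line:
#
#   module = CapaFactory.create(max_attempts="2", attempts="1",
#                               due=self.tomorrow_str)
#   module.closed()  # -> False: an attempt remains and the due date is ahead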
class CapaFactoryWithFiles(CapaFactory):
"""
A factory for creating a Capa problem with files attached.
"""
sample_problem_xml = textwrap.dedent("""\
<problem>
<coderesponse queuename="BerkeleyX-cs188x">
<!-- actual filenames here don't matter for server-side tests,
they are only acted upon in the browser. -->
<filesubmission
points="25"
allowed_files="prog1.py prog2.py prog3.py"
required_files="prog1.py prog2.py prog3.py"
/>
<codeparam>
<answer_display>
If you're having trouble with this Project,
please refer to the Lecture Slides and attend office hours.
</answer_display>
<grader_payload>{"project": "p3"}</grader_payload>
</codeparam>
</coderesponse>
<customresponse>
<text>
If you worked with a partner, enter their username or email address. If you
worked alone, enter None.
</text>
<textline points="0" size="40" correct_answer="Your partner's username or 'None'"/>
<answer type="loncapa/python">
correct=['correct']
s = str(submission[0]).strip()
if submission[0] == '':
correct[0] = 'incorrect'
</answer>
</customresponse>
</problem>
""")
class CapaModuleTest(unittest.TestCase):
def setUp(self):
now = datetime.datetime.now(UTC)
day_delta = datetime.timedelta(days=1)
self.yesterday_str = str(now - day_delta)
self.today_str = str(now)
self.tomorrow_str = str(now + day_delta)
# in the capa grace period format, not in time delta format
self.two_day_delta_str = "2 days"
def test_import(self):
module = CapaFactory.create()
self.assertEqual(module.get_score()['score'], 0)
other_module = CapaFactory.create()
        self.assertEqual(other_module.get_score()['score'], 0)
self.assertNotEqual(module.url_name, other_module.url_name,
"Factory should be creating unique names for each problem")
def test_correct(self):
"""
Check that the factory creates correct and incorrect problems properly.
"""
module = CapaFactory.create()
self.assertEqual(module.get_score()['score'], 0)
other_module = CapaFactory.create(correct=True)
self.assertEqual(other_module.get_score()['score'], 1)
def test_showanswer_default(self):
"""
Make sure the show answer logic does the right thing.
"""
# default, no due date, showanswer 'closed', so problem is open, and show_answer
# not visible.
problem = CapaFactory.create()
self.assertFalse(problem.answer_available())
def test_showanswer_attempted(self):
problem = CapaFactory.create(showanswer='attempted')
self.assertFalse(problem.answer_available())
problem.attempts = 1
self.assertTrue(problem.answer_available())
def test_showanswer_closed(self):
# can see after attempts used up, even with due date in the future
used_all_attempts = CapaFactory.create(showanswer='closed',
max_attempts="1",
attempts="1",
due=self.tomorrow_str)
self.assertTrue(used_all_attempts.answer_available())
# can see after due date
after_due_date = CapaFactory.create(showanswer='closed',
max_attempts="1",
attempts="0",
due=self.yesterday_str)
self.assertTrue(after_due_date.answer_available())
# can't see because attempts left
attempts_left_open = CapaFactory.create(showanswer='closed',
max_attempts="1",
attempts="0",
due=self.tomorrow_str)
self.assertFalse(attempts_left_open.answer_available())
# Can't see because grace period hasn't expired
still_in_grace = CapaFactory.create(showanswer='closed',
max_attempts="1",
attempts="0",
due=self.yesterday_str,
graceperiod=self.two_day_delta_str)
self.assertFalse(still_in_grace.answer_available())
def test_showanswer_past_due(self):
"""
With showanswer="past_due" should only show answer after the problem is closed
for everyone--e.g. after due date + grace period.
"""
# can't see after attempts used up, even with due date in the future
used_all_attempts = CapaFactory.create(showanswer='past_due',
max_attempts="1",
attempts="1",
due=self.tomorrow_str)
self.assertFalse(used_all_attempts.answer_available())
# can see after due date
past_due_date = CapaFactory.create(showanswer='past_due',
max_attempts="1",
attempts="0",
due=self.yesterday_str)
self.assertTrue(past_due_date.answer_available())
# can't see because attempts left
attempts_left_open = CapaFactory.create(showanswer='past_due',
max_attempts="1",
attempts="0",
due=self.tomorrow_str)
self.assertFalse(attempts_left_open.answer_available())
# Can't see because grace period hasn't expired, even though have no more
# attempts.
still_in_grace = CapaFactory.create(showanswer='past_due',
max_attempts="1",
attempts="1",
due=self.yesterday_str,
graceperiod=self.two_day_delta_str)
self.assertFalse(still_in_grace.answer_available())
def test_showanswer_finished(self):
"""
With showanswer="finished" should show answer after the problem is closed,
or after the answer is correct.
"""
# can see after attempts used up, even with due date in the future
used_all_attempts = CapaFactory.create(showanswer='finished',
max_attempts="1",
attempts="1",
due=self.tomorrow_str)
self.assertTrue(used_all_attempts.answer_available())
# can see after due date
past_due_date = CapaFactory.create(showanswer='finished',
max_attempts="1",
attempts="0",
due=self.yesterday_str)
self.assertTrue(past_due_date.answer_available())
# can't see because attempts left and wrong
attempts_left_open = CapaFactory.create(showanswer='finished',
max_attempts="1",
attempts="0",
due=self.tomorrow_str)
self.assertFalse(attempts_left_open.answer_available())
# _can_ see because attempts left and right
correct_ans = CapaFactory.create(showanswer='finished',
max_attempts="1",
attempts="0",
due=self.tomorrow_str,
correct=True)
self.assertTrue(correct_ans.answer_available())
# Can see even though grace period hasn't expired, because have no more
# attempts.
still_in_grace = CapaFactory.create(showanswer='finished',
max_attempts="1",
attempts="1",
due=self.yesterday_str,
graceperiod=self.two_day_delta_str)
self.assertTrue(still_in_grace.answer_available())
def test_closed(self):
# Attempts < Max attempts --> NOT closed
module = CapaFactory.create(max_attempts="1", attempts="0")
self.assertFalse(module.closed())
# Attempts < Max attempts --> NOT closed
module = CapaFactory.create(max_attempts="2", attempts="1")
self.assertFalse(module.closed())
# Attempts = Max attempts --> closed
module = CapaFactory.create(max_attempts="1", attempts="1")
self.assertTrue(module.closed())
# Attempts > Max attempts --> closed
module = CapaFactory.create(max_attempts="1", attempts="2")
self.assertTrue(module.closed())
# Max attempts = 0 --> closed
module = CapaFactory.create(max_attempts="0", attempts="2")
self.assertTrue(module.closed())
# Past due --> closed
module = CapaFactory.create(max_attempts="1", attempts="0",
due=self.yesterday_str)
self.assertTrue(module.closed())
def test_due_date_extension(self):
module = CapaFactory.create(
max_attempts="1", attempts="0", due=self.yesterday_str,
extended_due=self.tomorrow_str)
self.assertFalse(module.closed())
def test_parse_get_params(self):
# Valid GET param dict
# 'input_5' intentionally left unset,
valid_get_dict = MultiDict({
'input_1': 'test',
'input_1_2': 'test',
'input_1_2_3': 'test',
'input_[]_3': 'test',
'input_4': None,
'input_6': 5
})
result = CapaModule.make_dict_of_responses(valid_get_dict)
# Expect that we get a dict with "input" stripped from key names
# and that we get the same values back
for key in result.keys():
original_key = "input_" + key
self.assertTrue(original_key in valid_get_dict,
"Output dict should have key %s" % original_key)
self.assertEqual(valid_get_dict[original_key], result[key])
# Valid GET param dict with list keys
# Each tuple represents a single parameter in the query string
valid_get_dict = MultiDict((('input_2[]', 'test1'), ('input_2[]', 'test2')))
result = CapaModule.make_dict_of_responses(valid_get_dict)
self.assertTrue('2' in result)
self.assertEqual(['test1', 'test2'], result['2'])
# If we use [] at the end of a key name, we should always
# get a list, even if there's just one value
valid_get_dict = MultiDict({'input_1[]': 'test'})
result = CapaModule.make_dict_of_responses(valid_get_dict)
self.assertEqual(result['1'], ['test'])
# If we have no underscores in the name, then the key is invalid
invalid_get_dict = MultiDict({'input': 'test'})
with self.assertRaises(ValueError):
result = CapaModule.make_dict_of_responses(invalid_get_dict)
# Two equivalent names (one list, one non-list)
# One of the values would overwrite the other, so detect this
# and raise an exception
invalid_get_dict = MultiDict({'input_1[]': 'test 1',
'input_1': 'test 2'})
with self.assertRaises(ValueError):
result = CapaModule.make_dict_of_responses(invalid_get_dict)
def test_check_problem_correct(self):
module = CapaFactory.create(attempts=1)
# Simulate that all answers are marked correct, no matter
# what the input is, by patching CorrectMap.is_correct()
# Also simulate rendering the HTML
# TODO: pep8 thinks the following line has invalid syntax
with patch('capa.correctmap.CorrectMap.is_correct') as mock_is_correct, \
patch('xmodule.capa_module.CapaModule.get_problem_html') as mock_html:
mock_is_correct.return_value = True
mock_html.return_value = "Test HTML"
# Check the problem
get_request_dict = {CapaFactory.input_key(): '3.14'}
result = module.check_problem(get_request_dict)
# Expect that the problem is marked correct
self.assertEqual(result['success'], 'correct')
# Expect that we get the (mocked) HTML
self.assertEqual(result['contents'], 'Test HTML')
# Expect that the number of attempts is incremented by 1
self.assertEqual(module.attempts, 2)
def test_check_problem_incorrect(self):
module = CapaFactory.create(attempts=0)
# Simulate marking the input incorrect
with patch('capa.correctmap.CorrectMap.is_correct') as mock_is_correct:
mock_is_correct.return_value = False
# Check the problem
get_request_dict = {CapaFactory.input_key(): '0'}
result = module.check_problem(get_request_dict)
            # Expect that the problem is marked incorrect
self.assertEqual(result['success'], 'incorrect')
# Expect that the number of attempts is incremented by 1
self.assertEqual(module.attempts, 1)
def test_check_problem_closed(self):
module = CapaFactory.create(attempts=3)
# Problem closed -- cannot submit
# Simulate that CapaModule.closed() always returns True
with patch('xmodule.capa_module.CapaModule.closed') as mock_closed:
mock_closed.return_value = True
with self.assertRaises(xmodule.exceptions.NotFoundError):
get_request_dict = {CapaFactory.input_key(): '3.14'}
module.check_problem(get_request_dict)
# Expect that number of attempts NOT incremented
self.assertEqual(module.attempts, 3)
def test_check_problem_resubmitted_with_randomize(self):
rerandomize_values = ['always', 'true']
for rerandomize in rerandomize_values:
# Randomize turned on
module = CapaFactory.create(rerandomize=rerandomize, attempts=0)
# Simulate that the problem is completed
module.done = True
# Expect that we cannot submit
with self.assertRaises(xmodule.exceptions.NotFoundError):
get_request_dict = {CapaFactory.input_key(): '3.14'}
module.check_problem(get_request_dict)
# Expect that number of attempts NOT incremented
self.assertEqual(module.attempts, 0)
def test_check_problem_resubmitted_no_randomize(self):
rerandomize_values = ['never', 'false', 'per_student']
for rerandomize in rerandomize_values:
# Randomize turned off
module = CapaFactory.create(rerandomize=rerandomize, attempts=0, done=True)
# Expect that we can submit successfully
get_request_dict = {CapaFactory.input_key(): '3.14'}
result = module.check_problem(get_request_dict)
self.assertEqual(result['success'], 'correct')
# Expect that number of attempts IS incremented
self.assertEqual(module.attempts, 1)
def test_check_problem_queued(self):
module = CapaFactory.create(attempts=1)
# Simulate that the problem is queued
with patch('capa.capa_problem.LoncapaProblem.is_queued') \
as mock_is_queued, \
patch('capa.capa_problem.LoncapaProblem.get_recentmost_queuetime') \
as mock_get_queuetime:
mock_is_queued.return_value = True
mock_get_queuetime.return_value = datetime.datetime.now(UTC)
get_request_dict = {CapaFactory.input_key(): '3.14'}
result = module.check_problem(get_request_dict)
# Expect an AJAX alert message in 'success'
self.assertTrue('You must wait' in result['success'])
# Expect that the number of attempts is NOT incremented
self.assertEqual(module.attempts, 1)
def test_check_problem_with_files(self):
# Check a problem with uploaded files, using the check_problem API.
# pylint: disable=W0212
# The files we'll be uploading.
fnames = ["prog1.py", "prog2.py", "prog3.py"]
fpaths = [os.path.join(DATA_DIR, "capa", fname) for fname in fnames]
fileobjs = [open(fpath) for fpath in fpaths]
for fileobj in fileobjs:
self.addCleanup(fileobj.close)
module = CapaFactoryWithFiles.create()
# Mock the XQueueInterface.
xqueue_interface = XQueueInterface("http://example.com/xqueue", Mock())
xqueue_interface._http_post = Mock(return_value=(0, "ok"))
module.system.xqueue['interface'] = xqueue_interface
# Create a request dictionary for check_problem.
get_request_dict = {
CapaFactoryWithFiles.input_key(response_num=2): fileobjs,
CapaFactoryWithFiles.input_key(response_num=3): 'None',
}
module.check_problem(get_request_dict)
# _http_post is called like this:
# _http_post(
# 'http://example.com/xqueue/xqueue/submit/',
# {
# 'xqueue_header': '{"lms_key": "df34fb702620d7ae892866ba57572491", "lms_callback_url": "/", "queue_name": "BerkeleyX-cs188x"}',
# 'xqueue_body': '{"student_info": "{\\"anonymous_student_id\\": \\"student\\", \\"submission_time\\": \\"20131117183318\\"}", "grader_payload": "{\\"project\\": \\"p3\\"}", "student_response": ""}',
# },
# files={
# path(u'/home/ned/edx/edx-platform/common/test/data/uploads/asset.html'):
# <open file u'/home/ned/edx/edx-platform/common/test/data/uploads/asset.html', mode 'r' at 0x49c5f60>,
# path(u'/home/ned/edx/edx-platform/common/test/data/uploads/image.jpg'):
# <open file u'/home/ned/edx/edx-platform/common/test/data/uploads/image.jpg', mode 'r' at 0x49c56f0>,
# path(u'/home/ned/edx/edx-platform/common/test/data/uploads/textbook.pdf'):
# <open file u'/home/ned/edx/edx-platform/common/test/data/uploads/textbook.pdf', mode 'r' at 0x49c5a50>,
# },
# )
self.assertEqual(xqueue_interface._http_post.call_count, 1)
_, kwargs = xqueue_interface._http_post.call_args
self.assertItemsEqual(fpaths, kwargs['files'].keys())
for fpath, fileobj in kwargs['files'].iteritems():
self.assertEqual(fpath, fileobj.name)
def test_check_problem_with_files_as_xblock(self):
# Check a problem with uploaded files, using the XBlock API.
# pylint: disable=W0212
# The files we'll be uploading.
fnames = ["prog1.py", "prog2.py", "prog3.py"]
fpaths = [os.path.join(DATA_DIR, "capa", fname) for fname in fnames]
fileobjs = [open(fpath) for fpath in fpaths]
for fileobj in fileobjs:
self.addCleanup(fileobj.close)
module = CapaFactoryWithFiles.create()
# Mock the XQueueInterface.
xqueue_interface = XQueueInterface("http://example.com/xqueue", Mock())
xqueue_interface._http_post = Mock(return_value=(0, "ok"))
module.system.xqueue['interface'] = xqueue_interface
# Create a webob Request with the files uploaded.
post_data = []
for fname, fileobj in zip(fnames, fileobjs):
post_data.append((CapaFactoryWithFiles.input_key(response_num=2), (fname, fileobj)))
post_data.append((CapaFactoryWithFiles.input_key(response_num=3), 'None'))
request = webob.Request.blank("/some/fake/url", POST=post_data, content_type='multipart/form-data')
module.handle('xmodule_handler', request, 'problem_check')
self.assertEqual(xqueue_interface._http_post.call_count, 1)
_, kwargs = xqueue_interface._http_post.call_args
self.assertItemsEqual(fnames, kwargs['files'].keys())
for fpath, fileobj in kwargs['files'].iteritems():
self.assertEqual(fpath, fileobj.name)
def test_check_problem_error(self):
# Try each exception that capa_module should handle
exception_classes = [StudentInputError,
LoncapaProblemError,
ResponseError]
for exception_class in exception_classes:
# Create the module
module = CapaFactory.create(attempts=1)
# Ensure that the user is NOT staff
module.system.user_is_staff = False
# Simulate answering a problem that raises the exception
with patch('capa.capa_problem.LoncapaProblem.grade_answers') as mock_grade:
mock_grade.side_effect = exception_class('test error')
get_request_dict = {CapaFactory.input_key(): '3.14'}
result = module.check_problem(get_request_dict)
# Expect an AJAX alert message in 'success'
expected_msg = 'Error: test error'
self.assertEqual(expected_msg, result['success'])
# Expect that the number of attempts is NOT incremented
self.assertEqual(module.attempts, 1)
def test_check_problem_other_errors(self):
"""
Test that errors other than the expected kinds give an appropriate message.
See also `test_check_problem_error` for the "expected kinds" or errors.
"""
# Create the module
module = CapaFactory.create(attempts=1)
# Ensure that the user is NOT staff
module.system.user_is_staff = False
# Ensure that DEBUG is on
module.system.DEBUG = True
# Simulate answering a problem that raises the exception
with patch('capa.capa_problem.LoncapaProblem.grade_answers') as mock_grade:
error_msg = u"Superterrible error happened: ☠"
mock_grade.side_effect = Exception(error_msg)
get_request_dict = {CapaFactory.input_key(): '3.14'}
result = module.check_problem(get_request_dict)
# Expect an AJAX alert message in 'success'
self.assertTrue(error_msg in result['success'])
def test_check_problem_error_nonascii(self):
# Try each exception that capa_module should handle
exception_classes = [StudentInputError,
LoncapaProblemError,
ResponseError]
for exception_class in exception_classes:
# Create the module
module = CapaFactory.create(attempts=1)
# Ensure that the user is NOT staff
module.system.user_is_staff = False
# Simulate answering a problem that raises the exception
with patch('capa.capa_problem.LoncapaProblem.grade_answers') as mock_grade:
mock_grade.side_effect = exception_class(u"ȧƈƈḗƞŧḗḓ ŧḗẋŧ ƒǿř ŧḗşŧīƞɠ")
get_request_dict = {CapaFactory.input_key(): '3.14'}
result = module.check_problem(get_request_dict)
# Expect an AJAX alert message in 'success'
expected_msg = u'Error: ȧƈƈḗƞŧḗḓ ŧḗẋŧ ƒǿř ŧḗşŧīƞɠ'
self.assertEqual(expected_msg, result['success'])
# Expect that the number of attempts is NOT incremented
self.assertEqual(module.attempts, 1)
def test_check_problem_error_with_staff_user(self):
# Try each exception that capa module should handle
for exception_class in [StudentInputError,
LoncapaProblemError,
ResponseError]:
# Create the module
module = CapaFactory.create(attempts=1)
# Ensure that the user IS staff
module.system.user_is_staff = True
# Simulate answering a problem that raises an exception
with patch('capa.capa_problem.LoncapaProblem.grade_answers') as mock_grade:
mock_grade.side_effect = exception_class('test error')
get_request_dict = {CapaFactory.input_key(): '3.14'}
result = module.check_problem(get_request_dict)
# Expect an AJAX alert message in 'success'
self.assertTrue('test error' in result['success'])
# We DO include traceback information for staff users
self.assertTrue('Traceback' in result['success'])
# Expect that the number of attempts is NOT incremented
self.assertEqual(module.attempts, 1)
def test_reset_problem(self):
module = CapaFactory.create(done=True)
module.new_lcp = Mock(wraps=module.new_lcp)
module.choose_new_seed = Mock(wraps=module.choose_new_seed)
# Stub out HTML rendering
with patch('xmodule.capa_module.CapaModule.get_problem_html') as mock_html:
mock_html.return_value = "<div>Test HTML</div>"
# Reset the problem
get_request_dict = {}
result = module.reset_problem(get_request_dict)
# Expect that the request was successful
self.assertTrue('success' in result and result['success'])
# Expect that the problem HTML is retrieved
self.assertTrue('html' in result)
self.assertEqual(result['html'], "<div>Test HTML</div>")
# Expect that the problem was reset
module.new_lcp.assert_called_once_with(None)
def test_reset_problem_closed(self):
# pre studio default
module = CapaFactory.create(rerandomize="always")
# Simulate that the problem is closed
with patch('xmodule.capa_module.CapaModule.closed') as mock_closed:
mock_closed.return_value = True
# Try to reset the problem
get_request_dict = {}
result = module.reset_problem(get_request_dict)
# Expect that the problem was NOT reset
self.assertTrue('success' in result and not result['success'])
def test_reset_problem_not_done(self):
# Simulate that the problem is NOT done
module = CapaFactory.create(done=False)
# Try to reset the problem
get_request_dict = {}
result = module.reset_problem(get_request_dict)
# Expect that the problem was NOT reset
self.assertTrue('success' in result and not result['success'])
def test_rescore_problem_correct(self):
module = CapaFactory.create(attempts=1, done=True)
# Simulate that all answers are marked correct, no matter
# what the input is, by patching LoncapaResponse.evaluate_answers()
with patch('capa.responsetypes.LoncapaResponse.evaluate_answers') as mock_evaluate_answers:
mock_evaluate_answers.return_value = CorrectMap(CapaFactory.answer_key(), 'correct')
result = module.rescore_problem()
# Expect that the problem is marked correct
self.assertEqual(result['success'], 'correct')
# Expect that we get no HTML
self.assertFalse('contents' in result)
# Expect that the number of attempts is not incremented
self.assertEqual(module.attempts, 1)
def test_rescore_problem_incorrect(self):
# make sure it also works when attempts have been reset,
# so add this to the test:
module = CapaFactory.create(attempts=0, done=True)
# Simulate that all answers are marked incorrect, no matter
# what the input is, by patching LoncapaResponse.evaluate_answers()
with patch('capa.responsetypes.LoncapaResponse.evaluate_answers') as mock_evaluate_answers:
mock_evaluate_answers.return_value = CorrectMap(CapaFactory.answer_key(), 'incorrect')
result = module.rescore_problem()
# Expect that the problem is marked incorrect
self.assertEqual(result['success'], 'incorrect')
# Expect that the number of attempts is not incremented
self.assertEqual(module.attempts, 0)
def test_rescore_problem_not_done(self):
# Simulate that the problem is NOT done
module = CapaFactory.create(done=False)
# Try to rescore the problem, and get exception
with self.assertRaises(xmodule.exceptions.NotFoundError):
module.rescore_problem()
def test_rescore_problem_not_supported(self):
module = CapaFactory.create(done=True)
# Try to rescore the problem, and get exception
with patch('capa.capa_problem.LoncapaProblem.supports_rescoring') as mock_supports_rescoring:
mock_supports_rescoring.return_value = False
with self.assertRaises(NotImplementedError):
module.rescore_problem()
def _rescore_problem_error_helper(self, exception_class):
"""Helper to allow testing all errors that rescoring might return."""
# Create the module
module = CapaFactory.create(attempts=1, done=True)
# Simulate answering a problem that raises the exception
with patch('capa.capa_problem.LoncapaProblem.rescore_existing_answers') as mock_rescore:
mock_rescore.side_effect = exception_class(u'test error \u03a9')
result = module.rescore_problem()
# Expect an AJAX alert message in 'success'
expected_msg = u'Error: test error \u03a9'
self.assertEqual(result['success'], expected_msg)
# Expect that the number of attempts is NOT incremented
self.assertEqual(module.attempts, 1)
def test_rescore_problem_student_input_error(self):
self._rescore_problem_error_helper(StudentInputError)
def test_rescore_problem_problem_error(self):
self._rescore_problem_error_helper(LoncapaProblemError)
def test_rescore_problem_response_error(self):
self._rescore_problem_error_helper(ResponseError)
def test_save_problem(self):
module = CapaFactory.create(done=False)
# Save the problem
get_request_dict = {CapaFactory.input_key(): '3.14'}
result = module.save_problem(get_request_dict)
# Expect that answers are saved to the problem
expected_answers = {CapaFactory.answer_key(): '3.14'}
self.assertEqual(module.lcp.student_answers, expected_answers)
# Expect that the result is success
self.assertTrue('success' in result and result['success'])
def test_save_problem_closed(self):
module = CapaFactory.create(done=False)
# Simulate that the problem is closed
with patch('xmodule.capa_module.CapaModule.closed') as mock_closed:
mock_closed.return_value = True
# Try to save the problem
get_request_dict = {CapaFactory.input_key(): '3.14'}
result = module.save_problem(get_request_dict)
# Expect that the result is failure
self.assertTrue('success' in result and not result['success'])
def test_save_problem_submitted_with_randomize(self):
# Capa XModule treats 'always' and 'true' equivalently
rerandomize_values = ['always', 'true']
for rerandomize in rerandomize_values:
module = CapaFactory.create(rerandomize=rerandomize, done=True)
# Try to save
get_request_dict = {CapaFactory.input_key(): '3.14'}
result = module.save_problem(get_request_dict)
# Expect that we cannot save
self.assertTrue('success' in result and not result['success'])
def test_save_problem_submitted_no_randomize(self):
# Capa XModule treats 'false' and 'per_student' equivalently
rerandomize_values = ['never', 'false', 'per_student']
for rerandomize in rerandomize_values:
module = CapaFactory.create(rerandomize=rerandomize, done=True)
# Try to save
get_request_dict = {CapaFactory.input_key(): '3.14'}
result = module.save_problem(get_request_dict)
# Expect that we succeed
self.assertTrue('success' in result and result['success'])
def test_check_button_name(self):
# If last attempt, button name changes to "Final Check"
# Just in case, we also check what happens if we have
# more attempts than allowed.
attempts = random.randint(1, 10)
module = CapaFactory.create(attempts=attempts - 1, max_attempts=attempts)
self.assertEqual(module.check_button_name(), "Final Check")
module = CapaFactory.create(attempts=attempts, max_attempts=attempts)
self.assertEqual(module.check_button_name(), "Final Check")
module = CapaFactory.create(attempts=attempts + 1, max_attempts=attempts)
self.assertEqual(module.check_button_name(), "Final Check")
# Otherwise, button name is "Check"
module = CapaFactory.create(attempts=attempts - 2, max_attempts=attempts)
self.assertEqual(module.check_button_name(), "Check")
module = CapaFactory.create(attempts=attempts - 3, max_attempts=attempts)
self.assertEqual(module.check_button_name(), "Check")
# If no limit on attempts, then always show "Check"
module = CapaFactory.create(attempts=attempts - 3)
self.assertEqual(module.check_button_name(), "Check")
module = CapaFactory.create(attempts=0)
self.assertEqual(module.check_button_name(), "Check")
def test_check_button_checking_name(self):
module = CapaFactory.create(attempts=1, max_attempts=10)
self.assertEqual(module.check_button_checking_name(), "Checking...")
module = CapaFactory.create(attempts=10, max_attempts=10)
self.assertEqual(module.check_button_checking_name(), "Checking...")
def test_check_button_name_customization(self):
module = CapaFactory.create(
attempts=1,
max_attempts=10,
text_customization={"custom_check": "Submit", "custom_final_check": "Final Submit"}
)
self.assertEqual(module.check_button_name(), "Submit")
module = CapaFactory.create(attempts=9,
max_attempts=10,
text_customization={"custom_check": "Submit", "custom_final_check": "Final Submit"}
)
self.assertEqual(module.check_button_name(), "Final Submit")
def test_check_button_checking_name_customization(self):
module = CapaFactory.create(
attempts=1,
max_attempts=10,
text_customization={
"custom_check": "Submit",
"custom_final_check": "Final Submit",
"custom_checking": "Checking..."
}
)
self.assertEqual(module.check_button_checking_name(), "Checking...")
module = CapaFactory.create(
attempts=9,
max_attempts=10,
text_customization={
"custom_check": "Submit",
"custom_final_check": "Final Submit",
"custom_checking": "Checking..."
}
)
self.assertEqual(module.check_button_checking_name(), "Checking...")
def test_should_show_check_button(self):
attempts = random.randint(1, 10)
# If we're after the deadline, do NOT show check button
module = CapaFactory.create(due=self.yesterday_str)
self.assertFalse(module.should_show_check_button())
# If user is out of attempts, do NOT show the check button
module = CapaFactory.create(attempts=attempts, max_attempts=attempts)
self.assertFalse(module.should_show_check_button())
# If survey question (max_attempts = 0), do NOT show the check button
module = CapaFactory.create(max_attempts=0)
self.assertFalse(module.should_show_check_button())
# If user submitted a problem but hasn't reset,
# do NOT show the check button
# Note: we can only reset when rerandomize="always" or "true"
module = CapaFactory.create(rerandomize="always", done=True)
self.assertFalse(module.should_show_check_button())
module = CapaFactory.create(rerandomize="true", done=True)
self.assertFalse(module.should_show_check_button())
# Otherwise, DO show the check button
module = CapaFactory.create()
self.assertTrue(module.should_show_check_button())
# If the user has submitted the problem
# and we do NOT have a reset button, then we can show the check button
# Setting rerandomize to "never" or "false" ensures that the reset button
# is not shown
module = CapaFactory.create(rerandomize="never", done=True)
self.assertTrue(module.should_show_check_button())
module = CapaFactory.create(rerandomize="false", done=True)
self.assertTrue(module.should_show_check_button())
module = CapaFactory.create(rerandomize="per_student", done=True)
self.assertTrue(module.should_show_check_button())
def test_should_show_reset_button(self):
attempts = random.randint(1, 10)
# If we're after the deadline, do NOT show the reset button
module = CapaFactory.create(due=self.yesterday_str, done=True)
self.assertFalse(module.should_show_reset_button())
# If the user is out of attempts, do NOT show the reset button
module = CapaFactory.create(attempts=attempts, max_attempts=attempts, done=True)
self.assertFalse(module.should_show_reset_button())
# If we're NOT randomizing, then do NOT show the reset button
module = CapaFactory.create(rerandomize="never", done=True)
self.assertFalse(module.should_show_reset_button())
# If we're NOT randomizing, then do NOT show the reset button
module = CapaFactory.create(rerandomize="per_student", done=True)
self.assertFalse(module.should_show_reset_button())
# If we're NOT randomizing, then do NOT show the reset button
module = CapaFactory.create(rerandomize="false", done=True)
self.assertFalse(module.should_show_reset_button())
# If the user hasn't submitted an answer yet,
# then do NOT show the reset button
module = CapaFactory.create(done=False)
self.assertFalse(module.should_show_reset_button())
# pre studio default value, DO show the reset button
module = CapaFactory.create(rerandomize="always", done=True)
self.assertTrue(module.should_show_reset_button())
# If survey question for capa (max_attempts = 0),
# DO show the reset button
module = CapaFactory.create(rerandomize="always", max_attempts=0, done=True)
self.assertTrue(module.should_show_reset_button())
def test_should_show_save_button(self):
attempts = random.randint(1, 10)
# If we're after the deadline, do NOT show the save button
module = CapaFactory.create(due=self.yesterday_str, done=True)
self.assertFalse(module.should_show_save_button())
# If the user is out of attempts, do NOT show the save button
module = CapaFactory.create(attempts=attempts, max_attempts=attempts, done=True)
self.assertFalse(module.should_show_save_button())
# If user submitted a problem but hasn't reset, do NOT show the save button
module = CapaFactory.create(rerandomize="always", done=True)
self.assertFalse(module.should_show_save_button())
module = CapaFactory.create(rerandomize="true", done=True)
self.assertFalse(module.should_show_save_button())
# If the user has unlimited attempts and we are not randomizing,
# then do NOT show a save button
# because they can keep using "Check"
module = CapaFactory.create(max_attempts=None, rerandomize="never", done=False)
self.assertFalse(module.should_show_save_button())
module = CapaFactory.create(max_attempts=None, rerandomize="false", done=True)
self.assertFalse(module.should_show_save_button())
module = CapaFactory.create(max_attempts=None, rerandomize="per_student", done=True)
self.assertFalse(module.should_show_save_button())
# pre-studio default, DO show the save button
module = CapaFactory.create(rerandomize="always", done=False)
self.assertTrue(module.should_show_save_button())
# If we're not randomizing and we have limited attempts, then we can save
module = CapaFactory.create(rerandomize="never", max_attempts=2, done=True)
self.assertTrue(module.should_show_save_button())
module = CapaFactory.create(rerandomize="false", max_attempts=2, done=True)
self.assertTrue(module.should_show_save_button())
module = CapaFactory.create(rerandomize="per_student", max_attempts=2, done=True)
self.assertTrue(module.should_show_save_button())
# If survey question for capa (max_attempts = 0),
# DO show the save button
module = CapaFactory.create(max_attempts=0, done=False)
self.assertTrue(module.should_show_save_button())
def test_should_show_save_button_force_save_button(self):
# If we're after the deadline, do NOT show the save button
# even though we're forcing a save
module = CapaFactory.create(due=self.yesterday_str,
force_save_button="true",
done=True)
self.assertFalse(module.should_show_save_button())
# If the user is out of attempts, do NOT show the save button
attempts = random.randint(1, 10)
module = CapaFactory.create(attempts=attempts,
max_attempts=attempts,
force_save_button="true",
done=True)
self.assertFalse(module.should_show_save_button())
# Otherwise, if we force the save button,
# then show it even if we would ordinarily
# require a reset first
module = CapaFactory.create(force_save_button="true",
rerandomize="always",
done=True)
self.assertTrue(module.should_show_save_button())
module = CapaFactory.create(force_save_button="true",
rerandomize="true",
done=True)
self.assertTrue(module.should_show_save_button())
def test_no_max_attempts(self):
module = CapaFactory.create(max_attempts='')
html = module.get_problem_html()
self.assertTrue(html is not None)
# assert that we got here without exploding
def test_get_problem_html(self):
module = CapaFactory.create()
# We've tested the show/hide button logic in other tests,
# so here we hard-wire the values
        show_check_button = bool(random.randint(0, 1))
        show_reset_button = bool(random.randint(0, 1))
        show_save_button = bool(random.randint(0, 1))
module.should_show_check_button = Mock(return_value=show_check_button)
module.should_show_reset_button = Mock(return_value=show_reset_button)
module.should_show_save_button = Mock(return_value=show_save_button)
# Mock the system rendering function
module.system.render_template = Mock(return_value="<div>Test Template HTML</div>")
# Patch the capa problem's HTML rendering
with patch('capa.capa_problem.LoncapaProblem.get_html') as mock_html:
mock_html.return_value = "<div>Test Problem HTML</div>"
# Render the problem HTML
html = module.get_problem_html(encapsulate=False)
# Also render the problem encapsulated in a <div>
html_encapsulated = module.get_problem_html(encapsulate=True)
# Expect that we get the rendered template back
self.assertEqual(html, "<div>Test Template HTML</div>")
# Check the rendering context
render_args, _ = module.system.render_template.call_args
self.assertEqual(len(render_args), 2)
template_name = render_args[0]
self.assertEqual(template_name, "problem.html")
context = render_args[1]
self.assertEqual(context['problem']['html'], "<div>Test Problem HTML</div>")
self.assertEqual(bool(context['check_button']), show_check_button)
self.assertEqual(bool(context['reset_button']), show_reset_button)
self.assertEqual(bool(context['save_button']), show_save_button)
# Assert that the encapsulated html contains the original html
self.assertTrue(html in html_encapsulated)
def test_input_state_consistency(self):
module1 = CapaFactory.create()
module2 = CapaFactory.create()
# check to make sure that the input_state and the keys have the same values
module1.set_state_from_lcp()
self.assertEqual(module1.lcp.inputs.keys(), module1.input_state.keys())
module2.set_state_from_lcp()
intersection = set(module2.input_state.keys()).intersection(set(module1.input_state.keys()))
self.assertEqual(len(intersection), 0)
def test_get_problem_html_error(self):
"""
In production, when an error occurs with the problem HTML
rendering, a "dummy" problem is created with an error
message to display to the user.
"""
module = CapaFactory.create()
# Save the original problem so we can compare it later
original_problem = module.lcp
# Simulate throwing an exception when the capa problem
# is asked to render itself as HTML
module.lcp.get_html = Mock(side_effect=Exception("Test"))
# Stub out the get_test_system rendering function
module.system.render_template = Mock(return_value="<div>Test Template HTML</div>")
# Turn off DEBUG
module.system.DEBUG = False
# Try to render the module with DEBUG turned off
html = module.get_problem_html()
self.assertTrue(html is not None)
# Check the rendering context
render_args, _ = module.system.render_template.call_args
context = render_args[1]
self.assertTrue("error" in context['problem']['html'])
# Expect that the module has created a new dummy problem with the error
self.assertNotEqual(original_problem, module.lcp)
def test_get_problem_html_error_w_debug(self):
"""
Test the html response when an error occurs with DEBUG on
"""
module = CapaFactory.create()
# Simulate throwing an exception when the capa problem
# is asked to render itself as HTML
error_msg = u"Superterrible error happened: ☠"
module.lcp.get_html = Mock(side_effect=Exception(error_msg))
# Stub out the get_test_system rendering function
module.system.render_template = Mock(return_value="<div>Test Template HTML</div>")
# Make sure DEBUG is on
module.system.DEBUG = True
# Try to render the module with DEBUG turned on
html = module.get_problem_html()
self.assertTrue(html is not None)
# Check the rendering context
render_args, _ = module.system.render_template.call_args
context = render_args[1]
self.assertTrue(error_msg in context['problem']['html'])
def test_random_seed_no_change(self):
# Run the test for each possible rerandomize value
for rerandomize in ['false', 'never',
'per_student', 'always',
'true', 'onreset']:
module = CapaFactory.create(rerandomize=rerandomize)
# Get the seed
# By this point, the module should have persisted the seed
seed = module.seed
self.assertTrue(seed is not None)
            # When rerandomize is 'never', the seed is always set
            # to the same value (1)
            if rerandomize == 'never':
self.assertEqual(seed, 1,
msg="Seed should always be 1 when rerandomize='%s'" % rerandomize)
# Check the problem
get_request_dict = {CapaFactory.input_key(): '3.14'}
module.check_problem(get_request_dict)
# Expect that the seed is the same
self.assertEqual(seed, module.seed)
# Save the problem
module.save_problem(get_request_dict)
# Expect that the seed is the same
self.assertEqual(seed, module.seed)
def test_random_seed_with_reset(self):
def _reset_and_get_seed(module):
'''
Reset the XModule and return the module's seed
'''
# Simulate submitting an attempt
# We need to do this, or reset_problem() will
# fail with a complaint that we haven't submitted
# the problem yet.
module.done = True
# Reset the problem
module.reset_problem({})
# Return the seed
return module.seed
def _retry_and_check(num_tries, test_func):
'''
Returns True if *test_func* was successful
(returned True) within *num_tries* attempts
*test_func* must be a function
of the form test_func() -> bool
'''
success = False
for i in range(num_tries):
if test_func() is True:
success = True
break
return success
# Run the test for each possible rerandomize value
for rerandomize in ['never', 'false', 'per_student',
'always', 'true', 'onreset']:
module = CapaFactory.create(rerandomize=rerandomize)
# Get the seed
# By this point, the module should have persisted the seed
seed = module.seed
self.assertTrue(seed is not None)
# We do NOT want the seed to reset if rerandomize
# is set to 'never' -- it should still be 1
# The seed also stays the same if we're randomizing
# 'per_student': the same student should see the same problem
if rerandomize in ['never', 'false', 'per_student']:
self.assertEqual(seed, _reset_and_get_seed(module))
# Otherwise, we expect the seed to change
# to another valid seed
else:
# Since there's a small chance we might get the
# same seed again, give it 5 chances
# to generate a different seed
success = _retry_and_check(5, lambda: _reset_and_get_seed(module) != seed)
self.assertTrue(module.seed is not None)
msg = 'Could not get a new seed from reset after 5 tries'
self.assertTrue(success, msg)
def test_random_seed_bins(self):
# Assert that we are limiting the number of possible seeds.
# Check the conditions that generate random seeds
for rerandomize in ['always', 'per_student', 'true', 'onreset']:
# Get a bunch of seeds, they should all be in 0-999.
for i in range(200):
module = CapaFactory.create(rerandomize=rerandomize)
assert 0 <= module.seed < 1000
@patch('xmodule.capa_base.log')
@patch('xmodule.capa_base.Progress')
def test_get_progress_error(self, mock_progress, mock_log):
"""
Check that an exception given in `Progress` produces a `log.exception` call.
"""
error_types = [TypeError, ValueError]
for error_type in error_types:
mock_progress.side_effect = error_type
module = CapaFactory.create()
self.assertIsNone(module.get_progress())
mock_log.exception.assert_called_once_with('Got bad progress')
mock_log.reset_mock()
@patch('xmodule.capa_base.Progress')
def test_get_progress_no_error_if_weight_zero(self, mock_progress):
"""
Check that if the weight is 0 get_progress does not try to create a Progress object.
"""
mock_progress.return_value = True
module = CapaFactory.create()
module.weight = 0
progress = module.get_progress()
self.assertIsNone(progress)
self.assertFalse(mock_progress.called)
@patch('xmodule.capa_base.Progress')
def test_get_progress_calculate_progress_fraction(self, mock_progress):
"""
Check that score and total are calculated correctly for the progress fraction.
"""
module = CapaFactory.create()
module.weight = 1
module.get_progress()
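        # Progress(score, total): nothing answered correctly yet, so 0 out of 1.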
mock_progress.assert_called_with(0, 1)
other_module = CapaFactory.create(correct=True)
other_module.weight = 1
other_module.get_progress()
mock_progress.assert_called_with(1, 1)
def test_get_html(self):
"""
Check that get_html() calls get_progress() with no arguments.
"""
module = CapaFactory.create()
module.get_progress = Mock(wraps=module.get_progress)
module.get_html()
module.get_progress.assert_called_once_with()
def test_get_problem(self):
"""
Check that get_problem() returns the expected dictionary.
"""
module = CapaFactory.create()
self.assertEquals(module.get_problem("data"), {'html': module.get_problem_html(encapsulate=False)})
# Standard question with shuffle="true" used by a few tests
common_shuffle_xml = textwrap.dedent("""
<problem>
<multiplechoiceresponse>
<choicegroup type="MultipleChoice" shuffle="true">
<choice correct="false">Apple</choice>
<choice correct="false">Banana</choice>
<choice correct="false">Chocolate</choice>
                    <choice correct="true">Donut</choice>
</choicegroup>
</multiplechoiceresponse>
</problem>
""")
def test_check_unmask(self):
"""
Check that shuffle unmasking is plumbed through: when check_problem is called,
unmasked names should appear in the track_function event_info.
"""
module = CapaFactory.create(xml=self.common_shuffle_xml)
with patch.object(module.runtime, 'track_function') as mock_track_function:
get_request_dict = {CapaFactory.input_key(): 'mask_1'} # the correct choice
module.check_problem(get_request_dict)
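            # mock_calls[0] is a (name, args, kwargs) triple; track_function
            # is invoked as track_function(event_type, event_info), so
            # args[1] below is the event_info dict.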
mock_call = mock_track_function.mock_calls[0]
event_info = mock_call[1][1]
# 'answers' key modified to use unmasked name
self.assertEqual(event_info['answers'][CapaFactory.answer_key()], 'choice_3')
# 'permutation' key added to record how problem was shown
self.assertEquals(event_info['permutation'][CapaFactory.answer_key()],
('shuffle', ['choice_3', 'choice_1', 'choice_2', 'choice_0']))
self.assertEquals(event_info['success'], 'correct')
def test_save_unmask(self):
"""On problem save, unmasked data should appear on track_function."""
module = CapaFactory.create(xml=self.common_shuffle_xml)
with patch.object(module.runtime, 'track_function') as mock_track_function:
get_request_dict = {CapaFactory.input_key(): 'mask_0'}
module.save_problem(get_request_dict)
mock_call = mock_track_function.mock_calls[0]
event_info = mock_call[1][1]
self.assertEquals(event_info['answers'][CapaFactory.answer_key()], 'choice_2')
self.assertIsNotNone(event_info['permutation'][CapaFactory.answer_key()])
def test_reset_unmask(self):
"""On problem reset, unmask names should appear track_function."""
module = CapaFactory.create(xml=self.common_shuffle_xml)
get_request_dict = {CapaFactory.input_key(): 'mask_0'}
module.check_problem(get_request_dict)
# On reset, 'old_state' should use unmasked names
with patch.object(module.runtime, 'track_function') as mock_track_function:
module.reset_problem(None)
mock_call = mock_track_function.mock_calls[0]
event_info = mock_call[1][1]
self.assertEquals(mock_call[1][0], 'reset_problem')
self.assertEquals(event_info['old_state']['student_answers'][CapaFactory.answer_key()], 'choice_2')
self.assertIsNotNone(event_info['permutation'][CapaFactory.answer_key()])
def test_rescore_unmask(self):
"""On problem rescore, unmasked names should appear on track_function."""
module = CapaFactory.create(xml=self.common_shuffle_xml)
get_request_dict = {CapaFactory.input_key(): 'mask_0'}
module.check_problem(get_request_dict)
# On rescore, state/student_answers should use unmasked names
with patch.object(module.runtime, 'track_function') as mock_track_function:
module.rescore_problem()
mock_call = mock_track_function.mock_calls[0]
event_info = mock_call[1][1]
self.assertEquals(mock_call[1][0], 'problem_rescore')
self.assertEquals(event_info['state']['student_answers'][CapaFactory.answer_key()], 'choice_2')
self.assertIsNotNone(event_info['permutation'][CapaFactory.answer_key()])
def test_check_unmask_answerpool(self):
"""Check answer-pool question track_function uses unmasked names"""
xml = textwrap.dedent("""
<problem>
<multiplechoiceresponse>
<choicegroup type="MultipleChoice" answer-pool="4">
<choice correct="false">Apple</choice>
<choice correct="false">Banana</choice>
<choice correct="false">Chocolate</choice>
                    <choice correct="true">Donut</choice>
</choicegroup>
</multiplechoiceresponse>
</problem>
""")
module = CapaFactory.create(xml=xml)
with patch.object(module.runtime, 'track_function') as mock_track_function:
get_request_dict = {CapaFactory.input_key(): 'mask_0'}
module.check_problem(get_request_dict)
mock_call = mock_track_function.mock_calls[0]
event_info = mock_call[1][1]
# 'answers' key modified to use unmasked name
self.assertEqual(event_info['answers'][CapaFactory.answer_key()], 'choice_2')
# 'permutation' key added to record how problem was shown
self.assertEquals(event_info['permutation'][CapaFactory.answer_key()],
('answerpool', ['choice_1', 'choice_3', 'choice_2', 'choice_0']))
self.assertEquals(event_info['success'], 'incorrect')
class ComplexEncoderTest(unittest.TestCase):
def test_default(self):
"""
Check that complex numbers can be encoded into JSON.
"""
complex_num = 1 - 1j
expected_str = '1-1*j'
json_str = json.dumps(complex_num, cls=ComplexEncoder)
self.assertEqual(expected_str, json_str[1:-1]) # ignore quotes
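        # Illustrative (not asserted here): the encoder presumably applies the
        # same formatting inside containers, e.g.
        #   json.dumps([1 - 1j], cls=ComplexEncoder)  ->  '["1-1*j"]'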
class TestProblemCheckTracking(unittest.TestCase):
"""
Ensure correct tracking information is included in events emitted during problem checks.
"""
def setUp(self):
self.maxDiff = None
def test_choice_answer_text(self):
factory = self.capa_factory_for_problem_xml("""\
<problem display_name="Multiple Choice Questions">
<p>What color is the open ocean on a sunny day?</p>
<optionresponse>
<optioninput options="('yellow','blue','green')" correct="blue" label="What color is the open ocean on a sunny day?"/>
</optionresponse>
<p>Which piece of furniture is built for sitting?</p>
<multiplechoiceresponse>
<choicegroup type="MultipleChoice">
<choice correct="false">
<text>a table</text>
</choice>
<choice correct="false">
<text>a desk</text>
</choice>
<choice correct="true">
<text>a chair</text>
</choice>
<choice correct="false">
<text>a bookshelf</text>
</choice>
</choicegroup>
</multiplechoiceresponse>
<p>Which of the following are musical instruments?</p>
<choiceresponse>
<checkboxgroup direction="vertical" label="Which of the following are musical instruments?">
<choice correct="true">a piano</choice>
<choice correct="false">a tree</choice>
<choice correct="true">a guitar</choice>
<choice correct="false">a window</choice>
</checkboxgroup>
</choiceresponse>
</problem>
""")
module = factory.create()
answer_input_dict = {
factory.input_key(2): 'blue',
factory.input_key(3): 'choice_0',
factory.input_key(4): ['choice_0', 'choice_1'],
}
event = self.get_event_for_answers(module, answer_input_dict)
self.assertEquals(event['submission'], {
factory.answer_key(2): {
'question': 'What color is the open ocean on a sunny day?',
'answer': 'blue',
'response_type': 'optionresponse',
'input_type': 'optioninput',
'correct': True,
'variant': '',
},
factory.answer_key(3): {
'question': '',
'answer': u'<text>a table</text>',
'response_type': 'multiplechoiceresponse',
'input_type': 'choicegroup',
'correct': False,
'variant': '',
},
factory.answer_key(4): {
'question': 'Which of the following are musical instruments?',
'answer': [u'a piano', u'a tree'],
'response_type': 'choiceresponse',
'input_type': 'checkboxgroup',
'correct': False,
'variant': '',
},
})
def capa_factory_for_problem_xml(self, xml):
class CustomCapaFactory(CapaFactory):
"""
A factory for creating a Capa problem with arbitrary xml.
"""
sample_problem_xml = textwrap.dedent(xml)
return CustomCapaFactory
def get_event_for_answers(self, module, answer_input_dict):
with patch.object(module.runtime, 'track_function') as mock_track_function:
module.check_problem(answer_input_dict)
self.assertEquals(len(mock_track_function.mock_calls), 1)
mock_call = mock_track_function.mock_calls[0]
event = mock_call[1][1]
return event
def test_numerical_textline(self):
factory = CapaFactory
module = factory.create()
answer_input_dict = {
factory.input_key(2): '3.14'
}
event = self.get_event_for_answers(module, answer_input_dict)
self.assertEquals(event['submission'], {
factory.answer_key(2): {
'question': '',
'answer': '3.14',
'response_type': 'numericalresponse',
'input_type': 'textline',
'correct': True,
'variant': '',
}
})
def test_multiple_inputs(self):
factory = self.capa_factory_for_problem_xml("""\
<problem display_name="Multiple Inputs">
<p>Choose the correct color</p>
<optionresponse>
<p>What color is the sky?</p>
<optioninput options="('yellow','blue','green')" correct="blue"/>
<p>What color are pine needles?</p>
<optioninput options="('yellow','blue','green')" correct="green"/>
</optionresponse>
</problem>
""")
module = factory.create()
answer_input_dict = {
factory.input_key(2, 1): 'blue',
factory.input_key(2, 2): 'yellow',
}
event = self.get_event_for_answers(module, answer_input_dict)
self.assertEquals(event['submission'], {
factory.answer_key(2, 1): {
'question': '',
'answer': 'blue',
'response_type': 'optionresponse',
'input_type': 'optioninput',
'correct': True,
'variant': '',
},
factory.answer_key(2, 2): {
'question': '',
'answer': 'yellow',
'response_type': 'optionresponse',
'input_type': 'optioninput',
'correct': False,
'variant': '',
},
})
def test_rerandomized_inputs(self):
factory = CapaFactory
module = factory.create(rerandomize='always')
answer_input_dict = {
factory.input_key(2): '3.14'
}
event = self.get_event_for_answers(module, answer_input_dict)
self.assertEquals(event['submission'], {
factory.answer_key(2): {
'question': '',
'answer': '3.14',
'response_type': 'numericalresponse',
'input_type': 'textline',
'correct': True,
'variant': module.seed,
}
})
def test_file_inputs(self):
fnames = ["prog1.py", "prog2.py", "prog3.py"]
fpaths = [os.path.join(DATA_DIR, "capa", fname) for fname in fnames]
fileobjs = [open(fpath) for fpath in fpaths]
for fileobj in fileobjs:
self.addCleanup(fileobj.close)
factory = CapaFactoryWithFiles
module = factory.create()
# Mock the XQueueInterface.
xqueue_interface = XQueueInterface("http://example.com/xqueue", Mock())
xqueue_interface._http_post = Mock(return_value=(0, "ok")) # pylint: disable=protected-access
module.system.xqueue['interface'] = xqueue_interface
answer_input_dict = {
CapaFactoryWithFiles.input_key(response_num=2): fileobjs,
CapaFactoryWithFiles.input_key(response_num=3): 'None',
}
event = self.get_event_for_answers(module, answer_input_dict)
self.assertEquals(event['submission'], {
factory.answer_key(2): {
'question': '',
'answer': fpaths,
'response_type': 'coderesponse',
'input_type': 'filesubmission',
'correct': False,
'variant': '',
},
factory.answer_key(3): {
'answer': 'None',
'correct': True,
'question': '',
'response_type': 'customresponse',
'input_type': 'textline',
'variant': ''
}
})
|
malishevg/edugraph
|
common/lib/xmodule/xmodule/tests/test_capa_module.py
|
Python
|
agpl-3.0
| 72,072
|
"""
Schema differencing support.
"""
import logging
import sqlalchemy
from sqlalchemy.types import Float
log = logging.getLogger(__name__)
def getDiffOfModelAgainstDatabase(metadata, engine, excludeTables=None):
"""
Return differences of model against database.
:return: object which will evaluate to :keyword:`True` if there \
are differences else :keyword:`False`.
"""
db_metadata = sqlalchemy.MetaData(engine)
db_metadata.reflect()
# sqlite will include a dynamically generated 'sqlite_sequence' table if
# there are autoincrement sequences in the database; this should not be
# compared.
if engine.dialect.name == 'sqlite':
if 'sqlite_sequence' in db_metadata.tables:
db_metadata.remove(db_metadata.tables['sqlite_sequence'])
return SchemaDiff(metadata, db_metadata,
labelA='model',
labelB='database',
excludeTables=excludeTables)
def getDiffOfModelAgainstModel(metadataA, metadataB, excludeTables=None):
"""
Return differences of model against another model.
:return: object which will evaluate to :keyword:`True` if there \
are differences else :keyword:`False`.
"""
return SchemaDiff(metadataA, metadataB, excludeTables=excludeTables)
class ColDiff(object):
"""
Container for differences in one :class:`~sqlalchemy.schema.Column`
between two :class:`~sqlalchemy.schema.Table` instances, ``A``
and ``B``.
.. attribute:: col_A
The :class:`~sqlalchemy.schema.Column` object for A.
.. attribute:: col_B
The :class:`~sqlalchemy.schema.Column` object for B.
.. attribute:: type_A
The most generic type of the :class:`~sqlalchemy.schema.Column`
object in A.
.. attribute:: type_B
      The most generic type of the :class:`~sqlalchemy.schema.Column`
      object in B.
"""
diff = False
    def __init__(self, col_A, col_B):
        self.col_A = col_A
        self.col_B = col_B

        self.type_A = col_A.type
        self.type_B = col_B.type

        self.affinity_A = self.type_A._type_affinity
        self.affinity_B = self.type_B._type_affinity

        # Columns with different type affinities (e.g. Integer vs String)
        # always differ.
        if self.affinity_A is not self.affinity_B:
            self.diff = True
            return

        # A Float is only ever considered equal to another Float.
        if isinstance(self.type_A, Float) or isinstance(self.type_B, Float):
            if not (isinstance(self.type_A, Float) and isinstance(self.type_B, Float)):
                self.diff = True
                return

        # Where both types define a sizing attribute, the values must agree.
        for attr in ('precision', 'scale', 'length'):
            A = getattr(self.type_A, attr, None)
            B = getattr(self.type_B, attr, None)
            if not (A is None or B is None) and A != B:
                self.diff = True
                return
def __nonzero__(self):
return self.diff
class TableDiff(object):
"""
Container for differences in one :class:`~sqlalchemy.schema.Table`
between two :class:`~sqlalchemy.schema.MetaData` instances, ``A``
and ``B``.
.. attribute:: columns_missing_from_A
A sequence of column names that were found in B but weren't in
A.
.. attribute:: columns_missing_from_B
A sequence of column names that were found in A but weren't in
B.
.. attribute:: columns_different
A dictionary containing information about columns that were
found to be different.
    It maps column names to :class:`ColDiff` objects describing the
    differences found.
"""
__slots__ = (
'columns_missing_from_A',
'columns_missing_from_B',
'columns_different',
)
def __nonzero__(self):
return bool(
self.columns_missing_from_A or
self.columns_missing_from_B or
self.columns_different
)
class SchemaDiff(object):
"""
Compute the difference between two :class:`~sqlalchemy.schema.MetaData`
objects.
The string representation of a :class:`SchemaDiff` will summarise
the changes found between the two
:class:`~sqlalchemy.schema.MetaData` objects.
The length of a :class:`SchemaDiff` will give the number of
changes found, enabling it to be used much like a boolean in
expressions.
:param metadataA:
First :class:`~sqlalchemy.schema.MetaData` to compare.
:param metadataB:
Second :class:`~sqlalchemy.schema.MetaData` to compare.
:param labelA:
The label to use in messages about the first
:class:`~sqlalchemy.schema.MetaData`.
:param labelB:
The label to use in messages about the second
:class:`~sqlalchemy.schema.MetaData`.
:param excludeTables:
A sequence of table names to exclude.
.. attribute:: tables_missing_from_A
A sequence of table names that were found in B but weren't in
A.
.. attribute:: tables_missing_from_B
A sequence of table names that were found in A but weren't in
B.
.. attribute:: tables_different
A dictionary containing information about tables that were found
to be different.
    It maps table names to :class:`TableDiff` objects describing the
    differences found.
"""
def __init__(self,
metadataA, metadataB,
labelA='metadataA',
labelB='metadataB',
excludeTables=None):
self.metadataA, self.metadataB = metadataA, metadataB
self.labelA, self.labelB = labelA, labelB
        self.label_width = max(len(labelA), len(labelB))
excludeTables = set(excludeTables or [])
A_table_names = set(metadataA.tables.keys())
B_table_names = set(metadataB.tables.keys())
self.tables_missing_from_A = sorted(
B_table_names - A_table_names - excludeTables
)
self.tables_missing_from_B = sorted(
A_table_names - B_table_names - excludeTables
)
self.tables_different = {}
for table_name in A_table_names.intersection(B_table_names):
td = TableDiff()
A_table = metadataA.tables[table_name]
B_table = metadataB.tables[table_name]
A_column_names = set(A_table.columns.keys())
B_column_names = set(B_table.columns.keys())
td.columns_missing_from_A = sorted(
B_column_names - A_column_names
)
td.columns_missing_from_B = sorted(
A_column_names - B_column_names
)
td.columns_different = {}
for col_name in A_column_names.intersection(B_column_names):
cd = ColDiff(
A_table.columns.get(col_name),
B_table.columns.get(col_name)
)
                if cd:
                    td.columns_different[col_name] = cd
            # XXX - index and constraint differences should
            # be checked for here
            if td:
                self.tables_different[table_name] = td
    def __str__(self):
        ''' Summarize differences. '''
        out = []
        column_template = ' %%%is: %%r' % self.label_width

        for names, label in (
            (self.tables_missing_from_A, self.labelA),
            (self.tables_missing_from_B, self.labelB),
        ):
            if names:
                out.append(
                    ' tables missing from %s: %s' % (
                        label, ', '.join(sorted(names))
                    )
                )

        for name, td in sorted(self.tables_different.items()):
            out.append(
                ' table with differences: %s' % name
            )
            for names, label in (
                (td.columns_missing_from_A, self.labelA),
                (td.columns_missing_from_B, self.labelB),
            ):
                if names:
                    out.append(
                        ' %s missing these columns: %s' % (
                            label, ', '.join(sorted(names))
                        )
                    )
            # Use a distinct loop variable so the table name above is not
            # shadowed.
            for col_name, cd in td.columns_different.items():
                out.append(' column with differences: %s' % col_name)
                out.append(column_template % (self.labelA, cd.col_A))
                out.append(column_template % (self.labelB, cd.col_B))

        if out:
            out.insert(0, 'Schema diffs:')
            return '\n'.join(out)
        else:
            return 'No schema diffs'
def __len__(self):
"""
Used in bool evaluation, return of 0 means no diffs.
"""
return (
len(self.tables_missing_from_A) +
len(self.tables_missing_from_B) +
len(self.tables_different)
)
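

# Minimal usage sketch (illustrative only; the table and column names below
# are made up). It relies solely on the public SQLAlchemy schema API and the
# getDiffOfModelAgainstModel() helper defined above.
if __name__ == '__main__':
    from sqlalchemy import Column, Integer, String, Table

    meta_a = sqlalchemy.MetaData()
    meta_b = sqlalchemy.MetaData()
    Table('users', meta_a,
          Column('id', Integer, primary_key=True),
          Column('name', String(50)))
    Table('users', meta_b,
          Column('id', Integer, primary_key=True),
          Column('name', String(100)),    # length differs from meta_a
          Column('email', String(120)))   # column absent from meta_a

    diff = getDiffOfModelAgainstModel(meta_a, meta_b)
    # A SchemaDiff is truthy when any difference was found; str() summarizes.
    if diff:
        print(diff)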
|
davidvon/pipa-pay-server
|
site-packages/migrate/versioning/schemadiff.py
|
Python
|
apache-2.0
| 8,767
|
# Generated by Django 3.0.6 on 2020-05-26 09:29
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('tests', '0050_customimagewithauthor_customrenditionwithauthor'),
]
operations = [
migrations.AlterField(
model_name='restauranttag',
name='name',
field=models.CharField(max_length=100, unique=True, verbose_name='name'),
),
migrations.AlterField(
model_name='restauranttag',
name='slug',
field=models.SlugField(max_length=100, unique=True, verbose_name='slug'),
),
]
|
zerolab/wagtail
|
wagtail/tests/testapp/migrations/0051_tag_verbose_name.py
|
Python
|
bsd-3-clause
| 646
|
"""Geometry classes and factories
"""
from .base import CAP_STYLE, JOIN_STYLE
from .geo import box, shape, asShape, mapping
from .point import Point, asPoint
from .linestring import LineString, asLineString
from .polygon import Polygon, asPolygon, LinearRing, asLinearRing
from .multipoint import MultiPoint, asMultiPoint
from .multilinestring import MultiLineString, asMultiLineString
from .multipolygon import MultiPolygon, asMultiPolygon
from .collection import GeometryCollection
__all__ = [
'box', 'shape', 'asShape', 'Point', 'asPoint', 'LineString',
'asLineString', 'Polygon', 'asPolygon', 'MultiPoint', 'asMultiPoint',
'MultiLineString', 'asMultiLineString', 'MultiPolygon', 'asMultiPolygon',
'GeometryCollection', 'mapping', 'LinearRing', 'asLinearRing',
'CAP_STYLE', 'JOIN_STYLE',
]
# This import needs to happen here to avoid circular references
import shapely.speedups
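# Illustrative usage of the factories exported above (a sketch; only the
# documented Point/Polygon API is assumed):
#
#     from shapely.geometry import Point, Polygon
#     square = Polygon([(0, 0), (1, 0), (1, 1), (0, 1)])
#     assert square.contains(Point(0.5, 0.5))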
|
jdmcbr/Shapely
|
shapely/geometry/__init__.py
|
Python
|
bsd-3-clause
| 900
|
# Generated by Django 2.2 on 2019-04-26 15:48
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import taggit.managers
import wagtail.core.models
import wagtail.images.models
import wagtail.search.index
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('wagtailcore', '0041_group_collection_permissions_verbose_name_plural'),
('taggit', '0002_auto_20150616_2121'),
('tests', '0049_rawhtmlblock'),
]
operations = [
migrations.CreateModel(
name='CustomImageWithAuthor',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=255, verbose_name='title')),
('file', models.ImageField(height_field='height', upload_to=wagtail.images.models.get_upload_to, verbose_name='file', width_field='width')),
('width', models.IntegerField(editable=False, verbose_name='width')),
('height', models.IntegerField(editable=False, verbose_name='height')),
('created_at', models.DateTimeField(auto_now_add=True, db_index=True, verbose_name='created at')),
('focal_point_x', models.PositiveIntegerField(blank=True, null=True)),
('focal_point_y', models.PositiveIntegerField(blank=True, null=True)),
('focal_point_width', models.PositiveIntegerField(blank=True, null=True)),
('focal_point_height', models.PositiveIntegerField(blank=True, null=True)),
('file_size', models.PositiveIntegerField(editable=False, null=True)),
('file_hash', models.CharField(blank=True, editable=False, max_length=40)),
('author', models.CharField(max_length=255)),
('collection', models.ForeignKey(default=wagtail.core.models.get_root_collection_id, on_delete=django.db.models.deletion.CASCADE, related_name='+', to='wagtailcore.Collection', verbose_name='collection')),
('tags', taggit.managers.TaggableManager(blank=True, help_text=None, through='taggit.TaggedItem', to='taggit.Tag', verbose_name='tags')),
('uploaded_by_user', models.ForeignKey(blank=True, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL, verbose_name='uploaded by user')),
],
options={
'abstract': False,
},
bases=(wagtail.search.index.Indexed, models.Model),
),
migrations.CreateModel(
name='CustomRenditionWithAuthor',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('filter_spec', models.CharField(db_index=True, max_length=255)),
('file', models.ImageField(height_field='height', upload_to=wagtail.images.models.get_rendition_upload_to, width_field='width')),
('width', models.IntegerField(editable=False)),
('height', models.IntegerField(editable=False)),
('focal_point_key', models.CharField(blank=True, default='', editable=False, max_length=16)),
('image', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='renditions', to='tests.CustomImageWithAuthor')),
],
options={
'unique_together': {('image', 'filter_spec', 'focal_point_key')},
},
),
]
|
zerolab/wagtail
|
wagtail/tests/testapp/migrations/0050_customimagewithauthor_customrenditionwithauthor.py
|
Python
|
bsd-3-clause
| 3,649
|
#!/usr/bin/env python3
# Copyright 2020 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Command-line tool for generating modularization stats."""
import argparse
import json
from typing import Dict, List
import class_dependency
import count_cycles
import graph
import os
import package_dependency
import print_dependencies_helper
import serialization
import sys
_SRC_PATH = os.path.abspath(
os.path.join(os.path.dirname(__file__), '..', '..', '..'))
sys.path.append(
os.path.join(_SRC_PATH, 'tools', 'android', 'modularization', 'loc'))
import modularization_loc_stat as loc_stat
CLASSES_TO_COUNT_INBOUND = ['ChromeActivity', 'ChromeTabbedActivity']
def _copy_metadata(metadata: Dict) -> Dict[str, str]:
if metadata is None:
return {}
return {f'meta_{key}': value for key, value in metadata.items()}
def _generate_graph_sizes(
class_graph: class_dependency.JavaClassDependencyGraph,
package_graph: package_dependency.JavaPackageDependencyGraph
) -> Dict[str, int]:
return {
'class_nodes': class_graph.num_nodes,
'class_edges': class_graph.num_edges,
'package_nodes': package_graph.num_nodes,
'package_edges': package_graph.num_edges
}
def _generate_inbound_stats(
class_graph: class_dependency.JavaClassDependencyGraph,
class_names: List[str]) -> Dict[str, int]:
valid_class_names = \
print_dependencies_helper.get_valid_classes_from_class_list(
class_graph, class_names)
result = {}
for class_name, valid_class_name in zip(class_names, valid_class_names):
node: class_dependency.JavaClass = class_graph.get_node_by_key(
valid_class_name)
result[f'inbound_{class_name}'] = len(node.inbound)
return result
def _generate_package_cycle_stats(
package_graph: package_dependency.JavaPackageDependencyGraph
) -> Dict[str, int]:
all_cycles = count_cycles.find_cycles(package_graph, 4)
cycles_size_2 = len(all_cycles[2])
cycles_size_up_to_4 = sum(map(len, all_cycles[2:]))
return {
'package_cycles_size_equals_2': cycles_size_2,
'package_cycles_size_up_to_4': cycles_size_up_to_4
}
def _generate_chrome_java_size(
class_graph: class_dependency.JavaClassDependencyGraph
) -> Dict[str, int]:
count = 0
class_node: class_dependency.JavaClass
for class_node in class_graph.nodes:
if '//chrome/android:chrome_java' in class_node.build_targets:
count += 1
return {'chrome_java_class_count': count}
def _generate_loc_stats(git_dir: str) -> Dict[str, object]:
start_date, end_date = loc_stat.GetDateRange(past_days=7)
loc_result_json: str = loc_stat.GenerateLOCStats(start_date,
end_date,
quiet=True,
json_format=True,
git_dir=git_dir)
loc_result: Dict = json.loads(loc_result_json)
loc_modularized = loc_result.get(loc_stat.KEY_LOC_MODULARIZED, 0)
loc_chrome_android = loc_result.get(loc_stat.KEY_LOC_LEGACY, 0)
total = loc_modularized + loc_chrome_android
percentage_modularized: float = loc_modularized / total if total > 0 else 0
return {
'loc_modularized': loc_modularized,
'loc_chrome_android': loc_chrome_android,
'loc_modularized_percentage': percentage_modularized,
'loc_start_date': loc_result.get(loc_stat.KEY_START_DATE, ''),
'loc_end_date': loc_result.get(loc_stat.KEY_END_DATE, ''),
}
def main():
arg_parser = argparse.ArgumentParser(
description='Given a JSON dependency graph, output a JSON with a '
'number of metrics to track progress of modularization.')
required_arg_group = arg_parser.add_argument_group('required arguments')
required_arg_group.add_argument(
'-f',
'--file',
required=True,
help='Path to the JSON file containing the dependency graph. '
'See the README on how to generate this file.')
arg_parser.add_argument(
'--git-dir',
type=str,
help='Root directory of the git repo to look into. '
'If not specified, use the current directory.')
arg_parser.add_argument(
'-o',
'--output',
        help='File to write the result json to. If not specified, outputs to '
'stdout.')
arguments = arg_parser.parse_args()
class_graph, package_graph, graph_metadata = \
serialization.load_class_and_package_graphs_from_file(arguments.file)
stats = {}
stats.update(_copy_metadata(graph_metadata))
stats.update(_generate_graph_sizes(class_graph, package_graph))
stats.update(_generate_inbound_stats(class_graph,
CLASSES_TO_COUNT_INBOUND))
stats.update(_generate_package_cycle_stats(package_graph))
stats.update(_generate_chrome_java_size(class_graph))
stats.update(_generate_loc_stats(arguments.git_dir))
if arguments.output:
with open(arguments.output, 'w') as f:
json.dump(stats, f, sort_keys=True)
else:
print(json.dumps(stats, sort_keys=True))
if __name__ == '__main__':
main()
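# Example invocation (paths are illustrative):
#
#   python3 modularization_stats.py -f json_graph.json -o stats.json
#
# The output is a flat JSON object mapping metric names such as
# 'class_nodes', 'package_cycles_size_equals_2' and
# 'loc_modularized_percentage' to their computed values.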
|
nwjs/chromium.src
|
tools/android/dependency_analysis/modularization_stats.py
|
Python
|
bsd-3-clause
| 5,404
|
"""
================================
Recognizing hand-written digits
================================
This example shows how scikit-learn can be used to recognize images of
hand-written digits, from 0-9.
"""
print(__doc__)
# Author: Gael Varoquaux <gael dot varoquaux at normalesup dot org>
# License: BSD 3 clause
# Standard scientific Python imports
import matplotlib.pyplot as plt
# Import datasets, classifiers and performance metrics
from sklearn import datasets, svm, metrics
from sklearn.model_selection import train_test_split
###############################################################################
# Digits dataset
# --------------
#
# The digits dataset consists of 8x8
# pixel images of digits. The ``images`` attribute of the dataset stores
# 8x8 arrays of grayscale values for each image. We will use these arrays to
# visualize the first 4 images. The ``target`` attribute of the dataset stores
# the digit each image represents and this is included in the title of the 4
# plots below.
#
# Note: if we were working from image files (e.g., 'png' files), we would load
# them using :func:`matplotlib.pyplot.imread`.
digits = datasets.load_digits()
_, axes = plt.subplots(nrows=1, ncols=4, figsize=(10, 3))
for ax, image, label in zip(axes, digits.images, digits.target):
ax.set_axis_off()
ax.imshow(image, cmap=plt.cm.gray_r, interpolation='nearest')
ax.set_title('Training: %i' % label)
###############################################################################
# Classification
# --------------
#
# To apply a classifier on this data, we need to flatten the images, turning
# each 2-D array of grayscale values from shape ``(8, 8)`` into shape
# ``(64,)``. Subsequently, the entire dataset will be of shape
# ``(n_samples, n_features)``, where ``n_samples`` is the number of images and
# ``n_features`` is the total number of pixels in each image.
#
# We can then split the data into train and test subsets and fit a support
# vector classifier on the train samples. The fitted classifier can
# subsequently be used to predict the value of the digit for the samples
# in the test subset.
# flatten the images
n_samples = len(digits.images)
data = digits.images.reshape((n_samples, -1))
# Create a classifier: a support vector classifier
clf = svm.SVC(gamma=0.001)
# Split data into 50% train and 50% test subsets
X_train, X_test, y_train, y_test = train_test_split(
data, digits.target, test_size=0.5, shuffle=False)
# Learn the digits on the train subset
clf.fit(X_train, y_train)
# Predict the value of the digit on the test subset
predicted = clf.predict(X_test)
###############################################################################
# Below we visualize the first 4 test samples and show their predicted
# digit value in the title.
_, axes = plt.subplots(nrows=1, ncols=4, figsize=(10, 3))
for ax, image, prediction in zip(axes, X_test, predicted):
ax.set_axis_off()
image = image.reshape(8, 8)
ax.imshow(image, cmap=plt.cm.gray_r, interpolation='nearest')
ax.set_title(f'Prediction: {prediction}')
###############################################################################
# :func:`~sklearn.metrics.classification_report` builds a text report showing
# the main classification metrics.
print(f"Classification report for classifier {clf}:\n"
f"{metrics.classification_report(y_test, predicted)}\n")
###############################################################################
# We can also plot a :ref:`confusion matrix <confusion_matrix>` of the
# true digit values and the predicted digit values.
disp = metrics.ConfusionMatrixDisplay.from_predictions(y_test, predicted)
disp.figure_.suptitle("Confusion Matrix")
print(f"Confusion matrix:\n{disp.confusion_matrix}")
plt.show()
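###############################################################################
# A single-number summary can also be computed (illustrative), e.g.:
#
#   print(f"Accuracy: {metrics.accuracy_score(y_test, predicted):.3f}")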
|
glemaitre/scikit-learn
|
examples/classification/plot_digits_classification.py
|
Python
|
bsd-3-clause
| 3,787
|
"""
Acceptance tests for Studio related to the acid xblock.
"""
from common.test.acceptance.fixtures.course import CourseFixture, XBlockFixtureDesc
from common.test.acceptance.pages.common.auto_auth import AutoAuthPage
from common.test.acceptance.pages.studio.overview import CourseOutlinePage
from common.test.acceptance.pages.xblock.acid import AcidView
from common.test.acceptance.tests.helpers import AcceptanceTest
class XBlockAcidBase(AcceptanceTest):
"""
Base class for tests that verify that XBlock integration is working correctly
"""
__test__ = False
def setUp(self):
"""
Create a unique identifier for the course used in this test.
"""
# Ensure that the superclass sets up
super(XBlockAcidBase, self).setUp()
# Define a unique course identifier
self.course_info = {
'org': 'test_org',
'number': 'course_' + self.unique_id[:5],
'run': 'test_' + self.unique_id,
'display_name': 'Test Course ' + self.unique_id
}
self.outline = CourseOutlinePage(
self.browser,
self.course_info['org'],
self.course_info['number'],
self.course_info['run']
)
self.course_id = '{org}.{number}.{run}'.format(**self.course_info)
self.setup_fixtures()
self.auth_page = AutoAuthPage(
self.browser,
staff=False,
username=self.user.get('username'),
email=self.user.get('email'),
password=self.user.get('password')
)
self.auth_page.visit()
def validate_acid_block_preview(self, acid_block):
"""
Validate the Acid Block's preview
"""
self.assertTrue(acid_block.init_fn_passed)
self.assertTrue(acid_block.resource_url_passed)
self.assertTrue(acid_block.scope_passed('user_state'))
self.assertTrue(acid_block.scope_passed('user_state_summary'))
self.assertTrue(acid_block.scope_passed('preferences'))
self.assertTrue(acid_block.scope_passed('user_info'))
def test_acid_block_preview(self):
"""
Verify that all expected acid block tests pass in studio preview
"""
self.outline.visit()
subsection = self.outline.section('Test Section').subsection('Test Subsection')
unit = subsection.expand_subsection().unit('Test Unit').go_to()
acid_block = AcidView(self.browser, unit.xblocks[0].preview_selector)
self.validate_acid_block_preview(acid_block)
def test_acid_block_editor(self):
"""
Verify that all expected acid block tests pass in studio editor
"""
self.outline.visit()
subsection = self.outline.section('Test Section').subsection('Test Subsection')
unit = subsection.expand_subsection().unit('Test Unit').go_to()
acid_block = AcidView(self.browser, unit.xblocks[0].edit().editor_selector)
self.assertTrue(acid_block.init_fn_passed)
self.assertTrue(acid_block.resource_url_passed)
class XBlockAcidNoChildTest(XBlockAcidBase):
"""
Tests of an AcidBlock with no children
"""
__test__ = True
def setup_fixtures(self):
course_fix = CourseFixture(
self.course_info['org'],
self.course_info['number'],
self.course_info['run'],
self.course_info['display_name']
)
course_fix.add_children(
XBlockFixtureDesc('chapter', 'Test Section').add_children(
XBlockFixtureDesc('sequential', 'Test Subsection').add_children(
XBlockFixtureDesc('vertical', 'Test Unit').add_children(
XBlockFixtureDesc('acid', 'Acid Block')
)
)
)
).install()
self.user = course_fix.user
class XBlockAcidParentBase(XBlockAcidBase):
"""
Base class for tests that verify that parent XBlock integration is working correctly
"""
__test__ = False
def validate_acid_block_preview(self, acid_block):
super(XBlockAcidParentBase, self).validate_acid_block_preview(acid_block)
self.assertTrue(acid_block.child_tests_passed)
def test_acid_block_preview(self):
"""
Verify that all expected acid block tests pass in studio preview
"""
self.outline.visit()
subsection = self.outline.section('Test Section').subsection('Test Subsection')
unit = subsection.expand_subsection().unit('Test Unit').go_to()
container = unit.xblocks[0].go_to_container()
acid_block = AcidView(self.browser, container.xblocks[0].preview_selector)
self.validate_acid_block_preview(acid_block)
class XBlockAcidEmptyParentTest(XBlockAcidParentBase):
"""
Tests of an AcidBlock with children
"""
__test__ = True
def setup_fixtures(self):
course_fix = CourseFixture(
self.course_info['org'],
self.course_info['number'],
self.course_info['run'],
self.course_info['display_name']
)
course_fix.add_children(
XBlockFixtureDesc('chapter', 'Test Section').add_children(
XBlockFixtureDesc('sequential', 'Test Subsection').add_children(
XBlockFixtureDesc('vertical', 'Test Unit').add_children(
XBlockFixtureDesc('acid_parent', 'Acid Parent Block').add_children(
)
)
)
)
).install()
self.user = course_fix.user
class XBlockAcidChildTest(XBlockAcidParentBase):
"""
Tests of an AcidBlock with children
"""
__test__ = True
def setup_fixtures(self):
course_fix = CourseFixture(
self.course_info['org'],
self.course_info['number'],
self.course_info['run'],
self.course_info['display_name']
)
course_fix.add_children(
XBlockFixtureDesc('chapter', 'Test Section').add_children(
XBlockFixtureDesc('sequential', 'Test Subsection').add_children(
XBlockFixtureDesc('vertical', 'Test Unit').add_children(
XBlockFixtureDesc('acid_parent', 'Acid Parent Block').add_children(
XBlockFixtureDesc('acid', 'First Acid Child', metadata={'name': 'first'}),
XBlockFixtureDesc('acid', 'Second Acid Child', metadata={'name': 'second'}),
XBlockFixtureDesc('html', 'Html Child', data="<html>Contents</html>"),
)
)
)
)
).install()
self.user = course_fix.user
def test_acid_block_preview(self):
super(XBlockAcidChildTest, self).test_acid_block_preview()
def test_acid_block_editor(self):
super(XBlockAcidChildTest, self).test_acid_block_editor()
|
fintech-circle/edx-platform
|
common/test/acceptance/tests/studio/test_studio_acid_xblock.py
|
Python
|
agpl-3.0
| 7,011
|