| repo_name (string, 5–100 chars) | ref (string, 12–67 chars) | path (string, 4–244 chars) | copies (string, 1–8 chars) | content (string, 0–1.05M chars, may be null ⌀) |
|---|---|---|---|---|
nesdis/djongo
|
refs/heads/master
|
tests/django_tests/tests/v21/tests/postgres_tests/models.py
|
5
|
from django.core.serializers.json import DjangoJSONEncoder
from django.db import models
from .fields import (
ArrayField, BigIntegerRangeField, CICharField, CIEmailField, CITextField,
DateRangeField, DateTimeRangeField, FloatRangeField, HStoreField,
IntegerRangeField, JSONField, SearchVectorField,
)
class Tag:
def __init__(self, tag_id):
self.tag_id = tag_id
def __eq__(self, other):
return isinstance(other, Tag) and self.tag_id == other.tag_id
class TagField(models.SmallIntegerField):
def from_db_value(self, value, expression, connection):
if value is None:
return value
return Tag(int(value))
def to_python(self, value):
if isinstance(value, Tag):
return value
if value is None:
return value
return Tag(int(value))
def get_prep_value(self, value):
return value.tag_id
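# TagField follows Django's custom-field contract: from_db_value() and
# to_python() deserialize the stored integer into a Tag, and get_prep_value()
# serializes it back. A minimal usage sketch, assuming a hypothetical model
# (not part of this test suite) that declares the field:
#
#     class Thing(models.Model):
#         tag = TagField()
#
#     Thing.objects.create(tag=Tag(7))             # stored as the integer 7
#     Thing.objects.get(tag=Tag(7)).tag == Tag(7)  # True, via Tag.__eq__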
class PostgreSQLModel(models.Model):
class Meta:
abstract = True
required_db_vendor = 'postgresql'
class IntegerArrayModel(PostgreSQLModel):
field = ArrayField(models.IntegerField(), default=list, blank=True)
class NullableIntegerArrayModel(PostgreSQLModel):
field = ArrayField(models.IntegerField(), blank=True, null=True)
class CharArrayModel(PostgreSQLModel):
field = ArrayField(models.CharField(max_length=10))
class DateTimeArrayModel(PostgreSQLModel):
datetimes = ArrayField(models.DateTimeField())
dates = ArrayField(models.DateField())
times = ArrayField(models.TimeField())
class NestedIntegerArrayModel(PostgreSQLModel):
field = ArrayField(ArrayField(models.IntegerField()))
class OtherTypesArrayModel(PostgreSQLModel):
ips = ArrayField(models.GenericIPAddressField())
uuids = ArrayField(models.UUIDField())
decimals = ArrayField(models.DecimalField(max_digits=5, decimal_places=2))
tags = ArrayField(TagField(), blank=True, null=True)
class HStoreModel(PostgreSQLModel):
field = HStoreField(blank=True, null=True)
array_field = ArrayField(HStoreField(), null=True)
class CharFieldModel(models.Model):
field = models.CharField(max_length=16)
class TextFieldModel(models.Model):
field = models.TextField()
def __str__(self):
return self.field
# Scene/Character/Line models are used to test full text search. They're
# populated with content from Monty Python and the Holy Grail.
class Scene(models.Model):
scene = models.CharField(max_length=255)
setting = models.CharField(max_length=255)
def __str__(self):
return self.scene
class Character(models.Model):
name = models.CharField(max_length=255)
def __str__(self):
return self.name
class CITestModel(PostgreSQLModel):
name = CICharField(primary_key=True, max_length=255)
email = CIEmailField()
description = CITextField()
array_field = ArrayField(CITextField(), null=True)
def __str__(self):
return self.name
class Line(PostgreSQLModel):
scene = models.ForeignKey('Scene', models.CASCADE)
character = models.ForeignKey('Character', models.CASCADE)
dialogue = models.TextField(blank=True, null=True)
dialogue_search_vector = SearchVectorField(blank=True, null=True)
dialogue_config = models.CharField(max_length=100, blank=True, null=True)
def __str__(self):
return self.dialogue or ''
class RangesModel(PostgreSQLModel):
ints = IntegerRangeField(blank=True, null=True)
bigints = BigIntegerRangeField(blank=True, null=True)
floats = FloatRangeField(blank=True, null=True)
timestamps = DateTimeRangeField(blank=True, null=True)
dates = DateRangeField(blank=True, null=True)
class RangeLookupsModel(PostgreSQLModel):
parent = models.ForeignKey(RangesModel, models.SET_NULL, blank=True, null=True)
integer = models.IntegerField(blank=True, null=True)
big_integer = models.BigIntegerField(blank=True, null=True)
float = models.FloatField(blank=True, null=True)
timestamp = models.DateTimeField(blank=True, null=True)
date = models.DateField(blank=True, null=True)
class JSONModel(PostgreSQLModel):
field = JSONField(blank=True, null=True)
field_custom = JSONField(blank=True, null=True, encoder=DjangoJSONEncoder)
class ArrayFieldSubclass(ArrayField):
def __init__(self, *args, **kwargs):
super().__init__(models.IntegerField())
class AggregateTestModel(models.Model):
"""
To test postgres-specific general aggregation functions
"""
char_field = models.CharField(max_length=30, blank=True)
integer_field = models.IntegerField(null=True)
boolean_field = models.BooleanField(null=True)
class StatTestModel(models.Model):
"""
To test postgres-specific aggregation functions for statistics
"""
int1 = models.IntegerField()
int2 = models.IntegerField()
related_field = models.ForeignKey(AggregateTestModel, models.SET_NULL, null=True)
class NowTestModel(models.Model):
when = models.DateTimeField(null=True, default=None)
class UUIDTestModel(models.Model):
uuid = models.UUIDField(default=None, null=True)
|
pkoutsias/SickRage
|
refs/heads/master
|
sickbeard/providers/nyaatorrents.py
|
2
|
# coding=utf-8
# Author: Mr_Orange
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of SickRage.
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
import urllib
import re
from sickbeard import logger
from sickbeard import tvcache
from sickrage.helper.common import convert_size
from sickrage.providers.torrent.TorrentProvider import TorrentProvider
class NyaaProvider(TorrentProvider): # pylint: disable=too-many-instance-attributes
def __init__(self):
TorrentProvider.__init__(self, "NyaaTorrents")
self.public = True
self.supports_absolute_numbering = True
self.anime_only = True
self.ratio = None
self.cache = NyaaCache(self)
self.urls = {'base_url': 'http://www.nyaa.se/'}
self.url = self.urls['base_url']
self.minseed = 0
self.minleech = 0
self.confirmed = False
def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals
results = []
if self.show and not self.show.is_anime:
return results
for mode in search_strings:
items = []
logger.log(u"Search Mode: %s" % mode, logger.DEBUG)
for search_string in search_strings[mode]:
if mode != 'RSS':
logger.log(u"Search string: %s" % search_string, logger.DEBUG)
params = {
"page": 'rss',
"cats": '1_0', # All anime
"sort": 2, # Sort Descending By Seeders
"order": 1
}
if mode != 'RSS':
params["term"] = search_string.encode('utf-8')
search_url = self.url + '?' + urllib.urlencode(params)
logger.log(u"Search URL: %s" % search_url, logger.DEBUG)
summary_regex = ur"(\d+) seeder\(s\), (\d+) leecher\(s\), \d+ download\(s\) - (\d+.?\d* [KMGT]iB)(.*)"
s = re.compile(summary_regex, re.DOTALL)
for curItem in self.cache.getRSSFeed(search_url)['entries'] or []:
title = curItem['title']
download_url = curItem['link']
if not all([title, download_url]):
continue
seeders, leechers, torrent_size, verified = s.findall(curItem['summary'])[0]
# Cast to int so the min-seed/leech filter below and the seeders sort compare numbers, not strings
seeders, leechers = int(seeders), int(leechers)
size = convert_size(torrent_size) or -1
# Filter unseeded torrent
if seeders < self.minseed or leechers < self.minleech:
if mode != 'RSS':
logger.log(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(title, seeders, leechers), logger.DEBUG)
continue
if self.confirmed and not verified and mode != 'RSS':
logger.log(u"Found result " + title + " but that doesn't seem like a verified result so I'm ignoring it", logger.DEBUG)
continue
item = title, download_url, size, seeders, leechers
if mode != 'RSS':
logger.log(u"Found result: %s " % title, logger.DEBUG)
items.append(item)
# For each search mode sort all the items by seeders if available
items.sort(key=lambda tup: tup[3], reverse=True)
results += items
return results
def seed_ratio(self):
return self.ratio
class NyaaCache(tvcache.TVCache):
def __init__(self, provider_obj):
tvcache.TVCache.__init__(self, provider_obj)
# only poll NyaaTorrents every 15 minutes max
self.minTime = 15
def _getRSSData(self):
search_params = {'RSS': ['']}
return {'entries': self.provider.search(search_params)}
provider = NyaaProvider()
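# A minimal sketch of what the summary regex above extracts from a typical
# Nyaa RSS summary (the sample string is illustrative, not real feed data):
if __name__ == '__main__':
    sample = "12 seeder(s), 3 leecher(s), 440 download(s) - 1.2 GiB - Trusted"
    pattern = re.compile(ur"(\d+) seeder\(s\), (\d+) leecher\(s\), \d+ download\(s\) - (\d+.?\d* [KMGT]iB)(.*)", re.DOTALL)
    print pattern.findall(sample)[0]
    # -> ('12', '3', '1.2 GiB', ' - Trusted'); a non-empty last group is
    # what the provider treats as a "verified" release.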
|
JavML/django
|
refs/heads/master
|
tests/user_commands/management/commands/transaction.py
|
553
|
from django.core.management.base import BaseCommand
class Command(BaseCommand):
help = "Say hello."
args = ''
output_transaction = True
def handle(self, *args, **options):
return 'Hello!'
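# With output_transaction = True, BaseCommand wraps the command's returned
# text in the database backend's transaction markers. A hedged invocation
# sketch:
#
#     from django.core.management import call_command
#     call_command('transaction')   # output: BEGIN; ... Hello! ... COMMIT;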
|
thomashaw/SecGen
|
refs/heads/master
|
modules/utilities/unix/audit_tools/ghidra/files/release/Ghidra/Features/Python/data/jython-2.7.1/Lib/distutils/jythoncompiler.py
|
6
|
"""distutils.jythoncompiler
Jython does not support extension libraries. This CCompiler simply
refuses compilation, emitting a warning instead of building anything.
"""
from distutils.ccompiler import CCompiler
import warnings
class JythonCompiler(CCompiler):
"""Refuses to compile C extensions on Jython"""
compiler_type = 'jython'
executables = {}
def refuse_compilation(self, *args, **kwargs):
"""Refuse compilation"""
warnings.warn('Compiling extensions is not supported on Jython')
return []
preprocess = compile = create_static_lib = link = refuse_compilation
|
kindersung/servo
|
refs/heads/master
|
tests/wpt/web-platform-tests/webdriver/network.py
|
212
|
# This comes from the following Stack Overflow post:
# http://stackoverflow.com/a/1947766/725944
# Module for getting the LAN IP address of the computer.
import os
import socket
if os.name != "nt":
import fcntl
import struct
def get_interface_ip(ifname):
sckt = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
return socket.inet_ntoa(fcntl.ioctl(
sckt.fileno(),
0x8915, # SIOCGIFADDR
struct.pack('256s', ifname[:15])
)[20:24])
def get_lan_ip():
ip = socket.gethostbyname(socket.gethostname())
if ip.startswith("127.") and os.name != "nt":
interfaces = ["eth0","eth1","eth2","wlan0","wlan1","wifi0","ath0","ath1","ppp0"]
for ifname in interfaces:
try:
ip = get_interface_ip(ifname)
break
except IOError:
pass
return ip
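# A minimal usage sketch (Linux, Python 2 era, matching the module above;
# get_interface_ip() raises IOError for interfaces that do not exist, which
# get_lan_ip() deliberately swallows):
if __name__ == "__main__":
    print(get_lan_ip())  # e.g. "192.168.1.23" on a typical LAN host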
|
Pakketeretet2/lammps
|
refs/heads/master
|
tools/i-pi/ipi/engine/cell.py
|
33
|
"""Contains the classes which deal with the system box.
Copyright (C) 2013, Joshua More and Michele Ceriotti
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
Used for implementing the minimum image convention.
Classes:
Cell: Base cell class with the generic methods and attributes.
"""
__all__ = ['Cell']
import numpy as np
from ipi.utils.depend import *
from ipi.utils.mathtools import *
from ipi.utils import units
class Cell(dobject):
"""Base class to represent the simulation cell in a periodic system.
This class has the base attributes required for either flexible or
isotropic cell dynamics. Uses an upper triangular lattice vector matrix to
represent the cell.
Depend objects:
h: An array giving the lattice vector matrix.
ih: An array giving the inverse of the lattice vector matrix.
V: The volume of the cell.
"""
def __init__(self, h=None):
"""Initialises base cell class.
Args:
h: Optional array giving the initial lattice vector matrix. The
reference cell matrix is set equal to this. Must be an upper
triangular 3*3 matrix. Defaults to a 3*3 zeroes matrix.
"""
if h is None:
#h = np.identity(3,float)
h = np.zeros((3,3), float)
dset(self,"h",depend_array(name = 'h', value = h) )
dset(self,"ih",
depend_array(name = "ih", value = np.zeros((3,3),float),
func=self.get_ih, dependencies=[dget(self,"h")]) )
dset(self,"V",
depend_value(name = 'V', func=self.get_volume,
dependencies=[dget(self,"h")]) )
def get_ih(self):
"""Inverts the lattice vector matrix."""
return invert_ut3x3(self.h)
def get_volume(self):
"""Calculates the volume of the system box."""
return det_ut3x3(self.h)
def apply_pbc(self, atom):
"""Uses the minimum image convention to return a particle to the
unit cell.
Args:
atom: An Atom object.
Returns:
An array giving the position of the image that is inside the
system box.
"""
s = np.dot(self.ih,atom.q)
for i in range(3):
s[i] = s[i] - round(s[i])
return np.dot(self.h,s)
def array_pbc(self, pos):
"""Uses the minimum image convention to return a list of particles to the
unit cell.
Args:
atom: An Atom object.
Returns:
An array giving the position of the image that is inside the
system box.
"""
s = depstrip(pos).copy()
s.shape = (len(pos)/3,3)
s = np.dot(depstrip(self.ih),s.T)
s = s - np.round(s)
s = np.dot(depstrip(self.h),s).T
pos[:] = s.reshape((len(s)*3))
def minimum_distance(self, atom1, atom2):
"""Takes two atoms and tries to find the smallest vector between two
images.
This is only rigorously accurate in the case of a cubic cell,
but gives the correct results as long as the cut-off radius is defined
as smaller than the smallest width between parallel faces even for
triclinic cells.
Args:
atom1: An Atom object.
atom2: An Atom object.
Returns:
An array giving the minimum distance between the positions of atoms
atom1 and atom2 in the minimum image convention.
"""
s = np.dot(self.ih,atom1.q-atom2.q)
for i in range(3):
s[i] -= round(s[i])
return np.dot(self.h, s)
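# A standalone sketch of the fractional-coordinate wrap used by apply_pbc,
# assuming a cubic 10x10x10 cell (plain numpy, no depend machinery):
if __name__ == "__main__":
    h = np.diag([10.0, 10.0, 10.0])   # lattice vector matrix
    q = np.array([13.0, -4.0, 2.0])   # a position outside the cell
    s = np.dot(np.linalg.inv(h), q)   # fractional coordinates
    s -= np.round(s)                  # minimum image: wrap into [-0.5, 0.5]
    print(np.dot(h, s))               # -> [ 3. -4.  2.]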
|
maackle/ILC-app
|
refs/heads/master
|
lib/pyspatialite-3.0.1/lib/test/hooks.py
|
1
|
#-*- coding: ISO-8859-1 -*-
# pyspatialite/test/hooks.py: tests for various SQLite-specific hooks
#
# Copyright (C) 2006-2007 Gerhard Häring <gh@ghaering.de>
#
# This file is part of pysqlite.
#
# This software is provided 'as-is', without any express or implied
# warranty. In no event will the authors be held liable for any damages
# arising from the use of this software.
#
# Permission is granted to anyone to use this software for any purpose,
# including commercial applications, and to alter it and redistribute it
# freely, subject to the following restrictions:
#
# 1. The origin of this software must not be misrepresented; you must not
# claim that you wrote the original software. If you use this software
# in a product, an acknowledgment in the product documentation would be
# appreciated but is not required.
# 2. Altered source versions must be plainly marked as such, and must not be
# misrepresented as being the original software.
# 3. This notice may not be removed or altered from any source distribution.
import os, unittest
import pyspatialite.dbapi2 as sqlite
class CollationTests(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def CheckCreateCollationNotCallable(self):
con = sqlite.connect(":memory:")
try:
con.create_collation("X", 42)
self.fail("should have raised a TypeError")
except TypeError, e:
self.assertEqual(e.args[0], "parameter must be callable")
def CheckCreateCollationNotAscii(self):
con = sqlite.connect(":memory:")
try:
con.create_collation("collä", cmp)
self.fail("should have raised a ProgrammingError")
except sqlite.ProgrammingError, e:
pass
def CheckCollationIsUsed(self):
if sqlite.version_info < (3, 2, 1): # old SQLite versions crash on this test
return
def mycoll(x, y):
# reverse order
return -cmp(x, y)
con = sqlite.connect(":memory:")
con.create_collation("mycoll", mycoll)
sql = """
select x from (
select 'a' as x
union
select 'b' as x
union
select 'c' as x
) order by x collate mycoll
"""
result = con.execute(sql).fetchall()
if result[0][0] != "c" or result[1][0] != "b" or result[2][0] != "a":
self.fail("the expected order was not returned")
con.create_collation("mycoll", None)
try:
result = con.execute(sql).fetchall()
self.fail("should have raised an OperationalError")
except sqlite.OperationalError, e:
self.assertEqual(e.args[0].lower(), "no such collation sequence: mycoll")
def CheckCollationRegisterTwice(self):
"""
Register two different collation functions under the same name.
Verify that the last one is actually used.
"""
con = sqlite.connect(":memory:")
con.create_collation("mycoll", cmp)
con.create_collation("mycoll", lambda x, y: -cmp(x, y))
result = con.execute("""
select x from (select 'a' as x union select 'b' as x) order by x collate mycoll
""").fetchall()
if result[0][0] != 'b' or result[1][0] != 'a':
self.fail("wrong collation function is used")
def CheckDeregisterCollation(self):
"""
Register a collation, then deregister it. Make sure an error is raised if we try
to use it.
"""
con = sqlite.connect(":memory:")
con.create_collation("mycoll", cmp)
con.create_collation("mycoll", None)
try:
con.execute("select 'a' as x union select 'b' as x order by x collate mycoll")
self.fail("should have raised an OperationalError")
except sqlite.OperationalError, e:
if not e.args[0].startswith("no such collation sequence"):
self.fail("wrong OperationalError raised")
class ProgressTests(unittest.TestCase):
def CheckProgressHandlerUsed(self):
"""
Test that the progress handler is invoked once it is set.
"""
con = sqlite.connect(":memory:")
progress_calls = []
def progress():
progress_calls.append(None)
return 0
con.set_progress_handler(progress, 1)
con.execute("""
create table foo(a, b)
""")
self.assertTrue(progress_calls)
def CheckOpcodeCount(self):
"""
Test that the opcode argument is respected.
"""
con = sqlite.connect(":memory:")
progress_calls = []
def progress():
progress_calls.append(None)
return 0
con.set_progress_handler(progress, 1)
curs = con.cursor()
curs.execute("""
create table foo (a, b)
""")
first_count = len(progress_calls)
progress_calls = []
con.set_progress_handler(progress, 2)
curs.execute("""
create table bar (a, b)
""")
second_count = len(progress_calls)
self.assertTrue(first_count > second_count)
def CheckCancelOperation(self):
"""
Test that returning a non-zero value stops the operation in progress.
"""
con = sqlite.connect(":memory:")
progress_calls = []
def progress():
progress_calls.append(None)
return 1
con.set_progress_handler(progress, 1)
curs = con.cursor()
self.assertRaises(
sqlite.OperationalError,
curs.execute,
"create table bar (a, b)")
def CheckClearHandler(self):
"""
Test that setting the progress handler to None clears the previously set handler.
"""
con = sqlite.connect(":memory:")
action = 0
def progress():
action = 1
return 0
con.set_progress_handler(progress, 1)
con.set_progress_handler(None, 1)
con.execute("select 1 union select 2 union select 3").fetchall()
self.assertEqual(action, 0, "progress handler was not cleared")
def suite():
collation_suite = unittest.makeSuite(CollationTests, "Check")
progress_suite = unittest.makeSuite(ProgressTests, "Check")
return unittest.TestSuite((collation_suite, progress_suite))
def test():
runner = unittest.TextTestRunner()
runner.run(suite())
if __name__ == "__main__":
test()
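# A hedged sketch of the progress-handler API the ProgressTests exercise:
#
#     con = sqlite.connect(":memory:")
#     calls = []
#     con.set_progress_handler(lambda: calls.append(None) or 0, 1)
#     con.execute("create table t (a)")
#     assert calls   # handler fired; returning non-zero would abort the query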
|
idea4bsd/idea4bsd
|
refs/heads/idea4bsd-master
|
python/testData/docstrings/googleKeywordArgumentsSection.py
|
53
|
def f():
"""
Keyword arguments:
"""
|
andrewmoses/ssquiz
|
refs/heads/master
|
flask/lib/python2.7/site-packages/sqlparse/engine/__init__.py
|
119
|
# Copyright (C) 2008 Andi Albrecht, albrecht.andi@gmail.com
#
# This module is part of python-sqlparse and is released under
# the BSD License: http://www.opensource.org/licenses/bsd-license.php.
"""filter"""
from sqlparse import lexer
from sqlparse.engine import grouping
from sqlparse.engine.filter import StatementFilter
# XXX remove this when cleanup is complete
Filter = object
class FilterStack(object):
def __init__(self):
self.preprocess = []
self.stmtprocess = []
self.postprocess = []
self.split_statements = False
self._grouping = False
def _flatten(self, stream):
for token in stream:
if token.is_group():
for t in self._flatten(token.tokens):
yield t
else:
yield token
def enable_grouping(self):
self._grouping = True
def full_analyze(self):
self.enable_grouping()
def run(self, sql, encoding=None):
stream = lexer.tokenize(sql, encoding)
# Process token stream
if self.preprocess:
for filter_ in self.preprocess:
stream = filter_.process(self, stream)
if (self.stmtprocess or self.postprocess or self.split_statements
or self._grouping):
splitter = StatementFilter()
stream = splitter.process(self, stream)
if self._grouping:
def _group(stream):
for stmt in stream:
grouping.group(stmt)
yield stmt
stream = _group(stream)
if self.stmtprocess:
def _run1(stream):
ret = []
for stmt in stream:
for filter_ in self.stmtprocess:
filter_.process(self, stmt)
ret.append(stmt)
return ret
stream = _run1(stream)
if self.postprocess:
def _run2(stream):
for stmt in stream:
stmt.tokens = list(self._flatten(stmt.tokens))
for filter_ in self.postprocess:
stmt = filter_.process(self, stmt)
yield stmt
stream = _run2(stream)
return stream
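# A hedged usage sketch of the stack above (sqlparse's public parse()
# function drives an equivalent FilterStack internally):
#
#     stack = FilterStack()
#     stack.enable_grouping()
#     statements = list(stack.run("select * from foo; select 1;"))
#     # -> two grouped Statement objects, one per ';'-separated statement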
|
inonit/wagtail
|
refs/heads/master
|
wagtail/wagtailimages/utils.py
|
40
|
import base64
import hmac
import hashlib
from django.conf import settings
def generate_signature(image_id, filter_spec):
# Based on libthumbor hmac generation
# https://github.com/thumbor/libthumbor/blob/b19dc58cf84787e08c8e397ab322e86268bb4345/libthumbor/crypto.py#L50
url = str(image_id) + '/' + str(filter_spec) + '/'
return base64.urlsafe_b64encode(hmac.new(settings.SECRET_KEY.encode(), url.encode(), hashlib.sha1).digest())
def verify_signature(signature, image_id, filter_spec):
return signature == generate_signature(image_id, filter_spec)
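# Note: the equality check above is not constant-time, so it can leak timing
# information. A hedged variant (an illustration, not wagtail's API) using
# hmac.compare_digest:
def verify_signature_constant_time(signature, image_id, filter_spec):
    # compare_digest does not short-circuit on the first differing byte
    return hmac.compare_digest(signature, generate_signature(image_id, filter_spec))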
|
naerthon/cursodjango
|
refs/heads/master
|
cursodjango/settings_local.py
|
10
|
from settings import *
|
theotherjimmy/mbed
|
refs/heads/master
|
tools/misc/remove-device-h.py
|
53
|
import json
import os
import stat
import re
from collections import OrderedDict
from subprocess import Popen
git_processes = []
class MyJSONEncoder(json.JSONEncoder):
def __init__(self, *args, **kwargs):
super(MyJSONEncoder, self).__init__(*args, **kwargs)
self.current_indent = 0
self.current_indent_str = ""
def encode(self, o):
#Special Processing for lists
if isinstance(o, (list, tuple)):
primitives_only = True
for item in o:
if isinstance(item, (list, tuple, dict)):
primitives_only = False
break
output = []
if primitives_only:
for item in o:
output.append(json.dumps(item))
return "[" + ", ".join(output) + "]"
else:
self.current_indent += self.indent
self.current_indent_str = " " * self.current_indent
for item in o:
output.append(self.current_indent_str + self.encode(item))
self.current_indent -= self.indent
self.current_indent_str = " " * self.current_indent
return "[\n" + ",\n".join(output) + "\n" + self.current_indent_str + "]"
elif isinstance(o, dict):
primitives_only = True
for item in o.values():
if isinstance(item, (list, tuple, dict)):
primitives_only = False
break
output = []
if primitives_only and len(o) < 3:
for key, value in o.iteritems():
output.append(json.dumps(key) + ": " + self.encode(value))
return "{" + ", ".join(output) + "}"
else:
self.current_indent += self.indent
self.current_indent_str = " " * self.current_indent
for key, value in o.iteritems():
output.append(self.current_indent_str + json.dumps(key) + ": " + self.encode(value))
self.current_indent -= self.indent
self.current_indent_str = " " * self.current_indent
return "{\n" + ",\n".join(output) + "\n" + self.current_indent_str + "}"
else:
return json.dumps(o)
def load(path):
with open(path, 'r') as f :
return json.load(f, object_pairs_hook=OrderedDict)
def dump(path, obj):
with os.fdopen(os.open(path, os.O_WRONLY | os.O_CREAT, stat.S_IRUSR | stat.S_IWUSR), 'w') as f :
os.chmod(path, stat.S_IRUSR | stat.S_IWUSR)
f.write(MyJSONEncoder(indent=4).encode(obj))
f.write(u'\n')
f.truncate()
def find(stem, path) :
for root, directories, files in os.walk(path, followlinks=True) :
[dir for dir in directories if dir[0] != '.']
if (stem_match(stem,os.path.basename(os.path.normpath(root))) and
"device.h" in files) :
return os.path.join(root, "device.h")
def find_all_devices(path, verbose=False) :
for root, directories, files in os.walk(path, followlinks=True) :
[dir for dir in directories if dir[0] != '.']
if "device.h" in files :
if verbose : print("[VERBOSE] found a device.h file in {}".format(root))
yield os.path.join(root, "device.h")
mbed_matcher = re.compile('mbed', re.IGNORECASE)
def stem_match(stem, thing) :
return (stem in thing or
re.sub(mbed_matcher, '', stem) in thing)
attr_matcher = re.compile('^#define\W+DEVICE_(\w+)\W+1.*$')
def parse_attributes(path) :
with open(path) as input :
for line in input :
m = re.match(attr_matcher, line)
if m: yield m.group(1)
remove_matcher = re.compile('^#define\W+DEVICE_(\w+)\W+[10].*$')
def remove_attributes(path) :
with open(path) as input :
remainder = filter(lambda l: not re.match(remove_matcher, l), input)
with open(path,"wb") as output :
output.truncate(0)
output.write("// The 'provides' section in 'target.json' is now used"+
" to create the device's hardware preprocessor switches.\n")
output.write("// Check the 'provides' section of the target description"+
" in 'targets.json' for more details.\n")
output.writelines(remainder)
def user_select(things, message) :
print(message)
for thing, number in zip(things, range(len(things))):
print("{} : {}".format(number, thing))
selection = None
while selection is None :
print("please select an integer [0..{}] or specify all".format(len(things) - 1))
try :
i = raw_input()
if i == "all" :
selection = "all"
else :
selection = int(i)
if (selection > len(things) or
selection < 0) :
print("selection {} out of range".format(selection))
selection = None
except (ValueError, SyntaxError) :
print("selection not understood")
if selection == "all" :
return things
else :
return [things[selection]]
target_matcher = re.compile("TARGET_")
def strip_target(str) :
return re.sub(target_matcher, "", str)
def add_to_targets(targets, device_file, verbose=False, remove=False) :
if verbose : print("[VERBOSE] trying target {}".format(device_file))
device = strip_target(os.path.basename(os.path.normpath(os.path.dirname(device_file))))
if not device :
print("[WARNING] device {} did not have an associated device.h".format(device))
else :
possible_matches = set([key for key in targets.keys() if stem_match(device, key)])
for key, value in targets.iteritems() :
for alt in value['extra_labels'] if 'extra_labels' in value else [] :
if stem_match(device, alt) : possible_matches.add(key)
for alt in value['extra_labels_add'] if 'extra_labels_add' in value else [] :
if stem_match(device, alt) : possible_matches.add(key)
possible_matches = list(possible_matches)
for match in possible_matches :
if device == match : possible_matches = [match]
if not possible_matches :
print("[WARNING] device {} did not have an associated entry in targets.json".format(device))
return None
elif len(possible_matches) > 1 :
message = ("possible matches for file {}".format(device_file))
target = user_select(possible_matches, message)
else :
target = possible_matches
attrs = list(parse_attributes(device_file))
if attrs :
for t in target :
targets[t]["device_has"] = sorted(list(set(targets[t].setdefault("device_has",[]) + attrs)))
if verbose : print("[VERBOSE] target {} now device_has {}".format(t, attrs))
if remove is True:
remove_attributes(device_file)
if __name__ == '__main__' :
import argparse
parser = argparse.ArgumentParser(description='A helpful little script for converting' +
' device.h files to parts of the targets.json file')
parser.add_argument('-a', '--all', action='store_true',
help='find and convert all available device.h files in the'+
' directory tree starting at the current directory')
parser.add_argument('-f', '--file', nargs='+', help='specify an individual file to '+
'convert from device.h format to a piece of targets.json')
parser.add_argument('-t', '--target', nargs='+', help='specify an individual target'+
' to convert from device.h format to a piece of targets.json')
parser.add_argument('-v', '--verbose', action='store_true',
help="print out every target that is updated in the targets.json")
parser.add_argument('-r', '--rm', action='store_true',
help="remove the used attributes from a device.h file")
args = parser.parse_args()
if not args.target and not args.file and not args.all :
print("[WARNING] no action specified; auto-formatting targets.json")
targets_file_name = os.path.join(os.curdir, "hal", "targets.json")
try :
targets = load(targets_file_name)
except OSError :
print("[ERROR] did not find targets.json where I expected it {}".format(targets_file_name))
exit(1)
except ValueError :
print("[ERROR] invalid json found in {}".format(targets_file_name))
exit(2)
if args.target :
for target in args.target :
device_file = find(target, os.curdir)
if device_file :
add_to_targets(targets, device_file, verbose=args.verbose, remove=args.rm)
else :
print("[WARNING] could not locate a device file for target {}".format(target))
if args.file :
for file in args.file :
add_to_targets(targets, file, verbose=args.verbose, remove=args.rm)
if args.all :
for file in find_all_devices(os.curdir, verbose=args.verbose) :
add_to_targets(targets, file, verbose=args.verbose, remove=args.rm)
dump(targets_file_name, targets)
for process in git_processes :
process.wait()
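# A hedged CLI sketch for the script above (flags as defined in the parser):
#
#     python remove-device-h.py --all --rm -v
#
# walks the tree for device.h files, merges their DEVICE_* defines into
# hal/targets.json as "device_has" lists, and strips them from each device.h.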
|
kazitanvirahsan/scrapy
|
refs/heads/master
|
scrapy/contrib/spiders/crawl.py
|
144
|
import warnings
from scrapy.exceptions import ScrapyDeprecationWarning
warnings.warn("Module `scrapy.contrib.spiders.crawl` is deprecated, "
"use `scrapy.spiders.crawl` instead",
ScrapyDeprecationWarning, stacklevel=2)
from scrapy.spiders.crawl import *
|
galah-group/galah-interact-python
|
refs/heads/master
|
docs/guides/examples/unittest_tutorial/harness_with_capture.py
|
2
|
import interact
from interact.capture import capture_function
student_code = interact.unittest.load_files(["main.cpp"])
captured_function = capture_function(student_code["main"]["bar"], 3, 4)
# Wait for the function to end.
captured_function.wait()
print "The function returned:", repr(captured_function.return_value)
print "The function wrote to stdout:", repr(captured_function.stdout.read())
print "The function wrote to stderr:", repr(captured_function.stderr.read())
|
CodeForKuwait/reForm
|
refs/heads/master
|
reForm/reForm/__init__.py
|
12133432
| |
sabi0/intellij-community
|
refs/heads/master
|
python/testData/resolve/multiFile/fromQualifiedPackageImportFile/mypackage/child/testfile.py
|
12133432
| |
gjtorikian/readthedocs.org
|
refs/heads/master
|
readthedocs/gold/migrations/__init__.py
|
12133432
| |
koebbe/homeworks
|
refs/heads/master
|
visit/__init__.py
|
12133432
| |
bitcity/django
|
refs/heads/master
|
django/conf/locale/km/__init__.py
|
12133432
| |
okffi/decisions
|
refs/heads/master
|
web/decisions/news/templatetags/__init__.py
|
12133432
| |
Juniper/nova
|
refs/heads/master
|
nova/tests/unit/api/openstack/__init__.py
|
12133432
| |
saurabh6790/medapp
|
refs/heads/master
|
patches/june_2013/p05_remove_search_criteria_reports.py
|
30
|
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import webnotes
def execute():
try:
webnotes.conn.sql("""delete from `tabSearch Criteria` where ifnull(standard, 'No') = 'Yes'""")
except Exception, e:
pass
|
prometheanbob/clowdflows
|
refs/heads/master
|
workflows/noise/interaction_views.py
|
7
|
from django.shortcuts import render
def noise_filter_integers(request,input_dict,output_dict,widget):
return render(request, 'interactions/noise_filter_integers.html',{'widget':widget,'intList':input_dict['intList']})
|
bixbydev/Bixby
|
refs/heads/master
|
google/dist/gdata-2.0.18/samples/apps/marketplace_sample/gdata/tlslite/X509CertChain.py
|
238
|
"""Class representing an X.509 certificate chain."""
from utils import cryptomath
class X509CertChain:
"""This class represents a chain of X.509 certificates.
@type x509List: list
@ivar x509List: A list of L{tlslite.X509.X509} instances,
starting with the end-entity certificate and with every
subsequent certificate certifying the previous.
"""
def __init__(self, x509List=None):
"""Create a new X509CertChain.
@type x509List: list
@param x509List: A list of L{tlslite.X509.X509} instances,
starting with the end-entity certificate and with every
subsequent certificate certifying the previous.
"""
if x509List:
self.x509List = x509List
else:
self.x509List = []
def getNumCerts(self):
"""Get the number of certificates in this chain.
@rtype: int
"""
return len(self.x509List)
def getEndEntityPublicKey(self):
"""Get the public key from the end-entity certificate.
@rtype: L{tlslite.utils.RSAKey.RSAKey}
"""
if self.getNumCerts() == 0:
raise AssertionError()
return self.x509List[0].publicKey
def getFingerprint(self):
"""Get the hex-encoded fingerprint of the end-entity certificate.
@rtype: str
@return: A hex-encoded fingerprint.
"""
if self.getNumCerts() == 0:
raise AssertionError()
return self.x509List[0].getFingerprint()
def getCommonName(self):
"""Get the Subject's Common Name from the end-entity certificate.
The cryptlib_py module must be installed in order to use this
function.
@rtype: str or None
@return: The CN component of the certificate's subject DN, if
present.
"""
if self.getNumCerts() == 0:
raise AssertionError()
return self.x509List[0].getCommonName()
def validate(self, x509TrustList):
"""Check the validity of the certificate chain.
This checks that every certificate in the chain validates with
the subsequent one, until some certificate validates with (or
is identical to) one of the passed-in root certificates.
The cryptlib_py module must be installed in order to use this
function.
@type x509TrustList: list of L{tlslite.X509.X509}
@param x509TrustList: A list of trusted root certificates. The
certificate chain must extend to one of these certificates to
be considered valid.
"""
import cryptlib_py
c1 = None
c2 = None
lastC = None
rootC = None
try:
rootFingerprints = [c.getFingerprint() for c in x509TrustList]
#Check that every certificate in the chain validates with the
#next one
for cert1, cert2 in zip(self.x509List, self.x509List[1:]):
#If we come upon a root certificate, we're done.
if cert1.getFingerprint() in rootFingerprints:
return True
c1 = cryptlib_py.cryptImportCert(cert1.writeBytes(),
cryptlib_py.CRYPT_UNUSED)
c2 = cryptlib_py.cryptImportCert(cert2.writeBytes(),
cryptlib_py.CRYPT_UNUSED)
try:
cryptlib_py.cryptCheckCert(c1, c2)
except:
return False
cryptlib_py.cryptDestroyCert(c1)
c1 = None
cryptlib_py.cryptDestroyCert(c2)
c2 = None
#If the last certificate is one of the root certificates, we're
#done.
if self.x509List[-1].getFingerprint() in rootFingerprints:
return True
#Otherwise, find a root certificate that the last certificate
#chains to, and validate them.
lastC = cryptlib_py.cryptImportCert(self.x509List[-1].writeBytes(),
cryptlib_py.CRYPT_UNUSED)
for rootCert in x509TrustList:
rootC = cryptlib_py.cryptImportCert(rootCert.writeBytes(),
cryptlib_py.CRYPT_UNUSED)
if self._checkChaining(lastC, rootC):
try:
cryptlib_py.cryptCheckCert(lastC, rootC)
return True
except:
return False
return False
finally:
if not (c1 is None):
cryptlib_py.cryptDestroyCert(c1)
if not (c2 is None):
cryptlib_py.cryptDestroyCert(c2)
if not (lastC is None):
cryptlib_py.cryptDestroyCert(lastC)
if not (rootC is None):
cryptlib_py.cryptDestroyCert(rootC)
def _checkChaining(self, lastC, rootC):
import cryptlib_py
import array
def compareNames(name):
try:
length = cryptlib_py.cryptGetAttributeString(lastC, name, None)
lastName = array.array('B', [0] * length)
cryptlib_py.cryptGetAttributeString(lastC, name, lastName)
lastName = lastName.tostring()
except cryptlib_py.CryptException, e:
if e[0] == cryptlib_py.CRYPT_ERROR_NOTFOUND:
lastName = None
try:
length = cryptlib_py.cryptGetAttributeString(rootC, name, None)
rootName = array.array('B', [0] * length)
cryptlib_py.cryptGetAttributeString(rootC, name, rootName)
rootName = rootName.tostring()
except cryptlib_py.CryptException, e:
if e[0] == cryptlib_py.CRYPT_ERROR_NOTFOUND:
rootName = None
return lastName == rootName
cryptlib_py.cryptSetAttribute(lastC,
cryptlib_py.CRYPT_CERTINFO_ISSUERNAME,
cryptlib_py.CRYPT_UNUSED)
if not compareNames(cryptlib_py.CRYPT_CERTINFO_COUNTRYNAME):
return False
if not compareNames(cryptlib_py.CRYPT_CERTINFO_LOCALITYNAME):
return False
if not compareNames(cryptlib_py.CRYPT_CERTINFO_ORGANIZATIONNAME):
return False
if not compareNames(cryptlib_py.CRYPT_CERTINFO_ORGANIZATIONALUNITNAME):
return False
if not compareNames(cryptlib_py.CRYPT_CERTINFO_COMMONNAME):
return False
return True
|
sauloal/pycluster
|
refs/heads/master
|
pypy-1.9_64/lib_pypy/dbm.py
|
2
|
from ctypes import Structure, c_char_p, c_int, c_void_p, CDLL
import ctypes.util
import os, sys
class error(Exception):
def __init__(self, msg):
self.msg = msg
def __str__(self):
return self.msg
class datum(Structure):
_fields_ = [
('dptr', c_char_p),
('dsize', c_int),
]
def __init__(self, text):
if not isinstance(text, str):
raise TypeError("datum: expected string, not %s" % type(text))
Structure.__init__(self, text, len(text))
class dbm(object):
def __init__(self, dbmobj):
self._aobj = dbmobj
def close(self):
if not self._aobj:
raise error('DBM object has already been closed')
getattr(lib, funcs['close'])(self._aobj)
self._aobj = None
def __del__(self):
if self._aobj:
self.close()
def keys(self):
if not self._aobj:
raise error('DBM object has already been closed')
allkeys = []
k = getattr(lib, funcs['firstkey'])(self._aobj)
while k.dptr:
allkeys.append(k.dptr[:k.dsize])
k = getattr(lib, funcs['nextkey'])(self._aobj)
return allkeys
def get(self, key, default=None):
if not self._aobj:
raise error('DBM object has already been closed')
dat = datum(key)
k = getattr(lib, funcs['fetch'])(self._aobj, dat)
if k.dptr:
return k.dptr[:k.dsize]
if getattr(lib, funcs['error'])(self._aobj):
getattr(lib, funcs['clearerr'])(self._aobj)
raise error("")
return default
def __len__(self):
return len(self.keys())
def __getitem__(self, key):
value = self.get(key)
if value is None:
raise KeyError(key)
return value
def __setitem__(self, key, value):
if not self._aobj:
raise error('DBM object has already been closed')
dat = datum(key)
data = datum(value)
status = getattr(lib, funcs['store'])(self._aobj, dat, data, lib.DBM_REPLACE)
if getattr(lib, funcs['error'])(self._aobj):
getattr(lib, funcs['clearerr'])(self._aobj)
raise error("")
return status
def setdefault(self, key, default=''):
if not self._aobj:
raise error('DBM object has already been closed')
dat = datum(key)
k = getattr(lib, funcs['fetch'])(self._aobj, dat)
if k.dptr:
return k.dptr[:k.dsize]
data = datum(default)
status = getattr(lib, funcs['store'])(self._aobj, dat, data, lib.DBM_INSERT)
if status < 0:
getattr(lib, funcs['clearerr'])(self._aobj)
raise error("cannot add item to database")
return default
def __contains__(self, key):
if not self._aobj:
raise error('DBM object has already been closed')
dat = datum(key)
k = getattr(lib, funcs['fetch'])(self._aobj, dat)
if k.dptr:
return True
return False
has_key = __contains__
def __delitem__(self, key):
if not self._aobj:
raise error('DBM object has already been closed')
dat = datum(key)
status = getattr(lib, funcs['delete'])(self._aobj, dat)
if status < 0:
raise KeyError(key)
### initialization: Berkeley DB versus normal DB
def _init_func(name, argtypes=None, restype=None):
try:
func = getattr(lib, '__db_ndbm_' + name)
funcs[name] = '__db_ndbm_' + name
except AttributeError:
func = getattr(lib, 'dbm_' + name)
funcs[name] = 'dbm_' + name
if argtypes is not None:
func.argtypes = argtypes
if restype is not None:
func.restype = restype
if sys.platform != 'darwin':
libpath = ctypes.util.find_library('db')
if not libpath:
# XXX this is hopeless...
libpath = ctypes.util.find_library('db-4.5')
if not libpath:
raise ImportError("Cannot find dbm library")
lib = CDLL(libpath) # Linux
_platform = 'bdb'
else:
lib = CDLL("/usr/lib/libdbm.dylib") # OS X
_platform = 'osx'
library = "GNU gdbm"
funcs = {}
_init_func('open', (c_char_p, c_int, c_int), restype=c_void_p)
_init_func('close', (c_void_p,), restype=c_void_p)
_init_func('firstkey', (c_void_p,), restype=datum)
_init_func('nextkey', (c_void_p,), restype=datum)
_init_func('fetch', (c_void_p, datum), restype=datum)
_init_func('store', (c_void_p, datum, datum, c_int), restype=c_int)
_init_func('error', (c_void_p,), restype=c_int)
_init_func('delete', (c_void_p, datum), restype=c_int)
lib.DBM_INSERT = 0
lib.DBM_REPLACE = 1
def open(filename, flag='r', mode=0666):
"open a DBM database"
if not isinstance(filename, str):
raise TypeError("expected string")
openflag = 0
try:
openflag = {
'r': os.O_RDONLY,
'rw': os.O_RDWR,
'w': os.O_RDWR | os.O_CREAT,
'c': os.O_RDWR | os.O_CREAT,
'n': os.O_RDWR | os.O_CREAT | os.O_TRUNC,
}[flag]
except KeyError, e:
raise error("arg 2 to open should be 'r', 'w', 'c', or 'n'")
a_db = getattr(lib, funcs['open'])(filename, openflag, mode)
if a_db == 0:
raise error("Could not open file %s.db" % filename)
return dbm(a_db)
__all__ = ('datum', 'dbm', 'error', 'funcs', 'open', 'library')
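# A minimal usage sketch of the dict-like API, assuming an ndbm-compatible
# library was located at import time:
if __name__ == '__main__':
    db = open('/tmp/example', 'c')   # 'c': create if missing, read-write
    db['key'] = 'value'              # dbm store with DBM_REPLACE
    print db['key']                  # dbm fetch -> 'value'
    print 'key' in db                # -> True
    del db['key']                    # dbm delete
    db.close()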
|
kaushik94/sympy
|
refs/heads/master
|
sympy/codegen/matrix_nodes.py
|
3
|
"""
Additional AST nodes for operations on matrices. The nodes in this module
are meant to represent optimization of matrix expressions within codegen's
target languages that cannot be represented by SymPy expressions.
As an example, we can use :meth:`sympy.codegen.rewriting.optimize` and the
``matinv_opt`` optimization provided in :mod:`sympy.codegen.rewriting` to
transform matrix multiplication under certain assumptions:
>>> from sympy import symbols, MatrixSymbol
>>> n = symbols('n', integer=True)
>>> A = MatrixSymbol('A', n, n)
>>> x = MatrixSymbol('x', n, 1)
>>> expr = A**(-1) * x
>>> from sympy.assumptions import assuming, Q
>>> from sympy.codegen.rewriting import matinv_opt, optimize
>>> with assuming(Q.fullrank(A)):
... optimize(expr, [matinv_opt])
MatrixSolve(A, vector=x)
"""
from .ast import Token
from sympy.matrices import MatrixExpr
from sympy.core.sympify import sympify
class MatrixSolve(Token, MatrixExpr):
"""Represents an operation to solve a linear matrix equation.
Parameters
==========
matrix : MatrixSymbol
Matrix representing the coefficients of variables in the linear
equation. This matrix must be square and full-rank (i.e. all columns must
be linearly independent) for the solving operation to be valid.
vector : MatrixSymbol
One-column matrix representing the right-hand side of the linear
equations represented in ``matrix``.
Examples
========
>>> from sympy import symbols, MatrixSymbol
>>> from sympy.codegen.matrix_nodes import MatrixSolve
>>> n = symbols('n', integer=True)
>>> A = MatrixSymbol('A', n, n)
>>> x = MatrixSymbol('x', n, 1)
>>> from sympy.printing.pycode import NumPyPrinter
>>> NumPyPrinter().doprint(MatrixSolve(A, x))
'numpy.linalg.solve(A, x)'
>>> from sympy.printing import octave_code
>>> octave_code(MatrixSolve(A, x))
'A \\\\ x'
"""
__slots__ = ['matrix', 'vector']
_construct_matrix = staticmethod(sympify)
def __init__(self, matrix, vector):
self.shape = self.vector.shape
|
jdunaravich/thumbor
|
refs/heads/master
|
thumbor/loaders/https_loader.py
|
2
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# thumbor imaging service
# https://github.com/thumbor/thumbor/wiki
# Licensed under the MIT license:
# http://www.opensource.org/licenses/mit-license
# Copyright (c) 2011 globo.com timehome@corp.globo.com
from thumbor.loaders import http_loader
from tornado.concurrent import return_future
from urllib import unquote
def _normalize_url(url):
url = http_loader.quote_url(unquote(url))
return url if url.startswith('http') else 'https://%s' % url
def validate(context, url):
return http_loader.validate(context, url, normalize_url_func=_normalize_url)
def return_contents(response, url, callback, context):
return http_loader.return_contents(response, url, callback, context)
@return_future
def load(context, url, callback):
return http_loader.load_sync(context, url, callback, normalize_url_func=_normalize_url)
def encode(string):
return http_loader.encode(string)
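# A hedged sketch of _normalize_url's behaviour (values illustrative):
#
#     _normalize_url('example.com/img.jpg')        -> 'https://example.com/img.jpg'
#     _normalize_url('http://example.com/img.jpg') -> 'http://example.com/img.jpg'
#
# Bare hosts get an https:// scheme prepended; an existing http(s) scheme is kept.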
|
vasyarv/edx-platform
|
refs/heads/master
|
cms/djangoapps/contentstore/features/video_editor.py
|
101
|
# -*- coding: utf-8 -*-
# disable missing docstring
# pylint: disable=missing-docstring
import requests
from lettuce import world, step
from nose.tools import assert_true, assert_equal, assert_in, assert_not_equal # pylint: disable=no-name-in-module
from terrain.steps import reload_the_page
from django.conf import settings
from common import upload_file, attach_file
TEST_ROOT = settings.COMMON_TEST_DATA_ROOT
DISPLAY_NAME = "Component Display Name"
NATIVE_LANGUAGES = {lang: label for lang, label in settings.LANGUAGES if len(lang) == 2}
LANGUAGES = {
lang: NATIVE_LANGUAGES.get(lang, display)
for lang, display in settings.ALL_LANGUAGES
}
LANGUAGES.update({
'table': 'Table of Contents'
})
TRANSLATION_BUTTONS = {
'add': '.metadata-video-translations .create-action',
'upload': '.metadata-video-translations .upload-action',
'download': '.metadata-video-translations .download-action',
'remove': '.metadata-video-translations .remove-action',
'clear': '.metadata-video-translations .setting-clear',
}
VIDEO_MENUS = {
'language': '.lang .menu',
}
class RequestHandlerWithSessionId(object):
def get(self, url):
"""
Sends a request.
"""
kwargs = dict()
session_id = [{i['name']:i['value']} for i in world.browser.cookies.all() if i['name'] == u'sessionid']
if session_id:
kwargs.update({
'cookies': session_id[0]
})
response = requests.get(url, **kwargs)
self.response = response
self.status_code = response.status_code
self.headers = response.headers
self.content = response.content
return self
def is_success(self):
"""
Returns `True` if the response succeeded; otherwise returns `False`.
"""
if self.status_code < 400:
return True
return False
def check_header(self, name, value):
"""
Returns `True` if the response header exists and has the appropriate
value; otherwise returns `False`.
"""
if value in self.headers.get(name, ''):
return True
return False
def success_upload_file(filename):
upload_file(filename, sub_path="uploads/")
world.css_has_text('#upload_confirm', 'Success!')
world.is_css_not_present('.wrapper-modal-window-assetupload', wait_time=30)
def get_translations_container():
return world.browser.find_by_xpath('//label[text()="Transcript Languages"]/following-sibling::div')
def get_setting_container(lang_code):
try:
get_xpath = lambda value: './/descendant::a[@data-lang="{}" and contains(@class,"remove-setting")]/parent::*'.format(value)
return get_translations_container().find_by_xpath(get_xpath(lang_code)).first
except Exception:
return None
def get_last_dropdown():
return get_translations_container().find_by_xpath('.//descendant::select[last()]').last
def choose_option(dropdown, value):
dropdown.find_by_value(value)[0].click()
def choose_new_lang(lang_code):
world.css_click(TRANSLATION_BUTTONS['add'])
choose_option(get_last_dropdown(), lang_code)
assert_equal(get_last_dropdown().value, lang_code, "Option with provided value is not available or was not selected")
def open_menu(menu):
world.browser.execute_script("$('{selector}').parent().addClass('is-opened')".format(
selector=VIDEO_MENUS[menu]
))
@step('I have set "transcript display" to (.*)$')
def set_show_captions(step, setting):
# Prevent cookies from overriding course settings
world.browser.cookies.delete('hide_captions')
world.edit_component()
world.select_editor_tab('Advanced')
world.browser.select('Show Transcript', setting)
world.save_component()
@step('when I view the video it (.*) show the captions$')
def shows_captions(_step, show_captions):
world.wait_for_js_variable_truthy("Video")
world.wait(0.5)
if show_captions == 'does not':
assert_true(world.is_css_present('div.video.closed'))
else:
assert_true(world.is_css_not_present('div.video.closed'))
# Prevent cookies from overriding course settings
world.browser.cookies.delete('hide_captions')
world.browser.cookies.delete('current_player_mode')
@step('I see the correct video settings and default values$')
def correct_video_settings(_step):
expected_entries = [
# basic
[DISPLAY_NAME, 'Video', False],
['Default Video URL', 'http://youtu.be/3_yD_cEKoCk, , ', False],
# advanced
[DISPLAY_NAME, 'Video', False],
['Default Timed Transcript', '', False],
['Download Transcript Allowed', 'False', False],
['Downloadable Transcript URL', '', False],
['Show Transcript', 'True', False],
['Transcript Languages', '', False],
['Upload Handout', '', False],
['Video Available on Web Only', 'False', False],
['Video Download Allowed', 'False', False],
['Video File URLs', '', False],
['Video Start Time', '00:00:00', False],
['Video Stop Time', '00:00:00', False],
['YouTube ID', '3_yD_cEKoCk', False],
['YouTube ID for .75x speed', '', False],
['YouTube ID for 1.25x speed', '', False],
['YouTube ID for 1.5x speed', '', False]
]
world.verify_all_setting_entries(expected_entries)
@step('my video display name change is persisted on save$')
def video_name_persisted(step):
world.save_component()
reload_the_page(step)
world.wait_for_xmodule()
world.edit_component()
world.verify_setting_entry(
world.get_setting_entry(DISPLAY_NAME),
DISPLAY_NAME, '3.4', True
)
@step('I can modify video display name')
def i_can_modify_video_display_name(_step):
index = world.get_setting_entry_index(DISPLAY_NAME)
world.set_field_value(index, '3.4')
world.verify_setting_entry(world.get_setting_entry(DISPLAY_NAME), DISPLAY_NAME, '3.4', True)
@step('I upload transcript file(?:s)?:$')
def upload_transcript(step):
input_hidden = '.metadata-video-translations .input'
# Number of previously added translations
initial_index = len(world.css_find(TRANSLATION_BUTTONS['download']))
if step.hashes:
for i, item in enumerate(step.hashes):
lang_code = item['lang_code']
filename = item['filename']
index = initial_index + i
choose_new_lang(lang_code)
expected_text = world.css_text(TRANSLATION_BUTTONS['upload'], index=index)
assert_equal(expected_text, "Upload")
assert_equal(world.css_find(input_hidden).last.value, "")
world.css_click(TRANSLATION_BUTTONS['upload'], index=index)
success_upload_file(filename)
world.wait_for_visible(TRANSLATION_BUTTONS['download'], index=index)
assert_equal(world.css_find(TRANSLATION_BUTTONS['upload']).last.text, "Replace")
assert_equal(world.css_find(input_hidden).last.value, filename)
@step('I try to upload transcript file "([^"]*)"$')
def try_to_upload_transcript(step, filename):
world.css_click(TRANSLATION_BUTTONS['upload'])
attach_file(filename, 'uploads/')
@step('I upload transcript file "([^"]*)" for "([^"]*)" language code$')
def upload_transcript_for_lang(step, filename, lang_code):
get_xpath = lambda value: './/div/a[contains(@class, "upload-action")]'.format(value)
container = get_setting_container(lang_code)
# If translation isn't uploaded, prepare drop-down and try to find container again
choose_new_lang(lang_code)
container = get_setting_container(lang_code)
button = container.find_by_xpath(get_xpath(lang_code)).first
button.click()
success_upload_file(filename)
@step('I replace transcript file for "([^"]*)" language code by "([^"]*)"$')
def replace_transcript_for_lang(step, lang_code, filename):
get_xpath = lambda value: './/div/a[contains(@class, "upload-action")]'.format(value)
container = get_setting_container(lang_code)
button = container.find_by_xpath(get_xpath(lang_code)).first
button.click()
success_upload_file(filename)
@step('I see validation error "([^"]*)"$')
def verify_validation_error_message(step, error_message):
assert_equal(world.css_text('#upload_error'), error_message)
@step('I can download transcript for "([^"]*)" language code, that contains text "([^"]*)"$')
def i_can_download_transcript(_step, lang_code, text):
MIME_TYPE = 'application/x-subrip'
get_xpath = lambda value: './/div/a[contains(text(), "Download")]'.format(value)
container = get_setting_container(lang_code)
assert container
button = container.find_by_xpath(get_xpath(lang_code)).first
url = button['href']
request = RequestHandlerWithSessionId()
assert_true(request.get(url).is_success())
assert_true(request.check_header('content-type', MIME_TYPE))
assert_in(text.encode('utf-8'), request.content)
@step('I remove translation for "([^"]*)" language code$')
def i_can_remove_transcript(_step, lang_code):
get_xpath = lambda value: './/descendant::a[@data-lang="{}" and contains(@class,"remove-setting")]'.format(value)
container = get_setting_container(lang_code)
assert container
button = container.find_by_xpath(get_xpath(lang_code)).first
button.click()
@step('I see translations for "([^"]*)"$')
def verify_translations(_step, lang_codes_string):
expected = [l.strip() for l in lang_codes_string.split(',')]
actual = [l['data-lang'] for l in world.css_find('.metadata-video-translations .remove-setting')]
assert_equal(set(expected), set(actual))
@step('I do not see translations$')
def no_translations(_step):
assert_true(world.is_css_not_present('.metadata-video-translations .remove-setting'))
@step('I confirm prompt$')
def confirm_prompt(_step):
world.confirm_studio_prompt()
@step('I (cannot )?choose "([^"]*)" language code$')
def i_choose_lang_code(_step, cannot, lang_code):
choose_option(get_last_dropdown(), lang_code)
if cannot:
assert_not_equal(get_last_dropdown().value, lang_code, "Option with provided value was selected, but should not have been")
else:
assert_equal(get_last_dropdown().value, lang_code, "Option with provided value is not available or was not selected")
@step('I click button "([^"]*)"$')
def click_button(_step, button):
world.css_click(TRANSLATION_BUTTONS[button.lower()])
@step('video language menu has "([^"]*)" translations$')
def i_see_correct_langs(_step, langs):
menu_name = 'language'
open_menu(menu_name)
items = world.css_find(VIDEO_MENUS[menu_name] + ' li')
translations = {t.strip(): LANGUAGES[t.strip()] for t in langs.split(',')}
assert_equal(len(translations), len(items))
for lang_code, label in translations.items():
assert_true(any([i.text == label for i in items]))
assert_true(any([i['data-lang-code'] == lang_code for i in items]))
@step('video language with code "([^"]*)" at position "(\d+)"$')
def i_see_lang_at_position(_step, code, position):
menu_name = 'language'
open_menu(menu_name)
item = world.css_find(VIDEO_MENUS[menu_name] + ' li')[int(position)]
assert_equal(item['data-lang-code'], code)
|
possel/possel
|
refs/heads/dev
|
possel/web_client.py
|
1
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import tornado.web
class WebUIServer(tornado.web.RequestHandler):
def get(self):
self.render('client.html')
|
Nova-Boy/zeppelin
|
refs/heads/master
|
interpreter/lib/python/mpl_config.py
|
23
|
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This module provides utilities for users to configure the inline plotting
# backend through a PyZeppelinContext instance (e.g., through z.configure_mpl())
import matplotlib
def configure(**kwargs):
"""
Generic configure function.
Usage: configure(prop1='foo', prop2='bar', ...)
Currently supported zeppelin-specific properties are:
interactive - If true show all figures without explicit call to show()
via a post-execute hook.
angular - If true, bind figures to angular display system.
close - If true, close all figures once shown.
width, height - Default width / height of the figure in pixels.
fontsize - Font size.
dpi - dpi of the figure.
fmt - Figure format
supported_formats - Supported figure formats (defaults to png, jpg, svg)
context - ZeppelinContext instance (requires PY4J)
"""
_config.update(**kwargs)
# Broadcast relevant changes to matplotlib RC
_on_config_change()
def get(key):
"""
Get the configuration info given a key
"""
return _config[key]
def _on_config_change():
# dpi
dpi = _config['dpi']
# For older versions of matplotlib, savefig.dpi is not synced with
# figure.dpi by default
matplotlib.rcParams['figure.dpi'] = dpi
if matplotlib.__version__ < '2.0.0':
matplotlib.rcParams['savefig.dpi'] = dpi
# Width and height
width = float(_config['width']) / dpi
height = float(_config['height']) / dpi
matplotlib.rcParams['figure.figsize'] = (width, height)
# Font size
fontsize = _config['fontsize']
matplotlib.rcParams['font.size'] = fontsize
# Default Figure Format
fmt = _config['format']
supported_formats = _config['supported_formats']
    if fmt not in supported_formats:
        raise ValueError("Unsupported format %s" % fmt)
matplotlib.rcParams['savefig.format'] = fmt
# Interactive mode
interactive = _config['interactive']
matplotlib.interactive(interactive)
def _init_config():
dpi = matplotlib.rcParams['figure.dpi']
fmt = matplotlib.rcParams['savefig.format']
width, height = matplotlib.rcParams['figure.figsize']
fontsize = matplotlib.rcParams['font.size']
_config['dpi'] = dpi
_config['format'] = fmt
_config['width'] = width*dpi
_config['height'] = height*dpi
_config['fontsize'] = fontsize
_config['close'] = True
_config['interactive'] = matplotlib.is_interactive()
_config['angular'] = False
_config['supported_formats'] = ['png', 'jpg', 'svg']
_config['context'] = None
_config = {}
_init_config()
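# A minimal usage sketch (assuming this module is exposed through a
# PyZeppelinContext as z.configure_mpl(), as the header comment describes;
# the values below are illustrative, not defaults):
#
#   import mpl_config
#   mpl_config.configure(width=800, height=600, fontsize=12, format='svg')
#   mpl_config.get('dpi')  # read back a single setting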
|
dursk/django
|
refs/heads/master
|
tests/migration_test_data_persistence/tests.py
|
368
|
from django.test import TestCase, TransactionTestCase
from .models import Book
class MigrationDataPersistenceTestCase(TransactionTestCase):
"""
Tests that data loaded in migrations is available if we set
serialized_rollback = True on TransactionTestCase
"""
available_apps = ["migration_test_data_persistence"]
serialized_rollback = True
def test_persistence(self):
self.assertEqual(
Book.objects.count(),
1,
)
class MigrationDataNormalPersistenceTestCase(TestCase):
"""
Tests that data loaded in migrations is available on TestCase
"""
def test_persistence(self):
self.assertEqual(
Book.objects.count(),
1,
)
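# For context, the row counted above comes from a data migration in this app's
# migrations package. A sketch of such a migration (names illustrative):
#
#   from django.db import migrations
#
#   def add_book(apps, schema_editor):
#       Book = apps.get_model("migration_test_data_persistence", "Book")
#       Book.objects.create(title="Some Book")
#
#   class Migration(migrations.Migration):
#       dependencies = [("migration_test_data_persistence", "0001_initial")]
#       operations = [migrations.RunPython(add_book)]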
|
TeamNext/EmpressX
|
refs/heads/master
|
empressx/contrib/auth/forms.py
|
1
|
from django import forms
class AuthenticationForm(forms.Form):
ticket = forms.SlugField(max_length=512)
|
lberruti/ansible-modules-core
|
refs/heads/maint1.99
|
database/mysql/__init__.py
|
12133432
| |
muffinresearch/amo-validator
|
refs/heads/master
|
validator/testcases/l10n/__init__.py
|
12133432
| |
aleksandra-tarkowska/django
|
refs/heads/master
|
tests/custom_columns/__init__.py
|
12133432
| |
naphthalene/hubcave
|
refs/heads/master
|
hubcave/core/management/commands/__init__.py
|
12133432
| |
lkastler/Analysis-DbpediaLogs
|
refs/heads/master
|
DatasetAnalyzer/src/dbpedia_analyzer/__init__.py
|
12133432
| |
learn-ml/ml
|
refs/heads/master
|
optimiz/__init__.py
|
12133432
| |
Immortalin/python-for-android
|
refs/heads/master
|
python3-alpha/python3-src/Lib/distutils/tests/test_msvc9compiler.py
|
47
|
"""Tests for distutils.msvc9compiler."""
import sys
import unittest
import os
from distutils.errors import DistutilsPlatformError
from distutils.tests import support
from test.support import run_unittest
_MANIFEST = """\
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<assembly xmlns="urn:schemas-microsoft-com:asm.v1"
manifestVersion="1.0">
<trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
<security>
<requestedPrivileges>
<requestedExecutionLevel level="asInvoker" uiAccess="false">
</requestedExecutionLevel>
</requestedPrivileges>
</security>
</trustInfo>
<dependency>
<dependentAssembly>
<assemblyIdentity type="win32" name="Microsoft.VC90.CRT"
version="9.0.21022.8" processorArchitecture="x86"
publicKeyToken="XXXX">
</assemblyIdentity>
</dependentAssembly>
</dependency>
<dependency>
<dependentAssembly>
<assemblyIdentity type="win32" name="Microsoft.VC90.MFC"
version="9.0.21022.8" processorArchitecture="x86"
publicKeyToken="XXXX"></assemblyIdentity>
</dependentAssembly>
</dependency>
</assembly>
"""
_CLEANED_MANIFEST = """\
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<assembly xmlns="urn:schemas-microsoft-com:asm.v1"
manifestVersion="1.0">
<trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
<security>
<requestedPrivileges>
<requestedExecutionLevel level="asInvoker" uiAccess="false">
</requestedExecutionLevel>
</requestedPrivileges>
</security>
</trustInfo>
<dependency>
</dependency>
<dependency>
<dependentAssembly>
<assemblyIdentity type="win32" name="Microsoft.VC90.MFC"
version="9.0.21022.8" processorArchitecture="x86"
publicKeyToken="XXXX"></assemblyIdentity>
</dependentAssembly>
</dependency>
</assembly>"""
if sys.platform=="win32":
from distutils.msvccompiler import get_build_version
if get_build_version()>=8.0:
SKIP_MESSAGE = None
else:
SKIP_MESSAGE = "These tests are only for MSVC8.0 or above"
else:
SKIP_MESSAGE = "These tests are only for win32"
@unittest.skipUnless(SKIP_MESSAGE is None, SKIP_MESSAGE)
class msvc9compilerTestCase(support.TempdirManager,
unittest.TestCase):
def test_no_compiler(self):
# makes sure query_vcvarsall throws
# a DistutilsPlatformError if the compiler
# is not found
from distutils.msvc9compiler import query_vcvarsall
def _find_vcvarsall(version):
return None
from distutils import msvc9compiler
old_find_vcvarsall = msvc9compiler.find_vcvarsall
msvc9compiler.find_vcvarsall = _find_vcvarsall
try:
self.assertRaises(DistutilsPlatformError, query_vcvarsall,
'wont find this version')
finally:
msvc9compiler.find_vcvarsall = old_find_vcvarsall
def test_reg_class(self):
from distutils.msvc9compiler import Reg
self.assertRaises(KeyError, Reg.get_value, 'xxx', 'xxx')
        # looking for values that should exist in all
        # Windows registry versions.
path = r'Control Panel\Desktop'
v = Reg.get_value(path, 'dragfullwindows')
self.assertTrue(v in ('0', '1', '2'))
import winreg
HKCU = winreg.HKEY_CURRENT_USER
keys = Reg.read_keys(HKCU, 'xxxx')
self.assertEqual(keys, None)
keys = Reg.read_keys(HKCU, r'Control Panel')
self.assertTrue('Desktop' in keys)
def test_remove_visual_c_ref(self):
from distutils.msvc9compiler import MSVCCompiler
tempdir = self.mkdtemp()
manifest = os.path.join(tempdir, 'manifest')
f = open(manifest, 'w')
try:
f.write(_MANIFEST)
finally:
f.close()
compiler = MSVCCompiler()
compiler._remove_visual_c_ref(manifest)
# see what we got
f = open(manifest)
try:
# removing trailing spaces
content = '\n'.join([line.rstrip() for line in f.readlines()])
finally:
f.close()
# makes sure the manifest was properly cleaned
self.assertEqual(content, _CLEANED_MANIFEST)
def test_suite():
return unittest.makeSuite(msvc9compilerTestCase)
if __name__ == "__main__":
run_unittest(test_suite())
|
sergafts/django-registration
|
refs/heads/master
|
registration/forms.py
|
2
|
"""
Forms and validation code for user registration.
Note that all of these forms assume Django's bundled default ``User``
model; since it's not possible for a form to anticipate in advance the
needs of custom user models, you will need to write your own forms if
you're using a custom model.
"""
from __future__ import unicode_literals
from django import forms
from django.utils.translation import ugettext_lazy as _
from django.contrib.auth.forms import UserCreationForm
from .users import UserModel, UsernameField
User = UserModel()
class RegistrationForm(UserCreationForm):
"""
Form for registering a new user account.
Validates that the requested username is not already in use, and
requires the password to be entered twice to catch typos.
Subclasses should feel free to add any additional validation they
need, but should avoid defining a ``save()`` method -- the actual
saving of collected user data is delegated to the active
registration backend.
"""
required_css_class = 'required'
email = forms.EmailField(label=_("E-mail"))
class Meta:
model = User
fields = (UsernameField(), "email")
class RegistrationFormTermsOfService(RegistrationForm):
"""
Subclass of ``RegistrationForm`` which adds a required checkbox
for agreeing to a site's Terms of Service.
"""
tos = forms.BooleanField(widget=forms.CheckboxInput,
label=_('I have read and agree to the Terms of Service'),
error_messages={'required': _("You must agree to the terms to register")})
class RegistrationFormUniqueEmail(RegistrationForm):
"""
Subclass of ``RegistrationForm`` which enforces uniqueness of
email addresses.
"""
def clean_email(self):
"""
Validate that the supplied email address is unique for the
site.
"""
if User.objects.filter(email__iexact=self.cleaned_data['email']):
raise forms.ValidationError(_("This email address is already in use. Please supply a different email address."))
return self.cleaned_data['email']
class RegistrationFormNoFreeEmail(RegistrationForm):
"""
Subclass of ``RegistrationForm`` which disallows registration with
email addresses from popular free webmail services; moderately
useful for preventing automated spam registrations.
To change the list of banned domains, subclass this form and
override the attribute ``bad_domains``.
"""
bad_domains = ['aim.com', 'aol.com', 'email.com', 'gmail.com',
'googlemail.com', 'hotmail.com', 'hushmail.com',
'msn.com', 'mail.ru', 'mailinator.com', 'live.com',
'yahoo.com']
def clean_email(self):
"""
Check the supplied email address against a list of known free
webmail domains.
"""
email_domain = self.cleaned_data['email'].split('@')[1]
if email_domain in self.bad_domains:
raise forms.ValidationError(_("Registration using free email addresses is prohibited. Please supply a different email address."))
return self.cleaned_data['email']
class ResendActivationForm(forms.Form):
required_css_class = 'required'
email = forms.EmailField(label=_("E-mail"))
|
vijayanandnandam/youtube-dl
|
refs/heads/master
|
youtube_dl/extractor/tele13.py
|
90
|
# coding: utf-8
from __future__ import unicode_literals
from .common import InfoExtractor
from .youtube import YoutubeIE
from ..utils import (
js_to_json,
qualities,
determine_ext,
)
class Tele13IE(InfoExtractor):
_VALID_URL = r'^https?://(?:www\.)?t13\.cl/videos(?:/[^/]+)+/(?P<id>[\w-]+)'
_TESTS = [
{
'url': 'http://www.t13.cl/videos/actualidad/el-circulo-de-hierro-de-michelle-bachelet-en-su-regreso-a-la-moneda',
'md5': '4cb1fa38adcad8fea88487a078831755',
'info_dict': {
'id': 'el-circulo-de-hierro-de-michelle-bachelet-en-su-regreso-a-la-moneda',
'ext': 'mp4',
'title': 'El círculo de hierro de Michelle Bachelet en su regreso a La Moneda',
},
'params': {
# HTTP Error 404: Not Found
'skip_download': True,
},
},
{
'url': 'http://www.t13.cl/videos/mundo/tendencias/video-captan-misteriosa-bola-fuego-cielos-bangkok',
'md5': '867adf6a3b3fef932c68a71d70b70946',
'info_dict': {
'id': 'rOoKv2OMpOw',
'ext': 'mp4',
'title': 'Shooting star seen on 7-Sep-2015',
'description': 'md5:7292ff2a34b2f673da77da222ae77e1e',
'uploader': 'Porjai Jaturongkhakun',
'upload_date': '20150906',
'uploader_id': 'UCnLY_3ezwNcDSC_Wc6suZxw',
},
'add_ie': ['Youtube'],
}
]
def _real_extract(self, url):
display_id = self._match_id(url)
webpage = self._download_webpage(url, display_id)
setup_js = self._search_regex(
r"(?s)jwplayer\('player-vivo'\).setup\((\{.*?\})\)",
webpage, 'setup code')
sources = self._parse_json(self._search_regex(
r'sources\s*:\s*(\[[^\]]+\])', setup_js, 'sources'),
display_id, js_to_json)
preference = qualities(['Móvil', 'SD', 'HD'])
formats = []
urls = []
for f in sources:
format_url = f['file']
if format_url and format_url not in urls:
ext = determine_ext(format_url)
if ext == 'm3u8':
formats.extend(self._extract_m3u8_formats(
format_url, display_id, 'mp4', 'm3u8_native',
m3u8_id='hls', fatal=False))
elif YoutubeIE.suitable(format_url):
return self.url_result(format_url, 'Youtube')
else:
formats.append({
'url': format_url,
'format_id': f.get('label'),
'preference': preference(f.get('label')),
'ext': ext,
})
urls.append(format_url)
self._sort_formats(formats)
return {
'id': display_id,
'title': self._search_regex(
r'title\s*:\s*"([^"]+)"', setup_js, 'title'),
'description': self._html_search_meta(
'description', webpage, 'description'),
'thumbnail': self._search_regex(
r'image\s*:\s*"([^"]+)"', setup_js, 'thumbnail', default=None),
'formats': formats,
}
|
priyankajain18/nereid-webshop
|
refs/heads/develop
|
tests/test_base.py
|
3
|
'''
Test Base Case
:copyright: (c) 2014 by Openlabs Technologies & Consulting (P) LTD
:license: GPLv3, see LICENSE for more details
'''
import datetime
from decimal import Decimal
from dateutil.relativedelta import relativedelta
import pycountry
import trytond.tests.test_tryton
from trytond.tests.test_tryton import POOL, USER, CONTEXT
from nereid.testing import NereidTestCase
from trytond.transaction import Transaction
class BaseTestCase(NereidTestCase):
"""
Base test Case for nereid webshop
"""
def setUp(self):
trytond.tests.test_tryton.install_module('nereid_webshop')
trytond.tests.test_tryton.install_module(
'payment_gateway_authorize_net'
)
self.FiscalYear = POOL.get('account.fiscalyear')
self.Account = POOL.get('account.account')
self.PaymentTerm = POOL.get('account.invoice.payment_term')
self.Currency = POOL.get('currency.currency')
self.Company = POOL.get('company.company')
self.Party = POOL.get('party.party')
self.Sale = POOL.get('sale.sale')
self.Cart = POOL.get('nereid.cart')
self.Product = POOL.get('product.product')
self.ProductTemplate = POOL.get('product.template')
self.Language = POOL.get('ir.lang')
self.NereidWebsite = POOL.get('nereid.website')
self.SaleShop = POOL.get('sale.shop')
self.Uom = POOL.get('product.uom')
self.Country = POOL.get('country.country')
self.Subdivision = POOL.get('country.subdivision')
self.Currency = POOL.get('currency.currency')
self.NereidUser = POOL.get('nereid.user')
self.User = POOL.get('res.user')
self.PriceList = POOL.get('product.price_list')
self.Location = POOL.get('stock.location')
self.Party = POOL.get('party.party')
self.Locale = POOL.get('nereid.website.locale')
self.Tax = POOL.get('account.tax')
self.Node = POOL.get('product.tree_node')
self.ArticleCategory = POOL.get('nereid.cms.article.category')
self.Article = POOL.get('nereid.cms.article')
self.Category = POOL.get('product.category')
self.ProductNodeRelationship = POOL.get(
'product.product-product.tree_node'
)
self.templates = {
'shopping-cart.jinja':
'Cart:{{ cart.id }},{{get_cart_size()|round|int}},'
'{{cart.sale.total_amount}}',
'product.jinja':
'{{ product.name }}',
'catalog/gift-card.html':
'{{ product.id }}',
}
def _get_account_by_kind(self, kind, company=None, silent=True):
"""Returns an account with given spec
:param kind: receivable/payable/expense/revenue
        :param silent: don't raise an error if the account is not found
"""
if company is None:
company, = self.Company.search([], limit=1)
accounts = self.Account.search([
('kind', '=', kind),
('company', '=', company)
], limit=1)
if not accounts and not silent:
raise Exception("Account not found")
return accounts[0] if accounts else False
def _create_product_category(self, name, vlist):
"""
Creates a product category
        Name is mandatory while other values may be provided as keyword
arguments
:param name: Name of the product category
:param vlist: List of dictionaries of values to create
"""
for values in vlist:
values['name'] = name
return self.Category.create(vlist)
def _create_product_template(
self, name, vlist, uri, uom=u'Unit', displayed_on_eshop=True
):
"""
Create a product template with products and return its ID
:param name: Name of the product
:param vlist: List of dictionaries of values to create
:param uri: uri of product template
:param uom: Note it is the name of UOM (not symbol or code)
:param displayed_on_eshop: Boolean field to display product
on shop or not
"""
for values in vlist:
values['name'] = name
values['default_uom'], = self.Uom.search(
[('name', '=', uom)], limit=1
)
values['sale_uom'], = self.Uom.search(
[('name', '=', uom)], limit=1
)
values['products'] = [
('create', [{
'uri': uri,
'displayed_on_eshop': displayed_on_eshop,
}])
]
return self.ProductTemplate.create(vlist)
def create_test_products(self):
# Create product templates with products
self._create_product_template(
'product 1',
[{
'category': self.category.id,
'type': 'goods',
'salable': True,
'list_price': Decimal('10'),
'cost_price': Decimal('5'),
'account_expense': self._get_account_by_kind('expense').id,
'account_revenue': self._get_account_by_kind('revenue').id,
}],
uri='product-1',
)
self._create_product_template(
'product 2',
[{
'category': self.category2.id,
'type': 'goods',
'salable': True,
'list_price': Decimal('20'),
'cost_price': Decimal('5'),
'account_expense': self._get_account_by_kind('expense').id,
'account_revenue': self._get_account_by_kind('revenue').id,
}],
uri='product-2',
)
self._create_product_template(
'product 3',
[{
'category': self.category3.id,
'type': 'goods',
'list_price': Decimal('30'),
'cost_price': Decimal('5'),
'account_expense': self._get_account_by_kind('expense').id,
'account_revenue': self._get_account_by_kind('revenue').id,
}],
uri='product-3',
)
self._create_product_template(
'product 4',
[{
'category': self.category3.id,
'type': 'goods',
'list_price': Decimal('30'),
'cost_price': Decimal('5'),
'account_expense': self._get_account_by_kind('expense').id,
'account_revenue': self._get_account_by_kind('revenue').id,
}],
uri='product-4',
displayed_on_eshop=False
)
def _create_auth_net_gateway_for_site(self):
"""
A helper function that creates the authorize.net gateway and assigns
it to the websites.
"""
PaymentGateway = POOL.get('payment_gateway.gateway')
NereidWebsite = POOL.get('nereid.website')
Journal = POOL.get('account.journal')
cash_journal, = Journal.search([
('name', '=', 'Cash')
])
        gateway = PaymentGateway(
name='Authorize.net',
journal=cash_journal,
provider='authorize_net',
method='credit_card',
authorize_net_login='327deWY74422',
authorize_net_transaction_key='32jF65cTxja88ZA2',
)
        gateway.save()
websites = NereidWebsite.search([])
NereidWebsite.write(websites, {
'accept_credit_card': True,
'save_payment_profile': True,
            'credit_card_gateway': gateway.id,
})
def _create_fiscal_year(self, date=None, company=None):
"""
        Creates a fiscal year and required sequences
"""
Sequence = POOL.get('ir.sequence')
SequenceStrict = POOL.get('ir.sequence.strict')
if date is None:
date = datetime.date.today()
if company is None:
company, = self.Company.search([], limit=1)
invoice_sequence, = SequenceStrict.create([{
'name': '%s' % date.year,
'code': 'account.invoice',
'company': company,
}])
fiscal_year, = self.FiscalYear.create([{
'name': '%s' % date.year,
'start_date': date + relativedelta(month=1, day=1),
'end_date': date + relativedelta(month=12, day=31),
'company': company,
'post_move_sequence': Sequence.create([{
'name': '%s' % date.year,
'code': 'account.move',
'company': company,
}])[0],
'out_invoice_sequence': invoice_sequence,
'in_invoice_sequence': invoice_sequence,
'out_credit_note_sequence': invoice_sequence,
'in_credit_note_sequence': invoice_sequence,
}])
self.FiscalYear.create_period([fiscal_year])
return fiscal_year
def _create_coa_minimal(self, company):
"""Create a minimal chart of accounts
"""
AccountTemplate = POOL.get('account.account.template')
account_create_chart = POOL.get(
'account.create_chart', type="wizard")
account_template, = AccountTemplate.search(
[('parent', '=', None)]
)
session_id, _, _ = account_create_chart.create()
create_chart = account_create_chart(session_id)
create_chart.account.account_template = account_template
create_chart.account.company = company
create_chart.transition_create_account()
receivable, = self.Account.search([
('kind', '=', 'receivable'),
('company', '=', company),
])
payable, = self.Account.search([
('kind', '=', 'payable'),
('company', '=', company),
])
create_chart.properties.company = company
create_chart.properties.account_receivable = receivable
create_chart.properties.account_payable = payable
create_chart.transition_create_properties()
def _create_payment_term(self):
"""Create a simple payment term with all advance
"""
return self.PaymentTerm.create([{
'name': 'Direct',
'lines': [('create', [{'type': 'remainder'}])]
}])
def _create_countries(self, count=5):
"""
Create some sample countries and subdivisions
"""
for country in list(pycountry.countries)[0:count]:
countries = self.Country.create([{
'name': country.name,
'code': country.alpha2,
}])
try:
divisions = pycountry.subdivisions.get(
country_code=country.alpha2
)
except KeyError:
pass
else:
for subdivision in list(divisions)[0:count]:
self.Subdivision.create([{
'country': countries[0].id,
'name': subdivision.name,
'code': subdivision.code,
'type': subdivision.type.lower(),
}])
def _create_pricelists(self):
"""
Create the pricelists
"""
# Setup the pricelists
self.party_pl_margin = Decimal('1.10')
self.guest_pl_margin = Decimal('1.20')
user_price_list, = self.PriceList.create([{
'name': 'PL 1',
'company': self.company.id,
'lines': [
('create', [{
'formula': 'unit_price * %s' % self.party_pl_margin
}])
],
}])
guest_price_list, = self.PriceList.create([{
'name': 'PL 2',
'company': self.company.id,
'lines': [
('create', [{
'formula': 'unit_price * %s' % self.guest_pl_margin
}])
],
}])
return guest_price_list.id, user_price_list.id
def setup_defaults(self):
"""
Setup the defaults
"""
with Transaction().set_context(company=None):
self.usd, = self.Currency.create([{
'name': 'US Dollar',
'code': 'USD',
'symbol': '$',
}])
self.party, = self.Party.create([{
'name': 'Openlabs',
}])
self.company, = self.Company.create([{
'party': self.party.id,
'currency': self.usd
}])
self.User.write(
[self.User(USER)], {
'main_company': self.company.id,
'company': self.company.id,
}
)
CONTEXT.update(self.User.get_preferences(context_only=True))
# Create Fiscal Year
self._create_fiscal_year(company=self.company.id)
# Create Chart of Accounts
self._create_coa_minimal(company=self.company.id)
# Create a payment term
payment_term, = self._create_payment_term()
shop_price_list, user_price_list = self._create_pricelists()
party1, = self.Party.create([{
'name': 'Guest User',
}])
party2, = self.Party.create([{
'name': 'Registered User',
'sale_price_list': user_price_list,
}])
self.party2 = party2
party3, = self.Party.create([{
'name': 'Registered User 2',
}])
# Create users and assign the pricelists to them
self.guest_user, = self.NereidUser.create([{
'party': party1.id,
'display_name': 'Guest User',
'email': 'guest@openlabs.co.in',
'password': 'password',
'company': self.company.id,
}])
self.registered_user, = self.NereidUser.create([{
'party': party2.id,
'display_name': 'Registered User',
'email': 'email@example.com',
'password': 'password',
'company': self.company.id,
}])
self.registered_user2, = self.NereidUser.create([{
'party': party3.id,
'display_name': 'Registered User 2',
'email': 'email2@example.com',
'password': 'password2',
'company': self.company.id,
}])
self._create_countries()
self.available_countries = self.Country.search([], limit=5)
warehouse, = self.Location.search([
('type', '=', 'warehouse')
], limit=1)
location, = self.Location.search([
('type', '=', 'storage')
], limit=1)
en_us, = self.Language.search([('code', '=', 'en_US')])
self.locale_en_us, = self.Locale.create([{
'code': 'en_US',
'language': en_us.id,
'currency': self.usd.id,
}])
self.sale_tax, = self.Tax.create([{
'name': 'Sales Tax',
'description': 'Sales Tax',
'type': 'percentage',
'rate': Decimal('0.05'), # Rate 5%
'company': self.company.id,
'invoice_account': self._get_account_by_kind('other').id,
'credit_note_account': self._get_account_by_kind('other').id,
}])
self.shop, = self.SaleShop.create([{
'name': 'Default Shop',
'price_list': shop_price_list,
'warehouse': warehouse,
'payment_term': payment_term,
'company': self.company.id,
'users': [('add', [USER])]
}])
self.User.set_preferences({'shop': self.shop})
self.default_node, = self.Node.create([{
'name': 'root',
'slug': 'root',
}])
self.NereidWebsite.create([{
'name': 'localhost',
'shop': self.shop,
'company': self.company.id,
'application_user': USER,
'default_locale': self.locale_en_us.id,
'guest_user': self.guest_user,
'countries': [('add', self.available_countries)],
'currencies': [('add', [self.usd.id])],
}])
# Create an article category
article_categ, = self.ArticleCategory.create([{
'title': 'Test Categ',
'unique_name': 'test-categ',
}])
self.Article.create([{
'title': 'Test Article',
'uri': 'test-article',
'content': 'Test Content',
'sequence': 10,
'categories': [('add', [article_categ.id])],
}])
# Product categories
self.category, = self._create_product_category(
            'categ1', [{'uri': 'category1'}]
)
self.category2, = self._create_product_category(
'categ2', [{'uri': 'category2'}]
)
self.category3, = self._create_product_category(
'categ3', [{'uri': 'category3'}]
)
def login(self, client, username, password, assert_=True):
"""
Login method.
:param client: Instance of the test client
:param username: The username, usually email
:param password: The password to login
:param assert_: Boolean value to indicate if the login has to be
ensured. If the login failed an assertion error would
be raised
"""
rv = client.post(
'/login', data={
'email': username,
'password': password,
}
)
if assert_:
            self.assertEqual(rv.status_code, 302)
return rv
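    # Typical use inside a test (a sketch; the credentials are the fixtures
    # created in setup_defaults above, and get_app() comes from NereidTestCase):
    #
    #   app = self.get_app()
    #   with app.test_client() as c:
    #       self.login(c, 'email@example.com', 'password')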
|
alikins/ansible
|
refs/heads/devel
|
lib/ansible/modules/packaging/language/gem.py
|
27
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2013, Johan Wiren <johan.wiren.se@gmail.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: gem
short_description: Manage Ruby gems
description:
- Manage installation and uninstallation of Ruby gems.
version_added: "1.1"
options:
name:
description:
- The name of the gem to be managed.
required: true
state:
description:
- The desired state of the gem. C(latest) ensures that the latest version is installed.
required: false
choices: [present, absent, latest]
default: present
gem_source:
description:
- The path to a local gem used as installation source.
required: false
include_dependencies:
description:
- Whether to include dependencies or not.
required: false
choices: [ "yes", "no" ]
default: "yes"
repository:
description:
- The repository from which the gem will be installed
required: false
aliases: [source]
user_install:
description:
- Install gem in user's local gems cache or for all users
required: false
choices: ["yes", "no"]
default: "yes"
version_added: "1.3"
executable:
description:
- Override the path to the gem executable
required: false
version_added: "1.4"
env_shebang:
description:
- Rewrite the shebang line on installed scripts to use /usr/bin/env.
required: false
default: "no"
version_added: "2.2"
version:
description:
- Version of the gem to be installed/removed.
required: false
pre_release:
description:
- Allow installation of pre-release versions of the gem.
required: false
default: "no"
version_added: "1.6"
include_doc:
description:
- Install with or without docs.
required: false
default: "no"
version_added: "2.0"
build_flags:
description:
- Allow adding build flags for gem compilation
required: false
version_added: "2.0"
author:
- "Ansible Core Team"
- "Johan Wiren"
'''
EXAMPLES = '''
# Installs version 1.0 of vagrant.
- gem:
name: vagrant
version: 1.0
state: present
# Installs latest available version of rake.
- gem:
name: rake
state: latest
# Installs rake version 1.0 from a local gem on disk.
- gem:
name: rake
gem_source: /path/to/gems/rake-1.0.gem
state: present
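# Installs the latest pre-release version of rake
# (an illustrative sketch combining documented options).
- gem:
    name: rake
    state: latest
    pre_release: yes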
'''
import re
from ansible.module_utils.basic import AnsibleModule
def get_rubygems_path(module):
if module.params['executable']:
result = module.params['executable'].split(' ')
else:
result = [module.get_bin_path('gem', True)]
return result
def get_rubygems_version(module):
cmd = get_rubygems_path(module) + ['--version']
(rc, out, err) = module.run_command(cmd, check_rc=True)
match = re.match(r'^(\d+)\.(\d+)\.(\d+)', out)
if not match:
return None
return tuple(int(x) for x in match.groups())
def get_installed_versions(module, remote=False):
cmd = get_rubygems_path(module)
cmd.append('query')
if remote:
cmd.append('--remote')
if module.params['repository']:
cmd.extend(['--source', module.params['repository']])
cmd.append('-n')
cmd.append('^%s$' % module.params['name'])
(rc, out, err) = module.run_command(cmd, check_rc=True)
installed_versions = []
for line in out.splitlines():
match = re.match(r"\S+\s+\((.+)\)", line)
if match:
versions = match.group(1)
for version in versions.split(', '):
installed_versions.append(version.split()[0])
return installed_versions
def exists(module):
if module.params['state'] == 'latest':
remoteversions = get_installed_versions(module, remote=True)
if remoteversions:
module.params['version'] = remoteversions[0]
installed_versions = get_installed_versions(module)
if module.params['version']:
if module.params['version'] in installed_versions:
return True
else:
if installed_versions:
return True
return False
def uninstall(module):
if module.check_mode:
return
cmd = get_rubygems_path(module)
cmd.append('uninstall')
if module.params['version']:
cmd.extend(['--version', module.params['version']])
else:
cmd.append('--all')
cmd.append('--executable')
cmd.append(module.params['name'])
module.run_command(cmd, check_rc=True)
def install(module):
if module.check_mode:
return
ver = get_rubygems_version(module)
if ver:
major = ver[0]
else:
major = None
cmd = get_rubygems_path(module)
cmd.append('install')
if module.params['version']:
cmd.extend(['--version', module.params['version']])
if module.params['repository']:
cmd.extend(['--source', module.params['repository']])
if not module.params['include_dependencies']:
cmd.append('--ignore-dependencies')
else:
if major and major < 2:
cmd.append('--include-dependencies')
if module.params['user_install']:
cmd.append('--user-install')
else:
cmd.append('--no-user-install')
if module.params['pre_release']:
cmd.append('--pre')
if not module.params['include_doc']:
if major and major < 2:
cmd.append('--no-rdoc')
cmd.append('--no-ri')
else:
cmd.append('--no-document')
if module.params['env_shebang']:
cmd.append('--env-shebang')
cmd.append(module.params['gem_source'])
if module.params['build_flags']:
cmd.extend(['--', module.params['build_flags']])
module.run_command(cmd, check_rc=True)
def main():
module = AnsibleModule(
argument_spec=dict(
executable=dict(required=False, type='path'),
gem_source=dict(required=False, type='path'),
include_dependencies=dict(required=False, default=True, type='bool'),
name=dict(required=True, type='str'),
repository=dict(required=False, aliases=['source'], type='str'),
state=dict(required=False, default='present', choices=['present', 'absent', 'latest'], type='str'),
user_install=dict(required=False, default=True, type='bool'),
pre_release=dict(required=False, default=False, type='bool'),
include_doc=dict(required=False, default=False, type='bool'),
env_shebang=dict(required=False, default=False, type='bool'),
version=dict(required=False, type='str'),
build_flags=dict(required=False, type='str'),
),
supports_check_mode=True,
mutually_exclusive=[['gem_source', 'repository'], ['gem_source', 'version']],
)
if module.params['version'] and module.params['state'] == 'latest':
module.fail_json(msg="Cannot specify version when state=latest")
if module.params['gem_source'] and module.params['state'] == 'latest':
module.fail_json(msg="Cannot maintain state=latest when installing from local source")
if not module.params['gem_source']:
module.params['gem_source'] = module.params['name']
changed = False
if module.params['state'] in ['present', 'latest']:
if not exists(module):
install(module)
changed = True
elif module.params['state'] == 'absent':
if exists(module):
uninstall(module)
changed = True
result = {}
result['name'] = module.params['name']
result['state'] = module.params['state']
if module.params['version']:
result['version'] = module.params['version']
result['changed'] = changed
module.exit_json(**result)
if __name__ == '__main__':
main()
|
daymer/xWIKI_Karma
|
refs/heads/master
|
CustomModules/mysql-connector-python-2.1.7/tests/test_pep249.py
|
13
|
# MySQL Connector/Python - MySQL driver written in Python.
# Copyright (c) 2009, 2014, Oracle and/or its affiliates. All rights reserved.
# MySQL Connector/Python is licensed under the terms of the GPLv2
# <http://www.gnu.org/licenses/old-licenses/gpl-2.0.html>, like most
# MySQL Connectors. There are special exceptions to the terms and
# conditions of the GPLv2 as it is applied to this software, see the
# FOSS License Exception
# <http://www.mysql.com/about/legal/licensing/foss-exception.html>.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
"""Unittests for PEP-249
Rewritten from scratch. Found Ian Bicking's test suite and shamelessly
stole a few of his ideas. (Geert)
"""
import datetime
import time
import inspect
import tests
import mysql.connector as myconn
class PEP249Base(tests.MySQLConnectorTests):
def db_connect(self):
return myconn.connect(use_pure=True, **tests.get_mysql_config())
def get_connection_id(self, cursor):
cid = None
try:
cursor.execute("SELECT CONNECTION_ID()")
cid = cursor.fetchone()[0]
except myconn.errors.Error as err:
self.fail("Failed getting connection id; {0}".format(str(err)))
return cid
def setUp(self):
self.cnx = self.db_connect()
def tearDown(self):
self.cnx.close()
class PEP249ModuleTests(PEP249Base):
def setUp(self):
pass
def tearDown(self):
pass
def test_connect(self):
"""Interface exports the connect()-function"""
self.assertTrue(inspect.isfunction(myconn.connect),
"Module does not export the connect()-function")
cnx = myconn.connect(use_pure=True, **tests.get_mysql_config())
self.assertTrue(isinstance(cnx, myconn.connection.MySQLConnection),
"The connect()-method returns incorrect instance")
cnx = myconn.connect(**tests.get_mysql_config())
self.assertTrue(isinstance(cnx, myconn.connection.MySQLConnection),
"connect() not returning by default pure "
"MySQLConnection object")
if tests.MYSQL_CAPI:
cnx = myconn.connect(use_pure=False, **tests.get_mysql_config())
self.assertTrue(isinstance(cnx,
myconn.connection_cext.CMySQLConnection),
"The connect()-method returns incorrect instance")
def test_apilevel(self):
"""Interface sets the API level"""
self.assertTrue(hasattr(myconn, 'apilevel'),
"API level is not defined")
self.assertEqual('2.0', myconn.apilevel,
"API Level should be '2.0'")
def test_threadsafety(self):
"""Interface defines thread safety"""
self.assertTrue(myconn.threadsafety in (0, 1, 2, 3))
self.assertEqual(1, myconn.threadsafety)
def test_paramstyle(self):
"""Interface sets the parameter style"""
self.assertTrue(myconn.paramstyle in
('qmark', 'numeric', 'named', 'format', 'pyformat'),
"paramstyle was assigned an unsupported value")
self.assertEqual('pyformat', myconn.paramstyle,
"paramstyle should be 'pyformat'")
class PEP249ErrorsTests(PEP249Base):
def setUp(self):
pass
def tearDown(self):
pass
def test_Warning(self):
"""Interface exports the Warning-exception"""
self.assertTrue(issubclass(myconn.errors.Warning, Exception),
"Warning is not subclass of Exception")
def test_Error(self):
"""Interface exports the Error-exception"""
self.assertTrue(issubclass(myconn.errors.Error, Exception),
"Error is not subclass of Exception")
def test_InterfaceError(self):
"""Interface exports the InterfaceError-exception"""
self.assertTrue(issubclass(myconn.errors.InterfaceError,
myconn.errors.Error),
"InterfaceError is not subclass of errors.Error")
def test_DatabaseError(self):
"""Interface exports the DatabaseError-exception"""
self.assertTrue(issubclass(myconn.errors.DatabaseError,
myconn.errors.Error),
"DatabaseError is not subclass of errors.Error")
def test_DataError(self):
"""Interface exports the DataError-exception"""
self.assertTrue(issubclass(myconn.errors.DataError,
myconn.errors.DatabaseError),
"DataError is not subclass of errors.DatabaseError")
def test_OperationalError(self):
"""Interface exports the OperationalError-exception"""
self.assertTrue(
issubclass(myconn.errors.OperationalError,
myconn.errors.DatabaseError),
"OperationalError is not subclass of errors.DatabaseError")
def test_IntegrityError(self):
"""Interface exports the IntegrityError-exception"""
self.assertTrue(
issubclass(myconn.errors.IntegrityError,
myconn.errors.DatabaseError),
"IntegrityError is not subclass of errors.DatabaseError")
def test_InternalError(self):
"""Interface exports the InternalError-exception"""
self.assertTrue(issubclass(myconn.errors.InternalError,
myconn.errors.DatabaseError),
"InternalError is not subclass of errors.DatabaseError")
def test_ProgrammingError(self):
"""Interface exports the ProgrammingError-exception"""
self.assertTrue(
issubclass(myconn.errors.ProgrammingError,
myconn.errors.DatabaseError),
"ProgrammingError is not subclass of errors.DatabaseError")
def test_NotSupportedError(self):
"""Interface exports the NotSupportedError-exception"""
self.assertTrue(
issubclass(myconn.errors.NotSupportedError,
myconn.errors.DatabaseError),
"NotSupportedError is not subclass of errors.DatabaseError")
class PEP249ConnectionTests(PEP249Base):
def test_close(self):
"""Connection object has close()-method"""
self.assertTrue(hasattr(self.cnx, 'close'),
"Interface connection has no close()-method")
self.assertTrue(
inspect.ismethod(self.cnx.close),
"Interface connection defines connect, but is not a method")
def test_commit(self):
"""Connection object has commit()-method"""
self.assertTrue(hasattr(self.cnx, 'commit'),
"Interface connection has no commit()-method")
self.assertTrue(
inspect.ismethod(self.cnx.commit),
"Interface connection defines commit, but is not a method")
def test_rollback(self):
"""Connection object has rollback()-method"""
self.assertTrue(hasattr(self.cnx, 'rollback'),
"Interface connection has no rollback()-method")
self.assertTrue(
inspect.ismethod(self.cnx.rollback),
"Interface connection defines rollback, but is not a method")
def test_cursor(self):
"""Connection object has cursor()-method"""
self.assertTrue(hasattr(self.cnx, 'cursor'),
"Interface connection has no cursor()-method")
self.assertTrue(
inspect.ismethod(self.cnx.cursor),
"Interface connection defines cursor, but is not a method")
self.assertTrue(
isinstance(self.cnx.cursor(), myconn.cursor.MySQLCursor),
"Interface connection cursor()-method does not return a cursor")
class PEP249CursorTests(PEP249Base):
def setUp(self):
self.cnx = self.db_connect()
self.cur = self.cnx.cursor()
def test_description(self):
"""Cursor object has description-attribute"""
self.assertTrue(hasattr(self.cur, 'description'),
"Cursor object has no description-attribute")
self.assertEqual(None, self.cur.description,
"Cursor object's description should default ot None")
def test_rowcount(self):
"""Cursor object has rowcount-attribute"""
self.assertTrue(hasattr(self.cur, 'rowcount'),
"Cursor object has no rowcount-attribute")
self.assertEqual(-1, self.cur.rowcount,
"Cursor object's rowcount should default to -1")
def test_lastrowid(self):
"""Cursor object has lastrowid-attribute"""
self.assertTrue(hasattr(self.cur, 'lastrowid'),
"Cursor object has no lastrowid-attribute")
self.assertEqual(None, self.cur.lastrowid,
"Cursor object's lastrowid should default to None")
def test_callproc(self):
"""Cursor object has callproc()-method"""
self.assertTrue(hasattr(self.cur, 'callproc'),
"Cursor object has no callproc()-method")
self.assertTrue(inspect.ismethod(self.cur.callproc),
"Cursor object defines callproc, but is not a method")
def test_close(self):
"""Cursor object has close()-method"""
self.assertTrue(hasattr(self.cur, 'close'),
"Cursor object has no close()-method")
self.assertTrue(inspect.ismethod(self.cur.close),
"Cursor object defines close, but is not a method")
def test_execute(self):
"""Cursor object has execute()-method"""
self.assertTrue(hasattr(self.cur, 'execute'),
"Cursor object has no execute()-method")
self.assertTrue(inspect.ismethod(self.cur.execute),
"Cursor object defines execute, but is not a method")
def test_executemany(self):
"""Cursor object has executemany()-method"""
self.assertTrue(hasattr(self.cur, 'executemany'),
"Cursor object has no executemany()-method")
self.assertTrue(
inspect.ismethod(self.cur.executemany),
"Cursor object defines executemany, but is not a method")
def test_fetchone(self):
"""Cursor object has fetchone()-method"""
self.assertTrue(hasattr(self.cur, 'fetchone'),
"Cursor object has no fetchone()-method")
self.assertTrue(inspect.ismethod(self.cur.fetchone),
"Cursor object defines fetchone, but is not a method")
def test_fetchmany(self):
"""Cursor object has fetchmany()-method"""
        self.assertTrue(hasattr(self.cur, 'fetchmany'),
"Cursor object has no fetchmany()-method")
self.assertTrue(inspect.ismethod(self.cur.fetchmany),
"Cursor object defines fetchmany, but is not a method")
def test_fetchall(self):
"""Cursor object has fetchall()-method"""
self.assertTrue(hasattr(self.cur, 'fetchall'),
"Cursor object has no fetchall()-method")
self.assertTrue(inspect.ismethod(self.cur.fetchall),
"Cursor object defines fetchall, but is not a method")
def test_nextset(self):
"""Cursor object has nextset()-method"""
self.assertTrue(hasattr(self.cur, 'nextset'),
"Cursor object has no nextset()-method")
self.assertTrue(inspect.ismethod(self.cur.nextset),
"Cursor object defines nextset, but is not a method")
def test_arraysize(self):
"""Cursor object has arraysize-attribute"""
self.assertTrue(hasattr(self.cur, 'arraysize'),
"Cursor object has no arraysize-attribute")
self.assertEqual(1, self.cur.arraysize,
"Cursor object's arraysize should default to 1")
def test_setinputsizes(self):
"""Cursor object has setinputsizes()-method"""
self.assertTrue(hasattr(self.cur, 'setinputsizes'),
"Cursor object has no setinputsizes()-method")
self.assertTrue(inspect.ismethod(self.cur.setinputsizes),
"Cursor object's setinputsizes should default to 1")
def test_setoutputsize(self):
"""Cursor object has setoutputsize()-method"""
self.assertTrue(hasattr(self.cur, 'setoutputsize'),
"Cursor object has no setoutputsize()-method")
self.assertTrue(inspect.ismethod(self.cur.setoutputsize),
"Cursor object's setoutputsize should default to 1")
def _isolation_setup(self, drop, create):
cursor = self.cnx.cursor()
try:
cursor.execute(drop)
cursor.execute(create)
except myconn.errors.Error as err:
self.fail("Failed setting up test table; {0}".format(err))
cursor.close()
def _isolation_connection_equal(self, cnx1, cnx2):
cid1 = self.get_connection_id(cnx1)
cid2 = self.get_connection_id(cnx2)
return (cid1 == cid2)
def _isolation_cleanup(self, drop):
cursor = self.cnx.cursor()
try:
cursor.execute(drop)
except myconn.errors.Error as err:
self.fail("Failed cleaning up; {0}".format(err))
cursor.close()
def _isolation_test(self, cnx1, cnx2, engine='MyISAM'):
cur1 = cnx1.cursor()
cur2 = cnx2.cursor()
data = (1, 'myconnpy')
tbl = 'myconnpy_cursor_isolation'
stmt_create = (
"CREATE TABLE {table} "
"(col1 INT, col2 VARCHAR(30), PRIMARY KEY (col1)) "
"ENGINE={engine}"
).format(table=tbl, engine=engine)
stmt_drop = "DROP TABLE IF EXISTS {table}".format(table=tbl)
stmt_insert = (
"INSERT INTO {table} (col1,col2) "
"VALUES (%s,%s)"
).format(table=tbl)
stmt_select = "SELECT col1,col2 FROM {table}".format(table=tbl)
# Setup
cur1.execute("SET SESSION TRANSACTION ISOLATION LEVEL REPEATABLE READ")
cur2.execute("SET SESSION TRANSACTION ISOLATION LEVEL REPEATABLE READ")
self._isolation_setup(stmt_drop, stmt_create)
conn_equal = self._isolation_connection_equal(cur1, cur2)
if cnx1 == cnx2 and not conn_equal:
self.fail("Cursors should have same connection ID")
elif cnx1 != cnx2 and conn_equal:
self.fail("Cursors should have different connection ID")
# Insert data
try:
cur1.execute(stmt_insert, data)
except myconn.errors.Error as err:
self.fail("Failed inserting test data; {0}".format(str(err)))
# Query for data
result = None
try:
cur2.execute(stmt_select)
result = cur2.fetchone()
except myconn.errors.InterfaceError:
pass
except myconn.errors.Error as err:
self.fail("Failed querying for test data; {0}".format(str(err)))
if conn_equal:
self.assertEqual(data, result)
elif not conn_equal and engine.lower() == 'innodb':
self.assertEqual(None, result)
# Clean up
self._isolation_cleanup(stmt_drop)
cur1.close()
cur2.close()
def test_isolation1(self):
"""Cursor isolation between 2 cursor on same connection"""
self._isolation_test(self.cnx, self.cnx, 'MyISAM')
def test_isolation2(self):
"""Cursor isolation with 2 cursors, different connections, trans."""
db2 = self.db_connect()
if tests.have_engine(db2, 'InnoDB'):
self._isolation_test(self.cnx, db2, 'InnoDB')
class PEP249TypeObjConstructorsTests(PEP249Base):
def test_Date(self):
"""Interface exports Date"""
exp = datetime.date(1977, 6, 14)
self.assertEqual(myconn.Date(1977, 6, 14), exp,
"Interface Date should return a datetime.date")
def test_Time(self):
"""Interface exports Time"""
exp = datetime.time(23, 56, 13)
self.assertEqual(myconn.Time(23, 56, 13), exp,
"Interface Time should return a datetime.time")
def test_Timestamp(self):
"""Interface exports Timestamp"""
adate = (1977, 6, 14, 21, 54, 23)
exp = datetime.datetime(*adate)
self.assertEqual(
myconn.Timestamp(*adate), exp,
"Interface Timestamp should return a datetime.datetime")
def test_DateFromTicks(self):
"""Interface exports DateFromTicks"""
ticks = 1
exp = datetime.date(*time.localtime(ticks)[:3])
self.assertEqual(
myconn.DateFromTicks(ticks), exp,
"Interface DateFromTicks should return a datetime.date")
def test_TimeFromTicks(self):
"""Interface exports TimeFromTicks"""
ticks = 1
exp = datetime.time(*time.localtime(ticks)[3:6])
self.assertEqual(
myconn.TimeFromTicks(ticks), exp,
"Interface TimeFromTicks should return a datetime.time")
def test_TimestampFromTicks(self):
"""Interface exports TimestampFromTicks"""
ticks = 1
exp = datetime.datetime(*time.localtime(ticks)[:6])
self.assertEqual(
myconn.TimestampFromTicks(ticks), exp,
"Interface TimestampFromTicks should return a datetime.datetime")
def test_Binary(self):
"""Interface exports Binary"""
exp = r'\u82b1'.encode('utf-8')
self.assertEqual(
myconn.Binary(r'\u82b1'.encode('utf-8')), exp,
"Interface Binary should return a str")
def test_STRING(self):
"""Interface exports STRING"""
self.assertTrue(hasattr(myconn, 'STRING'))
self.assertTrue(
isinstance(myconn.STRING, myconn.dbapi._DBAPITypeObject),
"Interface STRING should return a _DBAPITypeObject")
def test_BINARY(self):
"""Interface exports BINARY"""
self.assertTrue(hasattr(myconn, 'BINARY'))
self.assertTrue(
isinstance(myconn.BINARY, myconn.dbapi._DBAPITypeObject),
"Interface BINARY should return a _DBAPITypeObject")
def test_NUMBER(self):
"""Interface exports NUMBER"""
self.assertTrue(hasattr(myconn, 'NUMBER'))
self.assertTrue(
isinstance(myconn.NUMBER, myconn.dbapi._DBAPITypeObject),
"Interface NUMBER should return a _DBAPITypeObject")
def test_DATETIME(self):
"""Interface exports DATETIME"""
self.assertTrue(hasattr(myconn, 'DATETIME'))
self.assertTrue(
isinstance(myconn.DATETIME, myconn.dbapi._DBAPITypeObject),
"Interface DATETIME should return a _DBAPITypeObject")
def test_ROWID(self):
"""Interface exports ROWID"""
self.assertTrue(hasattr(myconn, 'ROWID'))
self.assertTrue(
isinstance(myconn.ROWID, myconn.dbapi._DBAPITypeObject),
"Interface ROWID should return a _DBAPITypeObject")
|
samdoran/ansible
|
refs/heads/devel
|
lib/ansible/plugins/strategy/free.py
|
36
|
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
'''
DOCUMENTATION:
strategy: free
short_description: Executes tasks on each host independently
description:
        - Task execution is as fast as possible per host, in batches as defined by C(serial) (default: all).
Ansible will not wait for other hosts to finish the current task before queuing the next task for a host that has finished.
          Once a host is done with the play, it opens its slot to a new host that was waiting to start.
version_added: "2.0"
author: Ansible Core Team
'''
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import time
from ansible import constants as C
from ansible.errors import AnsibleError
from ansible.playbook.included_file import IncludedFile
from ansible.plugins import action_loader
from ansible.plugins.strategy import StrategyBase
from ansible.template import Templar
from ansible.module_utils._text import to_text
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
class StrategyModule(StrategyBase):
def run(self, iterator, play_context):
'''
The "free" strategy is a bit more complex, in that it allows tasks to
        be sent to hosts as quickly as they can be processed. This means that
        some hosts may finish very quickly if their tasks result in little or
        no work compared to other systems.
The algorithm used here also tries to be more "fair" when iterating
through hosts by remembering the last host in the list to be given a task
and starting the search from there as opposed to the top of the hosts
list again, which would end up favoring hosts near the beginning of the
list.
'''
# the last host to be given a task
last_host = 0
result = self._tqm.RUN_OK
work_to_do = True
while work_to_do and not self._tqm._terminated:
hosts_left = self.get_hosts_left(iterator)
if len(hosts_left) == 0:
self._tqm.send_callback('v2_playbook_on_no_hosts_remaining')
result = False
break
work_to_do = False # assume we have no more work to do
starting_host = last_host # save current position so we know when we've looped back around and need to break
# try and find an unblocked host with a task to run
host_results = []
while True:
host = hosts_left[last_host]
display.debug("next free host: %s" % host)
host_name = host.get_name()
                # peek at the next task for the host, to see if there's
                # anything to do for this host
(state, task) = iterator.get_next_task_for_host(host, peek=True)
display.debug("free host state: %s" % state)
display.debug("free host task: %s" % task)
if host_name not in self._tqm._unreachable_hosts and task:
# set the flag so the outer loop knows we've still found
# some work which needs to be done
work_to_do = True
display.debug("this host has work to do")
# check to see if this host is blocked (still executing a previous task)
if host_name not in self._blocked_hosts or not self._blocked_hosts[host_name]:
# pop the task, mark the host blocked, and queue it
self._blocked_hosts[host_name] = True
(state, task) = iterator.get_next_task_for_host(host)
try:
action = action_loader.get(task.action, class_only=True)
except KeyError:
# we don't care here, because the action may simply not have a
# corresponding action plugin
action = None
display.debug("getting variables")
task_vars = self._variable_manager.get_vars(play=iterator._play, host=host, task=task)
self.add_tqm_variables(task_vars, play=iterator._play)
templar = Templar(loader=self._loader, variables=task_vars)
display.debug("done getting variables")
try:
task.name = to_text(templar.template(task.name, fail_on_undefined=False), nonstring='empty')
display.debug("done templating")
                        except Exception:
                            # just ignore any errors during task name templating,
                            # we don't care if it just shows the raw name
                            display.debug("templating failed for some reason")
run_once = templar.template(task.run_once) or action and getattr(action, 'BYPASS_HOST_LOOP', False)
if run_once:
if action and getattr(action, 'BYPASS_HOST_LOOP', False):
raise AnsibleError("The '%s' module bypasses the host loop, which is currently not supported in the free strategy "
"and would instead execute for every host in the inventory list." % task.action, obj=task._ds)
else:
display.warning("Using run_once with the free strategy is not currently supported. This task will still be "
"executed for every host in the inventory list.")
# check to see if this task should be skipped, due to it being a member of a
# role which has already run (and whether that role allows duplicate execution)
if task._role and task._role.has_run(host):
# If there is no metadata, the default behavior is to not allow duplicates,
# if there is metadata, check to see if the allow_duplicates flag was set to true
if task._role._metadata is None or task._role._metadata and not task._role._metadata.allow_duplicates:
display.debug("'%s' skipped because role has already run" % task)
del self._blocked_hosts[host_name]
continue
if task.action == 'meta':
self._execute_meta(task, play_context, iterator, target_host=host)
self._blocked_hosts[host_name] = False
else:
# handle step if needed, skip meta actions as they are used internally
if not self._step or self._take_step(task, host_name):
if task.any_errors_fatal:
display.warning("Using any_errors_fatal with the free strategy is not supported, "
"as tasks are executed independently on each host")
self._tqm.send_callback('v2_playbook_on_task_start', task, is_conditional=False)
self._queue_task(host, task, task_vars, play_context)
del task_vars
else:
display.debug("%s is blocked, skipping for now" % host_name)
# move on to the next host and make sure we
# haven't gone past the end of our hosts list
last_host += 1
if last_host > len(hosts_left) - 1:
last_host = 0
# if we've looped around back to the start, break out
if last_host == starting_host:
break
results = self._process_pending_results(iterator)
host_results.extend(results)
try:
included_files = IncludedFile.process_include_results(
host_results,
self._tqm,
iterator=iterator,
inventory=self._inventory,
loader=self._loader,
variable_manager=self._variable_manager
)
            except AnsibleError:
                return self._tqm.RUN_ERROR
if len(included_files) > 0:
all_blocks = dict((host, []) for host in hosts_left)
for included_file in included_files:
display.debug("collecting new blocks for %s" % included_file)
try:
new_blocks = self._load_included_file(included_file, iterator=iterator)
except AnsibleError as e:
for host in included_file._hosts:
iterator.mark_host_failed(host)
display.warning(str(e))
continue
for new_block in new_blocks:
task_vars = self._variable_manager.get_vars(play=iterator._play, task=included_file._task)
final_block = new_block.filter_tagged_tasks(play_context, task_vars)
for host in hosts_left:
if host in included_file._hosts:
all_blocks[host].append(final_block)
display.debug("done collecting new blocks for %s" % included_file)
display.debug("adding all collected blocks from %d included file(s) to iterator" % len(included_files))
for host in hosts_left:
iterator.add_tasks(host, all_blocks[host])
display.debug("done adding collected blocks to iterator")
# pause briefly so we don't spin lock
time.sleep(C.DEFAULT_INTERNAL_POLL_INTERVAL)
# collect all the final results
results = self._wait_on_pending_results(iterator)
# run the base class run() method, which executes the cleanup function
# and runs any outstanding handlers which have been triggered
return super(StrategyModule, self).run(iterator, play_context, result)
|
abhikumar22/MYBLOG
|
refs/heads/master
|
blg/Lib/site-packages/pip/_vendor/requests/packages/urllib3/packages/six.py
|
2715
|
"""Utilities for writing code that runs on Python 2 and 3"""
# Copyright (c) 2010-2015 Benjamin Peterson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from __future__ import absolute_import
import functools
import itertools
import operator
import sys
import types
__author__ = "Benjamin Peterson <benjamin@python.org>"
__version__ = "1.10.0"
# Useful for very coarse version differentiation.
PY2 = sys.version_info[0] == 2
PY3 = sys.version_info[0] == 3
PY34 = sys.version_info[0:2] >= (3, 4)
if PY3:
string_types = str,
integer_types = int,
class_types = type,
text_type = str
binary_type = bytes
MAXSIZE = sys.maxsize
else:
string_types = basestring,
integer_types = (int, long)
class_types = (type, types.ClassType)
text_type = unicode
binary_type = str
if sys.platform.startswith("java"):
# Jython always uses 32 bits.
MAXSIZE = int((1 << 31) - 1)
else:
# It's possible to have sizeof(long) != sizeof(Py_ssize_t).
class X(object):
def __len__(self):
return 1 << 31
try:
len(X())
except OverflowError:
# 32-bit
MAXSIZE = int((1 << 31) - 1)
else:
# 64-bit
MAXSIZE = int((1 << 63) - 1)
del X
def _add_doc(func, doc):
"""Add documentation to a function."""
func.__doc__ = doc
def _import_module(name):
"""Import module, returning the module after the last dot."""
__import__(name)
return sys.modules[name]
class _LazyDescr(object):
def __init__(self, name):
self.name = name
def __get__(self, obj, tp):
result = self._resolve()
setattr(obj, self.name, result) # Invokes __set__.
try:
# This is a bit ugly, but it avoids running this again by
# removing this descriptor.
delattr(obj.__class__, self.name)
except AttributeError:
pass
return result
class MovedModule(_LazyDescr):
def __init__(self, name, old, new=None):
super(MovedModule, self).__init__(name)
if PY3:
if new is None:
new = name
self.mod = new
else:
self.mod = old
def _resolve(self):
return _import_module(self.mod)
def __getattr__(self, attr):
_module = self._resolve()
value = getattr(_module, attr)
setattr(self, attr, value)
return value
class _LazyModule(types.ModuleType):
def __init__(self, name):
super(_LazyModule, self).__init__(name)
self.__doc__ = self.__class__.__doc__
def __dir__(self):
attrs = ["__doc__", "__name__"]
attrs += [attr.name for attr in self._moved_attributes]
return attrs
# Subclasses should override this
_moved_attributes = []
class MovedAttribute(_LazyDescr):
def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
super(MovedAttribute, self).__init__(name)
if PY3:
if new_mod is None:
new_mod = name
self.mod = new_mod
if new_attr is None:
if old_attr is None:
new_attr = name
else:
new_attr = old_attr
self.attr = new_attr
else:
self.mod = old_mod
if old_attr is None:
old_attr = name
self.attr = old_attr
def _resolve(self):
module = _import_module(self.mod)
return getattr(module, self.attr)
class _SixMetaPathImporter(object):
"""
A meta path importer to import six.moves and its submodules.
This class implements a PEP302 finder and loader. It should be compatible
    with Python 2.5 and all existing versions of Python 3.
"""
def __init__(self, six_module_name):
self.name = six_module_name
self.known_modules = {}
def _add_module(self, mod, *fullnames):
for fullname in fullnames:
self.known_modules[self.name + "." + fullname] = mod
def _get_module(self, fullname):
return self.known_modules[self.name + "." + fullname]
def find_module(self, fullname, path=None):
if fullname in self.known_modules:
return self
return None
def __get_module(self, fullname):
try:
return self.known_modules[fullname]
except KeyError:
raise ImportError("This loader does not know module " + fullname)
def load_module(self, fullname):
try:
# in case of a reload
return sys.modules[fullname]
except KeyError:
pass
mod = self.__get_module(fullname)
if isinstance(mod, MovedModule):
mod = mod._resolve()
else:
mod.__loader__ = self
sys.modules[fullname] = mod
return mod
def is_package(self, fullname):
"""
        Return true if the named module is a package.
We need this method to get correct spec objects with
Python 3.4 (see PEP451)
"""
return hasattr(self.__get_module(fullname), "__path__")
def get_code(self, fullname):
"""Return None
Required, if is_package is implemented"""
self.__get_module(fullname) # eventually raises ImportError
return None
get_source = get_code # same as get_code
_importer = _SixMetaPathImporter(__name__)
class _MovedItems(_LazyModule):
"""Lazy loading of moved objects"""
__path__ = [] # mark as package
_moved_attributes = [
MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"),
MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
MovedAttribute("intern", "__builtin__", "sys"),
MovedAttribute("map", "itertools", "builtins", "imap", "map"),
MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"),
MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"),
MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"),
MovedAttribute("reduce", "__builtin__", "functools"),
MovedAttribute("shlex_quote", "pipes", "shlex", "quote"),
MovedAttribute("StringIO", "StringIO", "io"),
MovedAttribute("UserDict", "UserDict", "collections"),
MovedAttribute("UserList", "UserList", "collections"),
MovedAttribute("UserString", "UserString", "collections"),
MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"),
MovedModule("builtins", "__builtin__"),
MovedModule("configparser", "ConfigParser"),
MovedModule("copyreg", "copy_reg"),
MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),
MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"),
MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
MovedModule("http_cookies", "Cookie", "http.cookies"),
MovedModule("html_entities", "htmlentitydefs", "html.entities"),
MovedModule("html_parser", "HTMLParser", "html.parser"),
MovedModule("http_client", "httplib", "http.client"),
MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"),
MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
MovedModule("cPickle", "cPickle", "pickle"),
MovedModule("queue", "Queue"),
MovedModule("reprlib", "repr"),
MovedModule("socketserver", "SocketServer"),
MovedModule("_thread", "thread", "_thread"),
MovedModule("tkinter", "Tkinter"),
MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"),
MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
MovedModule("tkinter_colorchooser", "tkColorChooser",
"tkinter.colorchooser"),
MovedModule("tkinter_commondialog", "tkCommonDialog",
"tkinter.commondialog"),
MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
MovedModule("tkinter_font", "tkFont", "tkinter.font"),
MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
MovedModule("tkinter_tksimpledialog", "tkSimpleDialog",
"tkinter.simpledialog"),
MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"),
MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"),
MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"),
MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"),
MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"),
]
# Add windows specific modules.
if sys.platform == "win32":
_moved_attributes += [
MovedModule("winreg", "_winreg"),
]
for attr in _moved_attributes:
setattr(_MovedItems, attr.name, attr)
if isinstance(attr, MovedModule):
_importer._add_module(attr, "moves." + attr.name)
del attr
_MovedItems._moved_attributes = _moved_attributes
moves = _MovedItems(__name__ + ".moves")
_importer._add_module(moves, "moves")
class Module_six_moves_urllib_parse(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_parse"""
_urllib_parse_moved_attributes = [
MovedAttribute("ParseResult", "urlparse", "urllib.parse"),
MovedAttribute("SplitResult", "urlparse", "urllib.parse"),
MovedAttribute("parse_qs", "urlparse", "urllib.parse"),
MovedAttribute("parse_qsl", "urlparse", "urllib.parse"),
MovedAttribute("urldefrag", "urlparse", "urllib.parse"),
MovedAttribute("urljoin", "urlparse", "urllib.parse"),
MovedAttribute("urlparse", "urlparse", "urllib.parse"),
MovedAttribute("urlsplit", "urlparse", "urllib.parse"),
MovedAttribute("urlunparse", "urlparse", "urllib.parse"),
MovedAttribute("urlunsplit", "urlparse", "urllib.parse"),
MovedAttribute("quote", "urllib", "urllib.parse"),
MovedAttribute("quote_plus", "urllib", "urllib.parse"),
MovedAttribute("unquote", "urllib", "urllib.parse"),
MovedAttribute("unquote_plus", "urllib", "urllib.parse"),
MovedAttribute("urlencode", "urllib", "urllib.parse"),
MovedAttribute("splitquery", "urllib", "urllib.parse"),
MovedAttribute("splittag", "urllib", "urllib.parse"),
MovedAttribute("splituser", "urllib", "urllib.parse"),
MovedAttribute("uses_fragment", "urlparse", "urllib.parse"),
MovedAttribute("uses_netloc", "urlparse", "urllib.parse"),
MovedAttribute("uses_params", "urlparse", "urllib.parse"),
MovedAttribute("uses_query", "urlparse", "urllib.parse"),
MovedAttribute("uses_relative", "urlparse", "urllib.parse"),
]
for attr in _urllib_parse_moved_attributes:
setattr(Module_six_moves_urllib_parse, attr.name, attr)
del attr
Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes
_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"),
"moves.urllib_parse", "moves.urllib.parse")
class Module_six_moves_urllib_error(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_error"""
_urllib_error_moved_attributes = [
MovedAttribute("URLError", "urllib2", "urllib.error"),
MovedAttribute("HTTPError", "urllib2", "urllib.error"),
MovedAttribute("ContentTooShortError", "urllib", "urllib.error"),
]
for attr in _urllib_error_moved_attributes:
setattr(Module_six_moves_urllib_error, attr.name, attr)
del attr
Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes
_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"),
"moves.urllib_error", "moves.urllib.error")
class Module_six_moves_urllib_request(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_request"""
_urllib_request_moved_attributes = [
MovedAttribute("urlopen", "urllib2", "urllib.request"),
MovedAttribute("install_opener", "urllib2", "urllib.request"),
MovedAttribute("build_opener", "urllib2", "urllib.request"),
MovedAttribute("pathname2url", "urllib", "urllib.request"),
MovedAttribute("url2pathname", "urllib", "urllib.request"),
MovedAttribute("getproxies", "urllib", "urllib.request"),
MovedAttribute("Request", "urllib2", "urllib.request"),
MovedAttribute("OpenerDirector", "urllib2", "urllib.request"),
MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"),
MovedAttribute("ProxyHandler", "urllib2", "urllib.request"),
MovedAttribute("BaseHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"),
MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"),
MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"),
MovedAttribute("FileHandler", "urllib2", "urllib.request"),
MovedAttribute("FTPHandler", "urllib2", "urllib.request"),
MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"),
MovedAttribute("UnknownHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"),
MovedAttribute("urlretrieve", "urllib", "urllib.request"),
MovedAttribute("urlcleanup", "urllib", "urllib.request"),
MovedAttribute("URLopener", "urllib", "urllib.request"),
MovedAttribute("FancyURLopener", "urllib", "urllib.request"),
MovedAttribute("proxy_bypass", "urllib", "urllib.request"),
]
for attr in _urllib_request_moved_attributes:
setattr(Module_six_moves_urllib_request, attr.name, attr)
del attr
Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes
_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"),
"moves.urllib_request", "moves.urllib.request")
class Module_six_moves_urllib_response(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_response"""
_urllib_response_moved_attributes = [
MovedAttribute("addbase", "urllib", "urllib.response"),
MovedAttribute("addclosehook", "urllib", "urllib.response"),
MovedAttribute("addinfo", "urllib", "urllib.response"),
MovedAttribute("addinfourl", "urllib", "urllib.response"),
]
for attr in _urllib_response_moved_attributes:
setattr(Module_six_moves_urllib_response, attr.name, attr)
del attr
Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes
_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"),
"moves.urllib_response", "moves.urllib.response")
class Module_six_moves_urllib_robotparser(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_robotparser"""
_urllib_robotparser_moved_attributes = [
MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"),
]
for attr in _urllib_robotparser_moved_attributes:
setattr(Module_six_moves_urllib_robotparser, attr.name, attr)
del attr
Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes
_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"),
"moves.urllib_robotparser", "moves.urllib.robotparser")
class Module_six_moves_urllib(types.ModuleType):
"""Create a six.moves.urllib namespace that resembles the Python 3 namespace"""
__path__ = [] # mark as package
parse = _importer._get_module("moves.urllib_parse")
error = _importer._get_module("moves.urllib_error")
request = _importer._get_module("moves.urllib_request")
response = _importer._get_module("moves.urllib_response")
robotparser = _importer._get_module("moves.urllib_robotparser")
def __dir__(self):
return ['parse', 'error', 'request', 'response', 'robotparser']
_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"),
"moves.urllib")
def add_move(move):
"""Add an item to six.moves."""
setattr(_MovedItems, move.name, move)
def remove_move(name):
"""Remove item from six.moves."""
try:
delattr(_MovedItems, name)
except AttributeError:
try:
del moves.__dict__[name]
except KeyError:
raise AttributeError("no such move, %r" % (name,))
if PY3:
_meth_func = "__func__"
_meth_self = "__self__"
_func_closure = "__closure__"
_func_code = "__code__"
_func_defaults = "__defaults__"
_func_globals = "__globals__"
else:
_meth_func = "im_func"
_meth_self = "im_self"
_func_closure = "func_closure"
_func_code = "func_code"
_func_defaults = "func_defaults"
_func_globals = "func_globals"
try:
advance_iterator = next
except NameError:
def advance_iterator(it):
return it.next()
next = advance_iterator
try:
callable = callable
except NameError:
def callable(obj):
return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)
if PY3:
def get_unbound_function(unbound):
return unbound
create_bound_method = types.MethodType
def create_unbound_method(func, cls):
return func
Iterator = object
else:
def get_unbound_function(unbound):
return unbound.im_func
def create_bound_method(func, obj):
return types.MethodType(func, obj, obj.__class__)
def create_unbound_method(func, cls):
return types.MethodType(func, None, cls)
class Iterator(object):
def next(self):
return type(self).__next__(self)
callable = callable
_add_doc(get_unbound_function,
"""Get the function out of a possibly unbound function""")
get_method_function = operator.attrgetter(_meth_func)
get_method_self = operator.attrgetter(_meth_self)
get_function_closure = operator.attrgetter(_func_closure)
get_function_code = operator.attrgetter(_func_code)
get_function_defaults = operator.attrgetter(_func_defaults)
get_function_globals = operator.attrgetter(_func_globals)
if PY3:
def iterkeys(d, **kw):
return iter(d.keys(**kw))
def itervalues(d, **kw):
return iter(d.values(**kw))
def iteritems(d, **kw):
return iter(d.items(**kw))
def iterlists(d, **kw):
return iter(d.lists(**kw))
viewkeys = operator.methodcaller("keys")
viewvalues = operator.methodcaller("values")
viewitems = operator.methodcaller("items")
else:
def iterkeys(d, **kw):
return d.iterkeys(**kw)
def itervalues(d, **kw):
return d.itervalues(**kw)
def iteritems(d, **kw):
return d.iteritems(**kw)
def iterlists(d, **kw):
return d.iterlists(**kw)
viewkeys = operator.methodcaller("viewkeys")
viewvalues = operator.methodcaller("viewvalues")
viewitems = operator.methodcaller("viewitems")
_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.")
_add_doc(itervalues, "Return an iterator over the values of a dictionary.")
_add_doc(iteritems,
"Return an iterator over the (key, value) pairs of a dictionary.")
_add_doc(iterlists,
"Return an iterator over the (key, [values]) pairs of a dictionary.")
if PY3:
def b(s):
return s.encode("latin-1")
def u(s):
return s
unichr = chr
import struct
int2byte = struct.Struct(">B").pack
del struct
byte2int = operator.itemgetter(0)
indexbytes = operator.getitem
iterbytes = iter
import io
StringIO = io.StringIO
BytesIO = io.BytesIO
_assertCountEqual = "assertCountEqual"
if sys.version_info[1] <= 1:
_assertRaisesRegex = "assertRaisesRegexp"
_assertRegex = "assertRegexpMatches"
else:
_assertRaisesRegex = "assertRaisesRegex"
_assertRegex = "assertRegex"
else:
def b(s):
return s
# Workaround for standalone backslash
def u(s):
return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape")
unichr = unichr
int2byte = chr
def byte2int(bs):
return ord(bs[0])
def indexbytes(buf, i):
return ord(buf[i])
iterbytes = functools.partial(itertools.imap, ord)
import StringIO
StringIO = BytesIO = StringIO.StringIO
_assertCountEqual = "assertItemsEqual"
_assertRaisesRegex = "assertRaisesRegexp"
_assertRegex = "assertRegexpMatches"
_add_doc(b, """Byte literal""")
_add_doc(u, """Text literal""")
def assertCountEqual(self, *args, **kwargs):
return getattr(self, _assertCountEqual)(*args, **kwargs)
def assertRaisesRegex(self, *args, **kwargs):
return getattr(self, _assertRaisesRegex)(*args, **kwargs)
def assertRegex(self, *args, **kwargs):
return getattr(self, _assertRegex)(*args, **kwargs)
if PY3:
exec_ = getattr(moves.builtins, "exec")
def reraise(tp, value, tb=None):
if value is None:
value = tp()
if value.__traceback__ is not tb:
raise value.with_traceback(tb)
raise value
else:
def exec_(_code_, _globs_=None, _locs_=None):
"""Execute code in a namespace."""
if _globs_ is None:
frame = sys._getframe(1)
_globs_ = frame.f_globals
if _locs_ is None:
_locs_ = frame.f_locals
del frame
elif _locs_ is None:
_locs_ = _globs_
exec("""exec _code_ in _globs_, _locs_""")
exec_("""def reraise(tp, value, tb=None):
raise tp, value, tb
""")
if sys.version_info[:2] == (3, 2):
exec_("""def raise_from(value, from_value):
if from_value is None:
raise value
raise value from from_value
""")
elif sys.version_info[:2] > (3, 2):
exec_("""def raise_from(value, from_value):
raise value from from_value
""")
else:
def raise_from(value, from_value):
raise value
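# Illustrative sketch (editor's addition, not part of six): re-raising a
# caught exception with its original traceback in a version-neutral way.
# The helper name _demo_reraise is hypothetical.
def _demo_reraise():
    try:
        raise ValueError("original failure")
    except ValueError:
        tp, value, tb = sys.exc_info()
        # On Python 3 this resolves to value.with_traceback(tb); on Python 2
        # it compiles to the three-argument raise statement.
        reraise(tp, value, tb)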
print_ = getattr(moves.builtins, "print", None)
if print_ is None:
def print_(*args, **kwargs):
"""The new-style print function for Python 2.4 and 2.5."""
fp = kwargs.pop("file", sys.stdout)
if fp is None:
return
def write(data):
if not isinstance(data, basestring):
data = str(data)
# If the file has an encoding, encode unicode with it.
if (isinstance(fp, file) and
isinstance(data, unicode) and
fp.encoding is not None):
errors = getattr(fp, "errors", None)
if errors is None:
errors = "strict"
data = data.encode(fp.encoding, errors)
fp.write(data)
want_unicode = False
sep = kwargs.pop("sep", None)
if sep is not None:
if isinstance(sep, unicode):
want_unicode = True
elif not isinstance(sep, str):
raise TypeError("sep must be None or a string")
end = kwargs.pop("end", None)
if end is not None:
if isinstance(end, unicode):
want_unicode = True
elif not isinstance(end, str):
raise TypeError("end must be None or a string")
if kwargs:
raise TypeError("invalid keyword arguments to print()")
if not want_unicode:
for arg in args:
if isinstance(arg, unicode):
want_unicode = True
break
if want_unicode:
newline = unicode("\n")
space = unicode(" ")
else:
newline = "\n"
space = " "
if sep is None:
sep = space
if end is None:
end = newline
for i, arg in enumerate(args):
if i:
write(sep)
write(arg)
write(end)
if sys.version_info[:2] < (3, 3):
_print = print_
def print_(*args, **kwargs):
fp = kwargs.get("file", sys.stdout)
flush = kwargs.pop("flush", False)
_print(*args, **kwargs)
if flush and fp is not None:
fp.flush()
_add_doc(reraise, """Reraise an exception.""")
if sys.version_info[0:2] < (3, 4):
def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS,
updated=functools.WRAPPER_UPDATES):
def wrapper(f):
f = functools.wraps(wrapped, assigned, updated)(f)
f.__wrapped__ = wrapped
return f
return wrapper
else:
wraps = functools.wraps
def with_metaclass(meta, *bases):
"""Create a base class with a metaclass."""
# This requires a bit of explanation: the basic idea is to make a dummy
# metaclass for one level of class instantiation that replaces itself with
# the actual metaclass.
class metaclass(meta):
def __new__(cls, name, this_bases, d):
return meta(name, bases, d)
return type.__new__(metaclass, 'temporary_class', (), {})
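# Illustrative sketch (editor's addition, not part of six): the temporary
# class returned above is replaced, at definition time, by a real class
# built with the requested metaclass. _DemoMeta/_DemoBase/_DemoClass are
# hypothetical names.
class _DemoMeta(type):
    pass
class _DemoBase(object):
    pass
class _DemoClass(with_metaclass(_DemoMeta, _DemoBase)):
    pass
assert type(_DemoClass) is _DemoMeta and issubclass(_DemoClass, _DemoBase)
del _DemoMeta, _DemoBase, _DemoClass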
def add_metaclass(metaclass):
"""Class decorator for creating a class with a metaclass."""
def wrapper(cls):
orig_vars = cls.__dict__.copy()
slots = orig_vars.get('__slots__')
if slots is not None:
if isinstance(slots, str):
slots = [slots]
for slots_var in slots:
orig_vars.pop(slots_var)
orig_vars.pop('__dict__', None)
orig_vars.pop('__weakref__', None)
return metaclass(cls.__name__, cls.__bases__, orig_vars)
return wrapper
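# Illustrative sketch (editor's addition, not part of six): the decorator
# form of the same idea; @add_metaclass rebuilds the decorated class with
# the given metaclass. _DemoMeta2/_DemoDecorated are hypothetical names.
class _DemoMeta2(type):
    pass
@add_metaclass(_DemoMeta2)
class _DemoDecorated(object):
    pass
assert type(_DemoDecorated) is _DemoMeta2
del _DemoMeta2, _DemoDecorated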
def python_2_unicode_compatible(klass):
"""
A decorator that defines __unicode__ and __str__ methods under Python 2.
Under Python 3 it does nothing.
To support Python 2 and 3 with a single code base, define a __str__ method
returning text and apply this decorator to the class.
"""
if PY2:
if '__str__' not in klass.__dict__:
raise ValueError("@python_2_unicode_compatible cannot be applied "
"to %s because it doesn't define __str__()." %
klass.__name__)
klass.__unicode__ = klass.__str__
klass.__str__ = lambda self: self.__unicode__().encode('utf-8')
return klass
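# Illustrative sketch (editor's addition, not part of six): define __str__
# once, returning text, and let the decorator supply the Python 2
# __unicode__/__str__ pair. _DemoLabel is a hypothetical name.
@python_2_unicode_compatible
class _DemoLabel(object):
    def __str__(self):
        return u("caf\u00e9")  # text on both Python versions
assert isinstance(str(_DemoLabel()), str)
del _DemoLabel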
# Complete the moves implementation.
# This code is at the end of this module to speed up module loading.
# Turn this module into a package.
__path__ = [] # required for PEP 302 and PEP 451
__package__ = __name__ # see PEP 366 @ReservedAssignment
if globals().get("__spec__") is not None:
__spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable
# Remove other six meta path importers, since they cause problems. This can
# happen if six is removed from sys.modules and then reloaded. (Setuptools does
# this for some reason.)
if sys.meta_path:
for i, importer in enumerate(sys.meta_path):
# Here's some real nastiness: Another "instance" of the six module might
# be floating around. Therefore, we can't use isinstance() to check for
# the six meta path importer, since the other six instance will have
# inserted an importer with different class.
if (type(importer).__name__ == "_SixMetaPathImporter" and
importer.name == __name__):
del sys.meta_path[i]
break
del i, importer
# Finally, add the importer to the meta path import hook.
sys.meta_path.append(_importer)
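# Illustrative usage sketch (editor's addition, not part of six): with the
# importer installed, moved names resolve lazily and identically on both
# Python versions. _demo_moves is a hypothetical helper.
def _demo_moves():
    # moves.zip is itertools.izip on Python 2 and the zip builtin on Python 3
    pairs = list(moves.zip([1, 2], ["a", "b"]))
    assert pairs == [(1, "a"), (2, "b")]
    # submodule imports are answered by _SixMetaPathImporter above, e.g.:
    #     from six.moves.urllib.parse import urlparse
    return pairs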
|
SecretImbecile/McrRaspJam
|
refs/heads/master
|
006_Picraft_advancedMinecraft/3_tntsword.py
|
2
|
# API setup
from picraft import Vector
from picraft import World, Block
def operation(world, position):
    # replace the block that was hit (block id 8, data 1 is flowing water)
    world.blocks[position] = Block(8, 1)
def main():
    # API setup
world = World()
while True:
        # get recent sword hits
hits = world.events.poll()
for hit in hits:
currenthit = hit.pos
operation(world, currenthit)
if __name__ == "__main__":
main()
|
tgoodyear/dpkt
|
refs/heads/master
|
dpkt/rpc.py
|
17
|
# $Id$
"""Remote Procedure Call."""
import struct
import dpkt
# RPC.dir
CALL = 0
REPLY = 1
# RPC.Auth.flavor
AUTH_NONE = AUTH_NULL = 0
AUTH_UNIX = 1
AUTH_SHORT = 2
AUTH_DES = 3
# RPC.Reply.stat
MSG_ACCEPTED = 0
MSG_DENIED = 1
# RPC.Reply.Accept.stat
SUCCESS = 0
PROG_UNAVAIL = 1
PROG_MISMATCH = 2
PROC_UNAVAIL = 3
GARBAGE_ARGS = 4
SYSTEM_ERR = 5
# RPC.Reply.Reject.stat
RPC_MISMATCH = 0
AUTH_ERROR = 1
class RPC(dpkt.Packet):
__hdr__ = (
('xid', 'I', 0),
('dir', 'I', CALL)
)
class Auth(dpkt.Packet):
__hdr__ = (('flavor', 'I', AUTH_NONE), )
def unpack(self, buf):
dpkt.Packet.unpack(self, buf)
n = struct.unpack('>I', self.data[:4])[0]
self.data = self.data[4:4+n]
def __len__(self):
return 8 + len(self.data)
def __str__(self):
return self.pack_hdr() + struct.pack('>I', len(self.data)) + \
str(self.data)
class Call(dpkt.Packet):
__hdr__ = (
('rpcvers', 'I', 2),
('prog', 'I', 0),
('vers', 'I', 0),
('proc', 'I', 0)
)
def unpack(self, buf):
dpkt.Packet.unpack(self, buf)
self.cred = RPC.Auth(self.data)
self.verf = RPC.Auth(self.data[len(self.cred):])
self.data = self.data[len(self.cred) + len(self.verf):]
def __len__(self):
return len(str(self)) # XXX
def __str__(self):
return dpkt.Packet.__str__(self) + \
str(getattr(self, 'cred', RPC.Auth())) + \
str(getattr(self, 'verf', RPC.Auth())) + \
str(self.data)
class Reply(dpkt.Packet):
__hdr__ = (('stat', 'I', MSG_ACCEPTED), )
class Accept(dpkt.Packet):
__hdr__ = (('stat', 'I', SUCCESS), )
def unpack(self, buf):
self.verf = RPC.Auth(buf)
buf = buf[len(self.verf):]
self.stat = struct.unpack('>I', buf[:4])[0]
if self.stat == SUCCESS:
self.data = buf[4:]
elif self.stat == PROG_MISMATCH:
self.low, self.high = struct.unpack('>II', buf[4:12])
self.data = buf[12:]
def __len__(self):
if self.stat == PROG_MISMATCH: n = 8
else: n = 0
return len(self.verf) + 4 + n + len(self.data)
def __str__(self):
if self.stat == PROG_MISMATCH:
return str(self.verf) + struct.pack('>III', self.stat,
self.low, self.high) + self.data
return str(self.verf) + dpkt.Packet.__str__(self)
class Reject(dpkt.Packet):
__hdr__ = (('stat', 'I', AUTH_ERROR), )
def unpack(self, buf):
dpkt.Packet.unpack(self, buf)
if self.stat == RPC_MISMATCH:
self.low, self.high = struct.unpack('>II', self.data[:8])
self.data = self.data[8:]
elif self.stat == AUTH_ERROR:
self.why = struct.unpack('>I', self.data[:4])[0]
self.data = self.data[4:]
def __len__(self):
if self.stat == RPC_MISMATCH: n = 8
                elif self.stat == AUTH_ERROR: n = 4
else: n = 0
return 4 + n + len(self.data)
def __str__(self):
if self.stat == RPC_MISMATCH:
return struct.pack('>III', self.stat, self.low,
self.high) + self.data
elif self.stat == AUTH_ERROR:
return struct.pack('>II', self.stat, self.why) + self.data
return dpkt.Packet.__str__(self)
def unpack(self, buf):
dpkt.Packet.unpack(self, buf)
if self.stat == MSG_ACCEPTED:
self.data = self.accept = self.Accept(self.data)
            elif self.stat == MSG_DENIED:
self.data = self.reject = self.Reject(self.data)
def unpack(self, buf):
dpkt.Packet.unpack(self, buf)
if self.dir == CALL:
self.data = self.call = self.Call(self.data)
elif self.dir == REPLY:
self.data = self.reply = self.Reply(self.data)
def unpack_xdrlist(cls, buf):
l = []
while buf:
if buf.startswith('\x00\x00\x00\x01'):
p = cls(buf[4:])
l.append(p)
buf = p.data
elif buf.startswith('\x00\x00\x00\x00'):
break
else:
raise dpkt.UnpackError, 'invalid XDR list'
return l
def pack_xdrlist(*args):
return '\x00\x00\x00\x01'.join(map(str, args)) + '\x00\x00\x00\x00'
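# Illustrative sketch (editor's addition, not part of dpkt): packing a
# minimal RPC CALL message. The program/version/procedure numbers are
# hypothetical (100003/3/0 would be the NFSv3 NULL procedure).
def _demo_pack_call():
    rpc = RPC(xid=1, dir=CALL)
    rpc.data = RPC.Call(prog=100003, vers=3, proc=0)
    return str(rpc)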
|
mzdaniel/oh-mainline
|
refs/heads/master
|
vendor/packages/Django/tests/regressiontests/introspection/tests.py
|
47
|
from django.conf import settings
from django.db import connection, DEFAULT_DB_ALIAS
from django.test import TestCase, skipUnlessDBFeature
from django.utils import functional
from models import Reporter, Article
#
# The introspection module is optional, so methods tested here might raise
# NotImplementedError. This is perfectly acceptable behavior for the backend
# in question, but the tests need to handle this without failing. Ideally we'd
# skip these tests, but until #4788 is done we'll just ignore them.
#
# The easiest way to accomplish this is to decorate every test case with a
# wrapper that ignores the exception.
#
# The metaclass is just for fun.
#
def ignore_not_implemented(func):
def _inner(*args, **kwargs):
try:
return func(*args, **kwargs)
except NotImplementedError:
return None
functional.update_wrapper(_inner, func)
return _inner
class IgnoreNotimplementedError(type):
def __new__(cls, name, bases, attrs):
        for k, v in attrs.items():
if k.startswith('test'):
attrs[k] = ignore_not_implemented(v)
return type.__new__(cls, name, bases, attrs)
class IntrospectionTests(TestCase):
__metaclass__ = IgnoreNotimplementedError
def test_table_names(self):
tl = connection.introspection.table_names()
self.assertTrue(Reporter._meta.db_table in tl,
"'%s' isn't in table_list()." % Reporter._meta.db_table)
self.assertTrue(Article._meta.db_table in tl,
"'%s' isn't in table_list()." % Article._meta.db_table)
def test_django_table_names(self):
cursor = connection.cursor()
        cursor.execute('CREATE TABLE django_ixn_test_table (id INTEGER);')
tl = connection.introspection.django_table_names()
cursor.execute("DROP TABLE django_ixn_test_table;")
        self.assertTrue('django_ixn_test_table' not in tl,
"django_table_names() returned a non-Django table")
def test_installed_models(self):
tables = [Article._meta.db_table, Reporter._meta.db_table]
models = connection.introspection.installed_models(tables)
self.assertEqual(models, set([Article, Reporter]))
def test_sequence_list(self):
sequences = connection.introspection.sequence_list()
expected = {'table': Reporter._meta.db_table, 'column': 'id'}
self.assertTrue(expected in sequences,
'Reporter sequence not found in sequence_list()')
def test_get_table_description_names(self):
cursor = connection.cursor()
desc = connection.introspection.get_table_description(cursor, Reporter._meta.db_table)
self.assertEqual([r[0] for r in desc],
[f.column for f in Reporter._meta.fields])
def test_get_table_description_types(self):
cursor = connection.cursor()
desc = connection.introspection.get_table_description(cursor, Reporter._meta.db_table)
self.assertEqual(
[datatype(r[1], r) for r in desc],
['IntegerField', 'CharField', 'CharField', 'CharField', 'BigIntegerField']
)
# Regression test for #9991 - 'real' types in postgres
@skipUnlessDBFeature('has_real_datatype')
def test_postgresql_real_type(self):
cursor = connection.cursor()
cursor.execute("CREATE TABLE django_ixn_real_test_table (number REAL);")
desc = connection.introspection.get_table_description(cursor, 'django_ixn_real_test_table')
cursor.execute('DROP TABLE django_ixn_real_test_table;')
self.assertEqual(datatype(desc[0][1], desc[0]), 'FloatField')
def test_get_relations(self):
cursor = connection.cursor()
relations = connection.introspection.get_relations(cursor, Article._meta.db_table)
# Older versions of MySQL don't have the chops to report on this stuff,
# so just skip it if no relations come back. If they do, though, we
# should test that the response is correct.
if relations:
# That's {field_index: (field_index_other_table, other_table)}
self.assertEqual(relations, {3: (0, Reporter._meta.db_table)})
def test_get_indexes(self):
cursor = connection.cursor()
indexes = connection.introspection.get_indexes(cursor, Article._meta.db_table)
self.assertEqual(indexes['reporter_id'], {'unique': False, 'primary_key': False})
def datatype(dbtype, description):
"""Helper to convert a data type into a string."""
dt = connection.introspection.get_field_type(dbtype, description)
if type(dt) is tuple:
return dt[0]
else:
return dt
|
gisce/OCB
|
refs/heads/7.0
|
addons/crm/crm_phonecall.py
|
23
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-today OpenERP SA (<http://www.openerp.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.addons.base_status.base_state import base_state
import crm
from datetime import datetime
from openerp.osv import fields, osv
from openerp.tools import DEFAULT_SERVER_DATETIME_FORMAT
from openerp.tools.translate import _
class crm_phonecall(base_state, osv.osv):
""" Model for CRM phonecalls """
_name = "crm.phonecall"
_description = "Phonecall"
_order = "id desc"
_inherit = ['mail.thread']
_columns = {
# base_state required fields
'date_action_last': fields.datetime('Last Action', readonly=1),
'date_action_next': fields.datetime('Next Action', readonly=1),
        'create_date': fields.datetime('Creation Date', readonly=True),
'section_id': fields.many2one('crm.case.section', 'Sales Team', \
            select=True, help='Sales team to which the case belongs.'),
'user_id': fields.many2one('res.users', 'Responsible'),
'partner_id': fields.many2one('res.partner', 'Contact'),
'company_id': fields.many2one('res.company', 'Company'),
'description': fields.text('Description'),
'state': fields.selection([ ('draft', 'Draft'),
('open', 'Confirmed'),
('pending', 'Not Held'),
('cancel', 'Cancelled'),
('done', 'Held'),],
string='Status', size=16, readonly=True, track_visibility='onchange',
            help='The status is set to \'Todo\' when a case is created.\
If the case is in progress the status is set to \'Open\'.\
When the call is over, the status is set to \'Held\'.\
If the call needs to be done then the status is set to \'Not Held\'.'),
'email_from': fields.char('Email', size=128, help="These people will receive email."),
'date_open': fields.datetime('Opened', readonly=True),
# phonecall fields
'name': fields.char('Call Summary', size=64, required=True),
'active': fields.boolean('Active', required=False),
'duration': fields.float('Duration', help="Duration in Minutes"),
'categ_id': fields.many2one('crm.case.categ', 'Category', \
domain="['|',('section_id','=',section_id),('section_id','=',False),\
('object_id.model', '=', 'crm.phonecall')]"),
'partner_phone': fields.char('Phone', size=32),
'partner_mobile': fields.char('Mobile', size=32),
'priority': fields.selection(crm.AVAILABLE_PRIORITIES, 'Priority'),
'date_closed': fields.datetime('Closed', readonly=True),
'date': fields.datetime('Date'),
        'opportunity_id': fields.many2one('crm.lead', 'Lead/Opportunity'),
}
def _get_default_state(self, cr, uid, context=None):
if context and context.get('default_state', False):
return context.get('default_state')
return 'open'
_defaults = {
'date': fields.datetime.now,
'priority': crm.AVAILABLE_PRIORITIES[2][0],
'state': _get_default_state,
        'user_id': lambda self, cr, uid, ctx: uid,
'active': 1
}
def case_close(self, cr, uid, ids, context=None):
""" Overrides close for crm_case for setting duration """
res = True
for phone in self.browse(cr, uid, ids, context=context):
phone_id = phone.id
data = {}
            if phone.duration <= 0:
duration = datetime.now() - datetime.strptime(phone.date, DEFAULT_SERVER_DATETIME_FORMAT)
data['duration'] = duration.seconds/float(60)
res = super(crm_phonecall, self).case_close(cr, uid, [phone_id], context=context)
self.write(cr, uid, [phone_id], data, context=context)
return res
def case_reset(self, cr, uid, ids, context=None):
"""Resets case as Todo
"""
res = super(crm_phonecall, self).case_reset(cr, uid, ids, context)
self.write(cr, uid, ids, {'duration': 0.0, 'state':'open'}, context=context)
return res
def schedule_another_phonecall(self, cr, uid, ids, schedule_time, call_summary, \
user_id=False, section_id=False, categ_id=False, action='schedule', context=None):
"""
        action: ('schedule', 'Schedule a call') or ('log', 'Log a call')
"""
model_data = self.pool.get('ir.model.data')
phonecall_dict = {}
if not categ_id:
try:
res_id = model_data._get_id(cr, uid, 'crm', 'categ_phone2')
categ_id = model_data.browse(cr, uid, res_id, context=context).res_id
except ValueError:
pass
for call in self.browse(cr, uid, ids, context=context):
if not section_id:
section_id = call.section_id and call.section_id.id or False
if not user_id:
user_id = call.user_id and call.user_id.id or False
if not schedule_time:
schedule_time = call.date
vals = {
'name' : call_summary,
'user_id' : user_id or False,
'categ_id' : categ_id or False,
'description' : call.description or False,
'date' : schedule_time,
'section_id' : section_id or False,
'partner_id': call.partner_id and call.partner_id.id or False,
'partner_phone' : call.partner_phone,
'partner_mobile' : call.partner_mobile,
'priority': call.priority,
'opportunity_id': call.opportunity_id and call.opportunity_id.id or False,
}
new_id = self.create(cr, uid, vals, context=context)
if action == 'log':
self.case_close(cr, uid, [new_id])
phonecall_dict[call.id] = new_id
return phonecall_dict
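    # Illustrative usage sketch (editor's addition): scheduling a follow-up
    # call versus logging one that already happened; cr, uid, ids,
    # schedule_time and context come from the calling environment.
    #
    #     self.schedule_another_phonecall(cr, uid, ids, schedule_time,
    #         'Follow-up call', action='schedule', context=context)
    #     self.schedule_another_phonecall(cr, uid, ids, False,
    #         'Call logged', action='log', context=context)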
def _call_create_partner(self, cr, uid, phonecall, context=None):
partner = self.pool.get('res.partner')
partner_id = partner.create(cr, uid, {
'name': phonecall.name,
'user_id': phonecall.user_id.id,
'comment': phonecall.description,
'address': []
})
return partner_id
def on_change_opportunity(self, cr, uid, ids, opportunity_id, context=None):
values = {}
if opportunity_id:
opportunity = self.pool.get('crm.lead').browse(cr, uid, opportunity_id, context=context)
values = {
'section_id' : opportunity.section_id and opportunity.section_id.id or False,
'partner_phone' : opportunity.phone,
'partner_mobile' : opportunity.mobile,
'partner_id' : opportunity.partner_id and opportunity.partner_id.id or False,
}
return {'value' : values}
def _call_set_partner(self, cr, uid, ids, partner_id, context=None):
write_res = self.write(cr, uid, ids, {'partner_id' : partner_id}, context=context)
self._call_set_partner_send_note(cr, uid, ids, context)
return write_res
def _call_create_partner_address(self, cr, uid, phonecall, partner_id, context=None):
address = self.pool.get('res.partner')
return address.create(cr, uid, {
'parent_id': partner_id,
'name': phonecall.name,
'phone': phonecall.partner_phone,
})
def handle_partner_assignation(self, cr, uid, ids, action='create', partner_id=False, context=None):
"""
Handle partner assignation during a lead conversion.
if action is 'create', create new partner with contact and assign lead to new partner_id.
otherwise assign lead to specified partner_id
:param list ids: phonecalls ids to process
:param string action: what has to be done regarding partners (create it, assign an existing one, or nothing)
:param int partner_id: partner to assign if any
        :return dict: dictionary organized as follows: {lead_id: partner_assigned_id}
"""
#TODO this is a duplication of the handle_partner_assignation method of crm_lead
partner_ids = {}
# If a partner_id is given, force this partner for all elements
force_partner_id = partner_id
for call in self.browse(cr, uid, ids, context=context):
# If the action is set to 'create' and no partner_id is set, create a new one
if action == 'create':
partner_id = force_partner_id or self._call_create_partner(cr, uid, call, context=context)
self._call_create_partner_address(cr, uid, call, partner_id, context=context)
self._call_set_partner(cr, uid, [call.id], partner_id, context=context)
partner_ids[call.id] = partner_id
return partner_ids
def redirect_phonecall_view(self, cr, uid, phonecall_id, context=None):
model_data = self.pool.get('ir.model.data')
# Select the view
tree_view = model_data.get_object_reference(cr, uid, 'crm', 'crm_case_phone_tree_view')
form_view = model_data.get_object_reference(cr, uid, 'crm', 'crm_case_phone_form_view')
search_view = model_data.get_object_reference(cr, uid, 'crm', 'view_crm_case_phonecalls_filter')
value = {
'name': _('Phone Call'),
'view_type': 'form',
'view_mode': 'tree,form',
'res_model': 'crm.phonecall',
'res_id' : int(phonecall_id),
'views': [(form_view and form_view[1] or False, 'form'), (tree_view and tree_view[1] or False, 'tree'), (False, 'calendar')],
'type': 'ir.actions.act_window',
'search_view_id': search_view and search_view[1] or False,
}
return value
def convert_opportunity(self, cr, uid, ids, opportunity_summary=False, partner_id=False, planned_revenue=0.0, probability=0.0, context=None):
partner = self.pool.get('res.partner')
opportunity = self.pool.get('crm.lead')
opportunity_dict = {}
default_contact = False
for call in self.browse(cr, uid, ids, context=context):
if not partner_id:
partner_id = call.partner_id and call.partner_id.id or False
if partner_id:
address_id = partner.address_get(cr, uid, [partner_id])['default']
if address_id:
default_contact = partner.browse(cr, uid, address_id, context=context)
opportunity_id = opportunity.create(cr, uid, {
'name': opportunity_summary or call.name,
'planned_revenue': planned_revenue,
'probability': probability,
'partner_id': partner_id or False,
'mobile': default_contact and default_contact.mobile,
'section_id': call.section_id and call.section_id.id or False,
'description': call.description or False,
'priority': call.priority,
'type': 'opportunity',
'phone': call.partner_phone or False,
'email_from': default_contact and default_contact.email,
})
vals = {
'partner_id': partner_id,
'opportunity_id' : opportunity_id,
}
self.write(cr, uid, [call.id], vals)
self.case_close(cr, uid, [call.id])
opportunity.case_open(cr, uid, [opportunity_id])
opportunity_dict[call.id] = opportunity_id
return opportunity_dict
def action_make_meeting(self, cr, uid, ids, context=None):
"""
Open meeting's calendar view to schedule a meeting on current phonecall.
:return dict: dictionary value for created meeting view
"""
phonecall = self.browse(cr, uid, ids[0], context)
res = self.pool.get('ir.actions.act_window').for_xml_id(cr, uid, 'base_calendar', 'action_crm_meeting', context)
res['context'] = {
'default_phonecall_id': phonecall.id,
'default_partner_id': phonecall.partner_id and phonecall.partner_id.id or False,
'default_user_id': uid,
'default_email_from': phonecall.email_from,
'default_state': 'open',
'default_name': phonecall.name,
}
return res
def action_button_convert2opportunity(self, cr, uid, ids, context=None):
"""
Convert a phonecall into an opp and then redirect to the opp view.
:param list ids: list of calls ids to convert (typically contains a single id)
:return dict: containing view information
"""
if len(ids) != 1:
raise osv.except_osv(_('Warning!'),_('It\'s only possible to convert one phonecall at a time.'))
opportunity_dict = self.convert_opportunity(cr, uid, ids, context=context)
return self.pool.get('crm.lead').redirect_opportunity_view(cr, uid, opportunity_dict[ids[0]], context)
# ----------------------------------------
# OpenChatter
# ----------------------------------------
def _call_set_partner_send_note(self, cr, uid, ids, context=None):
return self.message_post(cr, uid, ids, body=_("Partner has been <b>created</b>."), context=context)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
CharlesMcKinnis/stack-recon
|
refs/heads/master
|
stack-recon/mysql/connector/fabric/connection.py
|
17
|
# MySQL Connector/Python - MySQL driver written in Python.
# Copyright (c) 2013, 2015, Oracle and/or its affiliates. All rights reserved.
# MySQL Connector/Python is licensed under the terms of the GPLv2
# <http://www.gnu.org/licenses/old-licenses/gpl-2.0.html>, like most
# MySQL Connectors. There are special exceptions to the terms and
# conditions of the GPLv2 as it is applied to this software, see the
# FOSS License Exception
# <http://www.mysql.com/about/legal/licensing/foss-exception.html>.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
"""Implementing communication with MySQL Fabric"""
import sys
import datetime
import time
import uuid
from base64 import b16decode
from bisect import bisect
from hashlib import md5
import logging
import socket
import collections
# pylint: disable=F0401,E0611
try:
from xmlrpclib import Fault, ServerProxy, Transport
import urllib2
from httplib import BadStatusLine
except ImportError:
# Python v3
from xmlrpc.client import Fault, ServerProxy, Transport
import urllib.request as urllib2
from http.client import BadStatusLine
if sys.version_info[0] == 2:
try:
from httplib import HTTPSConnection
except ImportError:
HAVE_SSL = False
else:
HAVE_SSL = True
else:
try:
from http.client import HTTPSConnection
except ImportError:
HAVE_SSL = False
else:
HAVE_SSL = True
# pylint: enable=F0401,E0611
import mysql.connector
from ..connection import MySQLConnection
from ..conversion import MySQLConverter
from ..pooling import MySQLConnectionPool
from ..errors import (
Error, InterfaceError, NotSupportedError, MySQLFabricError, InternalError,
DatabaseError
)
from ..cursor import (
MySQLCursor, MySQLCursorBuffered,
MySQLCursorRaw, MySQLCursorBufferedRaw
)
from .. import errorcode
from . import FabricMySQLServer, FabricShard
from .caching import FabricCache
from .balancing import WeightedRoundRobin
from .. import version
from ..catch23 import PY2, isunicode, UNICODE_TYPES
RESET_CACHE_ON_ERROR = (
errorcode.CR_SERVER_LOST,
errorcode.ER_OPTION_PREVENTS_STATEMENT,
)
# Errors to be reported to Fabric
REPORT_ERRORS = (
errorcode.CR_SERVER_LOST,
errorcode.CR_SERVER_GONE_ERROR,
errorcode.CR_CONN_HOST_ERROR,
errorcode.CR_CONNECTION_ERROR,
errorcode.CR_IPSOCK_ERROR,
)
REPORT_ERRORS_EXTRA = []
DEFAULT_FABRIC_PROTOCOL = 'xmlrpc'
MYSQL_FABRIC_PORT = {
'xmlrpc': 32274,
'mysql': 32275
}
FABRICS = {}
# For attempting to connect with Fabric
_CNX_ATTEMPT_DELAY = 1
_CNX_ATTEMPT_MAX = 3
_GETCNX_ATTEMPT_DELAY = 1
_GETCNX_ATTEMPT_MAX = 3
MODE_READONLY = 1
MODE_WRITEONLY = 2
MODE_READWRITE = 3
STATUS_FAULTY = 0
STATUS_SPARE = 1
STATUS_SECONDARY = 2
STATUS_PRIMARY = 3
SCOPE_GLOBAL = 'GLOBAL'
SCOPE_LOCAL = 'LOCAL'
_SERVER_STATUS_FAULTY = 'FAULTY'
_CNX_PROPERTIES = {
# name: ((valid_types), description, default)
'group': ((str,), "Name of group of servers", None),
'key': (tuple([int, str, datetime.datetime,
datetime.date] + list(UNICODE_TYPES)),
"Sharding key", None),
'tables': ((tuple, list), "List of tables in query", None),
'mode': ((int,), "Read-Only, Write-Only or Read-Write", MODE_READWRITE),
'shard': ((str,), "Identity of the shard for direct connection", None),
'mapping': ((str,), "", None),
'scope': ((str,), "GLOBAL for accessing Global Group, or LOCAL",
SCOPE_LOCAL),
'attempts': ((int,), "Attempts for getting connection",
_GETCNX_ATTEMPT_MAX),
'attempt_delay': ((int,), "Seconds to wait between each attempt",
_GETCNX_ATTEMPT_DELAY),
}
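# Illustrative usage sketch (editor's addition): the properties above are
# what callers set on a Fabric-backed connection before running queries,
# e.g. via set_property() (group name and key below are hypothetical):
#
#     cnx.set_property(group='my_group', mode=MODE_READWRITE)
#     cnx.set_property(tables=['employees.employees'], key=1985,
#                      mode=MODE_READONLY)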
_LOGGER = logging.getLogger('myconnpy-fabric')
class MySQLRPCProtocol(object):
"""Class using MySQL protocol to query Fabric.
"""
def __init__(self, fabric, host, port, connect_attempts, connect_delay):
self.converter = MySQLConverter()
self.handler = FabricMySQLConnection(fabric, host, port,
connect_attempts,
connect_delay)
self.handler.connect()
def _process_params_dict(self, params):
"""Process query parameters given as dictionary"""
try:
res = []
for key, value in list(params.items()):
conv = value
conv = self.converter.to_mysql(conv)
conv = self.converter.escape(conv)
conv = self.converter.quote(conv)
res.append("{0}={1}".format(key, str(conv)))
except Exception as err:
raise mysql.connector.errors.ProgrammingError(
"Failed processing pyformat-parameters; %s" % err)
else:
return res
def _process_params(self, params):
"""Process query parameters."""
try:
res = params
res = [self.converter.to_mysql(i) for i in res]
res = [self.converter.escape(i) for i in res]
res = [self.converter.quote(i) for i in res]
res = [str(i) for i in res]
except Exception as err:
raise mysql.connector.errors.ProgrammingError(
"Failed processing format-parameters; %s" % err)
else:
return tuple(res)
def _execute_cmd(self, stmt, params=None):
"""Executes the given query
Returns a list containing response from Fabric
"""
if not params:
params = ()
cur = self.handler.connection.cursor(dictionary=True)
results = []
for res in cur.execute(stmt, params, multi=True):
results.append(res.fetchall())
return results
def create_params(self, *args, **kwargs):
"""Process arguments to create query parameters.
"""
params = []
if args:
args = self._process_params(args)
params.extend(args)
if kwargs:
kwargs = self._process_params_dict(kwargs)
params.extend(kwargs)
params = ', '.join(params)
return params
def execute(self, group, command, *args, **kwargs):
"""Executes the given command with MySQL protocol
Executes the given command with the given parameters.
        Returns an iterator to navigate through the result set
returned by Fabric
"""
params = self.create_params(*args, **kwargs)
cmd = "CALL {0}.{1}({2})".format(group, command, params)
fab_set = None
try:
data = self._execute_cmd(cmd)
fab_set = FabricMySQLSet(data)
except (Fault, socket.error, InterfaceError) as exc:
msg = "Executing {group}.{command} failed: {error}".format(
group=group, command=command, error=str(exc))
raise InterfaceError(msg)
return fab_set
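    # Illustrative sketch (editor's addition): with hypothetical arguments,
    # execute('dump', 'servers', 'token') quotes the parameter and runs
    #     CALL dump.servers('token')
    # wrapping the rows Fabric returns in a FabricMySQLSet.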
class XMLRPCProtocol(object):
"""Class using XML-RPC protocol to query Fabric.
"""
def __init__(self, fabric, host, port, connect_attempts, connect_delay):
self.handler = FabricXMLRPCConnection(fabric, host, port,
connect_attempts, connect_delay)
self.handler.connect()
def execute(self, group, command, *args, **kwargs):
"""Executes the given command with XML-RPC protocol
Executes the given command with the given parameters
        Returns an iterator to navigate through the result set
returned by Fabric
"""
try:
grp = getattr(self.handler.proxy, group)
cmd = getattr(grp, command)
except AttributeError as exc:
raise ValueError("{group}.{command} not available ({err})".format(
group=group, command=command, err=str(exc)))
fab_set = None
try:
data = cmd(*args, **kwargs)
fab_set = FabricSet(data)
except (Fault, socket.error, InterfaceError) as exc:
msg = "Executing {group}.{command} failed: {error}".format(
group=group, command=command, error=str(exc))
raise InterfaceError(msg)
return fab_set
class FabricMySQLResponse(object):
    Class used to parse a response received from Fabric over the MySQL protocol.
"""
def __init__(self, data):
info = data[0][0]
(fabric_uuid_str, ttl, error) = (info['fabric_uuid'], info['ttl'],
info['message'])
if error:
raise InterfaceError(error)
self.fabric_uuid_str = fabric_uuid_str
self.ttl = ttl
self.coded_rows = data[1]
class FabricMySQLSet(FabricMySQLResponse):
"""Iterator to navigate through the result set returned from Fabric
with MySQL Protocol.
"""
def __init__(self, data):
        Initialize the FabricMySQLSet object.
"""
super(FabricMySQLSet, self).__init__(data)
self.__names = self.coded_rows[0].keys()
self.__rows = self.coded_rows
self.__result = collections.namedtuple('ResultSet', self.__names)
def rowcount(self):
"""The number of rows in the result set.
"""
return len(self.__rows)
def rows(self):
"""Iterate over the rows of the result set.
Each row is a named tuple.
"""
for row in self.__rows:
yield self.__result(**row)
def row(self, index):
"""Indexing method for a row.
Each row is a named tuple.
"""
return self.__result(**self.__rows[index])
class FabricResponse(object):
    Class used to parse a response received from Fabric.
"""
SUPPORTED_VERSION = 1
def __init__(self, data):
"""Initialize the FabricResponse object
"""
(format_version, fabric_uuid_str, ttl, error, rows) = data
if error:
raise InterfaceError(error)
if format_version != FabricResponse.SUPPORTED_VERSION:
raise InterfaceError(
"Supported protocol has version {sversion}. Got a response "
"from MySQL Fabric with version {gversion}.".format(
sversion=FabricResponse.SUPPORTED_VERSION,
gversion=format_version)
)
self.format_version = format_version
self.fabric_uuid_str = fabric_uuid_str
self.ttl = ttl
self.coded_rows = rows
class FabricSet(FabricResponse):
"""Iterator to navigate through the result set returned from Fabric
"""
def __init__(self, data):
"""Initialize the FabricSet object.
"""
super(FabricSet, self).__init__(data)
assert len(self.coded_rows) == 1
self.__names = self.coded_rows[0]['info']['names']
self.__rows = self.coded_rows[0]['rows']
assert all(len(self.__names) == len(row) for row in self.__rows) or \
len(self.__rows) == 0
self.__result = collections.namedtuple('ResultSet', self.__names)
def rowcount(self):
"""The number of rows in the result set.
"""
return len(self.__rows)
def rows(self):
"""Iterate over the rows of the result set.
Each row is a named tuple.
"""
for row in self.__rows:
yield self.__result(*row)
def row(self, index):
"""Indexing method for a row.
Each row is a named tuple.
"""
return self.__result(*self.__rows[index])
def extra_failure_report(error_codes):
"""Add MySQL error to be reported to Fabric
This function adds error_codes to the error list to be reported to
Fabric. To reset the custom error reporting list, pass None or empty
list.
The error_codes argument can be either a MySQL error code defined in the
errorcode module, or list of error codes.
Raises AttributeError when code is not an int.
"""
global REPORT_ERRORS_EXTRA # pylint: disable=W0603
    if not error_codes:
        REPORT_ERRORS_EXTRA = []
        return
if not isinstance(error_codes, (list, tuple)):
error_codes = [error_codes]
for code in error_codes:
if not isinstance(code, int) or not (code >= 1000 and code < 3000):
raise AttributeError("Unknown or invalid error code.")
REPORT_ERRORS_EXTRA.append(code)
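# Illustrative usage sketch (editor's addition): report lock wait timeouts
# (MySQL error 1205) to Fabric in addition to the defaults, then reset the
# custom list:
#
#     extra_failure_report(1205)
#     extra_failure_report([])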
def _fabric_xmlrpc_uri(host, port):
"""Create an XMLRPC URI for connecting to Fabric
This method will create a URI using the host and TCP/IP
port suitable for connecting to a MySQL Fabric instance.
Returns a URI.
"""
return 'http://{host}:{port}'.format(host=host, port=port)
def _fabric_server_uuid(host, port):
"""Create a UUID using host and port"""
return uuid.uuid3(uuid.NAMESPACE_URL, _fabric_xmlrpc_uri(host, port))
def _validate_ssl_args(ssl_ca, ssl_key, ssl_cert):
"""Validate the SSL argument.
    Raises AttributeError if a required argument is not set.
Returns dict or None.
"""
if not HAVE_SSL:
raise InterfaceError("Python does not support SSL")
if any([ssl_ca, ssl_key, ssl_cert]):
if not ssl_ca:
raise AttributeError("Missing ssl_ca argument.")
if (ssl_key or ssl_cert) and not (ssl_key and ssl_cert):
raise AttributeError(
"ssl_key and ssl_cert need to be both "
"specified, or neither."
)
return {
'ca': ssl_ca,
'key': ssl_key,
'cert': ssl_cert,
}
return None
if HAVE_SSL:
class FabricHTTPSHandler(urllib2.HTTPSHandler):
"""Class handling HTTPS connections"""
def __init__(self, ssl_config): #pylint: disable=E1002
"""Initialize"""
if PY2:
urllib2.HTTPSHandler.__init__(self)
else:
super().__init__() # pylint: disable=W0104
self._ssl_config = ssl_config
def https_open(self, req):
"""Open HTTPS connection"""
return self.do_open(self.get_https_connection, req)
def get_https_connection(self, host, timeout=300):
"""Returns a HTTPSConnection"""
return HTTPSConnection(
host,
key_file=self._ssl_config['key'],
cert_file=self._ssl_config['cert']
)
class FabricTransport(Transport):
"""Custom XMLRPC Transport for Fabric"""
user_agent = 'MySQL Connector Python/{0}'.format(version.VERSION_TEXT)
def __init__(self, username, password, #pylint: disable=E1002
verbose=0, use_datetime=False, https_handler=None):
"""Initialize"""
if PY2:
Transport.__init__(self, use_datetime=False)
else:
super().__init__(use_datetime=False)
        self._username = username
        self._password = password
        self._use_datetime = use_datetime
        self.verbose = verbose
self._handlers = []
if self._username and self._password:
self._passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
self._auth_handler = urllib2.HTTPDigestAuthHandler(self._passmgr)
else:
self._auth_handler = None
self._passmgr = None
if https_handler:
self._handlers.append(https_handler)
self._scheme = 'https'
else:
self._scheme = 'http'
if self._auth_handler:
self._handlers.append(self._auth_handler)
def request(self, host, handler, request_body, verbose=0):
"""Send XMLRPC request"""
uri = '{scheme}://{host}{handler}'.format(scheme=self._scheme,
host=host, handler=handler)
if self._passmgr:
self._passmgr.add_password(None, uri, self._username,
self._password)
if self.verbose:
_LOGGER.debug("FabricTransport: {0}".format(uri))
opener = urllib2.build_opener(*self._handlers)
headers = {
'Content-Type': 'text/xml',
'User-Agent': self.user_agent,
}
req = urllib2.Request(uri, request_body, headers=headers)
try:
return self.parse_response(opener.open(req))
except (urllib2.URLError, urllib2.HTTPError) as exc:
try:
code = -1
if exc.code == 400:
reason = 'Permission denied'
code = exc.code
else:
reason = exc.reason
msg = "{reason} ({code})".format(reason=reason, code=code)
except AttributeError:
if 'SSL' in str(exc):
msg = "SSL error"
else:
msg = str(exc)
raise InterfaceError("Connection with Fabric failed: " + msg)
except BadStatusLine:
raise InterfaceError("Connection with Fabric failed: check SSL")
class Fabric(object):
"""Class managing MySQL Fabric instances"""
def __init__(self, host, username=None, password=None,
port=None,
connect_attempts=_CNX_ATTEMPT_MAX,
connect_delay=_CNX_ATTEMPT_DELAY,
report_errors=False,
ssl_ca=None, ssl_key=None, ssl_cert=None, user=None,
protocol=DEFAULT_FABRIC_PROTOCOL):
"""Initialize"""
if protocol == 'xmlrpc':
self._protocol_class = XMLRPCProtocol
elif protocol == 'mysql':
self._protocol_class = MySQLRPCProtocol
else:
raise InterfaceError(
"Protocol not supported by MySQL Fabric,"
" was '{}'".format(protocol))
if not port:
port = MYSQL_FABRIC_PORT[protocol]
self._fabric_instances = {}
self._fabric_uuid = None
self._ttl = 1 * 60 # one minute by default
self._version_token = None
self._connect_attempts = connect_attempts
self._connect_delay = connect_delay
self._cache = FabricCache()
self._group_balancers = {}
self._init_host = host
self._init_port = port
self._ssl = _validate_ssl_args(ssl_ca, ssl_key, ssl_cert)
self._report_errors = report_errors
self._protocol = protocol
if user and username:
raise ValueError("can not specify both user and username")
self._username = user or username
self._password = password
@property
def username(self):
"""Return username used to authenticate with Fabric"""
return self._username
@property
def password(self):
"""Return password used to authenticate with Fabric"""
return self._password
@property
def ssl_config(self):
"""Return the SSL configuration"""
return self._ssl
def seed(self, host=None, port=None):
"""Get MySQL Fabric Instances
        This method uses host and port to connect to a MySQL Fabric server
        and gets all the instances managing the same metadata.
Raises InterfaceError on errors.
"""
host = host or self._init_host
port = port or self._init_port
fabinst = self._protocol_class(self, host, port,
connect_attempts=self._connect_attempts,
connect_delay=self._connect_delay)
fabric_uuid, fabric_version, ttl, fabrics = self.get_fabric_servers(
fabinst)
if not fabrics:
# Raise, something went wrong.
raise InterfaceError("Failed getting list of Fabric servers")
if self._version_token == fabric_version:
return
_LOGGER.info(
"Loading Fabric configuration version {version}".format(
version=fabric_version))
self._fabric_uuid = fabric_uuid
self._version_token = fabric_version
if ttl > 0:
self._ttl = ttl
# Update the Fabric servers
for fabric in fabrics:
inst = self._protocol_class(self, fabric['host'], fabric['port'],
connect_attempts=self._connect_attempts,
connect_delay=self._connect_delay)
inst_uuid = inst.handler.uuid
if inst_uuid not in self._fabric_instances:
self._fabric_instances[inst_uuid] = inst
_LOGGER.debug(
"Added new Fabric server {host}:{port}".format(
host=inst.handler.host, port=inst.handler.port))
def reset_cache(self, group=None):
"""Reset cached information
        This method refreshes the cache for the given group, or destroys
        all cached information when no group is given.
"""
if group:
_LOGGER.debug("Resetting cache for group '{group}'".format(
group=group))
self.get_group_servers(group, use_cache=False)
else:
_LOGGER.debug("Resetting cache")
self._cache = FabricCache()
def get_instance(self):
"""Get a MySQL Fabric Instance
This method will get the next available MySQL Fabric Instance.
Raises InterfaceError when no instance is available or connected.
"""
nxt = 0
errmsg = "No MySQL Fabric instance available"
if not self._fabric_instances:
raise InterfaceError(errmsg + " (not seeded?)")
if PY2:
instance_list = self._fabric_instances.keys()
inst = self._fabric_instances[instance_list[nxt]]
else:
inst = self._fabric_instances[list(self._fabric_instances)[nxt]]
if not inst.handler.is_connected:
inst.handler.connect()
return inst
def report_failure(self, server_uuid, errno):
"""Report failure to Fabric
        This method reports a failure of the MySQL server identified by
        server_uuid, so that Fabric can update its status.
"""
if not self._report_errors:
return
errno = int(errno)
current_host = socket.getfqdn()
if errno in REPORT_ERRORS or errno in REPORT_ERRORS_EXTRA:
_LOGGER.debug("Reporting error %d of server %s", errno,
server_uuid)
inst = self.get_instance()
try:
data = inst.execute('threat', 'report_failure',
server_uuid, current_host, errno)
FabricResponse(data)
except (Fault, socket.error) as exc:
_LOGGER.debug("Failed reporting server to Fabric (%s)",
str(exc))
# Not requiring further action
def get_fabric_servers(self, fabric_cnx=None):
"""Get all MySQL Fabric instances
        This method looks up the other MySQL Fabric instances which use
        the same metadata. The servers are described by dictionaries with
        connection information such as host and port. For example:
        [
            {'host': 'fabric_prod_1.example.com', 'port': 32274 },
            {'host': 'fabric_prod_2.example.com', 'port': 32274 },
        ]
        Returns a tuple (fabric_uuid, fabric_version, ttl, servers), where
        servers is a list of dictionaries as shown above.
"""
inst = fabric_cnx or self.get_instance()
result = []
err_msg = "Looking up Fabric servers failed using {host}:{port}: {err}"
try:
fset = inst.execute('dump', 'fabric_nodes',
"protocol." + self._protocol)
for row in fset.rows():
result.append({'host': row.host, 'port': row.port})
except (Fault, socket.error) as exc:
msg = err_msg.format(err=str(exc), host=inst.handler.host,
port=inst.handler.port)
raise InterfaceError(msg)
except (TypeError, AttributeError) as exc:
msg = err_msg.format(
err="No Fabric server available ({0})".format(exc),
host=inst.handler.host, port=inst.handler.port)
raise InterfaceError(msg)
try:
fabric_uuid = uuid.UUID(fset.fabric_uuid_str)
except TypeError:
fabric_uuid = uuid.uuid4()
fabric_version = 0
return fabric_uuid, fabric_version, fset.ttl, result
def get_group_servers(self, group, use_cache=True):
"""Get all MySQL servers in a group
        This method returns information about all MySQL servers that are
        part of the given high-availability group. When use_cache is set to
True, the cached information will be used.
Raises InterfaceError on errors.
Returns list of FabricMySQLServer objects.
"""
# Get group information from cache
if use_cache:
entry = self._cache.group_search(group)
if entry:
# Cache group information
return entry.servers
inst = self.get_instance()
result = []
try:
fset = inst.execute('dump', 'servers', self._version_token, group)
except (Fault, socket.error) as exc:
msg = ("Looking up MySQL servers failed for group "
"{group}: {error}").format(error=str(exc), group=group)
raise InterfaceError(msg)
weights = []
for row in fset.rows():
# We make sure, when using local groups, we skip the global group
if row.group_id == group:
mysqlserver = FabricMySQLServer(
row.server_uuid, row.group_id, row.host, row.port,
row.mode, row.status, row.weight
)
result.append(mysqlserver)
if mysqlserver.status == STATUS_SECONDARY:
weights.append((mysqlserver.uuid, mysqlserver.weight))
self._cache.cache_group(group, result)
if weights:
self._group_balancers[group] = WeightedRoundRobin(*weights)
return result
def get_group_server(self, group, mode=None, status=None):
"""Get a MySQL server from a group
The method uses MySQL Fabric to get the correct MySQL server
for the specified group. You can specify mode or status, but
not both.
The mode argument will decide whether the primary or a secondary
server is returned. When no secondary server is available, the
primary is returned.
Status is used to force getting either a primary or a secondary.
The returned tuple contains host, port and uuid.
Raises InterfaceError on errors; ValueError when both mode
and status are given.
Returns a FabricMySQLServer object.
"""
if mode and status:
            raise ValueError(
                "Either mode or status can be given, not both")
errmsg = "No MySQL server available for group '{group}'"
servers = self.get_group_servers(group, use_cache=True)
if not servers:
raise InterfaceError(errmsg.format(group=group))
# Get the Master and return list (host, port, UUID)
primary = None
secondary = []
for server in servers:
if server.status == STATUS_SECONDARY:
secondary.append(server)
elif server.status == STATUS_PRIMARY:
primary = server
if mode in (MODE_WRITEONLY, MODE_READWRITE) or status == STATUS_PRIMARY:
if not primary:
self.reset_cache(group=group)
raise InterfaceError((errmsg + ' {query}={value}').format(
query='status' if status else 'mode',
group=group,
value=status or mode))
return primary
# Return primary if no secondary is available
if not secondary and primary:
return primary
elif group in self._group_balancers:
next_secondary = self._group_balancers[group].get_next()[0]
for mysqlserver in secondary:
if next_secondary == mysqlserver.uuid:
return mysqlserver
self.reset_cache(group=group)
raise InterfaceError(errmsg.format(group=group, mode=mode))
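    # Illustrative usage (group name made up): ask for the primary when
    # writing, or let Fabric pick a weighted secondary for reading:
    #
    #   writer = fabric.get_group_server('mygroup', mode=MODE_READWRITE)
    #   reader = fabric.get_group_server('mygroup', mode=MODE_READONLY)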
def get_sharding_information(self, tables=None, database=None):
"""Get and cache the sharding information for given tables
        This method fetches sharding information from MySQL Fabric
        and caches the result. The tables argument must be a sequence
        of sequences, each containing a table name and, optionally, its
        database name. If no database name is given for a table, the
        value of the database argument is used instead.
        Examples:
        tables = [('salary',), ('employees',)]
        get_sharding_information(tables, database='employees')
        tables = [('salary', 'employees'), ('employees', 'employees')]
get_sharding_information(tables)
Raises InterfaceError on errors; ValueError when something is wrong
with the tables argument.
"""
if not isinstance(tables, (list, tuple)):
raise ValueError("tables should be a sequence")
patterns = []
for table in tables:
if not isinstance(table, (list, tuple)) and not database:
raise ValueError("No database specified for table {0}".format(
table))
if isinstance(table, (list, tuple)):
dbase = table[1]
tbl = table[0]
else:
dbase = database
tbl = table
patterns.append("{0}.{1}".format(dbase, tbl))
inst = self.get_instance()
try:
fset = inst.execute(
'dump', 'sharding_information', self._version_token,
','.join(patterns)
)
except (Fault, socket.error) as exc:
msg = "Looking up sharding information failed : {error}".format(
error=str(exc))
raise InterfaceError(msg)
for row in fset.rows():
self._cache.sharding_cache_table(
FabricShard(row.schema_name, row.table_name, row.column_name,
row.lower_bound, row.shard_id, row.type_name,
row.group_id, row.global_group)
)
def get_shard_server(self, tables, key, scope=SCOPE_LOCAL, mode=None):
"""Get MySQL server information for a particular shard
Raises DatabaseError when the table is unknown or when tables are not
        on the same shard. ValueError is raised when there is a problem
        with the method's arguments. InterfaceError is raised for other errors.
"""
if not isinstance(tables, (list, tuple)):
raise ValueError("tables should be a sequence")
groups = []
for dbobj in tables:
try:
database, table = dbobj.split('.')
except ValueError:
raise ValueError(
"tables should be given as <database>.<table>, "
"was {0}".format(dbobj))
entry = self._cache.sharding_search(database, table)
if not entry:
self.get_sharding_information((table,), database)
entry = self._cache.sharding_search(database, table)
if not entry:
raise DatabaseError(
errno=errorcode.ER_BAD_TABLE_ERROR,
msg="Unknown table '{database}.{table}'".format(
database=database, table=table))
if scope == 'GLOBAL':
return self.get_group_server(entry.global_group, mode=mode)
if entry.shard_type == 'RANGE':
try:
range_key = int(key)
except ValueError:
raise ValueError("Key must be an integer for RANGE")
partitions = entry.keys
index = partitions[bisect(partitions, range_key) - 1]
partition = entry.partitioning[index]
elif entry.shard_type == 'RANGE_DATETIME':
if not isinstance(key, (datetime.date, datetime.datetime)):
raise ValueError(
"Key must be datetime.date or datetime.datetime for "
"RANGE_DATETIME")
index = None
for partkey in entry.keys_reversed:
if key >= partkey:
index = partkey
break
try:
partition = entry.partitioning[index]
except KeyError:
raise ValueError("Key invalid; was '{0}'".format(key))
elif entry.shard_type == 'RANGE_STRING':
if not isunicode(key):
raise ValueError("Key must be a unicode value")
index = None
for partkey in entry.keys_reversed:
if key >= partkey:
index = partkey
break
try:
partition = entry.partitioning[index]
except KeyError:
raise ValueError("Key invalid; was '{0}'".format(key))
elif entry.shard_type == 'HASH':
md5key = md5(str(key))
index = entry.keys_reversed[-1]
for partkey in entry.keys_reversed:
if md5key.digest() >= b16decode(partkey):
index = partkey
break
partition = entry.partitioning[index]
else:
raise InterfaceError(
"Unsupported sharding type {0}".format(entry.shard_type))
groups.append(partition['group'])
if not all(group == groups[0] for group in groups):
raise DatabaseError(
"Tables are located in different shards.")
return self.get_group_server(groups[0], mode=mode)
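    # Illustrative usage (table and key made up): servers are resolved per
    # shard, so all given tables must map to the same group:
    #
    #   server = fabric.get_shard_server(['employees.employees'], key=1982,
    #                                    mode=MODE_READONLY)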
def execute(self, group, command, *args, **kwargs):
"""Execute a Fabric command from given group
This method will execute the given Fabric command from the given group
using the given arguments. It returns an instance of FabricSet.
Raises ValueError when group.command is not valid and raises
InterfaceError when an error occurs while executing.
Returns FabricSet.
"""
inst = self.get_instance()
return inst.execute(group, command, *args, **kwargs)
class FabricConnection(object):
"""Base Class for a class holding a connection to a MySQL Fabric server
"""
def __init__(self, fabric, host,
port=MYSQL_FABRIC_PORT[DEFAULT_FABRIC_PROTOCOL],
connect_attempts=_CNX_ATTEMPT_MAX,
connect_delay=_CNX_ATTEMPT_DELAY):
"""Initialize"""
if not isinstance(fabric, Fabric):
raise ValueError("fabric must be instance of class Fabric")
self._fabric = fabric
self._host = host
self._port = port
self._connect_attempts = connect_attempts
self._connect_delay = connect_delay
@property
def host(self):
"""Returns server IP or name of current Fabric connection"""
return self._host
@property
def port(self):
"""Returns TCP/IP port of current Fabric connection"""
return self._port
@property
def uuid(self):
"""Returns UUID of the Fabric server we are connected with"""
return _fabric_server_uuid(self._host, self._port)
def connect(self):
"""Connect with MySQL Fabric"""
pass
@property
def is_connected(self):
"""Check whether connection with Fabric is valid
Return True if we can still interact with the Fabric server; False
        if not.
Returns True or False.
"""
pass
def __repr__(self):
return "{class_}(host={host}, port={port})".format(
class_=self.__class__,
host=self._host,
port=self._port,
)
class FabricXMLRPCConnection(FabricConnection):
"""Class holding a connection to a MySQL Fabric server through XML-RPC"""
def __init__(self, fabric, host, port=MYSQL_FABRIC_PORT['xmlrpc'],
connect_attempts=_CNX_ATTEMPT_MAX,
connect_delay=_CNX_ATTEMPT_DELAY):
"""Initialize"""
super(FabricXMLRPCConnection, self).__init__(
fabric, host, port, connect_attempts, connect_delay
)
self._proxy = None
@property
def proxy(self):
"""Returns the XMLRPC Proxy of current Fabric connection"""
return self._proxy
@property
def uri(self):
"""Returns the XMLRPC URI for current Fabric connection"""
return _fabric_xmlrpc_uri(self._host, self._port)
def _xmlrpc_get_proxy(self):
"""Return the XMLRPC server proxy instance to MySQL Fabric
This method tries to get a valid connection to a MySQL Fabric
server.
Returns a XMLRPC ServerProxy instance.
"""
if self.is_connected:
return self._proxy
attempts = self._connect_attempts
delay = self._connect_delay
proxy = None
counter = 0
while counter != attempts:
counter += 1
try:
if self._fabric.ssl_config:
if not HAVE_SSL:
raise InterfaceError("Python does not support SSL")
https_handler = FabricHTTPSHandler(self._fabric.ssl_config)
else:
https_handler = None
transport = FabricTransport(self._fabric.username,
self._fabric.password,
verbose=0,
https_handler=https_handler)
proxy = ServerProxy(self.uri, transport=transport, verbose=0)
proxy._some_nonexisting_method() # pylint: disable=W0212
except Fault:
# We are actually connected
return proxy
except socket.error as exc:
if counter == attempts:
raise InterfaceError(
"Connection to MySQL Fabric failed ({0})".format(exc))
_LOGGER.debug(
"Retrying {host}:{port}, attempts {counter}".format(
host=self.host, port=self.port, counter=counter))
if delay > 0:
time.sleep(delay)
def connect(self):
"""Connect with MySQL Fabric"""
self._proxy = self._xmlrpc_get_proxy()
@property
def is_connected(self):
"""Check whether connection with Fabric is valid
Return True if we can still interact with the Fabric server; False
        if not.
Returns True or False.
"""
try:
self._proxy._some_nonexisting_method() # pylint: disable=W0212
except Fault:
return True
except (TypeError, AttributeError):
return False
else:
return False
class FabricMySQLConnection(FabricConnection):
"""
Class holding a connection to a MySQL Fabric server through MySQL protocol
"""
def __init__(self, fabric, host, port=MYSQL_FABRIC_PORT['mysql'],
connect_attempts=_CNX_ATTEMPT_MAX,
connect_delay=_CNX_ATTEMPT_DELAY):
"""Initialize"""
super(FabricMySQLConnection, self).__init__(
fabric, host, port=port,
connect_attempts=connect_attempts, connect_delay=connect_delay
)
self._connection = None
@property
def connection(self):
"""Returns the MySQL RPC Connection to Fabric"""
return self._connection
def _get_connection(self):
"""Return the connection instance to MySQL Fabric through MySQL RPC
This method tries to get a valid connection to a MySQL Fabric
server.
Returns a MySQLConnection instance.
"""
if self.is_connected:
return self._connection
attempts = self._connect_attempts
delay = self._connect_delay
counter = 0
while counter != attempts:
counter += 1
try:
dbconfig = {
'host': self._host,
'port': self._port,
'user': self._fabric.username,
'password': self._fabric.password
}
if self._fabric.ssl_config:
if not HAVE_SSL:
raise InterfaceError("Python does not support SSL")
dbconfig['ssl_key'] = self._fabric.ssl_config['key']
dbconfig['ssl_cert'] = self._fabric.ssl_config['cert']
return MySQLConnection(**dbconfig)
except AttributeError as exc:
if counter == attempts:
raise InterfaceError(
"Connection to MySQL Fabric failed ({0})".format(exc))
_LOGGER.debug(
"Retrying {host}:{port}, attempts {counter}".format(
host=self.host, port=self.port, counter=counter))
if delay > 0:
time.sleep(delay)
def connect(self):
"""Connect with MySQL Fabric"""
self._connection = self._get_connection()
@property
def is_connected(self):
"""Check whether connection with Fabric is valid
Return True if we can still interact with the Fabric server; False
        if not.
Returns True or False.
"""
try:
return self._connection.is_connected()
except AttributeError:
return False
class MySQLFabricConnection(object):
"""Connection to a MySQL server through MySQL Fabric"""
def __init__(self, **kwargs):
"""Initialize"""
self._mysql_cnx = None
self._fabric = None
self._fabric_mysql_server = None
self._mysql_config = None
self._cnx_properties = {}
self.reset_properties()
        # Validity of the fabric argument is checked in store_config()
if 'fabric' not in kwargs:
raise ValueError("Configuration parameters for Fabric missing")
if kwargs:
self.store_config(**kwargs)
def __getattr__(self, attr):
"""Return the return value of the MySQLConnection instance"""
if attr.startswith('cmd_'):
raise NotSupportedError(
"Calling {attr} is not supported for connections managed by "
"MySQL Fabric.".format(attr=attr))
return getattr(self._mysql_cnx, attr)
@property
def fabric_uuid(self):
"""Returns the Fabric UUID of the MySQL server"""
if self._fabric_mysql_server:
return self._fabric_mysql_server.uuid
return None
@property
def properties(self):
"""Returns connection properties"""
return self._cnx_properties
def reset_cache(self, group=None):
"""Reset cache for this connection's group"""
if not group and self._fabric_mysql_server:
group = self._fabric_mysql_server.group
self._fabric.reset_cache(group=group)
def is_connected(self):
"""Check whether we are connected with the MySQL server
Returns True or False
"""
return self._mysql_cnx is not None
def reset_properties(self):
"""Resets the connection properties
This method can be called to reset the connection properties to
their default values.
"""
self._cnx_properties = {}
for key, attr in _CNX_PROPERTIES.items():
self._cnx_properties[key] = attr[2]
def set_property(self, **properties):
"""Set one or more connection properties
Arguments to the set_property() method will be used as properties.
They are validated against the _CNX_PROPERTIES constant.
        Raises ValueError when an invalid property is being set; TypeError
        is raised when the type of the value is not correct.
To unset a property, set it to None.
"""
try:
self.close()
except Error:
# We tried, but it's OK when we fail.
pass
props = self._cnx_properties
for name, value in properties.items():
if name not in _CNX_PROPERTIES:
                raise ValueError(
                    "Invalid connection property {0}".format(name))
elif value and not isinstance(value, _CNX_PROPERTIES[name][0]):
valid_types_str = ' or '.join(
[atype.__name__ for atype in _CNX_PROPERTIES[name][0]])
raise TypeError(
"{name} is not valid, excepted {typename}".format(
name=name, typename=valid_types_str))
if (name == 'group' and value and
(props['key'] or props['tables'])):
raise ValueError(
"'group' property can not be set when 'key' or "
"'tables' are set")
elif name in ('key', 'tables') and value and props['group']:
raise ValueError(
"'key' and 'tables' property can not be "
"set together with 'group'")
elif name == 'scope' and value not in (SCOPE_LOCAL, SCOPE_GLOBAL):
raise ValueError("Invalid value for 'scope'")
elif name == 'mode' and value not in (
MODE_READWRITE, MODE_READONLY):
raise ValueError("Invalid value for 'mode'")
if value is None:
# Set the default
props[name] = _CNX_PROPERTIES[name][2]
else:
props[name] = value
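    # Illustrative usage (group name made up): pin the connection to one
    # high-availability group, then restore the default with None:
    #
    #   cnx.set_property(group='mygroup', mode=MODE_READONLY)
    #   cnx.set_property(group=None)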
def _configure_fabric(self, config):
"""Configure the Fabric connection
        The config argument can be either a dictionary containing the
        necessary information to set up the connection, or an instance
        of Fabric.
"""
if isinstance(config, Fabric):
self._fabric = config
else:
required_keys = ['host']
for required_key in required_keys:
if required_key not in config:
raise ValueError(
"Missing configuration parameter '{parameter}' "
"for fabric".format(parameter=required_key))
host = config['host']
protocol = config.get('protocol', DEFAULT_FABRIC_PROTOCOL)
try:
port = config.get('port', MYSQL_FABRIC_PORT[protocol])
except KeyError:
raise InterfaceError(
"{0} protocol is not available".format(protocol))
server_uuid = _fabric_server_uuid(host, port)
try:
self._fabric = FABRICS[server_uuid]
except KeyError:
_LOGGER.debug("New Fabric connection")
self._fabric = Fabric(**config)
self._fabric.seed()
# Cache the new connection
FABRICS[server_uuid] = self._fabric
def store_config(self, **kwargs):
"""Store configuration of MySQL connections to use with Fabric
        The configuration found in the dictionary kwargs is used
        when instantiating a MySQLConnection object. The host and port
        entries are used to connect to MySQL Fabric.
        Raises ValueError when the Fabric configuration parameter
        is not correct or missing; AttributeError is raised when
        a parameter is not valid.
"""
config = kwargs.copy()
# Configure the Fabric connection
if 'fabric' in config:
self._configure_fabric(config['fabric'])
del config['fabric']
if 'unix_socket' in config:
_LOGGER.warning("MySQL Fabric does not use UNIX sockets.")
config['unix_socket'] = None
# Try to use the configuration
test_config = config.copy()
if 'pool_name' in test_config:
del test_config['pool_name']
if 'pool_size' in test_config:
del test_config['pool_size']
if 'pool_reset_session' in test_config:
del test_config['pool_reset_session']
try:
pool = MySQLConnectionPool(pool_name=str(uuid.uuid4()))
pool.set_config(**test_config)
except AttributeError as err:
raise AttributeError(
"Connection configuration not valid: {0}".format(err))
self._mysql_config = config
def _connect(self):
"""Get a MySQL server based on properties and connect
        This method gets a MySQL server from MySQL Fabric using the
        properties already set through the set_property() method. The
        attempts and attempt_delay properties control how many connection
        attempts are made and the delay between them.
Raises ValueError when there are problems with arguments or
properties; InterfaceError on connectivity errors.
"""
if self.is_connected():
return
props = self._cnx_properties
attempts = props['attempts']
attempt_delay = props['attempt_delay']
dbconfig = self._mysql_config.copy()
counter = 0
while counter != attempts:
counter += 1
try:
group = None
if props['tables']:
if props['scope'] == 'LOCAL' and not props['key']:
raise ValueError(
"Scope 'LOCAL' needs key property to be set")
mysqlserver = self._fabric.get_shard_server(
props['tables'], props['key'],
scope=props['scope'],
mode=props['mode'])
elif props['group']:
group = props['group']
mysqlserver = self._fabric.get_group_server(
group, mode=props['mode'])
else:
raise ValueError(
"Missing group or key and tables properties")
except InterfaceError as exc:
_LOGGER.debug(
"Trying to get MySQL server (attempt {0}; {1})".format(
counter, exc))
if counter == attempts:
raise InterfaceError("Error getting connection: {0}".format(
exc))
if attempt_delay > 0:
_LOGGER.debug("Waiting {0}".format(attempt_delay))
time.sleep(attempt_delay)
continue
# Make sure we do not change the stored configuration
dbconfig['host'] = mysqlserver.host
dbconfig['port'] = mysqlserver.port
try:
self._mysql_cnx = mysql.connector.connect(**dbconfig)
except Error as exc:
if counter == attempts:
self.reset_cache(mysqlserver.group)
self._fabric.report_failure(mysqlserver.uuid, exc.errno)
raise InterfaceError(
"Reported faulty server to Fabric ({0})".format(exc))
if attempt_delay > 0:
time.sleep(attempt_delay)
continue
else:
self._fabric_mysql_server = mysqlserver
break
def disconnect(self):
"""Close connection to MySQL server"""
try:
self.rollback()
self._mysql_cnx.close()
except AttributeError:
pass # There was no connection
except Error:
raise
finally:
self._mysql_cnx = None
self._fabric_mysql_server = None
close = disconnect
def cursor(self, buffered=None, raw=None, prepared=None, cursor_class=None):
"""Instantiates and returns a cursor
This method is similar to MySQLConnection.cursor() except that
it checks whether the connection is available and raises
an InterfaceError when not.
cursor_class argument is not supported and will raise a
NotSupportedError exception.
Returns a MySQLCursor or subclass.
"""
self._connect()
if cursor_class:
raise NotSupportedError(
"Custom cursors not supported with MySQL Fabric")
if prepared:
raise NotSupportedError(
"Prepared Statements are not supported with MySQL Fabric")
if self._unread_result is True:
raise InternalError("Unread result found.")
buffered = buffered or self._buffered
raw = raw or self._raw
cursor_type = 0
if buffered is True:
cursor_type |= 1
if raw is True:
cursor_type |= 2
        types = (
            MySQLCursor,             # 0: not buffered, not raw
            MySQLCursorBuffered,     # 1: buffered
            MySQLCursorRaw,          # 2: raw
            MySQLCursorBufferedRaw,  # 3: buffered and raw
        )
return (types[cursor_type])(self)
def handle_mysql_error(self, exc):
"""Handles MySQL errors
This method takes a mysql.connector.errors.Error exception
and checks the error code. Based on the value, it takes
certain actions such as clearing the cache.
"""
if exc.errno in RESET_CACHE_ON_ERROR:
self.reset_cache()
self.disconnect()
raise MySQLFabricError(
"Temporary error ({error}); "
"retry transaction".format(error=str(exc)))
raise exc
def commit(self):
"""Commit current transaction
Raises whatever MySQLConnection.commit() raises, but
raises MySQLFabricError when MySQL returns error
ER_OPTION_PREVENTS_STATEMENT.
"""
try:
self._mysql_cnx.commit()
except Error as exc:
self.handle_mysql_error(exc)
def rollback(self):
"""Rollback current transaction
Raises whatever MySQLConnection.rollback() raises, but
raises MySQLFabricError when MySQL returns error
ER_OPTION_PREVENTS_STATEMENT.
"""
try:
self._mysql_cnx.rollback()
except Error as exc:
self.handle_mysql_error(exc)
def cmd_query(self, statement):
"""Send a statement to the MySQL server
Raises whatever MySQLConnection.cmd_query() raises, but
raises MySQLFabricError when MySQL returns error
ER_OPTION_PREVENTS_STATEMENT.
Returns a dictionary.
"""
self._connect()
try:
return self._mysql_cnx.cmd_query(statement)
except Error as exc:
self.handle_mysql_error(exc)
def cmd_query_iter(self, statements):
"""Send one or more statements to the MySQL server
Raises whatever MySQLConnection.cmd_query_iter() raises, but
raises MySQLFabricError when MySQL returns error
ER_OPTION_PREVENTS_STATEMENT.
        Returns a generator.
"""
self._connect()
try:
return self._mysql_cnx.cmd_query_iter(statements)
except Error as exc:
self.handle_mysql_error(exc)
|
jaruba/chromium.src
|
refs/heads/nw12
|
tools/telemetry/telemetry/page/record_wpr.py
|
11
|
# Copyright 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import logging
import sys
from telemetry import benchmark
from telemetry.core import browser_options
from telemetry.core import discover
from telemetry.core import util
from telemetry.core import wpr_modes
from telemetry.page import page_set
from telemetry.page import page_test
from telemetry.page import test_expectations
from telemetry.results import results_options
from telemetry.user_story import user_story_runner
_ACTION_NAMES = [
'RunPageInteractions',
'RunNavigateSteps',
]
class RecorderPageTest(page_test.PageTest): # pylint: disable=W0223
def __init__(self):
super(RecorderPageTest, self).__init__()
self.page_test = None
def CanRunForPage(self, page):
return page.url.startswith('http')
def WillStartBrowser(self, browser):
if self.page_test:
self.page_test.WillStartBrowser(browser)
def DidStartBrowser(self, browser):
if self.page_test:
self.page_test.DidStartBrowser(browser)
def WillNavigateToPage(self, page, tab):
"""Override to ensure all resources are fetched from network."""
tab.ClearCache(force=False)
if self.page_test:
self.page_test.WillNavigateToPage(page, tab)
def DidNavigateToPage(self, page, tab):
if self.page_test:
self.page_test.DidNavigateToPage(page, tab)
def WillRunActions(self, page, tab):
if self.page_test:
self.page_test.WillRunActions(page, tab)
def DidRunActions(self, page, tab):
if self.page_test:
self.page_test.DidRunActions(page, tab)
def CleanUpAfterPage(self, page, tab):
if self.page_test:
self.page_test.CleanUpAfterPage(page, tab)
def ValidateAndMeasurePage(self, page, tab, results):
if self.page_test:
self.page_test.ValidateAndMeasurePage(page, tab, results)
def RunPage(self, page, tab, results):
tab.WaitForDocumentReadyStateToBeComplete()
util.WaitFor(tab.HasReachedQuiescence, 30)
if self.page_test:
self._action_name_to_run = self.page_test.action_name_to_run
self.page_test.RunPage(page, tab, results)
return
should_reload = False
# Run the actions on the page for all available measurements.
for action_name in _ACTION_NAMES:
# Skip this action if it is not defined
if not hasattr(page, action_name):
continue
# Reload the page between actions to start with a clean slate.
if should_reload:
self.RunNavigateSteps(page, tab)
self._action_name_to_run = action_name
super(RecorderPageTest, self).RunPage(page, tab, results)
should_reload = True
def RunNavigateSteps(self, page, tab):
if self.page_test:
self.page_test.RunNavigateSteps(page, tab)
else:
super(RecorderPageTest, self).RunNavigateSteps(page, tab)
def _MaybeGetInstanceOfClass(target, base_dir, cls):
if isinstance(target, cls):
return target
classes = discover.DiscoverClasses(base_dir, base_dir, cls,
index_by_class_name=True)
return classes[target]() if target in classes else None
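# A sketch of how targets are resolved ('my_benchmark' is made up): an
# instance of cls passes through unchanged, a string is looked up among the
# classes discovered under base_dir:
#
#   bench = _MaybeGetInstanceOfClass('my_benchmark', base_dir,
#                                    benchmark.Benchmark)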
class WprRecorder(object):
def __init__(self, base_dir, target, args=None):
self._record_page_test = RecorderPageTest()
self._options = self._CreateOptions()
self._benchmark = _MaybeGetInstanceOfClass(target, base_dir,
benchmark.Benchmark)
if self._benchmark is not None:
self._record_page_test.page_test = self._benchmark.test()
self._parser = self._options.CreateParser(usage='%prog <PageSet|Benchmark>')
self._AddCommandLineArgs()
self._ParseArgs(args)
self._ProcessCommandLineArgs()
if self._options.page_set_base_dir:
page_set_base_dir = self._options.page_set_base_dir
else:
page_set_base_dir = base_dir
self._page_set = self._GetPageSet(page_set_base_dir, target)
@property
def options(self):
return self._options
def _CreateOptions(self):
options = browser_options.BrowserFinderOptions()
options.browser_options.wpr_mode = wpr_modes.WPR_RECORD
options.browser_options.no_proxy_server = True
return options
def CreateResults(self):
if self._benchmark is not None:
benchmark_metadata = self._benchmark.GetMetadata()
else:
benchmark_metadata = benchmark.BenchmarkMetadata('record_wpr')
return results_options.CreateResults(benchmark_metadata, self._options)
def _AddCommandLineArgs(self):
self._parser.add_option('--page-set-base-dir', action='store',
type='string')
user_story_runner.AddCommandLineArgs(self._parser)
if self._benchmark is not None:
self._benchmark.AddCommandLineArgs(self._parser)
self._benchmark.SetArgumentDefaults(self._parser)
self._parser.add_option('--upload', action='store_true')
self._SetArgumentDefaults()
def _SetArgumentDefaults(self):
self._parser.set_defaults(**{'output_formats': ['none']})
def _ParseArgs(self, args=None):
args_to_parse = sys.argv[1:] if args is None else args
self._parser.parse_args(args_to_parse)
def _ProcessCommandLineArgs(self):
user_story_runner.ProcessCommandLineArgs(self._parser, self._options)
if self._benchmark is not None:
self._benchmark.ProcessCommandLineArgs(self._parser, self._options)
def _GetPageSet(self, base_dir, target):
if self._benchmark is not None:
return self._benchmark.CreatePageSet(self._options)
ps = _MaybeGetInstanceOfClass(target, base_dir, page_set.PageSet)
if ps is None:
self._parser.print_usage()
sys.exit(1)
return ps
def Record(self, results):
assert self._page_set.wpr_archive_info, (
'Pageset archive_data_file path must be specified.')
self._page_set.wpr_archive_info.AddNewTemporaryRecording()
self._record_page_test.CustomizeBrowserOptions(self._options)
user_story_runner.Run(self._record_page_test, self._page_set,
test_expectations.TestExpectations(), self._options, results)
def HandleResults(self, results, upload_to_cloud_storage):
if results.failures or results.skipped_values:
logging.warning('Some pages failed and/or were skipped. The recording '
'has not been updated for these pages.')
results.PrintSummary()
self._page_set.wpr_archive_info.AddRecordedUserStories(
results.pages_that_succeeded,
upload_to_cloud_storage)
def Main(base_dir):
quick_args = []
upload_to_cloud_storage = False
for a in sys.argv[1:]:
if not a.startswith('-'):
quick_args.append(a)
elif a == '--upload':
upload_to_cloud_storage = True
if len(quick_args) != 1:
print >> sys.stderr, 'Usage: record_wpr <PageSet|Benchmark> [--upload]\n'
sys.exit(1)
target = quick_args.pop()
wpr_recorder = WprRecorder(base_dir, target)
results = wpr_recorder.CreateResults()
wpr_recorder.Record(results)
wpr_recorder.HandleResults(results, upload_to_cloud_storage)
return min(255, len(results.failures))
|
Bounder/xhtml2pdf
|
refs/heads/master
|
demo/tgpisa/tgpisa/__init__.py
|
12133432
| |
itbabu/django-filer
|
refs/heads/develop
|
filer/fields/__init__.py
|
12133432
| |
nolanliou/tensorflow
|
refs/heads/master
|
tensorflow/python/estimator/canned/optimizers.py
|
73
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Methods related to optimizers used in canned_estimators."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import six
from tensorflow.python.training import adagrad
from tensorflow.python.training import adam
from tensorflow.python.training import ftrl
from tensorflow.python.training import gradient_descent
from tensorflow.python.training import optimizer as optimizer_lib
from tensorflow.python.training import rmsprop
_OPTIMIZER_CLS_NAMES = {
'Adagrad': adagrad.AdagradOptimizer,
'Adam': adam.AdamOptimizer,
'Ftrl': ftrl.FtrlOptimizer,
'RMSProp': rmsprop.RMSPropOptimizer,
'SGD': gradient_descent.GradientDescentOptimizer,
}
def get_optimizer_instance(opt, learning_rate=None):
"""Returns an optimizer instance.
Supports the following types for the given `opt`:
* An `Optimizer` instance: Returns the given `opt`.
* A string: Creates an `Optimizer` subclass with the given `learning_rate`.
Supported strings:
* 'Adagrad': Returns an `AdagradOptimizer`.
* 'Adam': Returns an `AdamOptimizer`.
* 'Ftrl': Returns an `FtrlOptimizer`.
* 'RMSProp': Returns an `RMSPropOptimizer`.
* 'SGD': Returns a `GradientDescentOptimizer`.
Args:
opt: An `Optimizer` instance, or string, as discussed above.
learning_rate: A float. Only used if `opt` is a string.
Returns:
An `Optimizer` instance.
Raises:
ValueError: If `opt` is an unsupported string.
ValueError: If `opt` is a supported string but `learning_rate` was not
specified.
ValueError: If `opt` is none of the above types.
"""
if isinstance(opt, six.string_types):
if opt in six.iterkeys(_OPTIMIZER_CLS_NAMES):
if not learning_rate:
raise ValueError('learning_rate must be specified when opt is string.')
return _OPTIMIZER_CLS_NAMES[opt](learning_rate=learning_rate)
raise ValueError(
'Unsupported optimizer name: {}. Supported names are: {}'.format(
opt, tuple(sorted(six.iterkeys(_OPTIMIZER_CLS_NAMES)))))
if not isinstance(opt, optimizer_lib.Optimizer):
raise ValueError(
'The given object is not an Optimizer instance. Given: {}'.format(opt))
return opt
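# Illustrative usage: both calls below yield an AdagradOptimizer with the
# same learning rate:
#
#   opt = get_optimizer_instance('Adagrad', learning_rate=0.1)
#   opt = get_optimizer_instance(adagrad.AdagradOptimizer(learning_rate=0.1))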
|
tumi8/sKnock
|
refs/heads/master
|
server/benchmarks/__init__.py
|
1
|
__all__ = ['ap_firewall']
|
macosforge/ccs-calendarserver
|
refs/heads/master
|
txdav/caldav/datastore/scheduling/test/test_implicit.py
|
1
|
##
# Copyright (c) 2005-2017 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
from pycalendar.datetime import DateTime
from pycalendar.timezone import Timezone
from txweb2 import responsecode
from txweb2.http import HTTPError
from twisted.internet import reactor
from twisted.internet.defer import succeed, inlineCallbacks, returnValue
from twisted.internet.task import deferLater
from twisted.trial.unittest import TestCase
from twistedcaldav.config import config
from twistedcaldav.ical import Component
from twistedcaldav.timezones import TimezoneCache
from txdav.caldav.datastore.scheduling.cuaddress import LocalCalendarUser
from txdav.caldav.datastore.scheduling.implicit import ImplicitScheduler
from txdav.caldav.datastore.scheduling.scheduler import ScheduleResponseQueue
from txdav.caldav.icalendarstore import AttendeeAllowedError, \
ComponentUpdateState
from txdav.caldav.datastore.sql import CalendarObject
from txdav.common.datastore.test.util import CommonCommonTests, populateCalendarsFrom
from twext.enterprise.jobs.jobitem import JobItem
from twext.python.clsprop import classproperty
import hashlib
import sys
class FakeScheduler(object):
"""
    A fake CalDAVScheduler that does nothing except track which recipients
    messages were sent to.
"""
def __init__(self, recipients):
self.recipients = recipients
def doSchedulingViaPUT(self, originator, recipients, calendar, internal_request=False, suppress_refresh=False):
self.recipients.extend(recipients)
return succeed(ScheduleResponseQueue("FAKE", responsecode.OK))
class Implicit(CommonCommonTests, TestCase):
"""
iCalendar support tests
"""
@inlineCallbacks
def setUp(self):
yield super(Implicit, self).setUp()
yield self.buildStoreAndDirectory()
@inlineCallbacks
def test_removed_attendees(self):
data = (
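            # A sketch of the shared layout of each entry below (inferred
            # from the entries themselves):
            #   (title, calendar-before, calendar-after,
            #    expected removed (attendee, recurrence-id) pairs)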
(
"#1.1 Simple component, no change",
"""BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890
DTSTART:20080601T120000Z
DTEND:20080601T130000Z
ORGANIZER;CN="User 01":mailto:user01@example.com
ATTENDEE:mailto:user01@example.com
ATTENDEE:mailto:user02@example.com
END:VEVENT
END:VCALENDAR
""",
"""BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890
DTSTART:20080601T120000Z
DTEND:20080601T130000Z
ORGANIZER;CN="User 01":mailto:user01@example.com
ATTENDEE:mailto:user01@example.com
ATTENDEE:mailto:user02@example.com
END:VEVENT
END:VCALENDAR
""",
(),
),
(
"#1.2 Simple component, one removal",
"""BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890
DTSTART:20080601T120000Z
DTEND:20080601T130000Z
ORGANIZER;CN="User 01":mailto:user01@example.com
ATTENDEE:mailto:user01@example.com
ATTENDEE:mailto:user02@example.com
END:VEVENT
END:VCALENDAR
""",
"""BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890
DTSTART:20080601T120000Z
DTEND:20080601T130000Z
ORGANIZER;CN="User 01":mailto:user01@example.com
ATTENDEE:mailto:user01@example.com
END:VEVENT
END:VCALENDAR
""",
(("mailto:user02@example.com", None),),
),
(
"#1.3 Simple component, two removals",
"""BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890
DTSTART:20080601T120000Z
DTEND:20080601T130000Z
ORGANIZER;CN="User 01":mailto:user01@example.com
ATTENDEE:mailto:user01@example.com
ATTENDEE:mailto:user02@example.com
ATTENDEE:mailto:user03@example.com
END:VEVENT
END:VCALENDAR
""",
"""BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890
DTSTART:20080601T120000Z
DTEND:20080601T130000Z
ORGANIZER;CN="User 01":mailto:user01@example.com
ATTENDEE:mailto:user01@example.com
END:VEVENT
END:VCALENDAR
""",
(
("mailto:user02@example.com", None),
("mailto:user03@example.com", None),
),
),
(
"#2.1 Simple recurring component, two removals",
"""BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890
DTSTART:20080601T120000Z
DTEND:20080601T130000Z
ORGANIZER;CN="User 01":mailto:user01@example.com
ATTENDEE:mailto:user01@example.com
ATTENDEE:mailto:user02@example.com
ATTENDEE:mailto:user03@example.com
RRULE:FREQ=MONTHLY
END:VEVENT
END:VCALENDAR
""",
"""BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890
DTSTART:20080601T120000Z
DTEND:20080601T130000Z
ORGANIZER;CN="User 01":mailto:user01@example.com
ATTENDEE:mailto:user01@example.com
RRULE:FREQ=MONTHLY
END:VEVENT
END:VCALENDAR
""",
(
("mailto:user02@example.com", None),
("mailto:user03@example.com", None),
),
),
(
"#2.2 Simple recurring component, add exdate",
"""BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890
DTSTART:20080601T120000Z
DTEND:20080601T130000Z
ORGANIZER;CN="User 01":mailto:user01@example.com
ATTENDEE:mailto:user01@example.com
ATTENDEE:mailto:user02@example.com
ATTENDEE:mailto:user03@example.com
RRULE:FREQ=MONTHLY
END:VEVENT
END:VCALENDAR
""",
"""BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890
DTSTART:20080601T120000Z
DTEND:20080601T130000Z
ORGANIZER;CN="User 01":mailto:user01@example.com
ATTENDEE:mailto:user01@example.com
ATTENDEE:mailto:user02@example.com
ATTENDEE:mailto:user03@example.com
RRULE:FREQ=MONTHLY
EXDATE:20080801T120000Z
END:VEVENT
END:VCALENDAR
""",
(
("mailto:user01@example.com", DateTime(2008, 8, 1, 12, 0, 0, tzid=Timezone.UTCTimezone)),
("mailto:user02@example.com", DateTime(2008, 8, 1, 12, 0, 0, tzid=Timezone.UTCTimezone)),
("mailto:user03@example.com", DateTime(2008, 8, 1, 12, 0, 0, tzid=Timezone.UTCTimezone)),
),
),
(
"#2.3 Simple recurring component, add multiple comma exdates",
"""BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890
DTSTART:20080601T120000Z
DTEND:20080601T130000Z
ORGANIZER;CN="User 01":mailto:user01@example.com
ATTENDEE:mailto:user01@example.com
ATTENDEE:mailto:user02@example.com
ATTENDEE:mailto:user03@example.com
RRULE:FREQ=MONTHLY
END:VEVENT
END:VCALENDAR
""",
"""BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890
DTSTART:20080601T120000Z
DTEND:20080601T130000Z
ORGANIZER;CN="User 01":mailto:user01@example.com
ATTENDEE:mailto:user01@example.com
ATTENDEE:mailto:user02@example.com
ATTENDEE:mailto:user03@example.com
RRULE:FREQ=MONTHLY
EXDATE:20080801T120000Z,20080901T120000Z
END:VEVENT
END:VCALENDAR
""",
(
("mailto:user01@example.com", DateTime(2008, 8, 1, 12, 0, 0, tzid=Timezone.UTCTimezone)),
("mailto:user02@example.com", DateTime(2008, 8, 1, 12, 0, 0, tzid=Timezone.UTCTimezone)),
("mailto:user03@example.com", DateTime(2008, 8, 1, 12, 0, 0, tzid=Timezone.UTCTimezone)),
("mailto:user01@example.com", DateTime(2008, 9, 1, 12, 0, 0, tzid=Timezone.UTCTimezone)),
("mailto:user02@example.com", DateTime(2008, 9, 1, 12, 0, 0, tzid=Timezone.UTCTimezone)),
("mailto:user03@example.com", DateTime(2008, 9, 1, 12, 0, 0, tzid=Timezone.UTCTimezone)),
),
),
(
"#2.3 Simple recurring component, add multiple comma/property exdates",
"""BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890
DTSTART:20080601T120000Z
DTEND:20080601T130000Z
ORGANIZER;CN="User 01":mailto:user01@example.com
ATTENDEE:mailto:user01@example.com
ATTENDEE:mailto:user02@example.com
ATTENDEE:mailto:user03@example.com
RRULE:FREQ=MONTHLY
END:VEVENT
END:VCALENDAR
""",
"""BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890
DTSTART:20080601T120000Z
DTEND:20080601T130000Z
ORGANIZER;CN="User 01":mailto:user01@example.com
ATTENDEE:mailto:user01@example.com
ATTENDEE:mailto:user02@example.com
ATTENDEE:mailto:user03@example.com
RRULE:FREQ=MONTHLY
EXDATE:20080801T120000Z,20080901T120000Z
EXDATE:20081201T120000Z
END:VEVENT
END:VCALENDAR
""",
(
("mailto:user01@example.com", DateTime(2008, 8, 1, 12, 0, 0, tzid=Timezone.UTCTimezone)),
("mailto:user02@example.com", DateTime(2008, 8, 1, 12, 0, 0, tzid=Timezone.UTCTimezone)),
("mailto:user03@example.com", DateTime(2008, 8, 1, 12, 0, 0, tzid=Timezone.UTCTimezone)),
("mailto:user01@example.com", DateTime(2008, 9, 1, 12, 0, 0, tzid=Timezone.UTCTimezone)),
("mailto:user02@example.com", DateTime(2008, 9, 1, 12, 0, 0, tzid=Timezone.UTCTimezone)),
("mailto:user03@example.com", DateTime(2008, 9, 1, 12, 0, 0, tzid=Timezone.UTCTimezone)),
("mailto:user01@example.com", DateTime(2008, 12, 1, 12, 0, 0, tzid=Timezone.UTCTimezone)),
("mailto:user02@example.com", DateTime(2008, 12, 1, 12, 0, 0, tzid=Timezone.UTCTimezone)),
("mailto:user03@example.com", DateTime(2008, 12, 1, 12, 0, 0, tzid=Timezone.UTCTimezone)),
),
),
(
"#3.1 Complex recurring component with same attendees, no change",
"""BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890
DTSTART:20080601T120000Z
DTEND:20080601T130000Z
ORGANIZER;CN="User 01":mailto:user01@example.com
ATTENDEE:mailto:user01@example.com
ATTENDEE:mailto:user02@example.com
ATTENDEE:mailto:user03@example.com
RRULE:FREQ=MONTHLY
END:VEVENT
BEGIN:VEVENT
UID:12345-67890
RECURRENCE-ID:20080801T120000Z
DTSTART:20080601T120000Z
DTEND:20080601T130000Z
ORGANIZER;CN="User 01":mailto:user01@example.com
ATTENDEE:mailto:user01@example.com
ATTENDEE:mailto:user02@example.com
ATTENDEE:mailto:user03@example.com
END:VEVENT
END:VCALENDAR
""",
"""BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890
DTSTART:20080601T120000Z
DTEND:20080601T130000Z
ORGANIZER;CN="User 01":mailto:user01@example.com
ATTENDEE:mailto:user01@example.com
ATTENDEE:mailto:user02@example.com
ATTENDEE:mailto:user03@example.com
RRULE:FREQ=MONTHLY
END:VEVENT
BEGIN:VEVENT
UID:12345-67890
RECURRENCE-ID:20080801T120000Z
DTSTART:20080601T120000Z
DTEND:20080601T130000Z
ORGANIZER;CN="User 01":mailto:user01@example.com
ATTENDEE:mailto:user01@example.com
ATTENDEE:mailto:user02@example.com
ATTENDEE:mailto:user03@example.com
END:VEVENT
END:VCALENDAR
""",
(),
),
(
"#3.2 Complex recurring component with same attendees, change master/override",
"""BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890
DTSTART:20080601T120000Z
DTEND:20080601T130000Z
ORGANIZER;CN="User 01":mailto:user01@example.com
ATTENDEE:mailto:user01@example.com
ATTENDEE:mailto:user02@example.com
ATTENDEE:mailto:user03@example.com
RRULE:FREQ=MONTHLY
END:VEVENT
BEGIN:VEVENT
UID:12345-67890
RECURRENCE-ID:20080801T120000Z
DTSTART:20080601T120000Z
DTEND:20080601T130000Z
ORGANIZER;CN="User 01":mailto:user01@example.com
ATTENDEE:mailto:user01@example.com
ATTENDEE:mailto:user02@example.com
ATTENDEE:mailto:user03@example.com
END:VEVENT
END:VCALENDAR
""",
"""BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890
DTSTART:20080601T120000Z
DTEND:20080601T130000Z
ORGANIZER;CN="User 01":mailto:user01@example.com
ATTENDEE:mailto:user01@example.com
ATTENDEE:mailto:user02@example.com
RRULE:FREQ=MONTHLY
END:VEVENT
BEGIN:VEVENT
UID:12345-67890
RECURRENCE-ID:20080801T120000Z
DTSTART:20080601T120000Z
DTEND:20080601T130000Z
ORGANIZER;CN="User 01":mailto:user01@example.com
ATTENDEE:mailto:user01@example.com
ATTENDEE:mailto:user02@example.com
END:VEVENT
END:VCALENDAR
""",
(
("mailto:user03@example.com", None),
("mailto:user03@example.com", DateTime(2008, 8, 1, 12, 0, 0, tzid=Timezone.UTCTimezone)),
),
),
(
"#3.3 Complex recurring component with same attendees, change override",
"""BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890
DTSTART:20080601T120000Z
DTEND:20080601T130000Z
ORGANIZER;CN="User 01":mailto:user01@example.com
ATTENDEE:mailto:user01@example.com
ATTENDEE:mailto:user02@example.com
ATTENDEE:mailto:user03@example.com
RRULE:FREQ=MONTHLY
END:VEVENT
BEGIN:VEVENT
UID:12345-67890
RECURRENCE-ID:20080801T120000Z
DTSTART:20080601T120000Z
DTEND:20080601T130000Z
ORGANIZER;CN="User 01":mailto:user01@example.com
ATTENDEE:mailto:user01@example.com
ATTENDEE:mailto:user02@example.com
ATTENDEE:mailto:user03@example.com
END:VEVENT
END:VCALENDAR
""",
"""BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890
DTSTART:20080601T120000Z
DTEND:20080601T130000Z
ORGANIZER;CN="User 01":mailto:user01@example.com
ATTENDEE:mailto:user01@example.com
ATTENDEE:mailto:user02@example.com
ATTENDEE:mailto:user03@example.com
RRULE:FREQ=MONTHLY
END:VEVENT
BEGIN:VEVENT
UID:12345-67890
RECURRENCE-ID:20080801T120000Z
DTSTART:20080601T120000Z
DTEND:20080601T130000Z
ORGANIZER;CN="User 01":mailto:user01@example.com
ATTENDEE:mailto:user01@example.com
ATTENDEE:mailto:user02@example.com
END:VEVENT
END:VCALENDAR
""",
(
("mailto:user03@example.com", DateTime(2008, 8, 1, 12, 0, 0, tzid=Timezone.UTCTimezone)),
),
),
(
"#3.4 Complex recurring component with same attendees, change master",
"""BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890
DTSTART:20080601T120000Z
DTEND:20080601T130000Z
ORGANIZER;CN="User 01":mailto:user01@example.com
ATTENDEE:mailto:user01@example.com
ATTENDEE:mailto:user02@example.com
ATTENDEE:mailto:user03@example.com
RRULE:FREQ=MONTHLY
END:VEVENT
BEGIN:VEVENT
UID:12345-67890
RECURRENCE-ID:20080801T120000Z
DTSTART:20080601T120000Z
DTEND:20080601T130000Z
ORGANIZER;CN="User 01":mailto:user01@example.com
ATTENDEE:mailto:user01@example.com
ATTENDEE:mailto:user02@example.com
ATTENDEE:mailto:user03@example.com
END:VEVENT
END:VCALENDAR
""",
"""BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890
DTSTART:20080601T120000Z
DTEND:20080601T130000Z
ORGANIZER;CN="User 01":mailto:user01@example.com
ATTENDEE:mailto:user01@example.com
ATTENDEE:mailto:user02@example.com
RRULE:FREQ=MONTHLY
END:VEVENT
BEGIN:VEVENT
UID:12345-67890
RECURRENCE-ID:20080801T120000Z
DTSTART:20080601T120000Z
DTEND:20080601T130000Z
ORGANIZER;CN="User 01":mailto:user01@example.com
ATTENDEE:mailto:user01@example.com
ATTENDEE:mailto:user02@example.com
ATTENDEE:mailto:user03@example.com
END:VEVENT
END:VCALENDAR
""",
(
("mailto:user03@example.com", None),
),
),
(
"#3.5 Complex recurring component with same attendees, remove override - no exdate",
"""BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890
DTSTART:20080601T120000Z
DTEND:20080601T130000Z
ORGANIZER;CN="User 01":mailto:user01@example.com
ATTENDEE:mailto:user01@example.com
ATTENDEE:mailto:user02@example.com
ATTENDEE:mailto:user03@example.com
RRULE:FREQ=MONTHLY
END:VEVENT
BEGIN:VEVENT
UID:12345-67890
RECURRENCE-ID:20080801T120000Z
DTSTART:20080601T120000Z
DTEND:20080601T130000Z
ORGANIZER;CN="User 01":mailto:user01@example.com
ATTENDEE:mailto:user01@example.com
ATTENDEE:mailto:user02@example.com
ATTENDEE:mailto:user03@example.com
END:VEVENT
END:VCALENDAR
""",
"""BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890
DTSTART:20080601T120000Z
DTEND:20080601T130000Z
ORGANIZER;CN="User 01":mailto:user01@example.com
ATTENDEE:mailto:user01@example.com
ATTENDEE:mailto:user02@example.com
ATTENDEE:mailto:user03@example.com
RRULE:FREQ=MONTHLY
END:VEVENT
END:VCALENDAR
""",
(),
),
(
"#3.6 Complex recurring component with same attendees, remove override - exdate",
"""BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890
DTSTART:20080601T120000Z
DTEND:20080601T130000Z
ORGANIZER;CN="User 01":mailto:user01@example.com
ATTENDEE:mailto:user01@example.com
ATTENDEE:mailto:user02@example.com
ATTENDEE:mailto:user03@example.com
RRULE:FREQ=MONTHLY
END:VEVENT
BEGIN:VEVENT
UID:12345-67890
RECURRENCE-ID:20080801T120000Z
DTSTART:20080601T120000Z
DTEND:20080601T130000Z
ORGANIZER;CN="User 01":mailto:user01@example.com
ATTENDEE:mailto:user01@example.com
ATTENDEE:mailto:user02@example.com
ATTENDEE:mailto:user03@example.com
END:VEVENT
END:VCALENDAR
""",
"""BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890
DTSTART:20080601T120000Z
DTEND:20080601T130000Z
ORGANIZER;CN="User 01":mailto:user01@example.com
ATTENDEE:mailto:user01@example.com
ATTENDEE:mailto:user02@example.com
ATTENDEE:mailto:user03@example.com
RRULE:FREQ=MONTHLY
EXDATE:20080801T120000Z
END:VEVENT
END:VCALENDAR
""",
(
("mailto:user01@example.com", DateTime(2008, 8, 1, 12, 0, 0, tzid=Timezone.UTCTimezone)),
("mailto:user02@example.com", DateTime(2008, 8, 1, 12, 0, 0, tzid=Timezone.UTCTimezone)),
("mailto:user03@example.com", DateTime(2008, 8, 1, 12, 0, 0, tzid=Timezone.UTCTimezone)),
),
),
(
"#4.1 Complex recurring component with different attendees, change master/override",
"""BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890
DTSTART:20080601T120000Z
DTEND:20080601T130000Z
ORGANIZER;CN="User 01":mailto:user01@example.com
ATTENDEE:mailto:user01@example.com
ATTENDEE:mailto:user02@example.com
ATTENDEE:mailto:user03@example.com
RRULE:FREQ=MONTHLY
END:VEVENT
BEGIN:VEVENT
UID:12345-67890
RECURRENCE-ID:20080801T120000Z
DTSTART:20080601T120000Z
DTEND:20080601T130000Z
ORGANIZER;CN="User 01":mailto:user01@example.com
ATTENDEE:mailto:user01@example.com
ATTENDEE:mailto:user02@example.com
ATTENDEE:mailto:user04@example.com
END:VEVENT
END:VCALENDAR
""",
"""BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890
DTSTART:20080601T120000Z
DTEND:20080601T130000Z
ORGANIZER;CN="User 01":mailto:user01@example.com
ATTENDEE:mailto:user01@example.com
ATTENDEE:mailto:user02@example.com
RRULE:FREQ=MONTHLY
END:VEVENT
BEGIN:VEVENT
UID:12345-67890
RECURRENCE-ID:20080801T120000Z
DTSTART:20080601T120000Z
DTEND:20080601T130000Z
ORGANIZER;CN="User 01":mailto:user01@example.com
ATTENDEE:mailto:user01@example.com
ATTENDEE:mailto:user02@example.com
END:VEVENT
END:VCALENDAR
""",
(
("mailto:user03@example.com", None),
("mailto:user04@example.com", DateTime(2008, 8, 1, 12, 0, 0, tzid=Timezone.UTCTimezone)),
),
),
(
"#4.2 Complex recurring component with different attendees, remove override - no exdate",
"""BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890
DTSTART:20080601T120000Z
DTEND:20080601T130000Z
ORGANIZER;CN="User 01":mailto:user01@example.com
ATTENDEE:mailto:user01@example.com
ATTENDEE:mailto:user02@example.com
ATTENDEE:mailto:user03@example.com
RRULE:FREQ=MONTHLY
END:VEVENT
BEGIN:VEVENT
UID:12345-67890
RECURRENCE-ID:20080801T120000Z
DTSTART:20080601T120000Z
DTEND:20080601T130000Z
ORGANIZER;CN="User 01":mailto:user01@example.com
ATTENDEE:mailto:user01@example.com
ATTENDEE:mailto:user02@example.com
ATTENDEE:mailto:user04@example.com
END:VEVENT
END:VCALENDAR
""",
"""BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890
DTSTART:20080601T120000Z
DTEND:20080601T130000Z
ORGANIZER;CN="User 01":mailto:user01@example.com
ATTENDEE:mailto:user01@example.com
ATTENDEE:mailto:user02@example.com
ATTENDEE:mailto:user03@example.com
RRULE:FREQ=MONTHLY
END:VEVENT
END:VCALENDAR
""",
(
("mailto:user04@example.com", DateTime(2008, 8, 1, 12, 0, 0, tzid=Timezone.UTCTimezone)),
),
),
(
"#4.3 Complex recurring component with different attendees, remove override - exdate",
"""BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890
DTSTART:20080601T120000Z
DTEND:20080601T130000Z
ORGANIZER;CN="User 01":mailto:user01@example.com
ATTENDEE:mailto:user01@example.com
ATTENDEE:mailto:user02@example.com
ATTENDEE:mailto:user03@example.com
RRULE:FREQ=MONTHLY
END:VEVENT
BEGIN:VEVENT
UID:12345-67890
RECURRENCE-ID:20080801T120000Z
DTSTART:20080601T120000Z
DTEND:20080601T130000Z
ORGANIZER;CN="User 01":mailto:user01@example.com
ATTENDEE:mailto:user01@example.com
ATTENDEE:mailto:user02@example.com
ATTENDEE:mailto:user04@example.com
END:VEVENT
END:VCALENDAR
""",
"""BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890
DTSTART:20080601T120000Z
DTEND:20080601T130000Z
ORGANIZER;CN="User 01":mailto:user01@example.com
ATTENDEE:mailto:user01@example.com
ATTENDEE:mailto:user02@example.com
ATTENDEE:mailto:user03@example.com
RRULE:FREQ=MONTHLY
EXDATE:20080801T120000Z
END:VEVENT
END:VCALENDAR
""",
(
("mailto:user01@example.com", DateTime(2008, 8, 1, 12, 0, 0, tzid=Timezone.UTCTimezone)),
("mailto:user02@example.com", DateTime(2008, 8, 1, 12, 0, 0, tzid=Timezone.UTCTimezone)),
("mailto:user04@example.com", DateTime(2008, 8, 1, 12, 0, 0, tzid=Timezone.UTCTimezone)),
),
),
)
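# Each case is (description, original calendar, updated calendar, expected
# cancelled attendees). Expected results are (attendee, instance) pairs; an
# instance of None means the attendee was removed from the entire event
# rather than from a single override.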
for description, calendar1, calendar2, result in data:
scheduler = ImplicitScheduler()
scheduler.resource = None
scheduler.oldcalendar = Component.fromString(calendar1)
scheduler.oldAttendeesByInstance = scheduler.oldcalendar.getAttendeesByInstance(True, onlyScheduleAgentServer=True)
scheduler.oldInstances = set(scheduler.oldcalendar.getComponentInstances())
scheduler.calendar = Component.fromString(calendar2)
txn = self.transactionUnderTest()
scheduler.txn = txn
scheduler.calendar_home = yield self.homeUnderTest(txn=txn, name=u"user01", create=True)
yield scheduler.extractCalendarData()
scheduler.findRemovedAttendees()
self.assertEqual(scheduler.cancelledAttendees, set(result), msg=description)
yield self.commit()
@inlineCallbacks
def test_process_request_excludes_includes(self):
"""
Test that processRequests correctly excludes or includes the specified attendees.
"""
data = (
((), None, 3, ("mailto:user02@example.com", "mailto:user03@example.com", "mailto:user04@example.com",),),
(("mailto:user02@example.com",), None, 2, ("mailto:user03@example.com", "mailto:user04@example.com",),),
((), ("mailto:user02@example.com", "mailto:user04@example.com",), 2, ("mailto:user02@example.com", "mailto:user04@example.com",),),
)
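# Each case is (except_attendees, only_refresh_attendees, expected request
# count, expected recipient set); the organizer is never a recipient.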
calendar = """BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890
DTSTART:20080601T120000Z
DTEND:20080601T130000Z
ORGANIZER;CN="User 01":mailto:user01@example.com
ATTENDEE:mailto:user01@example.com
ATTENDEE:mailto:user02@example.com
ATTENDEE:mailto:user03@example.com
ATTENDEE:mailto:user04@example.com
END:VEVENT
END:VCALENDAR
"""
for excludes, includes, result_count, result_set in data:
scheduler = ImplicitScheduler()
scheduler.resource = None
scheduler.calendar = Component.fromString(calendar)
scheduler.state = "organizer"
scheduler.action = "modify"
scheduler.internal_request = True
scheduler.except_attendees = excludes
scheduler.only_refresh_attendees = includes
scheduler.changed_rids = None
scheduler.reinvites = None
txn = self.transactionUnderTest()
scheduler.txn = txn
scheduler.calendar_home = yield self.homeUnderTest(txn=txn, name=u"user01", create=True)
# Get some useful information from the calendar
yield scheduler.extractCalendarData()
record = yield self.directory.recordWithUID(scheduler.calendar_home.uid())
scheduler.organizerAddress = LocalCalendarUser(
"mailto:user01@example.com",
record,
)
recipients = []
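# Substitute a fake scheduler that records each recipient instead of
# actually delivering iTIP messages.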
def makeFakeScheduler():
return FakeScheduler(recipients)
scheduler.makeScheduler = makeFakeScheduler
count = (yield scheduler.processRequests())
self.assertEqual(count, result_count)
self.assertEqual(len(recipients), result_count)
self.assertEqual(set(recipients), set(result_set))
yield self.commit()
class ImplicitRequests(CommonCommonTests, TestCase):
"""
Test txdav.caldav.datastore.scheduling.implicit.
"""
@inlineCallbacks
def setUp(self):
yield super(ImplicitRequests, self).setUp()
yield self.buildStoreAndDirectory()
yield self.populate()
@inlineCallbacks
def populate(self):
yield populateCalendarsFrom(self.requirements, self.storeUnderTest())
self.notifierFactory.reset()
@classproperty(cache=False)
def requirements(cls): # @NoSelf
return {
"user01": {
"calendar_1": {
},
"inbox": {
},
},
"user02": {
"calendar_1": {
},
"inbox": {
},
},
"user03": {
"calendar_1": {
},
"inbox": {
},
},
}
@inlineCallbacks
def _createCalendarObject(self, data, user, name):
calendar_collection = (yield self.calendarUnderTest(home=user))
yield calendar_collection.createCalendarObjectWithName("test.ics", Component.fromString(data))
yield self.commit()
@inlineCallbacks
def _listCalendarObjects(self, user, collection_name="calendar_1"):
collection = (yield self.calendarUnderTest(name=collection_name, home=user))
items = (yield collection.listCalendarObjects())
yield self.commit()
returnValue(items)
@inlineCallbacks
def _getCalendarData(self, user, name=None):
if name is None:
items = (yield self._listCalendarObjects(user))
name = items[0]
calendar_resource = (yield self.calendarObjectUnderTest(name=name, home=user))
calendar = (yield calendar_resource.component())
yield self.commit()
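# Unfold any folded long lines (iCalendar folds with CRLF followed by a
# space) so that substring assertions on the returned data are reliable.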
returnValue(str(calendar).replace("\r\n ", ""))
@inlineCallbacks
def _setCalendarData(self, data, user, name=None):
if name is None:
items = (yield self._listCalendarObjects(user))
name = items[0]
calendar_resource = (yield self.calendarObjectUnderTest(name=name, home=user))
yield calendar_resource.setComponent(Component.fromString(data))
yield self.commit()
@inlineCallbacks
def test_testImplicitSchedulingPUT_ScheduleState(self):
"""
Test that checkImplicitState() returns True whenever an ORGANIZER is present, valid or not, and False when there is no organizer.
"""
data = (
(
"""BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890
DTSTART:20080601T120000Z
DTEND:20080601T130000Z
END:VEVENT
END:VCALENDAR
""",
False,
),
(
"""BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890
DTSTART:20080601T120000Z
DTEND:20080601T130000Z
ORGANIZER;CN="User 01":mailto:user01@example.com
ATTENDEE:mailto:user01@example.com
ATTENDEE:mailto:user02@example.com
END:VEVENT
END:VCALENDAR
""",
True,
),
(
"""BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890
DTSTART:20080601T120000Z
DTEND:20080601T130000Z
ORGANIZER;CN="User 01":mailto:bogus@bogus.com
ATTENDEE:mailto:user01@example.com
ATTENDEE:mailto:bogus@bogus.com
END:VEVENT
END:VCALENDAR
""",
True,
),
)
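# Each case is (calendar data, expected schedule object state): no ORGANIZER
# means not a scheduling object, while any ORGANIZER - even an invalid
# one - makes it one.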
calendar_collection = (yield self.calendarUnderTest(home="user01"))
for calendar, result in data:
calendar = Component.fromString(calendar)
scheduler = ImplicitScheduler()
doAction, isScheduleObject = (yield scheduler.testImplicitSchedulingPUT(calendar_collection, None, calendar, False))
self.assertEqual(doAction, result)
self.assertEqual(isScheduleObject, result)
@inlineCallbacks
def test_testImplicitSchedulingPUT_FixScheduleState(self):
"""
Test that testImplicitSchedulingPUT will fix an old cached schedule object state by
re-evaluating the calendar data.
"""
calendarOld = Component.fromString("""BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890
DTSTAMP:20080601T120000Z
DTSTART:20080601T120000Z
DTEND:20080601T130000Z
ORGANIZER;CN="User 01":mailto:user01@example.com
ATTENDEE:mailto:user01@example.com
ATTENDEE:mailto:user02@example.com
END:VEVENT
END:VCALENDAR
""")
calendarNew = Component.fromString("""BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890
DTSTAMP:20080601T120000Z
DTSTART:20080601T120000Z
DTEND:20080601T130000Z
ORGANIZER;CN="User 01":mailto:user01@example.com
ATTENDEE:mailto:user01@example.com
ATTENDEE:mailto:user02@example.com
END:VEVENT
END:VCALENDAR
""")
calendar_collection = (yield self.calendarUnderTest(home="user01"))
calresource = (yield calendar_collection.createCalendarObjectWithName(
"1.ics", calendarOld
))
calresource.isScheduleObject = False
scheduler = ImplicitScheduler()
try:
doAction, isScheduleObject = (yield scheduler.testImplicitSchedulingPUT(calendar_collection, calresource, calendarNew, False))
except Exception as e:
print(e)
self.fail("Exception must not be raised")
self.assertTrue(doAction)
self.assertTrue(isScheduleObject)
@inlineCallbacks
def test_testImplicitSchedulingPUT_NoChangeScheduleState(self):
"""
Test that testImplicitSchedulingPUT will prevent attendees from changing the
schedule object state.
"""
calendarOld = Component.fromString("""BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890
DTSTAMP:20080601T120000Z
DTSTART:20080601T120000Z
DTEND:20080601T130000Z
END:VEVENT
END:VCALENDAR
""")
calendarNew = Component.fromString("""BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890
DTSTAMP:20080601T120000Z
DTSTART:20080601T120000Z
DTEND:20080601T130000Z
ORGANIZER;CN="User 02":mailto:user02@example.com
ATTENDEE:mailto:user01@example.com
ATTENDEE:mailto:user02@example.com
END:VEVENT
END:VCALENDAR
""")
calendar_collection = (yield self.calendarUnderTest(home="user01"))
calresource = (yield calendar_collection.createCalendarObjectWithName(
"1.ics", calendarOld
))
calresource.isScheduleObject = False
scheduler = ImplicitScheduler()
try:
yield scheduler.testImplicitSchedulingPUT(calendar_collection, calresource, calendarNew, False)
except HTTPError:
pass
except:
self.fail("HTTPError exception must be raised")
else:
self.fail("Exception must be raised")
@inlineCallbacks
def test_doImplicitScheduling_NewOrganizerEvent(self):
"""
Test that doImplicitScheduling delivers scheduling messages to attendees.
"""
data = """BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890
DTSTAMP:20080601T120000Z
DTSTART:20080601T120000Z
DTEND:20080601T130000Z
ORGANIZER;CN="User 01":mailto:user01@example.com
ATTENDEE:mailto:user01@example.com
ATTENDEE:mailto:user02@example.com
END:VEVENT
END:VCALENDAR
"""
yield self._createCalendarObject(data, "user01", "test.ics")
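# The attendee's copies are named after the MD5 hex digest of the event UID.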
list2 = (yield self._listCalendarObjects("user02"))
self.assertEqual(len(list2), 1)
self.assertTrue(list2[0].startswith(hashlib.md5("12345-67890").hexdigest()))
list2 = (yield self._listCalendarObjects("user02", "inbox"))
self.assertEqual(len(list2), 1)
self.assertTrue(list2[0].startswith(hashlib.md5("12345-67890").hexdigest()))
@inlineCallbacks
def test_doImplicitScheduling_UpdateOrganizerEvent(self):
"""
Test that doImplicitScheduling delivers scheduling messages to attendees.
"""
data1 = """BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890
DTSTAMP:20080601T120000Z
DTSTART:20080601T120000Z
DTEND:20080601T130000Z
ORGANIZER;CN="User 01":mailto:user01@example.com
ATTENDEE:mailto:user01@example.com
ATTENDEE:mailto:user02@example.com
END:VEVENT
END:VCALENDAR
"""
data2 = """BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890
DTSTAMP:20080601T120000Z
DTSTART:20080601T130000Z
DTEND:20080601T140000Z
ORGANIZER;CN="User 01":mailto:user01@example.com
ATTENDEE:mailto:user01@example.com
ATTENDEE:mailto:user02@example.com
END:VEVENT
END:VCALENDAR
"""
yield self._createCalendarObject(data1, "user01", "test.ics")
yield self._setCalendarData(data2, "user01", "test.ics")
list2 = (yield self._listCalendarObjects("user02"))
self.assertEqual(len(list2), 1)
self.assertTrue(list2[0].startswith(hashlib.md5("12345-67890").hexdigest()))
list2 = (yield self._listCalendarObjects("user02", "inbox"))
self.assertEqual(len(list2), 2)
self.assertTrue(list2[0].startswith(hashlib.md5("12345-67890").hexdigest()))
self.assertTrue(list2[1].startswith(hashlib.md5("12345-67890").hexdigest()))
@inlineCallbacks
def test_doImplicitScheduling_DeleteOrganizerEvent(self):
"""
Test that doImplicitScheduling delivers scheduling messages to attendees.
"""
data1 = """BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890
DTSTAMP:20080601T120000Z
DTSTART:20080601T120000Z
DTEND:20080601T130000Z
ORGANIZER;CN="User 01":mailto:user01@example.com
ATTENDEE:mailto:user01@example.com
ATTENDEE:mailto:user02@example.com
END:VEVENT
END:VCALENDAR
"""
yield self._createCalendarObject(data1, "user01", "test.ics")
calendar_resource = (yield self.calendarObjectUnderTest(name="test.ics", home="user01"))
yield calendar_resource.remove()
yield self.commit()
list2 = (yield self._listCalendarObjects("user02"))
self.assertEqual(len(list2), 1)
self.assertTrue(list2[0].startswith(hashlib.md5("12345-67890").hexdigest()))
list2 = (yield self._listCalendarObjects("user02", "inbox"))
self.assertEqual(len(list2), 2)
self.assertTrue(list2[0].startswith(hashlib.md5("12345-67890").hexdigest()))
self.assertTrue(list2[1].startswith(hashlib.md5("12345-67890").hexdigest()))
@inlineCallbacks
def test_doImplicitScheduling_UpdateMailtoOrganizerEvent(self):
"""
Test that doImplicitScheduling works when the existing calendar data contains a non-normalized
organizer calendar user address.
"""
data1 = """BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890
DTSTAMP:20080601T120000Z
DTSTART:20080601T120000Z
DTEND:20080601T130000Z
ORGANIZER;CN="User 01";SCHEDULE-AGENT=NONE:mailto:user01@example.com
ATTENDEE:mailto:user01@example.com
ATTENDEE:mailto:user02@example.com
END:VEVENT
END:VCALENDAR
"""
data2 = """BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890
DTSTAMP:20080601T120000Z
DTSTART:20080601T130000Z
DTEND:20080601T140000Z
ORGANIZER;CN="User 01";SCHEDULE-AGENT=NONE:mailto:user01@example.com
ATTENDEE:mailto:user01@example.com
ATTENDEE:mailto:user02@example.com
END:VEVENT
END:VCALENDAR
"""
self.patch(CalendarObject.CalendarObjectUpgradeWork, "delay", 1)
yield self._createCalendarObject(data1, "user01", "test.ics")
cobj = yield self.calendarObjectUnderTest(home="user01", name="test.ics")
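# Temporarily drop the data version to zero and store the raw mailto: data,
# simulating legacy, non-normalized calendar data that the upgrade work item
# will later fix.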
actualVersion = CalendarObject._currentDataVersion
self.patch(CalendarObject, "_currentDataVersion", 0)
yield cobj._setComponentInternal(Component.fromString(data1), internal_state=ComponentUpdateState.RAW)
CalendarObject._currentDataVersion = actualVersion
yield self.commit()
cobj = yield self.calendarObjectUnderTest(home="user01", name="test.ics")
comp = yield cobj.component()
# Because CUA normalization happens in component() now too...
self.assertTrue(comp.getOrganizer().startswith("urn:x-uid:"))
self.assertFalse(comp.getOrganizerScheduleAgent())
yield self.commit()
yield JobItem.waitEmpty(self.storeUnderTest().newTransaction, reactor, 60)
cobj = yield self.calendarObjectUnderTest(home="user01", name="test.ics")
comp = yield cobj.component()
# Because CUA normalization happens in component() now too...
self.assertTrue(comp.getOrganizer().startswith("urn:x-uid:"))
self.assertFalse(comp.getOrganizerScheduleAgent())
yield self.commit()
cobj = yield self.calendarObjectUnderTest(home="user01", name="test.ics")
actualVersion = CalendarObject._currentDataVersion
self.patch(CalendarObject, "_currentDataVersion", 0)
yield cobj.setComponent(Component.fromString(data2))
CalendarObject._currentDataVersion = actualVersion
yield self.commit()
cobj = yield self.calendarObjectUnderTest(home="user01", name="test.ics")
comp = yield cobj.component()
self.assertTrue(comp.getOrganizer().startswith("urn:x-uid:"))
self.assertTrue(comp.getOrganizerScheduleAgent())
yield self.commit()
yield JobItem.waitEmpty(self.storeUnderTest().newTransaction, reactor, 60)
cobj = yield self.calendarObjectUnderTest(home="user01", name="test.ics")
comp = yield cobj.component()
self.assertTrue(comp.getOrganizer().startswith("urn:x-uid:"))
self.assertTrue(comp.getOrganizerScheduleAgent())
yield self.commit()
@inlineCallbacks
def test_doImplicitScheduling_AttendeeEventNoOrganizerEvent(self):
"""
Test that doImplicitScheduling handles an attendee reply with no organizer event.
"""
data = """BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890-attendee-no-organizer
DTSTAMP:20080601T120000Z
DTSTART:20080601T120000Z
DTEND:20080601T130000Z
ORGANIZER;CN="User 01":mailto:user01@example.com
ATTENDEE:mailto:user01@example.com
ATTENDEE;PARTSTAT=ACCEPTED:mailto:user02@example.com
END:VEVENT
END:VCALENDAR
"""
try:
yield self._createCalendarObject(data, "user02", "test.ics")
except AttendeeAllowedError:
pass
except:
self.fail("Wrong exception raised: %s" % (sys.exc_info()[0].__name__,))
else:
self.fail("Exception not raised")
list1 = (yield self._listCalendarObjects("user01", "inbox"))
self.assertEqual(len(list1), 0)
@inlineCallbacks
def test_doImplicitScheduling_AttendeeReply(self):
"""
Test that doImplicitScheduling delivers scheduling messages to attendees who can then reply.
"""
data1 = """BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890-attendee-reply
DTSTAMP:20080601T120000Z
DTSTART:20080601T120000Z
DTEND:20080601T130000Z
ORGANIZER;CN="User 01":mailto:user01@example.com
ATTENDEE:mailto:user01@example.com
ATTENDEE:mailto:user02@example.com
END:VEVENT
END:VCALENDAR
"""
data2 = """BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890-attendee-reply
DTSTAMP:20080601T120000Z
DTSTART:20080601T120000Z
DTEND:20080601T130000Z
ORGANIZER;CN="User 01":mailto:user01@example.com
ATTENDEE:mailto:user01@example.com
ATTENDEE;PARTSTAT=ACCEPTED:mailto:user02@example.com
END:VEVENT
END:VCALENDAR
"""
yield self._createCalendarObject(data1, "user01", "test.ics")
calendar1 = (yield self._getCalendarData("user01", "test.ics"))
self.assertTrue("SCHEDULE-STATUS=1.2" in calendar1)
list2 = (yield self._listCalendarObjects("user02", "inbox"))
self.assertEqual(len(list2), 1)
yield self._setCalendarData(data2, "user02")
yield JobItem.waitEmpty(self.storeUnderTest().newTransaction, reactor, 60)
list1 = (yield self._listCalendarObjects("user01", "inbox"))
self.assertEqual(len(list1), 1)
calendar1 = (yield self._getCalendarData("user01", "test.ics"))
self.assertTrue("SCHEDULE-STATUS=2.0" in calendar1)
self.assertTrue("PARTSTAT=ACCEPTED" in calendar1)
@inlineCallbacks
def test_doImplicitScheduling_refreshAllAttendeesExceptSome(self):
"""
Test that when one attendee replies, doImplicitScheduling refreshes the copies of the other attendees with the updated participation status.
"""
data1 = """BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890-attendee-reply
DTSTAMP:20080601T120000Z
DTSTART:20080601T120000Z
DTEND:20080601T130000Z
ORGANIZER;CN="User 01":mailto:user01@example.com
ATTENDEE:mailto:user01@example.com
ATTENDEE:mailto:user02@example.com
ATTENDEE:mailto:user03@example.com
END:VEVENT
END:VCALENDAR
"""
data2 = """BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890-attendee-reply
DTSTAMP:20080601T120000Z
DTSTART:20080601T120000Z
DTEND:20080601T130000Z
ORGANIZER;CN="User 01":mailto:user01@example.com
ATTENDEE:mailto:user01@example.com
ATTENDEE;PARTSTAT=ACCEPTED:mailto:user02@example.com
ATTENDEE:mailto:user03@example.com
END:VEVENT
END:VCALENDAR
"""
# Need refreshes to occur immediately, not via reactor.callLater
self.patch(config.Scheduling.Options, "AttendeeRefreshBatch", False)
yield self._createCalendarObject(data1, "user01", "test.ics")
list1 = (yield self._listCalendarObjects("user01", "inbox"))
self.assertEqual(len(list1), 0)
calendar1 = (yield self._getCalendarData("user01", "test.ics"))
self.assertTrue("SCHEDULE-STATUS=1.2" in calendar1)
list2 = (yield self._listCalendarObjects("user02", "inbox"))
self.assertEqual(len(list2), 1)
calendar2 = (yield self._getCalendarData("user02"))
self.assertTrue("PARTSTAT=ACCEPTED" not in calendar2)
list3 = (yield self._listCalendarObjects("user03", "inbox"))
self.assertEqual(len(list3), 1)
calendar3 = (yield self._getCalendarData("user03"))
self.assertTrue("PARTSTAT=ACCEPTED" not in calendar3)
yield self._setCalendarData(data2, "user02")
yield JobItem.waitEmpty(self.storeUnderTest().newTransaction, reactor, 60)
list1 = (yield self._listCalendarObjects("user01", "inbox"))
self.assertEqual(len(list1), 1)
calendar1 = (yield self._getCalendarData("user01", "test.ics"))
self.assertTrue("SCHEDULE-STATUS=2.0" in calendar1)
self.assertTrue("PARTSTAT=ACCEPTED" in calendar1)
list2 = (yield self._listCalendarObjects("user02", "inbox"))
self.assertEqual(len(list2), 1)
calendar2 = (yield self._getCalendarData("user02"))
self.assertTrue("PARTSTAT=ACCEPTED" in calendar2)
list3 = (yield self._listCalendarObjects("user03", "inbox"))
self.assertEqual(len(list3), 1)
calendar3 = (yield self._getCalendarData("user03"))
self.assertTrue("PARTSTAT=ACCEPTED" in calendar3)
@inlineCallbacks
def test_doImplicitScheduling_refreshAllAttendeesExceptSome_Batched(self):
"""
Test that when one attendee replies, doImplicitScheduling refreshes the other
attendees' copies via the batched refresh mechanism.
"""
data1 = """BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890-attendee-reply
DTSTAMP:20080601T120000Z
DTSTART:20080601T120000Z
DTEND:20080601T130000Z
ORGANIZER;CN="User 01":mailto:user01@example.com
ATTENDEE:mailto:user01@example.com
ATTENDEE:mailto:user02@example.com
ATTENDEE:mailto:user03@example.com
END:VEVENT
END:VCALENDAR
"""
data2 = """BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890-attendee-reply
DTSTAMP:20080601T120000Z
DTSTART:20080601T120000Z
DTEND:20080601T130000Z
ORGANIZER;CN="User 01":mailto:user01@example.com
ATTENDEE:mailto:user01@example.com
ATTENDEE;PARTSTAT=ACCEPTED:mailto:user02@example.com
ATTENDEE:mailto:user03@example.com
END:VEVENT
END:VCALENDAR
"""
# Enable batched refreshes with a short delay so the batch work item runs during the test
self.patch(config.Scheduling.Options, "AttendeeRefreshBatch", 5)
self.patch(config.Scheduling.Options.WorkQueues, "AttendeeRefreshBatchDelaySeconds", 1)
yield self._createCalendarObject(data1, "user01", "test.ics")
list1 = (yield self._listCalendarObjects("user01", "inbox"))
self.assertEqual(len(list1), 0)
calendar1 = (yield self._getCalendarData("user01", "test.ics"))
self.assertTrue("SCHEDULE-STATUS=1.2" in calendar1)
list2 = (yield self._listCalendarObjects("user02", "inbox"))
self.assertEqual(len(list2), 1)
calendar2 = (yield self._getCalendarData("user02"))
self.assertTrue("PARTSTAT=ACCEPTED" not in calendar2)
list3 = (yield self._listCalendarObjects("user03", "inbox"))
self.assertEqual(len(list3), 1)
calendar3 = (yield self._getCalendarData("user03"))
self.assertTrue("PARTSTAT=ACCEPTED" not in calendar3)
yield self._setCalendarData(data2, "user02")
yield JobItem.waitEmpty(self.storeUnderTest().newTransaction, reactor, 60)
list1 = (yield self._listCalendarObjects("user01", "inbox"))
self.assertEqual(len(list1), 1)
calendar1 = (yield self._getCalendarData("user01", "test.ics"))
self.assertTrue("SCHEDULE-STATUS=2.0" in calendar1)
self.assertTrue("PARTSTAT=ACCEPTED" in calendar1)
list2 = (yield self._listCalendarObjects("user02", "inbox"))
self.assertEqual(len(list2), 1)
calendar2 = (yield self._getCalendarData("user02"))
self.assertTrue("PARTSTAT=ACCEPTED" in calendar2)
@inlineCallbacks
def _test_user03_refresh():
list3 = (yield self._listCalendarObjects("user03", "inbox"))
self.assertEqual(len(list3), 1)
calendar3 = (yield self._getCalendarData("user03"))
self.assertTrue("PARTSTAT=ACCEPTED" in calendar3)
yield deferLater(reactor, 2.0, _test_user03_refresh)
@inlineCallbacks
def test_doImplicitScheduling_OrganizerEventTimezoneDST(self):
"""
Test that doImplicitScheduling delivers scheduling messages to attendees. This test
creates an exception close to a DST transition to make sure timezone DST handling
is correct.
"""
data1 = """BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890
DTSTAMP:20080601T120000Z
DTSTART;TZID=America/Los_Angeles:20140302T190000
DTEND;TZID=America/Los_Angeles:20140302T193000
ORGANIZER;CN="User 01":mailto:user01@example.com
ATTENDEE:mailto:user01@example.com
ATTENDEE:mailto:user02@example.com
RRULE:FREQ=DAILY;UNTIL=20140309T075959Z
END:VEVENT
END:VCALENDAR
"""
data2 = """BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890
DTSTAMP:20080601T120000Z
DTSTART;TZID=America/Los_Angeles:20140302T190000
DTEND;TZID=America/Los_Angeles:20140302T193000
ORGANIZER;CN="User 01":mailto:user01@example.com
ATTENDEE:mailto:user01@example.com
ATTENDEE:mailto:user02@example.com
RRULE:FREQ=DAILY;UNTIL=20140309T075959Z
END:VEVENT
BEGIN:VEVENT
UID:12345-67890
DTSTAMP:20080601T120000Z
RECURRENCE-ID;TZID=America/Los_Angeles:20140308T190000
DTSTART;TZID=America/Los_Angeles:20140308T190000
DTEND;TZID=America/Los_Angeles:20140308T193000
ORGANIZER;CN="User 01":mailto:user01@example.com
ATTENDEE:mailto:user01@example.com
ATTENDEE:mailto:user02@example.com
END:VEVENT
END:VCALENDAR
"""
TimezoneCache.create()
yield self._createCalendarObject(data1, "user01", "test.ics")
yield self._setCalendarData(data2, "user01", "test.ics")
list2 = (yield self._listCalendarObjects("user02"))
self.assertEqual(len(list2), 1)
self.assertTrue(list2[0].startswith(hashlib.md5("12345-67890").hexdigest()))
list2 = (yield self._listCalendarObjects("user02", "inbox"))
self.assertEqual(len(list2), 2)
self.assertTrue(list2[0].startswith(hashlib.md5("12345-67890").hexdigest()))
self.assertTrue(list2[1].startswith(hashlib.md5("12345-67890").hexdigest()))
@inlineCallbacks
def test_doImplicitScheduling_MissingAttendeeWithInvalidUser(self):
"""
Test that doImplicitMissingAttendee works when the event contains an
invalid attendee.
"""
data1 = """BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890
DTSTAMP:20080601T120000Z
DTSTART:20140302T190000Z
DURATION:PT1H
ORGANIZER;CN="User 01":mailto:user01@example.com
ATTENDEE:mailto:user01@example.com
ATTENDEE:mailto:foo@bar.com
RRULE:FREQ=DAILY;UNTIL=20140309T075959Z
END:VEVENT
END:VCALENDAR
"""
yield self._createCalendarObject(data1, "user02", "test.ics")
list2 = (yield self._listCalendarObjects("user02"))
self.assertEqual(len(list2), 1)
yield self._setCalendarData(data1, "user02", "test.ics")
list2 = (yield self._listCalendarObjects("user02"))
self.assertEqual(len(list2), 1)
@inlineCallbacks
def test_doImplicitScheduling_MissingAttendeeWithiMIP(self):
"""
Test that doImplicitMissingAttendee works when iMIP is enabled and the event
contains an iMIP attendee.
"""
data1 = """BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890
DTSTAMP:20080601T120000Z
DTSTART:20140302T190000Z
DURATION:PT1H
ORGANIZER;CN="User 01":mailto:user01@example.com
ATTENDEE:mailto:user01@example.com
ATTENDEE:mailto:foo@bar.com
RRULE:FREQ=DAILY;UNTIL=20140309T075959Z
END:VEVENT
END:VCALENDAR
"""
self.patch(config.Scheduling.iMIP, "Enabled", True)
self.patch(config.Scheduling.iMIP, "AddressPatterns", ["mailto:.*"])
yield self._createCalendarObject(data1, "user02", "test.ics")
list2 = (yield self._listCalendarObjects("user02"))
self.assertEqual(len(list2), 1)
yield self._setCalendarData(data1, "user02", "test.ics")
list2 = (yield self._listCalendarObjects("user02"))
self.assertEqual(len(list2), 1)
@inlineCallbacks
def test_sendAttendeeReply_ScheduleAgentNone(self):
"""
Test that sendAttendeeReply does nothing when the Organizer has
SCHEDULE-AGENT=NONE.
"""
data1 = """BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890
DTSTAMP:20080601T120000Z
DTSTART:20140302T190000Z
DURATION:PT1H
ORGANIZER;SCHEDULE-AGENT=NONE;CN="User 01":mailto:user01@example.com
ATTENDEE:mailto:user01@example.com
ATTENDEE:mailto:user02@example.com
RRULE:FREQ=DAILY;UNTIL=20140309T075959Z
END:VEVENT
END:VCALENDAR
"""
yield self._createCalendarObject(data1, "user02", "test.ics")
cobj = yield self.calendarObjectUnderTest(home="user02", name="test.ics",)
result = yield ImplicitScheduler().sendAttendeeReply(cobj._txn, cobj)
self.assertFalse(result)
@inlineCallbacks
def test_sendAttendeeReply_ScheduleAgentClient(self):
"""
Test that sendAttendeeReply does nothing when the Organizer has
SCHEDULE-AGENT=CLIENT.
"""
data1 = """BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890
DTSTAMP:20080601T120000Z
DTSTART:20140302T190000Z
DURATION:PT1H
ORGANIZER;SCHEDULE-AGENT=CLIENT;CN="User 01":mailto:user01@example.com
ATTENDEE:mailto:user01@example.com
ATTENDEE:mailto:user02@example.com
RRULE:FREQ=DAILY;UNTIL=20140309T075959Z
END:VEVENT
END:VCALENDAR
"""
yield self._createCalendarObject(data1, "user02", "test.ics")
cobj = yield self.calendarObjectUnderTest(home="user02", name="test.ics",)
result = yield ImplicitScheduler().sendAttendeeReply(cobj._txn, cobj)
self.assertFalse(result)
@inlineCallbacks
def test_sendAttendeeReply_NoAttendee(self):
"""
Test that sendAttendeeReply does nothing when the Attendee is not
listed in the event. This should not normally be possible, but a case
like this was seen due to a processing error elsewhere.
"""
data1 = """BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890
DTSTAMP:20080601T120000Z
DTSTART:20140302T190000Z
DURATION:PT1H
ORGANIZER;CN="User 01":mailto:user01@example.com
ATTENDEE:mailto:user01@example.com
ATTENDEE:mailto:user03@example.com
RRULE:FREQ=DAILY;UNTIL=20140309T075959Z
END:VEVENT
END:VCALENDAR
"""
yield self._createCalendarObject(data1, "user02", "test.ics")
cobj = yield self.calendarObjectUnderTest(home="user02", name="test.ics",)
# Need to remove SCHEDULE-AGENT=NONE on ORGANIZER as that will have been added during the store operation
cal = yield cobj.componentForUser()
cal.removePropertyParameters("ORGANIZER", ("SCHEDULE-AGENT", "SCHEDULE-STATUS",))
result = yield ImplicitScheduler().sendAttendeeReply(cobj._txn, cobj)
self.assertFalse(result)
class ScheduleAgentFixBase(CommonCommonTests, TestCase):
"""
Test txdav.caldav.datastore.scheduling.implicit.
"""
@inlineCallbacks
def setUp(self):
yield super(ScheduleAgentFixBase, self).setUp()
yield self.buildStoreAndDirectory()
yield self.populate()
self.patch(config.Scheduling.Options, "AttendeeRefreshBatch", 0)
@inlineCallbacks
def populate(self):
yield populateCalendarsFrom(self.requirements, self.storeUnderTest())
self.notifierFactory.reset()
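# Metadata applied to each pre-populated resource so it is stored as an
# existing schedule object.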
metadata = {
"accessMode": "PUBLIC",
"isScheduleObject": True,
"scheduleTag": "abc",
"scheduleEtags": (),
"hasPrivateComment": False,
}
@classproperty(cache=False)
def requirements(cls): # @NoSelf
return {
"user01": {
"calendar_1": {
"organizer.ics": (cls.organizer_data, cls.metadata),
},
"inbox": {
},
},
"user02": {
"calendar_1": {
"attendee2.ics": (cls.attendee2_data, cls.metadata),
},
"inbox": {
},
},
"user03": {
"calendar_1": {
"attendee3.ics": (cls.attendee3_data, cls.metadata),
},
"inbox": {
},
},
}
class ScheduleAgentFix(ScheduleAgentFixBase):
"""
Test that an attendee copy with inconsistent SCHEDULE-AGENT values (CLIENT on
the master, SERVER on the override) is corrected when the attendee updates.
"""
organizer_data = """BEGIN:VCALENDAR
CALSCALE:GREGORIAN
PRODID:-//Example Inc.//Example Calendar//EN
VERSION:2.0
BEGIN:VEVENT
DTSTAMP:20051222T205953Z
CREATED:20060101T150000Z
DTSTART:20140101T100000Z
DURATION:PT1H
SUMMARY:event 1
UID:event1@ninevah.local
ORGANIZER:urn:x-uid:user01
ATTENDEE:urn:x-uid:user01
ATTENDEE:urn:x-uid:user03
RRULE:FREQ=DAILY
END:VEVENT
BEGIN:VEVENT
DTSTAMP:20051222T205953Z
CREATED:20060101T150000Z
RECURRENCE-ID:20140102T100000Z
DTSTART:20140102T100000Z
DURATION:PT1H
SUMMARY:event 1
UID:event1@ninevah.local
ORGANIZER:urn:x-uid:user01
ATTENDEE:urn:x-uid:user01
ATTENDEE:urn:x-uid:user02
ATTENDEE:urn:x-uid:user03
END:VEVENT
END:VCALENDAR
"""
attendee2_data = """BEGIN:VCALENDAR
CALSCALE:GREGORIAN
PRODID:-//Example Inc.//Example Calendar//EN
VERSION:2.0
BEGIN:VEVENT
DTSTAMP:20051222T205953Z
CREATED:20060101T150000Z
DTSTART:20140101T100000Z
DURATION:PT1H
SUMMARY:event 1
UID:event1@ninevah.local
ORGANIZER;SCHEDULE-AGENT=CLIENT:urn:x-uid:user01
ATTENDEE:urn:x-uid:user01
ATTENDEE:urn:x-uid:user03
RRULE:FREQ=DAILY
END:VEVENT
BEGIN:VEVENT
DTSTAMP:20051222T205953Z
CREATED:20060101T150000Z
RECURRENCE-ID:20140102T100000Z
DTSTART:20140102T100000Z
DURATION:PT1H
SUMMARY:event 1
UID:event1@ninevah.local
ORGANIZER;SCHEDULE-AGENT=SERVER:urn:x-uid:user01
ATTENDEE:urn:x-uid:user01
ATTENDEE:urn:x-uid:user02
ATTENDEE:urn:x-uid:user03
END:VEVENT
END:VCALENDAR
"""
attendee2_update_data = """BEGIN:VCALENDAR
CALSCALE:GREGORIAN
PRODID:-//Example Inc.//Example Calendar//EN
VERSION:2.0
BEGIN:VEVENT
DTSTAMP:20051222T205953Z
CREATED:20060101T150000Z
DTSTART:20140101T100000Z
DURATION:PT1H
SUMMARY:event 1
UID:event1@ninevah.local
ORGANIZER;SCHEDULE-AGENT=CLIENT:urn:x-uid:user01
ATTENDEE:urn:x-uid:user01
ATTENDEE:urn:x-uid:user03
RRULE:FREQ=DAILY
END:VEVENT
BEGIN:VEVENT
DTSTAMP:20051222T205953Z
CREATED:20060101T150000Z
RECURRENCE-ID:20140102T100000Z
DTSTART:20140102T100000Z
DURATION:PT1H
SUMMARY:event 1
UID:event1@ninevah.local
ORGANIZER;SCHEDULE-AGENT=SERVER:urn:x-uid:user01
ATTENDEE:urn:x-uid:user01
ATTENDEE;PARTSTAT=ACCEPTED:urn:x-uid:user02
ATTENDEE:urn:x-uid:user03
END:VEVENT
END:VCALENDAR
"""
attendee3_data = """BEGIN:VCALENDAR
CALSCALE:GREGORIAN
PRODID:-//Example Inc.//Example Calendar//EN
VERSION:2.0
BEGIN:VEVENT
DTSTAMP:20051222T205953Z
CREATED:20060101T150000Z
DTSTART:20140101T100000Z
DURATION:PT1H
SUMMARY:event 1
UID:event1@ninevah.local
ORGANIZER:urn:x-uid:user01
ATTENDEE:urn:x-uid:user01
ATTENDEE:urn:x-uid:user03
RRULE:FREQ=DAILY
END:VEVENT
BEGIN:VEVENT
DTSTAMP:20051222T205953Z
CREATED:20060101T150000Z
RECURRENCE-ID:20140102T100000Z
DTSTART:20140102T100000Z
DURATION:PT1H
SUMMARY:event 1
UID:event1@ninevah.local
ORGANIZER:urn:x-uid:user01
ATTENDEE:urn:x-uid:user01
ATTENDEE:urn:x-uid:user02
ATTENDEE:urn:x-uid:user03
END:VEVENT
END:VCALENDAR
"""
@inlineCallbacks
def test_doImplicitScheduling(self):
"""
Test that doImplicitScheduling fixes an inconsistent schedule-agent state when an
attendee stores their data.
"""
cobj = yield self.calendarObjectUnderTest(home="user02", name="attendee2.ics")
yield cobj.setComponent(Component.fromString(self.attendee2_update_data))
yield self.commit()
cobj = yield self.calendarObjectUnderTest(home="user02", name="attendee2.ics")
comp = yield cobj.component()
self.assertTrue(comp.masterComponent() is None)
self.assertTrue(comp.getOrganizerScheduleAgent())
inbox = yield self.calendarUnderTest(home="user01", name="inbox")
cobjs = yield inbox.calendarObjects()
self.assertTrue(len(cobjs) == 1)
class MissingOrganizerFix(ScheduleAgentFixBase):
"""
Test that an attendee with a copy of an event without any organizer or attendee
properties is corrected when the organizer updates.
"""
organizer_data = """BEGIN:VCALENDAR
CALSCALE:GREGORIAN
PRODID:-//Example Inc.//Example Calendar//EN
VERSION:2.0
BEGIN:VEVENT
DTSTAMP:20051222T205953Z
CREATED:20060101T150000Z
DTSTART:20140101T100000Z
DURATION:PT1H
SUMMARY:event 1
UID:event1@ninevah.local
ORGANIZER:urn:x-uid:user01
ATTENDEE:urn:x-uid:user01
ATTENDEE:urn:x-uid:user03
END:VEVENT
END:VCALENDAR
"""
organizer_update_data = """BEGIN:VCALENDAR
CALSCALE:GREGORIAN
PRODID:-//Example Inc.//Example Calendar//EN
VERSION:2.0
BEGIN:VEVENT
DTSTAMP:20051222T205953Z
CREATED:20060101T150000Z
DTSTART:20140101T100000Z
DURATION:PT1H
SUMMARY:event 1
UID:event1@ninevah.local
ORGANIZER:urn:x-uid:user01
ATTENDEE:urn:x-uid:user01
ATTENDEE:urn:x-uid:user02
ATTENDEE:urn:x-uid:user03
END:VEVENT
END:VCALENDAR
"""
attendee2_data = """BEGIN:VCALENDAR
CALSCALE:GREGORIAN
PRODID:-//Example Inc.//Example Calendar//EN
VERSION:2.0
BEGIN:VEVENT
DTSTAMP:20051222T205953Z
CREATED:20060101T150000Z
DTSTART:20140101T100000Z
DURATION:PT1H
SUMMARY:event 1
UID:event1@ninevah.local
END:VEVENT
END:VCALENDAR
"""
attendee3_data = """BEGIN:VCALENDAR
CALSCALE:GREGORIAN
PRODID:-//Example Inc.//Example Calendar//EN
VERSION:2.0
BEGIN:VEVENT
DTSTAMP:20051222T205953Z
CREATED:20060101T150000Z
DTSTART:20140101T100000Z
DURATION:PT1H
SUMMARY:event 1
UID:event1@ninevah.local
ORGANIZER:urn:x-uid:user01
ATTENDEE:urn:x-uid:user01
ATTENDEE:urn:x-uid:user03
END:VEVENT
END:VCALENDAR
"""
@inlineCallbacks
def test_doImplicitScheduling(self):
"""
Test that when the organizer updates the event, the attendee's broken copy
(missing its ORGANIZER property) is set aside and a fresh copy is delivered.
"""
cobj = yield self.calendarObjectUnderTest(home="user02", name="attendee2.ics")
comp = yield cobj.component()
self.assertTrue(comp.getOrganizer() is None)
yield self.commit()
cobj = yield self.calendarObjectUnderTest(home="user01", name="organizer.ics")
yield cobj.setComponent(Component.fromString(self.organizer_update_data))
yield self.commit()
cal = yield self.calendarUnderTest(home="user02")
cobjs = yield cal.calendarObjects()
self.assertTrue(len(cobjs) == 2)
for cobj in cobjs:
comp = yield cobj.component()
if comp.resourceUID() == "event1@ninevah.local":
self.assertTrue(comp.getOrganizer() is not None)
else:
self.assertTrue(comp.getOrganizer() is None)
inbox = yield self.calendarUnderTest(home="user02", name="inbox")
cobjs = yield inbox.calendarObjects()
self.assertTrue(len(cobjs) == 1)
|
kamotos/sendprism
|
refs/heads/master
|
docs/conf.py
|
1
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# sendprism documentation build configuration file, created by
# sphinx-quickstart on Tue Jul 9 22:26:36 2013.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another
# directory, add these directories to sys.path here. If the directory is
# relative to the documentation root, use os.path.abspath to make it
# absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# Get the project root dir, which is the parent dir of this docs directory.
cwd = os.getcwd()
project_root = os.path.dirname(cwd)
# Insert the project root dir as the first element in the PYTHONPATH.
# This lets us ensure that the source package is imported, and that its
# version is used.
sys.path.insert(0, project_root)
import sendprism
# -- General configuration ---------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Sendprism'
copyright = u'2015, Anass Zahim'
# The version info for the project you're documenting, acts as replacement
# for |version| and |release|, also used in various other places throughout
# the built documents.
#
# The short X.Y version.
version = sendprism.__version__
# The full version, including alpha/beta/rc tags.
release = sendprism.__version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to
# some non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built
# documents.
#keep_warnings = False
# -- Options for HTML output -------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a
# theme further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as
# html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the
# top of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon
# of the docs. This file should be a Windows icon file (.ico) being
# 16x16 or 32x32 pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets)
# here, relative to this directory. They are copied after the builtin
# static files, so a file named "default.css" will overwrite the builtin
# "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page
# bottom, using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names
# to template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer.
# Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer.
# Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages
# will contain a <link> tag referring to it. The value of this option
# must be the base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'sendprismdoc'
# -- Options for LaTeX output ------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass
# [howto/manual]).
latex_documents = [
('index', 'sendprism.tex',
u'Sendprism Documentation',
u'Anass Zahim', 'manual'),
]
# The name of an image file (relative to this directory) to place at
# the top of the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings
# are parts, not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'sendprism',
u'Sendprism Documentation',
[u'Anass Zahim'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ----------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'sendprism',
u'Sendprism Documentation',
u'Anass Zahim',
'sendprism',
'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
|
GhostThrone/django
|
refs/heads/master
|
tests/migrations/test_operations.py
|
105
|
from __future__ import unicode_literals
import unittest
from django.db import connection, migrations, models, transaction
from django.db.migrations.migration import Migration
from django.db.migrations.state import ProjectState
from django.db.models.fields import NOT_PROVIDED
from django.db.transaction import atomic
from django.db.utils import IntegrityError
from django.test import override_settings, skipUnlessDBFeature
from django.utils import six
from .models import FoodManager, FoodQuerySet
from .test_base import MigrationTestBase
try:
import sqlparse
except ImportError:
sqlparse = None
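# sqlparse is optional here; tests that depend on it should check for None
# and skip themselves when it is not installed.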
class OperationTestBase(MigrationTestBase):
"""
Common functions to help test operations.
"""
def apply_operations(self, app_label, project_state, operations):
migration = Migration('name', app_label)
migration.operations = operations
with connection.schema_editor() as editor:
return migration.apply(project_state, editor)
def unapply_operations(self, app_label, project_state, operations):
migration = Migration('name', app_label)
migration.operations = operations
with connection.schema_editor() as editor:
return migration.unapply(project_state, editor)
def make_test_state(self, app_label, operation, **kwargs):
"""
Makes a test state using set_up_test_model and returns the
original state and the state after the migration is applied.
"""
project_state = self.set_up_test_model(app_label, **kwargs)
new_state = project_state.clone()
operation.state_forwards(app_label, new_state)
return project_state, new_state
def set_up_test_model(self, app_label, second_model=False, third_model=False,
related_model=False, mti_model=False, proxy_model=False, manager_model=False,
unique_together=False, options=False, db_table=None, index_together=False):
"""
Creates a test model state and database table.
"""
# Delete the tables if they already exist
table_names = [
# Start with ManyToMany tables
'_pony_stables', '_pony_vans',
# Then standard model tables
'_pony', '_stable', '_van',
]
tables = [(app_label + table_name) for table_name in table_names]
with connection.cursor() as cursor:
table_names = connection.introspection.table_names(cursor)
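# Raw DROP TABLE statements may violate foreign key ordering, so suspend
# constraint checking while leftover tables are removed.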
connection.disable_constraint_checking()
sql_delete_table = connection.schema_editor().sql_delete_table
with transaction.atomic():
for table in tables:
if table in table_names:
cursor.execute(sql_delete_table % {
"table": connection.ops.quote_name(table),
})
connection.enable_constraint_checking()
# Make the "current" state
model_options = {
"swappable": "TEST_SWAP_MODEL",
"index_together": [["weight", "pink"]] if index_together else [],
"unique_together": [["pink", "weight"]] if unique_together else [],
}
if options:
model_options["permissions"] = [("can_groom", "Can groom")]
if db_table:
model_options["db_table"] = db_table
operations = [migrations.CreateModel(
"Pony",
[
("id", models.AutoField(primary_key=True)),
("pink", models.IntegerField(default=3)),
("weight", models.FloatField()),
],
options=model_options,
)]
if second_model:
operations.append(migrations.CreateModel(
"Stable",
[
("id", models.AutoField(primary_key=True)),
]
))
if third_model:
operations.append(migrations.CreateModel(
"Van",
[
("id", models.AutoField(primary_key=True)),
]
))
if related_model:
operations.append(migrations.CreateModel(
"Rider",
[
("id", models.AutoField(primary_key=True)),
("pony", models.ForeignKey("Pony", models.CASCADE)),
("friend", models.ForeignKey("self", models.CASCADE))
],
))
if mti_model:
operations.append(migrations.CreateModel(
"ShetlandPony",
fields=[
('pony_ptr', models.OneToOneField(
'Pony',
models.CASCADE,
auto_created=True,
primary_key=True,
to_field='id',
serialize=False,
)),
("cuteness", models.IntegerField(default=1)),
],
bases=['%s.Pony' % app_label],
))
if proxy_model:
operations.append(migrations.CreateModel(
"ProxyPony",
fields=[],
options={"proxy": True},
bases=['%s.Pony' % app_label],
))
if manager_model:
operations.append(migrations.CreateModel(
"Food",
fields=[
("id", models.AutoField(primary_key=True)),
],
managers=[
("food_qs", FoodQuerySet.as_manager()),
("food_mgr", FoodManager("a", "b")),
("food_mgr_kwargs", FoodManager("x", "y", 3, 4)),
]
))
return self.apply_operations(app_label, ProjectState(), operations)
class OperationTests(OperationTestBase):
"""
Tests running the operations and making sure they do what they say they do.
Each test checks the state change and then the database operation, both
forwards and backwards.
"""
def test_create_model(self):
"""
Tests the CreateModel operation.
Most other tests use this operation as part of setup, so check failures here first.
"""
operation = migrations.CreateModel(
"Pony",
[
("id", models.AutoField(primary_key=True)),
("pink", models.IntegerField(default=1)),
],
)
self.assertEqual(operation.describe(), "Create model Pony")
# Test the state alteration
project_state = ProjectState()
new_state = project_state.clone()
operation.state_forwards("test_crmo", new_state)
self.assertEqual(new_state.models["test_crmo", "pony"].name, "Pony")
self.assertEqual(len(new_state.models["test_crmo", "pony"].fields), 2)
# Test the database alteration
self.assertTableNotExists("test_crmo_pony")
with connection.schema_editor() as editor:
operation.database_forwards("test_crmo", editor, project_state, new_state)
self.assertTableExists("test_crmo_pony")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_crmo", editor, new_state, project_state)
self.assertTableNotExists("test_crmo_pony")
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "CreateModel")
self.assertEqual(definition[1], [])
self.assertEqual(sorted(definition[2].keys()), ["fields", "name"])
# And default manager not in set
operation = migrations.CreateModel("Foo", fields=[], managers=[("objects", models.Manager())])
definition = operation.deconstruct()
self.assertNotIn('managers', definition[2])
def test_create_model_with_unique_after(self):
"""
Tests the CreateModel operation directly followed by an
AlterUniqueTogether (bug #22844 - sqlite remake issues)
"""
operation1 = migrations.CreateModel(
"Pony",
[
("id", models.AutoField(primary_key=True)),
("pink", models.IntegerField(default=1)),
],
)
operation2 = migrations.CreateModel(
"Rider",
[
("id", models.AutoField(primary_key=True)),
("number", models.IntegerField(default=1)),
("pony", models.ForeignKey("test_crmoua.Pony", models.CASCADE)),
],
)
operation3 = migrations.AlterUniqueTogether(
"Rider",
[
("number", "pony"),
],
)
# Test the database alteration
project_state = ProjectState()
self.assertTableNotExists("test_crmoua_pony")
self.assertTableNotExists("test_crmoua_rider")
with connection.schema_editor() as editor:
new_state = project_state.clone()
operation1.state_forwards("test_crmoua", new_state)
operation1.database_forwards("test_crmoua", editor, project_state, new_state)
project_state, new_state = new_state, new_state.clone()
operation2.state_forwards("test_crmoua", new_state)
operation2.database_forwards("test_crmoua", editor, project_state, new_state)
project_state, new_state = new_state, new_state.clone()
operation3.state_forwards("test_crmoua", new_state)
operation3.database_forwards("test_crmoua", editor, project_state, new_state)
self.assertTableExists("test_crmoua_pony")
self.assertTableExists("test_crmoua_rider")
def test_create_model_m2m(self):
"""
Test the creation of a model with a ManyToMany field and the
auto-created "through" model.
"""
project_state = self.set_up_test_model("test_crmomm")
operation = migrations.CreateModel(
"Stable",
[
("id", models.AutoField(primary_key=True)),
("ponies", models.ManyToManyField("Pony", related_name="stables"))
]
)
# Test the state alteration
new_state = project_state.clone()
operation.state_forwards("test_crmomm", new_state)
# Test the database alteration
self.assertTableNotExists("test_crmomm_stable_ponies")
with connection.schema_editor() as editor:
operation.database_forwards("test_crmomm", editor, project_state, new_state)
self.assertTableExists("test_crmomm_stable")
self.assertTableExists("test_crmomm_stable_ponies")
self.assertColumnNotExists("test_crmomm_stable", "ponies")
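        # The M2M data lives entirely in the auto-created through table; no
        # column is added to either end of the relation.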
# Make sure the M2M field actually works
with atomic():
Pony = new_state.apps.get_model("test_crmomm", "Pony")
Stable = new_state.apps.get_model("test_crmomm", "Stable")
stable = Stable.objects.create()
p1 = Pony.objects.create(pink=False, weight=4.55)
p2 = Pony.objects.create(pink=True, weight=5.43)
stable.ponies.add(p1, p2)
self.assertEqual(stable.ponies.count(), 2)
stable.ponies.all().delete()
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_crmomm", editor, new_state, project_state)
self.assertTableNotExists("test_crmomm_stable")
self.assertTableNotExists("test_crmomm_stable_ponies")
def test_create_model_inheritance(self):
"""
Tests the CreateModel operation on a multi-table inheritance setup.
"""
project_state = self.set_up_test_model("test_crmoih")
# Test the state alteration
operation = migrations.CreateModel(
"ShetlandPony",
[
('pony_ptr', models.OneToOneField(
'test_crmoih.Pony',
models.CASCADE,
auto_created=True,
primary_key=True,
to_field='id',
serialize=False,
)),
("cuteness", models.IntegerField(default=1)),
],
)
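        # The explicit pony_ptr OneToOneField mirrors the parent link Django
        # auto-creates for multi-table inheritance: it serves as both the
        # primary key and the FK to the parent table.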
new_state = project_state.clone()
operation.state_forwards("test_crmoih", new_state)
self.assertIn(("test_crmoih", "shetlandpony"), new_state.models)
# Test the database alteration
self.assertTableNotExists("test_crmoih_shetlandpony")
with connection.schema_editor() as editor:
operation.database_forwards("test_crmoih", editor, project_state, new_state)
self.assertTableExists("test_crmoih_shetlandpony")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_crmoih", editor, new_state, project_state)
self.assertTableNotExists("test_crmoih_shetlandpony")
def test_create_proxy_model(self):
"""
        Tests that CreateModel for proxy models alters state but creates no table.
"""
project_state = self.set_up_test_model("test_crprmo")
# Test the state alteration
operation = migrations.CreateModel(
"ProxyPony",
[],
options={"proxy": True},
bases=("test_crprmo.Pony", ),
)
self.assertEqual(operation.describe(), "Create proxy model ProxyPony")
new_state = project_state.clone()
operation.state_forwards("test_crprmo", new_state)
self.assertIn(("test_crprmo", "proxypony"), new_state.models)
# Test the database alteration
self.assertTableNotExists("test_crprmo_proxypony")
self.assertTableExists("test_crprmo_pony")
with connection.schema_editor() as editor:
operation.database_forwards("test_crprmo", editor, project_state, new_state)
self.assertTableNotExists("test_crprmo_proxypony")
self.assertTableExists("test_crprmo_pony")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_crprmo", editor, new_state, project_state)
self.assertTableNotExists("test_crprmo_proxypony")
self.assertTableExists("test_crprmo_pony")
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "CreateModel")
self.assertEqual(definition[1], [])
self.assertEqual(sorted(definition[2].keys()), ["bases", "fields", "name", "options"])
def test_create_unmanaged_model(self):
"""
        Tests that CreateModel ignores unmanaged models at the database level.
"""
project_state = self.set_up_test_model("test_crummo")
# Test the state alteration
operation = migrations.CreateModel(
"UnmanagedPony",
[],
options={"proxy": True},
bases=("test_crummo.Pony", ),
)
self.assertEqual(operation.describe(), "Create proxy model UnmanagedPony")
new_state = project_state.clone()
operation.state_forwards("test_crummo", new_state)
self.assertIn(("test_crummo", "unmanagedpony"), new_state.models)
# Test the database alteration
self.assertTableNotExists("test_crummo_unmanagedpony")
self.assertTableExists("test_crummo_pony")
with connection.schema_editor() as editor:
operation.database_forwards("test_crummo", editor, project_state, new_state)
self.assertTableNotExists("test_crummo_unmanagedpony")
self.assertTableExists("test_crummo_pony")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_crummo", editor, new_state, project_state)
self.assertTableNotExists("test_crummo_unmanagedpony")
self.assertTableExists("test_crummo_pony")
def test_create_model_managers(self):
"""
Tests that the managers on a model are set.
"""
project_state = self.set_up_test_model("test_cmoma")
# Test the state alteration
operation = migrations.CreateModel(
"Food",
fields=[
("id", models.AutoField(primary_key=True)),
],
managers=[
("food_qs", FoodQuerySet.as_manager()),
("food_mgr", FoodManager("a", "b")),
("food_mgr_kwargs", FoodManager("x", "y", 3, 4)),
]
)
self.assertEqual(operation.describe(), "Create model Food")
new_state = project_state.clone()
operation.state_forwards("test_cmoma", new_state)
self.assertIn(("test_cmoma", "food"), new_state.models)
managers = new_state.models["test_cmoma", "food"].managers
self.assertEqual(managers[0][0], "food_qs")
self.assertIsInstance(managers[0][1], models.Manager)
self.assertEqual(managers[1][0], "food_mgr")
self.assertIsInstance(managers[1][1], FoodManager)
self.assertEqual(managers[1][1].args, ("a", "b", 1, 2))
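        # FoodManager("a", "b") deconstructs with its two default arguments
        # appended, hence the ("a", "b", 1, 2) checked above.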
self.assertEqual(managers[2][0], "food_mgr_kwargs")
self.assertIsInstance(managers[2][1], FoodManager)
self.assertEqual(managers[2][1].args, ("x", "y", 3, 4))
def test_delete_model(self):
"""
Tests the DeleteModel operation.
"""
project_state = self.set_up_test_model("test_dlmo")
# Test the state alteration
operation = migrations.DeleteModel("Pony")
self.assertEqual(operation.describe(), "Delete model Pony")
new_state = project_state.clone()
operation.state_forwards("test_dlmo", new_state)
self.assertNotIn(("test_dlmo", "pony"), new_state.models)
# Test the database alteration
self.assertTableExists("test_dlmo_pony")
with connection.schema_editor() as editor:
operation.database_forwards("test_dlmo", editor, project_state, new_state)
self.assertTableNotExists("test_dlmo_pony")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_dlmo", editor, new_state, project_state)
self.assertTableExists("test_dlmo_pony")
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "DeleteModel")
self.assertEqual(definition[1], [])
self.assertEqual(list(definition[2]), ["name"])
def test_delete_proxy_model(self):
"""
Tests the DeleteModel operation ignores proxy models.
"""
project_state = self.set_up_test_model("test_dlprmo", proxy_model=True)
# Test the state alteration
operation = migrations.DeleteModel("ProxyPony")
new_state = project_state.clone()
operation.state_forwards("test_dlprmo", new_state)
self.assertIn(("test_dlprmo", "proxypony"), project_state.models)
self.assertNotIn(("test_dlprmo", "proxypony"), new_state.models)
# Test the database alteration
self.assertTableExists("test_dlprmo_pony")
self.assertTableNotExists("test_dlprmo_proxypony")
with connection.schema_editor() as editor:
operation.database_forwards("test_dlprmo", editor, project_state, new_state)
self.assertTableExists("test_dlprmo_pony")
self.assertTableNotExists("test_dlprmo_proxypony")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_dlprmo", editor, new_state, project_state)
self.assertTableExists("test_dlprmo_pony")
self.assertTableNotExists("test_dlprmo_proxypony")
def test_rename_model(self):
"""
Tests the RenameModel operation.
"""
project_state = self.set_up_test_model("test_rnmo", related_model=True)
# Test the state alteration
operation = migrations.RenameModel("Pony", "Horse")
self.assertEqual(operation.describe(), "Rename model Pony to Horse")
# Test initial state and database
self.assertIn(("test_rnmo", "pony"), project_state.models)
self.assertNotIn(("test_rnmo", "horse"), project_state.models)
self.assertTableExists("test_rnmo_pony")
self.assertTableNotExists("test_rnmo_horse")
if connection.features.supports_foreign_keys:
self.assertFKExists("test_rnmo_rider", ["pony_id"], ("test_rnmo_pony", "id"))
self.assertFKNotExists("test_rnmo_rider", ["pony_id"], ("test_rnmo_horse", "id"))
# Migrate forwards
new_state = project_state.clone()
new_state = self.apply_operations("test_rnmo", new_state, [operation])
# Test new state and database
self.assertNotIn(("test_rnmo", "pony"), new_state.models)
self.assertIn(("test_rnmo", "horse"), new_state.models)
# RenameModel also repoints all incoming FKs and M2Ms
self.assertEqual("test_rnmo.Horse", new_state.models["test_rnmo", "rider"].fields[1][1].remote_field.model)
self.assertTableNotExists("test_rnmo_pony")
self.assertTableExists("test_rnmo_horse")
if connection.features.supports_foreign_keys:
self.assertFKNotExists("test_rnmo_rider", ["pony_id"], ("test_rnmo_pony", "id"))
self.assertFKExists("test_rnmo_rider", ["pony_id"], ("test_rnmo_horse", "id"))
# Migrate backwards
original_state = self.unapply_operations("test_rnmo", project_state, [operation])
# Test original state and database
self.assertIn(("test_rnmo", "pony"), original_state.models)
self.assertNotIn(("test_rnmo", "horse"), original_state.models)
self.assertEqual("Pony", original_state.models["test_rnmo", "rider"].fields[1][1].remote_field.model)
self.assertTableExists("test_rnmo_pony")
self.assertTableNotExists("test_rnmo_horse")
if connection.features.supports_foreign_keys:
self.assertFKExists("test_rnmo_rider", ["pony_id"], ("test_rnmo_pony", "id"))
self.assertFKNotExists("test_rnmo_rider", ["pony_id"], ("test_rnmo_horse", "id"))
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "RenameModel")
self.assertEqual(definition[1], [])
self.assertEqual(definition[2], {'old_name': "Pony", 'new_name': "Horse"})
def test_rename_model_with_self_referential_fk(self):
"""
        Tests the RenameModel operation on a model with a self-referential FK.
"""
project_state = self.set_up_test_model("test_rmwsrf", related_model=True)
# Test the state alteration
operation = migrations.RenameModel("Rider", "HorseRider")
self.assertEqual(operation.describe(), "Rename model Rider to HorseRider")
new_state = project_state.clone()
operation.state_forwards("test_rmwsrf", new_state)
self.assertNotIn(("test_rmwsrf", "rider"), new_state.models)
self.assertIn(("test_rmwsrf", "horserider"), new_state.models)
# Remember, RenameModel also repoints all incoming FKs and M2Ms
self.assertEqual(
"test_rmwsrf.HorseRider",
new_state.models["test_rmwsrf", "horserider"].fields[2][1].remote_field.model
)
# Test the database alteration
self.assertTableExists("test_rmwsrf_rider")
self.assertTableNotExists("test_rmwsrf_horserider")
if connection.features.supports_foreign_keys:
self.assertFKExists("test_rmwsrf_rider", ["friend_id"], ("test_rmwsrf_rider", "id"))
self.assertFKNotExists("test_rmwsrf_rider", ["friend_id"], ("test_rmwsrf_horserider", "id"))
with connection.schema_editor() as editor:
operation.database_forwards("test_rmwsrf", editor, project_state, new_state)
self.assertTableNotExists("test_rmwsrf_rider")
self.assertTableExists("test_rmwsrf_horserider")
if connection.features.supports_foreign_keys:
self.assertFKNotExists("test_rmwsrf_horserider", ["friend_id"], ("test_rmwsrf_rider", "id"))
self.assertFKExists("test_rmwsrf_horserider", ["friend_id"], ("test_rmwsrf_horserider", "id"))
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_rmwsrf", editor, new_state, project_state)
self.assertTableExists("test_rmwsrf_rider")
self.assertTableNotExists("test_rmwsrf_horserider")
if connection.features.supports_foreign_keys:
self.assertFKExists("test_rmwsrf_rider", ["friend_id"], ("test_rmwsrf_rider", "id"))
self.assertFKNotExists("test_rmwsrf_rider", ["friend_id"], ("test_rmwsrf_horserider", "id"))
def test_rename_model_with_superclass_fk(self):
"""
Tests the RenameModel operation on a model which has a superclass that
has a foreign key.
"""
project_state = self.set_up_test_model("test_rmwsc", related_model=True, mti_model=True)
# Test the state alteration
operation = migrations.RenameModel("ShetlandPony", "LittleHorse")
self.assertEqual(operation.describe(), "Rename model ShetlandPony to LittleHorse")
new_state = project_state.clone()
operation.state_forwards("test_rmwsc", new_state)
self.assertNotIn(("test_rmwsc", "shetlandpony"), new_state.models)
self.assertIn(("test_rmwsc", "littlehorse"), new_state.models)
# RenameModel shouldn't repoint the superclass's relations, only local ones
self.assertEqual(
project_state.models["test_rmwsc", "rider"].fields[1][1].remote_field.model,
new_state.models["test_rmwsc", "rider"].fields[1][1].remote_field.model
)
# Before running the migration we have a table for Shetland Pony, not Little Horse
self.assertTableExists("test_rmwsc_shetlandpony")
self.assertTableNotExists("test_rmwsc_littlehorse")
if connection.features.supports_foreign_keys:
# and the foreign key on rider points to pony, not shetland pony
self.assertFKExists("test_rmwsc_rider", ["pony_id"], ("test_rmwsc_pony", "id"))
self.assertFKNotExists("test_rmwsc_rider", ["pony_id"], ("test_rmwsc_shetlandpony", "id"))
with connection.schema_editor() as editor:
operation.database_forwards("test_rmwsc", editor, project_state, new_state)
# Now we have a little horse table, not shetland pony
self.assertTableNotExists("test_rmwsc_shetlandpony")
self.assertTableExists("test_rmwsc_littlehorse")
if connection.features.supports_foreign_keys:
            # but the foreign keys still point at pony, not little horse
self.assertFKExists("test_rmwsc_rider", ["pony_id"], ("test_rmwsc_pony", "id"))
self.assertFKNotExists("test_rmwsc_rider", ["pony_id"], ("test_rmwsc_littlehorse", "id"))
def test_rename_model_with_self_referential_m2m(self):
app_label = "test_rename_model_with_self_referential_m2m"
project_state = self.apply_operations(app_label, ProjectState(), operations=[
migrations.CreateModel("ReflexivePony", fields=[
("ponies", models.ManyToManyField("self")),
]),
])
project_state = self.apply_operations(app_label, project_state, operations=[
migrations.RenameModel("ReflexivePony", "ReflexivePony2"),
])
Pony = project_state.apps.get_model(app_label, "ReflexivePony2")
pony = Pony.objects.create()
pony.ponies.add(pony)
def test_rename_model_with_m2m(self):
app_label = "test_rename_model_with_m2m"
project_state = self.apply_operations(app_label, ProjectState(), operations=[
migrations.CreateModel("Rider", fields=[]),
migrations.CreateModel("Pony", fields=[
("riders", models.ManyToManyField("Rider")),
]),
])
Pony = project_state.apps.get_model(app_label, "Pony")
Rider = project_state.apps.get_model(app_label, "Rider")
pony = Pony.objects.create()
rider = Rider.objects.create()
pony.riders.add(rider)
project_state = self.apply_operations(app_label, project_state, operations=[
migrations.RenameModel("Pony", "Pony2"),
])
Pony = project_state.apps.get_model(app_label, "Pony2")
Rider = project_state.apps.get_model(app_label, "Rider")
pony = Pony.objects.create()
rider = Rider.objects.create()
pony.riders.add(rider)
self.assertEqual(Pony.objects.count(), 2)
self.assertEqual(Rider.objects.count(), 2)
self.assertEqual(Pony._meta.get_field('riders').remote_field.through.objects.count(), 2)
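        # One pony/rider pair was linked before the rename and one after, so
        # the rename must have preserved the existing through-table rows.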
def test_rename_m2m_target_model(self):
app_label = "test_rename_m2m_target_model"
project_state = self.apply_operations(app_label, ProjectState(), operations=[
migrations.CreateModel("Rider", fields=[]),
migrations.CreateModel("Pony", fields=[
("riders", models.ManyToManyField("Rider")),
]),
])
Pony = project_state.apps.get_model(app_label, "Pony")
Rider = project_state.apps.get_model(app_label, "Rider")
pony = Pony.objects.create()
rider = Rider.objects.create()
pony.riders.add(rider)
project_state = self.apply_operations(app_label, project_state, operations=[
migrations.RenameModel("Rider", "Rider2"),
])
Pony = project_state.apps.get_model(app_label, "Pony")
Rider = project_state.apps.get_model(app_label, "Rider2")
pony = Pony.objects.create()
rider = Rider.objects.create()
pony.riders.add(rider)
self.assertEqual(Pony.objects.count(), 2)
self.assertEqual(Rider.objects.count(), 2)
self.assertEqual(Pony._meta.get_field('riders').remote_field.through.objects.count(), 2)
def test_add_field(self):
"""
Tests the AddField operation.
"""
# Test the state alteration
operation = migrations.AddField(
"Pony",
"height",
models.FloatField(null=True, default=5),
)
self.assertEqual(operation.describe(), "Add field height to Pony")
project_state, new_state = self.make_test_state("test_adfl", operation)
self.assertEqual(len(new_state.models["test_adfl", "pony"].fields), 4)
field = [
f for n, f in new_state.models["test_adfl", "pony"].fields
if n == "height"
][0]
self.assertEqual(field.default, 5)
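        # The declared default is kept in the project state; Django applies
        # defaults in Python, so the column itself need not carry one.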
# Test the database alteration
self.assertColumnNotExists("test_adfl_pony", "height")
with connection.schema_editor() as editor:
operation.database_forwards("test_adfl", editor, project_state, new_state)
self.assertColumnExists("test_adfl_pony", "height")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_adfl", editor, new_state, project_state)
self.assertColumnNotExists("test_adfl_pony", "height")
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "AddField")
self.assertEqual(definition[1], [])
self.assertEqual(sorted(definition[2]), ["field", "model_name", "name"])
def test_add_charfield(self):
"""
        Tests the AddField operation on CharField.
"""
project_state = self.set_up_test_model("test_adchfl")
Pony = project_state.apps.get_model("test_adchfl", "Pony")
pony = Pony.objects.create(weight=42)
new_state = self.apply_operations("test_adchfl", project_state, [
migrations.AddField(
"Pony",
"text",
models.CharField(max_length=10, default="some text"),
),
migrations.AddField(
"Pony",
"empty",
models.CharField(max_length=10, default=""),
),
            # If not properly quoted, digits would be interpreted as an int.
migrations.AddField(
"Pony",
"digits",
models.CharField(max_length=10, default="42"),
),
# Manual quoting is fragile and could trip on quotes. Refs #xyz.
migrations.AddField(
"Pony",
"quotes",
models.CharField(max_length=10, default='"\'"'),
),
])
Pony = new_state.apps.get_model("test_adchfl", "Pony")
pony = Pony.objects.get(pk=pony.pk)
self.assertEqual(pony.text, "some text")
self.assertEqual(pony.empty, "")
self.assertEqual(pony.digits, "42")
self.assertEqual(pony.quotes, '"\'"')
def test_add_textfield(self):
"""
Tests the AddField operation on TextField.
"""
project_state = self.set_up_test_model("test_adtxtfl")
Pony = project_state.apps.get_model("test_adtxtfl", "Pony")
pony = Pony.objects.create(weight=42)
new_state = self.apply_operations("test_adtxtfl", project_state, [
migrations.AddField(
"Pony",
"text",
models.TextField(default="some text"),
),
migrations.AddField(
"Pony",
"empty",
models.TextField(default=""),
),
            # If not properly quoted, digits would be interpreted as an int.
migrations.AddField(
"Pony",
"digits",
models.TextField(default="42"),
),
# Manual quoting is fragile and could trip on quotes. Refs #xyz.
migrations.AddField(
"Pony",
"quotes",
models.TextField(default='"\'"'),
),
])
Pony = new_state.apps.get_model("test_adtxtfl", "Pony")
pony = Pony.objects.get(pk=pony.pk)
self.assertEqual(pony.text, "some text")
self.assertEqual(pony.empty, "")
self.assertEqual(pony.digits, "42")
self.assertEqual(pony.quotes, '"\'"')
def test_add_binaryfield(self):
"""
        Tests the AddField operation on BinaryField.
"""
project_state = self.set_up_test_model("test_adbinfl")
Pony = project_state.apps.get_model("test_adbinfl", "Pony")
pony = Pony.objects.create(weight=42)
new_state = self.apply_operations("test_adbinfl", project_state, [
migrations.AddField(
"Pony",
"blob",
models.BinaryField(default=b"some text"),
),
migrations.AddField(
"Pony",
"empty",
models.BinaryField(default=b""),
),
            # If not properly quoted, digits would be interpreted as an int.
migrations.AddField(
"Pony",
"digits",
models.BinaryField(default=b"42"),
),
# Manual quoting is fragile and could trip on quotes. Refs #xyz.
migrations.AddField(
"Pony",
"quotes",
models.BinaryField(default=b'"\'"'),
),
])
Pony = new_state.apps.get_model("test_adbinfl", "Pony")
pony = Pony.objects.get(pk=pony.pk)
        # SQLite returns buffer/memoryview objects; cast to bytes for checking.
self.assertEqual(bytes(pony.blob), b"some text")
self.assertEqual(bytes(pony.empty), b"")
self.assertEqual(bytes(pony.digits), b"42")
self.assertEqual(bytes(pony.quotes), b'"\'"')
def test_column_name_quoting(self):
"""
Column names that are SQL keywords shouldn't cause problems when used
in migrations (#22168).
"""
project_state = self.set_up_test_model("test_regr22168")
operation = migrations.AddField(
"Pony",
"order",
models.IntegerField(default=0),
)
new_state = project_state.clone()
operation.state_forwards("test_regr22168", new_state)
with connection.schema_editor() as editor:
operation.database_forwards("test_regr22168", editor, project_state, new_state)
self.assertColumnExists("test_regr22168_pony", "order")
def test_add_field_preserve_default(self):
"""
Tests the AddField operation's state alteration
when preserve_default = False.
"""
project_state = self.set_up_test_model("test_adflpd")
# Test the state alteration
operation = migrations.AddField(
"Pony",
"height",
models.FloatField(null=True, default=4),
preserve_default=False,
)
new_state = project_state.clone()
operation.state_forwards("test_adflpd", new_state)
self.assertEqual(len(new_state.models["test_adflpd", "pony"].fields), 4)
field = [
f for n, f in new_state.models["test_adflpd", "pony"].fields
if n == "height"
][0]
self.assertEqual(field.default, NOT_PROVIDED)
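        # With preserve_default=False the default (4) is only used to
        # backfill existing rows during the migration and is then dropped,
        # so the resulting state records NOT_PROVIDED.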
# Test the database alteration
project_state.apps.get_model("test_adflpd", "pony").objects.create(
weight=4,
)
self.assertColumnNotExists("test_adflpd_pony", "height")
with connection.schema_editor() as editor:
operation.database_forwards("test_adflpd", editor, project_state, new_state)
self.assertColumnExists("test_adflpd_pony", "height")
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "AddField")
self.assertEqual(definition[1], [])
self.assertEqual(sorted(definition[2]), ["field", "model_name", "name", "preserve_default"])
def test_add_field_m2m(self):
"""
Tests the AddField operation with a ManyToManyField.
"""
project_state = self.set_up_test_model("test_adflmm", second_model=True)
# Test the state alteration
operation = migrations.AddField("Pony", "stables", models.ManyToManyField("Stable", related_name="ponies"))
new_state = project_state.clone()
operation.state_forwards("test_adflmm", new_state)
self.assertEqual(len(new_state.models["test_adflmm", "pony"].fields), 4)
# Test the database alteration
self.assertTableNotExists("test_adflmm_pony_stables")
with connection.schema_editor() as editor:
operation.database_forwards("test_adflmm", editor, project_state, new_state)
self.assertTableExists("test_adflmm_pony_stables")
self.assertColumnNotExists("test_adflmm_pony", "stables")
# Make sure the M2M field actually works
with atomic():
Pony = new_state.apps.get_model("test_adflmm", "Pony")
p = Pony.objects.create(pink=False, weight=4.55)
p.stables.create()
self.assertEqual(p.stables.count(), 1)
p.stables.all().delete()
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_adflmm", editor, new_state, project_state)
self.assertTableNotExists("test_adflmm_pony_stables")
def test_alter_field_m2m(self):
project_state = self.set_up_test_model("test_alflmm", second_model=True)
project_state = self.apply_operations("test_alflmm", project_state, operations=[
migrations.AddField("Pony", "stables", models.ManyToManyField("Stable", related_name="ponies"))
])
Pony = project_state.apps.get_model("test_alflmm", "Pony")
self.assertFalse(Pony._meta.get_field('stables').blank)
project_state = self.apply_operations("test_alflmm", project_state, operations=[
migrations.AlterField(
"Pony", "stables", models.ManyToManyField(to="Stable", related_name="ponies", blank=True)
)
])
Pony = project_state.apps.get_model("test_alflmm", "Pony")
self.assertTrue(Pony._meta.get_field('stables').blank)
def test_repoint_field_m2m(self):
project_state = self.set_up_test_model("test_alflmm", second_model=True, third_model=True)
project_state = self.apply_operations("test_alflmm", project_state, operations=[
migrations.AddField("Pony", "places", models.ManyToManyField("Stable", related_name="ponies"))
])
Pony = project_state.apps.get_model("test_alflmm", "Pony")
project_state = self.apply_operations("test_alflmm", project_state, operations=[
migrations.AlterField("Pony", "places", models.ManyToManyField(to="Van", related_name="ponies"))
])
# Ensure the new field actually works
Pony = project_state.apps.get_model("test_alflmm", "Pony")
p = Pony.objects.create(pink=False, weight=4.55)
p.places.create()
self.assertEqual(p.places.count(), 1)
p.places.all().delete()
def test_remove_field_m2m(self):
project_state = self.set_up_test_model("test_rmflmm", second_model=True)
project_state = self.apply_operations("test_rmflmm", project_state, operations=[
migrations.AddField("Pony", "stables", models.ManyToManyField("Stable", related_name="ponies"))
])
self.assertTableExists("test_rmflmm_pony_stables")
with_field_state = project_state.clone()
operations = [migrations.RemoveField("Pony", "stables")]
project_state = self.apply_operations("test_rmflmm", project_state, operations=operations)
self.assertTableNotExists("test_rmflmm_pony_stables")
# And test reversal
self.unapply_operations("test_rmflmm", with_field_state, operations=operations)
self.assertTableExists("test_rmflmm_pony_stables")
def test_remove_field_m2m_with_through(self):
project_state = self.set_up_test_model("test_rmflmmwt", second_model=True)
self.assertTableNotExists("test_rmflmmwt_ponystables")
project_state = self.apply_operations("test_rmflmmwt", project_state, operations=[
migrations.CreateModel("PonyStables", fields=[
("pony", models.ForeignKey('test_rmflmmwt.Pony', models.CASCADE)),
("stable", models.ForeignKey('test_rmflmmwt.Stable', models.CASCADE)),
]),
migrations.AddField(
"Pony", "stables",
models.ManyToManyField("Stable", related_name="ponies", through='test_rmflmmwt.PonyStables')
)
])
self.assertTableExists("test_rmflmmwt_ponystables")
operations = [migrations.RemoveField("Pony", "stables")]
self.apply_operations("test_rmflmmwt", project_state, operations=operations)
def test_remove_field(self):
"""
Tests the RemoveField operation.
"""
project_state = self.set_up_test_model("test_rmfl")
# Test the state alteration
operation = migrations.RemoveField("Pony", "pink")
self.assertEqual(operation.describe(), "Remove field pink from Pony")
new_state = project_state.clone()
operation.state_forwards("test_rmfl", new_state)
self.assertEqual(len(new_state.models["test_rmfl", "pony"].fields), 2)
# Test the database alteration
self.assertColumnExists("test_rmfl_pony", "pink")
with connection.schema_editor() as editor:
operation.database_forwards("test_rmfl", editor, project_state, new_state)
self.assertColumnNotExists("test_rmfl_pony", "pink")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_rmfl", editor, new_state, project_state)
self.assertColumnExists("test_rmfl_pony", "pink")
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "RemoveField")
self.assertEqual(definition[1], [])
self.assertEqual(definition[2], {'model_name': "Pony", 'name': 'pink'})
def test_remove_fk(self):
"""
Tests the RemoveField operation on a foreign key.
"""
project_state = self.set_up_test_model("test_rfk", related_model=True)
self.assertColumnExists("test_rfk_rider", "pony_id")
operation = migrations.RemoveField("Rider", "pony")
new_state = project_state.clone()
operation.state_forwards("test_rfk", new_state)
with connection.schema_editor() as editor:
operation.database_forwards("test_rfk", editor, project_state, new_state)
self.assertColumnNotExists("test_rfk_rider", "pony_id")
with connection.schema_editor() as editor:
operation.database_backwards("test_rfk", editor, new_state, project_state)
self.assertColumnExists("test_rfk_rider", "pony_id")
def test_alter_model_table(self):
"""
Tests the AlterModelTable operation.
"""
project_state = self.set_up_test_model("test_almota")
# Test the state alteration
operation = migrations.AlterModelTable("Pony", "test_almota_pony_2")
self.assertEqual(operation.describe(), "Rename table for Pony to test_almota_pony_2")
new_state = project_state.clone()
operation.state_forwards("test_almota", new_state)
self.assertEqual(new_state.models["test_almota", "pony"].options["db_table"], "test_almota_pony_2")
# Test the database alteration
self.assertTableExists("test_almota_pony")
self.assertTableNotExists("test_almota_pony_2")
with connection.schema_editor() as editor:
operation.database_forwards("test_almota", editor, project_state, new_state)
self.assertTableNotExists("test_almota_pony")
self.assertTableExists("test_almota_pony_2")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_almota", editor, new_state, project_state)
self.assertTableExists("test_almota_pony")
self.assertTableNotExists("test_almota_pony_2")
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "AlterModelTable")
self.assertEqual(definition[1], [])
self.assertEqual(definition[2], {'name': "Pony", 'table': "test_almota_pony_2"})
def test_alter_model_table_noop(self):
"""
Tests the AlterModelTable operation if the table name is not changed.
"""
project_state = self.set_up_test_model("test_almota")
# Test the state alteration
operation = migrations.AlterModelTable("Pony", "test_almota_pony")
new_state = project_state.clone()
operation.state_forwards("test_almota", new_state)
self.assertEqual(new_state.models["test_almota", "pony"].options["db_table"], "test_almota_pony")
# Test the database alteration
self.assertTableExists("test_almota_pony")
with connection.schema_editor() as editor:
operation.database_forwards("test_almota", editor, project_state, new_state)
self.assertTableExists("test_almota_pony")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_almota", editor, new_state, project_state)
self.assertTableExists("test_almota_pony")
def test_alter_model_table_m2m(self):
"""
AlterModelTable should rename auto-generated M2M tables.
"""
app_label = "test_talflmltlm2m"
pony_db_table = 'pony_foo'
project_state = self.set_up_test_model(app_label, second_model=True, db_table=pony_db_table)
# Add the M2M field
first_state = project_state.clone()
operation = migrations.AddField("Pony", "stables", models.ManyToManyField("Stable"))
operation.state_forwards(app_label, first_state)
with connection.schema_editor() as editor:
operation.database_forwards(app_label, editor, project_state, first_state)
original_m2m_table = "%s_%s" % (pony_db_table, "stables")
new_m2m_table = "%s_%s" % (app_label, "pony_stables")
self.assertTableExists(original_m2m_table)
self.assertTableNotExists(new_m2m_table)
# Rename the Pony db_table which should also rename the m2m table.
second_state = first_state.clone()
operation = migrations.AlterModelTable(name='pony', table=None)
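        # table=None reverts the model to its default table name,
        # "<app_label>_<model_name>".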
operation.state_forwards(app_label, second_state)
with connection.schema_editor() as editor:
operation.database_forwards(app_label, editor, first_state, second_state)
self.assertTableExists(new_m2m_table)
self.assertTableNotExists(original_m2m_table)
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards(app_label, editor, second_state, first_state)
self.assertTableExists(original_m2m_table)
self.assertTableNotExists(new_m2m_table)
def test_alter_field(self):
"""
Tests the AlterField operation.
"""
project_state = self.set_up_test_model("test_alfl")
# Test the state alteration
operation = migrations.AlterField("Pony", "pink", models.IntegerField(null=True))
self.assertEqual(operation.describe(), "Alter field pink on Pony")
new_state = project_state.clone()
operation.state_forwards("test_alfl", new_state)
self.assertEqual(project_state.models["test_alfl", "pony"].get_field_by_name("pink").null, False)
self.assertEqual(new_state.models["test_alfl", "pony"].get_field_by_name("pink").null, True)
# Test the database alteration
self.assertColumnNotNull("test_alfl_pony", "pink")
with connection.schema_editor() as editor:
operation.database_forwards("test_alfl", editor, project_state, new_state)
self.assertColumnNull("test_alfl_pony", "pink")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_alfl", editor, new_state, project_state)
self.assertColumnNotNull("test_alfl_pony", "pink")
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "AlterField")
self.assertEqual(definition[1], [])
self.assertEqual(sorted(definition[2]), ["field", "model_name", "name"])
def test_alter_field_pk(self):
"""
        Tests the AlterField operation on primary keys (for things like PostgreSQL's SERIAL weirdness).
"""
project_state = self.set_up_test_model("test_alflpk")
# Test the state alteration
operation = migrations.AlterField("Pony", "id", models.IntegerField(primary_key=True))
new_state = project_state.clone()
operation.state_forwards("test_alflpk", new_state)
self.assertIsInstance(project_state.models["test_alflpk", "pony"].get_field_by_name("id"), models.AutoField)
self.assertIsInstance(new_state.models["test_alflpk", "pony"].get_field_by_name("id"), models.IntegerField)
# Test the database alteration
with connection.schema_editor() as editor:
operation.database_forwards("test_alflpk", editor, project_state, new_state)
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_alflpk", editor, new_state, project_state)
@skipUnlessDBFeature('supports_foreign_keys')
def test_alter_field_pk_fk(self):
"""
        Tests that the AlterField operation on primary keys changes any FKs pointing to it.
"""
project_state = self.set_up_test_model("test_alflpkfk", related_model=True)
# Test the state alteration
operation = migrations.AlterField("Pony", "id", models.FloatField(primary_key=True))
new_state = project_state.clone()
operation.state_forwards("test_alflpkfk", new_state)
self.assertIsInstance(project_state.models["test_alflpkfk", "pony"].get_field_by_name("id"), models.AutoField)
self.assertIsInstance(new_state.models["test_alflpkfk", "pony"].get_field_by_name("id"), models.FloatField)
def assertIdTypeEqualsFkType():
with connection.cursor() as cursor:
id_type, id_null = [
(c.type_code, c.null_ok)
for c in connection.introspection.get_table_description(cursor, "test_alflpkfk_pony")
if c.name == "id"
][0]
fk_type, fk_null = [
(c.type_code, c.null_ok)
for c in connection.introspection.get_table_description(cursor, "test_alflpkfk_rider")
if c.name == "pony_id"
][0]
self.assertEqual(id_type, fk_type)
self.assertEqual(id_null, fk_null)
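        # The FK column's type must track the PK's type: checked before the
        # alteration, after it, and again after reversal.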
assertIdTypeEqualsFkType()
# Test the database alteration
with connection.schema_editor() as editor:
operation.database_forwards("test_alflpkfk", editor, project_state, new_state)
assertIdTypeEqualsFkType()
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_alflpkfk", editor, new_state, project_state)
assertIdTypeEqualsFkType()
def test_rename_field(self):
"""
Tests the RenameField operation.
"""
project_state = self.set_up_test_model("test_rnfl", unique_together=True, index_together=True)
# Test the state alteration
operation = migrations.RenameField("Pony", "pink", "blue")
self.assertEqual(operation.describe(), "Rename field pink on Pony to blue")
new_state = project_state.clone()
operation.state_forwards("test_rnfl", new_state)
self.assertIn("blue", [n for n, f in new_state.models["test_rnfl", "pony"].fields])
self.assertNotIn("pink", [n for n, f in new_state.models["test_rnfl", "pony"].fields])
# Make sure the unique_together has the renamed column too
self.assertIn("blue", new_state.models["test_rnfl", "pony"].options['unique_together'][0])
self.assertNotIn("pink", new_state.models["test_rnfl", "pony"].options['unique_together'][0])
# Make sure the index_together has the renamed column too
self.assertIn("blue", new_state.models["test_rnfl", "pony"].options['index_together'][0])
self.assertNotIn("pink", new_state.models["test_rnfl", "pony"].options['index_together'][0])
# Test the database alteration
self.assertColumnExists("test_rnfl_pony", "pink")
self.assertColumnNotExists("test_rnfl_pony", "blue")
with connection.schema_editor() as editor:
operation.database_forwards("test_rnfl", editor, project_state, new_state)
self.assertColumnExists("test_rnfl_pony", "blue")
self.assertColumnNotExists("test_rnfl_pony", "pink")
# Ensure the unique constraint has been ported over
with connection.cursor() as cursor:
cursor.execute("INSERT INTO test_rnfl_pony (blue, weight) VALUES (1, 1)")
with self.assertRaises(IntegrityError):
with atomic():
cursor.execute("INSERT INTO test_rnfl_pony (blue, weight) VALUES (1, 1)")
cursor.execute("DELETE FROM test_rnfl_pony")
# Ensure the index constraint has been ported over
self.assertIndexExists("test_rnfl_pony", ["weight", "blue"])
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_rnfl", editor, new_state, project_state)
self.assertColumnExists("test_rnfl_pony", "pink")
self.assertColumnNotExists("test_rnfl_pony", "blue")
# Ensure the index constraint has been reset
self.assertIndexExists("test_rnfl_pony", ["weight", "pink"])
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "RenameField")
self.assertEqual(definition[1], [])
self.assertEqual(definition[2], {'model_name': "Pony", 'old_name': "pink", 'new_name': "blue"})
def test_alter_unique_together(self):
"""
Tests the AlterUniqueTogether operation.
"""
project_state = self.set_up_test_model("test_alunto")
# Test the state alteration
operation = migrations.AlterUniqueTogether("Pony", [("pink", "weight")])
self.assertEqual(operation.describe(), "Alter unique_together for Pony (1 constraint(s))")
new_state = project_state.clone()
operation.state_forwards("test_alunto", new_state)
self.assertEqual(len(project_state.models["test_alunto", "pony"].options.get("unique_together", set())), 0)
self.assertEqual(len(new_state.models["test_alunto", "pony"].options.get("unique_together", set())), 1)
# Make sure we can insert duplicate rows
with connection.cursor() as cursor:
cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)")
cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)")
cursor.execute("DELETE FROM test_alunto_pony")
# Test the database alteration
with connection.schema_editor() as editor:
operation.database_forwards("test_alunto", editor, project_state, new_state)
cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)")
with self.assertRaises(IntegrityError):
with atomic():
cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)")
cursor.execute("DELETE FROM test_alunto_pony")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_alunto", editor, new_state, project_state)
cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)")
cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)")
cursor.execute("DELETE FROM test_alunto_pony")
# Test flat unique_together
operation = migrations.AlterUniqueTogether("Pony", ("pink", "weight"))
operation.state_forwards("test_alunto", new_state)
self.assertEqual(len(new_state.models["test_alunto", "pony"].options.get("unique_together", set())), 1)
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "AlterUniqueTogether")
self.assertEqual(definition[1], [])
self.assertEqual(definition[2], {'name': "Pony", 'unique_together': {("pink", "weight")}})
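        # unique_together is normalized to a set of tuples on deconstruction,
        # whatever the original input type was.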
def test_alter_unique_together_remove(self):
operation = migrations.AlterUniqueTogether("Pony", None)
self.assertEqual(operation.describe(), "Alter unique_together for Pony (0 constraint(s))")
def test_alter_index_together(self):
"""
Tests the AlterIndexTogether operation.
"""
project_state = self.set_up_test_model("test_alinto")
# Test the state alteration
operation = migrations.AlterIndexTogether("Pony", [("pink", "weight")])
self.assertEqual(operation.describe(), "Alter index_together for Pony (1 constraint(s))")
new_state = project_state.clone()
operation.state_forwards("test_alinto", new_state)
self.assertEqual(len(project_state.models["test_alinto", "pony"].options.get("index_together", set())), 0)
self.assertEqual(len(new_state.models["test_alinto", "pony"].options.get("index_together", set())), 1)
# Make sure there's no matching index
self.assertIndexNotExists("test_alinto_pony", ["pink", "weight"])
# Test the database alteration
with connection.schema_editor() as editor:
operation.database_forwards("test_alinto", editor, project_state, new_state)
self.assertIndexExists("test_alinto_pony", ["pink", "weight"])
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_alinto", editor, new_state, project_state)
self.assertIndexNotExists("test_alinto_pony", ["pink", "weight"])
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "AlterIndexTogether")
self.assertEqual(definition[1], [])
self.assertEqual(definition[2], {'name': "Pony", 'index_together': {("pink", "weight")}})
def test_alter_index_together_remove(self):
operation = migrations.AlterIndexTogether("Pony", None)
self.assertEqual(operation.describe(), "Alter index_together for Pony (0 constraint(s))")
def test_alter_model_options(self):
"""
Tests the AlterModelOptions operation.
"""
project_state = self.set_up_test_model("test_almoop")
# Test the state alteration (no DB alteration to test)
operation = migrations.AlterModelOptions("Pony", {"permissions": [("can_groom", "Can groom")]})
self.assertEqual(operation.describe(), "Change Meta options on Pony")
new_state = project_state.clone()
operation.state_forwards("test_almoop", new_state)
self.assertEqual(len(project_state.models["test_almoop", "pony"].options.get("permissions", [])), 0)
self.assertEqual(len(new_state.models["test_almoop", "pony"].options.get("permissions", [])), 1)
self.assertEqual(new_state.models["test_almoop", "pony"].options["permissions"][0][0], "can_groom")
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "AlterModelOptions")
self.assertEqual(definition[1], [])
self.assertEqual(definition[2], {'name': "Pony", 'options': {"permissions": [("can_groom", "Can groom")]}})
def test_alter_model_options_emptying(self):
"""
Tests that the AlterModelOptions operation removes keys from the dict (#23121)
"""
project_state = self.set_up_test_model("test_almoop", options=True)
# Test the state alteration (no DB alteration to test)
operation = migrations.AlterModelOptions("Pony", {})
self.assertEqual(operation.describe(), "Change Meta options on Pony")
new_state = project_state.clone()
operation.state_forwards("test_almoop", new_state)
self.assertEqual(len(project_state.models["test_almoop", "pony"].options.get("permissions", [])), 1)
self.assertEqual(len(new_state.models["test_almoop", "pony"].options.get("permissions", [])), 0)
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "AlterModelOptions")
self.assertEqual(definition[1], [])
self.assertEqual(definition[2], {'name': "Pony", 'options': {}})
def test_alter_order_with_respect_to(self):
"""
Tests the AlterOrderWithRespectTo operation.
"""
project_state = self.set_up_test_model("test_alorwrtto", related_model=True)
# Test the state alteration
operation = migrations.AlterOrderWithRespectTo("Rider", "pony")
self.assertEqual(operation.describe(), "Set order_with_respect_to on Rider to pony")
new_state = project_state.clone()
operation.state_forwards("test_alorwrtto", new_state)
self.assertIsNone(
project_state.models["test_alorwrtto", "rider"].options.get("order_with_respect_to", None)
)
self.assertEqual(
new_state.models["test_alorwrtto", "rider"].options.get("order_with_respect_to", None),
"pony"
)
        # Make sure there's no _order column yet
self.assertColumnNotExists("test_alorwrtto_rider", "_order")
# Create some rows before alteration
rendered_state = project_state.apps
pony = rendered_state.get_model("test_alorwrtto", "Pony").objects.create(weight=50)
rendered_state.get_model("test_alorwrtto", "Rider").objects.create(pony=pony, friend_id=1)
rendered_state.get_model("test_alorwrtto", "Rider").objects.create(pony=pony, friend_id=2)
# Test the database alteration
with connection.schema_editor() as editor:
operation.database_forwards("test_alorwrtto", editor, project_state, new_state)
self.assertColumnExists("test_alorwrtto_rider", "_order")
# Check for correct value in rows
updated_riders = new_state.apps.get_model("test_alorwrtto", "Rider").objects.all()
self.assertEqual(updated_riders[0]._order, 0)
self.assertEqual(updated_riders[1]._order, 0)
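        # Existing rows are backfilled with the column default, so both
        # riders end up with _order=0 rather than 0 and 1.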
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_alorwrtto", editor, new_state, project_state)
self.assertColumnNotExists("test_alorwrtto_rider", "_order")
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "AlterOrderWithRespectTo")
self.assertEqual(definition[1], [])
self.assertEqual(definition[2], {'name': "Rider", 'order_with_respect_to': "pony"})
def test_alter_model_managers(self):
"""
Tests that the managers on a model are set.
"""
project_state = self.set_up_test_model("test_almoma")
# Test the state alteration
operation = migrations.AlterModelManagers(
"Pony",
managers=[
("food_qs", FoodQuerySet.as_manager()),
("food_mgr", FoodManager("a", "b")),
("food_mgr_kwargs", FoodManager("x", "y", 3, 4)),
]
)
self.assertEqual(operation.describe(), "Change managers on Pony")
managers = project_state.models["test_almoma", "pony"].managers
self.assertEqual(managers, [])
new_state = project_state.clone()
operation.state_forwards("test_almoma", new_state)
self.assertIn(("test_almoma", "pony"), new_state.models)
managers = new_state.models["test_almoma", "pony"].managers
self.assertEqual(managers[0][0], "food_qs")
self.assertIsInstance(managers[0][1], models.Manager)
self.assertEqual(managers[1][0], "food_mgr")
self.assertIsInstance(managers[1][1], FoodManager)
self.assertEqual(managers[1][1].args, ("a", "b", 1, 2))
self.assertEqual(managers[2][0], "food_mgr_kwargs")
self.assertIsInstance(managers[2][1], FoodManager)
self.assertEqual(managers[2][1].args, ("x", "y", 3, 4))
def test_alter_model_managers_emptying(self):
"""
        Tests that the managers on a model can be emptied.
"""
project_state = self.set_up_test_model("test_almomae", manager_model=True)
# Test the state alteration
operation = migrations.AlterModelManagers("Food", managers=[])
self.assertEqual(operation.describe(), "Change managers on Food")
self.assertIn(("test_almomae", "food"), project_state.models)
managers = project_state.models["test_almomae", "food"].managers
self.assertEqual(managers[0][0], "food_qs")
self.assertIsInstance(managers[0][1], models.Manager)
self.assertEqual(managers[1][0], "food_mgr")
self.assertIsInstance(managers[1][1], FoodManager)
self.assertEqual(managers[1][1].args, ("a", "b", 1, 2))
self.assertEqual(managers[2][0], "food_mgr_kwargs")
self.assertIsInstance(managers[2][1], FoodManager)
self.assertEqual(managers[2][1].args, ("x", "y", 3, 4))
new_state = project_state.clone()
operation.state_forwards("test_almomae", new_state)
managers = new_state.models["test_almomae", "food"].managers
self.assertEqual(managers, [])
def test_alter_fk(self):
"""
Tests that creating and then altering an FK works correctly
and deals with the pending SQL (#23091)
"""
project_state = self.set_up_test_model("test_alfk")
# Test adding and then altering the FK in one go
create_operation = migrations.CreateModel(
name="Rider",
fields=[
("id", models.AutoField(primary_key=True)),
("pony", models.ForeignKey("Pony", models.CASCADE)),
],
)
create_state = project_state.clone()
create_operation.state_forwards("test_alfk", create_state)
alter_operation = migrations.AlterField(
model_name='Rider',
name='pony',
field=models.ForeignKey("Pony", models.CASCADE, editable=False),
)
alter_state = create_state.clone()
alter_operation.state_forwards("test_alfk", alter_state)
with connection.schema_editor() as editor:
create_operation.database_forwards("test_alfk", editor, project_state, create_state)
alter_operation.database_forwards("test_alfk", editor, create_state, alter_state)
def test_alter_fk_non_fk(self):
"""
Tests that altering an FK to a non-FK works (#23244)
"""
# Test the state alteration
operation = migrations.AlterField(
model_name="Rider",
name="pony",
field=models.FloatField(),
)
project_state, new_state = self.make_test_state("test_afknfk", operation, related_model=True)
# Test the database alteration
self.assertColumnExists("test_afknfk_rider", "pony_id")
self.assertColumnNotExists("test_afknfk_rider", "pony")
with connection.schema_editor() as editor:
operation.database_forwards("test_afknfk", editor, project_state, new_state)
self.assertColumnExists("test_afknfk_rider", "pony")
self.assertColumnNotExists("test_afknfk_rider", "pony_id")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_afknfk", editor, new_state, project_state)
self.assertColumnExists("test_afknfk_rider", "pony_id")
self.assertColumnNotExists("test_afknfk_rider", "pony")
@unittest.skipIf(sqlparse is None and connection.features.requires_sqlparse_for_splitting, "Missing sqlparse")
def test_run_sql(self):
"""
Tests the RunSQL operation.
"""
project_state = self.set_up_test_model("test_runsql")
# Create the operation
operation = migrations.RunSQL(
            # Use a multi-line string with a comment to test statement
            # splitting on SQLite and MySQL.
"CREATE TABLE i_love_ponies (id int, special_thing varchar(15));\n"
"INSERT INTO i_love_ponies (id, special_thing) VALUES (1, 'i love ponies'); -- this is magic!\n"
"INSERT INTO i_love_ponies (id, special_thing) VALUES (2, 'i love django');\n"
"UPDATE i_love_ponies SET special_thing = 'Ponies' WHERE special_thing LIKE '%%ponies';"
"UPDATE i_love_ponies SET special_thing = 'Django' WHERE special_thing LIKE '%django';",
# Run delete queries to test for parameter substitution failure
# reported in #23426
"DELETE FROM i_love_ponies WHERE special_thing LIKE '%Django%';"
"DELETE FROM i_love_ponies WHERE special_thing LIKE '%%Ponies%%';"
"DROP TABLE i_love_ponies",
state_operations=[migrations.CreateModel("SomethingElse", [("id", models.AutoField(primary_key=True))])],
)
self.assertEqual(operation.describe(), "Raw SQL operation")
# Test the state alteration
new_state = project_state.clone()
operation.state_forwards("test_runsql", new_state)
self.assertEqual(len(new_state.models["test_runsql", "somethingelse"].fields), 1)
# Make sure there's no table
self.assertTableNotExists("i_love_ponies")
# Test SQL collection
with connection.schema_editor(collect_sql=True) as editor:
operation.database_forwards("test_runsql", editor, project_state, new_state)
self.assertIn("LIKE '%%ponies';", "\n".join(editor.collected_sql))
operation.database_backwards("test_runsql", editor, project_state, new_state)
self.assertIn("LIKE '%%Ponies%%';", "\n".join(editor.collected_sql))
# Test the database alteration
with connection.schema_editor() as editor:
operation.database_forwards("test_runsql", editor, project_state, new_state)
self.assertTableExists("i_love_ponies")
# Make sure all the SQL was processed
with connection.cursor() as cursor:
cursor.execute("SELECT COUNT(*) FROM i_love_ponies")
self.assertEqual(cursor.fetchall()[0][0], 2)
cursor.execute("SELECT COUNT(*) FROM i_love_ponies WHERE special_thing = 'Django'")
self.assertEqual(cursor.fetchall()[0][0], 1)
cursor.execute("SELECT COUNT(*) FROM i_love_ponies WHERE special_thing = 'Ponies'")
self.assertEqual(cursor.fetchall()[0][0], 1)
# And test reversal
self.assertTrue(operation.reversible)
with connection.schema_editor() as editor:
operation.database_backwards("test_runsql", editor, new_state, project_state)
self.assertTableNotExists("i_love_ponies")
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "RunSQL")
self.assertEqual(definition[1], [])
self.assertEqual(sorted(definition[2]), ["reverse_sql", "sql", "state_operations"])
def test_run_sql_params(self):
"""
#23426 - RunSQL should accept parameters.
"""
project_state = self.set_up_test_model("test_runsql")
# Create the operation
operation = migrations.RunSQL(
["CREATE TABLE i_love_ponies (id int, special_thing varchar(15));"],
["DROP TABLE i_love_ponies"],
)
param_operation = migrations.RunSQL(
# forwards
(
"INSERT INTO i_love_ponies (id, special_thing) VALUES (1, 'Django');",
["INSERT INTO i_love_ponies (id, special_thing) VALUES (2, %s);", ['Ponies']],
("INSERT INTO i_love_ponies (id, special_thing) VALUES (%s, %s);", (3, 'Python',)),
),
# backwards
[
"DELETE FROM i_love_ponies WHERE special_thing = 'Django';",
["DELETE FROM i_love_ponies WHERE special_thing = 'Ponies';", None],
("DELETE FROM i_love_ponies WHERE id = %s OR special_thing = %s;", [3, 'Python']),
]
)
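        # Each statement may be a bare string, a [sql, params] list, or a
        # (sql, params) tuple; params=None means no parameters at all.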
# Make sure there's no table
self.assertTableNotExists("i_love_ponies")
new_state = project_state.clone()
# Test the database alteration
with connection.schema_editor() as editor:
operation.database_forwards("test_runsql", editor, project_state, new_state)
# Test parameter passing
with connection.schema_editor() as editor:
param_operation.database_forwards("test_runsql", editor, project_state, new_state)
# Make sure all the SQL was processed
with connection.cursor() as cursor:
cursor.execute("SELECT COUNT(*) FROM i_love_ponies")
self.assertEqual(cursor.fetchall()[0][0], 3)
with connection.schema_editor() as editor:
param_operation.database_backwards("test_runsql", editor, new_state, project_state)
with connection.cursor() as cursor:
cursor.execute("SELECT COUNT(*) FROM i_love_ponies")
self.assertEqual(cursor.fetchall()[0][0], 0)
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_runsql", editor, new_state, project_state)
self.assertTableNotExists("i_love_ponies")
def test_run_sql_params_invalid(self):
"""
        #23426 - RunSQL should fail when a statement is given as a sequence
        that isn't a (sql, params) 2-tuple.
"""
project_state = self.set_up_test_model("test_runsql")
new_state = project_state.clone()
operation = migrations.RunSQL(
# forwards
[
["INSERT INTO foo (bar) VALUES ('buz');"]
],
# backwards
(
("DELETE FROM foo WHERE bar = 'buz';", 'invalid', 'parameter count'),
),
)
        with connection.schema_editor() as editor:
            six.assertRaisesRegex(
                self, ValueError, "Expected a 2-tuple but got 1",
                operation.database_forwards,
                "test_runsql", editor, project_state, new_state,
            )
        with connection.schema_editor() as editor:
            six.assertRaisesRegex(
                self, ValueError, "Expected a 2-tuple but got 3",
                operation.database_backwards,
                "test_runsql", editor, new_state, project_state,
            )
def test_run_sql_noop(self):
"""
#24098 - Tests no-op RunSQL operations.
"""
operation = migrations.RunSQL(migrations.RunSQL.noop, migrations.RunSQL.noop)
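        # RunSQL.noop executes nothing but keeps the operation reversible in
        # that direction.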
with connection.schema_editor() as editor:
operation.database_forwards("test_runsql", editor, None, None)
operation.database_backwards("test_runsql", editor, None, None)
def test_run_python(self):
"""
        Tests the RunPython operation.
"""
project_state = self.set_up_test_model("test_runpython", mti_model=True)
# Create the operation
def inner_method(models, schema_editor):
Pony = models.get_model("test_runpython", "Pony")
Pony.objects.create(pink=1, weight=3.55)
Pony.objects.create(weight=5)
def inner_method_reverse(models, schema_editor):
Pony = models.get_model("test_runpython", "Pony")
Pony.objects.filter(pink=1, weight=3.55).delete()
Pony.objects.filter(weight=5).delete()
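        # RunPython hands its callables the historical app registry, so
        # models must be fetched via models.get_model() rather than imported.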
operation = migrations.RunPython(inner_method, reverse_code=inner_method_reverse)
self.assertEqual(operation.describe(), "Raw Python operation")
# Test the state alteration does nothing
new_state = project_state.clone()
operation.state_forwards("test_runpython", new_state)
self.assertEqual(new_state, project_state)
# Test the database alteration
self.assertEqual(project_state.apps.get_model("test_runpython", "Pony").objects.count(), 0)
with connection.schema_editor() as editor:
operation.database_forwards("test_runpython", editor, project_state, new_state)
self.assertEqual(project_state.apps.get_model("test_runpython", "Pony").objects.count(), 2)
# Now test reversal
self.assertTrue(operation.reversible)
with connection.schema_editor() as editor:
operation.database_backwards("test_runpython", editor, project_state, new_state)
self.assertEqual(project_state.apps.get_model("test_runpython", "Pony").objects.count(), 0)
# Now test we can't use a string
with self.assertRaises(ValueError):
migrations.RunPython("print 'ahahaha'")
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "RunPython")
self.assertEqual(definition[1], [])
self.assertEqual(sorted(definition[2]), ["code", "reverse_code"])
# Also test reversal fails, with an operation identical to above but without reverse_code set
no_reverse_operation = migrations.RunPython(inner_method)
self.assertFalse(no_reverse_operation.reversible)
with connection.schema_editor() as editor:
no_reverse_operation.database_forwards("test_runpython", editor, project_state, new_state)
with self.assertRaises(NotImplementedError):
no_reverse_operation.database_backwards("test_runpython", editor, new_state, project_state)
self.assertEqual(project_state.apps.get_model("test_runpython", "Pony").objects.count(), 2)
def create_ponies(models, schema_editor):
Pony = models.get_model("test_runpython", "Pony")
pony1 = Pony.objects.create(pink=1, weight=3.55)
self.assertIsNot(pony1.pk, None)
pony2 = Pony.objects.create(weight=5)
self.assertIsNot(pony2.pk, None)
self.assertNotEqual(pony1.pk, pony2.pk)
operation = migrations.RunPython(create_ponies)
with connection.schema_editor() as editor:
operation.database_forwards("test_runpython", editor, project_state, new_state)
self.assertEqual(project_state.apps.get_model("test_runpython", "Pony").objects.count(), 4)
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "RunPython")
self.assertEqual(definition[1], [])
self.assertEqual(sorted(definition[2]), ["code"])
def create_shetlandponies(models, schema_editor):
ShetlandPony = models.get_model("test_runpython", "ShetlandPony")
pony1 = ShetlandPony.objects.create(weight=4.0)
self.assertIsNot(pony1.pk, None)
pony2 = ShetlandPony.objects.create(weight=5.0)
self.assertIsNot(pony2.pk, None)
self.assertNotEqual(pony1.pk, pony2.pk)
operation = migrations.RunPython(create_shetlandponies)
with connection.schema_editor() as editor:
operation.database_forwards("test_runpython", editor, project_state, new_state)
self.assertEqual(project_state.apps.get_model("test_runpython", "Pony").objects.count(), 6)
self.assertEqual(project_state.apps.get_model("test_runpython", "ShetlandPony").objects.count(), 2)
def test_run_python_atomic(self):
"""
Tests the RunPython operation correctly handles the "atomic" keyword
"""
project_state = self.set_up_test_model("test_runpythonatomic", mti_model=True)
def inner_method(models, schema_editor):
Pony = models.get_model("test_runpythonatomic", "Pony")
Pony.objects.create(pink=1, weight=3.55)
raise ValueError("Adrian hates ponies.")
atomic_migration = Migration("test", "test_runpythonatomic")
atomic_migration.operations = [migrations.RunPython(inner_method)]
non_atomic_migration = Migration("test", "test_runpythonatomic")
non_atomic_migration.operations = [migrations.RunPython(inner_method, atomic=False)]
# If we're a fully-transactional database, both versions should rollback
if connection.features.can_rollback_ddl:
self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0)
with self.assertRaises(ValueError):
with connection.schema_editor() as editor:
atomic_migration.apply(project_state, editor)
self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0)
with self.assertRaises(ValueError):
with connection.schema_editor() as editor:
non_atomic_migration.apply(project_state, editor)
self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0)
# Otherwise, the non-atomic operation should leave a row there
else:
self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0)
with self.assertRaises(ValueError):
with connection.schema_editor() as editor:
atomic_migration.apply(project_state, editor)
self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0)
with self.assertRaises(ValueError):
with connection.schema_editor() as editor:
non_atomic_migration.apply(project_state, editor)
self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 1)
# And deconstruction
definition = non_atomic_migration.operations[0].deconstruct()
self.assertEqual(definition[0], "RunPython")
self.assertEqual(definition[1], [])
self.assertEqual(sorted(definition[2]), ["atomic", "code"])
def test_run_python_related_assignment(self):
"""
#24282 - Tests that model changes to a FK reverse side update the model
on the FK side as well.
"""
def inner_method(models, schema_editor):
Author = models.get_model("test_authors", "Author")
Book = models.get_model("test_books", "Book")
author = Author.objects.create(name="Hemingway")
Book.objects.create(title="Old Man and The Sea", author=author)
create_author = migrations.CreateModel(
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=100)),
],
options={},
)
create_book = migrations.CreateModel(
"Book",
[
("id", models.AutoField(primary_key=True)),
("title", models.CharField(max_length=100)),
("author", models.ForeignKey("test_authors.Author", models.CASCADE))
],
options={},
)
add_hometown = migrations.AddField(
"Author",
"hometown",
models.CharField(max_length=100),
)
create_old_man = migrations.RunPython(inner_method, inner_method)
project_state = ProjectState()
new_state = project_state.clone()
with connection.schema_editor() as editor:
create_author.state_forwards("test_authors", new_state)
create_author.database_forwards("test_authors", editor, project_state, new_state)
project_state = new_state
new_state = new_state.clone()
with connection.schema_editor() as editor:
create_book.state_forwards("test_books", new_state)
create_book.database_forwards("test_books", editor, project_state, new_state)
project_state = new_state
new_state = new_state.clone()
with connection.schema_editor() as editor:
add_hometown.state_forwards("test_authors", new_state)
add_hometown.database_forwards("test_authors", editor, project_state, new_state)
project_state = new_state
new_state = new_state.clone()
with connection.schema_editor() as editor:
create_old_man.state_forwards("test_books", new_state)
create_old_man.database_forwards("test_books", editor, project_state, new_state)
def test_run_python_noop(self):
"""
#24098 - Tests no-op RunPython operations.
"""
project_state = ProjectState()
new_state = project_state.clone()
operation = migrations.RunPython(migrations.RunPython.noop, migrations.RunPython.noop)
with connection.schema_editor() as editor:
operation.database_forwards("test_runpython", editor, project_state, new_state)
operation.database_backwards("test_runpython", editor, new_state, project_state)
@unittest.skipIf(sqlparse is None and connection.features.requires_sqlparse_for_splitting, "Missing sqlparse")
def test_separate_database_and_state(self):
"""
Tests the SeparateDatabaseAndState operation.
"""
project_state = self.set_up_test_model("test_separatedatabaseandstate")
# Create the operation
database_operation = migrations.RunSQL(
"CREATE TABLE i_love_ponies (id int, special_thing int);",
"DROP TABLE i_love_ponies;"
)
state_operation = migrations.CreateModel("SomethingElse", [("id", models.AutoField(primary_key=True))])
operation = migrations.SeparateDatabaseAndState(
state_operations=[state_operation],
database_operations=[database_operation]
)
self.assertEqual(operation.describe(), "Custom state/database change combination")
# Test the state alteration
new_state = project_state.clone()
operation.state_forwards("test_separatedatabaseandstate", new_state)
self.assertEqual(len(new_state.models["test_separatedatabaseandstate", "somethingelse"].fields), 1)
# Make sure there's no table
self.assertTableNotExists("i_love_ponies")
# Test the database alteration
with connection.schema_editor() as editor:
operation.database_forwards("test_separatedatabaseandstate", editor, project_state, new_state)
self.assertTableExists("i_love_ponies")
# And test reversal
self.assertTrue(operation.reversible)
with connection.schema_editor() as editor:
operation.database_backwards("test_separatedatabaseandstate", editor, new_state, project_state)
self.assertTableNotExists("i_love_ponies")
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "SeparateDatabaseAndState")
self.assertEqual(definition[1], [])
self.assertEqual(sorted(definition[2]), ["database_operations", "state_operations"])
class SwappableOperationTests(OperationTestBase):
"""
Tests that key operations ignore swappable models
(we don't want to replicate all of them here, as the functionality
is in a common base class anyway)
"""
available_apps = [
"migrations",
"django.contrib.auth",
"django.contrib.contenttypes",
]
@override_settings(TEST_SWAP_MODEL="migrations.SomeFakeModel")
def test_create_ignore_swapped(self):
"""
        Tests that the CreateModel operation ignores swapped models.
"""
operation = migrations.CreateModel(
"Pony",
[
("id", models.AutoField(primary_key=True)),
("pink", models.IntegerField(default=1)),
],
options={
"swappable": "TEST_SWAP_MODEL",
},
)
# Test the state alteration (it should still be there!)
project_state = ProjectState()
new_state = project_state.clone()
operation.state_forwards("test_crigsw", new_state)
self.assertEqual(new_state.models["test_crigsw", "pony"].name, "Pony")
self.assertEqual(len(new_state.models["test_crigsw", "pony"].fields), 2)
# Test the database alteration
self.assertTableNotExists("test_crigsw_pony")
with connection.schema_editor() as editor:
operation.database_forwards("test_crigsw", editor, project_state, new_state)
self.assertTableNotExists("test_crigsw_pony")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_crigsw", editor, new_state, project_state)
self.assertTableNotExists("test_crigsw_pony")
@override_settings(TEST_SWAP_MODEL="migrations.SomeFakeModel")
def test_delete_ignore_swapped(self):
"""
Tests the DeleteModel operation ignores swapped models.
"""
operation = migrations.DeleteModel("Pony")
project_state, new_state = self.make_test_state("test_dligsw", operation)
# Test the database alteration
self.assertTableNotExists("test_dligsw_pony")
with connection.schema_editor() as editor:
operation.database_forwards("test_dligsw", editor, project_state, new_state)
self.assertTableNotExists("test_dligsw_pony")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_dligsw", editor, new_state, project_state)
self.assertTableNotExists("test_dligsw_pony")
@override_settings(TEST_SWAP_MODEL="migrations.SomeFakeModel")
def test_add_field_ignore_swapped(self):
"""
        Tests the AddField operation ignores swapped models.
"""
# Test the state alteration
operation = migrations.AddField(
"Pony",
"height",
models.FloatField(null=True, default=5),
)
project_state, new_state = self.make_test_state("test_adfligsw", operation)
# Test the database alteration
self.assertTableNotExists("test_adfligsw_pony")
with connection.schema_editor() as editor:
operation.database_forwards("test_adfligsw", editor, project_state, new_state)
self.assertTableNotExists("test_adfligsw_pony")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_adfligsw", editor, new_state, project_state)
self.assertTableNotExists("test_adfligsw_pony")
|
saketkc/bioconda-recipes
|
refs/heads/master
|
recipes/embl-api-validator/embl-api-validator.py
|
46
|
#!/usr/bin/env python
#
# Wrapper script for Java Conda packages that ensures that the java runtime
# is invoked with the right options. Adapted from the bash script (http://stackoverflow.com/questions/59895/can-a-bash-script-tell-what-directory-its-stored-in/246128#246128).
#
# Program Parameters
#
import os
import subprocess
import sys
import shutil
from os import access
from os import getenv
from os import X_OK
jar_file = 'embl-api-validator-1.1.180.jar'
default_jvm_mem_opts = ['-Xms512m', '-Xmx1g']
# !!! End of parameter section. No user-serviceable code below this line !!!
def real_dirname(path):
"""Return the symlink-resolved, canonicalized directory-portion of path."""
return os.path.dirname(os.path.realpath(path))
def java_executable():
"""Return the executable name of the Java interpreter."""
java_home = getenv('JAVA_HOME')
java_bin = os.path.join('bin', 'java')
if java_home and access(os.path.join(java_home, java_bin), X_OK):
return os.path.join(java_home, java_bin)
else:
return 'java'
def jvm_opts(argv):
"""Construct list of Java arguments based on our argument list.
The argument list passed in argv must not include the script name.
    The return value is a 4-tuple of the form:
    (memory_options, prop_options, passthrough_options, exec_dir)
"""
mem_opts = []
prop_opts = []
pass_args = []
exec_dir = None
for arg in argv:
if arg.startswith('-D'):
prop_opts.append(arg)
elif arg.startswith('-XX'):
prop_opts.append(arg)
elif arg.startswith('-Xm'):
mem_opts.append(arg)
elif arg.startswith('--exec_dir='):
exec_dir = arg.split('=')[1].strip('"').strip("'")
if not os.path.exists(exec_dir):
shutil.copytree(real_dirname(sys.argv[0]), exec_dir, symlinks=False, ignore=None)
else:
pass_args.append(arg)
# In the original shell script the test coded below read:
# if [ "$jvm_mem_opts" == "" ] && [ -z ${_JAVA_OPTIONS+x} ]
# To reproduce the behaviour of the above shell code fragment
    # it is important to explicitly check for equality with None
# in the second condition, so a null envar value counts as True!
if mem_opts == [] and getenv('_JAVA_OPTIONS') is None:
mem_opts = default_jvm_mem_opts
return (mem_opts, prop_opts, pass_args, exec_dir)
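# Illustrative sketch of how jvm_opts() splits a command line (values are
# hypothetical): for argv = ['-Xmx2g', '-Dverbose=true', 'input.embl'] it
# would return (['-Xmx2g'], ['-Dverbose=true'], ['input.embl'], None);
# default_jvm_mem_opts is substituted only when no -Xm* option is present
# and the _JAVA_OPTIONS environment variable is unset.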
def main():
java = java_executable()
"""
updates files relative to the path of the jar file.
In a multiuser setting, the option --exec_dir="exec_dir"
can be used as the location for the peptide-shaker distribution.
If the exec_dir dies not exist,
we copy the jar file, lib, and resources to the exec_dir directory.
"""
(mem_opts, prop_opts, pass_args, exec_dir) = jvm_opts(sys.argv[1:])
jar_dir = exec_dir if exec_dir else real_dirname(sys.argv[0])
if pass_args != [] and pass_args[0].startswith('eu'):
jar_arg = '-cp'
else:
jar_arg = '-jar'
jar_path = os.path.join(jar_dir, jar_file)
java_args = [java] + mem_opts + prop_opts + [jar_arg] + [jar_path] + pass_args
sys.exit(subprocess.call(java_args))
if __name__ == '__main__':
main()
|
kobejean/tensorflow
|
refs/heads/master
|
tensorflow/python/layers/base.py
|
4
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
"""Contains the base Layer class, from which all layers inherit."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import copy
from tensorflow.python.eager import context
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.keras.engine import base_layer
from tensorflow.python.ops import variable_scope as vs
from tensorflow.python.ops import variables as tf_variables
from tensorflow.python.util import function_utils
from tensorflow.python.util import nest
from tensorflow.python.util.tf_export import tf_export
InputSpec = base_layer.InputSpec # pylint: disable=invalid-name
@tf_export('layers.Layer')
class Layer(base_layer.Layer):
"""Base layer class.
It is considered legacy, and we recommend the use of `tf.keras.layers.Layer`
instead.
Arguments:
trainable: Boolean, whether the layer's variables should be trainable.
name: String name of the layer.
dtype: Default dtype of the layer's weights (default of `None` means use the
type of the first input).
Read-only properties:
name: The name of the layer (string).
dtype: Default dtype of the layer's weights (default of `None` means use the
type of the first input).
trainable_variables: List of trainable variables.
non_trainable_variables: List of non-trainable variables.
variables: List of all variables of this layer, trainable and
non-trainable.
updates: List of update ops of this layer.
losses: List of losses added by this layer.
trainable_weights: List of variables to be included in backprop.
non_trainable_weights: List of variables that should not be
included in backprop.
weights: The concatenation of the lists trainable_weights and
non_trainable_weights (in this order).
Mutable properties:
trainable: Whether the layer should be trained (boolean).
input_spec: Optional (list of) `InputSpec` object(s) specifying the
constraints on inputs that can be accepted by the layer.
"""
def __init__(self, trainable=True, name=None, dtype=None,
**kwargs):
# For backwards compatibility, legacy layers do not use `ResourceVariable`
# by default.
self._use_resource_variables = False
scope = kwargs.pop('_scope', None)
self._reuse = kwargs.pop('_reuse', None)
# Avoid an incorrect lint error
self._trainable_weights = []
self.built = False
super(Layer, self).__init__(trainable=trainable, name=name, dtype=dtype,
**kwargs)
self._graph = None
self._call_has_scope_arg = 'scope' in self._call_fn_args
if scope:
with vs.variable_scope(scope) as captured_scope:
self._scope = captured_scope
else:
self._scope = None
self._current_scope = None
@property
def graph(self):
if context.executing_eagerly():
raise RuntimeError('Layer.graph not supported when executing eagerly.')
return self._graph
def _init_set_name(self, name):
# Determine layer name (non-unique).
if isinstance(name, vs.VariableScope):
base_name = name.name
else:
base_name = name
self._name = name
if not name:
self._name, base_name = self._make_unique_name()
self._base_name = base_name
def _make_unique_name(self, name_uid_map=None, avoid_names=None,
namespace='', zero_based=False):
base_name = base_layer.to_snake_case(self.__class__.__name__)
name = base_layer.unique_layer_name(base_name,
name_uid_map=name_uid_map,
avoid_names=avoid_names,
namespace=namespace,
zero_based=zero_based)
return (name, base_name)
@property
def scope_name(self):
if not self._scope:
raise ValueError('No name available for layer scope because the layer "' +
self._name + '" has not been used yet. The scope name ' +
                       'is determined the first time the layer instance is ' +
'called. You must therefore call the layer before ' +
'querying `scope_name`.')
return self._scope.name
def add_loss(self, losses, inputs=None):
previous_losses_length = len(self._losses)
previous_callable_losses_length = len(self._callable_losses)
super(Layer, self).add_loss(losses, inputs=inputs)
if not context.executing_eagerly():
# TODO(fchollet): deprecate collection below.
new_losses = self._losses[previous_losses_length:]
new_callable_losses = self._callable_losses[
previous_callable_losses_length:]
for regularizer in new_callable_losses:
loss_tensor = regularizer()
if loss_tensor is not None:
new_losses.append(loss_tensor)
_add_elements_to_collection(
new_losses,
ops.GraphKeys.REGULARIZATION_LOSSES)
def _name_scope(self):
"""Determines op naming for the Layer."""
return self._current_scope.original_name_scope
def _set_scope(self, scope=None):
if self._scope is None:
# If constructed with _scope=None, lazy setting of scope.
if self._reuse:
with vs.variable_scope(
scope if scope is not None else self._base_name) as captured_scope:
self._scope = captured_scope
else:
with vs.variable_scope(
scope, default_name=self._base_name) as captured_scope:
self._scope = captured_scope
def add_weight(self,
name,
shape,
dtype=None,
initializer=None,
regularizer=None,
trainable=None,
constraint=None,
use_resource=None,
synchronization=vs.VariableSynchronization.AUTO,
aggregation=vs.VariableAggregation.NONE,
partitioner=None):
"""Adds a new variable to the layer, or gets an existing one; returns it.
Arguments:
name: variable name.
shape: variable shape.
dtype: The type of the variable. Defaults to `self.dtype` or `float32`.
initializer: initializer instance (callable).
regularizer: regularizer instance (callable).
trainable: whether the variable should be part of the layer's
"trainable_variables" (e.g. variables, biases)
or "non_trainable_variables" (e.g. BatchNorm mean, stddev).
Note, if the current variable scope is marked as non-trainable
then this parameter is ignored and any added variables are also
marked as non-trainable. `trainable` defaults to `True` unless
`synchronization` is set to `ON_READ`.
constraint: constraint instance (callable).
use_resource: Whether to use `ResourceVariable`.
      synchronization: Indicates when a distributed variable will be
aggregated. Accepted values are constants defined in the class
`tf.VariableSynchronization`. By default the synchronization is set to
`AUTO` and the current `DistributionStrategy` chooses
when to synchronize. If `synchronization` is set to `ON_READ`,
`trainable` must not be set to `True`.
aggregation: Indicates how a distributed variable will be aggregated.
Accepted values are constants defined in the class
`tf.VariableAggregation`.
partitioner: (optional) partitioner instance (callable). If
provided, when the requested variable is created it will be split
into multiple partitions according to `partitioner`. In this case,
an instance of `PartitionedVariable` is returned. Available
partitioners include `tf.fixed_size_partitioner` and
`tf.variable_axis_size_partitioner`. For more details, see the
documentation of `tf.get_variable` and the "Variable Partitioners
and Sharding" section of the API guide.
Returns:
The created variable. Usually either a `Variable` or `ResourceVariable`
instance. If `partitioner` is not `None`, a `PartitionedVariable`
instance is returned.
Raises:
      RuntimeError: If called with partitioned variable regularization and
eager execution is enabled.
ValueError: When trainable has been set to True with synchronization
set as `ON_READ`.
"""
if synchronization == vs.VariableSynchronization.ON_READ:
if trainable:
raise ValueError(
'Synchronization value can be set to '
'VariableSynchronization.ON_READ only for non-trainable variables. '
'You have specified trainable=True and '
'synchronization=VariableSynchronization.ON_READ.')
else:
# Set trainable to be false when variable is to be synced on read.
trainable = False
elif trainable is None:
trainable = True
def _should_add_regularizer(variable, existing_variable_set):
if isinstance(variable, tf_variables.PartitionedVariable):
for var in variable:
if var in existing_variable_set:
return False
return True
else:
return variable not in existing_variable_set
init_graph = None
if not context.executing_eagerly():
default_graph = ops.get_default_graph()
if default_graph.building_function:
with ops.init_scope():
# Retrieve the variables from the graph into which variables
# will be lifted; if initialization ops will be lifted into
# the eager context, then there is nothing to retrieve, since variable
# collections are not supported when eager execution is enabled.
if not context.executing_eagerly():
init_graph = ops.get_default_graph()
existing_variables = set(tf_variables.global_variables())
else:
# Initialization ops will not be lifted out of the default graph.
init_graph = default_graph
existing_variables = set(tf_variables.global_variables())
if dtype is None:
dtype = self.dtype or dtypes.float32
self._set_scope(None)
reuse = self.built or self._reuse
prev_len_trainable = len(self._trainable_weights)
with vs.variable_scope(
self._scope, reuse=reuse, auxiliary_name_scope=False) as scope:
self._current_scope = scope
with ops.name_scope(self._name_scope()):
use_resource = (use_resource or
self._use_resource_variables or
scope.use_resource)
if initializer is None:
initializer = scope.initializer
variable = super(Layer, self).add_weight(
name,
shape,
dtype=dtypes.as_dtype(dtype),
initializer=initializer,
trainable=trainable,
constraint=constraint,
partitioner=partitioner,
use_resource=use_resource,
synchronization=synchronization,
aggregation=aggregation,
getter=vs.get_variable)
if regularizer:
if context.executing_eagerly() or _should_add_regularizer(
variable, existing_variables):
self._handle_weight_regularization(name, variable, regularizer)
if init_graph is not None:
# Handle edge case where a custom getter has overridden `trainable`.
# There is one known occurrence of this, in unit test
# testBasicRNNCellNotTrainable in
# contrib.rnn.python.kernel_tests.core_rnn_cell_test
with init_graph.as_default():
trainable_variables = tf_variables.trainable_variables()
if (trainable and self.trainable and
variable not in trainable_variables):
# A custom getter / variable scope overrode the trainable flag.
extra_trainable_vars = self._trainable_weights[prev_len_trainable:]
self._trainable_weights = self._trainable_weights[
:prev_len_trainable]
self._non_trainable_weights += extra_trainable_vars
return variable
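  # Hedged usage sketch (class and shapes are illustrative, not part of this
  # module): a subclass would typically call add_weight() from its build()
  # method, e.g.
  #
  #   class Linear(Layer):
  #     def build(self, input_shape):
  #       self.kernel = self.add_weight(
  #           'kernel', shape=[int(input_shape[-1]), 10])
  #       self.built = True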
def __call__(self, inputs, *args, **kwargs):
"""Wraps `call`, applying pre- and post-processing steps.
Arguments:
inputs: input tensor(s).
*args: additional positional arguments to be passed to `self.call`.
**kwargs: additional keyword arguments to be passed to `self.call`.
**Note**: kwarg `scope` is reserved for use by the layer.
Returns:
Output tensor(s).
Note:
- If the layer's `call` method takes a `scope` keyword argument,
this argument will be automatically set to the current variable scope.
- If the layer's `call` method takes a `mask` argument (as some Keras
layers do), its default value will be set to the mask generated
        for `inputs` by the previous layer (if `inputs` did come from
        a layer that generated a corresponding mask, i.e. if it came from
        a Keras layer with masking support).
Raises:
ValueError: if the layer's `call` method returns None (an invalid value).
"""
self._set_scope(kwargs.pop('scope', None))
if not context.executing_eagerly():
try:
# Set layer's "graph" at build time
self._graph = ops._get_graph_from_inputs(nest.flatten(inputs), # pylint: disable=protected-access
graph=self._graph)
except ValueError as e:
raise ValueError('Input graph and Layer graph are not the same: %s' % e)
if self.built:
try:
# Some classes which inherit from Layer do not use its constructor, so
# rather than initializing to None we check for an AttributeError.
scope_context_manager = self._always_reuse_variable_scope
except AttributeError:
# From this point we will always set reuse=True, so create a "final"
# variable scope with this setting. We avoid re-creating variable scopes
# after this point as an optimization.
self._always_reuse_variable_scope = vs.variable_scope(
self._scope, reuse=True, auxiliary_name_scope=False)
scope_context_manager = self._always_reuse_variable_scope
else:
scope_context_manager = vs.variable_scope(
self._scope, reuse=self._reuse, auxiliary_name_scope=False)
with scope_context_manager as scope:
self._current_scope = scope
try:
call_has_scope_arg = self._call_has_scope_arg
except AttributeError:
self._call_fn_args = function_utils.fn_args(self.call)
self._call_has_scope_arg = 'scope' in self._call_fn_args
call_has_scope_arg = self._call_has_scope_arg
if call_has_scope_arg:
kwargs['scope'] = scope
# Actually call layer
outputs = super(Layer, self).__call__(inputs, *args, **kwargs)
if not context.executing_eagerly():
# Update global default collections.
_add_elements_to_collection(self.updates, ops.GraphKeys.UPDATE_OPS)
return outputs
def __deepcopy__(self, memo):
no_copy = set(['_graph'])
shallow_copy = set(['_scope', '_always_reuse_variable_scope'])
cls = self.__class__
result = cls.__new__(cls)
memo[id(self)] = result
for k, v in self.__dict__.items():
if k in no_copy:
setattr(result, k, v)
elif k in shallow_copy:
setattr(result, k, copy.copy(v))
elif base_layer.is_tensor_or_tensor_list(v):
setattr(result, k, v)
else:
setattr(result, k, copy.deepcopy(v, memo))
return result
def _add_elements_to_collection(elements, collection_list):
if context.executing_eagerly():
raise RuntimeError('Using collections from Layers not supported in Eager '
'mode. Tried to add %s to %s' % (elements,
collection_list))
elements = nest.flatten(elements)
collection_list = nest.flatten(collection_list)
for name in collection_list:
collection = ops.get_collection_ref(name)
collection_set = set(collection)
for element in elements:
if element not in collection_set:
collection.append(element)
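# Illustrative behavior note: _add_elements_to_collection() de-duplicates
# against the current contents of each collection, so adding the same update
# op twice leaves a single entry in ops.GraphKeys.UPDATE_OPS; in eager mode
# it raises RuntimeError, since collections are graph-only.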
|
gyang/nova
|
refs/heads/master
|
nova/virt/vmwareapi/io_util.py
|
5
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2011 Citrix Systems, Inc.
# Copyright 2011 OpenStack LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Utility classes for defining the time-saving transfer of data from the reader
to the writer using a LightQueue as a Pipe between the reader and the writer.
"""
from eventlet import event
from eventlet import greenthread
from eventlet import queue
from nova import exception
from nova import log as logging
LOG = logging.getLogger(__name__)
IO_THREAD_SLEEP_TIME = .01
GLANCE_POLL_INTERVAL = 5
class ThreadSafePipe(queue.LightQueue):
"""The pipe to hold the data which the reader writes to and the writer
reads from."""
def __init__(self, maxsize, transfer_size):
queue.LightQueue.__init__(self, maxsize)
self.transfer_size = transfer_size
self.transferred = 0
def read(self, chunk_size):
"""Read data from the pipe. Chunksize if ignored for we have ensured
that the data chunks written to the pipe by readers is the same as the
chunks asked for by the Writer."""
if self.transferred < self.transfer_size:
data_item = self.get()
self.transferred += len(data_item)
return data_item
else:
return ""
def write(self, data):
"""Put a data item in the pipe."""
self.put(data)
def close(self):
"""A place-holder to maintain consistency."""
pass
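# Hedged usage sketch for ThreadSafePipe (sizes are illustrative):
#     pipe = ThreadSafePipe(maxsize=10, transfer_size=100)
#     pipe.write('x' * 100)      # producer side
#     chunk = pipe.read(None)    # consumer side; chunk_size is ignored
#     assert pipe.transferred == 100 and pipe.read(None) == ""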
class GlanceWriteThread(object):
"""Ensures that image data is written to in the glance client and that
it is in correct ('active')state."""
def __init__(self, input, glance_client, image_id, image_meta=None):
if not image_meta:
image_meta = {}
self.input = input
self.glance_client = glance_client
self.image_id = image_id
self.image_meta = image_meta
self._running = False
def start(self):
self.done = event.Event()
def _inner():
"""Function to do the image data transfer through an update
and thereon checks if the state is 'active'."""
self.glance_client.update_image(self.image_id,
image_meta=self.image_meta,
image_data=self.input)
self._running = True
while self._running:
try:
_get_image_meta = self.glance_client.get_image_meta
image_status = _get_image_meta(self.image_id).get("status")
if image_status == "active":
self.stop()
self.done.send(True)
# If the state is killed, then raise an exception.
elif image_status == "killed":
self.stop()
exc_msg = (_("Glance image %s is in killed state") %
self.image_id)
LOG.error(exc_msg)
self.done.send_exception(exception.Error(exc_msg))
elif image_status in ["saving", "queued"]:
greenthread.sleep(GLANCE_POLL_INTERVAL)
else:
self.stop()
exc_msg = _("Glance image "
"%(image_id)s is in unknown state "
"- %(state)s") % {
"image_id": self.image_id,
"state": image_status}
LOG.error(exc_msg)
self.done.send_exception(exception.Error(exc_msg))
except Exception, exc:
self.stop()
self.done.send_exception(exc)
greenthread.spawn(_inner)
return self.done
def stop(self):
self._running = False
def wait(self):
return self.done.wait()
def close(self):
pass
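# Hedged driver sketch for GlanceWriteThread (glance_client is assumed to
# expose update_image/get_image_meta as used above):
#     writer = GlanceWriteThread(read_handle, glance_client, image_id)
#     writer.start()
#     writer.wait()    # re-raises anything sent via done.send_exception()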
class IOThread(object):
"""Class that reads chunks from the input file and writes them to the
output file till the transfer is completely done."""
def __init__(self, input, output):
self.input = input
self.output = output
self._running = False
self.got_exception = False
def start(self):
self.done = event.Event()
def _inner():
"""Read data from the input and write the same to the output
until the transfer completes."""
self._running = True
while self._running:
try:
data = self.input.read(None)
if not data:
self.stop()
self.done.send(True)
self.output.write(data)
greenthread.sleep(IO_THREAD_SLEEP_TIME)
except Exception, exc:
self.stop()
LOG.exception(exc)
self.done.send_exception(exc)
greenthread.spawn(_inner)
return self.done
def stop(self):
self._running = False
def wait(self):
return self.done.wait()
|
alonisser/Open-Knesset
|
refs/heads/master
|
user/templatetags/__init__.py
|
12133432
| |
mrfelcio/where-do-you-go
|
refs/heads/master
|
gheatae/__init__.py
|
12133432
| |
labkaxita/lakaxita
|
refs/heads/master
|
lakaxita/tests/__init__.py
|
12133432
| |
iamkingmaker/trading-with-python
|
refs/heads/master
|
lib/__init__.py
|
12133432
| |
edxzw/edx-platform
|
refs/heads/master
|
common/lib/xmodule/xmodule/util/__init__.py
|
12133432
| |
loic/django
|
refs/heads/master
|
tests/template_tests/syntax_tests/test_cache.py
|
45
|
from django.core.cache import cache
from django.template import Context, Engine, TemplateSyntaxError
from django.test import SimpleTestCase, override_settings
from ..utils import setup
class CacheTagTests(SimpleTestCase):
libraries = {
'cache': 'django.templatetags.cache',
'custom': 'template_tests.templatetags.custom',
}
def tearDown(self):
cache.clear()
@setup({'cache03': '{% load cache %}{% cache 2 test %}cache03{% endcache %}'})
def test_cache03(self):
output = self.engine.render_to_string('cache03')
self.assertEqual(output, 'cache03')
@setup({
'cache03': '{% load cache %}{% cache 2 test %}cache03{% endcache %}',
'cache04': '{% load cache %}{% cache 2 test %}cache04{% endcache %}',
})
def test_cache04(self):
self.engine.render_to_string('cache03')
output = self.engine.render_to_string('cache04')
self.assertEqual(output, 'cache03')
@setup({'cache05': '{% load cache %}{% cache 2 test foo %}cache05{% endcache %}'})
def test_cache05(self):
output = self.engine.render_to_string('cache05', {'foo': 1})
self.assertEqual(output, 'cache05')
@setup({'cache06': '{% load cache %}{% cache 2 test foo %}cache06{% endcache %}'})
def test_cache06(self):
output = self.engine.render_to_string('cache06', {'foo': 2})
self.assertEqual(output, 'cache06')
@setup({
'cache05': '{% load cache %}{% cache 2 test foo %}cache05{% endcache %}',
'cache07': '{% load cache %}{% cache 2 test foo %}cache07{% endcache %}',
})
def test_cache07(self):
context = {'foo': 1}
self.engine.render_to_string('cache05', context)
output = self.engine.render_to_string('cache07', context)
self.assertEqual(output, 'cache05')
@setup({
'cache06': '{% load cache %}{% cache 2 test foo %}cache06{% endcache %}',
'cache08': '{% load cache %}{% cache time test foo %}cache08{% endcache %}',
})
def test_cache08(self):
"""
Allow first argument to be a variable.
"""
context = {'foo': 2, 'time': 2}
self.engine.render_to_string('cache06', context)
output = self.engine.render_to_string('cache08', context)
self.assertEqual(output, 'cache06')
# Raise exception if we don't have at least 2 args, first one integer.
@setup({'cache11': '{% load cache %}{% cache %}{% endcache %}'})
def test_cache11(self):
with self.assertRaises(TemplateSyntaxError):
self.engine.get_template('cache11')
@setup({'cache12': '{% load cache %}{% cache 1 %}{% endcache %}'})
def test_cache12(self):
with self.assertRaises(TemplateSyntaxError):
self.engine.get_template('cache12')
@setup({'cache13': '{% load cache %}{% cache foo bar %}{% endcache %}'})
def test_cache13(self):
with self.assertRaises(TemplateSyntaxError):
self.engine.render_to_string('cache13')
@setup({'cache14': '{% load cache %}{% cache foo bar %}{% endcache %}'})
def test_cache14(self):
with self.assertRaises(TemplateSyntaxError):
self.engine.render_to_string('cache14', {'foo': 'fail'})
@setup({'cache15': '{% load cache %}{% cache foo bar %}{% endcache %}'})
def test_cache15(self):
with self.assertRaises(TemplateSyntaxError):
self.engine.render_to_string('cache15', {'foo': []})
@setup({'cache16': '{% load cache %}{% cache 1 foo bar %}{% endcache %}'})
def test_cache16(self):
"""
Regression test for #7460.
"""
output = self.engine.render_to_string('cache16', {'foo': 'foo', 'bar': 'with spaces'})
self.assertEqual(output, '')
@setup({'cache17': '{% load cache %}{% cache 10 long_cache_key poem %}Some Content{% endcache %}'})
def test_cache17(self):
"""
Regression test for #11270.
"""
output = self.engine.render_to_string(
'cache17',
{
'poem': (
'Oh freddled gruntbuggly/Thy micturations are to me/'
'As plurdled gabbleblotchits/On a lurgid bee/'
'That mordiously hath bitled out/Its earted jurtles/'
'Into a rancid festering/Or else I shall rend thee in the gobberwarts'
'with my blurglecruncheon/See if I dont.'
),
}
)
self.assertEqual(output, 'Some Content')
@setup({'cache18': '{% load cache custom %}{% cache 2|noop:"x y" cache18 %}cache18{% endcache %}'})
def test_cache18(self):
"""
Test whitespace in filter arguments
"""
output = self.engine.render_to_string('cache18')
self.assertEqual(output, 'cache18')
class CacheTests(SimpleTestCase):
@classmethod
def setUpClass(cls):
cls.engine = Engine(libraries={'cache': 'django.templatetags.cache'})
super(CacheTests, cls).setUpClass()
def test_cache_regression_20130(self):
t = self.engine.from_string('{% load cache %}{% cache 1 regression_20130 %}foo{% endcache %}')
cachenode = t.nodelist[1]
self.assertEqual(cachenode.fragment_name, 'regression_20130')
@override_settings(CACHES={
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': 'default',
},
'template_fragments': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': 'fragments',
},
})
def test_cache_fragment_cache(self):
"""
When a cache called "template_fragments" is present, the cache tag
will use it in preference to 'default'
"""
t1 = self.engine.from_string('{% load cache %}{% cache 1 fragment %}foo{% endcache %}')
t2 = self.engine.from_string('{% load cache %}{% cache 1 fragment using="default" %}bar{% endcache %}')
ctx = Context()
o1 = t1.render(ctx)
o2 = t2.render(ctx)
self.assertEqual(o1, 'foo')
self.assertEqual(o2, 'bar')
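        # Illustrative: both fragments share the key name 'fragment', but they
        # are stored in different backends ('template_fragments' vs 'default'),
        # so neither render is served from the other's cache entry.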
def test_cache_missing_backend(self):
"""
When a cache that doesn't exist is specified, the cache tag will
raise a TemplateSyntaxError
'"""
t = self.engine.from_string('{% load cache %}{% cache 1 backend using="unknown" %}bar{% endcache %}')
ctx = Context()
with self.assertRaises(TemplateSyntaxError):
t.render(ctx)
|
bankonmecoin/namecoin-legacy
|
refs/heads/namecoinq
|
client/DNS/Type.py
|
40
|
# -*- encoding: utf-8 -*-
"""
$Id: Type.py,v 1.6.2.2 2009/06/09 18:39:06 customdesigned Exp $
This file is part of the pydns project.
Homepage: http://pydns.sourceforge.net
This code is covered by the standard Python License.
TYPE values (section 3.2.2)
"""
A = 1 # a host address
NS = 2 # an authoritative name server
MD = 3 # a mail destination (Obsolete - use MX)
MF = 4 # a mail forwarder (Obsolete - use MX)
CNAME = 5 # the canonical name for an alias
SOA = 6 # marks the start of a zone of authority
MB = 7 # a mailbox domain name (EXPERIMENTAL)
MG = 8 # a mail group member (EXPERIMENTAL)
MR = 9 # a mail rename domain name (EXPERIMENTAL)
NULL = 10 # a null RR (EXPERIMENTAL)
WKS = 11 # a well known service description
PTR = 12 # a domain name pointer
HINFO = 13 # host information
MINFO = 14 # mailbox or mail list information
MX = 15 # mail exchange
TXT = 16 # text strings
AAAA = 28 # IPv6 AAAA records (RFC 1886)
SRV = 33 # DNS RR for specifying the location of services (RFC 2782)
SPF = 99 # TXT RR for Sender Policy Framework
# Additional TYPE values from host.c source
UNAME = 110
MP = 240
# QTYPE values (section 3.2.3)
AXFR = 252 # A request for a transfer of an entire zone
MAILB = 253 # A request for mailbox-related records (MB, MG or MR)
MAILA = 254 # A request for mail agent RRs (Obsolete - see MX)
ANY = 255 # A request for all records
# Construct reverse mapping dictionary
_names = dir()
typemap = {}
for _name in _names:
if _name[0] != '_': typemap[eval(_name)] = _name
def typestr(type):
if typemap.has_key(type): return typemap[type]
else: return `type`
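# Illustrative sketch of the reverse mapping (values from the table above):
#     typestr(1)   -> 'A'
#     typestr(15)  -> 'MX'
#     typestr(999) -> '999'    (unknown codes fall back to their repr)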
#
# $Log: Type.py,v $
# Revision 1.6.2.2 2009/06/09 18:39:06 customdesigned
# Built-in SPF support
#
# Revision 1.6.2.1 2007/05/22 20:20:39 customdesigned
# Mark utf-8 encoding
#
# Revision 1.6 2002/03/19 12:41:33 anthonybaxter
# tabnannied and reindented everything. 4 space indent, no tabs.
# yay.
#
# Revision 1.5 2002/03/19 12:26:13 anthonybaxter
# death to leading tabs.
#
# Revision 1.4 2001/08/09 09:08:55 anthonybaxter
# added identifying header to top of each file
#
# Revision 1.3 2001/07/19 07:38:28 anthony
# added type code for SRV. From Michael Ströder.
#
# Revision 1.2 2001/07/19 06:57:07 anthony
# cvs keywords added
#
#
|
sandeepgupta2k4/tensorflow
|
refs/heads/master
|
tensorflow/python/platform/resource_loader.py
|
75
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Resource management library.
@@get_data_files_path
@@get_path_to_datafile
@@get_root_dir_with_all_resources
@@load_resource
@@readahead_file_path
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os as _os
import sys as _sys
from tensorflow.python.util import tf_inspect as _inspect
from tensorflow.python.util.all_util import remove_undocumented
def load_resource(path):
"""Load the resource at given path, where path is relative to tensorflow/.
Args:
path: a string resource path relative to tensorflow/.
Returns:
The contents of that resource.
Raises:
IOError: If the path is not found, or the resource can't be opened.
"""
tensorflow_root = (_os.path.join(
_os.path.dirname(__file__), _os.pardir, _os.pardir))
path = _os.path.join(tensorflow_root, path)
path = _os.path.abspath(path)
with open(path, 'rb') as f:
return f.read()
# pylint: disable=protected-access
def get_data_files_path():
"""Get a direct path to the data files colocated with the script.
Returns:
The directory where files specified in data attribute of py_test
and py_binary are stored.
"""
return _os.path.dirname(_inspect.getfile(_sys._getframe(1)))
def get_root_dir_with_all_resources():
"""Get a root directory containing all the data attributes in the build rule.
Returns:
The path to the specified file present in the data attribute of py_test
or py_binary. Falls back to returning the same as get_data_files_path if it
fails to detect a bazel runfiles directory.
"""
script_dir = get_data_files_path()
# Create a history of the paths, because the data files are located relative
# to the repository root directory, which is directly under runfiles
# directory.
directories = [script_dir]
data_files_dir = ''
while True:
candidate_dir = directories[-1]
current_directory = _os.path.basename(candidate_dir)
if '.runfiles' in current_directory:
# Our file should never be directly under runfiles.
# If the history has only one item, it means we are directly inside the
# runfiles directory, something is wrong, fall back to the default return
# value, script directory.
if len(directories) > 1:
data_files_dir = directories[-2]
break
else:
new_candidate_dir = _os.path.dirname(candidate_dir)
# If we are at the root directory these two will be the same.
if new_candidate_dir == candidate_dir:
break
else:
directories.append(new_candidate_dir)
return data_files_dir or script_dir
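# Illustrative walk (paths are hypothetical): for a script at
#   /build/my_test.runfiles/org_tensorflow/tensorflow/python/foo.py
# the loop above climbs parent directories until it reaches the '.runfiles'
# directory and returns /build/my_test.runfiles/org_tensorflow, the
# repository root sitting directly under the runfiles directory.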
def get_path_to_datafile(path):
"""Get the path to the specified file in the data dependencies.
The path is relative to tensorflow/
Args:
path: a string resource path relative to tensorflow/
Returns:
The path to the specified file present in the data attribute of py_test
or py_binary.
Raises:
IOError: If the path is not found, or the resource can't be opened.
"""
data_files_path = _os.path.dirname(_inspect.getfile(_sys._getframe(1)))
return _os.path.join(data_files_path, path)
def readahead_file_path(path, readahead='128M'): # pylint: disable=unused-argument
"""Readahead files not implemented; simply returns given path."""
return path
_allowed_symbols = []
remove_undocumented(__name__, _allowed_symbols)
|
isra17/hubot-against-humanity-backend
|
refs/heads/master
|
hah/server.py
|
1
|
from hah import create_app
app = create_app()
app.logger.info('[server] init done')
|
appsembler/edx-platform
|
refs/heads/appsembler/tahoe/master
|
common/djangoapps/terrain/stubs/lti.py
|
20
|
"""
Stub implementation of LTI Provider.
What is supported:
------------------
1.) This LTI Provider can service only one Tool Consumer at a time. It is
not possible to have this LTI multiple times on a single page in the LMS.
"""
import base64
import hashlib
import textwrap
import urllib
from uuid import uuid4
import mock
import oauthlib.oauth1
import requests
from django.conf import settings
from http import StubHttpRequestHandler, StubHttpService
from oauthlib.oauth1.rfc5849 import parameters, signature
class StubLtiHandler(StubHttpRequestHandler):
"""
A handler for LTI POST and GET requests.
"""
DEFAULT_CLIENT_KEY = 'test_client_key'
DEFAULT_CLIENT_SECRET = 'test_client_secret'
DEFAULT_LTI_ENDPOINT = 'correct_lti_endpoint'
DEFAULT_LTI_ADDRESS = 'http://{host}:{port}/'
def do_GET(self):
"""
Handle a GET request from the client and sends response back.
Used for checking LTI Provider started correctly.
"""
self.send_response(200, 'This is LTI Provider.', {'Content-type': 'text/plain'})
def do_POST(self):
"""
Handle a POST request from the client and sends response back.
"""
if 'grade' in self.path and self._send_graded_result().status_code == 200:
status_message = 'LTI consumer (edX) responded with XML content:<br>' + self.server.grade_data['TC answer']
content = self._create_content(status_message)
self.send_response(200, content)
elif 'lti2_outcome' in self.path and self._send_lti2_outcome().status_code == 200:
status_message = 'LTI consumer (edX) responded with HTTP {}<br>'.format(
self.server.grade_data['status_code'])
content = self._create_content(status_message)
self.send_response(200, content)
elif 'lti2_delete' in self.path and self._send_lti2_delete().status_code == 200:
status_message = 'LTI consumer (edX) responded with HTTP {}<br>'.format(
self.server.grade_data['status_code'])
content = self._create_content(status_message)
self.send_response(200, content)
# Respond to request with correct lti endpoint
elif self._is_correct_lti_request():
params = {k: v for k, v in self.post_dict.items() if k != 'oauth_signature'}
if self._check_oauth_signature(params, self.post_dict.get('oauth_signature', "")):
status_message = "This is LTI tool. Success."
                # Set the grade data that needs to be stored as server data
if 'lis_outcome_service_url' in self.post_dict:
self.server.grade_data = {
'callback_url': self.post_dict.get('lis_outcome_service_url').replace('https', 'http'),
'sourcedId': self.post_dict.get('lis_result_sourcedid')
}
host = getattr(settings, 'LETTUCE_HOST', self.server.server_address[0])
submit_url = '//{}:{}'.format(host, self.server.server_address[1])
content = self._create_content(status_message, submit_url)
self.send_response(200, content)
else:
content = self._create_content("Wrong LTI signature")
self.send_response(200, content)
else:
content = self._create_content("Invalid request URL")
self.send_response(500, content)
def _send_graded_result(self):
"""
Send grade request.
"""
values = {
'textString': 0.5,
'sourcedId': self.server.grade_data['sourcedId'],
'imsx_messageIdentifier': uuid4().hex,
}
payload = textwrap.dedent("""
<?xml version = "1.0" encoding = "UTF-8"?>
<imsx_POXEnvelopeRequest xmlns="http://www.imsglobal.org/services/ltiv1p1/xsd/imsoms_v1p0">
<imsx_POXHeader>
<imsx_POXRequestHeaderInfo>
<imsx_version>V1.0</imsx_version>
<imsx_messageIdentifier>{imsx_messageIdentifier}</imsx_messageIdentifier> /
</imsx_POXRequestHeaderInfo>
</imsx_POXHeader>
<imsx_POXBody>
<replaceResultRequest>
<resultRecord>
<sourcedGUID>
<sourcedId>{sourcedId}</sourcedId>
</sourcedGUID>
<result>
<resultScore>
<language>en-us</language>
<textString>{textString}</textString>
</resultScore>
</result>
</resultRecord>
</replaceResultRequest>
</imsx_POXBody>
</imsx_POXEnvelopeRequest>
""")
data = payload.format(**values)
url = self.server.grade_data['callback_url']
headers = {
'Content-Type': 'application/xml',
'X-Requested-With': 'XMLHttpRequest',
'Authorization': self._oauth_sign(url, data)
}
        # Send request ignoring verification of the SSL certificate
response = requests.post(url, data=data, headers=headers, verify=False)
self.server.grade_data['TC answer'] = response.content
return response
def _send_lti2_outcome(self):
"""
Send a grade back to consumer
"""
payload = textwrap.dedent("""
{{
"@context" : "http://purl.imsglobal.org/ctx/lis/v2/Result",
"@type" : "Result",
"resultScore" : {score},
"comment" : "This is awesome."
}}
""")
data = payload.format(score=0.8)
return self._send_lti2(data)
def _send_lti2_delete(self):
"""
Send a delete back to consumer
"""
payload = textwrap.dedent("""
{
"@context" : "http://purl.imsglobal.org/ctx/lis/v2/Result",
"@type" : "Result"
}
""")
return self._send_lti2(payload)
def _send_lti2(self, payload):
"""
Send lti2 json result service request.
"""
### We compute the LTI V2.0 service endpoint from the callback_url (which is set by the launch call)
url = self.server.grade_data['callback_url']
url_parts = url.split('/')
url_parts[-1] = "lti_2_0_result_rest_handler"
anon_id = self.server.grade_data['sourcedId'].split(":")[-1]
url_parts.extend(["user", anon_id])
new_url = '/'.join(url_parts)
content_type = 'application/vnd.ims.lis.v2.result+json'
headers = {
'Content-Type': content_type,
'Authorization': self._oauth_sign(new_url, payload,
method='PUT',
content_type=content_type)
}
        # Send request ignoring verification of the SSL certificate
response = requests.put(new_url, data=payload, headers=headers, verify=False)
self.server.grade_data['status_code'] = response.status_code
self.server.grade_data['TC answer'] = response.content
return response
def _create_content(self, response_text, submit_url=None):
"""
Return content (str) either for launch, send grade or get result from TC.
"""
if submit_url:
submit_form = textwrap.dedent("""
<form action="{submit_url}/grade" method="post">
<input type="submit" name="submit-button" value="Submit" id="submit-button">
</form>
<form action="{submit_url}/lti2_outcome" method="post">
<input type="submit" name="submit-lti2-button" value="Submit" id="submit-lti2-button">
</form>
<form action="{submit_url}/lti2_delete" method="post">
<input type="submit" name="submit-lti2-delete-button" value="Submit" id="submit-lti-delete-button">
</form>
""").format(submit_url=submit_url)
else:
submit_form = ''
# Show roles only for LTI launch.
if self.post_dict.get('roles'):
role = '<h5>Role: {}</h5>'.format(self.post_dict['roles'])
else:
role = ''
response_str = textwrap.dedent("""
<html>
<head>
<title>TEST TITLE</title>
</head>
<body>
<div>
<h2>IFrame loaded</h2>
<h3>Server response is:</h3>
<h3 class="result">{response}</h3>
{role}
</div>
{submit_form}
</body>
</html>
""").format(response=response_text, role=role, submit_form=submit_form)
# Currently LTI module doublequotes the lis_result_sourcedid parameter.
# Unquote response two times.
return urllib.unquote(urllib.unquote(response_str))
def _is_correct_lti_request(self):
"""
Return a boolean indicating whether the URL path is a valid LTI end-point.
"""
lti_endpoint = self.server.config.get('lti_endpoint', self.DEFAULT_LTI_ENDPOINT)
return lti_endpoint in self.path
def _oauth_sign(self, url, body, content_type=u'application/x-www-form-urlencoded', method=u'POST'):
"""
Signs request and returns signed Authorization header.
"""
client_key = self.server.config.get('client_key', self.DEFAULT_CLIENT_KEY)
client_secret = self.server.config.get('client_secret', self.DEFAULT_CLIENT_SECRET)
client = oauthlib.oauth1.Client(
client_key=unicode(client_key),
client_secret=unicode(client_secret)
)
headers = {
# This is needed for body encoding:
'Content-Type': content_type,
}
# Calculate and encode body hash. See http://oauth.googlecode.com/svn/spec/ext/body_hash/1.0/oauth-bodyhash.html
sha1 = hashlib.sha1()
sha1.update(body)
oauth_body_hash = unicode(base64.b64encode(sha1.digest()))
mock_request = mock.Mock(
uri=unicode(urllib.unquote(url)),
headers=headers,
body=u"",
decoded_body=u"",
http_method=unicode(method),
)
params = client.get_oauth_params(mock_request)
mock_request.oauth_params = params
mock_request.oauth_params.append((u'oauth_body_hash', oauth_body_hash))
sig = client.get_oauth_signature(mock_request)
mock_request.oauth_params.append((u'oauth_signature', sig))
new_headers = parameters.prepare_headers(mock_request.oauth_params, headers, realm=None)
return new_headers['Authorization']
def _check_oauth_signature(self, params, client_signature):
"""
Checks oauth signature from client.
`params` are params from post request except signature,
`client_signature` is signature from request.
Builds mocked request and verifies hmac-sha1 signing::
1. builds string to sign from `params`, `url` and `http_method`.
2. signs it with `client_secret` which comes from server settings.
3. obtains signature after sign and then compares it with request.signature
(request signature comes form client in request)
Returns `True` if signatures are correct, otherwise `False`.
"""
client_secret = unicode(self.server.config.get('client_secret', self.DEFAULT_CLIENT_SECRET))
host = getattr(settings, 'LETTUCE_HOST', '127.0.0.1')
port = self.server.server_address[1]
lti_base = self.DEFAULT_LTI_ADDRESS.format(host=host, port=port)
lti_endpoint = self.server.config.get('lti_endpoint', self.DEFAULT_LTI_ENDPOINT)
url = lti_base + lti_endpoint
request = mock.Mock()
request.params = [(unicode(k), unicode(v)) for k, v in params.items()]
request.uri = unicode(url)
request.http_method = u'POST'
request.signature = unicode(client_signature)
return signature.verify_hmac_sha1(request, client_secret)
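    # Hedged sketch of the verification flow (param values are illustrative):
    #     params = {u'oauth_consumer_key': u'test_client_key', ...}
    #     ok = handler._check_oauth_signature(params, client_signature)
    # True is returned only when client_signature matches the HMAC-SHA1
    # signature recomputed over the reconstructed launch URL with the
    # configured client_secret.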
class StubLtiService(StubHttpService):
"""
A stub LTI provider server that responds
to POST and GET requests to localhost.
"""
HANDLER_CLASS = StubLtiHandler
|
kapt/django-oscar
|
refs/heads/master
|
sites/demo/apps/order/migrations/__init__.py
|
12133432
| |
nirmeshk/oh-mainline
|
refs/heads/master
|
vendor/packages/Django/tests/modeltests/update_only_fields/__init__.py
|
12133432
| |
indictranstech/erpnext
|
refs/heads/develop
|
erpnext/education/doctype/student_language/__init__.py
|
12133432
| |
kawamon/hue
|
refs/heads/master
|
desktop/core/ext-py/Mako-1.0.7/test/test_filters.py
|
7
|
# -*- coding: utf-8 -*-
from mako.template import Template
import unittest
from test import TemplateTest, eq_, requires_python_2
from test.util import result_lines, flatten_result
from mako.compat import u
from mako import compat
class FilterTest(TemplateTest):
def test_basic(self):
t = Template("""
${x | myfilter}
""")
assert flatten_result(t.render(x="this is x", myfilter=lambda t: "MYFILTER->%s<-MYFILTER" % t)) == "MYFILTER->this is x<-MYFILTER"
def test_expr(self):
"""test filters that are themselves expressions"""
t = Template("""
${x | myfilter(y)}
""")
def myfilter(y):
return lambda x: "MYFILTER->%s<-%s" % (x, y)
assert flatten_result(t.render(x="this is x", myfilter=myfilter, y="this is y")) == "MYFILTER->this is x<-this is y"
def test_convert_str(self):
"""test that string conversion happens in expressions before sending to filters"""
t = Template("""
${x | trim}
""")
assert flatten_result(t.render(x=5)) == "5"
def test_quoting(self):
t = Template("""
foo ${bar | h}
""")
eq_(
flatten_result(t.render(bar="<'some bar'>")),
"foo <'some bar'>"
)
def test_url_escaping(self):
t = Template("""
http://example.com/?bar=${bar | u}&v=1
""")
eq_(
flatten_result(t.render(bar=u"酒吧bar")),
"http://example.com/?bar=%E9%85%92%E5%90%A7bar&v=1"
)
def test_entity(self):
t = Template("foo ${bar | entity}")
eq_(
flatten_result(t.render(bar="<'some bar'>")),
"foo <'some bar'>"
)
@requires_python_2
def test_quoting_non_unicode(self):
t = Template("""
foo ${bar | h}
""", disable_unicode=True,
output_encoding=None)
eq_(
flatten_result(t.render(bar="<'привет'>")),
"foo <'привет'>"
)
@requires_python_2
def test_url_escaping_non_unicode(self):
t = Template("""
http://example.com/?bar=${bar | u}&v=1
""", disable_unicode=True,
output_encoding=None)
eq_(
flatten_result(t.render(bar="酒吧bar")),
"http://example.com/?bar=%E9%85%92%E5%90%A7bar&v=1"
)
def test_def(self):
t = Template("""
<%def name="foo()" filter="myfilter">
this is foo
</%def>
${foo()}
""")
eq_(
flatten_result(t.render(x="this is x",
myfilter=lambda t: "MYFILTER->%s<-MYFILTER" % t)),
"MYFILTER-> this is foo <-MYFILTER"
)
def test_import(self):
t = Template("""
<%!
from mako import filters
%>\
trim this string: ${" some string to trim " | filters.trim} continue\
""")
assert t.render().strip()=="trim this string: some string to trim continue"
def test_import_2(self):
t = Template("""
trim this string: ${" some string to trim " | filters.trim} continue\
""", imports=["from mako import filters"])
#print t.code
assert t.render().strip()=="trim this string: some string to trim continue"
def test_encode_filter(self):
t = Template("""# coding: utf-8
some stuff.... ${x}
""", default_filters=['decode.utf8'])
eq_(
t.render_unicode(x=u("voix m’a réveillé")).strip(),
u("some stuff.... voix m’a réveillé")
)
def test_encode_filter_non_str(self):
t = Template("""# coding: utf-8
some stuff.... ${x}
""", default_filters=['decode.utf8'])
eq_(
t.render_unicode(x=3).strip(),
u("some stuff.... 3")
)
@requires_python_2
def test_encode_filter_non_str_we_return_bytes(self):
class Foo(object):
def __str__(self):
return compat.b("å")
t = Template("""# coding: utf-8
some stuff.... ${x}
""", default_filters=['decode.utf8'])
eq_(
t.render_unicode(x=Foo()).strip(),
u("some stuff.... å")
)
def test_custom_default(self):
t = Template("""
<%!
def myfilter(x):
return "->" + x + "<-"
%>
hi ${'there'}
""", default_filters=['myfilter'])
assert t.render().strip()=="hi ->there<-"
def test_global(self):
t = Template("""
<%page expression_filter="h"/>
${"<tag>this is html</tag>"}
""")
assert t.render().strip() == "<tag>this is html</tag>"
def test_block_via_context(self):
t = Template("""
<%block name="foo" filter="myfilter">
some text
</%block>
""")
def myfilter(text):
return "MYTEXT" + text
eq_(
result_lines(t.render(myfilter=myfilter)),
["MYTEXT", "some text"]
)
def test_def_via_context(self):
t = Template("""
<%def name="foo()" filter="myfilter">
some text
</%def>
${foo()}
""")
def myfilter(text):
return "MYTEXT" + text
eq_(
result_lines(t.render(myfilter=myfilter)),
["MYTEXT", "some text"]
)
def test_text_via_context(self):
t = Template("""
<%text filter="myfilter">
some text
</%text>
""")
def myfilter(text):
return "MYTEXT" + text
eq_(
result_lines(t.render(myfilter=myfilter)),
["MYTEXT", "some text"]
)
def test_nflag(self):
t = Template("""
${"<tag>this is html</tag>" | n}
""", default_filters=['h', 'unicode'])
assert t.render().strip() == "<tag>this is html</tag>"
t = Template("""
<%page expression_filter="h"/>
${"<tag>this is html</tag>" | n}
""")
assert t.render().strip() == "<tag>this is html</tag>"
t = Template("""
<%page expression_filter="h"/>
${"<tag>this is html</tag>" | n, h}
""")
assert t.render().strip() == "&lt;tag&gt;this is html&lt;/tag&gt;"
def test_non_expression(self):
t = Template("""
<%!
def a(text):
return "this is a"
def b(text):
return "this is b"
%>
${foo()}
<%def name="foo()" buffered="True">
this is text
</%def>
""", buffer_filters=['a'])
assert t.render().strip() == "this is a"
t = Template("""
<%!
def a(text):
return "this is a"
def b(text):
return "this is b"
%>
${'hi'}
${foo()}
<%def name="foo()" buffered="True">
this is text
</%def>
""", buffer_filters=['a'], default_filters=['b'])
assert flatten_result(t.render()) == "this is b this is b"
t = Template("""
<%!
class Foo(object):
foo = True
def __str__(self):
return "this is a"
def a(text):
return Foo()
def b(text):
if hasattr(text, 'foo'):
return str(text)
else:
return "this is b"
%>
${'hi'}
${foo()}
<%def name="foo()" buffered="True">
this is text
</%def>
""", buffer_filters=['a'], default_filters=['b'])
assert flatten_result(t.render()) == "this is b this is a"
t = Template("""
<%!
def a(text):
return "this is a"
def b(text):
return "this is b"
%>
${foo()}
${bar()}
<%def name="foo()" filter="b">
this is text
</%def>
<%def name="bar()" filter="b" buffered="True">
this is text
</%def>
""", buffer_filters=['a'])
assert flatten_result(t.render()) == "this is b this is a"
def test_builtins(self):
t = Template("""
${"this is <text>" | h}
""")
assert flatten_result(t.render()) == "this is <text>"
t = Template("""
http://foo.com/arg1=${"hi! this is a string." | u}
""")
assert flatten_result(t.render()) == "http://foo.com/arg1=hi%21+this+is+a+string."
class BufferTest(unittest.TestCase):
def test_buffered_def(self):
t = Template("""
<%def name="foo()" buffered="True">
this is foo
</%def>
${"hi->" + foo() + "<-hi"}
""")
assert flatten_result(t.render()) == "hi-> this is foo <-hi"
def test_unbuffered_def(self):
t = Template("""
<%def name="foo()" buffered="False">
this is foo
</%def>
${"hi->" + foo() + "<-hi"}
""")
assert flatten_result(t.render()) == "this is foo hi-><-hi"
def test_capture(self):
t = Template("""
<%def name="foo()" buffered="False">
this is foo
</%def>
${"hi->" + capture(foo) + "<-hi"}
""")
assert flatten_result(t.render()) == "hi-> this is foo <-hi"
def test_capture_exception(self):
template = Template("""
<%def name="a()">
this is a
<%
raise TypeError("hi")
%>
</%def>
<%
c = capture(a)
%>
a->${c}<-a
""")
try:
template.render()
assert False
except TypeError:
assert True
def test_buffered_exception(self):
template = Template("""
<%def name="a()" buffered="True">
<%
raise TypeError("hi")
%>
</%def>
${a()}
""")
try:
print(template.render())
assert False
except TypeError:
assert True
def test_capture_ccall(self):
t = Template("""
<%def name="foo()">
<%
x = capture(caller.body)
%>
this is foo. body: ${x}
</%def>
<%call expr="foo()">
ccall body
</%call>
""")
#print t.render()
assert flatten_result(t.render()) == "this is foo. body: ccall body"
|
aricooperman/Jzipline
|
refs/heads/master
|
zipline/pipeline/factors/factor.py
|
2
|
"""
factor.py
"""
from functools import wraps
from operator import attrgetter
from numbers import Number
from numpy import inf, where
from zipline.errors import UnknownRankMethod
from zipline.lib.normalize import naive_grouped_rowwise_apply
from zipline.lib.rank import masked_rankdata_2d
from zipline.pipeline.api_utils import restrict_to_dtype
from zipline.pipeline.classifiers import Classifier, Everything, Quantiles
from zipline.pipeline.mixins import (
CustomTermMixin,
LatestMixin,
PositiveWindowLengthMixin,
RestrictedDTypeMixin,
SingleInputMixin,
)
from zipline.pipeline.term import (
ComputableTerm,
NotSpecified,
NotSpecifiedType,
Term,
)
from zipline.pipeline.expression import (
BadBinaryOperator,
COMPARISONS,
is_comparison,
MATH_BINOPS,
method_name_for_op,
NumericalExpression,
NUMEXPR_MATH_FUNCS,
UNARY_OPS,
unary_op_name,
)
from zipline.pipeline.filters import (
Filter,
NumExprFilter,
PercentileFilter,
NullFilter,
)
from zipline.utils.functional import with_doc, with_name
from zipline.utils.input_validation import expect_types
from zipline.utils.math_utils import nanmean, nanstd
from zipline.utils.numpy_utils import (
bool_dtype,
categorical_dtype,
coerce_to_dtype,
datetime64ns_dtype,
float64_dtype,
int64_dtype,
)
_RANK_METHODS = frozenset(['average', 'min', 'max', 'dense', 'ordinal'])
def coerce_numbers_to_my_dtype(f):
"""
A decorator for methods whose signature is f(self, other) that coerces
``other`` to ``self.dtype``.
This is used to make comparison operations between numbers and `Factor`
instances work independently of whether the user supplies a float or
integer literal.
For example, if I write::
my_filter = my_factor > 3
my_factor probably has dtype float64, but 3 is an int, so we want to coerce
to float64 before doing the comparison.
"""
@wraps(f)
def method(self, other):
if isinstance(other, Number):
other = coerce_to_dtype(self.dtype, other)
return f(self, other)
return method
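# A toy illustration of the decorator (MyTerm is hypothetical, not part of
# zipline): ``other`` arrives as a plain int and is coerced to the
# instance's dtype before the wrapped method sees it.
#
#   class MyTerm(object):
#       dtype = float64_dtype
#       @coerce_numbers_to_my_dtype
#       def __gt__(self, other):
#           return other          # already a float64 scalar here
#
#   MyTerm() > 3                  # the 3 is coerced to 3.0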
def binop_return_type(op):
if is_comparison(op):
return NumExprFilter
else:
return NumExprFactor
def binop_return_dtype(op, left, right):
"""
Compute the expected return dtype for the given binary operator.
Parameters
----------
op : str
Operator symbol, (e.g. '+', '-', ...).
left : numpy.dtype
Dtype of left hand side.
right : numpy.dtype
Dtype of right hand side.
Returns
-------
outdtype : numpy.dtype
The dtype of the result of `left <op> right`.
"""
if is_comparison(op):
if left != right:
raise TypeError(
"Don't know how to compute {left} {op} {right}.\n"
"Comparisons are only supported between Factors of equal "
"dtypes.".format(left=left, op=op, right=right)
)
return bool_dtype
elif left != float64_dtype or right != float64_dtype:
raise TypeError(
"Don't know how to compute {left} {op} {right}.\n"
"Arithmetic operators are only supported between Factors of "
"dtype 'float64'.".format(
left=left.name,
op=op,
right=right.name,
)
)
return float64_dtype
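# For example (a sketch of the contract above):
#
#   binop_return_dtype('+', float64_dtype, float64_dtype)  # -> float64
#   binop_return_dtype('<', float64_dtype, float64_dtype)  # -> bool_dtype
#   binop_return_dtype('+', int64_dtype, float64_dtype)    # raises TypeError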
def binary_operator(op):
"""
Factory function for making binary operator methods on a Factor subclass.
Returns a function, "binary_operator" suitable for implementing functions
like __add__.
"""
# When combining a Factor with a NumericalExpression, we use this
# attrgetter instance to defer to the commuted implementation of the
# NumericalExpression operator.
commuted_method_getter = attrgetter(method_name_for_op(op, commute=True))
@with_doc("Binary Operator: '%s'" % op)
@with_name(method_name_for_op(op))
@coerce_numbers_to_my_dtype
def binary_operator(self, other):
# This can't be hoisted up a scope because the types returned by
# binop_return_type aren't defined when the top-level function is
# invoked in the class body of Factor.
return_type = binop_return_type(op)
if isinstance(self, NumExprFactor):
self_expr, other_expr, new_inputs = self.build_binary_op(
op, other,
)
return return_type(
"({left}) {op} ({right})".format(
left=self_expr,
op=op,
right=other_expr,
),
new_inputs,
dtype=binop_return_dtype(op, self.dtype, other.dtype),
)
elif isinstance(other, NumExprFactor):
# NumericalExpression overrides ops to correctly handle merging of
# inputs. Look up and call the appropriate reflected operator with
# ourself as the input.
return commuted_method_getter(other)(self)
elif isinstance(other, Term):
if self is other:
return return_type(
"x_0 {op} x_0".format(op=op),
(self,),
dtype=binop_return_dtype(op, self.dtype, other.dtype),
)
return return_type(
"x_0 {op} x_1".format(op=op),
(self, other),
dtype=binop_return_dtype(op, self.dtype, other.dtype),
)
elif isinstance(other, Number):
return return_type(
"x_0 {op} ({constant})".format(op=op, constant=other),
binds=(self,),
# .dtype access is safe here because coerce_numbers_to_my_dtype
# will convert any input numbers to numpy equivalents.
dtype=binop_return_dtype(op, self.dtype, other.dtype)
)
raise BadBinaryOperator(op, self, other)
return binary_operator
def reflected_binary_operator(op):
"""
Factory function for making binary operator methods on a Factor.
Returns a function, "reflected_binary_operator" suitable for implementing
functions like __radd__.
"""
assert not is_comparison(op)
@with_name(method_name_for_op(op, commute=True))
@coerce_numbers_to_my_dtype
def reflected_binary_operator(self, other):
if isinstance(self, NumericalExpression):
self_expr, other_expr, new_inputs = self.build_binary_op(
op, other
)
return NumExprFactor(
"({left}) {op} ({right})".format(
left=other_expr,
right=self_expr,
op=op,
),
new_inputs,
dtype=binop_return_dtype(op, other.dtype, self.dtype)
)
# Only have to handle the numeric case because in all other valid cases
# the corresponding left-binding method will be called.
elif isinstance(other, Number):
return NumExprFactor(
"{constant} {op} x_0".format(op=op, constant=other),
binds=(self,),
dtype=binop_return_dtype(op, other.dtype, self.dtype),
)
raise BadBinaryOperator(op, other, self)
return reflected_binary_operator
def unary_operator(op):
"""
Factory function for making unary operator methods for Factors.
"""
# Only negate is currently supported.
valid_ops = {'-'}
if op not in valid_ops:
raise ValueError("Invalid unary operator %s." % op)
@with_doc("Unary Operator: '%s'" % op)
@with_name(unary_op_name(op))
def unary_operator(self):
if self.dtype != float64_dtype:
raise TypeError(
"Can't apply unary operator {op!r} to instance of "
"{typename!r} with dtype {dtypename!r}.\n"
"{op!r} is only supported for Factors of dtype "
"'float64'.".format(
op=op,
typename=type(self).__name__,
dtypename=self.dtype.name,
)
)
# This can't be hoisted up a scope because the types returned by
# unary_op_return_type aren't defined when the top-level function is
# invoked.
if isinstance(self, NumericalExpression):
return NumExprFactor(
"{op}({expr})".format(op=op, expr=self._expr),
self.inputs,
dtype=float64_dtype,
)
else:
return NumExprFactor(
"{op}x_0".format(op=op),
(self,),
dtype=float64_dtype,
)
return unary_operator
def function_application(func):
"""
Factory function for producing function application methods for Factor
subclasses.
"""
if func not in NUMEXPR_MATH_FUNCS:
raise ValueError("Unsupported mathematical function '%s'" % func)
@with_name(func)
def mathfunc(self):
if isinstance(self, NumericalExpression):
return NumExprFactor(
"{func}({expr})".format(func=func, expr=self._expr),
self.inputs,
dtype=float64_dtype,
)
else:
return NumExprFactor(
"{func}(x_0)".format(func=func),
(self,),
dtype=float64_dtype,
)
return mathfunc
# Decorators for Factor methods.
if_not_float64_tell_caller_to_use_isnull = restrict_to_dtype(
dtype=float64_dtype,
message_template=(
"{method_name}() was called on a factor of dtype {received_dtype}.\n"
"{method_name}() is only defined for dtype {expected_dtype}."
"To filter missing data, use isnull() or notnull()."
)
)
float64_only = restrict_to_dtype(
dtype=float64_dtype,
message_template=(
"{method_name}() is only defined on Factors of dtype {expected_dtype},"
" but it was called on a Factor of dtype {received_dtype}."
)
)
FACTOR_DTYPES = frozenset([datetime64ns_dtype, float64_dtype, int64_dtype])
class Factor(RestrictedDTypeMixin, ComputableTerm):
"""
Pipeline API expression producing a numerical or date-valued output.
Factors are the most commonly-used Pipeline term, representing the result
of any computation producing a numerical result.
Factors can be combined, both with other Factors and with scalar values,
via any of the builtin mathematical operators (``+``, ``-``, ``*``, etc).
This makes it easy to write complex expressions that combine multiple
Factors. For example, constructing a Factor that computes the average of
two other Factors is simply::
>>> f1 = SomeFactor(...)
>>> f2 = SomeOtherFactor(...)
>>> average = (f1 + f2) / 2.0
Factors can also be converted into :class:`zipline.pipeline.Filter` objects
via comparison operators: (``<``, ``<=``, ``!=``, ``eq``, ``>``, ``>=``).
There are many natural operators defined on Factors besides the basic
numerical operators. These include methods identifying missing or
extreme-valued outputs (isnull, notnull, isnan, notnan), methods for
normalizing outputs (rank, demean, zscore), and methods for constructing
Filters based on rank-order properties of results (top, bottom,
percentile_between).
"""
ALLOWED_DTYPES = FACTOR_DTYPES # Used by RestrictedDTypeMixin
# Dynamically add functions for creating NumExprFactor/NumExprFilter
# instances.
clsdict = locals()
clsdict.update(
{
method_name_for_op(op): binary_operator(op)
# Don't override __eq__ because it breaks comparisons on tuples of
# Factors.
for op in MATH_BINOPS.union(COMPARISONS - {'=='})
}
)
clsdict.update(
{
method_name_for_op(op, commute=True): reflected_binary_operator(op)
for op in MATH_BINOPS
}
)
clsdict.update(
{
unary_op_name(op): unary_operator(op)
for op in UNARY_OPS
}
)
clsdict.update(
{
funcname: function_application(funcname)
for funcname in NUMEXPR_MATH_FUNCS
}
)
__truediv__ = clsdict['__div__']
__rtruediv__ = clsdict['__rdiv__']
eq = binary_operator('==')
@expect_types(
mask=(Filter, NotSpecifiedType),
groupby=(Classifier, NotSpecifiedType),
)
@float64_only
def demean(self, mask=NotSpecified, groupby=NotSpecified):
"""
Construct a Factor that computes ``self`` and subtracts the mean from
each row of the result.
If ``mask`` is supplied, ignore values where ``mask`` returns False
when computing row means, and output NaN anywhere the mask is False.
If ``groupby`` is supplied, compute by partitioning each row based on
the values produced by ``groupby``, de-meaning the partitioned arrays,
and stitching the sub-results back together.
Parameters
----------
mask : zipline.pipeline.Filter, optional
A Filter defining values to ignore when computing means.
groupby : zipline.pipeline.Classifier, optional
A classifier defining partitions over which to compute means.
Example
-------
Let ``f`` be a Factor which would produce the following output::
AAPL MSFT MCD BK
2017-03-13 1.0 2.0 3.0 4.0
2017-03-14 1.5 2.5 3.5 1.0
2017-03-15 2.0 3.0 4.0 1.5
2017-03-16 2.5 3.5 1.0 2.0
Let ``c`` be a Classifier producing the following output::
AAPL MSFT MCD BK
2017-03-13 1 1 2 2
2017-03-14 1 1 2 2
2017-03-15 1 1 2 2
2017-03-16 1 1 2 2
Let ``m`` be a Filter producing the following output::
AAPL MSFT MCD BK
2017-03-13 False True True True
2017-03-14 True False True True
2017-03-15 True True False True
2017-03-16 True True True False
Then ``f.demean()`` will subtract the mean from each row produced by
``f``.
::
AAPL MSFT MCD BK
2017-03-13 -1.500 -0.500 0.500 1.500
2017-03-14 -0.625 0.375 1.375 -1.125
2017-03-15 -0.625 0.375 1.375 -1.125
2017-03-16 0.250 1.250 -1.250 -0.250
``f.demean(mask=m)`` will subtract the mean from each row, but means
will be calculated ignoring values on the diagonal, and NaNs will
be written to the diagonal in the output. Diagonal values are ignored
because they are the locations where the mask ``m`` produced False.
::
AAPL MSFT MCD BK
2017-03-13 NaN -1.000 0.000 1.000
2017-03-14 -0.500 NaN 1.500 -1.000
2017-03-15 -0.166 0.833 NaN -0.666
2017-03-16 0.166 1.166 -1.333 NaN
``f.demean(groupby=c)`` will subtract the group-mean of AAPL/MSFT and
MCD/BK from their respective entries. The AAPL/MSFT are grouped
together because both assets always produce 1 in the output of the
classifier ``c``. Similarly, MCD/BK are grouped together because they
always produce 2.
::
AAPL MSFT MCD BK
2017-03-13 -0.500 0.500 -0.500 0.500
2017-03-14 -0.500 0.500 1.250 -1.250
2017-03-15 -0.500 0.500 1.250 -1.250
2017-03-16 -0.500 0.500 -0.500 0.500
``f.demean(mask=m, groupby=c)`` will also subtract the group-mean of
AAPL/MSFT and MCD/BK, but means will be calculated ignoring values on
the diagonal, and NaNs will be written to the diagonal in the output.
::
AAPL MSFT MCD BK
2017-03-13 NaN 0.000 -0.500 0.500
2017-03-14 0.000 NaN 1.250 -1.250
2017-03-15 -0.500 0.500 NaN 0.000
2017-03-16 -0.500 0.500 0.000 NaN
Notes
-----
Mean is sensitive to the magnitudes of outliers. When working with
a factor that can potentially produce large outliers, it is often useful
to use the ``mask`` parameter to discard values at the extremes of the
distribution::
>>> base = MyFactor(...)
>>> normalized = base.demean(mask=base.percentile_between(1, 99))
``demean()`` is only supported on Factors of dtype float64.
See Also
--------
:meth:`pandas.DataFrame.groupby`
"""
# This is a named function so that it has a __name__ for use in the
# graph repr of GroupedRowTransform.
def demean(row):
return row - nanmean(row)
return GroupedRowTransform(
transform=demean,
factor=self,
mask=mask,
groupby=groupby,
)
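# A hypothetical usage sketch (MyFactor and sector are illustrative, not
# defined here): demean each row within sector groups, ignoring extreme
# values when computing the group means.
#
#   f = MyFactor(...)
#   demeaned = f.demean(
#       mask=f.percentile_between(1, 99),
#       groupby=sector,
#   )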
@expect_types(
mask=(Filter, NotSpecifiedType),
groupby=(Classifier, NotSpecifiedType),
)
@float64_only
def zscore(self, mask=NotSpecified, groupby=NotSpecified):
"""
Construct a Factor that Z-Scores each day's results.
The Z-Score of a row is defined as::
(row - row.mean()) / row.stddev()
If ``mask`` is supplied, ignore values where ``mask`` returns False
when computing row means and standard deviations, and output NaN
anywhere the mask is False.
If ``groupby`` is supplied, compute by partitioning each row based on
the values produced by ``groupby``, z-scoring the partitioned arrays,
and stitching the sub-results back together.
Parameters
----------
mask : zipline.pipeline.Filter, optional
A Filter defining values to ignore when Z-Scoring.
groupby : zipline.pipeline.Classifier, optional
A classifier defining partitions over which to compute Z-Scores.
Returns
-------
zscored : zipline.pipeline.Factor
A Factor that z-scores the output of ``self``.
Notes
-----
Mean and standard deviation are sensitive to the magnitudes of
outliers. When working with a factor that can potentially produce large
outliers, it is often useful to use the ``mask`` parameter to discard
values at the extremes of the distribution::
>>> base = MyFactor(...)
>>> normalized = base.zscore(mask=base.percentile_between(1, 99))
``zscore()`` is only supported on Factors of dtype float64.
Example
-------
See :meth:`~zipline.pipeline.factors.Factor.demean` for an in-depth
example of the semantics for ``mask`` and ``groupby``.
See Also
--------
:meth:`pandas.DataFrame.groupby`
"""
# This is a named function so that it has a __name__ for use in the
# graph repr of GroupedRowTransform.
def zscore(row):
return (row - nanmean(row)) / nanstd(row)
return GroupedRowTransform(
transform=zscore,
factor=self,
mask=mask,
groupby=groupby,
window_safe=True,
)
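# A hypothetical usage sketch mirroring demean() above (sector is
# illustrative, not defined here):
#
#   f = MyFactor(...)
#   normalized = f.zscore(mask=f.notnan(), groupby=sector)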
def rank(self, method='ordinal', ascending=True, mask=NotSpecified):
"""
Construct a new Factor representing the sorted rank of each column
within each row.
Parameters
----------
method : str, {'ordinal', 'min', 'max', 'dense', 'average'}
The method used to assign ranks to tied elements. See
`scipy.stats.rankdata` for a full description of the semantics for
each ranking method. Default is 'ordinal'.
ascending : bool, optional
Whether to return sorted rank in ascending or descending order.
Default is True.
mask : zipline.pipeline.Filter, optional
A Filter representing assets to consider when computing ranks.
If mask is supplied, ranks are computed ignoring any asset/date
pairs for which `mask` produces a value of False.
Returns
-------
ranks : zipline.pipeline.factors.Rank
A new factor that will compute the ranking of the data produced by
`self`.
Notes
-----
The default value for `method` is different from the default for
`scipy.stats.rankdata`. See that function's documentation for a full
description of the valid inputs to `method`.
Missing or non-existent data on a given day will cause an asset to be
given a rank of NaN for that day.
See Also
--------
:func:`scipy.stats.rankdata`
:class:`zipline.pipeline.factors.factor.Rank`
"""
return Rank(self, method=method, ascending=ascending, mask=mask)
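# For example (hypothetical factor): rank highest values first, breaking
# ties by order of appearance.
#
#   ranks = MyFactor(...).rank(method='ordinal', ascending=False)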
@expect_types(bins=int, mask=(Filter, NotSpecifiedType))
def quantiles(self, bins, mask=NotSpecified):
"""
Construct a Classifier computing quantiles of the output of ``self``.
Every non-NaN data point the output is labelled with an integer value
from 0 to (bins - 1). NaNs are labelled with -1.
If ``mask`` is supplied, ignore data points in locations for which
``mask`` produces False, and emit a label of -1 at those locations.
Parameters
----------
bins : int
Number of bin labels to compute.
mask : zipline.pipeline.Filter, optional
Mask of values to ignore when computing quantiles.
Returns
-------
quantiles : zipline.pipeline.classifiers.Quantiles
A Classifier producing integer labels ranging from 0 to (bins - 1).
"""
if mask is NotSpecified:
mask = self.mask
return Quantiles(inputs=(self,), bins=bins, mask=mask)
@expect_types(mask=(Filter, NotSpecifiedType))
def quartiles(self, mask=NotSpecified):
"""
Construct a Classifier computing quartiles over the output of ``self``.
Every non-NaN data point in the output is labelled with a value of either
0, 1, 2, or 3, corresponding to the first, second, third, or fourth
quartile over each row. NaN data points are labelled with -1.
If ``mask`` is supplied, ignore data points in locations for which
``mask`` produces False, and emit a label of -1 at those locations.
Parameters
----------
mask : zipline.pipeline.Filter, optional
Mask of values to ignore when computing quartiles.
Returns
-------
quartiles : zipline.pipeline.classifiers.Quantiles
A Classifier producing integer labels ranging from 0 to 3.
"""
return self.quantiles(bins=4, mask=mask)
@expect_types(mask=(Filter, NotSpecifiedType))
def quintiles(self, mask=NotSpecified):
"""
Construct a Classifier computing quintile labels on ``self``.
Every non-NaN data point in the output is labelled with a value of
0, 1, 2, 3, or 4, corresponding to quintiles over each row. NaN data
points are labelled with -1.
If ``mask`` is supplied, ignore data points in locations for which
``mask`` produces False, and emit a label of -1 at those locations.
Parameters
----------
mask : zipline.pipeline.Filter, optional
Mask of values to ignore when computing quintiles.
Returns
-------
quintiles : zipline.pipeline.classifiers.Quantiles
A Classifier producing integer labels ranging from 0 to 4.
"""
return self.quantiles(bins=5, mask=mask)
@expect_types(mask=(Filter, NotSpecifiedType))
def deciles(self, mask=NotSpecified):
"""
Construct a Classifier computing decile labels on ``self``.
Every non-NaN data point in the output is labelled with a value from 0 to
9, corresponding to deciles over each row. NaN data points are labelled
with -1.
If ``mask`` is supplied, ignore data points in locations for which
``mask`` produces False, and emit a label of -1 at those locations.
Parameters
----------
mask : zipline.pipeline.Filter, optional
Mask of values to ignore when computing deciles.
Returns
-------
deciles : zipline.pipeline.classifiers.Quantiles
A Classifier producing integer labels ranging from 0 to 9.
"""
return self.quantiles(bins=10, mask=mask)
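# The quantile helpers above are thin wrappers over quantiles(); e.g.
# (hypothetical factor):
#
#   quartile_labels = MyFactor(...).quantiles(bins=4)  # same as .quartiles()
#   top_decile = MyFactor(...).deciles().eq(9)         # Filter for label 9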
def top(self, N, mask=NotSpecified):
"""
Construct a Filter matching the top N asset values of self each day.
Parameters
----------
N : int
Number of assets passing the returned filter each day.
mask : zipline.pipeline.Filter, optional
A Filter representing assets to consider when computing ranks.
If mask is supplied, top values are computed ignoring any
asset/date pairs for which `mask` produces a value of False.
Returns
-------
filter : zipline.pipeline.filters.Filter
"""
return self.rank(ascending=False, mask=mask) <= N
def bottom(self, N, mask=NotSpecified):
"""
Construct a Filter matching the bottom N asset values of self each day.
Parameters
----------
N : int
Number of assets passing the returned filter each day.
mask : zipline.pipeline.Filter, optional
A Filter representing assets to consider when computing ranks.
If mask is supplied, bottom values are computed ignoring any
asset/date pairs for which `mask` produces a value of False.
Returns
-------
filter : zipline.pipeline.Filter
"""
return self.rank(ascending=True, mask=mask) <= N
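# For example (hypothetical factor): a Filter passing the 50 highest values
# per day, which by the definition above is rank(ascending=False) <= 50.
#
#   in_top_50 = MyFactor(...).top(50)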
def percentile_between(self,
min_percentile,
max_percentile,
mask=NotSpecified):
"""
Construct a new Filter representing entries from the output of this
Factor that fall within the percentile range defined by min_percentile
and max_percentile.
Parameters
----------
min_percentile : float [0.0, 100.0]
Return True for assets falling above this percentile in the data.
max_percentile : float [0.0, 100.0]
Return True for assets falling below this percentile in the data.
mask : zipline.pipeline.Filter, optional
A Filter representing assets to consider when calculating percentile
thresholds. If mask is supplied, percentile cutoffs
are computed each day using only assets for which ``mask`` returns
True. Assets for which ``mask`` produces False will produce False
in the output of this Factor as well.
Returns
-------
out : zipline.pipeline.filters.PercentileFilter
A new filter that will compute the specified percentile-range mask.
See Also
--------
zipline.pipeline.filters.filter.PercentileFilter
"""
return PercentileFilter(
self,
min_percentile=min_percentile,
max_percentile=max_percentile,
mask=mask,
)
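# For example (hypothetical factor): keep the middle 80% of values each day.
#
#   middle = MyFactor(...).percentile_between(10, 90)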
def isnull(self):
"""
A Filter producing True for values where this Factor has missing data.
Equivalent to self.isnan() when ``self.dtype`` is float64.
Otherwise equivalent to ``self.eq(self.missing_value)``.
Returns
-------
filter : zipline.pipeline.filters.Filter
"""
if self.dtype == float64_dtype:
# Using isnan is more efficient when possible because we can fold
# the isnan computation with other NumExpr expressions.
return self.isnan()
else:
return NullFilter(self)
def notnull(self):
"""
A Filter producing True for values where this Factor has complete data.
Equivalent to ``~self.isnan()`` when ``self.dtype`` is float64.
Otherwise equivalent to ``(self != self.missing_value)``.
"""
return ~self.isnull()
@if_not_float64_tell_caller_to_use_isnull
def isnan(self):
"""
A Filter producing True for all values where this Factor is NaN.
Returns
-------
nanfilter : zipline.pipeline.filters.Filter
"""
return self != self
@if_not_float64_tell_caller_to_use_isnull
def notnan(self):
"""
A Filter producing True for values where this Factor is not NaN.
Returns
-------
nanfilter : zipline.pipeline.filters.Filter
"""
return ~self.isnan()
@if_not_float64_tell_caller_to_use_isnull
def isfinite(self):
"""
A Filter producing True for values where this Factor is anything but
NaN, inf, or -inf.
"""
return (-inf < self) & (self < inf)
class NumExprFactor(NumericalExpression, Factor):
"""
Factor computed from a numexpr expression.
Parameters
----------
expr : string
A string suitable for passing to numexpr. All variables in 'expr'
should be of the form "x_i", where i is the index of the corresponding
factor input in 'binds'.
binds : tuple
A tuple of factors to use as inputs.
Notes
-----
NumExprFactors are constructed by numerical operators like `+` and `-`.
Users should rarely need to construct a NumExprFactor directly.
"""
pass
class GroupedRowTransform(Factor):
"""
A Factor that transforms an input factor by applying a row-wise
shape-preserving transformation on classifier-defined groups of that
Factor.
This is most often useful for normalization operators like ``zscore`` or
``demean``.
Parameters
----------
transform : function[ndarray[ndim=1] -> ndarray[ndim=1]]
Function to apply over each row group.
factor : zipline.pipeline.Factor
The factor providing baseline data to transform.
mask : zipline.pipeline.Filter
Mask of entries to ignore when calculating transforms.
groupby : zipline.pipeline.Classifier
Classifier partitioning ``factor`` into groups to use when calculating
means.
Notes
-----
Users should rarely construct instances of this factor directly. Instead,
they should construct instances via factor normalization methods like
``zscore`` and ``demean``.
See Also
--------
zipline.pipeline.factors.Factor.zscore
zipline.pipeline.factors.Factor.demean
"""
window_length = 0
def __new__(cls, transform, factor, mask, groupby, **kwargs):
if mask is NotSpecified:
mask = factor.mask
else:
mask = mask & factor.mask
if groupby is NotSpecified:
groupby = Everything(mask=mask)
return super(GroupedRowTransform, cls).__new__(
GroupedRowTransform,
transform=transform,
inputs=(factor, groupby),
missing_value=factor.missing_value,
mask=mask,
dtype=factor.dtype,
**kwargs
)
def _init(self, transform, *args, **kwargs):
self._transform = transform
return super(GroupedRowTransform, self)._init(*args, **kwargs)
@classmethod
def _static_identity(cls, transform, *args, **kwargs):
return (
super(GroupedRowTransform, cls)._static_identity(*args, **kwargs),
transform,
)
def _compute(self, arrays, dates, assets, mask):
data = arrays[0]
groupby_expr = self.inputs[1]
if groupby_expr.dtype == int64_dtype:
group_labels = arrays[1]
null_label = self.inputs[1].missing_value
elif groupby_expr.dtype == categorical_dtype:
# Coerce our LabelArray into an isomorphic array of ints. This is
# necessary because np.where doesn't know about LabelArrays or the
# void dtype.
group_labels = arrays[1].as_int_array()
null_label = arrays[1].missing_value_code
else:
raise TypeError(
"Unexpected groupby dtype: %s." % groupby_expr.dtype
)
# Make a copy with the null code written to masked locations.
group_labels = where(mask, group_labels, null_label)
return where(
group_labels != null_label,
naive_grouped_rowwise_apply(
data=data,
group_labels=group_labels,
func=self._transform,
),
self.missing_value,
)
@property
def transform_name(self):
return self._transform.__name__
def short_repr(self):
return type(self).__name__ + '(%r)' % self.transform_name
class Rank(SingleInputMixin, Factor):
"""
A Factor representing the row-wise rank data of another Factor.
Parameters
----------
factor : zipline.pipeline.factors.Factor
The factor on which to compute ranks.
method : str, {'average', 'min', 'max', 'dense', 'ordinal'}
The method used to assign ranks to tied elements. See
`scipy.stats.rankdata` for a full description of the semantics for each
ranking method.
See Also
--------
:func:`scipy.stats.rankdata`
:class:`Factor.rank`
Notes
-----
Most users should call Factor.rank rather than directly construct an
instance of this class.
"""
window_length = 0
dtype = float64_dtype
window_safe = True
def __new__(cls, factor, method, ascending, mask):
return super(Rank, cls).__new__(
cls,
inputs=(factor,),
method=method,
ascending=ascending,
mask=mask,
)
def _init(self, method, ascending, *args, **kwargs):
self._method = method
self._ascending = ascending
return super(Rank, self)._init(*args, **kwargs)
@classmethod
def _static_identity(cls, method, ascending, *args, **kwargs):
return (
super(Rank, cls)._static_identity(*args, **kwargs),
method,
ascending,
)
def _validate(self):
"""
Verify that the stored rank method is valid.
"""
if self._method not in _RANK_METHODS:
raise UnknownRankMethod(
method=self._method,
choices=set(_RANK_METHODS),
)
return super(Rank, self)._validate()
def _compute(self, arrays, dates, assets, mask):
"""
For each row in the input, compute a like-shaped array of per-row
ranks.
"""
return masked_rankdata_2d(
arrays[0],
mask,
self.inputs[0].missing_value,
self._method,
self._ascending,
)
def __repr__(self):
return "{type}({input_}, method='{method}', mask={mask})".format(
type=type(self).__name__,
input_=self.inputs[0],
method=self._method,
mask=self.mask,
)
class CustomFactor(PositiveWindowLengthMixin, CustomTermMixin, Factor):
'''
Base class for user-defined Factors.
Parameters
----------
inputs : iterable, optional
An iterable of `BoundColumn` instances (e.g. USEquityPricing.close),
describing the data to load and pass to `self.compute`. If this
argument is not passed to the CustomFactor constructor, we look for a
class-level attribute named `inputs`.
outputs : iterable[str], optional
An iterable of strings which represent the names of each output this
factor should compute and return. If this argument is not passed to the
CustomFactor constructor, we look for a class-level attribute named
`outputs`.
window_length : int, optional
Number of rows to pass for each input. If this argument is not passed
to the CustomFactor constructor, we look for a class-level attribute
named `window_length`.
mask : zipline.pipeline.Filter, optional
A Filter describing the assets on which we should compute each day.
Each call to ``CustomFactor.compute`` will only receive assets for
which ``mask`` produced True on the day for which compute is being
called.
Notes
-----
Users implementing their own Factors should subclass CustomFactor and
implement a method named `compute` with the following signature:
.. code-block:: python
def compute(self, today, assets, out, *inputs):
...
On each simulation date, ``compute`` will be called with the current date,
an array of sids, an output array, and an input array for each expression
passed as inputs to the CustomFactor constructor.
The specific types of the values passed to `compute` are as follows::
today : np.datetime64[ns]
Row label for the last row of all arrays passed as `inputs`.
assets : np.array[int64, ndim=1]
Column labels for `out` and `inputs`.
out : np.array[self.dtype, ndim=1]
Output array of the same shape as `assets`. `compute` should write
its desired return values into `out`. If multiple outputs are
specified, `compute` should write its desired return values into
`out.<output_name>` for each output name in `self.outputs`.
*inputs : tuple of np.array
Raw data arrays corresponding to the values of `self.inputs`.
``compute`` functions should expect to be passed NaN values for dates on
which no data was available for an asset. This may include dates on which
an asset did not yet exist.
For example, if a CustomFactor requires 10 rows of close price data, and
asset A started trading on Monday June 2nd, 2014, then on Tuesday, June
3rd, 2014, the column of input data for asset A will have 9 leading NaNs
for the preceding days on which data was not yet available.
Examples
--------
A CustomFactor with pre-declared defaults:
.. code-block:: python
class TenDayRange(CustomFactor):
"""
Computes the difference between the highest high in the last 10
days and the lowest low.
Pre-declares high and low as default inputs and `window_length` as
10.
"""
inputs = [USEquityPricing.high, USEquityPricing.low]
window_length = 10
def compute(self, today, assets, out, highs, lows):
from numpy import nanmin, nanmax
highest_highs = nanmax(highs, axis=0)
lowest_lows = nanmin(lows, axis=0)
out[:] = highest_highs - lowest_lows
# Doesn't require passing inputs or window_length because they're
# pre-declared as defaults for the TenDayRange class.
ten_day_range = TenDayRange()
A CustomFactor without defaults:
.. code-block:: python
class MedianValue(CustomFactor):
"""
Computes the median value of an arbitrary single input over an
arbitrary window.
Does not declare any defaults, so values for `window_length` and
`inputs` must be passed explicitly on every construction.
"""
def compute(self, today, assets, out, data):
from numpy import nanmedian
out[:] = nanmedian(data, axis=0)
# Values for `inputs` and `window_length` must be passed explicitly to
# MedianValue.
median_close10 = MedianValue([USEquityPricing.close], window_length=10)
median_low15 = MedianValue([USEquityPricing.low], window_length=15)
A CustomFactor with multiple outputs:
.. code-block:: python
class MultipleOutputs(CustomFactor):
inputs = [USEquityPricing.close]
outputs = ['alpha', 'beta']
window_length = N
def compute(self, today, assets, out, close):
computed_alpha, computed_beta = some_function(close)
out.alpha[:] = computed_alpha
out.beta[:] = computed_beta
# Each output is returned as its own Factor upon instantiation.
alpha, beta = MultipleOutputs()
# Equivalently, we can create a single factor instance and access each
# output as an attribute of that instance.
multiple_outputs = MultipleOutputs()
alpha = multiple_outputs.alpha
beta = multiple_outputs.beta
Note: If a CustomFactor has multiple outputs, all outputs must have the
same dtype. For instance, in the example above, if alpha is a float then
beta must also be a float.
'''
dtype = float64_dtype
def __getattribute__(self, name):
outputs = object.__getattribute__(self, 'outputs')
if outputs is NotSpecified:
return super(CustomFactor, self).__getattribute__(name)
elif name in outputs:
return RecarrayField(factor=self, attribute=name)
else:
try:
return super(CustomFactor, self).__getattribute__(name)
except AttributeError:
raise AttributeError(
'Instance of {factor} has no output named {attr!r}. '
'Possible choices are: {choices}.'.format(
factor=type(self).__name__,
attr=name,
choices=self.outputs,
)
)
def __iter__(self):
if self.outputs is NotSpecified:
raise ValueError(
'{factor} does not have multiple outputs.'.format(
factor=type(self).__name__,
)
)
return (RecarrayField(self, attr) for attr in self.outputs)
class RecarrayField(SingleInputMixin, Factor):
def __new__(cls, factor, attribute):
return super(RecarrayField, cls).__new__(
cls,
attribute=attribute,
inputs=[factor],
window_length=0,
mask=factor.mask,
dtype=factor.dtype,
missing_value=factor.missing_value,
)
def _init(self, attribute, *args, **kwargs):
self._attribute = attribute
return super(RecarrayField, self)._init(*args, **kwargs)
@classmethod
def _static_identity(cls, attribute, *args, **kwargs):
return (
super(RecarrayField, cls)._static_identity(*args, **kwargs),
attribute,
)
def _compute(self, windows, dates, assets, mask):
return windows[0][self._attribute]
class Latest(LatestMixin, CustomFactor):
"""
Factor producing the most recently-known value of `inputs[0]` on each day.
The `.latest` attribute of DataSet columns returns an instance of this
Factor.
"""
window_length = 1
def compute(self, today, assets, out, data):
out[:] = data[-1]
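# For example, the canonical way to obtain this factor is via a dataset
# column's ``latest`` attribute (USEquityPricing is illustrative here):
#
#   latest_close = USEquityPricing.close.latest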
|
MadeiraCloud/salt
|
refs/heads/master
|
libs/requests/packages/chardet/escprober.py
|
2935
|
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from . import constants
from .escsm import (HZSMModel, ISO2022CNSMModel, ISO2022JPSMModel,
ISO2022KRSMModel)
from .charsetprober import CharSetProber
from .codingstatemachine import CodingStateMachine
from .compat import wrap_ord
class EscCharSetProber(CharSetProber):
def __init__(self):
CharSetProber.__init__(self)
self._mCodingSM = [
CodingStateMachine(HZSMModel),
CodingStateMachine(ISO2022CNSMModel),
CodingStateMachine(ISO2022JPSMModel),
CodingStateMachine(ISO2022KRSMModel)
]
self.reset()
def reset(self):
CharSetProber.reset(self)
for codingSM in self._mCodingSM:
if not codingSM:
continue
codingSM.active = True
codingSM.reset()
self._mActiveSM = len(self._mCodingSM)
self._mDetectedCharset = None
def get_charset_name(self):
return self._mDetectedCharset
def get_confidence(self):
if self._mDetectedCharset:
return 0.99
else:
return 0.00
def feed(self, aBuf):
for c in aBuf:
# PY3K: aBuf is a byte array, so c is an int, not a byte
for codingSM in self._mCodingSM:
if not codingSM:
continue
if not codingSM.active:
continue
codingState = codingSM.next_state(wrap_ord(c))
if codingState == constants.eError:
codingSM.active = False
self._mActiveSM -= 1
if self._mActiveSM <= 0:
self._mState = constants.eNotMe
return self.get_state()
elif codingState == constants.eItsMe:
self._mState = constants.eFoundIt
self._mDetectedCharset = codingSM.get_coding_state_machine() # nopep8
return self.get_state()
return self.get_state()
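# A minimal usage sketch (hypothetical input): feed a buffer containing an
# ISO-2022-JP designation sequence and ask the prober what it detected.
#
#   prober = EscCharSetProber()
#   prober.feed(b'\x1b$B')           # ESC $ B designates JIS X 0208
#   prober.get_charset_name()        # -> 'ISO-2022-JP' once eItsMe is hit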
|
zubair-arbi/edx-platform
|
refs/heads/master
|
common/djangoapps/student/migrations/0046_auto__add_entranceexamconfiguration__add_unique_entranceexamconfigurat.py
|
93
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'EntranceExamConfiguration'
db.create_table('student_entranceexamconfiguration', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'])),
('course_id', self.gf('xmodule_django.models.CourseKeyField')(max_length=255, db_index=True)),
('created', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, null=True, db_index=True, blank=True)),
('updated', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, db_index=True, blank=True)),
('skip_entrance_exam', self.gf('django.db.models.fields.BooleanField')(default=True)),
))
db.send_create_signal('student', ['EntranceExamConfiguration'])
# Adding unique constraint on 'EntranceExamConfiguration', fields ['user', 'course_id']
db.create_unique('student_entranceexamconfiguration', ['user_id', 'course_id'])
def backwards(self, orm):
# Removing unique constraint on 'EntranceExamConfiguration', fields ['user', 'course_id']
db.delete_unique('student_entranceexamconfiguration', ['user_id', 'course_id'])
# Deleting model 'EntranceExamConfiguration'
db.delete_table('student_entranceexamconfiguration')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'student.anonymoususerid': {
'Meta': {'object_name': 'AnonymousUserId'},
'anonymous_user_id': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'}),
'course_id': ('xmodule_django.models.CourseKeyField', [], {'db_index': 'True', 'max_length': '255', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'student.courseaccessrole': {
'Meta': {'unique_together': "(('user', 'org', 'course_id', 'role'),)", 'object_name': 'CourseAccessRole'},
'course_id': ('xmodule_django.models.CourseKeyField', [], {'db_index': 'True', 'max_length': '255', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'org': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '64', 'blank': 'True'}),
'role': ('django.db.models.fields.CharField', [], {'max_length': '64', 'db_index': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'student.courseenrollment': {
'Meta': {'ordering': "('user', 'course_id')", 'unique_together': "(('user', 'course_id'),)", 'object_name': 'CourseEnrollment'},
'course_id': ('xmodule_django.models.CourseKeyField', [], {'max_length': '255', 'db_index': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'mode': ('django.db.models.fields.CharField', [], {'default': "'honor'", 'max_length': '100'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'student.courseenrollmentallowed': {
'Meta': {'unique_together': "(('email', 'course_id'),)", 'object_name': 'CourseEnrollmentAllowed'},
'auto_enroll': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'course_id': ('xmodule_django.models.CourseKeyField', [], {'max_length': '255', 'db_index': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'db_index': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'student.dashboardconfiguration': {
'Meta': {'object_name': 'DashboardConfiguration'},
'change_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'changed_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'on_delete': 'models.PROTECT'}),
'enabled': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'recent_enrollment_time_delta': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'student.entranceexamconfiguration': {
'Meta': {'unique_together': "(('user', 'course_id'),)", 'object_name': 'EntranceExamConfiguration'},
'course_id': ('xmodule_django.models.CourseKeyField', [], {'max_length': '255', 'db_index': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'skip_entrance_exam': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'student.linkedinaddtoprofileconfiguration': {
'Meta': {'object_name': 'LinkedInAddToProfileConfiguration'},
'change_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'changed_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'on_delete': 'models.PROTECT'}),
'company_identifier': ('django.db.models.fields.TextField', [], {}),
'dashboard_tracking_code': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'enabled': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'trk_partner_name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '10', 'blank': 'True'})
},
'student.loginfailures': {
'Meta': {'object_name': 'LoginFailures'},
'failure_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'lockout_until': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'student.passwordhistory': {
'Meta': {'object_name': 'PasswordHistory'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'time_set': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'student.pendingemailchange': {
'Meta': {'object_name': 'PendingEmailChange'},
'activation_key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'new_email': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'blank': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True'})
},
'student.pendingnamechange': {
'Meta': {'object_name': 'PendingNameChange'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'new_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'rationale': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'blank': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True'})
},
'student.registration': {
'Meta': {'object_name': 'Registration', 'db_table': "'auth_registration'"},
'activation_key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'unique': 'True'})
},
'student.userprofile': {
'Meta': {'object_name': 'UserProfile', 'db_table': "'auth_userprofile'"},
'allow_certificate': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'city': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'country': ('django_countries.fields.CountryField', [], {'max_length': '2', 'null': 'True', 'blank': 'True'}),
'courseware': ('django.db.models.fields.CharField', [], {'default': "'course.xml'", 'max_length': '255', 'blank': 'True'}),
'gender': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '6', 'null': 'True', 'blank': 'True'}),
'goals': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'blank': 'True'}),
'level_of_education': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '6', 'null': 'True', 'blank': 'True'}),
'location': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'blank': 'True'}),
'mailing_address': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'meta': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'blank': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'profile'", 'unique': 'True', 'to': "orm['auth.User']"}),
'year_of_birth': ('django.db.models.fields.IntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'})
},
'student.usersignupsource': {
'Meta': {'object_name': 'UserSignupSource'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'site': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'student.userstanding': {
'Meta': {'object_name': 'UserStanding'},
'account_status': ('django.db.models.fields.CharField', [], {'max_length': '31', 'blank': 'True'}),
'changed_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'standing_last_changed_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'standing'", 'unique': 'True', 'to': "orm['auth.User']"})
},
'student.usertestgroup': {
'Meta': {'object_name': 'UserTestGroup'},
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '32', 'db_index': 'True'}),
'users': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.User']", 'db_index': 'True', 'symmetrical': 'False'})
}
}
complete_apps = ['student']
|
Communities-Communications/cc-odoo
|
refs/heads/master
|
openerp/addons/base/ir/ir_qweb.py
|
9
|
# -*- coding: utf-8 -*-
import collections
import cStringIO
import datetime
import hashlib
import json
import itertools
import logging
import math
import os
import re
import sys
import textwrap
import uuid
from subprocess import Popen, PIPE
from urlparse import urlparse
import babel
import babel.dates
import werkzeug
from lxml import etree, html
from PIL import Image
import openerp.http
import openerp.tools
from openerp.tools.func import lazy_property
import openerp.tools.lru
from openerp.http import request
from openerp.tools.safe_eval import safe_eval as eval
from openerp.osv import osv, orm, fields
from openerp.tools import html_escape as escape
from openerp.tools.translate import _
_logger = logging.getLogger(__name__)
#--------------------------------------------------------------------
# QWeb template engine
#--------------------------------------------------------------------
class QWebException(Exception):
def __init__(self, message, **kw):
Exception.__init__(self, message)
self.qweb = dict(kw)
def pretty_xml(self):
if 'node' not in self.qweb:
return ''
return etree.tostring(self.qweb['node'], pretty_print=True)
class QWebTemplateNotFound(QWebException):
pass
def raise_qweb_exception(etype=None, **kw):
if etype is None:
etype = QWebException
orig_type, original, tb = sys.exc_info()
try:
raise etype, original, tb
except etype, e:
for k, v in kw.items():
e.qweb[k] = v
# Will use `raise foo from bar` in python 3 and rename cause to __cause__
e.qweb['cause'] = original
raise
class QWebContext(dict):
def __init__(self, cr, uid, data, loader=None, templates=None, context=None):
self.cr = cr
self.uid = uid
self.loader = loader
self.templates = templates or {}
self.context = context
dic = dict(data)
super(QWebContext, self).__init__(dic)
self['defined'] = lambda key: key in self
def safe_eval(self, expr):
locals_dict = collections.defaultdict(lambda: None)
locals_dict.update(self)
locals_dict.pop('cr', None)
locals_dict.pop('loader', None)
return eval(expr, None, locals_dict, nocopy=True, locals_builtins=True)
def copy(self):
""" Clones the current context, conserving all data and metadata
(loader, template cache, ...)
"""
return QWebContext(self.cr, self.uid, dict.copy(self),
loader=self.loader,
templates=self.templates,
context=self.context)
def __copy__(self):
return self.copy()
class QWeb(orm.AbstractModel):
""" Base QWeb rendering engine
* to customize ``t-field`` rendering, subclass ``ir.qweb.field`` and
create new models called :samp:`ir.qweb.field.{widget}`
* alternatively, override :meth:`~.get_converter_for` and return an
arbitrary model to use as field converter
Beware that if you need extensions or alterations which could be
incompatible with other subsystems, you should create a local object
inheriting from ``ir.qweb`` and customize that.
"""
_name = 'ir.qweb'
_void_elements = frozenset([
'area', 'base', 'br', 'col', 'embed', 'hr', 'img', 'input', 'keygen',
'link', 'menuitem', 'meta', 'param', 'source', 'track', 'wbr'])
_format_regex = re.compile(
'(?:'
# ruby-style pattern
'#\{(.+?)\}'
')|(?:'
# jinja-style pattern
'\{\{(.+?)\}\}'
')')
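    # For illustration: eval_format() below substitutes both interpolation
    # styles matched by _format_regex. Assuming a hypothetical context where
    # name = 'World', both of these calls would render identically:
    #
    #   self.eval_format("Hello #{name}", qwebcontext)   # -> 'Hello World'
    #   self.eval_format("Hello {{name}}", qwebcontext)  # -> 'Hello World'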
def __init__(self, pool, cr):
super(QWeb, self).__init__(pool, cr)
self._render_tag = self.prefixed_methods('render_tag_')
self._render_att = self.prefixed_methods('render_att_')
def prefixed_methods(self, prefix):
""" Extracts all methods prefixed by ``prefix``, and returns a mapping
of (t-name, method) where the t-name is the method name with prefix
removed and underscore converted to dashes
:param str prefix:
:return: dict
"""
n_prefix = len(prefix)
return dict(
(name[n_prefix:].replace('_', '-'), getattr(type(self), name))
for name in dir(self)
if name.startswith(prefix)
)
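    # For illustration: with methods named render_tag_foreach and
    # render_tag_call_assets, prefixed_methods('render_tag_') returns a
    # mapping along the lines of:
    #
    #   {'foreach': <render_tag_foreach>,
    #    'call-assets': <render_tag_call_assets>, ...}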
def register_tag(self, tag, func):
self._render_tag[tag] = func
def add_template(self, qwebcontext, name, node):
"""Add a parsed template in the context. Used to preprocess templates."""
qwebcontext.templates[name] = node
def load_document(self, document, res_id, qwebcontext):
"""
Loads an XML document and installs any contained template in the engine
:type document: a parsed lxml.etree element, an unparsed XML document
(as a string) or the path of an XML file to load
"""
if not isinstance(document, basestring):
# assume lxml.etree.Element
dom = document
elif document.startswith("<?xml"):
dom = etree.fromstring(document)
else:
dom = etree.parse(document).getroot()
for node in dom:
if node.get('t-name'):
name = str(node.get("t-name"))
self.add_template(qwebcontext, name, node)
if res_id and node.tag == "t":
self.add_template(qwebcontext, res_id, node)
res_id = None
def get_template(self, name, qwebcontext):
""" Tries to fetch the template ``name``, either gets it from the
context's template cache or loads one with the context's loader (if
any).
:raises QWebTemplateNotFound: if the template can not be found or loaded
"""
origin_template = qwebcontext.get('__caller__') or qwebcontext['__stack__'][0]
if qwebcontext.loader and name not in qwebcontext.templates:
try:
xml_doc = qwebcontext.loader(name)
except ValueError:
raise_qweb_exception(QWebTemplateNotFound, message="Loader could not find template %r" % name, template=origin_template)
self.load_document(xml_doc, isinstance(name, (int, long)) and name or None, qwebcontext=qwebcontext)
if name in qwebcontext.templates:
return qwebcontext.templates[name]
raise QWebTemplateNotFound("Template %r not found" % name, template=origin_template)
def eval(self, expr, qwebcontext):
try:
return qwebcontext.safe_eval(expr)
except Exception:
template = qwebcontext.get('__template__')
raise_qweb_exception(message="Could not evaluate expression %r" % expr, expression=expr, template=template)
def eval_object(self, expr, qwebcontext):
return self.eval(expr, qwebcontext)
def eval_str(self, expr, qwebcontext):
if expr == "0":
return qwebcontext.get(0, '')
val = self.eval(expr, qwebcontext)
if isinstance(val, unicode):
return val.encode("utf8")
if val is False or val is None:
return ''
return str(val)
def eval_format(self, expr, qwebcontext):
expr, replacements = self._format_regex.subn(
lambda m: self.eval_str(m.group(1) or m.group(2), qwebcontext),
expr
)
if replacements:
return expr
try:
return str(expr % qwebcontext)
except Exception:
template = qwebcontext.get('__template__')
raise_qweb_exception(message="Format error for expression %r" % expr, expression=expr, template=template)
def eval_bool(self, expr, qwebcontext):
return int(bool(self.eval(expr, qwebcontext)))
def render(self, cr, uid, id_or_xml_id, qwebcontext=None, loader=None, context=None):
""" render(cr, uid, id_or_xml_id, qwebcontext=None, loader=None, context=None)
Renders the template specified by the provided template name
:param qwebcontext: context for rendering the template
:type qwebcontext: dict or :class:`QWebContext` instance
:param loader: if ``qwebcontext`` is a dict, loader set into the
context instantiated for rendering
"""
if qwebcontext is None:
qwebcontext = {}
if not isinstance(qwebcontext, QWebContext):
qwebcontext = QWebContext(cr, uid, qwebcontext, loader=loader, context=context)
qwebcontext['__template__'] = id_or_xml_id
stack = qwebcontext.get('__stack__', [])
if stack:
qwebcontext['__caller__'] = stack[-1]
stack.append(id_or_xml_id)
qwebcontext['__stack__'] = stack
qwebcontext['xmlid'] = str(stack[0]) # Temporary fix
return self.render_node(self.get_template(id_or_xml_id, qwebcontext), qwebcontext)
def render_node(self, element, qwebcontext):
generated_attributes = ""
t_render = None
template_attributes = {}
for (attribute_name, attribute_value) in element.attrib.iteritems():
attribute_name = str(attribute_name)
if attribute_name == "groups":
cr = qwebcontext.get('request') and qwebcontext['request'].cr or None
uid = qwebcontext.get('request') and qwebcontext['request'].uid or None
can_see = self.user_has_groups(cr, uid, groups=attribute_value) if cr and uid else False
if not can_see:
return ''
attribute_value = attribute_value.encode("utf8")
if attribute_name.startswith("t-"):
for attribute in self._render_att:
if attribute_name[2:].startswith(attribute):
attrs = self._render_att[attribute](
self, element, attribute_name, attribute_value, qwebcontext)
for att, val in attrs:
if not val: continue
if not isinstance(val, str):
val = unicode(val).encode('utf-8')
generated_attributes += self.render_attribute(element, att, val, qwebcontext)
break
else:
if attribute_name[2:] in self._render_tag:
t_render = attribute_name[2:]
template_attributes[attribute_name[2:]] = attribute_value
else:
generated_attributes += self.render_attribute(element, attribute_name, attribute_value, qwebcontext)
if 'debug' in template_attributes:
debugger = template_attributes.get('debug', 'pdb')
__import__(debugger).set_trace() # pdb, ipdb, pudb, ...
if t_render:
result = self._render_tag[t_render](self, element, template_attributes, generated_attributes, qwebcontext)
else:
result = self.render_element(element, template_attributes, generated_attributes, qwebcontext)
if element.tail:
result += element.tail.encode('utf-8')
if isinstance(result, unicode):
return result.encode('utf-8')
return result
def render_element(self, element, template_attributes, generated_attributes, qwebcontext, inner=None):
# element: element
# template_attributes: t-* attributes
# generated_attributes: generated attributes
# qwebcontext: values
# inner: optional innerXml
if inner:
g_inner = inner.encode('utf-8') if isinstance(inner, unicode) else inner
else:
g_inner = [] if element.text is None else [element.text.encode('utf-8')]
for current_node in element.iterchildren(tag=etree.Element):
try:
g_inner.append(self.render_node(current_node, qwebcontext))
except QWebException:
raise
except Exception:
template = qwebcontext.get('__template__')
raise_qweb_exception(message="Could not render element %r" % element.tag, node=element, template=template)
name = str(element.tag)
inner = "".join(g_inner)
trim = template_attributes.get("trim", 0)
if trim == 0:
pass
elif trim == 'left':
inner = inner.lstrip()
elif trim == 'right':
inner = inner.rstrip()
elif trim == 'both':
inner = inner.strip()
if name == "t":
return inner
elif len(inner) or name not in self._void_elements:
return "<%s%s>%s</%s>" % tuple(
qwebcontext if isinstance(qwebcontext, str) else qwebcontext.encode('utf-8')
for qwebcontext in (name, generated_attributes, inner, name)
)
else:
return "<%s%s/>" % (name, generated_attributes)
def render_attribute(self, element, name, value, qwebcontext):
return ' %s="%s"' % (name, escape(value))
# Attributes
def render_att_att(self, element, attribute_name, attribute_value, qwebcontext):
if attribute_name.startswith("t-attf-"):
return [(attribute_name[7:], self.eval_format(attribute_value, qwebcontext))]
if attribute_name.startswith("t-att-"):
return [(attribute_name[6:], self.eval(attribute_value, qwebcontext))]
result = self.eval_object(attribute_value, qwebcontext)
if isinstance(result, collections.Mapping):
return result.iteritems()
# assume tuple
return [result]
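    # For illustration, the three supported forms (attribute values are
    # hypothetical):
    #
    #   <div t-att-title="user.name"/>         evaluates the expression
    #   <div t-attf-href="/page/#{page.id}"/>  formats the string
    #   <div t-att="{'a': 1, 'b': 2}"/>        expands a whole mapping
    #
    # yielding e.g. title="...", href="/page/42" and a="1" b="2" respectively.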
# Tags
def render_tag_raw(self, element, template_attributes, generated_attributes, qwebcontext):
inner = self.eval_str(template_attributes["raw"], qwebcontext)
return self.render_element(element, template_attributes, generated_attributes, qwebcontext, inner)
def render_tag_esc(self, element, template_attributes, generated_attributes, qwebcontext):
options = json.loads(template_attributes.get('esc-options') or '{}')
widget = self.get_widget_for(options.get('widget'))
inner = widget.format(template_attributes['esc'], options, qwebcontext)
return self.render_element(element, template_attributes, generated_attributes, qwebcontext, inner)
def _iterate(self, iterable):
        if isinstance(iterable, collections.Mapping):
return iterable.iteritems()
return itertools.izip(*itertools.tee(iterable))
def render_tag_foreach(self, element, template_attributes, generated_attributes, qwebcontext):
expr = template_attributes["foreach"]
enum = self.eval_object(expr, qwebcontext)
if enum is None:
template = qwebcontext.get('__template__')
raise QWebException("foreach enumerator %r is not defined while rendering template %r" % (expr, template), template=template)
if isinstance(enum, int):
enum = range(enum)
varname = template_attributes['as'].replace('.', '_')
copy_qwebcontext = qwebcontext.copy()
size = None
if isinstance(enum, collections.Sized):
size = len(enum)
copy_qwebcontext["%s_size" % varname] = size
copy_qwebcontext["%s_all" % varname] = enum
ru = []
for index, (item, value) in enumerate(self._iterate(enum)):
copy_qwebcontext.update({
varname: item,
'%s_value' % varname: value,
'%s_index' % varname: index,
'%s_first' % varname: index == 0,
})
if size is not None:
copy_qwebcontext['%s_last' % varname] = index + 1 == size
if index % 2:
copy_qwebcontext.update({
'%s_parity' % varname: 'odd',
'%s_even' % varname: False,
'%s_odd' % varname: True,
})
else:
copy_qwebcontext.update({
'%s_parity' % varname: 'even',
'%s_even' % varname: True,
'%s_odd' % varname: False,
})
ru.append(self.render_element(element, template_attributes, generated_attributes, copy_qwebcontext))
for k in qwebcontext.keys():
qwebcontext[k] = copy_qwebcontext[k]
return "".join(ru)
def render_tag_if(self, element, template_attributes, generated_attributes, qwebcontext):
if self.eval_bool(template_attributes["if"], qwebcontext):
return self.render_element(element, template_attributes, generated_attributes, qwebcontext)
return ""
def render_tag_call(self, element, template_attributes, generated_attributes, qwebcontext):
d = qwebcontext.copy()
d[0] = self.render_element(element, template_attributes, generated_attributes, d)
cr = d.get('request') and d['request'].cr or None
uid = d.get('request') and d['request'].uid or None
template = self.eval_format(template_attributes["call"], d)
try:
template = int(template)
except ValueError:
pass
return self.render(cr, uid, template, d)
def render_tag_call_assets(self, element, template_attributes, generated_attributes, qwebcontext):
""" This special 't-call' tag can be used in order to aggregate/minify javascript and css assets"""
if len(element):
            # An asset bundle is rendered in two different contexts (when generating the
            # html and when generating the bundle itself), so it must be qwebcontext-free;
            # even the '0' variable is forbidden.
template = qwebcontext.get('__template__')
raise QWebException("t-call-assets cannot contain children nodes", template=template)
xmlid = template_attributes['call-assets']
cr, uid, context = [getattr(qwebcontext, attr) for attr in ('cr', 'uid', 'context')]
bundle = AssetsBundle(xmlid, cr=cr, uid=uid, context=context, registry=self.pool)
css = self.get_attr_bool(template_attributes.get('css'), default=True)
js = self.get_attr_bool(template_attributes.get('js'), default=True)
return bundle.to_html(css=css, js=js, debug=bool(qwebcontext.get('debug')))
def render_tag_set(self, element, template_attributes, generated_attributes, qwebcontext):
if "value" in template_attributes:
qwebcontext[template_attributes["set"]] = self.eval_object(template_attributes["value"], qwebcontext)
elif "valuef" in template_attributes:
qwebcontext[template_attributes["set"]] = self.eval_format(template_attributes["valuef"], qwebcontext)
else:
qwebcontext[template_attributes["set"]] = self.render_element(element, template_attributes, generated_attributes, qwebcontext)
return ""
def render_tag_field(self, element, template_attributes, generated_attributes, qwebcontext):
""" eg: <span t-record="browse_record(res.partner, 1)" t-field="phone">+1 555 555 8069</span>"""
node_name = element.tag
assert node_name not in ("table", "tbody", "thead", "tfoot", "tr", "td",
"li", "ul", "ol", "dl", "dt", "dd"),\
"RTE widgets do not work correctly on %r elements" % node_name
assert node_name != 't',\
"t-field can not be used on a t element, provide an actual HTML node"
record, field_name = template_attributes["field"].rsplit('.', 1)
record = self.eval_object(record, qwebcontext)
field = record._fields[field_name]
options = json.loads(template_attributes.get('field-options') or '{}')
field_type = get_field_type(field, options)
converter = self.get_converter_for(field_type)
return converter.to_html(qwebcontext.cr, qwebcontext.uid, field_name, record, options,
element, template_attributes, generated_attributes, qwebcontext, context=qwebcontext.context)
def get_converter_for(self, field_type):
""" returns a :class:`~openerp.models.Model` used to render a
``t-field``.
By default, tries to get the model named
:samp:`ir.qweb.field.{field_type}`, falling back on ``ir.qweb.field``.
:param str field_type: type or widget of field to render
"""
return self.pool.get('ir.qweb.field.' + field_type, self.pool['ir.qweb.field'])
def get_widget_for(self, widget):
""" returns a :class:`~openerp.models.Model` used to render a
``t-esc``
:param str widget: name of the widget to use, or ``None``
"""
widget_model = ('ir.qweb.widget.' + widget) if widget else 'ir.qweb.widget'
return self.pool.get(widget_model) or self.pool['ir.qweb.widget']
def get_attr_bool(self, attr, default=False):
if attr:
attr = attr.lower()
if attr in ('false', '0'):
return False
elif attr in ('true', '1'):
return True
return default
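    # For illustration: get_attr_bool('False') -> False, get_attr_bool('1')
    # -> True, and get_attr_bool(None, default=True) -> True (a falsy attr
    # keeps the default).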
#--------------------------------------------------------------------
# QWeb Fields converters
#--------------------------------------------------------------------
class FieldConverter(osv.AbstractModel):
""" Used to convert a t-field specification into an output HTML field.
:meth:`~.to_html` is the entry point of this conversion from QWeb, it:
* converts the record value to html using :meth:`~.record_to_html`
* generates the metadata attributes (``data-oe-``) to set on the root
result node
* generates the root result node itself through :meth:`~.render_element`
"""
_name = 'ir.qweb.field'
def attributes(self, cr, uid, field_name, record, options,
source_element, g_att, t_att, qweb_context,
context=None):
""" attributes(cr, uid, field_name, record, options, source_element, g_att, t_att, qweb_context, context=None)
        Generates the metadata attributes (prefixed by ``data-oe-``) for the
        root node of the field conversion. Attribute values are escaped by the
parent.
The default attributes are:
* ``model``, the name of the record's model
* ``id`` the id of the record to which the field belongs
* ``field`` the name of the converted field
* ``type`` the logical field type (widget, may not match the field's
``type``, may not be any Field subclass name)
* ``translate``, a boolean flag (``0`` or ``1``) denoting whether the
field is translatable
* ``expression``, the original expression
:returns: iterable of (attribute name, attribute value) pairs.
"""
field = record._fields[field_name]
field_type = get_field_type(field, options)
return [
('data-oe-model', record._name),
('data-oe-id', record.id),
('data-oe-field', field_name),
('data-oe-type', field_type),
('data-oe-expression', t_att['field']),
]
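    # For illustration, a typical result (with hypothetical values) is:
    #
    #   [('data-oe-model', 'res.partner'), ('data-oe-id', 7),
    #    ('data-oe-field', 'name'), ('data-oe-type', 'char'),
    #    ('data-oe-expression', 'partner.name')]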
def value_to_html(self, cr, uid, value, field, options=None, context=None):
""" value_to_html(cr, uid, value, field, options=None, context=None)
Converts a single value to its HTML version/output
"""
if not value: return ''
return value
def record_to_html(self, cr, uid, field_name, record, options=None, context=None):
""" record_to_html(cr, uid, field_name, record, options=None, context=None)
Converts the specified field of the browse_record ``record`` to HTML
"""
field = record._fields[field_name]
return self.value_to_html(
cr, uid, record[field_name], field, options=options, context=context)
def to_html(self, cr, uid, field_name, record, options,
source_element, t_att, g_att, qweb_context, context=None):
""" to_html(cr, uid, field_name, record, options, source_element, t_att, g_att, qweb_context, context=None)
Converts a ``t-field`` to its HTML output. A ``t-field`` may be
extended by a ``t-field-options``, which is a JSON-serialized mapping
of configuration values.
A default configuration key is ``widget`` which can override the
field's own ``_type``.
"""
try:
content = self.record_to_html(cr, uid, field_name, record, options, context=context)
if options.get('html-escape', True):
content = escape(content)
elif hasattr(content, '__html__'):
content = content.__html__()
except Exception:
_logger.warning("Could not get field %s for model %s",
field_name, record._name, exc_info=True)
content = None
inherit_branding = context and context.get('inherit_branding')
if not inherit_branding and context and context.get('inherit_branding_auto'):
inherit_branding = self.pool['ir.model.access'].check(cr, uid, record._name, 'write', False, context=context)
if inherit_branding:
# add branding attributes
g_att += ''.join(
' %s="%s"' % (name, escape(value))
for name, value in self.attributes(
cr, uid, field_name, record, options,
source_element, g_att, t_att, qweb_context)
)
return self.render_element(cr, uid, source_element, t_att, g_att,
qweb_context, content)
def qweb_object(self):
return self.pool['ir.qweb']
def render_element(self, cr, uid, source_element, t_att, g_att,
qweb_context, content):
""" render_element(cr, uid, source_element, t_att, g_att, qweb_context, content)
Final rendering hook, by default just calls ir.qweb's ``render_element``
"""
return self.qweb_object().render_element(
source_element, t_att, g_att, qweb_context, content or '')
def user_lang(self, cr, uid, context):
""" user_lang(cr, uid, context)
Fetches the res.lang object corresponding to the language code stored
        in the user's context. Falls back to en_US if no lang is present in the
context *or the language code is not valid*.
:returns: res.lang browse_record
"""
if context is None: context = {}
lang_code = context.get('lang') or 'en_US'
Lang = self.pool['res.lang']
lang_ids = Lang.search(cr, uid, [('code', '=', lang_code)], context=context) \
or Lang.search(cr, uid, [('code', '=', 'en_US')], context=context)
return Lang.browse(cr, uid, lang_ids[0], context=context)
class FloatConverter(osv.AbstractModel):
_name = 'ir.qweb.field.float'
_inherit = 'ir.qweb.field'
def precision(self, cr, uid, field, options=None, context=None):
_, precision = field.digits or (None, None)
return precision
def value_to_html(self, cr, uid, value, field, options=None, context=None):
if context is None:
context = {}
precision = self.precision(cr, uid, field, options=options, context=context)
fmt = '%f' if precision is None else '%.{precision}f'
lang_code = context.get('lang') or 'en_US'
lang = self.pool['res.lang']
formatted = lang.format(cr, uid, [lang_code], fmt.format(precision=precision), value, grouping=True)
# %f does not strip trailing zeroes. %g does but its precision causes
# it to switch to scientific notation starting at a million *and* to
        # strip decimals. So use %f and, if no precision was specified,
        # manually strip trailing zeroes.
if precision is None:
formatted = re.sub(r'(?:(0|\d+?)0+)$', r'\1', formatted)
return formatted
class DateConverter(osv.AbstractModel):
_name = 'ir.qweb.field.date'
_inherit = 'ir.qweb.field'
def value_to_html(self, cr, uid, value, field, options=None, context=None):
if not value or len(value)<10: return ''
lang = self.user_lang(cr, uid, context=context)
locale = babel.Locale.parse(lang.code)
if isinstance(value, basestring):
value = datetime.datetime.strptime(
value[:10], openerp.tools.DEFAULT_SERVER_DATE_FORMAT)
if options and 'format' in options:
pattern = options['format']
else:
strftime_pattern = lang.date_format
pattern = openerp.tools.posix_to_ldml(strftime_pattern, locale=locale)
return babel.dates.format_date(
value, format=pattern,
locale=locale)
class DateTimeConverter(osv.AbstractModel):
_name = 'ir.qweb.field.datetime'
_inherit = 'ir.qweb.field'
def value_to_html(self, cr, uid, value, field, options=None, context=None):
if not value: return ''
lang = self.user_lang(cr, uid, context=context)
locale = babel.Locale.parse(lang.code)
if isinstance(value, basestring):
value = datetime.datetime.strptime(
value, openerp.tools.DEFAULT_SERVER_DATETIME_FORMAT)
value = fields.datetime.context_timestamp(
cr, uid, timestamp=value, context=context)
if options and 'format' in options:
pattern = options['format']
else:
strftime_pattern = (u"%s %s" % (lang.date_format, lang.time_format))
pattern = openerp.tools.posix_to_ldml(strftime_pattern, locale=locale)
if options and options.get('hide_seconds'):
pattern = pattern.replace(":ss", "").replace(":s", "")
return babel.dates.format_datetime(value, format=pattern, locale=locale)
class TextConverter(osv.AbstractModel):
_name = 'ir.qweb.field.text'
_inherit = 'ir.qweb.field'
def value_to_html(self, cr, uid, value, field, options=None, context=None):
"""
        Escapes the value and converts newlines to ``<br>`` elements.
"""
if not value: return ''
return nl2br(value, options=options)
class SelectionConverter(osv.AbstractModel):
_name = 'ir.qweb.field.selection'
_inherit = 'ir.qweb.field'
def record_to_html(self, cr, uid, field_name, record, options=None, context=None):
value = record[field_name]
if not value: return ''
field = record._fields[field_name]
selection = dict(field.get_description(record.env)['selection'])
return self.value_to_html(
cr, uid, selection[value], field, options=options)
class ManyToOneConverter(osv.AbstractModel):
_name = 'ir.qweb.field.many2one'
_inherit = 'ir.qweb.field'
def record_to_html(self, cr, uid, field_name, record, options=None, context=None):
[read] = record.read([field_name])
if not read[field_name]: return ''
_, value = read[field_name]
return nl2br(value, options=options)
class HTMLConverter(osv.AbstractModel):
_name = 'ir.qweb.field.html'
_inherit = 'ir.qweb.field'
def value_to_html(self, cr, uid, value, field, options=None, context=None):
return HTMLSafe(value or '')
class ImageConverter(osv.AbstractModel):
""" ``image`` widget rendering, inserts a data:uri-using image tag in the
document. May be overridden by e.g. the website module to generate links
instead.
    .. todo:: what happens if different outputs need different converters? e.g.
reports may need embedded images or FS links whereas website
needs website-aware
"""
_name = 'ir.qweb.field.image'
_inherit = 'ir.qweb.field'
def value_to_html(self, cr, uid, value, field, options=None, context=None):
try:
image = Image.open(cStringIO.StringIO(value.decode('base64')))
image.verify()
except IOError:
raise ValueError("Non-image binary fields can not be converted to HTML")
except: # image.verify() throws "suitable exceptions", I have no idea what they are
raise ValueError("Invalid image content")
return HTMLSafe('<img src="data:%s;base64,%s">' % (Image.MIME[image.format], value))
class MonetaryConverter(osv.AbstractModel):
""" ``monetary`` converter, has a mandatory option
``display_currency``.
The currency is used for formatting *and rounding* of the float value. It
is assumed that the linked res_currency has a non-empty rounding value and
res.currency's ``round`` method is used to perform rounding.
.. note:: the monetary converter internally adds the qweb context to its
options mapping, so that the context is available to callees.
It's set under the ``_qweb_context`` key.
"""
_name = 'ir.qweb.field.monetary'
_inherit = 'ir.qweb.field'
def to_html(self, cr, uid, field_name, record, options,
source_element, t_att, g_att, qweb_context, context=None):
options['_qweb_context'] = qweb_context
return super(MonetaryConverter, self).to_html(
cr, uid, field_name, record, options,
source_element, t_att, g_att, qweb_context, context=context)
def record_to_html(self, cr, uid, field_name, record, options, context=None):
if context is None:
context = {}
Currency = self.pool['res.currency']
display_currency = self.display_currency(cr, uid, options['display_currency'], options)
# lang.format mandates a sprintf-style format. These formats are non-
# minimal (they have a default fixed precision instead), and
# lang.format will not set one by default. currency.round will not
# provide one either. So we need to generate a precision value
# (integer > 0) from the currency's rounding (a float generally < 1.0).
#
# The log10 of the rounding should be the number of digits involved if
# negative, if positive clamp to 0 digits and call it a day.
# nb: int() ~ floor(), we want nearest rounding instead
precision = int(math.floor(math.log10(display_currency.rounding)))
fmt = "%.{0}f".format(-precision if precision < 0 else 0)
from_amount = record[field_name]
if options.get('from_currency'):
from_currency = self.display_currency(cr, uid, options['from_currency'], options)
from_amount = Currency.compute(cr, uid, from_currency.id, display_currency.id, from_amount)
lang_code = context.get('lang') or 'en_US'
lang = self.pool['res.lang']
formatted_amount = lang.format(cr, uid, [lang_code],
fmt, Currency.round(cr, uid, display_currency, from_amount),
grouping=True, monetary=True)
pre = post = u''
if display_currency.position == 'before':
pre = u'{symbol} '
else:
post = u' {symbol}'
return HTMLSafe(u'{pre}<span class="oe_currency_value">{0}</span>{post}'.format(
formatted_amount,
pre=pre, post=post,
).format(
symbol=display_currency.symbol,
))
def display_currency(self, cr, uid, currency, options):
return self.qweb_object().eval_object(
currency, options['_qweb_context'])
TIMEDELTA_UNITS = (
('year', 3600 * 24 * 365),
('month', 3600 * 24 * 30),
('week', 3600 * 24 * 7),
('day', 3600 * 24),
('hour', 3600),
('minute', 60),
('second', 1)
)
class DurationConverter(osv.AbstractModel):
""" ``duration`` converter, to display integral or fractional values as
human-readable time spans (e.g. 1.5 as "1 hour 30 minutes").
Can be used on any numerical field.
    Has a mandatory option ``unit`` which can be one of ``second``, ``minute``,
    ``hour``, ``day``, ``week``, ``month`` or ``year``, used to interpret the
    numerical field value before converting it.
Sub-second values will be ignored.
"""
_name = 'ir.qweb.field.duration'
_inherit = 'ir.qweb.field'
def value_to_html(self, cr, uid, value, field, options=None, context=None):
units = dict(TIMEDELTA_UNITS)
if value < 0:
raise ValueError(_("Durations can't be negative"))
if not options or options.get('unit') not in units:
raise ValueError(_("A unit must be provided to duration widgets"))
locale = babel.Locale.parse(
self.user_lang(cr, uid, context=context).code)
factor = units[options['unit']]
sections = []
r = value * factor
for unit, secs_per_unit in TIMEDELTA_UNITS:
v, r = divmod(r, secs_per_unit)
if not v: continue
section = babel.dates.format_timedelta(
v*secs_per_unit, threshold=1, locale=locale)
if section:
sections.append(section)
return ' '.join(sections)
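# Worked example: value 1.5 with unit 'hour' gives r = 1.5 * 3600 = 5400
# seconds, which divmod() splits into 1 hour and 30 minutes, formatted as
# "1 hour 30 minutes" in an en_US locale.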
class RelativeDatetimeConverter(osv.AbstractModel):
_name = 'ir.qweb.field.relative'
_inherit = 'ir.qweb.field'
def value_to_html(self, cr, uid, value, field, options=None, context=None):
parse_format = openerp.tools.DEFAULT_SERVER_DATETIME_FORMAT
locale = babel.Locale.parse(
self.user_lang(cr, uid, context=context).code)
if isinstance(value, basestring):
value = datetime.datetime.strptime(value, parse_format)
# value should be a naive datetime in UTC. So is fields.Datetime.now()
reference = datetime.datetime.strptime(field.now(), parse_format)
return babel.dates.format_timedelta(
value - reference, add_direction=True, locale=locale)
class Contact(orm.AbstractModel):
_name = 'ir.qweb.field.contact'
_inherit = 'ir.qweb.field.many2one'
def record_to_html(self, cr, uid, field_name, record, options=None, context=None):
if context is None:
context = {}
if options is None:
options = {}
opf = options.get('fields') or ["name", "address", "phone", "mobile", "fax", "email"]
value_rec = record[field_name]
if not value_rec:
return None
value_rec = value_rec.sudo().with_context(show_address=True)
value = value_rec.name_get()[0][1]
val = {
'name': value.split("\n")[0],
'address': escape("\n".join(value.split("\n")[1:])),
'phone': value_rec.phone,
'mobile': value_rec.mobile,
'fax': value_rec.fax,
'city': value_rec.city,
'country_id': value_rec.country_id.display_name,
'website': value_rec.website,
'email': value_rec.email,
'fields': opf,
'object': value_rec,
'options': options
}
html = self.pool["ir.ui.view"].render(cr, uid, "base.contact", val, engine='ir.qweb', context=context).decode('utf8')
return HTMLSafe(html)
class QwebView(orm.AbstractModel):
_name = 'ir.qweb.field.qweb'
_inherit = 'ir.qweb.field.many2one'
def record_to_html(self, cr, uid, field_name, record, options=None, context=None):
if not getattr(record, field_name):
return None
view = getattr(record, field_name)
if view._model._name != "ir.ui.view":
_logger.warning("%s.%s must be a 'ir.ui.view' model." % (record, field_name))
return None
ctx = (context or {}).copy()
ctx['object'] = record
html = view.render(ctx, engine='ir.qweb', context=ctx).decode('utf8')
return HTMLSafe(html)
class QwebWidget(osv.AbstractModel):
_name = 'ir.qweb.widget'
def _format(self, inner, options, qwebcontext):
return self.pool['ir.qweb'].eval_str(inner, qwebcontext)
def format(self, inner, options, qwebcontext):
return escape(self._format(inner, options, qwebcontext))
class QwebWidgetMonetary(osv.AbstractModel):
_name = 'ir.qweb.widget.monetary'
_inherit = 'ir.qweb.widget'
def _format(self, inner, options, qwebcontext):
inner = self.pool['ir.qweb'].eval(inner, qwebcontext)
display = self.pool['ir.qweb'].eval_object(options['display_currency'], qwebcontext)
precision = int(round(math.log10(display.rounding)))
fmt = "%.{0}f".format(-precision if precision < 0 else 0)
lang_code = qwebcontext.context.get('lang') or 'en_US'
formatted_amount = self.pool['res.lang'].format(
qwebcontext.cr, qwebcontext.uid, [lang_code], fmt, inner, grouping=True, monetary=True
)
pre = post = u''
if display.position == 'before':
pre = u'{symbol} '
else:
post = u' {symbol}'
return u'{pre}{0}{post}'.format(
formatted_amount, pre=pre, post=post
).format(symbol=display.symbol,)
class HTMLSafe(object):
""" HTMLSafe string wrapper, Werkzeug's escape() has special handling for
objects with a ``__html__`` methods but AFAIK does not provide any such
object.
Wrapping a string in HTML will prevent its escaping
"""
__slots__ = ['string']
def __init__(self, string):
self.string = string
def __html__(self):
return self.string
def __str__(self):
s = self.string
if isinstance(s, unicode):
return s.encode('utf-8')
return s
def __unicode__(self):
s = self.string
if isinstance(s, str):
return s.decode('utf-8')
return s
def nl2br(string, options=None):
""" Converts newlines to HTML linebreaks in ``string``. Automatically
escapes content unless options['html-escape'] is set to False, and returns
the result wrapped in an HTMLSafe object.
:param str string:
:param dict options:
:rtype: HTMLSafe
"""
if options is None: options = {}
if options.get('html-escape', True):
string = escape(string)
return HTMLSafe(string.replace('\n', '<br>\n'))
def get_field_type(field, options):
""" Gets a t-field's effective type from the field definition and its options """
return options.get('widget', field.type)
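# For illustration (hypothetical inputs): nl2br('a\nb') yields
# HTMLSafe('a<br>\nb'), and get_field_type(field, {'widget': 'image'})
# returns 'image' regardless of the field's own type.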
class AssetError(Exception):
pass
class AssetNotFound(AssetError):
pass
class AssetsBundle(object):
# Sass installation:
#
# sudo gem install sass compass bootstrap-sass
#
# If the following error is encountered:
# 'ERROR: Cannot load compass.'
# Use this:
# sudo gem install compass --pre
cmd_sass = ['sass', '--stdin', '-t', 'compressed', '--unix-newlines', '--compass', '-r', 'bootstrap-sass']
rx_css_import = re.compile("(@import[^;{]+;?)", re.M)
rx_sass_import = re.compile("""(@import\s?['"]([^'"]+)['"])""")
rx_css_split = re.compile("\/\*\! ([a-f0-9-]+) \*\/")
def __init__(self, xmlid, debug=False, cr=None, uid=None, context=None, registry=None):
self.xmlid = xmlid
self.cr = request.cr if cr is None else cr
self.uid = request.uid if uid is None else uid
self.context = request.context if context is None else context
self.registry = request.registry if registry is None else registry
self.javascripts = []
self.stylesheets = []
self.css_errors = []
self.remains = []
self._checksum = None
context = self.context.copy()
context['inherit_branding'] = False
context['rendering_bundle'] = True
self.html = self.registry['ir.ui.view'].render(self.cr, self.uid, xmlid, context=context)
self.parse()
def parse(self):
fragments = html.fragments_fromstring(self.html)
for el in fragments:
if isinstance(el, basestring):
self.remains.append(el)
elif isinstance(el, html.HtmlElement):
src = el.get('src', '')
href = el.get('href', '')
atype = el.get('type')
media = el.get('media')
if el.tag == 'style':
if atype == 'text/sass' or src.endswith('.sass'):
self.stylesheets.append(SassAsset(self, inline=el.text, media=media))
else:
self.stylesheets.append(StylesheetAsset(self, inline=el.text, media=media))
elif el.tag == 'link' and el.get('rel') == 'stylesheet' and self.can_aggregate(href):
if href.endswith('.sass') or atype == 'text/sass':
self.stylesheets.append(SassAsset(self, url=href, media=media))
else:
self.stylesheets.append(StylesheetAsset(self, url=href, media=media))
elif el.tag == 'script' and not src:
self.javascripts.append(JavascriptAsset(self, inline=el.text))
elif el.tag == 'script' and self.can_aggregate(src):
self.javascripts.append(JavascriptAsset(self, url=src))
else:
self.remains.append(html.tostring(el))
else:
try:
self.remains.append(html.tostring(el))
except Exception:
                    # not implemented yet
raise NotImplementedError
def can_aggregate(self, url):
return not urlparse(url).netloc and not url.startswith(('/web/css', '/web/js'))
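    # For illustration (hypothetical URLs): '/web/static/src/js/app.js'
    # can be aggregated, while 'http://cdn.example.com/app.js' (external
    # netloc) and '/web/css/bundle/1234567' (already a bundle URL) cannot.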
def to_html(self, sep=None, css=True, js=True, debug=False):
if sep is None:
sep = '\n '
response = []
if debug:
if css and self.stylesheets:
self.compile_sass()
for style in self.stylesheets:
response.append(style.to_html())
if js:
for jscript in self.javascripts:
response.append(jscript.to_html())
else:
url_for = self.context.get('url_for', lambda url: url)
if css and self.stylesheets:
href = '/web/css/%s/%s' % (self.xmlid, self.version)
response.append('<link href="%s" rel="stylesheet"/>' % url_for(href))
if js:
src = '/web/js/%s/%s' % (self.xmlid, self.version)
response.append('<script type="text/javascript" src="%s"></script>' % url_for(src))
response.extend(self.remains)
return sep + sep.join(response)
@lazy_property
def last_modified(self):
"""Returns last modified date of linked files"""
return max(itertools.chain(
(asset.last_modified for asset in self.javascripts),
(asset.last_modified for asset in self.stylesheets),
))
@lazy_property
def version(self):
return self.checksum[0:7]
@lazy_property
def checksum(self):
"""
Not really a full checksum.
        We compute a SHA1 on the rendered bundle + the most recent
        last_modified date of the linked files
"""
check = self.html + str(self.last_modified)
return hashlib.sha1(check).hexdigest()
def js(self):
content = self.get_cache('js')
if content is None:
content = ';\n'.join(asset.minify() for asset in self.javascripts)
self.set_cache('js', content)
return content
def css(self):
content = self.get_cache('css')
if content is None:
self.compile_sass()
content = '\n'.join(asset.minify() for asset in self.stylesheets)
if self.css_errors:
msg = '\n'.join(self.css_errors)
content += self.css_message(msg.replace('\n', '\\A '))
# move up all @import rules to the top
matches = []
def push(matchobj):
matches.append(matchobj.group(0))
return ''
content = re.sub(self.rx_css_import, push, content)
matches.append(content)
content = u'\n'.join(matches)
if self.css_errors:
return content
self.set_cache('css', content)
return content
def get_cache(self, type):
content = None
domain = [('url', '=', '/web/%s/%s/%s' % (type, self.xmlid, self.version))]
bundle = self.registry['ir.attachment'].search_read(self.cr, openerp.SUPERUSER_ID, domain, ['datas'], context=self.context)
if bundle and bundle[0]['datas']:
content = bundle[0]['datas'].decode('base64')
return content
def set_cache(self, type, content):
ira = self.registry['ir.attachment']
url_prefix = '/web/%s/%s/' % (type, self.xmlid)
# Invalidate previous caches
oids = ira.search(self.cr, openerp.SUPERUSER_ID, [('url', '=like', url_prefix + '%')], context=self.context)
if oids:
ira.unlink(self.cr, openerp.SUPERUSER_ID, oids, context=self.context)
url = url_prefix + self.version
ira.create(self.cr, openerp.SUPERUSER_ID, dict(
datas=content.encode('utf8').encode('base64'),
type='binary',
name=url,
url=url,
), context=self.context)
def css_message(self, message):
return """
body:before {
background: #ffc;
width: 100%%;
font-size: 14px;
font-family: monospace;
white-space: pre;
content: "%s";
}
""" % message.replace('"', '\\"')
def compile_sass(self):
"""
Checks if the bundle contains any sass content, then compiles it to css.
Css compilation is done at the bundle level and not in the assets
        because they are potentially interdependent.
"""
sass = [asset for asset in self.stylesheets if isinstance(asset, SassAsset)]
if not sass:
return
source = '\n'.join([asset.get_source() for asset in sass])
# move up all @import rules to the top and exclude file imports
imports = []
def push(matchobj):
ref = matchobj.group(2)
line = '@import "%s"' % ref
if '.' not in ref and line not in imports and not ref.startswith(('.', '/', '~')):
imports.append(line)
return ''
source = re.sub(self.rx_sass_import, push, source)
imports.append(source)
source = u'\n'.join(imports)
try:
compiler = Popen(self.cmd_sass, stdin=PIPE, stdout=PIPE, stderr=PIPE)
except Exception:
msg = "Could not find 'sass' program needed to compile sass/scss files"
_logger.error(msg)
self.css_errors.append(msg)
return
result = compiler.communicate(input=source.encode('utf-8'))
if compiler.returncode:
error = self.get_sass_error(''.join(result), source=source)
_logger.warning(error)
self.css_errors.append(error)
return
compiled = result[0].strip().decode('utf8')
fragments = self.rx_css_split.split(compiled)[1:]
while fragments:
asset_id = fragments.pop(0)
asset = next(asset for asset in sass if asset.id == asset_id)
asset._content = fragments.pop(0)
def get_sass_error(self, stderr, source=None):
# TODO: try to find out which asset the error belongs to
error = stderr.split('Load paths')[0].replace(' Use --trace for backtrace.', '')
error += "This error occured while compiling the bundle '%s' containing:" % self.xmlid
for asset in self.stylesheets:
if isinstance(asset, SassAsset):
error += '\n - %s' % (asset.url if asset.url else '<inline sass>')
return error
class WebAsset(object):
html_url = '%s'
def __init__(self, bundle, inline=None, url=None):
self.id = str(uuid.uuid4())
self.bundle = bundle
self.inline = inline
self.url = url
self.cr = bundle.cr
self.uid = bundle.uid
self.registry = bundle.registry
self.context = bundle.context
self._content = None
self._filename = None
self._ir_attach = None
name = '<inline asset>' if inline else url
self.name = "%s defined in bundle '%s'" % (name, bundle.xmlid)
if not inline and not url:
raise Exception("An asset should either be inlined or url linked")
def stat(self):
if not (self.inline or self._filename or self._ir_attach):
addon = filter(None, self.url.split('/'))[0]
try:
# Test url against modules static assets
mpath = openerp.http.addons_manifest[addon]['addons_path']
self._filename = mpath + self.url.replace('/', os.path.sep)
except Exception:
try:
# Test url against ir.attachments
fields = ['__last_update', 'datas', 'mimetype']
domain = [('type', '=', 'binary'), ('url', '=', self.url)]
ira = self.registry['ir.attachment']
attach = ira.search_read(self.cr, openerp.SUPERUSER_ID, domain, fields, context=self.context)
self._ir_attach = attach[0]
except Exception:
raise AssetNotFound("Could not find %s" % self.name)
def to_html(self):
raise NotImplementedError()
@lazy_property
def last_modified(self):
try:
self.stat()
if self._filename:
return datetime.datetime.fromtimestamp(os.path.getmtime(self._filename))
elif self._ir_attach:
server_format = openerp.tools.misc.DEFAULT_SERVER_DATETIME_FORMAT
last_update = self._ir_attach['__last_update']
try:
return datetime.datetime.strptime(last_update, server_format + '.%f')
except ValueError:
return datetime.datetime.strptime(last_update, server_format)
except Exception:
pass
return datetime.datetime(1970, 1, 1)
@property
def content(self):
if not self._content:
self._content = self.inline or self._fetch_content()
return self._content
def _fetch_content(self):
""" Fetch content from file or database"""
try:
self.stat()
if self._filename:
with open(self._filename, 'rb') as fp:
return fp.read().decode('utf-8')
else:
return self._ir_attach['datas'].decode('base64')
except UnicodeDecodeError:
raise AssetError('%s is not utf-8 encoded.' % self.name)
except IOError:
raise AssetNotFound('File %s does not exist.' % self.name)
except:
raise AssetError('Could not get content for %s.' % self.name)
def minify(self):
return self.content
def with_header(self, content=None):
if content is None:
content = self.content
return '\n/* %s */\n%s' % (self.name, content)
class JavascriptAsset(WebAsset):
def minify(self):
return self.with_header(rjsmin(self.content))
def _fetch_content(self):
try:
return super(JavascriptAsset, self)._fetch_content()
except AssetError, e:
return "console.error(%s);" % json.dumps(e.message)
def to_html(self):
if self.url:
return '<script type="text/javascript" src="%s"></script>' % (self.html_url % self.url)
else:
return '<script type="text/javascript" charset="utf-8">%s</script>' % self.with_header()
class StylesheetAsset(WebAsset):
rx_import = re.compile(r"""@import\s+('|")(?!'|"|/|https?://)""", re.U)
rx_url = re.compile(r"""url\s*\(\s*('|"|)(?!'|"|/|https?://|data:)""", re.U)
rx_sourceMap = re.compile(r'(/\*# sourceMappingURL=.*)', re.U)
rx_charset = re.compile(r'(@charset "[^"]+";)', re.U)
def __init__(self, *args, **kw):
self.media = kw.pop('media', None)
super(StylesheetAsset, self).__init__(*args, **kw)
@property
def content(self):
content = super(StylesheetAsset, self).content
if self.media:
content = '@media %s { %s }' % (self.media, content)
return content
def _fetch_content(self):
try:
content = super(StylesheetAsset, self)._fetch_content()
web_dir = os.path.dirname(self.url)
content = self.rx_import.sub(
r"""@import \1%s/""" % (web_dir,),
content,
)
content = self.rx_url.sub(
r"url(\1%s/" % (web_dir,),
content,
)
# remove charset declarations, we only support utf-8
content = self.rx_charset.sub('', content)
except AssetError, e:
self.bundle.css_errors.append(e.message)
return ''
return content
def minify(self):
        # remove existing sourcemaps; they make no sense after re-minification
content = self.rx_sourceMap.sub('', self.content)
# comments
content = re.sub(r'/\*.*?\*/', '', content, flags=re.S)
# space
content = re.sub(r'\s+', ' ', content)
content = re.sub(r' *([{}]) *', r'\1', content)
return self.with_header(content)
def to_html(self):
media = (' media="%s"' % werkzeug.utils.escape(self.media)) if self.media else ''
if self.url:
href = self.html_url % self.url
return '<link rel="stylesheet" href="%s" type="text/css"%s/>' % (href, media)
else:
return '<style type="text/css"%s>%s</style>' % (media, self.with_header())
class SassAsset(StylesheetAsset):
html_url = '%s.css'
rx_indent = re.compile(r'^( +|\t+)', re.M)
indent = None
reindent = ' '
def minify(self):
return self.with_header()
def to_html(self):
if self.url:
ira = self.registry['ir.attachment']
url = self.html_url % self.url
domain = [('type', '=', 'binary'), ('url', '=', self.url)]
ira_id = ira.search(self.cr, openerp.SUPERUSER_ID, domain, context=self.context)
if ira_id:
# TODO: update only if needed
                ira.write(self.cr, openerp.SUPERUSER_ID, ira_id, {'datas': self.content.encode('utf8').encode('base64')}, context=self.context)
else:
ira.create(self.cr, openerp.SUPERUSER_ID, dict(
datas=self.content.encode('utf8').encode('base64'),
mimetype='text/css',
type='binary',
name=url,
url=url,
), context=self.context)
return super(SassAsset, self).to_html()
def get_source(self):
content = textwrap.dedent(self.inline or self._fetch_content())
def fix_indent(m):
ind = m.group()
if self.indent is None:
self.indent = ind
if self.indent == self.reindent:
                # Don't reindent the file if its indentation already matches the target (reindent)
raise StopIteration()
return ind.replace(self.indent, self.reindent)
try:
content = self.rx_indent.sub(fix_indent, content)
except StopIteration:
pass
return "/*! %s */\n%s" % (self.id, content)
def rjsmin(script):
""" Minify js with a clever regex.
Taken from http://opensource.perlig.de/rjsmin
Apache License, Version 2.0 """
def subber(match):
""" Substitution callback """
groups = match.groups()
return (
groups[0] or
groups[1] or
groups[2] or
groups[3] or
(groups[4] and '\n') or
(groups[5] and ' ') or
(groups[6] and ' ') or
(groups[7] and ' ') or
''
)
result = re.sub(
r'([^\047"/\000-\040]+)|((?:(?:\047[^\047\\\r\n]*(?:\\(?:[^\r\n]|\r?'
r'\n|\r)[^\047\\\r\n]*)*\047)|(?:"[^"\\\r\n]*(?:\\(?:[^\r\n]|\r?\n|'
r'\r)[^"\\\r\n]*)*"))[^\047"/\000-\040]*)|(?:(?<=[(,=:\[!&|?{};\r\n]'
r')(?:[\000-\011\013\014\016-\040]|(?:/\*[^*]*\*+(?:[^/*][^*]*\*+)*/'
r'))*((?:/(?![\r\n/*])[^/\\\[\r\n]*(?:(?:\\[^\r\n]|(?:\[[^\\\]\r\n]*'
r'(?:\\[^\r\n][^\\\]\r\n]*)*\]))[^/\\\[\r\n]*)*/)[^\047"/\000-\040]*'
r'))|(?:(?<=[\000-#%-,./:-@\[-^`{-~-]return)(?:[\000-\011\013\014\01'
r'6-\040]|(?:/\*[^*]*\*+(?:[^/*][^*]*\*+)*/))*((?:/(?![\r\n/*])[^/'
r'\\\[\r\n]*(?:(?:\\[^\r\n]|(?:\[[^\\\]\r\n]*(?:\\[^\r\n][^\\\]\r\n]'
r'*)*\]))[^/\\\[\r\n]*)*/)[^\047"/\000-\040]*))|(?<=[^\000-!#%&(*,./'
r':-@\[\\^`{|~])(?:[\000-\011\013\014\016-\040]|(?:/\*[^*]*\*+(?:[^/'
r'*][^*]*\*+)*/))*(?:((?:(?://[^\r\n]*)?[\r\n]))(?:[\000-\011\013\01'
r'4\016-\040]|(?:/\*[^*]*\*+(?:[^/*][^*]*\*+)*/))*)+(?=[^\000-\040"#'
r'%-\047)*,./:-@\\-^`|-~])|(?<=[^\000-#%-,./:-@\[-^`{-~-])((?:[\000-'
r'\011\013\014\016-\040]|(?:/\*[^*]*\*+(?:[^/*][^*]*\*+)*/)))+(?=[^'
r'\000-#%-,./:-@\[-^`{-~-])|(?<=\+)((?:[\000-\011\013\014\016-\040]|'
r'(?:/\*[^*]*\*+(?:[^/*][^*]*\*+)*/)))+(?=\+)|(?<=-)((?:[\000-\011\0'
r'13\014\016-\040]|(?:/\*[^*]*\*+(?:[^/*][^*]*\*+)*/)))+(?=-)|(?:[\0'
r'00-\011\013\014\016-\040]|(?:/\*[^*]*\*+(?:[^/*][^*]*\*+)*/))+|(?:'
r'(?:(?://[^\r\n]*)?[\r\n])(?:[\000-\011\013\014\016-\040]|(?:/\*[^*'
r']*\*+(?:[^/*][^*]*\*+)*/))*)+', subber, '\n%s\n' % script
).strip()
return result
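# For illustration (hypothetical input):
#
#   rjsmin("var x = 1;  // comment\nvar y = 2;")
#
# returns roughly 'var x=1;\nvar y=2;', with the comment and redundant
# whitespace removed.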
# vim:et:
|
patrickm/chromium.src
|
refs/heads/nw
|
chrome/test/functional/netflix.py
|
68
|
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import logging
import os
import time
import pyauto_functional
import pyauto
import test_utils
class NetflixTestHelper():
"""Helper functions for Netflix tests.
For sample usage, look at class NetflixTest.
"""
# Netflix player states.
IS_GUEST_MODE_ERROR = '0'
IS_PLAYING = '4'
TITLE_HOMEPAGE = 'http://movies.netflix.com/WiHome'
SIGNOUT_PAGE = 'https://account.netflix.com/Logout'
# 30 Rock.
VIDEO_URL = 'https://movies.netflix.com/WiPlayer?movieid=70136124'
ALT_VIDEO_URL = 'https://movies.netflix.com/WiPlayer?movieid=70133713'
_pyauto = None
def __init__(self, pyauto):
self._pyauto = pyauto
def _IsNetflixPluginEnabled(self):
"""Determine Netflix plugin availability and its state."""
return [x for x in self._pyauto.GetPluginsInfo().Plugins() \
if x['name'] == 'Netflix' and x['enabled']]
def _LoginToNetflix(self):
"""Login to Netflix."""
credentials = self._pyauto.GetPrivateInfo()['test_netflix_acct']
board_name = self._pyauto.ChromeOSBoard()
assert credentials.get(board_name), \
'No netflix credentials for %s.' % board_name
self._pyauto.NavigateToURL(credentials['login_url'])
login_js = """
document.getElementById('email').value='%s';
document.getElementById('password').value='%s';
window.domAutomationController.send('ok');
""" % (credentials[board_name], credentials['password'])
self._pyauto.assertEqual(self._pyauto.ExecuteJavascript(login_js), 'ok',
msg='Failed to set login credentials.')
self._pyauto.assertTrue(self._pyauto.SubmitForm('login-form'),
        msg='Login to Netflix failed. We think this is an authentication '
        'problem from the Netflix side. Sometimes we also see this while '
        'logging in manually.')
def _GetVideoDroppedFrames(self, tab_index=0, windex=0):
"""Returns total Netflix video dropped frames."""
js = """
var frames = nrdp.video.droppedFrames;
window.domAutomationController.send(frames + '');
"""
return int(self._pyauto.ExecuteJavascript(js, tab_index=tab_index,
windex=windex))
def _GetVideoFrames(self, tab_index=0, windex=0):
"""Returns Netflix video total frames."""
js = """
var frames = nrdp.video.totalFrames;
window.domAutomationController.send(frames + '');
"""
return int(self._pyauto.ExecuteJavascript(js, tab_index=tab_index,
windex=windex))
def _HandleInfobars(self, err_msg):
"""Manage infobars that come up during the test."""
def _HandleNetflixInfobar():
tab_info = self._pyauto.GetBrowserInfo()['windows'][0]['tabs'][0]
infobars = tab_info['infobars']
index = 0
for infobar in infobars:
if 'netflix' in infobar['text']:
          # After the storage infobar pops up, clicking the Ok button
          # immediately returns the Storage error on faster machines like
          # Stumpy/Lumpy, so we add a delay of 1 second here.
time.sleep(1)
self._pyauto.PerformActionOnInfobar('accept', infobar_index=index)
return True
index = index + 1
return False
self._pyauto.assertTrue(self._pyauto.WaitUntil(_HandleNetflixInfobar),
msg=err_msg)
def CurrentPlaybackTime(self):
"""Returns the current playback time in seconds."""
    current_time = self._pyauto.ExecuteJavascript("""
        time = nrdp.video.currentTime;
        window.domAutomationController.send(time + '');
    """)
    return int(float(current_time))
def SignOut(self):
"""Sign out from Netflix Login."""
self._pyauto.NavigateToURL(self.SIGNOUT_PAGE)
def LoginAndStartPlaying(self):
"""Login and start playing the video."""
self._pyauto.assertTrue(self._pyauto._IsNetflixPluginEnabled(),
msg='Netflix plugin is disabled or not available.')
self._pyauto._LoginToNetflix()
self._pyauto.assertTrue(self._pyauto.WaitUntil(
lambda: self._pyauto.GetActiveTabURL().spec(),
expect_retval=self.TITLE_HOMEPAGE),
msg='Login to Netflix failed.')
self._pyauto.NavigateToURL(self.VIDEO_URL)
self._pyauto._HandleInfobars(err_msg='Netflix infobar did not show up')
def CheckNetflixPlaying(self, expected_result, error_msg):
"""Check if Netflix is playing the video or not.
Args:
expected_result: expected return value from Netflix player.
error_msg: If expected value isn't matching, error message to throw.
"""
self._pyauto.assertTrue(self._pyauto.WaitUntil(
lambda: self._pyauto.ExecuteJavascript("""
if (typeof nrdp == 'undefined') {
window.domAutomationController.send('not ready');
}
player_status = nrdp.video.readyState;
window.domAutomationController.send(player_status + '');
"""), expect_retval=expected_result),
msg=error_msg)
class NetflixTest(pyauto.PyUITest, NetflixTestHelper):
"""Test case for Netflix player."""
def __init__(self, methodName='runTest', **kwargs):
pyauto.PyUITest.__init__(self, methodName, **kwargs)
NetflixTestHelper.__init__(self, self)
def ShouldAutoLogin(self):
return False
def _Login(self):
"""Perform login"""
credentials = self.GetPrivateInfo()['test_google_account']
self.Login(credentials['username'], credentials['password'])
logging.info('Logged in as %s' % credentials['username'])
login_info = self.GetLoginInfo()
self.assertTrue(login_info['is_logged_in'], msg='Login failed.')
self.assertFalse(login_info['is_guest'],
msg='Should not be logged in as guest.')
def setUp(self):
assert os.geteuid() == 0, 'Run test as root since we might need to logout'
pyauto.PyUITest.setUp(self)
if self.GetLoginInfo()['is_logged_in']:
self.Logout()
self._Login()
def tearDown(self):
self.SignOut()
pyauto.PyUITest.tearDown(self)
def testPlayerLoadsAndPlays(self):
"""Test that Netflix player loads and plays the title."""
self.LoginAndStartPlaying()
self._HandleInfobars(err_msg='Netflix plugin access infobar did not show up')
self.CheckNetflixPlaying(self.IS_PLAYING,
'Player did not start playing the title.')
def testMultiplePlayback(self):
"""Test that playing two titles, Netflix returns multiple play error."""
self.LoginAndStartPlaying()
self._HandleInfobars(err_msg='Netflix plugin access infobar did not show up')
self.CheckNetflixPlaying(self.IS_PLAYING,
'Player did not start playing the title.')
self.AppendTab(self.ALT_VIDEO_URL)
self.assertTrue('Multiple Play Error' in self.GetTabContents(),
msg='Multiple Play Error is not found on the page.')
def testPlaying(self):
"""Test that title playing progresses."""
self.LoginAndStartPlaying()
self._HandleInfobars(err_msg='Netflix plugin access infobar did not show up')
self.CheckNetflixPlaying(self.IS_PLAYING,
'Player did not start playing the title.')
title_length = self.ExecuteJavascript("""
time = nrdp.video.duration;
window.domAutomationController.send(time + '');
""")
title_length = int(float(title_length))
prev_time = 0
current_time = 0
count = 0
while current_time < title_length:
# We want to test playing only for ten seconds.
count = count + 1
if count == 10:
break
current_time = self.CurrentPlaybackTime()
self.assertTrue(prev_time <= current_time,
msg='Prev playing time %s is greater than current time %s.'
% (prev_time, current_time))
prev_time = current_time
# play video for some time
time.sleep(1)
    # The loop above may pass even if the player never started, so verify
    # that playback actually progressed during the last 10 seconds.
self.assertTrue(current_time > 0,
msg='Netflix player did not start playing.')
class NetflixGuestModeTest(pyauto.PyUITest, NetflixTestHelper):
"""Netflix in guest mode."""
def __init__(self, methodName='runTest', **kwargs):
pyauto.PyUITest.__init__(self, methodName, **kwargs)
NetflixTestHelper.__init__(self, self)
def setUp(self):
assert os.geteuid() == 0, 'Run test as root since we might need to logout'
pyauto.PyUITest.setUp(self)
if self.GetLoginInfo()['is_logged_in']:
self.Logout()
self.LoginAsGuest()
login_info = self.GetLoginInfo()
self.assertTrue(login_info['is_logged_in'], msg='Not logged in at all.')
self.assertTrue(login_info['is_guest'], msg='Not logged in as guest.')
def ShouldAutoLogin(self):
return False
def tearDown(self):
self.AppendTab(self.SIGNOUT_PAGE)
self.Logout()
pyauto.PyUITest.tearDown(self)
def testGuestMode(self):
"""Test that Netflix doesn't play in guest mode login."""
self.LoginAndStartPlaying()
self.CheckNetflixPlaying(
self.IS_GUEST_MODE_ERROR,
'Netflix player did not return a Guest mode error.')
# crosbug.com/p/14009
self.assertTrue('Netflix Video Player Unavailable' in self.GetTabContents(),
msg='Guest Mode error is not found on the page.')
if __name__ == '__main__':
pyauto_functional.Main()
|
wietsefranssen/RVIC
|
refs/heads/master
|
rvic/__init__.py
|
3
|
from .version import short_version as __version__
|
kwagyeman/openmv
|
refs/heads/master
|
scripts/examples/OpenMV/02-Board-Control/i2c_control.py
|
5
|
# I2C Control
#
# This example shows how to use the i2c bus on your OpenMV Cam by dumping the
# contents on a standard EEPROM. To run this example either connect the
# Thermopile Shield to your OpenMV Cam or an I2C EEPROM to your OpenMV Cam.
from pyb import I2C
i2c = I2C(2, I2C.MASTER) # The i2c bus must always be 2.
mem = i2c.mem_read(256, 0x50, 0) # The eeprom slave address is 0x50.
print("\n[")
for i in range(16):
print("\t[", end='')
for j in range(16):
print("%03d" % mem[(i*16)+j], end='')
if j != 15: print(", ", end='')
print("]," if i != 15 else "]")
print("]")
|
archifix/settings
|
refs/heads/master
|
sublime/Packages/pyyaml/st2/yaml/emitter.py
|
388
|
# Emitter expects events obeying the following grammar:
# stream ::= STREAM-START document* STREAM-END
# document ::= DOCUMENT-START node DOCUMENT-END
# node ::= SCALAR | sequence | mapping
# sequence ::= SEQUENCE-START node* SEQUENCE-END
# mapping ::= MAPPING-START (node node)* MAPPING-END
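# For example, serializing the one-element sequence document "- a" emits the
# stream:
#     STREAM-START DOCUMENT-START SEQUENCE-START SCALAR
#     SEQUENCE-END DOCUMENT-END STREAM-END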
__all__ = ['Emitter', 'EmitterError']
from error import YAMLError
from events import *
class EmitterError(YAMLError):
pass
class ScalarAnalysis(object):
def __init__(self, scalar, empty, multiline,
allow_flow_plain, allow_block_plain,
allow_single_quoted, allow_double_quoted,
allow_block):
self.scalar = scalar
self.empty = empty
self.multiline = multiline
self.allow_flow_plain = allow_flow_plain
self.allow_block_plain = allow_block_plain
self.allow_single_quoted = allow_single_quoted
self.allow_double_quoted = allow_double_quoted
self.allow_block = allow_block
class Emitter(object):
DEFAULT_TAG_PREFIXES = {
u'!' : u'!',
u'tag:yaml.org,2002:' : u'!!',
}
def __init__(self, stream, canonical=None, indent=None, width=None,
allow_unicode=None, line_break=None):
# The stream should have the methods `write` and possibly `flush`.
self.stream = stream
        # Encoding can be overridden by STREAM-START.
self.encoding = None
# Emitter is a state machine with a stack of states to handle nested
# structures.
self.states = []
self.state = self.expect_stream_start
# Current event and the event queue.
self.events = []
self.event = None
# The current indentation level and the stack of previous indents.
self.indents = []
self.indent = None
# Flow level.
self.flow_level = 0
# Contexts.
self.root_context = False
self.sequence_context = False
self.mapping_context = False
self.simple_key_context = False
# Characteristics of the last emitted character:
# - current position.
# - is it a whitespace?
# - is it an indention character
# (indentation space, '-', '?', or ':')?
self.line = 0
self.column = 0
self.whitespace = True
self.indention = True
# Whether the document requires an explicit document indicator
self.open_ended = False
# Formatting details.
self.canonical = canonical
self.allow_unicode = allow_unicode
self.best_indent = 2
if indent and 1 < indent < 10:
self.best_indent = indent
self.best_width = 80
if width and width > self.best_indent*2:
self.best_width = width
self.best_line_break = u'\n'
if line_break in [u'\r', u'\n', u'\r\n']:
self.best_line_break = line_break
# Tag prefixes.
self.tag_prefixes = None
# Prepared anchor and tag.
self.prepared_anchor = None
self.prepared_tag = None
# Scalar analysis and style.
self.analysis = None
self.style = None
def dispose(self):
# Reset the state attributes (to clear self-references)
self.states = []
self.state = None
def emit(self, event):
self.events.append(event)
while not self.need_more_events():
self.event = self.events.pop(0)
self.state()
self.event = None
# In some cases, we wait for a few next events before emitting.
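    # E.g. after a DocumentStartEvent one extra event is buffered so that
    # check_empty_document() can peek at it; SequenceStartEvent and
    # MappingStartEvent buffer two and three extra events for the
    # empty-collection and simple-key lookahead checks.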
def need_more_events(self):
if not self.events:
return True
event = self.events[0]
if isinstance(event, DocumentStartEvent):
return self.need_events(1)
elif isinstance(event, SequenceStartEvent):
return self.need_events(2)
elif isinstance(event, MappingStartEvent):
return self.need_events(3)
else:
return False
def need_events(self, count):
level = 0
for event in self.events[1:]:
if isinstance(event, (DocumentStartEvent, CollectionStartEvent)):
level += 1
elif isinstance(event, (DocumentEndEvent, CollectionEndEvent)):
level -= 1
elif isinstance(event, StreamEndEvent):
level = -1
if level < 0:
return False
return (len(self.events) < count+1)
def increase_indent(self, flow=False, indentless=False):
self.indents.append(self.indent)
if self.indent is None:
if flow:
self.indent = self.best_indent
else:
self.indent = 0
elif not indentless:
self.indent += self.best_indent
# States.
# Stream handlers.
def expect_stream_start(self):
if isinstance(self.event, StreamStartEvent):
if self.event.encoding and not getattr(self.stream, 'encoding', None):
self.encoding = self.event.encoding
self.write_stream_start()
self.state = self.expect_first_document_start
else:
raise EmitterError("expected StreamStartEvent, but got %s"
% self.event)
def expect_nothing(self):
raise EmitterError("expected nothing, but got %s" % self.event)
# Document handlers.
def expect_first_document_start(self):
return self.expect_document_start(first=True)
def expect_document_start(self, first=False):
if isinstance(self.event, DocumentStartEvent):
if (self.event.version or self.event.tags) and self.open_ended:
self.write_indicator(u'...', True)
self.write_indent()
if self.event.version:
version_text = self.prepare_version(self.event.version)
self.write_version_directive(version_text)
self.tag_prefixes = self.DEFAULT_TAG_PREFIXES.copy()
if self.event.tags:
handles = self.event.tags.keys()
handles.sort()
for handle in handles:
prefix = self.event.tags[handle]
self.tag_prefixes[prefix] = handle
handle_text = self.prepare_tag_handle(handle)
prefix_text = self.prepare_tag_prefix(prefix)
self.write_tag_directive(handle_text, prefix_text)
implicit = (first and not self.event.explicit and not self.canonical
and not self.event.version and not self.event.tags
and not self.check_empty_document())
if not implicit:
self.write_indent()
self.write_indicator(u'---', True)
if self.canonical:
self.write_indent()
self.state = self.expect_document_root
elif isinstance(self.event, StreamEndEvent):
if self.open_ended:
self.write_indicator(u'...', True)
self.write_indent()
self.write_stream_end()
self.state = self.expect_nothing
else:
raise EmitterError("expected DocumentStartEvent, but got %s"
% self.event)
def expect_document_end(self):
if isinstance(self.event, DocumentEndEvent):
self.write_indent()
if self.event.explicit:
self.write_indicator(u'...', True)
self.write_indent()
self.flush_stream()
self.state = self.expect_document_start
else:
raise EmitterError("expected DocumentEndEvent, but got %s"
% self.event)
def expect_document_root(self):
self.states.append(self.expect_document_end)
self.expect_node(root=True)
# Node handlers.
def expect_node(self, root=False, sequence=False, mapping=False,
simple_key=False):
self.root_context = root
self.sequence_context = sequence
self.mapping_context = mapping
self.simple_key_context = simple_key
if isinstance(self.event, AliasEvent):
self.expect_alias()
elif isinstance(self.event, (ScalarEvent, CollectionStartEvent)):
self.process_anchor(u'&')
self.process_tag()
if isinstance(self.event, ScalarEvent):
self.expect_scalar()
elif isinstance(self.event, SequenceStartEvent):
if self.flow_level or self.canonical or self.event.flow_style \
or self.check_empty_sequence():
self.expect_flow_sequence()
else:
self.expect_block_sequence()
elif isinstance(self.event, MappingStartEvent):
if self.flow_level or self.canonical or self.event.flow_style \
or self.check_empty_mapping():
self.expect_flow_mapping()
else:
self.expect_block_mapping()
else:
raise EmitterError("expected NodeEvent, but got %s" % self.event)
def expect_alias(self):
if self.event.anchor is None:
raise EmitterError("anchor is not specified for alias")
self.process_anchor(u'*')
self.state = self.states.pop()
def expect_scalar(self):
self.increase_indent(flow=True)
self.process_scalar()
self.indent = self.indents.pop()
self.state = self.states.pop()
# Flow sequence handlers.
def expect_flow_sequence(self):
self.write_indicator(u'[', True, whitespace=True)
self.flow_level += 1
self.increase_indent(flow=True)
self.state = self.expect_first_flow_sequence_item
def expect_first_flow_sequence_item(self):
if isinstance(self.event, SequenceEndEvent):
self.indent = self.indents.pop()
self.flow_level -= 1
self.write_indicator(u']', False)
self.state = self.states.pop()
else:
if self.canonical or self.column > self.best_width:
self.write_indent()
self.states.append(self.expect_flow_sequence_item)
self.expect_node(sequence=True)
def expect_flow_sequence_item(self):
if isinstance(self.event, SequenceEndEvent):
self.indent = self.indents.pop()
self.flow_level -= 1
if self.canonical:
self.write_indicator(u',', False)
self.write_indent()
self.write_indicator(u']', False)
self.state = self.states.pop()
else:
self.write_indicator(u',', False)
if self.canonical or self.column > self.best_width:
self.write_indent()
self.states.append(self.expect_flow_sequence_item)
self.expect_node(sequence=True)
# Flow mapping handlers.
def expect_flow_mapping(self):
self.write_indicator(u'{', True, whitespace=True)
self.flow_level += 1
self.increase_indent(flow=True)
self.state = self.expect_first_flow_mapping_key
def expect_first_flow_mapping_key(self):
if isinstance(self.event, MappingEndEvent):
self.indent = self.indents.pop()
self.flow_level -= 1
self.write_indicator(u'}', False)
self.state = self.states.pop()
else:
if self.canonical or self.column > self.best_width:
self.write_indent()
if not self.canonical and self.check_simple_key():
self.states.append(self.expect_flow_mapping_simple_value)
self.expect_node(mapping=True, simple_key=True)
else:
self.write_indicator(u'?', True)
self.states.append(self.expect_flow_mapping_value)
self.expect_node(mapping=True)
def expect_flow_mapping_key(self):
if isinstance(self.event, MappingEndEvent):
self.indent = self.indents.pop()
self.flow_level -= 1
if self.canonical:
self.write_indicator(u',', False)
self.write_indent()
self.write_indicator(u'}', False)
self.state = self.states.pop()
else:
self.write_indicator(u',', False)
if self.canonical or self.column > self.best_width:
self.write_indent()
if not self.canonical and self.check_simple_key():
self.states.append(self.expect_flow_mapping_simple_value)
self.expect_node(mapping=True, simple_key=True)
else:
self.write_indicator(u'?', True)
self.states.append(self.expect_flow_mapping_value)
self.expect_node(mapping=True)
def expect_flow_mapping_simple_value(self):
self.write_indicator(u':', False)
self.states.append(self.expect_flow_mapping_key)
self.expect_node(mapping=True)
def expect_flow_mapping_value(self):
if self.canonical or self.column > self.best_width:
self.write_indent()
self.write_indicator(u':', True)
self.states.append(self.expect_flow_mapping_key)
self.expect_node(mapping=True)
# Block sequence handlers.
def expect_block_sequence(self):
indentless = (self.mapping_context and not self.indention)
self.increase_indent(flow=False, indentless=indentless)
self.state = self.expect_first_block_sequence_item
def expect_first_block_sequence_item(self):
return self.expect_block_sequence_item(first=True)
def expect_block_sequence_item(self, first=False):
if not first and isinstance(self.event, SequenceEndEvent):
self.indent = self.indents.pop()
self.state = self.states.pop()
else:
self.write_indent()
self.write_indicator(u'-', True, indention=True)
self.states.append(self.expect_block_sequence_item)
self.expect_node(sequence=True)
# Block mapping handlers.
def expect_block_mapping(self):
self.increase_indent(flow=False)
self.state = self.expect_first_block_mapping_key
def expect_first_block_mapping_key(self):
return self.expect_block_mapping_key(first=True)
def expect_block_mapping_key(self, first=False):
if not first and isinstance(self.event, MappingEndEvent):
self.indent = self.indents.pop()
self.state = self.states.pop()
else:
self.write_indent()
if self.check_simple_key():
self.states.append(self.expect_block_mapping_simple_value)
self.expect_node(mapping=True, simple_key=True)
else:
self.write_indicator(u'?', True, indention=True)
self.states.append(self.expect_block_mapping_value)
self.expect_node(mapping=True)
def expect_block_mapping_simple_value(self):
self.write_indicator(u':', False)
self.states.append(self.expect_block_mapping_key)
self.expect_node(mapping=True)
def expect_block_mapping_value(self):
self.write_indent()
self.write_indicator(u':', True, indention=True)
self.states.append(self.expect_block_mapping_key)
self.expect_node(mapping=True)
# Checkers.
def check_empty_sequence(self):
return (isinstance(self.event, SequenceStartEvent) and self.events
and isinstance(self.events[0], SequenceEndEvent))
def check_empty_mapping(self):
return (isinstance(self.event, MappingStartEvent) and self.events
and isinstance(self.events[0], MappingEndEvent))
def check_empty_document(self):
if not isinstance(self.event, DocumentStartEvent) or not self.events:
return False
event = self.events[0]
return (isinstance(event, ScalarEvent) and event.anchor is None
and event.tag is None and event.implicit and event.value == u'')
def check_simple_key(self):
length = 0
if isinstance(self.event, NodeEvent) and self.event.anchor is not None:
if self.prepared_anchor is None:
self.prepared_anchor = self.prepare_anchor(self.event.anchor)
length += len(self.prepared_anchor)
if isinstance(self.event, (ScalarEvent, CollectionStartEvent)) \
and self.event.tag is not None:
if self.prepared_tag is None:
self.prepared_tag = self.prepare_tag(self.event.tag)
length += len(self.prepared_tag)
if isinstance(self.event, ScalarEvent):
if self.analysis is None:
self.analysis = self.analyze_scalar(self.event.value)
length += len(self.analysis.scalar)
return (length < 128 and (isinstance(self.event, AliasEvent)
or (isinstance(self.event, ScalarEvent)
and not self.analysis.empty and not self.analysis.multiline)
or self.check_empty_sequence() or self.check_empty_mapping()))
# Anchor, Tag, and Scalar processors.
def process_anchor(self, indicator):
if self.event.anchor is None:
self.prepared_anchor = None
return
if self.prepared_anchor is None:
self.prepared_anchor = self.prepare_anchor(self.event.anchor)
if self.prepared_anchor:
self.write_indicator(indicator+self.prepared_anchor, True)
self.prepared_anchor = None
def process_tag(self):
tag = self.event.tag
if isinstance(self.event, ScalarEvent):
if self.style is None:
self.style = self.choose_scalar_style()
if ((not self.canonical or tag is None) and
((self.style == '' and self.event.implicit[0])
or (self.style != '' and self.event.implicit[1]))):
self.prepared_tag = None
return
if self.event.implicit[0] and tag is None:
tag = u'!'
self.prepared_tag = None
else:
if (not self.canonical or tag is None) and self.event.implicit:
self.prepared_tag = None
return
if tag is None:
raise EmitterError("tag is not specified")
if self.prepared_tag is None:
self.prepared_tag = self.prepare_tag(tag)
if self.prepared_tag:
self.write_indicator(self.prepared_tag, True)
self.prepared_tag = None
def choose_scalar_style(self):
if self.analysis is None:
self.analysis = self.analyze_scalar(self.event.value)
if self.event.style == '"' or self.canonical:
return '"'
if not self.event.style and self.event.implicit[0]:
if (not (self.simple_key_context and
(self.analysis.empty or self.analysis.multiline))
and (self.flow_level and self.analysis.allow_flow_plain
or (not self.flow_level and self.analysis.allow_block_plain))):
return ''
if self.event.style and self.event.style in '|>':
if (not self.flow_level and not self.simple_key_context
and self.analysis.allow_block):
return self.event.style
if not self.event.style or self.event.style == '\'':
if (self.analysis.allow_single_quoted and
not (self.simple_key_context and self.analysis.multiline)):
return '\''
return '"'
def process_scalar(self):
if self.analysis is None:
self.analysis = self.analyze_scalar(self.event.value)
if self.style is None:
self.style = self.choose_scalar_style()
split = (not self.simple_key_context)
#if self.analysis.multiline and split \
# and (not self.style or self.style in '\'\"'):
# self.write_indent()
if self.style == '"':
self.write_double_quoted(self.analysis.scalar, split)
elif self.style == '\'':
self.write_single_quoted(self.analysis.scalar, split)
elif self.style == '>':
self.write_folded(self.analysis.scalar)
elif self.style == '|':
self.write_literal(self.analysis.scalar)
else:
self.write_plain(self.analysis.scalar, split)
self.analysis = None
self.style = None
# Analyzers.
def prepare_version(self, version):
major, minor = version
if major != 1:
raise EmitterError("unsupported YAML version: %d.%d" % (major, minor))
return u'%d.%d' % (major, minor)
def prepare_tag_handle(self, handle):
if not handle:
raise EmitterError("tag handle must not be empty")
if handle[0] != u'!' or handle[-1] != u'!':
raise EmitterError("tag handle must start and end with '!': %r"
% (handle.encode('utf-8')))
for ch in handle[1:-1]:
if not (u'0' <= ch <= u'9' or u'A' <= ch <= u'Z' or u'a' <= ch <= u'z' \
or ch in u'-_'):
raise EmitterError("invalid character %r in the tag handle: %r"
% (ch.encode('utf-8'), handle.encode('utf-8')))
return handle
def prepare_tag_prefix(self, prefix):
if not prefix:
raise EmitterError("tag prefix must not be empty")
chunks = []
start = end = 0
if prefix[0] == u'!':
end = 1
while end < len(prefix):
ch = prefix[end]
if u'0' <= ch <= u'9' or u'A' <= ch <= u'Z' or u'a' <= ch <= u'z' \
or ch in u'-;/?!:@&=+$,_.~*\'()[]':
end += 1
else:
if start < end:
chunks.append(prefix[start:end])
start = end = end+1
data = ch.encode('utf-8')
for ch in data:
chunks.append(u'%%%02X' % ord(ch))
if start < end:
chunks.append(prefix[start:end])
return u''.join(chunks)
def prepare_tag(self, tag):
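        # With the default prefixes, u'tag:yaml.org,2002:str' becomes u'!!str';
        # a tag matching no prefix, e.g. u'x', is emitted verbatim as u'!<x>'.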
if not tag:
raise EmitterError("tag must not be empty")
if tag == u'!':
return tag
handle = None
suffix = tag
prefixes = self.tag_prefixes.keys()
prefixes.sort()
for prefix in prefixes:
if tag.startswith(prefix) \
and (prefix == u'!' or len(prefix) < len(tag)):
handle = self.tag_prefixes[prefix]
suffix = tag[len(prefix):]
chunks = []
start = end = 0
while end < len(suffix):
ch = suffix[end]
if u'0' <= ch <= u'9' or u'A' <= ch <= u'Z' or u'a' <= ch <= u'z' \
or ch in u'-;/?:@&=+$,_.~*\'()[]' \
or (ch == u'!' and handle != u'!'):
end += 1
else:
if start < end:
chunks.append(suffix[start:end])
start = end = end+1
data = ch.encode('utf-8')
for ch in data:
chunks.append(u'%%%02X' % ord(ch))
if start < end:
chunks.append(suffix[start:end])
suffix_text = u''.join(chunks)
if handle:
return u'%s%s' % (handle, suffix_text)
else:
return u'!<%s>' % suffix_text
def prepare_anchor(self, anchor):
if not anchor:
raise EmitterError("anchor must not be empty")
for ch in anchor:
if not (u'0' <= ch <= u'9' or u'A' <= ch <= u'Z' or u'a' <= ch <= u'z' \
or ch in u'-_'):
raise EmitterError("invalid character %r in the anchor: %r"
% (ch.encode('utf-8'), anchor.encode('utf-8')))
return anchor
def analyze_scalar(self, scalar):
# Empty scalar is a special case.
if not scalar:
return ScalarAnalysis(scalar=scalar, empty=True, multiline=False,
allow_flow_plain=False, allow_block_plain=True,
allow_single_quoted=True, allow_double_quoted=True,
allow_block=False)
# Indicators and special characters.
block_indicators = False
flow_indicators = False
line_breaks = False
special_characters = False
# Important whitespace combinations.
leading_space = False
leading_break = False
trailing_space = False
trailing_break = False
break_space = False
space_break = False
# Check document indicators.
if scalar.startswith(u'---') or scalar.startswith(u'...'):
block_indicators = True
flow_indicators = True
# First character or preceded by a whitespace.
        preceded_by_whitespace = True
# Last character or followed by a whitespace.
followed_by_whitespace = (len(scalar) == 1 or
scalar[1] in u'\0 \t\r\n\x85\u2028\u2029')
# The previous character is a space.
previous_space = False
# The previous character is a break.
previous_break = False
index = 0
while index < len(scalar):
ch = scalar[index]
# Check for indicators.
if index == 0:
# Leading indicators are special characters.
if ch in u'#,[]{}&*!|>\'\"%@`':
flow_indicators = True
block_indicators = True
if ch in u'?:':
flow_indicators = True
if followed_by_whitespace:
block_indicators = True
if ch == u'-' and followed_by_whitespace:
flow_indicators = True
block_indicators = True
else:
# Some indicators cannot appear within a scalar as well.
if ch in u',?[]{}':
flow_indicators = True
if ch == u':':
flow_indicators = True
if followed_by_whitespace:
block_indicators = True
                if ch == u'#' and preceded_by_whitespace:
flow_indicators = True
block_indicators = True
# Check for line breaks, special, and unicode characters.
if ch in u'\n\x85\u2028\u2029':
line_breaks = True
if not (ch == u'\n' or u'\x20' <= ch <= u'\x7E'):
if (ch == u'\x85' or u'\xA0' <= ch <= u'\uD7FF'
or u'\uE000' <= ch <= u'\uFFFD') and ch != u'\uFEFF':
unicode_characters = True
if not self.allow_unicode:
special_characters = True
else:
special_characters = True
# Detect important whitespace combinations.
if ch == u' ':
if index == 0:
leading_space = True
if index == len(scalar)-1:
trailing_space = True
if previous_break:
break_space = True
previous_space = True
previous_break = False
elif ch in u'\n\x85\u2028\u2029':
if index == 0:
leading_break = True
if index == len(scalar)-1:
trailing_break = True
if previous_space:
space_break = True
previous_space = False
previous_break = True
else:
previous_space = False
previous_break = False
# Prepare for the next character.
index += 1
            preceded_by_whitespace = (ch in u'\0 \t\r\n\x85\u2028\u2029')
followed_by_whitespace = (index+1 >= len(scalar) or
scalar[index+1] in u'\0 \t\r\n\x85\u2028\u2029')
# Let's decide what styles are allowed.
allow_flow_plain = True
allow_block_plain = True
allow_single_quoted = True
allow_double_quoted = True
allow_block = True
# Leading and trailing whitespaces are bad for plain scalars.
if (leading_space or leading_break
or trailing_space or trailing_break):
allow_flow_plain = allow_block_plain = False
# We do not permit trailing spaces for block scalars.
if trailing_space:
allow_block = False
# Spaces at the beginning of a new line are only acceptable for block
# scalars.
if break_space:
allow_flow_plain = allow_block_plain = allow_single_quoted = False
# Spaces followed by breaks, as well as special character are only
# allowed for double quoted scalars.
if space_break or special_characters:
allow_flow_plain = allow_block_plain = \
allow_single_quoted = allow_block = False
# Although the plain scalar writer supports breaks, we never emit
# multiline plain scalars.
if line_breaks:
allow_flow_plain = allow_block_plain = False
# Flow indicators are forbidden for flow plain scalars.
if flow_indicators:
allow_flow_plain = False
# Block indicators are forbidden for block plain scalars.
if block_indicators:
allow_block_plain = False
return ScalarAnalysis(scalar=scalar,
empty=False, multiline=line_breaks,
allow_flow_plain=allow_flow_plain,
allow_block_plain=allow_block_plain,
allow_single_quoted=allow_single_quoted,
allow_double_quoted=allow_double_quoted,
allow_block=allow_block)
# Writers.
def flush_stream(self):
if hasattr(self.stream, 'flush'):
self.stream.flush()
def write_stream_start(self):
# Write BOM if needed.
if self.encoding and self.encoding.startswith('utf-16'):
self.stream.write(u'\uFEFF'.encode(self.encoding))
def write_stream_end(self):
self.flush_stream()
def write_indicator(self, indicator, need_whitespace,
whitespace=False, indention=False):
if self.whitespace or not need_whitespace:
data = indicator
else:
data = u' '+indicator
self.whitespace = whitespace
self.indention = self.indention and indention
self.column += len(data)
self.open_ended = False
if self.encoding:
data = data.encode(self.encoding)
self.stream.write(data)
def write_indent(self):
indent = self.indent or 0
if not self.indention or self.column > indent \
or (self.column == indent and not self.whitespace):
self.write_line_break()
if self.column < indent:
self.whitespace = True
data = u' '*(indent-self.column)
self.column = indent
if self.encoding:
data = data.encode(self.encoding)
self.stream.write(data)
def write_line_break(self, data=None):
if data is None:
data = self.best_line_break
self.whitespace = True
self.indention = True
self.line += 1
self.column = 0
if self.encoding:
data = data.encode(self.encoding)
self.stream.write(data)
def write_version_directive(self, version_text):
data = u'%%YAML %s' % version_text
if self.encoding:
data = data.encode(self.encoding)
self.stream.write(data)
self.write_line_break()
def write_tag_directive(self, handle_text, prefix_text):
data = u'%%TAG %s %s' % (handle_text, prefix_text)
if self.encoding:
data = data.encode(self.encoding)
self.stream.write(data)
self.write_line_break()
# Scalar streams.
def write_single_quoted(self, text, split=True):
self.write_indicator(u'\'', True)
spaces = False
breaks = False
start = end = 0
while end <= len(text):
ch = None
if end < len(text):
ch = text[end]
if spaces:
if ch is None or ch != u' ':
if start+1 == end and self.column > self.best_width and split \
and start != 0 and end != len(text):
self.write_indent()
else:
data = text[start:end]
self.column += len(data)
if self.encoding:
data = data.encode(self.encoding)
self.stream.write(data)
start = end
elif breaks:
if ch is None or ch not in u'\n\x85\u2028\u2029':
if text[start] == u'\n':
self.write_line_break()
for br in text[start:end]:
if br == u'\n':
self.write_line_break()
else:
self.write_line_break(br)
self.write_indent()
start = end
else:
if ch is None or ch in u' \n\x85\u2028\u2029' or ch == u'\'':
if start < end:
data = text[start:end]
self.column += len(data)
if self.encoding:
data = data.encode(self.encoding)
self.stream.write(data)
start = end
if ch == u'\'':
data = u'\'\''
self.column += 2
if self.encoding:
data = data.encode(self.encoding)
self.stream.write(data)
start = end + 1
if ch is not None:
spaces = (ch == u' ')
breaks = (ch in u'\n\x85\u2028\u2029')
end += 1
self.write_indicator(u'\'', False)
ESCAPE_REPLACEMENTS = {
u'\0': u'0',
u'\x07': u'a',
u'\x08': u'b',
u'\x09': u't',
u'\x0A': u'n',
u'\x0B': u'v',
u'\x0C': u'f',
u'\x0D': u'r',
u'\x1B': u'e',
u'\"': u'\"',
u'\\': u'\\',
u'\x85': u'N',
u'\xA0': u'_',
u'\u2028': u'L',
u'\u2029': u'P',
}
def write_double_quoted(self, text, split=True):
self.write_indicator(u'"', True)
start = end = 0
while end <= len(text):
ch = None
if end < len(text):
ch = text[end]
if ch is None or ch in u'"\\\x85\u2028\u2029\uFEFF' \
or not (u'\x20' <= ch <= u'\x7E'
or (self.allow_unicode
and (u'\xA0' <= ch <= u'\uD7FF'
or u'\uE000' <= ch <= u'\uFFFD'))):
if start < end:
data = text[start:end]
self.column += len(data)
if self.encoding:
data = data.encode(self.encoding)
self.stream.write(data)
start = end
if ch is not None:
if ch in self.ESCAPE_REPLACEMENTS:
data = u'\\'+self.ESCAPE_REPLACEMENTS[ch]
elif ch <= u'\xFF':
data = u'\\x%02X' % ord(ch)
elif ch <= u'\uFFFF':
data = u'\\u%04X' % ord(ch)
else:
data = u'\\U%08X' % ord(ch)
self.column += len(data)
if self.encoding:
data = data.encode(self.encoding)
self.stream.write(data)
start = end+1
if 0 < end < len(text)-1 and (ch == u' ' or start >= end) \
and self.column+(end-start) > self.best_width and split:
data = text[start:end]+u'\\'
if start < end:
start = end
self.column += len(data)
if self.encoding:
data = data.encode(self.encoding)
self.stream.write(data)
self.write_indent()
self.whitespace = False
self.indention = False
if text[start] == u' ':
data = u'\\'
self.column += len(data)
if self.encoding:
data = data.encode(self.encoding)
self.stream.write(data)
end += 1
self.write_indicator(u'"', False)
def determine_block_hints(self, text):
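        # Block scalar header hints, e.g.: u'foo' -> u'-' (no trailing break:
        # strip chomping), u'foo\n\n' -> u'+' (keep trailing breaks), and,
        # with the default indent of 2, u' foo' -> u'2-' (a leading space
        # needs an explicit indentation indicator).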
hints = u''
if text:
if text[0] in u' \n\x85\u2028\u2029':
hints += unicode(self.best_indent)
if text[-1] not in u'\n\x85\u2028\u2029':
hints += u'-'
elif len(text) == 1 or text[-2] in u'\n\x85\u2028\u2029':
hints += u'+'
return hints
def write_folded(self, text):
hints = self.determine_block_hints(text)
self.write_indicator(u'>'+hints, True)
if hints[-1:] == u'+':
self.open_ended = True
self.write_line_break()
leading_space = True
spaces = False
breaks = True
start = end = 0
while end <= len(text):
ch = None
if end < len(text):
ch = text[end]
if breaks:
if ch is None or ch not in u'\n\x85\u2028\u2029':
if not leading_space and ch is not None and ch != u' ' \
and text[start] == u'\n':
self.write_line_break()
leading_space = (ch == u' ')
for br in text[start:end]:
if br == u'\n':
self.write_line_break()
else:
self.write_line_break(br)
if ch is not None:
self.write_indent()
start = end
elif spaces:
if ch != u' ':
if start+1 == end and self.column > self.best_width:
self.write_indent()
else:
data = text[start:end]
self.column += len(data)
if self.encoding:
data = data.encode(self.encoding)
self.stream.write(data)
start = end
else:
if ch is None or ch in u' \n\x85\u2028\u2029':
data = text[start:end]
self.column += len(data)
if self.encoding:
data = data.encode(self.encoding)
self.stream.write(data)
if ch is None:
self.write_line_break()
start = end
if ch is not None:
breaks = (ch in u'\n\x85\u2028\u2029')
spaces = (ch == u' ')
end += 1
def write_literal(self, text):
hints = self.determine_block_hints(text)
self.write_indicator(u'|'+hints, True)
if hints[-1:] == u'+':
self.open_ended = True
self.write_line_break()
breaks = True
start = end = 0
while end <= len(text):
ch = None
if end < len(text):
ch = text[end]
if breaks:
if ch is None or ch not in u'\n\x85\u2028\u2029':
for br in text[start:end]:
if br == u'\n':
self.write_line_break()
else:
self.write_line_break(br)
if ch is not None:
self.write_indent()
start = end
else:
if ch is None or ch in u'\n\x85\u2028\u2029':
data = text[start:end]
if self.encoding:
data = data.encode(self.encoding)
self.stream.write(data)
if ch is None:
self.write_line_break()
start = end
if ch is not None:
breaks = (ch in u'\n\x85\u2028\u2029')
end += 1
def write_plain(self, text, split=True):
if self.root_context:
self.open_ended = True
if not text:
return
if not self.whitespace:
data = u' '
self.column += len(data)
if self.encoding:
data = data.encode(self.encoding)
self.stream.write(data)
self.whitespace = False
self.indention = False
spaces = False
breaks = False
start = end = 0
while end <= len(text):
ch = None
if end < len(text):
ch = text[end]
if spaces:
if ch != u' ':
if start+1 == end and self.column > self.best_width and split:
self.write_indent()
self.whitespace = False
self.indention = False
else:
data = text[start:end]
self.column += len(data)
if self.encoding:
data = data.encode(self.encoding)
self.stream.write(data)
start = end
elif breaks:
if ch not in u'\n\x85\u2028\u2029':
if text[start] == u'\n':
self.write_line_break()
for br in text[start:end]:
if br == u'\n':
self.write_line_break()
else:
self.write_line_break(br)
self.write_indent()
self.whitespace = False
self.indention = False
start = end
else:
if ch is None or ch in u' \n\x85\u2028\u2029':
data = text[start:end]
self.column += len(data)
if self.encoding:
data = data.encode(self.encoding)
self.stream.write(data)
start = end
if ch is not None:
spaces = (ch == u' ')
breaks = (ch in u'\n\x85\u2028\u2029')
end += 1
|
brunosmmm/hdltools
|
refs/heads/master
|
hdltools/verilog/codegen.py
|
1
|
"""Generate Verilog Statements."""
import math
from ..abshdl.codegen import HDLCodeGenerator
from scoff.codegen import indent
from ..abshdl.const import HDLIntegerConstant
_INDENT_STR = " "
class VerilogCodeGenerator(HDLCodeGenerator):
"""Generate verilog code."""
VERILOG_CONSTANT_RADIX = ["d", "b", "h"]
VERILOG_PORT_DIRECTION = ["in", "out", "inout"]
VERILOG_SIGNAL_TYPE = ["wire", "reg"]
def gen_HDLModulePort(self, element, **kwargs):
"""Generate port."""
if "evaluate" in kwargs:
evaluate = kwargs["evaluate"]
else:
evaluate = False
if element.vector is None:
_slice = ""
else:
size = None
try:
size = len(element.vector)
            except TypeError:  # vector has no defined length
pass
if size == 1:
_slice = ""
else:
_slice = self.dump_element(element.vector, evaluate=evaluate)
return self.dumps_port(
element.direction, element.name, _slice, **kwargs
)
def gen_HDLIntegerConstant(self, element, **kwargs):
"""Generate an integer constant."""
# check for format
if "radix" in kwargs:
radix = kwargs["radix"]
elif "radix" in element.optional_args:
radix = element.optional_args["radix"]
else:
radix = "b"
# check for size
if "size" in kwargs:
size = kwargs["size"]
else:
size = element.size
if "no_size" in kwargs:
no_size = kwargs.pop("no_size")
elif "no_size" in element.optional_args:
no_size = element.optional_args.pop("no_size")
else:
no_size = False
if no_size is False:
return self.dumps_vector(element.evaluate(), size, radix)
else:
return str(element.evaluate())
def gen_HDLMacro(self, element, **kwargs):
"""Generate a define."""
value = self.dump_element(element.value, **kwargs)
return self.dumps_define(element.name, value)
def gen_HDLSignal(self, element, **kwargs):
"""Generate signals."""
if element.sig_type == "comb":
st = "wire"
elif element.sig_type == "reg":
st = "reg"
elif element.sig_type == "const":
st = "localparam"
elif element.sig_type == "var":
st = "integer"
if element.vector is None:
_slice = ""
else:
size = None
try:
size = len(element.vector)
            except TypeError:  # vector has no defined length
pass
if size == 1:
_slice = ""
else:
_slice = self.dump_element(element.vector)
sig_decl = True
if "assign" in kwargs:
sig_decl = not kwargs["assign"]
if sig_decl:
ret_str = "{} {} {}".format(st, _slice, element.name)
if element.sig_type == "const":
ret_str += " = {};".format(
self.dump_element(element.default_val, format="int")
)
else:
ret_str += ";"
return ret_str
else:
return "{}".format(element.name)
def gen_HDLSignalSlice(self, element, **kwargs):
"""Generate sliced signal."""
kwargs.update({"assign": True})
signal = self.dump_element(element.signal, **kwargs)
slic = self.dump_element(element.vector, simplify_extents=True)
return "{}{}".format(signal, slic)
def gen_HDLVectorDescriptor(self, element, **kwargs):
"""Generate a vector slice."""
if "evaluate" in kwargs:
evaluate = kwargs.pop("evaluate")
else:
evaluate = False
if evaluate is True:
extents = element.evaluate()
else:
extents = element.get_bounds()
kwargs["part_select"] = element.part_select
if element.part_select is True:
extents = (extents[0], element.part_select_length)
return self.dumps_extents(*extents, **kwargs)
def gen_HDLAssignment(self, element, **kwargs):
"""Generate assignments."""
assign_lhs = self.dump_element(element.signal, assign=True)
if element.signal.get_sig_type() == "var":
assign_rhs = self.dump_element(
element.value, no_size=True, assign=True
)
else:
assign_rhs = self.dump_element(
element.value, radix="h", assign=True
)
assign_type = element.get_assignment_type()
if "no_semi" in kwargs:
no_semicolon = kwargs["no_semi"]
else:
no_semicolon = False
if assign_type == "parallel":
assign_str = "assign {} = {}".format(assign_lhs, assign_rhs)
elif assign_type == "series":
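            # In Verilog, "<=" is the non-blocking assignment operator and
            # "=" the blocking one.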
if element.assign_type == "block":
assign_op = "<="
else:
assign_op = "="
assign_str = "{} {} {}".format(assign_lhs, assign_op, assign_rhs)
if no_semicolon is False:
assign_str += ";"
return assign_str
def gen_HDLExpression(self, element, **kwargs):
"""Get an expression."""
# insert more stuff into kwargs
kwargs.update(element.optional_args)
if "format" in kwargs:
fmt = kwargs.pop("format")
if fmt == "int":
try:
return self.dump_element(
HDLIntegerConstant(
element.evaluate(), element.size, **kwargs
)
)
except KeyError:
# tried to evaluate an expression which contains
# symbols.
pass
# replace operators
ret_str = element.dumps().replace("=<", "<=")
# replace constants
ret_str = ret_str.replace("True", "1")
ret_str = ret_str.replace("False", "0")
return ret_str
def gen_HDLModuleParameter(self, element, **kwargs):
"""Generate Module parameter."""
ret_str = "parameter "
if element.ptype is not None:
ret_str += element.ptype + " "
sep_str = ","
if "last" in kwargs:
if kwargs["last"] is True:
sep_str = ""
ret_str += element.name + " "
if element.value is not None:
ret_str += "= {}{}".format(
self.dump_element(element.value), sep_str
)
else:
ret_str += sep_str
return ret_str
def gen_HDLConcatenation(self, element, **kwargs):
"""Generate concatenation."""
# force constants to be dumped with size
if element.direction == "rl":
items = element.items[::-1]
else:
items = element.items
ret_str = "{{{}}}".format(
", ".join([self.dump_element(x, format="int") for x in items])
)
return ret_str
def gen_HDLIfElse(self, element, **kwargs):
"""Generate if-else statement."""
ret_str = "if ({}) begin\n".format(
self.dump_element(element.condition)
)
ret_str += self.dump_element(element.if_scope)
ret_str += "\nend"
if len(element.else_scope):
ret_str += "\nelse begin\n"
ret_str += self.dump_element(element.else_scope)
ret_str += "\nend"
return ret_str
def gen_HDLIfExp(self, element, **kwargs):
"""Generate one-line if-else."""
ret_str = "({}) ? {} : {}".format(
self.dump_element(element.condition),
self.dump_element(element.if_value),
self.dump_element(element.else_value),
)
return ret_str
@indent
def gen_HDLScope(self, element, **kwargs):
"""Generate several assignments."""
test = [(x, self.dump_element(x)) for x in element]
for x in test:
if x[1] is None:
raise Exception(x[0])
return "\n".join([self.dump_element(x) for x in element])
def gen_HDLSensitivityDescriptor(self, element, **kwargs):
"""Generate always sensitivity elements."""
if element.sens_type == "rise":
sens_str = "posedge"
elif element.sens_type == "fall":
sens_str = "negedge"
elif element.sens_type == "both":
raise ValueError("not synthesizable")
elif element.sens_type == "any":
return "*"
return sens_str + " {}".format(
self.dump_element(element.signal, assign=True)
)
def gen_HDLSensitivityList(self, element, **kwargs):
"""Generate always sensitivity list."""
return "@({})".format(
",".join([self.dump_element(x) for x in element])
)
def gen_HDLSequentialBlock(self, element, **kwargs):
"""Generate always block."""
if element.sens_list is None:
sens_list = ""
else:
sens_list = self.dump_element(element.sens_list)
ret_str = "always {} begin\n".format(sens_list)
ret_str += self.dump_element(element.scope)
ret_str += "\nend\n"
return ret_str
def gen_HDLModule(self, element, **kwargs):
"""Generate module declaration."""
ret_str = ""
for constant in element.constants:
ret_str += self.dump_element(constant) + "\n"
ret_str += "module {}\n".format(element.name)
if len(element.params) > 0:
ret_str += "#(\n"
ret_str += ",\n".join(
[self.dump_element(p, last=True) for p in element.params]
)
ret_str += "\n)\n"
ret_str += "(\n"
if len(element.ports) > 0:
ret_str += ",\n".join(
[self.dump_element(p, last=True) for p in element.ports]
)
ret_str += "\n);\n"
# dump only declaration
if "decl_only" in kwargs:
if kwargs["decl_only"] is True:
return ret_str
ret_str += self.dump_element(element.scope)
ret_str += "\nendmodule\n"
return ret_str
def gen_HDLInstanceStatement(self, element, **kwargs):
"""Generate instance statement."""
return self.dump_element(element.instance)
def gen_HDLInstance(self, element, **kwargs):
"""Generate instance."""
if element.params:
params = " #(\n"
params += ",\n".join(
[
f".{param_name} ({param_conn})"
for param_name, param_conn in element.params.items()
]
)
params += ")\n"
else:
params = ""
ret_str = f"{element.itype.name}{params} {element.name} (\n"
ret_str += ",\n".join(
[
f".{port_name} ({port_conn})"
for port_name, port_conn in element.ports.items()
]
)
ret_str += ");\n"
return ret_str
def gen_HDLComment(self, element, **kwargs):
"""Generate single line comments."""
return "//{}".format(element.text)
def gen_HDLMultiLineComment(self, element, **kwargs):
"""Generate multi line comments."""
return "/* {} */".format(element.text)
def gen_HDLSwitch(self, element, **kwargs):
"""Generate case."""
ret_str = "case ({})\n".format(
self.dump_element(element.switch, evaluate=False)
)
        for case in element.cases.values():
ret_str += self.dump_element(case, evaluate=False)
ret_str += "\nendcase\n"
return ret_str
def gen_HDLCase(self, element, **kwargs):
"""Generate one case."""
ret_str = (
self.dump_element(element.case_value, format="int", radix="h")
+ ": begin\n"
)
ret_str += self.dump_element(element.scope)
ret_str += "\nend\n"
return ret_str
def gen_HDLForLoop(self, element, **kwargs):
"""Generate For Loop."""
ret_str = "for ({}; {}; {}) begin\n".format(
self.dump_element(element.init, no_semi=True),
self.dump_element(element.stop),
self.dump_element(element.after, no_semi=True),
)
ret_str += self.dump_element(element.scope)
ret_str += "\nend\n"
return ret_str
def gen_HDLMacroValue(self, element, **kwargs):
"""Generate macro usage in code."""
ret_str = "`{}".format(element.name)
return ret_str
@staticmethod
def dumps_define(name, value):
"""Dump a define macro."""
return "`define {} {}".format(name, value)
@staticmethod
def dumps_vector(value, width, radix="h"):
"""Dump a verilog constant."""
if radix not in VerilogCodeGenerator.VERILOG_CONSTANT_RADIX:
raise ValueError("illegal constant type")
# check if width can hold value
if value > int(math.pow(2, float(width)) - 1):
            raise ValueError(
                "requested width cannot hold passed value; "
                "at least {} bits needed".format(
                    int(math.ceil(math.log2(value))) + 1
                )
            )
ret_str = "{}'{}".format(str(width), radix)
if radix == "h":
fmt_str = "{{:0{}X}}".format(int(width / 4))
ret_str += fmt_str.format(value)
elif radix == "d":
ret_str += str(value)
elif radix == "b":
fmt_str = "{{:0{}b}}".format(width)
ret_str += fmt_str.format(value)
return ret_str
@staticmethod
def dumps_extents(left, right, simplify_extents=True, part_select=False):
"""Dump a vector extents."""
if repr(left) != repr(right) or simplify_extents is False:
if part_select is False:
return "[{}:{}]".format(left, right)
else:
return "[{}{}:{}]".format(
left, "+" if right > 0 else "-", right
)
else:
return "[{}]".format(left)
@staticmethod
def dumps_port(direction, name, extents, last=False):
"""Dump port declation."""
if direction not in VerilogCodeGenerator.VERILOG_PORT_DIRECTION:
raise ValueError('illegal port direction: "{}"'.format(direction))
if direction == "in":
port_direction = "input"
elif direction == "out":
port_direction = "output"
else:
port_direction = direction
if extents is not None:
ext_str = extents
else:
ext_str = ""
ret_str = "{} {} {}".format(port_direction, ext_str, name)
if last is False:
ret_str += ","
return ret_str
def gen_str(self, element, **kwargs):
"""Dump strings."""
return element
|
thiriel/maps
|
refs/heads/master
|
venv/lib/python2.7/site-packages/django/conf/locale/de/formats.py
|
329
|
# -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = 'j. F Y'
TIME_FORMAT = 'H:i:s'
DATETIME_FORMAT = 'j. F Y H:i:s'
YEAR_MONTH_FORMAT = 'F Y'
MONTH_DAY_FORMAT = 'j. F'
SHORT_DATE_FORMAT = 'd.m.Y'
SHORT_DATETIME_FORMAT = 'd.m.Y H:i:s'
FIRST_DAY_OF_WEEK = 1 # Monday
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
DATE_INPUT_FORMATS = (
'%d.%m.%Y', '%d.%m.%y', # '25.10.2006', '25.10.06'
'%Y-%m-%d', '%y-%m-%d', # '2006-10-25', '06-10-25'
# '%d. %B %Y', '%d. %b. %Y', # '25. October 2006', '25. Oct. 2006'
)
TIME_INPUT_FORMATS = (
'%H:%M:%S', # '14:30:59'
'%H:%M', # '14:30'
)
DATETIME_INPUT_FORMATS = (
'%d.%m.%Y %H:%M:%S', # '25.10.2006 14:30:59'
'%d.%m.%Y %H:%M', # '25.10.2006 14:30'
'%d.%m.%Y', # '25.10.2006'
'%Y-%m-%d %H:%M:%S', # '2006-10-25 14:30:59'
'%Y-%m-%d %H:%M', # '2006-10-25 14:30'
'%Y-%m-%d', # '2006-10-25'
)
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '.'
NUMBER_GROUPING = 3
|
40223249-1/2015cd_midterm
|
refs/heads/master
|
static/Brython3.1.1-20150328-091302/Lib/sre_parse.py
|
630
|
#
# Secret Labs' Regular Expression Engine
#
# convert re-style regular expression to sre pattern
#
# Copyright (c) 1998-2001 by Secret Labs AB. All rights reserved.
#
# See the sre.py file for information on usage and redistribution.
#
"""Internal support module for sre"""
# XXX: show string offset and offending character for all errors
import sys
from sre_constants import *
from _sre import MAXREPEAT
SPECIAL_CHARS = ".\\[{()*+?^$|"
REPEAT_CHARS = "*+?{"
DIGITS = set("0123456789")
OCTDIGITS = set("01234567")
HEXDIGITS = set("0123456789abcdefABCDEF")
WHITESPACE = set(" \t\n\r\v\f")
ESCAPES = {
r"\a": (LITERAL, ord("\a")),
r"\b": (LITERAL, ord("\b")),
r"\f": (LITERAL, ord("\f")),
r"\n": (LITERAL, ord("\n")),
r"\r": (LITERAL, ord("\r")),
r"\t": (LITERAL, ord("\t")),
r"\v": (LITERAL, ord("\v")),
r"\\": (LITERAL, ord("\\"))
}
CATEGORIES = {
r"\A": (AT, AT_BEGINNING_STRING), # start of string
r"\b": (AT, AT_BOUNDARY),
r"\B": (AT, AT_NON_BOUNDARY),
r"\d": (IN, [(CATEGORY, CATEGORY_DIGIT)]),
r"\D": (IN, [(CATEGORY, CATEGORY_NOT_DIGIT)]),
r"\s": (IN, [(CATEGORY, CATEGORY_SPACE)]),
r"\S": (IN, [(CATEGORY, CATEGORY_NOT_SPACE)]),
r"\w": (IN, [(CATEGORY, CATEGORY_WORD)]),
r"\W": (IN, [(CATEGORY, CATEGORY_NOT_WORD)]),
r"\Z": (AT, AT_END_STRING), # end of string
}
FLAGS = {
# standard flags
"i": SRE_FLAG_IGNORECASE,
"L": SRE_FLAG_LOCALE,
"m": SRE_FLAG_MULTILINE,
"s": SRE_FLAG_DOTALL,
"x": SRE_FLAG_VERBOSE,
# extensions
"a": SRE_FLAG_ASCII,
"t": SRE_FLAG_TEMPLATE,
"u": SRE_FLAG_UNICODE,
}
class Pattern:
# master pattern object. keeps track of global attributes
def __init__(self):
self.flags = 0
self.open = []
self.groups = 1
self.groupdict = {}
def opengroup(self, name=None):
gid = self.groups
self.groups = gid + 1
if name is not None:
ogid = self.groupdict.get(name, None)
if ogid is not None:
raise error("redefinition of group name %s as group %d; "
"was group %d" % (repr(name), gid, ogid))
self.groupdict[name] = gid
self.open.append(gid)
return gid
def closegroup(self, gid):
self.open.remove(gid)
def checkgroup(self, gid):
return gid < self.groups and gid not in self.open
class SubPattern:
# a subpattern, in intermediate form
def __init__(self, pattern, data=None):
self.pattern = pattern
if data is None:
data = []
self.data = data
self.width = None
def __iter__(self):
return iter(self.data)
def dump(self, level=0):
nl = 1
seqtypes = (tuple, list)
for op, av in self.data:
print(level*" " + op, end=' '); nl = 0
if op == "in":
# member sublanguage
print(); nl = 1
for op, a in av:
print((level+1)*" " + op, a)
elif op == "branch":
print(); nl = 1
i = 0
for a in av[1]:
if i > 0:
print(level*" " + "or")
a.dump(level+1); nl = 1
i = i + 1
elif isinstance(av, seqtypes):
for a in av:
if isinstance(a, SubPattern):
if not nl: print()
a.dump(level+1); nl = 1
else:
print(a, end=' ') ; nl = 0
else:
print(av, end=' ') ; nl = 0
if not nl: print()
def __repr__(self):
return repr(self.data)
def __len__(self):
return len(self.data)
def __delitem__(self, index):
del self.data[index]
def __getitem__(self, index):
if isinstance(index, slice):
return SubPattern(self.pattern, self.data[index])
return self.data[index]
def __setitem__(self, index, code):
self.data[index] = code
def insert(self, index, code):
self.data.insert(index, code)
def append(self, code):
self.data.append(code)
def getwidth(self):
# determine the width (min, max) for this subpattern
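        # e.g. for "a|bc" this returns (1, 2); for "a{2,4}" it returns (2, 4)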
if self.width:
return self.width
lo = hi = 0
UNITCODES = (ANY, RANGE, IN, LITERAL, NOT_LITERAL, CATEGORY)
REPEATCODES = (MIN_REPEAT, MAX_REPEAT)
for op, av in self.data:
if op is BRANCH:
i = sys.maxsize
j = 0
for av in av[1]:
l, h = av.getwidth()
i = min(i, l)
j = max(j, h)
lo = lo + i
hi = hi + j
elif op is CALL:
i, j = av.getwidth()
lo = lo + i
hi = hi + j
elif op is SUBPATTERN:
i, j = av[1].getwidth()
lo = lo + i
hi = hi + j
elif op in REPEATCODES:
i, j = av[2].getwidth()
lo = lo + int(i) * av[0]
hi = hi + int(j) * av[1]
elif op in UNITCODES:
lo = lo + 1
hi = hi + 1
elif op == SUCCESS:
break
self.width = int(min(lo, sys.maxsize)), int(min(hi, sys.maxsize))
return self.width
class Tokenizer:
def __init__(self, string):
self.istext = isinstance(string, str)
self.string = string
self.index = 0
self.__next()
def __next(self):
if self.index >= len(self.string):
self.next = None
return
char = self.string[self.index:self.index+1]
        # Special case for the str8, since indexing returns an integer
# XXX This is only needed for test_bug_926075 in test_re.py
if char and not self.istext:
char = chr(char[0])
if char == "\\":
try:
c = self.string[self.index + 1]
except IndexError:
raise error("bogus escape (end of line)")
if not self.istext:
c = chr(c)
char = char + c
self.index = self.index + len(char)
self.next = char
def match(self, char, skip=1):
if char == self.next:
if skip:
self.__next()
return 1
return 0
def get(self):
this = self.next
self.__next()
return this
def getwhile(self, n, charset):
result = ''
for _ in range(n):
c = self.next
if c not in charset:
break
result += c
self.__next()
return result
def tell(self):
return self.index, self.next
def seek(self, index):
self.index, self.next = index
def isident(char):
return "a" <= char <= "z" or "A" <= char <= "Z" or char == "_"
def isdigit(char):
return "0" <= char <= "9"
def isname(name):
# check that group name is a valid string
if not isident(name[0]):
return False
for char in name[1:]:
if not isident(char) and not isdigit(char):
return False
return True
def _class_escape(source, escape):
# handle escape code inside character class
code = ESCAPES.get(escape)
if code:
return code
code = CATEGORIES.get(escape)
if code and code[0] == IN:
return code
try:
c = escape[1:2]
if c == "x":
# hexadecimal escape (exactly two digits)
escape += source.getwhile(2, HEXDIGITS)
if len(escape) != 4:
raise ValueError
return LITERAL, int(escape[2:], 16) & 0xff
elif c == "u" and source.istext:
# unicode escape (exactly four digits)
escape += source.getwhile(4, HEXDIGITS)
if len(escape) != 6:
raise ValueError
return LITERAL, int(escape[2:], 16)
elif c == "U" and source.istext:
# unicode escape (exactly eight digits)
escape += source.getwhile(8, HEXDIGITS)
if len(escape) != 10:
raise ValueError
c = int(escape[2:], 16)
chr(c) # raise ValueError for invalid code
return LITERAL, c
elif c in OCTDIGITS:
# octal escape (up to three digits)
escape += source.getwhile(2, OCTDIGITS)
return LITERAL, int(escape[1:], 8) & 0xff
elif c in DIGITS:
raise ValueError
if len(escape) == 2:
return LITERAL, ord(escape[1])
except ValueError:
pass
raise error("bogus escape: %s" % repr(escape))
def _escape(source, escape, state):
# handle escape code in expression
code = CATEGORIES.get(escape)
if code:
return code
code = ESCAPES.get(escape)
if code:
return code
try:
c = escape[1:2]
if c == "x":
# hexadecimal escape
escape += source.getwhile(2, HEXDIGITS)
if len(escape) != 4:
raise ValueError
return LITERAL, int(escape[2:], 16) & 0xff
elif c == "u" and source.istext:
# unicode escape (exactly four digits)
escape += source.getwhile(4, HEXDIGITS)
if len(escape) != 6:
raise ValueError
return LITERAL, int(escape[2:], 16)
elif c == "U" and source.istext:
# unicode escape (exactly eight digits)
escape += source.getwhile(8, HEXDIGITS)
if len(escape) != 10:
raise ValueError
c = int(escape[2:], 16)
chr(c) # raise ValueError for invalid code
return LITERAL, c
elif c == "0":
# octal escape
escape += source.getwhile(2, OCTDIGITS)
return LITERAL, int(escape[1:], 8) & 0xff
elif c in DIGITS:
# octal escape *or* decimal group reference (sigh)
if source.next in DIGITS:
escape = escape + source.get()
if (escape[1] in OCTDIGITS and escape[2] in OCTDIGITS and
source.next in OCTDIGITS):
# got three octal digits; this is an octal escape
escape = escape + source.get()
return LITERAL, int(escape[1:], 8) & 0xff
# not an octal escape, so this is a group reference
group = int(escape[1:])
if group < state.groups:
if not state.checkgroup(group):
raise error("cannot refer to open group")
return GROUPREF, group
raise ValueError
if len(escape) == 2:
return LITERAL, ord(escape[1])
except ValueError:
pass
raise error("bogus escape: %s" % repr(escape))
def _parse_sub(source, state, nested=1):
# parse an alternation: a|b|c
items = []
itemsappend = items.append
sourcematch = source.match
while 1:
itemsappend(_parse(source, state))
if sourcematch("|"):
continue
if not nested:
break
if not source.next or sourcematch(")", 0):
break
else:
raise error("pattern not properly closed")
if len(items) == 1:
return items[0]
subpattern = SubPattern(state)
subpatternappend = subpattern.append
# check if all items share a common prefix
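    # e.g. for "ab|ac" the shared LITERAL "a" is hoisted out of the branch;
    # the remaining one-character alternatives are then collapsed into a
    # character set by the check below.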
while 1:
prefix = None
for item in items:
if not item:
break
if prefix is None:
prefix = item[0]
elif item[0] != prefix:
break
else:
# all subitems start with a common "prefix".
# move it out of the branch
for item in items:
del item[0]
subpatternappend(prefix)
continue # check next one
break
# check if the branch can be replaced by a character set
for item in items:
if len(item) != 1 or item[0][0] != LITERAL:
break
else:
# we can store this as a character set instead of a
# branch (the compiler may optimize this even more)
set = []
setappend = set.append
for item in items:
setappend(item[0])
subpatternappend((IN, set))
return subpattern
subpattern.append((BRANCH, (None, items)))
return subpattern
def _parse_sub_cond(source, state, condgroup):
item_yes = _parse(source, state)
if source.match("|"):
item_no = _parse(source, state)
if source.match("|"):
raise error("conditional backref with more than two branches")
else:
item_no = None
if source.next and not source.match(")", 0):
raise error("pattern not properly closed")
subpattern = SubPattern(state)
subpattern.append((GROUPREF_EXISTS, (condgroup, item_yes, item_no)))
return subpattern
_PATTERNENDERS = set("|)")
_ASSERTCHARS = set("=!<")
_LOOKBEHINDASSERTCHARS = set("=!")
_REPEATCODES = set([MIN_REPEAT, MAX_REPEAT])
def _parse(source, state):
# parse a simple pattern
subpattern = SubPattern(state)
# precompute constants into local variables
subpatternappend = subpattern.append
sourceget = source.get
sourcematch = source.match
_len = len
PATTERNENDERS = _PATTERNENDERS
ASSERTCHARS = _ASSERTCHARS
LOOKBEHINDASSERTCHARS = _LOOKBEHINDASSERTCHARS
REPEATCODES = _REPEATCODES
while 1:
if source.next in PATTERNENDERS:
break # end of subpattern
this = sourceget()
if this is None:
break # end of pattern
if state.flags & SRE_FLAG_VERBOSE:
# skip whitespace and comments
if this in WHITESPACE:
continue
if this == "#":
while 1:
this = sourceget()
if this in (None, "\n"):
break
continue
if this and this[0] not in SPECIAL_CHARS:
subpatternappend((LITERAL, ord(this)))
elif this == "[":
# character set
set = []
setappend = set.append
## if sourcematch(":"):
## pass # handle character classes
if sourcematch("^"):
setappend((NEGATE, None))
# check remaining characters
start = set[:]
while 1:
this = sourceget()
if this == "]" and set != start:
break
elif this and this[0] == "\\":
code1 = _class_escape(source, this)
elif this:
code1 = LITERAL, ord(this)
else:
raise error("unexpected end of regular expression")
if sourcematch("-"):
# potential range
this = sourceget()
if this == "]":
if code1[0] is IN:
code1 = code1[1][0]
setappend(code1)
setappend((LITERAL, ord("-")))
break
elif this:
if this[0] == "\\":
code2 = _class_escape(source, this)
else:
code2 = LITERAL, ord(this)
if code1[0] != LITERAL or code2[0] != LITERAL:
raise error("bad character range")
lo = code1[1]
hi = code2[1]
if hi < lo:
raise error("bad character range")
setappend((RANGE, (lo, hi)))
else:
raise error("unexpected end of regular expression")
else:
if code1[0] is IN:
code1 = code1[1][0]
setappend(code1)
# XXX: <fl> should move set optimization to compiler!
if _len(set)==1 and set[0][0] is LITERAL:
subpatternappend(set[0]) # optimization
elif _len(set)==2 and set[0][0] is NEGATE and set[1][0] is LITERAL:
subpatternappend((NOT_LITERAL, set[1][1])) # optimization
else:
# XXX: <fl> should add charmap optimization here
subpatternappend((IN, set))
elif this and this[0] in REPEAT_CHARS:
# repeat previous item
if this == "?":
min, max = 0, 1
elif this == "*":
min, max = 0, MAXREPEAT
elif this == "+":
min, max = 1, MAXREPEAT
elif this == "{":
if source.next == "}":
subpatternappend((LITERAL, ord(this)))
continue
here = source.tell()
min, max = 0, MAXREPEAT
lo = hi = ""
while source.next in DIGITS:
lo = lo + source.get()
if sourcematch(","):
while source.next in DIGITS:
hi = hi + sourceget()
else:
hi = lo
if not sourcematch("}"):
subpatternappend((LITERAL, ord(this)))
source.seek(here)
continue
if lo:
min = int(lo)
if min >= MAXREPEAT:
raise OverflowError("the repetition number is too large")
if hi:
max = int(hi)
if max >= MAXREPEAT:
raise OverflowError("the repetition number is too large")
if max < min:
raise error("bad repeat interval")
else:
raise error("not supported")
# figure out which item to repeat
if subpattern:
item = subpattern[-1:]
else:
item = None
if not item or (_len(item) == 1 and item[0][0] == AT):
raise error("nothing to repeat")
if item[0][0] in REPEATCODES:
raise error("multiple repeat")
if sourcematch("?"):
subpattern[-1] = (MIN_REPEAT, (min, max, item))
else:
subpattern[-1] = (MAX_REPEAT, (min, max, item))
elif this == ".":
subpatternappend((ANY, None))
elif this == "(":
group = 1
name = None
condgroup = None
if sourcematch("?"):
group = 0
# options
if sourcematch("P"):
# python extensions
if sourcematch("<"):
# named group: skip forward to end of name
name = ""
while 1:
char = sourceget()
if char is None:
raise error("unterminated name")
if char == ">":
break
name = name + char
group = 1
if not name:
raise error("missing group name")
if not isname(name):
raise error("bad character in group name")
elif sourcematch("="):
# named backreference
name = ""
while 1:
char = sourceget()
if char is None:
raise error("unterminated name")
if char == ")":
break
name = name + char
if not name:
raise error("missing group name")
if not isname(name):
raise error("bad character in group name")
gid = state.groupdict.get(name)
if gid is None:
raise error("unknown group name")
subpatternappend((GROUPREF, gid))
continue
else:
char = sourceget()
if char is None:
raise error("unexpected end of pattern")
raise error("unknown specifier: ?P%s" % char)
elif sourcematch(":"):
# non-capturing group
group = 2
elif sourcematch("#"):
# comment
while 1:
if source.next is None or source.next == ")":
break
sourceget()
if not sourcematch(")"):
raise error("unbalanced parenthesis")
continue
elif source.next in ASSERTCHARS:
# lookahead assertions
char = sourceget()
dir = 1
if char == "<":
if source.next not in LOOKBEHINDASSERTCHARS:
raise error("syntax error")
dir = -1 # lookbehind
char = sourceget()
p = _parse_sub(source, state)
if not sourcematch(")"):
raise error("unbalanced parenthesis")
if char == "=":
subpatternappend((ASSERT, (dir, p)))
else:
subpatternappend((ASSERT_NOT, (dir, p)))
continue
elif sourcematch("("):
# conditional backreference group
condname = ""
while 1:
char = sourceget()
if char is None:
raise error("unterminated name")
if char == ")":
break
condname = condname + char
group = 2
if not condname:
raise error("missing group name")
if isname(condname):
condgroup = state.groupdict.get(condname)
if condgroup is None:
raise error("unknown group name")
else:
try:
condgroup = int(condname)
except ValueError:
raise error("bad character in group name")
else:
# flags
                    if source.next not in FLAGS:
raise error("unexpected end of pattern")
while source.next in FLAGS:
state.flags = state.flags | FLAGS[sourceget()]
if group:
# parse group contents
if group == 2:
# anonymous group
group = None
else:
group = state.opengroup(name)
if condgroup:
p = _parse_sub_cond(source, state, condgroup)
else:
p = _parse_sub(source, state)
if not sourcematch(")"):
raise error("unbalanced parenthesis")
if group is not None:
state.closegroup(group)
subpatternappend((SUBPATTERN, (group, p)))
else:
while 1:
char = sourceget()
if char is None:
raise error("unexpected end of pattern")
if char == ")":
break
raise error("unknown extension")
elif this == "^":
subpatternappend((AT, AT_BEGINNING))
elif this == "$":
subpattern.append((AT, AT_END))
elif this and this[0] == "\\":
code = _escape(source, this, state)
subpatternappend(code)
else:
raise error("parser error")
return subpattern
def fix_flags(src, flags):
# Check and fix flags according to the type of pattern (str or bytes)
if isinstance(src, str):
if not flags & SRE_FLAG_ASCII:
flags |= SRE_FLAG_UNICODE
elif flags & SRE_FLAG_UNICODE:
raise ValueError("ASCII and UNICODE flags are incompatible")
else:
if flags & SRE_FLAG_UNICODE:
raise ValueError("can't use UNICODE flag with a bytes pattern")
return flags
def parse(str, flags=0, pattern=None):
# parse 're' pattern into list of (opcode, argument) tuples
source = Tokenizer(str)
if pattern is None:
pattern = Pattern()
pattern.flags = flags
pattern.str = str
p = _parse_sub(source, pattern, 0)
p.pattern.flags = fix_flags(str, p.pattern.flags)
tail = source.get()
if tail == ")":
raise error("unbalanced parenthesis")
elif tail:
raise error("bogus characters at end of regular expression")
if flags & SRE_FLAG_DEBUG:
p.dump()
if not (flags & SRE_FLAG_VERBOSE) and p.pattern.flags & SRE_FLAG_VERBOSE:
# the VERBOSE flag was switched on inside the pattern. to be
# on the safe side, we'll parse the whole thing again...
return parse(str, p.pattern.flags)
return p
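# Illustrative results (a sketch, not exhaustive): parse("a|b") collapses the
# alternation into a character set, [(IN, [(LITERAL, 97), (LITERAL, 98)])],
# while parse("a{2,4}") yields [(MAX_REPEAT, (2, 4, [(LITERAL, 97)]))].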
def parse_template(source, pattern):
# parse 're' replacement string into list of literals and
# group references
s = Tokenizer(source)
sget = s.get
p = []
a = p.append
def literal(literal, p=p, pappend=a):
if p and p[-1][0] is LITERAL:
p[-1] = LITERAL, p[-1][1] + literal
else:
pappend((LITERAL, literal))
    # The tokenizer implicitly decodes bytes as latin-1, so chr builds the
    # correct character for both str and bytes templates.
    makechar = chr
while 1:
this = sget()
if this is None:
break # end of replacement string
if this and this[0] == "\\":
# group
c = this[1:2]
if c == "g":
name = ""
if s.match("<"):
while 1:
char = sget()
if char is None:
raise error("unterminated group name")
if char == ">":
break
name = name + char
if not name:
raise error("missing group name")
try:
index = int(name)
if index < 0:
raise error("negative group number")
except ValueError:
if not isname(name):
raise error("bad character in group name")
try:
index = pattern.groupindex[name]
except KeyError:
raise IndexError("unknown group name")
a((MARK, index))
elif c == "0":
if s.next in OCTDIGITS:
this = this + sget()
if s.next in OCTDIGITS:
this = this + sget()
literal(makechar(int(this[1:], 8) & 0xff))
elif c in DIGITS:
isoctal = False
if s.next in DIGITS:
this = this + sget()
if (c in OCTDIGITS and this[2] in OCTDIGITS and
s.next in OCTDIGITS):
this = this + sget()
isoctal = True
literal(makechar(int(this[1:], 8) & 0xff))
if not isoctal:
a((MARK, int(this[1:])))
else:
try:
this = makechar(ESCAPES[this][1])
except KeyError:
pass
literal(this)
else:
literal(this)
# convert template to groups and literals lists
i = 0
groups = []
groupsappend = groups.append
literals = [None] * len(p)
if isinstance(source, str):
encode = lambda x: x
else:
# The tokenizer implicitly decodes bytes objects as latin-1, we must
# therefore re-encode the final representation.
encode = lambda x: x.encode('latin-1')
for c, s in p:
if c is MARK:
groupsappend((i, s))
# literal[i] is already None
else:
literals[i] = encode(s)
i = i + 1
return groups, literals
def expand_template(template, match):
g = match.group
sep = match.string[:0]
groups, literals = template
literals = literals[:]
try:
for index, group in groups:
literals[index] = s = g(group)
if s is None:
raise error("unmatched group")
except IndexError:
raise error("invalid group reference")
return sep.join(literals)
|
gear/motifwalk
|
refs/heads/master
|
research/src/mane/datamanager.py
|
1
|
"""data manager
"""
# Coding: utf-8
# File name: datamanager.py
# Created: 2016-07-19
# Description: Data loading and F-score evaluation utilities for the model.
## v0.0: File created. Add argparse
import csv
import random
def read_edges_from_txt(filepath, sep=" ", one_origin=False):
edges = []
with open(filepath) as f:
for line in f:
n1, n2 = line.strip().split(sep)
edges.append((int(n1), int(n2)))
if one_origin:
edges = [(i-1, j-1) for i, j in edges]
return edges
def read_coms_from_txt(filepath, sep=" ", one_origin=False, index=True):
coms = []
with open(filepath) as f:
for i, line in enumerate(f):
if index:
n, c = line.strip().split(sep)
else:
c = line.strip()
n = i
coms.append((int(n), int(c)))
if one_origin:
coms = [(i-1, j-1) for i, j in coms]
return coms
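# Assumed input formats (illustrative): an edge file holds one "u v" pair per
# line, and a community file holds "node com" per line (or just "com" per line
# when index=False, in which case the line number is used as the node id).
# E.g. a file containing the lines "0 1" and "1 2" reads as [(0, 1), (1, 2)].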
class DataManager(object):
def __init__(self, edges, labels):
"""
:param edges: list of tuple (node_id1, node_id2)
:param labels: list of tuple (node_id, com_id)
"""
self.edges = edges
self.ground_truth = {}
for i, c in labels:
self.ground_truth.setdefault(i, set())
self.ground_truth[i].add(c)
self.num_nodes = len(self.ground_truth)
self.num_edges = len(self.edges)
self.unique_ground_truth = self.get_unique_ground_truth()
def calculate_fscore(self, list_of_com):
"""
:param list_of_com: list of community ids
:return: micro_fscore, macro_fscore
"""
tp = {}
fn = {}
fp = {}
coms = set()
for n, c in enumerate(list_of_com):
if n not in self.ground_truth:
continue
tp.setdefault(c, 0)
coms.add(c)
if c in self.ground_truth[n]:
tp[c] += 1
else:
fp.setdefault(c, 0)
fp[c] += 1
d = list(self.ground_truth[n] - set([c]))[0]
fn.setdefault(d, 0)
fn[d] += 1
coms.add(d)
tp_sum = sum(tp.values())
fp_sum = sum(fp.values())
fn_sum = sum(fn.values())
micro_prec = tp_sum / float(tp_sum + fp_sum)
micro_recall = tp_sum / float(tp_sum + fn_sum)
micro_fscore = 2 * micro_prec * micro_recall / (micro_prec + micro_recall)
macro_fscores = []
for c in coms:
macro_prec = tp.get(c, 0) / float(tp.get(c, 0) + fp.get(c, 0) + 10e-9)
macro_recall = tp.get(c, 0) / float(tp.get(c, 0) + fn.get(c, 0) + 10e-9)
score = macro_prec * macro_recall / (macro_prec + macro_recall + 10e-9)
macro_fscores.append(score)
macro_fscore = 2 * sum(macro_fscores) / float(len(macro_fscores))
return micro_fscore, macro_fscore
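    # Worked example (illustrative): with ground truth {0: {0}, 1: {0}, 2: {1}}
    # and predictions [0, 1, 1], we get tp = {0: 1, 1: 1}, fp = {1: 1} and
    # fn = {0: 1}, so micro precision = recall = 2/3 and micro F-score = 2/3.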
def get_unique_ground_truth(self):
gt = {}
for n, c in self.ground_truth.items():
gt[n] = list(c)[0]
return gt
def export_edges(self, path, sep=" ", one_origin=False):
edges = self.edges
if one_origin:
edges = [(i+1, j+1) for i, j in edges]
with open(path, "w") as f:
writer = csv.writer(f, delimiter=sep)
writer.writerows(edges)
def export_ground_truth(self, path, sep=" "):
with open(path, "w") as f:
writer = csv.writer(f, delimiter=sep)
writer.writerows(self.unique_ground_truth.items())
def sample_training_set(self, sample_ratio):
n_sample = int(self.num_nodes * sample_ratio)
nodes = list(range(self.num_nodes))
random.shuffle(nodes)
training_set = {}
for n in nodes[:n_sample]:
c = self.unique_ground_truth[n]
training_set[n] = c
return training_set
def export_training_set(self, path, sample_ratio, sep=" "):
training_set = self.sample_training_set(sample_ratio)
with open(path, "w") as f:
writer = csv.writer(f, delimiter=sep)
            # a dict iterates over its keys only; write (node, community) rows
            writer.writerows(training_set.items())
|
nelmiux/CarnotKE
|
refs/heads/master
|
jyhton/lib-python/2.7/encodings/cp1253.py
|
593
|
""" Python Character Mapping Codec cp1253 generated from 'MAPPINGS/VENDORS/MICSFT/WINDOWS/CP1253.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
def encode(self,input,errors='strict'):
return codecs.charmap_encode(input,errors,encoding_table)
def decode(self,input,errors='strict'):
return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
return codecs.charmap_encode(input,self.errors,encoding_table)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
def decode(self, input, final=False):
return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='cp1253',
encode=Codec().encode,
decode=Codec().decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
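# Usage sketch (assuming this module is registered as the 'cp1253' codec):
#   u'\u0391'.encode('cp1253') -> '\xc1'
#   '\xc1'.decode('cp1253') -> u'\u0391' (GREEK CAPITAL LETTER ALPHA)
# per the decoding table below and the encoding table built from it.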
### Decoding Table
decoding_table = (
u'\x00' # 0x00 -> NULL
u'\x01' # 0x01 -> START OF HEADING
u'\x02' # 0x02 -> START OF TEXT
u'\x03' # 0x03 -> END OF TEXT
u'\x04' # 0x04 -> END OF TRANSMISSION
u'\x05' # 0x05 -> ENQUIRY
u'\x06' # 0x06 -> ACKNOWLEDGE
u'\x07' # 0x07 -> BELL
u'\x08' # 0x08 -> BACKSPACE
u'\t' # 0x09 -> HORIZONTAL TABULATION
u'\n' # 0x0A -> LINE FEED
u'\x0b' # 0x0B -> VERTICAL TABULATION
u'\x0c' # 0x0C -> FORM FEED
u'\r' # 0x0D -> CARRIAGE RETURN
u'\x0e' # 0x0E -> SHIFT OUT
u'\x0f' # 0x0F -> SHIFT IN
u'\x10' # 0x10 -> DATA LINK ESCAPE
u'\x11' # 0x11 -> DEVICE CONTROL ONE
u'\x12' # 0x12 -> DEVICE CONTROL TWO
u'\x13' # 0x13 -> DEVICE CONTROL THREE
u'\x14' # 0x14 -> DEVICE CONTROL FOUR
u'\x15' # 0x15 -> NEGATIVE ACKNOWLEDGE
u'\x16' # 0x16 -> SYNCHRONOUS IDLE
u'\x17' # 0x17 -> END OF TRANSMISSION BLOCK
u'\x18' # 0x18 -> CANCEL
u'\x19' # 0x19 -> END OF MEDIUM
u'\x1a' # 0x1A -> SUBSTITUTE
u'\x1b' # 0x1B -> ESCAPE
u'\x1c' # 0x1C -> FILE SEPARATOR
u'\x1d' # 0x1D -> GROUP SEPARATOR
u'\x1e' # 0x1E -> RECORD SEPARATOR
u'\x1f' # 0x1F -> UNIT SEPARATOR
u' ' # 0x20 -> SPACE
u'!' # 0x21 -> EXCLAMATION MARK
u'"' # 0x22 -> QUOTATION MARK
u'#' # 0x23 -> NUMBER SIGN
u'$' # 0x24 -> DOLLAR SIGN
u'%' # 0x25 -> PERCENT SIGN
u'&' # 0x26 -> AMPERSAND
u"'" # 0x27 -> APOSTROPHE
u'(' # 0x28 -> LEFT PARENTHESIS
u')' # 0x29 -> RIGHT PARENTHESIS
u'*' # 0x2A -> ASTERISK
u'+' # 0x2B -> PLUS SIGN
u',' # 0x2C -> COMMA
u'-' # 0x2D -> HYPHEN-MINUS
u'.' # 0x2E -> FULL STOP
u'/' # 0x2F -> SOLIDUS
u'0' # 0x30 -> DIGIT ZERO
u'1' # 0x31 -> DIGIT ONE
u'2' # 0x32 -> DIGIT TWO
u'3' # 0x33 -> DIGIT THREE
u'4' # 0x34 -> DIGIT FOUR
u'5' # 0x35 -> DIGIT FIVE
u'6' # 0x36 -> DIGIT SIX
u'7' # 0x37 -> DIGIT SEVEN
u'8' # 0x38 -> DIGIT EIGHT
u'9' # 0x39 -> DIGIT NINE
u':' # 0x3A -> COLON
u';' # 0x3B -> SEMICOLON
u'<' # 0x3C -> LESS-THAN SIGN
u'=' # 0x3D -> EQUALS SIGN
u'>' # 0x3E -> GREATER-THAN SIGN
u'?' # 0x3F -> QUESTION MARK
u'@' # 0x40 -> COMMERCIAL AT
u'A' # 0x41 -> LATIN CAPITAL LETTER A
u'B' # 0x42 -> LATIN CAPITAL LETTER B
u'C' # 0x43 -> LATIN CAPITAL LETTER C
u'D' # 0x44 -> LATIN CAPITAL LETTER D
u'E' # 0x45 -> LATIN CAPITAL LETTER E
u'F' # 0x46 -> LATIN CAPITAL LETTER F
u'G' # 0x47 -> LATIN CAPITAL LETTER G
u'H' # 0x48 -> LATIN CAPITAL LETTER H
u'I' # 0x49 -> LATIN CAPITAL LETTER I
u'J' # 0x4A -> LATIN CAPITAL LETTER J
u'K' # 0x4B -> LATIN CAPITAL LETTER K
u'L' # 0x4C -> LATIN CAPITAL LETTER L
u'M' # 0x4D -> LATIN CAPITAL LETTER M
u'N' # 0x4E -> LATIN CAPITAL LETTER N
u'O' # 0x4F -> LATIN CAPITAL LETTER O
u'P' # 0x50 -> LATIN CAPITAL LETTER P
u'Q' # 0x51 -> LATIN CAPITAL LETTER Q
u'R' # 0x52 -> LATIN CAPITAL LETTER R
u'S' # 0x53 -> LATIN CAPITAL LETTER S
u'T' # 0x54 -> LATIN CAPITAL LETTER T
u'U' # 0x55 -> LATIN CAPITAL LETTER U
u'V' # 0x56 -> LATIN CAPITAL LETTER V
u'W' # 0x57 -> LATIN CAPITAL LETTER W
u'X' # 0x58 -> LATIN CAPITAL LETTER X
u'Y' # 0x59 -> LATIN CAPITAL LETTER Y
u'Z' # 0x5A -> LATIN CAPITAL LETTER Z
u'[' # 0x5B -> LEFT SQUARE BRACKET
u'\\' # 0x5C -> REVERSE SOLIDUS
u']' # 0x5D -> RIGHT SQUARE BRACKET
u'^' # 0x5E -> CIRCUMFLEX ACCENT
u'_' # 0x5F -> LOW LINE
u'`' # 0x60 -> GRAVE ACCENT
u'a' # 0x61 -> LATIN SMALL LETTER A
u'b' # 0x62 -> LATIN SMALL LETTER B
u'c' # 0x63 -> LATIN SMALL LETTER C
u'd' # 0x64 -> LATIN SMALL LETTER D
u'e' # 0x65 -> LATIN SMALL LETTER E
u'f' # 0x66 -> LATIN SMALL LETTER F
u'g' # 0x67 -> LATIN SMALL LETTER G
u'h' # 0x68 -> LATIN SMALL LETTER H
u'i' # 0x69 -> LATIN SMALL LETTER I
u'j' # 0x6A -> LATIN SMALL LETTER J
u'k' # 0x6B -> LATIN SMALL LETTER K
u'l' # 0x6C -> LATIN SMALL LETTER L
u'm' # 0x6D -> LATIN SMALL LETTER M
u'n' # 0x6E -> LATIN SMALL LETTER N
u'o' # 0x6F -> LATIN SMALL LETTER O
u'p' # 0x70 -> LATIN SMALL LETTER P
u'q' # 0x71 -> LATIN SMALL LETTER Q
u'r' # 0x72 -> LATIN SMALL LETTER R
u's' # 0x73 -> LATIN SMALL LETTER S
u't' # 0x74 -> LATIN SMALL LETTER T
u'u' # 0x75 -> LATIN SMALL LETTER U
u'v' # 0x76 -> LATIN SMALL LETTER V
u'w' # 0x77 -> LATIN SMALL LETTER W
u'x' # 0x78 -> LATIN SMALL LETTER X
u'y' # 0x79 -> LATIN SMALL LETTER Y
u'z' # 0x7A -> LATIN SMALL LETTER Z
u'{' # 0x7B -> LEFT CURLY BRACKET
u'|' # 0x7C -> VERTICAL LINE
u'}' # 0x7D -> RIGHT CURLY BRACKET
u'~' # 0x7E -> TILDE
u'\x7f' # 0x7F -> DELETE
u'\u20ac' # 0x80 -> EURO SIGN
u'\ufffe' # 0x81 -> UNDEFINED
u'\u201a' # 0x82 -> SINGLE LOW-9 QUOTATION MARK
u'\u0192' # 0x83 -> LATIN SMALL LETTER F WITH HOOK
u'\u201e' # 0x84 -> DOUBLE LOW-9 QUOTATION MARK
u'\u2026' # 0x85 -> HORIZONTAL ELLIPSIS
u'\u2020' # 0x86 -> DAGGER
u'\u2021' # 0x87 -> DOUBLE DAGGER
u'\ufffe' # 0x88 -> UNDEFINED
u'\u2030' # 0x89 -> PER MILLE SIGN
u'\ufffe' # 0x8A -> UNDEFINED
u'\u2039' # 0x8B -> SINGLE LEFT-POINTING ANGLE QUOTATION MARK
u'\ufffe' # 0x8C -> UNDEFINED
u'\ufffe' # 0x8D -> UNDEFINED
u'\ufffe' # 0x8E -> UNDEFINED
u'\ufffe' # 0x8F -> UNDEFINED
u'\ufffe' # 0x90 -> UNDEFINED
u'\u2018' # 0x91 -> LEFT SINGLE QUOTATION MARK
u'\u2019' # 0x92 -> RIGHT SINGLE QUOTATION MARK
u'\u201c' # 0x93 -> LEFT DOUBLE QUOTATION MARK
u'\u201d' # 0x94 -> RIGHT DOUBLE QUOTATION MARK
u'\u2022' # 0x95 -> BULLET
u'\u2013' # 0x96 -> EN DASH
u'\u2014' # 0x97 -> EM DASH
u'\ufffe' # 0x98 -> UNDEFINED
u'\u2122' # 0x99 -> TRADE MARK SIGN
u'\ufffe' # 0x9A -> UNDEFINED
u'\u203a' # 0x9B -> SINGLE RIGHT-POINTING ANGLE QUOTATION MARK
u'\ufffe' # 0x9C -> UNDEFINED
u'\ufffe' # 0x9D -> UNDEFINED
u'\ufffe' # 0x9E -> UNDEFINED
u'\ufffe' # 0x9F -> UNDEFINED
u'\xa0' # 0xA0 -> NO-BREAK SPACE
u'\u0385' # 0xA1 -> GREEK DIALYTIKA TONOS
u'\u0386' # 0xA2 -> GREEK CAPITAL LETTER ALPHA WITH TONOS
u'\xa3' # 0xA3 -> POUND SIGN
u'\xa4' # 0xA4 -> CURRENCY SIGN
u'\xa5' # 0xA5 -> YEN SIGN
u'\xa6' # 0xA6 -> BROKEN BAR
u'\xa7' # 0xA7 -> SECTION SIGN
u'\xa8' # 0xA8 -> DIAERESIS
u'\xa9' # 0xA9 -> COPYRIGHT SIGN
u'\ufffe' # 0xAA -> UNDEFINED
u'\xab' # 0xAB -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\xac' # 0xAC -> NOT SIGN
u'\xad' # 0xAD -> SOFT HYPHEN
u'\xae' # 0xAE -> REGISTERED SIGN
u'\u2015' # 0xAF -> HORIZONTAL BAR
u'\xb0' # 0xB0 -> DEGREE SIGN
u'\xb1' # 0xB1 -> PLUS-MINUS SIGN
u'\xb2' # 0xB2 -> SUPERSCRIPT TWO
u'\xb3' # 0xB3 -> SUPERSCRIPT THREE
u'\u0384' # 0xB4 -> GREEK TONOS
u'\xb5' # 0xB5 -> MICRO SIGN
u'\xb6' # 0xB6 -> PILCROW SIGN
u'\xb7' # 0xB7 -> MIDDLE DOT
u'\u0388' # 0xB8 -> GREEK CAPITAL LETTER EPSILON WITH TONOS
u'\u0389' # 0xB9 -> GREEK CAPITAL LETTER ETA WITH TONOS
u'\u038a' # 0xBA -> GREEK CAPITAL LETTER IOTA WITH TONOS
u'\xbb' # 0xBB -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\u038c' # 0xBC -> GREEK CAPITAL LETTER OMICRON WITH TONOS
u'\xbd' # 0xBD -> VULGAR FRACTION ONE HALF
u'\u038e' # 0xBE -> GREEK CAPITAL LETTER UPSILON WITH TONOS
u'\u038f' # 0xBF -> GREEK CAPITAL LETTER OMEGA WITH TONOS
u'\u0390' # 0xC0 -> GREEK SMALL LETTER IOTA WITH DIALYTIKA AND TONOS
u'\u0391' # 0xC1 -> GREEK CAPITAL LETTER ALPHA
u'\u0392' # 0xC2 -> GREEK CAPITAL LETTER BETA
u'\u0393' # 0xC3 -> GREEK CAPITAL LETTER GAMMA
u'\u0394' # 0xC4 -> GREEK CAPITAL LETTER DELTA
u'\u0395' # 0xC5 -> GREEK CAPITAL LETTER EPSILON
u'\u0396' # 0xC6 -> GREEK CAPITAL LETTER ZETA
u'\u0397' # 0xC7 -> GREEK CAPITAL LETTER ETA
u'\u0398' # 0xC8 -> GREEK CAPITAL LETTER THETA
u'\u0399' # 0xC9 -> GREEK CAPITAL LETTER IOTA
u'\u039a' # 0xCA -> GREEK CAPITAL LETTER KAPPA
u'\u039b' # 0xCB -> GREEK CAPITAL LETTER LAMDA
u'\u039c' # 0xCC -> GREEK CAPITAL LETTER MU
u'\u039d' # 0xCD -> GREEK CAPITAL LETTER NU
u'\u039e' # 0xCE -> GREEK CAPITAL LETTER XI
u'\u039f' # 0xCF -> GREEK CAPITAL LETTER OMICRON
u'\u03a0' # 0xD0 -> GREEK CAPITAL LETTER PI
u'\u03a1' # 0xD1 -> GREEK CAPITAL LETTER RHO
u'\ufffe' # 0xD2 -> UNDEFINED
u'\u03a3' # 0xD3 -> GREEK CAPITAL LETTER SIGMA
u'\u03a4' # 0xD4 -> GREEK CAPITAL LETTER TAU
u'\u03a5' # 0xD5 -> GREEK CAPITAL LETTER UPSILON
u'\u03a6' # 0xD6 -> GREEK CAPITAL LETTER PHI
u'\u03a7' # 0xD7 -> GREEK CAPITAL LETTER CHI
u'\u03a8' # 0xD8 -> GREEK CAPITAL LETTER PSI
u'\u03a9' # 0xD9 -> GREEK CAPITAL LETTER OMEGA
u'\u03aa' # 0xDA -> GREEK CAPITAL LETTER IOTA WITH DIALYTIKA
u'\u03ab' # 0xDB -> GREEK CAPITAL LETTER UPSILON WITH DIALYTIKA
u'\u03ac' # 0xDC -> GREEK SMALL LETTER ALPHA WITH TONOS
u'\u03ad' # 0xDD -> GREEK SMALL LETTER EPSILON WITH TONOS
u'\u03ae' # 0xDE -> GREEK SMALL LETTER ETA WITH TONOS
u'\u03af' # 0xDF -> GREEK SMALL LETTER IOTA WITH TONOS
u'\u03b0' # 0xE0 -> GREEK SMALL LETTER UPSILON WITH DIALYTIKA AND TONOS
u'\u03b1' # 0xE1 -> GREEK SMALL LETTER ALPHA
u'\u03b2' # 0xE2 -> GREEK SMALL LETTER BETA
u'\u03b3' # 0xE3 -> GREEK SMALL LETTER GAMMA
u'\u03b4' # 0xE4 -> GREEK SMALL LETTER DELTA
u'\u03b5' # 0xE5 -> GREEK SMALL LETTER EPSILON
u'\u03b6' # 0xE6 -> GREEK SMALL LETTER ZETA
u'\u03b7' # 0xE7 -> GREEK SMALL LETTER ETA
u'\u03b8' # 0xE8 -> GREEK SMALL LETTER THETA
u'\u03b9' # 0xE9 -> GREEK SMALL LETTER IOTA
u'\u03ba' # 0xEA -> GREEK SMALL LETTER KAPPA
u'\u03bb' # 0xEB -> GREEK SMALL LETTER LAMDA
u'\u03bc' # 0xEC -> GREEK SMALL LETTER MU
u'\u03bd' # 0xED -> GREEK SMALL LETTER NU
u'\u03be' # 0xEE -> GREEK SMALL LETTER XI
u'\u03bf' # 0xEF -> GREEK SMALL LETTER OMICRON
u'\u03c0' # 0xF0 -> GREEK SMALL LETTER PI
u'\u03c1' # 0xF1 -> GREEK SMALL LETTER RHO
u'\u03c2' # 0xF2 -> GREEK SMALL LETTER FINAL SIGMA
u'\u03c3' # 0xF3 -> GREEK SMALL LETTER SIGMA
u'\u03c4' # 0xF4 -> GREEK SMALL LETTER TAU
u'\u03c5' # 0xF5 -> GREEK SMALL LETTER UPSILON
u'\u03c6' # 0xF6 -> GREEK SMALL LETTER PHI
u'\u03c7' # 0xF7 -> GREEK SMALL LETTER CHI
u'\u03c8' # 0xF8 -> GREEK SMALL LETTER PSI
u'\u03c9' # 0xF9 -> GREEK SMALL LETTER OMEGA
u'\u03ca' # 0xFA -> GREEK SMALL LETTER IOTA WITH DIALYTIKA
u'\u03cb' # 0xFB -> GREEK SMALL LETTER UPSILON WITH DIALYTIKA
u'\u03cc' # 0xFC -> GREEK SMALL LETTER OMICRON WITH TONOS
u'\u03cd' # 0xFD -> GREEK SMALL LETTER UPSILON WITH TONOS
u'\u03ce' # 0xFE -> GREEK SMALL LETTER OMEGA WITH TONOS
u'\ufffe' # 0xFF -> UNDEFINED
)
### Encoding table
encoding_table=codecs.charmap_build(decoding_table)
|
geminy/aidear
|
refs/heads/master
|
oss/qt/qt-everywhere-opensource-src-5.9.0/qtwebengine/src/3rdparty/chromium/third_party/skia/make.py
|
1
|
# Copyright 2011 Google Inc.
#
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# "Makefile" replacement to build skia for Windows.
# More info at https://skia.org.
#
# Some usage examples:
# make clean
# make dm
# make bench BUILDTYPE=Release
# make gm GYP_DEFINES='skia_gpu=0' BUILDTYPE=Release
# make all
import os
import shutil
import sys
BUILDTYPE = os.environ.get('BUILDTYPE', 'Debug')
# special targets
TARGET_ALL = 'all'
TARGET_CLEAN = 'clean'
TARGET_DEFAULT = 'most'
TARGET_GYP = 'gyp'
SCRIPT_DIR = os.path.abspath(os.path.dirname(__file__))
OUT_SUBDIR = os.environ.get('SKIA_OUT', 'out')
GYP_SUBDIR = 'gyp'
# Simple functions that report what they are doing, and exit(1) on failure.
def cd(path):
print '> cd %s' % path
if not os.path.isdir(path):
print 'directory %s does not exist' % path
sys.exit(1)
os.chdir(path)
def rmtree(path):
print '> rmtree %s' % path
shutil.rmtree(path, ignore_errors=True)
def runcommand(command):
print '> %s' % command
if os.system(command):
sys.exit(1)
def MakeClean():
"""Cross-platform "make clean" operation."""
cd(SCRIPT_DIR)
rmtree(OUT_SUBDIR)
def CheckWindowsEnvironment():
"""For Windows: check environment variables needed for command-line build.
    If those environment variables are missing, print guidance on how to set
    them up (locating vcvars32.bat when possible), then exit; the function
    returns only when the environment is already configured.
    """
# If we already have the proper environment variables, nothing to do here.
if os.environ.get('DevEnvDir'):
return
print ('\nCould not find Visual Studio environment variables.'
'\nPerhaps you have not yet run vcvars32.bat as described at'
'\nhttp://msdn.microsoft.com/en-us/library/f2ccy3wt.aspx ?')
found_path = None
try:
possible_path = os.path.abspath(os.path.join(
os.environ['VS100COMNTOOLS'], os.path.pardir, os.path.pardir,
'VC', 'bin', 'vcvars32.bat'))
if os.path.exists(possible_path):
found_path = possible_path
except KeyError:
pass
if found_path:
print '\nIt looks like you can run that script at:\n%s' % found_path
else:
print '\nUnable to find vcvars32.bat on your system.'
sys.exit(1)
def MakeWindows(targets):
"""For Windows: build as appropriate for the command line arguments.
parameters:
targets: build targets as a list of strings
"""
if os.environ.get('CHROME_HEADLESS', '0') != '1':
# TODO(epoger): I'm not sure if this is needed for ninja builds.
CheckWindowsEnvironment()
# Run gyp_skia to prepare Visual Studio projects.
cd(SCRIPT_DIR)
runcommand('python gyp_skia --no-parallel -G config=%s' % BUILDTYPE)
# We already built the gypfiles...
while TARGET_GYP in targets:
targets.remove(TARGET_GYP)
# And call ninja to do the work!
if targets:
runcommand('ninja -C %s %s' % (
os.path.join(OUT_SUBDIR, BUILDTYPE), ' '.join(targets)))
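# For example (illustrative), "make dm BUILDTYPE=Release" on Windows runs:
#   python gyp_skia --no-parallel -G config=Release
#   ninja -C out\Release dm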
def Make(args):
"""Main function.
parameters:
args: command line arguments as a list of strings
"""
# handle any variable-setting parameters or special targets
global BUILDTYPE
# if no targets were specified at all, make default target
if not args:
args = [TARGET_DEFAULT]
targets = []
for arg in args:
# If user requests "make all", chain to our explicitly-declared
# "everything" target. See
# https://code.google.com/p/skia/issues/detail?id=932 ("gyp
# automatically creates "all" target on some build flavors but not
# others")
if arg == TARGET_ALL:
targets.append('everything')
elif arg == TARGET_CLEAN:
MakeClean()
elif arg.startswith('BUILDTYPE='):
BUILDTYPE = arg[10:]
elif arg.startswith('GYP_DEFINES='):
os.environ['GYP_DEFINES'] = arg[12:]
else:
targets.append(arg)
# if there are no remaining targets, we're done
if not targets:
sys.exit(0)
# dispatch to appropriate Make<Platform>() variant.
if os.name == 'nt':
MakeWindows(targets)
sys.exit(0)
elif os.name == 'posix':
if sys.platform == 'darwin':
print ('Mac developers should not run this script; see '
'https://skia.org/user/quick/macos')
sys.exit(1)
elif sys.platform == 'cygwin':
print ('Windows development on Cygwin is not currently supported; '
'see https://skia.org/user/quick/windows')
sys.exit(1)
else:
print ('Unix developers should not run this script; see '
'https://skia.org/user/quick/linux')
sys.exit(1)
else:
print 'unknown platform (os.name=%s, sys.platform=%s); see %s' % (
os.name, sys.platform, 'https://skia.org/user/quick')
sys.exit(1)
sys.exit(0)
# main()
Make(sys.argv[1:])
|
alex/django-old
|
refs/heads/master
|
django/template/smartif.py
|
331
|
"""
Parser and utilities for the smart 'if' tag
"""
import operator
# Using a simple top down parser, as described here:
# http://effbot.org/zone/simple-top-down-parsing.htm.
# 'led' = left denotation
# 'nud' = null denotation
# 'bp' = binding power (left = lbp, right = rbp)
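# A minimal usage sketch (illustrative): IfParser turns a token list into an
# evaluable tree, e.g.
#   tree = IfParser(["a", "==", "b", "or", "not", "c"]).parse()
#   tree.eval({})  # -> False: ("a" == "b") or (not "c"), via Literal.eval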
class TokenBase(object):
"""
Base class for operators and literals, mainly for debugging and for throwing
syntax errors.
"""
id = None # node/token type name
value = None # used by literals
first = second = None # used by tree nodes
def nud(self, parser):
# Null denotation - called in prefix context
raise parser.error_class(
"Not expecting '%s' in this position in if tag." % self.id
)
def led(self, left, parser):
# Left denotation - called in infix context
raise parser.error_class(
"Not expecting '%s' as infix operator in if tag." % self.id
)
def display(self):
"""
Returns what to display in error messages for this node
"""
return self.id
def __repr__(self):
out = [str(x) for x in [self.id, self.first, self.second] if x is not None]
return "(" + " ".join(out) + ")"
def infix(bp, func):
"""
Creates an infix operator, given a binding power and a function that
evaluates the node
"""
class Operator(TokenBase):
lbp = bp
def led(self, left, parser):
self.first = left
self.second = parser.expression(bp)
return self
def eval(self, context):
try:
return func(context, self.first, self.second)
except Exception:
# Templates shouldn't throw exceptions when rendering. We are
# most likely to get exceptions for things like {% if foo in bar
# %} where 'bar' does not support 'in', so default to False
return False
return Operator
def prefix(bp, func):
"""
Creates a prefix operator, given a binding power and a function that
evaluates the node.
"""
class Operator(TokenBase):
lbp = bp
def nud(self, parser):
self.first = parser.expression(bp)
self.second = None
return self
def eval(self, context):
try:
return func(context, self.first)
except Exception:
return False
return Operator
# Operator precedence follows Python.
# NB - we can get slightly more accurate syntax error messages by not using the
# same object for '==' and '='.
# We defer variable evaluation to the lambda to ensure that terms are
# lazily evaluated using Python's boolean parsing logic.
OPERATORS = {
'or': infix(6, lambda context, x, y: x.eval(context) or y.eval(context)),
'and': infix(7, lambda context, x, y: x.eval(context) and y.eval(context)),
'not': prefix(8, lambda context, x: not x.eval(context)),
'in': infix(9, lambda context, x, y: x.eval(context) in y.eval(context)),
'not in': infix(9, lambda context, x, y: x.eval(context) not in y.eval(context)),
'=': infix(10, lambda context, x, y: x.eval(context) == y.eval(context)),
'==': infix(10, lambda context, x, y: x.eval(context) == y.eval(context)),
'!=': infix(10, lambda context, x, y: x.eval(context) != y.eval(context)),
'>': infix(10, lambda context, x, y: x.eval(context) > y.eval(context)),
'>=': infix(10, lambda context, x, y: x.eval(context) >= y.eval(context)),
'<': infix(10, lambda context, x, y: x.eval(context) < y.eval(context)),
'<=': infix(10, lambda context, x, y: x.eval(context) <= y.eval(context)),
}
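# Precedence sketch (illustrative): with these binding powers,
# ["a", "or", "not", "b", "and", "c"] parses as (or a (and (not b) c)),
# since 'and' (7) binds tighter than 'or' (6) and 'not' (8) tighter still.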
# Assign 'id' to each:
for key, op in OPERATORS.items():
op.id = key
class Literal(TokenBase):
"""
A basic self-resolvable object similar to a Django template variable.
"""
# IfParser uses Literal in create_var, but TemplateIfParser overrides
# create_var so that a proper implementation that actually resolves
# variables, filters etc is used.
id = "literal"
lbp = 0
def __init__(self, value):
self.value = value
def display(self):
return repr(self.value)
def nud(self, parser):
return self
def eval(self, context):
return self.value
def __repr__(self):
return "(%s %r)" % (self.id, self.value)
class EndToken(TokenBase):
lbp = 0
def nud(self, parser):
raise parser.error_class("Unexpected end of expression in if tag.")
EndToken = EndToken()
class IfParser(object):
error_class = ValueError
def __init__(self, tokens):
        # pre-pass necessary to turn the 'not', 'in' token pair into the
        # single 'not in' token
l = len(tokens)
mapped_tokens = []
i = 0
while i < l:
token = tokens[i]
if token == "not" and i + 1 < l and tokens[i+1] == "in":
token = "not in"
i += 1 # skip 'in'
mapped_tokens.append(self.translate_token(token))
i += 1
self.tokens = mapped_tokens
self.pos = 0
self.current_token = self.next()
def translate_token(self, token):
try:
op = OPERATORS[token]
except (KeyError, TypeError):
return self.create_var(token)
else:
return op()
def next(self):
if self.pos >= len(self.tokens):
return EndToken
else:
retval = self.tokens[self.pos]
self.pos += 1
return retval
def parse(self):
retval = self.expression()
# Check that we have exhausted all the tokens
if self.current_token is not EndToken:
raise self.error_class("Unused '%s' at end of if expression." %
self.current_token.display())
return retval
def expression(self, rbp=0):
t = self.current_token
self.current_token = self.next()
left = t.nud(self)
while rbp < self.current_token.lbp:
t = self.current_token
self.current_token = self.next()
left = t.led(left, self)
return left
def create_var(self, value):
return Literal(value)
|
trishnaguha/ansible
|
refs/heads/devel
|
lib/ansible/modules/network/f5/bigip_trunk.py
|
14
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright: (c) 2017, F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'certified'}
DOCUMENTATION = r'''
---
module: bigip_trunk
short_description: Manage trunks on a BIG-IP
description:
- Manages trunks on a BIG-IP.
version_added: 2.6
options:
name:
description:
- Specifies the name of the trunk.
required: True
interfaces:
description:
- The interfaces that are part of the trunk.
- To clear the list of interfaces, specify an empty list.
description:
description:
- Description of the trunk.
version_added: 2.7
link_selection_policy:
description:
- Specifies, once the trunk is configured, the policy that the trunk uses to determine
which member link (interface) can handle new traffic.
      - When creating a new trunk, if this value is not specified, the default is C(auto).
- When C(auto), specifies that the system automatically determines which interfaces
can handle new traffic. For the C(auto) option, the member links must all be the
same media type and speed.
- When C(maximum-bandwidth), specifies that the system determines which interfaces
can handle new traffic based on the members' maximum bandwidth.
choices:
- auto
- maximum-bandwidth
frame_distribution_hash:
description:
- Specifies the basis for the hash that the system uses as the frame distribution
algorithm. The system uses the resulting hash to determine which interface to
use for forwarding traffic.
- When creating a new trunk, if this parameter is not specified, the default is
C(source-destination-ip).
- When C(source-destination-mac), specifies that the system bases the hash on the
combined MAC addresses of the source and the destination.
- When C(destination-mac), specifies that the system bases the hash on the MAC
address of the destination.
- When C(source-destination-ip), specifies that the system bases the hash on the
combined IP addresses of the source and the destination.
choices:
- destination-mac
- source-destination-ip
- source-destination-mac
lacp_enabled:
description:
- When C(yes), specifies that the system supports the link aggregation control
protocol (LACP), which monitors the trunk by exchanging control packets over
the member links to determine the health of the links.
- If LACP detects a failure in a member link, it removes the link from the link
aggregation.
- When creating a new trunk, if this parameter is not specified, LACP is C(no).
- LACP is disabled by default for backward compatibility. If this does not apply
to your network, we recommend that you enable LACP.
type: bool
lacp_mode:
description:
- Specifies the operation mode for link aggregation control protocol (LACP),
if LACP is enabled for the trunk.
- When creating a new trunk, if this parameter is not specified, the default
is C(active).
- When C(active), specifies that the system periodically sends control packets
regardless of whether the partner system has issued a request.
- When C(passive), specifies that the system sends control packets only when
the partner system has issued a request.
choices:
- active
- passive
lacp_timeout:
description:
- Specifies the rate at which the system sends the LACP control packets.
- When creating a new trunk, if this parameter is not specified, the default is
C(long).
- When C(long), specifies that the system sends an LACP control packet every 30 seconds.
      - When C(short), specifies that the system sends an LACP control packet every second.
choices:
- long
- short
qinq_ethertype:
description:
- Specifies the ether-type value used for the packets handled on this trunk when
it is a member in a QinQ vlan.
      - The ether-type can be set to any string containing a valid 16-bit hexadecimal
        number, or any of the well-known ether-types: C(0x8100), C(0x9100), or C(0x88a8).
- This parameter is not supported on Virtual Editions.
- You should always wrap this value in quotes to prevent Ansible from interpreting
the value as a literal hexadecimal number and converting it to an integer.
version_added: 2.7
state:
description:
- When C(present), ensures that the resource exists.
- When C(absent), ensures the resource is removed.
default: present
choices:
- present
- absent
extends_documentation_fragment: f5
author:
- Tim Rupp (@caphrim007)
- Wojciech Wypior (@wojtek0806)
'''
EXAMPLES = r'''
- name: Create a trunk on hardware
bigip_trunk:
name: trunk1
interfaces:
- 1.1
- 1.2
link_selection_policy: maximum-bandwidth
frame_distribution_hash: destination-mac
lacp_enabled: yes
lacp_mode: passive
lacp_timeout: short
provider:
password: secret
server: lb.mydomain.com
user: admin
delegate_to: localhost
'''
RETURN = r'''
lacp_mode:
description: Operation mode for LACP if the lacp option is enabled for the trunk.
returned: changed
type: str
sample: active
lacp_timeout:
description: Rate at which the system sends the LACP control packets.
returned: changed
type: str
sample: long
link_selection_policy:
description:
- LACP policy that the trunk uses to determine which member link (interface)
can handle new traffic.
returned: changed
type: str
sample: auto
frame_distribution_hash:
description: Hash that the system uses as the frame distribution algorithm.
returned: changed
type: str
sample: src-dst-ipport
lacp_enabled:
description: Whether the system supports the link aggregation control protocol (LACP) or not.
returned: changed
type: bool
sample: yes
interfaces:
description: Interfaces that are part of the trunk.
returned: changed
type: list
sample: ['int1', 'int2']
description:
description: Description of the trunk.
returned: changed
type: str
sample: My trunk
qinq_ethertype:
description: Ether-type value used for the packets handled on this trunk when it is a member in a QinQ vlan.
returned: changed
type: str
sample: 0x9100
'''
from ansible.module_utils.basic import AnsibleModule
try:
from library.module_utils.network.f5.bigip import F5RestClient
from library.module_utils.network.f5.common import F5ModuleError
from library.module_utils.network.f5.common import AnsibleF5Parameters
from library.module_utils.network.f5.common import cleanup_tokens
from library.module_utils.network.f5.common import exit_json
from library.module_utils.network.f5.common import fail_json
from library.module_utils.network.f5.common import f5_argument_spec
from library.module_utils.network.f5.compare import cmp_simple_list
except ImportError:
from ansible.module_utils.network.f5.bigip import F5RestClient
from ansible.module_utils.network.f5.common import F5ModuleError
from ansible.module_utils.network.f5.common import AnsibleF5Parameters
from ansible.module_utils.network.f5.common import cleanup_tokens
from ansible.module_utils.network.f5.common import exit_json
from ansible.module_utils.network.f5.common import fail_json
from ansible.module_utils.network.f5.common import f5_argument_spec
from ansible.module_utils.network.f5.compare import cmp_simple_list
class Parameters(AnsibleF5Parameters):
api_map = {
'lacpMode': 'lacp_mode',
'lacpTimeout': 'lacp_timeout',
'linkSelectPolicy': 'link_selection_policy',
'distributionHash': 'frame_distribution_hash',
'lacp': 'lacp_enabled',
'qinqEthertype': 'qinq_ethertype',
}
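    # Illustrative mapping: a device payload such as
    #   {'lacp': 'enabled', 'lacpMode': 'active', 'linkSelectPolicy': 'auto'}
    # surfaces through ApiParameters as lacp_enabled=True, lacp_mode='active'
    # and link_selection_policy='auto'.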
api_attributes = [
'lacp',
'lacpMode',
'lacpTimeout',
'linkSelectPolicy',
'distributionHash',
'interfaces',
'description',
'qinqEthertype',
]
returnables = [
'lacp_mode',
'lacp_timeout',
'link_selection_policy',
'frame_distribution_hash',
'lacp_enabled',
'interfaces',
'description',
'qinq_ethertype',
]
updatables = [
'lacp_mode',
'lacp_timeout',
'link_selection_policy',
'frame_distribution_hash',
'lacp_enabled',
'interfaces',
'description',
'qinq_ethertype',
]
class ApiParameters(Parameters):
@property
def lacp_enabled(self):
if self._values['lacp_enabled'] is None:
return None
if self._values['lacp_enabled'] == 'enabled':
return True
return False
@property
def interfaces(self):
if self._values['interfaces'] is None:
return None
result = list(set(self._values['interfaces']))
result.sort()
return result
class ModuleParameters(Parameters):
@property
def frame_distribution_hash(self):
if self._values['frame_distribution_hash'] is None:
return None
elif self._values['frame_distribution_hash'] == 'source-destination-ip':
return 'src-dst-ipport'
elif self._values['frame_distribution_hash'] == 'source-destination-mac':
return 'src-dst-mac'
elif self._values['frame_distribution_hash'] == 'destination-mac':
return 'dst-mac'
@property
def interfaces(self):
if self._values['interfaces'] is None:
return None
if len(self._values['interfaces']) == 1 and self._values['interfaces'][0] == '':
return ''
result = [str(x) for x in self._values['interfaces']]
result = list(set(result))
result.sort()
return result
class Changes(Parameters):
def to_return(self):
result = {}
try:
for returnable in self.returnables:
result[returnable] = getattr(self, returnable)
result = self._filter_params(result)
except Exception:
pass
return result
class UsableChanges(Changes):
@property
def lacp_enabled(self):
if self._values['lacp_enabled'] is None:
return None
if self._values['lacp_enabled']:
return 'enabled'
return 'disabled'
class ReportableChanges(Changes):
@property
def frame_distribution_hash(self):
if self._values['frame_distribution_hash'] is None:
return None
elif self._values['frame_distribution_hash'] == 'src-dst-ipport':
return 'source-destination-ip'
elif self._values['frame_distribution_hash'] == 'src-dst-mac':
return 'source-destination-mac'
elif self._values['frame_distribution_hash'] == 'dst-mac':
return 'destination-mac'
@property
def lacp_enabled(self):
if self._values['lacp_enabled'] is None:
return None
if self._values['lacp_enabled'] == 'enabled':
return True
return False
class Difference(object):
def __init__(self, want, have=None):
self.want = want
self.have = have
def compare(self, param):
try:
result = getattr(self, param)
return result
except AttributeError:
return self.__default(param)
def __default(self, param):
attr1 = getattr(self.want, param)
try:
attr2 = getattr(self.have, param)
if attr1 != attr2:
return attr1
except AttributeError:
return attr1
@property
def interfaces(self):
result = cmp_simple_list(self.want.interfaces, self.have.interfaces)
return result
class ModuleManager(object):
def __init__(self, *args, **kwargs):
self.module = kwargs.get('module', None)
self.client = kwargs.get('client', None)
self.want = ModuleParameters(params=self.module.params)
self.have = ApiParameters()
self.changes = UsableChanges()
def _set_changed_options(self):
changed = {}
for key in Parameters.returnables:
if getattr(self.want, key) is not None:
changed[key] = getattr(self.want, key)
if changed:
self.changes = UsableChanges(params=changed)
def _update_changed_options(self):
diff = Difference(self.want, self.have)
updatables = Parameters.updatables
changed = dict()
for k in updatables:
change = diff.compare(k)
if change is None:
continue
else:
if isinstance(change, dict):
changed.update(change)
else:
changed[k] = change
if changed:
self.changes = UsableChanges(params=changed)
return True
return False
def should_update(self):
result = self._update_changed_options()
if result:
return True
return False
def exec_module(self):
changed = False
result = dict()
state = self.want.state
if state == "present":
changed = self.present()
elif state == "absent":
changed = self.absent()
reportable = ReportableChanges(params=self.changes.to_return())
changes = reportable.to_return()
result.update(**changes)
result.update(dict(changed=changed))
self._announce_deprecations(result)
return result
def _announce_deprecations(self, result):
warnings = result.pop('__warnings', [])
for warning in warnings:
self.client.module.deprecate(
msg=warning['msg'],
version=warning['version']
)
def present(self):
if self.exists():
return self.update()
else:
return self.create()
def absent(self):
if self.exists():
return self.remove()
return False
def update(self):
self.have = self.read_current_from_device()
if not self.should_update():
return False
if self.module.check_mode:
return True
self.update_on_device()
return True
def remove(self):
if self.module.check_mode:
return True
self.remove_from_device()
if self.exists():
raise F5ModuleError("Failed to delete the resource.")
return True
def create(self):
if self.want.link_selection_policy is None:
self.want.update({'link_selection_policy': 'auto'})
if self.want.frame_distribution_hash is None:
self.want.update({'frame_distribution_hash': 'source-destination-ip'})
if self.want.lacp_enabled is None:
self.want.update({'lacp_enabled': False})
if self.want.lacp_mode is None:
self.want.update({'lacp_mode': 'active'})
if self.want.lacp_timeout is None:
self.want.update({'lacp_timeout': 'long'})
self._set_changed_options()
if self.module.check_mode:
return True
self.create_on_device()
return True
def exists(self):
uri = "https://{0}:{1}/mgmt/tm/net/trunk/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
self.want.name
)
resp = self.client.api.get(uri)
try:
response = resp.json()
except ValueError:
return False
if resp.status == 404 or 'code' in response and response['code'] == 404:
return False
return True
def create_on_device(self):
params = self.changes.api_params()
params['name'] = self.want.name
uri = "https://{0}:{1}/mgmt/tm/net/trunk/".format(
self.client.provider['server'],
self.client.provider['server_port'],
)
resp = self.client.api.post(uri, json=params)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] in [400, 403, 409]:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
def update_on_device(self):
params = self.changes.api_params()
uri = "https://{0}:{1}/mgmt/tm/net/trunk/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
self.want.name
)
resp = self.client.api.patch(uri, json=params)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] == 400:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
def remove_from_device(self):
uri = "https://{0}:{1}/mgmt/tm/net/trunk/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
self.want.name
)
resp = self.client.api.delete(uri)
if resp.status == 200:
return True
def read_current_from_device(self):
uri = "https://{0}:{1}/mgmt/tm/net/trunk/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
self.want.name
)
resp = self.client.api.get(uri)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] == 400:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
return ApiParameters(params=response)
class ArgumentSpec(object):
def __init__(self):
self.supports_check_mode = True
argument_spec = dict(
name=dict(required=True),
interfaces=dict(type='list'),
link_selection_policy=dict(
choices=['auto', 'maximum-bandwidth']
),
frame_distribution_hash=dict(
choices=['destination-mac', 'source-destination-ip', 'source-destination-mac']
),
lacp_enabled=dict(type='bool'),
lacp_mode=dict(choices=['active', 'passive']),
lacp_timeout=dict(choices=['short', 'long']),
description=dict(),
state=dict(
default='present',
choices=['absent', 'present']
),
qinq_ethertype=dict(type='raw'),
)
self.argument_spec = {}
self.argument_spec.update(f5_argument_spec)
self.argument_spec.update(argument_spec)
def main():
spec = ArgumentSpec()
module = AnsibleModule(
argument_spec=spec.argument_spec,
supports_check_mode=spec.supports_check_mode
)
client = F5RestClient(**module.params)
try:
mm = ModuleManager(module=module, client=client)
results = mm.exec_module()
cleanup_tokens(client)
exit_json(module, results, client)
except F5ModuleError as ex:
cleanup_tokens(client)
fail_json(module, ex, client)
if __name__ == '__main__':
main()
|
petrvanblokland/Xierpa3
|
refs/heads/master
|
xierpa3/adapters/dynamodbadapter.py
|
1
|
# -*- coding: UTF-8 -*-
# -----------------------------------------------------------------------------
# xierpa server
# Copyright (c) 2014+ buro@petr.com, www.petr.com, www.xierpa.com
#
# X I E R P A 3
# Distribution by the MIT License.
#
# -----------------------------------------------------------------------------
#
# dynamodbadapter.py
#
from xierpa3.adapters.adapter import Adapter
#from xierpa3.toolbox.database.dynamodb.dynamodbconnector import Connector
class Connector():
# @@@ Under development
pass
class DynamoDBAdapter(Adapter):
u"""
Wrapper around the DynamoDB Connector, using:
- Connector.getItem(id)
- Connector.saveItem(item)
- Connector.newItem(d)
"""
# @@@ Under development
def __init__(self):
Adapter.__init__(self)
def getItem(self, id):
return Connector.getItem(id)
def newItem(self, d=None):
return Connector.newItem(d)
def saveItem(self, item):
Connector.saveItem(item)
def getMessage(self, count):
return self.newArticle(text=u'English is not native. For corrections on disaster misspellings please contact buro (at) petr.com')
def getLogo(self, count):
return self.newArticle(url='http://petr.com/_images/contact.png')
if __name__ == "__main__":
pass
|
rafael-neri/bigbashview
|
refs/heads/master
|
usr/share/bigbashview3/ui/__init__.py
|
4
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2008 Wilson Pinto Júnior <wilson@openlanhouse.org>
# Copyright (C) 2011 Thomaz de Oliveira dos Reis <thor27@gmail.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
__all__ = ("gtk", "qt4", "base")
|
kool79/intellij-community
|
refs/heads/master
|
python/testData/inspections/PyMethodMayBeStaticInspection/property.py
|
83
|
__author__ = 'ktisha'
class C(object):
def __init__(self):
self._x = None
@property
def x(self):
"""I'm the 'x' property."""
return "property"
@x.setter
def x(self, value):
print "setter"
@x.deleter
def x(self):
print "deleter"
|
vmindru/ansible
|
refs/heads/devel
|
lib/ansible/modules/cloud/azure/azure_rm_postgresqldatabase_facts.py
|
22
|
#!/usr/bin/python
#
# Copyright (c) 2017 Zim Kalinowski, <zikalino@microsoft.com>
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: azure_rm_postgresqldatabase_facts
version_added: "2.7"
short_description: Get Azure PostgreSQL Database facts.
description:
- Get facts of PostgreSQL Database.
options:
resource_group:
description:
- The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal.
required: True
server_name:
description:
- The name of the server.
required: True
name:
description:
- The name of the database.
extends_documentation_fragment:
- azure
author:
- "Zim Kalinowski (@zikalino)"
'''
EXAMPLES = '''
- name: Get instance of PostgreSQL Database
azure_rm_postgresqldatabase_facts:
resource_group: resource_group_name
server_name: server_name
name: database_name
- name: List instances of PostgreSQL Database
azure_rm_postgresqldatabase_facts:
resource_group: resource_group_name
server_name: server_name
'''
RETURN = '''
databases:
description: A list of dict results where the key is the name of the PostgreSQL Database and the values are the facts for that PostgreSQL Database.
returned: always
type: complex
contains:
id:
description:
- Resource ID
returned: always
type: str
            sample: "/subscriptions/ffffffff-ffff-ffff-ffff-ffffffffffff/resourceGroups/TestGroup/providers/Microsoft.DBforPostgreSQL/servers/testserver/databases/db1"
resource_group:
description:
- Resource group name.
returned: always
type: str
sample: testrg
server_name:
description:
- Server name.
returned: always
type: str
sample: testserver
name:
description:
- Resource name.
returned: always
type: str
sample: db1
charset:
description:
- The charset of the database.
returned: always
type: str
sample: UTF8
collation:
description:
- The collation of the database.
returned: always
type: str
sample: English_United States.1252
'''
from ansible.module_utils.azure_rm_common import AzureRMModuleBase
try:
from msrestazure.azure_exceptions import CloudError
from azure.mgmt.rdbms.postgresql import PostgreSQLManagementClient
from msrest.serialization import Model
except ImportError:
# This is handled in azure_rm_common
pass
class AzureRMDatabasesFacts(AzureRMModuleBase):
def __init__(self):
# define user inputs into argument
self.module_arg_spec = dict(
resource_group=dict(
type='str',
required=True
),
server_name=dict(
type='str',
required=True
),
name=dict(
type='str'
)
)
# store the results of the module operation
self.results = dict(
changed=False
)
self.resource_group = None
self.server_name = None
self.name = None
super(AzureRMDatabasesFacts, self).__init__(self.module_arg_spec, supports_tags=False)
def exec_module(self, **kwargs):
for key in self.module_arg_spec:
setattr(self, key, kwargs[key])
if (self.resource_group is not None and
self.server_name is not None and
self.name is not None):
self.results['databases'] = self.get()
elif (self.resource_group is not None and
self.server_name is not None):
self.results['databases'] = self.list_by_server()
return self.results
def get(self):
response = None
results = []
try:
response = self.postgresql_client.databases.get(resource_group_name=self.resource_group,
server_name=self.server_name,
database_name=self.name)
self.log("Response : {0}".format(response))
        except CloudError as e:
            self.log('Could not get facts for Databases: {0}'.format(str(e)))
if response is not None:
results.append(self.format_item(response))
return results
def list_by_server(self):
response = None
results = []
try:
response = self.postgresql_client.databases.list_by_server(resource_group_name=self.resource_group,
server_name=self.server_name)
self.log("Response : {0}".format(response))
except CloudError as e:
self.fail("Error listing for server {0} - {1}".format(self.server_name, str(e)))
if response is not None:
for item in response:
results.append(self.format_item(item))
return results
def format_item(self, item):
d = item.as_dict()
d = {
'resource_group': self.resource_group,
'server_name': self.server_name,
'name': d['name'],
'charset': d['charset'],
'collation': d['collation']
}
return d
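# format_item flattens an SDK Database object into a plain dict, e.g.
# (illustrative): {'resource_group': 'testrg', 'server_name': 'testserver',
#  'name': 'db1', 'charset': 'UTF8', 'collation': 'English_United States.1252'}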
def main():
AzureRMDatabasesFacts()
if __name__ == '__main__':
main()
|
ikaritw/crouton
|
refs/heads/master
|
chroot-etc/xbmc-cycle.py
|
13
|
#!/usr/bin/env python
# Copyright (c) 2015 The crouton Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
#
# Python script to call croutoncycle. This is needed to let the
# hotkeys ctr-shift-alt F1/F2 work when xbmc is in fullscreen.
import subprocess
import sys
if len(sys.argv) == 2 and sys.argv[1] in ("prev", "next"):
exitcode = subprocess.call(["/usr/local/bin/croutoncycle", sys.argv[1]])
else:
sys.stderr.write("Usage: %s prev|next\n" % str(sys.argv[0]))
exitcode = 2
sys.exit(exitcode)
|
beni55/django-tastypie
|
refs/heads/master
|
tastypie/api.py
|
44
|
from __future__ import unicode_literals
import warnings
from django.conf.urls import url, patterns, include
from django.core.exceptions import ImproperlyConfigured
from django.core.urlresolvers import reverse
from django.http import HttpResponse, HttpResponseBadRequest
from tastypie.exceptions import NotRegistered, BadRequest
from tastypie.serializers import Serializer
from tastypie.utils import trailing_slash, is_valid_jsonp_callback_value
from tastypie.utils.mime import determine_format, build_content_type
class Api(object):
"""
Implements a registry to tie together the various resources that make up
an API.
Especially useful for navigation, HATEOAS and for providing multiple
versions of your API.
Optionally supplying ``api_name`` allows you to name the API. Generally,
this is done with version numbers (i.e. ``v1``, ``v2``, etc.) but can
be named any string.
"""
def __init__(self, api_name="v1", serializer_class=Serializer):
self.api_name = api_name
self._registry = {}
self._canonicals = {}
self.serializer = serializer_class()
def register(self, resource, canonical=True):
"""
Registers an instance of a ``Resource`` subclass with the API.
Optionally accept a ``canonical`` argument, which indicates that the
resource being registered is the canonical variant. Defaults to
``True``.
"""
resource_name = getattr(resource._meta, 'resource_name', None)
if resource_name is None:
raise ImproperlyConfigured("Resource %r must define a 'resource_name'." % resource)
self._registry[resource_name] = resource
if canonical is True:
if resource_name in self._canonicals:
warnings.warn("A new resource '%r' is replacing the existing canonical URL for '%s'." % (resource, resource_name), Warning, stacklevel=2)
self._canonicals[resource_name] = resource
# TODO: This is messy, but makes URI resolution on FK/M2M fields
# work consistently.
resource._meta.api_name = self.api_name
resource.__class__.Meta.api_name = self.api_name
def unregister(self, resource_name):
"""
If present, unregisters a resource from the API.
"""
if resource_name in self._registry:
            del self._registry[resource_name]
if resource_name in self._canonicals:
            del self._canonicals[resource_name]
def canonical_resource_for(self, resource_name):
"""
Returns the canonical resource for a given ``resource_name``.
"""
if resource_name in self._canonicals:
return self._canonicals[resource_name]
raise NotRegistered("No resource was registered as canonical for '%s'." % resource_name)
def wrap_view(self, view):
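        # Resolve ``view`` by name on this instance and convert any
        # ``BadRequest`` raised by the handler into an HTTP 400 response.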
def wrapper(request, *args, **kwargs):
try:
return getattr(self, view)(request, *args, **kwargs)
except BadRequest:
return HttpResponseBadRequest()
return wrapper
def override_urls(self):
"""
Deprecated. Will be removed by v1.0.0. Please use ``prepend_urls`` instead.
"""
return []
def prepend_urls(self):
"""
A hook for adding your own URLs or matching before the default URLs.
"""
return []
@property
def urls(self):
"""
Provides URLconf details for the ``Api`` and all registered
``Resources`` beneath it.
"""
pattern_list = [
url(r"^(?P<api_name>%s)%s$" % (self.api_name, trailing_slash()), self.wrap_view('top_level'), name="api_%s_top_level" % self.api_name),
]
for name in sorted(self._registry.keys()):
self._registry[name].api_name = self.api_name
pattern_list.append((r"^(?P<api_name>%s)/" % self.api_name, include(self._registry[name].urls)))
urlpatterns = self.prepend_urls()
overridden_urls = self.override_urls()
if overridden_urls:
warnings.warn("'override_urls' is a deprecated method & will be removed by v1.0.0. Please rename your method to ``prepend_urls``.")
urlpatterns += overridden_urls
urlpatterns += patterns('',
*pattern_list
)
return urlpatterns
def top_level(self, request, api_name=None):
"""
        A view that returns a serialized list of all resources registered
        with the ``Api``. Useful for discovery.
"""
available_resources = {}
if api_name is None:
api_name = self.api_name
for name in sorted(self._registry.keys()):
available_resources[name] = {
'list_endpoint': self._build_reverse_url("api_dispatch_list", kwargs={
'api_name': api_name,
'resource_name': name,
}),
'schema': self._build_reverse_url("api_get_schema", kwargs={
'api_name': api_name,
'resource_name': name,
}),
}
desired_format = determine_format(request, self.serializer)
options = {}
if 'text/javascript' in desired_format:
callback = request.GET.get('callback', 'callback')
if not is_valid_jsonp_callback_value(callback):
raise BadRequest('JSONP callback name is invalid.')
options['callback'] = callback
serialized = self.serializer.serialize(available_resources, desired_format, options)
return HttpResponse(content=serialized, content_type=build_content_type(desired_format))
def _build_reverse_url(self, name, args=None, kwargs=None):
"""
A convenience hook for overriding how URLs are built.
See ``NamespacedApi._build_reverse_url`` for an example.
"""
return reverse(name, args=args, kwargs=kwargs)
class NamespacedApi(Api):
"""
An API subclass that respects Django namespaces.
"""
def __init__(self, api_name="v1", urlconf_namespace=None, **kwargs):
super(NamespacedApi, self).__init__(api_name=api_name, **kwargs)
self.urlconf_namespace = urlconf_namespace
def register(self, resource, canonical=True):
super(NamespacedApi, self).register(resource, canonical=canonical)
if canonical is True:
# Plop in the namespace here as well.
resource._meta.urlconf_namespace = self.urlconf_namespace
def _build_reverse_url(self, name, args=None, kwargs=None):
namespaced = "%s:%s" % (self.urlconf_namespace, name)
return reverse(namespaced, args=args, kwargs=kwargs)
|
eggmaster/tempest
|
refs/heads/master
|
tempest/api/compute/admin/test_quotas.py
|
3
|
# Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log as logging
import six
from tempest_lib.common.utils import data_utils
from testtools import matchers
from tempest.api.compute import base
from tempest.common import tempest_fixtures as fixtures
from tempest import test
LOG = logging.getLogger(__name__)
class QuotasAdminTestJSON(base.BaseV2ComputeAdminTest):
force_tenant_isolation = True
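    # Quota updates mutate per-tenant state, so a dedicated tenant is
    # forced rather than reusing shared credentials.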
def setUp(self):
# NOTE(mriedem): Avoid conflicts with os-quota-class-sets tests.
self.useFixture(fixtures.LockFixture('compute_quotas'))
super(QuotasAdminTestJSON, self).setUp()
@classmethod
def setup_clients(cls):
super(QuotasAdminTestJSON, cls).setup_clients()
cls.adm_client = cls.os_adm.quotas_client
@classmethod
def resource_setup(cls):
super(QuotasAdminTestJSON, cls).resource_setup()
        # NOTE(afazekas): these test cases should always create and use a new
        # tenant; most of them should be skipped if we can't do that
cls.demo_tenant_id = cls.quotas_client.tenant_id
cls.default_quota_set = set(('injected_file_content_bytes',
'metadata_items', 'injected_files',
'ram', 'floating_ips',
'fixed_ips', 'key_pairs',
'injected_file_path_bytes',
'instances', 'security_group_rules',
'cores', 'security_groups'))
@test.idempotent_id('3b0a7c8f-cf58-46b8-a60c-715a32a8ba7d')
def test_get_default_quotas(self):
# Admin can get the default resource quota set for a tenant
expected_quota_set = self.default_quota_set | set(['id'])
quota_set = self.adm_client.get_default_quota_set(
self.demo_tenant_id)
self.assertEqual(quota_set['id'], self.demo_tenant_id)
for quota in expected_quota_set:
self.assertIn(quota, quota_set.keys())
@test.idempotent_id('55fbe2bf-21a9-435b-bbd2-4162b0ed799a')
def test_update_all_quota_resources_for_tenant(self):
# Admin can update all the resource quota limits for a tenant
default_quota_set = self.adm_client.get_default_quota_set(
self.demo_tenant_id)
new_quota_set = {'injected_file_content_bytes': 20480,
'metadata_items': 256, 'injected_files': 10,
'ram': 10240, 'floating_ips': 20, 'fixed_ips': 10,
'key_pairs': 200, 'injected_file_path_bytes': 512,
'instances': 20, 'security_group_rules': 20,
'cores': 2, 'security_groups': 20}
# Update limits for all quota resources
quota_set = self.adm_client.update_quota_set(
self.demo_tenant_id,
force=True,
**new_quota_set)
default_quota_set.pop('id')
# NOTE(PhilDay) The following is safe as we're not updating these
# two quota values yet. Once the Nova change to add these is merged
# and the client updated to support them this can be removed
if 'server_groups' in default_quota_set:
default_quota_set.pop('server_groups')
if 'server_group_members' in default_quota_set:
default_quota_set.pop('server_group_members')
self.addCleanup(self.adm_client.update_quota_set,
self.demo_tenant_id, **default_quota_set)
for quota in new_quota_set:
self.assertIn(quota, quota_set.keys())
# TODO(afazekas): merge these test cases
@test.idempotent_id('ce9e0815-8091-4abd-8345-7fe5b85faa1d')
def test_get_updated_quotas(self):
# Verify that GET shows the updated quota set of tenant
tenant_name = data_utils.rand_name('cpu_quota_tenant')
tenant_desc = tenant_name + '-desc'
identity_client = self.os_adm.identity_client
tenant = identity_client.create_tenant(name=tenant_name,
description=tenant_desc)
tenant_id = tenant['id']
self.addCleanup(identity_client.delete_tenant, tenant_id)
self.adm_client.update_quota_set(tenant_id, ram='5120')
quota_set = self.adm_client.get_quota_set(tenant_id)
self.assertEqual(5120, quota_set['ram'])
# Verify that GET shows the updated quota set of user
user_name = data_utils.rand_name('cpu_quota_user')
password = data_utils.rand_name('password')
email = user_name + '@testmail.tm'
user = identity_client.create_user(name=user_name,
password=password,
tenant_id=tenant_id,
email=email)
user_id = user['id']
self.addCleanup(identity_client.delete_user, user_id)
self.adm_client.update_quota_set(tenant_id,
user_id=user_id,
ram='2048')
quota_set = self.adm_client.get_quota_set(tenant_id,
user_id=user_id)
self.assertEqual(2048, quota_set['ram'])
@test.idempotent_id('389d04f0-3a41-405f-9317-e5f86e3c44f0')
def test_delete_quota(self):
# Admin can delete the resource quota set for a tenant
tenant_name = data_utils.rand_name('ram_quota_tenant')
tenant_desc = tenant_name + '-desc'
identity_client = self.os_adm.identity_client
tenant = identity_client.create_tenant(name=tenant_name,
description=tenant_desc)
tenant_id = tenant['id']
self.addCleanup(identity_client.delete_tenant, tenant_id)
quota_set_default = self.adm_client.get_quota_set(tenant_id)
ram_default = quota_set_default['ram']
self.adm_client.update_quota_set(tenant_id, ram='5120')
self.adm_client.delete_quota_set(tenant_id)
quota_set_new = self.adm_client.get_quota_set(tenant_id)
self.assertEqual(ram_default, quota_set_new['ram'])
class QuotaClassesAdminTestJSON(base.BaseV2ComputeAdminTest):
"""Tests the os-quota-class-sets API to update default quotas.
"""
def setUp(self):
# All test cases in this class need to externally lock on doing
# anything with default quota values.
self.useFixture(fixtures.LockFixture('compute_quotas'))
super(QuotaClassesAdminTestJSON, self).setUp()
@classmethod
def resource_setup(cls):
super(QuotaClassesAdminTestJSON, cls).resource_setup()
cls.adm_client = cls.os_adm.quota_classes_client
def _restore_default_quotas(self, original_defaults):
LOG.debug("restoring quota class defaults")
self.adm_client.update_quota_class_set(
'default', **original_defaults)
# NOTE(sdague): this test is problematic as it changes
# global state, and possibly needs to be part of a set of
# tests that get run all by themselves at the end under a
# 'danger' flag.
@test.idempotent_id('7932ab0f-5136-4075-b201-c0e2338df51a')
def test_update_default_quotas(self):
LOG.debug("get the current 'default' quota class values")
body = self.adm_client.get_quota_class_set('default')
self.assertIn('id', body)
self.assertEqual('default', body.pop('id'))
# restore the defaults when the test is done
self.addCleanup(self._restore_default_quotas, body.copy())
# increment all of the values for updating the default quota class
for quota, default in six.iteritems(body):
# NOTE(sdague): we need to increment a lot, otherwise
            # there is a real chance that we go from -1 (unlimited)
# to a very small number which causes issues.
body[quota] = default + 100
LOG.debug("update limits for the default quota class set")
update_body = self.adm_client.update_quota_class_set('default',
**body)
LOG.debug("assert that the response has all of the changed values")
self.assertThat(update_body.items(),
matchers.ContainsAll(body.items()))
|
ilayn/scipy
|
refs/heads/master
|
scipy/sparse/csgraph/tests/test_connected_components.py
|
21
|
import numpy as np
from numpy.testing import assert_equal, assert_array_almost_equal
from scipy.sparse import csgraph
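# "Weak" connectivity treats directed edges as undirected, while "strong"
# connectivity requires a directed path in both directions between nodes;
# the tests below exercise both modes on dense and sparse inputs.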
def test_weak_connections():
Xde = np.array([[0, 1, 0],
[0, 0, 0],
[0, 0, 0]])
Xsp = csgraph.csgraph_from_dense(Xde, null_value=0)
for X in Xsp, Xde:
n_components, labels =\
csgraph.connected_components(X, directed=True,
connection='weak')
assert_equal(n_components, 2)
assert_array_almost_equal(labels, [0, 0, 1])
def test_strong_connections():
X1de = np.array([[0, 1, 0],
[0, 0, 0],
[0, 0, 0]])
X2de = X1de + X1de.T
X1sp = csgraph.csgraph_from_dense(X1de, null_value=0)
X2sp = csgraph.csgraph_from_dense(X2de, null_value=0)
for X in X1sp, X1de:
n_components, labels =\
csgraph.connected_components(X, directed=True,
connection='strong')
assert_equal(n_components, 3)
labels.sort()
assert_array_almost_equal(labels, [0, 1, 2])
for X in X2sp, X2de:
n_components, labels =\
csgraph.connected_components(X, directed=True,
connection='strong')
assert_equal(n_components, 2)
labels.sort()
assert_array_almost_equal(labels, [0, 0, 1])
def test_strong_connections2():
X = np.array([[0, 0, 0, 0, 0, 0],
[1, 0, 1, 0, 0, 0],
[0, 0, 0, 1, 0, 0],
[0, 0, 1, 0, 1, 0],
[0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 1, 0]])
n_components, labels =\
csgraph.connected_components(X, directed=True,
connection='strong')
assert_equal(n_components, 5)
labels.sort()
assert_array_almost_equal(labels, [0, 1, 2, 2, 3, 4])
def test_weak_connections2():
X = np.array([[0, 0, 0, 0, 0, 0],
[1, 0, 0, 0, 0, 0],
[0, 0, 0, 1, 0, 0],
[0, 0, 1, 0, 1, 0],
[0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 1, 0]])
n_components, labels =\
csgraph.connected_components(X, directed=True,
connection='weak')
assert_equal(n_components, 2)
labels.sort()
assert_array_almost_equal(labels, [0, 0, 1, 1, 1, 1])
def test_ticket1876():
# Regression test: this failed in the original implementation
# There should be two strongly-connected components; previously gave one
g = np.array([[0, 1, 1, 0],
[1, 0, 0, 1],
[0, 0, 0, 1],
[0, 0, 1, 0]])
n_components, labels = csgraph.connected_components(g, connection='strong')
assert_equal(n_components, 2)
assert_equal(labels[0], labels[1])
assert_equal(labels[2], labels[3])
def test_fully_connected_graph():
# Fully connected dense matrices raised an exception.
# https://github.com/scipy/scipy/issues/3818
g = np.ones((4, 4))
n_components, labels = csgraph.connected_components(g)
assert_equal(n_components, 1)
|