repo_name stringlengths 5 100 | path stringlengths 4 294 | copies stringclasses 990 values | size stringlengths 4 7 | content stringlengths 666 1M | license stringclasses 15 values |
|---|---|---|---|---|---|
meabsence/python-for-android | python3-alpha/python3-src/Lib/ctypes/test/test_anon.py | 264 | 2051 | import unittest
from ctypes import *
class AnonTest(unittest.TestCase):
    """Exercise ctypes anonymous structure/union members (_anonymous_)."""

    def test_anon(self):
        # Fields of an anonymous union are reachable directly on the
        # containing structure, at the union field's own offset.
        class ANON(Union):
            _fields_ = [("a", c_int), ("b", c_int)]

        class Y(Structure):
            _fields_ = [("x", c_int), ("_", ANON), ("y", c_int)]
            _anonymous_ = ["_"]

        for member in (Y.a, Y.b):
            self.assertEqual(member.offset, sizeof(c_int))
        for member in (ANON.a, ANON.b):
            self.assertEqual(member.offset, 0)

    def test_anon_nonseq(self):
        # TypeError: _anonymous_ must be a sequence
        make = lambda: type(Structure)(
            "Name", (Structure,), {"_fields_": [], "_anonymous_": 42})
        self.assertRaises(TypeError, make)

    def test_anon_nonmember(self):
        # AttributeError: type object 'Name' has no attribute 'x'
        make = lambda: type(Structure)(
            "Name", (Structure,), {"_fields_": [], "_anonymous_": ["x"]})
        self.assertRaises(AttributeError, make)

    def test_nested(self):
        # Anonymous members may themselves contain anonymous members;
        # inner names propagate up to the outermost structure.
        class ANON_S(Structure):
            _fields_ = [("a", c_int)]

        class ANON_U(Union):
            _fields_ = [("_", ANON_S), ("b", c_int)]
            _anonymous_ = ["_"]

        class Y(Structure):
            _fields_ = [("x", c_int), ("_", ANON_U), ("y", c_int)]
            _anonymous_ = ["_"]

        self.assertEqual(Y.x.offset, 0)
        self.assertEqual(Y.a.offset, sizeof(c_int))
        self.assertEqual(Y.b.offset, sizeof(c_int))
        self.assertEqual(Y._.offset, sizeof(c_int))
        self.assertEqual(Y.y.offset, sizeof(c_int) * 2)
if __name__ == "__main__":
unittest.main()
| apache-2.0 |
iulian787/spack | lib/spack/spack/util/string.py | 5 | 1573 | # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
def comma_list(sequence, article=''):
    """Return *sequence* as an English comma-separated list.

    Arguments:
        sequence: iterable of items; each is rendered with str().
        article (str): optional word ('and', 'or') placed before the
            last item.

    Returns:
        None for an empty sequence, the single item for a length-1
        sequence, otherwise a string like 'a, b, and c' (with an
        Oxford comma for three or more items).
    """
    # Idiom fix: isinstance instead of exact-type comparison; also
    # materializes arbitrary iterables so slicing below works.
    if not isinstance(sequence, list):
        sequence = list(sequence)
    if not sequence:
        return
    if len(sequence) == 1:
        return sequence[0]
    out = ', '.join(str(s) for s in sequence[:-1])
    if len(sequence) != 2:
        out += ','  # oxford comma
    out += ' '
    if article:
        out += article + ' '
    out += str(sequence[-1])
    return out
def comma_or(sequence):
    """Return *sequence* as an English list joined with 'or'."""
    return comma_list(sequence, article='or')
def comma_and(sequence):
    """Return *sequence* as an English list joined with 'and'."""
    return comma_list(sequence, article='and')
def quote(sequence, q="'"):
    """Return a list with each element of *sequence* wrapped in *q* quotes."""
    return ['{0}{1}{0}'.format(q, e) for e in sequence]
def plural(n, singular, plural=None, show_n=True):
    """Pluralize <singular> word by adding an s if n != 1.

    Arguments:
        n (int): number of things there are
        singular (str): singular form of word
        plural (str, optional): optional plural form, for when it's not just
            singular + 's'
        show_n (bool): whether to include n in the result string (default True)

    Returns:
        (str): "1 thing" if n == 1 or "n things" if n != 1
    """
    if plural is None:
        plural = '%ss' % singular
    word = singular if n == 1 else plural
    if show_n:
        return '%s %s' % (n, word)
    return word
| lgpl-2.1 |
m1ck/bookadoptions | django/core/mail/backends/console.py | 308 | 1295 | """
Email backend that writes messages to console instead of sending them.
"""
import sys
import threading
from django.core.mail.backends.base import BaseEmailBackend
class EmailBackend(BaseEmailBackend):
    """Email backend that writes messages to a stream (stdout by default)
    instead of sending them."""

    def __init__(self, *args, **kwargs):
        self.stream = kwargs.pop('stream', sys.stdout)
        self._lock = threading.RLock()
        super(EmailBackend, self).__init__(*args, **kwargs)

    def send_messages(self, email_messages):
        """Write all messages to the stream in a thread-safe way."""
        if not email_messages:
            return
        with self._lock:
            # Bare except is deliberate here: any write failure is either
            # swallowed (fail_silently) or re-raised untouched.
            try:
                stream_created = self.open()
                for message in email_messages:
                    self.stream.write('%s\n' % message.message().as_string())
                    self.stream.write('-' * 79)
                    self.stream.write('\n')
                    self.stream.flush()  # flush after each message
                if stream_created:
                    self.close()
            except:
                if not self.fail_silently:
                    raise
        return len(email_messages)
| bsd-3-clause |
pkappesser/youtube-dl | devscripts/fish-completion.py | 39 | 1613 | #!/usr/bin/env python
from __future__ import unicode_literals
import optparse
import os
from os.path import dirname as dirn
import sys
sys.path.insert(0, dirn(dirn((os.path.abspath(__file__)))))
import youtube_dl
from youtube_dl.utils import shell_quote
# Output path and input template for the generated fish completion script.
FISH_COMPLETION_FILE = 'youtube-dl.fish'
FISH_COMPLETION_TEMPLATE = 'devscripts/fish-completion.in'
# Extra arguments passed to fish's `complete` command, keyed by the
# option's long name (without leading dashes).
EXTRA_ARGS = {
    'recode-video': ['--arguments', 'mp4 flv ogg webm mkv', '--exclusive'],
    # Options that need a file parameter
    'download-archive': ['--require-parameter'],
    'cookies': ['--require-parameter'],
    'load-info': ['--require-parameter'],
    'batch-file': ['--require-parameter'],
}
def build_completion(opt_parser):
    """Generate the fish completion file for youtube-dl.

    Builds one fish `complete` command per option found in *opt_parser*,
    substitutes them into the devscripts template, and writes the result
    to FISH_COMPLETION_FILE.
    """
    commands = []
    for group in opt_parser.option_groups:
        for option in group.option_list:
            # '--long-option' -> 'long-option'
            long_option = option.get_opt_string().strip('-')
            complete_cmd = ['complete', '--command', 'youtube-dl', '--long-option', long_option]
            if option._short_opts:
                complete_cmd += ['--short-option', option._short_opts[0].strip('-')]
            if option.help != optparse.SUPPRESS_HELP:
                complete_cmd += ['--description', option.help]
            # Per-option extras (file parameters, argument lists).
            complete_cmd.extend(EXTRA_ARGS.get(long_option, []))
            commands.append(shell_quote(complete_cmd))
    with open(FISH_COMPLETION_TEMPLATE) as f:
        template = f.read()
    filled_template = template.replace('{{commands}}', '\n'.join(commands))
    with open(FISH_COMPLETION_FILE, 'w') as f:
        f.write(filled_template)
# Script entry point: build the completion file for the current options.
parser = youtube_dl.parseOpts()[0]
build_completion(parser)
| unlicense |
simark/pysaleae | saleae/logic.py | 1 | 6347 | # This file is part of pysaleae.
#
# pysaleae is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pysaleae is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with pysaleae. If not, see <http://www.gnu.org/licenses/>.
#
# Copyright (c) 2015 Simon Marchi <simon.marchi@polymtl.ca>
import socket
# Command strings understood by Logic's scripting socket server.
_CAPTURE = 'CAPTURE'
_GET_ALL_SAMPLE_RATES = 'GET_ALL_SAMPLE_RATES'
_GET_CONNECTED_DEVICES = 'GET_CONNECTED_DEVICES'
_GET_SAMPLE_RATE = 'GET_SAMPLE_RATE'
_SET_NUM_SAMPLES = 'SET_NUM_SAMPLES'
_SET_SAMPLE_RATE = 'SET_SAMPLE_RATE'
class Internalerror(Exception):
    """Raised when a response from Logic cannot be interpreted
    (its last line is not 'ACK').

    The offending response lines are kept in self._response and shown
    in the exception's string form.
    """
    def __init__(self, response):
        self._response = response

    def __str__(self):
        # Bug fix: the original built this message but never returned it,
        # so str(exc) raised "TypeError: __str__ returned None".
        return 'Internal error, got response: {}'.format(self._response)
class Device:
    """Immutable value object describing one Saleae device reported by
    Logic: its number, name, kind, device id, and active flag."""

    def __init__(self, num, name, kind, dev_id, active):
        self._num = num
        self._name = name
        self._kind = kind
        self._dev_id = dev_id
        self._active = active

    @property
    def num(self):
        """Device number assigned by Logic."""
        return self._num

    @property
    def name(self):
        """Human-readable device name."""
        return self._name

    @property
    def kind(self):
        """Device model/kind string."""
        return self._kind

    @property
    def dev_id(self):
        """Device identifier string."""
        return self._dev_id

    @property
    def active(self):
        """Whether Logic reported this device as ACTIVE."""
        return self._active

    def __repr__(self):
        return ('Device(num={0.num}, name="{0.name}", '
                'kind="{0.kind}", dev_id="{0.dev_id}", '
                'active={0.active})').format(self)

    def __str__(self):
        return repr(self)
class Logic:
    """Client for the Saleae Logic scripting socket API.

    Commands are sent as NUL-terminated strings over TCP; responses are
    newline-separated lines whose final line is 'ACK' on success.
    """
    def __init__(self, ip='127.0.0.1', port=10429):
        '''Initialize a connection to Logic.
        If ip is omitted, 127.0.0.1 is used. If port is omitted, 10429
        is used (the default value in Logic).
        '''
        self._ip = ip
        self._port = port
        # The socket is created lazily by the _socket property; stays
        # None until the first command is sent.
        self._socket_real = None
        # Maximum number of bytes accepted in a single recv().
        self._recv_size = 0x10000
    @property
    def _socket(self):
        # Connect on first use so constructing Logic() has no side effects.
        if not self._socket_real:
            self._socket_real = socket.socket(
                socket.AF_INET, socket.SOCK_STREAM)
            self._socket_real.connect((self._ip, self._port))
        return self._socket_real
    def _socket_send(self, b):
        # Logic's protocol terminates each command with a NUL byte.
        self._socket.send(b.encode() + b'\0')
    def _socket_recv(self):
        # NOTE(review): assumes the whole response arrives in a single
        # recv() of up to _recv_size bytes -- confirm for long responses.
        b = self._socket.recv(self._recv_size).decode()
        b = b.split('\n')
        if b[-1] != 'ACK':
            raise Internalerror(b)
        return b[:-1]
    @property
    def connected_devices(self):
        '''Get the connected Saleae devices.
        Returns a list of Device objects.
        '''
        self._socket_send(_GET_CONNECTED_DEVICES)
        lines = self._socket_recv()
        devices = []
        for line in lines:
            # Each response line: "num, name, kind, dev_id[, ACTIVE]".
            dev_info = [x.strip() for x in line.split(',')]
            num = int(dev_info[0])
            name = dev_info[1]
            kind = dev_info[2]
            dev_id = dev_info[3]
            # The optional fifth field flags the active device.
            active = len(dev_info) > 4 and dev_info[4] == 'ACTIVE'
            dev = Device(num, name, kind, dev_id, active)
            devices.append(dev)
        return devices
    def set_num_samples(self, num_samples):
        '''Set the duration of the capture in number of samples.'''
        s = '{}, {}'.format(_SET_NUM_SAMPLES, num_samples)
        self._socket_send(s)
        self._socket_recv()
    def set_num_seconds(self, seconds):
        '''Set the duration of the capture in seconds.
        Set the duration of the capture in seconds, based on the current
        sample rate. Therefore, to get the desired result, this method
        must be called after the sample rate has been set.
        '''
        # The digital sample rate (index 0) determines the sample count.
        sr = self.sample_rate[0]
        samples = sr * seconds
        self.set_num_samples(samples)
    @property
    def all_sample_rates(self):
        '''Get all the available sample rates values.
        The returned value is a list of sample rates. Each sample rate
        is a value in the form:
        (digital_sample_rate, analog_sample_rate)
        The available sample rates values depend on the combination of
        digital and analog channel that are enabled.'''
        self._socket_send(_GET_ALL_SAMPLE_RATES)
        lines = self._socket_recv()
        rates = []
        for line in lines:
            # Each response line: "digital_rate, analog_rate".
            line = line.split(',')
            digital = int(line[0])
            analog = int(line[1])
            rate = (digital, analog)
            rates.append(rate)
        return rates
    def sample_rate_at_least(self, digital_min=0, analog_min=0):
        '''Find a sample rate value that is at least a certain value.
        Find the first sample rate that has a digital sample rate of
        at least digital_min and an analog sample rate of at least
        analog_min.
        The returned value is in the form:
        (digital_sample_rate, analog_sample_rate)
        Returns None when no available rate satisfies both minimums.
        '''
        sample_rates = self.all_sample_rates
        # Sorting makes the smallest qualifying rate win.
        for sr in sorted(sample_rates):
            if sr[0] >= digital_min and sr[1] >= analog_min:
                return sr
        return None
    def set_sample_rate(self, sample_rate):
        '''Set the capture sample rate.
        sample_rate is a (digital_rate, analog_rate) pair, as returned
        by all_sample_rates or sample_rate_at_least.
        '''
        s = '{0}, {1[0]}, {1[1]}'.format(_SET_SAMPLE_RATE, sample_rate)
        self._socket_send(s)
        self._socket_recv()
    @property
    def sample_rate(self):
        '''Get the current capture sample rate.
        Returns a (digital_sample_rate, analog_sample_rate) tuple.
        '''
        self._socket_send(_GET_SAMPLE_RATE)
        sr = self._socket_recv()
        return int(sr[0]), int(sr[1])
    def capture(self):
        '''Start a capture.
        Start a capture and block until it is complete. The duration
        and resolution of the capture can be set using a mix of
        set_num_samples, set_num_seconds and set_sample_rate.
        '''
        # NOTE(review): uses the literal 'CAPTURE' instead of the
        # module-level _CAPTURE constant defined above.
        self._socket_send('CAPTURE')
        self._socket_recv()
    def close(self):
        '''Close the connection to Logic.'''
        # NOTE(review): raises AttributeError if called before any
        # command was sent (self._socket_real is still None).
        self._socket_real.close()
        self._socket_real = None
| gpl-3.0 |
pymedusa/SickRage | ext2/enum/__init__.py | 105 | 31054 | """Python Enumerations"""
import sys as _sys
__all__ = ['Enum', 'IntEnum', 'unique']
version = 1, 1, 6
# Interpreter version as a float (e.g. 2.7, 3.6); consulted throughout
# for version-specific behavior.
pyver = float('%s.%s' % _sys.version_info[:2])
try:
    any
except NameError:
    # any() does not exist before Python 2.5; provide an equivalent.
    def any(iterable):
        for element in iterable:
            if element:
                return True
        return False
try:
    from collections import OrderedDict
except ImportError:
    # Pre-2.7: no OrderedDict; member maps fall back to plain dicts.
    OrderedDict = None
try:
    basestring
except NameError:
    # In Python 2 basestring is the ancestor of both str and unicode
    # in Python 3 it's just str, but was missing in 3.1
    basestring = str
try:
    unicode
except NameError:
    # In Python 3 unicode no longer exists (it's just str)
    unicode = str
class _RouteClassAttributeToGetattr(object):
    """Route attribute access on a class to __getattr__.

    A descriptor that leaves instance-level access alone but makes
    class-level access raise AttributeError, which routes the lookup
    through the class's __getattr__ method.
    """

    def __init__(self, fget=None):
        self.fget = fget

    def __get__(self, instance, ownerclass=None):
        if instance is not None:
            return self.fget(instance)
        # Class-level access: defer to the class's __getattr__.
        raise AttributeError()

    def __set__(self, instance, value):
        raise AttributeError("can't set attribute")

    def __delete__(self, instance):
        raise AttributeError("can't delete attribute")
def _is_descriptor(obj):
    """Returns True if obj is a descriptor, False otherwise."""
    for attr in ('__get__', '__set__', '__delete__'):
        if hasattr(obj, attr):
            return True
    return False
def _is_dunder(name):
    """Returns True if a __dunder__ name, False otherwise."""
    return (
        len(name) > 4
        and name.startswith('__')
        and name.endswith('__')
        and name[2] != '_'
        and name[-3] != '_'
    )
def _is_sunder(name):
    """Returns True if a _sunder_ name, False otherwise."""
    # Index (not slice) the first/last characters, matching the
    # original's IndexError on an empty name.
    return (
        name[0] == '_'
        and name[-1] == '_'
        and name[1:2] != '_'
        and name[-2:-1] != '_'
        and len(name) > 2
    )
def _make_class_unpicklable(cls):
    """Make the given class un-picklable."""
    def _refuse_to_pickle(self, protocol=None):
        raise TypeError('%r cannot be pickled' % self)
    # Pickle prefers __reduce_ex__, so sabotaging it covers every protocol;
    # the bogus module name defeats lookup-by-qualified-name as well.
    cls.__module__ = '<unknown>'
    cls.__reduce_ex__ = _refuse_to_pickle
class _EnumDict(dict):
    """Track enum member order and ensure member names are not reused.
    EnumMeta will use the names found in self._member_names as the
    enumeration member names.
    """
    def __init__(self):
        super(_EnumDict, self).__init__()
        # Candidate member names in definition order; read by EnumMeta.
        self._member_names = []
    def __setitem__(self, key, value):
        """Changes anything not dundered or not a descriptor.
        If a descriptor is added with the same name as an enum member, the name
        is removed from _member_names (this may leave a hole in the numerical
        sequence of values).
        If an enum member name is used twice, an error is raised; duplicate
        values are not checked for.
        Single underscore (sunder) names are reserved.
        Note: in 3.x __order__ is simply discarded as a not necessary piece
        leftover from 2.x
        """
        # Python 3 gets definition order from __prepare__, so any
        # explicit order key is dropped; Python 2 normalizes __order__
        # to _order_.
        if pyver >= 3.0 and key in ('_order_', '__order__'):
            return
        elif key == '__order__':
            key = '_order_'
        if _is_sunder(key):
            if key != '_order_':
                raise ValueError('_names_ are reserved for future Enum use')
        elif _is_dunder(key):
            pass
        elif key in self._member_names:
            # descriptor overwriting an enum?
            raise TypeError('Attempted to reuse key: %r' % key)
        elif not _is_descriptor(value):
            if key in self:
                # enum overwriting a descriptor?
                raise TypeError('Key already defined as: %r' % self[key])
            self._member_names.append(key)
        super(_EnumDict, self).__setitem__(key, value)
# Dummy value for Enum as EnumMeta explicity checks for it, but of course until
# EnumMeta finishes running the first time the Enum class doesn't exist. This
# is also why there are checks in EnumMeta like `if Enum is not None`
Enum = None
class EnumMeta(type):
"""Metaclass for Enum"""
@classmethod
def __prepare__(metacls, cls, bases):
return _EnumDict()
def __new__(metacls, cls, bases, classdict):
# an Enum class is final once enumeration items have been defined; it
# cannot be mixed with other types (int, float, etc.) if it has an
# inherited __new__ unless a new __new__ is defined (or the resulting
# class will fail).
if type(classdict) is dict:
original_dict = classdict
classdict = _EnumDict()
for k, v in original_dict.items():
classdict[k] = v
member_type, first_enum = metacls._get_mixins_(bases)
__new__, save_new, use_args = metacls._find_new_(classdict, member_type,
first_enum)
# save enum items into separate mapping so they don't get baked into
# the new class
members = dict((k, classdict[k]) for k in classdict._member_names)
for name in classdict._member_names:
del classdict[name]
# py2 support for definition order
_order_ = classdict.get('_order_')
if _order_ is None:
if pyver < 3.0:
try:
_order_ = [name for (name, value) in sorted(members.items(), key=lambda item: item[1])]
except TypeError:
_order_ = [name for name in sorted(members.keys())]
else:
_order_ = classdict._member_names
else:
del classdict['_order_']
if pyver < 3.0:
_order_ = _order_.replace(',', ' ').split()
aliases = [name for name in members if name not in _order_]
_order_ += aliases
# check for illegal enum names (any others?)
invalid_names = set(members) & set(['mro'])
if invalid_names:
raise ValueError('Invalid enum member name(s): %s' % (
', '.join(invalid_names), ))
# save attributes from super classes so we know if we can take
# the shortcut of storing members in the class dict
base_attributes = set([a for b in bases for a in b.__dict__])
# create our new Enum type
enum_class = super(EnumMeta, metacls).__new__(metacls, cls, bases, classdict)
enum_class._member_names_ = [] # names in random order
if OrderedDict is not None:
enum_class._member_map_ = OrderedDict()
else:
enum_class._member_map_ = {} # name->value map
enum_class._member_type_ = member_type
# Reverse value->name map for hashable values.
enum_class._value2member_map_ = {}
# instantiate them, checking for duplicates as we go
# we instantiate first instead of checking for duplicates first in case
# a custom __new__ is doing something funky with the values -- such as
# auto-numbering ;)
if __new__ is None:
__new__ = enum_class.__new__
for member_name in _order_:
value = members[member_name]
if not isinstance(value, tuple):
args = (value, )
else:
args = value
if member_type is tuple: # special case for tuple enums
args = (args, ) # wrap it one more time
if not use_args or not args:
enum_member = __new__(enum_class)
if not hasattr(enum_member, '_value_'):
enum_member._value_ = value
else:
enum_member = __new__(enum_class, *args)
if not hasattr(enum_member, '_value_'):
enum_member._value_ = member_type(*args)
value = enum_member._value_
enum_member._name_ = member_name
enum_member.__objclass__ = enum_class
enum_member.__init__(*args)
# If another member with the same value was already defined, the
# new member becomes an alias to the existing one.
for name, canonical_member in enum_class._member_map_.items():
if canonical_member.value == enum_member._value_:
enum_member = canonical_member
break
else:
# Aliases don't appear in member names (only in __members__).
enum_class._member_names_.append(member_name)
# performance boost for any member that would not shadow
# a DynamicClassAttribute (aka _RouteClassAttributeToGetattr)
if member_name not in base_attributes:
setattr(enum_class, member_name, enum_member)
# now add to _member_map_
enum_class._member_map_[member_name] = enum_member
try:
# This may fail if value is not hashable. We can't add the value
# to the map, and by-value lookups for this value will be
# linear.
enum_class._value2member_map_[value] = enum_member
except TypeError:
pass
# If a custom type is mixed into the Enum, and it does not know how
# to pickle itself, pickle.dumps will succeed but pickle.loads will
# fail. Rather than have the error show up later and possibly far
# from the source, sabotage the pickle protocol for this class so
# that pickle.dumps also fails.
#
# However, if the new class implements its own __reduce_ex__, do not
# sabotage -- it's on them to make sure it works correctly. We use
# __reduce_ex__ instead of any of the others as it is preferred by
# pickle over __reduce__, and it handles all pickle protocols.
unpicklable = False
if '__reduce_ex__' not in classdict:
if member_type is not object:
methods = ('__getnewargs_ex__', '__getnewargs__',
'__reduce_ex__', '__reduce__')
if not any(m in member_type.__dict__ for m in methods):
_make_class_unpicklable(enum_class)
unpicklable = True
# double check that repr and friends are not the mixin's or various
# things break (such as pickle)
for name in ('__repr__', '__str__', '__format__', '__reduce_ex__'):
class_method = getattr(enum_class, name)
obj_method = getattr(member_type, name, None)
enum_method = getattr(first_enum, name, None)
if name not in classdict and class_method is not enum_method:
if name == '__reduce_ex__' and unpicklable:
continue
setattr(enum_class, name, enum_method)
# method resolution and int's are not playing nice
# Python's less than 2.6 use __cmp__
if pyver < 2.6:
if issubclass(enum_class, int):
setattr(enum_class, '__cmp__', getattr(int, '__cmp__'))
elif pyver < 3.0:
if issubclass(enum_class, int):
for method in (
'__le__',
'__lt__',
'__gt__',
'__ge__',
'__eq__',
'__ne__',
'__hash__',
):
setattr(enum_class, method, getattr(int, method))
# replace any other __new__ with our own (as long as Enum is not None,
# anyway) -- again, this is to support pickle
if Enum is not None:
# if the user defined their own __new__, save it before it gets
# clobbered in case they subclass later
if save_new:
setattr(enum_class, '__member_new__', enum_class.__dict__['__new__'])
setattr(enum_class, '__new__', Enum.__dict__['__new__'])
return enum_class
def __bool__(cls):
"""
classes/types should always be True.
"""
return True
def __call__(cls, value, names=None, module=None, type=None, start=1):
"""Either returns an existing member, or creates a new enum class.
This method is used both when an enum class is given a value to match
to an enumeration member (i.e. Color(3)) and for the functional API
(i.e. Color = Enum('Color', names='red green blue')).
When used for the functional API: `module`, if set, will be stored in
the new class' __module__ attribute; `type`, if set, will be mixed in
as the first base class.
Note: if `module` is not set this routine will attempt to discover the
calling module by walking the frame stack; if this is unsuccessful
the resulting class will not be pickleable.
"""
if names is None: # simple value lookup
return cls.__new__(cls, value)
# otherwise, functional API: we're creating a new Enum type
return cls._create_(value, names, module=module, type=type, start=start)
def __contains__(cls, member):
return isinstance(member, cls) and member.name in cls._member_map_
def __delattr__(cls, attr):
# nicer error message when someone tries to delete an attribute
# (see issue19025).
if attr in cls._member_map_:
raise AttributeError(
"%s: cannot delete Enum member." % cls.__name__)
super(EnumMeta, cls).__delattr__(attr)
def __dir__(self):
return (['__class__', '__doc__', '__members__', '__module__'] +
self._member_names_)
@property
def __members__(cls):
"""Returns a mapping of member name->value.
This mapping lists all enum members, including aliases. Note that this
is a copy of the internal mapping.
"""
return cls._member_map_.copy()
def __getattr__(cls, name):
"""Return the enum member matching `name`
We use __getattr__ instead of descriptors or inserting into the enum
class' __dict__ in order to support `name` and `value` being both
properties for enum members (which live in the class' __dict__) and
enum members themselves.
"""
if _is_dunder(name):
raise AttributeError(name)
try:
return cls._member_map_[name]
except KeyError:
raise AttributeError(name)
def __getitem__(cls, name):
return cls._member_map_[name]
def __iter__(cls):
return (cls._member_map_[name] for name in cls._member_names_)
def __reversed__(cls):
return (cls._member_map_[name] for name in reversed(cls._member_names_))
def __len__(cls):
return len(cls._member_names_)
__nonzero__ = __bool__
def __repr__(cls):
return "<enum %r>" % cls.__name__
def __setattr__(cls, name, value):
"""Block attempts to reassign Enum members.
A simple assignment to the class namespace only changes one of the
several possible ways to get an Enum member from the Enum class,
resulting in an inconsistent Enumeration.
"""
member_map = cls.__dict__.get('_member_map_', {})
if name in member_map:
raise AttributeError('Cannot reassign members.')
super(EnumMeta, cls).__setattr__(name, value)
def _create_(cls, class_name, names=None, module=None, type=None, start=1):
"""Convenience method to create a new Enum class.
`names` can be:
* A string containing member names, separated either with spaces or
commas. Values are auto-numbered from 1.
* An iterable of member names. Values are auto-numbered from 1.
* An iterable of (member name, value) pairs.
* A mapping of member name -> value.
"""
if pyver < 3.0:
# if class_name is unicode, attempt a conversion to ASCII
if isinstance(class_name, unicode):
try:
class_name = class_name.encode('ascii')
except UnicodeEncodeError:
raise TypeError('%r is not representable in ASCII' % class_name)
metacls = cls.__class__
if type is None:
bases = (cls, )
else:
bases = (type, cls)
classdict = metacls.__prepare__(class_name, bases)
_order_ = []
# special processing needed for names?
if isinstance(names, basestring):
names = names.replace(',', ' ').split()
if isinstance(names, (tuple, list)) and isinstance(names[0], basestring):
names = [(e, i+start) for (i, e) in enumerate(names)]
# Here, names is either an iterable of (name, value) or a mapping.
item = None # in case names is empty
for item in names:
if isinstance(item, basestring):
member_name, member_value = item, names[item]
else:
member_name, member_value = item
classdict[member_name] = member_value
_order_.append(member_name)
# only set _order_ in classdict if name/value was not from a mapping
if not isinstance(item, basestring):
classdict['_order_'] = ' '.join(_order_)
enum_class = metacls.__new__(metacls, class_name, bases, classdict)
# TODO: replace the frame hack if a blessed way to know the calling
# module is ever developed
if module is None:
try:
module = _sys._getframe(2).f_globals['__name__']
except (AttributeError, ValueError):
pass
if module is None:
_make_class_unpicklable(enum_class)
else:
enum_class.__module__ = module
return enum_class
@staticmethod
def _get_mixins_(bases):
"""Returns the type for creating enum members, and the first inherited
enum class.
bases: the tuple of bases that was given to __new__
"""
if not bases or Enum is None:
return object, Enum
# double check that we are not subclassing a class with existing
# enumeration members; while we're at it, see if any other data
# type has been mixed in so we can use the correct __new__
member_type = first_enum = None
for base in bases:
if (base is not Enum and
issubclass(base, Enum) and
base._member_names_):
raise TypeError("Cannot extend enumerations")
# base is now the last base in bases
if not issubclass(base, Enum):
raise TypeError("new enumerations must be created as "
"`ClassName([mixin_type,] enum_type)`")
# get correct mix-in type (either mix-in type of Enum subclass, or
# first base if last base is Enum)
if not issubclass(bases[0], Enum):
member_type = bases[0] # first data type
first_enum = bases[-1] # enum type
else:
for base in bases[0].__mro__:
# most common: (IntEnum, int, Enum, object)
# possible: (<Enum 'AutoIntEnum'>, <Enum 'IntEnum'>,
# <class 'int'>, <Enum 'Enum'>,
# <class 'object'>)
if issubclass(base, Enum):
if first_enum is None:
first_enum = base
else:
if member_type is None:
member_type = base
return member_type, first_enum
if pyver < 3.0:
@staticmethod
def _find_new_(classdict, member_type, first_enum):
"""Returns the __new__ to be used for creating the enum members.
classdict: the class dictionary given to __new__
member_type: the data type whose __new__ will be used by default
first_enum: enumeration to check for an overriding __new__
"""
# now find the correct __new__, checking to see of one was defined
# by the user; also check earlier enum classes in case a __new__ was
# saved as __member_new__
__new__ = classdict.get('__new__', None)
if __new__:
return None, True, True # __new__, save_new, use_args
N__new__ = getattr(None, '__new__')
O__new__ = getattr(object, '__new__')
if Enum is None:
E__new__ = N__new__
else:
E__new__ = Enum.__dict__['__new__']
# check all possibles for __member_new__ before falling back to
# __new__
for method in ('__member_new__', '__new__'):
for possible in (member_type, first_enum):
try:
target = possible.__dict__[method]
except (AttributeError, KeyError):
target = getattr(possible, method, None)
if target not in [
None,
N__new__,
O__new__,
E__new__,
]:
if method == '__member_new__':
classdict['__new__'] = target
return None, False, True
if isinstance(target, staticmethod):
target = target.__get__(member_type)
__new__ = target
break
if __new__ is not None:
break
else:
__new__ = object.__new__
# if a non-object.__new__ is used then whatever value/tuple was
# assigned to the enum member name will be passed to __new__ and to the
# new enum member's __init__
if __new__ is object.__new__:
use_args = False
else:
use_args = True
return __new__, False, use_args
else:
@staticmethod
def _find_new_(classdict, member_type, first_enum):
"""Returns the __new__ to be used for creating the enum members.
classdict: the class dictionary given to __new__
member_type: the data type whose __new__ will be used by default
first_enum: enumeration to check for an overriding __new__
"""
# now find the correct __new__, checking to see of one was defined
# by the user; also check earlier enum classes in case a __new__ was
# saved as __member_new__
__new__ = classdict.get('__new__', None)
# should __new__ be saved as __member_new__ later?
save_new = __new__ is not None
if __new__ is None:
# check all possibles for __member_new__ before falling back to
# __new__
for method in ('__member_new__', '__new__'):
for possible in (member_type, first_enum):
target = getattr(possible, method, None)
if target not in (
None,
None.__new__,
object.__new__,
Enum.__new__,
):
__new__ = target
break
if __new__ is not None:
break
else:
__new__ = object.__new__
# if a non-object.__new__ is used then whatever value/tuple was
# assigned to the enum member name will be passed to __new__ and to the
# new enum member's __init__
if __new__ is object.__new__:
use_args = False
else:
use_args = True
return __new__, save_new, use_args
########################################################
# In order to support Python 2 and 3 with a single
# codebase we have to create the Enum methods separately
# and then use the `type(name, bases, dict)` method to
# create the class.
########################################################
temp_enum_dict = {}
temp_enum_dict['__doc__'] = "Generic enumeration.\n\n Derive from this class to define new enumerations.\n\n"
def __new__(cls, value):
    """By-value member lookup; becomes Enum.__new__ via temp_enum_dict."""
    # all enum instances are actually created during class construction
    # without calling this method; this method is called by the metaclass'
    # __call__ (i.e. Color(3) ), and by pickle
    if type(value) is cls:
        # For lookups like Color(Color.red)
        value = value.value
        #return value
    # by-value search for a matching enum member
    # see if it's in the reverse mapping (for hashable values)
    try:
        if value in cls._value2member_map_:
            return cls._value2member_map_[value]
    except TypeError:
        # TypeError here means value is unhashable:
        # not there, now do long search -- O(n) behavior
        for member in cls._member_map_.values():
            if member.value == value:
                return member
    raise ValueError("%s is not a valid %s" % (value, cls.__name__))
temp_enum_dict['__new__'] = __new__
del __new__
def __repr__(self):
    # e.g. <Color.red: 1>
    return "<%s.%s: %r>" % (
        self.__class__.__name__, self._name_, self._value_)
temp_enum_dict['__repr__'] = __repr__
del __repr__
def __str__(self):
    # e.g. Color.red
    return "%s.%s" % (self.__class__.__name__, self._name_)
temp_enum_dict['__str__'] = __str__
del __str__
if pyver >= 3.0:
def __dir__(self):
added_behavior = [
m
for cls in self.__class__.mro()
for m in cls.__dict__
if m[0] != '_' and m not in self._member_map_
]
return (['__class__', '__doc__', '__module__', ] + added_behavior)
temp_enum_dict['__dir__'] = __dir__
del __dir__
def __format__(self, format_spec):
# mixed-in Enums should use the mixed-in type's __format__, otherwise
# we can get strange results with the Enum name showing up instead of
# the value
# pure Enum branch
if self._member_type_ is object:
cls = str
val = str(self)
# mix-in branch
else:
cls = self._member_type_
val = self.value
return cls.__format__(val, format_spec)
temp_enum_dict['__format__'] = __format__
del __format__
####################################
# Python's less than 2.6 use __cmp__
if pyver < 2.6:
def __cmp__(self, other):
if type(other) is self.__class__:
if self is other:
return 0
return -1
return NotImplemented
raise TypeError("unorderable types: %s() and %s()" % (self.__class__.__name__, other.__class__.__name__))
temp_enum_dict['__cmp__'] = __cmp__
del __cmp__
else:
def __le__(self, other):
raise TypeError("unorderable types: %s() <= %s()" % (self.__class__.__name__, other.__class__.__name__))
temp_enum_dict['__le__'] = __le__
del __le__
def __lt__(self, other):
raise TypeError("unorderable types: %s() < %s()" % (self.__class__.__name__, other.__class__.__name__))
temp_enum_dict['__lt__'] = __lt__
del __lt__
def __ge__(self, other):
raise TypeError("unorderable types: %s() >= %s()" % (self.__class__.__name__, other.__class__.__name__))
temp_enum_dict['__ge__'] = __ge__
del __ge__
def __gt__(self, other):
raise TypeError("unorderable types: %s() > %s()" % (self.__class__.__name__, other.__class__.__name__))
temp_enum_dict['__gt__'] = __gt__
del __gt__
def __eq__(self, other):
if type(other) is self.__class__:
return self is other
return NotImplemented
temp_enum_dict['__eq__'] = __eq__
del __eq__
def __ne__(self, other):
if type(other) is self.__class__:
return self is not other
return NotImplemented
temp_enum_dict['__ne__'] = __ne__
del __ne__
def __hash__(self):
return hash(self._name_)
temp_enum_dict['__hash__'] = __hash__
del __hash__
def __reduce_ex__(self, proto):
return self.__class__, (self._value_, )
temp_enum_dict['__reduce_ex__'] = __reduce_ex__
del __reduce_ex__
# _RouteClassAttributeToGetattr is used to provide access to the `name`
# and `value` properties of enum members while keeping some measure of
# protection from modification, while still allowing for an enumeration
# to have members named `name` and `value`. This works because enumeration
# members are not set directly on the enum class -- __getattr__ is
# used to look them up.
@_RouteClassAttributeToGetattr
def name(self):
return self._name_
temp_enum_dict['name'] = name
del name
@_RouteClassAttributeToGetattr
def value(self):
return self._value_
temp_enum_dict['value'] = value
del value
@classmethod
def _convert(cls, name, module, filter, source=None):
"""
Create a new Enum subclass that replaces a collection of global constants
"""
# convert all constants from source (or module) that pass filter() to
# a new Enum called name, and export the enum and its members back to
# module;
# also, replace the __reduce_ex__ method so unpickling works in
# previous Python versions
module_globals = vars(_sys.modules[module])
if source:
source = vars(source)
else:
source = module_globals
members = dict((name, value) for name, value in source.items() if filter(name))
cls = cls(name, members, module=module)
cls.__reduce_ex__ = _reduce_ex_by_name
module_globals.update(cls.__members__)
module_globals[name] = cls
return cls
temp_enum_dict['_convert'] = _convert
del _convert
Enum = EnumMeta('Enum', (object, ), temp_enum_dict)
del temp_enum_dict
# Enum has now been created
###########################
class IntEnum(int, Enum):
"""Enum where members are also (and must be) ints"""
def _reduce_ex_by_name(self, proto):
return self.name
def unique(enumeration):
"""Class decorator that ensures only unique members exist in an enumeration."""
duplicates = []
for name, member in enumeration.__members__.items():
if name != member.name:
duplicates.append((name, member.name))
if duplicates:
duplicate_names = ', '.join(
["%s -> %s" % (alias, name) for (alias, name) in duplicates]
)
raise ValueError('duplicate names found in %r: %s' %
(enumeration, duplicate_names)
)
return enumeration
| gpl-3.0 |
rallylee/gem5 | ext/pybind11/tests/test_smart_ptr.py | 3 | 8048 | import pytest
from pybind11_tests import ConstructorStats
def test_smart_ptr(capture):
# Object1
from pybind11_tests import (MyObject1, make_object_1, make_object_2,
print_object_1, print_object_2, print_object_3, print_object_4)
for i, o in enumerate([make_object_1(), make_object_2(), MyObject1(3)], start=1):
assert o.getRefCount() == 1
with capture:
print_object_1(o)
print_object_2(o)
print_object_3(o)
print_object_4(o)
assert capture == "MyObject1[{i}]\n".format(i=i) * 4
from pybind11_tests import (make_myobject1_1, make_myobject1_2,
print_myobject1_1, print_myobject1_2,
print_myobject1_3, print_myobject1_4)
for i, o in enumerate([make_myobject1_1(), make_myobject1_2(), MyObject1(6), 7], start=4):
print(o)
with capture:
if not isinstance(o, int):
print_object_1(o)
print_object_2(o)
print_object_3(o)
print_object_4(o)
print_myobject1_1(o)
print_myobject1_2(o)
print_myobject1_3(o)
print_myobject1_4(o)
assert capture == "MyObject1[{i}]\n".format(i=i) * (4 if isinstance(o, int) else 8)
cstats = ConstructorStats.get(MyObject1)
assert cstats.alive() == 0
expected_values = ['MyObject1[{}]'.format(i) for i in range(1, 7)] + ['MyObject1[7]'] * 4
assert cstats.values() == expected_values
assert cstats.default_constructions == 0
assert cstats.copy_constructions == 0
# assert cstats.move_constructions >= 0 # Doesn't invoke any
assert cstats.copy_assignments == 0
assert cstats.move_assignments == 0
# Object2
from pybind11_tests import (MyObject2, make_myobject2_1, make_myobject2_2,
make_myobject3_1, make_myobject3_2,
print_myobject2_1, print_myobject2_2,
print_myobject2_3, print_myobject2_4)
for i, o in zip([8, 6, 7], [MyObject2(8), make_myobject2_1(), make_myobject2_2()]):
print(o)
with capture:
print_myobject2_1(o)
print_myobject2_2(o)
print_myobject2_3(o)
print_myobject2_4(o)
assert capture == "MyObject2[{i}]\n".format(i=i) * 4
cstats = ConstructorStats.get(MyObject2)
assert cstats.alive() == 1
o = None
assert cstats.alive() == 0
assert cstats.values() == ['MyObject2[8]', 'MyObject2[6]', 'MyObject2[7]']
assert cstats.default_constructions == 0
assert cstats.copy_constructions == 0
# assert cstats.move_constructions >= 0 # Doesn't invoke any
assert cstats.copy_assignments == 0
assert cstats.move_assignments == 0
# Object3
from pybind11_tests import (MyObject3, print_myobject3_1, print_myobject3_2,
print_myobject3_3, print_myobject3_4)
for i, o in zip([9, 8, 9], [MyObject3(9), make_myobject3_1(), make_myobject3_2()]):
print(o)
with capture:
print_myobject3_1(o)
print_myobject3_2(o)
print_myobject3_3(o)
print_myobject3_4(o)
assert capture == "MyObject3[{i}]\n".format(i=i) * 4
cstats = ConstructorStats.get(MyObject3)
assert cstats.alive() == 1
o = None
assert cstats.alive() == 0
assert cstats.values() == ['MyObject3[9]', 'MyObject3[8]', 'MyObject3[9]']
assert cstats.default_constructions == 0
assert cstats.copy_constructions == 0
# assert cstats.move_constructions >= 0 # Doesn't invoke any
assert cstats.copy_assignments == 0
assert cstats.move_assignments == 0
# Object and ref
from pybind11_tests import Object, cstats_ref
cstats = ConstructorStats.get(Object)
assert cstats.alive() == 0
assert cstats.values() == []
assert cstats.default_constructions == 10
assert cstats.copy_constructions == 0
# assert cstats.move_constructions >= 0 # Doesn't invoke any
assert cstats.copy_assignments == 0
assert cstats.move_assignments == 0
cstats = cstats_ref()
assert cstats.alive() == 0
assert cstats.values() == ['from pointer'] * 10
assert cstats.default_constructions == 30
assert cstats.copy_constructions == 12
# assert cstats.move_constructions >= 0 # Doesn't invoke any
assert cstats.copy_assignments == 30
assert cstats.move_assignments == 0
def test_smart_ptr_refcounting():
from pybind11_tests import test_object1_refcounting
assert test_object1_refcounting()
def test_unique_nodelete():
from pybind11_tests import MyObject4
o = MyObject4(23)
assert o.value == 23
cstats = ConstructorStats.get(MyObject4)
assert cstats.alive() == 1
del o
cstats = ConstructorStats.get(MyObject4)
assert cstats.alive() == 1 # Leak, but that's intentional
def test_shared_ptr_and_references():
from pybind11_tests.smart_ptr import SharedPtrRef, A
s = SharedPtrRef()
stats = ConstructorStats.get(A)
assert stats.alive() == 2
ref = s.ref # init_holder_helper(holder_ptr=false, owned=false)
assert stats.alive() == 2
assert s.set_ref(ref)
with pytest.raises(RuntimeError) as excinfo:
assert s.set_holder(ref)
assert "Unable to cast from non-held to held instance" in str(excinfo.value)
copy = s.copy # init_holder_helper(holder_ptr=false, owned=true)
assert stats.alive() == 3
assert s.set_ref(copy)
assert s.set_holder(copy)
holder_ref = s.holder_ref # init_holder_helper(holder_ptr=true, owned=false)
assert stats.alive() == 3
assert s.set_ref(holder_ref)
assert s.set_holder(holder_ref)
holder_copy = s.holder_copy # init_holder_helper(holder_ptr=true, owned=true)
assert stats.alive() == 3
assert s.set_ref(holder_copy)
assert s.set_holder(holder_copy)
del ref, copy, holder_ref, holder_copy, s
assert stats.alive() == 0
def test_shared_ptr_from_this_and_references():
from pybind11_tests.smart_ptr import SharedFromThisRef, B
s = SharedFromThisRef()
stats = ConstructorStats.get(B)
assert stats.alive() == 2
ref = s.ref # init_holder_helper(holder_ptr=false, owned=false, bad_wp=false)
assert stats.alive() == 2
assert s.set_ref(ref)
assert s.set_holder(ref) # std::enable_shared_from_this can create a holder from a reference
bad_wp = s.bad_wp # init_holder_helper(holder_ptr=false, owned=false, bad_wp=true)
assert stats.alive() == 2
assert s.set_ref(bad_wp)
with pytest.raises(RuntimeError) as excinfo:
assert s.set_holder(bad_wp)
assert "Unable to cast from non-held to held instance" in str(excinfo.value)
copy = s.copy # init_holder_helper(holder_ptr=false, owned=true, bad_wp=false)
assert stats.alive() == 3
assert s.set_ref(copy)
assert s.set_holder(copy)
holder_ref = s.holder_ref # init_holder_helper(holder_ptr=true, owned=false, bad_wp=false)
assert stats.alive() == 3
assert s.set_ref(holder_ref)
assert s.set_holder(holder_ref)
holder_copy = s.holder_copy # init_holder_helper(holder_ptr=true, owned=true, bad_wp=false)
assert stats.alive() == 3
assert s.set_ref(holder_copy)
assert s.set_holder(holder_copy)
del ref, bad_wp, copy, holder_ref, holder_copy, s
assert stats.alive() == 0
def test_move_only_holder():
from pybind11_tests.smart_ptr import TypeWithMoveOnlyHolder
a = TypeWithMoveOnlyHolder.make()
stats = ConstructorStats.get(TypeWithMoveOnlyHolder)
assert stats.alive() == 1
del a
assert stats.alive() == 0
def test_smart_ptr_from_default():
from pybind11_tests.smart_ptr import HeldByDefaultHolder
instance = HeldByDefaultHolder()
with pytest.raises(RuntimeError) as excinfo:
HeldByDefaultHolder.load_shared_ptr(instance)
assert "Unable to load a custom holder type from a default-holder instance" in str(excinfo)
| bsd-3-clause |
linuxlewis/django-diffs | diffs/signals.py | 1 | 2338 | from __future__ import absolute_import, unicode_literals
import logging
from django.core import serializers
from django.db import connection
from django.db.models.signals import pre_save, post_save
from .helpers import precise_timestamp
from .settings import diffs_settings
logger = logging.getLogger("diffs")
def on_pre_save(sender, instance, **kwargs):
instance.__dirty_fields = instance.get_dirty_fields()
def on_post_save(sender, instance, created, **kwargs):
if instance.__dirty_fields or created:
# check if we should send it
if hasattr(instance, 'send_diff') and instance.send_diff() is False:
logger.debug("Skipped diff because send_diff returned False")
return
# get the data
if hasattr(instance, 'serialize_diff'):
data = instance.serialize_diff(instance.__dirty_fields, created=created)
else:
data = serialize_object(instance, instance.__dirty_fields)
if data:
model = instance
# check if should be related to another "parent" model
if hasattr(instance, 'get_diff_parent'):
parent = instance.get_diff_parent()
if parent:
model = parent
create_kwargs = {
'data': data,
'created': created,
'pk': model.id,
'model_cls': model.__class__,
'timestamp': getattr(instance, '_last_save_at', precise_timestamp())
}
# Respect the transaction if we can and should.
if hasattr(connection, 'on_commit') and diffs_settings['use_transactions']:
connection.on_commit(lambda: sender.diffs.create(**create_kwargs))
else:
sender.diffs.create(**create_kwargs)
else:
logger.debug("Skipped diff because it was emtpy.")
# clean up
del instance.__dirty_fields
else:
logger.debug("Skipped diff because no fields had changed.")
def serialize_object(instance, dirty_fields):
"""Serializes a django model using the default serialization."""
return serializers.serialize('json', [instance], fields=list(dirty_fields.keys()))
def connect(cls):
pre_save.connect(on_pre_save, cls)
post_save.connect(on_post_save, cls)
| mit |
albertliangcode/Pi_MonteCarloSim | venv/lib/python2.7/site-packages/pip/__init__.py | 61 | 10305 | #!/usr/bin/env python
from __future__ import absolute_import
import logging
import os
import optparse
import warnings
import sys
import re
from pip.exceptions import InstallationError, CommandError, PipError
from pip.utils import get_installed_distributions, get_prog
from pip.utils import deprecation
from pip.vcs import git, mercurial, subversion, bazaar # noqa
from pip.baseparser import ConfigOptionParser, UpdatingDefaultsHelpFormatter
from pip.commands import get_summaries, get_similar_commands
from pip.commands import commands_dict
from pip._vendor.requests.packages.urllib3.exceptions import (
InsecureRequestWarning,
)
# assignment for flake8 to be happy
# This fixes a peculiarity when importing via __import__ - as we are
# initialising the pip module, "from pip import cmdoptions" is recursive
# and appears not to work properly in that situation.
import pip.cmdoptions
cmdoptions = pip.cmdoptions
# The version as used in the setup.py and the docs conf.py
__version__ = "6.0.8"
logger = logging.getLogger(__name__)
# Hide the InsecureRequestWArning from urllib3
warnings.filterwarnings("ignore", category=InsecureRequestWarning)
def autocomplete():
"""Command and option completion for the main option parser (and options)
and its subcommands (and options).
Enable by sourcing one of the completion shell scripts (bash or zsh).
"""
# Don't complete if user hasn't sourced bash_completion file.
if 'PIP_AUTO_COMPLETE' not in os.environ:
return
cwords = os.environ['COMP_WORDS'].split()[1:]
cword = int(os.environ['COMP_CWORD'])
try:
current = cwords[cword - 1]
except IndexError:
current = ''
subcommands = [cmd for cmd, summary in get_summaries()]
options = []
# subcommand
try:
subcommand_name = [w for w in cwords if w in subcommands][0]
except IndexError:
subcommand_name = None
parser = create_main_parser()
# subcommand options
if subcommand_name:
# special case: 'help' subcommand has no options
if subcommand_name == 'help':
sys.exit(1)
# special case: list locally installed dists for uninstall command
if subcommand_name == 'uninstall' and not current.startswith('-'):
installed = []
lc = current.lower()
for dist in get_installed_distributions(local_only=True):
if dist.key.startswith(lc) and dist.key not in cwords[1:]:
installed.append(dist.key)
# if there are no dists installed, fall back to option completion
if installed:
for dist in installed:
print(dist)
sys.exit(1)
subcommand = commands_dict[subcommand_name]()
options += [(opt.get_opt_string(), opt.nargs)
for opt in subcommand.parser.option_list_all
if opt.help != optparse.SUPPRESS_HELP]
# filter out previously specified options from available options
prev_opts = [x.split('=')[0] for x in cwords[1:cword - 1]]
options = [(x, v) for (x, v) in options if x not in prev_opts]
# filter options by current input
options = [(k, v) for k, v in options if k.startswith(current)]
for option in options:
opt_label = option[0]
# append '=' to options which require args
if option[1]:
opt_label += '='
print(opt_label)
else:
# show main parser options only when necessary
if current.startswith('-') or current.startswith('--'):
opts = [i.option_list for i in parser.option_groups]
opts.append(parser.option_list)
opts = (o for it in opts for o in it)
subcommands += [i.get_opt_string() for i in opts
if i.help != optparse.SUPPRESS_HELP]
print(' '.join([x for x in subcommands if x.startswith(current)]))
sys.exit(1)
def create_main_parser():
parser_kw = {
'usage': '\n%prog <command> [options]',
'add_help_option': False,
'formatter': UpdatingDefaultsHelpFormatter(),
'name': 'global',
'prog': get_prog(),
}
parser = ConfigOptionParser(**parser_kw)
parser.disable_interspersed_args()
pip_pkg_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
parser.version = 'pip %s from %s (python %s)' % (
__version__, pip_pkg_dir, sys.version[:3])
# add the general options
gen_opts = cmdoptions.make_option_group(cmdoptions.general_group, parser)
parser.add_option_group(gen_opts)
parser.main = True # so the help formatter knows
# create command listing for description
command_summaries = get_summaries()
description = [''] + ['%-27s %s' % (i, j) for i, j in command_summaries]
parser.description = '\n'.join(description)
return parser
def parseopts(args):
parser = create_main_parser()
# Note: parser calls disable_interspersed_args(), so the result of this
# call is to split the initial args into the general options before the
# subcommand and everything else.
# For example:
# args: ['--timeout=5', 'install', '--user', 'INITools']
# general_options: ['--timeout==5']
# args_else: ['install', '--user', 'INITools']
general_options, args_else = parser.parse_args(args)
# --version
if general_options.version:
sys.stdout.write(parser.version)
sys.stdout.write(os.linesep)
sys.exit()
# pip || pip help -> print_help()
if not args_else or (args_else[0] == 'help' and len(args_else) == 1):
parser.print_help()
sys.exit()
# the subcommand name
cmd_name = args_else[0]
if cmd_name not in commands_dict:
guess = get_similar_commands(cmd_name)
msg = ['unknown command "%s"' % cmd_name]
if guess:
msg.append('maybe you meant "%s"' % guess)
raise CommandError(' - '.join(msg))
# all the args without the subcommand
cmd_args = args[:]
cmd_args.remove(cmd_name)
return cmd_name, cmd_args
def check_isolated(args):
isolated = False
if "--isolated" in args:
isolated = True
return isolated
def main(args=None):
if args is None:
args = sys.argv[1:]
# Enable our Deprecation Warnings
for deprecation_warning in deprecation.DEPRECATIONS:
warnings.simplefilter("default", deprecation_warning)
# Configure our deprecation warnings to be sent through loggers
deprecation.install_warning_logger()
autocomplete()
try:
cmd_name, cmd_args = parseopts(args)
except PipError as exc:
sys.stderr.write("ERROR: %s" % exc)
sys.stderr.write(os.linesep)
sys.exit(1)
command = commands_dict[cmd_name](isolated=check_isolated(cmd_args))
return command.main(cmd_args)
# ###########################################################
# # Writing freeze files
class FrozenRequirement(object):
def __init__(self, name, req, editable, comments=()):
self.name = name
self.req = req
self.editable = editable
self.comments = comments
_rev_re = re.compile(r'-r(\d+)$')
_date_re = re.compile(r'-(20\d\d\d\d\d\d)$')
@classmethod
def from_dist(cls, dist, dependency_links, find_tags=False):
location = os.path.normcase(os.path.abspath(dist.location))
comments = []
from pip.vcs import vcs, get_src_requirement
if vcs.get_backend_name(location):
editable = True
try:
req = get_src_requirement(dist, location, find_tags)
except InstallationError as exc:
logger.warning(
"Error when trying to get requirement for VCS system %s, "
"falling back to uneditable format", exc
)
req = None
if req is None:
logger.warning(
'Could not determine repository location of %s', location
)
comments.append(
'## !! Could not determine repository location'
)
req = dist.as_requirement()
editable = False
else:
editable = False
req = dist.as_requirement()
specs = req.specs
assert len(specs) == 1 and specs[0][0] in ["==", "==="]
version = specs[0][1]
ver_match = cls._rev_re.search(version)
date_match = cls._date_re.search(version)
if ver_match or date_match:
svn_backend = vcs.get_backend('svn')
if svn_backend:
svn_location = svn_backend().get_location(
dist,
dependency_links,
)
if not svn_location:
logger.warning(
'Warning: cannot find svn location for %s', req)
comments.append(
'## FIXME: could not find svn URL in dependency_links '
'for this package:'
)
else:
comments.append(
'# Installing as editable to satisfy requirement %s:' %
req
)
if ver_match:
rev = ver_match.group(1)
else:
rev = '{%s}' % date_match.group(1)
editable = True
req = '%s@%s#egg=%s' % (
svn_location,
rev,
cls.egg_name(dist)
)
return cls(dist.project_name, req, editable, comments)
@staticmethod
def egg_name(dist):
name = dist.egg_name()
match = re.search(r'-py\d\.\d$', name)
if match:
name = name[:match.start()]
return name
def __str__(self):
req = self.req
if self.editable:
req = '-e %s' % req
return '\n'.join(list(self.comments) + [str(req)]) + '\n'
if __name__ == '__main__':
sys.exit(main())
| mit |
ychen820/microblog | y/google-cloud-sdk/.install/.backup/lib/yaml/events.py | 985 | 2445 |
# Abstract classes.
class Event(object):
def __init__(self, start_mark=None, end_mark=None):
self.start_mark = start_mark
self.end_mark = end_mark
def __repr__(self):
attributes = [key for key in ['anchor', 'tag', 'implicit', 'value']
if hasattr(self, key)]
arguments = ', '.join(['%s=%r' % (key, getattr(self, key))
for key in attributes])
return '%s(%s)' % (self.__class__.__name__, arguments)
class NodeEvent(Event):
def __init__(self, anchor, start_mark=None, end_mark=None):
self.anchor = anchor
self.start_mark = start_mark
self.end_mark = end_mark
class CollectionStartEvent(NodeEvent):
def __init__(self, anchor, tag, implicit, start_mark=None, end_mark=None,
flow_style=None):
self.anchor = anchor
self.tag = tag
self.implicit = implicit
self.start_mark = start_mark
self.end_mark = end_mark
self.flow_style = flow_style
class CollectionEndEvent(Event):
pass
# Implementations.
class StreamStartEvent(Event):
def __init__(self, start_mark=None, end_mark=None, encoding=None):
self.start_mark = start_mark
self.end_mark = end_mark
self.encoding = encoding
class StreamEndEvent(Event):
pass
class DocumentStartEvent(Event):
def __init__(self, start_mark=None, end_mark=None,
explicit=None, version=None, tags=None):
self.start_mark = start_mark
self.end_mark = end_mark
self.explicit = explicit
self.version = version
self.tags = tags
class DocumentEndEvent(Event):
def __init__(self, start_mark=None, end_mark=None,
explicit=None):
self.start_mark = start_mark
self.end_mark = end_mark
self.explicit = explicit
class AliasEvent(NodeEvent):
pass
class ScalarEvent(NodeEvent):
def __init__(self, anchor, tag, implicit, value,
start_mark=None, end_mark=None, style=None):
self.anchor = anchor
self.tag = tag
self.implicit = implicit
self.value = value
self.start_mark = start_mark
self.end_mark = end_mark
self.style = style
class SequenceStartEvent(CollectionStartEvent):
pass
class SequenceEndEvent(CollectionEndEvent):
pass
class MappingStartEvent(CollectionStartEvent):
pass
class MappingEndEvent(CollectionEndEvent):
pass
| bsd-3-clause |
bixbydev/Bixby | google/gdata-2.0.18/src/gdata/tlslite/utils/dateFuncs.py | 407 | 2181 |
import os
#Functions for manipulating datetime objects
#CCYY-MM-DDThh:mm:ssZ
def parseDateClass(s):
year, month, day = s.split("-")
day, tail = day[:2], day[2:]
hour, minute, second = tail[1:].split(":")
second = second[:2]
year, month, day = int(year), int(month), int(day)
hour, minute, second = int(hour), int(minute), int(second)
return createDateClass(year, month, day, hour, minute, second)
if os.name != "java":
from datetime import datetime, timedelta
#Helper functions for working with a date/time class
def createDateClass(year, month, day, hour, minute, second):
return datetime(year, month, day, hour, minute, second)
def printDateClass(d):
#Split off fractional seconds, append 'Z'
return d.isoformat().split(".")[0]+"Z"
def getNow():
return datetime.utcnow()
def getHoursFromNow(hours):
return datetime.utcnow() + timedelta(hours=hours)
def getMinutesFromNow(minutes):
return datetime.utcnow() + timedelta(minutes=minutes)
def isDateClassExpired(d):
return d < datetime.utcnow()
def isDateClassBefore(d1, d2):
return d1 < d2
else:
#Jython 2.1 is missing lots of python 2.3 stuff,
#which we have to emulate here:
import java
import jarray
def createDateClass(year, month, day, hour, minute, second):
c = java.util.Calendar.getInstance()
c.setTimeZone(java.util.TimeZone.getTimeZone("UTC"))
c.set(year, month-1, day, hour, minute, second)
return c
def printDateClass(d):
return "%04d-%02d-%02dT%02d:%02d:%02dZ" % \
(d.get(d.YEAR), d.get(d.MONTH)+1, d.get(d.DATE), \
d.get(d.HOUR_OF_DAY), d.get(d.MINUTE), d.get(d.SECOND))
def getNow():
c = java.util.Calendar.getInstance()
c.setTimeZone(java.util.TimeZone.getTimeZone("UTC"))
c.get(c.HOUR) #force refresh?
return c
def getHoursFromNow(hours):
d = getNow()
d.add(d.HOUR, hours)
return d
def isDateClassExpired(d):
n = getNow()
return d.before(n)
def isDateClassBefore(d1, d2):
return d1.before(d2)
| gpl-3.0 |
jburger424/MediaQueueHCI | m-q-env/lib/python3.4/site-packages/pip/_vendor/distlib/util.py | 190 | 51230 | #
# Copyright (C) 2012-2013 The Python Software Foundation.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
import codecs
from collections import deque
import contextlib
import csv
from glob import iglob as std_iglob
import io
import json
import logging
import os
import py_compile
import re
import shutil
import socket
import ssl
import subprocess
import sys
import tarfile
import tempfile
try:
import threading
except ImportError:
import dummy_threading as threading
import time
from . import DistlibException
from .compat import (string_types, text_type, shutil, raw_input, StringIO,
cache_from_source, urlopen, httplib, xmlrpclib, splittype,
HTTPHandler, HTTPSHandler as BaseHTTPSHandler,
BaseConfigurator, valid_ident, Container, configparser,
URLError, match_hostname, CertificateError, ZipFile)
logger = logging.getLogger(__name__)
#
# Requirement parsing code for name + optional constraints + optional extras
#
# e.g. 'foo >= 1.2, < 2.0 [bar, baz]'
#
# The regex can seem a bit hairy, so we build it up out of smaller pieces
# which are manageable.
#
COMMA = r'\s*,\s*'
COMMA_RE = re.compile(COMMA)
IDENT = r'(\w|[.-])+'
EXTRA_IDENT = r'(\*|:(\*|\w+):|' + IDENT + ')'
VERSPEC = IDENT + r'\*?'
RELOP = '([<>=!~]=)|[<>]'
#
# The first relop is optional - if absent, will be taken as '~='
#
BARE_CONSTRAINTS = ('(' + RELOP + r')?\s*(' + VERSPEC + ')(' + COMMA + '(' +
RELOP + r')\s*(' + VERSPEC + '))*')
DIRECT_REF = '(from\s+(?P<diref>.*))'
#
# Either the bare constraints or the bare constraints in parentheses
#
CONSTRAINTS = (r'\(\s*(?P<c1>' + BARE_CONSTRAINTS + '|' + DIRECT_REF +
r')\s*\)|(?P<c2>' + BARE_CONSTRAINTS + '\s*)')
EXTRA_LIST = EXTRA_IDENT + '(' + COMMA + EXTRA_IDENT + ')*'
EXTRAS = r'\[\s*(?P<ex>' + EXTRA_LIST + r')?\s*\]'
REQUIREMENT = ('(?P<dn>' + IDENT + r')\s*(' + EXTRAS + r'\s*)?(\s*' +
CONSTRAINTS + ')?$')
REQUIREMENT_RE = re.compile(REQUIREMENT)
#
# Used to scan through the constraints
#
RELOP_IDENT = '(?P<op>' + RELOP + r')\s*(?P<vn>' + VERSPEC + ')'
RELOP_IDENT_RE = re.compile(RELOP_IDENT)
def parse_requirement(s):
def get_constraint(m):
d = m.groupdict()
return d['op'], d['vn']
result = None
m = REQUIREMENT_RE.match(s)
if m:
d = m.groupdict()
name = d['dn']
cons = d['c1'] or d['c2']
if not d['diref']:
url = None
else:
# direct reference
cons = None
url = d['diref'].strip()
if not cons:
cons = None
constr = ''
rs = d['dn']
else:
if cons[0] not in '<>!=':
cons = '~=' + cons
iterator = RELOP_IDENT_RE.finditer(cons)
cons = [get_constraint(m) for m in iterator]
rs = '%s (%s)' % (name, ', '.join(['%s %s' % con for con in cons]))
if not d['ex']:
extras = None
else:
extras = COMMA_RE.split(d['ex'])
result = Container(name=name, constraints=cons, extras=extras,
requirement=rs, source=s, url=url)
return result
def get_resources_dests(resources_root, rules):
"""Find destinations for resources files"""
def get_rel_path(base, path):
# normalizes and returns a lstripped-/-separated path
base = base.replace(os.path.sep, '/')
path = path.replace(os.path.sep, '/')
assert path.startswith(base)
return path[len(base):].lstrip('/')
destinations = {}
for base, suffix, dest in rules:
prefix = os.path.join(resources_root, base)
for abs_base in iglob(prefix):
abs_glob = os.path.join(abs_base, suffix)
for abs_path in iglob(abs_glob):
resource_file = get_rel_path(resources_root, abs_path)
if dest is None: # remove the entry if it was here
destinations.pop(resource_file, None)
else:
rel_path = get_rel_path(abs_base, abs_path)
rel_dest = dest.replace(os.path.sep, '/').rstrip('/')
destinations[resource_file] = rel_dest + '/' + rel_path
return destinations
def in_venv():
if hasattr(sys, 'real_prefix'):
# virtualenv venvs
result = True
else:
# PEP 405 venvs
result = sys.prefix != getattr(sys, 'base_prefix', sys.prefix)
return result
def get_executable():
if sys.platform == 'darwin' and ('__PYVENV_LAUNCHER__'
in os.environ):
result = os.environ['__PYVENV_LAUNCHER__']
else:
result = sys.executable
return result
def proceed(prompt, allowed_chars, error_prompt=None, default=None):
p = prompt
while True:
s = raw_input(p)
p = prompt
if not s and default:
s = default
if s:
c = s[0].lower()
if c in allowed_chars:
break
if error_prompt:
p = '%c: %s\n%s' % (c, error_prompt, prompt)
return c
def extract_by_key(d, keys):
if isinstance(keys, string_types):
keys = keys.split()
result = {}
for key in keys:
if key in d:
result[key] = d[key]
return result
def read_exports(stream):
if sys.version_info[0] >= 3:
# needs to be a text stream
stream = codecs.getreader('utf-8')(stream)
# Try to load as JSON, falling back on legacy format
data = stream.read()
stream = StringIO(data)
try:
data = json.load(stream)
result = data['exports']
for group, entries in result.items():
for k, v in entries.items():
s = '%s = %s' % (k, v)
entry = get_export_entry(s)
assert entry is not None
entries[k] = entry
return result
except Exception:
stream.seek(0, 0)
cp = configparser.ConfigParser()
if hasattr(cp, 'read_file'):
cp.read_file(stream)
else:
cp.readfp(stream)
result = {}
for key in cp.sections():
result[key] = entries = {}
for name, value in cp.items(key):
s = '%s = %s' % (name, value)
entry = get_export_entry(s)
assert entry is not None
#entry.dist = self
entries[name] = entry
return result
def write_exports(exports, stream):
if sys.version_info[0] >= 3:
# needs to be a text stream
stream = codecs.getwriter('utf-8')(stream)
cp = configparser.ConfigParser()
for k, v in exports.items():
# TODO check k, v for valid values
cp.add_section(k)
for entry in v.values():
if entry.suffix is None:
s = entry.prefix
else:
s = '%s:%s' % (entry.prefix, entry.suffix)
if entry.flags:
s = '%s [%s]' % (s, ', '.join(entry.flags))
cp.set(k, entry.name, s)
cp.write(stream)
@contextlib.contextmanager
def tempdir():
td = tempfile.mkdtemp()
try:
yield td
finally:
shutil.rmtree(td)
@contextlib.contextmanager
def chdir(d):
cwd = os.getcwd()
try:
os.chdir(d)
yield
finally:
os.chdir(cwd)
@contextlib.contextmanager
def socket_timeout(seconds=15):
cto = socket.getdefaulttimeout()
try:
socket.setdefaulttimeout(seconds)
yield
finally:
socket.setdefaulttimeout(cto)
class cached_property(object):
def __init__(self, func):
self.func = func
#for attr in ('__name__', '__module__', '__doc__'):
# setattr(self, attr, getattr(func, attr, None))
def __get__(self, obj, cls=None):
if obj is None:
return self
value = self.func(obj)
object.__setattr__(obj, self.func.__name__, value)
#obj.__dict__[self.func.__name__] = value = self.func(obj)
return value
def convert_path(pathname):
"""Return 'pathname' as a name that will work on the native filesystem.
The path is split on '/' and put back together again using the current
directory separator. Needed because filenames in the setup script are
always supplied in Unix style, and have to be converted to the local
convention before we can actually use them in the filesystem. Raises
ValueError on non-Unix-ish systems if 'pathname' either starts or
ends with a slash.
"""
if os.sep == '/':
return pathname
if not pathname:
return pathname
if pathname[0] == '/':
raise ValueError("path '%s' cannot be absolute" % pathname)
if pathname[-1] == '/':
raise ValueError("path '%s' cannot end with '/'" % pathname)
paths = pathname.split('/')
while os.curdir in paths:
paths.remove(os.curdir)
if not paths:
return os.curdir
return os.path.join(*paths)
class FileOperator(object):
    """Filesystem helper honouring a dry-run flag.

    When ``record`` is switched on, files written and directories
    created are tracked so the changes can later be commit()ted or
    rolled back.
    """
    def __init__(self, dry_run=False):
        self.dry_run = dry_run
        self.ensured = set()  # dirs already handled by ensure_dir()
        self._init_record()
    def _init_record(self):
        # Reset recording to the 'off' state with empty change sets.
        self.record = False
        self.files_written = set()
        self.dirs_created = set()
    def record_as_written(self, path):
        # Track *path* only while recording is enabled.
        if self.record:
            self.files_written.add(path)
    def newer(self, source, target):
        """Tell if the target is newer than the source.
        Returns true if 'source' exists and is more recently modified than
        'target', or if 'source' exists and 'target' doesn't.
        Returns false if both exist and 'target' is the same age or younger
        than 'source'. Raise PackagingFileError if 'source' does not exist.
        Note that this test is not very accurate: files created in the same
        second will have the same "age".
        """
        if not os.path.exists(source):
            raise DistlibException("file '%r' does not exist" %
                                   os.path.abspath(source))
        if not os.path.exists(target):
            return True
        return os.stat(source).st_mtime > os.stat(target).st_mtime
    def copy_file(self, infile, outfile, check=True):
        """Copy a file respecting dry-run and force flags.
        """
        self.ensure_dir(os.path.dirname(outfile))
        logger.info('Copying %s to %s', infile, outfile)
        if not self.dry_run:
            msg = None
            if check:
                # Refuse to clobber symlinks or non-regular files.
                if os.path.islink(outfile):
                    msg = '%s is a symlink' % outfile
                elif os.path.exists(outfile) and not os.path.isfile(outfile):
                    msg = '%s is a non-regular file' % outfile
            if msg:
                raise ValueError(msg + ' which would be overwritten')
            shutil.copyfile(infile, outfile)
        self.record_as_written(outfile)
    def copy_stream(self, instream, outfile, encoding=None):
        """Copy the open stream *instream* to the file *outfile*.

        With no *encoding* the copy is binary; otherwise the data is
        written through a codecs writer using that encoding.
        """
        assert not os.path.isdir(outfile)
        self.ensure_dir(os.path.dirname(outfile))
        logger.info('Copying stream %s to %s', instream, outfile)
        if not self.dry_run:
            if encoding is None:
                outstream = open(outfile, 'wb')
            else:
                outstream = codecs.open(outfile, 'w', encoding=encoding)
            try:
                shutil.copyfileobj(instream, outstream)
            finally:
                outstream.close()
        self.record_as_written(outfile)
    def write_binary_file(self, path, data):
        """Write the bytes *data* to *path* (honouring dry-run)."""
        self.ensure_dir(os.path.dirname(path))
        if not self.dry_run:
            with open(path, 'wb') as f:
                f.write(data)
        self.record_as_written(path)
    def write_text_file(self, path, data, encoding):
        """Write the text *data* to *path*, encoded with *encoding*."""
        self.ensure_dir(os.path.dirname(path))
        if not self.dry_run:
            with open(path, 'wb') as f:
                f.write(data.encode(encoding))
        self.record_as_written(path)
    def set_mode(self, bits, mask, files):
        """OR *bits* into each file's mode, then AND with *mask* (POSIX only)."""
        if os.name == 'posix':
            # Set the executable bits (owner, group, and world) on
            # all the files specified.
            for f in files:
                if self.dry_run:
                    logger.info("changing mode of %s", f)
                else:
                    mode = (os.stat(f).st_mode | bits) & mask
                    logger.info("changing mode of %s to %o", f, mode)
                    os.chmod(f, mode)
    # Convenience wrapper: mark the given files executable.
    set_executable_mode = lambda s, f: s.set_mode(0o555, 0o7777, f)
    def ensure_dir(self, path):
        """Create *path* (and any missing parents), at most once per instance."""
        path = os.path.abspath(path)
        if path not in self.ensured and not os.path.exists(path):
            self.ensured.add(path)
            d, f = os.path.split(path)
            # Recursively create parents before this directory.
            self.ensure_dir(d)
            logger.info('Creating %s' % path)
            if not self.dry_run:
                os.mkdir(path)
            if self.record:
                self.dirs_created.add(path)
    def byte_compile(self, path, optimize=False, force=False, prefix=None):
        """Byte-compile *path* into its cache location and return that path.

        *prefix*, when given, is stripped from the path used in error
        diagnostics (it must actually be a prefix of *path*).
        """
        dpath = cache_from_source(path, not optimize)
        logger.info('Byte-compiling %s to %s', path, dpath)
        if not self.dry_run:
            if force or self.newer(path, dpath):
                if not prefix:
                    diagpath = None
                else:
                    assert path.startswith(prefix)
                    diagpath = path[len(prefix):]
            # NOTE(review): when force is false and dpath is already up to
            # date, 'diagpath' is never assigned yet compile() is still
            # invoked - looks like a latent NameError; confirm whether this
            # call should sit inside the branch above.
            py_compile.compile(path, dpath, diagpath, True)     # raise error
        self.record_as_written(dpath)
        return dpath
    def ensure_removed(self, path):
        """Delete *path* - file, link or directory tree - if it exists."""
        if os.path.exists(path):
            if os.path.isdir(path) and not os.path.islink(path):
                logger.debug('Removing directory tree at %s', path)
                if not self.dry_run:
                    shutil.rmtree(path)
                if self.record:
                    if path in self.dirs_created:
                        self.dirs_created.remove(path)
            else:
                if os.path.islink(path):
                    s = 'link'
                else:
                    s = 'file'
                logger.debug('Removing %s %s', s, path)
                if not self.dry_run:
                    os.remove(path)
                if self.record:
                    if path in self.files_written:
                        self.files_written.remove(path)
    def is_writable(self, path):
        """Return True if *path*, or its nearest existing ancestor, is writable."""
        result = False
        while not result:
            if os.path.exists(path):
                result = os.access(path, os.W_OK)
                break
            parent = os.path.dirname(path)
            if parent == path:
                # Reached the filesystem root without finding anything.
                break
            path = parent
        return result
    def commit(self):
        """
        Commit recorded changes, turn off recording, return
        changes.
        """
        assert self.record
        result = self.files_written, self.dirs_created
        self._init_record()
        return result
    def rollback(self):
        """Undo recorded changes: remove written files, then created
        directories (deepest first), and reset recording."""
        if not self.dry_run:
            for f in list(self.files_written):
                if os.path.exists(f):
                    os.remove(f)
            # dirs should all be empty now, except perhaps for
            # __pycache__ subdirs
            # reverse so that subdirs appear before their parents
            dirs = sorted(self.dirs_created, reverse=True)
            for d in dirs:
                flist = os.listdir(d)
                if flist:
                    assert flist == ['__pycache__']
                    sd = os.path.join(d, flist[0])
                    os.rmdir(sd)
                os.rmdir(d)     # should fail if non-empty
        self._init_record()
def resolve(module_name, dotted_path):
    """Resolve a dotted attribute path within a module.

    :param module_name: The (possibly dotted) name of the module to look in.
    :param dotted_path: A dotted attribute path within the module, or None
                        to return the module itself.
    :return: The resolved module or attribute.
    :raises ImportError: If the module cannot be imported.
    :raises AttributeError: If the attribute path cannot be resolved.
    """
    import importlib
    if module_name in sys.modules:
        mod = sys.modules[module_name]
    else:
        # __import__('a.b') returns the top-level package 'a', not 'a.b',
        # so attribute lookups on un-imported submodules would fail;
        # import_module returns the submodule itself.
        mod = importlib.import_module(module_name)
    if dotted_path is None:
        result = mod
    else:
        parts = dotted_path.split('.')
        result = getattr(mod, parts.pop(0))
        for p in parts:
            result = getattr(result, p)
    return result
class ExportEntry(object):
    """One export ('entry point'): NAME = PREFIX[:SUFFIX] [FLAGS]."""
    def __init__(self, name, prefix, suffix, flags):
        self.name = name
        self.prefix = prefix
        self.suffix = suffix
        self.flags = flags

    @cached_property
    def value(self):
        """Resolve (once) and return the object this entry refers to."""
        return resolve(self.prefix, self.suffix)

    def __repr__(self):
        return '<ExportEntry %s = %s:%s %s>' % (self.name, self.prefix,
                                                self.suffix, self.flags)

    def __eq__(self, other):
        if not isinstance(other, ExportEntry):
            return False
        return (self.name == other.name and
                self.prefix == other.prefix and
                self.suffix == other.suffix and
                self.flags == other.flags)

    # Equality is value-based but hashing stays identity-based,
    # matching the original class.
    __hash__ = object.__hash__
ENTRY_RE = re.compile(r'''(?P<name>(\w|[-.])+)
\s*=\s*(?P<callable>(\w+)([:\.]\w+)*)
\s*(\[\s*(?P<flags>\w+(=\w+)?(,\s*\w+(=\w+)?)*)\s*\])?
''', re.VERBOSE)
def get_export_entry(specification):
    """Parse *specification* into an ExportEntry, or return None.

    A specification looks like ``name = prefix[:suffix] [flag, ...]``.
    DistlibException is raised for recognisably malformed strings
    (stray brackets, or more than one ':').
    """
    m = ENTRY_RE.search(specification)
    if m is None:
        # Unparseable; brackets in an unparseable spec mean a bad flags
        # section rather than a plain non-entry string.
        if '[' in specification or ']' in specification:
            raise DistlibException('Invalid specification '
                                   '%r' % specification)
        return None
    d = m.groupdict()
    path = d['callable']
    colons = path.count(':')
    if colons > 1:
        raise DistlibException('Invalid specification '
                               '%r' % specification)
    if colons:
        prefix, suffix = path.split(':')
    else:
        prefix, suffix = path, None
    flags = d['flags']
    if flags is None:
        if '[' in specification or ']' in specification:
            raise DistlibException('Invalid specification '
                                   '%r' % specification)
        flags = []
    else:
        flags = [f.strip() for f in flags.split(',')]
    return ExportEntry(d['name'], prefix, suffix, flags)
def get_cache_base(suffix=None):
    """
    Return the default base location for distlib caches. If the directory does
    not exist, it is created. Use the suffix provided for the base directory,
    and default to '.distlib' if it isn't provided.
    On Windows, if LOCALAPPDATA is defined in the environment, then it is
    assumed to be a directory, and will be the parent directory of the result.
    On POSIX, and on Windows if LOCALAPPDATA is not defined, the user's home
    directory - using os.expanduser('~') - will be the parent directory of
    the result.
    The result is just the directory '.distlib' in the parent directory as
    determined above, or with the name specified with ``suffix``.
    """
    if suffix is None:
        suffix = '.distlib'
    if os.name == 'nt' and 'LOCALAPPDATA' in os.environ:
        parent = os.path.expandvars('$localappdata')
    else:
        # Assume posix, or old Windows
        parent = os.path.expanduser('~')
    # 'isdir' rather than 'exists': a plain file of the same name is just
    # as unusable as a missing directory.
    if os.path.isdir(parent):
        usable = os.access(parent, os.W_OK)
        if not usable:
            logger.warning('Directory exists but is not writable: %s', parent)
    else:
        try:
            os.makedirs(parent)
            usable = True
        except OSError:
            logger.warning('Unable to create %s', parent, exc_info=True)
            usable = False
    if not usable:
        parent = tempfile.mkdtemp()
        logger.warning('Default location unusable, using %s', parent)
    return os.path.join(parent, suffix)
def path_to_cache_dir(path):
    """
    Convert an absolute path to a directory name for use in a cache.
    The algorithm used is:
    #. On Windows, any ``':'`` in the drive is replaced with ``'---'``.
    #. Any occurrence of ``os.sep`` is replaced with ``'--'``.
    #. ``'.cache'`` is appended.
    """
    drive, rest = os.path.splitdrive(os.path.abspath(path))
    if drive:
        drive = drive.replace(':', '---')
    return drive + rest.replace(os.sep, '--') + '.cache'
def ensure_slash(s):
    """Return *s* with a trailing '/' appended if not already present."""
    return s if s.endswith('/') else s + '/'
def parse_credentials(netloc):
    """Split a URL network location into (username, password, host).

    Per RFC 3986 the userinfo part is everything before the *last* '@',
    so splitting uses rsplit (a password may contain a stray '@');
    previously the first '@' was used, mis-parsing such values.
    Missing components are returned as None.
    """
    username = password = None
    if '@' in netloc:
        prefix, netloc = netloc.rsplit('@', 1)
        if ':' not in prefix:
            username = prefix
        else:
            username, password = prefix.split(':', 1)
    return username, password, netloc
def get_process_umask():
    """Return the process umask without permanently changing it."""
    # The umask can only be read by setting it: set a dummy value and
    # immediately put the original back.
    current = os.umask(0o22)
    os.umask(current)
    return current
def is_string_sequence(seq):
    """Return True if every element of the non-empty *seq* is a string."""
    seen_any = False
    for item in seq:
        seen_any = True
        if not isinstance(item, string_types):
            return False
    # Matches the original contract: an empty sequence is a caller error.
    assert seen_any
    return True
# Matches 'name-version': a project name (alphanumeric words separated
# by '.' or '-') followed by '-' and a version string; case-insensitive.
PROJECT_NAME_AND_VERSION = re.compile('([a-z0-9_]+([.-][a-z_][a-z0-9_]*)*)-'
                                      '([a-z0-9_.+-]+)', re.I)
# Matches a trailing '-pyX[.Y]' python-version tag in a filename.
PYTHON_VERSION = re.compile(r'-py(\d\.?\d?)')
def split_filename(filename, project_name=None):
    """
    Extract name, version, python version from a filename (no extension)
    Return name, version, pyver or None
    """
    pyver = None
    m = PYTHON_VERSION.search(filename)
    if m:
        # Strip a trailing '-pyX.Y' tag and remember the version.
        pyver = m.group(1)
        filename = filename[:m.start()]
    if project_name and len(filename) > len(project_name) + 1:
        # Prefer an exact match against the known project name.
        m = re.match(re.escape(project_name) + r'\b', filename)
        if m:
            n = m.end()
            return filename[:n], filename[n + 1:], pyver
    m = PROJECT_NAME_AND_VERSION.match(filename)
    if m:
        return m.group(1), m.group(3), pyver
    return None
# Allow spaces in name because of legacy dists like "Twisted Core"
# Matches e.g. "foo (1.0)": group 'name' is the project name, group
# 'ver' the parenthesised version.
NAME_VERSION_RE = re.compile(r'(?P<name>[\w .-]+)\s*'
                             r'\(\s*(?P<ver>[^\s)]+)\)$')
def parse_name_and_version(p):
    """
    A utility method used to get name and version from a string.
    From e.g. a Provides-Dist value.
    :param p: A value in a form 'foo (1.0)'
    :return: The name and version as a tuple.
    """
    m = NAME_VERSION_RE.match(p)
    if m is None:
        raise DistlibException('Ill-formed name/version string: \'%s\'' % p)
    groups = m.groupdict()
    return groups['name'].strip().lower(), groups['ver']
def get_extras(requested, available):
    """Resolve a requested set of extras against the available ones.

    '*' selects every available extra and '-name' deselects one.
    Unknown names are still added (or silently skipped, for '-name')
    after logging a warning.
    """
    result = set()
    wanted = set(requested or [])
    known = set(available or [])
    if '*' in wanted:
        wanted.discard('*')
        result |= known
    for item in wanted:
        if item == '-':
            # A lone '-' is kept verbatim, as in the original behaviour.
            result.add(item)
        elif item.startswith('-'):
            name = item[1:]
            if name not in known:
                logger.warning('undeclared extra: %s' % name)
            result.discard(name)
        else:
            if item not in known:
                logger.warning('undeclared extra: %s' % item)
            result.add(item)
    return result
#
# Extended metadata functionality
#
def _get_external_data(url):
    """Fetch *url* and decode the body as JSON, returning {} on any failure."""
    result = {}
    try:
        # urlopen might fail if it runs into redirections,
        # because of Python issue #13696. Fixed in locators
        # using a custom redirect handler.
        resp = urlopen(url)
        headers = resp.info()
        if headers.get('Content-Type') != 'application/json':
            logger.debug('Unexpected response for JSON request')
        else:
            result = json.load(codecs.getreader('utf-8')(resp))
    except Exception as e:
        logger.exception('Failed to get external data for %s: %s', url, e)
    return result
def get_project_data(name):
    """Fetch the per-project JSON metadata from the red-dove mirror."""
    url = ('https://www.red-dove.com/pypi/projects/'
           '%s/%s/project.json' % (name[0].upper(), name))
    return _get_external_data(url)
def get_package_data(name, version):
    """Fetch the JSON metadata for one release from the red-dove mirror."""
    url = ('https://www.red-dove.com/pypi/projects/'
           '%s/%s/package-%s.json' % (name[0].upper(), name, version))
    return _get_external_data(url)
class Cache(object):
    """
    A class implementing a cache for resources that need to live in the file system
    e.g. shared libraries. This class was moved from resources to here because it
    could be used by other modules, e.g. the wheel module.
    """
    def __init__(self, base):
        """
        Initialise an instance.
        :param base: The base directory where the cache should be located.
        """
        # 'isdir' rather than 'exists': a plain file with the cache's
        # name should surface as an error from os.makedirs.
        if not os.path.isdir(base):
            os.makedirs(base)
        # Warn when group/other have any access bits set.
        if (os.stat(base).st_mode & 0o77) != 0:
            logger.warning('Directory \'%s\' is not private', base)
        self.base = os.path.abspath(os.path.normpath(base))

    def prefix_to_dir(self, prefix):
        """
        Converts a resource prefix to a directory name in the cache.
        """
        return path_to_cache_dir(prefix)

    def clear(self):
        """
        Clear the cache, returning the paths that could not be removed.
        """
        survivors = []
        for name in os.listdir(self.base):
            full = os.path.join(self.base, name)
            try:
                if os.path.islink(full) or os.path.isfile(full):
                    os.remove(full)
                elif os.path.isdir(full):
                    shutil.rmtree(full)
            except Exception:
                survivors.append(full)
        return survivors
class EventMixin(object):
    """
    A very simple publish/subscribe system.
    """
    def __init__(self):
        self._subscribers = {}

    def add(self, event, subscriber, append=True):
        """
        Add a subscriber for an event.
        :param event: The name of an event.
        :param subscriber: The subscriber to be added (and called when the
                           event is published).
        :param append: Whether to append or prepend the subscriber to an
                       existing subscriber list for the event.
        """
        queue = self._subscribers.setdefault(event, deque())
        if append:
            queue.append(subscriber)
        else:
            queue.appendleft(subscriber)

    def remove(self, event, subscriber):
        """
        Remove a subscriber for an event.
        :param event: The name of an event.
        :param subscriber: The subscriber to be removed.
        """
        try:
            queue = self._subscribers[event]
        except KeyError:
            raise ValueError('No subscribers: %r' % event)
        queue.remove(subscriber)

    def get_subscribers(self, event):
        """
        Return an iterator for the subscribers for an event.
        :param event: The event to return subscribers for.
        """
        return iter(self._subscribers.get(event, ()))

    def publish(self, event, *args, **kwargs):
        """
        Publish a event and return a list of values returned by its
        subscribers.
        :param event: The event to publish.
        :param args: The positional arguments to pass to the event's
                     subscribers.
        :param kwargs: The keyword arguments to pass to the event's
                       subscribers.
        """
        results = []
        for subscriber in self.get_subscribers(event):
            try:
                outcome = subscriber(event, *args, **kwargs)
            except Exception:
                # A failing subscriber contributes None rather than
                # aborting publication.
                logger.exception('Exception during event publication')
                outcome = None
            results.append(outcome)
        logger.debug('publish %s: args = %s, kwargs = %s, result = %s',
                     event, args, kwargs, results)
        return results
#
# Simple sequencing
#
class Sequencer(object):
    """Tracks 'pred must run before succ' constraints between steps and
    can yield a valid ordering to reach a given final step."""
    def __init__(self):
        self._preds = {}    # succ -> set of direct predecessors
        self._succs = {}    # pred -> set of direct successors
        self._nodes = set()  # nodes with no preds/succs
    def add_node(self, node):
        # Register a constraint-free node.
        self._nodes.add(node)
    def remove_node(self, node, edges=False):
        """Forget *node*; with edges=True, also drop every edge touching it."""
        if node in self._nodes:
            self._nodes.remove(node)
        if edges:
            for p in set(self._preds.get(node, ())):
                self.remove(p, node)
            for s in set(self._succs.get(node, ())):
                self.remove(node, s)
            # Remove empties
            for k, v in list(self._preds.items()):
                if not v:
                    del self._preds[k]
            for k, v in list(self._succs.items()):
                if not v:
                    del self._succs[k]
    def add(self, pred, succ):
        """Record that *pred* must come before *succ*."""
        assert pred != succ
        self._preds.setdefault(succ, set()).add(pred)
        self._succs.setdefault(pred, set()).add(succ)
    def remove(self, pred, succ):
        """Remove the pred->succ constraint; ValueError if not present."""
        assert pred != succ
        try:
            preds = self._preds[succ]
            succs = self._succs[pred]
        except KeyError:
            raise ValueError('%r not a successor of anything' % succ)
        try:
            # set.remove raises KeyError when the edge endpoint is absent.
            preds.remove(pred)
            succs.remove(succ)
        except KeyError:
            raise ValueError('%r not a successor of %r' % (succ, pred))
    def is_step(self, step):
        # Known either as an edge endpoint or as a free-standing node.
        return (step in self._preds or step in self._succs or
                step in self._nodes)
    def get_steps(self, final):
        """Return the steps needed to reach *final*, in execution order.

        A breadth-first walk backwards over predecessors collects steps;
        the list is reversed on return so predecessors come first.
        """
        if not self.is_step(final):
            raise ValueError('Unknown: %r' % final)
        result = []
        todo = []
        seen = set()
        todo.append(final)
        while todo:
            step = todo.pop(0)
            if step in seen:
                # if a step was already seen,
                # move it to the end (so it will appear earlier
                # when reversed on return) ... but not for the
                # final step, as that would be confusing for
                # users
                if step != final:
                    result.remove(step)
                    result.append(step)
            else:
                seen.add(step)
                result.append(step)
                preds = self._preds.get(step, ())
                todo.extend(preds)
        return reversed(result)
    @property
    def strong_connections(self):
        """Strongly connected components of the successor graph (Tarjan)."""
        #http://en.wikipedia.org/wiki/Tarjan%27s_strongly_connected_components_algorithm
        index_counter = [0]
        stack = []
        lowlinks = {}
        index = {}
        result = []
        graph = self._succs
        def strongconnect(node):
            # set the depth index for this node to the smallest unused index
            index[node] = index_counter[0]
            lowlinks[node] = index_counter[0]
            index_counter[0] += 1
            stack.append(node)
            # Consider successors
            try:
                successors = graph[node]
            except Exception:
                successors = []
            for successor in successors:
                if successor not in lowlinks:
                    # Successor has not yet been visited
                    strongconnect(successor)
                    lowlinks[node] = min(lowlinks[node],lowlinks[successor])
                elif successor in stack:
                    # the successor is in the stack and hence in the current
                    # strongly connected component (SCC)
                    lowlinks[node] = min(lowlinks[node],index[successor])
            # If `node` is a root node, pop the stack and generate an SCC
            if lowlinks[node] == index[node]:
                connected_component = []
                while True:
                    successor = stack.pop()
                    connected_component.append(successor)
                    if successor == node: break
                component = tuple(connected_component)
                # storing the result
                result.append(component)
        for node in graph:
            if node not in lowlinks:
                strongconnect(node)
        return result
    @property
    def dot(self):
        """Graphviz 'dot' source describing the constraint graph."""
        result = ['digraph G {']
        for succ in self._preds:
            preds = self._preds[succ]
            for pred in preds:
                result.append('  %s -> %s;' % (pred, succ))
        for node in self._nodes:
            result.append('  %s;' % node)
        result.append('}')
        return '\n'.join(result)
#
# Unarchiving functionality for zip, tar, tgz, tbz, whl
#
# Archive filename suffixes understood by unarchive().
ARCHIVE_EXTENSIONS = ('.tar.gz', '.tar.bz2', '.tar', '.zip',
                      '.tgz', '.tbz', '.whl')
def unarchive(archive_filename, dest_dir, format=None, check=True):
    """Unpack *archive_filename* (zip/whl/tar/tgz/tbz) into *dest_dir*.

    :param format: One of 'zip', 'tgz', 'tbz' or 'tar'; when None the
                   format is inferred from the filename.
    :param check: When true, every member path is validated so that a
                  malicious archive cannot write outside *dest_dir*.
    :raises ValueError: For an unknown format or an unsafe member path.
    """
    def check_path(path):
        # Reject absolute members and '..' escapes: the resolved path
        # must stay inside dest_dir.  (The previous check indexed
        # p[plen], which raised IndexError when a member resolved to
        # dest_dir itself, e.g. a '.' entry.)
        if not isinstance(path, text_type):
            path = path.decode('utf-8')
        p = os.path.abspath(os.path.join(dest_dir, path))
        if p != dest_dir and not p.startswith(dest_dir + os.sep):
            raise ValueError('path outside destination: %r' % p)

    dest_dir = os.path.abspath(dest_dir)
    archive = None
    if format is None:
        if archive_filename.endswith(('.zip', '.whl')):
            format = 'zip'
        elif archive_filename.endswith(('.tar.gz', '.tgz')):
            format = 'tgz'
        elif archive_filename.endswith(('.tar.bz2', '.tbz')):
            format = 'tbz'
        elif archive_filename.endswith('.tar'):
            format = 'tar'
        else:
            raise ValueError('Unknown format for %r' % archive_filename)
    # Derive the tarfile open mode from the format so that an explicitly
    # passed format works too (previously 'mode' was only assigned during
    # auto-detection, so e.g. format='tgz' raised NameError).
    tar_modes = {'tgz': 'r:gz', 'tbz': 'r:bz2', 'tar': 'r'}
    if format != 'zip' and format not in tar_modes:
        raise ValueError('Unknown format %r' % format)
    try:
        if format == 'zip':
            archive = ZipFile(archive_filename, 'r')
            if check:
                names = archive.namelist()
                for name in names:
                    check_path(name)
        else:
            archive = tarfile.open(archive_filename, tar_modes[format])
            if check:
                names = archive.getnames()
                for name in names:
                    check_path(name)
        if format != 'zip' and sys.version_info[0] < 3:
            # See Python issue 17153. If the dest path contains Unicode,
            # tarfile extraction fails on Python 2.x if a member path name
            # contains non-ASCII characters - it leads to an implicit
            # bytes -> unicode conversion using ASCII to decode.
            for tarinfo in archive.getmembers():
                if not isinstance(tarinfo.name, text_type):
                    tarinfo.name = tarinfo.name.decode('utf-8')
        archive.extractall(dest_dir)
    finally:
        if archive:
            archive.close()
def zip_dir(directory):
    """zip a directory tree into a BytesIO object"""
    buf = io.BytesIO()
    prefix_len = len(directory)
    with ZipFile(buf, "w") as zf:
        for root, _dirs, files in os.walk(directory):
            # Archive names are relative to *directory*.
            arc_root = root[prefix_len:]
            for fname in files:
                zf.write(os.path.join(root, fname),
                         os.path.join(arc_root, fname))
    return buf
#
# Simple progress bar
#
# Metric prefixes used by Progress.speed ('B/s', 'KB/s', 'MB/s', ...).
UNITS = ('', 'K', 'M', 'G','T','P')
class Progress(object):
    """Tracks progress of an operation between a minimum and an optional
    maximum value, deriving percentage, ETA and transfer speed."""
    unknown = 'UNKNOWN'
    def __init__(self, minval=0, maxval=100):
        # maxval of None means 'total extent unknown'.
        assert maxval is None or maxval >= minval
        self.min = self.cur = minval
        self.max = maxval
        self.started = None   # wall-clock time of the first update()
        self.elapsed = 0      # seconds since the first update()
        self.done = False
    def update(self, curval):
        """Record the new current value and refresh elapsed time."""
        assert self.min <= curval
        assert self.max is None or curval <= self.max
        self.cur = curval
        now = time.time()
        if self.started is None:
            self.started = now
        else:
            self.elapsed = now - self.started
    def increment(self, incr):
        """Advance the current value by the non-negative *incr*."""
        assert incr >= 0
        self.update(self.cur + incr)
    def start(self):
        """Initialise to the minimum; returns self for chaining."""
        self.update(self.min)
        return self
    def stop(self):
        """Jump to the maximum (when known) and mark the operation done."""
        if self.max is not None:
            self.update(self.max)
        self.done = True
    @property
    def maximum(self):
        # 'UNKNOWN' when no maximum was supplied.
        return self.unknown if self.max is None else self.max
    @property
    def percentage(self):
        """Completion as a fixed-width string, ' ?? %' when unknowable."""
        if self.done:
            result = '100 %'
        elif self.max is None:
            result = ' ?? %'
        else:
            v = 100.0 * (self.cur - self.min) / (self.max - self.min)
            result = '%3d %%' % v
        return result
    def format_duration(self, duration):
        # Precedence note: the condition parses as
        #   ((duration <= 0) and self.max is None) or self.cur == self.min
        # i.e. show '??:??:??' whenever no meaningful duration exists.
        if (duration <= 0) and self.max is None or self.cur == self.min:
            result = '??:??:??'
        #elif duration < 1:
        #    result = '--:--:--'
        else:
            result = time.strftime('%H:%M:%S', time.gmtime(duration))
        return result
    @property
    def ETA(self):
        """'ETA : hh:mm:ss' estimate, or 'Done: <elapsed>' when finished."""
        if self.done:
            prefix = 'Done'
            t = self.elapsed
            #import pdb; pdb.set_trace()
        else:
            prefix = 'ETA '
            if self.max is None:
                t = -1
            elif self.elapsed == 0 or (self.cur == self.min):
                t = 0
            else:
                #import pdb; pdb.set_trace()
                # Remaining time = (total/progress - 1) * elapsed,
                # assuming the rate so far stays constant.
                t = float(self.max - self.min)
                t /= self.cur - self.min
                t = (t - 1) * self.elapsed
        return '%s: %s' % (prefix, self.format_duration(t))
    @property
    def speed(self):
        """Progress rate as a human-readable '<n> <unit>B/s' string."""
        if self.elapsed == 0:
            result = 0.0
        else:
            result = (self.cur - self.min) / self.elapsed
        # 'unit' deliberately leaks from the loop into the format below.
        for unit in UNITS:
            if result < 1000:
                break
            result /= 1000.0
        return '%d %sB/s' % (result, unit)
#
# Glob functionality
#
# {a,b,c} alternation groups in extended glob patterns.
RICH_GLOB = re.compile(r'\{([^}]*)\}')
# '**' adjoining anything other than a path separator, ',' or brace:
# recursive globs must stand alone as a path component.
_CHECK_RECURSIVE_GLOB = re.compile(r'[^/\\,{]\*\*|\*\*[^/\\,}]')
# An unbalanced '{' or '}' in the pattern.
_CHECK_MISMATCH_SET = re.compile(r'^[^{]*\}|\{[^}]*$')
def iglob(path_glob):
    """Extended globbing function that supports ** and {opt1,opt2,opt3}."""
    # Validate the pattern before delegating to the recursive worker.
    for pattern, message in (
            (_CHECK_RECURSIVE_GLOB,
             """invalid glob %r: recursive glob "**" must be used alone"""),
            (_CHECK_MISMATCH_SET,
             """invalid glob %r: mismatching set marker '{' or '}'""")):
        if pattern.search(path_glob):
            raise ValueError(message % path_glob)
    return _iglob(path_glob)
def _iglob(path_glob):
    """Recursive worker for iglob(): expands one {...} set, then '**'."""
    pieces = RICH_GLOB.split(path_glob, 1)
    if len(pieces) > 1:
        # Exactly one {a,b,...} set was split off: recurse per option.
        assert len(pieces) == 3, pieces
        prefix, options, suffix = pieces
        for choice in options.split(','):
            for path in _iglob(''.join((prefix, choice, suffix))):
                yield path
    elif '**' not in path_glob:
        for item in std_iglob(path_glob):
            yield item
    else:
        prefix, radical = path_glob.split('**', 1)
        if prefix == '':
            prefix = '.'
        if radical == '':
            radical = '*'
        else:
            # we support both '/' and '\\' after '**'
            radical = radical.lstrip('/')
            radical = radical.lstrip('\\')
        for root, _dirs, _files in os.walk(prefix):
            root = os.path.normpath(root)
            for fn in _iglob(os.path.join(root, radical)):
                yield fn
#
# HTTPSConnection which verifies certificates/matches domains
#
class HTTPSConnection(httplib.HTTPSConnection):
    """HTTPSConnection which can verify the server certificate against a
    CA bundle and match the certificate against the requested host."""
    ca_certs = None # set this to the path to the certs file (.pem)
    check_domain = True # only used if ca_certs is not None
    # noinspection PyPropertyAccess
    def connect(self):
        """Connect the socket, wrap it in TLS and optionally verify the peer."""
        sock = socket.create_connection((self.host, self.port), self.timeout)
        if getattr(self, '_tunnel_host', False):
            self.sock = sock
            self._tunnel()
        if not hasattr(ssl, 'SSLContext'):
            # For 2.x
            # NOTE(review): ssl.wrap_socket was removed in Python 3.12;
            # this branch is only reachable on old interpreters that
            # lack SSLContext - confirm it is still needed.
            if self.ca_certs:
                cert_reqs = ssl.CERT_REQUIRED
            else:
                cert_reqs = ssl.CERT_NONE
            self.sock = ssl.wrap_socket(sock, self.key_file, self.cert_file,
                                        cert_reqs=cert_reqs,
                                        ssl_version=ssl.PROTOCOL_SSLv23,
                                        ca_certs=self.ca_certs)
        else:
            context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
            context.options |= ssl.OP_NO_SSLv2
            if self.cert_file:
                context.load_cert_chain(self.cert_file, self.key_file)
            kwargs = {}
            if self.ca_certs:
                context.verify_mode = ssl.CERT_REQUIRED
                context.load_verify_locations(cafile=self.ca_certs)
            if getattr(ssl, 'HAS_SNI', False):
                # Pass the hostname through for SNI-capable TLS stacks.
                kwargs['server_hostname'] = self.host
            self.sock = context.wrap_socket(sock, **kwargs)
        if self.ca_certs and self.check_domain:
            try:
                match_hostname(self.sock.getpeercert(), self.host)
                logger.debug('Host verified: %s', self.host)
            except CertificateError:
                # Don't leave a half-open socket behind on mismatch.
                self.sock.shutdown(socket.SHUT_RDWR)
                self.sock.close()
                raise
class HTTPSHandler(BaseHTTPSHandler):
    """URL-opener handler that creates certificate-verifying
    HTTPSConnection instances configured with a CA bundle."""
    def __init__(self, ca_certs, check_domain=True):
        BaseHTTPSHandler.__init__(self)
        self.ca_certs = ca_certs
        self.check_domain = check_domain
    def _conn_maker(self, *args, **kwargs):
        """
        This is called to create a connection instance. Normally you'd
        pass a connection class to do_open, but it doesn't actually check for
        a class, and just expects a callable. As long as we behave just as a
        constructor would have, we should be OK. If it ever changes so that
        we *must* pass a class, we'll create an UnsafeHTTPSConnection class
        which just sets check_domain to False in the class definition, and
        choose which one to pass to do_open.
        """
        result = HTTPSConnection(*args, **kwargs)
        if self.ca_certs:
            # Verification settings only apply when a CA bundle is set.
            result.ca_certs = self.ca_certs
            result.check_domain = self.check_domain
        return result
    def https_open(self, req):
        """Open *req*, translating TLS verification failures into a
        clearer CertificateError."""
        try:
            return self.do_open(self._conn_maker, req)
        except URLError as e:
            if 'certificate verify failed' in str(e.reason):
                raise CertificateError('Unable to verify server certificate '
                                       'for %s' % req.host)
            else:
                raise
#
# To prevent against mixing HTTP traffic with HTTPS (examples: A Man-In-The-
# Middle proxy using HTTP listens on port 443, or an index mistakenly serves
# HTML containing a http://xyz link when it should be https://xyz),
# you can use the following handler class, which does not allow HTTP traffic.
#
# It works by inheriting from HTTPHandler - so build_opener won't add a
# handler for HTTP itself.
#
class HTTPSOnlyHandler(HTTPSHandler, HTTPHandler):
    """Handler which refuses any plain-HTTP request.

    It inherits HTTPHandler so that build_opener() will not install the
    default HTTP handler alongside it (see the module comment above).
    """
    def http_open(self, req):
        raise URLError('Unexpected HTTP request on what should be a secure '
                       'connection: %s' % req)
#
# XML-RPC with timeouts
#
# On Python 2.6 the old-style HTTP/HTTPS facades are re-created so that
# extra keyword arguments (presumably 'timeout' - see the Transport
# classes below; confirm) are forwarded to the connection class.
_ver_info = sys.version_info[:2]
if _ver_info == (2, 6):
    class HTTP(httplib.HTTP):
        def __init__(self, host='', port=None, **kwargs):
            if port == 0:   # 0 means use port 0, not the default port
                port = None
            self._setup(self._connection_class(host, port, **kwargs))
    class HTTPS(httplib.HTTPS):
        def __init__(self, host='', port=None, **kwargs):
            if port == 0:   # 0 means use port 0, not the default port
                port = None
            self._setup(self._connection_class(host, port, **kwargs))
class Transport(xmlrpclib.Transport):
    """xmlrpclib Transport which applies a timeout to its connections."""
    def __init__(self, timeout, use_datetime=0):
        self.timeout = timeout
        xmlrpclib.Transport.__init__(self, use_datetime)

    def make_connection(self, host):
        """Return a (possibly cached) HTTP connection with the timeout set."""
        h, eh, x509 = self.get_host_info(host)
        if _ver_info == (2, 6):
            result = HTTP(h, timeout=self.timeout)
        else:
            # Reuse an existing connection to the same host, mirroring
            # xmlrpclib.Transport.make_connection.
            if not self._connection or host != self._connection[0]:
                self._extra_headers = eh
                # Pass the timeout through; previously it was dropped
                # here, making this class a no-op on Python != 2.6
                # (SafeTransport below does pass it).
                self._connection = host, httplib.HTTPConnection(
                    h, timeout=self.timeout)
            result = self._connection[1]
        return result
class SafeTransport(xmlrpclib.SafeTransport):
    """xmlrpclib SafeTransport (HTTPS) which applies a timeout."""
    def __init__(self, timeout, use_datetime=0):
        self.timeout = timeout
        xmlrpclib.SafeTransport.__init__(self, use_datetime)
    def make_connection(self, host):
        """Return a (possibly cached) HTTPS connection with the timeout set."""
        h, eh, kwargs = self.get_host_info(host)
        if not kwargs:
            kwargs = {}
        kwargs['timeout'] = self.timeout
        if _ver_info == (2, 6):
            # NOTE(review): passes the raw 'host' rather than the parsed
            # 'h' used elsewhere - looks inconsistent; confirm (branch is
            # only reachable on Python 2.6).
            result = HTTPS(host, None, **kwargs)
        else:
            # Reuse an existing connection to the same host.
            if not self._connection or host != self._connection[0]:
                self._extra_headers = eh
                self._connection = host, httplib.HTTPSConnection(h, None,
                                                                 **kwargs)
            result = self._connection[1]
        return result
class ServerProxy(xmlrpclib.ServerProxy):
    """ServerProxy accepting an optional timeout= keyword argument,
    implemented via the Transport/SafeTransport classes above."""
    def __init__(self, uri, **kwargs):
        self.timeout = timeout = kwargs.pop('timeout', None)
        # The above classes only come into play if a timeout
        # is specified
        if timeout is not None:
            # Choose the transport matching the URI scheme.
            scheme, _ = splittype(uri)
            use_datetime = kwargs.get('use_datetime', 0)
            if scheme == 'https':
                tcls = SafeTransport
            else:
                tcls = Transport
            kwargs['transport'] = t = tcls(timeout, use_datetime=use_datetime)
            self.transport = t
        xmlrpclib.ServerProxy.__init__(self, uri, **kwargs)
#
# CSV functionality. This is provided because on 2.x, the csv module can't
# handle Unicode. However, we need to deal with Unicode in e.g. RECORD files.
#
def _csv_open(fn, mode, **kwargs):
    """Open *fn* for csv use, papering over 2.x/3.x differences.

    The 2.x csv module wants binary streams; the 3.x one wants text
    streams opened with newline=''.
    """
    if sys.version_info[0] >= 3:
        kwargs['newline'] = ''
    else:
        mode += 'b'
    return open(fn, mode, **kwargs)
class CSVBase(object):
    """Shared csv dialect settings plus context-manager support."""
    # Native strs are required because the 2.x csv API rejects unicode
    # for these parameters.
    defaults = {
        'delimiter': str(','),
        'quotechar': str('"'),
        'lineterminator': str('\n')
    }

    def __enter__(self):
        return self

    def __exit__(self, *exc_info):
        self.stream.close()
class CSVReader(CSVBase):
    """Row iterator over a csv byte stream or file path, yielding text rows."""
    def __init__(self, **kwargs):
        if 'stream' in kwargs:
            stream = kwargs['stream']
            if sys.version_info[0] >= 3:
                # The 3.x csv module needs a text stream.
                stream = codecs.getreader('utf-8')(stream)
            self.stream = stream
        else:
            self.stream = _csv_open(kwargs['path'], 'r')
        self.reader = csv.reader(self.stream, **self.defaults)

    def __iter__(self):
        return self

    def next(self):
        row = next(self.reader)
        if sys.version_info[0] < 3:
            # Decode byte cells produced by the 2.x csv module.
            row = [cell if isinstance(cell, text_type)
                   else cell.decode('utf-8') for cell in row]
        return row

    __next__ = next
class CSVWriter(CSVBase):
    """Writes rows to *fn* as csv, encoding text cells on Python 2."""
    def __init__(self, fn, **kwargs):
        self.stream = _csv_open(fn, 'w')
        self.writer = csv.writer(self.stream, **self.defaults)

    def writerow(self, row):
        if sys.version_info[0] < 3:
            # The 2.x csv module wants encoded bytes, not unicode.
            row = [cell.encode('utf-8') if isinstance(cell, text_type)
                   else cell for cell in row]
        self.writer.writerow(row)
#
# Configurator functionality
#
class Configurator(BaseConfigurator):
    """Dict-driven object configurator adding an 'inc://' protocol for
    including JSON files relative to a base directory."""
    value_converters = dict(BaseConfigurator.value_converters)
    value_converters['inc'] = 'inc_convert'
    def __init__(self, config, base=None):
        super(Configurator, self).__init__(config)
        # Base directory for resolving relative inc:// paths.
        self.base = base or os.getcwd()
    def configure_custom(self, config):
        """Instantiate the callable named by config['()'] using converted
        positional args ('[]'), keyword items, and post-construction
        properties ('.')."""
        def convert(o):
            # Recursively convert containers; a dict containing '()'
            # describes a nested custom object.
            if isinstance(o, (list, tuple)):
                result = type(o)([convert(i) for i in o])
            elif isinstance(o, dict):
                if '()' in o:
                    result = self.configure_custom(o)
                else:
                    result = {}
                    for k in o:
                        result[k] = convert(o[k])
            else:
                result = self.convert(o)
            return result
        c = config.pop('()')
        if not callable(c):
            c = self.resolve(c)
        props = config.pop('.', None)
        # Check for valid identifiers
        args = config.pop('[]', ())
        if args:
            args = tuple([convert(o) for o in args])
        items = [(k, convert(config[k])) for k in config if valid_ident(k)]
        kwargs = dict(items)
        result = c(*args, **kwargs)
        if props:
            for n, v in props.items():
                setattr(result, n, convert(v))
        return result
    def __getitem__(self, key):
        result = self.config[key]
        if isinstance(result, dict) and '()' in result:
            # Instantiate lazily on first access and cache the object.
            self.config[key] = result = self.configure_custom(result)
        return result
    def inc_convert(self, value):
        """Default converter for the inc:// protocol."""
        if not os.path.isabs(value):
            value = os.path.join(self.base, value)
        with codecs.open(value, 'r', encoding='utf-8') as f:
            result = json.load(f)
        return result
#
# Mixin for running subprocesses and capturing their output
#
class SubprocessMixin(object):
    """Mixin that runs subprocesses while streaming their output.

    Each output line is fed to a *progress* callable when one is given;
    otherwise a dot (or, in verbose mode, the raw output) is written to
    sys.stderr.
    """
    def __init__(self, verbose=False, progress=None):
        self.verbose = verbose
        self.progress = progress

    def reader(self, stream, context):
        """
        Read lines from a subprocess' output stream and either pass to a progress
        callable (if specified) or write progress information to sys.stderr.
        """
        while True:
            line = stream.readline()
            if not line:
                break
            if self.progress is not None:
                self.progress(line, context)
            elif self.verbose:
                sys.stderr.write(line.decode('utf-8'))
                sys.stderr.flush()
            else:
                sys.stderr.write('.')
                sys.stderr.flush()
        stream.close()

    def run_command(self, cmd, **kwargs):
        """Run *cmd*, drain stdout/stderr in threads, and return the Popen."""
        process = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                                   stderr=subprocess.PIPE, **kwargs)
        drains = [
            threading.Thread(target=self.reader,
                             args=(process.stdout, 'stdout')),
            threading.Thread(target=self.reader,
                             args=(process.stderr, 'stderr')),
        ]
        for t in drains:
            t.start()
        process.wait()
        for t in drains:
            t.join()
        if self.progress is not None:
            self.progress('done.', 'main')
        elif self.verbose:
            sys.stderr.write('done.\n')
        return process
| mit |
dulaccc/django-accounting | accounting/apps/reports/wrappers.py | 3 | 7987 | from decimal import Decimal as D
from collections import defaultdict, OrderedDict
from dateutil.relativedelta import relativedelta
from accounting.apps.books.models import Invoice, Bill
from accounting.apps.books.calculators import ProfitsLossCalculator
from accounting.libs.intervals import TimeInterval
class BaseReport(object):
    """Abstract report over a reporting period.

    Subclasses supply a title via __init__ and implement generate().
    """
    title = None
    period = None

    def __init__(self, title, start, end):
        self.title = title
        self.period = TimeInterval(start, end)

    def generate(self):
        raise NotImplementedError
class TaxRateSummary(object):
    """Accumulated taxable and expense amounts for one tax rate."""

    tax_rate = None
    taxable_amount = D('0')
    expenses_amount = D('0')

    @property
    def collected_taxes(self):
        """Taxes collected on taxable sales at this rate."""
        return self.taxable_amount * self.tax_rate.rate

    @property
    def deductible_taxes(self):
        """Taxes deductible from expenses at this rate."""
        return self.expenses_amount * self.tax_rate.rate

    @property
    def net_amount(self):
        """Taxable sales minus expenses."""
        return self.taxable_amount - self.expenses_amount

    @property
    def net_taxes(self):
        """Net taxes owed on :attr:`net_amount`."""
        return self.net_amount * self.tax_rate.rate
class TaxReport(BaseReport):
    """Per-tax-rate summary of collected and deductible taxes."""
    # TODO implement 'Billed (Accrual) / Collected (Cash based)'

    organization = None
    tax_summaries = None

    def __init__(self, organization, start, end):
        super().__init__("Tax Report", start, end)
        self.organization = organization
        # One TaxRateSummary per tax-rate primary key, created on demand.
        self.tax_summaries = defaultdict(TaxRateSummary)

    def generate(self):
        """Aggregate taxes from all invoices (sales) and bills (expenses)."""
        self.generate_for_sales(Invoice.objects.all())
        self.generate_for_sales(Bill.objects.all())

    def generate_for_sales(self, sales_queryset):
        """Fold every processed payment of *sales_queryset* into the summaries."""
        calculator = ProfitsLossCalculator(self.organization,
                                           start=self.period.start,
                                           end=self.period.end)

        for entry in calculator.process_generator(sales_queryset):
            summary = self.tax_summaries[entry.tax_rate.pk]
            summary.tax_rate = entry.tax_rate

            # Invoices add to the taxable side, bills to the expense side.
            if isinstance(entry.sale, Invoice):
                summary.taxable_amount += entry.amount_excl_tax
            elif isinstance(entry.sale, Bill):
                summary.expenses_amount += entry.amount_excl_tax
            else:
                raise ValueError("Unsupported type of sale {}"
                                 .format(entry.sale.__class__))
class ProfitAndLossSummary(object):
    """Sales and expense totals for one grouping bucket (e.g. a month)."""

    grouping_date = None
    sales_amount = D('0')
    expenses_amount = D('0')

    @property
    def net_profit(self):
        """Sales minus expenses for this bucket."""
        return self.sales_amount - self.expenses_amount
class ProfitAndLossReport(BaseReport):
    """Group sales and expenses into time buckets and compute totals.

    Only a monthly resolution is currently supported: one
    :class:`ProfitAndLossSummary` bucket per month of the period, keyed by
    the first day of that month (matching :meth:`group_by_date`).
    """
    # TODO implement 'Billed (Accrual) / Collected (Cash based)'

    organization = None
    summaries = None
    total_summary = None

    RESOLUTION_MONTHLY = 'monthly'
    RESOLUTION_CHOICES = (
        RESOLUTION_MONTHLY,
    )
    group_by_resolution = RESOLUTION_MONTHLY

    def __init__(self, organization, start, end):
        super().__init__("Profit and Loss", start, end)
        self.organization = organization
        self.summaries = {}

        steps_interval = relativedelta(end, start)
        assert self.group_by_resolution in self.RESOLUTION_CHOICES, \
            "Not a resolution choice"
        if self.group_by_resolution == self.RESOLUTION_MONTHLY:
            # BUGFIX: `relativedelta.months` is only the month *component*
            # (0-11); the year component must be folded in, otherwise any
            # period longer than a year gets too few buckets and
            # generate_for_sales() raises KeyError for the missing months.
            total_months = steps_interval.years * 12 + steps_interval.months
            # Also create a bucket for a trailing partial month (or for a
            # period shorter than one month) so payments there can be filed.
            if steps_interval.days > 0 or total_months == 0:
                total_months += 1
            for step in range(total_months):
                # BUGFIX: normalize keys to the first day of the month so
                # they match the keys produced by group_by_date() even when
                # the period does not start on the 1st.
                key_date = (start + relativedelta(months=step)).replace(day=1)
                self.summaries[key_date] = ProfitAndLossSummary()
        else:
            raise ValueError("Unsupported resolution {}"
                             .format(self.group_by_resolution))
        self.total_summary = ProfitAndLossSummary()

    def group_by_date(self, date):
        """Return the bucket key for *date* (first day of its month)."""
        if self.group_by_resolution == self.RESOLUTION_MONTHLY:
            grouping_date = date.replace(day=1)
        else:
            raise ValueError("Unsupported resolution {}"
                             .format(self.group_by_resolution))
        return grouping_date

    def generate(self):
        """Fill the monthly buckets from invoices and bills, then total them."""
        invoice_queryset = Invoice.objects.all()
        bill_queryset = Bill.objects.all()
        self.generate_for_sales(invoice_queryset)
        self.generate_for_sales(bill_queryset)

        # order the results chronologically
        self.summaries = OrderedDict(sorted(self.summaries.items()))

        # compute totals across all buckets
        for summary in self.summaries.values():
            self.total_summary.sales_amount += summary.sales_amount
            self.total_summary.expenses_amount += summary.expenses_amount

    def generate_for_sales(self, sales_queryset):
        """Add each processed payment of *sales_queryset* to its month bucket."""
        calculator = ProfitsLossCalculator(self.organization,
                                           start=self.period.start,
                                           end=self.period.end)

        for output in calculator.process_generator(sales_queryset):
            key_date = self.group_by_date(output.payment.date_paid)
            summary = self.summaries[key_date]
            if isinstance(output.sale, Invoice):
                summary.sales_amount += output.amount_excl_tax
            elif isinstance(output.sale, Bill):
                summary.expenses_amount += output.amount_excl_tax
            else:
                raise ValueError("Unsupported type of sale {}"
                                 .format(output.sale.__class__))
class PayRunSummary(object):
    """Payroll totals for a single employee."""

    payroll_tax_rate = None
    total_excl_tax = D('0')

    @property
    def payroll_taxes(self):
        """Payroll taxes owed on the pre-tax total."""
        return self.total_excl_tax * self.payroll_tax_rate
class PayRunReport(BaseReport):
    """Per-employee payroll summary plus the total payroll taxes."""

    organization = None
    summaries = None
    total_payroll_taxes = D('0')

    def __init__(self, organization, start, end):
        super().__init__("Pay Run Report", start, end)
        self.organization = organization
        # One PayRunSummary per employee composite name, created on demand.
        self.summaries = defaultdict(PayRunSummary)

    def generate(self):
        """Build one summary per employee of the organization."""
        self.generate_for_employees(self.organization.employees.all())

    def generate_for_employees(self, employee_queryset):
        """Compute each employee's profit share and accumulate payroll taxes."""
        running_total = D('0')
        calculator = ProfitsLossCalculator(self.organization,
                                           start=self.period.start,
                                           end=self.period.end)
        for employee in employee_queryset:
            summary = self.summaries[employee.composite_name]
            summary.employee = employee
            summary.payroll_tax_rate = employee.payroll_tax_rate

            if not employee.salary_follows_profits:
                # Fixed salaries are not implemented yet.
                raise ValueError("Salary not indexed on the profits "
                                 "are not supported yet")

            # TODO compute profits based on the period interval
            profits = calculator.profits()
            summary.total_excl_tax = profits * employee.shares_percentage
            running_total += summary.payroll_taxes

        # Total payroll
        self.total_payroll_taxes = running_total
class InvoiceDetailsReport(BaseReport):
    """List the organization's invoices paid during the period.

    Prefetches lines, tax rates, payments and employees so templates can
    render the invoice details without extra queries.
    """

    organization = None
    invoices = None
    tax_rates = None

    def __init__(self, organization, start, end):
        # BUGFIX: the title used to be a copy-paste of "Pay Run Report".
        super().__init__("Invoice Details Report", start, end)
        self.organization = organization
        self.tax_rates = organization.tax_rates.all()

    def generate(self):
        """Collect the invoices with at least one payment in the period."""
        invoice_queryset = self.organization.invoices.all()
        self.generate_for_invoices(invoice_queryset)

    def generate_for_invoices(self, invoice_queryset):
        """Filter *invoice_queryset* to the period and prefetch relations."""
        invoice_queryset = (invoice_queryset
            .filter(payments__date_paid__range=[
                self.period.start,
                self.period.end
            ]))

        # optimize the query
        invoice_queryset = (invoice_queryset
            .select_related(
                'organization')
            .prefetch_related(
                'lines',
                'lines__tax_rate',
                'payments',
                'organization__employees',)
            # distinct: the payments join can yield duplicate invoice rows
            .distinct())

        self.invoices = invoice_queryset
| mit |
rafalo1333/kivy | kivy/uix/behaviors/emacs.py | 10 | 5601 | # -*- encoding: utf-8 -*-
'''
Emacs Behavior
==============
The :class:`~kivy.uix.behaviors.emacs.EmacsBehavior`
`mixin <https://en.wikipedia.org/wiki/Mixin>`_ allows you to add
`Emacs <https://www.gnu.org/software/emacs/>`_ keyboard shortcuts for basic
movement and editing to the :class:`~kivy.uix.textinput.TextInput` widget.
The shortcuts currently available are listed below:
Emacs shortcuts
---------------
=============== ========================================================
Shortcut Description
--------------- --------------------------------------------------------
Control + a Move cursor to the beginning of the line
Control + e Move cursor to the end of the line
Control + f Move cursor one character to the right
Control + b Move cursor one character to the left
Alt + f Move cursor to the end of the word to the right
Alt + b Move cursor to the start of the word to the left
Alt + Backspace Delete text left of the cursor to the beginning of word
Alt + d Delete text right of the cursor to the end of the word
Alt + w Copy selection
Control + w Cut selection
Control + y Paste selection
=============== ========================================================
.. warning::
If you have the :mod:`~kivy.modules.inspector` module enabled, the
shortcut for opening the inspector (Control + e) conflicts with the
Emacs shortcut to move to the end of the line (it will still move the
cursor to the end of the line, but the inspector will open as well).
'''
from kivy.properties import StringProperty
__all__ = ('EmacsBehavior', )
class EmacsBehavior(object):
    '''
    A `mixin <https://en.wikipedia.org/wiki/Mixin>`_ that enables Emacs-style
    keyboard shortcuts for the :class:`~kivy.uix.textinput.TextInput` widget.
    Please see the :mod:`Emacs behaviors module <kivy.uix.behaviors.emacs>`
    documentation for more information.

    .. versionadded:: 1.9.1
    '''

    key_bindings = StringProperty('emacs')
    '''String name which determines the type of key bindings to use with the
    :class:`~kivy.uix.textinput.TextInput`. This allows Emacs key bindings to
    be enabled/disabled programmatically for widgets that inherit from
    :class:`EmacsBehavior`. If the value is not ``'emacs'``, Emacs bindings
    will be disabled. Use ``'default'`` for switching to the default key
    bindings of TextInput.

    :attr:`key_bindings` is a :class:`~kivy.properties.StringProperty`
    and defaults to ``'emacs'``.

    .. versionadded:: 1.10.0
    '''

    def __init__(self, **kwargs):
        super(EmacsBehavior, self).__init__(**kwargs)

        # Map of modifier -> {key char -> handler}; consulted by
        # keyboard_on_key_down below.
        self.bindings = {
            'ctrl': {
                'a': lambda: self.do_cursor_movement('cursor_home'),
                'e': lambda: self.do_cursor_movement('cursor_end'),
                'f': lambda: self.do_cursor_movement('cursor_right'),
                'b': lambda: self.do_cursor_movement('cursor_left'),
                'w': lambda: self._cut(self.selection_text),
                'y': self.paste,
            },
            'alt': {
                'w': self.copy,
                'f': lambda: self.do_cursor_movement('cursor_right',
                                                     control=True),
                'b': lambda: self.do_cursor_movement('cursor_left',
                                                     control=True),
                'd': self.delete_word_right,
                '\x08': self.delete_word_left,  # alt + backspace
            },
        }

    def keyboard_on_key_down(self, window, keycode, text, modifiers):
        # Dispatch to an Emacs binding when one matches; otherwise fall
        # through to the default TextInput key handling.
        key, key_str = keycode

        # NOTE(review): only the first modifier is considered, so e.g.
        # ctrl+shift combinations are treated as plain ctrl.
        mod = modifiers[0] if modifiers else None
        is_emacs_shortcut = False
        if key in range(256) and self.key_bindings == 'emacs':
            is_emacs_shortcut = ((mod == 'ctrl' and
                                  chr(key) in self.bindings['ctrl'].keys()) or
                                 (mod == 'alt' and
                                  chr(key) in self.bindings['alt'].keys()))
        if is_emacs_shortcut:
            # Look up mod and key
            emacs_shortcut = self.bindings[mod][chr(key)]
            emacs_shortcut()
        else:
            super(EmacsBehavior, self).keyboard_on_key_down(window, keycode,
                                                            text, modifiers)

    def delete_word_right(self):
        '''Delete text right of the cursor to the end of the word'''
        if self._selection:
            return
        start_index = self.cursor_index()
        start_cursor = self.cursor
        self.do_cursor_movement('cursor_right', control=True)
        end_index = self.cursor_index()
        if start_index != end_index:
            s = self.text[start_index:end_index]
            # Record the deletion for undo/redo before mutating the text.
            self._set_unredo_delsel(start_index, end_index, s, from_undo=False)
            self.text = self.text[:start_index] + self.text[end_index:]
            self._set_cursor(pos=start_cursor)

    def delete_word_left(self):
        '''Delete text left of the cursor to the beginning of word'''
        if self._selection:
            return
        start_index = self.cursor_index()
        self.do_cursor_movement('cursor_left', control=True)
        end_cursor = self.cursor
        end_index = self.cursor_index()
        if start_index != end_index:
            s = self.text[end_index:start_index]
            # Record the deletion for undo/redo before mutating the text.
            self._set_unredo_delsel(end_index, start_index, s, from_undo=False)
            self.text = self.text[:end_index] + self.text[start_index:]
            self._set_cursor(pos=end_cursor)
SnappleCap/oh-mainline | vendor/packages/twisted/doc/core/examples/mouse.py | 19 | 2392 | #!/usr/bin/env python
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Example using MouseMan protocol with the SerialPort transport.
"""
# TODO set tty modes, etc.
# This works for me:
# speed 1200 baud; rows 0; columns 0; line = 0;
# intr = ^C; quit = ^\; erase = ^?; kill = ^U; eof = ^D;
# eol = <undef>; eol2 = <undef>; start = ^Q; stop = ^S; susp = ^Z;
# rprnt = ^R; werase = ^W; lnext = ^V; flush = ^O; min = 1; time = 0;
# -parenb -parodd cs7 hupcl -cstopb cread clocal -crtscts ignbrk
# -brkint ignpar -parmrk -inpck -istrip -inlcr -igncr -icrnl -ixon
# -ixoff -iuclc -ixany -imaxbel -opost -olcuc -ocrnl -onlcr -onocr
# -onlret -ofill -ofdel nl0 cr0 tab0 bs0 vt0 ff0 -isig -icanon -iexten
# -echo -echoe -echok -echonl -noflsh -xcase -tostop -echoprt -echoctl
# -echoke
import sys
from twisted.python import usage, log
from twisted.protocols.mice import mouseman
if sys.platform == 'win32':
    # win32 serial does not work yet!
    raise NotImplementedError, "The SerialPort transport does not currently support Win32"
    # NOTE(review): the two lines below are unreachable (they follow an
    # unconditional raise) and are presumably kept for when Win32 support
    # lands.
    from twisted.internet import win32eventreactor
    win32eventreactor.install()
class Options(usage.Options):
    """Command-line options: serial device, baud rate and optional log file."""
    optParameters = [
        ['port', 'p', '/dev/mouse', 'Device for serial mouse'],
        ['baudrate', 'b', '1200', 'Baudrate for serial mouse'],
        ['outfile', 'o', None, 'Logfile [default: sys.stdout]'],
    ]
class McFooMouse(mouseman.MouseMan):
    """MouseMan protocol handler that logs every button and movement event."""

    def down_left(self):
        log.msg("LEFT")

    def up_left(self):
        log.msg("left")

    def down_middle(self):
        log.msg("MIDDLE")

    def up_middle(self):
        log.msg("middle")

    def down_right(self):
        log.msg("RIGHT")

    def up_right(self):
        log.msg("right")

    def move(self, x, y):
        # Relative movement deltas reported by the mouse.
        log.msg("(%d,%d)" % (x, y))
if __name__ == '__main__':
    # Parse the command line, set up logging, attach the mouse protocol to
    # the serial port and run the reactor.  (Python 2 syntax throughout.)
    from twisted.internet import reactor
    from twisted.internet.serialport import SerialPort
    o = Options()
    try:
        o.parseOptions()
    except usage.UsageError, errortext:
        print "%s: %s" % (sys.argv[0], errortext)
        print "%s: Try --help for usage details." % (sys.argv[0])
        raise SystemExit, 1

    logFile = sys.stdout
    if o.opts['outfile']:
        logFile = o.opts['outfile']
    log.startLogging(logFile)

    SerialPort(McFooMouse(), o.opts['port'], reactor, baudrate=int(o.opts['baudrate']))
    reactor.run()
mbauskar/Das_Erpnext | erpnext/stock/report/batch_wise_balance_history/batch_wise_balance_history.py | 3 | 3255 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from frappe import _
from frappe.utils import flt, cint, getdate
def execute(filters=None):
    """Entry point for the Batch-Wise Balance History report.

    Returns ``(columns, rows)`` where each row holds the opening/in/out/
    balance quantities for one (item, warehouse, batch) combination;
    all-zero combinations are omitted.
    """
    filters = filters or {}
    float_precision = cint(frappe.db.get_default("float_precision")) or 3

    columns = get_columns(filters)
    item_map = get_item_details(filters)
    iwb_map = get_item_warehouse_batch_map(filters, float_precision)

    rows = []
    for item in sorted(iwb_map):
        details = item_map[item]
        for wh in sorted(iwb_map[item]):
            for batch in sorted(iwb_map[item][wh]):
                qty = iwb_map[item][wh][batch]
                # skip combinations where every quantity is zero
                if not (qty.opening_qty or qty.in_qty
                        or qty.out_qty or qty.bal_qty):
                    continue
                rows.append([
                    item, details["item_name"], details["description"],
                    wh, batch,
                    flt(qty.opening_qty, float_precision),
                    flt(qty.in_qty, float_precision),
                    flt(qty.out_qty, float_precision),
                    flt(qty.bal_qty, float_precision),
                ])

    return columns, rows
def get_columns(filters):
    """Return the report column definitions as label:fieldtype:width strings."""
    return [
        _("Item") + ":Link/Item:100",
        _("Item Name") + "::150",
        _("Description") + "::150",
        _("Warehouse") + ":Link/Warehouse:100",
        _("Batch") + ":Link/Batch:100",
        _("Opening Qty") + "::90",
        _("In Qty") + "::80",
        _("Out Qty") + "::80",
        _("Balance Qty") + "::90",
    ]
def get_conditions(filters):
    """Build the extra SQL WHERE fragment for the date filters.

    Both ``from_date`` and ``to_date`` are mandatory; a missing bound
    aborts via ``frappe.throw``.  Only ``to_date`` ends up in the SQL --
    ``from_date`` is applied later when bucketing the ledger entries.
    """
    conditions = ""
    if not filters.get("from_date"):
        frappe.throw(_("'From Date' is required"))

    if filters.get("to_date"):
        # NOTE(review): the date is interpolated directly into the SQL
        # string; if it can carry user-controlled text this is an injection
        # risk -- a parameterized query would be safer.  Confirm upstream
        # validation before relying on this.
        conditions += " and posting_date <= '%s'" % filters["to_date"]
    else:
        frappe.throw(_("'To Date' is required"))

    return conditions
#get all details
def get_stock_ledger_entries(filters):
    """Fetch non-cancelled stock ledger entries up to the report's to_date.

    Returns dicts with item_code, batch_no, warehouse, posting_date and
    actual_qty, ordered by item and warehouse.
    """
    conditions = get_conditions(filters)
    # docstatus < 2 excludes cancelled documents.
    return frappe.db.sql("""select item_code, batch_no, warehouse,
        posting_date, actual_qty
        from `tabStock Ledger Entry`
        where docstatus < 2 %s order by item_code, warehouse""" %
        conditions, as_dict=1)
def get_item_warehouse_batch_map(filters, float_precision):
    """Aggregate ledger entries into item -> warehouse -> batch quantities.

    Entries dated before ``from_date`` accumulate into opening_qty; entries
    inside [from_date, to_date] split into in_qty/out_qty by sign.  bal_qty
    is the running balance over all fetched entries.
    """
    sle = get_stock_ledger_entries(filters)
    iwb_map = {}

    from_date = getdate(filters["from_date"])
    to_date = getdate(filters["to_date"])

    for d in sle:
        iwb_map.setdefault(d.item_code, {}).setdefault(d.warehouse, {})\
            .setdefault(d.batch_no, frappe._dict({
                "opening_qty": 0.0, "in_qty": 0.0, "out_qty": 0.0, "bal_qty": 0.0
            }))
        qty_dict = iwb_map[d.item_code][d.warehouse][d.batch_no]
        if d.posting_date < from_date:
            # Movements before the window roll into the opening balance.
            qty_dict.opening_qty = flt(qty_dict.opening_qty, float_precision) \
                + flt(d.actual_qty, float_precision)
        elif d.posting_date >= from_date and d.posting_date <= to_date:
            # Within the window: positive quantities are receipts, negative
            # ones are issues (stored as a positive out_qty).
            if flt(d.actual_qty) > 0:
                qty_dict.in_qty = flt(qty_dict.in_qty, float_precision) + flt(d.actual_qty, float_precision)
            else:
                qty_dict.out_qty = flt(qty_dict.out_qty, float_precision) \
                    + abs(flt(d.actual_qty, float_precision))

        qty_dict.bal_qty = flt(qty_dict.bal_qty, float_precision) + flt(d.actual_qty, float_precision)

    return iwb_map
def get_item_details(filters):
    """Map item code -> row with name/description for every item.

    ``name`` is the Item primary key, so each code appears exactly once.
    """
    rows = frappe.db.sql("select name, item_name, description from tabItem", as_dict=1)
    return {row.name: row for row in rows}
| agpl-3.0 |
tylertian/Openstack | openstack F/nova/nova/tests/compute/test_compute.py | 1 | 234580 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2011 Piston Cloud Computing, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Tests for compute service"""
import base64
import copy
import datetime
import functools
import sys
import time
import mox
import nova
from nova import compute
from nova.compute import api as compute_api
from nova.compute import instance_types
from nova.compute import manager as compute_manager
from nova.compute import power_state
from nova.compute import rpcapi as compute_rpcapi
from nova.compute import task_states
from nova.compute import utils as compute_utils
from nova.compute import vm_states
from nova import context
from nova import db
from nova import exception
from nova import flags
from nova.openstack.common import importutils
from nova.openstack.common import jsonutils
from nova.openstack.common import log as logging
from nova.openstack.common.notifier import api as notifier_api
from nova.openstack.common.notifier import test_notifier
from nova.openstack.common import policy as common_policy
from nova.openstack.common import rpc
from nova.openstack.common.rpc import common as rpc_common
from nova.openstack.common import timeutils
import nova.policy
from nova import quota
from nova import test
from nova.tests.compute import fake_resource_tracker
from nova.tests.db.fakes import FakeModel
from nova.tests import fake_network
from nova.tests.image import fake as fake_image
from nova import utils
import nova.volume
QUOTAS = quota.QUOTAS
LOG = logging.getLogger(__name__)
FLAGS = flags.FLAGS
flags.DECLARE('live_migration_retry_count', 'nova.compute.manager')
FAKE_IMAGE_REF = 'fake-image-ref'
def nop_report_driver_status(self):
    # No-op stand-in used to stub out periodic driver status reporting in tests.
    pass
class FakeSchedulerAPI(object):
    """Scheduler RPC API stand-in whose calls are silently ignored."""

    def run_instance(self, ctxt, request_spec, admin_password,
                     injected_files, requested_networks, is_first_time,
                     filter_properties):
        """Accept a run_instance RPC and do nothing."""
        pass

    def live_migration(self, ctxt, block_migration, disk_over_commit,
                       instance, dest):
        """Accept a live_migration RPC and do nothing."""
        pass
class BaseTestCase(test.TestCase):
    """Common fixture for compute tests.

    Wires up the fake virt driver, test notifier, flat network manager,
    a compute manager with a database-free resource tracker, stubbed
    glance/network/scheduler services, and helpers to create instances,
    instance types and security groups.
    """

    def setUp(self):
        super(BaseTestCase, self).setUp()
        self.flags(compute_driver='nova.virt.fake.FakeDriver',
                   notification_driver=[test_notifier.__name__],
                   network_manager='nova.network.manager.FlatManager')
        self.compute = importutils.import_object(FLAGS.compute_manager)

        # override tracker with a version that doesn't need the database:
        self.compute.resource_tracker = \
            fake_resource_tracker.FakeResourceTracker(self.compute.host,
                    self.compute.driver)
        self.compute.update_available_resource(
                context.get_admin_context())

        self.user_id = 'fake'
        self.project_id = 'fake'
        self.context = context.RequestContext(self.user_id,
                                              self.project_id)
        test_notifier.NOTIFICATIONS = []

        def fake_show(meh, context, id):
            # Minimal image dict satisfying what compute reads from glance.
            return {'id': id, 'min_disk': None, 'min_ram': None,
                    'name': 'fake_name',
                    'properties': {'kernel_id': 'fake_kernel_id',
                                   'ramdisk_id': 'fake_ramdisk_id',
                                   'something_else': 'meow'}}

        fake_image.stub_out_image_service(self.stubs)
        self.stubs.Set(fake_image._FakeImageService, 'show', fake_show)

        fake_rpcapi = FakeSchedulerAPI()
        self.stubs.Set(self.compute, 'scheduler_rpcapi', fake_rpcapi)
        fake_network.set_stub_network_methods(self.stubs)

    def tearDown(self):
        # Remove any instances the test left behind before the base teardown.
        fake_image.FakeImageService_reset()
        instances = db.instance_get_all(self.context.elevated())
        notifier_api._reset_drivers()
        for instance in instances:
            db.instance_destroy(self.context.elevated(), instance['uuid'])
        super(BaseTestCase, self).tearDown()

    def _create_fake_instance(self, params=None, type_name='m1.tiny'):
        """Create a test instance"""
        if not params:
            params = {}

        inst = {}
        inst['vm_state'] = vm_states.ACTIVE
        inst['image_ref'] = FAKE_IMAGE_REF
        inst['reservation_id'] = 'r-fakeres'
        inst['launch_time'] = '10'
        inst['user_id'] = self.user_id
        inst['project_id'] = self.project_id
        inst['host'] = 'fake_host'
        type_id = instance_types.get_instance_type_by_name(type_name)['id']
        inst['instance_type_id'] = type_id
        inst['ami_launch_index'] = 0
        inst['memory_mb'] = 0
        inst['vcpus'] = 0
        inst['root_gb'] = 0
        inst['ephemeral_gb'] = 0
        inst['architecture'] = 'x86_64'
        inst['os_type'] = 'Linux'
        # Caller-supplied params override the defaults above.
        inst.update(params)
        _create_service_entries(self.context.elevated(),
                {'fake_zone': [inst['host']]})
        return db.instance_create(self.context, inst)

    def _create_instance(self, params=None, type_name='m1.tiny'):
        """Create a test instance. Returns uuid"""
        return self._create_fake_instance(params, type_name=type_name)

    def _create_instance_type(self, params=None):
        """Create a test instance type"""
        if not params:
            params = {}

        context = self.context.elevated()
        inst = {}
        inst['name'] = 'm1.small'
        inst['memory_mb'] = 1024
        inst['vcpus'] = 1
        inst['root_gb'] = 20
        inst['ephemeral_gb'] = 10
        inst['flavorid'] = '1'
        inst['swap'] = 2048
        inst['rxtx_factor'] = 1
        inst.update(params)
        return db.instance_type_create(context, inst)['id']

    def _create_group(self):
        # Minimal security group owned by the test user/project.
        values = {'name': 'testgroup',
                  'description': 'testgroup',
                  'user_id': self.user_id,
                  'project_id': self.project_id}
        return db.security_group_create(self.context, values)
class ComputeTestCase(BaseTestCase):
    def setUp(self):
        # Stub network info retrieval/allocation so tests never touch a
        # real network manager; also asserts the call uses an admin context.
        def fake_get_nw_info(cls, ctxt, instance, *args, **kwargs):
            self.assertTrue(ctxt.is_admin)
            return fake_network.fake_get_instance_nw_info(self.stubs, 1, 1,
                                                          spectacular=True)

        super(ComputeTestCase, self).setUp()
        self.stubs.Set(nova.network.API, 'get_instance_nw_info',
                fake_get_nw_info)
        self.stubs.Set(nova.network.API, 'allocate_for_instance',
                fake_get_nw_info)
        self.compute_api = compute.API()

    def tearDown(self):
        super(ComputeTestCase, self).tearDown()
        # Undo any timeutils.set_time_override() a test may have installed.
        timeutils.clear_time_override()
    def test_wrap_instance_fault(self):
        # wrap_instance_fault should record an instance fault when the
        # wrapped compute-manager method raises.
        inst = {"uuid": "fake_uuid"}

        called = {'fault_added': False}

        def did_it_add_fault(*args):
            called['fault_added'] = True

        self.stubs.Set(compute_utils, 'add_instance_fault_from_exc',
                       did_it_add_fault)

        @nova.compute.manager.wrap_instance_fault
        def failer(self2, context, instance):
            raise NotImplementedError()

        self.assertRaises(NotImplementedError, failer,
                          self.compute, self.context, instance=inst)

        self.assertTrue(called['fault_added'])

    def test_wrap_instance_fault_no_instance(self):
        # No fault should be recorded when the instance no longer exists.
        inst_uuid = "fake_uuid"

        called = {'fault_added': False}

        def did_it_add_fault(*args):
            called['fault_added'] = True

        self.stubs.Set(compute_utils, 'add_instance_fault_from_exc',
                       did_it_add_fault)

        @nova.compute.manager.wrap_instance_fault
        def failer(self2, context, instance_uuid):
            raise exception.InstanceNotFound()

        self.assertRaises(exception.InstanceNotFound, failer,
                          self.compute, self.context, inst_uuid)

        self.assertFalse(called['fault_added'])
    def test_create_instance_with_img_ref_associates_config_drive(self):
        """Make sure create associates a config drive."""

        instance = jsonutils.to_primitive(self._create_fake_instance(
                        params={'config_drive': '1234', }))

        try:
            self.compute.run_instance(self.context, instance=instance)
            instances = db.instance_get_all(context.get_admin_context())
            instance = instances[0]

            self.assertTrue(instance.config_drive)
        finally:
            db.instance_destroy(self.context, instance['uuid'])

    def test_create_instance_associates_config_drive(self):
        """Make sure create associates a config drive."""

        instance = jsonutils.to_primitive(self._create_fake_instance(
                        params={'config_drive': '1234', }))

        try:
            self.compute.run_instance(self.context, instance=instance)
            instances = db.instance_get_all(context.get_admin_context())
            instance = instances[0]

            self.assertTrue(instance.config_drive)
        finally:
            db.instance_destroy(self.context, instance['uuid'])
    def test_create_instance_unlimited_memory(self):
        """Default of memory limit=None is unlimited"""
        self.flags(reserved_host_disk_mb=0, reserved_host_memory_mb=0)
        self.compute.resource_tracker.update_available_resource(self.context)
        params = {"memory_mb": 999999999999}
        filter_properties = {'limits': {'memory_mb': None}}
        instance = self._create_fake_instance(params)
        self.compute.run_instance(self.context, instance=instance,
                filter_properties=filter_properties)
        self.assertEqual(999999999999,
                self.compute.resource_tracker.compute_node['memory_mb_used'])

    def test_create_instance_unlimited_disk(self):
        # disk_gb limit of None likewise means no disk claim cap.
        self.flags(reserved_host_disk_mb=0, reserved_host_memory_mb=0)
        self.compute.resource_tracker.update_available_resource(self.context)
        params = {"root_gb": 999999999999,
                  "ephemeral_gb": 99999999999}
        filter_properties = {'limits': {'disk_gb': None}}
        instance = self._create_fake_instance(params)
        self.compute.run_instance(self.context, instance=instance,
                filter_properties=filter_properties)
    def test_create_multiple_instances_then_starve(self):
        # Successive boots consume tracked resources until a claim that
        # would exceed the limits raises ComputeResourcesUnavailable.
        self.flags(reserved_host_disk_mb=0, reserved_host_memory_mb=0)
        self.compute.resource_tracker.update_available_resource(self.context)
        filter_properties = {'limits': {'memory_mb': 4096, 'disk_gb': 1000}}
        params = {"memory_mb": 1024, "root_gb": 128, "ephemeral_gb": 128}
        instance = self._create_fake_instance(params)
        self.compute.run_instance(self.context, instance=instance,
                                  filter_properties=filter_properties)
        self.assertEquals(1024,
                self.compute.resource_tracker.compute_node['memory_mb_used'])
        self.assertEquals(256,
                self.compute.resource_tracker.compute_node['local_gb_used'])

        params = {"memory_mb": 2048, "root_gb": 256, "ephemeral_gb": 256}
        instance = self._create_fake_instance(params)
        self.compute.run_instance(self.context, instance=instance,
                                  filter_properties=filter_properties)
        self.assertEquals(3072,
                self.compute.resource_tracker.compute_node['memory_mb_used'])
        self.assertEquals(768,
                self.compute.resource_tracker.compute_node['local_gb_used'])

        # This claim would push memory past the 4096 MB limit.
        params = {"memory_mb": 8192, "root_gb": 8192, "ephemeral_gb": 8192}
        instance = self._create_fake_instance(params)
        self.assertRaises(exception.ComputeResourcesUnavailable,
                self.compute.run_instance, self.context, instance=instance,
                filter_properties=filter_properties)
    def test_create_instance_with_oversubscribed_ram(self):
        """Test passing of oversubscribed ram policy from the scheduler."""

        self.flags(reserved_host_disk_mb=0, reserved_host_memory_mb=0)
        self.compute.resource_tracker.update_available_resource(self.context)

        # get total memory as reported by virt driver:
        resources = self.compute.driver.get_available_resource()
        total_mem_mb = resources['memory_mb']

        oversub_limit_mb = total_mem_mb * 1.5
        instance_mb = int(total_mem_mb * 1.45)

        # build an instance, specifying an amount of memory that exceeds
        # total_mem_mb, but is less than the oversubscribed limit:
        params = {"memory_mb": instance_mb, "root_gb": 128,
                  "ephemeral_gb": 128}
        instance = self._create_fake_instance(params)

        limits = {'memory_mb': oversub_limit_mb}
        filter_properties = {'limits': limits}
        self.compute.run_instance(self.context, instance=instance,
                filter_properties=filter_properties)

        self.assertEqual(instance_mb,
                self.compute.resource_tracker.compute_node['memory_mb_used'])

    def test_create_instance_with_oversubscribed_ram_fail(self):
        """Test passing of oversubscribed ram policy from the scheduler, but
        with insufficient memory.
        """
        self.flags(reserved_host_disk_mb=0, reserved_host_memory_mb=0)
        self.compute.resource_tracker.update_available_resource(self.context)

        # get total memory as reported by virt driver:
        resources = self.compute.driver.get_available_resource()
        total_mem_mb = resources['memory_mb']

        oversub_limit_mb = total_mem_mb * 1.5
        # 1.55x exceeds the 1.5x oversubscription limit, so the claim fails.
        instance_mb = int(total_mem_mb * 1.55)

        # build an instance, specifying an amount of memory that exceeds
        # total_mem_mb, but is less than the oversubscribed limit:
        params = {"memory_mb": instance_mb, "root_gb": 128,
                  "ephemeral_gb": 128}
        instance = self._create_fake_instance(params)

        filter_properties = {'limits': {'memory_mb': oversub_limit_mb}}

        self.assertRaises(exception.ComputeResourcesUnavailable,
                self.compute.run_instance, self.context, instance=instance,
                filter_properties=filter_properties)
    def test_create_instance_with_oversubscribed_cpu(self):
        """Test passing of oversubscribed cpu policy from the scheduler."""

        self.flags(reserved_host_disk_mb=0, reserved_host_memory_mb=0)
        self.compute.resource_tracker.update_available_resource(self.context)
        limits = {'vcpu': 3}
        filter_properties = {'limits': limits}

        # get total memory as reported by virt driver:
        resources = self.compute.driver.get_available_resource()
        self.assertEqual(1, resources['vcpus'])

        # build an instance, specifying an amount of memory that exceeds
        # total_mem_mb, but is less than the oversubscribed limit:
        params = {"memory_mb": 10, "root_gb": 1,
                  "ephemeral_gb": 1, "vcpus": 2}
        instance = self._create_fake_instance(params)
        self.compute.run_instance(self.context, instance=instance,
                filter_properties=filter_properties)

        self.assertEqual(2,
                self.compute.resource_tracker.compute_node['vcpus_used'])

        # create one more instance:
        params = {"memory_mb": 10, "root_gb": 1,
                  "ephemeral_gb": 1, "vcpus": 1}
        instance = self._create_fake_instance(params)
        self.compute.run_instance(self.context, instance=instance,
                filter_properties=filter_properties)

        self.assertEqual(3,
                self.compute.resource_tracker.compute_node['vcpus_used'])

        # delete the instance:
        instance['vm_state'] = vm_states.DELETED
        self.compute.resource_tracker.update_usage(self.context,
                instance=instance)

        self.assertEqual(2,
                self.compute.resource_tracker.compute_node['vcpus_used'])

        # now oversubscribe vcpus and fail:
        params = {"memory_mb": 10, "root_gb": 1,
                  "ephemeral_gb": 1, "vcpus": 2}
        instance = self._create_fake_instance(params)

        limits = {'vcpu': 3}
        filter_properties = {'limits': limits}
        self.assertRaises(exception.ComputeResourcesUnavailable,
                self.compute.run_instance, self.context, instance=instance,
                filter_properties=filter_properties)
    def test_create_instance_with_oversubscribed_disk(self):
        """Test passing of oversubscribed disk policy from the scheduler."""

        self.flags(reserved_host_disk_mb=0, reserved_host_memory_mb=0)
        self.compute.resource_tracker.update_available_resource(self.context)

        # get total memory as reported by virt driver:
        resources = self.compute.driver.get_available_resource()
        total_disk_gb = resources['local_gb']

        oversub_limit_gb = total_disk_gb * 1.5
        instance_gb = int(total_disk_gb * 1.45)

        # build an instance, specifying an amount of disk that exceeds
        # total_disk_gb, but is less than the oversubscribed limit:
        params = {"root_gb": instance_gb, "memory_mb": 10}
        instance = self._create_fake_instance(params)

        limits = {'disk_gb': oversub_limit_gb}
        filter_properties = {'limits': limits}
        self.compute.run_instance(self.context, instance=instance,
                filter_properties=filter_properties)

        self.assertEqual(instance_gb,
                self.compute.resource_tracker.compute_node['local_gb_used'])

    def test_create_instance_with_oversubscribed_disk_fail(self):
        """Test passing of oversubscribed disk policy from the scheduler, but
        with insufficient disk.
        """
        self.flags(reserved_host_disk_mb=0, reserved_host_memory_mb=0)
        self.compute.resource_tracker.update_available_resource(self.context)

        # get total memory as reported by virt driver:
        resources = self.compute.driver.get_available_resource()
        total_disk_gb = resources['local_gb']

        oversub_limit_gb = total_disk_gb * 1.5
        # 1.55x exceeds the 1.5x oversubscription limit, so the claim fails.
        instance_gb = int(total_disk_gb * 1.55)

        # build an instance, specifying an amount of disk that exceeds
        # total_disk_gb, but is less than the oversubscribed limit:
        params = {"root_gb": instance_gb, "memory_mb": 10}
        instance = self._create_fake_instance(params)

        limits = {'disk_gb': oversub_limit_gb}
        filter_properties = {'limits': limits}
        self.assertRaises(exception.ComputeResourcesUnavailable,
                self.compute.run_instance, self.context, instance=instance,
                filter_properties=filter_properties)
    def test_default_access_ip(self):
        # Access IPs should be populated from the configured default network.
        self.flags(default_access_ip_network_name='test1')
        # Presumably restores real network methods so allocation returns
        # usable IPs -- confirm against fake_network helpers.
        fake_network.unset_stub_network_methods(self.stubs)
        instance = jsonutils.to_primitive(self._create_fake_instance())
        try:
            self.compute.run_instance(self.context, instance=instance,
                    is_first_time=True)
            instances = db.instance_get_all(context.get_admin_context())
            instance = instances[0]
            self.assertEqual(instance.access_ip_v4, '192.168.1.100')
            self.assertEqual(instance.access_ip_v6, '2001:db8:0:1::1')
        finally:
            # Clean up even if the assertions fail.
            db.instance_destroy(self.context, instance['uuid'])
    def test_no_default_access_ip(self):
        # Without default_access_ip_network_name, no access IPs are assigned.
        instance = jsonutils.to_primitive(self._create_fake_instance())
        try:
            self.compute.run_instance(self.context, instance=instance,
                    is_first_time=True)
            instances = db.instance_get_all(context.get_admin_context())
            instance = instances[0]
            self.assertFalse(instance.access_ip_v4)
            self.assertFalse(instance.access_ip_v6)
        finally:
            # Clean up even if the assertions fail.
            db.instance_destroy(self.context, instance['uuid'])
    def test_fail_to_schedule_persists(self):
        """Check the persistence of the ERROR(scheduling) state."""
        # Create an instance already stuck in ERROR/SCHEDULING.
        self._create_instance(params={'vm_state': vm_states.ERROR,
                                      'task_state': task_states.SCHEDULING})
        #check state is failed even after the periodic poll
        self.compute.periodic_tasks(context.get_admin_context())
        self._assert_state({'vm_state': vm_states.ERROR,
                            'task_state': task_states.SCHEDULING})
    def test_run_instance_setup_block_device_mapping_fail(self):
        """Block device mapping failure test.

        Make sure that when there is a block device mapping problem,
        the instance goes to ERROR state and stays there across the
        periodic poll.
        """
        def fake(*args, **kwargs):
            raise test.TestingException()
        self.stubs.Set(nova.compute.manager.ComputeManager,
                       '_setup_block_device_mapping', fake)
        instance = self._create_instance()
        self.assertRaises(test.TestingException, self.compute.run_instance,
                          self.context, instance=instance)
        #check state is failed even after the periodic poll
        self._assert_state({'vm_state': vm_states.ERROR,
                            'task_state': None})
        self.compute.periodic_tasks(context.get_admin_context())
        self._assert_state({'vm_state': vm_states.ERROR,
                            'task_state': None})
    def test_run_instance_spawn_fail(self):
        """Spawn failure test.

        Make sure that when there is a spawning problem, the instance
        goes to ERROR state and stays there across the periodic poll.
        """
        def fake(*args, **kwargs):
            raise test.TestingException()
        self.stubs.Set(self.compute.driver, 'spawn', fake)
        instance = self._create_instance()
        self.assertRaises(test.TestingException, self.compute.run_instance,
                          self.context, instance=instance)
        #check state is failed even after the periodic poll
        self._assert_state({'vm_state': vm_states.ERROR,
                            'task_state': None})
        self.compute.periodic_tasks(context.get_admin_context())
        self._assert_state({'vm_state': vm_states.ERROR,
                            'task_state': None})
    def test_can_terminate_on_error_state(self):
        """Make sure that the instance can be terminated in ERROR state"""
        elevated = context.get_admin_context()
        #check failed to schedule --> terminate
        instance = self._create_instance(params={'vm_state': vm_states.ERROR})
        self.compute.terminate_instance(self.context, instance=instance)
        # The instance must be gone from the non-deleted view afterwards.
        self.assertRaises(exception.InstanceNotFound, db.instance_get_by_uuid,
                          elevated, instance['uuid'])
def test_run_terminate(self):
"""Make sure it is possible to run and terminate instance"""
instance = jsonutils.to_primitive(self._create_fake_instance())
self.compute.run_instance(self.context, instance=instance)
instances = db.instance_get_all(context.get_admin_context())
LOG.info(_("Running instances: %s"), instances)
self.assertEqual(len(instances), 1)
self.compute.terminate_instance(self.context, instance=instance)
instances = db.instance_get_all(context.get_admin_context())
LOG.info(_("After terminating instances: %s"), instances)
self.assertEqual(len(instances), 0)
    def test_run_terminate_with_vol_attached(self):
        """Make sure it is possible to run and terminate instance with volume
        attached
        """
        instance = jsonutils.to_primitive(self._create_fake_instance())
        self.compute.run_instance(self.context, instance=instance)
        instances = db.instance_get_all(context.get_admin_context())
        LOG.info(_("Running instances: %s"), instances)
        self.assertEqual(len(instances), 1)
        # Stub the volume API so attach succeeds without a real volume.
        def fake_check_attach(*args, **kwargs):
            pass
        def fake_reserve_volume(*args, **kwargs):
            pass
        def fake_volume_get(self, context, volume_id):
            return {'id': volume_id}
        self.stubs.Set(nova.volume.api.API, 'get', fake_volume_get)
        self.stubs.Set(nova.volume.api.API, 'check_attach', fake_check_attach)
        self.stubs.Set(nova.volume.api.API, 'reserve_volume',
                       fake_reserve_volume)
        self.compute_api.attach_volume(self.context, instance, 1,
                                       '/dev/vdc')
        # Termination must succeed even with the volume attached.
        self.compute.terminate_instance(self.context, instance=instance)
        instances = db.instance_get_all(context.get_admin_context())
        LOG.info(_("After terminating instances: %s"), instances)
        self.assertEqual(len(instances), 0)
    def test_terminate_no_network(self):
        # This is as reported in LP bug 1008875
        instance = jsonutils.to_primitive(self._create_fake_instance())
        self.compute.run_instance(self.context, instance=instance)
        instances = db.instance_get_all(context.get_admin_context())
        LOG.info(_("Running instances: %s"), instances)
        self.assertEqual(len(instances), 1)
        # Make it look like this is no instance: the network info lookup
        # raises NetworkNotFound during teardown.
        self.mox.StubOutWithMock(self.compute, '_get_instance_nw_info')
        self.compute._get_instance_nw_info(
                mox.IgnoreArg(),
                mox.IgnoreArg()).AndRaise(exception.NetworkNotFound())
        self.mox.ReplayAll()
        # Termination must still succeed despite the missing network.
        self.compute.terminate_instance(self.context, instance=instance)
        instances = db.instance_get_all(context.get_admin_context())
        LOG.info(_("After terminating instances: %s"), instances)
        self.assertEqual(len(instances), 0)
    def test_terminate_failure_leaves_task_state(self):
        """Ensure that a failure in terminate_instance does not result
        in the task state being reverted from DELETING (see LP 1046236).
        """
        instance = jsonutils.to_primitive(self._create_fake_instance())
        self.compute.run_instance(self.context, instance=instance)
        instances = db.instance_get_all(context.get_admin_context())
        LOG.info(_("Running instances: %s"), instances)
        self.assertEqual(len(instances), 1)
        # Network teardown fails ungracefully
        self.mox.StubOutWithMock(self.compute, '_get_instance_nw_info')
        self.compute._get_instance_nw_info(
                mox.IgnoreArg(),
                mox.IgnoreArg()).AndRaise(TypeError())
        self.mox.ReplayAll()
        db.instance_update(self.context, instance['uuid'],
                           {"task_state": task_states.DELETING})
        # Terminate is expected to blow up; swallow only the injected error.
        try:
            self.compute.terminate_instance(self.context, instance=instance)
        except TypeError:
            pass
        instances = db.instance_get_all(context.get_admin_context())
        LOG.info(_("After terminating instances: %s"), instances)
        # The instance survives and its task_state is still DELETING.
        self.assertEqual(len(instances), 1)
        self.assertEqual(instances[0]['task_state'], 'deleting')
def test_run_terminate_timestamps(self):
"""Make sure timestamps are set for launched and destroyed"""
instance = jsonutils.to_primitive(self._create_fake_instance())
self.assertEqual(instance['launched_at'], None)
self.assertEqual(instance['deleted_at'], None)
launch = timeutils.utcnow()
self.compute.run_instance(self.context, instance=instance)
instance = db.instance_get_by_uuid(self.context, instance['uuid'])
self.assert_(instance['launched_at'] > launch)
self.assertEqual(instance['deleted_at'], None)
terminate = timeutils.utcnow()
self.compute.terminate_instance(self.context, instance=instance)
context = self.context.elevated(read_deleted="only")
instance = db.instance_get_by_uuid(context, instance['uuid'])
self.assert_(instance['launched_at'] < terminate)
self.assert_(instance['deleted_at'] > terminate)
def test_stop(self):
"""Ensure instance can be stopped"""
instance = jsonutils.to_primitive(self._create_fake_instance())
self.compute.run_instance(self.context, instance=instance)
db.instance_update(self.context, instance['uuid'],
{"task_state": task_states.STOPPING})
self.compute.stop_instance(self.context, instance=instance)
self.compute.terminate_instance(self.context, instance=instance)
def test_start(self):
"""Ensure instance can be started"""
instance = jsonutils.to_primitive(self._create_fake_instance())
self.compute.run_instance(self.context, instance=instance)
db.instance_update(self.context, instance['uuid'],
{"task_state": task_states.STOPPING})
self.compute.stop_instance(self.context, instance=instance)
db.instance_update(self.context, instance['uuid'],
{"task_state": task_states.STARTING})
self.compute.start_instance(self.context, instance=instance)
self.compute.terminate_instance(self.context, instance=instance)
def test_rescue(self):
"""Ensure instance can be rescued and unrescued"""
called = {'rescued': False,
'unrescued': False}
def fake_rescue(self, context, instance_ref, network_info, image_meta,
rescue_password):
called['rescued'] = True
self.stubs.Set(nova.virt.fake.FakeDriver, 'rescue', fake_rescue)
def fake_unrescue(self, instance_ref, network_info):
called['unrescued'] = True
self.stubs.Set(nova.virt.fake.FakeDriver, 'unrescue',
fake_unrescue)
instance = jsonutils.to_primitive(self._create_fake_instance())
instance_uuid = instance['uuid']
self.compute.run_instance(self.context, instance=instance)
db.instance_update(self.context, instance['uuid'],
{"task_state": task_states.RESCUING})
self.compute.rescue_instance(self.context, instance=instance)
self.assertTrue(called['rescued'])
db.instance_update(self.context, instance['uuid'],
{"task_state": task_states.UNRESCUING})
self.compute.unrescue_instance(self.context, instance=instance)
self.assertTrue(called['unrescued'])
self.compute.terminate_instance(self.context, instance=instance)
    def test_power_on(self):
        """Ensure instance can be powered on"""
        # Flag flipped by the stubbed driver call below.
        called = {'power_on': False}
        def fake_driver_power_on(self, instance):
            called['power_on'] = True
        self.stubs.Set(nova.virt.fake.FakeDriver, 'power_on',
                       fake_driver_power_on)
        instance = jsonutils.to_primitive(self._create_fake_instance())
        self.compute.run_instance(self.context, instance=instance)
        # The compute API layer would normally set this task state.
        db.instance_update(self.context, instance['uuid'],
                           {"task_state": task_states.POWERING_ON})
        self.compute.power_on_instance(self.context, instance=instance)
        self.assertTrue(called['power_on'])
        self.compute.terminate_instance(self.context, instance=instance)
    def test_power_off(self):
        """Ensure instance can be powered off"""
        # Flag flipped by the stubbed driver call below.
        called = {'power_off': False}
        def fake_driver_power_off(self, instance):
            called['power_off'] = True
        self.stubs.Set(nova.virt.fake.FakeDriver, 'power_off',
                       fake_driver_power_off)
        instance = jsonutils.to_primitive(self._create_fake_instance())
        self.compute.run_instance(self.context, instance=instance)
        # The compute API layer would normally set this task state.
        db.instance_update(self.context, instance['uuid'],
                           {"task_state": task_states.POWERING_OFF})
        self.compute.power_off_instance(self.context, instance=instance)
        self.assertTrue(called['power_off'])
        self.compute.terminate_instance(self.context, instance=instance)
def test_pause(self):
"""Ensure instance can be paused and unpaused"""
instance = jsonutils.to_primitive(self._create_fake_instance())
self.compute.run_instance(self.context, instance=instance)
db.instance_update(self.context, instance['uuid'],
{"task_state": task_states.PAUSING})
self.compute.pause_instance(self.context, instance=instance)
db.instance_update(self.context, instance['uuid'],
{"task_state": task_states.UNPAUSING})
self.compute.unpause_instance(self.context, instance=instance)
self.compute.terminate_instance(self.context, instance=instance)
def test_suspend(self):
"""ensure instance can be suspended and resumed"""
instance = jsonutils.to_primitive(self._create_fake_instance())
self.compute.run_instance(self.context, instance=instance)
db.instance_update(self.context, instance['uuid'],
{"task_state": task_states.SUSPENDING})
self.compute.suspend_instance(self.context, instance=instance)
db.instance_update(self.context, instance['uuid'],
{"task_state": task_states.RESUMING})
self.compute.resume_instance(self.context, instance=instance)
self.compute.terminate_instance(self.context, instance=instance)
    def test_suspend_error(self):
        """Ensure vm_state is ERROR when suspend error occurs"""
        def fake(*args, **kwargs):
            raise test.TestingException()
        self.stubs.Set(self.compute.driver, 'suspend', fake)
        instance = jsonutils.to_primitive(self._create_fake_instance())
        instance_uuid = instance['uuid']
        self.compute.run_instance(self.context, instance=instance)
        self.assertRaises(test.TestingException,
                          self.compute.suspend_instance,
                          self.context,
                          instance=instance)
        # The driver failure must push the instance into ERROR.
        instance = db.instance_get_by_uuid(self.context, instance_uuid)
        self.assertEqual(instance['vm_state'], vm_states.ERROR)
        self.compute.terminate_instance(self.context, instance=instance)
    def test_rebuild(self):
        """Ensure instance can be rebuilt"""
        instance = jsonutils.to_primitive(self._create_fake_instance())
        image_ref = instance['image_ref']
        # Capture the original system metadata to hand to the rebuild.
        sys_metadata = db.instance_system_metadata_get(self.context,
                        instance['uuid'])
        self.compute.run_instance(self.context, instance=instance)
        # The compute API layer would normally set this task state.
        db.instance_update(self.context, instance['uuid'],
                           {"task_state": task_states.REBUILDING})
        self.compute.rebuild_instance(self.context, instance,
                                      image_ref, image_ref,
                                      injected_files=[],
                                      new_pass="new_password",
                                      orig_sys_metadata=sys_metadata)
        self.compute.terminate_instance(self.context, instance=instance)
    def test_rebuild_launch_time(self):
        """Ensure launched_at is updated to the time of the rebuild."""
        old_time = datetime.datetime(2012, 4, 1)
        cur_time = datetime.datetime(2012, 12, 21, 12, 21)
        # NOTE(review): the time override is not cleared in this method;
        # presumably the test base class resets it in tearDown -- confirm.
        timeutils.set_time_override(old_time)
        instance = jsonutils.to_primitive(self._create_fake_instance())
        instance_uuid = instance['uuid']
        image_ref = instance['image_ref']
        self.compute.run_instance(self.context, instance=instance)
        # Advance the clock before rebuilding.
        timeutils.set_time_override(cur_time)
        db.instance_update(self.context, instance['uuid'],
                           {"task_state": task_states.REBUILDING})
        self.compute.rebuild_instance(self.context, instance,
                                      image_ref, image_ref,
                                      injected_files=[],
                                      new_pass="new_password")
        instance = db.instance_get_by_uuid(self.context, instance_uuid)
        self.assertEquals(cur_time, instance['launched_at'])
        self.compute.terminate_instance(self.context,
                instance=jsonutils.to_primitive(instance))
    def test_reboot_soft(self):
        """Ensure instance can be soft rebooted"""
        instance = jsonutils.to_primitive(self._create_fake_instance())
        self.compute.run_instance(self.context, instance=instance)
        # The compute API layer would normally set this task state.
        db.instance_update(self.context, instance['uuid'],
                           {'task_state': task_states.REBOOTING})
        reboot_type = "SOFT"
        self.compute.reboot_instance(self.context,
                                     instance=instance,
                                     reboot_type=reboot_type)
        # After the reboot the instance is RUNNING with no task pending.
        inst_ref = db.instance_get_by_uuid(self.context, instance['uuid'])
        self.assertEqual(inst_ref['power_state'], power_state.RUNNING)
        self.assertEqual(inst_ref['task_state'], None)
        self.compute.terminate_instance(self.context,
                instance=jsonutils.to_primitive(inst_ref))
    def test_reboot_hard(self):
        """Ensure instance can be hard rebooted"""
        instance = jsonutils.to_primitive(self._create_fake_instance())
        self.compute.run_instance(self.context, instance=instance)
        # The compute API layer would normally set this task state.
        db.instance_update(self.context, instance['uuid'],
                           {'task_state': task_states.REBOOTING_HARD})
        reboot_type = "HARD"
        self.compute.reboot_instance(self.context, instance=instance,
                                     reboot_type=reboot_type)
        # After the reboot the instance is RUNNING with no task pending.
        inst_ref = db.instance_get_by_uuid(self.context, instance['uuid'])
        self.assertEqual(inst_ref['power_state'], power_state.RUNNING)
        self.assertEqual(inst_ref['task_state'], None)
        self.compute.terminate_instance(self.context,
                instance=jsonutils.to_primitive(inst_ref))
    def test_set_admin_password(self):
        """Ensure instance can have its admin password set"""
        instance = jsonutils.to_primitive(self._create_fake_instance())
        self.compute.run_instance(self.context, instance=instance)
        db.instance_update(self.context, instance['uuid'],
                           {'task_state': task_states.UPDATING_PASSWORD})
        inst_ref = db.instance_get_by_uuid(self.context, instance['uuid'])
        self.assertEqual(inst_ref['vm_state'], vm_states.ACTIVE)
        self.assertEqual(inst_ref['task_state'], task_states.UPDATING_PASSWORD)
        self.compute.set_admin_password(self.context, instance=instance)
        # A successful password change clears the task state.
        inst_ref = db.instance_get_by_uuid(self.context, instance['uuid'])
        self.assertEqual(inst_ref['vm_state'], vm_states.ACTIVE)
        self.assertEqual(inst_ref['task_state'], None)
        self.compute.terminate_instance(self.context,
                instance=jsonutils.to_primitive(inst_ref))
    def test_set_admin_password_bad_state(self):
        """Test setting password while instance is rebuilding."""
        instance = jsonutils.to_primitive(self._create_fake_instance())
        self.compute.run_instance(self.context, instance=instance)
        db.instance_update(self.context, instance['uuid'], {
            "power_state": power_state.NOSTATE,
        })
        instance = jsonutils.to_primitive(db.instance_get_by_uuid(
                                          self.context, instance['uuid']))
        self.assertEqual(instance['power_state'], power_state.NOSTATE)
        def fake_driver_get_info(self2, _instance):
            # Report NOSTATE so set_admin_password sees a bad power state.
            return {'state': power_state.NOSTATE,
                    'max_mem': 0,
                    'mem': 0,
                    'num_cpu': 2,
                    'cpu_time': 0}
        self.stubs.Set(nova.virt.fake.FakeDriver, 'get_info',
                       fake_driver_get_info)
        db.instance_update(self.context, instance['uuid'],
                           {"task_state": task_states.UPDATING_PASSWORD})
        self.assertRaises(exception.InstancePasswordSetFailed,
                          self.compute.set_admin_password,
                          self.context,
                          instance=instance)
        self.compute.terminate_instance(self.context, instance=instance)
    def _do_test_set_admin_password_driver_error(self, exc, expected_vm_state,
                                                 expected_task_state):
        """Ensure expected exception is raised if set_admin_password fails.

        :param exc: exception the stubbed driver will raise
        :param expected_vm_state: vm_state the instance must end in
        :param expected_task_state: task_state the instance must end in
        """
        def fake_sleep(_time):
            # Avoid real delays from any retry loops.
            pass
        self.stubs.Set(time, 'sleep', fake_sleep)
        def fake_driver_set_pass(self2, _instance, _pwd):
            raise exc
        self.stubs.Set(nova.virt.fake.FakeDriver, 'set_admin_password',
                       fake_driver_set_pass)
        instance = jsonutils.to_primitive(self._create_fake_instance())
        self.compute.run_instance(self.context, instance=instance)
        db.instance_update(self.context, instance['uuid'],
                           {'task_state': task_states.UPDATING_PASSWORD})
        inst_ref = db.instance_get_by_uuid(self.context, instance['uuid'])
        self.assertEqual(inst_ref['vm_state'], vm_states.ACTIVE)
        self.assertEqual(inst_ref['task_state'], task_states.UPDATING_PASSWORD)
        #error raised from the driver should not reveal internal information
        #so a new error is raised
        self.assertRaises(exception.InstancePasswordSetFailed,
                          self.compute.set_admin_password,
                          self.context,
                          instance=jsonutils.to_primitive(inst_ref))
        inst_ref = db.instance_get_by_uuid(self.context, instance['uuid'])
        self.assertEqual(inst_ref['vm_state'], expected_vm_state)
        self.assertEqual(inst_ref['task_state'], expected_task_state)
        self.compute.terminate_instance(self.context,
                instance=jsonutils.to_primitive(inst_ref))
    def test_set_admin_password_driver_not_authorized(self):
        """
        Ensure expected exception is raised if set_admin_password not
        authorized.
        """
        # A NotAuthorized failure leaves the instance in ERROR.
        exc = exception.NotAuthorized(_('Internal error'))
        self._do_test_set_admin_password_driver_error(exc,
                                                vm_states.ERROR,
                                                None)
    def test_set_admin_password_driver_not_implemented(self):
        """
        Ensure expected exception is raised if set_admin_password not
        implemented by driver.
        """
        # An unimplemented driver call leaves the instance ACTIVE.
        exc = NotImplementedError()
        self._do_test_set_admin_password_driver_error(exc,
                                                      vm_states.ACTIVE,
                                                      None)
    def test_inject_file(self):
        """Ensure we can write a file to an instance"""
        called = {'inject': False}
        def fake_driver_inject_file(self2, instance, path, contents):
            # Verify the arguments are forwarded unchanged to the driver.
            self.assertEqual(path, "/tmp/test")
            self.assertEqual(contents, "File Contents")
            called['inject'] = True
        self.stubs.Set(nova.virt.fake.FakeDriver, 'inject_file',
                       fake_driver_inject_file)
        instance = jsonutils.to_primitive(self._create_fake_instance())
        self.compute.run_instance(self.context, instance=instance)
        self.compute.inject_file(self.context, "/tmp/test",
                "File Contents", instance=instance)
        self.assertTrue(called['inject'])
        self.compute.terminate_instance(self.context, instance=instance)
    def test_inject_network_info(self):
        """Ensure we can inject network info"""
        called = {'inject': False}
        def fake_driver_inject_network(self, instance, network_info):
            called['inject'] = True
        self.stubs.Set(nova.virt.fake.FakeDriver, 'inject_network_info',
                       fake_driver_inject_network)
        instance = jsonutils.to_primitive(self._create_fake_instance())
        self.compute.run_instance(self.context, instance=instance)
        self.compute.inject_network_info(self.context, instance=instance)
        # The driver-level hook must have been reached.
        self.assertTrue(called['inject'])
        self.compute.terminate_instance(self.context, instance=instance)
def test_reset_network(self):
"""Ensure we can reset networking on an instance"""
called = {'count': 0}
def fake_driver_reset_network(self, instance):
called['count'] += 1
self.stubs.Set(nova.virt.fake.FakeDriver, 'reset_network',
fake_driver_reset_network)
instance = jsonutils.to_primitive(self._create_fake_instance())
instance_uuid = instance['uuid']
self.compute.run_instance(self.context, instance=instance)
self.compute.reset_network(self.context, instance=instance)
self.assertEqual(called['count'], 1)
self.compute.terminate_instance(self.context, instance=instance)
def test_snapshot(self):
"""Ensure instance can be snapshotted"""
instance = jsonutils.to_primitive(self._create_fake_instance())
instance_uuid = instance['uuid']
name = "myfakesnapshot"
self.compute.run_instance(self.context, instance=instance)
db.instance_update(self.context, instance['uuid'],
{"task_state": task_states.IMAGE_SNAPSHOT})
self.compute.snapshot_instance(self.context, name, instance=instance)
self.compute.terminate_instance(self.context, instance=instance)
    def test_snapshot_fails(self):
        """Ensure task_state is set to None if snapshot fails"""
        def fake_snapshot(*args, **kwargs):
            raise test.TestingException()
        self.stubs.Set(self.compute.driver, 'snapshot', fake_snapshot)
        instance = jsonutils.to_primitive(self._create_fake_instance())
        self.compute.run_instance(self.context, instance=instance)
        db.instance_update(self.context, instance['uuid'],
                           {"task_state": task_states.IMAGE_SNAPSHOT})
        self.assertRaises(test.TestingException,
                          self.compute.snapshot_instance,
                          self.context, "failing_snapshot", instance=instance)
        # The failed snapshot must not leave IMAGE_SNAPSHOT set.
        self._assert_state({'task_state': None})
        self.compute.terminate_instance(self.context, instance=instance)
def _assert_state(self, state_dict):
"""Assert state of VM is equal to state passed as parameter"""
instances = db.instance_get_all(context.get_admin_context())
self.assertEqual(len(instances), 1)
if 'vm_state' in state_dict:
self.assertEqual(state_dict['vm_state'], instances[0]['vm_state'])
if 'task_state' in state_dict:
self.assertEqual(state_dict['task_state'],
instances[0]['task_state'])
if 'power_state' in state_dict:
self.assertEqual(state_dict['power_state'],
instances[0]['power_state'])
def test_console_output(self):
"""Make sure we can get console output from instance"""
instance = jsonutils.to_primitive(self._create_fake_instance())
self.compute.run_instance(self.context, instance=instance)
output = self.compute.get_console_output(self.context,
instance=instance)
self.assertEqual(output, 'FAKE CONSOLE OUTPUT\nANOTHER\nLAST LINE')
self.compute.terminate_instance(self.context, instance=instance)
def test_console_output_tail(self):
"""Make sure we can get console output from instance"""
instance = jsonutils.to_primitive(self._create_fake_instance())
self.compute.run_instance(self.context, instance=instance)
output = self.compute.get_console_output(self.context,
instance=instance, tail_length=2)
self.assertEqual(output, 'ANOTHER\nLAST LINE')
self.compute.terminate_instance(self.context, instance=instance)
    def test_novnc_vnc_console(self):
        """Make sure we can get a vnc console for an instance."""
        instance = jsonutils.to_primitive(self._create_fake_instance())
        self.compute.run_instance(self.context, instance=instance)
        # Try with the full instance
        console = self.compute.get_vnc_console(self.context, 'novnc',
                                               instance=instance)
        self.assert_(console)
        self.compute.terminate_instance(self.context, instance=instance)
    def test_xvpvnc_vnc_console(self):
        """Make sure we can get a vnc console for an instance."""
        instance = jsonutils.to_primitive(self._create_fake_instance())
        self.compute.run_instance(self.context, instance=instance)
        console = self.compute.get_vnc_console(self.context, 'xvpvnc',
                                               instance=instance)
        self.assert_(console)
        self.compute.terminate_instance(self.context, instance=instance)
    def test_invalid_vnc_console_type(self):
        """Raise useful error if console type is an unrecognised string"""
        instance = jsonutils.to_primitive(self._create_fake_instance())
        self.compute.run_instance(self.context, instance=instance)
        self.assertRaises(exception.ConsoleTypeInvalid,
                          self.compute.get_vnc_console,
                          self.context, 'invalid', instance=instance)
        self.compute.terminate_instance(self.context, instance=instance)
    def test_missing_vnc_console_type(self):
        """Raise useful error if console type is None"""
        instance = jsonutils.to_primitive(self._create_fake_instance())
        self.compute.run_instance(self.context, instance=instance)
        self.assertRaises(exception.ConsoleTypeInvalid,
                          self.compute.get_vnc_console,
                          self.context, None, instance=instance)
        self.compute.terminate_instance(self.context, instance=instance)
    def test_diagnostics(self):
        """Make sure we can get diagnostics for an instance."""
        instance = jsonutils.to_primitive(self._create_fake_instance())
        self.compute.run_instance(self.context, instance=instance)

        diagnostics = self.compute.get_diagnostics(self.context,
                instance=instance)
        self.assertEqual(diagnostics, 'FAKE_DIAGNOSTICS')

        # NOTE(review): fetched twice, apparently to check the call is
        # repeatable -- confirm the duplication is intentional.
        diagnostics = self.compute.get_diagnostics(self.context,
                instance=instance)
        self.assertEqual(diagnostics, 'FAKE_DIAGNOSTICS')
        self.compute.terminate_instance(self.context, instance=instance)
def test_add_fixed_ip_usage_notification(self):
def dummy(*args, **kwargs):
pass
self.stubs.Set(nova.network.API, 'add_fixed_ip_to_instance',
dummy)
self.stubs.Set(nova.compute.manager.ComputeManager,
'inject_network_info', dummy)
self.stubs.Set(nova.compute.manager.ComputeManager,
'reset_network', dummy)
instance = jsonutils.to_primitive(self._create_fake_instance())
instance_uuid = instance['uuid']
self.assertEquals(len(test_notifier.NOTIFICATIONS), 0)
self.compute.add_fixed_ip_to_instance(self.context, network_id=1,
instance=instance)
self.assertEquals(len(test_notifier.NOTIFICATIONS), 2)
self.compute.terminate_instance(self.context, instance=instance)
def test_remove_fixed_ip_usage_notification(self):
def dummy(*args, **kwargs):
pass
self.stubs.Set(nova.network.API, 'remove_fixed_ip_from_instance',
dummy)
self.stubs.Set(nova.compute.manager.ComputeManager,
'inject_network_info', dummy)
self.stubs.Set(nova.compute.manager.ComputeManager,
'reset_network', dummy)
instance = jsonutils.to_primitive(self._create_fake_instance())
instance_uuid = instance['uuid']
self.assertEquals(len(test_notifier.NOTIFICATIONS), 0)
self.compute.remove_fixed_ip_from_instance(self.context, 1,
instance=instance)
self.assertEquals(len(test_notifier.NOTIFICATIONS), 2)
self.compute.terminate_instance(self.context, instance=instance)
    def test_run_instance_usage_notification(self):
        """Ensure run instance generates appropriate usage notification"""
        instance = jsonutils.to_primitive(self._create_fake_instance())
        instance_uuid = instance['uuid']
        self.compute.run_instance(self.context, instance=instance)
        # Expect exactly create.start followed by create.end.
        self.assertEquals(len(test_notifier.NOTIFICATIONS), 2)
        inst_ref = db.instance_get_by_uuid(self.context, instance_uuid)
        msg = test_notifier.NOTIFICATIONS[0]
        self.assertEquals(msg['event_type'], 'compute.instance.create.start')
        self.assertEquals(msg['payload']['image_name'], 'fake_name')
        # The last event is the one with the sugar in it.
        msg = test_notifier.NOTIFICATIONS[1]
        self.assertEquals(msg['priority'], 'INFO')
        self.assertEquals(msg['event_type'], 'compute.instance.create.end')
        payload = msg['payload']
        self.assertEquals(payload['tenant_id'], self.project_id)
        self.assertEquals(payload['image_name'], 'fake_name')
        self.assertEquals(payload['user_id'], self.user_id)
        self.assertEquals(payload['instance_id'], inst_ref.uuid)
        self.assertEquals(payload['instance_type'], 'm1.tiny')
        type_id = instance_types.get_instance_type_by_name('m1.tiny')['id']
        self.assertEquals(str(payload['instance_type_id']), str(type_id))
        self.assertEquals(payload['state'], 'active')
        self.assertTrue('display_name' in payload)
        self.assertTrue('created_at' in payload)
        self.assertTrue('launched_at' in payload)
        self.assertTrue(payload['launched_at'])
        image_ref_url = utils.generate_image_url(FAKE_IMAGE_REF)
        self.assertEquals(payload['image_ref_url'], image_ref_url)
        self.compute.terminate_instance(self.context,
                instance=jsonutils.to_primitive(inst_ref))
    def test_terminate_usage_notification(self):
        """Ensure terminate_instance generates correct usage notification"""
        old_time = datetime.datetime(2012, 4, 1)
        cur_time = datetime.datetime(2012, 12, 21, 12, 21)
        # NOTE(review): the time override is not cleared in this method;
        # presumably the test base class resets it in tearDown -- confirm.
        timeutils.set_time_override(old_time)

        instance = jsonutils.to_primitive(self._create_fake_instance())
        self.compute.run_instance(self.context, instance=instance)
        # Ignore the notifications emitted by the create itself.
        test_notifier.NOTIFICATIONS = []
        timeutils.set_time_override(cur_time)
        self.compute.terminate_instance(self.context, instance=instance)

        # delete.start, shutdown.start, shutdown.end, delete.end -- in order.
        self.assertEquals(len(test_notifier.NOTIFICATIONS), 4)

        msg = test_notifier.NOTIFICATIONS[0]
        self.assertEquals(msg['priority'], 'INFO')
        self.assertEquals(msg['event_type'], 'compute.instance.delete.start')
        msg1 = test_notifier.NOTIFICATIONS[1]
        self.assertEquals(msg1['event_type'],
                                            'compute.instance.shutdown.start')
        msg1 = test_notifier.NOTIFICATIONS[2]
        self.assertEquals(msg1['event_type'], 'compute.instance.shutdown.end')
        msg1 = test_notifier.NOTIFICATIONS[3]
        self.assertEquals(msg1['event_type'], 'compute.instance.delete.end')
        payload = msg1['payload']
        self.assertEquals(payload['tenant_id'], self.project_id)
        self.assertEquals(payload['user_id'], self.user_id)
        self.assertEquals(payload['instance_id'], instance['uuid'])
        self.assertEquals(payload['instance_type'], 'm1.tiny')
        type_id = instance_types.get_instance_type_by_name('m1.tiny')['id']
        self.assertEquals(str(payload['instance_type_id']), str(type_id))
        self.assertTrue('display_name' in payload)
        self.assertTrue('created_at' in payload)
        self.assertTrue('launched_at' in payload)
        self.assertTrue('deleted_at' in payload)
        # deleted_at reflects the overridden clock at termination time.
        self.assertEqual(payload['deleted_at'], str(cur_time))
        image_ref_url = utils.generate_image_url(FAKE_IMAGE_REF)
        self.assertEquals(payload['image_ref_url'], image_ref_url)
    def test_run_instance_existing(self):
        """Ensure failure when running an instance that already exists"""
        instance = jsonutils.to_primitive(self._create_fake_instance())
        self.compute.run_instance(self.context, instance=instance)
        # A second run_instance on the same instance must be rejected.
        self.assertRaises(exception.Invalid,
                          self.compute.run_instance,
                          self.context,
                          instance=instance)
        self.compute.terminate_instance(self.context, instance=instance)
    def test_instance_set_to_error_on_uncaught_exception(self):
        """Test that instance is set to error state when exception is raised"""
        instance = jsonutils.to_primitive(self._create_fake_instance())
        # Network allocation fails with a RemoteError...
        self.mox.StubOutWithMock(self.compute.network_api,
                                 "allocate_for_instance")
        self.compute.network_api.allocate_for_instance(
                mox.IgnoreArg(),
                mox.IgnoreArg(),
                requested_networks=None,
                vpn=False).AndRaise(rpc_common.RemoteError())

        fake_network.unset_stub_network_methods(self.stubs)

        self.mox.ReplayAll()

        self.assertRaises(rpc_common.RemoteError,
                          self.compute.run_instance,
                          self.context,
                          instance=instance)
        # ...and the instance must land in ERROR.
        instance = db.instance_get_by_uuid(context.get_admin_context(),
                                           instance['uuid'])
        self.assertEqual(vm_states.ERROR, instance['vm_state'])

        self.compute.terminate_instance(self.context,
                instance=jsonutils.to_primitive(instance))
    def test_delete_instance_succedes_on_volume_fail(self):
        # NOTE(review): "succedes" is a typo for "succeeds"; the name is kept
        # as-is so the test's history stays stable.
        instance = self._create_fake_instance()

        def fake_cleanup_volumes(context, instance):
            # Simulate a volume cleanup failure during delete.
            raise test.TestingException()

        self.stubs.Set(self.compute, '_cleanup_volumes',
                       fake_cleanup_volumes)

        # Deletion must not propagate the volume cleanup failure.
        self.compute._delete_instance(self.context,
                instance=jsonutils.to_primitive(instance))
    def test_instance_termination_exception_sets_error(self):
        """Test that we handle InstanceTerminationFailure
        which is propagated up from the underlying driver.
        """
        instance = self._create_fake_instance()

        def fake_delete_instance(context, instance):
            raise exception.InstanceTerminationFailure(reason='')

        self.stubs.Set(self.compute, '_delete_instance',
                       fake_delete_instance)

        # terminate_instance should absorb the failure...
        self.compute.terminate_instance(self.context,
                instance=jsonutils.to_primitive(instance))
        # ...and mark the instance as ERROR instead of raising.
        instance = db.instance_get_by_uuid(self.context, instance['uuid'])
        self.assertEqual(instance['vm_state'], vm_states.ERROR)
    def test_network_is_deallocated_on_spawn_failure(self):
        """When a spawn fails the network must be deallocated"""
        instance = jsonutils.to_primitive(self._create_fake_instance())
        self.mox.StubOutWithMock(self.compute, "_setup_block_device_mapping")
        # Fail after network allocation but before the instance spawns;
        # mox VerifyAll (in tearDown) confirms the recorded calls happened.
        self.compute._setup_block_device_mapping(
                mox.IgnoreArg(),
                mox.IgnoreArg()).AndRaise(rpc.common.RemoteError('', '', ''))
        self.mox.ReplayAll()
        self.assertRaises(rpc.common.RemoteError,
                          self.compute.run_instance,
                          self.context, instance=instance)
        self.compute.terminate_instance(self.context, instance=instance)
    def test_lock(self):
        """ensure locked instance cannot be changed"""
        instance = jsonutils.to_primitive(self._create_fake_instance())
        instance_uuid = instance['uuid']
        self.compute.run_instance(self.context, instance=instance)

        non_admin_context = context.RequestContext(None,
                                                   None,
                                                   is_admin=False)

        def check_task_state(task_state):
            # Re-read the instance and verify its current task_state.
            instance = db.instance_get_by_uuid(self.context, instance_uuid)
            self.assertEqual(instance['task_state'], task_state)

        # should fail with locked nonadmin context
        self.compute_api.lock(self.context, instance)
        instance = db.instance_get_by_uuid(self.context, instance_uuid)
        self.assertRaises(exception.InstanceIsLocked,
                          self.compute_api.reboot,
                          non_admin_context, instance, 'SOFT')
        # The rejected request must not have touched task_state.
        check_task_state(None)

        # should fail with invalid task state
        self.compute_api.unlock(self.context, instance)
        instance = db.instance_update(self.context, instance_uuid,
                                      {'task_state': task_states.REBOOTING})
        self.assertRaises(exception.InstanceInvalidState,
                          self.compute_api.reboot,
                          non_admin_context, instance, 'SOFT')
        check_task_state(task_states.REBOOTING)

        # should succeed with admin context
        instance = db.instance_update(self.context, instance_uuid,
                                      {'task_state': None})
        self.compute_api.reboot(self.context, instance, 'SOFT')
        check_task_state(task_states.REBOOTING)

        self.compute.terminate_instance(self.context,
                instance=jsonutils.to_primitive(instance))
def _test_state_revert(self, operation, pre_task_state,
post_task_state):
instance = self._create_fake_instance()
self.compute.run_instance(self.context, instance=instance)
# The API would have set task_state, so do that here to test
# that the state gets reverted on failure
db.instance_update(self.context, instance['uuid'],
{"task_state": pre_task_state})
orig_elevated = self.context.elevated
orig_notify = self.compute._notify_about_instance_usage
def _get_an_exception(*args, **kwargs):
raise Exception("This fails every single time!")
self.stubs.Set(self.context, 'elevated', _get_an_exception)
self.stubs.Set(self.compute,
'_notify_about_instance_usage', _get_an_exception)
raised = False
try:
ret_val = getattr(self.compute, operation)(self.context,
instance=instance)
except Exception:
raised = True
finally:
# self.context.elevated() is called in tearDown()
self.stubs.Set(self.context, 'elevated', orig_elevated)
self.stubs.Set(self.compute,
'_notify_about_instance_usage', orig_notify)
self.assertTrue(raised)
# Fetch the instance's task_state and make sure it went to expected
# post-state
instance = db.instance_get_by_uuid(self.context, instance['uuid'])
self.assertEqual(instance["task_state"], post_task_state)
    def test_state_revert(self):
        """ensure that task_state is reverted after a failed operation"""
        # Each tuple: (manager method name, task_state set beforehand,
        # task_state expected after the forced failure).
        actions = [
            ("reboot_instance", task_states.REBOOTING, None),
            ("stop_instance", task_states.STOPPING, None),
            ("start_instance", task_states.STARTING, None),
            # terminate is the one operation that keeps its task_state on
            # failure, so a later retry of the delete is still possible.
            ("terminate_instance", task_states.DELETING,
                                   task_states.DELETING),
            ("power_off_instance", task_states.POWERING_OFF, None),
            ("power_on_instance", task_states.POWERING_ON, None),
            ("rebuild_instance", task_states.REBUILDING, None),
            ("set_admin_password", task_states.UPDATING_PASSWORD, None),
            ("rescue_instance", task_states.RESCUING, None),
            ("unrescue_instance", task_states.UNRESCUING, None),
            ("revert_resize", task_states.RESIZE_REVERTING, None),
            ("prep_resize", task_states.RESIZE_PREP, None),
            ("resize_instance", task_states.RESIZE_PREP, None),
            ("pause_instance", task_states.PAUSING, None),
            ("unpause_instance", task_states.UNPAUSING, None),
            ("suspend_instance", task_states.SUSPENDING, None),
            ("resume_instance", task_states.RESUMING, None),
            ]

        for operation, pre_state, post_state in actions:
            self._test_state_revert(operation, pre_state, post_state)
def _ensure_quota_reservations_committed(self):
"""Mock up commit of quota reservations"""
reservations = list('fake_res')
self.mox.StubOutWithMock(nova.quota.QUOTAS, 'commit')
nova.quota.QUOTAS.commit(mox.IgnoreArg(), reservations)
self.mox.ReplayAll()
return reservations
def _ensure_quota_reservations_rolledback(self):
"""Mock up rollback of quota reservations"""
reservations = list('fake_res')
self.mox.StubOutWithMock(nova.quota.QUOTAS, 'rollback')
nova.quota.QUOTAS.rollback(mox.IgnoreArg(), reservations)
self.mox.ReplayAll()
return reservations
    def test_finish_resize(self):
        """Contrived test to ensure finish_resize doesn't raise anything"""

        def fake(*args, **kwargs):
            pass

        # finish_migration is a driver call; stub it to a no-op.
        self.stubs.Set(self.compute.driver, 'finish_migration', fake)
        # A successful resize must commit its quota reservations.
        reservations = self._ensure_quota_reservations_committed()

        context = self.context.elevated()
        instance = jsonutils.to_primitive(self._create_fake_instance())
        instance_type = instance_types.get_default_instance_type()
        db.instance_update(self.context, instance["uuid"],
                           {"task_state": task_states.RESIZE_PREP})
        self.compute.prep_resize(context, instance=instance,
                                 instance_type=instance_type,
                                 image={})
        migration_ref = db.migration_get_by_instance_and_status(context,
                instance['uuid'], 'pre-migrating')
        db.instance_update(self.context, instance["uuid"],
                           {"task_state": task_states.RESIZE_MIGRATED})
        # Should run to completion without raising.
        self.compute.finish_resize(context,
                migration_id=int(migration_ref['id']),
                disk_info={}, image={}, instance=instance,
                reservations=reservations)
        self.compute.terminate_instance(self.context, instance=instance)
def test_finish_resize_handles_error(self):
"""Make sure we don't leave the instance in RESIZE on error"""
def throw_up(*args, **kwargs):
raise test.TestingException()
def fake(*args, **kwargs):
pass
self.stubs.Set(self.compute.driver, 'finish_migration', throw_up)
reservations = self._ensure_quota_reservations_rolledback()
context = self.context.elevated()
instance = jsonutils.to_primitive(self._create_fake_instance())
instance_type = instance_types.get_default_instance_type()
self.compute.prep_resize(context, instance=instance,
instance_type=instance_type,
image={}, reservations=reservations)
migration_ref = db.migration_get_by_instance_and_status(context,
instance['uuid'], 'pre-migrating')
db.instance_update(self.context, instance["uuid"],
{"task_state": task_states.RESIZE_MIGRATED})
self.assertRaises(test.TestingException, self.compute.finish_resize,
context, migration_id=int(migration_ref['id']),
disk_info={}, image={}, instance=instance,
reservations=reservations)
instance = db.instance_get_by_uuid(context, instance['uuid'])
self.assertEqual(instance['vm_state'], vm_states.ERROR)
self.compute.terminate_instance(self.context,
instance=jsonutils.to_primitive(instance))
    def test_rebuild_instance_notification(self):
        """Ensure notifications on instance migrate/resize"""
        # Pin "now" so launched_at in the payload is predictable.
        old_time = datetime.datetime(2012, 4, 1)
        cur_time = datetime.datetime(2012, 12, 21, 12, 21)
        timeutils.set_time_override(old_time)
        inst_ref = jsonutils.to_primitive(self._create_fake_instance())
        self.compute.run_instance(self.context, instance=inst_ref)
        timeutils.set_time_override(cur_time)

        # Discard notifications emitted by the boot; only capture the
        # rebuild's.
        test_notifier.NOTIFICATIONS = []
        instance = db.instance_get_by_uuid(self.context, inst_ref['uuid'])
        orig_sys_metadata = db.instance_system_metadata_get(self.context,
                inst_ref['uuid'])
        # Rebuild onto a different image ref.
        image_ref = instance["image_ref"]
        new_image_ref = image_ref + '-new_image_ref'
        db.instance_update(self.context, inst_ref['uuid'],
                           {'image_ref': new_image_ref})

        password = "new_password"

        instance = db.instance_get_by_uuid(self.context, inst_ref['uuid'])

        db.instance_update(self.context, instance['uuid'],
                           {"task_state": task_states.REBUILDING})

        self.compute.rebuild_instance(self.context.elevated(),
                                      jsonutils.to_primitive(instance),
                                      image_ref, new_image_ref,
                                      injected_files=[],
                                      new_pass=password,
                                      orig_sys_metadata=orig_sys_metadata)

        instance = db.instance_get_by_uuid(self.context, inst_ref['uuid'])

        image_ref_url = utils.generate_image_url(image_ref)
        new_image_ref_url = utils.generate_image_url(new_image_ref)

        # Expect exists (old image), rebuild.start and rebuild.end (new
        # image), in that order.
        self.assertEquals(len(test_notifier.NOTIFICATIONS), 3)
        msg = test_notifier.NOTIFICATIONS[0]
        self.assertEquals(msg['event_type'],
                          'compute.instance.exists')
        self.assertEquals(msg['payload']['image_ref_url'], image_ref_url)
        msg = test_notifier.NOTIFICATIONS[1]
        self.assertEquals(msg['event_type'],
                          'compute.instance.rebuild.start')
        self.assertEquals(msg['payload']['image_ref_url'], new_image_ref_url)
        self.assertEquals(msg['payload']['image_name'], 'fake_name')
        msg = test_notifier.NOTIFICATIONS[2]
        self.assertEquals(msg['event_type'],
                          'compute.instance.rebuild.end')
        self.assertEquals(msg['priority'], 'INFO')
        payload = msg['payload']
        self.assertEquals(payload['image_name'], 'fake_name')
        self.assertEquals(payload['tenant_id'], self.project_id)
        self.assertEquals(payload['user_id'], self.user_id)
        self.assertEquals(payload['instance_id'], inst_ref['uuid'])
        self.assertEquals(payload['instance_type'], 'm1.tiny')
        type_id = instance_types.get_instance_type_by_name('m1.tiny')['id']
        self.assertEquals(str(payload['instance_type_id']), str(type_id))
        self.assertTrue('display_name' in payload)
        self.assertTrue('created_at' in payload)
        self.assertTrue('launched_at' in payload)
        # launched_at reflects the overridden "current" time.
        self.assertEqual(payload['launched_at'], str(cur_time))
        self.assertEquals(payload['image_ref_url'], new_image_ref_url)
        self.compute.terminate_instance(self.context,
                instance=jsonutils.to_primitive(inst_ref))
def test_finish_resize_instance_notification(self):
"""Ensure notifications on instance migrate/resize"""
old_time = datetime.datetime(2012, 4, 1)
cur_time = datetime.datetime(2012, 12, 21, 12, 21)
timeutils.set_time_override(old_time)
instance = jsonutils.to_primitive(self._create_fake_instance())
context = self.context.elevated()
old_type_id = instance_types.get_instance_type_by_name(
'm1.tiny')['id']
new_type = instance_types.get_instance_type_by_name('m1.small')
new_type = jsonutils.to_primitive(new_type)
new_type_id = new_type['id']
self.compute.run_instance(self.context, instance=instance)
db.instance_update(self.context, instance['uuid'], {'host': 'foo'})
db.instance_update(self.context, instance["uuid"],
{"task_state": task_states.RESIZE_PREP})
self.compute.prep_resize(context, instance=instance,
instance_type=new_type, image={})
migration_ref = db.migration_get_by_instance_and_status(context,
instance['uuid'],
'pre-migrating')
self.compute.resize_instance(context, instance=instance,
migration_id=migration_ref['id'],
image={})
timeutils.set_time_override(cur_time)
test_notifier.NOTIFICATIONS = []
self.compute.finish_resize(context,
migration_id=int(migration_ref['id']), disk_info={}, image={},
instance=instance)
self.assertEquals(len(test_notifier.NOTIFICATIONS), 2)
msg = test_notifier.NOTIFICATIONS[0]
self.assertEquals(msg['event_type'],
'compute.instance.finish_resize.start')
msg = test_notifier.NOTIFICATIONS[1]
self.assertEquals(msg['event_type'],
'compute.instance.finish_resize.end')
self.assertEquals(msg['priority'], 'INFO')
payload = msg['payload']
self.assertEquals(payload['tenant_id'], self.project_id)
self.assertEquals(payload['user_id'], self.user_id)
self.assertEquals(payload['instance_id'], instance['uuid'])
self.assertEquals(payload['instance_type'], 'm1.small')
self.assertEquals(str(payload['instance_type_id']), str(new_type_id))
self.assertTrue('display_name' in payload)
self.assertTrue('created_at' in payload)
self.assertTrue('launched_at' in payload)
self.assertEqual(payload['launched_at'], str(cur_time))
image_ref_url = utils.generate_image_url(FAKE_IMAGE_REF)
self.assertEquals(payload['image_ref_url'], image_ref_url)
self.compute.terminate_instance(context,
instance=jsonutils.to_primitive(instance))
    def test_resize_instance_notification(self):
        """Ensure notifications on instance migrate/resize"""
        # Pin "now" so timestamps in the payload are predictable.
        old_time = datetime.datetime(2012, 4, 1)
        cur_time = datetime.datetime(2012, 12, 21, 12, 21)
        timeutils.set_time_override(old_time)
        instance = jsonutils.to_primitive(self._create_fake_instance())
        context = self.context.elevated()
        self.compute.run_instance(self.context, instance=instance)
        timeutils.set_time_override(cur_time)
        # Discard boot-time notifications; only capture prep_resize's.
        test_notifier.NOTIFICATIONS = []

        # Fake a different source host so the resize is not same-host.
        db.instance_update(self.context, instance['uuid'], {'host': 'foo'})
        instance_type = instance_types.get_default_instance_type()
        self.compute.prep_resize(context, instance=instance,
                instance_type=instance_type, image={})
        # prep_resize must have created a 'pre-migrating' migration record.
        db.migration_get_by_instance_and_status(context,
                                                instance['uuid'],
                                                'pre-migrating')

        # Expect exists, resize.prep.start, resize.prep.end, in order.
        self.assertEquals(len(test_notifier.NOTIFICATIONS), 3)
        msg = test_notifier.NOTIFICATIONS[0]
        self.assertEquals(msg['event_type'],
                          'compute.instance.exists')
        msg = test_notifier.NOTIFICATIONS[1]
        self.assertEquals(msg['event_type'],
                          'compute.instance.resize.prep.start')
        msg = test_notifier.NOTIFICATIONS[2]
        self.assertEquals(msg['event_type'],
                          'compute.instance.resize.prep.end')
        self.assertEquals(msg['priority'], 'INFO')
        payload = msg['payload']
        self.assertEquals(payload['tenant_id'], self.project_id)
        self.assertEquals(payload['user_id'], self.user_id)
        self.assertEquals(payload['instance_id'], instance['uuid'])
        self.assertEquals(payload['instance_type'], 'm1.tiny')
        type_id = instance_types.get_instance_type_by_name('m1.tiny')['id']
        self.assertEquals(str(payload['instance_type_id']), str(type_id))
        self.assertTrue('display_name' in payload)
        self.assertTrue('created_at' in payload)
        self.assertTrue('launched_at' in payload)
        image_ref_url = utils.generate_image_url(FAKE_IMAGE_REF)
        self.assertEquals(payload['image_ref_url'], image_ref_url)
        self.compute.terminate_instance(context, instance=instance)
    def test_prep_resize_instance_migration_error(self):
        """Ensure prep_resize raise a migration error"""
        # Disallow same-host resize, then place the instance on this
        # compute's own host so prep_resize cannot proceed.
        self.flags(host="foo", allow_resize_to_same_host=False)

        instance = jsonutils.to_primitive(self._create_fake_instance())
        context = self.context.elevated()
        # The failed prep must roll back its quota reservations.
        reservations = self._ensure_quota_reservations_rolledback()

        self.compute.run_instance(self.context, instance=instance)
        new_instance = db.instance_update(self.context, instance['uuid'],
                                          {'host': self.compute.host})
        new_instance = jsonutils.to_primitive(new_instance)
        instance_type = instance_types.get_default_instance_type()

        self.assertRaises(exception.MigrationError, self.compute.prep_resize,
                          context, instance=new_instance,
                          instance_type=instance_type, image={},
                          reservations=reservations)
        self.compute.terminate_instance(context, instance=new_instance)
    def test_resize_instance_driver_error(self):
        """Ensure instance status set to Error on resize error"""

        def throw_up(*args, **kwargs):
            raise test.TestingException()

        # Fail the driver's disk-migration step.
        self.stubs.Set(self.compute.driver, 'migrate_disk_and_power_off',
                       throw_up)

        instance = jsonutils.to_primitive(self._create_fake_instance())
        instance_type = instance_types.get_default_instance_type()
        context = self.context.elevated()
        # The failed resize must roll back its quota reservations.
        reservations = self._ensure_quota_reservations_rolledback()

        self.compute.run_instance(self.context, instance=instance)
        # Fake a different source host so the resize is not same-host.
        db.instance_update(self.context, instance['uuid'], {'host': 'foo'})
        self.compute.prep_resize(context, instance=instance,
                                 instance_type=instance_type, image={},
                                 reservations=reservations)
        migration_ref = db.migration_get_by_instance_and_status(context,
                instance['uuid'], 'pre-migrating')
        db.instance_update(self.context, instance['uuid'],
                           {"task_state": task_states.RESIZE_PREP})
        #verify
        self.assertRaises(test.TestingException, self.compute.resize_instance,
                          context, instance=instance,
                          migration_id=migration_ref['id'], image={},
                          reservations=reservations)
        # The driver failure must leave the instance in ERROR.
        instance = db.instance_get_by_uuid(context, instance['uuid'])
        self.assertEqual(instance['vm_state'], vm_states.ERROR)
        self.compute.terminate_instance(context,
                instance=jsonutils.to_primitive(instance))
    def test_resize_instance(self):
        """Ensure instance can be migrated/resized"""
        instance = jsonutils.to_primitive(self._create_fake_instance())
        instance_type = instance_types.get_default_instance_type()
        context = self.context.elevated()

        self.compute.run_instance(self.context, instance=instance)
        # Fake a different source host so the resize is not same-host.
        db.instance_update(self.context, instance['uuid'],
                           {'host': 'foo'})
        self.compute.prep_resize(context, instance=instance,
                instance_type=instance_type, image={})
        migration_ref = db.migration_get_by_instance_and_status(context,
                instance['uuid'], 'pre-migrating')
        db.instance_update(self.context, instance['uuid'],
                           {"task_state": task_states.RESIZE_PREP})
        # Should run to completion without raising.
        self.compute.resize_instance(context, instance=instance,
                                     migration_id=migration_ref['id'],
                                     image={})
        self.compute.terminate_instance(context,
                instance=jsonutils.to_primitive(instance))
def test_finish_revert_resize(self):
"""Ensure that the flavor is reverted to the original on revert"""
def fake(*args, **kwargs):
pass
self.stubs.Set(self.compute.driver, 'finish_migration', fake)
self.stubs.Set(self.compute.driver, 'finish_revert_migration', fake)
reservations = self._ensure_quota_reservations_committed()
context = self.context.elevated()
instance = jsonutils.to_primitive(self._create_fake_instance())
instance_uuid = instance['uuid']
self.compute.run_instance(self.context, instance=instance)
# Confirm the instance size before the resize starts
inst_ref = db.instance_get_by_uuid(context, instance['uuid'])
instance_type_ref = db.instance_type_get(context,
inst_ref['instance_type_id'])
self.assertEqual(instance_type_ref['flavorid'], '1')
new_inst_ref = db.instance_update(self.context, instance['uuid'],
{'host': 'foo'})
new_instance_type_ref = db.instance_type_get_by_flavor_id(context, 3)
self.compute.prep_resize(context,
instance=jsonutils.to_primitive(new_inst_ref),
instance_type=jsonutils.to_primitive(new_instance_type_ref),
image={}, reservations=reservations)
migration_ref = db.migration_get_by_instance_and_status(context,
inst_ref['uuid'], 'pre-migrating')
instance = jsonutils.to_primitive(inst_ref)
db.instance_update(self.context, instance["uuid"],
{"task_state": task_states.RESIZE_PREP})
self.compute.resize_instance(context, instance=instance,
migration_id=migration_ref['id'],
image={})
self.compute.finish_resize(context,
migration_id=int(migration_ref['id']), disk_info={},
image={}, instance=instance)
# Prove that the instance size is now the new size
inst_ref = db.instance_get_by_uuid(context, instance['uuid'])
instance_type_ref = db.instance_type_get(context,
inst_ref['instance_type_id'])
self.assertEqual(instance_type_ref['flavorid'], '3')
# Finally, revert and confirm the old flavor has been applied
rpcinst = jsonutils.to_primitive(inst_ref)
db.instance_update(self.context, instance["uuid"],
{"task_state": task_states.RESIZE_REVERTING})
self.compute.revert_resize(context,
migration_id=migration_ref['id'], instance=rpcinst,
reservations=reservations)
self.compute.finish_revert_resize(context,
migration_id=migration_ref['id'], instance=rpcinst,
reservations=reservations)
instance = db.instance_get_by_uuid(context, instance['uuid'])
self.assertEqual(instance['vm_state'], vm_states.ACTIVE)
self.assertEqual(instance['task_state'], None)
inst_ref = db.instance_get_by_uuid(context, instance['uuid'])
instance_type_ref = db.instance_type_get(context,
inst_ref['instance_type_id'])
self.assertEqual(instance_type_ref['flavorid'], '1')
self.assertEqual(inst_ref['host'], migration_ref['source_compute'])
self.compute.terminate_instance(context,
instance=jsonutils.to_primitive(inst_ref))
def test_get_by_flavor_id(self):
type = instance_types.get_instance_type_by_flavor_id(1)
self.assertEqual(type['name'], 'm1.tiny')
    def test_resize_same_source_fails(self):
        """Ensure instance fails to migrate when source and destination are
        the same host"""
        # The failed prep must roll back its quota reservations.
        reservations = self._ensure_quota_reservations_rolledback()
        instance = jsonutils.to_primitive(self._create_fake_instance())
        self.compute.run_instance(self.context, instance=instance)
        # No host override here, so the instance stays on this compute's
        # host, making the resize source == destination.
        instance = db.instance_get_by_uuid(self.context, instance['uuid'])
        instance_type = instance_types.get_default_instance_type()
        self.assertRaises(exception.MigrationError, self.compute.prep_resize,
                self.context, instance=instance,
                instance_type=instance_type, image={},
                reservations=reservations)
        self.compute.terminate_instance(self.context,
                instance=jsonutils.to_primitive(instance))
    def test_resize_instance_handles_migration_error(self):
        """Ensure vm_state is ERROR when error occurs"""

        def raise_migration_failure(*args):
            raise test.TestingException()

        # Fail the driver's disk-migration step.
        self.stubs.Set(self.compute.driver,
                'migrate_disk_and_power_off',
                raise_migration_failure)

        # The failed resize must roll back its quota reservations.
        reservations = self._ensure_quota_reservations_rolledback()

        inst_ref = jsonutils.to_primitive(self._create_fake_instance())
        instance_type = instance_types.get_default_instance_type()
        context = self.context.elevated()

        self.compute.run_instance(self.context, instance=inst_ref)
        # Fake a different source host so the resize is not same-host.
        db.instance_update(self.context, inst_ref['uuid'], {'host': 'foo'})
        self.compute.prep_resize(context, instance=inst_ref,
                                 instance_type=instance_type,
                                 image={}, reservations=reservations)
        migration_ref = db.migration_get_by_instance_and_status(context,
                inst_ref['uuid'], 'pre-migrating')
        db.instance_update(self.context, inst_ref['uuid'],
                           {"task_state": task_states.RESIZE_PREP})
        self.assertRaises(test.TestingException, self.compute.resize_instance,
                          context, instance=inst_ref,
                          migration_id=migration_ref['id'], image={},
                          reservations=reservations)
        # The migration failure must put the instance into ERROR.
        inst_ref = db.instance_get_by_uuid(context, inst_ref['uuid'])
        self.assertEqual(inst_ref['vm_state'], vm_states.ERROR)
        self.compute.terminate_instance(context,
                instance=jsonutils.to_primitive(inst_ref))
def test_check_can_live_migrate_source_works_correctly(self):
"""Confirm check_can_live_migrate_source works on positive path"""
context = self.context.elevated()
inst_ref = jsonutils.to_primitive(self._create_fake_instance(
{'host': 'fake_host_2'}))
inst_id = inst_ref["id"]
dest = "fake_host_1"
self.mox.StubOutWithMock(db, 'instance_get')
self.mox.StubOutWithMock(self.compute.driver,
'check_can_live_migrate_source')
dest_check_data = {"test": "data"}
self.compute.driver.check_can_live_migrate_source(context,
inst_ref,
dest_check_data)
self.mox.ReplayAll()
self.compute.check_can_live_migrate_source(context,
dest_check_data=dest_check_data, instance=inst_ref)
def test_check_can_live_migrate_destination_works_correctly(self):
"""Confirm check_can_live_migrate_destination works on positive path"""
context = self.context.elevated()
inst_ref = jsonutils.to_primitive(self._create_fake_instance(
{'host': 'fake_host_2'}))
inst_id = inst_ref["id"]
dest = "fake_host_1"
self.mox.StubOutWithMock(self.compute.driver,
'check_can_live_migrate_destination')
self.mox.StubOutWithMock(self.compute.compute_rpcapi,
'check_can_live_migrate_source')
self.mox.StubOutWithMock(self.compute.driver,
'check_can_live_migrate_destination_cleanup')
dest_check_data = {"test": "data"}
self.compute.driver.check_can_live_migrate_destination(context,
inst_ref, True, False).AndReturn(dest_check_data)
self.compute.compute_rpcapi.check_can_live_migrate_source(context,
inst_ref, dest_check_data)
self.compute.driver.check_can_live_migrate_destination_cleanup(
context, dest_check_data)
self.mox.ReplayAll()
self.compute.check_can_live_migrate_destination(context,
block_migration=True, disk_over_commit=False,
instance=inst_ref)
def test_check_can_live_migrate_destination_fails_dest_check(self):
"""Confirm check_can_live_migrate_destination works on positive path"""
context = self.context.elevated()
inst_ref = jsonutils.to_primitive(self._create_fake_instance(
{'host': 'fake_host_2'}))
inst_id = inst_ref["id"]
dest = "fake_host_1"
self.mox.StubOutWithMock(self.compute.driver,
'check_can_live_migrate_destination')
self.compute.driver.check_can_live_migrate_destination(context,
inst_ref, True, False).AndRaise(exception.Invalid())
self.mox.ReplayAll()
self.assertRaises(exception.Invalid,
self.compute.check_can_live_migrate_destination,
context, block_migration=True,
disk_over_commit=False, instance=inst_ref)
def test_check_can_live_migrate_destination_fails_source(self):
"""Confirm check_can_live_migrate_destination works on positive path"""
context = self.context.elevated()
inst_ref = jsonutils.to_primitive(self._create_fake_instance(
{'host': 'fake_host_2'}))
inst_id = inst_ref["id"]
dest = "fake_host_1"
self.mox.StubOutWithMock(self.compute.driver,
'check_can_live_migrate_destination')
self.mox.StubOutWithMock(self.compute.compute_rpcapi,
'check_can_live_migrate_source')
self.mox.StubOutWithMock(self.compute.driver,
'check_can_live_migrate_destination_cleanup')
dest_check_data = {"test": "data"}
self.compute.driver.check_can_live_migrate_destination(context,
inst_ref, True, False).AndReturn(dest_check_data)
self.compute.compute_rpcapi.check_can_live_migrate_source(context,
inst_ref, dest_check_data).AndRaise(exception.Invalid())
self.compute.driver.check_can_live_migrate_destination_cleanup(
context, dest_check_data)
self.mox.ReplayAll()
self.assertRaises(exception.Invalid,
self.compute.check_can_live_migrate_destination,
context, block_migration=True,
disk_over_commit=False, instance=inst_ref)
def test_pre_live_migration_instance_has_no_fixed_ip(self):
"""Confirm raising exception if instance doesn't have fixed_ip."""
# creating instance testdata
context = self.context.elevated()
instance = jsonutils.to_primitive(self._create_fake_instance())
inst_id = instance["id"]
self.mox.ReplayAll()
self.assertRaises(exception.FixedIpNotFoundForInstance,
self.compute.pre_live_migration, context,
instance=instance)
def test_pre_live_migration_works_correctly(self):
"""Confirm setup_compute_volume is called when volume is mounted."""
def stupid(*args, **kwargs):
return fake_network.fake_get_instance_nw_info(self.stubs,
spectacular=True)
self.stubs.Set(nova.compute.manager.ComputeManager,
'_get_instance_nw_info', stupid)
# creating instance testdata
instance = jsonutils.to_primitive(self._create_fake_instance(
{'host': 'dummy'}))
inst_id = instance['id']
c = context.get_admin_context()
nw_info = fake_network.fake_get_instance_nw_info(self.stubs)
# creating mocks
self.mox.StubOutWithMock(self.compute.driver, 'pre_live_migration')
self.compute.driver.pre_live_migration(mox.IsA(c), mox.IsA(instance),
{'block_device_mapping': []},
mox.IgnoreArg())
self.mox.StubOutWithMock(self.compute.driver,
'ensure_filtering_rules_for_instance')
self.compute.driver.ensure_filtering_rules_for_instance(
mox.IsA(instance), nw_info)
# start test
self.mox.ReplayAll()
ret = self.compute.pre_live_migration(c, instance=instance)
self.assertEqual(ret, None)
# cleanup
db.instance_destroy(c, instance['uuid'])
    def test_live_migration_dest_raises_exception(self):
        """Confirm exception when pre_live_migration fails."""
        # creating instance testdata
        instance_ref = self._create_fake_instance({'host': 'dummy'})
        instance = jsonutils.to_primitive(instance_ref)
        inst_uuid = instance['uuid']
        inst_id = instance['id']

        c = context.get_admin_context()
        topic = rpc.queue_get_for(c, FLAGS.compute_topic, instance['host'])

        # creating volume testdata
        volume_id = db.volume_create(c, {'size': 1})['id']
        values = {'instance_uuid': inst_uuid, 'device_name': '/dev/vdc',
                  'delete_on_termination': False, 'volume_id': volume_id}
        db.block_device_mapping_create(c, values)

        # creating mocks
        self.mox.StubOutWithMock(rpc, 'call')
        self.mox.StubOutWithMock(self.compute.driver,
                                 'get_instance_disk_info')
        self.compute.driver.get_instance_disk_info(instance['name'])

        # Make the destination's pre_live_migration fail.
        self.mox.StubOutWithMock(self.compute.compute_rpcapi,
                                 'pre_live_migration')
        self.compute.compute_rpcapi.pre_live_migration(c,
                mox.IsA(instance), True, None, instance['host']).AndRaise(
                                        rpc.common.RemoteError('', '', ''))

        db.instance_update(self.context, instance['uuid'],
                           {'task_state': task_states.MIGRATING})
        # mocks for rollback: networks re-pointed at the source, volume
        # connections removed, and the destination rolled back.
        rpc.call(c, 'network', {'method': 'setup_networks_on_host',
                                'args': {'instance_id': inst_id,
                                         'host': self.compute.host,
                                         'teardown': False}})
        rpcinst = jsonutils.to_primitive(
                db.instance_get_by_uuid(self.context, instance['uuid']))
        rpc.call(c, topic,
                {"method": "remove_volume_connection",
                 "args": {'instance': rpcinst,
                          'volume_id': volume_id},
                 "version": compute_rpcapi.ComputeAPI.BASE_RPC_API_VERSION},
                None)
        rpc.cast(c, topic,
                {"method": "rollback_live_migration_at_destination",
                 "args": {'instance': rpcinst},
                 "version": compute_rpcapi.ComputeAPI.BASE_RPC_API_VERSION})

        # start test
        self.mox.ReplayAll()
        self.assertRaises(rpc_common.RemoteError,
                          self.compute.live_migration,
                          c, dest=instance['host'], block_migration=True,
                          instance=rpcinst)

        # cleanup
        for bdms in db.block_device_mapping_get_all_by_instance(
            c, inst_uuid):
            db.block_device_mapping_destroy(c, bdms['id'])
        db.volume_destroy(c, volume_id)
        db.instance_destroy(c, inst_uuid)
    def test_live_migration_works_correctly(self):
        """Confirm live_migration() works as expected correctly."""
        # creating instance testdata
        c = context.get_admin_context()
        instance_ref = self._create_fake_instance({'host': 'dummy'})
        inst_uuid = instance_ref['uuid']
        inst_id = instance_ref['id']

        instance = jsonutils.to_primitive(db.instance_get(c, inst_id))

        # create: expect exactly one pre_live_migration RPC to the
        # destination host (non-block migration, so no disk info).
        self.mox.StubOutWithMock(rpc, 'call')
        topic = rpc.queue_get_for(c, FLAGS.compute_topic, instance['host'])
        rpc.call(c, topic,
                {"method": "pre_live_migration",
                 "args": {'instance': instance,
                          'block_migration': False,
                          'disk': None},
                 "version": compute_rpcapi.ComputeAPI.BASE_RPC_API_VERSION},
                None)

        # start test
        self.mox.ReplayAll()
        ret = self.compute.live_migration(c, dest=instance['host'],
                                          instance=instance)
        self.assertEqual(ret, None)

        # cleanup
        db.instance_destroy(c, inst_uuid)
    def test_post_live_migration_working_correctly(self):
        """Confirm post_live_migration() works as expected correctly."""
        dest = 'desthost'
        flo_addr = '1.2.1.2'

        # creating testdata: a paused, migrating instance with a volume,
        # a fixed IP and a floating IP attached.
        c = context.get_admin_context()
        inst_ref = jsonutils.to_primitive(self._create_fake_instance({
                                'state_description': 'migrating',
                                'state': power_state.PAUSED}))
        inst_uuid = inst_ref['uuid']
        inst_id = inst_ref['id']

        db.instance_update(c, inst_uuid,
                           {'task_state': task_states.MIGRATING,
                            'power_state': power_state.PAUSED})
        v_ref = db.volume_create(c, {'size': 1, 'instance_id': inst_id})
        fix_addr = db.fixed_ip_create(c, {'address': '1.1.1.1',
                                          'instance_uuid': inst_ref['uuid']})
        fix_ref = db.fixed_ip_get_by_address(c, fix_addr)
        db.floating_ip_create(c, {'address': flo_addr,
                                  'fixed_ip_id': fix_ref['id']})

        # creating mocks: source-side teardown (unfilter, unplug vifs),
        # destination-side completion, and network re-pointing.
        self.mox.StubOutWithMock(self.compute.driver, 'unfilter_instance')
        self.compute.driver.unfilter_instance(inst_ref, [])
        self.mox.StubOutWithMock(rpc, 'call')
        rpc.call(c, rpc.queue_get_for(c, FLAGS.compute_topic, dest),
            {"method": "post_live_migration_at_destination",
             "args": {'instance': inst_ref, 'block_migration': False},
             "version": compute_rpcapi.ComputeAPI.BASE_RPC_API_VERSION},
            None)
        self.mox.StubOutWithMock(self.compute.driver, 'unplug_vifs')
        self.compute.driver.unplug_vifs(inst_ref, [])
        rpc.call(c, 'network', {'method': 'setup_networks_on_host',
                                'args': {'instance_id': inst_id,
                                         'host': self.compute.host,
                                         'teardown': True}})

        # start test
        self.mox.ReplayAll()
        self.compute._post_live_migration(c, inst_ref, dest)

        # make sure floating ips are rewritten to destinatioin hostname.
        flo_refs = db.floating_ip_get_all_by_host(c, dest)
        self.assertTrue(flo_refs)
        self.assertEqual(flo_refs[0]['address'], flo_addr)

        # cleanup
        db.instance_destroy(c, inst_uuid)
        db.volume_destroy(c, v_ref['id'])
        db.floating_ip_destroy(c, flo_addr)
    def test_run_kill_vm(self):
        """Detect when a vm is terminated behind the scenes"""
        self.stubs.Set(compute_manager.ComputeManager,
                '_report_driver_status', nop_report_driver_status)

        instance = jsonutils.to_primitive(self._create_fake_instance())
        self.compute.run_instance(self.context, instance=instance)

        instances = db.instance_get_all(context.get_admin_context())
        LOG.info(_("Running instances: %s"), instances)
        self.assertEqual(len(instances), 1)

        # Kill the VM in the hypervisor without going through the manager.
        instance_name = instances[0].name
        self.compute.driver.test_remove_vm(instance_name)

        # Force the compute manager to do its periodic poll
        ctxt = context.get_admin_context()
        self.compute._sync_power_states(ctxt)

        instances = db.instance_get_all(ctxt)
        LOG.info(_("After force-killing instances: %s"), instances)
        # The DB record survives, but the sync marks the instance as
        # being stopped.
        self.assertEqual(len(instances), 1)
        self.assertEqual(task_states.STOPPING, instances[0]['task_state'])
    def test_add_instance_fault(self):
        """A fault built from a caught exception records code 500, the
        exception class name as message, and the traceback in details."""
        exc_info = None
        instance_uuid = str(utils.gen_uuid())
        def fake_db_fault_create(ctxt, values):
            # 'details' holds the message plus a formatted traceback, so
            # check it loosely and compare the remaining fields exactly.
            self.assertTrue(values['details'].startswith('test'))
            self.assertTrue('raise NotImplementedError' in values['details'])
            del values['details']
            expected = {
                'code': 500,
                'message': 'NotImplementedError',
                'instance_uuid': instance_uuid,
            }
            self.assertEquals(expected, values)
        # Raise for real so sys.exc_info() carries a live traceback.
        try:
            raise NotImplementedError('test')
        except Exception:
            exc_info = sys.exc_info()
        self.stubs.Set(nova.db, 'instance_fault_create', fake_db_fault_create)
        ctxt = context.get_admin_context()
        compute_utils.add_instance_fault_from_exc(ctxt, instance_uuid,
                                                  NotImplementedError('test'),
                                                  exc_info)
    def test_add_instance_fault_user_error(self):
        """A user-facing error keeps its own code (400) and message."""
        exc_info = None
        instance_uuid = str(utils.gen_uuid())
        def fake_db_fault_create(ctxt, values):
            expected = {
                'code': 400,
                'message': 'Invalid',
                'details': 'fake details',
                'instance_uuid': instance_uuid,
            }
            self.assertEquals(expected, values)
        user_exc = exception.Invalid('fake details', code=400)
        # Raise for real so sys.exc_info() carries a live traceback.
        try:
            raise user_exc
        except Exception:
            exc_info = sys.exc_info()
        self.stubs.Set(nova.db, 'instance_fault_create', fake_db_fault_create)
        ctxt = context.get_admin_context()
        compute_utils.add_instance_fault_from_exc(ctxt, instance_uuid,
                                                  user_exc, exc_info)
    def test_add_instance_fault_no_exc_info(self):
        """Without exc_info, 'details' degrades to just the message."""
        instance_uuid = str(utils.gen_uuid())
        def fake_db_fault_create(ctxt, values):
            expected = {
                'code': 500,
                'message': 'NotImplementedError',
                'details': 'test',
                'instance_uuid': instance_uuid,
            }
            self.assertEquals(expected, values)
        self.stubs.Set(nova.db, 'instance_fault_create', fake_db_fault_create)
        ctxt = context.get_admin_context()
        compute_utils.add_instance_fault_from_exc(ctxt, instance_uuid,
                                                  NotImplementedError('test'))
    def test_cleanup_running_deleted_instances(self):
        """With action 'reap', an instance the driver still runs but the
        DB marks deleted (past the timeout) gets shut down and its
        volumes cleaned up."""
        admin_context = context.get_admin_context()
        # Deleted over an hour ago -- past running_deleted_instance_timeout.
        deleted_at = (timeutils.utcnow() -
                      datetime.timedelta(hours=1, minutes=5))
        instance = self._create_fake_instance({"deleted_at": deleted_at,
                                               "deleted": True})
        self.compute.host = instance['host']
        # Record the expected call sequence with mox, then replay.
        self.mox.StubOutWithMock(self.compute.driver, 'list_instances')
        self.compute.driver.list_instances().AndReturn([instance['name']])
        FLAGS.running_deleted_instance_timeout = 3600
        FLAGS.running_deleted_instance_action = 'reap'
        self.mox.StubOutWithMock(self.compute.db, "instance_get_all_by_host")
        self.compute.db.instance_get_all_by_host(admin_context,
                                                 self.compute.host
                                                ).AndReturn([instance])
        self.mox.StubOutWithMock(self.compute, "_shutdown_instance")
        self.compute._shutdown_instance(admin_context,
                                        instance).AndReturn(None)
        self.mox.StubOutWithMock(self.compute, "_cleanup_volumes")
        self.compute._cleanup_volumes(admin_context,
                                      instance['uuid']).AndReturn(None)
        self.mox.ReplayAll()
        self.compute._cleanup_running_deleted_instances(admin_context)
    def test_running_deleted_instances(self):
        """_running_deleted_instances() returns only driver-visible
        instances whose DB rows are deleted and older than the timeout."""
        self.mox.StubOutWithMock(self.compute.driver, 'list_instances')
        self.compute.driver.list_instances().AndReturn(['herp', 'derp'])
        self.compute.host = 'host'
        # instance1: deleted long enough ago to qualify.
        instance1 = mox.MockAnything()
        instance1.name = 'herp'
        instance1.deleted = True
        instance1.deleted_at = "sometimeago"
        # instance2: not deleted, must be filtered out.
        instance2 = mox.MockAnything()
        instance2.name = 'derp'
        instance2.deleted = False
        instance2.deleted_at = None
        self.mox.StubOutWithMock(timeutils, 'is_older_than')
        timeutils.is_older_than('sometimeago',
                FLAGS.running_deleted_instance_timeout).AndReturn(True)
        self.mox.StubOutWithMock(self.compute.db, "instance_get_all_by_host")
        self.compute.db.instance_get_all_by_host('context',
                                                 'host').AndReturn(
                                                                [instance1,
                                                                 instance2])
        self.mox.ReplayAll()
        val = self.compute._running_deleted_instances('context')
        self.assertEqual(val, [instance1])
    def test_heal_instance_info_cache(self):
        """The healer refreshes one instance's network info per periodic
        run, skips stale entries, and re-queries the DB when its local
        uuid list is exhausted."""
        # Update on every call for the test
        self.flags(heal_instance_info_cache_interval=-1)
        ctxt = context.get_admin_context()
        instance_map = {}
        instances = []
        for x in xrange(5):
            uuid = 'fake-uuid-%s' % x
            instance_map[uuid] = {'uuid': uuid, 'host': FLAGS.host}
            instances.append(instance_map[uuid])
        # Counters to verify which lookup paths the healer exercised.
        call_info = {'get_all_by_host': 0, 'get_by_uuid': 0,
                'get_nw_info': 0, 'expected_instance': None}
        def fake_instance_get_all_by_host(context, host):
            call_info['get_all_by_host'] += 1
            return instances[:]
        def fake_instance_get_by_uuid(context, instance_uuid):
            if instance_uuid not in instance_map:
                raise exception.InstanceNotFound
            call_info['get_by_uuid'] += 1
            return instance_map[instance_uuid]
        # NOTE(comstud): Override the stub in setUp()
        def fake_get_instance_nw_info(context, instance):
            # Note that this exception gets caught in compute/manager
            # and is ignored. However, the below increment of
            # 'get_nw_info' won't happen, and you'll get an assert
            # failure checking it below.
            self.assertEqual(instance, call_info['expected_instance'])
            call_info['get_nw_info'] += 1
        self.stubs.Set(db, 'instance_get_all_by_host',
                fake_instance_get_all_by_host)
        self.stubs.Set(db, 'instance_get_by_uuid',
                fake_instance_get_by_uuid)
        self.stubs.Set(self.compute.network_api, 'get_instance_nw_info',
                fake_get_instance_nw_info)
        # First run primes the healer's uuid list via get_all_by_host.
        call_info['expected_instance'] = instances[0]
        self.compute._heal_instance_info_cache(ctxt)
        self.assertEqual(call_info['get_all_by_host'], 1)
        self.assertEqual(call_info['get_by_uuid'], 0)
        self.assertEqual(call_info['get_nw_info'], 1)
        # Subsequent runs look each queued uuid up individually.
        call_info['expected_instance'] = instances[1]
        self.compute._heal_instance_info_cache(ctxt)
        self.assertEqual(call_info['get_all_by_host'], 1)
        self.assertEqual(call_info['get_by_uuid'], 1)
        self.assertEqual(call_info['get_nw_info'], 2)
        # Make an instance switch hosts
        instances[2]['host'] = 'not-me'
        # Make an instance disappear
        instance_map.pop(instances[3]['uuid'])
        # '2' and '3' should be skipped..
        call_info['expected_instance'] = instances[4]
        self.compute._heal_instance_info_cache(ctxt)
        self.assertEqual(call_info['get_all_by_host'], 1)
        # Incremented for '2' and '4'.. '3' caused a raise above.
        self.assertEqual(call_info['get_by_uuid'], 3)
        self.assertEqual(call_info['get_nw_info'], 3)
        # Should be no more left.
        self.assertEqual(len(self.compute._instance_uuids_to_heal), 0)
        # This should cause a DB query now so we get first instance
        # back again
        call_info['expected_instance'] = instances[0]
        self.compute._heal_instance_info_cache(ctxt)
        self.assertEqual(call_info['get_all_by_host'], 2)
        # Stays the same, because the instance came from the DB
        self.assertEqual(call_info['get_by_uuid'], 3)
        self.assertEqual(call_info['get_nw_info'], 4)
    def test_poll_unconfirmed_resizes(self):
        """_poll_unconfirmed_resizes() auto-confirms RESIZED instances
        and marks the migration 'error' for instances that are missing,
        in the wrong state, or busy -- except when confirm_resize itself
        fails, in which case the status is left untouched."""
        instances = [{'uuid': 'fake_uuid1', 'vm_state': vm_states.RESIZED,
                      'task_state': None},
                     {'uuid': 'noexist'},
                     {'uuid': 'fake_uuid2', 'vm_state': vm_states.ERROR,
                      'task_state': None},
                     {'uuid': 'fake_uuid3', 'vm_state': vm_states.ACTIVE,
                      'task_state': task_states.REBOOTING},
                     {'uuid': 'fake_uuid4', 'vm_state': vm_states.RESIZED,
                      'task_state': None},
                     {'uuid': 'fake_uuid5', 'vm_state': vm_states.ACTIVE,
                      'task_state': None},
                     {'uuid': 'fake_uuid6', 'vm_state': vm_states.RESIZED,
                      'task_state': 'deleting'}]
        # Final migration status expected per instance after one poll.
        expected_migration_status = {'fake_uuid1': 'confirmed',
                                     'noexist': 'error',
                                     'fake_uuid2': 'error',
                                     'fake_uuid3': 'error',
                                     'fake_uuid4': None,
                                     'fake_uuid5': 'error',
                                     'fake_uuid6': 'error'}
        migrations = []
        for i, instance in enumerate(instances, start=1):
            migrations.append({'id': i,
                               'instance_uuid': instance['uuid'],
                               'status': None})
        def fake_instance_get_by_uuid(context, instance_uuid):
            # raise InstanceNotFound exception for uuid 'noexist'
            if instance_uuid == 'noexist':
                raise exception.InstanceNotFound(instance_id=instance_uuid)
            for instance in instances:
                if instance['uuid'] == instance_uuid:
                    return instance
        def fake_migration_get_unconfirmed_by_dest_compute(context,
                resize_confirm_window, dest_compute):
            self.assertEqual(dest_compute, FLAGS.host)
            return migrations
        def fake_migration_update(context, migration_id, values):
            for migration in migrations:
                if migration['id'] == migration_id and 'status' in values:
                    migration['status'] = values['status']
        def fake_confirm_resize(context, instance):
            # raise exception for 'fake_uuid4' to check migration status
            # does not get set to 'error' on confirm_resize failure.
            if instance['uuid'] == 'fake_uuid4':
                raise test.TestingException
            for migration in migrations:
                if migration['instance_uuid'] == instance['uuid']:
                    migration['status'] = 'confirmed'
        self.stubs.Set(db, 'instance_get_by_uuid',
                       fake_instance_get_by_uuid)
        self.stubs.Set(db, 'migration_get_unconfirmed_by_dest_compute',
                       fake_migration_get_unconfirmed_by_dest_compute)
        self.stubs.Set(db, 'migration_update',
                       fake_migration_update)
        self.stubs.Set(self.compute.compute_api, 'confirm_resize',
                       fake_confirm_resize)
        def fetch_instance_migration_status(instance_uuid):
            for migration in migrations:
                if migration['instance_uuid'] == instance_uuid:
                    return migration['status']
        self.flags(resize_confirm_window=60)
        ctxt = context.get_admin_context()
        self.compute._poll_unconfirmed_resizes(ctxt)
        for uuid, status in expected_migration_status.iteritems():
            self.assertEqual(status, fetch_instance_migration_status(uuid))
def test_instance_build_timeout_disabled(self):
self.flags(instance_build_timeout=0)
ctxt = context.get_admin_context()
called = {'get_all': False, 'set_error_state': 0}
created_at = timeutils.utcnow() + datetime.timedelta(seconds=-60)
def fake_instance_get_all_by_filters(*args, **kwargs):
called['get_all'] = True
return instances[:]
self.stubs.Set(db, 'instance_get_all_by_filters',
fake_instance_get_all_by_filters)
def fake_set_instance_error_state(_ctxt, instance_uuid, **kwargs):
called['set_error_state'] += 1
self.stubs.Set(self.compute, '_set_instance_error_state',
fake_set_instance_error_state)
instance_map = {}
instances = []
for x in xrange(5):
uuid = 'fake-uuid-%s' % x
instance_map[uuid] = {'uuid': uuid, 'host': FLAGS.host,
'vm_state': vm_states.BUILDING,
'created_at': created_at}
instances.append(instance_map[uuid])
self.compute._check_instance_build_time(ctxt)
self.assertFalse(called['get_all'])
self.assertEqual(called['set_error_state'], 0)
def test_instance_build_timeout(self):
self.flags(instance_build_timeout=30)
ctxt = context.get_admin_context()
called = {'get_all': False, 'set_error_state': 0}
created_at = timeutils.utcnow() + datetime.timedelta(seconds=-60)
def fake_instance_get_all_by_filters(*args, **kwargs):
called['get_all'] = True
return instances[:]
self.stubs.Set(db, 'instance_get_all_by_filters',
fake_instance_get_all_by_filters)
def fake_set_instance_error_state(_ctxt, instance_uuid, **kwargs):
called['set_error_state'] += 1
self.stubs.Set(self.compute, '_set_instance_error_state',
fake_set_instance_error_state)
instance_map = {}
instances = []
for x in xrange(5):
uuid = 'fake-uuid-%s' % x
instance_map[uuid] = {'uuid': uuid, 'host': FLAGS.host,
'vm_state': vm_states.BUILDING,
'created_at': created_at}
instances.append(instance_map[uuid])
self.compute._check_instance_build_time(ctxt)
self.assertTrue(called['get_all'])
self.assertEqual(called['set_error_state'], 5)
def test_instance_build_timeout_mixed_instances(self):
self.flags(instance_build_timeout=30)
ctxt = context.get_admin_context()
called = {'get_all': False, 'set_error_state': 0}
created_at = timeutils.utcnow() + datetime.timedelta(seconds=-60)
def fake_instance_get_all_by_filters(*args, **kwargs):
called['get_all'] = True
return instances[:]
self.stubs.Set(db, 'instance_get_all_by_filters',
fake_instance_get_all_by_filters)
def fake_set_instance_error_state(_ctxt, instance_uuid, **kwargs):
called['set_error_state'] += 1
self.stubs.Set(self.compute, '_set_instance_error_state',
fake_set_instance_error_state)
instance_map = {}
instances = []
#expired instances
for x in xrange(4):
uuid = 'fake-uuid-%s' % x
instance_map[uuid] = {'uuid': uuid, 'host': FLAGS.host,
'vm_state': vm_states.BUILDING,
'created_at': created_at}
instances.append(instance_map[uuid])
#not expired
uuid = 'fake-uuid-5'
instance_map[uuid] = {
'uuid': uuid,
'host': FLAGS.host,
'vm_state': vm_states.BUILDING,
'created_at': timeutils.utcnow(),
}
instances.append(instance_map[uuid])
self.compute._check_instance_build_time(ctxt)
self.assertTrue(called['get_all'])
self.assertEqual(called['set_error_state'], 4)
class ComputeAPITestCase(BaseTestCase):
    def setUp(self):
        """Build a compute API whose network-info lookups are stubbed.

        The stub asserts lookups arrive with an admin (elevated)
        context and returns the canned fake network info.
        """
        def fake_get_nw_info(cls, ctxt, instance):
            self.assertTrue(ctxt.is_admin)
            return fake_network.fake_get_instance_nw_info(self.stubs, 1, 1,
                                                          spectacular=True)
        super(ComputeAPITestCase, self).setUp()
        self.stubs.Set(nova.network.API, 'get_instance_nw_info',
                       fake_get_nw_info)
        self.security_group_api = compute.api.SecurityGroupAPI()
        self.compute_api = compute.API(
                                   security_group_api=self.security_group_api)
        # Minimal image record each test can copy and tweak.
        self.fake_image = {
            'id': 1,
            'name': 'fake_name',
            'properties': {'kernel_id': 'fake_kernel_id',
                           'ramdisk_id': 'fake_ramdisk_id'},
        }
def _run_instance(self, params=None):
instance = jsonutils.to_primitive(self._create_fake_instance(params))
instance_uuid = instance['uuid']
self.compute.run_instance(self.context, instance=instance)
instance = db.instance_get_by_uuid(self.context, instance_uuid)
self.assertEqual(instance['task_state'], None)
return instance, instance_uuid
def test_create_with_too_little_ram(self):
"""Test an instance type with too little memory"""
inst_type = instance_types.get_default_instance_type()
inst_type['memory_mb'] = 1
def fake_show(*args):
img = copy.copy(self.fake_image)
img['min_ram'] = 2
return img
self.stubs.Set(fake_image._FakeImageService, 'show', fake_show)
self.assertRaises(exception.InstanceTypeMemoryTooSmall,
self.compute_api.create, self.context, inst_type, None)
# Now increase the inst_type memory and make sure all is fine.
inst_type['memory_mb'] = 2
(refs, resv_id) = self.compute_api.create(self.context,
inst_type, None)
db.instance_destroy(self.context, refs[0]['uuid'])
def test_create_with_too_little_disk(self):
"""Test an instance type with too little disk space"""
inst_type = instance_types.get_default_instance_type()
inst_type['root_gb'] = 1
def fake_show(*args):
img = copy.copy(self.fake_image)
img['min_disk'] = 2
return img
self.stubs.Set(fake_image._FakeImageService, 'show', fake_show)
self.assertRaises(exception.InstanceTypeDiskTooSmall,
self.compute_api.create, self.context, inst_type, None)
# Now increase the inst_type disk space and make sure all is fine.
inst_type['root_gb'] = 2
(refs, resv_id) = self.compute_api.create(self.context,
inst_type, None)
db.instance_destroy(self.context, refs[0]['uuid'])
def test_create_just_enough_ram_and_disk(self):
"""Test an instance type with just enough ram and disk space"""
inst_type = instance_types.get_default_instance_type()
inst_type['root_gb'] = 2
inst_type['memory_mb'] = 2
def fake_show(*args):
img = copy.copy(self.fake_image)
img['min_ram'] = 2
img['min_disk'] = 2
img['name'] = 'fake_name'
return img
self.stubs.Set(fake_image._FakeImageService, 'show', fake_show)
(refs, resv_id) = self.compute_api.create(self.context,
inst_type, None)
db.instance_destroy(self.context, refs[0]['uuid'])
def test_create_with_no_ram_and_disk_reqs(self):
"""Test an instance type with no min_ram or min_disk"""
inst_type = instance_types.get_default_instance_type()
inst_type['root_gb'] = 1
inst_type['memory_mb'] = 1
def fake_show(*args):
return copy.copy(self.fake_image)
self.stubs.Set(fake_image._FakeImageService, 'show', fake_show)
(refs, resv_id) = self.compute_api.create(self.context,
inst_type, None)
db.instance_destroy(self.context, refs[0]['uuid'])
def test_create_instance_defaults_display_name(self):
"""Verify that an instance cannot be created without a display_name."""
cases = [dict(), dict(display_name=None)]
for instance in cases:
(ref, resv_id) = self.compute_api.create(self.context,
instance_types.get_default_instance_type(), None, **instance)
try:
self.assertNotEqual(ref[0]['display_name'], None)
finally:
db.instance_destroy(self.context, ref[0]['uuid'])
def test_create_instance_sets_system_metadata(self):
"""Make sure image properties are copied into system metadata."""
(ref, resv_id) = self.compute_api.create(
self.context,
instance_type=instance_types.get_default_instance_type(),
image_href=None)
try:
sys_metadata = db.instance_system_metadata_get(self.context,
ref[0]['uuid'])
image_props = {'image_kernel_id': 'fake_kernel_id',
'image_ramdisk_id': 'fake_ramdisk_id',
'image_something_else': 'meow', }
for key, value in image_props.iteritems():
self.assertTrue(key in sys_metadata)
self.assertEqual(value, sys_metadata[key])
finally:
db.instance_destroy(self.context, ref[0]['uuid'])
def test_create_instance_associates_security_groups(self):
"""Make sure create associates security groups"""
group = self._create_group()
(ref, resv_id) = self.compute_api.create(
self.context,
instance_type=instance_types.get_default_instance_type(),
image_href=None,
security_group=['testgroup'])
try:
self.assertEqual(len(db.security_group_get_by_instance(
self.context, ref[0]['id'])), 1)
group = db.security_group_get(self.context, group['id'])
self.assert_(len(group.instances) == 1)
finally:
db.security_group_destroy(self.context, group['id'])
db.instance_destroy(self.context, ref[0]['uuid'])
def test_create_instance_with_invalid_security_group_raises(self):
instance_type = instance_types.get_default_instance_type()
pre_build_len = len(db.instance_get_all(context.get_admin_context()))
self.assertRaises(exception.SecurityGroupNotFoundForProject,
self.compute_api.create,
self.context,
instance_type=instance_type,
image_href=None,
security_group=['this_is_a_fake_sec_group'])
self.assertEqual(pre_build_len,
len(db.instance_get_all(context.get_admin_context())))
def test_create_with_large_user_data(self):
"""Test an instance type with too much user data."""
inst_type = instance_types.get_default_instance_type()
def fake_show(*args):
img = copy.copy(self.fake_image)
img['min_ram'] = 2
return img
self.stubs.Set(fake_image._FakeImageService, 'show', fake_show)
self.assertRaises(exception.InstanceUserDataTooLarge,
self.compute_api.create, self.context, inst_type, None,
user_data=('1' * 65536))
def test_create_with_malformed_user_data(self):
"""Test an instance type with malformed user data."""
inst_type = instance_types.get_default_instance_type()
def fake_show(*args):
img = copy.copy(self.fake_image)
img['min_ram'] = 2
return img
self.stubs.Set(fake_image._FakeImageService, 'show', fake_show)
self.assertRaises(exception.InstanceUserDataMalformed,
self.compute_api.create, self.context, inst_type, None,
user_data='banana')
def test_create_with_base64_user_data(self):
"""Test an instance type with ok much user data."""
inst_type = instance_types.get_default_instance_type()
def fake_show(*args):
img = copy.copy(self.fake_image)
img['min_ram'] = 2
return img
self.stubs.Set(fake_image._FakeImageService, 'show', fake_show)
# NOTE(mikal): a string of length 48510 encodes to 65532 characters of
# base64
(refs, resv_id) = self.compute_api.create(
self.context, inst_type, None,
user_data=base64.encodestring('1' * 48510))
db.instance_destroy(self.context, refs[0]['uuid'])
    def test_default_hostname_generator(self):
        """Hostnames derive from display_name, sanitized; without a
        name the instance uuid is used ('server-<uuid>')."""
        fake_uuids = [str(utils.gen_uuid()) for x in xrange(4)]
        orig_populate = self.compute_api._populate_instance_for_create
        # Feed one deterministic uuid per create so the generated
        # 'server-<uuid>' hostname is predictable.
        def _fake_populate(base_options, *args, **kwargs):
            base_options['uuid'] = fake_uuids.pop(0)
            return orig_populate(base_options, *args, **kwargs)
        self.stubs.Set(self.compute_api,
                       '_populate_instance_for_create',
                       _fake_populate)
        # (display_name, expected hostname): None falls back to the
        # uuid; punctuation/control chars are stripped; '_' -> '-'.
        cases = [(None, 'server-%s' % fake_uuids[0]),
                 ('Hello, Server!', 'hello-server'),
                 ('<}\x1fh\x10e\x08l\x02l\x05o\x12!{>', 'hello'),
                 ('hello_server', 'hello-server')]
        for display_name, hostname in cases:
            (ref, resv_id) = self.compute_api.create(self.context,
                instance_types.get_default_instance_type(), None,
                display_name=display_name)
            try:
                self.assertEqual(ref[0]['hostname'], hostname)
            finally:
                db.instance_destroy(self.context, ref[0]['uuid'])
def test_destroy_instance_disassociates_security_groups(self):
"""Make sure destroying disassociates security groups"""
group = self._create_group()
(ref, resv_id) = self.compute_api.create(
self.context,
instance_type=instance_types.get_default_instance_type(),
image_href=None,
security_group=['testgroup'])
try:
db.instance_destroy(self.context, ref[0]['uuid'])
group = db.security_group_get(self.context, group['id'])
self.assert_(len(group.instances) == 0)
finally:
db.security_group_destroy(self.context, group['id'])
def test_destroy_security_group_disassociates_instances(self):
"""Make sure destroying security groups disassociates instances"""
group = self._create_group()
(ref, resv_id) = self.compute_api.create(
self.context,
instance_type=instance_types.get_default_instance_type(),
image_href=None,
security_group=['testgroup'])
try:
db.security_group_destroy(self.context, group['id'])
admin_deleted_context = context.get_admin_context(
read_deleted="only")
group = db.security_group_get(admin_deleted_context, group['id'])
self.assert_(len(group.instances) == 0)
finally:
db.instance_destroy(self.context, ref[0]['uuid'])
def test_start(self):
instance = jsonutils.to_primitive(self._create_fake_instance())
instance_uuid = instance['uuid']
self.compute.run_instance(self.context, instance=instance)
db.instance_update(self.context, instance['uuid'],
{"task_state": task_states.STOPPING})
self.compute.stop_instance(self.context, instance=instance)
instance = db.instance_get_by_uuid(self.context, instance_uuid)
self.assertEqual(instance['task_state'], None)
self.compute_api.start(self.context, instance)
instance = db.instance_get_by_uuid(self.context, instance_uuid)
self.assertEqual(instance['task_state'], task_states.STARTING)
db.instance_destroy(self.context, instance['uuid'])
def test_stop(self):
instance = jsonutils.to_primitive(self._create_fake_instance())
instance_uuid = instance['uuid']
self.compute.run_instance(self.context, instance=instance)
instance = db.instance_get_by_uuid(self.context, instance_uuid)
self.assertEqual(instance['task_state'], None)
self.compute_api.stop(self.context, instance)
instance = db.instance_get_by_uuid(self.context, instance_uuid)
self.assertEqual(instance['task_state'], task_states.STOPPING)
db.instance_destroy(self.context, instance['uuid'])
    def test_start_shutdown(self):
        """start() works on an instance that shut itself down (power
        state NOSTATE, vm state STOPPED)."""
        # Assert the instance's (power, vm, task) state triple.
        def check_state(instance_uuid, power_state_, vm_state_, task_state_):
            instance = db.instance_get_by_uuid(self.context, instance_uuid)
            self.assertEqual(instance['power_state'], power_state_)
            self.assertEqual(instance['vm_state'], vm_state_)
            self.assertEqual(instance['task_state'], task_state_)
        # Call start() and then assert the resulting state triple.
        def start_check_state(instance_uuid,
                              power_state_, vm_state_, task_state_):
            instance = db.instance_get_by_uuid(self.context, instance_uuid)
            self.compute_api.start(self.context, instance)
            check_state(instance_uuid, power_state_, vm_state_, task_state_)
        instance = jsonutils.to_primitive(self._create_fake_instance())
        self.compute.run_instance(self.context, instance=instance)
        check_state(instance['uuid'], power_state.RUNNING, vm_states.ACTIVE,
                    None)
        # NOTE(yamahata): emulate compute.manager._sync_power_state() that
        # the instance is shutdown by itself
        db.instance_update(self.context, instance['uuid'],
                           {'power_state': power_state.NOSTATE,
                            'vm_state': vm_states.STOPPED})
        check_state(instance['uuid'], power_state.NOSTATE, vm_states.STOPPED,
                    None)
        # start() queues STARTING without touching power/vm state.
        start_check_state(instance['uuid'], power_state.NOSTATE,
                          vm_states.STOPPED, task_states.STARTING)
        db.instance_destroy(self.context, instance['uuid'])
def test_delete(self):
instance, instance_uuid = self._run_instance(params={
'host': FLAGS.host})
self.compute_api.delete(self.context, instance)
instance = db.instance_get_by_uuid(self.context, instance_uuid)
self.assertEqual(instance['task_state'], task_states.DELETING)
db.instance_destroy(self.context, instance['uuid'])
def test_repeated_delete_quota(self):
in_use = {'instances': 1}
def fake_reserve(context, **deltas):
return dict(deltas.iteritems())
self.stubs.Set(QUOTAS, 'reserve', fake_reserve)
def fake_commit(context, deltas):
for k, v in deltas.iteritems():
in_use[k] = in_use.get(k, 0) + v
self.stubs.Set(QUOTAS, 'commit', fake_commit)
instance, instance_uuid = self._run_instance(params={
'host': FLAGS.host})
self.compute_api.delete(self.context, instance)
self.compute_api.delete(self.context, instance)
instance = db.instance_get_by_uuid(self.context, instance_uuid)
self.assertEqual(instance['task_state'], task_states.DELETING)
self.assertEquals(in_use['instances'], 0)
db.instance_destroy(self.context, instance['uuid'])
def test_delete_fast_if_host_not_set(self):
instance = self._create_fake_instance({'host': None})
self.compute_api.delete(self.context, instance)
self.assertRaises(exception.InstanceNotFound, db.instance_get_by_uuid,
self.context, instance['uuid'])
def test_delete_handles_host_setting_race_condition(self):
instance, instance_uuid = self._run_instance(params={
'host': FLAGS.host})
instance['host'] = None # make it think host was never set
self.compute_api.delete(self.context, instance)
instance = db.instance_get_by_uuid(self.context, instance_uuid)
self.assertEqual(instance['task_state'], task_states.DELETING)
db.instance_destroy(self.context, instance['uuid'])
def test_delete_fail(self):
instance, instance_uuid = self._run_instance(params={
'host': FLAGS.host})
instance = db.instance_update(self.context, instance_uuid,
{'disable_terminate': True})
self.compute_api.delete(self.context, instance)
instance = db.instance_get_by_uuid(self.context, instance_uuid)
self.assertEqual(instance['task_state'], None)
db.instance_destroy(self.context, instance['uuid'])
def test_delete_soft(self):
instance, instance_uuid = self._run_instance()
self.compute_api.soft_delete(self.context, instance)
instance = db.instance_get_by_uuid(self.context, instance_uuid)
self.assertEqual(instance['task_state'], task_states.POWERING_OFF)
db.instance_destroy(self.context, instance['uuid'])
def test_delete_soft_fail(self):
instance, instance_uuid = self._run_instance()
instance = db.instance_update(self.context, instance_uuid,
{'disable_terminate': True})
self.compute_api.soft_delete(self.context, instance)
instance = db.instance_get_by_uuid(self.context, instance_uuid)
self.assertEqual(instance['task_state'], None)
db.instance_destroy(self.context, instance['uuid'])
def test_force_delete(self):
"""Ensure instance can be deleted after a soft delete"""
instance = jsonutils.to_primitive(self._create_fake_instance(params={
'host': FLAGS.host}))
instance_uuid = instance['uuid']
self.compute.run_instance(self.context, instance=instance)
instance = db.instance_get_by_uuid(self.context, instance_uuid)
self.compute_api.soft_delete(self.context, instance)
instance = db.instance_get_by_uuid(self.context, instance_uuid)
self.assertEqual(instance['task_state'], task_states.POWERING_OFF)
# set the state that the instance gets when soft_delete finishes
instance = db.instance_update(self.context, instance['uuid'],
{'vm_state': vm_states.SOFT_DELETED,
'task_state': None})
self.compute_api.force_delete(self.context, instance)
instance = db.instance_get_by_uuid(self.context, instance_uuid)
self.assertEqual(instance['task_state'], task_states.DELETING)
def test_suspend(self):
"""Ensure instance can be suspended"""
instance = jsonutils.to_primitive(self._create_fake_instance())
instance_uuid = instance['uuid']
self.compute.run_instance(self.context, instance=instance)
self.assertEqual(instance['task_state'], None)
self.compute_api.suspend(self.context, instance)
instance = db.instance_get_by_uuid(self.context, instance_uuid)
self.assertEqual(instance['task_state'], task_states.SUSPENDING)
db.instance_destroy(self.context, instance['uuid'])
def test_resume(self):
"""Ensure instance can be resumed (if suspended)"""
instance = jsonutils.to_primitive(self._create_fake_instance())
self.compute.run_instance(self.context, instance=instance)
db.instance_update(self.context, instance['uuid'],
{'vm_state': vm_states.SUSPENDED})
instance = db.instance_get(self.context, instance['id'])
self.assertEqual(instance['task_state'], None)
self.compute_api.resume(self.context, instance)
instance = db.instance_get_by_uuid(self.context, instance['uuid'])
self.assertEqual(instance['task_state'], task_states.RESUMING)
db.instance_destroy(self.context, instance['uuid'])
def test_pause(self):
"""Ensure instance can be paused"""
instance = jsonutils.to_primitive(self._create_fake_instance())
instance_uuid = instance['uuid']
self.compute.run_instance(self.context, instance=instance)
self.assertEqual(instance['task_state'], None)
self.compute_api.pause(self.context, instance)
instance = db.instance_get_by_uuid(self.context, instance_uuid)
self.assertEqual(instance['task_state'], task_states.PAUSING)
db.instance_destroy(self.context, instance['uuid'])
def test_unpause(self):
"""Ensure instance can be unpaused"""
instance = jsonutils.to_primitive(self._create_fake_instance())
instance_uuid = instance['uuid']
self.compute.run_instance(self.context, instance=instance)
self.assertEqual(instance['task_state'], None)
db.instance_update(self.context, instance['uuid'],
{"task_state": task_states.PAUSING})
self.compute.pause_instance(self.context, instance=instance)
# set the state that the instance gets when pause finishes
instance = db.instance_update(self.context, instance['uuid'],
{'vm_state': vm_states.PAUSED})
self.compute_api.unpause(self.context, instance)
instance = db.instance_get_by_uuid(self.context, instance_uuid)
self.assertEqual(instance['task_state'], task_states.UNPAUSING)
db.instance_destroy(self.context, instance['uuid'])
def test_restore(self):
"""Ensure instance can be restored from a soft delete"""
instance = jsonutils.to_primitive(self._create_fake_instance())
instance_uuid = instance['uuid']
self.compute.run_instance(self.context, instance=instance)
instance = db.instance_get_by_uuid(self.context, instance_uuid)
self.compute_api.soft_delete(self.context, instance)
instance = db.instance_get_by_uuid(self.context, instance_uuid)
self.assertEqual(instance['task_state'], task_states.POWERING_OFF)
# set the state that the instance gets when soft_delete finishes
instance = db.instance_update(self.context, instance['uuid'],
{'vm_state': vm_states.SOFT_DELETED,
'task_state': None})
self.compute_api.restore(self.context, instance)
instance = db.instance_get_by_uuid(self.context, instance_uuid)
self.assertEqual(instance['task_state'], task_states.POWERING_ON)
db.instance_destroy(self.context, instance['uuid'])
    def test_rebuild(self):
        """rebuild() updates image_ref before casting to the compute
        manager, sets REBUILDING, and resets image-derived system
        metadata while preserving unrelated keys."""
        instance = jsonutils.to_primitive(self._create_fake_instance())
        instance_uuid = instance['uuid']
        self.compute.run_instance(self.context, instance=instance)
        instance = db.instance_get_by_uuid(self.context, instance_uuid)
        self.assertEqual(instance['task_state'], None)
        # Set some image metadata that should get wiped out and reset
        # as well as some other metadata that should be preserved.
        db.instance_system_metadata_update(self.context, instance_uuid,
                {'image_kernel_id': 'old-data',
                 'image_ramdisk_id': 'old_data',
                 'image_something_else': 'old-data',
                 'image_should_remove': 'bye-bye',
                 'preserved': 'preserve this!'},
                True)
        # Make sure Compute API updates the image_ref before casting to
        # compute manager.
        orig_update = self.compute_api.update
        info = {'image_ref': None}
        def update_wrapper(*args, **kwargs):
            # Capture the image_ref the API writes, then pass through.
            if 'image_ref' in kwargs:
                info['image_ref'] = kwargs['image_ref']
            return orig_update(*args, **kwargs)
        self.stubs.Set(self.compute_api, 'update', update_wrapper)
        image_ref = instance["image_ref"] + '-new_image_ref'
        password = "new_password"
        self.compute_api.rebuild(self.context, instance, image_ref, password)
        self.assertEqual(info['image_ref'], image_ref)
        instance = db.instance_get_by_uuid(self.context, instance_uuid)
        self.assertEqual(instance['task_state'], task_states.REBUILDING)
        sys_metadata = db.instance_system_metadata_get(self.context,
                instance_uuid)
        # Image keys reset from the (fake) image; 'preserved' survives.
        self.assertEqual(sys_metadata,
                {'image_kernel_id': 'fake_kernel_id',
                 'image_ramdisk_id': 'fake_ramdisk_id',
                 'image_something_else': 'meow',
                 'preserved': 'preserve this!'})
        db.instance_destroy(self.context, instance['uuid'])
def test_reboot_soft(self):
"""Ensure instance can be soft rebooted"""
instance = jsonutils.to_primitive(self._create_fake_instance())
self.compute.run_instance(self.context, instance=instance)
inst_ref = db.instance_get_by_uuid(self.context, instance['uuid'])
self.assertEqual(inst_ref['task_state'], None)
reboot_type = "SOFT"
self.compute_api.reboot(self.context, inst_ref, reboot_type)
inst_ref = db.instance_get_by_uuid(self.context, inst_ref['uuid'])
self.assertEqual(inst_ref['task_state'], task_states.REBOOTING)
db.instance_destroy(self.context, inst_ref['uuid'])
def test_reboot_hard(self):
"""Ensure instance can be hard rebooted"""
instance = jsonutils.to_primitive(self._create_fake_instance())
self.compute.run_instance(self.context, instance=instance)
inst_ref = db.instance_get_by_uuid(self.context, instance['uuid'])
self.assertEqual(inst_ref['task_state'], None)
reboot_type = "HARD"
self.compute_api.reboot(self.context, inst_ref, reboot_type)
inst_ref = db.instance_get_by_uuid(self.context, inst_ref['uuid'])
self.assertEqual(inst_ref['task_state'], task_states.REBOOTING_HARD)
db.instance_destroy(self.context, inst_ref['uuid'])
def test_hard_reboot_of_soft_rebooting_instance(self):
"""Ensure instance can be hard rebooted while soft rebooting"""
instance = jsonutils.to_primitive(self._create_fake_instance())
self.compute.run_instance(self.context, instance=instance)
inst_ref = db.instance_get_by_uuid(self.context, instance['uuid'])
db.instance_update(self.context, instance['uuid'],
{"task_state": task_states.REBOOTING})
reboot_type = "HARD"
self.compute_api.reboot(self.context, inst_ref, reboot_type)
inst_ref = db.instance_get_by_uuid(self.context, inst_ref['uuid'])
self.assertEqual(inst_ref['task_state'], task_states.REBOOTING_HARD)
db.instance_destroy(self.context, inst_ref['uuid'])
def test_soft_reboot_of_rebooting_instance(self):
"""Ensure instance can't be soft rebooted while rebooting"""
instance = jsonutils.to_primitive(self._create_fake_instance())
self.compute.run_instance(self.context, instance=instance)
inst_ref = db.instance_get_by_uuid(self.context, instance['uuid'])
db.instance_update(self.context, instance['uuid'],
{"task_state": task_states.REBOOTING})
inst_ref = db.instance_get_by_uuid(self.context, inst_ref['uuid'])
reboot_type = "SOFT"
self.assertRaises(exception.InstanceInvalidState,
self.compute_api.reboot,
self.context,
inst_ref,
reboot_type)
def test_hostname_create(self):
"""Ensure instance hostname is set during creation."""
inst_type = instance_types.get_instance_type_by_name('m1.tiny')
(instances, _) = self.compute_api.create(self.context,
inst_type,
None,
display_name='test host')
self.assertEqual('test-host', instances[0]['hostname'])
def test_set_admin_password(self):
"""Ensure instance can have its admin password set"""
instance = jsonutils.to_primitive(self._create_fake_instance())
instance_uuid = instance['uuid']
self.compute.run_instance(self.context, instance=instance)
inst_ref = db.instance_get_by_uuid(self.context, instance_uuid)
self.assertEqual(inst_ref['vm_state'], vm_states.ACTIVE)
self.assertEqual(inst_ref['task_state'], None)
def fake_rpc_method(context, topic, msg, do_cast=True):
self.assertFalse(do_cast)
self.stubs.Set(rpc, 'call', fake_rpc_method)
self.compute_api.set_admin_password(self.context, inst_ref)
inst_ref = db.instance_get_by_uuid(self.context, instance_uuid)
self.assertEqual(inst_ref['vm_state'], vm_states.ACTIVE)
self.assertEqual(inst_ref['task_state'],
task_states.UPDATING_PASSWORD)
self.compute.terminate_instance(self.context,
instance=jsonutils.to_primitive(inst_ref))
def test_rescue_unrescue(self):
instance = jsonutils.to_primitive(self._create_fake_instance())
instance_uuid = instance['uuid']
self.compute.run_instance(self.context, instance=instance)
instance = db.instance_get_by_uuid(self.context, instance_uuid)
self.assertEqual(instance['vm_state'], vm_states.ACTIVE)
self.assertEqual(instance['task_state'], None)
self.compute_api.rescue(self.context, instance)
instance = db.instance_get_by_uuid(self.context, instance_uuid)
self.assertEqual(instance['vm_state'], vm_states.ACTIVE)
self.assertEqual(instance['task_state'], task_states.RESCUING)
params = {'vm_state': vm_states.RESCUED, 'task_state': None}
db.instance_update(self.context, instance_uuid, params)
instance = db.instance_get_by_uuid(self.context, instance_uuid)
self.compute_api.unrescue(self.context, instance)
instance = db.instance_get_by_uuid(self.context, instance_uuid)
self.assertEqual(instance['vm_state'], vm_states.RESCUED)
self.assertEqual(instance['task_state'], task_states.UNRESCUING)
self.compute.terminate_instance(self.context,
instance=jsonutils.to_primitive(instance))
def test_snapshot(self):
"""Ensure a snapshot of an instance can be created"""
instance = self._create_fake_instance()
image = self.compute_api.snapshot(self.context, instance, 'snap1',
{'extra_param': 'value1'})
self.assertEqual(image['name'], 'snap1')
properties = image['properties']
self.assertTrue('backup_type' not in properties)
self.assertEqual(properties['image_type'], 'snapshot')
self.assertEqual(properties['instance_uuid'], instance['uuid'])
self.assertEqual(properties['extra_param'], 'value1')
db.instance_destroy(self.context, instance['uuid'])
def test_snapshot_minram_mindisk_VHD(self):
"""Ensure a snapshots min_ram and min_disk are correct.
A snapshot of a non-shrinkable VHD should have min_ram
and min_disk set to that of the original instances flavor.
"""
def fake_show(*args):
img = copy.copy(self.fake_image)
img['disk_format'] = 'vhd'
return img
self.stubs.Set(fake_image._FakeImageService, 'show', fake_show)
instance = self._create_fake_instance()
inst_params = {'root_gb': 2, 'memory_mb': 256}
instance['instance_type'].update(inst_params)
image = self.compute_api.snapshot(self.context, instance, 'snap1',
{'extra_param': 'value1'})
self.assertEqual(image['name'], 'snap1')
self.assertEqual(image['min_ram'], 256)
self.assertEqual(image['min_disk'], 2)
properties = image['properties']
self.assertTrue('backup_type' not in properties)
self.assertEqual(properties['image_type'], 'snapshot')
self.assertEqual(properties['instance_uuid'], instance['uuid'])
self.assertEqual(properties['extra_param'], 'value1')
db.instance_destroy(self.context, instance['uuid'])
def test_snapshot_minram_mindisk(self):
"""Ensure a snapshots min_ram and min_disk are correct.
A snapshot of an instance should have min_ram and min_disk
set to that of the instances original image unless that
image had a disk format of vhd.
"""
def fake_show(*args):
img = copy.copy(self.fake_image)
img['disk_format'] = 'raw'
img['min_ram'] = 512
img['min_disk'] = 1
return img
self.stubs.Set(fake_image._FakeImageService, 'show', fake_show)
instance = self._create_fake_instance()
image = self.compute_api.snapshot(self.context, instance, 'snap1',
{'extra_param': 'value1'})
self.assertEqual(image['name'], 'snap1')
self.assertEqual(image['min_ram'], 512)
self.assertEqual(image['min_disk'], 1)
properties = image['properties']
self.assertTrue('backup_type' not in properties)
self.assertEqual(properties['image_type'], 'snapshot')
self.assertEqual(properties['instance_uuid'], instance['uuid'])
self.assertEqual(properties['extra_param'], 'value1')
db.instance_destroy(self.context, instance['uuid'])
def test_snapshot_minram_mindisk_img_missing_minram(self):
"""Ensure a snapshots min_ram and min_disk are correct.
Do not show an attribute that the orig img did not have.
"""
def fake_show(*args):
img = copy.copy(self.fake_image)
img['disk_format'] = 'raw'
img['min_disk'] = 1
return img
self.stubs.Set(fake_image._FakeImageService, 'show', fake_show)
instance = self._create_fake_instance()
image = self.compute_api.snapshot(self.context, instance, 'snap1',
{'extra_param': 'value1'})
self.assertEqual(image['name'], 'snap1')
self.assertFalse('min_ram' in image)
self.assertEqual(image['min_disk'], 1)
properties = image['properties']
self.assertTrue('backup_type' not in properties)
self.assertEqual(properties['image_type'], 'snapshot')
self.assertEqual(properties['instance_uuid'], instance['uuid'])
self.assertEqual(properties['extra_param'], 'value1')
db.instance_destroy(self.context, instance['uuid'])
def test_snapshot_minram_mindisk_no_image(self):
"""Ensure a snapshots min_ram and min_disk are correct.
A snapshots min_ram and min_disk should be set to default if
an instances original image cannot be found.
"""
def fake_show(*args):
raise exception.ImageNotFound
self.stubs.Set(fake_image._FakeImageService, 'show', fake_show)
instance = self._create_fake_instance()
image = self.compute_api.snapshot(self.context, instance, 'snap1',
{'extra_param': 'value1'})
self.assertEqual(image['name'], 'snap1')
# min_ram and min_disk are not returned when set to default
self.assertFalse('min_ram' in image)
self.assertFalse('min_disk' in image)
properties = image['properties']
self.assertTrue('backup_type' not in properties)
self.assertEqual(properties['image_type'], 'snapshot')
self.assertEqual(properties['instance_uuid'], instance['uuid'])
self.assertEqual(properties['extra_param'], 'value1')
db.instance_destroy(self.context, instance['uuid'])
    def test_backup(self):
        """Back up an instance and verify the resulting image.
        The image must carry the backup name, backup_type, image_type,
        source instance uuid, and any extra properties passed in.
        """
        instance = self._create_fake_instance()
        image = self.compute_api.backup(self.context, instance,
                                        'backup1', 'DAILY', None,
                                        {'extra_param': 'value1'})
        self.assertEqual(image['name'], 'backup1')
        properties = image['properties']
        self.assertEqual(properties['backup_type'], 'DAILY')
        self.assertEqual(properties['image_type'], 'backup')
        self.assertEqual(properties['instance_uuid'], instance['uuid'])
        self.assertEqual(properties['extra_param'], 'value1')
        db.instance_destroy(self.context, instance['uuid'])
def test_backup_conflict(self):
"""Can't backup an instance which is already being backed up."""
instance = self._create_fake_instance()
instance_values = {'task_state': task_states.IMAGE_BACKUP}
db.instance_update(self.context, instance['uuid'], instance_values)
instance = self.compute_api.get(self.context, instance['uuid'])
self.assertRaises(exception.InstanceInvalidState,
self.compute_api.backup,
self.context,
instance,
None,
None,
None)
db.instance_destroy(self.context, instance['uuid'])
def test_snapshot_conflict(self):
"""Can't snapshot an instance which is already being snapshotted."""
instance = self._create_fake_instance()
instance_values = {'task_state': task_states.IMAGE_SNAPSHOT}
db.instance_update(self.context, instance['uuid'], instance_values)
instance = self.compute_api.get(self.context, instance['uuid'])
self.assertRaises(exception.InstanceInvalidState,
self.compute_api.snapshot,
self.context,
instance,
None)
db.instance_destroy(self.context, instance['uuid'])
def test_resize_confirm_through_api(self):
instance = jsonutils.to_primitive(self._create_fake_instance())
context = self.context.elevated()
self.compute.run_instance(self.context, instance=instance)
instance = db.instance_get_by_uuid(context, instance['uuid'])
self.compute_api.resize(context, instance, '4')
# create a fake migration record (manager does this)
db.migration_create(context,
{'instance_uuid': instance['uuid'],
'status': 'finished'})
# set the state that the instance gets when resize finishes
instance = db.instance_update(self.context, instance['uuid'],
{'task_state': None,
'vm_state': vm_states.RESIZED})
self.compute_api.confirm_resize(context, instance)
self.compute.terminate_instance(context,
instance=jsonutils.to_primitive(instance))
def test_resize_revert_through_api(self):
instance = jsonutils.to_primitive(self._create_fake_instance())
context = self.context.elevated()
instance = db.instance_get_by_uuid(context, instance['uuid'])
self.compute.run_instance(self.context, instance=instance)
self.compute_api.resize(context, instance, '4')
# create a fake migration record (manager does this)
db.migration_create(context,
{'instance_uuid': instance['uuid'],
'status': 'finished'})
# set the state that the instance gets when resize finishes
instance = db.instance_update(self.context, instance['uuid'],
{'task_state': None,
'vm_state': vm_states.RESIZED})
self.compute_api.revert_resize(context, instance)
instance = db.instance_get_by_uuid(context, instance['uuid'])
self.assertEqual(instance['vm_state'], vm_states.RESIZED)
self.assertEqual(instance['task_state'], task_states.RESIZE_REVERTING)
self.compute.terminate_instance(context,
instance=jsonutils.to_primitive(instance))
def test_resize_invalid_flavor_fails(self):
"""Ensure invalid flavors raise"""
instance = self._create_fake_instance()
context = self.context.elevated()
instance = db.instance_get_by_uuid(context, instance['uuid'])
instance = jsonutils.to_primitive(instance)
self.compute.run_instance(self.context, instance=instance)
self.assertRaises(exception.NotFound, self.compute_api.resize,
context, instance, 200)
self.compute.terminate_instance(context, instance=instance)
    def test_resize_same_flavor_fails(self):
        """Ensure resizing to the instance's current flavor raises."""
        context = self.context.elevated()
        instance = self._create_fake_instance()
        instance = db.instance_get_by_uuid(context, instance['uuid'])
        instance = jsonutils.to_primitive(instance)
        self.compute.run_instance(self.context, instance=instance)
        self.assertRaises(exception.CannotResizeToSameFlavor,
                          self.compute_api.resize, context, instance, 1)
        self.compute.terminate_instance(context, instance=instance)
def test_migrate(self):
context = self.context.elevated()
instance = self._create_fake_instance()
instance = db.instance_get_by_uuid(context, instance['uuid'])
instance = jsonutils.to_primitive(instance)
self.compute.run_instance(self.context, instance=instance)
# Migrate simply calls resize() without a flavor_id.
self.compute_api.resize(context, instance, None)
self.compute.terminate_instance(context, instance=instance)
def test_resize_request_spec(self):
def _fake_cast(context, topic, msg):
request_spec = msg['args']['request_spec']
filter_properties = msg['args']['filter_properties']
instance_properties = request_spec['instance_properties']
# resize with flavor_id = None will still send instance_type
self.assertEqual(request_spec['instance_type'],
orig_instance_type)
self.assertEqual(request_spec['instance_uuids'],
[instance['uuid']])
self.assertEqual(instance_properties['uuid'], instance['uuid'])
self.assertEqual(instance_properties['host'], 'host2')
# Ensure the instance passed to us has been updated with
# progress set to 0 and task_state set to RESIZE_PREP.
self.assertEqual(instance_properties['task_state'],
task_states.RESIZE_PREP)
self.assertEqual(instance_properties['progress'], 0)
self.assertIn('host2', filter_properties['ignore_hosts'])
self.stubs.Set(rpc, 'cast', _fake_cast)
context = self.context.elevated()
instance = self._create_fake_instance(dict(host='host2'))
instance = db.instance_get_by_uuid(context, instance['uuid'])
instance = jsonutils.to_primitive(instance)
orig_instance_type = instance['instance_type']
self.compute.run_instance(self.context, instance=instance)
# We need to set the host to something 'known'. Unfortunately,
# the compute manager is using a cached copy of FLAGS.host,
# so we can't just self.flags(host='host2') before calling
# run_instance above. Also, set progress to 10 so we ensure
# it is reset to 0 in compute_api.resize(). (verified in
# _fake_cast above).
instance = db.instance_update(self.context, instance['uuid'],
dict(host='host2', progress=10))
# different host
self.flags(host='host3')
try:
self.compute_api.resize(context, instance, None)
finally:
self.compute.terminate_instance(context, instance=instance)
def test_resize_request_spec_noavoid(self):
def _fake_cast(context, topic, msg):
request_spec = msg['args']['request_spec']
filter_properties = msg['args']['filter_properties']
instance_properties = request_spec['instance_properties']
self.assertEqual(instance_properties['host'], 'host2')
# Ensure the instance passed to us has been updated with
# progress set to 0 and task_state set to RESIZE_PREP.
self.assertEqual(instance_properties['task_state'],
task_states.RESIZE_PREP)
self.assertEqual(instance_properties['progress'], 0)
self.assertNotIn('host2', filter_properties['ignore_hosts'])
self.stubs.Set(rpc, 'cast', _fake_cast)
self.flags(allow_resize_to_same_host=True)
context = self.context.elevated()
instance = self._create_fake_instance(dict(host='host2'))
instance = db.instance_get_by_uuid(context, instance['uuid'])
instance = jsonutils.to_primitive(instance)
self.compute.run_instance(self.context, instance=instance)
# We need to set the host to something 'known'. Unfortunately,
# the compute manager is using a cached copy of FLAGS.host,
# so we can't just self.flags(host='host2') before calling
# run_instance above. Also, set progress to 10 so we ensure
# it is reset to 0 in compute_api.resize(). (verified in
# _fake_cast above).
instance = db.instance_update(self.context, instance['uuid'],
dict(host='host2', progress=10))
# different host
try:
self.compute_api.resize(context, instance, None)
finally:
self.compute.terminate_instance(context, instance=instance)
def test_get(self):
"""Test get instance"""
c = context.get_admin_context()
exp_instance = self._create_fake_instance()
expected = dict(exp_instance.iteritems())
expected['name'] = exp_instance['name']
def fake_db_get(context, instance_uuid):
return exp_instance
self.stubs.Set(db, 'instance_get_by_uuid', fake_db_get)
instance = self.compute_api.get(c, exp_instance['uuid'])
self.assertEquals(expected, instance)
def test_get_with_integer_id(self):
"""Test get instance with an integer id"""
c = context.get_admin_context()
exp_instance = self._create_fake_instance()
expected = dict(exp_instance.iteritems())
expected['name'] = exp_instance['name']
def fake_db_get(context, instance_id):
return exp_instance
self.stubs.Set(db, 'instance_get', fake_db_get)
instance = self.compute_api.get(c, exp_instance['id'])
self.assertEquals(expected, instance)
def test_get_all_by_name_regexp(self):
"""Test searching instances by name (display_name)"""
c = context.get_admin_context()
instance1 = self._create_fake_instance({'display_name': 'woot'})
instance2 = self._create_fake_instance({
'display_name': 'woo'})
instance3 = self._create_fake_instance({
'display_name': 'not-woot'})
instances = self.compute_api.get_all(c,
search_opts={'name': '^woo.*'})
self.assertEqual(len(instances), 2)
instance_uuids = [instance['uuid'] for instance in instances]
self.assertTrue(instance1['uuid'] in instance_uuids)
self.assertTrue(instance2['uuid'] in instance_uuids)
instances = self.compute_api.get_all(c,
search_opts={'name': '^woot.*'})
instance_uuids = [instance['uuid'] for instance in instances]
self.assertEqual(len(instances), 1)
self.assertTrue(instance1['uuid'] in instance_uuids)
instances = self.compute_api.get_all(c,
search_opts={'name': '.*oot.*'})
self.assertEqual(len(instances), 2)
instance_uuids = [instance['uuid'] for instance in instances]
self.assertTrue(instance1['uuid'] in instance_uuids)
self.assertTrue(instance3['uuid'] in instance_uuids)
instances = self.compute_api.get_all(c,
search_opts={'name': '^n.*'})
self.assertEqual(len(instances), 1)
instance_uuids = [instance['uuid'] for instance in instances]
self.assertTrue(instance3['uuid'] in instance_uuids)
instances = self.compute_api.get_all(c,
search_opts={'name': 'noth.*'})
self.assertEqual(len(instances), 0)
db.instance_destroy(c, instance1['uuid'])
db.instance_destroy(c, instance2['uuid'])
db.instance_destroy(c, instance3['uuid'])
def test_get_all_by_multiple_options_at_once(self):
"""Test searching by multiple options at once"""
c = context.get_admin_context()
network_manager = fake_network.FakeNetworkManager()
self.stubs.Set(self.compute_api.network_api,
'get_instance_uuids_by_ip_filter',
network_manager.get_instance_uuids_by_ip_filter)
instance1 = self._create_fake_instance({
'display_name': 'woot',
'id': 0,
'uuid': '00000000-0000-0000-0000-000000000010'})
instance2 = self._create_fake_instance({
'display_name': 'woo',
'id': 20,
'uuid': '00000000-0000-0000-0000-000000000020'})
instance3 = self._create_fake_instance({
'display_name': 'not-woot',
'id': 30,
'uuid': '00000000-0000-0000-0000-000000000030'})
# ip ends up matching 2nd octet here.. so all 3 match ip
# but 'name' only matches one
instances = self.compute_api.get_all(c,
search_opts={'ip': '.*\.1', 'name': 'not.*'})
self.assertEqual(len(instances), 1)
self.assertEqual(instances[0]['uuid'], instance3['uuid'])
# ip ends up matching any ip with a '1' in the last octet..
# so instance 1 and 3.. but name should only match #1
# but 'name' only matches one
instances = self.compute_api.get_all(c,
search_opts={'ip': '.*\.1$', 'name': '^woo.*'})
self.assertEqual(len(instances), 1)
self.assertEqual(instances[0]['uuid'], instance1['uuid'])
# same as above but no match on name (name matches instance1
# but the ip query doesn't
instances = self.compute_api.get_all(c,
search_opts={'ip': '.*\.2$', 'name': '^woot.*'})
self.assertEqual(len(instances), 0)
# ip matches all 3... ipv6 matches #2+#3...name matches #3
instances = self.compute_api.get_all(c,
search_opts={'ip': '.*\.1',
'name': 'not.*',
'ip6': '^.*12.*34.*'})
self.assertEqual(len(instances), 1)
self.assertEqual(instances[0]['uuid'], instance3['uuid'])
db.instance_destroy(c, instance1['uuid'])
db.instance_destroy(c, instance2['uuid'])
db.instance_destroy(c, instance3['uuid'])
def test_get_all_by_image(self):
"""Test searching instances by image"""
c = context.get_admin_context()
instance1 = self._create_fake_instance({'image_ref': '1234'})
instance2 = self._create_fake_instance({'image_ref': '4567'})
instance3 = self._create_fake_instance({'image_ref': '4567'})
instances = self.compute_api.get_all(c, search_opts={'image': '123'})
self.assertEqual(len(instances), 0)
instances = self.compute_api.get_all(c, search_opts={'image': '1234'})
self.assertEqual(len(instances), 1)
self.assertEqual(instances[0]['uuid'], instance1['uuid'])
instances = self.compute_api.get_all(c, search_opts={'image': '4567'})
self.assertEqual(len(instances), 2)
instance_uuids = [instance['uuid'] for instance in instances]
self.assertTrue(instance2['uuid'] in instance_uuids)
self.assertTrue(instance3['uuid'] in instance_uuids)
# Test passing a list as search arg
instances = self.compute_api.get_all(c,
search_opts={'image': ['1234', '4567']})
self.assertEqual(len(instances), 3)
db.instance_destroy(c, instance1['uuid'])
db.instance_destroy(c, instance2['uuid'])
db.instance_destroy(c, instance3['uuid'])
def test_get_all_by_flavor(self):
"""Test searching instances by image"""
c = context.get_admin_context()
instance1 = self._create_fake_instance({'instance_type_id': 1})
instance2 = self._create_fake_instance({'instance_type_id': 2})
instance3 = self._create_fake_instance({'instance_type_id': 2})
# NOTE(comstud): Migrations set up the instance_types table
# for us. Therefore, we assume the following is true for
# these tests:
# instance_type_id 1 == flavor 3
# instance_type_id 2 == flavor 1
# instance_type_id 3 == flavor 4
# instance_type_id 4 == flavor 5
# instance_type_id 5 == flavor 2
instances = self.compute_api.get_all(c,
search_opts={'flavor': 5})
self.assertEqual(len(instances), 0)
# ensure unknown filter maps to an empty list, not an exception
instances = self.compute_api.get_all(c, search_opts={'flavor': 99})
self.assertEqual(instances, [])
instances = self.compute_api.get_all(c, search_opts={'flavor': 3})
self.assertEqual(len(instances), 1)
self.assertEqual(instances[0]['id'], instance1['id'])
instances = self.compute_api.get_all(c, search_opts={'flavor': 1})
self.assertEqual(len(instances), 2)
instance_uuids = [instance['uuid'] for instance in instances]
self.assertTrue(instance2['uuid'] in instance_uuids)
self.assertTrue(instance3['uuid'] in instance_uuids)
db.instance_destroy(c, instance1['uuid'])
db.instance_destroy(c, instance2['uuid'])
db.instance_destroy(c, instance3['uuid'])
def test_get_all_by_state(self):
"""Test searching instances by state"""
c = context.get_admin_context()
instance1 = self._create_fake_instance({
'power_state': power_state.SHUTDOWN,
})
instance2 = self._create_fake_instance({
'power_state': power_state.RUNNING,
})
instance3 = self._create_fake_instance({
'power_state': power_state.RUNNING,
})
instances = self.compute_api.get_all(c,
search_opts={'power_state': power_state.SUSPENDED})
self.assertEqual(len(instances), 0)
instances = self.compute_api.get_all(c,
search_opts={'power_state': power_state.SHUTDOWN})
self.assertEqual(len(instances), 1)
self.assertEqual(instances[0]['uuid'], instance1['uuid'])
instances = self.compute_api.get_all(c,
search_opts={'power_state': power_state.RUNNING})
self.assertEqual(len(instances), 2)
instance_uuids = [instance['uuid'] for instance in instances]
self.assertTrue(instance2['uuid'] in instance_uuids)
self.assertTrue(instance3['uuid'] in instance_uuids)
# Test passing a list as search arg
instances = self.compute_api.get_all(c,
search_opts={'power_state': [power_state.SHUTDOWN,
power_state.RUNNING]})
self.assertEqual(len(instances), 3)
db.instance_destroy(c, instance1['uuid'])
db.instance_destroy(c, instance2['uuid'])
db.instance_destroy(c, instance3['uuid'])
def test_get_all_by_metadata(self):
"""Test searching instances by metadata"""
c = context.get_admin_context()
instance0 = self._create_fake_instance()
instance1 = self._create_fake_instance({
'metadata': {'key1': 'value1'}})
instance2 = self._create_fake_instance({
'metadata': {'key2': 'value2'}})
instance3 = self._create_fake_instance({
'metadata': {'key3': 'value3'}})
instance4 = self._create_fake_instance({
'metadata': {'key3': 'value3',
'key4': 'value4'}})
# get all instances
instances = self.compute_api.get_all(c,
search_opts={'metadata': {}})
self.assertEqual(len(instances), 5)
# wrong key/value combination
instances = self.compute_api.get_all(c,
search_opts={'metadata': {'key1': 'value3'}})
self.assertEqual(len(instances), 0)
# non-existing keys
instances = self.compute_api.get_all(c,
search_opts={'metadata': {'key5': 'value1'}})
self.assertEqual(len(instances), 0)
# find existing instance
instances = self.compute_api.get_all(c,
search_opts={'metadata': {'key2': 'value2'}})
self.assertEqual(len(instances), 1)
self.assertEqual(instances[0]['uuid'], instance2['uuid'])
instances = self.compute_api.get_all(c,
search_opts={'metadata': {'key3': 'value3'}})
self.assertEqual(len(instances), 2)
instance_uuids = [instance['uuid'] for instance in instances]
self.assertTrue(instance3['uuid'] in instance_uuids)
self.assertTrue(instance4['uuid'] in instance_uuids)
# multiple criterias as a dict
instances = self.compute_api.get_all(c,
search_opts={'metadata': {'key3': 'value3',
'key4': 'value4'}})
self.assertEqual(len(instances), 1)
self.assertEqual(instances[0]['uuid'], instance4['uuid'])
# multiple criterias as a list
instances = self.compute_api.get_all(c,
search_opts={'metadata': [{'key4': 'value4'},
{'key3': 'value3'}]})
self.assertEqual(len(instances), 1)
self.assertEqual(instances[0]['uuid'], instance4['uuid'])
db.instance_destroy(c, instance0['uuid'])
db.instance_destroy(c, instance1['uuid'])
db.instance_destroy(c, instance2['uuid'])
db.instance_destroy(c, instance3['uuid'])
db.instance_destroy(c, instance4['uuid'])
def test_instance_metadata(self):
meta_changes = [None]
self.flags(notify_on_any_change=True)
def fake_change_instance_metadata(inst, ctxt, diff, instance=None,
instance_uuid=None):
meta_changes[0] = diff
self.stubs.Set(compute_rpcapi.ComputeAPI, 'change_instance_metadata',
fake_change_instance_metadata)
_context = context.get_admin_context()
instance = self._create_fake_instance({'metadata': {'key1': 'value1'}})
instance = dict(instance)
metadata = self.compute_api.get_instance_metadata(_context, instance)
self.assertEqual(metadata, {'key1': 'value1'})
self.compute_api.update_instance_metadata(_context, instance,
{'key2': 'value2'})
metadata = self.compute_api.get_instance_metadata(_context, instance)
self.assertEqual(metadata, {'key1': 'value1', 'key2': 'value2'})
self.assertEqual(meta_changes, [{'key2': ['+', 'value2']}])
self.assertEquals(len(test_notifier.NOTIFICATIONS), 1)
msg = test_notifier.NOTIFICATIONS[0]
payload = msg['payload']
self.assertTrue('metadata' in payload)
self.assertEquals(payload['metadata'], metadata)
new_metadata = {'key2': 'bah', 'key3': 'value3'}
self.compute_api.update_instance_metadata(_context, instance,
new_metadata, delete=True)
metadata = self.compute_api.get_instance_metadata(_context, instance)
self.assertEqual(metadata, new_metadata)
self.assertEqual(meta_changes, [{
'key1': ['-'],
'key2': ['+', 'bah'],
'key3': ['+', 'value3'],
}])
self.assertEquals(len(test_notifier.NOTIFICATIONS), 2)
msg = test_notifier.NOTIFICATIONS[1]
payload = msg['payload']
self.assertTrue('metadata' in payload)
self.assertEquals(payload['metadata'], metadata)
self.compute_api.delete_instance_metadata(_context, instance, 'key2')
metadata = self.compute_api.get_instance_metadata(_context, instance)
self.assertEqual(metadata, {'key3': 'value3'})
self.assertEqual(meta_changes, [{'key2': ['-']}])
self.assertEquals(len(test_notifier.NOTIFICATIONS), 3)
msg = test_notifier.NOTIFICATIONS[2]
payload = msg['payload']
self.assertTrue('metadata' in payload)
self.assertEquals(payload['metadata'], {})
db.instance_destroy(_context, instance['uuid'])
def test_get_instance_faults(self):
"""Get an instances latest fault"""
instance = self._create_fake_instance()
fault_fixture = {
'code': 404,
'instance_uuid': instance['uuid'],
'message': "HTTPNotFound",
'details': "Stock details for test",
'created_at': datetime.datetime(2010, 10, 10, 12, 0, 0),
}
def return_fault(_ctxt, instance_uuids):
return dict.fromkeys(instance_uuids, [fault_fixture])
self.stubs.Set(nova.db,
'instance_fault_get_by_instance_uuids',
return_fault)
_context = context.get_admin_context()
output = self.compute_api.get_instance_faults(_context, [instance])
expected = {instance['uuid']: [fault_fixture]}
self.assertEqual(output, expected)
db.instance_destroy(_context, instance['uuid'])
@staticmethod
def _parse_db_block_device_mapping(bdm_ref):
attr_list = ('delete_on_termination', 'device_name', 'no_device',
'virtual_name', 'volume_id', 'volume_size', 'snapshot_id')
bdm = {}
for attr in attr_list:
val = bdm_ref.get(attr, None)
if val:
bdm[attr] = val
return bdm
    def test_update_block_device_mapping(self):
        """Image-supplied mappings then API block_device_mapping overrides.

        First applies the image's virtual-device mappings, then applies an
        explicit block_device_mapping and checks the merged DB state.
        """
        swap_size = 1
        instance_type = {'swap': swap_size}
        instance = self._create_fake_instance()
        # Image metadata style mappings: virtual device name -> device.
        mappings = [
                {'virtual': 'ami', 'device': 'sda1'},
                {'virtual': 'root', 'device': '/dev/sda1'},
                {'virtual': 'swap', 'device': 'sdb4'},
                {'virtual': 'swap', 'device': 'sdb3'},
                {'virtual': 'swap', 'device': 'sdb2'},
                {'virtual': 'swap', 'device': 'sdb1'},
                {'virtual': 'ephemeral0', 'device': 'sdc1'},
                {'virtual': 'ephemeral1', 'device': 'sdc2'},
                {'virtual': 'ephemeral2', 'device': 'sdc3'}]
        # Explicit API-level overrides layered on top of the image mappings.
        block_device_mapping = [
                # root
                {'device_name': '/dev/sda1',
                 'snapshot_id': '00000000-aaaa-bbbb-cccc-000000000000',
                 'delete_on_termination': False},
                # overwrite swap
                {'device_name': '/dev/sdb2',
                 'snapshot_id': '11111111-aaaa-bbbb-cccc-111111111111',
                 'delete_on_termination': False},
                {'device_name': '/dev/sdb3',
                 'snapshot_id': '22222222-aaaa-bbbb-cccc-222222222222'},
                {'device_name': '/dev/sdb4',
                 'no_device': True},
                # overwrite ephemeral
                {'device_name': '/dev/sdc2',
                 'snapshot_id': '33333333-aaaa-bbbb-cccc-333333333333',
                 'delete_on_termination': False},
                {'device_name': '/dev/sdc3',
                 'snapshot_id': '44444444-aaaa-bbbb-cccc-444444444444'},
                {'device_name': '/dev/sdc4',
                 'no_device': True},
                # volume
                {'device_name': '/dev/sdd1',
                 'snapshot_id': '55555555-aaaa-bbbb-cccc-555555555555',
                 'delete_on_termination': False},
                {'device_name': '/dev/sdd2',
                 'snapshot_id': '66666666-aaaa-bbbb-cccc-666666666666'},
                {'device_name': '/dev/sdd3',
                 'snapshot_id': '77777777-aaaa-bbbb-cccc-777777777777'},
                {'device_name': '/dev/sdd4',
                 'no_device': True}]
        # Phase 1: only the image mappings are applied.
        self.compute_api._update_image_block_device_mapping(
            self.context, instance_type, instance['uuid'], mappings)
        bdms = [self._parse_db_block_device_mapping(bdm_ref)
                for bdm_ref in db.block_device_mapping_get_all_by_instance(
                    self.context, instance['uuid'])]
        expected_result = [
            {'virtual_name': 'swap', 'device_name': '/dev/sdb1',
             'volume_size': swap_size},
            {'virtual_name': 'ephemeral0', 'device_name': '/dev/sdc1'},

            # NOTE(yamahata): ATM only ephemeral0 is supported.
            #                 they're ignored for now
            #{'virtual_name': 'ephemeral1', 'device_name': '/dev/sdc2'},
            #{'virtual_name': 'ephemeral2', 'device_name': '/dev/sdc3'}
            ]
        bdms.sort()
        expected_result.sort()
        self.assertDictListMatch(bdms, expected_result)
        # Phase 2: explicit block_device_mapping entries override/extend.
        self.compute_api._update_block_device_mapping(
            self.context, instance_types.get_default_instance_type(),
            instance['uuid'], block_device_mapping)
        bdms = [self._parse_db_block_device_mapping(bdm_ref)
                for bdm_ref in db.block_device_mapping_get_all_by_instance(
                    self.context, instance['uuid'])]
        expected_result = [
            {'snapshot_id': '00000000-aaaa-bbbb-cccc-000000000000',
               'device_name': '/dev/sda1'},

            {'virtual_name': 'swap', 'device_name': '/dev/sdb1',
             'volume_size': swap_size},
            {'snapshot_id': '11111111-aaaa-bbbb-cccc-111111111111',
               'device_name': '/dev/sdb2'},
            {'snapshot_id': '22222222-aaaa-bbbb-cccc-222222222222',
                'device_name': '/dev/sdb3'},
            {'no_device': True, 'device_name': '/dev/sdb4'},

            {'virtual_name': 'ephemeral0', 'device_name': '/dev/sdc1'},
            {'snapshot_id': '33333333-aaaa-bbbb-cccc-333333333333',
                'device_name': '/dev/sdc2'},
            {'snapshot_id': '44444444-aaaa-bbbb-cccc-444444444444',
                'device_name': '/dev/sdc3'},
            {'no_device': True, 'device_name': '/dev/sdc4'},

            {'snapshot_id': '55555555-aaaa-bbbb-cccc-555555555555',
                'device_name': '/dev/sdd1'},
            {'snapshot_id': '66666666-aaaa-bbbb-cccc-666666666666',
                'device_name': '/dev/sdd2'},
            {'snapshot_id': '77777777-aaaa-bbbb-cccc-777777777777',
                'device_name': '/dev/sdd3'},
            {'no_device': True, 'device_name': '/dev/sdd4'}]
        bdms.sort()
        expected_result.sort()
        self.assertDictListMatch(bdms, expected_result)
        # Cleanup: remove the mappings and the instance.
        for bdm in db.block_device_mapping_get_all_by_instance(
            self.context, instance['uuid']):
            db.block_device_mapping_destroy(self.context, bdm['id'])
        instance = db.instance_get_by_uuid(self.context, instance['uuid'])
        self.compute.terminate_instance(self.context, instance)
    def test_volume_size(self):
        """_volume_size maps virtual device names to instance-type sizes.

        Only 'ephemeral0' gets the ephemeral size; other ephemerals are 0.
        """
        ephemeral_size = 2
        swap_size = 3
        inst_type = {'ephemeral_gb': ephemeral_size, 'swap': swap_size}
        self.assertEqual(self.compute_api._volume_size(inst_type,
                                                       'ephemeral0'),
                         ephemeral_size)
        # ephemeral1+ are unsupported and report zero size.
        self.assertEqual(self.compute_api._volume_size(inst_type,
                                                       'ephemeral1'),
                         0)
        self.assertEqual(self.compute_api._volume_size(inst_type,
                                                       'swap'),
                         swap_size)
    def test_reservation_id_one_instance(self):
        """Verify building an instance has a reservation_id that
        matches return value from create"""
        (refs, resv_id) = self.compute_api.create(self.context,
                instance_types.get_default_instance_type(), None)
        try:
            self.assertEqual(len(refs), 1)
            self.assertEqual(refs[0]['reservation_id'], resv_id)
        finally:
            # Always clean up the instance, even on assertion failure.
            db.instance_destroy(self.context, refs[0]['uuid'])
def test_reservation_ids_two_instances(self):
"""Verify building 2 instances at once results in a
reservation_id being returned equal to reservation id set
in both instances
"""
(refs, resv_id) = self.compute_api.create(self.context,
instance_types.get_default_instance_type(), None,
min_count=2, max_count=2)
try:
self.assertEqual(len(refs), 2)
self.assertNotEqual(resv_id, None)
finally:
for instance in refs:
self.assertEqual(instance['reservation_id'], resv_id)
db.instance_destroy(self.context, refs[0]['uuid'])
    def test_instance_architecture(self):
        """Test the instance architecture"""
        # The fake-instance helper defaults architecture to x86_64.
        i_ref = self._create_fake_instance()
        self.assertEqual(i_ref['architecture'], 'x86_64')
        db.instance_destroy(self.context, i_ref['uuid'])
    def test_instance_unknown_architecture(self):
        """Test if the architecture is unknown."""
        instance = jsonutils.to_primitive(self._create_fake_instance(
                        params={'architecture': ''}))
        try:
            self.compute.run_instance(self.context, instance=instance)
            instances = db.instance_get_all(context.get_admin_context())
            instance = instances[0]
            # After boot, an empty architecture must not surface as
            # the literal string 'Unknown'.
            self.assertNotEqual(instance['architecture'], 'Unknown')
        finally:
            db.instance_destroy(self.context, instance['uuid'])
    def test_instance_name_template(self):
        """Test the instance_name template"""
        # %d expands to the integer id.
        self.flags(instance_name_template='instance-%d')
        i_ref = self._create_fake_instance()
        self.assertEqual(i_ref['name'], 'instance-%d' % i_ref['id'])
        db.instance_destroy(self.context, i_ref['uuid'])
        # Named placeholders are supported as well.
        self.flags(instance_name_template='instance-%(uuid)s')
        i_ref = self._create_fake_instance()
        self.assertEqual(i_ref['name'], 'instance-%s' % i_ref['uuid'])
        db.instance_destroy(self.context, i_ref['uuid'])
        self.flags(instance_name_template='%(id)d-%(uuid)s')
        i_ref = self._create_fake_instance()
        self.assertEqual(i_ref['name'], '%d-%s' %
                (i_ref['id'], i_ref['uuid']))
        db.instance_destroy(self.context, i_ref['uuid'])
        # not allowed.. default is uuid
        self.flags(instance_name_template='%(name)s')
        i_ref = self._create_fake_instance()
        self.assertEqual(i_ref['name'], i_ref['uuid'])
        db.instance_destroy(self.context, i_ref['uuid'])
    def test_add_remove_fixed_ip(self):
        """Smoke-test add_fixed_ip/remove_fixed_ip through the API."""
        instance = self._create_fake_instance(params={'host': FLAGS.host})
        self.compute_api.add_fixed_ip(self.context, instance, '1')
        self.compute_api.remove_fixed_ip(self.context, instance, '192.168.1.1')
        self.compute_api.delete(self.context, instance)
    def test_attach_volume_invalid(self):
        """An invalid device path is rejected before any volume work."""
        self.assertRaises(exception.InvalidDevicePath,
                self.compute_api.attach_volume,
                self.context,
                {'locked': False},
                None,
                '/invalid')
    def test_vnc_console(self):
        """Make sure we can get a vnc console for an instance.

        Mocks the two rpc.calls: fetching connect info from the compute
        host and authorizing the console with consoleauth.
        """
        fake_instance = {'uuid': 'fake_uuid',
                         'host': 'fake_compute_host'}
        fake_console_type = "novnc"
        fake_connect_info = {'token': 'fake_token',
                             'console_type': fake_console_type,
                             'host': 'fake_console_host',
                             'port': 'fake_console_port',
                             'internal_access_path': 'fake_access_path'}
        fake_connect_info2 = copy.deepcopy(fake_connect_info)
        fake_connect_info2['access_url'] = 'fake_console_url'
        self.mox.StubOutWithMock(rpc, 'call')
        rpc_msg1 = {'method': 'get_vnc_console',
                    'args': {'instance': fake_instance,
                             'console_type': fake_console_type},
                   'version': compute_rpcapi.ComputeAPI.BASE_RPC_API_VERSION}
        rpc_msg2 = {'method': 'authorize_console',
                    'args': fake_connect_info,
                    'version': '1.0'}
        rpc.call(self.context, 'compute.%s' % fake_instance['host'],
                rpc_msg1, None).AndReturn(fake_connect_info2)
        rpc.call(self.context, FLAGS.consoleauth_topic,
                rpc_msg2, None).AndReturn(None)
        self.mox.ReplayAll()
        console = self.compute_api.get_vnc_console(self.context,
                fake_instance, fake_console_type)
        self.assertEqual(console, {'url': 'fake_console_url'})
    def test_get_vnc_console_no_host(self):
        """An instance with no host is not ready for a console."""
        instance = self._create_fake_instance(params={'host': ''})
        self.assertRaises(exception.InstanceNotReady,
                          self.compute_api.get_vnc_console,
                          self.context, instance, 'novnc')
        db.instance_destroy(self.context, instance['uuid'])
    def test_console_output(self):
        """get_console_output rpc.calls the instance's compute host."""
        fake_instance = {'uuid': 'fake_uuid',
                         'host': 'fake_compute_host'}
        fake_tail_length = 699
        fake_console_output = 'fake console output'
        self.mox.StubOutWithMock(rpc, 'call')
        rpc_msg = {'method': 'get_console_output',
                   'args': {'instance': fake_instance,
                            'tail_length': fake_tail_length},
                   'version': compute_rpcapi.ComputeAPI.BASE_RPC_API_VERSION}
        rpc.call(self.context, 'compute.%s' % fake_instance['host'],
                rpc_msg, None).AndReturn(fake_console_output)
        self.mox.ReplayAll()
        output = self.compute_api.get_console_output(self.context,
                fake_instance, tail_length=fake_tail_length)
        self.assertEqual(output, fake_console_output)
def test_attach_volume(self):
"""Ensure instance can be soft rebooted"""
called = {}
def fake_check_attach(*args, **kwargs):
called['fake_check_attach'] = True
def fake_reserve_volume(*args, **kwargs):
called['fake_reserve_volume'] = True
def fake_volume_get(self, context, volume_id):
called['fake_volume_get'] = True
return {'id': volume_id}
def fake_rpc_attach_volume(self, context, **kwargs):
called['fake_rpc_attach_volume'] = True
self.stubs.Set(nova.volume.api.API, 'get', fake_volume_get)
self.stubs.Set(nova.volume.api.API, 'check_attach', fake_check_attach)
self.stubs.Set(nova.volume.api.API, 'reserve_volume',
fake_reserve_volume)
self.stubs.Set(compute_rpcapi.ComputeAPI, 'attach_volume',
fake_rpc_attach_volume)
instance = self._create_fake_instance()
self.compute_api.attach_volume(self.context, instance, 1, '/dev/vdb')
self.assertTrue(called.get('fake_check_attach'))
self.assertTrue(called.get('fake_reserve_volume'))
self.assertTrue(called.get('fake_reserve_volume'))
self.assertTrue(called.get('fake_rpc_attach_volume'))
def test_attach_volume_no_device(self):
called = {}
def fake_check_attach(*args, **kwargs):
called['fake_check_attach'] = True
def fake_reserve_volume(*args, **kwargs):
called['fake_reserve_volume'] = True
def fake_volume_get(self, context, volume_id):
called['fake_volume_get'] = True
return {'id': volume_id}
def fake_rpc_attach_volume(self, context, **kwargs):
called['fake_rpc_attach_volume'] = True
self.stubs.Set(nova.volume.api.API, 'get', fake_volume_get)
self.stubs.Set(nova.volume.api.API, 'check_attach', fake_check_attach)
self.stubs.Set(nova.volume.api.API, 'reserve_volume',
fake_reserve_volume)
self.stubs.Set(compute_rpcapi.ComputeAPI, 'attach_volume',
fake_rpc_attach_volume)
instance = self._create_fake_instance()
self.compute_api.attach_volume(self.context, instance, 1, device=None)
self.assertTrue(called.get('fake_check_attach'))
self.assertTrue(called.get('fake_reserve_volume'))
self.assertTrue(called.get('fake_reserve_volume'))
self.assertTrue(called.get('fake_rpc_attach_volume'))
    def test_inject_network_info(self):
        """Smoke-test inject_network_info on a running instance."""
        instance = self._create_fake_instance(params={'host': FLAGS.host})
        self.compute.run_instance(self.context,
                instance=jsonutils.to_primitive(instance))
        instance = self.compute_api.get(self.context, instance['uuid'])
        self.compute_api.inject_network_info(self.context, instance)
        self.compute_api.delete(self.context, instance)
    def test_reset_network(self):
        """Smoke-test reset_network on a running instance."""
        instance = self._create_fake_instance()
        self.compute.run_instance(self.context,
                instance=jsonutils.to_primitive(instance))
        instance = self.compute_api.get(self.context, instance['uuid'])
        self.compute_api.reset_network(self.context, instance)
    def test_lock(self):
        """Smoke-test locking an instance."""
        instance = self._create_fake_instance()
        self.compute_api.lock(self.context, instance)
        self.compute_api.delete(self.context, instance)
    def test_unlock(self):
        """Smoke-test unlocking an instance."""
        instance = self._create_fake_instance()
        self.compute_api.unlock(self.context, instance)
        self.compute_api.delete(self.context, instance)
    def test_get_lock(self):
        """get_lock reflects the instance's 'locked' DB column."""
        instance = self._create_fake_instance()
        self.assertFalse(self.compute_api.get_lock(self.context, instance))
        db.instance_update(self.context, instance['uuid'], {'locked': True})
        self.assertTrue(self.compute_api.get_lock(self.context, instance))
    def test_add_remove_security_group(self):
        """Smoke-test adding/removing a security group on an instance."""
        instance = self._create_fake_instance()
        self.compute.run_instance(self.context,
                instance=jsonutils.to_primitive(instance))
        instance = self.compute_api.get(self.context, instance['uuid'])
        security_group_name = self._create_group()['name']
        self.security_group_api.add_to_instance(self.context,
                                                instance,
                                                security_group_name)
        self.security_group_api.remove_from_instance(self.context,
                                                     instance,
                                                     security_group_name)
    def test_get_diagnostics(self):
        """Smoke-test fetching instance diagnostics."""
        instance = self._create_fake_instance()
        self.compute_api.get_diagnostics(self.context, instance)
        self.compute_api.delete(self.context, instance)
    def test_inject_file(self):
        """Ensure we can write a file to an instance"""
        instance = self._create_fake_instance()
        self.compute_api.inject_file(self.context, instance,
                                     "/tmp/test", "File Contents")
        db.instance_destroy(self.context, instance['uuid'])
    def test_secgroup_refresh(self):
        """A members refresh casts a refresh to the affected instance."""
        instance = self._create_fake_instance()

        def rule_get(*args, **kwargs):
            mock_rule = FakeModel({'parent_group_id': 1})
            return [mock_rule]

        def group_get(*args, **kwargs):
            mock_group = FakeModel({'instances': [instance]})
            return mock_group

        self.stubs.Set(
                   self.compute_api.db,
                   'security_group_rule_get_by_security_group_grantee',
                   rule_get)
        self.stubs.Set(self.compute_api.db, 'security_group_get', group_get)
        self.mox.StubOutWithMock(rpc, 'cast')
        topic = rpc.queue_get_for(self.context, FLAGS.compute_topic,
                                  instance['host'])
        # Exactly one cast is expected for the single affected instance.
        rpc.cast(self.context, topic,
                {"method": "refresh_instance_security_rules",
                 "args": {'instance': jsonutils.to_primitive(instance)},
                 "version":
                    compute_rpcapi.SecurityGroupAPI.BASE_RPC_API_VERSION})
        self.mox.ReplayAll()
        self.security_group_api.trigger_members_refresh(self.context, [1])
    def test_secgroup_refresh_once(self):
        """Two groups sharing one instance yield a single refresh cast."""
        instance = self._create_fake_instance()

        def rule_get(*args, **kwargs):
            mock_rule = FakeModel({'parent_group_id': 1})
            return [mock_rule]

        def group_get(*args, **kwargs):
            mock_group = FakeModel({'instances': [instance]})
            return mock_group

        self.stubs.Set(
                   self.compute_api.db,
                   'security_group_rule_get_by_security_group_grantee',
                   rule_get)
        self.stubs.Set(self.compute_api.db, 'security_group_get', group_get)
        self.mox.StubOutWithMock(rpc, 'cast')
        topic = rpc.queue_get_for(self.context, FLAGS.compute_topic,
                                  instance['host'])
        # mox expects exactly one cast even though two group ids are passed.
        rpc.cast(self.context, topic,
                {"method": "refresh_instance_security_rules",
                 "args": {'instance': jsonutils.to_primitive(instance)},
                 "version":
                    compute_rpcapi.SecurityGroupAPI.BASE_RPC_API_VERSION})
        self.mox.ReplayAll()
        self.security_group_api.trigger_members_refresh(self.context, [1, 2])
    def test_secgroup_refresh_none(self):
        """No member instances means no rpc casts at all."""
        def rule_get(*args, **kwargs):
            mock_rule = FakeModel({'parent_group_id': 1})
            return [mock_rule]

        def group_get(*args, **kwargs):
            mock_group = FakeModel({'instances': []})
            return mock_group

        self.stubs.Set(
                   self.compute_api.db,
                   'security_group_rule_get_by_security_group_grantee',
                   rule_get)
        self.stubs.Set(self.compute_api.db, 'security_group_get', group_get)
        # No rpc.cast expectation recorded: ReplayAll() fails on any cast.
        self.mox.StubOutWithMock(rpc, 'cast')
        self.mox.ReplayAll()
        self.security_group_api.trigger_members_refresh(self.context, [1])
    def test_secrule_refresh(self):
        """A rules refresh casts a refresh to the group's instance."""
        instance = self._create_fake_instance()

        def group_get(*args, **kwargs):
            mock_group = FakeModel({'instances': [instance]})
            return mock_group

        self.stubs.Set(self.compute_api.db, 'security_group_get', group_get)
        self.mox.StubOutWithMock(rpc, 'cast')
        topic = rpc.queue_get_for(self.context, FLAGS.compute_topic,
                                  instance['host'])
        rpc.cast(self.context, topic,
                {"method": "refresh_instance_security_rules",
                 "args": {'instance': jsonutils.to_primitive(instance)},
                 "version":
                    compute_rpcapi.SecurityGroupAPI.BASE_RPC_API_VERSION})
        self.mox.ReplayAll()
        self.security_group_api.trigger_rules_refresh(self.context, [1])
    def test_secrule_refresh_once(self):
        """Two groups sharing one instance yield a single rules cast."""
        instance = self._create_fake_instance()

        def group_get(*args, **kwargs):
            mock_group = FakeModel({'instances': [instance]})
            return mock_group

        self.stubs.Set(self.compute_api.db, 'security_group_get', group_get)
        self.mox.StubOutWithMock(rpc, 'cast')
        topic = rpc.queue_get_for(self.context, FLAGS.compute_topic,
                                  instance['host'])
        # One expected cast despite two group ids.
        rpc.cast(self.context, topic,
                {"method": "refresh_instance_security_rules",
                 "args": {'instance': jsonutils.to_primitive(instance)},
                 "version":
                    compute_rpcapi.SecurityGroupAPI.BASE_RPC_API_VERSION})
        self.mox.ReplayAll()
        self.security_group_api.trigger_rules_refresh(self.context, [1, 2])
    def test_secrule_refresh_none(self):
        """Groups with no instances trigger no rpc casts."""
        def group_get(*args, **kwargs):
            mock_group = FakeModel({'instances': []})
            return mock_group

        self.stubs.Set(self.compute_api.db, 'security_group_get', group_get)
        # No cast expectation recorded; any cast would fail verification.
        self.mox.StubOutWithMock(rpc, 'cast')
        self.mox.ReplayAll()
        self.security_group_api.trigger_rules_refresh(self.context, [1, 2])
    def test_live_migrate(self):
        """live_migrate puts the instance into the MIGRATING task state."""
        instance, instance_uuid = self._run_instance()

        self.compute_api.live_migrate(self.context, instance,
                                      block_migration=True,
                                      disk_over_commit=True,
                                      host='fake_dest_host')

        instance = db.instance_get_by_uuid(self.context, instance_uuid)
        self.assertEqual(instance['task_state'], task_states.MIGRATING)

        db.instance_destroy(self.context, instance['uuid'])
def fake_rpc_method(context, topic, msg, do_cast=True):
    """No-op stand-in for rpc.call/rpc.cast used by the aggregate tests."""
    return None
def _create_service_entries(context, values=None):
    """Create nova-compute service rows for each zone/host pair.

    :param context: request context used for the DB calls
    :param values: optional mapping of availability zone -> list of host
                   names; a default two-zone layout is used when omitted
    :returns: the mapping that was used, so callers can inspect it
    """
    # NOTE: the default used to be a mutable dict default argument; it was
    # never mutated, but the None-sentinel form avoids the pitfall.
    if values is None:
        values = {'avail_zone1': ['fake_host1', 'fake_host2'],
                  'avail_zone2': ['fake_host3']}
    for avail_zone, hosts in values.iteritems():
        for host in hosts:
            db.service_create(context,
                              {'host': host,
                               'binary': 'nova-compute',
                               'topic': 'compute',
                               'report_count': 0,
                               'availability_zone': avail_zone})
    return values
class ComputeAPIAggrTestCase(BaseTestCase):
    """This is for unit coverage of aggregate-related methods
    defined in nova.compute.api."""

    def setUp(self):
        super(ComputeAPIAggrTestCase, self).setUp()
        self.api = compute_api.AggregateAPI()
        self.context = context.get_admin_context()
        # Aggregate operations fan out over rpc; stub both directions out.
        self.stubs.Set(rpc, 'call', fake_rpc_method)
        self.stubs.Set(rpc, 'cast', fake_rpc_method)

    def test_create_invalid_availability_zone(self):
        """Ensure InvalidAggregateAction is raised with wrong avail_zone."""
        self.assertRaises(exception.InvalidAggregateAction,
                          self.api.create_aggregate,
                          self.context, 'fake_aggr', 'fake_avail_zone')

    def test_update_aggregate_metadata(self):
        """Ensure metadata can be updated"""
        _create_service_entries(self.context, {'fake_zone': ['fake_host']})
        aggr = self.api.create_aggregate(self.context, 'fake_aggregate',
                                         'fake_zone')
        metadata = {'foo_key1': 'foo_value1',
                    'foo_key2': 'foo_value2', }
        aggr = self.api.update_aggregate_metadata(self.context, aggr['id'],
                                                  metadata)
        # A None value deletes the key on the next update.
        metadata['foo_key1'] = None
        expected = self.api.update_aggregate_metadata(self.context,
                                                     aggr['id'], metadata)
        self.assertDictMatch(expected['metadata'], {'foo_key2': 'foo_value2'})

    def test_delete_aggregate(self):
        """Ensure we can delete an aggregate."""
        _create_service_entries(self.context, {'fake_zone': ['fake_host']})
        aggr = self.api.create_aggregate(self.context, 'fake_aggregate',
                                         'fake_zone')
        self.api.delete_aggregate(self.context, aggr['id'])
        # NOTE(review): 'expected' is unused; the call just verifies the
        # soft-deleted row is still readable with read_deleted='yes'.
        expected = db.aggregate_get(self.context.elevated(read_deleted='yes'),
                                    aggr['id'])
        self.assertRaises(exception.AggregateNotFound,
                          self.api.delete_aggregate, self.context, aggr['id'])

    def test_delete_non_empty_aggregate(self):
        """Ensure InvalidAggregateAction is raised when non empty aggregate."""
        _create_service_entries(self.context,
                                {'fake_availability_zone': ['fake_host']})
        aggr = self.api.create_aggregate(self.context, 'fake_aggregate',
                                         'fake_availability_zone')
        self.api.add_host_to_aggregate(self.context, aggr['id'], 'fake_host')
        self.assertRaises(exception.InvalidAggregateAction,
                          self.api.delete_aggregate, self.context, aggr['id'])

    def test_add_host_to_aggregate(self):
        """Ensure we can add a host to an aggregate."""
        values = _create_service_entries(self.context)
        fake_zone = values.keys()[0]
        fake_host = values[fake_zone][0]
        aggr = self.api.create_aggregate(self.context,
                                         'fake_aggregate', fake_zone)
        aggr = self.api.add_host_to_aggregate(self.context,
                                              aggr['id'], fake_host)
        self.assertEqual(len(aggr['hosts']), 1)

    def test_add_host_to_aggregate_multiple(self):
        """Ensure we can add multiple hosts to an aggregate."""
        values = _create_service_entries(self.context)
        fake_zone = values.keys()[0]
        aggr = self.api.create_aggregate(self.context,
                                         'fake_aggregate', fake_zone)
        for host in values[fake_zone]:
            aggr = self.api.add_host_to_aggregate(self.context,
                                                  aggr['id'], host)
        self.assertEqual(len(aggr['hosts']), len(values[fake_zone]))

    def test_add_host_to_aggregate_zones_mismatch(self):
        """Ensure InvalidAggregateAction is raised when zones don't match."""
        _create_service_entries(self.context, {'fake_zoneX': ['fake_host1'],
                                               'fake_zoneY': ['fake_host2']})
        aggr = self.api.create_aggregate(self.context,
                                         'fake_aggregate', 'fake_zoneY')
        self.assertRaises(exception.InvalidAggregateAction,
                          self.api.add_host_to_aggregate,
                          self.context, aggr['id'], 'fake_host1')

    def test_add_host_to_aggregate_raise_not_found(self):
        """Ensure ComputeHostNotFound is raised when adding invalid host."""
        _create_service_entries(self.context, {'fake_zone': ['fake_host']})
        aggr = self.api.create_aggregate(self.context, 'fake_aggregate',
                                         'fake_zone')
        self.assertRaises(exception.ComputeHostNotFound,
                          self.api.add_host_to_aggregate,
                          self.context, aggr['id'], 'invalid_host')

    def test_remove_host_from_aggregate_active(self):
        """Ensure we can remove a host from an aggregate."""
        values = _create_service_entries(self.context)
        fake_zone = values.keys()[0]
        aggr = self.api.create_aggregate(self.context,
                                         'fake_aggregate', fake_zone)
        for host in values[fake_zone]:
            aggr = self.api.add_host_to_aggregate(self.context,
                                                  aggr['id'], host)
        expected = self.api.remove_host_from_aggregate(self.context,
                                                       aggr['id'],
                                                       values[fake_zone][0])
        self.assertEqual(len(aggr['hosts']) - 1, len(expected['hosts']))

    def test_remove_host_from_aggregate_raise_not_found(self):
        """Ensure ComputeHostNotFound is raised when removing invalid host."""
        _create_service_entries(self.context, {'fake_zone': ['fake_host']})
        aggr = self.api.create_aggregate(self.context, 'fake_aggregate',
                                         'fake_zone')
        self.assertRaises(exception.ComputeHostNotFound,
                          self.api.remove_host_from_aggregate,
                          self.context, aggr['id'], 'invalid_host')
class ComputeAggrTestCase(BaseTestCase):
    """This is for unit coverage of aggregate-related methods
    defined in nova.compute.manager."""

    def setUp(self):
        super(ComputeAggrTestCase, self).setUp()
        self.context = context.get_admin_context()
        values = {'name': 'test_aggr',
                  'availability_zone': 'test_zone'}
        # One aggregate row shared by every test in this case.
        self.aggr = db.aggregate_create(self.context, values)

    def test_add_aggregate_host(self):
        """The manager delegates add_aggregate_host to the driver."""
        def fake_driver_add_to_aggregate(context, aggregate, host, **_ignore):
            fake_driver_add_to_aggregate.called = True
            return {"foo": "bar"}
        self.stubs.Set(self.compute.driver, "add_to_aggregate",
                       fake_driver_add_to_aggregate)

        self.compute.add_aggregate_host(self.context, self.aggr.id, "host")
        self.assertTrue(fake_driver_add_to_aggregate.called)

    def test_remove_aggregate_host(self):
        """The manager delegates remove_aggregate_host to the driver."""
        def fake_driver_remove_from_aggregate(context, aggregate, host,
                                              **_ignore):
            fake_driver_remove_from_aggregate.called = True
            self.assertEqual("host", host, "host")
            return {"foo": "bar"}
        self.stubs.Set(self.compute.driver, "remove_from_aggregate",
                       fake_driver_remove_from_aggregate)

        self.compute.remove_aggregate_host(self.context, self.aggr.id, "host")
        self.assertTrue(fake_driver_remove_from_aggregate.called)

    def test_add_aggregate_host_passes_slave_info_to_driver(self):
        """slave_info kwarg must be forwarded to the driver on add."""
        def driver_add_to_aggregate(context, aggregate, host, **kwargs):
            self.assertEquals(self.context, context)
            self.assertEquals(aggregate.id, self.aggr.id)
            self.assertEquals(host, "the_host")
            self.assertEquals("SLAVE_INFO", kwargs.get("slave_info"))

        self.stubs.Set(self.compute.driver, "add_to_aggregate",
                       driver_add_to_aggregate)

        self.compute.add_aggregate_host(self.context, self.aggr.id,
            "the_host", slave_info="SLAVE_INFO")

    def test_remove_from_aggregate_passes_slave_info_to_driver(self):
        """slave_info kwarg must be forwarded to the driver on remove."""
        def driver_remove_from_aggregate(context, aggregate, host, **kwargs):
            self.assertEquals(self.context, context)
            self.assertEquals(aggregate.id, self.aggr.id)
            self.assertEquals(host, "the_host")
            self.assertEquals("SLAVE_INFO", kwargs.get("slave_info"))

        self.stubs.Set(self.compute.driver, "remove_from_aggregate",
                       driver_remove_from_aggregate)

        self.compute.remove_aggregate_host(self.context,
            self.aggr.id, "the_host", slave_info="SLAVE_INFO")
class ComputePolicyTestCase(BaseTestCase):
    """Verify that compute API entry points enforce policy rules."""

    def setUp(self):
        super(ComputePolicyTestCase, self).setUp()
        nova.policy.reset()
        nova.policy.init()

        self.compute_api = compute.API()

    def tearDown(self):
        super(ComputePolicyTestCase, self).tearDown()
        nova.policy.reset()

    def _set_rules(self, rules):
        """Install the given policy rules for the duration of a test."""
        common_policy.set_brain(common_policy.HttpBrain(rules))

    def test_actions_are_prefixed(self):
        """check_policy prefixes the action with 'compute:'."""
        self.mox.StubOutWithMock(nova.policy, 'enforce')
        nova.policy.enforce(self.context, 'compute:reboot', {})
        self.mox.ReplayAll()
        nova.compute.api.check_policy(self.context, 'reboot', {})

    def test_wrapped_method(self):
        """Policy denial on delete raises; allowing it lets delete run."""
        instance = self._create_fake_instance(params={'host': None})

        # force delete to fail
        rules = {"compute:delete": [["false:false"]]}
        self._set_rules(rules)

        self.assertRaises(exception.PolicyNotAuthorized,
                          self.compute_api.delete, self.context, instance)

        # reset rules to allow deletion
        rules = {"compute:delete": []}
        self._set_rules(rules)

        self.compute_api.delete(self.context, instance)

    def test_create_fail(self):
        rules = {"compute:create": [["false:false"]]}
        self._set_rules(rules)

        self.assertRaises(exception.PolicyNotAuthorized,
                          self.compute_api.create, self.context, '1', '1')

    def test_create_attach_volume_fail(self):
        # NOTE(review): the attach_volume/attach_network rule dicts were
        # previously swapped between this test and the next; both still
        # raised PolicyNotAuthorized, hiding the mix-up. This test now
        # denies the attach_volume policy, matching its name.
        rules = {
            "compute:create": [],
            "compute:create:attach_network": [],
            "compute:create:attach_volume": [["false:false"]],
        }
        self._set_rules(rules)

        self.assertRaises(exception.PolicyNotAuthorized,
                          self.compute_api.create, self.context, '1', '1',
                          requested_networks='blah',
                          block_device_mapping='blah')

    def test_create_attach_network_fail(self):
        # Denies the attach_network policy, matching this test's name.
        rules = {
            "compute:create": [],
            "compute:create:attach_network": [["false:false"]],
            "compute:create:attach_volume": [],
        }
        self._set_rules(rules)

        self.assertRaises(exception.PolicyNotAuthorized,
                          self.compute_api.create, self.context, '1', '1',
                          requested_networks='blah',
                          block_device_mapping='blah')

    def test_get_fail(self):
        instance = self._create_fake_instance()

        rules = {
            "compute:get": [["false:false"]],
        }
        self._set_rules(rules)

        self.assertRaises(exception.PolicyNotAuthorized,
                          self.compute_api.get, self.context, instance['uuid'])

    def test_get_all_fail(self):
        rules = {
            "compute:get_all": [["false:false"]],
        }
        self._set_rules(rules)

        self.assertRaises(exception.PolicyNotAuthorized,
                          self.compute_api.get_all, self.context)

    def test_get_instance_faults(self):
        instance1 = self._create_fake_instance()
        instance2 = self._create_fake_instance()
        instances = [instance1, instance2]

        rules = {
            "compute:get_instance_faults": [["false:false"]],
        }
        self._set_rules(rules)

        self.assertRaises(exception.PolicyNotAuthorized,
                          self.compute_api.get_instance_faults,
                          self.context, instances)
class ComputeHostAPITestCase(BaseTestCase):
    """Exercise HostAPI operations that fan out over rpc.call."""

    def setUp(self):
        super(ComputeHostAPITestCase, self).setUp()
        self.host_api = compute_api.HostAPI()

    def _rpc_call_stub(self, recorded):
        """Replace rpc.call with a recorder capturing its arguments."""
        def _capture(context, topic, msg, timeout=None):
            recorded['context'] = context
            recorded['topic'] = topic
            recorded['msg'] = msg
        self.stubs.Set(rpc, 'call', _capture)

    def test_set_host_enabled(self):
        """set_host_enabled rpc.calls the named compute host."""
        ctxt = context.RequestContext('fake', 'fake')
        recorded = {}
        self._rpc_call_stub(recorded)
        self.host_api.set_host_enabled(ctxt, 'fake_host', 'fake_enabled')
        expected_msg = {
            'method': 'set_host_enabled',
            'args': {'enabled': 'fake_enabled'},
            'version': compute_rpcapi.ComputeAPI.BASE_RPC_API_VERSION}
        self.assertEqual(ctxt, recorded['context'])
        self.assertEqual('compute.fake_host', recorded['topic'])
        self.assertEqual(expected_msg, recorded['msg'])

    def test_get_host_uptime(self):
        """get_host_uptime rpc.calls the named compute host."""
        ctxt = context.RequestContext('fake', 'fake')
        recorded = {}
        self._rpc_call_stub(recorded)
        self.host_api.get_host_uptime(ctxt, 'fake_host')
        expected_msg = {
            'method': 'get_host_uptime',
            'args': {},
            'version': compute_rpcapi.ComputeAPI.BASE_RPC_API_VERSION}
        self.assertEqual(ctxt, recorded['context'])
        self.assertEqual('compute.fake_host', recorded['topic'])
        self.assertEqual(expected_msg, recorded['msg'])

    def test_host_power_action(self):
        """host_power_action forwards the action over rpc."""
        ctxt = context.RequestContext('fake', 'fake')
        recorded = {}
        self._rpc_call_stub(recorded)
        self.host_api.host_power_action(ctxt, 'fake_host', 'fake_action')
        expected_msg = {
            'method': 'host_power_action',
            'args': {'action': 'fake_action'},
            'version': compute_rpcapi.ComputeAPI.BASE_RPC_API_VERSION}
        self.assertEqual(ctxt, recorded['context'])
        self.assertEqual('compute.fake_host', recorded['topic'])
        self.assertEqual(expected_msg, recorded['msg'])

    def test_set_host_maintenance(self):
        """set_host_maintenance sends host_maintenance_mode over rpc."""
        ctxt = context.RequestContext('fake', 'fake')
        recorded = {}
        self._rpc_call_stub(recorded)
        self.host_api.set_host_maintenance(ctxt, 'fake_host', 'fake_mode')
        expected_msg = {
            'method': 'host_maintenance_mode',
            'args': {'host': 'fake_host', 'mode': 'fake_mode'},
            'version': compute_rpcapi.ComputeAPI.BASE_RPC_API_VERSION}
        self.assertEqual(ctxt, recorded['context'])
        self.assertEqual('compute.fake_host', recorded['topic'])
        self.assertEqual(expected_msg, recorded['msg'])
class KeypairAPITestCase(BaseTestCase):
def setUp(self):
super(KeypairAPITestCase, self).setUp()
self.keypair_api = compute_api.KeypairAPI()
self.ctxt = context.RequestContext('fake', 'fake')
self._keypair_db_call_stubs()
self.existing_key_name = 'fake existing key name'
self.pub_key = ('ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDLnVkqJu9WVf'
'/5StU3JCrBR2r1s1j8K1tux+5XeSvdqaM8lMFNorzbY5iyoBbR'
'S56gy1jmm43QsMPJsrpfUZKcJpRENSe3OxIIwWXRoiapZe78u/'
'a9xKwj0avFYMcws9Rk9iAB7W4K1nEJbyCPl5lRBoyqeHBqrnnu'
'XWEgGxJCK0Ah6wcOzwlEiVjdf4kxzXrwPHyi7Ea1qvnNXTziF8'
'yYmUlH4C8UXfpTQckwSwpDyxZUc63P8q+vPbs3Q2kw+/7vvkCK'
'HJAXVI+oCiyMMfffoTq16M1xfV58JstgtTqAXG+ZFpicGajREU'
'E/E3hO5MGgcHmyzIrWHKpe1n3oEGuz')
self.fingerprint = '4e:48:c6:a0:4a:f9:dd:b5:4c:85:54:5a:af:43:47:5a'
def _keypair_db_call_stubs(self):
def db_key_pair_get_all_by_user(self, user_id):
return []
def db_key_pair_create(self, keypair):
pass
def db_key_pair_destroy(context, user_id, name):
pass
def db_key_pair_get(context, user_id, name):
if name == self.existing_key_name:
return {'name': self.existing_key_name,
'public_key': self.pub_key,
'fingerprint': self.fingerprint}
else:
raise exception.KeypairNotFound(user_id=user_id, name=name)
self.stubs.Set(db, "key_pair_get_all_by_user",
db_key_pair_get_all_by_user)
self.stubs.Set(db, "key_pair_create",
db_key_pair_create)
self.stubs.Set(db, "key_pair_destroy",
db_key_pair_destroy)
self.stubs.Set(db, "key_pair_get",
db_key_pair_get)
def test_create_keypair(self):
keypair = self.keypair_api.create_key_pair(self.ctxt,
self.ctxt.user_id, 'foo')
self.assertEqual('foo', keypair['name'])
def test_create_keypair_name_too_long(self):
self.assertRaises(exception.InvalidKeypair,
self.keypair_api.create_key_pair,
self.ctxt, self.ctxt.user_id, 'x' * 256)
def test_create_keypair_invalid_chars(self):
self.assertRaises(exception.InvalidKeypair,
self.keypair_api.create_key_pair,
self.ctxt, self.ctxt.user_id, '* BAD CHARACTERS! *')
def test_create_keypair_already_exists(self):
self.assertRaises(exception.KeyPairExists,
self.keypair_api.create_key_pair,
self.ctxt, self.ctxt.user_id,
self.existing_key_name)
def test_create_keypair_quota_limit(self):
def fake_quotas_count(self, context, resource, *args, **kwargs):
return FLAGS.quota_key_pairs
self.stubs.Set(QUOTAS, "count", fake_quotas_count)
self.assertRaises(exception.KeypairLimitExceeded,
self.keypair_api.create_key_pair,
self.ctxt, self.ctxt.user_id, 'foo')
    def test_import_keypair(self):
        """Importing a valid public key preserves name, fingerprint and key."""
        keypair = self.keypair_api.import_key_pair(self.ctxt,
                                                   self.ctxt.user_id,
                                                   'foo',
                                                   self.pub_key)
        self.assertEqual('foo', keypair['name'])
        self.assertEqual(self.fingerprint, keypair['fingerprint'])
        self.assertEqual(self.pub_key, keypair['public_key'])
    def test_import_keypair_bad_public_key(self):
        """Unparseable public key data is rejected as InvalidKeypair."""
        self.assertRaises(exception.InvalidKeypair,
                          self.keypair_api.import_key_pair,
                          self.ctxt, self.ctxt.user_id, 'foo', 'bad key data')
    def test_import_keypair_name_too_long(self):
        """Import enforces the same 255-character name limit as create."""
        self.assertRaises(exception.InvalidKeypair,
                          self.keypair_api.import_key_pair,
                          self.ctxt, self.ctxt.user_id, 'x' * 256,
                          self.pub_key)
    def test_import_keypair_invalid_chars(self):
        """Import enforces the same character restrictions as create."""
        self.assertRaises(exception.InvalidKeypair,
                          self.keypair_api.import_key_pair,
                          self.ctxt, self.ctxt.user_id,
                          '* BAD CHARACTERS! *', self.pub_key)
    def test_import_keypair_quota_limit(self):
        """Import also counts against the key-pair quota."""
        def fake_quotas_count(self, context, resource, *args, **kwargs):
            # Pretend the user already consumed the full key-pair quota.
            return FLAGS.quota_key_pairs
        self.stubs.Set(QUOTAS, "count", fake_quotas_count)
        self.assertRaises(exception.KeypairLimitExceeded,
                          self.keypair_api.import_key_pair,
                          self.ctxt, self.ctxt.user_id, 'foo', self.pub_key)
    def test_get_keypair(self):
        """Fetching an existing keypair returns its stored record."""
        keypair = self.keypair_api.get_key_pair(self.ctxt,
                                                self.ctxt.user_id,
                                                self.existing_key_name)
        self.assertEqual(self.existing_key_name, keypair['name'])
class DisabledInstanceTypesTestCase(BaseTestCase):
    """
    Some instance-types are marked 'disabled' which means that they will not
    show up in customer-facing listings. We do, however, want those
    instance-types to be available for emergency migrations and for rebuilding
    of existing instances.

    One legitimate use of the 'disabled' field would be when phasing out a
    particular instance-type. We still want customers to be able to use an
    instance of the old type, and we want Ops to be able to perform
    migrations against it, but we *don't* want customers building new slices
    with this phased-out instance-type.
    """
    def setUp(self):
        super(DisabledInstanceTypesTestCase, self).setUp()
        self.compute_api = compute.API()
        self.inst_type = instance_types.get_default_instance_type()

    def test_can_build_instance_from_visible_instance_type(self):
        self.inst_type['disabled'] = False

        self.assertNotRaises(exception.InstanceTypeNotFound,
            self.compute_api.create, self.context, self.inst_type, None,
            exc_msg="Visible instance-types can be built from")

    def test_cannot_build_instance_from_disabled_instance_type(self):
        self.inst_type['disabled'] = True

        self.assertRaises(exception.InstanceTypeNotFound,
            self.compute_api.create, self.context, self.inst_type, None)

    def test_can_rebuild_instance_from_visible_instance_type(self):
        instance = self._create_fake_instance()
        image_href = None
        admin_password = 'blah'

        # BUG FIX: this test exercises the *visible* (enabled) case, so the
        # instance-type must not be disabled. The original set this to True,
        # which duplicated test_can_rebuild_instance_from_disabled_instance_type
        # below and left the visible path untested.
        instance['instance_type']['disabled'] = False

        # Assert no errors were raised
        self.assertNotRaises(None,
            self.compute_api.rebuild, self.context, instance, image_href,
            admin_password,
            exc_msg="Visible instance-types can be rebuilt from")

    def test_can_rebuild_instance_from_disabled_instance_type(self):
        """
        A rebuild or a restore should only change the 'image',
        not the 'instance_type'. Therefore, it should be allowed even
        when the slice is on a disabled type already.
        """
        instance = self._create_fake_instance()
        image_href = None
        admin_password = 'blah'

        instance['instance_type']['disabled'] = True

        # Assert no errors were raised
        self.assertNotRaises(None,
            self.compute_api.rebuild, self.context, instance, image_href,
            admin_password,
            exc_msg="Disabled instance-types can be rebuilt from")

    def test_can_resize_to_visible_instance_type(self):
        instance = self._create_fake_instance()
        orig_get_instance_type_by_flavor_id =\
            instance_types.get_instance_type_by_flavor_id

        def fake_get_instance_type_by_flavor_id(flavor_id):
            # Force the target flavor to appear enabled.
            instance_type = orig_get_instance_type_by_flavor_id(flavor_id)
            instance_type['disabled'] = False
            return instance_type

        self.stubs.Set(instance_types, 'get_instance_type_by_flavor_id',
                       fake_get_instance_type_by_flavor_id)

        # FIXME(sirp): for legacy this raises FlavorNotFound instead of
        # InstanceTypeNot; we should eventually make it raise
        # InstanceTypeNotFound for consistency.
        self.assertNotRaises(exception.FlavorNotFound,
            self.compute_api.resize, self.context, instance, '4',
            exc_msg="Visible flavors can be resized to")

    def test_cannot_resize_to_disabled_instance_type(self):
        instance = self._create_fake_instance()
        orig_get_instance_type_by_flavor_id = \
            instance_types.get_instance_type_by_flavor_id

        def fake_get_instance_type_by_flavor_id(flavor_id):
            # Force the target flavor to appear disabled.
            instance_type = orig_get_instance_type_by_flavor_id(flavor_id)
            instance_type['disabled'] = True
            return instance_type

        self.stubs.Set(instance_types, 'get_instance_type_by_flavor_id',
                       fake_get_instance_type_by_flavor_id)

        # FIXME(sirp): for legacy this raises FlavorNotFound instead of
        # InstanceTypeNot; we should eventually make it raise
        # InstanceTypeNotFound for consistency.
        self.assertRaises(exception.FlavorNotFound,
            self.compute_api.resize, self.context, instance, '4')

    def test_can_migrate_to_visible_instance_type(self):
        instance = self._create_fake_instance()
        instance['instance_type']['disabled'] = False

        # FIXME(sirp): for legacy this raises FlavorNotFound instead of
        # InstanceTypeNot; we should eventually make it raise
        # InstanceTypeNotFound for consistency.
        self.assertNotRaises(exception.FlavorNotFound,
            self.compute_api.resize, self.context, instance, None,
            exc_msg="Visible flavors can be migrated to")

    def test_can_migrate_to_disabled_instance_type(self):
        """
        We don't want to require a customer's instance-type to change when ops
        is migrating a failed server.
        """
        instance = self._create_fake_instance()
        instance['instance_type']['disabled'] = True

        # FIXME(sirp): for legacy this raises FlavorNotFound instead of
        # InstanceTypeNot; we should eventually make it raise
        # InstanceTypeNotFound for consistency.
        self.assertNotRaises(exception.FlavorNotFound,
            self.compute_api.resize, self.context, instance, None,
            exc_msg="Disabled flavors can be migrated to")
class ComputeReschedulingTestCase(BaseTestCase):
    """Tests related to re-scheduling build requests"""

    def setUp(self):
        super(ComputeReschedulingTestCase, self).setUp()
        self._reschedule = self._reschedule_partial()

        def fake_update(*args, **kwargs):
            # Capture the task_state passed to _instance_update so tests can
            # assert on it.
            self.updated_task_state = kwargs.get('task_state')
        self.stubs.Set(self.compute, '_instance_update', fake_update)

    def _reschedule_partial(self):
        """Return compute._reschedule partially applied with dummy build args,
        leaving request_spec/filter_properties overridable per test."""
        uuid = "12-34-56-78-90"

        requested_networks = None
        admin_password = None
        injected_files = None
        is_first_time = False

        return functools.partial(self.compute._reschedule, self.context, uuid,
                                 requested_networks, admin_password,
                                 injected_files, is_first_time,
                                 request_spec=None, filter_properties={})

    def test_reschedule_no_filter_properties(self):
        """no filter_properties will disable re-scheduling"""
        self.assertFalse(self._reschedule())

    def test_reschedule_no_retry_info(self):
        """no retry info will also disable re-scheduling"""
        filter_properties = {}
        self.assertFalse(self._reschedule(filter_properties=filter_properties))

    def test_reschedule_no_request_spec(self):
        """no request spec will also disable re-scheduling"""
        retry = dict(num_attempts=1)
        filter_properties = dict(retry=retry)
        self.assertFalse(self._reschedule(filter_properties=filter_properties))

    def test_reschedule_success(self):
        """With retry info and a request spec, rescheduling proceeds and the
        instance goes back to SCHEDULING with a single uuid in the spec."""
        retry = dict(num_attempts=1)
        filter_properties = dict(retry=retry)
        request_spec = {'instance_uuids': ['foo', 'bar']}
        self.assertTrue(self._reschedule(filter_properties=filter_properties,
                                         request_spec=request_spec))
        self.assertEqual(1, len(request_spec['instance_uuids']))
        self.assertEqual(self.updated_task_state, task_states.SCHEDULING)
class ThatsNoOrdinaryRabbitException(Exception):
    """Marker exception used to simulate an unexpected spawn failure."""
class ComputeReschedulingExceptionTestCase(BaseTestCase):
    """Tests for re-scheduling exception handling logic"""

    def setUp(self):
        super(ComputeReschedulingExceptionTestCase, self).setUp()

        # cause _spawn to raise an exception to test the exception logic:
        def exploding_spawn(*args, **kwargs):
            raise ThatsNoOrdinaryRabbitException()
        self.stubs.Set(self.compute, '_spawn',
                       exploding_spawn)

        self.fake_instance = jsonutils.to_primitive(
                self._create_fake_instance())
        self.instance_uuid = self.fake_instance['uuid']

    def test_exception_with_rescheduling_disabled(self):
        """Spawn fails and re-scheduling is disabled."""
        # this won't be re-scheduled (no filter_properties/retry info):
        self.assertRaises(ThatsNoOrdinaryRabbitException,
                self.compute._run_instance, self.context,
                None, {}, None, None, None, None, self.fake_instance)

    def test_exception_with_rescheduling_enabled(self):
        """Spawn fails and re-scheduling is enabled.  Original exception
        should *not* be re-raised.
        """
        # provide the expected status so that this one will be re-scheduled:
        retry = dict(num_attempts=1)
        filter_properties = dict(retry=retry)
        request_spec = dict(num_attempts=1)
        self.assertNotRaises(ThatsNoOrdinaryRabbitException,
                self.compute._run_instance, self.context,
                filter_properties=filter_properties, request_spec=request_spec,
                instance=self.fake_instance)

    def test_exception_context_cleared(self):
        """Test with no rescheduling and an additional exception occurs
        clearing the original build error's exception context.
        """
        # clears the original exception context:
        class FleshWoundException(Exception):
            pass

        def reschedule_explode(*args, **kwargs):
            raise FleshWoundException()
        self.stubs.Set(self.compute, '_reschedule', reschedule_explode)

        # the original exception should now be raised:
        self.assertRaises(ThatsNoOrdinaryRabbitException,
                self.compute._run_instance, self.context,
                None, {}, None, None, None, None, self.fake_instance)
| apache-2.0 |
arthurchan1111/EventPlanner | node_modules/node-gyp/gyp/tools/pretty_gyp.py | 2618 | 4756 | #!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Pretty-prints the contents of a GYP file."""
import sys
import re
# Regex to remove comments when we're counting braces.
COMMENT_RE = re.compile(r'\s*#.*')
# Regex to remove quoted strings when we're counting braces.
# It takes into account quoted quotes, and makes sure that the quotes match.
# NOTE: It does not handle quotes that span more than one line, or
# cases where an escaped quote is preceeded by an escaped backslash.
QUOTE_RE_STR = r'(?P<q>[\'"])(.*?)(?<![^\\][\\])(?P=q)'
QUOTE_RE = re.compile(QUOTE_RE_STR)
def comment_replace(matchobj):
    """re.sub callback: overwrite a comment's text with '#' padding.

    Keeps the code portion and the '#' marker intact while replacing every
    character of the comment body with '#', preserving the line's length.
    """
    code_part = matchobj.group(1)
    marker = matchobj.group(2)
    comment_body = matchobj.group(3)
    return code_part + marker + '#' * len(comment_body)
def mask_comments(input):
    """Mask comment text so we skip braces inside comments.

    BUG FIX (docstring only): the original docstring was copy-pasted from
    mask_quotes and wrongly claimed this masks quoted strings.

    Returns a new list of lines where everything after a '#' is replaced
    with '#' characters of equal length (via comment_replace).
    """
    search_re = re.compile(r'(.*?)(#)(.*)')
    return [search_re.sub(comment_replace, line) for line in input]
def quote_replace(matchobj):
    """re.sub callback: blank out a quoted string's contents with 'x' chars.

    The surrounding quotes and any preceding text are preserved so that
    column positions (and hence brace counting) stay accurate.
    """
    prefix = matchobj.group(1)
    quote_char = matchobj.group(2)
    quoted_body = matchobj.group(3)
    return "%s%s%s%s" % (prefix, quote_char, 'x' * len(quoted_body), quote_char)
def mask_quotes(input):
    """Mask the quoted strings so we skip braces inside quoted strings."""
    search_re = re.compile(r'(.*?)' + QUOTE_RE_STR)
    masked_lines = []
    for line in input:
        masked_lines.append(search_re.sub(quote_replace, line))
    return masked_lines
def do_split(input, masked_input, search_re):
    """Break lines apart wherever search_re matches the masked version.

    A literal two-character '\\n' marker is repeatedly inserted at the match
    boundary (the end of group 1), then each line is split on that marker.
    Returns a tuple (new_lines, new_masked_lines), kept in lockstep.
    """
    new_lines = []
    new_masked = []
    for line, masked_line in zip(input, masked_input):
        match = search_re.match(masked_line)
        while match:
            cut = len(match.group(1))
            # Insert the split marker into both the real and masked line so
            # they stay aligned for further matching.
            line = line[:cut] + r'\n' + line[cut:]
            masked_line = masked_line[:cut] + r'\n' + masked_line[cut:]
            match = search_re.match(masked_line)
        new_lines.extend(line.split(r'\n'))
        new_masked.extend(masked_line.split(r'\n'))
    return (new_lines, new_masked)
def split_double_braces(input):
    """Masks out the quotes and comments, and then splits appropriate
    lines (lines that match the double_*_brace re's below) before
    indenting them later.

    These are used to split lines which have multiple braces on them, so
    that the indentation looks prettier when all laid out (e.g. closing
    braces make a nice diagonal line).
    """
    double_open_brace_re = re.compile(r'(.*?[\[\{\(,])(\s*)([\[\{\(])')
    double_close_brace_re = re.compile(r'(.*?[\]\}\)],?)(\s*)([\]\}\)])')

    # Masked copy so braces inside strings/comments don't trigger splits.
    masked = mask_comments(mask_quotes(input))

    (lines, masked) = do_split(input, masked, double_open_brace_re)
    (lines, masked) = do_split(lines, masked, double_close_brace_re)

    return lines
def count_braces(line):
    """Return (delta, after) for one line.

    delta is the net brace count (opens minus closes) after stripping
    comments and quoted strings; after is True when the indent change should
    be applied after printing this line rather than before it.
    """
    closing_prefix_re = re.compile(r'(.*?[^\s\]\}\)]+.*?)([\]\}\)],?)\s*$')

    stripped = COMMENT_RE.sub(r'', line)
    stripped = QUOTE_RE.sub(r"''", stripped)

    delta = 0
    for ch in stripped:
        if ch in '[({':
            delta += 1
        elif ch in '])}':
            delta -= 1

    after = delta > 0

    # This catches the special case of a closing brace having something
    # other than just whitespace ahead of it -- we don't want to
    # unindent that until after this line is printed so it stays with
    # the previous indentation level.
    if delta < 0 and closing_prefix_re.match(stripped):
        after = True

    return (delta, after)
def prettyprint_input(lines):
    """Does the main work of indenting the input based on the brace counts."""
    indent = 0
    basic_offset = 2
    last_line = ""  # NOTE(review): appears unused; kept for compatibility.
    for line in lines:
        if COMMENT_RE.match(line):
            # Pure comment lines are emitted untouched.
            print line
        else:
            line = line.strip('\r\n\t ')  # Otherwise doesn't strip \r on Unix.
            if len(line) > 0:
                (brace_diff, after) = count_braces(line)
                if brace_diff != 0:
                    if after:
                        # Print at the current level first, then adjust the
                        # indent for the lines that follow.
                        print " " * (basic_offset * indent) + line
                        indent += brace_diff
                    else:
                        indent += brace_diff
                        print " " * (basic_offset * indent) + line
                else:
                    print " " * (basic_offset * indent) + line
            else:
                print ""
        last_line = line
def main():
    """Read a GYP file from argv[1] (or stdin) and pretty-print it to stdout.

    Returns 0 on success, suitable for sys.exit().
    """
    if len(sys.argv) > 1:
        data = open(sys.argv[1]).read().splitlines()
    else:
        data = sys.stdin.read().splitlines()
    # Split up the double braces.
    lines = split_double_braces(data)

    # Indent and print the output.
    prettyprint_input(lines)
    return 0
if __name__ == '__main__':
    # Script entry point: exit with main()'s status code.
    sys.exit(main())
| mit |
raffaelespazzoli/origin | vendor/github.com/google/certificate-transparency/python/demo/vdb/verifiable_base.py | 30 | 2670 | import cPickle as pickle
from verifiable_log import VerifiableLog
from verifiable_map import VerifiableMap
# Extend this class, override _apply_operation and add your own API to:
# 1. append to log
# 2. read from map
class VerifiableBase:
    """Keeps an internal VerifiableMap synchronised with a verifiable log.

    Extend this class, override _apply_operation and add your own API to:
      1. append to log
      2. read from map
    """

    def __init__(self, log):
        # The log, such as a VerifiableLog
        self._log = log

        # Internal map that we use. The mapper is responsible for mutating this
        # when triggered by log changes.
        self._map = VerifiableMap()

        # How many log changes have been processed
        self._ops_processed = 0

        # After we process a log operation, we capture the corresponding map
        # mutation index which may be higher or lower.
        self._log_sth_to_map_sth = {0: 0}

    # Called internally to poll the log and process all updates
    def _update_from_log(self):
        log_size = self._log.get_tree_head()['tree_size']
        ctr = 0  # NOTE(review): appears unused.
        while log_size > self._ops_processed:
            for entry in self._log.get_entries(self._ops_processed, log_size - 1):
                # Call mapper
                self._apply_operation(self._ops_processed, entry, self._map)
                self._ops_processed += 1
                # Remember which map revision corresponds to this log size so
                # get()/get_tree_head() can answer for historical tree sizes.
                self._log_sth_to_map_sth[self._ops_processed] = self._map.get_tree_head()['tree_size']

    # Called for each sequenced log entry to mutate the map.
    # Override me!
    def _apply_operation(self, idx, entry, map):
        pass

    # Get the value and proof for a key. Tree size is the number of entries
    # in the log; it must not exceed what has been processed so far.
    def get(self, key, tree_size):
        if tree_size > self._ops_processed:
            raise ValueError
        return self._map.get(key, self._log_sth_to_map_sth[tree_size])

    # Return the current tree head, this triggers fetching the latest entries
    # from the log (if needed) and this tree_size should be passed to corresponding
    # get() calls.
    def get_tree_head(self, tree_size=None):
        if tree_size is None or tree_size > self._ops_processed:
            self._update_from_log()
        if tree_size is None:
            tree_size = self._ops_processed
        if tree_size > self._ops_processed:
            raise ValueError
        rv = self._map.get_tree_head(self._log_sth_to_map_sth[tree_size])
        rv['tree_size'] = tree_size  # override what the map says
        rv['log_tree_head'] = self._log.get_tree_head(tree_size)
        return rv

    def get_log_entries(self, start, end):
        # Pass-through to the underlying log (inclusive range).
        return self._log.get_entries(start, end)

    def get_log_consistency(self, first, second):
        # Consistency proof between two log tree sizes.
        return self._log.consistency_proof(first, second)

    # Debug dump of the map state at the given log tree size.
    def debug_dump(self, tree_size):
        return self._map._root.debug_dump(self._log_sth_to_map_sth[self.get_tree_head(tree_size)['tree_size']])
| apache-2.0 |
fkorotkov/pants | tests/python/pants_test/backend/jvm/tasks/test_bundle_create.py | 8 | 11859 | # coding=utf-8
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import os
from pants.backend.jvm.targets.jar_library import JarLibrary
from pants.backend.jvm.targets.java_library import JavaLibrary
from pants.backend.jvm.targets.jvm_app import JvmApp
from pants.backend.jvm.targets.jvm_binary import JvmBinary
from pants.backend.jvm.tasks.bundle_create import BundleCreate
from pants.backend.jvm.tasks.classpath_products import ClasspathProducts, MissingClasspathEntryError
from pants.build_graph.resources import Resources
from pants.java.jar.jar_dependency import JarDependency
from pants.util.contextutil import open_zip
from pants.util.dirutil import safe_file_dump, safe_mkdir, safe_mkdtemp
from pants_test.backend.jvm.tasks.jvm_binary_task_test_base import JvmBinaryTaskTestBase
class TestBundleCreate(JvmBinaryTaskTestBase):
  """Tests for the BundleCreate task: bundle layout, archives and dist links."""

  @classmethod
  def task_type(cls):
    # The task class under test.
    return BundleCreate

  def add_consolidated_bundle(self, context, tgt, files_dict):
    """Add a bundle to the classpath as if it has been consolidated already.
    """
    consolidated_classpath = context.products.get_data(
      'consolidated_classpath',
      init_func=ClasspathProducts.init_func(self.pants_workdir)
    )

    # Create a temporary directory under the target id, then dump all files.
    target_dir = os.path.join(self.test_workdir, tgt.id)
    safe_mkdir(target_dir)
    entry_path = safe_mkdtemp(dir=target_dir)
    classpath_dir = safe_mkdtemp(dir=target_dir)
    for rel_path, content in files_dict.items():
      safe_file_dump(os.path.join(entry_path, rel_path), content)

    # Create Jar to mimic consolidate classpath behavior.
    jarpath = os.path.join(classpath_dir, 'output-0.jar')
    with self.task.open_jar(jarpath, overwrite=True, compressed=False) as jar:
      jar.write(entry_path)
    consolidated_classpath.add_for_target(tgt, [('default', jarpath)])

  def setUp(self):
    """Prepare targets, context, runtime classpath. """
    super(TestBundleCreate, self).setUp()
    self.task = self.prepare_execute(self.context())

    # Third-party artifacts exercising the plain-jar, zip, classifier and
    # tar.gz classpath-entry cases.
    self.jar_artifact = self.create_artifact(org='org.example', name='foo', rev='1.0.0')
    self.zip_artifact = self.create_artifact(org='org.pantsbuild', name='bar', rev='2.0.0',
                                             ext='zip')
    self.bundle_artifact = self.create_artifact(org='org.apache', name='baz', rev='3.0.0',
                                                classifier='tests')
    self.tar_gz_artifact = self.create_artifact(org='org.gnu', name='gary', rev='4.0.0',
                                                ext='tar.gz')

    self.jar_lib = self.make_target(spec='3rdparty/jvm/org/example:foo',
                                    target_type=JarLibrary,
                                    jars=[JarDependency(org='org.example', name='foo', rev='1.0.0'),
                                          JarDependency(org='org.pantsbuild',
                                                        name='bar',
                                                        rev='2.0.0',
                                                        ext='zip'),
                                          JarDependency(org='org.apache', name='baz', rev='3.0.0',
                                                        classifier='tests'),
                                          JarDependency(org='org.gnu', name='gary', rev='4.0.0',
                                                        ext='tar.gz')])

    safe_file_dump(os.path.join(self.build_root, 'resources/foo/file'), '// dummy content')
    self.resources_target = self.make_target('//resources:foo-resources', Resources,
                                             sources=['foo/file'])

    # This is so that payload fingerprint can be computed.
    safe_file_dump(os.path.join(self.build_root, 'foo/Foo.java'), '// dummy content')
    self.java_lib_target = self.make_target('//foo:foo-library', JavaLibrary, sources=['Foo.java'])

    self.binary_target = self.make_target(spec='//foo:foo-binary',
                                          target_type=JvmBinary,
                                          dependencies=[self.java_lib_target, self.jar_lib, self.resources_target])

    self.dist_root = os.path.join(self.build_root, 'dist')

  def _create_target(self, **kwargs):
    """Create the jvm_app target under test; kwargs forwarded (e.g. archive=)."""
    return self.make_target(spec='//foo:foo-app',
                            target_type=JvmApp,
                            basename='FooApp',
                            dependencies=[self.binary_target],
                            **kwargs)

  def _setup_classpath(self, task_context):
    """As a separate prep step because to test different option settings, this needs to rerun
    after context is re-created.
    """
    classpath_products = self.ensure_consolidated_classpath_products(task_context)
    classpath_products.add_jars_for_targets(targets=[self.jar_lib],
                                            conf='default',
                                            resolved_jars=[self.jar_artifact,
                                                           self.zip_artifact,
                                                           self.bundle_artifact,
                                                           self.tar_gz_artifact])

    self.add_consolidated_bundle(task_context, self.binary_target,
                                 {'Foo.class': '', 'foo.txt': '', 'foo/file': ''})

  def test_jvm_bundle_products(self):
    """Test default setting outputs bundle products using `target.id`."""
    self.app_target = self._create_target()
    self.task_context = self.context(target_roots=[self.app_target])
    self._setup_classpath(self.task_context)
    self.execute(self.task_context)
    self._check_bundle_products('foo.foo-app', check_symlink=True)

  def test_jvm_bundle_use_basename_prefix(self):
    """Test override default setting outputs bundle products using basename."""
    self.app_target = self._create_target()
    self.set_options(use_basename_prefix=True)
    self.task_context = self.context(target_roots=[self.app_target])
    self._setup_classpath(self.task_context)
    self.execute(self.task_context)
    self._check_bundle_products('foo.foo-app', check_symlink=True, symlink_name_prefix='FooApp')

  def test_bundle_non_app_target(self):
    """Test bundle does not apply to a non jvm_app/jvm_binary target."""
    self.task_context = self.context(target_roots=[self.java_lib_target])
    self._setup_classpath(self.task_context)
    self.execute(self.task_context)
    self.assertIsNone(self.task_context.products.get('jvm_bundles').get(self.java_lib_target))
    self.assertFalse(os.path.exists(self.dist_root))

  def test_jvm_bundle_missing_product(self):
    """Test exception is thrown in case of a missing jar."""
    self.app_target = self._create_target()
    self.task_context = self.context(target_roots=[self.app_target])
    # materialize=False registers the jar without creating the file on disk.
    missing_jar_artifact = self.create_artifact(org='org.example', name='foo', rev='2.0.0',
                                                materialize=False)
    classpath_products = self.ensure_consolidated_classpath_products(self.task_context)
    classpath_products.add_jars_for_targets(targets=[self.binary_target],
                                            conf='default',
                                            resolved_jars=[missing_jar_artifact])

    with self.assertRaises(MissingClasspathEntryError):
      self.execute(self.task_context)

  def test_conflicting_basename(self):
    """Test exception is thrown when two targets share the same basename."""
    self.app_target = self._create_target()
    conflict_app_target = self.make_target(spec='//foo:foo-app-conflict',
                                           target_type=JvmApp,
                                           basename='FooApp',
                                           dependencies=[self.binary_target])
    self.set_options(use_basename_prefix=True)
    self.task_context = self.context(target_roots=[self.app_target, conflict_app_target])
    self._setup_classpath(self.task_context)
    with self.assertRaises(BundleCreate.BasenameConflictError):
      self.execute(self.task_context)

  def test_target_options(self):
    """An `archive` option on the target produces that archive type."""
    self.app_target = self._create_target(archive='zip')
    self.task_context = self.context(target_roots=[self.app_target])
    self._setup_classpath(self.task_context)
    self.execute(self.task_context)
    self._check_archive_products('foo.foo-app', 'zip', check_copy=True)

  def test_cli_suppress_target_options(self):
    """The command-line `archive` option overrides the target's setting."""
    self.set_options(archive='tar')
    self.app_target = self._create_target(archive='zip')
    self.task_context = self.context(target_roots=[self.app_target])
    self._setup_classpath(self.task_context)
    self.execute(self.task_context)
    self._check_archive_products('foo.foo-app', 'tar', check_copy=True)

  def _check_products(self, products, product_fullname):
    """Assert exactly one product named product_fullname exists; return its path."""
    self.assertIsNotNone(products)
    product_data = products.get(self.app_target)
    product_basedir = product_data.keys()[0]
    self.assertIn(self.pants_workdir, product_basedir)
    self.assertEquals(product_data[product_basedir], [product_fullname])
    product_path = os.path.join(product_basedir, product_fullname)
    return product_path

  def _check_archive_products(self, archive_name_prefix, archive_extension, check_copy=False, copy_name_prefix=''):
    """Assert the deployable archive exists (and optionally its dist/ copy)."""
    products = self.task_context.products.get('deployable_archives')
    archive_fullname = '{}.{}'.format(archive_name_prefix, archive_extension)
    archive_path = self._check_products(products, archive_fullname)
    self.assertTrue(os.path.isfile(archive_path))

    if check_copy:
      copy_fullname = '{}.{}'.format(copy_name_prefix, archive_extension) if copy_name_prefix else archive_fullname
      copy_path = os.path.join(self.dist_root, copy_fullname)
      self.assertTrue(os.path.isfile(copy_path))

  def _check_bundle_products(self, bundle_name_prefix, check_symlink=False, symlink_name_prefix=''):
    """Assert bundle dir contents, jar manifests and the dist/ symlink."""
    products = self.task_context.products.get('jvm_bundles')
    bundle_fullname = '{}-bundle'.format(bundle_name_prefix)
    bundle_root = self._check_products(products, bundle_fullname)
    self.assertTrue(os.path.isdir(bundle_root))

    self.assertEqual(sorted(['foo-binary.jar',
                             'libs/foo.foo-binary-0.jar',
                             'libs/3rdparty.jvm.org.example.foo-0.jar',
                             'libs/3rdparty.jvm.org.example.foo-1.zip',
                             'libs/3rdparty.jvm.org.example.foo-2.jar',
                             'libs/3rdparty.jvm.org.example.foo-3.gz']),
                     sorted(self.iter_files(bundle_root)))

    with open_zip(os.path.join(bundle_root, 'libs/foo.foo-binary-0.jar')) as zf:
      self.assertEqual(sorted(['META-INF/',
                               'META-INF/MANIFEST.MF',
                               'Foo.class',
                               'foo.txt',
                               'foo/',
                               'foo/file']),
                       sorted(zf.namelist()))

    # TODO verify Manifest's Class-Path
    with open_zip(os.path.join(bundle_root, 'foo-binary.jar')) as jar:
      self.assertEqual(sorted(['META-INF/', 'META-INF/MANIFEST.MF']),
                       sorted(jar.namelist()))

    # Check symlink.
    if check_symlink:
      symlink_fullname = '{}-bundle'.format(symlink_name_prefix) if symlink_name_prefix else bundle_fullname
      symlink_path = os.path.join(self.dist_root, symlink_fullname)
      self.assertTrue(os.path.islink(symlink_path))
      self.assertEqual(os.readlink(symlink_path), bundle_root)
| apache-2.0 |
JT5D/Alfred-Popclip-Sublime | Sublime Text 2/Emmet/emmet/pyv8loader.py | 2 | 16997 | # coding=utf-8
import os
import os.path
import sys
import json
import re
import threading
import subprocess
import tempfile
import collections
import platform
import semver
import time
import zipfile
is_python3 = sys.version_info[0] > 2
if is_python3:
import urllib.request as url_req
import urllib.error as url_err
import urllib.parse as url_parse
else:
import urllib
import urllib2
url_req = urllib2
url_err = urllib2
url_parse = urllib2
CHECK_INTERVAL = 60 * 60 * 24
# PACKAGES_URL = 'https://api.github.com/repos/emmetio/pyv8-binaries/downloads'
PACKAGES_URL = 'https://api.github.com/repos/emmetio/pyv8-binaries/contents'
def load(dest_path, delegate=None):
	"""
	Main function that attempts to load or update PyV8 binary.
	First, it loads list of available PyV8 modules and check if
	PyV8 should be downloaded or updated.
	@param dest_path: Path where PyV8 lib should be downloaded
	@param delegate: instance of LoaderDelegate that will receive
	loader progress events
	@returns: `True` if download progress was initiated

	NOTE(review): despite the docstring, this function returns False when no
	update is needed and None when a download is initiated.
	"""
	if delegate is None:
		delegate = LoaderDelegate()

	config = get_loader_config(dest_path)

	if 'PyV8' in sys.modules and (config['skip_update'] or time.time() < config['last_update'] + CHECK_INTERVAL):
		# No need to load anything: user already has PyV8 binary
		# or decided to disable update process
		delegate.log('No need to update PyV8')
		return False

	def on_complete(result, *args, **kwargs):
		# Persist the downloaded package id and unpack it — but only when
		# PyV8 has not been imported, since unpacking would overwrite files
		# that may be in use.
		if result is not None:
			# Most recent version was downloaded
			config['last_id'] = result

			if 'PyV8' not in sys.modules:
				# PyV8 is not loaded yet, we can safely unpack it
				unpack_pyv8(dest_path)

		config['last_update'] = time.time()
		save_loader_config(dest_path, config)
		delegate.on_complete(*args, **kwargs)

	# try to download most recent version of PyV8
	# As PyV8 for Sublime Text spreads the world, it's possible
	# that multiple distinct PyV8Loader's may start doing the same
	# job at the same time. In this case, we should check if there's
	# already a thread that load PyV8 and hook on existing thread
	# rather that creating a new one
	thread = None
	thread_exists = False
	for t in threading.enumerate():
		if hasattr(t, 'is_pyv8_thread'):
			print('PyV8: Reusing thread')
			thread = t
			thread_exists = True
			break

	if not thread:
		print('PyV8: Creating new thread')
		thread = PyV8Loader(get_arch(), dest_path, config, delegate=delegate)
		thread.start()

	delegate.on_start()

	# watch on download progress; a pre-existing thread already has its own
	# config-saving on_complete handler, so only hook the delegate callback.
	prog = ThreadProgress(thread, delegate, thread_exists)
	prog.on('complete', on_complete if not thread_exists else delegate.on_complete)
	prog.on('error', delegate.on_error)
def get_arch():
	"Returns architecture name for PyV8 binary"
	suffix = '-p3' if is_python3 else ''

	def tagged(name):
		# Append the python-3 suffix when running under Python 3.
		return '%s%s' % (name, suffix)

	is_64bit = sys.maxsize > 2**32
	system_name = platform.system()

	if system_name == 'Darwin':
		try:
			if semver.match(platform.mac_ver()[0], '<10.7.0'):
				return tagged('mac106')
		except:
			pass

		return tagged('osx')

	if system_name == 'Windows':
		if is_64bit:
			return tagged('win64')
		return tagged('win32')

	if system_name == 'Linux':
		if is_64bit:
			return tagged('linux64')
		return tagged('linux32')
def get_loader_config(path):
	"""Read loader state from <path>/config.json, merged over defaults.

	Missing file yields the defaults; stored keys override them.
	"""
	defaults = {
		"last_id": 0,
		"last_update": 0,
		"skip_update": False
	}

	config_path = os.path.join(path, 'config.json')
	if os.path.exists(config_path):
		with open(config_path) as fd:
			stored = json.load(fd)
		defaults.update(stored)

	return defaults
def save_loader_config(path, data):
	"""Serialise *data* as JSON into <path>/config.json.

	Creates *path* (and parents) when missing. Uses a context manager so the
	file handle is closed even if serialisation or the write fails — the
	original open/write/close sequence leaked the handle on error.
	"""
	config_path = os.path.join(path, 'config.json')
	if not os.path.exists(path):
		os.makedirs(path)
	with open(config_path, 'w') as fp:
		fp.write(json.dumps(data))
def clean_old_data():
	"""Delete everything in the current directory except config.json/pack.zip.

	Best-effort: failures to remove individual files are ignored.
	"""
	keep = ('config.json', 'pack.zip')
	for name in os.listdir('.'):
		if name.lower() not in keep:
			try:
				os.remove(name)
			except Exception:
				pass
def unpack_pyv8(package_dir):
	"""Extract <package_dir>/pack.zip into package_dir and delete the archive.

	Strips a single root folder when the zip has one, rejects path-traversal
	entries, and skips filenames that are invalid on Windows.

	Fixes over the original:
	- `raise '<string>'` is a TypeError on every modern Python; raise a real
	  Exception instead.
	- the "skipping file" messages used the Python-2 statement form
	  `print (...) % (...)`, which breaks under Python 3 (this module
	  explicitly supports py3 via its is_python3 branches).
	- the output file handle is now closed via a context manager.
	"""
	f = os.path.join(package_dir, 'pack.zip')
	if not os.path.exists(f):
		return

	package_zip = zipfile.ZipFile(f, 'r')

	root_level_paths = []
	last_path = None
	for path in package_zip.namelist():
		last_path = path
		if path.find('/') in [len(path) - 1, -1]:
			root_level_paths.append(path)
		if path[0] == '/' or path.find('../') != -1 or path.find('..\\') != -1:
			raise Exception('The PyV8 package contains files outside of the package dir and cannot be safely installed.')

	if last_path and len(root_level_paths) == 0:
		root_level_paths.append(last_path[0:last_path.find('/') + 1])

	prev_dir = os.getcwd()
	os.chdir(package_dir)
	clean_old_data()

	# Here we don't use .extractall() since it was having issues on OS X
	skip_root_dir = len(root_level_paths) == 1 and \
		root_level_paths[0].endswith('/')
	extracted_paths = []
	for path in package_zip.namelist():
		dest = path
		if not is_python3:
			try:
				if not isinstance(dest, unicode):
					dest = unicode(dest, 'utf-8', 'strict')
			except UnicodeDecodeError:
				dest = unicode(dest, 'cp1252', 'replace')

		if os.name == 'nt':
			regex = ':|\*|\?|"|<|>|\|'
			if re.search(regex, dest) != None:
				print(('%s: Skipping file from package named %s due to ' +
					'an invalid filename') % (__name__, path))
				continue

		# If there was only a single directory in the package, we remove
		# that folder name from the paths as we extract entries
		if skip_root_dir:
			dest = dest[len(root_level_paths[0]):]

		if os.name == 'nt':
			dest = dest.replace('/', '\\')
		else:
			dest = dest.replace('\\', '/')

		dest = os.path.join(package_dir, dest)

		def add_extracted_dirs(dir):
			while dir not in extracted_paths:
				extracted_paths.append(dir)
				dir = os.path.dirname(dir)
				if dir == package_dir:
					break

		if path.endswith('/'):
			if not os.path.exists(dest):
				os.makedirs(dest)
			add_extracted_dirs(dest)
		else:
			dest_dir = os.path.dirname(dest)
			if not os.path.exists(dest_dir):
				os.makedirs(dest_dir)
			add_extracted_dirs(dest_dir)
			extracted_paths.append(dest)
			try:
				with open(dest, 'wb') as out_fp:
					out_fp.write(package_zip.read(path))
			except (IOError, UnicodeDecodeError):
				print(('%s: Skipping file from package named %s due to ' +
					'an invalid filename') % (__name__, path))

	package_zip.close()

	os.chdir(prev_dir)
	os.remove(f)
class LoaderDelegate():
    """
    Abstract class used to display PyV8 binary download progress,
    and provide some settings for downloader
    """
    def __init__(self, settings=None):
        # Use None as the default instead of a dict literal: a mutable
        # default would be created once and shared by every instance,
        # so one delegate's settings would leak into all the others.
        self.settings = {} if settings is None else settings

    def on_start(self, *args, **kwargs):
        "Invoked when download process is initiated"
        pass

    def on_progress(self, *args, **kwargs):
        "Invoked on download progress"
        pass

    def on_complete(self, *args, **kwargs):
        "Invoked when download process was finished successfully"
        pass

    def on_error(self, *args, **kwargs):
        "Invoked when error occured during download process"
        pass

    def setting(self, name, default=None):
        "Returns specified setting name"
        return self.settings[name] if name in self.settings else default

    def log(self, message):
        # No-op by default; concrete delegates override to show output.
        pass
class ThreadProgress():
    """Polls a worker thread and dispatches progress/complete/error events.

    Events go both to callbacks registered with `on()` and, if present,
    to matching `on_<event>` methods of the delegate.
    """
    def __init__(self, thread, delegate, is_background=False):
        self.thread = thread
        self.delegate = delegate
        self.is_background = is_background
        self._callbacks = {}
        # Start polling on a timer thread so the caller is not blocked.
        threading.Timer(0, self.run).start()

    def run(self):
        # Re-schedules itself every 100ms until the worker thread dies,
        # then fires a single terminal 'error' or 'complete' event.
        if not self.thread.is_alive():
            if self.thread.exit_code != 0:
                return self.trigger('error', exit_code=self.thread.exit_code, progress=self)
            return self.trigger('complete', result=self.thread.result, progress=self)

        self.trigger('progress', progress=self)
        threading.Timer(0.1, self.run).start()

    def on(self, event_name, callback):
        """Register `callback` for `event_name`; returns self for chaining."""
        if event_name not in self._callbacks:
            self._callbacks[event_name] = []

        # `collections.Callable` was removed in Python 3.10; the built-in
        # callable() works on all supported Python versions.
        if callable(callback):
            self._callbacks[event_name].append(callback)

        return self

    def trigger(self, event_name, *args, **kwargs):
        """Fire registered callbacks, then the delegate's on_<event> hook."""
        if event_name in self._callbacks:
            for c in self._callbacks[event_name]:
                c(*args, **kwargs)

        if self.delegate and hasattr(self.delegate, 'on_%s' % event_name):
            getattr(self.delegate, 'on_%s' % event_name)(*args, **kwargs)

        return self
class BinaryNotFoundError(Exception):
    """Raised when a required command-line binary cannot be located."""
class NonCleanExitError(Exception):
    """Raised when a subprocess finishes with a non-zero return code."""

    def __init__(self, returncode):
        self.returncode = returncode

    def __str__(self):
        # Show the numeric exit status when the error is printed.
        return '%r' % (self.returncode,)
class CliDownloader():
    """Shared behaviour for downloaders that shell out to a CLI tool."""

    def __init__(self, settings):
        self.settings = settings

    def find_binary(self, name):
        """Return the full path to `name` found on PATH, or raise."""
        for directory in os.environ['PATH'].split(os.pathsep):
            candidate = os.path.join(directory, name)
            if os.path.exists(candidate):
                return candidate

        raise BinaryNotFoundError('The binary %s could not be located' % name)

    def execute(self, args):
        """Run `args`, returning combined stdout+stderr output.

        Raises NonCleanExitError (with the output attached) on a
        non-zero exit status.
        """
        proc = subprocess.Popen(args, stdin=subprocess.PIPE,
                                stdout=subprocess.PIPE,
                                stderr=subprocess.STDOUT)

        captured = proc.stdout.read()
        exit_code = proc.wait()
        if exit_code == 0:
            return captured

        failure = NonCleanExitError(exit_code)
        failure.output = captured
        raise failure
class WgetDownloader(CliDownloader):
    """Downloader that fetches URLs with the `wget` command-line tool.

    wget writes its log to a temp file (`-o`) and the payload to stdout
    (`-O -`); errors are diagnosed by grepping that log afterwards.
    """
    def __init__(self, settings):
        self.settings = settings
        # Raises BinaryNotFoundError when wget is not on PATH.
        self.wget = self.find_binary('wget')

    def clean_tmp_file(self):
        # Remove the wget log file created by the last download attempt.
        os.remove(self.tmp_file)

    def download(self, url, error_message, timeout, tries):
        """Fetch `url`, retrying up to `tries` times; returns the body or False."""
        if not self.wget:
            return False

        self.tmp_file = tempfile.NamedTemporaryFile().name
        command = [self.wget, '--connect-timeout=' + str(int(timeout)), '-o',
                   self.tmp_file, '-O', '-', '-U', 'Emmet PyV8 Loader',
                   '--no-check-certificate']

        command.append(url)

        # Proxies are passed to wget through the environment; an HTTP
        # proxy doubles as the HTTPS proxy unless one is set explicitly.
        if self.settings.get('http_proxy'):
            os.putenv('http_proxy', self.settings.get('http_proxy'))
            if not self.settings.get('https_proxy'):
                os.putenv('https_proxy', self.settings.get('http_proxy'))
        if self.settings.get('https_proxy'):
            os.putenv('https_proxy', self.settings.get('https_proxy'))

        while tries > 0:
            tries -= 1
            try:
                result = self.execute(command)
                self.clean_tmp_file()
                return result
            except NonCleanExitError as e:
                # Pull the first ERROR/failed line out of wget's log file.
                error_line = ''
                with open(self.tmp_file) as f:
                    for line in list(f):
                        if re.search('ERROR[: ]|failed: ', line):
                            error_line = line
                            break

                if e.returncode == 8:
                    # wget exit code 8: server issued an error response.
                    regex = re.compile('^.*ERROR (\d+):.*', re.S)
                    if re.sub(regex, '\\1', error_line) == '503':
                        # GitHub and BitBucket seem to rate limit via 503
                        print('%s: Downloading %s was rate limited, trying again' % (__name__, url))
                        continue
                    error_string = 'HTTP error ' + re.sub('^.*? ERROR ', '',
                                                          error_line)

                elif e.returncode == 4:
                    # wget exit code 4: network failure.
                    error_string = re.sub('^.*?failed: ', '', error_line)
                    # GitHub and BitBucket seem to time out a lot
                    if error_string.find('timed out') != -1:
                        print('%s: Downloading %s timed out, trying again' % (__name__, url))
                        continue

                else:
                    error_string = re.sub('^.*?(ERROR[: ]|failed: )', '\\1',
                                          error_line)

                error_string = re.sub('\\.?\s*\n\s*$', '', error_string)
                print('%s: %s %s downloading %s.' % (__name__, error_message,
                                                     error_string, url))
                self.clean_tmp_file()
                break
        return False
class CurlDownloader(CliDownloader):
    """Downloader that fetches URLs with the `curl` command-line tool."""
    def __init__(self, settings):
        self.settings = settings
        # Raises BinaryNotFoundError when curl is not on PATH.
        self.curl = self.find_binary('curl')

    def download(self, url, error_message, timeout, tries):
        """Fetch `url`, retrying up to `tries` times; returns the body or False."""
        if not self.curl:
            return False
        # -f: fail on HTTP errors; -sS: silent but still show errors.
        command = [self.curl, '-f', '--user-agent', 'Emmet PyV8 Loader',
                   '--connect-timeout', str(int(timeout)), '-sS']

        command.append(url)

        # Proxies are passed to curl through the environment; an HTTP
        # proxy doubles as the HTTPS proxy unless one is set explicitly.
        if self.settings.get('http_proxy'):
            os.putenv('http_proxy', self.settings.get('http_proxy'))
            if not self.settings.get('https_proxy'):
                os.putenv('HTTPS_PROXY', self.settings.get('http_proxy'))
        if self.settings.get('https_proxy'):
            os.putenv('HTTPS_PROXY', self.settings.get('https_proxy'))

        while tries > 0:
            tries -= 1
            try:
                return self.execute(command)
            except NonCleanExitError as e:
                if e.returncode == 22:
                    # curl exit 22: HTTP page not retrieved (status >= 400);
                    # the status code is the trailing number in the output.
                    code = re.sub('^.*?(\d+)\s*$', '\\1', e.output)
                    if code == '503':
                        # GitHub and BitBucket seem to rate limit via 503
                        print('%s: Downloading %s was rate limited, trying again' % (__name__, url))
                        continue
                    error_string = 'HTTP error ' + code
                elif e.returncode == 6:
                    # curl exit 6: could not resolve host.
                    error_string = 'URL error host not found'
                elif e.returncode == 28:
                    # curl exit 28: operation timed out.
                    # GitHub and BitBucket seem to time out a lot
                    print('%s: Downloading %s timed out, trying again' % (__name__, url))
                    continue
                else:
                    error_string = e.output.rstrip()

                print('%s: %s %s downloading %s.' % (__name__, error_message, error_string, url))
                break
        return False
class UrlLib2Downloader():
    """Downloader built on urllib/urllib2 (no external binaries needed).

    `url_req`/`url_err` are module-level aliases for the version-specific
    urllib request/error modules — TODO confirm against the file header.
    """
    def __init__(self, settings):
        self.settings = settings

    def download(self, url, error_message, timeout, tries):
        """Fetch `url`, retrying up to `tries` times; returns the body or False."""
        http_proxy = self.settings.get('http_proxy')
        https_proxy = self.settings.get('https_proxy')
        if http_proxy or https_proxy:
            proxies = {}
            if http_proxy:
                proxies['http'] = http_proxy
                # An HTTP proxy doubles as the HTTPS proxy unless one is
                # configured explicitly.
                if not https_proxy:
                    proxies['https'] = http_proxy
            if https_proxy:
                proxies['https'] = https_proxy
            proxy_handler = url_req.ProxyHandler(proxies)
        else:
            proxy_handler = url_req.ProxyHandler()
        handlers = [proxy_handler]

        # Certificate pinning was disabled here; kept for reference.
        # secure_url_match = re.match('^https://([^/]+)', url)
        # if secure_url_match != None:
        #     secure_domain = secure_url_match.group(1)
        #     bundle_path = self.check_certs(secure_domain, timeout)
        #     if not bundle_path:
        #         return False
        #     handlers.append(VerifiedHTTPSHandler(ca_certs=bundle_path))
        url_req.install_opener(url_req.build_opener(*handlers))

        while tries > 0:
            tries -= 1
            try:
                request = url_req.Request(url, headers={"User-Agent":
                                                        "Emmet PyV8 Loader"})
                http_file = url_req.urlopen(request, timeout=timeout)
                return http_file.read()

            except url_err.HTTPError as e:
                # Bitbucket and Github ratelimit using 503 a decent amount
                if str(e.code) == '503':
                    print('%s: Downloading %s was rate limited, trying again' % (__name__, url))
                    continue
                print('%s: %s HTTP error %s downloading %s.' % (__name__, error_message, str(e.code), url))

            except url_err.URLError as e:
                # Bitbucket and Github timeout a decent amount
                if str(e.reason) == 'The read operation timed out' or \
                        str(e.reason) == 'timed out':
                    print('%s: Downloading %s timed out, trying again' % (__name__, url))
                    continue
                print('%s: %s URL error %s downloading %s.' % (__name__, error_message, str(e.reason), url))
            break
        return False
class PyV8Loader(threading.Thread):
    """Background thread that downloads the PyV8 binary for this arch.

    Only saves the downloaded archive; loading/unloading the module is
    left to the main thread, since unloading PyV8 from a worker thread
    can crash the editor.  Progress is reported through `delegate`;
    `exit_code` is non-zero on failure and `result` carries the new sha.
    """
    def __init__(self, arch, download_path, config, delegate=None):
        self.arch = arch
        self.config = config
        self.download_path = download_path
        self.exit_code = 0
        self.result = None
        self.delegate = delegate or LoaderDelegate()
        self.is_pyv8_thread = True
        threading.Thread.__init__(self)
        self.delegate.log('Creating thread')

    def download_url(self, url, error_message):
        """Download `url` with the best available transport.

        Prefers urllib; falls back to curl, then wget, for HTTPS URLs
        when the interpreter has no SSL support.  Returns the response
        body, or False if no transport could fetch it.
        """
        # TODO add settings
        has_ssl = 'ssl' in sys.modules and hasattr(url_req, 'HTTPSHandler')
        is_ssl = re.search('^https://', url) != None
        # Initialise to None so that when neither curl nor wget exists we
        # reach the error log below instead of raising UnboundLocalError.
        downloader = None

        if (is_ssl and has_ssl) or not is_ssl:
            downloader = UrlLib2Downloader(self.delegate.settings)
        else:
            for downloader_class in [CurlDownloader, WgetDownloader]:
                try:
                    downloader = downloader_class(self.delegate.settings)
                    break
                except BinaryNotFoundError:
                    pass

        if not downloader:
            self.delegate.log('Unable to download PyV8 binary due to invalid downloader')
            return False

        timeout = self.delegate.settings.get('timeout', 60)
        # timeout = 3
        return downloader.download(url.replace(' ', '%20'), error_message, timeout, 3)

    def run(self):
        """Check the remote package index and fetch a newer binary if any."""
        # get list of available packages first
        self.delegate.log('Loading %s' % PACKAGES_URL)
        try:
            packages = self.download_url(PACKAGES_URL, 'Unable to download packages list.')
        except Exception as e:
            self.delegate.log('Unable to download file: %s' % e)
            self.exit_code = 4
            return

        if not packages:
            self.exit_code = 1
            return

        if isinstance(packages, bytes):
            packages = packages.decode('utf-8')

        files = json.loads(packages)

        # find package for current architecture
        cur_item = None
        bundle_name = 'pyv8-%s.zip' % self.arch
        for item in files:
            if bundle_name == item['name']:
                cur_item = item
                break

        if not cur_item:
            self.delegate.log('Unable to find binary for %s architecture' % self.arch)
            self.exit_code = 2
            return

        if cur_item['sha'] == self.config['last_id']:
            self.delegate.log('You have the most recent PyV8 binary')
            return

        url = 'https://raw.github.com/emmetio/pyv8-binaries/master/%s' % cur_item['name']
        self.delegate.log('Loading PyV8 binary from %s' % url)
        package = self.download_url(url, 'Unable to download package from %s' % url)
        if not package:
            self.exit_code = 3
            return

        # we should only save downloaded package and delegate module
        # loading/unloading to main thread since improper PyV8 unload
        # may cause editor crash
        try:
            os.makedirs(self.download_path)
        except Exception:
            # Directory usually exists already; any other failure will
            # surface when we write the archive below.
            pass

        # Context manager ensures the handle closes even if the write fails.
        with open(os.path.join(self.download_path, 'pack.zip'), 'wb') as fp:
            fp.write(package)

        self.result = cur_item['sha']
        # Done!
| gpl-2.0 |
jcurbelo/networkx | networkx/classes/tests/test_multigraph.py | 14 | 8638 | #!/usr/bin/env python
from nose.tools import *
import networkx
from test_graph import BaseAttrGraphTester, TestGraph
class BaseMultiGraphTester(BaseAttrGraphTester):
    # Shared assertions for multigraph flavours (MultiGraph/MultiDiGraph);
    # mixed into the concrete test classes below.  Extends the attribute
    # graph tests with checks for keyed parallel edges and self-loops.
    def test_has_edge(self):
        G=self.K3
        assert_equal(G.has_edge(0,1),True)
        assert_equal(G.has_edge(0,-1),False)
        # Multigraphs additionally accept an edge-key argument.
        assert_equal(G.has_edge(0,1,0),True)
        assert_equal(G.has_edge(0,1,1),False)

    def test_get_edge_data(self):
        G=self.K3
        # Edge data dicts are keyed by edge key; 0 is the default key.
        assert_equal(G.get_edge_data(0,1),{0:{}})
        assert_equal(G[0][1],{0:{}})
        assert_equal(G[0][1][0],{})
        assert_equal(G.get_edge_data(10,20),None)
        assert_equal(G.get_edge_data(0,1,0),{})

    def test_adjacency(self):
        G=self.K3
        assert_equal(dict(G.adjacency()),
                     {0: {1: {0:{}}, 2: {0:{}}},
                      1: {0: {0:{}}, 2: {0:{}}},
                      2: {0: {0:{}}, 1: {0:{}}}})

    def deepcopy_edge_attr(self,H,G):
        # After a deep copy, mutating G's attribute must not affect H.
        assert_equal(G[1][2][0]['foo'],H[1][2][0]['foo'])
        G[1][2][0]['foo'].append(1)
        assert_not_equal(G[1][2][0]['foo'],H[1][2][0]['foo'])

    def shallow_copy_edge_attr(self,H,G):
        # After a shallow copy, attribute objects are shared by G and H.
        assert_equal(G[1][2][0]['foo'],H[1][2][0]['foo'])
        G[1][2][0]['foo'].append(1)
        assert_equal(G[1][2][0]['foo'],H[1][2][0]['foo'])

    def same_attrdict(self, H, G):
        # same attrdict in the edgedata
        old_foo=H[1][2][0]['foo']
        H.add_edge(1,2,0,foo='baz')
        assert_equal(G.edge,H.edge)
        H.add_edge(1,2,0,foo=old_foo)
        assert_equal(G.edge,H.edge)
        # but not same edgedata dict
        H.add_edge(1,2,foo='baz')
        assert_not_equal(G.edge,H.edge)

        old_foo=H.node[0]['foo']
        H.node[0]['foo']='baz'
        assert_equal(G.node,H.node)
        H.node[0]['foo']=old_foo
        assert_equal(G.node,H.node)

    def different_attrdict(self, H, G):
        # used by graph_equal_but_different
        old_foo=H[1][2][0]['foo']
        H.add_edge(1,2,0,foo='baz')
        assert_not_equal(G.edge,H.edge)
        H.add_edge(1,2,0,foo=old_foo)
        assert_equal(G.edge,H.edge)
        HH=H.copy()
        H.add_edge(1,2,foo='baz')
        assert_not_equal(G.edge,H.edge)
        H=HH

        old_foo=H.node[0]['foo']
        H.node[0]['foo']='baz'
        assert_not_equal(G.node,H.node)
        H.node[0]['foo']=old_foo
        assert_equal(G.node,H.node)

    def test_to_undirected(self):
        G=self.K3
        self.add_attributes(G)
        H=networkx.MultiGraph(G)
        self.is_shallow_copy(H,G)
        H=G.to_undirected()
        self.is_deepcopy(H,G)

    def test_to_directed(self):
        G=self.K3
        self.add_attributes(G)
        H=networkx.MultiDiGraph(G)
        self.is_shallow_copy(H,G)
        H=G.to_directed()
        self.is_deepcopy(H,G)

    def test_selfloops(self):
        G=self.K3
        G.add_edge(0,0)
        assert_equal(list(G.nodes_with_selfloops()), [0])
        assert_equal(list(G.selfloop_edges()), [(0, 0)])
        assert_equal(list(G.selfloop_edges(data=True)), [(0, 0, {})])
        assert_equal(G.number_of_selfloops(),1)

    def test_selfloops2(self):
        G=self.K3
        # Parallel self-loops with an explicit key can be removed by key.
        G.add_edge(0,0)
        G.add_edge(0,0)
        G.add_edge(0,0,key='parallel edge')
        G.remove_edge(0,0,key='parallel edge')
        assert_equal(G.number_of_edges(0,0),2)
        G.remove_edge(0,0)
        assert_equal(G.number_of_edges(0,0),1)

    def test_edge_attr4(self):
        G=self.Graph()
        G.add_edge(1,2,key=0,data=7,spam='bar',bar='foo')
        assert_equal(list(G.edges(data=True)),
                     [(1,2,{'data':7,'spam':'bar','bar':'foo'})])
        G[1][2][0]['data']=10 # OK to set data like this
        assert_equal(list(G.edges(data=True)),
                     [(1,2,{'data':10,'spam':'bar','bar':'foo'})])

        G.edge[1][2][0]['data']=20 # another spelling, "edge"
        assert_equal(list(G.edges(data=True)),
                     [(1,2,{'data':20,'spam':'bar','bar':'foo'})])
        G.edge[1][2][0]['listdata']=[20,200]
        G.edge[1][2][0]['weight']=20
        assert_equal(list(G.edges(data=True)),
                     [(1,2,{'data':20,'spam':'bar',
                            'bar':'foo','listdata':[20,200],'weight':20})])
class TestMultiGraph(BaseMultiGraphTester,TestGraph):
    # Concrete tests for networkx.MultiGraph, combining the shared
    # multigraph assertions above with the base Graph test suite.
    def setUp(self):
        self.Graph=networkx.MultiGraph
        # build K3
        ed1,ed2,ed3 = ({0:{}},{0:{}},{0:{}})
        self.k3adj={0: {1: ed1, 2: ed2},
                    1: {0: ed1, 2: ed3},
                    2: {0: ed2, 1: ed3}}
        self.k3edges=[(0, 1), (0, 2), (1, 2)]
        self.k3nodes=[0, 1, 2]
        self.K3=self.Graph()
        self.K3.adj = self.K3.edge = self.k3adj
        self.K3.node={}
        self.K3.node[0]={}
        self.K3.node[1]={}
        self.K3.node[2]={}

    def test_data_input(self):
        G=self.Graph(data={1:[2],2:[1]}, name="test")
        assert_equal(G.name,"test")
        assert_equal(sorted(G.adj.items()),[(1, {2: {0:{}}}), (2, {1: {0:{}}})])

    def test_getitem(self):
        G=self.K3
        assert_equal(G[0],{1: {0:{}}, 2: {0:{}}})
        assert_raises(KeyError, G.__getitem__, 'j')
        assert_raises((TypeError,networkx.NetworkXError), G.__getitem__, ['A'])

    def test_remove_node(self):
        G=self.K3
        G.remove_node(0)
        assert_equal(G.adj,{1:{2:{0:{}}},2:{1:{0:{}}}})
        assert_raises((KeyError,networkx.NetworkXError), G.remove_node,-1)

    def test_add_edge(self):
        G=self.Graph()
        G.add_edge(0,1)
        assert_equal(G.adj,{0: {1: {0:{}}}, 1: {0: {0:{}}}})
        G=self.Graph()
        G.add_edge(*(0,1))
        assert_equal(G.adj,{0: {1: {0:{}}}, 1: {0: {0:{}}}})

    def test_add_edge_conflicting_key(self):
        # An auto-generated key must not clash with an explicit key.
        G=self.Graph()
        G.add_edge(0,1,key=1)
        G.add_edge(0,1)
        assert_equal(G.number_of_edges(),2)
        G=self.Graph()
        G.add_edges_from([(0,1,1,{})])
        G.add_edges_from([(0,1)])
        assert_equal(G.number_of_edges(),2)

    def test_add_edges_from(self):
        G=self.Graph()
        G.add_edges_from([(0,1),(0,1,{'weight':3})])
        assert_equal(G.adj,{0: {1: {0:{},1:{'weight':3}}},
                            1: {0: {0:{},1:{'weight':3}}}})
        G.add_edges_from([(0,1),(0,1,{'weight':3})],weight=2)
        assert_equal(G.adj,{0: {1: {0:{},1:{'weight':3},
                                    2:{'weight':2},3:{'weight':3}}},
                            1: {0: {0:{},1:{'weight':3},
                                    2:{'weight':2},3:{'weight':3}}}})
        # too few in tuple
        assert_raises(networkx.NetworkXError, G.add_edges_from,[(0,)])
        # too many in tuple
        assert_raises(networkx.NetworkXError, G.add_edges_from,[(0,1,2,3,4)])
        assert_raises(TypeError, G.add_edges_from,[0]) # not a tuple

    def test_remove_edge(self):
        G=self.K3
        G.remove_edge(0,1)
        assert_equal(G.adj,{0: {2: {0: {}}},
                            1: {2: {0: {}}},
                            2: {0: {0: {}},
                                1: {0: {}}}})
        assert_raises((KeyError,networkx.NetworkXError), G.remove_edge,-1,0)
        assert_raises((KeyError,networkx.NetworkXError), G.remove_edge,0,2,
                      key=1)

    def test_remove_edges_from(self):
        G=self.K3.copy()
        G.remove_edges_from([(0,1)])
        assert_equal(G.adj,{0:{2:{0:{}}},1:{2:{0:{}}},2:{0:{0:{}},1:{0:{}}}})
        G.remove_edges_from([(0,0)]) # silent fail
        self.K3.add_edge(0,1)
        G=self.K3.copy()
        G.remove_edges_from(list(G.edges(data=True,keys=True)))
        assert_equal(G.adj,{0:{},1:{},2:{}})
        G=self.K3.copy()
        G.remove_edges_from(list(G.edges(data=False,keys=True)))
        assert_equal(G.adj,{0:{},1:{},2:{}})
        G=self.K3.copy()
        G.remove_edges_from(list(G.edges(data=False,keys=False)))
        assert_equal(G.adj,{0:{},1:{},2:{}})
        G=self.K3.copy()
        G.remove_edges_from([(0,1,0),(0,2,0,{}),(1,2)])
        assert_equal(G.adj,{0:{1:{1:{}}},1:{0:{1:{}}},2:{}})

    def test_remove_multiedge(self):
        G=self.K3
        G.add_edge(0,1,key='parallel edge')
        G.remove_edge(0,1,key='parallel edge')
        assert_equal(G.adj,{0: {1: {0:{}}, 2: {0:{}}},
                            1: {0: {0:{}}, 2: {0:{}}},
                            2: {0: {0:{}}, 1: {0:{}}}})
        G.remove_edge(0,1)
        assert_equal(G.adj,{0:{2:{0:{}}},1:{2:{0:{}}},2:{0:{0:{}},1:{0:{}}}})
        assert_raises((KeyError,networkx.NetworkXError), G.remove_edge,-1,0)
| bsd-3-clause |
kostaspl/SpiderMonkey38 | python/mozbuild/mozbuild/test/test_mozinfo.py | 4 | 8470 | #!/usr/bin/env python
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
import json
import os
import tempfile
import unittest
from StringIO import StringIO
import mozunit
from mozbuild.backend.configenvironment import ConfigEnvironment
from mozbuild.mozinfo import (
build_dict,
write_mozinfo,
)
from mozfile.mozfile import NamedTemporaryFile
class Base(object):
    """Mixin supplying a ConfigEnvironment factory for the tests below."""

    def _config(self, substs=None):
        """Return a ConfigEnvironment rooted at this test file's directory.

        `substs` defaults to an empty dict; a None sentinel replaces the
        original mutable-default-argument, which would be shared across
        every call.
        """
        if substs is None:
            substs = {}
        d = os.path.dirname(__file__)
        return ConfigEnvironment(d, d, substs=substs)
class TestBuildDict(unittest.TestCase, Base):
    # Exercises mozinfo.build_dict: mapping configure substitutions
    # (OS_TARGET/TARGET_CPU/MOZ_WIDGET_TOOLKIT, etc.) onto the mozinfo
    # keys os/processor/toolkit/bits/debug/crashreporter.
    def test_missing(self):
        """
        Test that missing required values raises.
        """
        with self.assertRaises(Exception):
            build_dict(self._config(substs=dict(OS_TARGET='foo')))

        with self.assertRaises(Exception):
            build_dict(self._config(substs=dict(TARGET_CPU='foo')))

        with self.assertRaises(Exception):
            build_dict(self._config(substs=dict(MOZ_WIDGET_TOOLKIT='foo')))

    def test_win(self):
        d = build_dict(self._config(dict(
            OS_TARGET='WINNT',
            TARGET_CPU='i386',
            MOZ_WIDGET_TOOLKIT='windows',
        )))
        self.assertEqual('win', d['os'])
        self.assertEqual('x86', d['processor'])
        self.assertEqual('windows', d['toolkit'])
        self.assertEqual(32, d['bits'])

    def test_linux(self):
        d = build_dict(self._config(dict(
            OS_TARGET='Linux',
            TARGET_CPU='i386',
            MOZ_WIDGET_TOOLKIT='gtk2',
        )))
        self.assertEqual('linux', d['os'])
        self.assertEqual('x86', d['processor'])
        self.assertEqual('gtk2', d['toolkit'])
        self.assertEqual(32, d['bits'])

        d = build_dict(self._config(dict(
            OS_TARGET='Linux',
            TARGET_CPU='x86_64',
            MOZ_WIDGET_TOOLKIT='gtk2',
        )))
        self.assertEqual('linux', d['os'])
        self.assertEqual('x86_64', d['processor'])
        self.assertEqual('gtk2', d['toolkit'])
        self.assertEqual(64, d['bits'])

    def test_mac(self):
        d = build_dict(self._config(dict(
            OS_TARGET='Darwin',
            TARGET_CPU='i386',
            MOZ_WIDGET_TOOLKIT='cocoa',
        )))
        self.assertEqual('mac', d['os'])
        self.assertEqual('x86', d['processor'])
        self.assertEqual('cocoa', d['toolkit'])
        self.assertEqual(32, d['bits'])

        d = build_dict(self._config(dict(
            OS_TARGET='Darwin',
            TARGET_CPU='x86_64',
            MOZ_WIDGET_TOOLKIT='cocoa',
        )))
        self.assertEqual('mac', d['os'])
        self.assertEqual('x86_64', d['processor'])
        self.assertEqual('cocoa', d['toolkit'])
        self.assertEqual(64, d['bits'])

    def test_mac_universal(self):
        # Universal builds report a combined processor and no bit width.
        d = build_dict(self._config(dict(
            OS_TARGET='Darwin',
            TARGET_CPU='i386',
            MOZ_WIDGET_TOOLKIT='cocoa',
            UNIVERSAL_BINARY='1',
        )))
        self.assertEqual('mac', d['os'])
        self.assertEqual('universal-x86-x86_64', d['processor'])
        self.assertEqual('cocoa', d['toolkit'])
        self.assertFalse('bits' in d)

        d = build_dict(self._config(dict(
            OS_TARGET='Darwin',
            TARGET_CPU='x86_64',
            MOZ_WIDGET_TOOLKIT='cocoa',
            UNIVERSAL_BINARY='1',
        )))
        self.assertEqual('mac', d['os'])
        self.assertEqual('universal-x86-x86_64', d['processor'])
        self.assertEqual('cocoa', d['toolkit'])
        self.assertFalse('bits' in d)

    def test_android(self):
        d = build_dict(self._config(dict(
            OS_TARGET='Android',
            TARGET_CPU='arm',
            MOZ_WIDGET_TOOLKIT='android',
        )))
        self.assertEqual('android', d['os'])
        self.assertEqual('arm', d['processor'])
        self.assertEqual('android', d['toolkit'])
        self.assertEqual(32, d['bits'])

    def test_x86(self):
        """
        Test that various i?86 values => x86.
        """
        d = build_dict(self._config(dict(
            OS_TARGET='WINNT',
            TARGET_CPU='i486',
            MOZ_WIDGET_TOOLKIT='windows',
        )))
        self.assertEqual('x86', d['processor'])

        d = build_dict(self._config(dict(
            OS_TARGET='WINNT',
            TARGET_CPU='i686',
            MOZ_WIDGET_TOOLKIT='windows',
        )))
        self.assertEqual('x86', d['processor'])

    def test_arm(self):
        """
        Test that all arm CPU architectures => arm.
        """
        d = build_dict(self._config(dict(
            OS_TARGET='Linux',
            TARGET_CPU='arm',
            MOZ_WIDGET_TOOLKIT='gtk2',
        )))
        self.assertEqual('arm', d['processor'])

        d = build_dict(self._config(dict(
            OS_TARGET='Linux',
            TARGET_CPU='armv7',
            MOZ_WIDGET_TOOLKIT='gtk2',
        )))
        self.assertEqual('arm', d['processor'])

    def test_unknown(self):
        """
        Test that unknown values pass through okay.
        """
        d = build_dict(self._config(dict(
            OS_TARGET='RandOS',
            TARGET_CPU='cptwo',
            MOZ_WIDGET_TOOLKIT='foobar',
        )))
        self.assertEqual("randos", d["os"])
        self.assertEqual("cptwo", d["processor"])
        self.assertEqual("foobar", d["toolkit"])
        # unknown CPUs should not get a bits value
        self.assertFalse("bits" in d)

    def test_debug(self):
        """
        Test that debug values are properly detected.
        """
        d = build_dict(self._config(dict(
            OS_TARGET='Linux',
            TARGET_CPU='i386',
            MOZ_WIDGET_TOOLKIT='gtk2',
        )))
        self.assertEqual(False, d['debug'])

        d = build_dict(self._config(dict(
            OS_TARGET='Linux',
            TARGET_CPU='i386',
            MOZ_WIDGET_TOOLKIT='gtk2',
            MOZ_DEBUG='1',
        )))
        self.assertEqual(True, d['debug'])

    def test_crashreporter(self):
        """
        Test that crashreporter values are properly detected.
        """
        d = build_dict(self._config(dict(
            OS_TARGET='Linux',
            TARGET_CPU='i386',
            MOZ_WIDGET_TOOLKIT='gtk2',
        )))
        self.assertEqual(False, d['crashreporter'])

        d = build_dict(self._config(dict(
            OS_TARGET='Linux',
            TARGET_CPU='i386',
            MOZ_WIDGET_TOOLKIT='gtk2',
            MOZ_CRASHREPORTER='1',
        )))
        self.assertEqual(True, d['crashreporter'])
class TestWriteMozinfo(unittest.TestCase, Base):
    """
    Test the write_mozinfo function.
    """
    def setUp(self):
        # Create an empty temp file for write_mozinfo to fill in.
        fd, self.f = tempfile.mkstemp()
        os.close(fd)

    def tearDown(self):
        os.unlink(self.f)

    def test_basic(self):
        """
        Test that writing to a file produces correct output.
        """
        c = self._config(dict(
            OS_TARGET='WINNT',
            TARGET_CPU='i386',
            MOZ_WIDGET_TOOLKIT='windows',
        ))
        tempdir = tempfile.tempdir
        c.topsrcdir = tempdir
        with NamedTemporaryFile(dir=os.path.normpath(c.topsrcdir)) as mozconfig:
            mozconfig.write('unused contents')
            mozconfig.flush()

            write_mozinfo(self.f, c, {'MOZCONFIG': mozconfig.name})
            with open(self.f) as f:
                d = json.load(f)
                self.assertEqual('win', d['os'])
                self.assertEqual('x86', d['processor'])
                self.assertEqual('windows', d['toolkit'])
                self.assertEqual(tempdir, d['topsrcdir'])
                self.assertEqual(mozconfig.name, d['mozconfig'])
                self.assertEqual(32, d['bits'])

    def test_fileobj(self):
        """
        Test that writing to a file-like object produces correct output.
        """
        s = StringIO()
        c = self._config(dict(
            OS_TARGET='WINNT',
            TARGET_CPU='i386',
            MOZ_WIDGET_TOOLKIT='windows',
        ))
        write_mozinfo(s, c)
        d = json.loads(s.getvalue())
        self.assertEqual('win', d['os'])
        self.assertEqual('x86', d['processor'])
        self.assertEqual('windows', d['toolkit'])
        self.assertEqual(32, d['bits'])
# Allow running this test module directly (outside the test harness).
if __name__ == '__main__':
    mozunit.main()
| mpl-2.0 |
jtrobec/pants | contrib/go/tests/python/pants_test/contrib/go/tasks/test_go_task.py | 4 | 2527 | # coding=utf-8
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
from pants_test.tasks.task_test_base import TaskTestBase
from pants.contrib.go.tasks.go_task import GoTask
class ImportOracleTest(TaskTestBase):
    # Verifies the GoTask import oracle: knowledge of the Go standard
    # library and import listings for individual packages.

    class ImportTask(GoTask):
        # Minimal concrete GoTask; only exists so TaskTestBase can
        # instantiate something that exposes import_oracle.
        def execute(self):
            raise NotImplementedError()

    @classmethod
    def task_type(cls):
        return cls.ImportTask

    def setUp(self):
        super(ImportOracleTest, self).setUp()
        task = self.create_task(self.context())
        self.import_oracle = task.import_oracle

    def test_go_stdlib(self):
        # Stdlib membership: real stdlib packages in, cgo pseudo-package
        # and third-party/local paths out.
        self.assertIn('archive/tar', self.import_oracle.go_stdlib)
        self.assertIn('bufio', self.import_oracle.go_stdlib)
        self.assertIn('fmt', self.import_oracle.go_stdlib)
        self.assertNotIn('C', self.import_oracle.go_stdlib)
        self.assertNotIn('github.com/bitly/go-simplejson', self.import_oracle.go_stdlib)
        self.assertNotIn('local/pkg', self.import_oracle.go_stdlib)

    def test_is_go_internal_import(self):
        # Unlike go_stdlib membership, 'C' counts as internal here.
        self.assertTrue(self.import_oracle.is_go_internal_import('archive/tar'))
        self.assertTrue(self.import_oracle.is_go_internal_import('bufio'))
        self.assertTrue(self.import_oracle.is_go_internal_import('fmt'))
        self.assertTrue(self.import_oracle.is_go_internal_import('C'))
        self.assertFalse(self.import_oracle.is_go_internal_import('github.com/bitly/go-simplejson'))
        self.assertFalse(self.import_oracle.is_go_internal_import('local/pkg'))

    def test_list_imports(self):
        import_listing = self.import_oracle.list_imports('archive/tar')
        self.assertEqual('tar', import_listing.pkg_name)
        self.assertTrue(len(import_listing.imports) > 0,
                        'Expected the `archive/tar` package to have at least one import')
        self.assertTrue(set(import_listing.imports).issubset(self.import_oracle.go_stdlib),
                        'All imports for any stdlib package should also be internal to the stdlib')
        self.assertTrue(len(import_listing.test_imports) > 0,
                        'Expected the `archive/tar` package to have at least 1 test that has an import')
        self.assertTrue(set(import_listing.test_imports).issubset(self.import_oracle.go_stdlib),
                        'All imports for any stdlib package (including its tests) should also be '
                        'internal to the stdlib')
| apache-2.0 |
mikalstill/nova | nova/scheduler/filters/ram_filter.py | 1 | 4044 | # Copyright (c) 2011 OpenStack Foundation
# Copyright (c) 2012 Cloudscaling
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log as logging
from nova.scheduler import filters
from nova.scheduler.filters import utils
LOG = logging.getLogger(__name__)
class BaseRamFilter(filters.BaseHostFilter):
    """Common RAM-availability check; subclasses supply the overcommit ratio."""

    RUN_ON_REBUILD = False

    def _get_ram_allocation_ratio(self, host_state, spec_obj):
        # Subclasses decide where the ratio comes from (global config or
        # per-aggregate metadata).
        raise NotImplementedError

    def host_passes(self, host_state, spec_obj):
        """Only return hosts with sufficient available RAM."""
        wanted_mb = spec_obj.memory_mb
        free_mb = host_state.free_ram_mb
        total_mb = host_state.total_usable_ram_mb

        # Do not allow an instance to overcommit against itself, only against
        # other instances.
        if total_mb < wanted_mb:
            LOG.debug("%(host_state)s does not have %(requested_ram)s MB "
                      "usable ram before overcommit, it only has "
                      "%(usable_ram)s MB.",
                      {'host_state': host_state,
                       'requested_ram': wanted_mb,
                       'usable_ram': total_mb})
            return False

        overcommit_ratio = self._get_ram_allocation_ratio(host_state,
                                                          spec_obj)

        limit_mb = total_mb * overcommit_ratio
        consumed_mb = total_mb - free_mb
        available_mb = limit_mb - consumed_mb
        if available_mb < wanted_mb:
            LOG.debug("%(host_state)s does not have %(requested_ram)s MB "
                      "usable ram, it only has %(usable_ram)s MB usable ram.",
                      {'host_state': host_state,
                       'requested_ram': wanted_mb,
                       'usable_ram': available_mb})
            return False

        # save oversubscription limit for compute node to test against:
        host_state.limits['memory_mb'] = limit_mb
        return True
class RamFilter(BaseRamFilter):
    """Ram Filter with over subscription flag."""

    def _get_ram_allocation_ratio(self, host_state, spec_obj):
        # Global ratio taken straight from the host state.
        return host_state.ram_allocation_ratio

    def __init__(self):
        super(RamFilter, self).__init__()
        LOG.warning(
            'The RamFilter is deprecated since the 19.0.0 Stein release. '
            'MEMORY_MB filtering is performed natively using the Placement '
            'service when using the filter_scheduler driver. Furthermore, '
            'enabling RamFilter may incorrectly filter out baremetal nodes '
            'which must be scheduled using custom resource classes.')
class AggregateRamFilter(BaseRamFilter):
    """AggregateRamFilter with per-aggregate ram subscription flag.

    Fall back to global ram_allocation_ratio if no per-aggregate setting found.
    """

    def _get_ram_allocation_ratio(self, host_state, spec_obj):
        # Collect every ram_allocation_ratio declared by aggregates this
        # host belongs to; validate/convert them, falling back to the
        # host-wide ratio when the metadata is unusable.
        per_aggregate = utils.aggregate_values_from_key(
            host_state, 'ram_allocation_ratio')

        try:
            return utils.validate_num_values(
                per_aggregate, host_state.ram_allocation_ratio, cast_to=float)
        except ValueError as e:
            LOG.warning("Could not decode ram_allocation_ratio: '%s'", e)
            return host_state.ram_allocation_ratio
| apache-2.0 |
ycaihua/kbengine | kbe/res/scripts/common/Lib/site-packages/pip/locations.py | 390 | 6202 | """Locations where we look for configs, install stuff, etc"""
import sys
import site
import os
import tempfile
from distutils.command.install import install, SCHEME_KEYS
import getpass
from pip.backwardcompat import get_python_lib, get_path_uid, user_site
import pip.exceptions
DELETE_MARKER_MESSAGE = '''\
This file is placed here by pip to indicate the source was put
here by pip.

Once this package is successfully installed this source code will be
deleted (unless you remove this file).
'''
PIP_DELETE_MARKER_FILENAME = 'pip-delete-this-directory.txt'


def write_delete_marker_file(directory):
    """
    Write the pip delete marker file into this directory.
    """
    filepath = os.path.join(directory, PIP_DELETE_MARKER_FILENAME)
    # Context manager guarantees the handle is closed even if the write
    # fails (the original open/write/close leaked the handle on error).
    with open(filepath, 'w') as marker_fp:
        marker_fp.write(DELETE_MARKER_MESSAGE)
def running_under_virtualenv():
    """
    Return True if we're running inside a virtualenv, False otherwise.
    """
    # Classic virtualenv sets sys.real_prefix; PEP 405 venvs make
    # sys.base_prefix differ from sys.prefix.
    if hasattr(sys, 'real_prefix'):
        return True
    return sys.prefix != getattr(sys, "base_prefix", sys.prefix)
def virtualenv_no_global():
    """
    Return True if in a venv and no system site packages.
    """
    # this mirrors the logic in virtualenv.py for locating the
    # no-global-site-packages.txt file
    site_mod_dir = os.path.dirname(os.path.abspath(site.__file__))
    no_global_file = os.path.join(site_mod_dir, 'no-global-site-packages.txt')
    if running_under_virtualenv() and os.path.isfile(no_global_file):
        return True
    # Previously the function fell off the end and returned None here;
    # return an explicit False so callers always receive a bool.
    return False
def __get_username():
    """ Returns the effective username of the current process. """
    # Windows has no effective uid; getpass inspects the environment.
    if sys.platform == 'win32':
        return getpass.getuser()
    # POSIX: resolve the effective uid through the password database.
    import pwd
    entry = pwd.getpwuid(os.geteuid())
    return entry.pw_name
def _get_build_prefix():
    """ Returns a safe build_prefix """
    # Per-user temp dir so concurrent users on one machine don't collide.
    path = os.path.join(tempfile.gettempdir(), 'pip_build_%s' %
                        __get_username())
    if sys.platform == 'win32':
        """ on windows(tested on 7) temp dirs are isolated """
        return path
    try:
        os.mkdir(path)
        write_delete_marker_file(path)
    except OSError:
        # Directory already exists (or mkdir failed) — verify we own it
        # and that it is not a symlink before trusting it.
        file_uid = None
        try:
            # raises OSError for symlinks
            # https://github.com/pypa/pip/pull/935#discussion_r5307003
            file_uid = get_path_uid(path)
        except OSError:
            file_uid = None

        if file_uid != os.geteuid():
            msg = "The temporary folder for building (%s) is either not owned by you, or is a symlink." \
                % path
            print (msg)
            print("pip will not work until the temporary folder is " + \
                "either deleted or is a real directory owned by your user account.")
            raise pip.exceptions.InstallationError(msg)
    return path
# Module-level path configuration: compute build/src prefixes and the
# per-platform bin/config/log locations used throughout pip.
if running_under_virtualenv():
    build_prefix = os.path.join(sys.prefix, 'build')
    src_prefix = os.path.join(sys.prefix, 'src')
else:
    # Note: intentionally NOT using mkdtemp
    # See https://github.com/pypa/pip/issues/906 for plan to move to mkdtemp
    build_prefix = _get_build_prefix()

    ## FIXME: keep src in cwd for now (it is not a temporary folder)
    try:
        src_prefix = os.path.join(os.getcwd(), 'src')
    except OSError:
        # In case the current working directory has been renamed or deleted
        sys.exit("The folder you are executing pip from can no longer be found.")

# under Mac OS X + virtualenv sys.prefix is not properly resolved
# it is something like /path/to/python/bin/..
# Note: using realpath due to tmp dirs on OSX being symlinks
build_prefix = os.path.abspath(os.path.realpath(build_prefix))
src_prefix = os.path.abspath(src_prefix)

# FIXME doesn't account for venv linked to global site-packages
site_packages = get_python_lib()
user_dir = os.path.expanduser('~')
if sys.platform == 'win32':
    bin_py = os.path.join(sys.prefix, 'Scripts')
    bin_user = os.path.join(user_site, 'Scripts') if user_site else None
    # buildout uses 'bin' on Windows too?
    if not os.path.exists(bin_py):
        bin_py = os.path.join(sys.prefix, 'bin')
        bin_user = os.path.join(user_site, 'bin') if user_site else None
    default_storage_dir = os.path.join(user_dir, 'pip')
    default_config_file = os.path.join(default_storage_dir, 'pip.ini')
    default_log_file = os.path.join(default_storage_dir, 'pip.log')
else:
    bin_py = os.path.join(sys.prefix, 'bin')
    bin_user = os.path.join(user_site, 'bin') if user_site else None
    default_storage_dir = os.path.join(user_dir, '.pip')
    default_config_file = os.path.join(default_storage_dir, 'pip.conf')
    default_log_file = os.path.join(default_storage_dir, 'pip.log')

    # Forcing to use /usr/local/bin for standard Mac OS X framework installs
    # Also log to ~/Library/Logs/ for use with the Console.app log viewer
    if sys.platform[:6] == 'darwin' and sys.prefix[:16] == '/System/Library/':
        bin_py = '/usr/local/bin'
        default_log_file = os.path.join(user_dir, 'Library/Logs/pip.log')
def distutils_scheme(dist_name, user=False, home=None, root=None):
    """
    Return a distutils install scheme.

    :param dist_name: name of the distribution the scheme is computed for.
    :param user: if true, use the per-user install scheme.
    :param home: alternate home directory for a "home" scheme install.
    :param root: alternate root directory; prepended to every path in the
        returned scheme when not None.
    :return: dict mapping each key in SCHEME_KEYS to its install path.
    """
    from distutils.dist import Distribution

    scheme = {}
    d = Distribution({'name': dist_name})
    d.parse_config_files()
    i = d.get_command_obj('install', create=True)
    # NOTE: setting user or home has the side-effect of creating the home dir or
    # user base for installations during finalize_options()
    # ideally, we'd prefer a scheme class that has no side-effects.
    i.user = user or i.user
    i.home = home or i.home
    i.root = root or i.root
    i.finalize_options()
    for key in SCHEME_KEYS:
        scheme[key] = getattr(i, 'install_' + key)

    if running_under_virtualenv():
        # Build "pythonX.Y" from version_info rather than sys.version[:3],
        # which breaks for two-digit minor versions (Python 3.10 -> "3.1").
        scheme['headers'] = os.path.join(
            sys.prefix,
            'include',
            'site',
            'python%d.%d' % sys.version_info[:2],
            dist_name,
        )

    if root is not None:
        # Re-root the headers path: strip the leading separator so that
        # os.path.join concatenates instead of discarding `root`.
        scheme["headers"] = os.path.join(
            root,
            os.path.abspath(scheme["headers"])[1:],
        )

    return scheme
| lgpl-3.0 |
CankingApp/cankingapp.github.io | node_modules/node-gyp/gyp/tools/pretty_vcproj.py | 2637 | 9586 | #!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Make the format of a vcproj really pretty.
This script normalizes and sorts an XML file. It also fetches all the properties
inside linked vsprops files and includes them explicitly in the vcproj.
It outputs the resulting XML to stdout.
"""
__author__ = 'nsylvain (Nicolas Sylvain)'
import os
import sys
from xml.dom.minidom import parse
from xml.dom.minidom import Node
# Textual replacements (key=value pairs from the command line) applied to
# every filename before it is resolved to an absolute path.
REPLACEMENTS = dict()
# The raw argv of the current invocation; populated by main().
ARGUMENTS = None
class CmpTuple(object):
  """Comparator for 2-tuples, ordering them by their first element only."""

  def __call__(self, lhs, rhs):
    # Only the leading element of each tuple participates in the ordering
    # (here: the attribute name of a (name, value) pair).
    return cmp(lhs[0], rhs[0])
class CmpNode(object):
  """Comparator ordering XML nodes by a flattened textual key."""

  def __call__(self, lhs, rhs):
    return cmp(self._node_key(lhs), self._node_key(rhs))

  def _node_key(self, node):
    """Build the comparison key: tag name, node value, Name attr, all attrs."""
    pieces = ["node", node.nodeName]
    if node.nodeValue:
      pieces.append(node.nodeValue)
    if node.attributes:
      # Nodes with the same tag sort primarily on their "Name" attribute,
      # when present.
      pieces.append(node.getAttribute("Name"))
      # Then on every (attribute, value) pair, in attribute-name order.
      for (attr_name, attr_value) in sorted(node.attributes.items()):
        pieces.append(attr_name)
        pieces.append(attr_value)
    return "".join(pieces)
def PrettyPrintNode(node, indent=0):
  # Recursively pretty-print |node| and its children to stdout with
  # |indent| leading spaces per nesting level, emitting attributes one per
  # line in sorted order. (Python 2 only: uses print statements.)
  if node.nodeType == Node.TEXT_NODE:
    if node.data.strip():
      print '%s%s' % (' '*indent, node.data.strip())
    return

  if node.childNodes:
    node.normalize()

  # Get the number of attributes
  attr_count = 0
  if node.attributes:
    attr_count = node.attributes.length

  # Print the main tag
  if attr_count == 0:
    print '%s<%s>' % (' '*indent, node.nodeName)
  else:
    print '%s<%s' % (' '*indent, node.nodeName)

    all_attributes = []
    for (name, value) in node.attributes.items():
      all_attributes.append((name, value))
      # NOTE: sorting inside the loop is redundant (one final sort would
      # suffice) but produces the same final ordering.
      all_attributes.sort(CmpTuple())
    for (name, value) in all_attributes:
      print '%s %s="%s"' % (' '*indent, name, value)
    print '%s>' % (' '*indent)
  if node.nodeValue:
    print '%s %s' % (' '*indent, node.nodeValue)

  for sub_node in node.childNodes:
    PrettyPrintNode(sub_node, indent=indent+2)
  print '%s</%s>' % (' '*indent, node.nodeName)
def FlattenFilter(node):
  """Return the children of |node|, recursively expanding nested Filters."""
  # The special "_excluded_files" filter (and everything under it) is
  # dropped entirely.
  if node.attributes and node.getAttribute('Name') == '_excluded_files':
    return []

  flattened = []
  for child in node.childNodes:
    if child.nodeName != 'Filter':
      flattened.append(child)
    else:
      flattened.extend(FlattenFilter(child))
  return flattened
def FixFilenames(filenames, current_directory):
  """Return |filenames| with REPLACEMENTS applied and paths made absolute.

  Empty entries are skipped. Entries starting with '$' (VS macros) are kept
  verbatim; everything else is resolved relative to |current_directory|.
  """
  fixed = []
  for filename in filenames:
    if not filename:
      continue
    for key in REPLACEMENTS:
      filename = filename.replace(key, REPLACEMENTS[key])
    # Resolve relative paths against the project's own directory by
    # switching the process cwd before calling abspath.
    os.chdir(current_directory)
    filename = filename.strip('"\' ')
    if filename.startswith('$'):
      fixed.append(filename)
    else:
      fixed.append(os.path.abspath(filename))
  return fixed
def AbsoluteNode(node):
  """Make all path-like attributes we know about on |node| absolute."""
  if not node.attributes:
    return

  # Attributes whose values are (semicolon-separated) paths to fix up.
  path_attributes = ('InheritedPropertySheets', 'RelativePath',
                     'AdditionalIncludeDirectories',
                     'IntermediateDirectory', 'OutputDirectory',
                     'AdditionalLibraryDirectories')

  for (name, value) in node.attributes.items():
    if name in path_attributes:
      fixed = FixFilenames(value.split(';'), os.path.dirname(ARGUMENTS[1]))
      node.setAttribute(name, ';'.join(fixed))
    # Attributes that were empty to begin with are removed outright.
    if not value:
      node.removeAttribute(name)
def CleanupVcproj(node):
  """For each sub node, we call recursively this function.

  Normalizes whitespace, sorts and dedups semicolon-separated attribute
  values, flattens Filter children, and re-inserts children in sorted
  order (dropping empty Tool nodes and UserMacro nodes).
  """
  for sub_node in node.childNodes:
    AbsoluteNode(sub_node)
    CleanupVcproj(sub_node)

  # Normalize the node, and remove all extranous whitespaces.
  for sub_node in node.childNodes:
    if sub_node.nodeType == Node.TEXT_NODE:
      sub_node.data = sub_node.data.replace("\r", "")
      sub_node.data = sub_node.data.replace("\n", "")
      sub_node.data = sub_node.data.rstrip()

  # Fix all the semicolon separated attributes to be sorted, and we also
  # remove the dups.
  if node.attributes:
    for (name, value) in node.attributes.items():
      sorted_list = sorted(value.split(';'))
      unique_list = []
      for i in sorted_list:
        if not unique_list.count(i):
          unique_list.append(i)
      node.setAttribute(name, ';'.join(unique_list))
      # Attributes that were empty to begin with are removed entirely.
      if not value:
        node.removeAttribute(name)

  if node.childNodes:
    node.normalize()

  # For each node, take a copy, and remove it from the list.
  node_array = []
  while node.childNodes and node.childNodes[0]:
    # Take a copy of the node and remove it from the list.
    current = node.childNodes[0]
    node.removeChild(current)

    # If the child is a filter, we want to append all its children
    # to this same list.
    if current.nodeName == 'Filter':
      node_array.extend(FlattenFilter(current))
    else:
      node_array.append(current)

  # Sort the list.
  node_array.sort(CmpNode())

  # Insert the nodes in the correct order.
  for new_node in node_array:
    # But don't append empty tool node.
    if new_node.nodeName == 'Tool':
      # A single attribute means only the "Name" attribute is present.
      if new_node.attributes and new_node.attributes.length == 1:
        # This one was empty.
        continue
    if new_node.nodeName == 'UserMacro':
      continue
    node.appendChild(new_node)
def GetConfiguationNodes(vcproj):
  """Return every Configuration node under |vcproj|'s Configurations element.

  (The historical misspelling in the name is kept for callers.)
  """
  #TODO(nsylvain): Find a better way to navigate the xml.
  return [config
          for child in vcproj.childNodes
          if child.nodeName == "Configurations"
          for config in child.childNodes
          if config.nodeName == "Configuration"]
def GetChildrenVsprops(filename):
  """Return the absolute paths of property sheets inherited by |filename|."""
  dom = parse(filename)
  if not dom.documentElement.attributes:
    # No attributes at all means no InheritedPropertySheets either.
    return []
  vsprops = dom.documentElement.getAttribute('InheritedPropertySheets')
  return FixFilenames(vsprops.split(';'), os.path.dirname(filename))
def SeekToNode(node1, child2):
  """Find the child of |node1| matching |child2|'s tag and Name attribute.

  Returns None when |child2| is a text node, has no Name attribute to
  match on, or when |node1| has no corresponding child.
  """
  # A text node does not have properties.
  if child2.nodeType == Node.TEXT_NODE:
    return None

  # Without a Name attribute there is nothing to merge on.
  wanted_name = child2.getAttribute("Name")
  if not wanted_name:
    return None

  for candidate in node1.childNodes:
    if (candidate.nodeName == child2.nodeName and
        candidate.getAttribute("Name") == wanted_name):
      return candidate

  # No match. We give up.
  return None
def MergeAttributes(node1, node2):
  """Merge |node2|'s attributes into |node1|, concatenating differing values."""
  # No attributes to merge?
  if not node2.attributes:
    return

  for (name, incoming) in node2.attributes.items():
    # The 'Name' attribute identifies the node and must not be merged.
    if name == 'Name':
      continue
    existing = node1.getAttribute(name)
    if not existing:
      # Attribute missing on node1: copy it over verbatim.
      node1.setAttribute(name, incoming)
    elif existing != incoming:
      # Both present and different: keep both, semicolon-separated.
      node1.setAttribute(name, ';'.join([existing, incoming]))

    # Property sheet references are useless once merged; drop them.
    if name == 'InheritedPropertySheets':
      node1.removeAttribute(name)
def MergeProperties(node1, node2):
  """Recursively merge |node2|'s attributes and children into |node1|."""
  MergeAttributes(node1, node2)
  for incoming_child in node2.childNodes:
    target = SeekToNode(node1, incoming_child)
    if not target:
      # No counterpart in node1: graft a deep copy of the child.
      node1.appendChild(incoming_child.cloneNode(True))
    else:
      MergeProperties(target, incoming_child)
def main(argv):
  """Main function of this vcproj prettifier.

  argv[1] is the vcproj path; remaining args are key=value replacement
  pairs stored in the module-level REPLACEMENTS dict.
  """
  global ARGUMENTS
  ARGUMENTS = argv

  # check if we have exactly 1 parameter.
  if len(argv) < 2:
    print ('Usage: %s "c:\\path\\to\\vcproj.vcproj" [key1=value1] '
           '[key2=value2]' % argv[0])
    return 1

  # Parse the keys
  for i in range(2, len(argv)):
    (key, value) = argv[i].split('=')
    REPLACEMENTS[key] = value

  # Open the vcproj and parse the xml.
  dom = parse(argv[1])

  # First thing we need to do is find the Configuration Node and merge them
  # with the vsprops they include.
  for configuration_node in GetConfiguationNodes(dom.documentElement):
    # Get the property sheets associated with this configuration.
    vsprops = configuration_node.getAttribute('InheritedPropertySheets')

    # Fix the filenames to be absolute.
    vsprops_list = FixFilenames(vsprops.strip().split(';'),
                                os.path.dirname(argv[1]))

    # Extend the list of vsprops with all vsprops contained in the current
    # vsprops. (The list grows while being iterated, which walks the
    # transitive includes.)
    for current_vsprops in vsprops_list:
      vsprops_list.extend(GetChildrenVsprops(current_vsprops))

    # Now that we have all the vsprops, we need to merge them.
    for current_vsprops in vsprops_list:
      MergeProperties(configuration_node,
                      parse(current_vsprops).documentElement)

  # Now that everything is merged, we need to cleanup the xml.
  CleanupVcproj(dom.documentElement)

  # Finally, we use the pretty xml function to print the vcproj back to the
  # user.
  #print dom.toprettyxml(newl="\n")
  PrettyPrintNode(dom.documentElement)
  return 0
# Script entry point: exit with main()'s status code.
if __name__ == '__main__':
  sys.exit(main(sys.argv))
| apache-2.0 |
Edraak/edraak-platform | lms/djangoapps/commerce/tests/test_utils.py | 9 | 13123 | """Tests of commerce utilities."""
import json
import unittest
from urllib import urlencode
import ddt
import httpretty
from django.conf import settings
from django.test import TestCase
from django.test.client import RequestFactory
from django.test.utils import override_settings
from mock import patch
from waffle.testutils import override_switch
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory
from course_modes.models import CourseMode
from lms.djangoapps.commerce.models import CommerceConfiguration
from lms.djangoapps.commerce.utils import EcommerceService, refund_entitlement
from openedx.core.lib.log_utils import audit_log
from student.tests.factories import (TEST_PASSWORD, UserFactory)
# Entitlements is not in CMS' INSTALLED_APPS so these imports will error during test collection
if settings.ROOT_URLCONF == 'lms.urls':
from entitlements.tests.factories import CourseEntitlementFactory
def update_commerce_config(enabled=False, checkout_page='/test_basket/add/'):
    """Create a CommerceConfiguration row enabling/disabling ecommerce checkout."""
    config_kwargs = {
        'checkout_on_ecommerce_service': enabled,
        'basket_checkout_page': checkout_page,
    }
    CommerceConfiguration.objects.create(**config_kwargs)
class AuditLogTests(TestCase):
    """Tests of the commerce audit logging helper."""
    # Shard number used by the CI test-splitting tooling.
    shard = 4

    @patch('openedx.core.lib.log_utils.log')
    def test_log_message(self, mock_log):
        """Verify that log messages are constructed correctly."""
        audit_log('foo', qux='quux', bar='baz')

        # Verify that the logged message contains comma-separated
        # key-value pairs ordered alphabetically by key.
        message = 'foo: bar="baz", qux="quux"'
        # NOTE(review): `called_with` is not a Mock assertion method --
        # `mock_log.info.called_with(message)` auto-creates a child mock and
        # returns another (always-truthy) Mock, so this assertion can never
        # fail. It should probably be
        # `mock_log.info.assert_called_with(message)`; confirm audit_log's
        # exact log.info call signature before changing.
        self.assertTrue(mock_log.info.called_with(message))
@ddt.ddt
class EcommerceServiceTests(TestCase):
    """Tests for the EcommerceService helper class."""
    # Shard number used by the CI test-splitting tooling.
    shard = 4

    def setUp(self):
        # Each test gets a fresh user and a CommerceConfiguration with
        # ecommerce checkout enabled.
        self.request_factory = RequestFactory()
        self.user = UserFactory.create()
        self.request = self.request_factory.get("foo")
        update_commerce_config(enabled=True)
        super(EcommerceServiceTests, self).setUp()

    def test_is_enabled(self):
        """Verify that is_enabled() returns True when ecomm checkout is enabled. """
        is_enabled = EcommerceService().is_enabled(self.user)
        self.assertTrue(is_enabled)

        # Flipping the flag off must flip is_enabled() to False.
        config = CommerceConfiguration.current()
        config.checkout_on_ecommerce_service = False
        config.save()
        is_not_enabled = EcommerceService().is_enabled(self.user)
        self.assertFalse(is_not_enabled)

    @override_switch(settings.DISABLE_ACCOUNT_ACTIVATION_REQUIREMENT_SWITCH, active=True)
    def test_is_enabled_activation_requirement_disabled(self):
        """Verify that is_enabled() returns True when ecomm checkout is enabled. """
        # With the activation-requirement switch on, even an inactive user
        # sees ecommerce checkout as enabled.
        self.user.is_active = False
        self.user.save()
        is_enabled = EcommerceService().is_enabled(self.user)
        self.assertTrue(is_enabled)

    @patch('openedx.core.djangoapps.theming.helpers.is_request_in_themed_site')
    def test_is_enabled_for_microsites(self, is_microsite):
        """Verify that is_enabled() returns True if used for a microsite."""
        is_microsite.return_value = True
        is_enabled = EcommerceService().is_enabled(self.user)
        self.assertTrue(is_enabled)

    @override_settings(ECOMMERCE_PUBLIC_URL_ROOT='http://ecommerce_url')
    def test_ecommerce_url_root(self):
        """Verify that the proper root URL is returned."""
        self.assertEqual(EcommerceService().ecommerce_url_root, 'http://ecommerce_url')

    @override_settings(ECOMMERCE_PUBLIC_URL_ROOT='http://ecommerce_url')
    def test_get_absolute_ecommerce_url(self):
        """Verify that the proper URL is returned."""
        url = EcommerceService().get_absolute_ecommerce_url('/test_basket/')
        self.assertEqual(url, 'http://ecommerce_url/test_basket/')

    @override_settings(ECOMMERCE_PUBLIC_URL_ROOT='http://ecommerce_url')
    def test_get_receipt_page_url(self):
        """Verify that the proper Receipt page URL is returned."""
        order_number = 'ORDER1'
        url = EcommerceService().get_receipt_page_url(order_number)
        expected_url = 'http://ecommerce_url/checkout/receipt/?order_number={}'.format(order_number)
        self.assertEqual(url, expected_url)

    @override_settings(ECOMMERCE_PUBLIC_URL_ROOT='http://ecommerce_url')
    def test_get_order_dashboard_url(self):
        """Verify that the proper order dashboard url is returned."""
        url = EcommerceService().get_order_dashboard_url()
        expected_url = 'http://ecommerce_url/dashboard/orders/'
        self.assertEqual(url, expected_url)

    # NOTE(review): this data set passes dicts but the method is not
    # decorated with @ddt.unpack (unlike the next test). Verify that the
    # installed ddt version delivers these dicts as keyword arguments;
    # current ddt releases do not auto-unpack dict values.
    @override_settings(ECOMMERCE_PUBLIC_URL_ROOT='http://ecommerce_url')
    @ddt.data(
        {
            'skus': ['TESTSKU']
        },
        {
            'skus': ['TESTSKU1', 'TESTSKU2', 'TESTSKU3']
        },
        {
            'skus': ['TESTSKU'],
            'program_uuid': '12345678-9012-3456-7890-123456789012'
        },
        {
            'skus': ['TESTSKU1', 'TESTSKU2', 'TESTSKU3'],
            'program_uuid': '12345678-9012-3456-7890-123456789012'
        }
    )
    def test_get_checkout_page_url(self, skus, program_uuid=None):
        """ Verify the checkout page URL is properly constructed and returned. """
        url = EcommerceService().get_checkout_page_url(*skus, program_uuid=program_uuid)
        config = CommerceConfiguration.current()
        expected_url = '{root}{basket_url}?{skus}'.format(
            basket_url=config.basket_checkout_page,
            root=settings.ECOMMERCE_PUBLIC_URL_ROOT,
            skus=urlencode({'sku': skus}, doseq=True),
        )
        if program_uuid:
            expected_url = '{expected_url}&basket={program_uuid}'.format(
                expected_url=expected_url,
                program_uuid=program_uuid
            )
        self.assertEqual(url, expected_url)

    @override_settings(ECOMMERCE_PUBLIC_URL_ROOT='http://ecommerce_url')
    @ddt.data(
        {
            'skus': ['TESTSKU'],
            'enterprise_catalog_uuid': None
        },
        {
            'skus': ['TESTSKU'],
            'enterprise_catalog_uuid': '6eca3efb-f3a0-4c08-806f-c6e6b65d61cb'
        },
    )
    @ddt.unpack
    def test_get_checkout_page_url_with_enterprise_catalog_uuid(self, skus, enterprise_catalog_uuid):
        """ Verify the checkout page URL is properly constructed and returned. """
        url = EcommerceService().get_checkout_page_url(
            *skus,
            enterprise_customer_catalog_uuid=enterprise_catalog_uuid
        )
        config = CommerceConfiguration.current()
        query = {'sku': skus}
        if enterprise_catalog_uuid:
            query.update({'enterprise_customer_catalog_uuid': enterprise_catalog_uuid})
        expected_url = '{root}{basket_url}?{skus}'.format(
            basket_url=config.basket_checkout_page,
            root=settings.ECOMMERCE_PUBLIC_URL_ROOT,
            skus=urlencode(query, doseq=True),
        )
        self.assertEqual(url, expected_url)
@unittest.skipUnless(settings.ROOT_URLCONF == 'lms.urls', 'Test only valid in lms')
class RefundUtilMethodTests(ModuleStoreTestCase):
    """Tests of refund_entitlement() against a mocked ecommerce service."""
    # Shard number used by the CI test-splitting tooling.
    shard = 4

    def setUp(self):
        super(RefundUtilMethodTests, self).setUp()
        self.user = UserFactory()
        # The ecommerce worker account must exist for refund API calls.
        UserFactory(username=settings.ECOMMERCE_SERVICE_WORKER_USERNAME, is_staff=True)
        self.client.login(username=self.user.username, password=TEST_PASSWORD)
        self.course = CourseFactory.create(org='edX', number='DemoX', display_name='Demo_Course')
        self.course2 = CourseFactory.create(org='edX', number='DemoX2', display_name='Demo_Course 2')

    @patch('lms.djangoapps.commerce.utils.is_commerce_service_configured', return_value=False)
    def test_ecommerce_service_not_configured(self, mock_commerce_configured):
        course_entitlement = CourseEntitlementFactory.create(mode=CourseMode.VERIFIED)
        refund_success = refund_entitlement(course_entitlement)
        # NOTE(review): `is_called` is not a real Mock attribute -- accessing
        # it auto-creates a truthy child mock, so this assert can never fail.
        # It should probably be `assert mock_commerce_configured.called`.
        assert mock_commerce_configured.is_called
        assert not refund_success

    @httpretty.activate
    def test_no_ecommerce_connection_and_failure(self):
        # A 404 from the refunds endpoint means no refund is created.
        httpretty.register_uri(
            httpretty.POST,
            settings.ECOMMERCE_API_URL + 'refunds/',
            status=404,
            body='{}',
            content_type='application/json'
        )
        course_entitlement = CourseEntitlementFactory.create(mode=CourseMode.VERIFIED)
        refund_success = refund_entitlement(course_entitlement)
        assert not refund_success

    @httpretty.activate
    def test_ecommerce_successful_refund(self):
        # Refund id 1 is created and then processed to "Complete".
        httpretty.register_uri(
            httpretty.POST,
            settings.ECOMMERCE_API_URL + 'refunds/',
            status=201,
            body='[1]',
            content_type='application/json'
        )
        httpretty.register_uri(
            httpretty.PUT,
            settings.ECOMMERCE_API_URL + 'refunds/1/process/',
            status=200,
            body=json.dumps({
                "id": 9,
                "created": "2017-12-21T18:23:49.468298Z",
                "modified": "2017-12-21T18:24:02.741426Z",
                "total_credit_excl_tax": "100.00",
                "currency": "USD",
                "status": "Complete",
                "order": 15,
                "user": 5
            }),
            content_type='application/json'
        )
        course_entitlement = CourseEntitlementFactory.create(mode=CourseMode.VERIFIED)
        refund_success = refund_entitlement(course_entitlement)
        assert refund_success

    @httpretty.activate
    @patch('lms.djangoapps.commerce.utils._send_refund_notification', return_value=True)
    def test_ecommerce_refund_failed_process_notification_sent(self, mock_send_notification):
        # Refund creation succeeds but processing fails with a 400; a
        # support notification is sent and the refund still counts as handled.
        httpretty.register_uri(
            httpretty.POST,
            settings.ECOMMERCE_API_URL + 'refunds/',
            status=201,
            body='[1]',
            content_type='application/json'
        )
        httpretty.register_uri(
            httpretty.PUT,
            settings.ECOMMERCE_API_URL + 'refunds/1/process/',
            status=400,
            body='{}',
            content_type='application/json'
        )
        course_entitlement = CourseEntitlementFactory.create(mode=CourseMode.VERIFIED)
        refund_success = refund_entitlement(course_entitlement)
        # NOTE(review): `is_called` auto-creates a truthy mock (see above);
        # the meaningful check here is the call_args assertion below.
        assert mock_send_notification.is_called
        call_args = list(mock_send_notification.call_args)
        assert call_args[0] == (course_entitlement.user, [1])
        assert refund_success

    @httpretty.activate
    @patch('lms.djangoapps.commerce.utils._send_refund_notification', return_value=True)
    def test_ecommerce_refund_not_verified_notification_for_entitlement(self, mock_send_notification):
        """
        Note that we are currently notifying Support whenever a refund require approval for entitlements as
        Entitlements are only available in paid modes. This test should be updated if this logic changes
        in the future.

        PROFESSIONAL mode is used here although we never auto approve PROFESSIONAL refunds right now
        """
        httpretty.register_uri(
            httpretty.POST,
            settings.ECOMMERCE_API_URL + 'refunds/',
            status=201,
            body='[1]',
            content_type='application/json'
        )
        httpretty.register_uri(
            httpretty.PUT,
            settings.ECOMMERCE_API_URL + 'refunds/1/process/',
            status=400,
            body='{}',
            content_type='application/json'
        )
        course_entitlement = CourseEntitlementFactory.create(mode=CourseMode.PROFESSIONAL)
        refund_success = refund_entitlement(course_entitlement)
        # NOTE(review): `is_called` auto-creates a truthy mock (see above).
        assert mock_send_notification.is_called
        call_args = list(mock_send_notification.call_args)
        assert call_args[0] == (course_entitlement.user, [1])
        assert refund_success

    @httpretty.activate
    @patch('lms.djangoapps.commerce.utils._send_refund_notification', return_value=True)
    def test_ecommerce_refund_send_notification_failed(self, mock_send_notification):
        # If the notification itself raises, the refund is reported as failed.
        httpretty.register_uri(
            httpretty.POST,
            settings.ECOMMERCE_API_URL + 'refunds/',
            status=201,
            body='[1]',
            content_type='application/json'
        )
        httpretty.register_uri(
            httpretty.PUT,
            settings.ECOMMERCE_API_URL + 'refunds/1/process/',
            status=400,
            body='{}',
            content_type='application/json'
        )
        mock_send_notification.side_effect = NotImplementedError
        course_entitlement = CourseEntitlementFactory.create(mode=CourseMode.VERIFIED)
        refund_success = refund_entitlement(course_entitlement)
        # NOTE(review): `is_called` auto-creates a truthy mock (see above).
        assert mock_send_notification.is_called
        call_args = list(mock_send_notification.call_args)
        assert call_args[0] == (course_entitlement.user, [1])
        assert not refund_success
| agpl-3.0 |
apagac/robottelo-blrm | tests/foreman/ui/test_template.py | 2 | 10200 | # -*- encoding: utf-8 -*-
"""Test class for Template UI"""
from ddt import ddt
from fauxfactory import gen_string
from robottelo import entities
from robottelo.common.constants import OS_TEMPLATE_DATA_FILE, SNIPPET_DATA_FILE
from robottelo.common.decorators import data, run_only_on, skip_if_bug_open
from robottelo.common.helpers import get_data_file, generate_strings_list
from robottelo.test import UITestCase
from robottelo.ui.factory import make_templates
from robottelo.ui.locators import common_locators
from robottelo.ui.session import Session
@run_only_on('sat')
@ddt
class Template(UITestCase):
    """Implements Provisioning Template tests from UI"""

    @classmethod
    def setUpClass(cls):  # noqa
        # One shared organization for the whole class; individual tests
        # select it in the UI when needed.
        org_attrs = entities.Organization().create_json()
        cls.org_name = org_attrs['name']
        cls.org_id = org_attrs['id']

        super(Template, cls).setUpClass()

    @data(*generate_strings_list(len1=8))
    def test_positive_create_template(self, name):
        """@Test: Create new template

        @Feature: Template - Positive Create

        @Assert: New provisioning template of type 'provision'
        should be created successfully

        """
        temp_type = 'provision'
        template_path = get_data_file(OS_TEMPLATE_DATA_FILE)
        with Session(self.browser) as session:
            make_templates(session, name=name, template_path=template_path,
                           custom_really=True, template_type=temp_type)
            self.assertIsNotNone(self.template.search(name))

    def test_negative_create_template_1(self):
        """@Test: Template - Create a new template with 256 characters in name

        @Feature: Template - Negative Create

        @Assert: Template is not created

        """
        # 256 characters exceeds the name length limit; the UI must show
        # a validation error.
        name = gen_string("alpha", 256)
        temp_type = 'provision'
        template_path = get_data_file(OS_TEMPLATE_DATA_FILE)
        with Session(self.browser) as session:
            make_templates(session, name=name, template_path=template_path,
                           custom_really=True, template_type=temp_type)
            self.assertIsNotNone(self.template.wait_until_element
                                 (common_locators["name_haserror"]))

    @data(" ", "")
    def test_negative_create_template_2(self, name):
        """@Test: Create a new template with blank and whitespace in name

        @Feature: Template - Negative Create

        @Assert: Template is not created

        """
        temp_type = 'provision'
        template_path = get_data_file(OS_TEMPLATE_DATA_FILE)
        with Session(self.browser) as session:
            make_templates(session, name=name, template_path=template_path,
                           custom_really=True, template_type=temp_type)
            self.assertIsNotNone(self.template.wait_until_element
                                 (common_locators["name_haserror"]))

    def test_negative_create_template_4(self):
        """@Test: Template - Create a new template with same name

        @Feature: Template - Negative Create

        @Assert: Template is not created

        """
        # Create once successfully, then verify the duplicate is rejected.
        name = gen_string("alpha", 16)
        temp_type = 'provision'
        template_path = get_data_file(OS_TEMPLATE_DATA_FILE)
        with Session(self.browser) as session:
            make_templates(session, name=name, template_path=template_path,
                           custom_really=True, template_type=temp_type)
            self.assertIsNotNone(self.template.search(name))
            make_templates(session, name=name, template_path=template_path,
                           custom_really=True, template_type=temp_type)
            self.assertIsNotNone(self.template.wait_until_element
                                 (common_locators["name_haserror"]))

    def test_negative_create_template_5(self):
        """@Test: Template - Create a new template without selecting its type

        @Feature: Template - Negative Create

        @Assert: Template is not created

        """
        name = gen_string("alpha", 16)
        temp_type = ""
        template_path = get_data_file(OS_TEMPLATE_DATA_FILE)
        with Session(self.browser) as session:
            with self.assertRaises(Exception) as context:
                make_templates(session, name=name, template_path=template_path,
                               custom_really=True, template_type=temp_type)
            self.assertEqual(context.exception.message,
                             "Could not create template '%s'"
                             " without type" % name)

    def test_negative_create_template_6(self):
        """@Test: Template - Create a new template without uploading a template

        @Feature: Template - Negative Create

        @Assert: Template is not created

        """
        name = gen_string("alpha", 16)
        temp_type = 'PXELinux'
        template_path = ""
        with Session(self.browser) as session:
            with self.assertRaises(Exception) as context:
                make_templates(session, name=name, template_path=template_path,
                               custom_really=True, template_type=temp_type)
            self.assertEqual(context.exception.message,
                             "Could not create blank template '%s'" % name)

    def test_negative_create_template_7(self):
        """@Test: Create a new template with 256 characters in audit comments

        @Feature: Template - Negative Create

        @Assert: Template is not created

        """
        name = gen_string("alpha", 16)
        audit_comment = gen_string("alpha", 256)
        temp_type = 'PXELinux'
        template_path = get_data_file(OS_TEMPLATE_DATA_FILE)
        with Session(self.browser) as session:
            make_templates(session, name=name, template_path=template_path,
                           custom_really=True, audit_comment=audit_comment,
                           template_type=temp_type)
            self.assertIsNotNone(self.template.wait_until_element
                                 (common_locators["haserror"]))

    @data(*generate_strings_list(len1=8))
    def test_positive_create_snippet_template(self, name):
        """@Test: Create new template of type snippet

        @Feature: Template - Positive Create

        @Assert: New provisioning template of type 'snippet'
        should be created successfully

        """
        template_path = get_data_file(SNIPPET_DATA_FILE)
        with Session(self.browser) as session:
            make_templates(session, name=name, template_path=template_path,
                           custom_really=True, snippet=True)
            self.assertIsNotNone(self.template.search(name))

    @skip_if_bug_open('bugzilla', 1177756)
    @data(*generate_strings_list(len1=8))
    def test_remove_template(self, template_name):
        """@Test: Remove a template

        @Feature: Template - Positive Delete

        @Assert: Template removed successfully

        @BZ: 1177756

        """
        # Template is created via the API, then deleted through the UI.
        entities.ConfigTemplate(
            name=template_name, organization=[self.org_id]
        ).create_json()
        with Session(self.browser) as session:
            session.nav.go_to_select_org(self.org_name)
            self.template.delete(template_name, True)
            self.assertIsNone(self.template.search(template_name))

    def test_update_template(self):
        """@Test: Update template name and template type

        @Feature: Template - Positive Update

        @Assert: The template name and type should be updated successfully

        """
        name = gen_string("alpha", 6)
        new_name = gen_string("alpha", 6)
        temp_type = 'provision'
        new_temp_type = 'PXELinux'
        template_path = get_data_file(OS_TEMPLATE_DATA_FILE)
        with Session(self.browser) as session:
            make_templates(session, name=name, template_path=template_path,
                           custom_really=True, template_type=temp_type)
            self.assertIsNotNone(self.template.search(name))
            self.template.update(name, False, new_name, None, new_temp_type)
            self.assertIsNotNone(self.template.search(new_name))

    def test_update_template_os(self):
        """@Test: Creates new template, along with two OS's
        and associate list of OS's with created template

        @Feature: Template - Positive Update

        @Assert: The template should be updated with newly created OS's
        successfully

        """
        name = gen_string("alpha", 6)
        new_name = gen_string("alpha", 6)
        temp_type = 'provision'
        # Two operating systems created through the API for association.
        os_list = [
            entities.OperatingSystem().create_json()['name'] for _ in range(2)
        ]
        template_path = get_data_file(OS_TEMPLATE_DATA_FILE)
        with Session(self.browser) as session:
            make_templates(session, name=name, template_path=template_path,
                           custom_really=True, template_type=temp_type)
            self.assertIsNotNone(self.template.search(name))
            self.template.update(name, False, new_name, new_os_list=os_list)
            self.assertIsNotNone(self.template.search(new_name))

    def test_clone_template(self):
        """@Test: Assure ability to clone a provisioning template

        @Feature: Template - Clone

        @Steps:
         1. Go to Provisioning template UI
         2. Choose a template and attempt to clone it

        @Assert: template is cloned

        """
        name = gen_string("alpha", 6)
        clone_name = gen_string("alpha", 6)
        temp_type = 'provision'
        os_list = [
            entities.OperatingSystem().create_json()['name'] for _ in range(2)
        ]
        template_path = get_data_file(OS_TEMPLATE_DATA_FILE)
        with Session(self.browser) as session:
            make_templates(session, name=name, template_path=template_path,
                           custom_really=True, template_type=temp_type)
            self.assertIsNotNone(self.template.search(name))
            self.template.clone(name, custom_really=False,
                                clone_name=clone_name, os_list=os_list)
            self.assertIsNotNone(self.template.search(clone_name))
| gpl-3.0 |
robinro/ansible | lib/ansible/modules/windows/win_firewall.py | 12 | 1988 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2017, Michael Eaton <meaton@iforium.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# this is a windows documentation stub. actual code lives in the .ps1
# file of the same name
# Ansible plugin metadata consumed by the module loader / docs tooling.
ANSIBLE_METADATA = {'metadata_version': '1.0',
                    'status': ['preview'],
                    'supported_by': 'community'}

# NOTE: the option is `profiles` (a list), matching the EXAMPLES below;
# the original documentation mistakenly described a singular `profile`.
DOCUMENTATION = r'''
---
module: win_firewall
version_added: "2.4"
short_description: Manages Windows Firewall
description:
- Manages Windows Firewall
options:
  profiles:
    description:
    - specify the profiles to change
    choices:
    - Public
    - Domain
    - Private
  state:
    description:
    - set state of firewall for given profile
    choices:
    - enabled
    - disabled

author: "Michael Eaton (@MichaelEaton83)"
'''

EXAMPLES = r'''
- name: Enable all firewalls
  win_firewall:
    state: enabled
    profiles:
    - Domain
    - Public
    - Private
  tags: enable_firewall

- name: Disable Domain firewall
  win_firewall:
    state: disabled
    profiles:
    - Domain
  tags: disable_firewall
'''

RETURN = r'''
profile:
    description: chosen profile
    returned: always
    type: string
    sample: Domain

enabled:
    description: current firewall status for chosen profile (after any potential change)
    returned: always
    type: bool
    sample: true
'''
| gpl-3.0 |
mhugent/Quantum-GIS | python/plugins/processing/algs/admintools/DeleteWorkspace.py | 2 | 1798 | # -*- coding: utf-8 -*-
"""
***************************************************************************
DeleteWorkspace.py
---------------------
Date : October 2012
Copyright : (C) 2012 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya'
__date__ = 'October 2012'
__copyright__ = '(C) 2012, Victor Olaya'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
from qgis.core import *
from GeoServerToolsAlgorithm import \
GeoServerToolsAlgorithm
from processing.parameters.ParameterString import ParameterString
class DeleteWorkspace(GeoServerToolsAlgorithm):
    """Processing algorithm that removes a workspace from a GeoServer catalog."""

    # Name of the algorithm's single string parameter.
    WORKSPACE = 'WORKSPACE'

    def defineCharacteristics(self):
        """Declare the algorithm's name, group and input parameters."""
        self.addBaseParameters()
        self.name = 'Delete workspace'
        self.group = 'GeoServer management tools'
        self.addParameter(ParameterString(self.WORKSPACE, 'Workspace'))

    def processAlgorithm(self, progress):
        """Connect to the catalog and delete the selected workspace."""
        self.createCatalog()
        target_name = self.getParameterValue(self.WORKSPACE)
        workspace = self.catalog.get_workspace(target_name)
        self.catalog.delete(workspace)
| gpl-2.0 |
jagguli/intellij-community | python/lib/Lib/CGIHTTPServer.py | 86 | 12466 | """CGI-savvy HTTP Server.
This module builds on SimpleHTTPServer by implementing GET and POST
requests to cgi-bin scripts.
If the os.fork() function is not present (e.g. on Windows),
os.popen2() is used as a fallback, with slightly altered semantics; if
that function is not present either (e.g. on Macintosh), only Python
scripts are supported, and they are executed by the current process.
In all cases, the implementation is intentionally naive -- all
requests are executed sychronously.
SECURITY WARNING: DON'T USE THIS CODE UNLESS YOU ARE INSIDE A FIREWALL
-- it may execute arbitrary Python code or external programs.
Note that status code 200 is sent prior to execution of a CGI script, so
scripts cannot send other status codes such as 302 (redirect).
"""
__version__ = "0.4"
__all__ = ["CGIHTTPRequestHandler"]
import os
import sys
import urllib
import BaseHTTPServer
import SimpleHTTPServer
import select
class CGIHTTPRequestHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):

    """Complete HTTP server with GET, HEAD and POST commands.

    GET and HEAD also support running CGI scripts.

    The POST command is *only* implemented for CGI scripts.
    """

    # Determine platform specifics
    have_fork = hasattr(os, 'fork')
    have_popen2 = hasattr(os, 'popen2')
    have_popen3 = hasattr(os, 'popen3')

    # Make rfile unbuffered -- we need to read one line and then pass
    # the rest to a subprocess, so we can't use buffered input.
    rbufsize = 0

    def do_POST(self):
        """Serve a POST request.

        This is only implemented for CGI scripts.
        """
        if self.is_cgi():
            self.run_cgi()
        else:
            self.send_error(501, "Can only POST to CGI scripts")

    def send_head(self):
        """Version of send_head that support CGI scripts"""
        if self.is_cgi():
            return self.run_cgi()
        else:
            # Fall back to plain static-file serving.
            return SimpleHTTPServer.SimpleHTTPRequestHandler.send_head(self)

    def is_cgi(self):
        """Test whether self.path corresponds to a CGI script.

        Return a tuple (dir, rest) if self.path requires running a
        CGI script, None if not.  Note that rest begins with a
        slash if it is not empty.

        The default implementation tests whether the path
        begins with one of the strings in the list
        self.cgi_directories (and the next character is a '/'
        or the end of the string).
        """
        path = self.path

        for x in self.cgi_directories:
            i = len(x)
            # Match a CGI directory prefix followed by '/' or end-of-path;
            # stash the (dir, rest) split for run_cgi to use.
            if path[:i] == x and (not path[i:] or path[i] == '/'):
                self.cgi_info = path[:i], path[i+1:]
                return True
        return False

    # URL prefixes treated as CGI script directories.
    cgi_directories = ['/cgi-bin', '/htbin']

    def is_executable(self, path):
        """Test whether argument path is an executable file."""
        return executable(path)

    def is_python(self, path):
        """Test whether argument path is a Python script."""
        head, tail = os.path.splitext(path)
        return tail.lower() in (".py", ".pyw")

    def run_cgi(self):
        """Execute a CGI script."""
        path = self.path
        dir, rest = self.cgi_info
        # Descend into the deepest existing directory under the CGI prefix,
        # so that nested script directories (e.g. /cgi-bin/sub/x.py) work.
        i = path.find('/', len(dir) + 1)
        while i >= 0:
            nextdir = path[:i]
            nextrest = path[i+1:]

            scriptdir = self.translate_path(nextdir)
            if os.path.isdir(scriptdir):
                dir, rest = nextdir, nextrest
                i = path.find('/', len(dir) + 1)
            else:
                break

        # find an explicit query string, if present.
        i = rest.rfind('?')
        if i >= 0:
            rest, query = rest[:i], rest[i+1:]
        else:
            query = ''

        # dissect the part after the directory name into a script name &
        # a possible additional path, to be stored in PATH_INFO.
        i = rest.find('/')
        if i >= 0:
            script, rest = rest[:i], rest[i:]
        else:
            script, rest = rest, ''

        scriptname = dir + '/' + script
        scriptfile = self.translate_path(scriptname)
        if not os.path.exists(scriptfile):
            self.send_error(404, "No such CGI script (%r)" % scriptname)
            return
        if not os.path.isfile(scriptfile):
            self.send_error(403, "CGI script is not a plain file (%r)" %
                            scriptname)
            return
        ispy = self.is_python(scriptname)
        if not ispy:
            # Non-Python scripts require a way to spawn a subprocess and an
            # executable file; otherwise refuse the request.
            if not (self.have_fork or self.have_popen2 or self.have_popen3):
                self.send_error(403, "CGI script is not a Python script (%r)" %
                                scriptname)
                return
            if not self.is_executable(scriptfile):
                self.send_error(403, "CGI script is not executable (%r)" %
                                scriptname)
                return

        # Build the CGI/1.1 environment for the script.
        # Reference: http://hoohoo.ncsa.uiuc.edu/cgi/env.html
        # XXX Much of the following could be prepared ahead of time!
        env = {}
        env['SERVER_SOFTWARE'] = self.version_string()
        env['SERVER_NAME'] = self.server.server_name
        env['GATEWAY_INTERFACE'] = 'CGI/1.1'
        env['SERVER_PROTOCOL'] = self.protocol_version
        env['SERVER_PORT'] = str(self.server.server_port)
        env['REQUEST_METHOD'] = self.command
        uqrest = urllib.unquote(rest)
        env['PATH_INFO'] = uqrest
        env['PATH_TRANSLATED'] = self.translate_path(uqrest)
        env['SCRIPT_NAME'] = scriptname
        if query:
            env['QUERY_STRING'] = query
        host = self.address_string()
        if host != self.client_address[0]:
            env['REMOTE_HOST'] = host
        env['REMOTE_ADDR'] = self.client_address[0]
        authorization = self.headers.getheader("authorization")
        if authorization:
            # Parse "Basic <base64>" credentials to expose REMOTE_USER;
            # malformed values are silently ignored.
            authorization = authorization.split()
            if len(authorization) == 2:
                import base64, binascii
                env['AUTH_TYPE'] = authorization[0]
                if authorization[0].lower() == "basic":
                    try:
                        authorization = base64.decodestring(authorization[1])
                    except binascii.Error:
                        pass
                    else:
                        authorization = authorization.split(':')
                        if len(authorization) == 2:
                            env['REMOTE_USER'] = authorization[0]
        # XXX REMOTE_IDENT
        if self.headers.typeheader is None:
            env['CONTENT_TYPE'] = self.headers.type
        else:
            env['CONTENT_TYPE'] = self.headers.typeheader
        length = self.headers.getheader('content-length')
        if length:
            env['CONTENT_LENGTH'] = length
        accept = []
        # Join all Accept headers (including continuation lines) into one
        # comma-separated HTTP_ACCEPT value.
        for line in self.headers.getallmatchingheaders('accept'):
            if line[:1] in "\t\n\r ":
                accept.append(line.strip())
            else:
                accept = accept + line[7:].split(',')
        env['HTTP_ACCEPT'] = ','.join(accept)
        ua = self.headers.getheader('user-agent')
        if ua:
            env['HTTP_USER_AGENT'] = ua
        co = filter(None, self.headers.getheaders('cookie'))
        if co:
            env['HTTP_COOKIE'] = ', '.join(co)
        # XXX Other HTTP_* headers
        # Since we're setting the env in the parent, provide empty
        # values to override previously set values
        for k in ('QUERY_STRING', 'REMOTE_HOST', 'CONTENT_LENGTH',
                  'HTTP_USER_AGENT', 'HTTP_COOKIE'):
            env.setdefault(k, "")
        os.environ.update(env)

        # NOTE: 200 is sent before the script runs, so scripts cannot emit
        # other status codes such as 302 (see module docstring).
        self.send_response(200, "Script output follows")

        decoded_query = query.replace('+', ' ')

        if self.have_fork:
            # Unix -- fork as we should
            args = [script]
            if '=' not in decoded_query:
                # Old-style "indexed" queries become command-line arguments.
                args.append(decoded_query)
            nobody = nobody_uid()
            self.wfile.flush() # Always flush before forking
            pid = os.fork()
            if pid != 0:
                # Parent
                pid, sts = os.waitpid(pid, 0)
                # throw away additional data [see bug #427345]
                while select.select([self.rfile], [], [], 0)[0]:
                    if not self.rfile.read(1):
                        break
                if sts:
                    self.log_error("CGI script exit status %#x", sts)
                return
            # Child
            try:
                try:
                    # Drop privileges if possible before exec'ing the script.
                    os.setuid(nobody)
                except os.error:
                    pass
                os.dup2(self.rfile.fileno(), 0)
                os.dup2(self.wfile.fileno(), 1)
                os.execve(scriptfile, args, os.environ)
            except:
                self.server.handle_error(self.request, self.client_address)
                os._exit(127)

        elif self.have_popen2 or self.have_popen3:
            # Windows -- use popen2 or popen3 to create a subprocess
            import shutil
            if self.have_popen3:
                popenx = os.popen3
            else:
                popenx = os.popen2
            cmdline = scriptfile
            if self.is_python(scriptfile):
                interp = sys.executable
                if interp.lower().endswith("w.exe"):
                    # On Windows, use python.exe, not pythonw.exe
                    interp = interp[:-5] + interp[-4:]
                cmdline = "%s -u %s" % (interp, cmdline)
            if '=' not in query and '"' not in query:
                cmdline = '%s "%s"' % (cmdline, query)
            self.log_message("command: %s", cmdline)
            try:
                nbytes = int(length)
            except (TypeError, ValueError):
                nbytes = 0
            files = popenx(cmdline, 'b')
            fi = files[0]
            fo = files[1]
            if self.have_popen3:
                fe = files[2]
            # Feed the request body to the script's stdin for POSTs.
            if self.command.lower() == "post" and nbytes > 0:
                data = self.rfile.read(nbytes)
                fi.write(data)
            # throw away additional data [see bug #427345]
            while select.select([self.rfile._sock], [], [], 0)[0]:
                if not self.rfile._sock.recv(1):
                    break
            fi.close()
            shutil.copyfileobj(fo, self.wfile)
            if self.have_popen3:
                errors = fe.read()
                fe.close()
                if errors:
                    self.log_error('%s', errors)
            sts = fo.close()
            if sts:
                self.log_error("CGI script exit status %#x", sts)
            else:
                self.log_message("CGI script exited OK")

        else:
            # Other O.S. -- execute script in this process
            # (only Python scripts are supported on this path);
            # save and restore interpreter state around execfile.
            save_argv = sys.argv
            save_stdin = sys.stdin
            save_stdout = sys.stdout
            save_stderr = sys.stderr
            try:
                save_cwd = os.getcwd()
                try:
                    sys.argv = [scriptfile]
                    if '=' not in decoded_query:
                        sys.argv.append(decoded_query)
                    sys.stdout = self.wfile
                    sys.stdin = self.rfile
                    execfile(scriptfile, {"__name__": "__main__"})
                finally:
                    sys.argv = save_argv
                    sys.stdin = save_stdin
                    sys.stdout = save_stdout
                    sys.stderr = save_stderr
                    os.chdir(save_cwd)
            except SystemExit, sts:
                self.log_error("CGI script exit status %s", str(sts))
            else:
                self.log_message("CGI script exited OK")
# Cached uid for the unprivileged 'nobody' account; filled in lazily.
nobody = None

def nobody_uid():
    """Internal routine to get nobody's uid"""
    global nobody
    if not nobody:
        try:
            import pwd
        except ImportError:
            # No password database on this platform (e.g. Windows).
            return -1
        try:
            nobody = pwd.getpwnam('nobody')[2]
        except KeyError:
            # No 'nobody' account: pick a uid just past the highest one.
            nobody = 1 + max(entry[2] for entry in pwd.getpwall())
    return nobody
def executable(path):
    """Test for executable file."""
    try:
        mode = os.stat(path).st_mode
    except os.error:
        # Missing file or permission problem: treat as not executable.
        return False
    # 73 == octal 0111: any of the user/group/other execute bits.
    return (mode & 73) != 0
def test(HandlerClass = CGIHTTPRequestHandler,
         ServerClass = BaseHTTPServer.HTTPServer):
    """Run a CGI-enabled HTTP server from the command line.

    Delegates to SimpleHTTPServer.test, substituting the CGI-aware
    request handler so cgi-bin scripts are executed.
    """
    SimpleHTTPServer.test(HandlerClass, ServerClass)


# Start the demo server when invoked as a script.
if __name__ == '__main__':
    test()
| apache-2.0 |
raymancao/Flask-Migrate | tests/test_migrate.py | 4 | 2789 | import os
import shutil
import unittest
import subprocess
import shlex
def run_cmd(cmd):
    """Run a command and return a tuple with (stdout, stderr, exit_code)"""
    proc = subprocess.Popen(shlex.split(cmd),
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE)
    out, err = proc.communicate()
    # communicate() waits for the process to exit, so returncode is set.
    return out, err, proc.returncode
class TestMigrate(unittest.TestCase):
def setUp(self):
os.chdir(os.path.split(os.path.abspath(__file__))[0])
try:
os.remove('app.db')
except OSError:
pass
try:
shutil.rmtree('migrations')
except OSError:
pass
try:
shutil.rmtree('temp_folder')
except OSError:
pass
def tearDown(self):
try:
os.remove('app.db')
except OSError:
pass
try:
shutil.rmtree('migrations')
except OSError:
pass
try:
shutil.rmtree('temp_folder')
except OSError:
pass
def test_alembic_version(self):
from flask_migrate import alembic_version
self.assertEqual(len(alembic_version), 3)
for v in alembic_version:
self.assertTrue(isinstance(v, int))
def test_migrate_upgrade(self):
(o, e, s) = run_cmd('python app.py db init')
self.assertTrue(s == 0)
(o, e, s) = run_cmd('python app.py db migrate')
self.assertTrue(s == 0)
(o, e, s) = run_cmd('python app.py db upgrade')
self.assertTrue(s == 0)
from .app import db, User
db.session.add(User(name='test'))
db.session.commit()
def test_custom_directory(self):
(o, e, s) = run_cmd('python app_custom_directory.py db init')
self.assertTrue(s == 0)
(o, e, s) = run_cmd('python app_custom_directory.py db migrate')
self.assertTrue(s == 0)
(o, e, s) = run_cmd('python app_custom_directory.py db upgrade')
self.assertTrue(s == 0)
from .app_custom_directory import db, User
db.session.add(User(name='test'))
db.session.commit()
def test_compare_type(self):
(o, e, s) = run_cmd('python app_compare_type1.py db init')
self.assertTrue(s == 0)
(o, e, s) = run_cmd('python app_compare_type1.py db migrate')
self.assertTrue(s == 0)
(o, e, s) = run_cmd('python app_compare_type1.py db upgrade')
self.assertTrue(s == 0)
(o, e, s) = run_cmd('python app_compare_type2.py db migrate')
self.assertTrue(s == 0)
self.assertTrue(b'Detected type change from VARCHAR(length=128) '
b'to String(length=10)' in e)
if __name__ == '__main__':
unittest.main()
| mit |
tiagofrepereira2012/tensorflow | tensorflow/python/framework/importer_test.py | 9 | 44172 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorflow.python.framework.importer."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from google.protobuf import text_format
from tensorflow.core.framework import graph_pb2
from tensorflow.core.framework import op_def_pb2
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import device
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import function
from tensorflow.python.framework import importer
from tensorflow.python.framework import op_def_registry
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import test_ops # pylint: disable=unused-import
from tensorflow.python.framework import versions
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gradients_impl
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import variables
import tensorflow.python.ops.nn_grad # pylint: disable=unused-import
from tensorflow.python.platform import test
def _UnknownShape(op):
  """Shape function reporting an unknown shape for each output of `op`."""
  shapes = []
  for _ in op.outputs:
    shapes.append(tensor_shape.unknown_shape())
  return shapes
# NOTE(cwhipkey): Dummy shape registration for ops used in the tests, since they
# don't have C++ op registrations on which to attach C++ shape fns.
for _op_name in ("If", "Iff", "Ii", "Iif", "Iii", "In", "Iri", "None", "Of",
                 "Oi", "Oif", "Oii", "OpWithDefaultAttr",
                 "OpWithFutureDefaultAttr", "Or", "Otl", "Unary"):
  ops.RegisterShape(_op_name)(_UnknownShape)
# Minimal test-only op definitions, registered so import_graph_def can look
# them up by name when parsing the GraphDefs built in the tests below.
_op_list = op_def_pb2.OpList()
text_format.Merge("""
op {
  name: 'None'
}
op {
  name: 'Oi'
  output_arg { name: 'a' type: DT_INT32 }
}
op {
  name: 'Or'
  output_arg { name: 'a' type: DT_INT32 is_ref: true }
}
op {
  name: 'Of'
  output_arg { name: 'a' type: DT_FLOAT }
}
op {
  name: 'Ii'
  input_arg { name: 'a' type: DT_INT32 }
}
op {
  name: 'If'
  input_arg { name: 'a' type: DT_FLOAT }
}
op {
  name: 'Oii'
  output_arg { name: 'a' type: DT_INT32 }
  output_arg { name: 'b' type: DT_INT32 }
}
op {
  name: 'Oif'
  output_arg { name: 'a' type: DT_INT32 }
  output_arg { name: 'b' type: DT_FLOAT }
}
op {
  name: 'Iii'
  input_arg { name: 'a' type: DT_INT32 }
  input_arg { name: 'b' type: DT_INT32 }
}
op {
  name: 'Iff'
  input_arg { name: 'a' type: DT_FLOAT }
  input_arg { name: 'b' type: DT_FLOAT }
}
op {
  name: 'Iif'
  input_arg { name: 'a' type: DT_INT32 }
  input_arg { name: 'b' type: DT_FLOAT }
}
op {
  name: 'Iri'
  input_arg { name: 'a' type: DT_INT32 is_ref: true }
  input_arg { name: 'b' type: DT_INT32 }
}
op {
  name: 'In'
  input_arg { name: 'a' number_attr: 'N' type_attr: 'T' }
  attr { name: 'N' type: 'int' minimum: 1 }
  attr { name: 'T' type: 'type' }
}
op {
  name: 'Otl'
  output_arg { name: 'a' type_list_attr: 't' }
  attr { name: 'T' type: 'list(type)' minimum: 1 }
}
op {
  name: 'Unary'
  input_arg { name: 'a' type_attr: 'T' }
  output_arg { name: 'b' type_attr: 'T' }
  attr { name: 'T' type: 'type' }
}
op {
  name: 'OpWithDefaultAttr'
  output_arg { name: 'a' type: DT_INT32 }
  attr { name: 'default_float' type: 'float' default_value { f: 123.0 } }
}
op {
  name: 'OpWithFutureDefaultAttr'
}
""", _op_list)
op_def_registry.register_op_list(_op_list)

# NOTE(mrry): Dummy shape registrations for ops used in the tests.
for op_def in _op_list.op:
  ops.RegisterShape(op_def.name)(None)
class ImportGraphDefTest(test.TestCase):
def _MakeGraphDef(self,
text,
producer=versions.GRAPH_DEF_VERSION,
min_consumer=versions.GRAPH_DEF_VERSION_MIN_CONSUMER):
text = "versions: { producer: %d min_consumer: %d };\n%s" % (producer,
min_consumer,
text)
ret = graph_pb2.GraphDef()
text_format.Merge(text, ret)
return ret
def testBasic(self):
with ops.Graph().as_default():
a, b, c, d = importer.import_graph_def(
self._MakeGraphDef("""
node { name: 'A' op: 'Oif' }
node { name: 'B' op: 'Otl'
attr { key: 't'
value { list { type: DT_INT32 type: DT_FLOAT } } } }
node { name: 'C' op: 'In'
attr { key: 'N' value { i: 2 } }
attr { key: 'T' value { type: DT_INT32 } }
input: 'A:0' input: 'B:0' }
node { name: 'D' op: 'In'
attr { key: 'N' value { i: 2 } }
attr { key: 'T' value { type: DT_FLOAT } }
input: 'A:1' input: 'B:1' }
"""),
return_elements=["A", "B", "C", "D"],
name="import")
# Assert that the import process creates distinct tensors.
self.assertNotEqual(a.outputs[0].name, a.outputs[1].name)
self.assertNotEqual(b.outputs[0].name, b.outputs[1].name)
self.assertNotEqual(a.outputs[0].name, b.outputs[0].name)
self.assertNotEqual(a.outputs[0].name, b.outputs[1].name)
self.assertNotEqual(a.outputs[1].name, b.outputs[0].name)
self.assertNotEqual(a.outputs[1].name, b.outputs[1].name)
# Assert that the ops are connected according to the GraphDef topology.
self.assertEqual(c.inputs[0], a.outputs[0])
self.assertEqual(c.inputs[1], b.outputs[0])
self.assertEqual(d.inputs[0], a.outputs[1])
self.assertEqual(d.inputs[1], b.outputs[1])
# Check the types of the returned ops and tensors.
self.assertEqual(a.type, "Oif")
self.assertEqual(b.type, "Otl")
self.assertEqual(c.type, "In")
self.assertEqual(d.type, "In")
self.assertEqual(a.outputs[0].dtype, dtypes.int32)
self.assertEqual(a.outputs[1].dtype, dtypes.float32)
self.assertEqual(b.outputs[0].dtype, dtypes.int32)
self.assertEqual(b.outputs[1].dtype, dtypes.float32)
# Check the names of the returned ops.
self.assertEqual(a.name, "import/A")
self.assertEqual(b.name, "import/B")
self.assertEqual(c.name, "import/C")
self.assertEqual(d.name, "import/D")
# Check that the op_def is still available.
self.assertNotEqual(None, a.op_def)
def testInputMap(self):
with ops.Graph().as_default():
feed_a_0 = constant_op.constant(0, dtype=dtypes.int32)
feed_b_1 = constant_op.constant(1, dtype=dtypes.int32)
a, b, c, d = importer.import_graph_def(
self._MakeGraphDef("""
node { name: 'A' op: 'Oii' }
node { name: 'B' op: 'Oii' }
node { name: 'C' op: 'In'
attr { key: 'N' value { i: 2 } }
attr { key: 'T' value { type: DT_INT32 } }
input: 'A:0' input: 'B:0' }
node { name: 'D' op: 'In'
attr { key: 'N' value { i: 2 } }
attr { key: 'T' value { type: DT_INT32 } }
input: 'A:1' input: 'B:1' }
"""),
input_map={"A:0": feed_a_0,
"B:1": feed_b_1},
return_elements=["A", "B", "C", "D"])
self.assertEqual(c.inputs[0], feed_a_0)
self.assertEqual(c.inputs[1], b.outputs[0])
self.assertEqual(d.inputs[0], a.outputs[1])
self.assertEqual(d.inputs[1], feed_b_1)
def testInputMapBytes(self):
with ops.Graph().as_default():
feed_a_0 = constant_op.constant(0, dtype=dtypes.int32)
feed_b_1 = constant_op.constant(1, dtype=dtypes.int32)
a, b, c, d = importer.import_graph_def(
self._MakeGraphDef("""
node { name: 'A' op: 'Oii' }
node { name: 'B' op: 'Oii' }
node { name: 'C' op: 'In'
attr { key: 'N' value { i: 2 } }
attr { key: 'T' value { type: DT_INT32 } }
input: 'A:0' input: 'B:0' }
node { name: 'D' op: 'In'
attr { key: 'N' value { i: 2 } }
attr { key: 'T' value { type: DT_INT32 } }
input: 'A:1' input: 'B:1' }
"""),
input_map={b"A:0": feed_a_0,
b"B:1": feed_b_1},
return_elements=[b"A", b"B", b"C", b"D"])
self.assertEqual(c.inputs[0], feed_a_0)
self.assertEqual(c.inputs[1], b.outputs[0])
self.assertEqual(d.inputs[0], a.outputs[1])
self.assertEqual(d.inputs[1], feed_b_1)
def testInputMapUnicode(self):
with ops.Graph().as_default():
feed_a_0 = constant_op.constant(0, dtype=dtypes.int32)
feed_b_1 = constant_op.constant(1, dtype=dtypes.int32)
a, b, c, d = importer.import_graph_def(
self._MakeGraphDef("""
node { name: 'A' op: 'Oii' }
node { name: 'B' op: 'Oii' }
node { name: 'C' op: 'In'
attr { key: 'N' value { i: 2 } }
attr { key: 'T' value { type: DT_INT32 } }
input: 'A:0' input: 'B:0' }
node { name: 'D' op: 'In'
attr { key: 'N' value { i: 2 } }
attr { key: 'T' value { type: DT_INT32 } }
input: 'A:1' input: 'B:1' }
"""),
input_map={u"A:0": feed_a_0,
u"B:1": feed_b_1},
return_elements=[u"A", u"B", u"C", u"D"])
self.assertEqual(c.inputs[0], feed_a_0)
self.assertEqual(c.inputs[1], b.outputs[0])
self.assertEqual(d.inputs[0], a.outputs[1])
self.assertEqual(d.inputs[1], feed_b_1)
def testImplicitZerothOutput(self):
with ops.Graph().as_default():
a, b = importer.import_graph_def(
self._MakeGraphDef("""
node { name: 'A' op: 'Oii' }
node { name: 'B' op: 'Ii' input: 'A' }
"""),
return_elements=["A", "B"])
self.assertEqual(b.inputs[0], a.outputs[0])
def testInputMapImplicitZerothOutput(self):
with ops.Graph().as_default():
feed_a_0 = constant_op.constant(0, dtype=dtypes.int32)
b, = importer.import_graph_def(
self._MakeGraphDef("""
node { name: 'A' op: 'Oii' }
node { name: 'B' op: 'Ii' input: 'A:0' }
"""),
input_map={"A": feed_a_0},
return_elements=["B"])
self.assertEqual(b.inputs[0], feed_a_0)
def testWithControlDependency(self):
with ops.Graph().as_default():
a, b = importer.import_graph_def(
self._MakeGraphDef("""
node { name: 'A' op: 'None' }
node { name: 'B' op: 'None' input: '^A' }
"""),
return_elements=["A", "B"])
self.assertEqual(b.control_inputs, [a])
def testWithRefs(self):
with ops.Graph().as_default():
a, b, c, d = importer.import_graph_def(
self._MakeGraphDef("""
node { name: 'A' op: 'Or' }
node { name: 'B' op: 'Oi' }
node { name: 'C' op: 'Iii' input: 'A:0' input: 'B:0' }
node { name: 'D' op: 'Iri' input: 'A:0' input: 'B:0' }
"""),
return_elements=["A", "B", "C", "D"])
self.assertEqual(c.inputs[0], a.outputs[0])
self.assertEqual(c.inputs[1], b.outputs[0])
self.assertEqual(d.inputs[0], a.outputs[0])
self.assertEqual(d.inputs[1], b.outputs[0])
self.assertEqual(a.outputs[0].dtype, dtypes.int32_ref)
self.assertEqual(c._input_dtypes, [dtypes.int32, dtypes.int32])
self.assertEqual(c.outputs, [])
self.assertEqual(d._input_dtypes, [dtypes.int32_ref, dtypes.int32])
self.assertEqual(d.outputs, [])
def testCyclic(self):
with ops.Graph().as_default():
a, b = importer.import_graph_def(
self._MakeGraphDef("""
node { name: 'A' op: 'Unary'
attr { key: 'T' value { type: DT_INT32 } } input: 'B:0' }
node { name: 'B' op: 'Unary'
attr { key: 'T' value { type: DT_INT32 } } input: 'A:0' }
"""),
return_elements=["A", "B"])
self.assertEqual(a.inputs[0], b.outputs[0])
self.assertEqual(b.inputs[0], a.outputs[0])
def testTypeMismatchInGraphDef(self):
with ops.Graph().as_default():
with self.assertRaises(ValueError) as e:
importer.import_graph_def(
self._MakeGraphDef("""
node { name: 'A' op: 'Oi' }
node { name: 'B' op: 'If' input: 'A:0' }
"""))
self.assertTrue(
"Cannot convert a tensor of type int32 to an input of type float" in
str(e.exception))
def testShapeWhitelist(self):
# Barrier's shape is an output vector of 2, but the
# graph says it's a scalar. This is currently whitelisted.
with ops.Graph().as_default():
_ = importer.import_graph_def(
self._MakeGraphDef("""
node { name: 'A' op: 'Barrier'
attr { key: '_output_shapes'
value { list { shape { } } } } }
"""),
return_elements=["A"],
name="import")
def testShapeWhitelistViolation(self):
# L2 loss produces a scalar shape, but the graph
# has the wrong shape, so raise an error.
with ops.Graph().as_default():
with self.assertRaises(ValueError) as e:
_ = importer.import_graph_def(
self._MakeGraphDef("""
node { name: 'A' op: 'Of' }
node { name: 'B' op: 'L2Loss'
input: 'A:0'
attr { key: 'T' value { type: DT_FLOAT } }
attr { key: '_output_shapes'
value { list { shape { dim { size: 43 } } } } } }
"""),
return_elements=["B"],
name="import")
self.assertTrue(
"Shapes () and (43,) are not compatible" in str(e.exception))
def testInvalidSignatureTooManyInputsInGraphDef(self):
with ops.Graph().as_default():
with self.assertRaises(ValueError) as e:
importer.import_graph_def(
self._MakeGraphDef("""
node { name: 'A' op: 'Oi' }
node { name: 'B' op: 'None' input: 'A:0' }
"""))
self.assertTrue("More inputs specified ('A:0') than the op expects" in
str(e.exception))
def testInvalidSignatureNotEnoughInputsInGraphDef(self):
with ops.Graph().as_default():
with self.assertRaises(ValueError) as e:
importer.import_graph_def(
self._MakeGraphDef("""
node { name: 'A' op: 'Oi' }
node { name: 'B' op: 'Iif' input: 'A:0' }
"""))
self.assertTrue("Input types mismatch (expected 'int32, float32' but "
"got 'int32')" in str(e.exception))
def testMissingInputOpInGraphDef(self):
with ops.Graph().as_default():
with self.assertRaises(ValueError) as e:
importer.import_graph_def(
self._MakeGraphDef("""
node { name: 'B' op: 'If' input: 'A:0' }
"""))
self.assertTrue("Input tensor 'A:0' not found" in str(e.exception))
def testMissingInputOpInGraphDefButAppearsInInputMap(self):
with ops.Graph().as_default():
feed_a_0 = constant_op.constant(5.0)
b, = importer.import_graph_def(
self._MakeGraphDef("""
node { name: 'B' op: 'If' input: 'A:0' }
"""),
input_map={"A:0": feed_a_0},
return_elements=["B"])
self.assertEqual(b.inputs[0], feed_a_0)
def testMissingInputTensorInGraphDef(self):
with ops.Graph().as_default():
with self.assertRaises(ValueError) as e:
importer.import_graph_def(
self._MakeGraphDef("""
node { name: 'A' op: 'Of' }
node { name: 'B' op: 'If' input: 'A:1' }
"""))
self.assertTrue("Input tensor 'A:1' not found" in str(e.exception))
def testMissingControlInputInGraphDef(self):
with ops.Graph().as_default():
with self.assertRaises(ValueError) as e:
importer.import_graph_def(
self._MakeGraphDef("""
node { name: 'B' op: 'None' input: '^A' }
"""))
self.assertTrue("Control input '^A' not found" in str(e.exception))
def testInvalidTensorNameOutputIndexInGraphDef(self):
with ops.Graph().as_default():
with self.assertRaises(ValueError) as e:
importer.import_graph_def(
self._MakeGraphDef("""
node { name: 'B' op: 'None' input: 'A:B' }
"""))
self.assertEqual("Cannot convert 'A:B' to a tensor name.",
str(e.exception))
def testInvalidTensorNameInGraphDef(self):
with ops.Graph().as_default():
with self.assertRaises(ValueError) as e:
importer.import_graph_def(
self._MakeGraphDef("""
node { name: 'B' op: 'None' input: 'A:B:0' }
"""))
self.assertEqual("Cannot convert 'A:B:0' to a tensor name.",
str(e.exception))
def testMissingReturnOperation(self):
with ops.Graph().as_default():
with self.assertRaises(ValueError) as e:
importer.import_graph_def(
self._MakeGraphDef("""
node { name: 'A' op: 'None' }
"""),
return_elements=["B"])
self.assertTrue(
"return_element 'B' not found in graph_def." in str(e.exception))
def testMissingReturnTensor(self):
with ops.Graph().as_default():
with self.assertRaises(ValueError) as e:
importer.import_graph_def(
self._MakeGraphDef("""
node { name: 'A' op: 'Oi' }
"""),
return_elements=["A:1"])
self.assertTrue(
"return_element 'A:1' not found in graph_def." in str(e.exception))
with self.assertRaises(ValueError) as e:
importer.import_graph_def(
self._MakeGraphDef("""
node { name: 'A' op: 'Oi' }
"""),
return_elements=["B:0"])
self.assertTrue(
"return_element 'B:0' not found in graph_def." in str(e.exception))
with self.assertRaises(ValueError) as e:
importer.import_graph_def(
self._MakeGraphDef("""
node { name: 'A' op: 'Oi' }
"""),
return_elements=["A:B:0"])
self.assertTrue(
"return_element 'A:B:0' not found in graph_def." in str(e.exception))
def testMissingInputMap(self):
with ops.Graph().as_default():
with self.assertRaises(ValueError) as e:
importer.import_graph_def(
self._MakeGraphDef("""
node { name: 'A' op: 'None' }
"""),
input_map={"B:0": constant_op.constant(5.0)})
self.assertTrue("not found in graph_def: [B:0]" in str(e.exception))
def testInputMapUnusedAsInput(self):
with ops.Graph().as_default():
# Mapping an unused node output should succeed.
importer.import_graph_def(
self._MakeGraphDef("""
node { name: 'A' op: 'Oi' }
"""),
input_map={"A:0": constant_op.constant(5.0)})
# Mapping a non-existent output of an existing node should fail.
with self.assertRaises(ValueError) as e:
importer.import_graph_def(
self._MakeGraphDef("""
node { name: 'A' op: 'Oi' }
"""),
input_map={"A:2": constant_op.constant(5.0)})
self.assertTrue("not found in graph_def: [A:2]" in str(e.exception))
def testInputMapTypeMismatch(self):
with ops.Graph().as_default():
with self.assertRaises(ValueError) as e:
importer.import_graph_def(
self._MakeGraphDef("""
node { name: 'A' op: 'Oi' }
node { name: 'B' op: 'Ii' input: 'A:0' }
"""),
input_map={"A:0": constant_op.constant(5.0)})
self.assertTrue(
"Cannot convert a tensor of type float32 to an input of type int32."
in str(e.exception))
def testNoReturns(self):
with ops.Graph().as_default() as g:
ret = importer.import_graph_def(
self._MakeGraphDef("""
node { name: 'A' op: 'None' }
"""))
self.assertEqual(ret, None)
a = g.get_operation_by_name("import/A")
self.assertEqual(a.type, "None")
def testOverrideNamePrefix(self):
with ops.Graph().as_default():
a, = importer.import_graph_def(
self._MakeGraphDef("""
node { name: 'A' op: 'None' }
"""),
return_elements=["A"],
name="imported_graph")
self.assertEqual(a.name, "imported_graph/A")
def testNamePrefixColocationAttrs(self):
original_graph_def = self._MakeGraphDef("""
node { name: 'A' op: 'None' }
node { name: 'B' op: 'None' attr {
key: '_class'
value { list { s: 'loc:@A' } }
} }""")
with ops.Graph().as_default():
b, = importer.import_graph_def(
original_graph_def, return_elements=["B"], name="imported_graph")
self.assertProtoEqualsVersion("""
node { name: 'imported_graph/A' op: 'None' }
node { name: 'imported_graph/B' op: 'None' attr {
key: '_class'
value { list { s: 'loc:@imported_graph/A' } }
} }""", b.graph.as_graph_def())
def testColocationWithDeviceFn(self):
  """A colocation constraint overrides the colocated op's own device function.

  Three scenarios: (1) colocated op B is forced onto A's device, (2) neither
  gets a device when A's device function returns "", (3) B inherits the
  device A was explicitly given.
  """
  original_graph_def = self._MakeGraphDef("""
      node { name: 'A' op: 'None' attr {
        key: '_class'
        value { list { s: 'loc:@A' } }
      } }
      node { name: 'B' op: 'None' attr {
        key: '_class'
        value { list { s: 'loc:@A' } }
      } }""")

  # A device function that places "A" on one device and "B" on
  # another device.  Because B is colocated with A, we test that B's
  # device function is overridden by A.
  def CustomDeviceFn(op):
    if "A" in op.name:
      return "/device:A:0"
    else:
      return "/device:B:0"

  with ops.Graph().as_default():
    with ops.device(CustomDeviceFn):
      b, = importer.import_graph_def(
          original_graph_def, return_elements=["B"], name="imported_graph")
    self.assertProtoEqualsVersion("""
        node { name: 'imported_graph/A' op: 'None' device: "/device:A:0"
          attr {
            key: '_class' value { list { s: 'loc:@imported_graph/A' } }
          }
        }
        node { name: 'imported_graph/B' op: 'None' device: "/device:A:0"
          attr {
            key: '_class' value { list { s: 'loc:@imported_graph/A' } }
        } }""", b.graph.as_graph_def())

  # Test a scenario where 'A' doesn't get a device; 'A' should
  # not have a device, but during runtime will get colocated with
  # 'B' because of the colocation attribute.
  def BDeviceFn(op):
    if "B" in op.name:
      return "/device:B:0"
    return ""

  with ops.Graph().as_default():
    with ops.device(BDeviceFn):
      b, = importer.import_graph_def(
          original_graph_def, return_elements=["B"], name="imported_graph")
    self.assertProtoEqualsVersion("""
        node { name: 'imported_graph/A' op: 'None'
          attr {
            key: '_class' value { list { s: 'loc:@imported_graph/A' } }
          }
        }
        node { name: 'imported_graph/B' op: 'None'
          attr {
            key: '_class' value { list { s: 'loc:@imported_graph/A' } }
        } }""", b.graph.as_graph_def())

  # Only A gets a device, so B inherits it implicitly.
  def ADeviceFn(op):
    if "A" in op.name:
      return "/device:A:0"
    return ""

  with ops.Graph().as_default():
    with ops.device(ADeviceFn):
      b, = importer.import_graph_def(
          original_graph_def, return_elements=["B"], name="imported_graph")
    self.assertProtoEqualsVersion("""
        node { name: 'imported_graph/A' op: 'None' device: "/device:A:0"
          attr {
            key: '_class' value { list { s: 'loc:@imported_graph/A' } }
          }
        }
        node { name: 'imported_graph/B' op: 'None' device: "/device:A:0"
          attr {
            key: '_class' value { list { s: 'loc:@imported_graph/A' } }
        } }""", b.graph.as_graph_def())
def testMultipleColocationWithDeviceFn(self):
  """An op colocated with several ops picks up the device of the placed one."""
  original_graph_def = self._MakeGraphDef("""
      node { name: 'A' op: 'None'}
      node { name: 'B' op: 'None'}
      node { name: 'C' op: 'None' attr {
        key: '_class'
        value { list { s: 'loc:@A' s: 'loc:@B' } }
      } }""")

  # A device function that places "B" on a device, and "A" is empty.
  #
  # B and C should contain "/device:B". A will not right now. But
  # because of the colocation property, at runtime it would be
  # placed with B and C.
  def CustomDeviceFn(op):
    if "B" in op.name:
      return "/device:B:0"
    return ""

  with ops.Graph().as_default():
    with ops.device(CustomDeviceFn):
      c, = importer.import_graph_def(
          original_graph_def, return_elements=["C"], name="imported_graph")
    self.assertProtoEqualsVersion("""
        node { name: 'imported_graph/A' op: 'None' }
        node { name: 'imported_graph/B' op: 'None' device: "/device:B:0" }
        node { name: 'imported_graph/C' op: 'None' device: "/device:B:0"
          attr {
            key: '_class' value {
              list { s: 'loc:@imported_graph/A'
                     s: 'loc:@imported_graph/B' }
            }
          }
        }""", c.graph.as_graph_def())
def testNamePrefixColocationAttrsMultipleImport(self):
  """Re-importing with name="" uniquifies both node names and colocation refs.

  The second import renames A -> A_1 and B -> B_1, and B_1's colocation
  attribute must be rewritten to point at A_1, not the original A.
  """
  original_graph_def = self._MakeGraphDef("""
      node { name: 'A' op: 'None' }
      node { name: 'B' op: 'None' attr {
        key: '_class'
        value { list { s: 'loc:@A' } }
      } }""")

  with ops.Graph().as_default():
    b, = importer.import_graph_def(
        original_graph_def, return_elements=["B"], name="")
    _, = importer.import_graph_def(
        original_graph_def, return_elements=["B"], name="")
    self.assertProtoEqualsVersion("""
        node { name: 'A' op: 'None' }
        node { name: 'B' op: 'None' attr {
          key: '_class'
          value { list { s: 'loc:@A' } }
        } }
        node { name: 'A_1' op: 'None' }
        node { name: 'B_1' op: 'None' attr {
          key: '_class'
          value { list { s: 'loc:@A_1' } }
        } }""", b.graph.as_graph_def())
def testNamePrefixColocationAttrsNotFound(self):
  """A colocation reference to a node missing from the GraphDef is an error."""
  original_graph_def = self._MakeGraphDef("""
      node { name: 'B' op: 'None' attr {
        key: '_class'
        value { list { s: 'loc:@A' } }
      } }""")
  with ops.Graph().as_default():
    # 'A' is referenced by B's colocation attr but never defined.
    with self.assertRaisesRegexp(ValueError, "does not exist during import"):
      importer.import_graph_def(
          original_graph_def, return_elements=["B"], name="imported_graph")
def testEmptyGraph(self):
  """Importing an empty GraphDef must leave the target graph untouched."""
  with ops.Graph().as_default() as graph:
    version_before = graph.version
    importer.import_graph_def(self._MakeGraphDef(""))
    version_after = graph.version
    self.assertEqual(version_before, version_after)
def testInvalidInputForGraphDef(self):
  """Passing a non-GraphDef value raises TypeError with a clear message."""
  with ops.Graph().as_default():
    with self.assertRaises(TypeError) as raised:
      importer.import_graph_def("")
    message = str(raised.exception)
    self.assertEqual("graph_def must be a GraphDef proto.", message)
def testInvalidInputForInputMap(self):
  """input_map values must be Tensors, or a non-empty name must be given."""
  with ops.Graph().as_default():
    # A list is not a valid input_map at all.
    with self.assertRaises(TypeError) as e:
      importer.import_graph_def(
          self._MakeGraphDef(""), input_map=[constant_op.constant(5.0)])
    self.assertEqual("input_map must be a dictionary mapping strings to "
                     "Tensor objects.", str(e.exception))
  graph_def = self._MakeGraphDef("""
      node { name: 'a' op: 'Placeholder'
             attr { key: 'dtype' value { type: DT_FLOAT } }}
      node { name: 'id' op: 'Identity' input: 'a:0'
             attr { key: 'T' value { type: DT_FLOAT } }}""")
  with ops.Graph().as_default():
    # Non-Tensor values (here a Variable) require a non-empty name scope.
    with self.assertRaises(ValueError) as e:
      importer.import_graph_def(
          graph_def,
          input_map={"a:0": variables.Variable(5.0)},
          name="")
    self.assertStartsWith(str(e.exception),
                          "tf.import_graph_def() requires a non-empty `name` "
                          "if `input_map` contains non-Tensor values.")
  with ops.Graph().as_default():
    # A plain Tensor mapping with an empty name is accepted and works.
    t, = importer.import_graph_def(
        graph_def,
        input_map={"a:0": constant_op.constant(5.0)},
        name="",
        return_elements=["id:0"])
    with self.test_session():
      self.assertEqual(5.0, t.eval())
def testInvalidInputForReturnOperations(self):
  """Non-string entries in return_elements raise a TypeError."""
  with ops.Graph().as_default():
    with self.assertRaises(TypeError) as raised:
      importer.import_graph_def(self._MakeGraphDef(""), return_elements=[7])
    message = str(raised.exception)
    self.assertEqual("return_elements must be a list of strings.", message)
def testDuplicateOperationNames(self):
  """Two nodes with the same name in the input GraphDef are rejected."""
  with ops.Graph().as_default():
    with self.assertRaises(ValueError) as e:
      importer.import_graph_def(
          self._MakeGraphDef("""
              node { name: 'A' op: 'Oi' }
              node { name: 'B' op: 'Oi' }
              node { name: 'A' op: 'Oi' }
              """))
    self.assertEqual("Duplicate name 'A' in GraphDef.", str(e.exception))
def testWithExtensionAndAttr(self):
  """Ops with list attrs (here a Pack/stack) round-trip through export/import."""
  with ops.Graph().as_default() as g:
    c = constant_op.constant(5.0, dtype=dtypes.float32, name="c")
    array_ops.stack([c, c], name="pack")
  gdef = g.as_graph_def()

  with self.test_session():
    pack, = importer.import_graph_def(gdef, return_elements=["pack"])
    self.assertAllEqual(pack.outputs[0].eval(), [5.0, 5.0])
def testWithDevice(self):
  """Imported node devices merge with the enclosing ops.device() scope."""
  with ops.Graph().as_default() as g:
    # No device.
    a = constant_op.constant(3.0, name="a")
    with ops.device("/cpu:0"):
      b = constant_op.constant(4.0, name="b")
    with ops.device("/job:worker"):
      c = constant_op.constant(5.0, name="c")
  gdef = g.as_graph_def()

  # No surrounding device scope: devices come through unchanged.
  with ops.Graph().as_default():
    a2, b2, c2 = importer.import_graph_def(
        gdef, return_elements=["a", "b", "c"])
    self.assertEqual(a.device, a2.device)
    self.assertEqual(b.device, b2.device)
    self.assertEqual(c.device, c2.device)

  # Scope specifies a field (task) none of the originals set: it is merged in.
  with ops.Graph().as_default():
    with ops.device(device.merge_device("/task:0")):
      a3, b3, c3 = importer.import_graph_def(
          gdef, return_elements=["a", "b", "c"])
      self.assertEqual("/task:0", a3.device)
      self.assertEqual("/task:0/device:CPU:0", b3.device)  # canonicalized.
      self.assertEqual(c.device + "/task:0", c3.device)

  # Scope job is overridden by an explicit job on the imported node.
  with ops.Graph().as_default():
    with ops.device(device.merge_device("/job:ps")):
      a4, b4, c4 = importer.import_graph_def(
          gdef, return_elements=["a", "b", "c"])
      self.assertEqual("/job:ps", a4.device)
      self.assertEqual("/job:ps/device:CPU:0", b4.device)  # canonicalized.
      self.assertEqual(c.device, c4.device)  # worker overrides ps.

  # Scope device type is overridden by an explicit device on the node.
  with ops.Graph().as_default():
    with ops.device(device.merge_device("/gpu:0")):
      a5, b5, c5 = importer.import_graph_def(
          gdef, return_elements=["a", "b", "c"])
      self.assertEqual("/device:GPU:0", a5.device)
      self.assertEqual("/device:CPU:0", b5.device)  # cpu overrides gpu.
      self.assertEqual(c.device + "/device:GPU:0", c5.device)
def testWithDeviceFunctionDependingOnInputs(self):
  """Device functions see fully-wired ops (inputs attached) during import."""
  with ops.Graph().as_default() as g:
    with ops.device("/job:ps"):
      v1 = constant_op.constant(1.0)
      v2 = constant_op.constant(1.0)
    _ = v1 + v2
    _ = v1 - v2
    _ = array_ops.identity(v1)
  gdef = g.as_graph_def()

  # We'll use the following device function to observe ops with two inputs.
  ops_with_two_inputs = []

  def InputCounter(op):
    if len(op.inputs) == 2:
      ops_with_two_inputs.append(op)
    return ""

  with ops.Graph().as_default() as g:
    with ops.device(InputCounter):
      importer.import_graph_def(gdef)

  # We expect to see the add and subtract, but not identity.
  self.assertEqual(2, len(ops_with_two_inputs))
def testGradient(self):
  """Gradients can be taken through an imported graph via input_map."""
  # Build a small MLP forward pass in a throwaway graph and export it.
  with ops.Graph().as_default() as g:
    inputs = array_ops.placeholder(
        dtypes.float32, shape=[None, 100], name="input")
    weights = array_ops.placeholder(
        dtypes.float32, shape=[100, 10], name="weights")
    biases = array_ops.placeholder(dtypes.float32, shape=[10], name="biases")
    activations = nn_ops.relu(
        math_ops.matmul(inputs, weights) + biases, name="activations")
    loss = math_ops.reduce_mean(activations, name="loss")
  gdef = g.as_graph_def()

  # Re-import with Variables substituted for the parameter placeholders,
  # then check that gradients flow and shapes are inferred through the
  # imported subgraph.
  with ops.Graph().as_default() as g:
    input_placeholder = array_ops.placeholder(dtypes.float32, shape=[32, 100])
    weights_var = variables.Variable(
        random_ops.truncated_normal([100, 10]), name="weights")
    biases_var = variables.Variable(array_ops.zeros([10]), name="biases")
    activations, loss = importer.import_graph_def(
        gdef,
        input_map={
            "input:0": input_placeholder,
            "weights:0": weights_var,
            "biases:0": biases_var
        },
        return_elements=["activations:0", "loss:0"])
    self.assertEqual([32, 10], activations.get_shape())
    self.assertEqual([], loss.get_shape())
    weights_grad, biases_grad = gradients_impl.gradients(
        loss, [weights_var, biases_var])
    self.assertEqual([100, 10], weights_grad.get_shape())
    self.assertEqual([10], biases_grad.get_shape())
def testLargeGraph(self):
  """A constant past the 512MB warning threshold still evaluates correctly."""
  with self.test_session():
    # The default message byte limit is 64M. Ours is 2G with a warning at 512.
    # Adding a 130M entries float32 tensor should exceed the warning, but not
    # the hard limit.
    input_shape = [130, 1000, 1000]
    tensor_input = np.ones(input_shape, dtype=np.float32)
    t = constant_op.constant(tensor_input, shape=input_shape)
    g = array_ops.identity(t)
    g.eval()
def testVersion(self):
  """producer/min_consumer versions in the GraphDef survive import verbatim."""
  v0 = versions.GRAPH_DEF_VERSION_MIN_CONSUMER
  v2 = versions.GRAPH_DEF_VERSION
  v1 = (v0 + v2) // 2  # some version strictly between the two extremes
  for producer in v0, v1, v2:
    for min_consumer in v0, v1, v2:
      with ops.Graph().as_default():
        a, = importer.import_graph_def(
            self._MakeGraphDef(
                "node { name: 'A' op: 'Oii' }",
                producer=producer,
                min_consumer=min_consumer),
            return_elements=["A"])
        self.assertEqual(a.graph.graph_def_versions.producer, producer)
        self.assertEqual(a.graph.graph_def_versions.min_consumer,
                         min_consumer)
def testVersionLow(self):
  """A producer version below the supported minimum fails at session.run."""
  with ops.Graph().as_default() as g:
    pat = (r"GraphDef producer version -1 below min producer %d supported "
           r"by TensorFlow \S+\. Please regenerate your graph.$" %
           versions.GRAPH_DEF_VERSION_MIN_PRODUCER)
    # Import succeeds; the version check fires when the graph is executed.
    importer.import_graph_def(self._MakeGraphDef("", producer=-1))
    x = constant_op.constant(
        7)  # Need at least one op to get a C++ graph generated
    with self.test_session(graph=g) as sess:
      with self.assertRaisesRegexp(Exception, pat):
        sess.run(x)
def testVersionHigh(self):
  """A min_consumer version above the current version fails at session.run."""
  with ops.Graph().as_default() as g:
    pat = (r"GraphDef min consumer version %d above current version %d "
           r"for TensorFlow \S+\. Please upgrade TensorFlow\.$" %
           (1 << 30, versions.GRAPH_DEF_VERSION))
    # Import succeeds; the version check fires when the graph is executed.
    importer.import_graph_def(self._MakeGraphDef("", min_consumer=1 << 30))
    x = constant_op.constant(
        7)  # Need at least one op to get a C++ graph generated
    with self.test_session(graph=g) as sess:
      with self.assertRaisesRegexp(Exception, pat):
        sess.run(x)
def testVersionAppliesToOpConstruction(self):
  """These tests rely on shape fns in test_ops.cc.

  The producer version is consulted when ops are constructed during import:
  'RequiresOlderGraphVersion' only accepts GraphDefs older than the current
  version.
  """
  with ops.Graph().as_default():
    importer.import_graph_def(
        self._MakeGraphDef(
            "node { name: 'A' op: 'RequiresOlderGraphVersion' }",
            producer=versions.GRAPH_DEF_VERSION - 1),
        return_elements=["A"])

  with ops.Graph().as_default():
    with self.assertRaisesWithPredicateMatch(ValueError,
                                             "Wrong graph version.*"):
      importer.import_graph_def(
          self._MakeGraphDef(
              "node { name: 'A' op: 'RequiresOlderGraphVersion' }",
              producer=versions.GRAPH_DEF_VERSION),
          return_elements=["A"])
def testDefaultAttrsAdded(self):
  """Attrs missing from the GraphDef are filled in from the op's defaults."""
  with ops.Graph().as_default():
    a = importer.import_graph_def(
        self._MakeGraphDef("""
            node { name: 'A' op: 'OpWithDefaultAttr' }
            """),
        return_elements=["A"])
    self.assertEqual(123.0, a[0].get_attr("default_float"))
def testDefaultAttrsRemoved(self):
  """Attrs unknown to this binary are stripped iff they hold the producer's default."""
  producer_op_list = op_def_pb2.OpList()
  text_format.Merge("""
      op {
        name: 'OpWithFutureDefaultAttr'
        attr { name: 'default_int' type: 'int' default_value { i: 456 } }
      }
      """, producer_op_list)

  # Attr only in producer_op_list with default value gets removed.
  with ops.Graph().as_default():
    a = importer.import_graph_def(
        self._MakeGraphDef("""
            node { name: 'A' op: 'OpWithFutureDefaultAttr'
                   attr { key: 'default_int' value { i: 456 } } }
            """),
        return_elements=["A"],
        producer_op_list=producer_op_list)
    with self.assertRaisesRegexp(ValueError, "No attr named 'default_int'"):
      a[0].get_attr("default_int")

  # Attr only in producer_op_list with non-default value is preserved.
  with ops.Graph().as_default():
    a = importer.import_graph_def(
        self._MakeGraphDef("""
            node { name: 'A' op: 'OpWithFutureDefaultAttr'
                   attr { key: 'default_int' value { i: 987 } } }
            """),
        return_elements=["A"],
        producer_op_list=producer_op_list)
    self.assertEqual(987, a[0].get_attr("default_int"))
def testFunctions(self):
  """Defun'd functions (gradients, graph captures, nesting) survive import.

  Covers: a function with a custom gradient, a function capturing a tensor
  from the containing graph, a nested function, and re-export/re-import of
  the already-imported graph.
  """
  dtype = dtypes.float32

  @function.Defun(dtype, dtype, dtype, dtype)
  def Grad(x, y, dout1, dout2):  # pylint: disable=unused-argument
    # Return the inputs for simplicity of testing. The correct return value
    # would be (dout1 + dout2, dout1 - dout2)
    return x, y

  @function.Defun(dtype, dtype, grad_func=Grad)
  def FuncWithGrad(x, y):
    return x + y, x - y

  @function.Defun(dtypes.int32)
  def ExternalTensorFunc(x):
    # c must be defined in the containing graph
    return x + c

  @function.Defun(dtypes.int32, dtypes.int32)
  def OuterFunc(x, y):

    @function.Defun(dtypes.int32)
    def InnerFunc(x):
      return x + x

    return InnerFunc(x) + y

  # Create graph with function calls and export to GraphDef
  with ops.Graph().as_default() as g1:
    p1 = array_ops.placeholder(dtype, name="p1")
    p2 = array_ops.placeholder(dtype, name="p2")
    # pylint: disable=unexpected-keyword-arg
    a, b = FuncWithGrad(p1, p2, name="f")
    c = constant_op.constant(10, dtype=dtypes.int32)
    ExternalTensorFunc(1, name="external")
    OuterFunc(10, 1, name="outer")
    # pylint: enable=unexpected-keyword-arg
  gdef = g1.as_graph_def()

  # Import GraphDef into new graph, add imported gradients, and test that
  # imported functions can be run
  with ops.Graph().as_default() as g2:
    p1, p2, a, b = importer.import_graph_def(
        gdef, return_elements=["p1:0", "p2:0", "f:0", "f:1"], name="")
    grad = gradients_impl.gradients([a], [p1, p2])

  with self.test_session(graph=g2) as sess:
    feed_dict = {p1: 1, p2: 2}
    a_val, b_val, grad_val = sess.run([a, b, grad], feed_dict=feed_dict)
    self.assertEqual(a_val, 3.0)
    self.assertEqual(b_val, -1.0)
    # Grad function returns inputs values for testing
    self.assertEqual(grad_val, [1.0, 2.0])
    self.assertEqual(sess.run("external:0"), 11)
    self.assertEqual(sess.run("outer:0"), 21)

  # Export the new graph and reimport to test that imported functions can be
  # successfully exported/imported again
  gdef = g2.as_graph_def()
  with ops.Graph().as_default() as g3:
    p1, p2, a, b = importer.import_graph_def(
        gdef, return_elements=["p1:0", "p2:0", "f:0", "f:1"], name="")
    # Create new gradient functions (in addition to the imported gradient
    # functions created in g2).
    grad = gradients_impl.gradients([a], [p1, p2])

  with self.test_session(graph=g3) as sess:
    feed_dict = {p1: 1, p2: 2}
    a_val, b_val, grad_val = sess.run([a, b, grad], feed_dict=feed_dict)
    self.assertEqual(a_val, 3.0)
    self.assertEqual(b_val, -1.0)
    self.assertEqual(grad_val, [1.0, 2.0])
    self.assertEqual(sess.run("external:0"), 11)
    self.assertEqual(sess.run("outer:0"), 21)
def testImportInsideDefun(self):
  """import_graph_def works inside a Defun body (function-local graph)."""
  g = ops.Graph()
  with g.as_default():

    @function.Defun()
    def Add2(x, y):
      return math_ops.add(x, y)

    x = constant_op.constant(3.0, dtype=dtypes.float32)
    y = constant_op.constant(-5.0, dtype=dtypes.float32)
    z = Add2(x, y, name="z")  # pylint: disable=unexpected-keyword-arg

  gdef = g.as_graph_def()

  @function.Defun()
  def TestFunc():
    # Import the exported graph from within another function body.
    return importer.import_graph_def(gdef, return_elements=["z:0"])[0]

  z = TestFunc()

  with self.test_session():
    z_val = z.eval()
    self.assertEqual(z_val, -2.0)
def testImportGraphWithFunctionTwice(self):
  """Importing the same function-bearing GraphDef twice must not conflict."""
  g = ops.Graph()
  with g.as_default():

    @function.Defun()
    def Add2(x, y):
      return math_ops.add(x, y)

    x = array_ops.placeholder(dtype=dtypes.float32, name="x")
    y = array_ops.placeholder(dtype=dtypes.float32, name="y")
    _ = Add2(x, y, name="z")  # pylint: disable=unexpected-keyword-arg

  gdef = g.as_graph_def()

  # Feed both imports the same random inputs so the outputs must agree.
  x = random_ops.random_uniform(dtype=dtypes.float32, shape=())
  y = random_ops.random_uniform(dtype=dtypes.float32, shape=())
  input_map = {"x:0": x, "y:0": y}

  with ops.name_scope("first"):
    z1 = importer.import_graph_def(gdef, return_elements=["z:0"],
                                   input_map=input_map)[0]

  with ops.name_scope("second"):
    z2 = importer.import_graph_def(gdef, return_elements=["z:0"],
                                   input_map=input_map)[0]

  with self.test_session() as sess:
    z1_val, z2_val = sess.run((z1, z2))
    self.assertAllEqual(z1_val, z2_val)
if __name__ == "__main__":
  # Run all test cases in this module under the TensorFlow test runner.
  test.main()
| apache-2.0 |
PartidoDeLaRed/pdr-wiki | extensions/ConfirmEdit/captcha.py | 47 | 7848 | #!/usr/bin/python
#
# Script to generate distorted text images for a captcha system.
#
# Copyright (C) 2005 Neil Harris
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
# http://www.gnu.org/copyleft/gpl.html
#
# Further tweaks by Brion Vibber <brion@pobox.com>:
# 2006-01-26: Add command-line options for the various parameters
# 2007-02-19: Add --dirs param for hash subdirectory splits
# Tweaks by Greg Sabino Mullane <greg@turnstep.com>:
# 2008-01-06: Add regex check to skip words containing other than a-z
import random
import math
import hashlib
from optparse import OptionParser
import os
import sys
import re
try:
import Image
import ImageFont
import ImageDraw
import ImageEnhance
import ImageOps
except:
sys.exit("This script requires the Python Imaging Library - http://www.pythonware.com/products/pil/")
nonalpha = re.compile('[^a-z]') # regex to test for suitability of words
# Does X-axis wobbly copy, sandwiched between two rotates
# Does X-axis wobbly copy, sandwiched between two rotates
def wobbly_copy(src, wob, col, scale, ang):
    """Return a copy of *src* distorted by a sinusoidal X-axis wobble.

    The image is rotated by a randomized angle, each scan line is shifted
    horizontally by a sine wave plus jitter (amplitude *wob*), then the
    rotation is undone and the result is sharpened to counter blur.
    NOTE(review): `col` is unused in the body -- kept for call-site
    compatibility.
    """
    x, y = src.size
    f = random.uniform(4*scale, 5*scale)        # wave frequency
    p = random.uniform(0, math.pi*2)            # wave phase
    rr = ang+random.uniform(-30, 30)  # vary, but not too much
    int_d = Image.new('RGB', src.size, 0)  # a black rectangle
    rot = src.rotate(rr, Image.BILINEAR)
    # Do a cheap bounding-box op here to try to limit work below
    bbx = rot.getbbox()
    if bbx == None:
        # Image is entirely black; nothing to distort.
        return src
    else:
        l, t, r, b = bbx
    # and only do lines with content on
    for i in range(t, b+1):
        # Drop a scan line in
        xoff = int(math.sin(p+(i*f/y))*wob)
        xoff += int(random.uniform(-wob*0.5, wob*0.5))
        int_d.paste(rot.crop((0, i, x, i+1)), (xoff, i))
    # try to stop blurring from building up
    int_d = int_d.rotate(-rr, Image.BILINEAR)
    enh = ImageEnhance.Sharpness(int_d)
    return enh.enhance(2)
def gen_captcha(text, fontname, fontsize, file_name):
    """Generate a captcha image.

    Renders *text* in the given TrueType font, applies several rounds of
    wobble distortion at varying angles, crops to the distorted text plus a
    border, inverts to black-on-white, and saves to *file_name* (the image
    format is inferred from the extension).
    """
    # white text on a black background
    bgcolor = 0x0
    fgcolor = 0xffffff
    # create a font object
    font = ImageFont.truetype(fontname,fontsize)
    # determine dimensions of the text
    dim = font.getsize(text)
    # create a new image significantly larger that the text
    edge = max(dim[0], dim[1]) + 2*min(dim[0], dim[1])
    im = Image.new('RGB', (edge, edge), bgcolor)
    d = ImageDraw.Draw(im)
    x, y = im.size
    # add the text to the image, centered
    d.text((x/2-dim[0]/2, y/2-dim[1]/2), text, font=font, fill=fgcolor)
    k = 3
    wob = 0.20*dim[1]/k
    rot = 45
    # Apply lots of small stirring operations, rather than a few large ones
    # in order to get some uniformity of treatment, whilst
    # maintaining randomness
    for i in range(k):
        im = wobbly_copy(im, wob, bgcolor, i*2+3, rot+0)
        im = wobbly_copy(im, wob, bgcolor, i*2+1, rot+45)
        im = wobbly_copy(im, wob, bgcolor, i*2+2, rot+90)
        rot += 30
    # now get the bounding box of the nonzero parts of the image
    bbox = im.getbbox()
    bord = min(dim[0], dim[1])/4  # a bit of a border
    im = im.crop((bbox[0]-bord, bbox[1]-bord, bbox[2]+bord, bbox[3]+bord))
    # and turn into black on white
    im = ImageOps.invert(im)
    # save the image, in format determined from filename
    im.save(file_name)
def gen_subdir(basedir, md5hash, levels):
    """Generate a subdirectory path out of the first _levels_
    characters of _hash_, and ensure the directories exist
    under _basedir_."""
    subdir = None
    for char in md5hash[:levels]:
        # Extend the relative path one character at a time.
        if subdir is None:
            subdir = char
        else:
            subdir = os.path.join(subdir, char)
        fulldir = os.path.join(basedir, subdir)
        if not os.path.exists(fulldir):
            os.mkdir(fulldir)
    return subdir
def try_pick_word(words, blacklist, verbose):
    """Join two random words from *words*; return the pair, or None if it
    contains non a-z characters or a blacklisted substring."""
    word1 = words[random.randint(0,len(words)-1)]
    word2 = words[random.randint(0,len(words)-1)]
    word = word1+word2
    if verbose:
        print "word is %s" % word
    # Reject anything outside [a-z] (accents, digits, hyphens, ...).
    if nonalpha.search(word):
        if verbose:
            print "skipping word pair '%s' because it contains non-alphabetic characters" % word
        return None
    # The concatenation may accidentally form a blacklisted word.
    for naughty in blacklist:
        if naughty in word:
            if verbose:
                print "skipping word pair '%s' because it contains blacklisted word '%s'" % (word, naughty)
            return None
    return word
def pick_word(words, blacklist, verbose):
    """Draw random word pairs until a usable one is found.

    Gives up and exits the program after 1000 failed attempts.
    """
    attempts = 0
    while attempts < 1000:
        candidate = try_pick_word(words, blacklist, verbose)
        if candidate:
            return candidate
        attempts += 1
    sys.exit("Unable to find valid word combinations")
def read_wordlist(filename):
    """Return the words in *filename*, one per line, stripped and lower-cased.

    Bug fix: the previous version opened the module-global ``wordlist``
    instead of the ``filename`` argument, so the function could only ever
    read one file (and raised NameError when no such global existed). It
    also never closed the file handle.
    """
    f = open(filename)
    try:
        return [line.strip().lower() for line in f]
    finally:
        f.close()
if __name__ == '__main__':
    """This grabs random words from the dictionary 'words' (one
    word per line) and generates a captcha image for each one,
    with a keyed salted hash of the correct answer in the filename.

    To check a reply, hash it in the same way with the same salt and
    secret key, then compare with the hash value given.
    """
    parser = OptionParser()
    parser.add_option("--wordlist", help="A list of words (required)", metavar="WORDS.txt")
    parser.add_option("--key", help="The passphrase set as $wgCaptchaSecret (required)", metavar="KEY")
    parser.add_option("--output", help="The directory to put the images in - $wgCaptchaDirectory (required)", metavar="DIR")
    parser.add_option("--font", help="The font to use (required)", metavar="FONT.ttf")
    parser.add_option("--font-size", help="The font size (default 40)", metavar="N", type='int', default=40)
    parser.add_option("--count", help="The maximum number of images to make (default 20)", metavar="N", type='int', default=20)
    parser.add_option("--blacklist", help="A blacklist of words that should not be used", metavar="FILE")
    parser.add_option("--fill", help="Fill the output directory to contain N files, overrides count, cannot be used with --dirs", metavar="N", type='int')
    parser.add_option("--dirs", help="Put the images into subdirectories N levels deep - $wgCaptchaDirectoryLevels", metavar="N", type='int')
    parser.add_option("--verbose", "-v", help="Show debugging information", action='store_true')

    opts, args = parser.parse_args()

    # Validate the required options, bailing out with a message if missing.
    if opts.wordlist:
        wordlist = opts.wordlist
    else:
        sys.exit("Need to specify a wordlist")
    if opts.key:
        key = opts.key
    else:
        sys.exit("Need to specify a key")
    if opts.output:
        output = opts.output
    else:
        sys.exit("Need to specify an output directory")
    if opts.font and os.path.exists(opts.font):
        font = opts.font
    else:
        sys.exit("Need to specify the location of a font")

    blacklistfile = opts.blacklist
    count = opts.count
    fill = opts.fill
    dirs = opts.dirs
    verbose = opts.verbose
    fontsize = opts.font_size

    if fill:
        # Only generate enough images to top the directory up to `fill`.
        count = max(0, fill - len(os.listdir(output)))

    words = read_wordlist(wordlist)
    # Keep only 4-5 letter words, not starting with 'f' (profanity guard),
    # with no doubled letters at either end (harder to misread).
    words = [x for x in words
        if len(x) in (4,5) and x[0] != "f"
        and x[0] != x[1] and x[-1] != x[-2]]

    if blacklistfile:
        blacklist = read_wordlist(blacklistfile)
    else:
        blacklist = []

    for i in range(count):
        word = pick_word(words, blacklist, verbose)
        salt = "%08x" % random.randrange(2**32)
        # 64 bits of hash is plenty for this purpose
        md5hash = hashlib.md5(key+salt+word+key+salt).hexdigest()[:16]
        filename = "image_%s_%s.png" % (salt, md5hash)
        if dirs:
            subdir = gen_subdir(output, md5hash, dirs)
            filename = os.path.join(subdir, filename)
        if verbose:
            print filename
        gen_captcha(word, font, fontsize, os.path.join(output, filename))
| gpl-2.0 |
nouiz/pylearn2 | pylearn2/training_algorithms/sgd.py | 32 | 48169 | """
Stochastic Gradient Descent and related functionality such as
learning rate adaptation, momentum, and Polyak averaging.
"""
from __future__ import division
__authors__ = "Ian Goodfellow"
__copyright__ = "Copyright 2010-2012, Universite de Montreal"
__credits__ = ["Ian Goodfellow, David Warde-Farley"]
__license__ = "3-clause BSD"
__maintainer__ = "David Warde-Farley"
__email__ = "pylearn-dev@googlegroups"
import logging
import warnings
import numpy as np
from theano.compat import six
from theano import config
from theano import function
from theano.gof.op import get_debug_values
from pylearn2.compat import OrderedDict, first_key
from pylearn2.monitor import Monitor
from pylearn2.space import CompositeSpace, NullSpace
from pylearn2.train_extensions import TrainExtension
from pylearn2.training_algorithms.training_algorithm import TrainingAlgorithm
from pylearn2.training_algorithms.learning_rule import Momentum
from pylearn2.training_algorithms.learning_rule import (
MomentumAdjustor as LRMomentumAdjustor)
from pylearn2.utils.iteration import is_stochastic, has_uniform_batch_size
from pylearn2.utils import py_integer_types, py_float_types
from pylearn2.utils import safe_zip
from pylearn2.utils import serial
from pylearn2.utils import sharedX
from pylearn2.utils import contains_nan
from pylearn2.utils import contains_inf
from pylearn2.utils import isfinite
from pylearn2.utils.data_specs import DataSpecsMapping
from pylearn2.utils.exc import reraise_as
from pylearn2.utils.timing import log_timing
from pylearn2.utils.rng import make_np_rng
log = logging.getLogger(__name__)
class SGD(TrainingAlgorithm):
"""
SGD = (Minibatch) Stochastic Gradient Descent.
A TrainingAlgorithm that does stochastic gradient descent on
minibatches of training examples.
For theoretical background on this algorithm, see Yoshua Bengio's
machine learning course notes on the subject:
http://www.iro.umontreal.ca/~pift6266/H10/notes/gradient.html
Parameters
----------
learning_rate : float
The learning rate to use. Train object callbacks can change the
learning rate after each epoch. SGD update_callbacks can change
it after each minibatch.
cost : pylearn2.costs.cost.Cost, optional
Cost object specifying the objective function to be minimized.
Optionally, may be None. In this case, SGD will call the model's
get_default_cost method to obtain the objective function.
batch_size : int, optional
The size of the batch to be used.
If not specified, the model will be asked for the batch size, so
you must have specified the batch size there.
(Some models are rigidly defined to only work with one batch size)
monitoring_batch_size : int, optional
The size of the monitoring batches.
monitoring_batches : int, optional
At the start of each epoch, we run "monitoring", to evaluate
quantities such as the validation set error.
monitoring_batches, if specified, determines the number of batches
to draw from the iterator for each monitoring dataset.
Unnecessary if not using monitoring or if `monitor_iteration_mode`
is 'sequential' and `batch_size` is specified (number of
batches will be calculated based on full dataset size).
TODO: make it possible to specify different monitoring_batches
for each monitoring dataset. The Monitor itself already supports
this.
monitoring_dataset : Dataset or dictionary, optional
If not specified, no monitoring is used.
If specified to be a Dataset, monitor on that Dataset.
If specified to be dictionary, the keys should be string names
of datasets, and the values should be Datasets. All monitoring
channels will be computed for all monitoring Datasets and will
have the dataset name and an underscore prepended to them.
monitor_iteration_mode : str, optional
The iteration mode used to iterate over the examples in all
monitoring datasets. If not specified, defaults to 'sequential'.
TODO: make it possible to specify different modes for different
datasets.
termination_criterion : instance of \
pylearn2.termination_criteria.TerminationCriterion, optional
Used to determine when the algorithm should stop running.
If not specified, runs forever--or more realistically, until
external factors halt the python process (Kansas 1977).
update_callbacks : list, optional
If specified, each member of the list should be a callable that
accepts an SGD instance as its only argument.
All callbacks will be called with this SGD instance after each
SGD step.
learning_rule : training_algorithms.learning_rule.LearningRule, optional
A learning rule computes the new parameter values given old
parameters and first-order gradients. If learning_rule is None,
sgd.SGD will update parameters according to the standard SGD
learning rule:
.. code-block:: none
param := param - learning_rate * d cost / d param
This argument allows more sophisticated learning rules, such
as SGD with momentum.
set_batch_size : bool, optional
Defaults to False.
If True, and batch_size conflicts with model.force_batch_size,
will call model.set_batch_size(batch_size) in an attempt to
change model.force_batch_size
train_iteration_mode : str, optional
Defaults to 'shuffled_sequential'.
The iteration mode to use for iterating through training examples.
batches_per_iter : int, optional
The number of batches to draw from the iterator over training
examples.
If iteration mode is 'sequential' or 'shuffled_sequential', this
is unnecessary; when unspecified we will iterate over all examples.
theano_function_mode : a valid argument to theano.function's \
'mode' parameter, optional
The theano mode to compile the updates function with. Note that
pylearn2 includes some wraplinker modes that are not bundled with
theano. See pylearn2.devtools. These extra modes let you do
things like check for NaNs at every step, or record md5 digests
of all computations performed by the update function to help
isolate problems with nondeterminism.
monitoring_costs : OrderedDict, optional
A dictionary of Cost instances. Keys should be string containing
the name of the cost. The Monitor will also include all
channels defined by these Costs, even though we don't train
using them.
seed : valid argument to np.random.RandomState, optional
        The seed used for the random number generator to be passed to the
training dataset iterator (if any)
"""
def __init__(self, learning_rate, cost=None, batch_size=None,
             monitoring_batch_size=None, monitoring_batches=None,
             monitoring_dataset=None,
             monitor_iteration_mode='sequential',
             termination_criterion=None, update_callbacks=None,
             learning_rule=None, set_batch_size=False,
             train_iteration_mode=None, batches_per_iter=None,
             theano_function_mode=None, monitoring_costs=None,
             seed=[2012, 10, 5]):
    # All parameters are documented in the class-level docstring.
    # NOTE(review): the mutable default `seed` is only read here (passed to
    # make_np_rng), never mutated, so the shared-default pitfall does not
    # bite -- but confirm make_np_rng does not mutate its argument.
    if isinstance(cost, (list, tuple, set)):
        # Summing costs via a collection was removed; direct users to the
        # replacement API.
        raise TypeError("SGD no longer supports using collections of " +
                        "Costs to represent a sum of Costs. Use " +
                        "pylearn2.costs.cost.SumOfCosts instead.")

    self.learning_rule = learning_rule
    # Stored as a shared variable so callbacks can adjust it during training.
    self.learning_rate = sharedX(learning_rate, 'learning_rate')
    self.cost = cost
    self.batch_size = batch_size
    self.set_batch_size = set_batch_size
    self.batches_per_iter = batches_per_iter
    self._set_monitoring_dataset(monitoring_dataset)
    self.monitoring_batch_size = monitoring_batch_size
    self.monitoring_batches = monitoring_batches
    self.monitor_iteration_mode = monitor_iteration_mode
    if monitoring_dataset is None:
        # Monitoring knobs make no sense without a dataset to monitor.
        if monitoring_batch_size is not None:
            raise ValueError("Specified a monitoring batch size " +
                             "but not a monitoring dataset.")
        if monitoring_batches is not None:
            raise ValueError("Specified an amount of monitoring batches " +
                             "but not a monitoring dataset.")
    self.termination_criterion = termination_criterion
    self._register_update_callbacks(update_callbacks)
    if train_iteration_mode is None:
        train_iteration_mode = 'shuffled_sequential'
    self.train_iteration_mode = train_iteration_mode
    # True until the first call to train(); used to trigger one-time setup.
    self.first = True
    self.rng = make_np_rng(seed, which_method=["randn", "randint"])
    self.theano_function_mode = theano_function_mode
    self.monitoring_costs = monitoring_costs
def _setup_monitor(self):
    """
    Set up monitor to model the objective value, learning rate,
    momentum (if applicable), and extra channels defined by
    the cost.

    This method must be called after `learning_rule.get_updates`,
    since it may have an effect on `learning_rule.add_channels_to_monitor`
    (that is currently the case for `learning_rule.RMSProp`).
    """
    if bool(self.monitoring_dataset):
        # Fall back to the training batch configuration when no explicit
        # monitoring batch size/count was given.
        if (self.monitoring_batch_size is None and
                self.monitoring_batches is None):
            self.monitoring_batch_size = self.batch_size
            self.monitoring_batches = self.batches_per_iter
        self.monitor.setup(dataset=self.monitoring_dataset,
                           cost=self.cost,
                           batch_size=self.monitoring_batch_size,
                           num_batches=self.monitoring_batches,
                           extra_costs=self.monitoring_costs,
                           mode=self.monitor_iteration_mode)
    dataset_name = first_key(self.monitoring_dataset)
    monitoring_dataset = self.monitoring_dataset[dataset_name]
    # TODO: have Monitor support non-data-dependent channels
    # The learning rate does not depend on data, so it is attached with
    # a NullSpace data spec to an arbitrary (the first) dataset.
    self.monitor.add_channel(name='learning_rate',
                             ipt=None,
                             val=self.learning_rate,
                             data_specs=(NullSpace(), ''),
                             dataset=monitoring_dataset)
    if self.learning_rule:
        self.learning_rule.add_channels_to_monitor(
            self.monitor,
            monitoring_dataset)
def setup(self, model, dataset):
    """
    Compiles the theano functions needed for the train method.

    Parameters
    ----------
    model : a Model instance
    dataset : Dataset
    """
    if self.cost is None:
        self.cost = model.get_default_cost()
    # Refuse to start from already-corrupted parameters.
    inf_params = [param for param in model.get_params()
                  if contains_inf(param.get_value())]
    if len(inf_params) > 0:
        raise ValueError("These params are Inf: "+str(inf_params))
    if any([contains_nan(param.get_value())
            for param in model.get_params()]):
        nan_params = [param for param in model.get_params()
                      if contains_nan(param.get_value())]
        raise ValueError("These params are NaN: "+str(nan_params))
    self.model = model

    self._synchronize_batch_size(model)
    model._test_batch_size = self.batch_size
    self.monitor = Monitor.get_monitor(model)
    self.monitor._sanity_check()

    # test if force batch size and batch size
    # A model that forces its batch size cannot consume a trailing
    # partial batch, so uneven datasets require an "even_*" iteration
    # mode that drops or pads the remainder.
    has_force_batch_size = getattr(model, "force_batch_size", False)
    train_dataset_is_uneven = \
        dataset.get_num_examples() % self.batch_size != 0

    has_monitoring_datasets = bool(self.monitoring_dataset)

    if has_monitoring_datasets:
        monitoring_datasets_are_uneven = \
            any(d.get_num_examples() % self.batch_size
                != 0 for d in self.monitoring_dataset.values())
    else:
        monitoring_datasets_are_uneven = False  # or True it doesn't matter

    if has_force_batch_size and train_dataset_is_uneven and \
       not has_uniform_batch_size(self.train_iteration_mode):

        raise ValueError("Dataset size is not a multiple of batch size."
                         "You should set train_iteration_mode (and "
                         "maybe monitor_iteration_mode) to "
                         "even_sequential, even_shuffled_sequential or "
                         "even_batchwise_shuffled_sequential")

    if has_force_batch_size and has_monitoring_datasets and \
       monitoring_datasets_are_uneven and \
       not has_uniform_batch_size(self.monitor_iteration_mode):

        raise ValueError("Dataset size is not a multiple of batch size."
                         "You should set monitor_iteration_mode to "
                         "even_sequential, even_shuffled_sequential or "
                         "even_batchwise_shuffled_sequential")

    data_specs = self.cost.get_data_specs(self.model)
    mapping = DataSpecsMapping(data_specs)
    space_tuple = mapping.flatten(data_specs[0], return_tuple=True)
    source_tuple = mapping.flatten(data_specs[1], return_tuple=True)

    # Build a flat tuple of Theano Variables, one for each space.
    # We want that so that if the same space/source is specified
    # more than once in data_specs, only one Theano Variable
    # is generated for it, and the corresponding value is passed
    # only once to the compiled Theano function.
    theano_args = []
    for space, source in safe_zip(space_tuple, source_tuple):
        name = '%s[%s]' % (self.__class__.__name__, source)
        arg = space.make_theano_batch(name=name,
                                      batch_size=self.batch_size)
        theano_args.append(arg)
    theano_args = tuple(theano_args)

    # Methods of `self.cost` need args to be passed in a format compatible
    # with data_specs
    nested_args = mapping.nest(theano_args)
    fixed_var_descr = self.cost.get_fixed_var_descr(model, nested_args)
    self.on_load_batch = fixed_var_descr.on_load_batch

    cost_value = self.cost.expr(model, nested_args,
                                ** fixed_var_descr.fixed_vars)

    if cost_value is not None and cost_value.name is None:
        # Concatenate the name of all tensors in theano_args !?
        cost_value.name = 'objective'

    learning_rate = self.learning_rate
    params = list(model.get_params())
    assert len(params) > 0
    # Give anonymous parameters stable names so update/gradient names
    # below are readable.
    for i, param in enumerate(params):
        if param.name is None:
            param.name = 'sgd_params[%d]' % i

    grads, updates = self.cost.get_gradients(model, nested_args,
                                             ** fixed_var_descr.fixed_vars)
    if not isinstance(grads, OrderedDict):
        raise TypeError(str(type(self.cost)) + ".get_gradients returned " +
                        "something with" + str(type(grads)) + "as its " +
                        "first member. Expected OrderedDict.")

    # Gradients and parameters must match one-to-one.
    for param in grads:
        assert param in params
    for param in params:
        assert param in grads

    for param in grads:
        if grads[param].name is None and cost_value is not None:
            grads[param].name = ('grad(%(costname)s, %(paramname)s)' %
                                 {'costname': cost_value.name,
                                  'paramname': param.name})
        assert grads[param].dtype == param.dtype

    lr_scalers = model.get_lr_scalers()

    for key in lr_scalers:
        if key not in params:
            raise ValueError(
                "Tried to scale the learning rate on " +
                str(key) + " which is not an optimization parameter.")

    log.info('Parameter and initial learning rate summary:')
    for param in params:
        param_name = param.name
        if param_name is None:
            param_name = 'anon_param'
        lr = learning_rate.get_value() * lr_scalers.get(param, 1.)
        log.info('\t' + param_name + ': ' + str(lr))

    if self.learning_rule:
        # Delegate the update rule (momentum, RMSProp, ...) entirely to
        # the learning_rule object.
        updates.update(self.learning_rule.get_updates(
            learning_rate, grads, lr_scalers))
    else:
        # Use standard SGD updates with fixed learning rate.
        updates.update(dict(safe_zip(params, [param - learning_rate *
                                              lr_scalers.get(param, 1.) * grads[param]
                                              for param in params])))

    for param in params:
        if updates[param].name is None:
            updates[param].name = 'sgd_update(' + param.name + ')'
    # Let the model constrain/clip its own updates (e.g. norm limits).
    model.modify_updates(updates)
    for param in params:
        update = updates[param]
        if update.name is None:
            update.name = 'censor(sgd_update(' + param.name + '))'
        # Only active when theano debug values are enabled.
        for update_val in get_debug_values(update):
            if contains_inf(update_val):
                raise ValueError("debug value of %s contains infs" %
                                 update.name)
            if contains_nan(update_val):
                raise ValueError("debug value of %s contains nans" %
                                 update.name)

    # Set up monitor to model the objective value, learning rate,
    # momentum (if applicable), and extra channels defined by
    # the cost.
    # We have to do that after learning_rule.get_updates has been
    # called, since it may have an effect on
    # learning_rule.add_channels_to_monitor (that is currently the case
    # for AdaDelta and RMSProp).
    self._setup_monitor()

    with log_timing(log, 'Compiling sgd_update'):
        self.sgd_update = function(theano_args,
                                   updates=updates,
                                   name='sgd_update',
                                   on_unused_input='ignore',
                                   mode=self.theano_function_mode)
    self.params = params
def train(self, dataset):
    """
    Runs one epoch of SGD training on the specified dataset.

    Parameters
    ----------
    dataset : Dataset

    Raises
    ------
    Exception
        If called before `setup`.
    RuntimeError
        If any model parameter becomes non-finite before or after
        the epoch.
    """
    if not hasattr(self, 'sgd_update'):
        raise Exception("train called without first calling setup")

    # Make sure none of the parameters have bad values
    for param in self.params:
        value = param.get_value(borrow=True)
        if not isfinite(value):
            raise RuntimeError("NaN in " + param.name)

    self.first = False
    rng = self.rng
    # Deterministic iteration modes must not receive an RNG.
    if not is_stochastic(self.train_iteration_mode):
        rng = None

    data_specs = self.cost.get_data_specs(self.model)

    # The iterator should be built from flat data specs, so it returns
    # flat, non-redundent tuples of data.
    mapping = DataSpecsMapping(data_specs)
    space_tuple = mapping.flatten(data_specs[0], return_tuple=True)
    source_tuple = mapping.flatten(data_specs[1], return_tuple=True)
    if len(space_tuple) == 0:
        # No data will be returned by the iterator, and it is impossible
        # to know the size of the actual batch.
        # It is not decided yet what the right thing to do should be.
        raise NotImplementedError(
            "Unable to train with SGD, because "
            "the cost does not actually use data from the data set. "
            "data_specs: %s" % str(data_specs))
    flat_data_specs = (CompositeSpace(space_tuple), source_tuple)

    iterator = dataset.iterator(mode=self.train_iteration_mode,
                                batch_size=self.batch_size,
                                data_specs=flat_data_specs,
                                return_tuple=True, rng=rng,
                                num_batches=self.batches_per_iter)

    on_load_batch = self.on_load_batch
    for batch in iterator:
        # Give the cost's fixed-var callbacks a look at each batch first.
        for callback in on_load_batch:
            callback(*batch)
        self.sgd_update(*batch)
        # iterator might return a smaller batch if dataset size
        # isn't divisible by batch_size
        # Note: if data_specs[0] is a NullSpace, there is no way to know
        # how many examples would actually have been in the batch,
        # since it was empty, so actual_batch_size would be reported as 0.
        actual_batch_size = flat_data_specs[0].np_batch_size(batch)
        self.monitor.report_batch(actual_batch_size)

        for callback in self.update_callbacks:
            callback(self)

    # Make sure none of the parameters have bad values
    for param in self.params:
        value = param.get_value(borrow=True)
        if not isfinite(value):
            raise RuntimeError("NaN in " + param.name)
def continue_learning(self, model):
    """
    Report whether the optimization should keep running.

    Parameters
    ----------
    model : a Model instance

    Returns
    -------
    bool
        True to continue training, False once the termination
        criterion (if any) says to stop.
    """
    criterion = self.termination_criterion
    if criterion is None:
        # Without a criterion, training never stops on its own.
        return True
    # NOTE(review): evaluates on self.model rather than the `model`
    # argument -- presumably they are the same object; verify.
    return criterion.continue_learning(self.model)
class MonitorBasedLRAdjuster(TrainExtension):
    """
    A TrainExtension that uses the on_monitor callback to adjust
    the learning rate on each epoch. It pulls out a channel
    from the model's monitor and adjusts the learning rate
    based on what happened to the monitoring channel on the last
    epoch. If the channel is greater than high_trigger times
    its previous value, the learning rate will be scaled by
    shrink_amt (which should be < 1 for this scheme to make
    sense). The idea is that in this case the learning algorithm
    is overshooting the bottom of the objective function.

    If the objective is less than high_trigger but
    greater than low_trigger times its previous value, the
    learning rate will be scaled by grow_amt (which should be > 1
    for this scheme to make sense). The idea is that the learning
    algorithm is making progress but at too slow of a rate.

    Parameters
    ----------
    high_trigger : float, optional
        See class-level docstring
    shrink_amt : float, optional
        Factor (< 1) by which to multiply the learning rate when the
        channel grew by more than high_trigger.
    low_trigger : float, optional
        See class-level docstring
    grow_amt : float, optional
        See class-level docstring
    min_lr : float, optional
        All updates to the learning rate are clipped to be at least
        this value.
    max_lr : float, optional
        All updates to the learning rate are clipped to be at most
        this value.
    dataset_name : str, optional
        If specified, use dataset_name + "_objective" as the channel
        to guide the learning rate adaptation.
    channel_name : str, optional
        If specified, use channel_name as the channel to guide the
        learning rate adaptation. Conflicts with dataset_name.
        If neither dataset_name nor channel_name is specified, uses
        "objective"
    """

    def __init__(self, high_trigger=1., shrink_amt=.99,
                 low_trigger=.99, grow_amt=1.01,
                 min_lr=1e-7, max_lr=1.,
                 dataset_name=None, channel_name=None):
        self.high_trigger = high_trigger
        self.shrink_amt = shrink_amt
        self.low_trigger = low_trigger
        self.grow_amt = grow_amt
        self.min_lr = min_lr
        self.max_lr = max_lr
        self.dataset_name = None
        # channel_name takes precedence over dataset_name when both given.
        if channel_name is not None:
            self.channel_name = channel_name
        else:
            if dataset_name is not None:
                self.channel_name = dataset_name + '_objective'
                self.dataset_name = dataset_name
            else:
                # Resolved lazily in on_monitor from the monitor's channels.
                self.channel_name = None

    def on_monitor(self, model, dataset, algorithm):
        """
        Adjusts the learning rate based on the contents of model.monitor

        Parameters
        ----------
        model : a Model instance
        dataset : Dataset
        algorithm : WRITEME
        """
        model = algorithm.model
        lr = algorithm.learning_rate
        current_learning_rate = lr.get_value()
        assert hasattr(model, 'monitor'), ("no monitor associated with "
                                           + str(model))
        monitor = model.monitor
        monitor_channel_specified = True

        if self.channel_name is None:
            # Auto-detect: accept only if exactly one channel looks like
            # an objective channel.
            monitor_channel_specified = False
            channels = [elem for elem in monitor.channels
                        if elem.endswith("objective")]
            if len(channels) < 1:
                raise ValueError(
                    "There are no monitoring channels that end "
                    "with \"objective\". Please specify either "
                    "channel_name or dataset_name.")
            elif len(channels) > 1:
                datasets = algorithm.monitoring_dataset.keys()
                raise ValueError(
                    "There are multiple monitoring channels that"
                    "end with \"_objective\". The list of available "
                    "datasets are: " +
                    str(datasets) + " . Please specify either "
                    "channel_name or dataset_name in the "
                    "MonitorBasedLRAdjuster constructor to "
                    'disambiguate.')
            else:
                self.channel_name = channels[0]
                warnings.warn('The channel that has been chosen for '
                              'monitoring is: ' + str(self.channel_name) + '.')

        try:
            v = monitor.channels[self.channel_name].val_record
        except KeyError:
            err_input = ''
            if monitor_channel_specified:
                if self.dataset_name:
                    err_input = 'The dataset_name \'' + str(
                        self.dataset_name) + '\' is not valid.'
                else:
                    err_input = 'The channel_name \'' + str(
                        self.channel_name) + '\' is not valid.'
            err_message = 'There is no monitoring channel named \'' + \
                str(self.channel_name) + '\'. You probably need to ' + \
                'specify a valid monitoring channel by using either ' + \
                'dataset_name or channel_name in the ' + \
                'MonitorBasedLRAdjuster constructor. ' + err_input
            reraise_as(ValueError(err_message))

        if len(v) < 1:
            if monitor.dataset is None:
                assert len(v) == 0
                raise ValueError(
                    "You're trying to use a monitor-based "
                    "learning rate adjustor but the monitor has no "
                    "entries because you didn't specify a "
                    "monitoring dataset.")

            raise ValueError(
                "For some reason there are no monitor entries"
                "yet the MonitorBasedLRAdjuster has been "
                "called. This should never happen. The Train"
                " object should call the monitor once on "
                "initialization, then call the callbacks. "
                "It seems you are either calling the "
                "callback manually rather than as part of a "
                "training algorithm, or there is a problem "
                "with the Train object.")
        if len(v) == 1:
            # only the initial monitoring has happened
            # no learning has happened, so we can't adjust learning rate yet
            # just do nothing
            return
        rval = current_learning_rate
        log.info("monitoring channel is {0}".format(self.channel_name))
        # Compare the last two recorded values of the channel.
        if v[-1] > self.high_trigger * v[-2]:
            rval *= self.shrink_amt
            log.info("shrinking learning rate to %f" % rval)
        elif v[-1] > self.low_trigger * v[-2]:
            rval *= self.grow_amt
            log.info("growing learning rate to %f" % rval)

        # Clip the adjusted rate into [min_lr, max_lr].
        rval = max(self.min_lr, rval)
        rval = min(self.max_lr, rval)
        lr.set_value(np.cast[lr.dtype](rval))
class PatienceBasedTermCrit(object):
    """
    A monitor-based termination criterion using a geometrically increasing
    amount of patience. If the selected channel has decreased by a certain
    proportion when comparing to the lowest value seen yet, the patience is
    set to a factor of the number of examples seen, which by default
    (patience_increase=2.) ensures the model has seen as many examples as the
    number of examples that lead to the lowest value before concluding a local
    optima has been reached.

    Note: Technically, the patience corresponds to a number of epochs to be
    independent of the size of the dataset, so be aware of that when choosing
    initial_patience.

    Parameters
    ----------
    prop_decrease : float
        The factor X in the (1 - X) * best_value threshold
    initial_patience : int
        Minimal number of epochs the model has to run before it can stop
    patience_increase : float, optional
        The factor X in the patience = X * n_iter update.
    channel_name : string, optional
        Name of the channel to examine. If None and the monitor
        has only one channel, this channel will be used; otherwise, an
        error will be raised.
    """
    def __init__(self, prop_decrease, initial_patience,
                 patience_increase=2., channel_name=None):
        self._channel_name = channel_name
        self.prop_decrease = prop_decrease
        self.patience = initial_patience
        # Start at +inf so the first recorded channel value always counts
        # as an improvement.
        self.best_value = np.inf
        self.patience_increase = patience_increase

    def __call__(self, model):
        """
        Returns True or False depending on whether the optimization should
        stop or not. The optimization should stop if it has run for a number
        of epochs superior to the patience without any improvement.

        Parameters
        ----------
        model : Model
            The model used in the experiment and from which the monitor used
            in the termination criterion will be extracted.

        Returns
        -------
        bool
            True or False, indicating if the optimization should stop or not.
        """
        monitor = model.monitor
        # In the case the monitor has only one channel, the channel_name can
        # be omitted and the criterion will examine the only channel
        # available. However, if the monitor has multiple channels, leaving
        # the channel_name unspecified will raise an error.
        if self._channel_name is None:
            if len(monitor.channels) != 1:
                raise ValueError("Only single-channel monitors are supported "
                                 "for channel_name == None")
            # list(...) is required on Python 3, where dict views are not
            # subscriptable (the old `.values()[0]` only worked on Python 2).
            v = list(monitor.channels.values())[0].val_record
        else:
            v = monitor.channels[self._channel_name].val_record
        # If the channel value decrease is higher than the threshold, we
        # update the best value to this value and we update the patience.
        if v[-1] < self.best_value * (1. - self.prop_decrease):
            # Using the max between actual patience and updated patience
            # ensures that the model will run for at least the initial
            # patience and that it would behave correctly if the user
            # chooses a dumb value (i.e. less than 1)
            self.patience = max(self.patience, len(v) * self.patience_increase)
            self.best_value = v[-1]

        return len(v) < self.patience
class AnnealedLearningRate(object):
    """
    SGD update callback (not a Train extension) that anneals the learning
    rate as 1/t, where t counts gradient descent updates performed so far.
    Use OneOverEpoch as a Train-object callback if you prefer t counted
    in epochs.

    Parameters
    ----------
    anneal_start : int
        Number of updates after which the 1/t schedule takes effect.
    """
    def __init__(self, anneal_start):
        self._initialized = False
        self._count = 0
        self._anneal_start = anneal_start

    def __call__(self, algorithm):
        """
        Apply one annealing step to the algorithm's learning rate.

        Parameters
        ----------
        algorithm : WRITEME
        """
        if not self._initialized:
            # Record the starting rate; all later values scale from it.
            self._base = algorithm.learning_rate.get_value()
            self._initialized = True
        self._count += 1
        annealed = np.cast[config.floatX](self.current_learning_rate())
        algorithm.learning_rate.set_value(annealed)

    def current_learning_rate(self):
        """
        Return the learning rate currently prescribed by the schedule.
        """
        scale = min(1, self._anneal_start / self._count)
        return self._base * scale
class ExponentialDecay(object):
    """
    SGD update callback (not a Train extension) that divides the learning
    rate by `decay_factor` after every gradient descent step, never going
    below `min_lr`.

    Parameters
    ----------
    decay_factor : float
        The learning rate at step t is given by
        `init_learning_rate / (decay_factor ** t)`
    min_lr : float
        The learning rate will be clipped to be at least this value
    """
    def __init__(self, decay_factor, min_lr):
        # Accept string values (e.g. parsed from YAML) by coercing to float.
        decay_factor = (float(decay_factor)
                        if isinstance(decay_factor, str) else decay_factor)
        min_lr = float(min_lr) if isinstance(min_lr, str) else min_lr
        assert isinstance(decay_factor, float)
        assert isinstance(min_lr, float)
        self.decay_factor = decay_factor
        self.min_lr = min_lr
        self._count = 0
        # Once min_lr is hit, stop exponentiating to avoid OverflowError.
        self._min_reached = False

    def __call__(self, algorithm):
        """
        Updates the learning rate according to the exponential decay
        schedule.

        Parameters
        ----------
        algorithm : SGD
            The SGD instance whose `learning_rate` field should be modified.
        """
        if self._count == 0:
            # Capture the base rate on the first call.
            self._base_lr = algorithm.learning_rate.get_value()
        self._count += 1

        if self._min_reached:
            new_lr = self.min_lr
        else:
            # If we keep on executing the exponentiation on each mini-batch,
            # we will eventually get an OverflowError. So make sure we
            # only do the computation until min_lr is reached.
            new_lr = self._base_lr / (self.decay_factor ** self._count)
            if new_lr <= self.min_lr:
                self._min_reached = True
                new_lr = self.min_lr

        algorithm.learning_rate.set_value(np.cast[config.floatX](new_lr))
class LinearDecay(object):
    """
    SGD update callback (not a Train extension) that linearly anneals the
    learning rate from its initial value down to `decay_factor` times that
    value, between update number `start` and update number `saturate`.

    Parameters
    ----------
    start : int
        The step at which to start decreasing the learning rate
    saturate : int
        The step at which to stop decreasing the learning rate
    decay_factor : float
        `final learning rate = decay_factor * initial learning rate`
    """
    def __init__(self, start, saturate, decay_factor):
        # Accept string values (e.g. parsed from YAML) by coercing to float.
        decay_factor = (float(decay_factor)
                        if isinstance(decay_factor, str) else decay_factor)
        start = float(start) if isinstance(start, str) else start
        saturate = float(saturate) if isinstance(saturate, str) else saturate
        assert isinstance(decay_factor, float)
        assert isinstance(start, (py_integer_types, py_float_types))
        assert isinstance(saturate, (py_integer_types, py_float_types))
        assert saturate > start
        assert start > 0
        self.start = start
        self.saturate = saturate
        self.decay_factor = decay_factor
        self._count = 0

    def __call__(self, algorithm):
        """
        Adjusts the learning rate according to the linear decay schedule

        Parameters
        ----------
        algorithm : WRITEME
        """
        if self._count == 0:
            # Capture the base rate and precompute the per-step decrement.
            self._base_lr = algorithm.learning_rate.get_value()
            self._step = ((self._base_lr - self._base_lr * self.decay_factor) /
                          (self.saturate - self.start + 1))
        self._count += 1
        if self._count < self.start:
            # Decay has not begun yet.
            new_lr = self._base_lr
        elif self._count < self.saturate:
            new_lr = self._base_lr - self._step * (self._count
                                                   - self.start + 1)
        else:
            # Past saturation: hold the final rate.
            new_lr = self._base_lr * self.decay_factor
        assert new_lr > 0
        algorithm.learning_rate.set_value(np.cast[config.floatX](new_lr))
class EpochMonitor(object):
    """
    SGD update callback (not a Train extension) that reports progress from
    inside an epoch: it can emit one-line log ticks and/or run full monitor
    updates every fixed number of batches, which helps with very large
    datasets.  Each in-epoch monitor update adds to the epoch's runtime.

    Parameters
    ----------
    model : pylearn2 model instance
        The model being monitored
    tick_rate : int (optional)
        Log one-line updates every `tick_rate` batches
    monitor_rate : int (optional)
        Call full monitor updates within epochs every `monitor_rate` batches

    YAML usage
    ----------
    model: &model !obj:pylearn2.models.mlp.MLP {
        ...
    },
    algorithm: !obj:pylearn2.training_algorithms.sgd.SGD {
        update_callbacks: [
            !obj:pylearn2.training_algorithms.sgd.EpochMonitor {
                model: *model,
                tick_rate: 20,
                monitor_rate: 110 }],
        ...
    }
    """
    def __init__(self, model, tick_rate=None, monitor_rate=None):
        self.model = model
        self.tick_rate = tick_rate
        self.monitor_rate = monitor_rate
        self.batches = 0
        self.epoch = 1

    def __call__(self, algorithm):
        monitor = self.model.monitor
        if monitor.get_epochs_seen() == self.epoch:
            # A new epoch has begun: advance and reset the batch counter.
            self.epoch += 1
            self.batches = 0
            return
        self.batches += 1
        if (self.monitor_rate and self.batches
                and self.batches % self.monitor_rate == 0):
            monitor.__call__()
        elif self.tick_rate and self.batches % self.tick_rate == 0:
            log.info('Epoch {}: {} batches seen'.format(
                self.epoch, self.batches))
class OneOverEpoch(TrainExtension):
    """
    Scales the learning rate like one over # epochs

    Parameters
    ----------
    start : int
        The epoch on which to start shrinking the learning rate
    half_life : int, optional
        How many epochs after start it will take for the learning rate to lose
        half its value for the first time (to lose the next half of its value
        will take twice as long)
    min_lr : float, optional
        The minimum value the learning rate can take on
    """

    def __init__(self, start, half_life=None, min_lr=1e-6):
        # Stashes constructor args (start, half_life, min_lr) as attributes.
        self.__dict__.update(locals())
        del self.self
        self._initialized = False
        self._count = 0
        assert start >= 0
        if half_life is None:
            # Default half-life: one epoch past `start`.
            self.half_life = start + 1
        else:
            assert half_life > 0

    def on_monitor(self, model, dataset, algorithm):
        """
        Adjusts the learning rate according to the decay schedule.

        Parameters
        ----------
        model : a Model instance
        dataset : Dataset
        algorithm : WRITEME
        """
        if not self._initialized:
            self._init_lr = algorithm.learning_rate.get_value()
            if self._init_lr < self.min_lr:
                raise ValueError("The initial learning rate is smaller than " +
                                 "the minimum allowed learning rate.")
            self._initialized = True
        # One call per monitoring step, i.e. per epoch.
        self._count += 1
        algorithm.learning_rate.set_value(np.cast[config.floatX](
            self.current_lr()))

    def current_lr(self):
        """
        Returns the learning rate currently desired by the decay schedule.
        """
        if self._count < self.start:
            scale = 1
        else:
            # Hyperbolic decay: scale halves every `half_life` epochs
            # relative to `start`.
            scale = float(self.half_life) / float(self._count -
                                                  self.start + self.half_life)
        lr = self._init_lr * scale
        clipped = max(self.min_lr, lr)
        return clipped
class LinearDecayOverEpoch(TrainExtension):
    """
    Scales the learning rate linearly on each epochs

    Parameters
    ----------
    start : int
        The epoch on which to start shrinking the learning rate
    saturate : int
        The epoch to saturate the shrinkage
    decay_factor : float
        The final value would be initial learning rate times decay_factor
    """

    def __init__(self, start, saturate, decay_factor):
        # Stashes constructor args (start, saturate, decay_factor).
        self.__dict__.update(locals())
        del self.self
        self._initialized = False
        self._count = 0
        assert isinstance(decay_factor, float)
        assert isinstance(start, (py_integer_types, py_float_types))
        assert isinstance(saturate, (py_integer_types, py_float_types))
        assert saturate > start
        assert start >= 0
        assert saturate >= start

    def setup(self, model, dataset, algorithm):
        """
        Initializes the decay schedule based on epochs_seen.

        Parameters
        ----------
        model : pylearn2.models.Model
            The model to which the training algorithm is applied.
        dataset : pylearn2.datasets.Dataset
            The dataset to which the model is applied.
        algorithm : pylearn2.training_algorithms.TrainingAlgorithm
            Describes how gradients should be updated.
        """
        # Resume support: pick up the epoch count from the monitor rather
        # than starting at zero.
        monitor = Monitor.get_monitor(model)
        self._count = monitor.get_epochs_seen()
        self._apply_learning_rate(algorithm)

    def on_monitor(self, model, dataset, algorithm):
        """
        Updates the learning rate based on the linear decay schedule.

        Parameters
        ----------
        model : a Model instance
        dataset : Dataset
        algorithm : WRITEME
        """
        self._count += 1
        self._apply_learning_rate(algorithm)

    def _apply_learning_rate(self, algorithm):
        """
        Updates the learning rate on algorithm based on the epochs elapsed.
        """
        if not self._initialized:
            # Capture the base rate and precompute the per-epoch decrement.
            self._init_lr = algorithm.learning_rate.get_value()
            self._step = ((self._init_lr - self._init_lr * self.decay_factor) /
                          (self.saturate - self.start + 1))
            self._initialized = True
        algorithm.learning_rate.set_value(np.cast[config.floatX](
            self.current_lr()))

    def current_lr(self):
        """
        Returns the learning rate currently desired by the decay schedule.
        """
        if self._count >= self.start:
            if self._count < self.saturate:
                new_lr = self._init_lr - self._step * (self._count
                                                       - self.start + 1)
            else:
                # Past saturation: hold the final rate.
                new_lr = self._init_lr * self.decay_factor
        else:
            new_lr = self._init_lr
        assert new_lr > 0
        return new_lr
class _PolyakWorker(object):
    """
    Internal helper for the PolyakAveraging extension below; do not use
    directly.  Registered as an SGD update callback, it maintains a running
    average of every model parameter in a parallel set of shared variables.

    Parameters
    ----------
    model : a Model
        The model whose parameters we want to train with Polyak averaging
    """
    def __init__(self, model):
        updates = OrderedDict()
        count = sharedX(1.)
        self.param_to_mean = OrderedDict()
        for param in model.get_params():
            mean = sharedX(param.get_value())
            assert type(mean) == type(param)
            self.param_to_mean[param] = mean
            # Incremental running mean: mean <- mean - (mean - param)/count
            updates[mean] = mean - (mean - param) / count
        updates[count] = count + 1.
        self.avg = function([], updates=updates)

    def __call__(self, algorithm):
        """
        To be called after each SGD step; folds the current parameter
        values into the running averages.

        Parameters
        ----------
        algorithm : WRITEME
        """
        self.avg()
class PolyakAveraging(TrainExtension):
    """
    See "A Tutorial on Stochastic Approximation Algorithms
    for Training Restricted Boltzmann Machines and
    Deep Belief Nets" by Kevin Swersky et al

    This functionality is still a work in progress. Currently,
    your model needs to implement "add_polyak_channels" to
    use it.

    The problem is that Polyak averaging shouldn't modify
    the model parameters. It should keep a second copy
    that it averages in the background. This second copy
    doesn't get to come back in and affect the learning process
    though.

    (IG tried having the second copy get pushed back into
    the model once per epoch, but this turned out to be
    harmful, at least in limited tests)

    So we need a cleaner interface for monitoring the
    averaged copy of the parameters, and we need to make
    sure the saved model at the end uses the averaged
    parameters, not the parameters used for computing
    the gradients during training.

    TODO: make use of the new on_save callback instead
    of duplicating Train's save_freq flag

    Parameters
    ----------
    start : int
        The epoch after which to start averaging (0 = start averaging
        immediately)
    save_path : str, optional
        WRITEME
    save_freq : int, optional
        WRITEME

    Notes
    -----
    This is usually used with a fixed, rather than annealed learning
    rate. It may be used in conjunction with momentum.
    """

    def __init__(self, start, save_path=None, save_freq=1):
        # Stashes constructor args (start, save_path, save_freq).
        self.__dict__.update(locals())
        del self.self
        self._count = 0
        assert isinstance(start, py_integer_types)
        assert start >= 0

    def on_monitor(self, model, dataset, algorithm):
        """
        Make sure Polyak-averaged model gets monitored.
        Save the model if necessary.

        Parameters
        ----------
        model : a Model instance
        dataset : Dataset
        algorithm : WRITEME
        """
        if self._count == self.start:
            # Begin averaging: register the worker as an SGD update
            # callback so it runs after every gradient step.
            self._worker = _PolyakWorker(model)
            algorithm.update_callbacks.append(self._worker)
            # HACK
            try:
                model.add_polyak_channels(self._worker.param_to_mean,
                                          algorithm.monitoring_dataset)
            except AttributeError:
                pass
        elif self.save_path is not None and self._count > self.start and \
                self._count % self.save_freq == 0:
            # Temporarily swap the averaged parameters into the model,
            # save it, then restore the training parameters.
            saved_params = OrderedDict()
            for param in model.get_params():
                saved_params[param] = param.get_value()
                param.set_value(self._worker.param_to_mean[param].get_value())
            serial.save(self.save_path, model)
            for param in model.get_params():
                param.set_value(saved_params[param])
        self._count += 1
| bsd-3-clause |
zuotingbing/spark | python/pyspark/testing/streamingutils.py | 23 | 6062 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import tempfile
import time
import unittest
from pyspark import SparkConf, SparkContext, RDD
from pyspark.streaming import StreamingContext
from pyspark.testing.utils import search_jar
# Must be same as the variable and condition defined in KinesisTestUtils.scala and modules.py
# Must be same as the variable and condition defined in KinesisTestUtils.scala and modules.py
kinesis_test_environ_var = "ENABLE_KINESIS_TESTS"
# Kinesis tests are opt-in: anything but an explicit '1' disables them.
should_skip_kinesis_tests = not os.environ.get(kinesis_test_environ_var) == '1'

if should_skip_kinesis_tests:
    kinesis_requirement_message = (
        "Skipping all Kinesis Python tests as environmental variable 'ENABLE_KINESIS_TESTS' "
        "was not set.")
else:
    # Locate the pre-built Kinesis assembly jar in the source tree.
    kinesis_asl_assembly_jar = search_jar("external/kinesis-asl-assembly",
                                          "spark-streaming-kinesis-asl-assembly-",
                                          "spark-streaming-kinesis-asl-assembly_")
    if kinesis_asl_assembly_jar is None:
        kinesis_requirement_message = (
            "Skipping all Kinesis Python tests as the optional Kinesis project was "
            "not compiled into a JAR. To run these tests, "
            "you need to build Spark with 'build/sbt -Pkinesis-asl assembly/package "
            "streaming-kinesis-asl-assembly/assembly' or "
            "'build/mvn -Pkinesis-asl package' before running this test.")
    else:
        # Prepend the jar to PYSPARK_SUBMIT_ARGS so the JVM can load it.
        existing_args = os.environ.get("PYSPARK_SUBMIT_ARGS", "pyspark-shell")
        jars_args = "--jars %s" % kinesis_asl_assembly_jar
        os.environ["PYSPARK_SUBMIT_ARGS"] = " ".join([jars_args, existing_args])
        kinesis_requirement_message = None

# A None message means all Kinesis prerequisites are satisfied.
should_test_kinesis = kinesis_requirement_message is None
class PySparkStreamingTestCase(unittest.TestCase):
    """Base class for PySpark streaming tests.

    One SparkContext is shared across the whole test class; each test gets a
    fresh StreamingContext.  Helper methods run a DStream and gather its
    output for assertions.
    """

    timeout = 30  # seconds to wait for expected stream output
    duration = .5  # streaming batch interval, in seconds

    @classmethod
    def setUpClass(cls):
        """Create the shared SparkContext with a temporary checkpoint dir."""
        class_name = cls.__name__
        conf = SparkConf().set("spark.default.parallelism", 1)
        cls.sc = SparkContext(appName=class_name, conf=conf)
        cls.sc.setCheckpointDir(tempfile.mkdtemp())

    @classmethod
    def tearDownClass(cls):
        cls.sc.stop()
        # Clean up in the JVM just in case there has been some issues in Python API
        try:
            jSparkContextOption = SparkContext._jvm.SparkContext.get()
            if jSparkContextOption.nonEmpty():
                jSparkContextOption.get().stop()
        except Exception:
            # Best-effort cleanup only; narrowed from a bare `except:` so
            # KeyboardInterrupt/SystemExit still propagate.
            pass

    def setUp(self):
        self.ssc = StreamingContext(self.sc, self.duration)

    def tearDown(self):
        if self.ssc is not None:
            self.ssc.stop(False)
        # Clean up in the JVM just in case there has been some issues in Python API
        try:
            jStreamingContextOption = StreamingContext._jvm.SparkContext.getActive()
            if jStreamingContextOption.nonEmpty():
                jStreamingContextOption.get().stop(False)
        except Exception:
            # Best-effort cleanup only; narrowed from a bare `except:`.
            pass

    def wait_for(self, result, n):
        """Poll until `result` has at least `n` items or `timeout` elapses."""
        start_time = time.time()
        while len(result) < n and time.time() - start_time < self.timeout:
            time.sleep(0.01)
        if len(result) < n:
            print("timeout after", self.timeout)

    def _take(self, dstream, n):
        """
        Return the first `n` elements in the stream (will start and stop).
        """
        results = []

        def take(_, rdd):
            if rdd and len(results) < n:
                results.extend(rdd.take(n - len(results)))

        dstream.foreachRDD(take)

        self.ssc.start()
        self.wait_for(results, n)
        return results

    def _collect(self, dstream, n, block=True):
        """
        Collect each RDDs into the returned list.

        :return: list, which will have the collected items.
        """
        result = []

        def get_output(_, rdd):
            if rdd and len(result) < n:
                r = rdd.collect()
                if r:
                    result.append(r)

        dstream.foreachRDD(get_output)

        if not block:
            return result

        self.ssc.start()
        self.wait_for(result, n)
        return result

    def _test_func(self, input, func, expected, sort=False, input2=None):
        """
        :param input: dataset for the test. This should be list of lists.
        :param func: wrapped function. This function should return PythonDStream object.
        :param expected: expected output for this testcase.
        """
        if not isinstance(input[0], RDD):
            input = [self.sc.parallelize(d, 1) for d in input]
        input_stream = self.ssc.queueStream(input)
        if input2 and not isinstance(input2[0], RDD):
            input2 = [self.sc.parallelize(d, 1) for d in input2]
        input_stream2 = self.ssc.queueStream(input2) if input2 is not None else None

        # Apply test function to stream.
        if input2:
            stream = func(input_stream, input_stream2)
        else:
            stream = func(input_stream)

        result = self._collect(stream, len(expected))
        if sort:
            self._sort_result_based_on_key(result)
            self._sort_result_based_on_key(expected)
        self.assertEqual(expected, result)

    def _sort_result_based_on_key(self, outputs):
        """Sort each output batch in place by the record key (first element)."""
        for output in outputs:
            output.sort(key=lambda x: x[0])
| apache-2.0 |
jameshicks/pydigree | pydigree/genotypes/labelledalleles.py | 1 | 7621 |
from pydigree.common import all_same_type
from .genoabc import AlleleContainer
class LabelledAlleles(AlleleContainer):
    """A chromosome represented as contiguous spans of founder-haplotype
    labels instead of concrete alleles.  Each span records which ancestral
    haplotype a run of marker positions was inherited from.
    """
    def __init__(self, spans=None, chromobj=None, nmark=None):
        # nmark (number of markers) comes from the chromosome template when
        # one is given, otherwise it must be supplied directly.
        if not (chromobj or nmark):
            raise ValueError('One of chromobj or nmark must be specified')
        self.spans = spans if spans is not None else []
        self.chromobj = chromobj
        self.nmark = nmark if self.chromobj is None else self.chromobj.nmark()
    def __eq__(self, other):
        # NOTE(review): zip() stops at the shorter span list, so a strict
        # prefix would compare equal — presumably spans always cover the
        # whole chromosome in practice; verify.
        if not isinstance(other, LabelledAlleles):
            return False
        return all(x == y for x, y in zip(self.spans, other.spans))
    def __getitem__(self, index):
        """Return the AncestralAllele label covering marker `index`."""
        for span in self.spans:
            if span.contains(index):
                return span.ancestral_allele
        raise ValueError('Index out of bounds: {}'.format(index))
    def empty_like(self):
        """A new, span-less LabelledAlleles with the same dimensions."""
        return LabelledAlleles([], chromobj=self.chromobj, nmark=self.nmark)
    @property
    def dtype(self):
        # The "element type" of this container is the container class itself.
        return type(self)
    @staticmethod
    def founder_chromosome(ind, chromidx, hap, chromobj=None, nmark=None):
        """One single span labelling the whole chromosome as founder `ind`'s
        haplotype `hap` on chromosome `chromidx`."""
        n = nmark if not chromobj else chromobj.nmark()
        spans = [InheritanceSpan(ind, chromidx, hap, 0, n)]
        return LabelledAlleles(spans=spans, chromobj=chromobj, nmark=nmark)
    def add_span(self, new_span):
        """Append a span; spans must be added left-to-right with no gaps
        and may never overwrite an existing span."""
        if any(new_span.stop < x.stop for x in self.spans):
            raise ValueError('Overwriting not supported for LabelledAlleles')
        if len(self.spans) == 0 and new_span.start > 0:
            raise ValueError('Spans not contiguous')
        if len(self.spans) > 0 and (not new_span.start == self.spans[-1].stop):
            raise ValueError('Spans not contiguous')
        self.spans.append(new_span)
    def copy_span(self, template, copy_start, copy_stop):
        """Copy the labels in [copy_start, copy_stop) from `template` into
        this chromosome, clipping template spans at the copy boundaries."""
        if not isinstance(template, LabelledAlleles):
            raise ValueError(
                'LabelledAlleles can only copy from other LabelledAlleles')
        if copy_stop is None:
            copy_stop = self.nmark
        for span in template.spans:
            if copy_start > span.stop or copy_stop < span.start:
                # These are the segments that aren't relevant
                # Ours [-------------]
                # Template [---] OR [-----]
                continue
            elif copy_start == span.start and copy_stop == span.stop:
                # Ours [----------]
                # Template [----------]
                new_span = InheritanceSpan(span.ancestor,
                                           span.chromosomeidx,
                                           span.haplotype,
                                           copy_start,
                                           copy_stop)
                self.add_span(new_span)
            elif span.contains(copy_start) and span.contains(copy_stop):
                # Ours: [----------------]
                # Template: [-----------------------]
                # The span we want is a sub-span of this span
                new_span = InheritanceSpan(span.ancestor,
                                           span.chromosomeidx,
                                           span.haplotype,
                                           copy_start,
                                           copy_stop)
                self.add_span(new_span)
            elif span.contains(copy_start):
                # Ours: [------------------]
                # Template: [--------]
                new_span = InheritanceSpan(span.ancestor,
                                           span.chromosomeidx,
                                           span.haplotype,
                                           copy_start,
                                           span.stop)
                self.add_span(new_span)
            elif span.contains(copy_stop):
                # Ours [-----------------]
                # Template: [-----------]
                new_span = InheritanceSpan(span.ancestor,
                                           span.chromosomeidx,
                                           span.haplotype,
                                           span.start,
                                           copy_stop)
                self.add_span(new_span)
                # Past the right edge of the copy window: nothing further
                # in the template can overlap, so stop early.
                return
            elif span.start > copy_start and span.stop < copy_stop:
                # This span is a sub-span of ours
                # Ours [------------------------]
                # Template [-------------]
                # Make a new span object anyway for object ownership purposes
                new_span = InheritanceSpan(span.ancestor,
                                           span.chromosomeidx,
                                           span.haplotype,
                                           span.start,
                                           span.stop)
                self.add_span(new_span)
            else:
                raise ValueError('Unforseen combination of spans')
    def delabel(self):
        """Resolve founder labels into a concrete allele container by
        copying the actual founder chromosome data span by span."""
        # Check to make sure all the founders are delabeled
        if not all_same_type(self.spans, InheritanceSpan):
            for span in self.spans:
                if isinstance(span.ancestral_chromosome, LabelledAlleles):
                    # TODO(review): LabelledAlleles has no `individual`,
                    # `chromosomeidx` or `haplotype` attributes, so building
                    # this message would raise AttributeError — confirm and fix.
                    raise ValueError('Ancestral chromosome {} {} {}'
                                     'has not been delabeled'.format(
                                         self.individual,
                                         self.chromosomeidx,
                                         self.haplotype))
        nc = self.spans[0].ancestral_chromosome.empty_like()
        for span in self.spans:
            nc.copy_span(span.ancestral_chromosome, span.start, span.stop)
        return nc
class InheritanceSpan(object):
    """A run of consecutive markers, `start` through `stop`, inherited from
    one ancestral haplotype (`ancestor`'s haplotype `haplotype` on
    chromosome `chromosomeidx`)."""
    __slots__ = ['ancestor', 'chromosomeidx', 'haplotype', 'start', 'stop']

    def __init__(self, ancestor, chromosomeidx, haplotype, start, stop):
        self.ancestor = ancestor
        self.chromosomeidx = chromosomeidx
        self.haplotype = haplotype
        self.start = start
        self.stop = stop

    def __repr__(self):
        return 'InheritanceSpan{}'.format(self.to_tuple())

    def __eq__(self, other):
        # Two spans are equal when every identifying field matches.
        return self.to_tuple() == other.to_tuple()

    @property
    def ancestral_allele(self):
        """The AncestralAllele label describing this span's origin."""
        return AncestralAllele(self.ancestor, self.haplotype)

    def contains(self, index):
        """True if marker `index` falls inside this span (stop inclusive)."""
        return self.start <= index <= self.stop

    @property
    def interval(self):
        """The (start, stop) pair for this span."""
        return self.start, self.stop

    def to_tuple(self):
        """All identifying fields as a plain tuple."""
        return (self.ancestor, self.chromosomeidx, self.haplotype,
                self.start, self.stop)

    @property
    def ancestral_chromosome(self):
        """The chromosome object this span's data was copied from."""
        return self.ancestor.genotypes[self.chromosomeidx][self.haplotype]
class AncestralAllele(object):
    """Labels an allele by founder origin: which ancestor it descends from
    and which of that ancestor's two haplotypes it came from."""
    __slots__ = ['ancestor', 'haplotype']

    def __init__(self, anc, hap):
        self.ancestor = anc
        self.haplotype = hap

    def __repr__(self):
        return 'AncestralAllele: {}: {}'.format(self.ancestor, self.haplotype)

    def __eq__(self, other):
        return (self.ancestor == other.ancestor and
                self.haplotype == other.haplotype)

    def __ne__(self, other):
        return not self == other

    def __hash__(self):
        # BUG FIX: defining __eq__ without __hash__ makes the class
        # unhashable on Python 3 (so instances could not go in sets or be
        # dict keys). Hash on the same fields __eq__ compares so equal
        # labels hash equal.
        return hash((self.ancestor, self.haplotype))
jayceyxc/hue | desktop/core/ext-py/lxml-3.3.6/src/lxml/_elementpath.py | 36 | 9311 | #
# ElementTree
# $Id: ElementPath.py 3375 2008-02-13 08:05:08Z fredrik $
#
# limited xpath support for element trees
#
# history:
# 2003-05-23 fl created
# 2003-05-28 fl added support for // etc
# 2003-08-27 fl fixed parsing of periods in element names
# 2007-09-10 fl new selection engine
# 2007-09-12 fl fixed parent selector
# 2007-09-13 fl added iterfind; changed findall to return a list
# 2007-11-30 fl added namespaces support
# 2009-10-30 fl added child element value filter
#
# Copyright (c) 2003-2009 by Fredrik Lundh. All rights reserved.
#
# fredrik@pythonware.com
# http://www.pythonware.com
#
# --------------------------------------------------------------------
# The ElementTree toolkit is
#
# Copyright (c) 1999-2009 by Fredrik Lundh
#
# By obtaining, using, and/or copying this software and/or its
# associated documentation, you agree that you have read, understood,
# and will comply with the following terms and conditions:
#
# Permission to use, copy, modify, and distribute this software and
# its associated documentation for any purpose and without fee is
# hereby granted, provided that the above copyright notice appears in
# all copies, and that both that copyright notice and this permission
# notice appear in supporting documentation, and that the name of
# Secret Labs AB or the author not be used in advertising or publicity
# pertaining to distribution of the software without specific, written
# prior permission.
#
# SECRET LABS AB AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD
# TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANT-
# ABILITY AND FITNESS. IN NO EVENT SHALL SECRET LABS AB OR THE AUTHOR
# BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY
# DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
# OF THIS SOFTWARE.
# --------------------------------------------------------------------
##
# Implementation module for XPath support. There's usually no reason
# to import this module directly; the <b>ElementTree</b> does this for
# you, if needed.
##
import re
# Splits a path expression into (operator, tag) pairs: group 1 captures
# operators/literals, group 2 captures (possibly namespace-qualified) names.
xpath_tokenizer_re = re.compile(
    "("
    "'[^']*'|\"[^\"]*\"|"
    "::|"
    "//?|"
    "\.\.|"
    "\(\)|"
    "[/.*:\[\]\(\)@=])|"
    "((?:\{[^}]+\})?[^/\[\]\(\)@=\s]+)|"
    "\s+"
    )
def xpath_tokenizer(pattern, namespaces=None):
    """Yield (operator, tag) tokens for *pattern*, expanding `prefix:name`
    tags to `{uri}name` form using the *namespaces* prefix map."""
    for op, tag in xpath_tokenizer_re.findall(pattern):
        if tag and tag[0] != "{" and ":" in tag:
            prefix, local = tag.split(":", 1)
            try:
                if not namespaces:
                    raise KeyError
                yield op, "{%s}%s" % (namespaces[prefix], local)
            except KeyError:
                raise SyntaxError("prefix %r not found in prefix map" % prefix)
        else:
            yield op, tag
def prepare_child(next, token):
    """Compile a child step: select direct children matching the tag."""
    child_tag = token[1]
    def select(result):
        for parent in result:
            for child in parent.iterchildren(child_tag):
                yield child
    return select
def prepare_star(next, token):
    """Compile a '*' step: select every child element regardless of tag."""
    def select(result):
        for parent in result:
            for child in parent.iterchildren('*'):
                yield child
    return select
def prepare_self(next, token):
    """Compile a '.' step: the identity selection."""
    def passthrough(result):
        return result
    return passthrough
def prepare_descendant(next, token):
    """Compile a '//' step: select all matching descendants.

    Consumes the following token to determine the tag; anything other
    than '*' or a plain tag is a syntax error.
    """
    step = next()
    if step[0] == "*":
        tag = "*"
    elif not step[0]:
        tag = step[1]
    else:
        raise SyntaxError("invalid descendant")
    def select(result):
        for elem in result:
            for descendant in elem.iterdescendants(tag):
                yield descendant
    return select
def prepare_parent(next, token):
    """Compile a '..' step: select each element's parent, skipping roots."""
    def select(result):
        for elem in result:
            ancestor = elem.getparent()
            if ancestor is not None:
                yield ancestor
    return select
def prepare_predicate(next, token):
    """Compile a '[...]' predicate step into a selector function.

    Tokens up to the closing ']' are collapsed into a `signature` string
    ('@' for attribute, '-' for a name/number, "'" for a quoted literal,
    '=' and '()' literally) which determines which predicate form applies.
    """
    # FIXME: replace with real parser!!! refs:
    # http://effbot.org/zone/simple-iterator-parser.htm
    # http://javascript.crockford.com/tdop/tdop.html
    signature = []
    predicate = []
    while 1:
        token = next()
        if token[0] == "]":
            break
        if token[0] and token[0][:1] in "'\"":
            # Quoted literal: strip the quotes, record it as a "'" token.
            token = "'", token[0][1:-1]
        signature.append(token[0] or "-")
        predicate.append(token[1])
    signature = "".join(signature)
    # use signature to determine predicate type
    if signature == "@-":
        # [@attribute] predicate
        key = predicate[1]
        def select(result):
            for elem in result:
                if elem.get(key) is not None:
                    yield elem
        return select
    if signature == "@-='":
        # [@attribute='value']
        key = predicate[1]
        value = predicate[-1]
        def select(result):
            for elem in result:
                if elem.get(key) == value:
                    yield elem
        return select
    if signature == "-" and not re.match("-?\d+$", predicate[0]):
        # [tag] — keep elements that have at least one child with this tag.
        tag = predicate[0]
        def select(result):
            for elem in result:
                for _ in elem.iterchildren(tag):
                    yield elem
                    break
        return select
    if signature == "-='" and not re.match("-?\d+$", predicate[0]):
        # [tag='value'] — keep elements with a child whose full text matches.
        tag = predicate[0]
        value = predicate[-1]
        def select(result):
            for elem in result:
                for e in elem.iterchildren(tag):
                    if "".join(e.itertext()) == value:
                        yield elem
                        break
        return select
    if signature == "-" or signature == "-()" or signature == "-()-":
        # [index] or [last()] or [last()-index]
        if signature == "-":
            # [index] — XPath positions are 1-based; convert to 0-based.
            index = int(predicate[0]) - 1
            if index < 0:
                if index == -1:
                    raise SyntaxError(
                        "indices in path predicates are 1-based, not 0-based")
                else:
                    raise SyntaxError("path index >= 1 expected")
        else:
            if predicate[0] != "last":
                raise SyntaxError("unsupported function")
            if signature == "-()-":
                try:
                    index = int(predicate[2]) - 1
                except ValueError:
                    raise SyntaxError("unsupported expression")
            else:
                # Bare last(): negative indexing selects the final sibling.
                index = -1
        def select(result):
            for elem in result:
                parent = elem.getparent()
                if parent is None:
                    continue
                try:
                    # FIXME: what if the selector is "*" ?
                    elems = list(parent.iterchildren(elem.tag))
                    if elems[index] is elem:
                        yield elem
                except IndexError:
                    pass
        return select
    raise SyntaxError("invalid predicate")
# Dispatch table: maps the operator token that opens a path step to the
# compiler that turns that step into a selector function.
ops = {
    "": prepare_child,
    "*": prepare_star,
    ".": prepare_self,
    "..": prepare_parent,
    "//": prepare_descendant,
    "[": prepare_predicate,
    }
# Cache of compiled path selectors; cleared once it grows past 100 entries.
_cache = {}
# --------------------------------------------------------------------
def _build_path_iterator(path, namespaces):
    """Compile *path* into a list of selector functions, with caching.

    The cache key includes the namespace map because the same path string
    compiles differently under different prefix mappings.
    """
    # compile selector pattern
    if path[-1:] == "/":
        path = path + "*"  # implicit all (FIXME: keep this?)
    cache_key = (path, namespaces and tuple(sorted(namespaces.items())) or None)
    try:
        return _cache[cache_key]
    except KeyError:
        pass
    if len(_cache) > 100:
        _cache.clear()
    if path[:1] == "/":
        raise SyntaxError("cannot use absolute path on element")
    stream = iter(xpath_tokenizer(path, namespaces))
    try:
        _next = stream.next
    except AttributeError:
        # Python 3
        _next = stream.__next__
    try:
        token = _next()
    except StopIteration:
        raise SyntaxError("empty path expression")
    selector = []
    while 1:
        try:
            selector.append(ops[token[0]](_next, token))
        except StopIteration:
            raise SyntaxError("invalid path")
        try:
            token = _next()
            if token[0] == "/":
                token = _next()
        except StopIteration:
            break
    # BUG FIX: store under the same composite key used for the lookup above.
    # Previously the selector was stored under the bare `path` string, so
    # lookups (keyed on the tuple) never hit and the cache only accumulated
    # dead entries.
    _cache[cache_key] = selector
    return selector
##
# Iterate over the matching nodes
def iterfind(elem, path, namespaces=None):
    """Iterate over every node matching *path*, starting at *elem*."""
    result = iter((elem,))
    for select in _build_path_iterator(path, namespaces):
        result = select(result)
    return result
##
# Find first matching object.
def find(elem, path, namespaces=None):
    """Return the first node matching *path* under *elem*, or None."""
    matches = iterfind(elem, path, namespaces)
    try:
        advance = getattr(matches, 'next', None)  # Python 2 iterator protocol
        if advance is None:
            return next(matches)                  # Python 3
        return advance()
    except StopIteration:
        return None
##
# Find all matching objects.
def findall(elem, path, namespaces=None):
    """Return a list of every node matching *path* under *elem*."""
    matches = iterfind(elem, path, namespaces)
    return list(matches)
##
# Find text for first matching object.
def findtext(elem, path, default=None, namespaces=None):
    """Return the text of the first match of *path*; *default* when there
    is no match; '' when the matched element has no text."""
    match = find(elem, path, namespaces)
    if match is None:
        return default
    return match.text or ''
| apache-2.0 |
jayceyxc/hue | apps/security/src/security/api/test_hive.py | 10 | 4602 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
from django.core.urlresolvers import reverse
from nose.plugins.skip import SkipTest
from nose.tools import assert_equal
from hadoop.conf import HDFS_CLUSTERS
from desktop.lib.test_utils import clear_sys_caches
from desktop.lib.django_test_util import make_logged_in_client
from desktop.lib.test_utils import grant_access, add_to_group
from libsentry import api
from security.api.hive import _massage_uri, _get_splitted_path
def mocked_get_api(user):
  # Drop-in replacement for libsentry.api.get_api: returns the in-memory
  # mock so tests never contact a real Sentry service.
  return MockHiveApi(user)
class MockHiveApi(object):
  """In-memory stand-in for the Sentry Hive API used by the mocked tests."""

  def __init__(self, user):
    self.user = user

  def list_sentry_roles_by_group(self, groupName):  # return GroupName only
    """Echo the requested group name back as the single role."""
    return [{'name': groupName}]
class TestMockedApi(object):
  """Exercises the list_sentry_roles_by_group endpoint against the mocked
  Sentry API, for both a regular user and the admin (sentry_hue) user."""

  def setUp(self):
    # Swap the real Sentry API factory for the mock, keeping the original
    # around (once) so tearDown can restore it.
    if not hasattr(api, 'OriginalSentryApi'):
      api.OriginalSentryApi = api.get_api
    api.get_api = mocked_get_api

    self.client = make_logged_in_client(username='sentry_test', groupname='test', is_superuser=False)
    self.client_admin = make_logged_in_client(username='sentry_hue', groupname='hue', is_superuser=False)
    grant_access("sentry_test", "test", "security")
    grant_access("sentry_hue", "hue", "security")
    add_to_group("sentry_test")
    add_to_group("sentry_hue")

    # NOTE(review): this unconditional SkipTest disables every test in the
    # class — presumably deliberate, but confirm before relying on coverage.
    raise SkipTest

  def tearDown(self):
    # Restore the real API factory for subsequent test classes.
    api.get_api = api.OriginalSentryApi

  def test_list_sentry_roles_by_group(self):
    # Non-admin user with an empty group name sees the wildcard role.
    response = self.client.post(reverse("security:list_sentry_roles_by_group"), {'groupName': ''})
    assert_equal('*', json.loads(response.content).get('roles', [{'name': ''}])[0]['name'], response.content)

    response = self.client.post(reverse("security:list_sentry_roles_by_group"), {'groupName': 'test'})
    assert_equal('test', json.loads(response.content).get('roles', [{'name': ''}])[0]['name'], response.content)

    # Admin user with an empty group name gets no role name back.
    response = self.client_admin.post(reverse("security:list_sentry_roles_by_group"), {'groupName': ''})
    assert_equal(None, json.loads(response.content).get('roles', [{'name': ''}])[0]['name'], response.content)

    response = self.client_admin.post(reverse("security:list_sentry_roles_by_group"), {'groupName': 'test'})
    assert_equal('test', json.loads(response.content).get('roles', [{'name': ''}])[0]['name'], response.content)
class TestUtils(object):
  """Unit tests for the Hive security helpers _massage_uri and
  _get_splitted_path."""

  def test_massage_uri(self):
    # With a logical HDFS name configured, scheme-less and hdfs:/// URIs
    # are rewritten onto the logical name.
    finish = HDFS_CLUSTERS['default'].LOGICAL_NAME.set_for_testing('namenode')
    clear_sys_caches()

    try:
      assert_equal('', _massage_uri(''))
      assert_equal('namenode/data', _massage_uri('hdfs:///data'))
      assert_equal('hdfs://nn:11/data', _massage_uri('hdfs://nn:11/data'))
      assert_equal('hdfs://logical/data', _massage_uri('hdfs://logical/data'))
      assert_equal('namenode/data', _massage_uri('/data'))
      assert_equal('file:///data', _massage_uri('file:///data'))
    finally:
      finish()

    # With only fs.defaultFS configured, that URI prefix is used instead.
    finish = HDFS_CLUSTERS['default'].FS_DEFAULTFS.set_for_testing('hdfs://fs_defaultfs:8021')
    clear_sys_caches()

    try:
      assert_equal('', _massage_uri(''))
      assert_equal('hdfs://fs_defaultfs:8021/data', _massage_uri('hdfs:///data'))
      assert_equal('hdfs://nn:11/data', _massage_uri('hdfs://nn:11/data'))
      assert_equal('hdfs://logical/data', _massage_uri('hdfs://logical/data'))
      assert_equal('hdfs://fs_defaultfs:8021/data', _massage_uri('/data'))
      assert_equal('file:///data', _massage_uri('file:///data'))
    finally:
      finish()

  def test_get_splitted_path(self):
    # Splits "db.table.column" into its three components; extra trailing
    # segments beyond the column are discarded.
    assert_equal(('', '', ''), _get_splitted_path(''))
    assert_equal(('db', '', ''), _get_splitted_path('db'))
    assert_equal(('db', 'table', ''), _get_splitted_path('db.table'))
    assert_equal(('db', 'table', 'column'), _get_splitted_path('db.table.column'))
    assert_equal(('db', 'table', 'column'), _get_splitted_path('db.table.column.blah'))
| apache-2.0 |
Intel-Corporation/tensorflow | tensorflow/python/grappler/controller.py | 39 | 4640 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Controller Class."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from collections import defaultdict
class Controller(object):
  """Controller class.

  Base class for device-placement controllers: indexes the metagraph's
  nodes and their fanout, pre-places nodes whose device is forced by the
  cluster's constraints, and exposes the remaining "important" ops that a
  subclass must place.
  """

  def __init__(self, item, cluster):
    """Controller class initializer.

    Args:
      item: The metagraph to place wrapped in a cluster.
      cluster: A cluster of devices on which to place the item.
    """
    self.item = item

    # Index every graph node by name for O(1) lookup.
    self._node = {}
    for node in item.metagraph.graph_def.node:
      self._node[node.name] = node

    # Reverse adjacency: for each node name, the list of consumer nodes.
    self._fanout = defaultdict(lambda: [])
    for node in item.metagraph.graph_def.node:
      for fanin in self._get_node_fanin(node):
        self._fanout[fanin.name].append(node)

    important_op_names = item.IdentifyImportantOps(sort_topologically=True)

    # List of important ops (these are the ops to place) sorted in topological
    # order. The order of this collection is deterministic.
    self.important_ops = []
    for name in important_op_names:
      self.important_ops.append(self._node[name])

    self.node_properties = item.GetOpProperties()

    self.cluster = cluster
    self.devices = cluster.ListDevices()

    self.colocation_constraints = item.GetColocationGroups()

    self.placement_constraints = cluster.GetSupportedDevices(item)
    for node_name, dev in self.placement_constraints.items():
      if len(dev) == 1:
        # Place the node on the supported device
        node = self._node[node_name]
        node.device = dev[0]
        fanout = self.get_node_fanout(node)
        # Update the fanout of the fanin to bypass the node
        for fanin in self._get_node_fanin(node):
          fanout_of_fanin = self.get_node_fanout(fanin)
          fanout_of_fanin += fanout
          fanout_of_fanin.remove(node)
        # Remove node from the list of important ops since we don't need to
        # place the node.
        if node in self.important_ops:
          self.important_ops.remove(node)
          important_op_names.remove(node.name)

    # List of important op names, in non deterministic order.
    self.important_op_names = frozenset(important_op_names)

  @property
  def input_graph_def(self):
    """The GraphDef of the metagraph being placed."""
    return self.item.metagraph.graph_def

  @property
  def num_devices(self):
    """Number of devices available in the cluster."""
    return len(self.devices)

  def get_node_by_name(self, node_name):
    """Return the NodeDef registered under `node_name`."""
    return self._node[node_name]

  def get_node_fanout(self, node):
    """Return the (mutable) list of consumers of `node`."""
    return self._fanout[node.name]

  def get_placements(self, *args, **kwargs):
    """Returns: Two TF ops.

    Args:
      *args: "".
      **kwargs: "".

    Returns:
      y_preds: tensor of size [batch_size, num_ops]
      log_probs: python dict of at least two fields: "sample", "target" each
      containing a tensor of size [batch_size], corresponding to the log_probs.
    """
    raise NotImplementedError

  def eval_placement(self, sess, *args, **kwargs):
    """At this time, this method evaluates ONLY ONE placement.

    Args:
      sess: a tf.Session() object used to retrieve cached assignment info.
      *args: "".
      **kwargs: "".

    Returns:
      run_time: scalar
    """
    raise NotImplementedError

  def export_placement(self, metagraph):
    """Annotate the placement onto the specified metagraph.

    Args:
      metagraph: the metagraph to annotate with the placement.
    """
    # Only important ops received a placement decision; copy their devices.
    for node in metagraph.graph_def.node:
      if node.name in self.important_op_names:
        node.device = self.get_node_by_name(node.name).device

  # Get the nodes in the immediate fanin of node.
  # Beware: this doesn't take into account the nodes that may be skipped
  # since placement constraints force their placement.
  def _get_node_fanin(self, node):
    input_ops = []
    for fanin_name in node.input:
      # Strip the control-dependency marker and any output-slot suffix.
      if fanin_name[0] == "^":
        fanin_name = fanin_name[1:]
      fanin_name = fanin_name.split(":")[0]
      input_ops.append(self.get_node_by_name(fanin_name))
    return input_ops
| apache-2.0 |
IndonesiaX/edx-platform | common/lib/xmodule/xmodule/modulestore/edit_info.py | 201 | 2843 | """
Access methods to get EditInfo for xblocks
"""
from xblock.fields import XBlockMixin
from abc import ABCMeta, abstractmethod
class EditInfoMixin(XBlockMixin):
    """
    Provides the interfaces for getting the edit info from XBlocks
    """
    # Every property simply delegates to the runtime, which is expected to
    # implement the EditInfoRuntimeMixin interface defined below.
    @property
    def edited_by(self):
        """
        The user id of the last user to change this xblock content, children, or settings.
        """
        return self.runtime.get_edited_by(self)

    @property
    def edited_on(self):
        """
        The datetime of the last change to this xblock content, children, or settings.
        """
        return self.runtime.get_edited_on(self)

    @property
    def subtree_edited_by(self):
        """
        The user id of the last user to change content, children, or settings in this xblock's subtree
        """
        return self.runtime.get_subtree_edited_by(self)

    @property
    def subtree_edited_on(self):
        """
        The datetime of the last change content, children, or settings in this xblock's subtree
        """
        return self.runtime.get_subtree_edited_on(self)

    @property
    def published_by(self):
        """
        The user id of the last user to publish this specific xblock (or a previous version of it).
        """
        return self.runtime.get_published_by(self)

    @property
    def published_on(self):
        """
        The datetime of the last time this specific xblock was published.
        """
        return self.runtime.get_published_on(self)
class EditInfoRuntimeMixin(object):
    """
    An abstract mixin class for the functions which the :class: `EditInfoMixin` methods call on the runtime
    """
    # NOTE(review): `__metaclass__` is the Python-2 spelling; under Python 3
    # it is an ordinary class attribute and abstractmethod is not enforced —
    # confirm the target interpreter before changing.
    __metaclass__ = ABCMeta

    @abstractmethod
    def get_edited_by(self, xblock):
        """
        The datetime of the last change to this xblock content, children, or settings.
        """
        pass

    @abstractmethod
    def get_edited_on(self, xblock):
        """
        The datetime of the last change to this xblock content, children, or settings.
        """
        pass

    @abstractmethod
    def get_subtree_edited_by(self, xblock):
        """
        The user id of the last user to change content, children, or settings in this xblock's subtree
        """
        pass

    @abstractmethod
    def get_subtree_edited_on(self, xblock):
        """
        The datetime of the last change content, children, or settings in this xblock's subtree
        """
        pass

    @abstractmethod
    def get_published_by(self, xblock):
        """
        The user id of the last user to publish this specific xblock (or a previous version of it).
        """
        pass

    @abstractmethod
    def get_published_on(self, xblock):
        """
        The datetime of the last time this specific xblock was published.
        """
        pass
| agpl-3.0 |
nmarley/dash | test/functional/test_framework/coverage.py | 6 | 3389 | #!/usr/bin/env python3
# Copyright (c) 2015-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Utilities for doing coverage analysis on the RPC interface.
Provides a way to track which RPC commands are exercised during
testing.
"""
import os
# Name of the per-test-dir file listing every RPC command the node exposes.
REFERENCE_FILENAME = 'rpc_interface.txt'
class AuthServiceProxyWrapper(object):
    """
    Wraps an AuthServiceProxy and records every RPC method invoked,
    optionally appending each service name to a coverage log file.
    """
    def __init__(self, auth_service_proxy_instance, coverage_logfile=None):
        """
        Kwargs:
            auth_service_proxy_instance (AuthServiceProxy): the instance
                being wrapped.
            coverage_logfile (str): if specified, write each service_name
                out to a file when called.
        """
        self.auth_service_proxy_instance = auth_service_proxy_instance
        self.coverage_logfile = coverage_logfile

    def __getattr__(self, name):
        attr = getattr(self.auth_service_proxy_instance, name)
        if not isinstance(attr, type(self.auth_service_proxy_instance)):
            # Plain attribute (not a sub-proxy): hand it back unwrapped.
            return attr
        return AuthServiceProxyWrapper(attr, self.coverage_logfile)

    def __call__(self, *args, **kwargs):
        """
        Delegate the RPC call to the wrapped proxy, then record which
        RPC method was exercised.
        """
        result = self.auth_service_proxy_instance.__call__(*args, **kwargs)
        self._log_call()
        return result

    def _log_call(self):
        # Append the service name to the coverage log, when one is set.
        rpc_method = self.auth_service_proxy_instance._service_name
        if self.coverage_logfile:
            with open(self.coverage_logfile, 'a+', encoding='utf8') as f:
                f.write("%s\n" % rpc_method)

    def __truediv__(self, relative_uri):
        wrapped = self.auth_service_proxy_instance / relative_uri
        return AuthServiceProxyWrapper(wrapped, self.coverage_logfile)

    def get_request(self, *args, **kwargs):
        self._log_call()
        return self.auth_service_proxy_instance.get_request(*args, **kwargs)
def get_filename(dirname, n_node):
    """
    Build a coverage-log filename unique to the test process ID and node.

    This file will contain a list of RPC commands covered.
    """
    pid = str(os.getpid())
    name = "coverage.pid%s.node%s.txt" % (pid, str(n_node))
    return os.path.join(dirname, name)
def write_all_rpc_commands(dirname, node):
    """
    Write out a list of all RPC functions available in `bitcoin-cli` for
    coverage comparison. This only happens once per coverage directory.

    Args:
        dirname (str): temporary test dir
        node (AuthServiceProxy): client

    Returns:
        bool. if the RPC interface file was written.
    """
    ref_path = os.path.join(dirname, REFERENCE_FILENAME)
    if os.path.isfile(ref_path):
        return False

    commands = set()
    for raw_line in node.help().split('\n'):
        stripped = raw_line.strip()
        # Ignore blanks and headers
        if stripped and not stripped.startswith('='):
            commands.add("%s\n" % stripped.split()[0])

    with open(ref_path, 'w', encoding='utf8') as f:
        f.writelines(list(commands))
    return True
| mit |
Digenis/scrapy | scrapy/linkextractors/regex.py | 23 | 1360 | import re
from six.moves.urllib.parse import urljoin
from w3lib.html import remove_tags, replace_entities, replace_escape_chars, get_base_url
from scrapy.link import Link
from .sgml import SgmlLinkExtractor
# Matches <a ...> elements, capturing the raw href value, the remainder of
# the opening tag, and the anchor text up to the closing </a>.
# NOTE(review): the quoted-href alternatives use the class [.#], which only
# matches literal dots and hashes — looks suspect; verify against upstream.
linkre = re.compile(
    "<a\s.*?href=(\"[.#]+?\"|\'[.#]+?\'|[^\s]+?)(>|\s.*?>)(.*?)<[/ ]?a>",
    re.DOTALL | re.IGNORECASE)
def clean_link(link_text):
    """Remove leading and trailing whitespace and quote punctuation from a
    raw href value."""
    return link_text.strip("\t\r\n '\"")
class RegexLinkExtractor(SgmlLinkExtractor):
    """High performant link extractor"""

    def _extract_links(self, response_text, response_url, response_encoding, base_url=None):
        # Extract (href, tag-rest, anchor-text) triples with the module-level
        # `linkre` regex, then clean and absolutize each URL.
        def clean_text(text):
            # Decode, strip tags, unescape entities, trim whitespace.
            return replace_escape_chars(remove_tags(text.decode(response_encoding))).strip()

        def clean_url(url):
            clean_url = ''
            try:
                clean_url = urljoin(base_url, replace_entities(clean_link(url.decode(response_encoding))))
            except ValueError:
                # Malformed URL: fall through and return the empty string.
                pass
            return clean_url

        if base_url is None:
            base_url = get_base_url(response_text, response_url, response_encoding)

        links_text = linkre.findall(response_text)
        return [Link(clean_url(url).encode(response_encoding),
                     clean_text(text))
                for url, _, text in links_text]
| bsd-3-clause |
kampanita/pelisalacarta | python/main-classic/servers/allmyvideos.py | 6 | 5438 | # -*- coding: utf-8 -*-
# ------------------------------------------------------------
# pelisalacarta - XBMC Plugin
# Conector para allmyvideos
# http://blog.tvalacarta.info/plugin-xbmc/pelisalacarta/
# ------------------------------------------------------------
import re
from core import logger
from core import scrapertools
def test_video_exists(page_url):
    """Check whether the video still exists on allmyvideos.

    Returns an (exists, message) tuple; the message is only populated when
    the file is missing or has been removed.
    """
    logger.info("pelisalacarta.servers.allmyvideos test_video_exists(page_url='%s')" % page_url)
    # Fetch the page through the anonymouse proxy; removed/deleted files
    # expose one of these markers in the HTML.
    data = scrapertools.cache_page("http://anonymouse.org/cgi-bin/anon-www.cgi/" + page_url)
    removal_markers = (
        "<b>File Not Found</b>",
        "<b>Archivo no encontrado</b>",
        '<b class="err">Deleted',
        '<b class="err">Removed',
        '<font class="err">No such',
    )
    for marker in removal_markers:
        if marker in data:
            return False, "No existe o ha sido borrado de allmyvideos"
    return True, ""
def get_video_url(page_url, premium=False, user="", password="", video_password=""):
    """Resolve the direct media URL(s) for an allmyvideos page.

    Returns a list of ``[label, media_url]`` pairs (empty if no media URL
    could be extracted). The premium/user/password parameters are accepted
    for interface compatibility but unused here.
    """
    logger.info("pelisalacarta.servers.allmyvideos url=%s" % page_url)
    # Normalise the URL to the embedded-player form.
    videoid = scrapertools.get_match(page_url, "http://allmyvideos.net/([a-z0-9A-Z]+)")
    page_url = "http://amvtv.net/embed-" + videoid + "-728x400.html"
    data = scrapertools.cachePage(page_url)
    # "Access denied" in the body indicates geo-blocking; fall back to a proxy.
    if "Access denied" in data:
        geobloqueo = True
    else:
        geobloqueo = False
    if geobloqueo:
        # url = "http://www.anonymousbrowser.xyz/hide.php"
        # post = "go=%s" % page_url
        url = "http://www.videoproxy.co/hide.php"
        post = "go=%s" % page_url
        location = scrapertools.get_header_from_response(url, post=post, header_to_get="location")
        # url = "http://www.anonymousbrowser.xyz/" + location
        url = "http://www.videoproxy.co/" + location
        data = scrapertools.cachePage(url)
    # Extract the media URL from the embedded player configuration.
    media_url = scrapertools.find_single_match(data, '"file" : "([^"]+)",')
    video_urls = []
    if media_url != "":
        if geobloqueo:
            # Route the media URL itself through the proxy as well.
            # url = "http://www.anonymousbrowser.xyz/hide.php"
            url = "http://www.videoproxy.co/hide.php"
            post = "go=%s" % media_url
            location = scrapertools.get_header_from_response(url, post=post, header_to_get="location")
            # media_url = "http://www.anonymousbrowser.xyz/" + location + "&direct=false"
            media_url = "http://www.videoproxy.co/" + location + "&direct=false"
        else:
            media_url += "&direct=false"
        # Label is the file extension (last 4 chars of the name) + server tag.
        video_urls.append([scrapertools.get_filename_from_url(media_url)[-4:] + " [allmyvideos]", media_url])
    for video_url in video_urls:
        logger.info("pelisalacarta.servers.allmyvideos %s - %s" % (video_url[0], video_url[1]))
    return video_urls
# Encuentra vídeos del servidor en el texto pasado
def find_videos(data):
    """Find allmyvideos URLs in *data*; return a list of [title, url, server]."""
    # Pre-seed the "seen" set with known false-positive site URLs so they
    # are never reported as videos.
    encontrados = set([
        "http://allmyvideos.net/embed-theme.html",
        "http://allmyvideos.net/embed-jquery.html",
        "http://allmyvideos.net/embed-s.html",
        "http://allmyvideos.net/embed-images.html",
        "http://allmyvideos.net/embed-faq.html",
        "http://allmyvideos.net/embed-embed.html",
        "http://allmyvideos.net/embed-ri.html",
        "http://allmyvideos.net/embed-d.html",
        "http://allmyvideos.net/embed-css.html",
        "http://allmyvideos.net/embed-js.html",
        "http://allmyvideos.net/embed-player.html",
        "http://allmyvideos.net/embed-cgi.html",
        "http://allmyvideos.net/embed-i.html",
        "http://allmyvideos.net/images",
        "http://allmyvideos.net/theme",
        "http://allmyvideos.net/xupload",
        "http://allmyvideos.net/s",
        "http://allmyvideos.net/js",
        "http://allmyvideos.net/jquery",
        "http://allmyvideos.net/login",
        "http://allmyvideos.net/make",
        "http://allmyvideos.net/i",
        "http://allmyvideos.net/faq",
        "http://allmyvideos.net/tos",
        "http://allmyvideos.net/premium",
        "http://allmyvideos.net/checkfiles",
        "http://allmyvideos.net/privacy",
        "http://allmyvideos.net/refund",
        "http://allmyvideos.net/links",
        "http://allmyvideos.net/contact",
    ])
    devuelve = []
    # Matches id in forms like:
    # http://allmyvideos.net/3sw6tewl21sn
    # http://allmyvideos.net/embed-3sw6tewl21sn.html
    # http://www.cinetux.org/video/allmyvideos.php?id=3sw6tewl21sn
    patronvideos = 'allmyvideos.(?:net/|php\?id=)(?:embed-|)([a-z0-9]+)'
    logger.info("pelisalacarta.servers.allmyvideos find_videos #" + patronvideos + "#")
    for match in re.findall(patronvideos, data, re.DOTALL):
        url = "http://allmyvideos.net/" + match
        if url in encontrados:
            logger.info(" url duplicada=" + url)
            continue
        logger.info(" url=" + url)
        devuelve.append(["[allmyvideos]", url, 'allmyvideos'])
        encontrados.add(url)
    return devuelve
| gpl-3.0 |
IV-GII/SocialCookies | Twitter_Python/twitter.py | 1 | 1938 | import tweepy
import web
from web.contrib.template import render_mako
from web import form
web.config.debug=False
# NOTE(review): OAuth credentials are hard-coded and committed to the repo --
# they should be revoked and loaded from configuration/environment instead.
consumer_key = 'acisTloOHSws1UA289etVw'
consumer_secret = 'b9eVS9CeFxIFx3jIwKkeeQPfsO3hlrAdWNOfIItQEgU'
access_token = '2308079432-cDmExMexRNNwAEvIUdrtKQFXgfIA1vQPeM4mLRC'
access_token_secret = 'eUFjHJf3Wbqzo6NKxJRHE0HapuzNBzLynA8nOTOPygqis'
# Authenticate against the Twitter API via tweepy's OAuth handler.
auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_token_secret)
api = tweepy.API(auth)
# URL routing: "/" -> Home (search form), "/Mostrar" -> Mostrar (results).
urls=(
    '/', 'Home',
    '/Mostrar','Mostrar',
)
app=web.application(urls, globals())
render=render_mako(directories=['templates'],input_encoding='utf-8',output_encoding='utf-8')
# Search form: one required text box plus a submit button.
formulario=form.Form(
    form.Textbox("busqueda", form.notnull, description="busqueda"),
    form.Button("Enviar"),
)
class Home:
    # GET "/": render the index template with the search form.
    def GET(self):
        f=formulario()
        return render.index(formu=f.render())
class Mostrar:
    # POST "/Mostrar": search Twitter for the submitted term and return an
    # HTML page of clickable result images.
    def POST(self):
        entrada=web.input()
        busq=entrada.busqueda
        tuits=[]
        # Collect up to 100 tweets; turn each attached media entity into an
        # <img> tag wired to the client-side selection handler below.
        for tweet in tweepy.Cursor(api.search,q=busq,count=100,result_type="photo",include_entities=True).items(100):
            if 'media' in tweet.entities:
                for image in tweet.entities['media']:
                    tuits.append("<img src='%s' onClick='imgFunction(this, \"%s\")' />" % (image['media_url'],image['media_url']))
        tuits = ''.join(tuits)
        # Inline script: clicking an image toggles its opacity and keeps the
        # chosen URLs in the client-side `elegidos` array.
        return """
        <script type='text/javascript'>
            var elegidos = new Array();
            function imgFunction(objeto, url){
                if(objeto.style.opacity==1){
                    objeto.style.opacity=0.4;
                    elegidos.push(url);
                    console.log(elegidos.valueOf(elegidos.length-1));
                }
                else{
                    objeto.style.opacity=1;
                    indice = elegidos.indexOf(url);
                    if(indice!=-1)
                        elegidos.splice(indice,1);
                }
            }
        </script>
        Photos from Twitter <br>""" + tuits
# Run the web.py development server when executed directly.
if __name__=="__main__":
    app.run()
| gpl-2.0 |
martinez-zea/tts | tlslite/handshakesettings.py | 1 | 5859 | # Author: Trevor Perrin
# See the LICENSE file for legal information regarding use of this file.
"""Class for setting handshake parameters."""
from .constants import CertificateType
from .utils import cryptomath
from .utils import cipherfactory
class HandshakeSettings:
    """This class encapsulates various parameters that can be used with
    a TLS handshake.

    @sort: minKeySize, maxKeySize, cipherNames, certificateTypes,
    minVersion, maxVersion

    @type minKeySize: int
    @ivar minKeySize: The minimum bit length for asymmetric keys.
    If the other party tries to use SRP, RSA, or Diffie-Hellman
    parameters smaller than this length, an alert will be
    signalled. The default is 1023.

    @type maxKeySize: int
    @ivar maxKeySize: The maximum bit length for asymmetric keys.
    If the other party tries to use SRP, RSA, or Diffie-Hellman
    parameters larger than this length, an alert will be signalled.
    The default is 8193.

    @type cipherNames: list
    @ivar cipherNames: The allowed ciphers, in order of preference.
    The allowed values in this list are 'aes256', 'aes128', '3des', and
    'rc4'. If these settings are used with a client handshake, they
    determine the order of the ciphersuites offered in the ClientHello
    message.
    If these settings are used with a server handshake, the server will
    choose whichever ciphersuite matches the earliest entry in this
    list.
    NOTE: If '3des' is used in this list, but TLS Lite can't find an
    add-on library that supports 3DES, then '3des' will be silently
    removed.
    The default value is ['aes256', 'aes128', '3des', 'rc4'].

    @type certificateTypes: list
    @ivar certificateTypes: The allowed certificate types, in order of
    preference.
    The allowed values in this list are 'x509'. This
    list is only used with a client handshake. The client will
    advertise to the server which certificate types are supported, and
    will check that the server uses one of the appropriate types.

    @type minVersion: tuple
    @ivar minVersion: The minimum allowed SSL/TLS version.
    This variable can be set to (3,0) for SSL 3.0, (3,1) for
    TLS 1.0, or (3,2) for TLS 1.1. If the other party wishes to
    use a lower version, a protocol_version alert will be signalled.
    The default is (3,0).

    @type maxVersion: tuple
    @ivar maxVersion: The maximum allowed SSL/TLS version.
    This variable can be set to (3,0) for SSL 3.0, (3,1) for
    TLS 1.0, or (3,2) for TLS 1.1. If the other party wishes to
    use a higher version, a protocol_version alert will be signalled.
    The default is (3,2). (WARNING: Some servers may (improperly)
    reject clients which offer support for TLS 1.1. In this case,
    try lowering maxVersion to (3,1)).
    """
    def __init__(self):
        self.minKeySize = 1023
        self.maxKeySize = 8193
        self.cipherNames = ["aes256", "aes128", "3des", "rc4"]
        self.cipherImplementations = ["openssl", "pycrypto", "python"]
        self.certificateTypes = ["x509"]
        self.minVersion = (3, 0)
        self.maxVersion = (3, 2)

    # Validates the min/max fields, and certificateTypes.
    # Filters out unsupported cipherNames and cipherImplementations.
    def _filter(self):
        other = HandshakeSettings()
        other.minKeySize = self.minKeySize
        other.maxKeySize = self.maxKeySize
        other.cipherNames = self.cipherNames
        other.cipherImplementations = self.cipherImplementations
        other.certificateTypes = self.certificateTypes
        other.minVersion = self.minVersion
        other.maxVersion = self.maxVersion
        # Silently drop 3DES if no crypto backend provides it.
        if not cipherfactory.tripleDESPresent:
            other.cipherNames = [e for e in self.cipherNames if e != "3des"]
        if len(other.cipherNames) == 0:
            raise ValueError("No supported ciphers")
        if len(other.certificateTypes) == 0:
            raise ValueError("No supported certificate types")
        # Drop implementations whose backing library is not loaded.
        if not cryptomath.m2cryptoLoaded:
            other.cipherImplementations = [
                e for e in other.cipherImplementations if e != "openssl"]
        if not cryptomath.pycryptoLoaded:
            other.cipherImplementations = [
                e for e in other.cipherImplementations if e != "pycrypto"]
        if len(other.cipherImplementations) == 0:
            raise ValueError("No supported cipher implementations")
        if other.minKeySize < 512:
            raise ValueError("minKeySize too small")
        if other.minKeySize > 16384:
            raise ValueError("minKeySize too large")
        if other.maxKeySize < 512:
            raise ValueError("maxKeySize too small")
        if other.maxKeySize > 16384:
            raise ValueError("maxKeySize too large")
        for s in other.cipherNames:
            if s not in ("aes256", "aes128", "rc4", "3des"):
                raise ValueError("Unknown cipher name: '%s'" % s)
        for s in other.cipherImplementations:
            if s not in ("openssl", "python", "pycrypto"):
                raise ValueError("Unknown cipher implementation: '%s'" % s)
        for s in other.certificateTypes:
            # BUG FIX: the original used `s not in ("x509")`, which is a
            # *substring* test against the string "x509" (e.g. "x5" would
            # wrongly validate). A one-element tuple is required here.
            if s not in ("x509",):
                raise ValueError("Unknown certificate type: '%s'" % s)
        if other.minVersion > other.maxVersion:
            raise ValueError("Versions set incorrectly")
        if not other.minVersion in ((3, 0), (3, 1), (3, 2)):
            raise ValueError("minVersion set incorrectly")
        if not other.maxVersion in ((3, 0), (3, 1), (3, 2)):
            raise ValueError("maxVersion set incorrectly")
        return other

    def _getCertificateTypes(self):
        # Map string certificate type names to CertificateType constants.
        l = []
        for ct in self.certificateTypes:
            if ct == "x509":
                l.append(CertificateType.x509)
            else:
                raise AssertionError()
        return l
| gpl-3.0 |
pytroll/pygac | pygac/utils.py | 1 | 11310 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Author(s):
# Stephan Finkensieper <stephan.finkensieper@dwd.de>
# Carlos Horn <carlos.horn@external.eumetsat.int>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import gzip
import io
import logging
from contextlib import contextmanager, nullcontext
import numpy as np
LOG = logging.getLogger(__name__)
def gzip_inspected(open_file):
    """Return a gzip-decompressing wrapper for *open_file* if its content
    is gzip data, otherwise the original file object, rewound to offset 0."""
    candidate = gzip.GzipFile(mode='rb', fileobj=open_file)
    try:
        # A one-byte probe read raises OSError (BadGzipFile) for non-gzip data.
        candidate.read(1)
    except OSError:
        candidate = open_file
    finally:
        candidate.seek(0)
    return candidate
@contextmanager
def file_opener(file):
    """Context manager yielding a readable, gzip-inspected file object.

    Accepts an already-open seekable file object (left open on exit), an
    object exposing ``.open`` (e.g. a path-like wrapper -- TODO confirm
    callers), or a plain path which is opened in binary mode.
    """
    if isinstance(file, io.IOBase) and file.seekable():
        # Do not close a caller-owned file object.
        ctx = nullcontext(file)
    elif hasattr(file, 'open'):
        try:
            ctx = file.open(mode='rb')
        except TypeError:
            # Some .open implementations take no mode argument.
            ctx = file.open()
    else:
        ctx = open(file, mode='rb')
    # Enter the context here so lazy-opening objects are handled uniformly.
    with ctx as file_object:
        yield gzip_inspected(file_object)
def get_absolute_azimuth_angle_diff(sat_azi, sun_azi):
    """Calculates absolute azimuth difference angle. """
    diff = np.abs(sat_azi - sun_azi) % 360
    # Fold angles above 180 degrees back into [0, 180]; in-place assignment
    # avoids allocating a second array (as np.where would).
    mask = diff > 180
    diff[mask] = 360.0 - diff[mask]
    return diff
def centered_modulus(array, divisor):
    """Transform array to half open range ]-divisor/2, divisor/2]."""
    wrapped = array % divisor
    # Shift the upper half of the range down by one full period.
    upper_half = wrapped > divisor / 2
    wrapped[upper_half] -= divisor
    return wrapped
def calculate_sun_earth_distance_correction(jday):
    """Calculate the sun earth distance correction.

    In 2008 3-4 different equations of ESD were considered.
    This one was chosen as it at the time gave reflectances most closely
    matching the PATMOS-x data provided then by Andy Heidinger.

    Formula might need to be reconsidered if jday is updated to a float.
    """
    # Annual cosine modulation of the Earth-Sun distance, phased to day 2.
    phase = 2.0 * np.pi * (jday - 2) / 365.25
    return 1.0 - 0.0334 * np.cos(phase)
def check_user_scanlines(start_line, end_line, first_valid_lat=None,
                         last_valid_lat=None, along_track=None):
    """Validate and normalise user-defined scanlines.

    Can be used by both pygac and satpy.

    Args:
        start_line: User-defined start line (after stripping, if enabled)
        end_line: User-defined end line (after stripping, if enabled);
            0 means "up to the last valid line".
        first_valid_lat: First scanline with valid latitudes
        last_valid_lat: Last scanline with valid latitudes
        along_track: Number of scanlines (only needed if stripping
            is disabled)

    Returns:
        (start_line, end_line) as ints clamped to the valid range.

    Raises:
        ValueError: if neither valid-lat bounds nor along_track is given,
            or start_line exceeds the scanline range.
    """
    if first_valid_lat is not None and last_valid_lat is not None:
        num_valid_lines = last_valid_lat - first_valid_lat + 1
    else:
        if along_track is None:
            raise ValueError('Need along_track')
        num_valid_lines = along_track

    start_line = int(start_line)
    end_line = int(end_line)
    last_index = num_valid_lines - 1
    if end_line == 0:
        # 0 is a sentinel for "process all scanlines with valid coordinates".
        end_line = last_index
    elif end_line >= num_valid_lines:
        end_line = last_index
        LOG.warning('Given end line exceeds scanline range, resetting '
                    'to {}'.format(end_line))
    if start_line > num_valid_lines:
        raise ValueError('Given start line {} exceeds scanline range {}'
                         .format(start_line, num_valid_lines))
    return start_line, end_line
def strip_invalid_lat(lats):
    """Strip invalid latitudes at the end and beginning of the orbit."""
    # Indices of all scanlines whose latitude is not NaN.
    valid_indices = np.flatnonzero(~np.isnan(lats))
    return valid_indices.min(), valid_indices.max()
def slice_channel(ch, start_line, end_line, first_valid_lat=None,
                  last_valid_lat=None, midnight_scanline=None,
                  miss_lines=None, qual_flags=None):
    """Slice channel data using user-defined start/end line.

    If valid_lat_start/end are given, strip scanlines with invalid
    coordinates at the beginning and end of the orbit.

    Can be used by both pygac and satpy.

    Args:
        ch: Channel data
        start_line: User-defined start line (afer stripping, if enabled)
        end_line: User-defined end line (after stripping, if enabled)
        first_valid_lat: First scanline with valid latitudes
        last_valid_lat: Last scanline with valid latitudes.
        midnight_scanline: If given, update midnight scanline to the new
            scanline range.
        miss_lines: If given, update list of missing lines with the ones
            that have been stripped due to invalid coordinates
        qual_flags: Quality flags, needed to updated missing lines.

    Returns:
        (sliced channel, updated miss_lines, updated midnight_scanline).
    """
    if first_valid_lat is not None and last_valid_lat is not None:
        # Strip invalid coordinates and update midnight scanline as well as
        # user-defined start/end lines
        ch, updated = _slice(ch,
                             start_line=first_valid_lat,
                             end_line=last_valid_lat,
                             update=[midnight_scanline])
        midnight_scanline = updated[0]

        # Reset user-defined end line, if it has been removed
        end_line = min(end_line, ch.shape[0] - 1)
        start_line = min(start_line, ch.shape[0] - 1)

        # Update missing scanlines
        if miss_lines is not None:
            miss_lines = _update_missing_scanlines(
                miss_lines=miss_lines,
                qual_flags=qual_flags,
                start_line=first_valid_lat,
                end_line=last_valid_lat)

    # Slice data using user-defined start/end lines
    ch_slc, updated = _slice(ch, start_line=start_line, end_line=end_line,
                             update=[midnight_scanline])
    midnight_scanline = updated[0]

    return ch_slc, miss_lines, midnight_scanline
def _slice(ch, start_line, end_line, update=None):
"""Slice the given channel.
Args:
start_line: New start line
end_line: New end line
update: List of scanlines to be updated to the new range
"""
# Slice data using new start/end lines
if len(ch.shape) == 1:
ch_slc = ch[start_line:end_line + 1].copy()
else:
ch_slc = ch[start_line:end_line + 1, :].copy()
if update:
updated = [_update_scanline(l, start_line, end_line)
if l is not None else None
for l in update]
return ch_slc, updated
return ch_slc
def _update_scanline(scanline, new_start_line, new_end_line):
"""Update the given scanline to the new range.
Set scanline to None if it lies outside the new range.
"""
scanline -= new_start_line
num_lines = new_end_line - new_start_line + 1
if scanline < 0 or scanline >= num_lines:
scanline = None
return scanline
def _update_missing_scanlines(miss_lines, qual_flags, start_line, end_line):
"""Add scanlines excluded by slicing to the list of missing scanlines.
Args:
miss_lines: List of missing scanlines
qual_flags: Quality flags
start_line: New start line of the slice
end_line: New end line of the slice
"""
return np.sort(np.unique(
qual_flags[0:start_line, 0].tolist() +
miss_lines.tolist() +
qual_flags[end_line + 1:, 0].tolist()
))
def plot_correct_times_thresh(res, filename=None):
    """Visualize results of GACReader.correct_times_thresh.

    Args:
        res: Result dict. Keys 't' and 'n' are required; 'tcorr', 'offsets',
            't0_head', 'max_diff_from_t0_head' and 'fail_reason' are read
            via .get() and may be absent.
        filename: If given, save the figure to this path; otherwise show it.
    """
    import matplotlib.pyplot as plt

    t = res['t']
    tcorr = res.get('tcorr')
    n = res['n']
    offsets = res.get('offsets')
    t0_head = res.get('t0_head')
    max_diff_from_t0_head = res.get('max_diff_from_t0_head')
    fail_reason = res.get('fail_reason', 'Failed for unknown reason')

    # Setup figure
    along_track = np.arange(t.size)
    _, (ax0, ax1, ax2) = plt.subplots(nrows=3, sharex=True,
                                      figsize=(8, 10))

    # Plot original vs corrected timestamps
    ax0.plot(along_track, t, "b-", label="original")
    if tcorr is not None:
        ax0.plot(along_track, tcorr, color="red", linestyle="--",
                 label="corrected")
    else:
        # No correction available: show the failure reason as the title.
        ax0.set_title(fail_reason)

    ax0.set_ylabel("Time")
    ax0.set_ylim(t.min() - np.timedelta64(30, "m"),
                 t.max() + np.timedelta64(30, "m"))
    ax0.legend(loc="best")

    # Plot offset (original time - ideal time)
    if offsets is not None:
        ax1.plot(along_track, offsets)
        # Shaded band marks the acceptance window around the header timestamp.
        ax1.fill_between(
            along_track,
            t0_head - np.ones(along_track.size) * max_diff_from_t0_head,
            t0_head + np.ones(along_track.size) * max_diff_from_t0_head,
            facecolor="g", alpha=0.33)
        ax1.axhline(y=t0_head, color="g", linestyle="--",
                    label="Header timestamp")
        ax1.set_ylim(t0_head - 5 * max_diff_from_t0_head,
                     t0_head + 5 * max_diff_from_t0_head)

        ax1.set_ylabel("Offset t-tn [ms]")
        ax1.legend(loc="best")

    # Plot scanline number
    ax2.plot(along_track, n)
    ax2.set_ylabel("Scanline number")
    ax2.set_xlabel("Along Track")

    if filename:
        plt.savefig(filename, bbox_inches="tight", dpi=100)
    else:
        plt.show()
def plot_correct_scanline_numbers(res, filename=None):
    """Visualize results of GACReader.correct_scanline_numbers.

    Args:
        res: Result dict with keys 'along_track', 'n_orig', 'n_corr',
            'within_range', 'thresh', 'diffs' and 'nz_diffs' (all required).
        filename: If given, save the figure to this path; otherwise show it.
    """
    import matplotlib.pyplot as plt

    along_track = res['along_track']
    n_orig = res['n_orig']
    n_corr = res['n_corr']
    within_range = res['within_range']
    thresh = res['thresh']
    diffs = res['diffs']
    nz_diffs = res['nz_diffs']

    # Setup figure
    _, (ax0, ax1) = plt.subplots(nrows=2)

    # Plot original vs corrected scanline numbers
    ax0.plot(along_track, n_orig, "b-", label="original")
    # Corrected values only exist for in-range lines below the threshold,
    # so filter the x axis the same way before plotting.
    along_track_corr = along_track.copy()
    along_track_corr = along_track_corr[within_range]
    along_track_corr = along_track_corr[diffs <= thresh]
    ax0.plot(along_track_corr, n_corr, "r--", label="corrected")
    ax0.set_ylabel("Scanline Number")
    ax0.set_xlabel("Along Track")
    ax0.legend(loc="best")

    # Plot difference from ideal
    ax1.plot(np.arange(len(nz_diffs)), nz_diffs)
    ax1.axhline(thresh, color="r", label="thresh={0:.2f}"
                .format(thresh))
    ax1.set_xlabel("Index")
    ax1.set_ylabel("nonzero |n - n'|")
    ax1.legend()

    plt.tight_layout()
    if filename:
        plt.savefig(filename, bbox_inches='tight')
    else:
        plt.show()
| gpl-3.0 |
pinae/ctSESAM-python | ctSESAM.py | 2 | 1134 | #!/usr/bin/python3
# -*- coding: utf-8 -*-
import getpass
from hashlib import pbkdf2_hmac
# Character classes used to build generated passwords. The upper-case set
# omits I, O and S -- presumably to avoid ambiguous glyphs; TODO confirm.
small_letters = list('abcdefghijklmnopqrstuvwxyz')
big_letters = list('ABCDEFGHJKLMNPQRTUVWXYZ')
numbers = list('0123456789')
special_characters = list('#!"§$%&/()[]{}=-_+*<>;:.')
password_characters = small_letters + big_letters + numbers + special_characters
# NOTE(review): a fixed, public salt defeats much of the purpose of salting
# PBKDF2; consider a per-user or per-domain salt.
salt = "pepper"
def convert_bytes_to_password(hashed_bytes, length):
    """Map *hashed_bytes* to a password of at most *length* characters.

    The bytes are read as one big-endian integer which is repeatedly
    divided by the alphabet size; each remainder selects one character
    from the module-level ``password_characters`` alphabet.
    """
    remaining = int.from_bytes(hashed_bytes, byteorder='big')
    alphabet_size = len(password_characters)
    chars = []
    while remaining > 0 and len(chars) < length:
        remaining, index = divmod(remaining, alphabet_size)
        chars.append(password_characters[index])
    return ''.join(chars)
# Interactive entry point: read the master password without echo, insist on
# a non-empty domain, then derive and print the site password.
master_password = getpass.getpass(prompt='Masterpasswort: ')
domain = input('Domain: ')
while len(domain) < 1:
    print('Bitte gib eine Domain an, für die das Passwort generiert werden soll.')
    domain = input('Domain: ')
# 4096 iterations of PBKDF2-HMAC-SHA512 over domain + master password.
hash_string = domain + master_password
hashed_bytes = pbkdf2_hmac('sha512', hash_string.encode('utf-8'), salt.encode('utf-8'), 4096)
print('Passwort: ' + convert_bytes_to_password(hashed_bytes, 10))
| gpl-3.0 |
mccheung/kbengine | kbe/res/scripts/common/Lib/encodings/shift_jisx0213.py | 816 | 1059 | #
# shift_jisx0213.py: Python Unicode Codec for SHIFT_JISX0213
#
# Written by Hye-Shik Chang <perky@FreeBSD.org>
#
import _codecs_jp, codecs
import _multibytecodec as mbc
# Bind the C-implemented shift_jisx0213 codec from the _codecs_jp module.
codec = _codecs_jp.getcodec('shift_jisx0213')
class Codec(codecs.Codec):
    # Stateless encode/decode, delegated to the C codec object.
    encode = codec.encode
    decode = codec.decode
class IncrementalEncoder(mbc.MultibyteIncrementalEncoder,
                         codecs.IncrementalEncoder):
    # Stateful encoder backed by the multibyte codec machinery.
    codec = codec
class IncrementalDecoder(mbc.MultibyteIncrementalDecoder,
                         codecs.IncrementalDecoder):
    # Stateful decoder backed by the multibyte codec machinery.
    codec = codec
class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader):
    # Stream reader combining the stateless Codec with multibyte buffering.
    codec = codec
class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter):
    # Stream writer combining the stateless Codec with multibyte buffering.
    codec = codec
def getregentry():
    """Return the CodecInfo record used to register this codec."""
    return codecs.CodecInfo(
        name='shift_jisx0213',
        encode=Codec().encode,
        decode=Codec().decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
| lgpl-3.0 |
daavery/audacity | lib-src/lv2/serd/waflib/ConfigSet.py | 266 | 3763 | #! /usr/bin/env python
# encoding: utf-8
# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
import copy,re,os
from waflib import Logs,Utils
# Matches "KEY = value" lines in stored ConfigSet files; group 1 captures an
# optional leading "#", group 2 the key, group 3 the (repr'd) value.
re_imp=re.compile('^(#)*?([^#=]*?)\ =\ (.*?)$',re.M)
class ConfigSet(object):
 """A dict-like store of configuration values with optional chaining.

 Lookups fall back to a ``parent`` ConfigSet when a key is missing, and
 missing keys resolve to ``[]`` rather than raising. Attribute access is
 mapped onto item access for any name not in ``__slots__``.
 """
 __slots__=('table','parent')
 def __init__(self,filename=None):
  self.table={}
  if filename:
   self.load(filename)
 def __contains__(self,key):
  # Check this table first, then delegate to the parent chain (if any).
  if key in self.table:return True
  try:return self.parent.__contains__(key)
  except AttributeError:return False
 def keys(self):
  # Union of keys over the whole parent chain, returned sorted.
  keys=set()
  cur=self
  while cur:
   keys.update(cur.table.keys())
   cur=getattr(cur,'parent',None)
  keys=list(keys)
  keys.sort()
  return keys
 def __str__(self):
  return"\n".join(["%r %r"%(x,self.__getitem__(x))for x in self.keys()])
 def __getitem__(self,key):
  # Walk up the parent chain; a missing key yields [] (the AttributeError
  # fires when the chain runs out of parents).
  try:
   while 1:
    x=self.table.get(key,None)
    if not x is None:
     return x
    self=self.parent
  except AttributeError:
   return[]
 def __setitem__(self,key,value):
  self.table[key]=value
 def __delitem__(self,key):
  # Deletion resets the value to the empty default instead of removing it.
  self[key]=[]
 def __getattr__(self,name):
  # Map attribute access onto item access for non-slot names.
  if name in self.__slots__:
   return object.__getattr__(self,name)
  else:
   return self[name]
 def __setattr__(self,name,value):
  if name in self.__slots__:
   object.__setattr__(self,name,value)
  else:
   self[name]=value
 def __delattr__(self,name):
  if name in self.__slots__:
   object.__delattr__(self,name)
  else:
   del self[name]
 def derive(self):
  # Return a new empty ConfigSet that inherits from this one.
  newenv=ConfigSet()
  newenv.parent=self
  return newenv
 def detach(self):
  # Break the parent link, deep-copying the merged contents locally.
  tbl=self.get_merged_dict()
  try:
   delattr(self,'parent')
  except AttributeError:
   pass
  else:
   keys=tbl.keys()
   for x in keys:
    tbl[x]=copy.deepcopy(tbl[x])
   self.table=tbl
 def get_flat(self,key):
  # Return the value as a single space-joined string.
  s=self[key]
  if isinstance(s,str):return s
  return' '.join(s)
 def _get_list_value_for_modification(self,key):
  # Return a list stored in self.table for key, copying inherited values
  # so that mutating the result never changes a parent ConfigSet.
  try:
   value=self.table[key]
  except KeyError:
   try:value=self.parent[key]
   except AttributeError:value=[]
   if isinstance(value,list):
    value=value[:]
   else:
    value=[value]
  else:
   if not isinstance(value,list):
    value=[value]
  self.table[key]=value
  return value
 def append_value(self,var,val):
  # Append val (string or list) to the list stored under var.
  current_value=self._get_list_value_for_modification(var)
  if isinstance(val,str):
   val=[val]
  current_value.extend(val)
 def prepend_value(self,var,val):
  # Prepend val (string or list) to the list stored under var.
  if isinstance(val,str):
   val=[val]
  self.table[var]=val+self._get_list_value_for_modification(var)
 def append_unique(self,var,val):
  # Append only the elements of val not already present.
  if isinstance(val,str):
   val=[val]
  current_value=self._get_list_value_for_modification(var)
  for x in val:
   if x not in current_value:
    current_value.append(x)
 def get_merged_dict(self):
  # Flatten the parent chain; the nearest table is applied last so it wins.
  table_list=[]
  env=self
  while 1:
   table_list.insert(0,env.table)
   try:env=env.parent
   except AttributeError:break
  merged_table={}
  for table in table_list:
   merged_table.update(table)
  return merged_table
 def store(self,filename):
  # Serialize the merged contents as "KEY = repr(value)" lines; the
  # internal 'undo_stack' entry is skipped.
  try:
   os.makedirs(os.path.split(filename)[0])
  except OSError:
   pass
  buf=[]
  merged_table=self.get_merged_dict()
  keys=list(merged_table.keys())
  keys.sort()
  try:
   fun=ascii
  except NameError:
   fun=repr
  for k in keys:
   if k!='undo_stack':
    buf.append('%s = %s\n'%(k,fun(merged_table[k])))
  Utils.writef(filename,''.join(buf))
 def load(self,filename):
  # WARNING: values are eval()'d -- only load trusted files.
  tbl=self.table
  code=Utils.readf(filename,m='rU')
  for m in re_imp.finditer(code):
   g=m.group
   tbl[g(2)]=eval(g(3))
  Logs.debug('env: %s'%str(self.table))
 def update(self,d):
  for k,v in d.items():
   self[k]=v
 def stash(self):
  # Snapshot self.table (deep copy) so revert() can restore it.
  orig=self.table
  tbl=self.table=self.table.copy()
  for x in tbl.keys():
   tbl[x]=copy.deepcopy(tbl[x])
  self.undo_stack=self.undo_stack+[orig]
 def revert(self):
  self.table=self.undo_stack.pop(-1)
| gpl-2.0 |
ScottWales/rose | lib/python/rose/config_editor/menu.py | 1 | 47000 | # -*- coding: utf-8 -*-
#-----------------------------------------------------------------------------
# (C) British Crown Copyright 2012-5 Met Office.
#
# This file is part of Rose, a framework for meteorological suites.
#
# Rose is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Rose is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Rose. If not, see <http://www.gnu.org/licenses/>.
#-----------------------------------------------------------------------------
import ast
import inspect
import os
import shlex
import subprocess
import sys
import pygtk
pygtk.require('2.0')
import gtk
import rose.config
import rose.config_editor
import rose.config_editor.upgrade_controller
import rose.external
import rose.gtk.dialog
import rose.gtk.run
import rose.macro
import rose.macros
import rose.popen
import rose.suite_control
import rose.suite_engine_proc
class MenuBar(object):
"""Generate the menu bar, using the GTK UIManager.
Parses the settings in 'ui_config_string'. Connection of buttons is done
at a higher level.
"""
ui_config_string = """<ui>
<menubar name="TopMenuBar">
<menu action="File">
<menuitem action="Open..."/>
<menuitem action="Save"/>
<menuitem action="Check and save"/>
<menuitem action="Load All Apps"/>
<separator name="sep_save"/>
<menuitem action="Quit"/>
</menu>
<menu action="Edit">
<menuitem action="Undo"/>
<menuitem action="Redo"/>
<menuitem action="Stack"/>
<separator name="sep_undo_redo"/>
<menuitem action="Find"/>
<menuitem action="Find Next"/>
<separator name="sep_find"/>
<menuitem action="Preferences"/>
</menu>
<menu action="View">
<menuitem action="View fixed vars"/>
<menuitem action="View ignored vars"/>
<menuitem action="View user-ignored vars"/>
<menuitem action="View latent vars"/>
<separator name="sep_view_vars"/>
<menuitem action="View ignored pages"/>
<menuitem action="View user-ignored pages"/>
<menuitem action="View latent pages"/>
<separator name="sep_view_flags"/>
<menuitem action="Flag no-metadata vars"/>
<menuitem action="Flag optional vars"/>
<menuitem action="Flag opt config vars"/>
<separator name="sep_view_widgets"/>
<menuitem action="View status bar"/>
</menu>
<menu action="Metadata">
<menuitem action="Reload metadata"/>
<menuitem action="Switch off metadata"/>
<separator name="sep_view_generic"/>
<menu action="Prefs">
<menuitem action="View without descriptions"/>
<menuitem action="View without help"/>
<menuitem action="View without titles"/>
</menu>
<separator name="sep_upgrade"/>
<menuitem action="Upgrade"/>
<separator name="sep graph"/>
<menuitem action="Graph"/>
<separator name="sep_checking"/>
<menuitem action="Extra checks"/>
<separator name="sep macro"/>
<menuitem action="All V"/>
<menuitem action="Autofix"/>
</menu>
<menu action="Tools">
<menu action="Run Suite">
<menuitem action="Run Suite default"/>
<menuitem action="Run Suite custom"/>
</menu>
<separator name="sep_run_action"/>
<menuitem action="Browser"/>
<menuitem action="Terminal"/>
<menuitem action="View Output"/>
<menuitem action="Open Suite GControl"/>
</menu>
<menu action="Page">
<menuitem action="Add variable"/>
<menuitem action="Revert"/>
<separator name="info"/>
<menuitem action="Page Info"/>
<separator name="help"/>
<menuitem action="Page Help"/>
<menuitem action="Page Web Help"/>
</menu>
<menu action="Help">
<menuitem action="GUI Help"/>
<menuitem action="About"/>
</menu>
</menubar>
</ui>"""
action_details = [('File', None,
rose.config_editor.TOP_MENU_FILE),
('Open...', gtk.STOCK_OPEN,
rose.config_editor.TOP_MENU_FILE_OPEN,
rose.config_editor.ACCEL_OPEN),
('Save', gtk.STOCK_SAVE,
rose.config_editor.TOP_MENU_FILE_SAVE,
rose.config_editor.ACCEL_SAVE),
('Check and save', gtk.STOCK_SPELL_CHECK,
rose.config_editor.TOP_MENU_FILE_CHECK_AND_SAVE),
('Load All Apps', gtk.STOCK_CDROM,
rose.config_editor.TOP_MENU_FILE_LOAD_APPS),
('Quit', gtk.STOCK_QUIT,
rose.config_editor.TOP_MENU_FILE_QUIT,
rose.config_editor.ACCEL_QUIT),
('Edit', None,
rose.config_editor.TOP_MENU_EDIT),
('Undo', gtk.STOCK_UNDO,
rose.config_editor.TOP_MENU_EDIT_UNDO,
rose.config_editor.ACCEL_UNDO),
('Redo', gtk.STOCK_REDO,
rose.config_editor.TOP_MENU_EDIT_REDO,
rose.config_editor.ACCEL_REDO),
('Stack', gtk.STOCK_INFO,
rose.config_editor.TOP_MENU_EDIT_STACK),
('Find', gtk.STOCK_FIND,
rose.config_editor.TOP_MENU_EDIT_FIND,
rose.config_editor.ACCEL_FIND),
('Find Next', gtk.STOCK_FIND,
rose.config_editor.TOP_MENU_EDIT_FIND_NEXT,
rose.config_editor.ACCEL_FIND_NEXT),
('Preferences', gtk.STOCK_PREFERENCES,
rose.config_editor.TOP_MENU_EDIT_PREFERENCES),
('View', None,
rose.config_editor.TOP_MENU_VIEW),
('Page', None,
rose.config_editor.TOP_MENU_PAGE),
('Add variable', gtk.STOCK_ADD,
rose.config_editor.TOP_MENU_PAGE_ADD),
('Revert', gtk.STOCK_REVERT_TO_SAVED,
rose.config_editor.TOP_MENU_PAGE_REVERT),
('Page Info', gtk.STOCK_INFO,
rose.config_editor.TOP_MENU_PAGE_INFO),
('Page Help', gtk.STOCK_HELP,
rose.config_editor.TOP_MENU_PAGE_HELP),
('Page Web Help', gtk.STOCK_HOME,
rose.config_editor.TOP_MENU_PAGE_WEB_HELP),
('Metadata', None,
rose.config_editor.TOP_MENU_METADATA),
('Reload metadata', gtk.STOCK_REFRESH,
rose.config_editor.TOP_MENU_METADATA_REFRESH,
rose.config_editor.ACCEL_METADATA_REFRESH),
('Prefs', gtk.STOCK_PREFERENCES,
rose.config_editor.TOP_MENU_METADATA_PREFERENCES),
('Upgrade', gtk.STOCK_GO_UP,
rose.config_editor.TOP_MENU_METADATA_UPGRADE),
('All V', gtk.STOCK_DIALOG_QUESTION,
rose.config_editor.TOP_MENU_METADATA_MACRO_ALL_V),
('Autofix', gtk.STOCK_CONVERT,
rose.config_editor.TOP_MENU_METADATA_MACRO_AUTOFIX),
('Extra checks', gtk.STOCK_DIALOG_QUESTION,
rose.config_editor.TOP_MENU_METADATA_CHECK),
('Graph', gtk.STOCK_SORT_ASCENDING,
rose.config_editor.TOP_MENU_METADATA_GRAPH),
('Tools', None,
rose.config_editor.TOP_MENU_TOOLS),
('Run Suite', gtk.STOCK_MEDIA_PLAY,
rose.config_editor.TOP_MENU_TOOLS_SUITE_RUN),
('Run Suite default', gtk.STOCK_MEDIA_PLAY,
rose.config_editor.TOP_MENU_TOOLS_SUITE_RUN_DEFAULT,
rose.config_editor.ACCEL_SUITE_RUN),
('Run Suite custom', gtk.STOCK_EDIT,
rose.config_editor.TOP_MENU_TOOLS_SUITE_RUN_CUSTOM),
('Browser', gtk.STOCK_DIRECTORY,
rose.config_editor.TOP_MENU_TOOLS_BROWSER,
rose.config_editor.ACCEL_BROWSER),
('Terminal', gtk.STOCK_EXECUTE,
rose.config_editor.TOP_MENU_TOOLS_TERMINAL,
rose.config_editor.ACCEL_TERMINAL),
('View Output', gtk.STOCK_DIRECTORY,
rose.config_editor.TOP_MENU_TOOLS_VIEW_OUTPUT),
('Open Suite GControl', "rose-gtk-scheduler",
rose.config_editor.TOP_MENU_TOOLS_OPEN_SUITE_GCONTROL),
('Help', None,
rose.config_editor.TOP_MENU_HELP),
('GUI Help', gtk.STOCK_HELP,
rose.config_editor.TOP_MENU_HELP_GUI,
rose.config_editor.ACCEL_HELP_GUI),
('About', gtk.STOCK_DIALOG_INFO,
rose.config_editor.TOP_MENU_HELP_ABOUT)]
toggle_action_details = [
('View latent vars', None,
rose.config_editor.TOP_MENU_VIEW_LATENT_VARS),
('View fixed vars', None,
rose.config_editor.TOP_MENU_VIEW_FIXED_VARS),
('View ignored vars', None,
rose.config_editor.TOP_MENU_VIEW_IGNORED_VARS),
('View user-ignored vars', None,
rose.config_editor.TOP_MENU_VIEW_USER_IGNORED_VARS),
('View without descriptions', None,
rose.config_editor.TOP_MENU_VIEW_WITHOUT_DESCRIPTIONS),
('View without help', None,
rose.config_editor.TOP_MENU_VIEW_WITHOUT_HELP),
('View without titles', None,
rose.config_editor.TOP_MENU_VIEW_WITHOUT_TITLES),
('View ignored pages', None,
rose.config_editor.TOP_MENU_VIEW_IGNORED_PAGES),
('View user-ignored pages', None,
rose.config_editor.TOP_MENU_VIEW_USER_IGNORED_PAGES),
('View latent pages', None,
rose.config_editor.TOP_MENU_VIEW_LATENT_PAGES),
('Flag opt config vars', None,
rose.config_editor.TOP_MENU_VIEW_FLAG_OPT_CONF_VARS),
('Flag optional vars', None,
rose.config_editor.TOP_MENU_VIEW_FLAG_OPTIONAL_VARS),
('Flag no-metadata vars', None,
rose.config_editor.TOP_MENU_VIEW_FLAG_NO_METADATA_VARS),
('View status bar', None,
rose.config_editor.TOP_MENU_VIEW_STATUS_BAR),
('Switch off metadata', None,
rose.config_editor.TOP_MENU_METADATA_SWITCH_OFF)]
    def __init__(self):
        # Build the menu bar from the UI XML description, registering
        # both the normal and the toggle actions in one action group.
        self.uimanager = gtk.UIManager()
        self.actiongroup = gtk.ActionGroup('MenuBar')
        self.actiongroup.add_actions(self.action_details)
        self.actiongroup.add_toggle_actions(self.toggle_action_details)
        self.uimanager.insert_action_group(self.actiongroup, pos=0)
        self.uimanager.add_ui_from_string(self.ui_config_string)
        # Merge ids for entries added later by add_macro, so that
        # clear_macros can remove them again.
        self.macro_ids = []
def set_accelerators(self, accel_dict):
"""Add the keyboard accelerators."""
self.accelerators = gtk.AccelGroup()
self.accelerators.lookup = {} # Unfortunately, this is necessary.
key_list = []
mod_list = []
action_list = []
for key_press, accel_func in accel_dict.items():
key, mod = gtk.accelerator_parse(key_press)
self.accelerators.lookup[str(key) + str(mod)] = accel_func
self.accelerators.connect_group(
key, mod,
gtk.ACCEL_VISIBLE,
lambda a, c, k, m:
self.accelerators.lookup[str(k) + str(m)]())
def clear_macros(self):
"""Reset menu to original configuration and clear macros."""
for merge_id in self.macro_ids:
self.uimanager.remove_ui(merge_id)
self.macro_ids = []
all_v_item = self.uimanager.get_widget("/TopMenuBar/Metadata/All V")
all_v_item.set_sensitive(False)
    def add_macro(self, config_name, modulename, classname, methodname,
                  help, image_path, run_macro):
        """Add a macro to the macro menu.

        A per-config submenu is created on demand under the Metadata
        menu; the macro entry is appended to it and wired to run_macro.
        """
        macro_address = '/TopMenuBar/Metadata'
        macro_menu = self.uimanager.get_widget(macro_address).get_submenu()
        if methodname == rose.macro.VALIDATE_METHOD:
            # At least one validator now exists, so enable "All V".
            all_v_item = self.uimanager.get_widget(macro_address + "/All V")
            all_v_item.set_sensitive(True)
        # Underscores are doubled to escape GTK mnemonic markers.
        config_menu_name = config_name.replace('/', ':').replace('_', '__')
        config_label_name = config_name.split('/')[-1].replace('_', '__')
        label = rose.config_editor.TOP_MENU_METADATA_MACRO_CONFIG.format(
            config_label_name)
        config_address = macro_address + '/' + config_menu_name
        config_item = self.uimanager.get_widget(config_address)
        if config_item is None:
            # First macro for this config: create its submenu entry.
            actiongroup = self.uimanager.get_action_groups()[0]
            if actiongroup.get_action(config_menu_name) is None:
                actiongroup.add_action(gtk.Action(config_menu_name,
                                                  label,
                                                  None, None))
            new_ui = """<ui><menubar name="TopMenuBar">
                        <menu action="Metadata">
                        <menuitem action="{0}"/></menu></menubar>
                        </ui>""".format(config_menu_name)
            self.macro_ids.append(self.uimanager.add_ui_from_string(new_ui))
            config_item = self.uimanager.get_widget(config_address)
            if image_path is not None:
                image = gtk.image_new_from_file(image_path)
                config_item.set_image(image)
        if config_item.get_submenu() is None:
            config_item.set_submenu(gtk.Menu())
        macro_fullname = ".".join([modulename, classname, methodname])
        macro_fullname = macro_fullname.replace("_", "__")
        if methodname == rose.macro.VALIDATE_METHOD:
            stock_id = gtk.STOCK_DIALOG_QUESTION
        else:
            stock_id = gtk.STOCK_CONVERT
        macro_item = gtk.ImageMenuItem(stock_id=stock_id)
        macro_item.set_label(macro_fullname)
        macro_item.set_tooltip_text(help)
        macro_item.show()
        # Stash the launch arguments on the widget for the callback.
        macro_item._run_data = [config_name, modulename, classname,
                                methodname]
        macro_item.connect("activate",
                           lambda i: run_macro(*i._run_data))
        config_item.get_submenu().append(macro_item)
        if (methodname == rose.macro.VALIDATE_METHOD):
            # Ensure exactly one "run all validators" entry at the top.
            for item in config_item.get_submenu().get_children():
                if hasattr(item, "_rose_all_validators"):
                    return False
            all_item = gtk.ImageMenuItem(gtk.STOCK_DIALOG_QUESTION)
            all_item._rose_all_validators = True
            all_item.set_label(rose.config_editor.MACRO_MENU_ALL_VALIDATORS)
            all_item.set_tooltip_text(
                rose.config_editor.MACRO_MENU_ALL_VALIDATORS_TIP)
            all_item.show()
            all_item._run_data = [config_name, None, None, methodname]
            all_item.connect("activate",
                             lambda i: run_macro(*i._run_data))
            config_item.get_submenu().prepend(all_item)
class MainMenuHandler(object):
    """Handles signals from the main menu and tool bar."""
    def __init__(self, data, util, reporter, mainwindow,
                 undo_stack, redo_stack, undo_func,
                 update_config_func,
                 apply_macro_transform_func, apply_macro_validation_func,
                 group_ops_inst, section_ops_inst, variable_ops_inst,
                 find_ns_id_func):
        # Core application data store and helpers.
        self.data = data
        self.util = util
        self.reporter = reporter
        self.mainwindow = mainwindow
        # Shared undo/redo stacks and the callable that performs an undo.
        self.undo_stack = undo_stack
        self.redo_stack = redo_stack
        self.perform_undo = undo_func
        self.update_config = update_config_func
        # Callbacks that apply macro results back to the loaded configs.
        self.apply_macro_transform = apply_macro_transform_func
        self.apply_macro_validation = apply_macro_validation_func
        # Operation helpers for groups, sections and variables.
        self.group_ops = group_ops_inst
        self.sect_ops = section_ops_inst
        self.var_ops = variable_ops_inst
        self.find_ns_id_func = find_ns_id_func
        # Colour used to flag invalid text in macro-argument entries.
        self.bad_colour = rose.gtk.util.color_parse(
            rose.config_editor.COLOUR_VARIABLE_TEXT_ERROR)
    def about_dialog(self, args):
        """Launch the About dialog (menu callback)."""
        self.mainwindow.launch_about_dialog()
def get_orphan_container(self, page):
"""Return a container with the page object inside."""
box = gtk.VBox()
box.pack_start(page, expand=True, fill=True)
box.show()
return box
    def view_stack(self, args):
        """Handle a View Stack request."""
        # Show the undo/redo stack viewer window.
        self.mainwindow.launch_view_stack(self.undo_stack, self.redo_stack,
                                          self.perform_undo)
def destroy(self, *args):
"""Handle a destroy main program request."""
for name in self.data.config:
if self.data.helper.get_config_has_unsaved_changes(name):
self.mainwindow.launch_exit_warning_dialog()
return True
try:
gtk.main_quit()
except RuntimeError:
# This can occur before gtk.main() is called, during the load.
sys.exit()
    def check_all_extra(self):
        """Check fail-if, warn-if, and run all validator macros."""
        # Bring every loaded (non-preview) configuration up to date first.
        for config_name in self.data.config.keys():
            if not self.data.config[config_name].is_preview:
                self.update_config(config_name)
        num_errors = self.check_fail_rules(configs_updated=True)
        num_errors += self.run_custom_macro(
            method_name=rose.macro.VALIDATE_METHOD,
            configs_updated=True)
        # Report a single summary of all problems found.
        if num_errors:
            text = rose.config_editor.EVENT_MACRO_VALIDATE_CHECK_ALL.format(
                num_errors)
            kind = self.reporter.KIND_ERR
        else:
            text = rose.config_editor.EVENT_MACRO_VALIDATE_CHECK_ALL_OK
            kind = self.reporter.KIND_OUT
        self.reporter.report(text, kind=kind)
def check_fail_rules(self, configs_updated=False):
"""Check the fail-if and warn-if conditions of the configurations."""
if not configs_updated:
for config_name in self.data.config.keys():
if not self.data.config[config_name].is_preview:
self.update_config(config_name)
macro = rose.macros.rule.FailureRuleChecker()
macro_fullname = "rule.FailureRuleChecker.validate"
error_count = 0
config_names = sorted(self.data.config.keys())
for config_name in sorted(self.data.config.keys()):
config_data = self.data.config[config_name]
if config_data.is_preview:
continue
config = config_data.config
meta = config_data.meta
try:
return_value = macro.validate(config, meta)
if return_value:
error_count += len(return_value)
except Exception as e:
rose.gtk.dialog.run_dialog(
rose.gtk.dialog.DIALOG_TYPE_ERROR,
str(e),
rose.config_editor.ERROR_RUN_MACRO_TITLE.format(
macro_fullname))
continue
sorter = rose.config.sort_settings
to_id = lambda s: self.util.get_id_from_section_option(
s.section, s.option)
return_value.sort(lambda x, y: sorter(to_id(x), to_id(y)))
self.handle_macro_validation(config_name, macro_fullname,
config, return_value,
no_display=(not return_value))
if error_count > 0:
msg = rose.config_editor.EVENT_MACRO_VALIDATE_RULE_PROBLEMS_FOUND
info_text = msg.format(error_count)
kind = self.reporter.KIND_ERR
else:
msg = rose.config_editor.EVENT_MACRO_VALIDATE_RULE_NO_PROBLEMS
info_text = msg
kind = self.reporter.KIND_OUT
self.reporter.report(info_text, kind=kind)
return error_count
    def clear_page_menu(self, menubar, add_menuitem):
        """Clear all page add variable items."""
        # menubar is unused but kept for callback signature compatibility.
        add_menuitem.remove_submenu()
def load_page_menu(self, menubar, add_menuitem, current_page):
"""Load the page add variable items, if any."""
if current_page is None:
return False
add_var_menu = current_page.get_add_menu()
if add_var_menu is None or not add_var_menu.get_children():
add_menuitem.set_sensitive(False)
return False
add_menuitem.set_sensitive(True)
add_menuitem.set_submenu(add_var_menu)
    def load_macro_menu(self, menubar):
        """Refresh the menu dealing with custom macro launches."""
        menubar.clear_macros()
        # NOTE: Python 2 idioms - keys() is a list here and sort() takes a
        # cmp-style comparison function.
        config_keys = self.data.config.keys()
        config_keys.sort()
        tuple_sorter = lambda x, y: cmp(x[0], y[0])
        for config_name in config_keys:
            image = self.data.helper.get_icon_path_for_config(config_name)
            macros = self.data.config[config_name].macros
            macro_tuples = rose.macro.get_macro_class_methods(macros)
            macro_tuples.sort(tuple_sorter)
            for macro_mod, macro_cls, macro_func, help in macro_tuples:
                menubar.add_macro(config_name, macro_mod, macro_cls,
                                  macro_func, help, image,
                                  self.handle_run_custom_macro)
def inspect_custom_macro(self, macro_meth):
"""Inspect a custom macro for kwargs and return any"""
arglist = inspect.getargspec(macro_meth).args
defaultlist = inspect.getargspec(macro_meth).defaults
optionals = {}
while defaultlist is not None and len(defaultlist) > 0:
if arglist[-1] not in ["self", "config", "meta_config"]:
optionals[arglist[-1]] = defaultlist[-1]
arglist = arglist[0:-1]
defaultlist = defaultlist[0:-1]
else:
break
return optionals
    def handle_graph(self):
        """Handle a graph metadata request."""
        # Map each config name to a sorted list of its section names
        # (Python 2: keys() is a list, sort() takes a cmp function).
        config_sect_dict = {}
        for config_name in self.data.config.keys():
            config_data = self.data.config[config_name]
            config_sect_dict[config_name] = config_data.sections.now.keys()
            config_sect_dict[config_name].sort(rose.config.sort_settings)
        config_name, section = self.mainwindow.launch_graph_dialog(
            config_sect_dict)
        if config_name is None:
            # Dialog was cancelled.
            return False
        if section is None:
            allowed_sections = None
        else:
            allowed_sections = [section]
        self.launch_graph(config_name, allowed_sections=allowed_sections)
    def check_entry_value(self, entry_widget, dialog, entries,
                          labels, optionals):
        """Validate macro-argument entries, highlighting any bad ones.

        The OK button is only left sensitive while every entry parses
        as a Python literal.
        """
        is_valid = True
        for k, entry in entries.items():
            this_is_valid = True
            try:
                new_val = ast.literal_eval(entry.get_text())
                entry.modify_text(gtk.STATE_NORMAL, None)
            except (ValueError, EOFError, SyntaxError):
                entry.modify_text(gtk.STATE_NORMAL, self.bad_colour)
                is_valid = False
                this_is_valid = False
            # NOTE: relies on short-circuiting - new_val may be unbound
            # when this_is_valid is False.
            if not this_is_valid or new_val != optionals[k]:
                lab = '<span foreground="blue">{0}</span>'.format(str(k)+":")
                labels[k].set_markup(lab)
            else:
                labels[k].set_text(str(k) + ":")
        dialog.set_response_sensitive(gtk.RESPONSE_OK, is_valid)
        return
def handle_macro_entry_activate(self, entry_widget, dialog, entries):
for k, entry in entries.items():
try:
ast.literal_eval(entry.get_text())
except (ValueError, EOFError, SyntaxError):
break
else:
dialog.response(gtk.RESPONSE_OK)
    def override_macro_defaults(self, optionals, methname):
        """Launch a dialog to handle capture of any override args to macro"""
        if not optionals:
            return {}
        res = {}
        # Create one labelled text input field per optional argument.
        entries = {}
        labels = {}
        errs = {}
        succeeded = False
        dialog = gtk.MessageDialog(
            None,
            gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT,
            gtk.MESSAGE_QUESTION,
            gtk.BUTTONS_OK_CANCEL,
            None)
        dialog.set_markup('Specify overrides for macro arguments:')
        dialog.set_title(methname)
        table = gtk.Table(len(optionals.items()), 2, False)
        dialog.vbox.add(table)
        # NOTE: Python 2 idiom - items() returns an indexable list here.
        for i in range(len(optionals.items())):
            k, v = optionals.items()[i]
            label = gtk.Label(str(k) + ":")
            entry = gtk.Entry()
            # Quote strings so the round trip through literal_eval works.
            if isinstance(v,str):
                entry.set_text("'" + v + "'")
            else:
                entry.set_text(str(v))
            entry.connect("changed", self.check_entry_value, dialog,
                          entries, labels, optionals)
            entry.connect("activate", self.handle_macro_entry_activate,
                          dialog, entries)
            entries[k] = entry
            labels[k] = label
            table.attach(entry, 1, 2, i, i+1)
            hbox = gtk.HBox()
            hbox.pack_start(label, expand=False)
            table.attach(hbox, 0, 1, i, i+1)
        dialog.show_all()
        response = dialog.run()
        if (response == gtk.RESPONSE_CANCEL or
                response == gtk.RESPONSE_CLOSE):
            # Cancelled: fall back to the original defaults.
            res = optionals
            dialog.destroy()
        else:
            res = {}
            for k,box in entries.items():
                res[k] = ast.literal_eval(box.get_text())
            dialog.destroy()
        return res
    def handle_run_custom_macro(self, *args, **kwargs):
        """Wrap the method so that this returns False for GTK callbacks."""
        # Returning False lets other GTK signal handlers run afterwards.
        self.run_custom_macro(*args, **kwargs)
        return False
def run_custom_macro(self, config_name=None, module_name=None,
class_name=None, method_name=None,
configs_updated=False):
"""Run the custom macro method and launch a dialog."""
old_pwd = os.getcwd()
macro_data = []
if config_name is None:
configs = sorted(self.data.config.keys())
else:
configs = [config_name]
for name in list(configs):
if self.data.config[name].is_preview:
configs.remove(name)
continue
if not configs_updated:
self.update_config(name)
if method_name is None:
method_names = [rose.macro.VALIDATE_METHOD,
rose.macro.TRANSFORM_METHOD]
else:
method_names = [method_name]
if module_name is not None and config_name is not None:
config_mod_prefix = self.data.helper.get_macro_module_prefix(config_name)
if not module_name.startswith(config_mod_prefix):
module_name = config_mod_prefix + module_name
for config_name in configs:
config_data = self.data.config[config_name]
if config_data.directory is not None:
os.chdir(config_data.directory)
for module in config_data.macros:
if module_name is not None and module.__name__ != module_name:
continue
for obj_name, obj in inspect.getmembers(module):
for method_name in method_names:
if (not hasattr(obj, method_name) or
obj_name.startswith("_") or
not issubclass(obj, rose.macro.MacroBase)):
continue
if class_name is not None and obj_name != class_name:
continue
macro_fullname = ".".join([module.__name__,
obj_name,
method_name])
err_text = (
rose.config_editor.ERROR_RUN_MACRO_TITLE.format(
macro_fullname))
try:
macro_inst = obj()
except Exception as e:
rose.gtk.dialog.run_dialog(
rose.gtk.dialog.DIALOG_TYPE_ERROR,
str(e), err_text)
continue
if hasattr(macro_inst, method_name):
macro_data.append((config_name, macro_inst,
module.__name__, obj_name,
method_name))
os.chdir(old_pwd)
if not macro_data:
return 0
sorter = rose.config.sort_settings
to_id = lambda s: self.util.get_id_from_section_option(s.section,
s.option)
config_macro_errors = []
config_macro_changes = []
for config_name, macro_inst, modname, objname, methname in macro_data:
macro_fullname = '.'.join([modname, objname, methname])
macro_config = self.data.dump_to_internal_config(config_name)
config_data = self.data.config[config_name]
meta_config = config_data.meta
macro_method = getattr(macro_inst, methname)
optionals = self.inspect_custom_macro(macro_method)
if optionals:
res = self.override_macro_defaults(optionals, objname)
else:
res = {}
os.chdir(config_data.directory)
try:
return_value = macro_method(macro_config, meta_config, **res)
except Exception as e:
rose.gtk.dialog.run_dialog(
rose.gtk.dialog.DIALOG_TYPE_ERROR,
str(e),
rose.config_editor.ERROR_RUN_MACRO_TITLE.format(
macro_fullname))
continue
if methname == rose.macro.TRANSFORM_METHOD:
if (not isinstance(return_value, tuple) or
len(return_value) != 2 or
not isinstance(return_value[0], rose.config.ConfigNode) or
not isinstance(return_value[1], list)):
self._handle_bad_macro_return(macro_fullname, return_value)
continue
integrity_exception = rose.macro.check_config_integrity(
return_value[0])
if integrity_exception is not None:
self._handle_bad_macro_return(macro_fullname,
integrity_exception)
continue
macro_config, change_list = return_value
if not change_list:
continue
change_list.sort(lambda x, y: sorter(to_id(x), to_id(y)))
num_changes = len(change_list)
self.handle_macro_transforms(config_name, macro_fullname,
macro_config, change_list)
config_macro_changes.append((config_name,
macro_fullname,
num_changes))
continue
elif methname == rose.macro.VALIDATE_METHOD:
if not isinstance(return_value, list):
self._handle_bad_macro_return(macro_fullname,
return_value)
continue
if return_value:
return_value.sort(lambda x, y: sorter(to_id(x), to_id(y)))
config_macro_errors.append((config_name,
macro_fullname,
len(return_value)))
self.handle_macro_validation(config_name, macro_fullname,
macro_config, return_value)
os.chdir(old_pwd)
if class_name is None:
# Construct a grouped report.
config_macro_errors.sort()
config_macro_changes.sort()
if rose.macro.VALIDATE_METHOD in method_names:
null_format = rose.config_editor.EVENT_MACRO_VALIDATE_ALL_OK
change_format = rose.config_editor.EVENT_MACRO_VALIDATE_ALL
num_issues = sum([e[2] for e in config_macro_errors])
issue_confs = [e[0] for e in config_macro_errors if e[2]]
else:
null_format = rose.config_editor.EVENT_MACRO_TRANSFORM_ALL_OK
change_format = rose.config_editor.EVENT_MACRO_TRANSFORM_ALL
num_issues = sum([e[2] for e in config_macro_changes])
issue_confs = [e[0] for e in config_macro_changes if e[2]]
issue_confs = sorted(set(issue_confs))
if num_issues:
issue_conf_text = self._format_macro_config_names(issue_confs)
self.reporter.report(change_format.format(issue_conf_text,
num_issues),
kind=self.reporter.KIND_ERR)
else:
all_conf_text = self._format_macro_config_names(configs)
self.reporter.report(null_format.format(all_conf_text),
kind=self.reporter.KIND_OUT)
num_errors = sum([e[2] for e in config_macro_errors])
num_changes = sum([c[2] for c in config_macro_changes])
return num_errors + num_changes
def _format_macro_config_names(self, config_names):
if len(config_names) > 5:
return rose.config_editor.EVENT_MACRO_CONFIGS.format(
len(config_names))
config_names = [c.lstrip("/") for c in config_names]
return ", ".join(config_names)
def _handle_bad_macro_return(self, macro_fullname, info):
if isinstance(info, Exception):
text = rose.config_editor.ERROR_BAD_MACRO_EXCEPTION.format(
type(info).__name__, str(info))
else:
text = rose.config_editor.ERROR_BAD_MACRO_RETURN.format(info)
summary = rose.config_editor.ERROR_RUN_MACRO_TITLE.format(
macro_fullname)
self.reporter.report(summary,
kind=self.reporter.KIND_ERR)
rose.gtk.dialog.run_dialog(
rose.gtk.dialog.DIALOG_TYPE_ERROR,
text, summary)
def handle_macro_transforms(self, config_name, macro_name,
macro_config, change_list, no_display=False,
triggers_ok=False):
"""Calculate needed changes and apply them if prompted to.
At the moment trigger-ignore of variables and sections is
assumed to be the exclusive property of the Rose trigger
macro and is not allowed for any other macro.
"""
if not change_list:
self._report_macro_transform(config_name, macro_name, 0)
return
macro_type = ".".join(macro_name.split(".")[:-1])
var_changes = []
sect_changes = []
sect_removes = []
for item in list(change_list):
if item.option is None:
sect_changes.append(item)
else:
var_changes.append(item)
search = lambda i: self.find_ns_id_func(config_name, i)
if not no_display:
proceed_ok = self.mainwindow.launch_macro_changes_dialog(
config_name, macro_type, change_list,
search_func=search)
if not proceed_ok:
self._report_macro_transform(config_name, macro_name, 0)
return 0
config_diff = macro_config - self.data.config[config_name].config
changed_ids = self.group_ops.apply_diff(config_name, config_diff,
origin_name=macro_type,
triggers_ok=triggers_ok)
self.apply_macro_transform(
config_name, changed_ids, skip_update=True)
self._report_macro_transform(config_name, macro_name, len(change_list))
return len(change_list)
def _report_macro_transform(self, config_name, macro_name, num_changes):
name = config_name.lstrip("/")
if macro_name.endswith(rose.macro.TRANSFORM_METHOD):
macro = macro_name.split('.')[-2]
else:
macro = macro_name.split('.')[-1]
kind = self.reporter.KIND_OUT
if num_changes:
info_text = rose.config_editor.EVENT_MACRO_TRANSFORM.format(
name, macro, num_changes)
else:
info_text = rose.config_editor.EVENT_MACRO_TRANSFORM_OK.format(
name, macro)
self.reporter.report(info_text, kind=kind)
    def handle_macro_validation(self, config_name, macro_name,
                                macro_config, problem_list, no_display=False):
        """Apply errors and give information to the user."""
        macro_type = ".".join(macro_name.split(".")[:-1])
        self.apply_macro_validation(config_name, macro_type, problem_list)
        search = lambda i: self.find_ns_id_func(config_name, i)
        self._report_macro_validation(config_name, macro_name,
                                      len(problem_list))
        if not no_display:
            # Show the problems in the macro changes dialog.
            self.mainwindow.launch_macro_changes_dialog(
                config_name, macro_type, problem_list,
                mode="validate", search_func=search)
def _report_macro_validation(self, config_name, macro_name, num_errors):
name = config_name.lstrip("/")
if macro_name.endswith(rose.macro.VALIDATE_METHOD):
macro = macro_name.split('.')[-2]
else:
macro = macro_name.split('.')[-1]
if num_errors:
info_text = rose.config_editor.EVENT_MACRO_VALIDATE.format(
name, macro, num_errors)
kind = self.reporter.KIND_ERR
else:
info_text = rose.config_editor.EVENT_MACRO_VALIDATE_OK.format(
name, macro)
kind = self.reporter.KIND_OUT
self.reporter.report(info_text, kind=kind)
    def handle_upgrade(self, only_this_config_name=None):
        """Run the upgrade manager for this suite."""
        # Build the {name: {"config": ..., "directory": ...}} mapping the
        # upgrade controller expects, skipping preview configs.
        config_dict = {}
        for config_name in self.data.config.keys():
            config_data = self.data.config[config_name]
            if config_data.is_preview:
                continue
            self.update_config(config_name)
            if (only_this_config_name is None or
                    config_name == only_this_config_name):
                config_dict[config_name] = {
                    "config": config_data.config,
                    "directory": config_data.directory
                }
        rose.config_editor.upgrade_controller.UpgradeController(
            config_dict, self.handle_macro_transforms,
            parent_window=self.mainwindow.window,
            upgrade_inspector=self.override_macro_defaults)
    def help(self, *args):
        """Handle a GUI help request."""
        self.mainwindow.launch_help_dialog()
    def prefs(self, args):
        """Handle a Preferences view request."""
        self.mainwindow.launch_prefs()
def launch_browser(self):
start_directory = self.data.top_level_directory
if self.data.top_level_directory is None:
start_directory = os.getcwd()
try:
rose.external.launch_fs_browser(start_directory)
except rose.popen.RosePopenError as exc:
rose.gtk.dialog.run_exception_dialog(exc)
    def launch_graph(self, namespace, allowed_sections=None):
        """Run "rose metadata-graph" for the namespace, if possible."""
        # pygraphviz is an optional dependency - warn rather than crash.
        try:
            import pygraphviz
        except ImportError as e:
            title = rose.config_editor.WARNING_CANNOT_GRAPH
            rose.gtk.dialog.run_dialog(rose.gtk.dialog.DIALOG_TYPE_ERROR,
                                       str(e), title)
            return
        config_name, subsp = self.util.split_full_ns(self.data, namespace)
        self.update_config(config_name)
        config_data = self.data.config[config_name]
        if config_data.directory is None:
            return False
        if allowed_sections is None:
            if config_name == namespace:
                # Whole-config request: graph everything.
                allowed_sections = []
            else:
                allowed_sections = (
                    self.data.helper.get_sections_from_namespace(namespace))
        cmd = (shlex.split(rose.config_editor.LAUNCH_COMMAND_GRAPH) +
               [config_data.directory] + allowed_sections)
        try:
            rose.popen.RosePopener().run_bg(
                *cmd, stdout=sys.stdout, stderr=sys.stderr)
        except rose.popen.RosePopenError as exc:
            rose.gtk.dialog.run_exception_dialog(exc)
def launch_scheduler(self, *args):
"""Run the scheduler for a suite open in config edit."""
this_id = self.data.top_level_name
scontrol = rose.suite_control.SuiteControl()
if scontrol.suite_engine_proc.is_suite_registered(this_id):
return scontrol.gcontrol(this_id)
else:
msg = rose.config_editor.DIALOG_TEXT_UNREGISTERED_SUITE.format(
this_id)
return rose.gtk.dialog.run_dialog(
rose.gtk.dialog.DIALOG_TYPE_ERROR,
msg,
rose.config_editor.DIALOG_TITLE_UNREGISTERED_SUITE)
    def launch_terminal(self):
        """Handle a launch terminal request."""
        try:
            rose.external.launch_terminal()
        except rose.popen.RosePopenError as exc:
            rose.gtk.dialog.run_exception_dialog(exc)
def launch_output_viewer(self):
"""View a suite's output, if any."""
g = rose.suite_engine_proc.SuiteEngineProcessor.get_processor()
try:
g.launch_suite_log_browser(None, self.data.top_level_name)
except rose.suite_engine_proc.NoSuiteLogError:
rose.gtk.dialog.run_dialog(
rose.gtk.dialog.DIALOG_TYPE_ERROR,
rose.config_editor.ERROR_NO_OUTPUT.format(
self.data.top_level_name),
rose.config_editor.DIALOG_TITLE_ERROR)
    def get_run_suite_args(self, *args):
        """Ask the user for custom arguments to suite run."""
        help_cmds = shlex.split(rose.config_editor.LAUNCH_SUITE_RUN_HELP)
        # Capture the suite-run help text to display in the dialog.
        help_text = subprocess.Popen(help_cmds,
                                     stdout=subprocess.PIPE).communicate()[0]
        rose.gtk.dialog.run_command_arg_dialog(
            rose.config_editor.LAUNCH_SUITE_RUN,
            help_text, self.run_suite_check_args)
    def run_suite_check_args(self, args):
        """Run the suite with args, unless the dialog was cancelled."""
        if args is None:
            return False
        self.run_suite(args)
def run_suite(self, args=None, **kwargs):
"""Run the suite, if possible."""
if not isinstance(args, list):
args = []
for key, value in kwargs.items():
args.extend([key, value])
rose.gtk.run.run_suite(*args)
return False
    def transform_default(self, only_this_config=None):
        """Run the Rose built-in transformer macros."""
        if (only_this_config is not None and
                only_this_config in self.data.config.keys()):
            config_keys = [only_this_config]
            text = rose.config_editor.DIALOG_LABEL_AUTOFIX
        else:
            config_keys = sorted(self.data.config.keys())
            text = rose.config_editor.DIALOG_LABEL_AUTOFIX_ALL
        # Ask for confirmation before applying any automatic fixes.
        proceed = rose.gtk.dialog.run_dialog(
            rose.gtk.dialog.DIALOG_TYPE_WARNING,
            text,
            rose.config_editor.DIALOG_TITLE_AUTOFIX,
            cancel=True)
        if not proceed:
            return False
        sorter = rose.config.sort_settings
        to_id = lambda s: self.util.get_id_from_section_option(s.section,
                                                               s.option)
        for config_name in config_keys:
            macro_config = self.data.dump_to_internal_config(config_name)
            meta_config = self.data.config[config_name].meta
            macro = rose.macros.DefaultTransforms()
            config, change_list = macro.transform(macro_config, meta_config)
            # Python 2 cmp-style sort by (section, option) id.
            change_list.sort(lambda x, y: sorter(to_id(x), to_id(y)))
            self.handle_macro_transforms(
                config_name, "Autofixer.transform",
                macro_config, change_list, triggers_ok=True)
| gpl-3.0 |
bhargav2408/python-for-android | python3-alpha/python3-src/Lib/test/test_imp.py | 48 | 13451 | import imp
import os
import os.path
import shutil
import sys
import unittest
from test import support
import importlib
class LockTests(unittest.TestCase):
    """Very basic test of import lock functions."""
    def verify_lock_state(self, expected):
        # Assert whether the interpreter-wide import lock is held.
        self.assertEqual(imp.lock_held(), expected,
                         "expected imp.lock_held() to be %r" % expected)
    def testLock(self):
        LOOPS = 50
        # The import lock may already be held, e.g. if the test suite is run
        # via "import test.autotest".
        lock_held_at_start = imp.lock_held()
        self.verify_lock_state(lock_held_at_start)
        # The lock is reentrant: acquire/release in balanced pairs.
        for i in range(LOOPS):
            imp.acquire_lock()
            self.verify_lock_state(True)
        for i in range(LOOPS):
            imp.release_lock()
        # The original state should be restored now.
        self.verify_lock_state(lock_held_at_start)
        if not lock_held_at_start:
            # An extra release without a matching acquire must fail.
            try:
                imp.release_lock()
            except RuntimeError:
                pass
            else:
                self.fail("release_lock() without lock should raise "
                            "RuntimeError")
class ImportTests(unittest.TestCase):
    """Tests of source-encoding handling in imp.find_module/load_module."""
    def setUp(self):
        # test.encoded_modules provides sample modules in several codecs.
        mod = importlib.import_module('test.encoded_modules')
        self.test_strings = mod.test_strings
        self.test_path = mod.__path__
    def test_import_encoded_module(self):
        # Each encoded module must decode to the expected test string.
        for modname, encoding, teststr in self.test_strings:
            mod = importlib.import_module('test.encoded_modules.'
                                          'module_' + modname)
            self.assertEqual(teststr, mod.test)
    def test_find_module_encoding(self):
        # find_module must honour each module's coding cookie.
        for mod, encoding, _ in self.test_strings:
            with imp.find_module('module_' + mod, self.test_path)[0] as fd:
                self.assertEqual(fd.encoding, encoding)
    def test_issue1267(self):
        # The file object returned by find_module must be positioned at
        # the start and use the declared encoding.
        for mod, encoding, _ in self.test_strings:
            fp, filename, info = imp.find_module('module_' + mod,
                                                 self.test_path)
            with fp:
                self.assertNotEqual(fp, None)
                self.assertEqual(fp.encoding, encoding)
                self.assertEqual(fp.tell(), 0)
                self.assertEqual(fp.readline(), '# test %s encoding\n'
                                 % encoding)
        fp, filename, info = imp.find_module("tokenize")
        with fp:
            self.assertNotEqual(fp, None)
            self.assertEqual(fp.encoding, "utf-8")
            self.assertEqual(fp.tell(), 0)
            self.assertEqual(fp.readline(),
                             '"""Tokenization help for Python programs.\n')
    def test_issue3594(self):
        # A non-default coding cookie must be picked up by find_module.
        temp_mod_name = 'test_imp_helper'
        sys.path.insert(0, '.')
        try:
            with open(temp_mod_name + '.py', 'w') as file:
                file.write("# coding: cp1252\nu = 'test.test_imp'\n")
            file, filename, info = imp.find_module(temp_mod_name)
            file.close()
            self.assertEqual(file.encoding, 'cp1252')
        finally:
            del sys.path[0]
            support.unlink(temp_mod_name + '.py')
            support.unlink(temp_mod_name + '.pyc')
            support.unlink(temp_mod_name + '.pyo')
    def test_issue5604(self):
        # Test cannot cover imp.load_compiled function.
        # Martin von Loewis note what shared library cannot have non-ascii
        # character because init_xxx function cannot be compiled
        # and issue never happens for dynamic modules.
        # But sources modified to follow generic way for processing pathes.
        # the return encoding could be uppercase or None
        fs_encoding = sys.getfilesystemencoding()
        # covers utf-8 and Windows ANSI code pages
        # one non-space symbol from every page
        # (http://en.wikipedia.org/wiki/Code_page)
        known_locales = {
            'utf-8' : b'\xc3\xa4',
            'cp1250' : b'\x8C',
            'cp1251' : b'\xc0',
            'cp1252' : b'\xc0',
            'cp1253' : b'\xc1',
            'cp1254' : b'\xc0',
            'cp1255' : b'\xe0',
            'cp1256' : b'\xe0',
            'cp1257' : b'\xc0',
            'cp1258' : b'\xc0',
            }
        if sys.platform == 'darwin':
            self.assertEqual(fs_encoding, 'utf-8')
            # Mac OS X uses the Normal Form D decomposition
            # http://developer.apple.com/mac/library/qa/qa2001/qa1173.html
            special_char = b'a\xcc\x88'
        else:
            special_char = known_locales.get(fs_encoding)
        if not special_char:
            self.skipTest("can't run this test with %s as filesystem encoding"
                          % fs_encoding)
        decoded_char = special_char.decode(fs_encoding)
        # Use a non-ascii module/package name to exercise path decoding.
        temp_mod_name = 'test_imp_helper_' + decoded_char
        test_package_name = 'test_imp_helper_package_' + decoded_char
        init_file_name = os.path.join(test_package_name, '__init__.py')
        try:
            # if the curdir is not in sys.path the test fails when run with
            # ./python ./Lib/test/regrtest.py test_imp
            sys.path.insert(0, os.curdir)
            with open(temp_mod_name + '.py', 'w') as file:
                file.write('a = 1\n')
            file, filename, info = imp.find_module(temp_mod_name)
            with file:
                self.assertIsNotNone(file)
                self.assertTrue(filename[:-3].endswith(temp_mod_name))
                self.assertEqual(info[0], '.py')
                self.assertEqual(info[1], 'U')
                self.assertEqual(info[2], imp.PY_SOURCE)
                mod = imp.load_module(temp_mod_name, file, filename, info)
                self.assertEqual(mod.a, 1)
            mod = imp.load_source(temp_mod_name, temp_mod_name + '.py')
            self.assertEqual(mod.a, 1)
            mod = imp.load_compiled(
                temp_mod_name, imp.cache_from_source(temp_mod_name + '.py'))
            self.assertEqual(mod.a, 1)
            if not os.path.exists(test_package_name):
                os.mkdir(test_package_name)
            with open(init_file_name, 'w') as file:
                file.write('b = 2\n')
            package = imp.load_package(test_package_name, test_package_name)
            self.assertEqual(package.b, 2)
        finally:
            del sys.path[0]
            for ext in ('.py', '.pyc', '.pyo'):
                support.unlink(temp_mod_name + ext)
                support.unlink(init_file_name + ext)
            support.rmtree(test_package_name)
    def test_issue9319(self):
        # find_module must raise SyntaxError on a file with bad syntax.
        path = os.path.dirname(__file__)
        self.assertRaises(SyntaxError,
                          imp.find_module, "badsyntax_pep3120", [path])
class ReloadTests(unittest.TestCase):
    """Very basic tests to make sure that imp.reload() operates just like
    reload()."""
    def test_source(self):
        # XXX (ncoghlan): It would be nice to use test.support.CleanImport
        # here, but that breaks because the os module registers some
        # handlers in copy_reg on import. Since CleanImport doesn't
        # revert that registration, the module is left in a broken
        # state after reversion. Reinitialising the module contents
        # and just reverting os.environ to its previous state is an OK
        # workaround
        with support.EnvironmentVarGuard():
            import os
            imp.reload(os)
    def test_extension(self):
        # Reload an extension module.
        with support.CleanImport('time'):
            import time
            imp.reload(time)
    def test_builtin(self):
        # Reload a built-in module.
        with support.CleanImport('marshal'):
            import marshal
            imp.reload(marshal)
class PEP3147Tests(unittest.TestCase):
    """Tests of PEP 3147."""
    # Interpreter magic tag (e.g. 'cpython-32') embedded in __pycache__ names.
    tag = imp.get_tag()
    def test_cache_from_source(self):
        # Given the path to a .py file, return the path to its PEP 3147
        # defined .pyc file (i.e. under __pycache__).
        self.assertEqual(
            imp.cache_from_source('/foo/bar/baz/qux.py', True),
            '/foo/bar/baz/__pycache__/qux.{}.pyc'.format(self.tag))
    def test_cache_from_source_optimized(self):
        # Given the path to a .py file, return the path to its PEP 3147
        # defined .pyo file (i.e. under __pycache__).
        self.assertEqual(
            imp.cache_from_source('/foo/bar/baz/qux.py', False),
            '/foo/bar/baz/__pycache__/qux.{}.pyo'.format(self.tag))
    def test_cache_from_source_cwd(self):
        # A relative source path yields a relative __pycache__ path.
        self.assertEqual(imp.cache_from_source('foo.py', True),
                         os.sep.join(('__pycache__',
                                      'foo.{}.pyc'.format(self.tag))))
    def test_cache_from_source_override(self):
        # When debug_override is not None, it can be any true-ish or false-ish
        # value.
        self.assertEqual(
            imp.cache_from_source('/foo/bar/baz.py', []),
            '/foo/bar/__pycache__/baz.{}.pyo'.format(self.tag))
        self.assertEqual(
            imp.cache_from_source('/foo/bar/baz.py', [17]),
            '/foo/bar/__pycache__/baz.{}.pyc'.format(self.tag))
        # However if the bool-ishness can't be determined, the exception
        # propagates.
        class Bearish:
            def __bool__(self): raise RuntimeError
        self.assertRaises(
            RuntimeError,
            imp.cache_from_source, '/foo/bar/baz.py', Bearish())
    @unittest.skipIf(os.altsep is None,
                     'test meaningful only where os.altsep is defined')
    def test_altsep_cache_from_source(self):
        # Windows path and PEP 3147.
        self.assertEqual(
            imp.cache_from_source('\\foo\\bar\\baz\\qux.py', True),
            '\\foo\\bar\\baz\\__pycache__\\qux.{}.pyc'.format(self.tag))
    @unittest.skipIf(os.altsep is None,
                     'test meaningful only where os.altsep is defined')
    def test_altsep_and_sep_cache_from_source(self):
        # Windows path and PEP 3147 where altsep is right of sep.
        self.assertEqual(
            imp.cache_from_source('\\foo\\bar/baz\\qux.py', True),
            '\\foo\\bar/baz\\__pycache__\\qux.{}.pyc'.format(self.tag))
    @unittest.skipIf(os.altsep is None,
                     'test meaningful only where os.altsep is defined')
    def test_sep_altsep_and_sep_cache_from_source(self):
        # Windows path and PEP 3147 where sep is right of altsep.
        self.assertEqual(
            imp.cache_from_source('\\foo\\bar\\baz/qux.py', True),
            '\\foo\\bar\\baz/__pycache__/qux.{}.pyc'.format(self.tag))
    def test_source_from_cache(self):
        # Given the path to a PEP 3147 defined .pyc file, return the path to
        # its source. This tests the good path.
        self.assertEqual(imp.source_from_cache(
            '/foo/bar/baz/__pycache__/qux.{}.pyc'.format(self.tag)),
            '/foo/bar/baz/qux.py')
    def test_source_from_cache_bad_path(self):
        # When the path to a pyc file is not in PEP 3147 format, a ValueError
        # is raised.
        self.assertRaises(
            ValueError, imp.source_from_cache, '/foo/bar/bazqux.pyc')
    def test_source_from_cache_no_slash(self):
        # No slashes at all in path -> ValueError
        self.assertRaises(
            ValueError, imp.source_from_cache, 'foo.cpython-32.pyc')
    def test_source_from_cache_too_few_dots(self):
        # Too few dots in final path component -> ValueError
        self.assertRaises(
            ValueError, imp.source_from_cache, '__pycache__/foo.pyc')
    def test_source_from_cache_too_many_dots(self):
        # Too many dots in final path component -> ValueError
        self.assertRaises(
            ValueError, imp.source_from_cache,
            '__pycache__/foo.cpython-32.foo.pyc')
    def test_source_from_cache_no__pycache__(self):
        # Another problem with the path -> ValueError
        self.assertRaises(
            ValueError, imp.source_from_cache,
            '/foo/bar/foo.cpython-32.foo.pyc')
    def test_package___file__(self):
        # Test that a package's __file__ points to the right source directory.
        os.mkdir('pep3147')
        sys.path.insert(0, os.curdir)
        def cleanup():
            # Undo the sys.path change and remove the scratch package tree.
            if sys.path[0] == os.curdir:
                del sys.path[0]
            shutil.rmtree('pep3147')
        self.addCleanup(cleanup)
        # Touch the __init__.py file.
        with open('pep3147/__init__.py', 'w'):
            pass
        m = __import__('pep3147')
        # Ensure we load the pyc file.
        support.forget('pep3147')
        m = __import__('pep3147')
        self.assertEqual(m.__file__,
                         os.sep.join(('.', 'pep3147', '__init__.py')))
class NullImporterTests(unittest.TestCase):
    """Tests for imp.NullImporter."""
    @unittest.skipIf(support.TESTFN_UNENCODABLE is None,
                     "Need an undecodeable filename")
    def test_unencodeable(self):
        # NullImporter must raise ImportError for an existing directory,
        # even when the directory name cannot be encoded.
        name = support.TESTFN_UNENCODABLE
        os.mkdir(name)
        try:
            self.assertRaises(ImportError, imp.NullImporter, name)
        finally:
            # Always clean up the scratch directory, pass or fail.
            os.rmdir(name)
def test_main():
    """Run this module's test cases (LockTests only when threading exists)."""
    tests = [
        ImportTests,
        PEP3147Tests,
        ReloadTests,
        NullImporterTests,
        ]
    try:
        import _thread
    except ImportError:
        pass
    else:
        # Import-lock tests are only meaningful with threading support.
        tests.append(LockTests)
    support.run_unittest(*tests)
if __name__ == "__main__":
    test_main()
| apache-2.0 |
sacharya/nova | nova/api/openstack/compute/contrib/baremetal_ext_status.py | 3 | 1038 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.api.openstack import extensions
class Baremetal_ext_status(extensions.ExtensionDescriptor):
    """Add extended status in Baremetal Nodes v2 API."""
    # Human-readable name and alias under which the extension is advertised.
    name = "BareMetalExtStatus"
    alias = "os-baremetal-ext-status"
    # XML namespace used when serializing the extension.
    namespace = ("http://docs.openstack.org/compute/ext/"
                 "baremetal_ext_status/api/v2")
    # Last-updated timestamp (ISO 8601) reported by the extensions API.
    updated = "2013-08-27T00:00:00+00:00"
| apache-2.0 |
joakim-hove/django | django/utils/decorators.py | 126 | 6875 | "Functions that help with dynamically creating decorators for views."
try:
from contextlib import ContextDecorator
except ImportError:
ContextDecorator = None
from functools import WRAPPER_ASSIGNMENTS, update_wrapper, wraps
from django.utils import six
class classonlymethod(classmethod):
    """A classmethod variant that refuses access through an instance."""
    def __get__(self, instance, owner):
        # Class access (instance is None) behaves exactly like classmethod;
        # instance access is rejected outright.
        if instance is None:
            return super(classonlymethod, self).__get__(instance, owner)
        raise AttributeError("This method is available only on the class, not on instances.")
def method_decorator(decorator, name=''):
    """
    Converts a function decorator into a method decorator.

    May be applied to a method directly, or to a class by also passing
    ``name`` (the attribute name of the method to wrap on that class).
    """
    # 'obj' can be a class or a function. If 'obj' is a function at the time it
    # is passed to _dec, it will eventually be a method of the class it is
    # defined on. If 'obj' is a class, the 'name' is required to be the name
    # of the method that will be decorated.
    def _dec(obj):
        is_class = isinstance(obj, type)
        if is_class:
            if name and hasattr(obj, name):
                func = getattr(obj, name)
                if not callable(func):
                    raise TypeError(
                        "Cannot decorate '{0}' as it isn't a callable "
                        "attribute of {1} ({2})".format(name, obj, func)
                    )
            else:
                raise ValueError(
                    "The keyword argument `name` must be the name of a method "
                    "of the decorated class: {0}. Got '{1}' instead".format(
                        obj, name,
                    )
                )
        else:
            func = obj
        def _wrapper(self, *args, **kwargs):
            @decorator
            def bound_func(*args2, **kwargs2):
                return func.__get__(self, type(self))(*args2, **kwargs2)
            # bound_func has the signature that 'decorator' expects i.e. no
            # 'self' argument, but it is a closure over self so it can call
            # 'func' correctly.
            return bound_func(*args, **kwargs)
        # In case 'decorator' adds attributes to the function it decorates, we
        # want to copy those. We don't have access to bound_func in this scope,
        # but we can cheat by using it on a dummy function.
        @decorator
        def dummy(*args, **kwargs):
            pass
        update_wrapper(_wrapper, dummy)
        # Need to preserve any existing attributes of 'func', including the name.
        update_wrapper(_wrapper, func)
        if is_class:
            # Class form: install the wrapper in place of the named method and
            # return the (mutated) class itself.
            setattr(obj, name, _wrapper)
            return obj
        return _wrapper
    update_wrapper(_dec, decorator, assigned=available_attrs(decorator))
    # Change the name to aid debugging.
    if hasattr(decorator, '__name__'):
        _dec.__name__ = 'method_decorator(%s)' % decorator.__name__
    else:
        # Callable instances (no __name__): fall back to the class name.
        _dec.__name__ = 'method_decorator(%s)' % decorator.__class__.__name__
    return _dec
def decorator_from_middleware_with_args(middleware_class):
    """
    Like decorator_from_middleware, but returns a function
    that accepts the arguments to be passed to the middleware_class.
    Use like::
         cache_page = decorator_from_middleware_with_args(CacheMiddleware)
         # ...
         @cache_page(3600)
         def my_view(request):
             # ...
    """
    # Note: unlike decorator_from_middleware below, the factory is NOT called
    # here -- the caller supplies the constructor arguments later.
    return make_middleware_decorator(middleware_class)
def decorator_from_middleware(middleware_class):
    """
    Given a middleware class (not an instance), returns a view decorator. This
    lets you use middleware functionality on a per-view basis. The middleware
    is created with no params passed.
    """
    return make_middleware_decorator(middleware_class)()
def available_attrs(fn):
    """
    Return the list of functools-wrappable attributes on a callable.
    This is required as a workaround for http://bugs.python.org/issue3445
    under Python 2.
    """
    if six.PY3:
        return WRAPPER_ASSIGNMENTS
    else:
        # Python 2: some callables lack attributes such as __name__;
        # only copy the ones actually present on fn.
        return tuple(a for a in WRAPPER_ASSIGNMENTS if hasattr(fn, a))
def make_middleware_decorator(middleware_class):
    """
    Turn a middleware class into a view-decorator factory.

    Returns a callable accepting the middleware constructor arguments and
    returning a decorator. The decorator runs the middleware's
    process_request / process_view / process_exception /
    process_template_response / process_response hooks (each only if the
    middleware defines it) around the wrapped view.
    """
    def _make_decorator(*m_args, **m_kwargs):
        # One middleware instance is shared by every request through this view.
        middleware = middleware_class(*m_args, **m_kwargs)
        def _decorator(view_func):
            @wraps(view_func, assigned=available_attrs(view_func))
            def _wrapped_view(request, *args, **kwargs):
                # A non-None result from process_request/process_view
                # short-circuits the view entirely.
                if hasattr(middleware, 'process_request'):
                    result = middleware.process_request(request)
                    if result is not None:
                        return result
                if hasattr(middleware, 'process_view'):
                    result = middleware.process_view(request, view_func, args, kwargs)
                    if result is not None:
                        return result
                try:
                    response = view_func(request, *args, **kwargs)
                except Exception as e:
                    if hasattr(middleware, 'process_exception'):
                        result = middleware.process_exception(request, e)
                        if result is not None:
                            return result
                    raise
                if hasattr(response, 'render') and callable(response.render):
                    if hasattr(middleware, 'process_template_response'):
                        response = middleware.process_template_response(request, response)
                    # Defer running of process_response until after the template
                    # has been rendered:
                    if hasattr(middleware, 'process_response'):
                        # def instead of a name-bound lambda (PEP 8 E731);
                        # behavior is identical.
                        def callback(response):
                            return middleware.process_response(request, response)
                        response.add_post_render_callback(callback)
                else:
                    if hasattr(middleware, 'process_response'):
                        return middleware.process_response(request, response)
                return response
            return _wrapped_view
        return _decorator
    return _make_decorator
if ContextDecorator is None:
    # ContextDecorator was introduced in Python 3.2
    # See https://docs.python.org/3/library/contextlib.html#contextlib.ContextDecorator
    class ContextDecorator(object):
        """
        A base class that enables a context manager to also be used as a decorator.
        """
        def __call__(self, func):
            @wraps(func, assigned=available_attrs(func))
            def inner(*args, **kwargs):
                # Each call enters/exits `self` as a context manager around func.
                with self:
                    return func(*args, **kwargs)
            return inner
class classproperty(object):
    """
    Descriptor that exposes a method as a read-only property computed on the
    owner class: the getter is called with the class, never the instance.
    """
    def __init__(self, method=None):
        # Accessor function; may also be assigned later via getter().
        self.fget = method
    def __get__(self, instance, owner):
        # Always pass the owner class, regardless of instance access.
        return self.fget(owner)
    def getter(self, method):
        # Mirrors property.getter(): replace the accessor and return self
        # so it can be used as a decorator.
        self.fget = method
        return self
| bsd-3-clause |
Zerschmetterling91/three.js | utils/exporters/blender/addons/io_three/exporter/utilities.py | 225 | 1229 | import uuid
import hashlib
from .. import constants
ROUND = constants.DEFAULT_PRECISION
def bit_mask(flags):
    """Generate a bit mask.

    For each entry in constants.MASK the corresponding bit is set when the
    flag is truthy in ``flags`` and cleared otherwise (clearing is a no-op
    on the zero-initialised mask, kept for symmetry/clarity).

    :type flags: dict
    :return: int

    """
    bit = 0
    # Plain if/else instead of name-bound lambdas (PEP 8 E731); identical result.
    for mask, position in constants.MASK.items():
        if flags.get(mask):
            bit |= (1 << position)
        else:
            bit &= ~(1 << position)
    return bit
def hash(value):
    """Return the hex MD5 digest of ``repr(value)``.

    :param value: any object with a stable repr()
    :rtype: str

    """
    digest = hashlib.md5(repr(value).encode('utf8'))
    return digest.hexdigest()
def id():
    """Generate a random, upper-cased UUID4 string.

    :rtype: str

    """
    token = uuid.uuid4()
    return str(token).upper()
def id_from_name(name):
    """Deterministically derive an upper-cased UUID3 (DNS namespace) from *name*.

    :type name: str
    :rtype: str

    """
    derived = uuid.uuid3(uuid.NAMESPACE_DNS, name)
    return str(derived).upper()
def rgb2int(rgb):
    """Convert an RGB triplet of floats in [0, 1] to a packed 24-bit integer.

    The tuple->list copy in the original was dead code (indexing works on
    both), so it has been removed; behavior is unchanged.

    :type rgb: list|tuple
    :rtype: int

    """
    # Scale each channel to 0-255 and pack as 0xRRGGBB.
    return (int(rgb[0]*255) << 16) + (int(rgb[1]*255) << 8) + int(rgb[2]*255)
| mit |
mpvismer/pyqtgraph | pyqtgraph/GraphicsScene/GraphicsScene.py | 16 | 24745 | import weakref
from ..Qt import QtCore, QtGui
from ..python2_3 import sortList, cmp
from ..Point import Point
from .. import functions as fn
from .. import ptime as ptime
from .mouseEvents import *
from .. import debug as debug
if hasattr(QtCore, 'PYQT_VERSION'):
try:
import sip
HAVE_SIP = True
except ImportError:
HAVE_SIP = False
else:
HAVE_SIP = False
__all__ = ['GraphicsScene']
class GraphicsScene(QtGui.QGraphicsScene):
"""
Extension of QGraphicsScene that implements a complete, parallel mouse event system.
(It would have been preferred to just alter the way QGraphicsScene creates and delivers
events, but this turned out to be impossible because the constructor for QGraphicsMouseEvent
is private)
* Generates MouseClicked events in addition to the usual press/move/release events.
(This works around a problem where it is impossible to have one item respond to a
drag if another is watching for a click.)
* Adjustable radius around click that will catch objects so you don't have to click *exactly* over small/thin objects
* Global context menu--if an item implements a context menu, then its parent(s) may also add items to the menu.
* Allows items to decide _before_ a mouse click which item will be the recipient of mouse events.
This lets us indicate unambiguously to the user which item they are about to click/drag on
* Eats mouseMove events that occur too soon after a mouse press.
* Reimplements items() and itemAt() to circumvent PyQt bug
Mouse interaction is as follows:
1) Every time the mouse moves, the scene delivers both the standard hoverEnter/Move/LeaveEvents
as well as custom HoverEvents.
2) Items are sent HoverEvents in Z-order and each item may optionally call event.acceptClicks(button),
acceptDrags(button) or both. If this method call returns True, this informs the item that _if_
the user clicks/drags the specified mouse button, the item is guaranteed to be the
recipient of click/drag events (the item may wish to change its appearance to indicate this).
If the call to acceptClicks/Drags returns False, then the item is guaranteed to *not* receive
the requested event (because another item has already accepted it).
3) If the mouse is clicked, a mousePressEvent is generated as usual. If any items accept this press event, then
No click/drag events will be generated and mouse interaction proceeds as defined by Qt. This allows
items to function properly if they are expecting the usual press/move/release sequence of events.
(It is recommended that items do NOT accept press events, and instead use click/drag events)
Note: The default implementation of QGraphicsItem.mousePressEvent will *accept* the event if the
item is has its Selectable or Movable flags enabled. You may need to override this behavior.
4) If no item accepts the mousePressEvent, then the scene will begin delivering mouseDrag and/or mouseClick events.
If the mouse is moved a sufficient distance (or moved slowly enough) before the button is released,
then a mouseDragEvent is generated.
If no drag events are generated before the button is released, then a mouseClickEvent is generated.
5) Click/drag events are delivered to the item that called acceptClicks/acceptDrags on the HoverEvent
in step 1. If no such items exist, then the scene attempts to deliver the events to items near the event.
ClickEvents may be delivered in this way even if no
item originally claimed it could accept the click. DragEvents may only be delivered this way if it is the initial
move in a drag.
"""
sigMouseHover = QtCore.Signal(object) ## emits a list of objects hovered over
sigMouseMoved = QtCore.Signal(object) ## emits position of mouse on every move
sigMouseClicked = QtCore.Signal(object) ## emitted when mouse is clicked. Check for event.isAccepted() to see whether the event has already been acted on.
sigPrepareForPaint = QtCore.Signal() ## emitted immediately before the scene is about to be rendered
_addressCache = weakref.WeakValueDictionary()
ExportDirectory = None
    @classmethod
    def registerObject(cls, obj):
        """
        Workaround for PyQt bug in qgraphicsscene.items()
        All subclasses of QGraphicsObject must register themselves with this function.
        (otherwise, mouse interaction with those objects will likely fail)
        """
        if HAVE_SIP and isinstance(obj, sip.wrapper):
            cls._addressCache[sip.unwrapinstance(sip.cast(obj, QtGui.QGraphicsItem))] = obj
    def __init__(self, clickRadius=2, moveDistance=5, parent=None):
        """Create the scene; clickRadius/moveDistance configure the custom
        click/drag event system (see setClickRadius / setMoveDistance)."""
        QtGui.QGraphicsScene.__init__(self, parent)
        self.setClickRadius(clickRadius)
        self.setMoveDistance(moveDistance)
        self.exportDirectory = None
        self.clickEvents = []        # pending MouseClickEvents since the press
        self.dragButtons = []        # buttons currently considered "dragging"
        self.mouseGrabber = None
        self.dragItem = None         # item receiving the current drag, if any
        self.lastDrag = None
        self.hoverItems = weakref.WeakKeyDictionary()
        self.lastHoverEvent = None
        self.contextMenu = [QtGui.QAction("Export...", self)]
        self.contextMenu[0].triggered.connect(self.showExportDialog)
        self.exportDialog = None
    def render(self, *args):
        """Render the scene, first emitting sigPrepareForPaint (see below)."""
        self.prepareForPaint()
        return QtGui.QGraphicsScene.render(self, *args)
    def prepareForPaint(self):
        """Called before every render. This method will inform items that the scene is about to
        be rendered by emitting sigPrepareForPaint.
        This allows items to delay expensive processing until they know a paint will be required."""
        self.sigPrepareForPaint.emit()
    def setClickRadius(self, r):
        """
        Set the distance away from mouse clicks to search for interacting items.
        When clicking, the scene searches first for items that directly intersect the click position
        followed by any other items that are within a rectangle that extends r pixels away from the
        click position.
        """
        self._clickRadius = r
    def setMoveDistance(self, d):
        """
        Set the distance the mouse must move after a press before mouseMoveEvents will be delivered.
        This ensures that clicks with a small amount of movement are recognized as clicks instead of
        drags.
        """
        self._moveDistance = d
    def mousePressEvent(self, ev):
        """Deliver the press to Qt, then queue a MouseClickEvent and set focus
        if no item grabbed the press."""
        #print 'scenePress'
        QtGui.QGraphicsScene.mousePressEvent(self, ev)
        if self.mouseGrabberItem() is None: ## nobody claimed press; we are free to generate drag/click events
            if self.lastHoverEvent is not None:
                # If the mouse has moved since the last hover event, send a new one.
                # This can happen if a context menu is open while the mouse is moving.
                if ev.scenePos() != self.lastHoverEvent.scenePos():
                    self.sendHoverEvents(ev)
            self.clickEvents.append(MouseClickEvent(ev))
            ## set focus on the topmost focusable item under this click
            items = self.items(ev.scenePos())
            for i in items:
                if i.isEnabled() and i.isVisible() and int(i.flags() & i.ItemIsFocusable) > 0:
                    i.setFocus(QtCore.Qt.MouseFocusReason)
                    break
    def mouseMoveEvent(self, ev):
        """Emit sigMouseMoved, deliver Qt + custom hover events, and promote a
        press into a drag once it exceeds the move-distance/time threshold."""
        self.sigMouseMoved.emit(ev.scenePos())
        ## First allow QGraphicsScene to deliver hoverEnter/Move/ExitEvents
        QtGui.QGraphicsScene.mouseMoveEvent(self, ev)
        ## Next deliver our own HoverEvents
        self.sendHoverEvents(ev)
        if int(ev.buttons()) != 0: ## button is pressed; send mouseMoveEvents and mouseDragEvents
            QtGui.QGraphicsScene.mouseMoveEvent(self, ev)
            if self.mouseGrabberItem() is None:
                now = ptime.time()
                init = False
                ## keep track of which buttons are involved in dragging
                for btn in [QtCore.Qt.LeftButton, QtCore.Qt.MidButton, QtCore.Qt.RightButton]:
                    if int(ev.buttons() & btn) == 0:
                        continue
                    if int(btn) not in self.dragButtons: ## see if we've dragged far enough yet
                        cev = [e for e in self.clickEvents if int(e.button()) == int(btn)][0]
                        dist = Point(ev.screenPos() - cev.screenPos())
                        # Small, quick movements stay classified as clicks.
                        if dist.length() < self._moveDistance and now - cev.time() < 0.5:
                            continue
                        init = init or (len(self.dragButtons) == 0) ## If this is the first button to be dragged, then init=True
                        self.dragButtons.append(int(btn))
                ## If we have dragged buttons, deliver a drag event
                if len(self.dragButtons) > 0:
                    if self.sendDragEvent(ev, init=init):
                        ev.accept()
    def leaveEvent(self, ev): ## inform items that mouse is gone
        if len(self.dragButtons) == 0:
            self.sendHoverEvents(ev, exitOnly=True)
    def mouseReleaseEvent(self, ev):
        """Finish the pending drag or deliver the queued click, then reset all
        per-press state once the last button is released."""
        #print 'sceneRelease'
        if self.mouseGrabberItem() is None:
            if ev.button() in self.dragButtons:
                if self.sendDragEvent(ev, final=True):
                    #print "sent drag event"
                    ev.accept()
                self.dragButtons.remove(ev.button())
            else:
                cev = [e for e in self.clickEvents if int(e.button()) == int(ev.button())]
                if self.sendClickEvent(cev[0]):
                    #print "sent click event"
                    ev.accept()
                self.clickEvents.remove(cev[0])
        if int(ev.buttons()) == 0:
            # All buttons are up; clear drag/click bookkeeping.
            self.dragItem = None
            self.dragButtons = []
            self.clickEvents = []
            self.lastDrag = None
        QtGui.QGraphicsScene.mouseReleaseEvent(self, ev)
        self.sendHoverEvents(ev) ## let items prepare for next click/drag
    def mouseDoubleClickEvent(self, ev):
        """Queue a double-click MouseClickEvent if no item grabbed the press."""
        QtGui.QGraphicsScene.mouseDoubleClickEvent(self, ev)
        if self.mouseGrabberItem() is None: ## nobody claimed press; we are free to generate drag/click events
            self.clickEvents.append(MouseClickEvent(ev, double=True))
    def sendHoverEvents(self, ev, exitOnly=False):
        """Deliver custom HoverEvents to items near the cursor, sending
        enter/exit notifications as the hovered set changes."""
        ## if exitOnly, then just inform all previously hovered items that the mouse has left.
        if exitOnly:
            acceptable=False
            items = []
            event = HoverEvent(None, acceptable)
        else:
            acceptable = int(ev.buttons()) == 0 ## if we are in mid-drag, do not allow items to accept the hover event.
            event = HoverEvent(ev, acceptable)
            items = self.itemsNearEvent(event, hoverable=True)
            self.sigMouseHover.emit(items)
        prevItems = list(self.hoverItems.keys())
        #print "hover prev items:", prevItems
        #print "hover test items:", items
        for item in items:
            if hasattr(item, 'hoverEvent'):
                event.currentItem = item
                if item not in self.hoverItems:
                    # Newly hovered item.
                    self.hoverItems[item] = None
                    event.enter = True
                else:
                    # Still hovered; anything left in prevItems afterwards
                    # will receive an exit event below.
                    prevItems.remove(item)
                    event.enter = False
                try:
                    item.hoverEvent(event)
                except:
                    debug.printExc("Error sending hover event:")
        event.enter = False
        event.exit = True
        #print "hover exit items:", prevItems
        for item in prevItems:
            event.currentItem = item
            try:
                item.hoverEvent(event)
            except:
                debug.printExc("Error sending hover exit event:")
            finally:
                del self.hoverItems[item]
        # Update last hover event unless:
        #   - mouse is dragging (move+buttons); in this case we want the dragged
        #     item to continue receiving events until the drag is over
        #   - event is not a mouse event (QEvent.Leave sometimes appears here)
        if (ev.type() == ev.GraphicsSceneMousePress or
            (ev.type() == ev.GraphicsSceneMouseMove and int(ev.buttons()) == 0)):
            self.lastHoverEvent = event ## save this so we can ask about accepted events later.
    def sendDragEvent(self, ev, init=False, final=False):
        """Deliver a MouseDragEvent; returns True if some item accepted it."""
        ## Send a MouseDragEvent to the current dragItem or to
        ## items near the beginning of the drag
        event = MouseDragEvent(ev, self.clickEvents[0], self.lastDrag, start=init, finish=final)
        #print "dragEvent: init=", init, 'final=', final, 'self.dragItem=', self.dragItem
        if init and self.dragItem is None:
            if self.lastHoverEvent is not None:
                # An item may have pre-claimed this drag via HoverEvent.acceptDrags().
                acceptedItem = self.lastHoverEvent.dragItems().get(event.button(), None)
            else:
                acceptedItem = None
            if acceptedItem is not None:
                #print "Drag -> pre-selected item:", acceptedItem
                self.dragItem = acceptedItem
                event.currentItem = self.dragItem
                try:
                    self.dragItem.mouseDragEvent(event)
                except:
                    debug.printExc("Error sending drag event:")
            else:
                #print "drag -> new item"
                for item in self.itemsNearEvent(event):
                    #print "check item:", item
                    if not item.isVisible() or not item.isEnabled():
                        continue
                    if hasattr(item, 'mouseDragEvent'):
                        event.currentItem = item
                        try:
                            item.mouseDragEvent(event)
                        except:
                            debug.printExc("Error sending drag event:")
                        if event.isAccepted():
                            #print "  --> accepted"
                            self.dragItem = item
                            if int(item.flags() & item.ItemIsFocusable) > 0:
                                item.setFocus(QtCore.Qt.MouseFocusReason)
                            break
        elif self.dragItem is not None:
            # Drag already in progress: all further events go to dragItem.
            event.currentItem = self.dragItem
            try:
                self.dragItem.mouseDragEvent(event)
            except:
                debug.printExc("Error sending hover exit event:")
        self.lastDrag = event
        return event.isAccepted()
    def sendClickEvent(self, ev):
        """Deliver a MouseClickEvent; returns True if some item accepted it."""
        ## if we are in mid-drag, click events may only go to the dragged item.
        if self.dragItem is not None and hasattr(self.dragItem, 'mouseClickEvent'):
            ev.currentItem = self.dragItem
            self.dragItem.mouseClickEvent(ev)
        ## otherwise, search near the cursor
        else:
            if self.lastHoverEvent is not None:
                # An item may have pre-claimed this click via HoverEvent.acceptClicks().
                acceptedItem = self.lastHoverEvent.clickItems().get(ev.button(), None)
            else:
                acceptedItem = None
            if acceptedItem is not None:
                ev.currentItem = acceptedItem
                try:
                    acceptedItem.mouseClickEvent(ev)
                except:
                    debug.printExc("Error sending click event:")
            else:
                for item in self.itemsNearEvent(ev):
                    if not item.isVisible() or not item.isEnabled():
                        continue
                    if hasattr(item, 'mouseClickEvent'):
                        ev.currentItem = item
                        try:
                            item.mouseClickEvent(ev)
                        except:
                            debug.printExc("Error sending click event:")
                        if ev.isAccepted():
                            if int(item.flags() & item.ItemIsFocusable) > 0:
                                item.setFocus(QtCore.Qt.MouseFocusReason)
                            break
        self.sigMouseClicked.emit(ev)
        return ev.isAccepted()
    def items(self, *args):
        """Same as QGraphicsScene.items(), but translates results through
        translateGraphicsItem to recover the original Python objects."""
        #print 'args:', args
        items = QtGui.QGraphicsScene.items(self, *args)
        ## PyQt bug: items() returns a list of QGraphicsItem instances. If the item is subclassed from QGraphicsObject,
        ## then the object returned will be different than the actual item that was originally added to the scene
        items2 = list(map(self.translateGraphicsItem, items))
        #if HAVE_SIP and isinstance(self, sip.wrapper):
            #items2 = []
            #for i in items:
                #addr = sip.unwrapinstance(sip.cast(i, QtGui.QGraphicsItem))
                #i2 = GraphicsScene._addressCache.get(addr, i)
                ##print i, "==>", i2
                #items2.append(i2)
        #print 'items:', items
        return items2
    def selectedItems(self, *args):
        """Same as QGraphicsScene.selectedItems(), with the same PyQt
        translation workaround as items()."""
        items = QtGui.QGraphicsScene.selectedItems(self, *args)
        ## PyQt bug: items() returns a list of QGraphicsItem instances. If the item is subclassed from QGraphicsObject,
        ## then the object returned will be different than the actual item that was originally added to the scene
        #if HAVE_SIP and isinstance(self, sip.wrapper):
            #items2 = []
            #for i in items:
                #addr = sip.unwrapinstance(sip.cast(i, QtGui.QGraphicsItem))
                #i2 = GraphicsScene._addressCache.get(addr, i)
                ##print i, "==>", i2
                #items2.append(i2)
        items2 = list(map(self.translateGraphicsItem, items))
        #print 'items:', items
        return items2
    def itemAt(self, *args):
        """Same as QGraphicsScene.itemAt(), with the same PyQt
        translation workaround as items()."""
        item = QtGui.QGraphicsScene.itemAt(self, *args)
        ## PyQt bug: items() returns a list of QGraphicsItem instances. If the item is subclassed from QGraphicsObject,
        ## then the object returned will be different than the actual item that was originally added to the scene
        #if HAVE_SIP and isinstance(self, sip.wrapper):
            #addr = sip.unwrapinstance(sip.cast(item, QtGui.QGraphicsItem))
            #item = GraphicsScene._addressCache.get(addr, item)
        #return item
        return self.translateGraphicsItem(item)
    def itemsNearEvent(self, event, selMode=QtCore.Qt.IntersectsItemShape, sortOrder=QtCore.Qt.DescendingOrder, hoverable=False):
        """
        Return an iterator that iterates first through the items that directly intersect point (in Z order)
        followed by any other items that are within the scene's click radius.
        """
        #tr = self.getViewWidget(event.widget()).transform()
        view = self.views()[0]
        tr = view.viewportTransform()
        r = self._clickRadius
        # Search rectangle of 2r x 2r pixels, mapped into scene coordinates.
        rect = view.mapToScene(QtCore.QRect(0, 0, 2*r, 2*r)).boundingRect()
        seen = set()
        if hasattr(event, 'buttonDownScenePos'):
            point = event.buttonDownScenePos()
        else:
            point = event.scenePos()
        w = rect.width()
        h = rect.height()
        rgn = QtCore.QRectF(point.x()-w, point.y()-h, 2*w, 2*h)
        #self.searchRect.setRect(rgn)
        items = self.items(point, selMode, sortOrder, tr)
        ## remove items whose shape does not contain point (scene.items() apparently sucks at this)
        items2 = []
        for item in items:
            if hoverable and not hasattr(item, 'hoverEvent'):
                continue
            shape = item.shape() # Note: default shape() returns boundingRect()
            if shape is None:
                continue
            if shape.contains(item.mapFromScene(point)):
                items2.append(item)
        ## Sort by descending Z-order (don't trust scene.itms() to do this either)
        ## use 'absolute' z value, which is the sum of all item/parent ZValues
        def absZValue(item):
            # Recursively accumulate z-values up the parent chain.
            if item is None:
                return 0
            return item.zValue() + absZValue(item.parentItem())
        sortList(items2, lambda a,b: cmp(absZValue(b), absZValue(a)))
        return items2
        #for item in items:
            ##seen.add(item)
            #shape = item.mapToScene(item.shape())
            #if not shape.contains(point):
                #continue
            #yield item
        #for item in self.items(rgn, selMode, sortOrder, tr):
            ##if item not in seen:
            #yield item
    def getViewWidget(self):
        """Return the first view displaying this scene."""
        return self.views()[0]
    #def getViewWidget(self, widget):
        ### same pyqt bug -- mouseEvent.widget() doesn't give us the original python object.
        ### [[doesn't seem to work correctly]]
        #if HAVE_SIP and isinstance(self, sip.wrapper):
            #addr = sip.unwrapinstance(sip.cast(widget, QtGui.QWidget))
            ##print "convert", widget, addr
            #for v in self.views():
                #addr2 = sip.unwrapinstance(sip.cast(v, QtGui.QWidget))
                ##print "  check:", v, addr2
                #if addr2 == addr:
                    #return v
        #else:
            #return widget
    def addParentContextMenus(self, item, menu, event):
        """
        Can be called by any item in the scene to expand its context menu to include parent context menus.
        Parents may implement getContextMenus to add new menus / actions to the existing menu.
        getContextMenus must accept 1 argument (the event that generated the original menu) and
        return a single QMenu or a list of QMenus.
        The final menu will look like:
            |    Original Item 1
            |    Original Item 2
            |    ...
            |    Original Item N
            |    ------------------
            |    Parent Item 1
            |    Parent Item 2
            |    ...
            |    Grandparent Item 1
            |    ...
        ==============  ==================================================
        **Arguments:**
        item            The item that initially created the context menu
                        (This is probably the item making the call to this function)
        menu            The context menu being shown by the item
        event           The original event that triggered the menu to appear.
        ==============  ==================================================
        """
        menusToAdd = []
        # Walk up the parent chain (ending at the scene itself), collecting menus.
        while item is not self:
            item = item.parentItem()
            if item is None:
                item = self
            if not hasattr(item, "getContextMenus"):
                continue
            subMenus = item.getContextMenus(event) or []
            if isinstance(subMenus, list): ## so that some items (like FlowchartViewBox) can return multiple menus
                menusToAdd.extend(subMenus)
            else:
                menusToAdd.append(subMenus)
        if menusToAdd:
            # Separate the item's own entries from the inherited ones.
            menu.addSeparator()
        for m in menusToAdd:
            if isinstance(m, QtGui.QMenu):
                menu.addMenu(m)
            elif isinstance(m, QtGui.QAction):
                menu.addAction(m)
            else:
                raise Exception("Cannot add object %s (type=%s) to QMenu." % (str(m), str(type(m))))
        return menu
    def getContextMenus(self, event):
        """Return the scene-level context menu entries (the Export action)."""
        self.contextMenuItem = event.acceptedItem
        return self.contextMenu
    def showExportDialog(self):
        """Open (lazily creating) the export dialog for the context-menu item."""
        if self.exportDialog is None:
            from . import exportDialog
            self.exportDialog = exportDialog.ExportDialog(self)
        self.exportDialog.show(self.contextMenuItem)
    @staticmethod
    def translateGraphicsItem(item):
        ## for fixing pyqt bugs where the wrong item is returned
        if HAVE_SIP and isinstance(item, sip.wrapper):
            addr = sip.unwrapinstance(sip.cast(item, QtGui.QGraphicsItem))
            item = GraphicsScene._addressCache.get(addr, item)
        return item
    @staticmethod
    def translateGraphicsItems(items):
        """Apply translateGraphicsItem to each item in a sequence."""
        return list(map(GraphicsScene.translateGraphicsItem, items))
| mit |
sandymanu/sandy_lettuce_8916 | tools/perf/scripts/python/netdev-times.py | 11271 | 15048 | # Display a process of packets and processed time.
# It helps us to investigate networking or network device.
#
# options
# tx: show only tx chart
# rx: show only rx chart
# dev=: show only thing related to specified device
# debug: work with debug mode. It shows buffer status.
import os
import sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import *
all_event_list = []; # insert all tracepoint event related with this script
irq_dic = {}; # key is cpu and value is a list which stacks irqs
# which raise NET_RX softirq
net_rx_dic = {}; # key is cpu and value include time of NET_RX softirq-entry
# and a list which stacks receive
receive_hunk_list = []; # a list which include a sequence of receive events
rx_skb_list = []; # received packet list for matching
# skb_copy_datagram_iovec
buffer_budget = 65536; # the budget of rx_skb_list, tx_queue_list and
# tx_xmit_list
of_count_rx_skb_list = 0; # overflow count
tx_queue_list = []; # list of packets which pass through dev_queue_xmit
of_count_tx_queue_list = 0; # overflow count
tx_xmit_list = []; # list of packets which pass through dev_hard_start_xmit
of_count_tx_xmit_list = 0; # overflow count
tx_free_list = []; # list of packets which is freed
# options
show_tx = 0;
show_rx = 0;
dev = 0; # store a name of device specified by option "dev="
debug = 0;
# indices of event_info tuple
EINFO_IDX_NAME= 0
EINFO_IDX_CONTEXT=1
EINFO_IDX_CPU= 2
EINFO_IDX_TIME= 3
EINFO_IDX_PID= 4
EINFO_IDX_COMM= 5
# Calculate a time interval(msec) from src(nsec) to dst(nsec)
def diff_msec(src, dst):
	# Convert the nanosecond interval from src to dst into milliseconds.
	delta_ns = dst - src
	return delta_ns / 1000000.0
# Display a process of transmitting a packet
# Columns: dev, len, Qdisc-enqueue timestamp, Qdisc latency, device latency.
def print_transmit(hunk):
	# honor the "dev=" option: skip hunks for other devices
	if dev != 0 and hunk['dev'].find(dev) < 0:
		return
	print "%7s %5d %6d.%06dsec %12.3fmsec %12.3fmsec" % \
		(hunk['dev'], hunk['len'],
		nsecs_secs(hunk['queue_t']),
		nsecs_nsecs(hunk['queue_t'])/1000,
		diff_msec(hunk['queue_t'], hunk['xmit_t']),
		diff_msec(hunk['xmit_t'], hunk['free_t']))
# Format for displaying rx packet processing
# (the "|" glyphs draw the ASCII-art call tree printed by print_receive)
PF_IRQ_ENTRY= " irq_entry(+%.3fmsec irq=%d:%s)"
PF_SOFT_ENTRY=" softirq_entry(+%.3fmsec)"
PF_NAPI_POLL= " napi_poll_exit(+%.3fmsec %s)"
PF_JOINT= " |"
PF_WJOINT= " | |"
PF_NET_RECV= " |---netif_receive_skb(+%.3fmsec skb=%x len=%d)"
PF_NET_RX= " |---netif_rx(+%.3fmsec skb=%x)"
PF_CPY_DGRAM= " | skb_copy_datagram_iovec(+%.3fmsec %d:%s)"
PF_KFREE_SKB= " | kfree_skb(+%.3fmsec location=%x)"
PF_CONS_SKB= " | consume_skb(+%.3fmsec)"
# Display a process of received packets and interrupts associated with
# a NET_RX softirq
def print_receive(hunk):
	show_hunk = 0
	irq_list = hunk['irq_list']
	cpu = irq_list[0]['cpu']
	# all timestamps below are printed relative to the first irq entry
	base_t = irq_list[0]['irq_ent_t']
	# check if this hunk should be showed (honor the "dev=" option)
	if dev != 0:
		for i in range(len(irq_list)):
			if irq_list[i]['name'].find(dev) >= 0:
				show_hunk = 1
				break
	else:
		show_hunk = 1
	if show_hunk == 0:
		return
	print "%d.%06dsec cpu=%d" % \
		(nsecs_secs(base_t), nsecs_nsecs(base_t)/1000, cpu)
	# hard-irq section: one entry per irq, with any nested netif_rx events
	for i in range(len(irq_list)):
		print PF_IRQ_ENTRY % \
			(diff_msec(base_t, irq_list[i]['irq_ent_t']),
			irq_list[i]['irq'], irq_list[i]['name'])
		print PF_JOINT
		irq_event_list = irq_list[i]['event_list']
		for j in range(len(irq_event_list)):
			irq_event = irq_event_list[j]
			if irq_event['event'] == 'netif_rx':
				print PF_NET_RX % \
					(diff_msec(base_t, irq_event['time']),
					irq_event['skbaddr'])
				print PF_JOINT
	# NET_RX softirq section
	print PF_SOFT_ENTRY % \
		diff_msec(base_t, hunk['sirq_ent_t'])
	print PF_JOINT
	event_list = hunk['event_list']
	for i in range(len(event_list)):
		event = event_list[i]
		if event['event_name'] == 'napi_poll':
			print PF_NAPI_POLL % \
				(diff_msec(base_t, event['event_t']), event['dev'])
			# last event closes the tree with a blank line
			if i == len(event_list) - 1:
				print ""
			else:
				print PF_JOINT
		else:
			print PF_NET_RECV % \
				(diff_msec(base_t, event['event_t']), event['skbaddr'],
				event['len'])
			# 'comm' means the skb was delivered to a process;
			# 'handle' means it was freed (kfree_skb/consume_skb)
			if 'comm' in event.keys():
				print PF_WJOINT
				print PF_CPY_DGRAM % \
					(diff_msec(base_t, event['comm_t']),
					event['pid'], event['comm'])
			elif 'handle' in event.keys():
				print PF_WJOINT
				if event['handle'] == "kfree_skb":
					print PF_KFREE_SKB % \
						(diff_msec(base_t,
						event['comm_t']),
						event['location'])
				elif event['handle'] == "consume_skb":
					print PF_CONS_SKB % \
						diff_msec(base_t,
						event['comm_t'])
	print PF_JOINT
def trace_begin():
	"""Parse the script options from sys.argv and set the module-level flags."""
	global show_tx
	global show_rx
	global dev
	global debug
	# argv[0] is the script itself; everything after it is an option
	for arg in sys.argv[1:]:
		if arg == 'tx':
			show_tx = 1
		elif arg == 'rx':
			show_rx = 1
		elif arg[0:4] == 'dev=':
			dev = arg[4:]
		elif arg == 'debug':
			debug = 1
	# default: show both charts when neither was explicitly requested
	if not (show_tx or show_rx):
		show_tx = 1
		show_rx = 1
def trace_end():
	# order all events in time
	# NOTE(review): Python 2 cmp-style sort; would need key= under Python 3
	all_event_list.sort(lambda a,b :cmp(a[EINFO_IDX_TIME],
					    b[EINFO_IDX_TIME]))
	# process all events: dispatch each queued tuple to its handler
	for i in range(len(all_event_list)):
		event_info = all_event_list[i]
		name = event_info[EINFO_IDX_NAME]
		if name == 'irq__softirq_exit':
			handle_irq_softirq_exit(event_info)
		elif name == 'irq__softirq_entry':
			handle_irq_softirq_entry(event_info)
		elif name == 'irq__softirq_raise':
			handle_irq_softirq_raise(event_info)
		elif name == 'irq__irq_handler_entry':
			handle_irq_handler_entry(event_info)
		elif name == 'irq__irq_handler_exit':
			handle_irq_handler_exit(event_info)
		elif name == 'napi__napi_poll':
			handle_napi_poll(event_info)
		elif name == 'net__netif_receive_skb':
			handle_netif_receive_skb(event_info)
		elif name == 'net__netif_rx':
			handle_netif_rx(event_info)
		elif name == 'skb__skb_copy_datagram_iovec':
			handle_skb_copy_datagram_iovec(event_info)
		elif name == 'net__net_dev_queue':
			handle_net_dev_queue(event_info)
		elif name == 'net__net_dev_xmit':
			handle_net_dev_xmit(event_info)
		elif name == 'skb__kfree_skb':
			handle_kfree_skb(event_info)
		elif name == 'skb__consume_skb':
			handle_consume_skb(event_info)
	# display receive hunks
	if show_rx:
		for i in range(len(receive_hunk_list)):
			print_receive(receive_hunk_list[i])
	# display transmit hunks
	if show_tx:
		print "   dev    len      Qdisc        " \
			"       netdevice             free"
		for i in range(len(tx_free_list)):
			print_transmit(tx_free_list[i])
	# with "debug": report how full the matching buffers got
	if debug:
		print "debug buffer status"
		print "----------------------------"
		print "xmit Qdisc:remain:%d overflow:%d" % \
			(len(tx_queue_list), of_count_tx_queue_list)
		print "xmit netdevice:remain:%d overflow:%d" % \
			(len(tx_xmit_list), of_count_tx_xmit_list)
		print "receive:remain:%d overflow:%d" % \
			(len(rx_skb_list), of_count_rx_skb_list)
# called from perf, when it finds a correspoinding event
def irq__softirq_entry(name, context, cpu, sec, nsec, pid, comm, vec):
	# only NET_RX softirqs are of interest; other vectors are dropped
	if symbol_str("irq__softirq_entry", "vec", vec) != "NET_RX":
		return
	event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, vec)
	all_event_list.append(event_info)
def irq__softirq_exit(name, context, cpu, sec, nsec, pid, comm, vec):
	# deliberately reuses the irq__softirq_entry symbol table for the vec names
	if symbol_str("irq__softirq_entry", "vec", vec) != "NET_RX":
		return
	event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, vec)
	all_event_list.append(event_info)
def irq__softirq_raise(name, context, cpu, sec, nsec, pid, comm, vec):
	# deliberately reuses the irq__softirq_entry symbol table for the vec names
	if symbol_str("irq__softirq_entry", "vec", vec) != "NET_RX":
		return
	event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, vec)
	all_event_list.append(event_info)
# queue a hard-irq entry event (processed later, in time order, by trace_end)
def irq__irq_handler_entry(name, context, cpu, sec, nsec, pid, comm,
			irq, irq_name):
	event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
			irq, irq_name)
	all_event_list.append(event_info)
# queue a hard-irq exit event
def irq__irq_handler_exit(name, context, cpu, sec, nsec, pid, comm, irq, ret):
	event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, irq, ret)
	all_event_list.append(event_info)
# queue a napi poll event
def napi__napi_poll(name, context, cpu, sec, nsec, pid, comm, napi, dev_name):
	event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
			napi, dev_name)
	all_event_list.append(event_info)
# queue a netif_receive_skb event (skb entering the network stack)
def net__netif_receive_skb(name, context, cpu, sec, nsec, pid, comm, skbaddr,
			skblen, dev_name):
	event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
			skbaddr, skblen, dev_name)
	all_event_list.append(event_info)
# queue a netif_rx event (skb queued from hard-irq context)
def net__netif_rx(name, context, cpu, sec, nsec, pid, comm, skbaddr,
			skblen, dev_name):
	event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
			skbaddr, skblen, dev_name)
	all_event_list.append(event_info)
# queue a net_dev_queue event (skb handed to the Qdisc)
def net__net_dev_queue(name, context, cpu, sec, nsec, pid, comm,
			skbaddr, skblen, dev_name):
	event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
			skbaddr, skblen, dev_name)
	all_event_list.append(event_info)
# queue a net_dev_xmit event (skb handed to the device driver; rc is the
# driver's return code, 0 == NETDEV_TX_OK)
def net__net_dev_xmit(name, context, cpu, sec, nsec, pid, comm,
			skbaddr, skblen, rc, dev_name):
	event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
			skbaddr, skblen, rc ,dev_name)
	all_event_list.append(event_info)
# queue a kfree_skb event (skb dropped/freed; location is the freeing callsite)
def skb__kfree_skb(name, context, cpu, sec, nsec, pid, comm,
			skbaddr, protocol, location):
	event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
			skbaddr, protocol, location)
	all_event_list.append(event_info)
# queue a consume_skb event (skb freed after successful transmission)
def skb__consume_skb(name, context, cpu, sec, nsec, pid, comm, skbaddr):
	event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
			skbaddr)
	all_event_list.append(event_info)
# queue a skb_copy_datagram_iovec event (skb payload copied to user space)
def skb__skb_copy_datagram_iovec(name, context, cpu, sec, nsec, pid, comm,
			skbaddr, skblen):
	event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
			skbaddr, skblen)
	all_event_list.append(event_info)
def handle_irq_handler_entry(event_info):
	"""Push a new hard-irq record onto this CPU's stack of in-flight irqs."""
	(name, context, cpu, time, pid, comm, irq, irq_name) = event_info
	record = {'irq': irq, 'name': irq_name, 'cpu': cpu, 'irq_ent_t': time}
	irq_dic.setdefault(cpu, []).append(record)
def handle_irq_handler_exit(event_info):
	(name, context, cpu, time, pid, comm, irq, ret) = event_info
	if cpu not in irq_dic.keys():
		return
	irq_record = irq_dic[cpu].pop()
	# mismatched irq number: the popped record is silently discarded
	if irq != irq_record['irq']:
		return
	irq_record.update({'irq_ext_t':time})
	# if an irq doesn't include NET_RX softirq, drop.
	if 'event_list' in irq_record.keys():
		irq_dic[cpu].append(irq_record)
def handle_irq_softirq_raise(event_info):
	# attach a 'sirq_raise' marker to the irq currently on top of this
	# CPU's stack; having an 'event_list' keeps the irq at handler-exit
	(name, context, cpu, time, pid, comm, vec) = event_info
	if cpu not in irq_dic.keys() \
	or len(irq_dic[cpu]) == 0:
		return
	irq_record = irq_dic[cpu].pop()
	if 'event_list' in irq_record.keys():
		irq_event_list = irq_record['event_list']
	else:
		irq_event_list = []
	irq_event_list.append({'time':time, 'event':'sirq_raise'})
	irq_record.update({'event_list':irq_event_list})
	irq_dic[cpu].append(irq_record)
def handle_irq_softirq_entry(event_info):
	# open a fresh NET_RX softirq record for this CPU
	(name, context, cpu, time, pid, comm, vec) = event_info
	net_rx_dic[cpu] = {'sirq_ent_t':time, 'event_list':[]}
def handle_irq_softirq_exit(event_info):
	# close the NET_RX softirq on this CPU and combine it with the irqs
	# that raised it into one "receive hunk" for later display
	(name, context, cpu, time, pid, comm, vec) = event_info
	irq_list = []
	event_list = 0
	if cpu in irq_dic.keys():
		irq_list = irq_dic[cpu]
		del irq_dic[cpu]
	if cpu in net_rx_dic.keys():
		sirq_ent_t = net_rx_dic[cpu]['sirq_ent_t']
		event_list = net_rx_dic[cpu]['event_list']
		del net_rx_dic[cpu]
	# nothing to merge unless both the irq side and softirq side exist
	if irq_list == [] or event_list == 0:
		return
	rec_data = {'sirq_ent_t':sirq_ent_t, 'sirq_ext_t':time,
		    'irq_list':irq_list, 'event_list':event_list}
	# merge information related to a NET_RX softirq
	receive_hunk_list.append(rec_data)
def handle_napi_poll(event_info):
	"""Record a napi_poll event inside the currently open NET_RX softirq, if any."""
	(name, context, cpu, time, pid, comm, napi, dev_name) = event_info
	if cpu in net_rx_dic:
		net_rx_dic[cpu]['event_list'].append(
			{'event_name': 'napi_poll', 'dev': dev_name, 'event_t': time})
def handle_netif_rx(event_info):
	# attach a netif_rx event to the irq currently on top of this CPU's stack
	(name, context, cpu, time, pid, comm,
		skbaddr, skblen, dev_name) = event_info
	if cpu not in irq_dic.keys() \
	or len(irq_dic[cpu]) == 0:
		return
	irq_record = irq_dic[cpu].pop()
	if 'event_list' in irq_record.keys():
		irq_event_list = irq_record['event_list']
	else:
		irq_event_list = []
	irq_event_list.append({'time':time, 'event':'netif_rx',
		'skbaddr':skbaddr, 'skblen':skblen, 'dev_name':dev_name})
	irq_record.update({'event_list':irq_event_list})
	irq_dic[cpu].append(irq_record)
def handle_netif_receive_skb(event_info):
	global of_count_rx_skb_list
	(name, context, cpu, time, pid, comm,
		skbaddr, skblen, dev_name) = event_info
	if cpu in net_rx_dic.keys():
		rec_data = {'event_name':'netif_receive_skb',
			    'event_t':time, 'skbaddr':skbaddr, 'len':skblen}
		event_list = net_rx_dic[cpu]['event_list']
		event_list.append(rec_data)
		# also remember the skb (newest first) so a later
		# skb_copy_datagram_iovec/kfree_skb can be matched to it
		rx_skb_list.insert(0, rec_data)
		if len(rx_skb_list) > buffer_budget:
			rx_skb_list.pop()
			of_count_rx_skb_list += 1
def handle_net_dev_queue(event_info):
	"""Track a packet entering the Qdisc via dev_queue_xmit."""
	global of_count_tx_queue_list
	(name, context, cpu, time, pid, comm,
		skbaddr, skblen, dev_name) = event_info
	entry = {'dev': dev_name, 'skbaddr': skbaddr, 'len': skblen,
		 'queue_t': time}
	# newest first; evict the oldest once the budget is exceeded
	tx_queue_list.insert(0, entry)
	if len(tx_queue_list) > buffer_budget:
		tx_queue_list.pop()
		of_count_tx_queue_list += 1
def handle_net_dev_xmit(event_info):
	# on successful transmit, move the matching skb from the Qdisc list
	# to the "handed to the device" list and stamp its xmit time
	global of_count_tx_xmit_list
	(name, context, cpu, time, pid, comm,
		skbaddr, skblen, rc, dev_name) = event_info
	if rc == 0: # NETDEV_TX_OK
		for i in range(len(tx_queue_list)):
			skb = tx_queue_list[i]
			if skb['skbaddr'] == skbaddr:
				skb['xmit_t'] = time
				tx_xmit_list.insert(0, skb)
				del tx_queue_list[i]
				if len(tx_xmit_list) > buffer_budget:
					tx_xmit_list.pop()
					of_count_tx_xmit_list += 1
				return
def handle_kfree_skb(event_info):
	# try to match the freed skb against, in order: packets still in the
	# Qdisc (drop them), packets handed to the device (complete their
	# tx record), and received packets (mark them dropped)
	(name, context, cpu, time, pid, comm,
		skbaddr, protocol, location) = event_info
	for i in range(len(tx_queue_list)):
		skb = tx_queue_list[i]
		if skb['skbaddr'] == skbaddr:
			del tx_queue_list[i]
			return
	for i in range(len(tx_xmit_list)):
		skb = tx_xmit_list[i]
		if skb['skbaddr'] == skbaddr:
			skb['free_t'] = time
			tx_free_list.append(skb)
			del tx_xmit_list[i]
			return
	for i in range(len(rx_skb_list)):
		rec_data = rx_skb_list[i]
		if rec_data['skbaddr'] == skbaddr:
			rec_data.update({'handle':"kfree_skb",
					'comm':comm, 'pid':pid, 'comm_t':time})
			del rx_skb_list[i]
			return
def handle_consume_skb(event_info):
	"""Mark a transmitted packet as freed and move it onto tx_free_list."""
	(name, context, cpu, time, pid, comm, skbaddr) = event_info
	for idx, skb in enumerate(tx_xmit_list):
		if skb['skbaddr'] != skbaddr:
			continue
		skb['free_t'] = time
		tx_free_list.append(skb)
		# safe to delete while iterating: we return immediately
		del tx_xmit_list[idx]
		return
def handle_skb_copy_datagram_iovec(event_info):
	# a received skb was copied to user space: record the consuming
	# process on its rx record and retire it from the matching list
	(name, context, cpu, time, pid, comm, skbaddr, skblen) = event_info
	for i in range(len(rx_skb_list)):
		rec_data = rx_skb_list[i]
		if skbaddr == rec_data['skbaddr']:
			rec_data.update({'handle':"skb_copy_datagram_iovec",
					'comm':comm, 'pid':pid, 'comm_t':time})
			del rx_skb_list[i]
			return
| gpl-2.0 |
################################################################################
# Copyright 2016-2021 Advanced Micro Devices, Inc. All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell cop-
# ies of the Software, and to permit persons to whom the Software is furnished
# to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IM-
# PLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
# IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNE-
# CTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
################################################################################
from .Common import print1, print2, HR, printExit, defaultAnalysisParameters, globalParameters, \
setWorkingPath, popWorkingPath, assignParameterWithDefault, startTime, ProgressBar, printWarning
from .SolutionStructs import Solution
from . import LibraryIO
from . import SolutionSelectionLibrary
from copy import deepcopy
from sys import stdout
import array
import csv
import os
import time
################################################################################
# Analyze Problem Type
################################################################################
def analyzeProblemType( problemType, problemSizeGroups, inputParameters ):
  """Analyze one problem type's benchmark results and build its library logic.

  Args:
    problemType: the ProblemType being analyzed.
    problemSizeGroups: list of per-group tuples holding (problemSizes,
        dataFileName, solutionsFileName, [selectionFileName,] solutions)
        produced by the benchmark step.
    inputParameters: analysis parameters (e.g. SolutionImportanceMin).

  Returns:
    (problemType, solutions, indexOrder, exactLogic, rangeLogic,
     selectionSolutions, selectionSolutionsIdsList, perfMetric)
  """
  print2(HR)
  print1("# Analyzing: %s" % problemType)
  enableTileSelection = problemType["TileAwareSelection"]
  solutionsList = []
  problemSizesList = []
  dataFileNameList = []
  selectionFileNameList = []
  for problemSizeGroup in problemSizeGroups:
    problemSizes = problemSizeGroup[0]
    dataFileName = problemSizeGroup[1]
    dataFileNameList.append(dataFileName)
    solutionsFileName = problemSizeGroup[2]
    if enableTileSelection:
      selectionFileName = problemSizeGroup[3]
      selectionFileNameList.append(selectionFileName)
    ######################################
    # Read Solutions
    # (problemSizes, solutions) are already read and kept in problemSizeGroups,
    # no need to call LibraryIO.readSolutions(solutionsFileName) again
    solutions = problemSizeGroup[4]
    problemSizesList.append(problemSizes)
    solutionsList.append(solutions)
    solutionMinNaming = Solution.getMinNaming(solutions)
    print1("# Read: %s" % (solutionsFileName))
    print2("# ProblemSizes: %s" % problemSizes)
    print2("# Solutions:")
    solutionIdx = 0
    for solution in solutions:
      print2("# (%u) %s" % (solutionIdx, Solution.getNameMin(solution, \
          solutionMinNaming)))
      solutionIdx += 1
    print2(HR)
  ######################################
  # Create Logic Analyzer
  logicAnalyzer = LogicAnalyzer( problemType, problemSizesList, solutionsList, \
      dataFileNameList, inputParameters)
  selectionSolutionsIdsList = None
  selectionSolutions = None
  validSelectionSolutions = []
  ######################################
  # Remove invalid solutions
  logicAnalyzer.removeInvalidSolutions()
  ######################################
  # Remove least important solutions
  if globalParameters["SolutionSelectionAlg"] == 0:
    logicAnalyzer.removeLeastImportantSolutions()
  elif globalParameters["SolutionSelectionAlg"] == 1:
    logicAnalyzer.keepWinnerSolutions()
  else:
    # BUGFIX: the message previously formatted globalParameters["KeepLogic"],
    # which is not the parameter this branch validates
    printExit("Bad SolutionSelectionAlg=%u"%globalParameters["SolutionSelectionAlg"])
  # print raw data
  if globalParameters["PrintLevel"] >= 2:
    line = "After Removals:\n"
    numOther = 1
    for size in logicAnalyzer.numProblemSizes:
      numOther *= size
    numCols = logicAnalyzer.numProblemSizes[1]
    if numCols == 0: numCols = 1
    numOther //= numCols
    for row in range(0, numOther):
      for col in range(0, numCols):
        for sol in range(0, logicAnalyzer.numSolutions):
          line += "% 5.0f" % logicAnalyzer.data[sol + logicAnalyzer.numSolutions*(col + row*numCols)]
        line += "; "
      line += "\n"
    print(line)
  for i in range(0, len(logicAnalyzer.solutions)):
    s = logicAnalyzer.solutions[i]
    s["SolutionIndex"] = i
    s["SolutionNameMin"] = Solution.getNameMin(s, solutionMinNaming)
    print1("(%2u) %s : %s" % (i, Solution.getNameMin(s, solutionMinNaming), Solution.getNameFull(s)))
  if enableTileSelection:
    if globalParameters["NewClient"] == 2:
      validSelectionSolutions = SolutionSelectionLibrary.analyzeSolutionSelection(problemType, selectionFileNameList, \
          logicAnalyzer.numSolutionsPerGroup, logicAnalyzer.solutionGroupMap, solutionsList)
    else:
      validSelectionSolutions = SolutionSelectionLibrary.analyzeSolutionSelectionOldClient(problemType, problemSizeGroups)
  # split the tile-selection solutions into those already kept by the logic
  # analyzer and the remainder, which get appended with new indices
  validSelectionSolutionsIncluded = []
  validSelectionSolutionsRemainder = []
  selectionSolutionsIds = set([])
  for validSelectionSolution in validSelectionSolutions:
    (validSolution, validSolutionInfo) = validSelectionSolution
    if validSolution in logicAnalyzer.solutions:
      validExactSolutionIndex = logicAnalyzer.solutions.index(validSolution)
      selectionSolutionsIds.add(validExactSolutionIndex)
      validExactSolution = logicAnalyzer.solutions[validExactSolutionIndex]
      validSelectionSolutionsIncluded.append((validExactSolution, validSolutionInfo))
    else:
      validSelectionSolutionsRemainder.append(validSelectionSolution)
  selectionSolutions = []
  for i in range(0, len(validSelectionSolutionsIncluded)):
    validSelectionSolution = validSelectionSolutionsIncluded[i]
    (validSolution, validSolutionInfo) = validSelectionSolution
    validSolution["Ideals"] = validSolutionInfo
  solutionsStartIndex = len(logicAnalyzer.solutions)
  for i in range(0, len(validSelectionSolutionsRemainder)):
    validSelectionSolution = validSelectionSolutionsRemainder[i]
    (validSolution, validSolutionInfo) = validSelectionSolution
    selectionSolutionIndex = solutionsStartIndex + i
    selectionSolutionsIds.add(selectionSolutionIndex)
    validSolution["SolutionNameMin"] = Solution.getNameMin(validSolution, solutionMinNaming)
    validSolution["Ideals"] = validSolutionInfo
    selectionSolutions.append(validSolution)
  selectionSolutionsIdsList = list(selectionSolutionsIds)
  numProblemSizes = logicAnalyzer.numProblemSizes
  # print all 2D slices: one permutation per combination of the indices
  # other than idx0/idx1
  numPermutations = 1
  permutations = []
  for i in range(0, logicAnalyzer.numIndices):
    if i != logicAnalyzer.idx0 and i != logicAnalyzer.idx1:
      numPermutations *= numProblemSizes[i]
  for j in range(0, numPermutations):
    pIdx = j
    permutation = []
    for i in range(0, logicAnalyzer.numIndices):
      if i != logicAnalyzer.idx0 and i != logicAnalyzer.idx1:
        npsi = numProblemSizes[i]
        permutation.append(pIdx%npsi)
        # BUGFIX: use integer division; "/=" under Python 3 turns pIdx into a
        # float, so subsequent "%" results (used as indices) become floats
        pIdx //= numProblemSizes[i]
    permutations.append(permutation)
  for permutation in permutations:
    logicAnalyzer.print2D(permutation)
  ######################################
  # Range Logic
  rangeLogic = logicAnalyzer.enRule(0, logicAnalyzer.globalIndexRange)
  print2("# Final Range Logic:")
  print2(rangeLogic)
  logicComplexity = [0]*logicAnalyzer.numIndices
  logicAnalyzer.scoreLogicComplexity(rangeLogic, logicComplexity)
  print2("# Range Logic Complexity: %s" % logicComplexity)
  score = logicAnalyzer.scoreRangeForLogic( \
      logicAnalyzer.globalIndexRange, rangeLogic)
  print1("\n# Score: %.0f ms" % (score/1000))
  logicAnalyzer.prepareLogic(rangeLogic) # convert indices to sizes, -1
  ######################################
  # Exact Logic
  exactLogic = logicAnalyzer.exactWinners
  print1("# Exact Logic:\n")
  print1("%s"%exactLogic)
  return (problemType, logicAnalyzer.solutions, logicAnalyzer.indexOrder, \
      exactLogic, rangeLogic, selectionSolutions, selectionSolutionsIdsList, logicAnalyzer.perfMetric)
################################################################################
# LogicAnalyzer
################################################################################
class LogicAnalyzer:
##############################################################################
##############################################################################
###
### Entry / Top-Level Functions
###
##############################################################################
##############################################################################
##############################################################################
# ENTRY: Init
##############################################################################
  def __init__(self, problemType, problemSizesList, solutionsList, \
      dataFileNameList, inputParameters):
    """Merge the per-group solutions/sizes, load all CSV data, and build the
    index structures used by the logic analysis.

    problemSizesList, solutionsList and dataFileNameList are parallel lists,
    one entry per benchmark size group.
    """
    # parameters
    self.parameters = inputParameters
    # problem type
    self.problemType = problemType
    self.idx0 = self.problemType["Index0"]
    self.idx1 = self.problemType["Index1"]
    self.idxU = self.problemType["IndexUnroll"]
    # merge solutions from size groups
    # solutions needs to be a set, and offset needs to be mapping
    print1("# Merging Solutions:")
    self.numSolutionsPerGroup = []
    # solutionGroupMap[groupIdx][localSolutionIdx] -> index in self.solutions
    self.solutionGroupMap = []
    self.solutions = []
    solutionsHash = {} # for accelerating lookups
    totalSolutions = 0
    for solutionGroupIdx in range(0, len(solutionsList)):
      solutionGroup = solutionsList[solutionGroupIdx]
      totalSolutions += len(solutionGroup)
    progressBar = ProgressBar(totalSolutions)
    for solutionGroupIdx in range(0, len(solutionsList)):
      solutionGroup = solutionsList[solutionGroupIdx]
      self.numSolutionsPerGroup.append(len(solutionGroup))
      self.solutionGroupMap.append({})
      for solutionIdx in range(0, len(solutionGroup)):
        solution = solutionGroup[solutionIdx]
        if not solution in solutionsHash:
          sIdx = len(self.solutions) # the one we are about to add
          self.solutions.append(solution)
          solutionsHash[solution] = sIdx
        else:
          sIdx = solutionsHash[solution]
        self.solutionGroupMap[solutionGroupIdx][solutionIdx] = sIdx
        progressBar.increment()
    self.numSolutions = len(self.solutions)
    self.solutionMinNaming = Solution.getMinNaming(self.solutions)
    self.solutionNames = []
    self.solutionTiles = []
    for solution in self.solutions:
      self.solutionNames.append(Solution.getNameMin(solution, \
          self.solutionMinNaming))
      self.solutionTiles.append("%ux%u"%(solution["MacroTile0"], \
          solution["MacroTile1"]))
    self.flopsPerMac = self.problemType["DataType"].flopsPerMac()
    # merge problem sizes from size groups
    #self.numIndices = len(problemSizesList[0].numProblemSizes)
    self.numIndices = self.problemType["TotalIndices"] + problemType["NumIndicesLD"]
    # per-index set of all sizes seen across the range definitions
    unifiedProblemSizes = []
    for i in range(0, self.numIndices):
      unifiedProblemSizes.append(set())
    self.exactProblemSizes = set()
    self.rangeProblemSizes = set()
    for problemSizes in problemSizesList:
      # add exacts
      for problem in problemSizes.exacts:
        self.exactProblemSizes.add(tuple(problem.sizes))
      # add ranges
      #print "ProblemSizes", problemSizes.sizes
      #FIXME-problem
      self.rangeProblemSizes.update([tuple(problem.sizes) for problem in problemSizes.problems])
      for rangeSize in problemSizes.ranges:
        if globalParameters["ExpandRanges"]:
          # Treat ranges as pile of exacts:
          for rsize in rangeSize.problemSizes:
            self.exactProblemSizes.add(tuple(rsize))
        else:
          # Create the ranges info in the logic file
          #print "RangeSize", rangeSize
          sizedIdx = 0
          mappedIdx = 0
          for i in range(0, self.numIndices):
            if rangeSize.indexIsSized[i]:
              index = rangeSize.indicesSized[sizedIdx]
              sizedIdx += 1
            else:
              # mapped indices reuse another sized index's definition
              index = rangeSize.indicesSized[ \
                  rangeSize.indicesMapped[mappedIdx]]
              mappedIdx += 1
            # index is (start, stride, strideIncrement, stop)
            currentSize = index[0]
            currentStride = index[1]
            while currentSize <= index[3]:
              unifiedProblemSizes[i].add(currentSize)
              currentSize += currentStride
              currentStride += index[2]
    for i in range(0, len(unifiedProblemSizes)):
      unifiedProblemSizes[i] = sorted(list(unifiedProblemSizes[i]))
    print2("UnifiedProblemSizes: %s" % unifiedProblemSizes)
    print2("ExactProblemSizes: %s" % self.exactProblemSizes)
    print2("RangeProblemSizes: %s" % self.rangeProblemSizes)
    # problem size index <-> size
    self.problemSizeToIndex = []
    self.problemIndexToSize = []
    self.numProblemSizes = []
    for i in range(0, self.numIndices):
      self.problemSizeToIndex.append({})
      self.problemIndexToSize.append([])
      for j in range(0, len(unifiedProblemSizes[i])):
        size = unifiedProblemSizes[i][j]
        self.problemSizeToIndex[i][size] = j
        self.problemIndexToSize[i].append(size)
      self.numProblemSizes.append(len(unifiedProblemSizes[i]))
    print1("# NumProblemSizes: %s" % self.numProblemSizes)
    # total size of data array
    self.totalProblems = 1
    for numProblems in self.numProblemSizes:
      self.totalProblems *= numProblems
    self.totalSize = self.totalProblems * self.numSolutions
    print2("TotalProblems: %u" % self.totalProblems)
    print2("TotalSolutions: %u" % self.numSolutions)
    print2("TotalSize: %u" % self.totalSize)
    # data is a 2D array [problemIdx][solutionIdx] which stores perf data in gflops for
    # the specified solution (-2 marks "no data yet")
    self.data = array.array('f', [-2]*self.totalSize)
    # Each entry in exactWinners is a 2D array [solutionIdx, perf]
    self.exactWinners = {}
    """
    # map problem sizes -> index
    self.problemSizeToIndex = []
    self.problemIndexToSize = []
    sizedIdx = 0
    mappedIdx = 0
    for i in range(0, self.numIndices):
      self.problemSizeToIndex.append({})
      self.problemIndexToSize.append([])
      if self.problemSizes.indexIsSized[i]:
        index = self.problemSizes.indicesSized[sizedIdx]
        sizedIdx += 1
      else:
        index = self.problemSizes.indicesSized[ \
            self.problemSizes.indicesMapped[mappedIdx]]
        mappedIdx += 1
      currentSize = index[0]
      currentStride = index[1]
      idx = 0
      while currentSize <= index[3]:
        self.problemSizeToIndex[i][currentSize] = idx
        self.problemIndexToSize[i].append(currentSize)
        currentSize += currentStride
        currentStride += index[2]
        idx += 1
    """
    #self.rangeIndicesFree = range(0, self.problemType["NumIndicesC"])
    #self.rangeIndicesSummation = range(self.problemType["NumIndicesC"], \
    #    self.problemType["TotalIndices"])
    self.indexOrder = self.recommendedIndexOrder()
    print2("IndexOrder: %s" % self.indexOrder)
    self.globalIndexRange = []
    for i in range(0, self.numIndices):
      self.globalIndexRange.append([0, self.numProblemSizes[i]])
    self.problemIndicesForGlobalRange \
        = self.problemIndicesForRange(self.globalIndexRange)
    self.tab = [""]*self.numIndices
    ######################################
    # Read Data From CSV
    for fileIdx in range(0, len(dataFileNameList)):
      dataFileName = dataFileNameList[fileIdx]
      self.addFromCSV(dataFileName, self.numSolutionsPerGroup[fileIdx], \
          self.solutionGroupMap[fileIdx])
    #print self.data
    # map exact problem sizes to solutions
    print1("# ExactWinners: %s" % self.exactWinners)
##############################################################################
# ENTRY: Add From CSV
##############################################################################
def addFromCSV(self, dataFileName, numSolutions, solutionMap):
# open file
print("reading datafile", dataFileName)
try:
dataFile = open(dataFileName, "r")
except IOError:
printExit("Can't open \"%s\" to get data" % dataFileName )
# column indices
csvFile = csv.reader(dataFile)
problemSizeStartIdx = 1
# notice that for OperationType != GEMM, the numIndices = 0
totalSizeIdx = problemSizeStartIdx + self.numIndices
# need to take care if the loaded csv is the export-winner-version
csvHasWinner = "_CSVWinner" in dataFileName
if csvHasWinner:
# the column of the two are fixed (GFlops, SizeI/J/K/L, LDD/C/A/B, TotalFlops, WinnerGFlops, WinnerTimeUs, WinnerIdx, WinnerName)
# the order are implemented in ResultFileReporter.cpp (NewClient) and Client.h (OldClient)
columnOfWinnerGFlops = 10
columnOfWinnerIdx = 12
# iterate over rows
rowIdx = 0
for row in csvFile:
rowIdx+=1
if rowIdx == 1:
# get unit (gflops or gflops/cu) of benchmark data
perfUnit = row[0]
if perfUnit == "GFlops":
self.perfMetric = "DeviceEfficiency"
elif perfUnit == "GFlopsPerCU":
self.perfMetric = "CUEfficiency"
else:
printWarning("Performance unit %s in %s is unrecognized: assuming GFlops (device efficiency)" % (perfUnit, dataFileName))
self.perfMetric = "DeviceEfficiency"
# get the length of each row, and derive the first column of the solution instead of using wrong "solutionStartIdx = totalSizeIdx + 1"
rowLength = len(row)
solutionStartIdx = rowLength - numSolutions
continue
else:
#if len(row) < rowLength:
# printWarning("CSV File %s row %u doesn't have %u elements; ignoring remainer of file." \
# % (dataFileName, rowIdx, rowLength) )
# break
# get problem size
problemSize = []
for i in range(problemSizeStartIdx, totalSizeIdx):
problemSize.append(int(row[i]))
problemSize = tuple(problemSize)
# Exact Problem Size
if problemSize in self.exactProblemSizes:
if csvHasWinner:
# Faster. Get the winner info from csv directly, avoid an extra loop
winnerGFlops = float(row[columnOfWinnerGFlops])
winnerIdx = int(row[columnOfWinnerIdx])
else:
# Old code. TODO - Can we get rid of this in the future?
# solution gflops
solutionIdx = 0
winnerIdx = -1
winnerGFlops = -1
for i in range(solutionStartIdx, rowLength):
gflops = float(row[i])
if gflops > winnerGFlops:
winnerIdx = solutionIdx
winnerGFlops = gflops
solutionIdx += 1
if winnerIdx != -1:
if problemSize in self.exactWinners:
if winnerGFlops > self.exactWinners[problemSize][1]:
#print "update exact", problemSize, "CSV index=", winnerIdx, self.exactWinners[problemSize], "->", solutionMap[winnerIdx], winnerGFlops
self.exactWinners[problemSize] = [solutionMap[winnerIdx], winnerGFlops]
else:
self.exactWinners[problemSize] = [solutionMap[winnerIdx], winnerGFlops]
#print "new exact", problemSize, "CSV index=", winnerIdx, self.exactWinners[problemSize]
# Range Problem Size
elif problemSize in self.rangeProblemSizes:
problemIndices = []
for i in range(0, self.numIndices):
problemIndices.append(self.problemSizeToIndex[i][problemSize[i]])
serialIdx = self.indicesToSerial(0, problemIndices)
# solution gflops
solutionIdx = 0
for i in range(solutionStartIdx, rowLength):
gflops = float(row[i])
self.data[serialIdx+solutionMap[solutionIdx]] = gflops
solutionIdx += 1
# Unknown Problem Size
else:
printExit("Huh? %s has ProblemSize %s which isn't in its yaml" \
% ( dataFileName, list(problemSize)) )
#print self.data
##############################################################################
# ENTRY: Remove Invalid Solutions
##############################################################################
  def removeInvalidSolutions(self):
    """Repeatedly remove any solution with a 0-GFlops (invalid) data point.

    The full problem space is rescanned after each removal because
    removeSolution renumbers the remaining solution indices.
    """
    #problemIndices = [0]*self.numIndices
    allSolutionValid = False
    while not allSolutionValid:
      # find the first solution with an invalid (0 GFlops) entry
      invalidIdx = -1
      for problemIndices in self.problemIndicesForGlobalRange:
        problemSerial = self.indicesToSerial(0, problemIndices)
        for solutionIdx in range(0, self.numSolutions):
          gflops = self.data[problemSerial+solutionIdx]
          if gflops == 0:
            invalidIdx = solutionIdx
            break
      if invalidIdx >= 0:
        print1("# Removing Invalid Solution: %u %s" \
            % (invalidIdx, self.solutionNames[invalidIdx]) )
        self.removeSolution(invalidIdx)
      else:
        allSolutionValid = True
##############################################################################
# ENTRY: Original KeepLogic algorithm: Remove Least Important Solutions,
# one at a time. Stop when leastImportantSolution indicates no more
# solutions can be removed, which appears to be when the solution
# is used by an exact problem or is the only possible solution for some
# problem or doesn't improve the a solution by > SolutionImportanceMin%
##############################################################################
def removeLeastImportantSolutions(self):
    """Original KeepLogic algorithm: remove least-important solutions one at
    a time.

    Stops when leastImportantSolution() returns None (all remainders are
    exact winners or singular winners) or when the least important solution
    still saves >= SolutionImportanceMin of total time and wins somewhere.
    """
    # Remove least important solutions
    start = time.time()
    while len(self.solutions) > 1:
        lisTuple = self.leastImportantSolution()
        if lisTuple != None:
            lisIdx = lisTuple[0]          # solution index to consider removing
            lisPercSaved = lisTuple[1]    # fraction of total ms saved by it
            lisPercWins = lisTuple[2]     # fraction of problems it wins
            lisPercTime = lisTuple[3]     # fraction of total exec ms it covers
            if lisPercSaved < self.parameters["SolutionImportanceMin"] or lisPercWins == 0:
                print1("# Removing Unimportant Solution %u/%u: %s ( %f%% wins, %f%% ms time, %f%% ms saved" \
                    % (lisIdx, self.numSolutions, self.solutionNames[lisIdx], 100*lisPercWins, 100*lisPercTime, 100*lisPercSaved) )
                self.removeSolution(lisIdx)
                continue
            else:
                break
        else: # no more lis, remainders are exact winner
            break
    stop = time.time()
    print("removeLeastImportantSolutions elapsed time = %.1f secs" % (stop - start))
##############################################################################
# ENTRY: Alternate KeepLogic algorithm that keeps the fastest for each
# exact and range. Other solutions are removed.
##############################################################################
def keepWinnerSolutions(self):
    """Alternate KeepLogic algorithm: keep only the fastest solution for each
    range problem size plus the winner of every exact problem size; prune all
    other solutions.

    Note: the original version also built an unused ``solutionImportance``
    table and computed per-problem sizes/FLOPs that were never read; that
    dead code is removed here.
    """
    # solution indexes for the winners:
    winners = set()
    print("problemIndicesForGlobalRange", self.problemIndicesForGlobalRange)
    for problemIndices in self.problemIndicesForGlobalRange:
        problemSerial = self.indicesToSerial(0, problemIndices)
        # find the fastest solution for this problem size
        winnerIdx = -1
        winnerGFlops = -1e6
        for solutionIdx in range(0, self.numSolutions):
            solutionSerialIdx = problemSerial + solutionIdx
            solutionGFlops = self.data[solutionSerialIdx]
            if solutionGFlops > winnerGFlops:
                winnerIdx = solutionIdx
                winnerGFlops = solutionGFlops
        winners.add(winnerIdx)
    # Always keep the exact sizes:
    for exactProblem in self.exactWinners:
        winnerIdx = self.exactWinners[exactProblem][0]
        #print "keepWinnerSolution adding exact", exactProblem, winnerIdx
        winners.add(winnerIdx)
    print("Winners", winners)
    self.pruneSolutions(winners)
##############################################################################
# ENTRY: En Rule
# currentIndexIndex = 0, 1, 2, 3...
# currentIndexRange will have only 1 size for prior indices (unless initial)
#
# Rule:
# [128, [
# [64, [
# [16, 0],
# [2880,1]
# ]
# ],
# [96, [
# [16, 0],
# [64, 1]
# ]
# ]
# ]
# ], another
#
#
##############################################################################
def enRule(self, currentIndexIndex, currentIndexRange):
    """Recursively build the nested rule list ("logic tree") for one index.

    currentIndexIndex: position (0, 1, 2, ...) within self.indexOrder.
    currentIndexRange: per-index [lo, hi) index ranges; callers have already
    narrowed all prior indices to a single size (except at the initial call).

    Returns a list of [sizeThresholdIndex, payload] rules, where payload is a
    solution index at the last index or a nested rule list otherwise, e.g.
    [128, [[64, [[16, 0], [2880, 1]]], [96, [[16, 0], [64, 1]]]]].
    Returns None when nothing in the range was benchmarked.
    """
    cii = currentIndexIndex
    # progress prefix showing the sizes pinned by callers (up to 4 levels)
    if currentIndexIndex == 0:
        self.tab[cii] = "[] "
    elif currentIndexIndex == 1:
        self.tab[cii] = "[%2u] " % ( \
            currentIndexRange[self.indexOrder[0]][0])
    elif currentIndexIndex == 2:
        self.tab[cii] = "[%2u,%2u] " % ( \
            currentIndexRange[self.indexOrder[0]][0], \
            currentIndexRange[self.indexOrder[1]][0])
    elif currentIndexIndex == 3:
        self.tab[cii] = "[%2u,%2u,%2u] " % ( \
            currentIndexRange[self.indexOrder[0]][0], \
            currentIndexRange[self.indexOrder[1]][0], \
            currentIndexRange[self.indexOrder[2]][0])
    elif currentIndexIndex == 4:
        self.tab[cii] = "[%2u,%2u,%2u,%2u] " % ( \
            currentIndexRange[self.indexOrder[0]][0], \
            currentIndexRange[self.indexOrder[1]][0], \
            currentIndexRange[self.indexOrder[2]][0], \
            currentIndexRange[self.indexOrder[3]][0])
    tab = self.tab[cii]
    if globalParameters["PrintLevel"] == 1:
        stdout.write("\n%s"%tab)
    currentIndex = self.indexOrder[currentIndexIndex]
    print2("%senRule(%s)" % (tab, currentIndexRange))
    nextIndexIndex = currentIndexIndex+1
    nextIndexRange = deepcopy(currentIndexRange)
    isLastIndex = currentIndexIndex == self.numIndices-1
    ruleList = []
    ########################################
    # SingleProblem
    ########################################
    if currentIndexRange[currentIndex][1] \
        - currentIndexRange[currentIndex][0] == 1:
        ########################################
        # SingleProblem & LastIndex
        # this is last index, so just return fastest solution
        ########################################
        if isLastIndex:
            print2("%sSingleProblem & LastIndex" % tab)
            winnerIdx = self.winnerForRange(currentIndexRange)
            if winnerIdx < 0:
                # nothing benchmarked for this single size
                print2("%sSingleProblem & LastIndex :: winnerIdx<0; returning" % (tab) )
                return None
            ruleList.append([-1, winnerIdx])
            if globalParameters["PrintLevel"] == 1:
                stdout.write("%")
        ########################################
        # SingleProblem & NotLastIndex
        # this isn't last index, so just recursively return next index
        ########################################
        else:
            print2("%sSingleProblem & NotLastIndex" % tab)
            # % (tab, nextIndexRange) )
            nextRule = self.enRule(nextIndexIndex, nextIndexRange)
            if nextRule == None:
                print2("%sSingleProblem & NotLastIndex :: nextRule==None; returning" % (tab) )
                return None
            # -1 threshold == catch-all rule
            rule = [ -1, nextRule ]
            ruleList.append(rule)
            if globalParameters["PrintLevel"] == 1:
                stdout.write("%")
    else:
        ########################################
        # MultiProblem
        # Create Initial Rule
        ########################################
        if isLastIndex:
            ########################################
            # MultiProblem & LastIndex
            # InitialRule using winnerForRange()
            ########################################
            print2("%sMultiProblem & LastIndex" % tab)
            winnerIdx = -1
            # scan forward until some size in the range has a benchmarked winner
            for problemIndex in range(currentIndexRange[currentIndex][0], \
                currentIndexRange[currentIndex][1]):
                nextIndexRange[currentIndex][0] = problemIndex
                nextIndexRange[currentIndex][1] = problemIndex+1
                winnerIdx = self.winnerForRange(nextIndexRange)
                initialRule = [ currentIndexRange[currentIndex][0], winnerIdx]
                if winnerIdx >= 0:
                    break
            if winnerIdx < 0:
                print2("%sMultiProblem & LastIndex :: winnerIdx<0; returning" % (tab) )
                return None
        else:
            ########################################
            # MultiProblem & NotLastIndex
            # InitialRule using enRule()
            ########################################
            print2("%sMultiProblem & NotLastIndex" % tab)
            # create initial rule
            winnerIdx = -1
            nextRule = None
            for problemIndex in range(currentIndexRange[currentIndex][0], \
                currentIndexRange[currentIndex][1]):
                nextIndexRange[currentIndex][0] = problemIndex
                nextIndexRange[currentIndex][1] = problemIndex+1
                nextRule = self.enRule(nextIndexIndex, nextIndexRange)
                # break when found initial rule
                if nextRule != None:
                    break
            if nextRule == None:
                printWarning("%sMultiProblem & NotLastIndex :: nextRule==None; returning" % (tab) )
                return None
            initialRule = [ currentIndexRange[currentIndex][0], nextRule ]
        ruleList.append(initialRule)
        print2("%sMultiProblem::InitialRuleList=%s" % (tab, ruleList))
        if globalParameters["PrintLevel"] == 1:
            stdout.write("#")
        ########################################
        # MultiProblem
        # Append Rules to Initial Rule
        ########################################
        print2("%sMultiProblem::Improving Rule" % tab)
        for problemIndex in range(currentIndexRange[currentIndex][0]+1, \
            currentIndexRange[currentIndex][1]):
            # narrow the range to this single size
            nextIndexRange[currentIndex][0] = problemIndex
            nextIndexRange[currentIndex][1] = problemIndex+1
            priorRule = ruleList[len(ruleList)-1]
            priorRuleForSize = deepcopy(priorRule)
            priorRuleForSize[0] = problemIndex
            if isLastIndex:
                ########################################
                # nextRule using winnersForRange()
                winnerIdx = self.winnerForRange(nextIndexRange)
                print2("%sMultiProblem::ImproveRule[%u]::LastIndex::WinnerIdx=%u for %s" % (tab, problemIndex, winnerIdx, nextIndexRange))
                # if no solutions benchmarked for this problem size, continue
                if winnerIdx < 0:
                    ruleList[len(ruleList)-1][0] = problemIndex # NO_UPDATE
                    print2("%sUpdating range b/c None" % tab)
                    if globalParameters["PrintLevel"] == 1:
                        stdout.write(" ")
                    continue
                else:
                    candidateRule = [ problemIndex, winnerIdx]
            else:
                ########################################
                # nextRule using enRule()
                nextRule = self.enRule(nextIndexIndex, nextIndexRange)
                print2("%sMultiProblem::ImproveRule[%u]::NotLastIndex::NextRule=%s for %s; %s" % (tab, problemIndex, nextRule, nextIndexIndex, nextIndexRange))
                if nextRule == None:
                    # nothing benchmarked here: stretch the prior rule over it
                    ruleList[len(ruleList)-1][0] = problemIndex # NO_UPDATE
                    print2("%sUpdating b/c None" % tab)
                    if globalParameters["PrintLevel"] == 1:
                        stdout.write(" ")
                    continue
                else:
                    candidateRule = [ problemIndex, nextRule ]
            ########################################
            # candidate same as prior
            if candidateRule[1] == priorRule[1]:
                print2("%sCandidateRule==PriorRule; just updating prior" % (tab))
                ruleList[len(ruleList)-1][0] = problemIndex # NO_UPDATE
                if globalParameters["PrintLevel"] == 1:
                    stdout.write(" ")
                continue
            ########################################
            # compare candidate vs prior
            else:
                print2("%sCandidateRule!=PriorRule; appending rule assuming its better" % (tab))
                """
                priorRuleScore = self.scoreRangeForLogic(nextIndexRange, \
                    [priorRuleForSize])
                logicComplexity = [0]*self.numIndices
                self.scoreLogicComplexity( \
                    [priorRuleForSize], logicComplexity)
                priorRuleScore += self.parameters["BranchPenalty"] \
                    * sum(logicComplexity)
                # score candidate
                candidateRuleScore = self.scoreRangeForLogic(nextIndexRange, \
                    [candidateRule])
                #print "CRS", candidateRuleScore
                logicComplexity = [0]*self.numIndices
                self.scoreLogicComplexity( \
                    [candidateRule], logicComplexity)
                candidateRuleScore += self.parameters["BranchPenalty"] \
                    * sum(logicComplexity)
                candidateRuleScore += self.parameters["BranchPenalty"] # penalize
                candidateFaster = candidateRuleScore < priorRuleScore
                print2("%sP[%2u]: %s %s~%.0fus < %s~%.0fus" % (tab, problemIndex, \
                    "wins" if candidateFaster else "same", \
                    candidateRule, candidateRuleScore, priorRuleForSize, \
                    priorRuleScore ))
                """
                ########################################
                # candidate wins
                #print candidateRuleScore, priorRuleScore
                # NOTE(review): score-based comparison above is disabled;
                # any differing candidate is currently always appended.
                if True: # or candidateRuleScore < priorRuleScore:
                    ruleList.append(candidateRule)
                    print2("%sAppending b/c Different" % tab)
                    if globalParameters["PrintLevel"] == 1:
                        stdout.write("#")
                ########################################
                # prior wins
                else:
                    print2("%sPrior Rule Wins" % tab)
                    if globalParameters["PrintLevel"] == 1:
                        stdout.write(".")
                    ruleList[len(ruleList)-1][0] = problemIndex # NO_UPDATE
    print2("%sReturning RuleList: %s" % (tab, ruleList))
    return ruleList
##############################################################################
##############################################################################
###
### Mid-Level Functions
###
##############################################################################
##############################################################################
##############################################################################
# Prepare Logic
# convert threshold indices to sizes
# last threshold = -1
##############################################################################
def prepareLogic(self, logic):
    """Recursively convert rule thresholds from problem-size *indices* into
    actual sizes, and mark the last rule of every level with -1 (catch-all).
    Mutates `logic` in place.
    """
    depth = self.getLogicDepth(logic)
    if depth == 0: return
    # which problem index this level of the tree branches on
    indexIndex = self.numIndices - depth
    index = self.indexOrder[indexIndex]
    for i in range(0, len(logic)):
        if i == len(logic)-1:
            logic[i][0] = -1  # last rule catches all sizes above prior thresholds
        else:
            logic[i][0] = self.problemIndexToSize[index][logic[i][0]]
        self.prepareLogic(logic[i][1])
##############################################################################
# Print2D
##############################################################################
def print2D(self, indices ):
    """Write Winner2D_*.csv into the working path: 2-D tables over the two
    tiled indices for the fixed sizes given by `indices` (one value per
    non-tiled index): winner solution index, winner GFlops, winner/second
    speedup, second-place index, and one GFlops table per solution.
    """
    indicesIdx = 0
    problemIndices = []
    for i in range(0, self.numIndices):
        if i == self.idx0:
            problemIndices.append(-1)   # placeholder, filled per row below
        elif i == self.idx1:
            problemIndices.append(-1)   # placeholder, filled per column below
        else:
            problemIndices.append(indices[indicesIdx])
            indicesIdx += 1
    winnerIndices = []
    # each table is accumulated as CSV text
    w = "winner"
    g = "gflops"
    f = "faster"
    s = "second"
    sss = []
    for sIdx in range(0, self.numSolutions):
        sss.append("Sol[%u]" % sIdx)
    # header row of column sizes
    # NOTE(review): headers index problemIndexToSize[1]/[0] directly rather
    # than via idx1/idx0 — verify these always coincide for this writer.
    for j in range(0, self.numProblemSizes[1]):
        w += ",%4u" % self.problemIndexToSize[1][j]
        g += ",%4u" % self.problemIndexToSize[1][j]
        f += ",%4u" % self.problemIndexToSize[1][j]
        s += ",%4u" % self.problemIndexToSize[1][j]
        for sIdx in range(0, self.numSolutions):
            sss[sIdx] += ",%4u" % self.problemIndexToSize[1][j]
    w += "\n"
    g += "\n"
    f += "\n"
    s += "\n"
    for sIdx in range(0, self.numSolutions):
        sss[sIdx] += "\n"
    # one row per size of the first tiled index
    for i in range(0, self.numProblemSizes[0]):
        problemIndices[self.idx0] = i
        w += "%4u" % self.problemIndexToSize[0][i]
        g += "%4u" % self.problemIndexToSize[0][i]
        f += "%4u" % self.problemIndexToSize[0][i]
        s += "%4u" % self.problemIndexToSize[0][i]
        for sIdx in range(0, self.numSolutions):
            sss[sIdx] += "%4u" % self.problemIndexToSize[0][i]
        for j in range(0, self.numProblemSizes[1]):
            problemIndices[self.idx1] = j
            problemSerial = self.indicesToSerial(0, problemIndices)
            for sIdx in range(0, self.numSolutions):
                sss[sIdx] += ",%f" % self.data[problemSerial+sIdx]
            # find fastest and second-fastest solution for this cell
            winnerIdx = 0
            secondIdx = 1
            winnerGFlops = self.data[problemSerial+0]
            secondGFlops = 1e-9
            for solutionIdx in range(1, self.numSolutions):
                solutionSerialIdx = problemSerial + solutionIdx
                solutionGFlops = self.data[solutionSerialIdx]
                if solutionGFlops > winnerGFlops:
                    secondIdx = winnerIdx
                    secondGFlops = winnerGFlops
                    winnerIdx = solutionIdx
                    winnerGFlops = solutionGFlops
                elif solutionGFlops > secondGFlops:
                    secondIdx = solutionIdx
                    secondGFlops = solutionGFlops
            if winnerIdx not in winnerIndices:
                winnerIndices.append(winnerIdx)
            w += ",%4u" % winnerIdx
            g += ",%f" % winnerGFlops
            f += ",%f" % (winnerGFlops/secondGFlops)
            s += ",%4u" % (secondIdx)
        w += "\n"
        g += "\n"
        f += "\n"
        s += "\n"
        for sIdx in range(0, self.numSolutions):
            sss[sIdx] += "\n"
    w += "\n\n"
    g += "\n\n"
    f += "\n\n"
    s += "\n\n"
    for sIdx in range(0, self.numSolutions):
        sss[sIdx] += "\n\n"
    # legend of every solution that won at least one cell
    w += "Winners:\n"
    for winnerIdx in winnerIndices:
        w += "%4u, %s, %s\n" % (winnerIdx, self.solutionTiles[winnerIdx], self.solutionNames[winnerIdx])
    printFileName = "Winner2D"
    for idx in indices:
        printFileName += "_%u" % idx
    printFileName += ".csv"
    printFile = open(os.path.join(globalParameters["WorkingPath"], printFileName), "w")
    printFile.write( w )
    printFile.write( g )
    printFile.write( f )
    printFile.write( s )
    for sIdx in range(0, self.numSolutions):
        printFile.write( sss[sIdx] )
    printFile.close()
##############################################################################
# Least Important Solution
##############################################################################
def leastImportantSolution(self):
    """Identify the least important removable solution.

    Returns (solutionIdx, percentSaved, percentWins, percentTime) for the
    removable solution whose removal costs the least total time, or None
    when every remaining solution must be kept (it is an exact-size winner
    or the only valid solution for some problem size).
    """
    # per solution: [idx, msSaved, winCount, msExec, isSingularWinner]
    solutionImportance = []
    for i in range(0, self.numSolutions):
        solutionImportance.append([i, 0, 0, 0, False])
    problemSizes = [0]*self.numIndices
    totalSavedMs = 0
    totalExecMs = 0
    totalWins = 0
    for problemIndices in self.problemIndicesForGlobalRange:
        for i in range(0, self.numIndices):
            problemSizes[i] = self.problemIndexToSize[i][problemIndices[i]]
        totalFlops = self.flopsPerMac
        for size in problemSizes:
            totalFlops *= size
        problemSerial = self.indicesToSerial(0, problemIndices)
        # find fastest and second-fastest solution for this problem
        winnerIdx = -1
        winnerGFlops = -1e6
        secondGFlops = -1e9
        for solutionIdx in range(0, self.numSolutions):
            solutionSerialIdx = problemSerial + solutionIdx
            solutionGFlops = self.data[solutionSerialIdx]
            if solutionGFlops > winnerGFlops:
                secondGFlops = winnerGFlops
                winnerIdx = solutionIdx
                winnerGFlops = solutionGFlops
            elif solutionGFlops > secondGFlops:
                secondGFlops = solutionGFlops
        # BUGFIX: only convert GFlops to time when positive; the original
        # divided unconditionally, raising ZeroDivisionError whenever the
        # winner or runner-up had a 0-GFlops (un-benchmarked) entry.
        if winnerGFlops > 0:
            winnerTimeMs = totalFlops / winnerGFlops / 1000000.0
            if secondGFlops > 0:
                secondTimeMs = totalFlops / secondGFlops / 1000000.0
                solutionImportance[winnerIdx][1] += (secondTimeMs - winnerTimeMs)
                totalSavedMs += secondTimeMs - winnerTimeMs
            solutionImportance[winnerIdx][2] += 1
            solutionImportance[winnerIdx][3] += winnerTimeMs
            totalExecMs += winnerTimeMs
            totalWins += 1
        if secondGFlops <= 0:
            solutionImportance[winnerIdx][4] = True # this is only valid solution for this problem size, keep it
    # print data before sorting
    for i in range(0, self.numSolutions):
        print2("[%2u] %s: %e saved, %u wins, %u time, %s" \
            % (solutionImportance[i][0], \
            self.solutionNames[solutionImportance[i][0]], \
            solutionImportance[i][1], \
            solutionImportance[i][2], \
            solutionImportance[i][3], \
            "singular" if solutionImportance[i][4] else "" ) )
    totalSavedMs = max(1, totalSavedMs)
    # least time saved first
    solutionImportance.sort(key=lambda x: x[1])
    for i in range(0, self.numSolutions):
        solutionIdx = solutionImportance[i][0]
        canRemove = not solutionImportance[i][4] # don't remove if is only win for any size
        for exactProblem in self.exactWinners:
            winnerIdx = self.exactWinners[exactProblem][0]
            if solutionIdx == winnerIdx: # exact winners are important
                canRemove = False
                break
        if canRemove:
            idx = solutionImportance[i][0]
            if totalSavedMs > 0:
                percSaved = 1.0 * solutionImportance[i][1] / totalSavedMs
            else:
                percSaved = 0
            if totalWins > 0:
                percWins = 1.0 * solutionImportance[i][2] / totalWins
            else:
                percWins = 0
            if totalExecMs > 0:
                percTime = 1.0 * solutionImportance[i][3] / totalExecMs
            else:
                percTime = 0
            return ( idx, percSaved, percWins, percTime )
    return None
##############################################################################
# Remove Solution
##############################################################################
def removeSolution(self, removeSolutionIdx):
    """Remove one solution by index: rebuild self.solutions, the derived
    names/tiles, repack self.data without that solution's column, and shift
    the exact-winner indices that followed it.
    """
    # temporarily move current to old
    oldSolutions = deepcopy(self.solutions)
    oldNumSolutions = self.numSolutions
    oldData = deepcopy(self.data)
    # update solutions
    self.solutions = []
    for i in range(0, oldNumSolutions):
        if i != removeSolutionIdx:
            self.solutions.append(oldSolutions[i])
    self.solutionMinNaming = Solution.getMinNaming(self.solutions)
    self.solutionNames = []
    self.solutionTiles = []
    for solution in self.solutions:
        self.solutionNames.append(Solution.getNameMin(solution, \
            self.solutionMinNaming))
        self.solutionTiles.append("%ux%u"%(solution["MacroTile0"], \
            solution["MacroTile1"]))
    self.numSolutions = len(self.solutions)
    # update data: copy surviving columns, renumbered densely
    self.totalSize = self.totalProblems * self.numSolutions
    self.data = array.array('f', [0]*self.totalSize)
    for problemIndex in range(0, self.totalProblems):
        newSolutionIdx = 0
        for oldSolutionIdx in range(0, oldNumSolutions):
            if oldSolutionIdx != removeSolutionIdx:
                self.data[problemIndex*self.numSolutions+newSolutionIdx] \
                    = oldData[problemIndex*oldNumSolutions+oldSolutionIdx]
                newSolutionIdx += 1
    # update exact Winners
    for problemSize in self.exactWinners:
        # NOTE(review): `>=` also decrements a winner that EQUALS
        # removeSolutionIdx; callers appear never to remove an exact
        # winner — confirm that invariant if call sites change.
        if self.exactWinners[problemSize][0] >= removeSolutionIdx:
            self.exactWinners[problemSize][0] -= 1
##############################################################################
# Prune a list of solutions, keeping only the indices specified in
# keepSolutions. keepSolutions is a set not a list
##############################################################################
def pruneSolutions(self, keepSolutions):
    """Keep only the solutions whose indices are in keepSolutions (a set,
    not a list): rebuild self.solutions and derived names/tiles, repack
    self.data densely, and remap exact-winner indices old->new.
    """
    removeSolutionIdxList = []
    solutionMapNewToOld = [] # dense mapping
    solutionMapOldToNew = [-1] * self.numSolutions
    # temporarily move current to old
    oldSolutions = deepcopy(self.solutions)
    oldNumSolutions = self.numSolutions
    oldData = deepcopy(self.data)
    # update solutions
    self.solutions = []
    for i in range(0, oldNumSolutions):
        if i in keepSolutions:
            solutionMapNewToOld.append(i)
            solutionMapOldToNew[i] = len(self.solutions)
            self.solutions.append(oldSolutions[i])
        else:
            removeSolutionIdxList.append(i)
    self.solutionMinNaming = Solution.getMinNaming(self.solutions)
    self.solutionNames = []
    self.solutionTiles = []
    for solution in self.solutions:
        self.solutionNames.append(Solution.getNameMin(solution, \
            self.solutionMinNaming))
        self.solutionTiles.append("%ux%u"%(solution["MacroTile0"], \
            solution["MacroTile1"]))
    self.numSolutions = len(self.solutions)
    # update data: copy the kept columns into a freshly sized array
    self.totalSize = self.totalProblems * self.numSolutions
    self.data = array.array('f', [0]*self.totalSize)
    for problemIndex in range(0, self.totalProblems):
        for newSolutionIdx in range(0, self.numSolutions):
            oldSolutionIdx = solutionMapNewToOld[newSolutionIdx]
            self.data[problemIndex*self.numSolutions+newSolutionIdx] \
                = oldData[problemIndex*oldNumSolutions+oldSolutionIdx]
    # update exact Winners (a -1 here means an exact winner was pruned)
    for problemSize in self.exactWinners:
        #print "prune updating exacWinner", problemSize, \
        #    "from ", self.exactWinners[problemSize][0], \
        #    "to ", solutionMapOldToNew[self.exactWinners[problemSize][0]]
        self.exactWinners[problemSize][0] = \
            solutionMapOldToNew[self.exactWinners[problemSize][0]]
        if self.exactWinners[problemSize][0] == -1:
            print(("warning: exactWinner[", problemSize, "] == -1"))
        if self.exactWinners[problemSize][0] >= self.numSolutions:
            print(("warning: exactWinner[", problemSize, "] "))
##############################################################################
# Score Range For Logic
##############################################################################
def scoreRangeForLogic(self, indexRange, logic):
    """Score a (possibly shallow) logic tree over indexRange.

    The logic is wrapped in catch-all [-1, ...] levels until it is as deep
    as the full index space, then handed to scoreRangeForFullLogic.
    """
    missingDepth = self.numIndices - self.getLogicDepth(logic)
    paddedLogic = deepcopy(logic)
    wrapped = 0
    while wrapped < missingDepth:
        paddedLogic = [[-1, paddedLogic]]
        wrapped += 1
    return self.scoreRangeForFullLogic(missingDepth, indexRange, paddedLogic)
##############################################################################
# Score Range For Full Logic
##############################################################################
def scoreRangeForFullLogic(self, depth, indexRange, logic):
    """Sum the estimated execution time (microseconds) over every problem in
    indexRange, using `logic` to choose which solution runs each problem.
    """
    totalTimeUs = 0
    for indices in self.problemIndicesForRange(indexRange):
        chosenIdx = self.getSolutionForProblemIndicesUsingLogic( \
            indices, logic)
        if chosenIdx == None:
            printWarning("SolutionIdx = None. This should never happen.")
            continue
        serial = self.indicesToSerial(0, indices)
        # clamp to avoid dividing by zero for un-benchmarked entries
        gflops = max(1E-9, self.data[serial + chosenIdx])
        totalTimeUs += self.totalFlopsForProblemIndices(indices) / gflops / 1000
    return totalTimeUs
##############################################################################
# Get Solution For Problem Indices Using Logic
##############################################################################
def getSolutionForProblemIndicesUsingLogic(self, problemIndices, logic):
    """Walk the nested logic tree and return the leaf (a solution index)
    selected for the given problem indices.

    At each level a rule [threshold, child] applies when the current size
    index is <= threshold; a negative threshold is the catch-all last rule.
    Each descent consumes the front of the (index-ordered) indices list.
    """
    currentProblemIndices = self.toIndexOrder(problemIndices)
    currentLogic = logic
    for i in range(0, self.numIndices):
        currentSizeIndex = currentProblemIndices[0]
        for j in range(0, len(currentLogic)):
            if currentLogic[j][0] < 0:
                # catch-all rule: descend
                currentProblemIndices = currentProblemIndices[1:]
                currentLogic = currentLogic[j][1]
                break
            if currentLogic[j][0] >= 0:
                if currentSizeIndex <= currentLogic[j][0]:
                    currentProblemIndices = currentProblemIndices[1:]
                    currentLogic = currentLogic[j][1]
                    break
    # after numIndices descents currentLogic is the solution-index leaf
    return currentLogic
##############################################################################
##############################################################################
###
### Helper / Low-Level Functions
###
##############################################################################
##############################################################################
##############################################################################
# Get Winner For Problem
def getWinnerForProblem(self, problemIndices):
    """Return (winnerIdx, winnerGFlops): the fastest solution for one
    problem. Un-benchmarked (0) entries are clamped to 1e-9 GFlops so a
    winner is always produced.
    """
    base = self.indicesToSerial(0, problemIndices)
    bestIdx = -1
    bestGFlops = -1
    for candidate in range(0, self.numSolutions):
        gflops = max(1E-9, self.data[base + candidate])
        if gflops > bestGFlops:
            bestIdx, bestGFlops = candidate, gflops
    # print "Winner %u %f" % (bestIdx, bestGFlops)
    return (bestIdx, bestGFlops)
##############################################################################
# Winner For Range, -1 if nothing benchmarked
def winnerForRange(self, indexRange):
    """Return the index of the fastest solution over indexRange, or -1 when
    the scores never differ (nothing benchmarked differentiates them).
    A single solution always wins by default.
    """
    if self.numSolutions == 1:
        return 0
    scores = self.scoreRangeForSolutions(indexRange)
    best = 0
    differentiated = False
    for candidate in range(1, self.numSolutions):
        if scores[candidate] < scores[best]:
            best = candidate
            differentiated = True
        elif scores[candidate] > scores[best]:
            differentiated = True
        # equal scores leave both the leader and the flag untouched
    return best if differentiated else -1
##############################################################################
# Score (microseconds) Range For Solutions
def scoreRangeForSolutions(self, indexRange):
    """Per-solution total execution time (microseconds) over every problem
    in indexRange. A solution with no data (0 GFlops) for any problem gets
    +inf added so it is automatically disqualified.
    """
    scores = [0] * self.numSolutions
    for indices in self.problemIndicesForRange(indexRange):
        base = self.indicesToSerial(0, indices)
        flops = self.totalFlopsForProblemIndices(indices)
        for candidate in range(0, self.numSolutions):
            gflops = self.data[base + candidate]
            scores[candidate] += flops / gflops / 1000 if gflops > 0 \
                else float("inf")
    return scores
##############################################################################
# Score Logic Complexity
def scoreLogicComplexity(self, logic, logicComplexity):
    """Count branch entries per tree level, accumulating into the
    logicComplexity list (one counter per index level). Leaves contribute
    nothing.
    """
    depth = self.getLogicDepth(logic)
    if depth == 0:
        return
    level = self.numIndices - depth
    for entry in logic:
        logicComplexity[level] += 1
        self.scoreLogicComplexity(entry[1], logicComplexity)
##############################################################################
# Get Logic Depth
def getLogicDepth(self, logic):
    """Depth of a nested [[threshold, child], ...] logic tree; a non-list
    leaf (solution index) has depth 0.
    """
    depth = 0
    node = logic
    while isinstance(node, list):
        depth += 1
        node = node[0][1]
    return depth
##############################################################################
# To Index Order
def toIndexOrder(self, problemIndices):
    """Permute problemIndices from natural order into self.indexOrder."""
    return [problemIndices[position] for position in self.indexOrder]
##############################################################################
# Total Flops For Problem Indices
def totalFlopsForProblemIndices(self, problemIndices):
    """Total FLOPs for one problem: flopsPerMac times the product of the
    problem's sizes (looked up from its per-index size indices).
    """
    total = self.flopsPerMac
    for dim in range(0, self.numIndices):
        total *= self.problemIndexToSize[dim][problemIndices[dim]]
    return total
##############################################################################
# Recommended Index Order
# TODO, this may depend on transposes
def recommendedIndexOrder(self):
    """Recommended analysis index order: all other indices first, then the
    summation index idxU, then the two tiled free indices idx0 and idx1.
    TODO, this may depend on transposes.
    """
    special = (self.idxU, self.idx0, self.idx1)
    order = [i for i in range(0, self.problemType["TotalIndices"]) \
        if i not in special]
    order.extend(special)
    return order
##############################################################################
# Problem Indices For Range
def problemIndicesForRange(self, indexRange):
    """Enumerate every problem-index tuple in the half-open hyper-rectangle
    indexRange = [[lo0, hi0], [lo1, hi1], ...], with index 0 varying
    fastest (odometer order). Returns [] when any dimension is empty.
    """
    # early return for an empty iteration space
    for dim in range(0, self.numIndices):
        if indexRange[dim][0] == indexRange[dim][1]:
            return []
    cursor = [bounds[0] for bounds in indexRange]
    result = []
    while True:
        result.append(list(cursor))
        # odometer increment: bump index 0 and carry into higher indices
        cursor[0] += 1
        dim = 0
        while cursor[dim] >= indexRange[dim][1]:
            if dim == self.numIndices - 1:
                return result
            cursor[dim] = indexRange[dim][0]
            dim += 1
            cursor[dim] += 1
##############################################################################
# Get Item
def __getitem__(self, indexTuple):
    """analyzer[(problemIndices, solutionIdx)] -> GFlops.
    problemIndices are in analysis order."""
    problemIndices, solutionIdx = indexTuple
    return self.data[self.indicesToSerial(solutionIdx, problemIndices)]
##############################################################################
# Set Item
def __setitem__(self, indexTuple, value):
    """analyzer[(problemIndices, solutionIdx)] = GFlops.
    problemIndices are in analysis order."""
    problemIndices, solutionIdx = indexTuple
    self.data[self.indicesToSerial(solutionIdx, problemIndices)] = value
##############################################################################
# Indices -> Serial
def indicesToSerial(self, solutionIdx, indices ):
    """Flatten (solutionIdx, problemIndices) into the 1-D self.data offset.
    The solution index is the fastest-varying dimension, followed by the
    problem indices in order.
    """
    serial = solutionIdx
    stride = self.numSolutions
    for dim in range(0, self.numIndices):
        serial += indices[dim] * stride
        stride *= self.numProblemSizes[dim]
    return serial
def generateLogic(config, benchmarkDataPath, libraryLogicPath):
    """Analyze all benchmark CSVs under benchmarkDataPath and write one
    library-logic YAML per problem type into libraryLogicPath.

    config: the "LibraryLogic" section of the Tensile config; keys missing
    from it fall back to defaultAnalysisParameters.
    """
    print2("# LibraryLogic config: %s" % config)
    # BUGFIX: the format string was missing its %s placeholder, so the "%"
    # operator raised TypeError (not all arguments converted).
    print2("# DefaultAnalysisParameters: %s" % defaultAnalysisParameters)
    setWorkingPath(libraryLogicPath)
    # Assign Defaults
    analysisParameters = {}
    for parameter in defaultAnalysisParameters:
        assignParameterWithDefault(analysisParameters, parameter, config, \
            defaultAnalysisParameters)
    print1("")
    print1(HR)
    currentTime = time.time()
    elapsedTime = currentTime - startTime
    print1("# Analysing data in %s - %.3fs" % (globalParameters["BenchmarkDataPath"], elapsedTime) )
    for parameter in analysisParameters:
        print2("# %s: %s" % (parameter, analysisParameters[parameter]))
    print1(HR)
    print1("")
    ##############################################################################
    # Determine Which Problem Types
    ##############################################################################
    problemTypes = {}
    if not os.path.exists(benchmarkDataPath):
        printExit("Path doesn't exist: %s" % benchmarkDataPath)
    fileNames = sorted(os.listdir(benchmarkDataPath))
    for fileName in fileNames:
        if os.path.splitext(fileName)[1] == ".csv":
            fileBase = os.path.splitext( \
                os.path.join(benchmarkDataPath, \
                fileName))[0]
            dataFileName = fileBase + ".csv"
            solutionsFileName = fileBase + ".yaml"
            selectionFileName = fileBase + ".gsp"
            if not os.path.exists(dataFileName):
                printExit("%s doesn't exist for %s" % (dataFileName, fileBase) )
            if not os.path.exists(solutionsFileName):
                printExit("%s doesn't exist for %s" % (solutionsFileName, fileBase) )
            (problemSizes, solutions) = LibraryIO.parseSolutionsFile(solutionsFileName)
            if len(solutions) == 0:
                printExit("%s doesn't contains any solutions." % (solutionsFileName) )
            problemType = solutions[0]["ProblemType"]
            if problemType not in problemTypes:
                problemTypes[problemType] = []
            problemTypes[problemType].append( (problemSizes, \
                dataFileName, solutionsFileName, selectionFileName, solutions) )
    # analyze each problem type and write its library-logic YAML
    for problemType in problemTypes:
        logicTuple = analyzeProblemType(problemType, problemTypes[problemType], analysisParameters)
        filename = os.path.join(globalParameters["WorkingPath"], \
            "{}_{}".format(analysisParameters["ScheduleName"], str(problemType) + ".yaml"))
        print2("# writing library logic YAML {}".format(filename))
        data = LibraryIO.createLibraryLogic(analysisParameters["ScheduleName"], \
            analysisParameters["ArchitectureName"], analysisParameters["DeviceNames"], logicTuple)
        LibraryIO.writeYAML(filename, data, explicit_start=False, explicit_end=False)
    currentTime = time.time()
    elapsedTime = currentTime - startTime
    print1("%s\n# Finish Analysing data to %s in %.3fs\n%s" % (HR, os.path.split(libraryLogicPath)[0], elapsedTime, HR) )
    popWorkingPath()
################################################################################
################################################################################
###
### Main
###
################################################################################
################################################################################
def main( config ):
    """Tensile entry point: analyze benchmark data under the working path
    and write the resulting library logic."""
    workingPath = globalParameters["WorkingPath"]
    generateLogic(config, \
        os.path.join(workingPath, globalParameters["BenchmarkDataPath"]), \
        os.path.join(workingPath, globalParameters["LibraryLogicPath"]))
| mit |
hurricup/intellij-community | python/testData/inspections/PyNumpyType/ReturnOptional.py | 79 | 1681 | def unique(ar, return_index=False, return_inverse=False, return_counts=False):
"""
Find the unique elements of an array.
Returns the sorted unique elements of an array. There are two optional
outputs in addition to the unique elements: the indices of the input array
that give the unique values, and the indices of the unique array that
reconstruct the input array.
Parameters
----------
ar : array_like
Input array. This will be flattened if it is not already 1-D.
return_index : bool, optional
If True, also return the indices of `ar` that result in the unique
array.
return_inverse : bool, optional
If True, also return the indices of the unique array that can be used
to reconstruct `ar`.
return_counts : bool, optional
.. versionadded:: 1.9.0
If True, also return the number of times each unique value comes up
in `ar`.
Returns
-------
unique : ndarray
The sorted unique values.
unique_indices : ndarray, optional
The indices of the first occurrences of the unique values in the
(flattened) original array. Only provided if `return_index` is True.
unique_inverse : ndarray, optional
The indices to reconstruct the (flattened) original array from the
unique array. Only provided if `return_inverse` is True.
unique_counts : ndarray, optional
.. versionadded:: 1.9.0
The number of times each of the unique values comes up in the
original array. Only provided if `return_counts` is True.
"""
ar = np.asanyarray(ar).flatten()
u, indices = unique(a, return_index=True) | apache-2.0 |
jjingrong/PONUS-1.2 | venv/Lib/encodings/cp1140.py | 593 | 13361 | """ Python Character Mapping Codec cp1140 generated from 'python-mappings/CP1140.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
def encode(self,input,errors='strict'):
return codecs.charmap_encode(input,errors,encoding_table)
def decode(self,input,errors='strict'):
return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
return codecs.charmap_encode(input,self.errors,encoding_table)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
def decode(self, input, final=False):
return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='cp1140',
encode=Codec().encode,
decode=Codec().decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
### Decoding Table
decoding_table = (
u'\x00' # 0x00 -> NULL
u'\x01' # 0x01 -> START OF HEADING
u'\x02' # 0x02 -> START OF TEXT
u'\x03' # 0x03 -> END OF TEXT
u'\x9c' # 0x04 -> CONTROL
u'\t' # 0x05 -> HORIZONTAL TABULATION
u'\x86' # 0x06 -> CONTROL
u'\x7f' # 0x07 -> DELETE
u'\x97' # 0x08 -> CONTROL
u'\x8d' # 0x09 -> CONTROL
u'\x8e' # 0x0A -> CONTROL
u'\x0b' # 0x0B -> VERTICAL TABULATION
u'\x0c' # 0x0C -> FORM FEED
u'\r' # 0x0D -> CARRIAGE RETURN
u'\x0e' # 0x0E -> SHIFT OUT
u'\x0f' # 0x0F -> SHIFT IN
u'\x10' # 0x10 -> DATA LINK ESCAPE
u'\x11' # 0x11 -> DEVICE CONTROL ONE
u'\x12' # 0x12 -> DEVICE CONTROL TWO
u'\x13' # 0x13 -> DEVICE CONTROL THREE
u'\x9d' # 0x14 -> CONTROL
u'\x85' # 0x15 -> CONTROL
u'\x08' # 0x16 -> BACKSPACE
u'\x87' # 0x17 -> CONTROL
u'\x18' # 0x18 -> CANCEL
u'\x19' # 0x19 -> END OF MEDIUM
u'\x92' # 0x1A -> CONTROL
u'\x8f' # 0x1B -> CONTROL
u'\x1c' # 0x1C -> FILE SEPARATOR
u'\x1d' # 0x1D -> GROUP SEPARATOR
u'\x1e' # 0x1E -> RECORD SEPARATOR
u'\x1f' # 0x1F -> UNIT SEPARATOR
u'\x80' # 0x20 -> CONTROL
u'\x81' # 0x21 -> CONTROL
u'\x82' # 0x22 -> CONTROL
u'\x83' # 0x23 -> CONTROL
u'\x84' # 0x24 -> CONTROL
u'\n' # 0x25 -> LINE FEED
u'\x17' # 0x26 -> END OF TRANSMISSION BLOCK
u'\x1b' # 0x27 -> ESCAPE
u'\x88' # 0x28 -> CONTROL
u'\x89' # 0x29 -> CONTROL
u'\x8a' # 0x2A -> CONTROL
u'\x8b' # 0x2B -> CONTROL
u'\x8c' # 0x2C -> CONTROL
u'\x05' # 0x2D -> ENQUIRY
u'\x06' # 0x2E -> ACKNOWLEDGE
u'\x07' # 0x2F -> BELL
u'\x90' # 0x30 -> CONTROL
u'\x91' # 0x31 -> CONTROL
u'\x16' # 0x32 -> SYNCHRONOUS IDLE
u'\x93' # 0x33 -> CONTROL
u'\x94' # 0x34 -> CONTROL
u'\x95' # 0x35 -> CONTROL
u'\x96' # 0x36 -> CONTROL
u'\x04' # 0x37 -> END OF TRANSMISSION
u'\x98' # 0x38 -> CONTROL
u'\x99' # 0x39 -> CONTROL
u'\x9a' # 0x3A -> CONTROL
u'\x9b' # 0x3B -> CONTROL
u'\x14' # 0x3C -> DEVICE CONTROL FOUR
u'\x15' # 0x3D -> NEGATIVE ACKNOWLEDGE
u'\x9e' # 0x3E -> CONTROL
u'\x1a' # 0x3F -> SUBSTITUTE
u' ' # 0x40 -> SPACE
u'\xa0' # 0x41 -> NO-BREAK SPACE
u'\xe2' # 0x42 -> LATIN SMALL LETTER A WITH CIRCUMFLEX
u'\xe4' # 0x43 -> LATIN SMALL LETTER A WITH DIAERESIS
u'\xe0' # 0x44 -> LATIN SMALL LETTER A WITH GRAVE
u'\xe1' # 0x45 -> LATIN SMALL LETTER A WITH ACUTE
u'\xe3' # 0x46 -> LATIN SMALL LETTER A WITH TILDE
u'\xe5' # 0x47 -> LATIN SMALL LETTER A WITH RING ABOVE
u'\xe7' # 0x48 -> LATIN SMALL LETTER C WITH CEDILLA
u'\xf1' # 0x49 -> LATIN SMALL LETTER N WITH TILDE
u'\xa2' # 0x4A -> CENT SIGN
u'.' # 0x4B -> FULL STOP
u'<' # 0x4C -> LESS-THAN SIGN
u'(' # 0x4D -> LEFT PARENTHESIS
u'+' # 0x4E -> PLUS SIGN
u'|' # 0x4F -> VERTICAL LINE
u'&' # 0x50 -> AMPERSAND
u'\xe9' # 0x51 -> LATIN SMALL LETTER E WITH ACUTE
u'\xea' # 0x52 -> LATIN SMALL LETTER E WITH CIRCUMFLEX
u'\xeb' # 0x53 -> LATIN SMALL LETTER E WITH DIAERESIS
u'\xe8' # 0x54 -> LATIN SMALL LETTER E WITH GRAVE
u'\xed' # 0x55 -> LATIN SMALL LETTER I WITH ACUTE
u'\xee' # 0x56 -> LATIN SMALL LETTER I WITH CIRCUMFLEX
u'\xef' # 0x57 -> LATIN SMALL LETTER I WITH DIAERESIS
u'\xec' # 0x58 -> LATIN SMALL LETTER I WITH GRAVE
u'\xdf' # 0x59 -> LATIN SMALL LETTER SHARP S (GERMAN)
u'!' # 0x5A -> EXCLAMATION MARK
u'$' # 0x5B -> DOLLAR SIGN
u'*' # 0x5C -> ASTERISK
u')' # 0x5D -> RIGHT PARENTHESIS
u';' # 0x5E -> SEMICOLON
u'\xac' # 0x5F -> NOT SIGN
u'-' # 0x60 -> HYPHEN-MINUS
u'/' # 0x61 -> SOLIDUS
u'\xc2' # 0x62 -> LATIN CAPITAL LETTER A WITH CIRCUMFLEX
u'\xc4' # 0x63 -> LATIN CAPITAL LETTER A WITH DIAERESIS
u'\xc0' # 0x64 -> LATIN CAPITAL LETTER A WITH GRAVE
u'\xc1' # 0x65 -> LATIN CAPITAL LETTER A WITH ACUTE
u'\xc3' # 0x66 -> LATIN CAPITAL LETTER A WITH TILDE
u'\xc5' # 0x67 -> LATIN CAPITAL LETTER A WITH RING ABOVE
u'\xc7' # 0x68 -> LATIN CAPITAL LETTER C WITH CEDILLA
u'\xd1' # 0x69 -> LATIN CAPITAL LETTER N WITH TILDE
u'\xa6' # 0x6A -> BROKEN BAR
u',' # 0x6B -> COMMA
u'%' # 0x6C -> PERCENT SIGN
u'_' # 0x6D -> LOW LINE
u'>' # 0x6E -> GREATER-THAN SIGN
u'?' # 0x6F -> QUESTION MARK
u'\xf8' # 0x70 -> LATIN SMALL LETTER O WITH STROKE
u'\xc9' # 0x71 -> LATIN CAPITAL LETTER E WITH ACUTE
u'\xca' # 0x72 -> LATIN CAPITAL LETTER E WITH CIRCUMFLEX
u'\xcb' # 0x73 -> LATIN CAPITAL LETTER E WITH DIAERESIS
u'\xc8' # 0x74 -> LATIN CAPITAL LETTER E WITH GRAVE
u'\xcd' # 0x75 -> LATIN CAPITAL LETTER I WITH ACUTE
u'\xce' # 0x76 -> LATIN CAPITAL LETTER I WITH CIRCUMFLEX
u'\xcf' # 0x77 -> LATIN CAPITAL LETTER I WITH DIAERESIS
u'\xcc' # 0x78 -> LATIN CAPITAL LETTER I WITH GRAVE
u'`' # 0x79 -> GRAVE ACCENT
u':' # 0x7A -> COLON
u'#' # 0x7B -> NUMBER SIGN
u'@' # 0x7C -> COMMERCIAL AT
u"'" # 0x7D -> APOSTROPHE
u'=' # 0x7E -> EQUALS SIGN
u'"' # 0x7F -> QUOTATION MARK
u'\xd8' # 0x80 -> LATIN CAPITAL LETTER O WITH STROKE
u'a' # 0x81 -> LATIN SMALL LETTER A
u'b' # 0x82 -> LATIN SMALL LETTER B
u'c' # 0x83 -> LATIN SMALL LETTER C
u'd' # 0x84 -> LATIN SMALL LETTER D
u'e' # 0x85 -> LATIN SMALL LETTER E
u'f' # 0x86 -> LATIN SMALL LETTER F
u'g' # 0x87 -> LATIN SMALL LETTER G
u'h' # 0x88 -> LATIN SMALL LETTER H
u'i' # 0x89 -> LATIN SMALL LETTER I
u'\xab' # 0x8A -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\xbb' # 0x8B -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\xf0' # 0x8C -> LATIN SMALL LETTER ETH (ICELANDIC)
u'\xfd' # 0x8D -> LATIN SMALL LETTER Y WITH ACUTE
u'\xfe' # 0x8E -> LATIN SMALL LETTER THORN (ICELANDIC)
u'\xb1' # 0x8F -> PLUS-MINUS SIGN
u'\xb0' # 0x90 -> DEGREE SIGN
u'j' # 0x91 -> LATIN SMALL LETTER J
u'k' # 0x92 -> LATIN SMALL LETTER K
u'l' # 0x93 -> LATIN SMALL LETTER L
u'm' # 0x94 -> LATIN SMALL LETTER M
u'n' # 0x95 -> LATIN SMALL LETTER N
u'o' # 0x96 -> LATIN SMALL LETTER O
u'p' # 0x97 -> LATIN SMALL LETTER P
u'q' # 0x98 -> LATIN SMALL LETTER Q
u'r' # 0x99 -> LATIN SMALL LETTER R
u'\xaa' # 0x9A -> FEMININE ORDINAL INDICATOR
u'\xba' # 0x9B -> MASCULINE ORDINAL INDICATOR
u'\xe6' # 0x9C -> LATIN SMALL LIGATURE AE
u'\xb8' # 0x9D -> CEDILLA
u'\xc6' # 0x9E -> LATIN CAPITAL LIGATURE AE
u'\u20ac' # 0x9F -> EURO SIGN
u'\xb5' # 0xA0 -> MICRO SIGN
u'~' # 0xA1 -> TILDE
u's' # 0xA2 -> LATIN SMALL LETTER S
u't' # 0xA3 -> LATIN SMALL LETTER T
u'u' # 0xA4 -> LATIN SMALL LETTER U
u'v' # 0xA5 -> LATIN SMALL LETTER V
u'w' # 0xA6 -> LATIN SMALL LETTER W
u'x' # 0xA7 -> LATIN SMALL LETTER X
u'y' # 0xA8 -> LATIN SMALL LETTER Y
u'z' # 0xA9 -> LATIN SMALL LETTER Z
u'\xa1' # 0xAA -> INVERTED EXCLAMATION MARK
u'\xbf' # 0xAB -> INVERTED QUESTION MARK
u'\xd0' # 0xAC -> LATIN CAPITAL LETTER ETH (ICELANDIC)
u'\xdd' # 0xAD -> LATIN CAPITAL LETTER Y WITH ACUTE
u'\xde' # 0xAE -> LATIN CAPITAL LETTER THORN (ICELANDIC)
u'\xae' # 0xAF -> REGISTERED SIGN
u'^' # 0xB0 -> CIRCUMFLEX ACCENT
u'\xa3' # 0xB1 -> POUND SIGN
u'\xa5' # 0xB2 -> YEN SIGN
u'\xb7' # 0xB3 -> MIDDLE DOT
u'\xa9' # 0xB4 -> COPYRIGHT SIGN
u'\xa7' # 0xB5 -> SECTION SIGN
u'\xb6' # 0xB6 -> PILCROW SIGN
u'\xbc' # 0xB7 -> VULGAR FRACTION ONE QUARTER
u'\xbd' # 0xB8 -> VULGAR FRACTION ONE HALF
u'\xbe' # 0xB9 -> VULGAR FRACTION THREE QUARTERS
u'[' # 0xBA -> LEFT SQUARE BRACKET
u']' # 0xBB -> RIGHT SQUARE BRACKET
u'\xaf' # 0xBC -> MACRON
u'\xa8' # 0xBD -> DIAERESIS
u'\xb4' # 0xBE -> ACUTE ACCENT
u'\xd7' # 0xBF -> MULTIPLICATION SIGN
u'{' # 0xC0 -> LEFT CURLY BRACKET
u'A' # 0xC1 -> LATIN CAPITAL LETTER A
u'B' # 0xC2 -> LATIN CAPITAL LETTER B
u'C' # 0xC3 -> LATIN CAPITAL LETTER C
u'D' # 0xC4 -> LATIN CAPITAL LETTER D
u'E' # 0xC5 -> LATIN CAPITAL LETTER E
u'F' # 0xC6 -> LATIN CAPITAL LETTER F
u'G' # 0xC7 -> LATIN CAPITAL LETTER G
u'H' # 0xC8 -> LATIN CAPITAL LETTER H
u'I' # 0xC9 -> LATIN CAPITAL LETTER I
u'\xad' # 0xCA -> SOFT HYPHEN
u'\xf4' # 0xCB -> LATIN SMALL LETTER O WITH CIRCUMFLEX
u'\xf6' # 0xCC -> LATIN SMALL LETTER O WITH DIAERESIS
u'\xf2' # 0xCD -> LATIN SMALL LETTER O WITH GRAVE
u'\xf3' # 0xCE -> LATIN SMALL LETTER O WITH ACUTE
u'\xf5' # 0xCF -> LATIN SMALL LETTER O WITH TILDE
u'}' # 0xD0 -> RIGHT CURLY BRACKET
u'J' # 0xD1 -> LATIN CAPITAL LETTER J
u'K' # 0xD2 -> LATIN CAPITAL LETTER K
u'L' # 0xD3 -> LATIN CAPITAL LETTER L
u'M' # 0xD4 -> LATIN CAPITAL LETTER M
u'N' # 0xD5 -> LATIN CAPITAL LETTER N
u'O' # 0xD6 -> LATIN CAPITAL LETTER O
u'P' # 0xD7 -> LATIN CAPITAL LETTER P
u'Q' # 0xD8 -> LATIN CAPITAL LETTER Q
u'R' # 0xD9 -> LATIN CAPITAL LETTER R
u'\xb9' # 0xDA -> SUPERSCRIPT ONE
u'\xfb' # 0xDB -> LATIN SMALL LETTER U WITH CIRCUMFLEX
u'\xfc' # 0xDC -> LATIN SMALL LETTER U WITH DIAERESIS
u'\xf9' # 0xDD -> LATIN SMALL LETTER U WITH GRAVE
u'\xfa' # 0xDE -> LATIN SMALL LETTER U WITH ACUTE
u'\xff' # 0xDF -> LATIN SMALL LETTER Y WITH DIAERESIS
u'\\' # 0xE0 -> REVERSE SOLIDUS
u'\xf7' # 0xE1 -> DIVISION SIGN
u'S' # 0xE2 -> LATIN CAPITAL LETTER S
u'T' # 0xE3 -> LATIN CAPITAL LETTER T
u'U' # 0xE4 -> LATIN CAPITAL LETTER U
u'V' # 0xE5 -> LATIN CAPITAL LETTER V
u'W' # 0xE6 -> LATIN CAPITAL LETTER W
u'X' # 0xE7 -> LATIN CAPITAL LETTER X
u'Y' # 0xE8 -> LATIN CAPITAL LETTER Y
u'Z' # 0xE9 -> LATIN CAPITAL LETTER Z
u'\xb2' # 0xEA -> SUPERSCRIPT TWO
u'\xd4' # 0xEB -> LATIN CAPITAL LETTER O WITH CIRCUMFLEX
u'\xd6' # 0xEC -> LATIN CAPITAL LETTER O WITH DIAERESIS
u'\xd2' # 0xED -> LATIN CAPITAL LETTER O WITH GRAVE
u'\xd3' # 0xEE -> LATIN CAPITAL LETTER O WITH ACUTE
u'\xd5' # 0xEF -> LATIN CAPITAL LETTER O WITH TILDE
u'0' # 0xF0 -> DIGIT ZERO
u'1' # 0xF1 -> DIGIT ONE
u'2' # 0xF2 -> DIGIT TWO
u'3' # 0xF3 -> DIGIT THREE
u'4' # 0xF4 -> DIGIT FOUR
u'5' # 0xF5 -> DIGIT FIVE
u'6' # 0xF6 -> DIGIT SIX
u'7' # 0xF7 -> DIGIT SEVEN
u'8' # 0xF8 -> DIGIT EIGHT
u'9' # 0xF9 -> DIGIT NINE
u'\xb3' # 0xFA -> SUPERSCRIPT THREE
u'\xdb' # 0xFB -> LATIN CAPITAL LETTER U WITH CIRCUMFLEX
u'\xdc' # 0xFC -> LATIN CAPITAL LETTER U WITH DIAERESIS
u'\xd9' # 0xFD -> LATIN CAPITAL LETTER U WITH GRAVE
u'\xda' # 0xFE -> LATIN CAPITAL LETTER U WITH ACUTE
u'\x9f' # 0xFF -> CONTROL
)
### Encoding table
encoding_table=codecs.charmap_build(decoding_table)
| mit |
knossos-project/PythonQt | examples/NicePyConsole/pygments/lexers/pascal.py | 5 | 40611 | # -*- coding: utf-8 -*-
"""
pygments.lexers.pascal
~~~~~~~~~~~~~~~~~~~~~~
Lexers for Pascal family languages.
:copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import Lexer, RegexLexer, include, bygroups, words, \
using, this, default
from pygments.util import get_bool_opt, get_list_opt
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Error
from pygments.scanner import Scanner
__all__ = ['DelphiLexer', 'Modula2Lexer', 'AdaLexer']
class DelphiLexer(Lexer):
"""
For `Delphi <http://www.borland.com/delphi/>`_ (Borland Object Pascal),
Turbo Pascal and Free Pascal source code.
Additional options accepted:
`turbopascal`
Highlight Turbo Pascal specific keywords (default: ``True``).
`delphi`
Highlight Borland Delphi specific keywords (default: ``True``).
`freepascal`
Highlight Free Pascal specific keywords (default: ``True``).
`units`
A list of units that should be considered builtin, supported are
``System``, ``SysUtils``, ``Classes`` and ``Math``.
Default is to consider all of them builtin.
"""
name = 'Delphi'
aliases = ['delphi', 'pas', 'pascal', 'objectpascal']
filenames = ['*.pas']
mimetypes = ['text/x-pascal']
TURBO_PASCAL_KEYWORDS = (
'absolute', 'and', 'array', 'asm', 'begin', 'break', 'case',
'const', 'constructor', 'continue', 'destructor', 'div', 'do',
'downto', 'else', 'end', 'file', 'for', 'function', 'goto',
'if', 'implementation', 'in', 'inherited', 'inline', 'interface',
'label', 'mod', 'nil', 'not', 'object', 'of', 'on', 'operator',
'or', 'packed', 'procedure', 'program', 'record', 'reintroduce',
'repeat', 'self', 'set', 'shl', 'shr', 'string', 'then', 'to',
'type', 'unit', 'until', 'uses', 'var', 'while', 'with', 'xor'
)
DELPHI_KEYWORDS = (
'as', 'class', 'except', 'exports', 'finalization', 'finally',
'initialization', 'is', 'library', 'on', 'property', 'raise',
'threadvar', 'try'
)
FREE_PASCAL_KEYWORDS = (
'dispose', 'exit', 'false', 'new', 'true'
)
BLOCK_KEYWORDS = set((
'begin', 'class', 'const', 'constructor', 'destructor', 'end',
'finalization', 'function', 'implementation', 'initialization',
'label', 'library', 'operator', 'procedure', 'program', 'property',
'record', 'threadvar', 'type', 'unit', 'uses', 'var'
))
FUNCTION_MODIFIERS = set((
'alias', 'cdecl', 'export', 'inline', 'interrupt', 'nostackframe',
'pascal', 'register', 'safecall', 'softfloat', 'stdcall',
'varargs', 'name', 'dynamic', 'near', 'virtual', 'external',
'override', 'assembler'
))
# XXX: those aren't global. but currently we know no way for defining
# them just for the type context.
DIRECTIVES = set((
'absolute', 'abstract', 'assembler', 'cppdecl', 'default', 'far',
'far16', 'forward', 'index', 'oldfpccall', 'private', 'protected',
'published', 'public'
))
BUILTIN_TYPES = set((
'ansichar', 'ansistring', 'bool', 'boolean', 'byte', 'bytebool',
'cardinal', 'char', 'comp', 'currency', 'double', 'dword',
'extended', 'int64', 'integer', 'iunknown', 'longbool', 'longint',
'longword', 'pansichar', 'pansistring', 'pbool', 'pboolean',
'pbyte', 'pbytearray', 'pcardinal', 'pchar', 'pcomp', 'pcurrency',
'pdate', 'pdatetime', 'pdouble', 'pdword', 'pextended', 'phandle',
'pint64', 'pinteger', 'plongint', 'plongword', 'pointer',
'ppointer', 'pshortint', 'pshortstring', 'psingle', 'psmallint',
'pstring', 'pvariant', 'pwidechar', 'pwidestring', 'pword',
'pwordarray', 'pwordbool', 'real', 'real48', 'shortint',
'shortstring', 'single', 'smallint', 'string', 'tclass', 'tdate',
'tdatetime', 'textfile', 'thandle', 'tobject', 'ttime', 'variant',
'widechar', 'widestring', 'word', 'wordbool'
))
BUILTIN_UNITS = {
'System': (
'abs', 'acquireexceptionobject', 'addr', 'ansitoutf8',
'append', 'arctan', 'assert', 'assigned', 'assignfile',
'beginthread', 'blockread', 'blockwrite', 'break', 'chdir',
'chr', 'close', 'closefile', 'comptocurrency', 'comptodouble',
'concat', 'continue', 'copy', 'cos', 'dec', 'delete',
'dispose', 'doubletocomp', 'endthread', 'enummodules',
'enumresourcemodules', 'eof', 'eoln', 'erase', 'exceptaddr',
'exceptobject', 'exclude', 'exit', 'exp', 'filepos', 'filesize',
'fillchar', 'finalize', 'findclasshinstance', 'findhinstance',
'findresourcehinstance', 'flush', 'frac', 'freemem',
'get8087cw', 'getdir', 'getlasterror', 'getmem',
'getmemorymanager', 'getmodulefilename', 'getvariantmanager',
'halt', 'hi', 'high', 'inc', 'include', 'initialize', 'insert',
'int', 'ioresult', 'ismemorymanagerset', 'isvariantmanagerset',
'length', 'ln', 'lo', 'low', 'mkdir', 'move', 'new', 'odd',
'olestrtostring', 'olestrtostrvar', 'ord', 'paramcount',
'paramstr', 'pi', 'pos', 'pred', 'ptr', 'pucs4chars', 'random',
'randomize', 'read', 'readln', 'reallocmem',
'releaseexceptionobject', 'rename', 'reset', 'rewrite', 'rmdir',
'round', 'runerror', 'seek', 'seekeof', 'seekeoln',
'set8087cw', 'setlength', 'setlinebreakstyle',
'setmemorymanager', 'setstring', 'settextbuf',
'setvariantmanager', 'sin', 'sizeof', 'slice', 'sqr', 'sqrt',
'str', 'stringofchar', 'stringtoolestr', 'stringtowidechar',
'succ', 'swap', 'trunc', 'truncate', 'typeinfo',
'ucs4stringtowidestring', 'unicodetoutf8', 'uniquestring',
'upcase', 'utf8decode', 'utf8encode', 'utf8toansi',
'utf8tounicode', 'val', 'vararrayredim', 'varclear',
'widecharlentostring', 'widecharlentostrvar',
'widechartostring', 'widechartostrvar',
'widestringtoucs4string', 'write', 'writeln'
),
'SysUtils': (
'abort', 'addexitproc', 'addterminateproc', 'adjustlinebreaks',
'allocmem', 'ansicomparefilename', 'ansicomparestr',
'ansicomparetext', 'ansidequotedstr', 'ansiextractquotedstr',
'ansilastchar', 'ansilowercase', 'ansilowercasefilename',
'ansipos', 'ansiquotedstr', 'ansisamestr', 'ansisametext',
'ansistrcomp', 'ansistricomp', 'ansistrlastchar', 'ansistrlcomp',
'ansistrlicomp', 'ansistrlower', 'ansistrpos', 'ansistrrscan',
'ansistrscan', 'ansistrupper', 'ansiuppercase',
'ansiuppercasefilename', 'appendstr', 'assignstr', 'beep',
'booltostr', 'bytetocharindex', 'bytetocharlen', 'bytetype',
'callterminateprocs', 'changefileext', 'charlength',
'chartobyteindex', 'chartobytelen', 'comparemem', 'comparestr',
'comparetext', 'createdir', 'createguid', 'currentyear',
'currtostr', 'currtostrf', 'date', 'datetimetofiledate',
'datetimetostr', 'datetimetostring', 'datetimetosystemtime',
'datetimetotimestamp', 'datetostr', 'dayofweek', 'decodedate',
'decodedatefully', 'decodetime', 'deletefile', 'directoryexists',
'diskfree', 'disksize', 'disposestr', 'encodedate', 'encodetime',
'exceptionerrormessage', 'excludetrailingbackslash',
'excludetrailingpathdelimiter', 'expandfilename',
'expandfilenamecase', 'expanduncfilename', 'extractfiledir',
'extractfiledrive', 'extractfileext', 'extractfilename',
'extractfilepath', 'extractrelativepath', 'extractshortpathname',
'fileage', 'fileclose', 'filecreate', 'filedatetodatetime',
'fileexists', 'filegetattr', 'filegetdate', 'fileisreadonly',
'fileopen', 'fileread', 'filesearch', 'fileseek', 'filesetattr',
'filesetdate', 'filesetreadonly', 'filewrite', 'finalizepackage',
'findclose', 'findcmdlineswitch', 'findfirst', 'findnext',
'floattocurr', 'floattodatetime', 'floattodecimal', 'floattostr',
'floattostrf', 'floattotext', 'floattotextfmt', 'fmtloadstr',
'fmtstr', 'forcedirectories', 'format', 'formatbuf', 'formatcurr',
'formatdatetime', 'formatfloat', 'freeandnil', 'getcurrentdir',
'getenvironmentvariable', 'getfileversion', 'getformatsettings',
'getlocaleformatsettings', 'getmodulename', 'getpackagedescription',
'getpackageinfo', 'gettime', 'guidtostring', 'incamonth',
'includetrailingbackslash', 'includetrailingpathdelimiter',
'incmonth', 'initializepackage', 'interlockeddecrement',
'interlockedexchange', 'interlockedexchangeadd',
'interlockedincrement', 'inttohex', 'inttostr', 'isdelimiter',
'isequalguid', 'isleapyear', 'ispathdelimiter', 'isvalidident',
'languages', 'lastdelimiter', 'loadpackage', 'loadstr',
'lowercase', 'msecstotimestamp', 'newstr', 'nextcharindex', 'now',
'outofmemoryerror', 'quotedstr', 'raiselastoserror',
'raiselastwin32error', 'removedir', 'renamefile', 'replacedate',
'replacetime', 'safeloadlibrary', 'samefilename', 'sametext',
'setcurrentdir', 'showexception', 'sleep', 'stralloc', 'strbufsize',
'strbytetype', 'strcat', 'strcharlength', 'strcomp', 'strcopy',
'strdispose', 'strecopy', 'strend', 'strfmt', 'stricomp',
'stringreplace', 'stringtoguid', 'strlcat', 'strlcomp', 'strlcopy',
'strlen', 'strlfmt', 'strlicomp', 'strlower', 'strmove', 'strnew',
'strnextchar', 'strpas', 'strpcopy', 'strplcopy', 'strpos',
'strrscan', 'strscan', 'strtobool', 'strtobooldef', 'strtocurr',
'strtocurrdef', 'strtodate', 'strtodatedef', 'strtodatetime',
'strtodatetimedef', 'strtofloat', 'strtofloatdef', 'strtoint',
'strtoint64', 'strtoint64def', 'strtointdef', 'strtotime',
'strtotimedef', 'strupper', 'supports', 'syserrormessage',
'systemtimetodatetime', 'texttofloat', 'time', 'timestamptodatetime',
'timestamptomsecs', 'timetostr', 'trim', 'trimleft', 'trimright',
'tryencodedate', 'tryencodetime', 'tryfloattocurr', 'tryfloattodatetime',
'trystrtobool', 'trystrtocurr', 'trystrtodate', 'trystrtodatetime',
'trystrtofloat', 'trystrtoint', 'trystrtoint64', 'trystrtotime',
'unloadpackage', 'uppercase', 'widecomparestr', 'widecomparetext',
'widefmtstr', 'wideformat', 'wideformatbuf', 'widelowercase',
'widesamestr', 'widesametext', 'wideuppercase', 'win32check',
'wraptext'
),
'Classes': (
'activateclassgroup', 'allocatehwnd', 'bintohex', 'checksynchronize',
'collectionsequal', 'countgenerations', 'deallocatehwnd', 'equalrect',
'extractstrings', 'findclass', 'findglobalcomponent', 'getclass',
'groupdescendantswith', 'hextobin', 'identtoint',
'initinheritedcomponent', 'inttoident', 'invalidpoint',
'isuniqueglobalcomponentname', 'linestart', 'objectbinarytotext',
'objectresourcetotext', 'objecttexttobinary', 'objecttexttoresource',
'pointsequal', 'readcomponentres', 'readcomponentresex',
'readcomponentresfile', 'rect', 'registerclass', 'registerclassalias',
'registerclasses', 'registercomponents', 'registerintegerconsts',
'registernoicon', 'registernonactivex', 'smallpoint', 'startclassgroup',
'teststreamformat', 'unregisterclass', 'unregisterclasses',
'unregisterintegerconsts', 'unregistermoduleclasses',
'writecomponentresfile'
),
'Math': (
'arccos', 'arccosh', 'arccot', 'arccoth', 'arccsc', 'arccsch', 'arcsec',
'arcsech', 'arcsin', 'arcsinh', 'arctan2', 'arctanh', 'ceil',
'comparevalue', 'cosecant', 'cosh', 'cot', 'cotan', 'coth', 'csc',
'csch', 'cycletodeg', 'cycletograd', 'cycletorad', 'degtocycle',
'degtograd', 'degtorad', 'divmod', 'doubledecliningbalance',
'ensurerange', 'floor', 'frexp', 'futurevalue', 'getexceptionmask',
'getprecisionmode', 'getroundmode', 'gradtocycle', 'gradtodeg',
'gradtorad', 'hypot', 'inrange', 'interestpayment', 'interestrate',
'internalrateofreturn', 'intpower', 'isinfinite', 'isnan', 'iszero',
'ldexp', 'lnxp1', 'log10', 'log2', 'logn', 'max', 'maxintvalue',
'maxvalue', 'mean', 'meanandstddev', 'min', 'minintvalue', 'minvalue',
'momentskewkurtosis', 'netpresentvalue', 'norm', 'numberofperiods',
'payment', 'periodpayment', 'poly', 'popnstddev', 'popnvariance',
'power', 'presentvalue', 'radtocycle', 'radtodeg', 'radtograd',
'randg', 'randomrange', 'roundto', 'samevalue', 'sec', 'secant',
'sech', 'setexceptionmask', 'setprecisionmode', 'setroundmode',
'sign', 'simpleroundto', 'sincos', 'sinh', 'slndepreciation', 'stddev',
'sum', 'sumint', 'sumofsquares', 'sumsandsquares', 'syddepreciation',
'tan', 'tanh', 'totalvariance', 'variance'
)
}
ASM_REGISTERS = set((
'ah', 'al', 'ax', 'bh', 'bl', 'bp', 'bx', 'ch', 'cl', 'cr0',
'cr1', 'cr2', 'cr3', 'cr4', 'cs', 'cx', 'dh', 'di', 'dl', 'dr0',
'dr1', 'dr2', 'dr3', 'dr4', 'dr5', 'dr6', 'dr7', 'ds', 'dx',
'eax', 'ebp', 'ebx', 'ecx', 'edi', 'edx', 'es', 'esi', 'esp',
'fs', 'gs', 'mm0', 'mm1', 'mm2', 'mm3', 'mm4', 'mm5', 'mm6',
'mm7', 'si', 'sp', 'ss', 'st0', 'st1', 'st2', 'st3', 'st4', 'st5',
'st6', 'st7', 'xmm0', 'xmm1', 'xmm2', 'xmm3', 'xmm4', 'xmm5',
'xmm6', 'xmm7'
))
ASM_INSTRUCTIONS = set((
'aaa', 'aad', 'aam', 'aas', 'adc', 'add', 'and', 'arpl', 'bound',
'bsf', 'bsr', 'bswap', 'bt', 'btc', 'btr', 'bts', 'call', 'cbw',
'cdq', 'clc', 'cld', 'cli', 'clts', 'cmc', 'cmova', 'cmovae',
'cmovb', 'cmovbe', 'cmovc', 'cmovcxz', 'cmove', 'cmovg',
'cmovge', 'cmovl', 'cmovle', 'cmovna', 'cmovnae', 'cmovnb',
'cmovnbe', 'cmovnc', 'cmovne', 'cmovng', 'cmovnge', 'cmovnl',
'cmovnle', 'cmovno', 'cmovnp', 'cmovns', 'cmovnz', 'cmovo',
'cmovp', 'cmovpe', 'cmovpo', 'cmovs', 'cmovz', 'cmp', 'cmpsb',
'cmpsd', 'cmpsw', 'cmpxchg', 'cmpxchg486', 'cmpxchg8b', 'cpuid',
'cwd', 'cwde', 'daa', 'das', 'dec', 'div', 'emms', 'enter', 'hlt',
'ibts', 'icebp', 'idiv', 'imul', 'in', 'inc', 'insb', 'insd',
'insw', 'int', 'int01', 'int03', 'int1', 'int3', 'into', 'invd',
'invlpg', 'iret', 'iretd', 'iretw', 'ja', 'jae', 'jb', 'jbe',
'jc', 'jcxz', 'jcxz', 'je', 'jecxz', 'jg', 'jge', 'jl', 'jle',
'jmp', 'jna', 'jnae', 'jnb', 'jnbe', 'jnc', 'jne', 'jng', 'jnge',
'jnl', 'jnle', 'jno', 'jnp', 'jns', 'jnz', 'jo', 'jp', 'jpe',
'jpo', 'js', 'jz', 'lahf', 'lar', 'lcall', 'lds', 'lea', 'leave',
'les', 'lfs', 'lgdt', 'lgs', 'lidt', 'ljmp', 'lldt', 'lmsw',
'loadall', 'loadall286', 'lock', 'lodsb', 'lodsd', 'lodsw',
'loop', 'loope', 'loopne', 'loopnz', 'loopz', 'lsl', 'lss', 'ltr',
'mov', 'movd', 'movq', 'movsb', 'movsd', 'movsw', 'movsx',
'movzx', 'mul', 'neg', 'nop', 'not', 'or', 'out', 'outsb', 'outsd',
'outsw', 'pop', 'popa', 'popad', 'popaw', 'popf', 'popfd', 'popfw',
'push', 'pusha', 'pushad', 'pushaw', 'pushf', 'pushfd', 'pushfw',
'rcl', 'rcr', 'rdmsr', 'rdpmc', 'rdshr', 'rdtsc', 'rep', 'repe',
'repne', 'repnz', 'repz', 'ret', 'retf', 'retn', 'rol', 'ror',
'rsdc', 'rsldt', 'rsm', 'sahf', 'sal', 'salc', 'sar', 'sbb',
'scasb', 'scasd', 'scasw', 'seta', 'setae', 'setb', 'setbe',
'setc', 'setcxz', 'sete', 'setg', 'setge', 'setl', 'setle',
'setna', 'setnae', 'setnb', 'setnbe', 'setnc', 'setne', 'setng',
'setnge', 'setnl', 'setnle', 'setno', 'setnp', 'setns', 'setnz',
'seto', 'setp', 'setpe', 'setpo', 'sets', 'setz', 'sgdt', 'shl',
'shld', 'shr', 'shrd', 'sidt', 'sldt', 'smi', 'smint', 'smintold',
'smsw', 'stc', 'std', 'sti', 'stosb', 'stosd', 'stosw', 'str',
'sub', 'svdc', 'svldt', 'svts', 'syscall', 'sysenter', 'sysexit',
'sysret', 'test', 'ud1', 'ud2', 'umov', 'verr', 'verw', 'wait',
'wbinvd', 'wrmsr', 'wrshr', 'xadd', 'xbts', 'xchg', 'xlat',
'xlatb', 'xor'
))
def __init__(self, **options):
Lexer.__init__(self, **options)
self.keywords = set()
if get_bool_opt(options, 'turbopascal', True):
self.keywords.update(self.TURBO_PASCAL_KEYWORDS)
if get_bool_opt(options, 'delphi', True):
self.keywords.update(self.DELPHI_KEYWORDS)
if get_bool_opt(options, 'freepascal', True):
self.keywords.update(self.FREE_PASCAL_KEYWORDS)
self.builtins = set()
for unit in get_list_opt(options, 'units', list(self.BUILTIN_UNITS)):
self.builtins.update(self.BUILTIN_UNITS[unit])
def get_tokens_unprocessed(self, text):
scanner = Scanner(text, re.DOTALL | re.MULTILINE | re.IGNORECASE)
stack = ['initial']
in_function_block = False
in_property_block = False
was_dot = False
next_token_is_function = False
next_token_is_property = False
collect_labels = False
block_labels = set()
brace_balance = [0, 0]
while not scanner.eos:
token = Error
if stack[-1] == 'initial':
if scanner.scan(r'\s+'):
token = Text
elif scanner.scan(r'\{.*?\}|\(\*.*?\*\)'):
if scanner.match.startswith('$'):
token = Comment.Preproc
else:
token = Comment.Multiline
elif scanner.scan(r'//.*?$'):
token = Comment.Single
elif scanner.scan(r'[-+*\/=<>:;,.@\^]'):
token = Operator
# stop label highlighting on next ";"
if collect_labels and scanner.match == ';':
collect_labels = False
elif scanner.scan(r'[\(\)\[\]]+'):
token = Punctuation
# abort function naming ``foo = Function(...)``
next_token_is_function = False
# if we are in a function block we count the open
# braces because ootherwise it's impossible to
# determine the end of the modifier context
if in_function_block or in_property_block:
if scanner.match == '(':
brace_balance[0] += 1
elif scanner.match == ')':
brace_balance[0] -= 1
elif scanner.match == '[':
brace_balance[1] += 1
elif scanner.match == ']':
brace_balance[1] -= 1
elif scanner.scan(r'[A-Za-z_][A-Za-z_0-9]*'):
lowercase_name = scanner.match.lower()
if lowercase_name == 'result':
token = Name.Builtin.Pseudo
elif lowercase_name in self.keywords:
token = Keyword
# if we are in a special block and a
# block ending keyword occours (and the parenthesis
# is balanced) we end the current block context
if (in_function_block or in_property_block) and \
lowercase_name in self.BLOCK_KEYWORDS and \
brace_balance[0] <= 0 and \
brace_balance[1] <= 0:
in_function_block = False
in_property_block = False
brace_balance = [0, 0]
block_labels = set()
if lowercase_name in ('label', 'goto'):
collect_labels = True
elif lowercase_name == 'asm':
stack.append('asm')
elif lowercase_name == 'property':
in_property_block = True
next_token_is_property = True
elif lowercase_name in ('procedure', 'operator',
'function', 'constructor',
'destructor'):
in_function_block = True
next_token_is_function = True
# we are in a function block and the current name
# is in the set of registered modifiers. highlight
# it as pseudo keyword
elif in_function_block and \
lowercase_name in self.FUNCTION_MODIFIERS:
token = Keyword.Pseudo
# if we are in a property highlight some more
# modifiers
elif in_property_block and \
lowercase_name in ('read', 'write'):
token = Keyword.Pseudo
next_token_is_function = True
# if the last iteration set next_token_is_function
# to true we now want this name highlighted as
# function. so do that and reset the state
elif next_token_is_function:
# Look if the next token is a dot. If yes it's
# not a function, but a class name and the
# part after the dot a function name
if scanner.test(r'\s*\.\s*'):
token = Name.Class
# it's not a dot, our job is done
else:
token = Name.Function
next_token_is_function = False
# same for properties
elif next_token_is_property:
token = Name.Property
next_token_is_property = False
# Highlight this token as label and add it
# to the list of known labels
elif collect_labels:
token = Name.Label
block_labels.add(scanner.match.lower())
# name is in list of known labels
elif lowercase_name in block_labels:
token = Name.Label
elif lowercase_name in self.BUILTIN_TYPES:
token = Keyword.Type
elif lowercase_name in self.DIRECTIVES:
token = Keyword.Pseudo
# builtins are just builtins if the token
# before isn't a dot
elif not was_dot and lowercase_name in self.builtins:
token = Name.Builtin
else:
token = Name
elif scanner.scan(r"'"):
token = String
stack.append('string')
elif scanner.scan(r'\#(\d+|\$[0-9A-Fa-f]+)'):
token = String.Char
elif scanner.scan(r'\$[0-9A-Fa-f]+'):
token = Number.Hex
elif scanner.scan(r'\d+(?![eE]|\.[^.])'):
token = Number.Integer
elif scanner.scan(r'\d+(\.\d+([eE][+-]?\d+)?|[eE][+-]?\d+)'):
token = Number.Float
else:
# if the stack depth is deeper than once, pop
if len(stack) > 1:
stack.pop()
scanner.get_char()
elif stack[-1] == 'string':
if scanner.scan(r"''"):
token = String.Escape
elif scanner.scan(r"'"):
token = String
stack.pop()
elif scanner.scan(r"[^']*"):
token = String
else:
scanner.get_char()
stack.pop()
elif stack[-1] == 'asm':
if scanner.scan(r'\s+'):
token = Text
elif scanner.scan(r'end'):
token = Keyword
stack.pop()
elif scanner.scan(r'\{.*?\}|\(\*.*?\*\)'):
if scanner.match.startswith('$'):
token = Comment.Preproc
else:
token = Comment.Multiline
elif scanner.scan(r'//.*?$'):
token = Comment.Single
elif scanner.scan(r"'"):
token = String
stack.append('string')
elif scanner.scan(r'@@[A-Za-z_][A-Za-z_0-9]*'):
token = Name.Label
elif scanner.scan(r'[A-Za-z_][A-Za-z_0-9]*'):
lowercase_name = scanner.match.lower()
if lowercase_name in self.ASM_INSTRUCTIONS:
token = Keyword
elif lowercase_name in self.ASM_REGISTERS:
token = Name.Builtin
else:
token = Name
elif scanner.scan(r'[-+*\/=<>:;,.@\^]+'):
token = Operator
elif scanner.scan(r'[\(\)\[\]]+'):
token = Punctuation
elif scanner.scan(r'\$[0-9A-Fa-f]+'):
token = Number.Hex
elif scanner.scan(r'\d+(?![eE]|\.[^.])'):
token = Number.Integer
elif scanner.scan(r'\d+(\.\d+([eE][+-]?\d+)?|[eE][+-]?\d+)'):
token = Number.Float
else:
scanner.get_char()
stack.pop()
# save the dot!!!11
if scanner.match.strip():
was_dot = scanner.match == '.'
yield scanner.start_pos, token, scanner.match or ''
class Modula2Lexer(RegexLexer):
    """
    For `Modula-2 <http://www.modula2.org/>`_ source code.

    Additional options that determine which keywords are highlighted:

    `pim`
        Select PIM Modula-2 dialect (default: True).
    `iso`
        Select ISO Modula-2 dialect (default: False).
    `objm2`
        Select Objective Modula-2 dialect (default: False).
    `gm2ext`
        Also highlight GNU extensions (default: False).

    .. versionadded:: 1.3
    """
    name = 'Modula-2'
    aliases = ['modula2', 'm2']
    filenames = ['*.def', '*.mod']
    mimetypes = ['text/x-modula2']

    flags = re.MULTILINE | re.DOTALL

    tokens = {
        'whitespace': [
            (r'\n+', Text),  # blank lines
            (r'\s+', Text),  # whitespace
        ],
        'identifiers': [
            (r'([a-zA-Z_\$][\w\$]*)', Name),
        ],
        'numliterals': [
            (r'[01]+B', Number.Bin),                       # binary number (ObjM2)
            (r'[0-7]+B', Number.Oct),                      # octal number (PIM + ISO)
            (r'[0-7]+C', Number.Oct),                      # char code (PIM + ISO)
            (r'[0-9A-F]+C', Number.Hex),                   # char code (ObjM2)
            (r'[0-9A-F]+H', Number.Hex),                   # hexadecimal number
            (r'[0-9]+\.[0-9]+E[+-][0-9]+', Number.Float),  # real number
            (r'[0-9]+\.[0-9]+', Number.Float),             # real number
            (r'[0-9]+', Number.Integer),                   # decimal whole number
        ],
        'strings': [
            (r"'(\\\\|\\'|[^'])*'", String),  # single quoted string
            (r'"(\\\\|\\"|[^"])*"', String),  # double quoted string
        ],
        'operators': [
            (r'[*/+=#~&<>\^-]', Operator),
            (r':=', Operator),    # assignment
            (r'@', Operator),     # pointer deref (ISO)
            (r'\.\.', Operator),  # ellipsis or range
            (r'`', Operator),     # Smalltalk message (ObjM2)
            (r'::', Operator),    # type conversion (ObjM2)
        ],
        'punctuation': [
            (r'[\(\)\[\]{},.:;|]', Punctuation),
        ],
        'comments': [
            (r'//.*?\n', Comment.Single),         # ObjM2
            (r'/\*(.*?)\*/', Comment.Multiline),  # ObjM2
            (r'\(\*([^\$].*?)\*\)', Comment.Multiline),
            # TO DO: nesting of (* ... *) comments
        ],
        'pragmas': [
            (r'\(\*\$(.*?)\*\)', Comment.Preproc),  # PIM
            (r'<\*(.*?)\*>', Comment.Preproc),      # ISO + ObjM2
        ],
        'root': [
            include('whitespace'),
            include('comments'),
            include('pragmas'),
            include('identifiers'),
            include('numliterals'),
            include('strings'),
            include('operators'),
            include('punctuation'),
        ]
    }

    pim_reserved_words = [
        # 40 reserved words
        'AND', 'ARRAY', 'BEGIN', 'BY', 'CASE', 'CONST', 'DEFINITION',
        'DIV', 'DO', 'ELSE', 'ELSIF', 'END', 'EXIT', 'EXPORT', 'FOR',
        'FROM', 'IF', 'IMPLEMENTATION', 'IMPORT', 'IN', 'LOOP', 'MOD',
        'MODULE', 'NOT', 'OF', 'OR', 'POINTER', 'PROCEDURE', 'QUALIFIED',
        'RECORD', 'REPEAT', 'RETURN', 'SET', 'THEN', 'TO', 'TYPE',
        'UNTIL', 'VAR', 'WHILE', 'WITH',
    ]

    pim_pervasives = [
        # 31 pervasives
        'ABS', 'BITSET', 'BOOLEAN', 'CAP', 'CARDINAL', 'CHAR', 'CHR', 'DEC',
        'DISPOSE', 'EXCL', 'FALSE', 'FLOAT', 'HALT', 'HIGH', 'INC', 'INCL',
        'INTEGER', 'LONGINT', 'LONGREAL', 'MAX', 'MIN', 'NEW', 'NIL', 'ODD',
        'ORD', 'PROC', 'REAL', 'SIZE', 'TRUE', 'TRUNC', 'VAL',
    ]

    iso_reserved_words = [
        # 46 reserved words
        'AND', 'ARRAY', 'BEGIN', 'BY', 'CASE', 'CONST', 'DEFINITION', 'DIV',
        'DO', 'ELSE', 'ELSIF', 'END', 'EXCEPT', 'EXIT', 'EXPORT', 'FINALLY',
        'FOR', 'FORWARD', 'FROM', 'IF', 'IMPLEMENTATION', 'IMPORT', 'IN',
        'LOOP', 'MOD', 'MODULE', 'NOT', 'OF', 'OR', 'PACKEDSET', 'POINTER',
        'PROCEDURE', 'QUALIFIED', 'RECORD', 'REPEAT', 'REM', 'RETRY',
        'RETURN', 'SET', 'THEN', 'TO', 'TYPE', 'UNTIL', 'VAR', 'WHILE',
        'WITH',
    ]

    iso_pervasives = [
        # 42 pervasives
        'ABS', 'BITSET', 'BOOLEAN', 'CAP', 'CARDINAL', 'CHAR', 'CHR', 'CMPLX',
        'COMPLEX', 'DEC', 'DISPOSE', 'EXCL', 'FALSE', 'FLOAT', 'HALT', 'HIGH',
        'IM', 'INC', 'INCL', 'INT', 'INTEGER', 'INTERRUPTIBLE', 'LENGTH',
        'LFLOAT', 'LONGCOMPLEX', 'LONGINT', 'LONGREAL', 'MAX', 'MIN', 'NEW',
        'NIL', 'ODD', 'ORD', 'PROC', 'PROTECTION', 'RE', 'REAL', 'SIZE',
        'TRUE', 'TRUNC', 'UNINTERRUPTIBLE',  # spelling fixed (was 'UNINTERRUBTIBLE')
        'VAL',
    ]

    objm2_reserved_words = [
        # base language, 42 reserved words
        'AND', 'ARRAY', 'BEGIN', 'BY', 'CASE', 'CONST', 'DEFINITION', 'DIV',
        'DO', 'ELSE', 'ELSIF', 'END', 'ENUM', 'EXIT', 'FOR', 'FROM', 'IF',
        'IMMUTABLE', 'IMPLEMENTATION', 'IMPORT', 'IN', 'IS', 'LOOP', 'MOD',
        'MODULE', 'NOT', 'OF', 'OPAQUE', 'OR', 'POINTER', 'PROCEDURE',
        'RECORD', 'REPEAT', 'RETURN', 'SET', 'THEN', 'TO', 'TYPE',
        'UNTIL', 'VAR', 'VARIADIC', 'WHILE',
        # OO extensions, 16 reserved words
        'BYCOPY', 'BYREF', 'CLASS', 'CONTINUE', 'CRITICAL', 'INOUT', 'METHOD',
        'ON', 'OPTIONAL', 'OUT', 'PRIVATE', 'PROTECTED', 'PROTOCOL', 'PUBLIC',
        'SUPER', 'TRY',
    ]

    objm2_pervasives = [
        # base language, 38 pervasives
        'ABS', 'BITSET', 'BOOLEAN', 'CARDINAL', 'CHAR', 'CHR', 'DISPOSE',
        'FALSE', 'HALT', 'HIGH', 'INTEGER', 'INRANGE', 'LENGTH', 'LONGCARD',
        'LONGINT', 'LONGREAL', 'MAX', 'MIN', 'NEG', 'NEW', 'NEXTV', 'NIL',
        'OCTET', 'ODD', 'ORD', 'PRED', 'PROC', 'READ', 'REAL', 'SUCC', 'TMAX',
        'TMIN', 'TRUE', 'TSIZE', 'UNICHAR', 'VAL', 'WRITE', 'WRITEF',
        # OO extensions, 3 pervasives
        'OBJECT', 'NO', 'YES',
    ]

    gnu_reserved_words = [
        # 10 additional reserved words
        'ASM', '__ATTRIBUTE__', '__BUILTIN__', '__COLUMN__', '__DATE__',
        '__FILE__', '__FUNCTION__', '__LINE__', '__MODULE__', 'VOLATILE',
    ]

    gnu_pervasives = [
        # 21 identifiers, actually from pseudo-module SYSTEM
        # but we will highlight them as if they were pervasives
        'BITSET8', 'BITSET16', 'BITSET32', 'CARDINAL8', 'CARDINAL16',
        'CARDINAL32', 'CARDINAL64', 'COMPLEX32', 'COMPLEX64', 'COMPLEX96',
        'COMPLEX128', 'INTEGER8', 'INTEGER16', 'INTEGER32', 'INTEGER64',
        'REAL8', 'REAL16', 'REAL32', 'REAL96', 'REAL128', 'THROW',
    ]

    def __init__(self, **options):
        # Pick the dialect-specific keyword/pervasive sets once, up front.
        # NOTE: the documented 'pim' option is never read; PIM is simply
        # the fallback when neither 'iso' nor 'objm2' is requested.
        self.reserved_words = set()
        self.pervasives = set()
        # ISO Modula-2
        if get_bool_opt(options, 'iso', False):
            self.reserved_words.update(self.iso_reserved_words)
            self.pervasives.update(self.iso_pervasives)
        # Objective Modula-2
        elif get_bool_opt(options, 'objm2', False):
            self.reserved_words.update(self.objm2_reserved_words)
            self.pervasives.update(self.objm2_pervasives)
        # PIM Modula-2 (DEFAULT)
        else:
            self.reserved_words.update(self.pim_reserved_words)
            self.pervasives.update(self.pim_pervasives)
        # GNU extensions
        if get_bool_opt(options, 'gm2ext', False):
            self.reserved_words.update(self.gnu_reserved_words)
            self.pervasives.update(self.gnu_pervasives)
        # initialise
        RegexLexer.__init__(self, **options)

    def get_tokens_unprocessed(self, text):
        # Post-process plain Name tokens: promote dialect keywords and
        # pervasive identifiers chosen in __init__.
        for index, token, value in RegexLexer.get_tokens_unprocessed(self, text):
            # check for reserved words and pervasives
            if token is Name:
                if value in self.reserved_words:
                    token = Keyword.Reserved
                elif value in self.pervasives:
                    token = Keyword.Pervasive
            # return result
            yield index, token, value
class AdaLexer(RegexLexer):
    """
    For Ada source code.

    .. versionadded:: 1.3
    """

    name = 'Ada'
    aliases = ['ada', 'ada95', 'ada2005']
    filenames = ['*.adb', '*.ads', '*.ada']
    mimetypes = ['text/x-ada']

    flags = re.MULTILINE | re.I  # Ignore case

    tokens = {
        'root': [
            (r'[^\S\n]+', Text),
            (r'--.*?\n', Comment.Single),
            (r'[^\S\n]+', Text),  # duplicate of the first rule; harmless
            (r'function|procedure|entry', Keyword.Declaration, 'subprogram'),
            (r'(subtype|type)(\s+)([a-z0-9_]+)',
             bygroups(Keyword.Declaration, Text, Keyword.Type), 'type_def'),
            (r'task|protected', Keyword.Declaration),
            (r'(subtype)(\s+)', bygroups(Keyword.Declaration, Text)),
            (r'(end)(\s+)', bygroups(Keyword.Reserved, Text), 'end'),
            (r'(pragma)(\s+)(\w+)', bygroups(Keyword.Reserved, Text,
                                             Comment.Preproc)),
            (r'(true|false|null)\b', Keyword.Constant),
            (words((
                'Address', 'Byte', 'Boolean', 'Character', 'Controlled', 'Count', 'Cursor',
                'Duration', 'File_Mode', 'File_Type', 'Float', 'Generator', 'Integer', 'Long_Float',
                'Long_Integer', 'Long_Long_Float', 'Long_Long_Integer', 'Natural', 'Positive',
                'Reference_Type', 'Short_Float', 'Short_Integer', 'Short_Short_Float',
                'Short_Short_Integer', 'String', 'Wide_Character', 'Wide_String'), suffix=r'\b'),
             Keyword.Type),
            # Fix: '(\s+else)' is now optional like '(\s+then)' above, so a
            # bare 'or' is classified here (Operator.Word) like 'and'/'in',
            # instead of falling through to the Keyword.Reserved list below.
            (r'(and(\s+then)?|in|mod|not|or(\s+else)?|rem)\b', Operator.Word),
            (r'generic|private', Keyword.Declaration),
            (r'package', Keyword.Declaration, 'package'),
            (r'array\b', Keyword.Reserved, 'array_def'),
            (r'(with|use)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
            (r'([a-z0-9_]+)(\s*)(:)(\s*)(constant)',
             bygroups(Name.Constant, Text, Punctuation, Text,
                      Keyword.Reserved)),
            (r'<<[a-z0-9_]+>>', Name.Label),
            (r'([a-z0-9_]+)(\s*)(:)(\s*)(declare|begin|loop|for|while)',
             bygroups(Name.Label, Text, Punctuation, Text, Keyword.Reserved)),
            (words((
                'abort', 'abs', 'abstract', 'accept', 'access', 'aliased', 'all',
                'array', 'at', 'begin', 'body', 'case', 'constant', 'declare',
                'delay', 'delta', 'digits', 'do', 'else', 'elsif', 'end', 'entry',
                'exception', 'exit', 'interface', 'for', 'goto', 'if', 'is', 'limited',
                'loop', 'new', 'null', 'of', 'or', 'others', 'out', 'overriding',
                'pragma', 'protected', 'raise', 'range', 'record', 'renames', 'requeue',
                'return', 'reverse', 'select', 'separate', 'subtype', 'synchronized',
                'task', 'tagged', 'terminate', 'then', 'type', 'until', 'when',
                'while', 'xor'), prefix=r'\b', suffix=r'\b'),
             Keyword.Reserved),
            (r'"[^"]*"', String),
            include('attribute'),
            include('numbers'),
            (r"'[^']'", String.Character),
            (r'([a-z0-9_]+)(\s*|[(,])', bygroups(Name, using(this))),
            (r"(<>|=>|:=|[()|:;,.'])", Punctuation),
            (r'[*<>+=/&-]', Operator),
            (r'\n+', Text),
        ],
        'numbers': [
            (r'[0-9_]+#[0-9a-f]+#', Number.Hex),
            (r'[0-9_]+\.[0-9_]*', Number.Float),
            (r'[0-9_]+', Number.Integer),
        ],
        'attribute': [
            (r"(')(\w+)", bygroups(Punctuation, Name.Attribute)),
        ],
        'subprogram': [
            (r'\(', Punctuation, ('#pop', 'formal_part')),
            (r';', Punctuation, '#pop'),
            (r'is\b', Keyword.Reserved, '#pop'),
            (r'"[^"]+"|[a-z0-9_]+', Name.Function),
            include('root'),
        ],
        'end': [
            (r'(if|case|record|loop|select)', Keyword.Reserved),
            (r'"[^"]+"|[\w.]+', Name.Function),
            (r'\s+', Text),
            (r';', Punctuation, '#pop'),
        ],
        'type_def': [
            (r';', Punctuation, '#pop'),
            (r'\(', Punctuation, 'formal_part'),
            (r'with|and|use', Keyword.Reserved),
            (r'array\b', Keyword.Reserved, ('#pop', 'array_def')),
            (r'record\b', Keyword.Reserved, 'record_def'),  # was ('record_def'): same string, misleading parens
            (r'(null record)(;)', bygroups(Keyword.Reserved, Punctuation), '#pop'),
            include('root'),
        ],
        'array_def': [
            (r';', Punctuation, '#pop'),
            (r'([a-z0-9_]+)(\s+)(range)', bygroups(Keyword.Type, Text,
                                                   Keyword.Reserved)),
            include('root'),
        ],
        'record_def': [
            (r'end record', Keyword.Reserved, '#pop'),
            include('root'),
        ],
        'import': [
            (r'[a-z0-9_.]+', Name.Namespace, '#pop'),
            default('#pop'),
        ],
        'formal_part': [
            (r'\)', Punctuation, '#pop'),
            (r'[a-z0-9_]+', Name.Variable),
            (r',|:[^=]', Punctuation),
            (r'(in|not|null|out|access)\b', Keyword.Reserved),
            include('root'),
        ],
        'package': [
            (r'body', Keyword.Declaration),
            (r'is\s+new|renames', Keyword.Reserved),
            (r'is', Keyword.Reserved, '#pop'),
            (r';', Punctuation, '#pop'),
            (r'\(', Punctuation, 'package_instantiation'),
            (r'([\w.]+)', Name.Class),
            include('root'),
        ],
        'package_instantiation': [
            (r'("[^"]+"|[a-z0-9_]+)(\s+)(=>)', bygroups(Name.Variable,
                                                        Text, Punctuation)),
            (r'[a-z0-9._\'"]', Text),
            (r'\)', Punctuation, '#pop'),
            include('root'),
        ],
    }
| lgpl-2.1 |
danlrobertson/servo | tests/wpt/web-platform-tests/tools/third_party/attrs/docs/conf.py | 53 | 4831 | # -*- coding: utf-8 -*-
import codecs
import os
import re
def read(*parts):
    """
    Build an absolute path from *parts* and and return the contents of the
    resulting file. Assume UTF-8 encoding.
    """
    base = os.path.abspath(os.path.dirname(__file__))
    target = os.path.join(base, *parts)
    with codecs.open(target, "rb", "utf-8") as handle:
        return handle.read()
def find_version(*file_paths):
    """
    Build a path from *file_paths* and search for a ``__version__``
    string inside.
    """
    contents = read(*file_paths)
    match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]",
                      contents, re.M)
    if not match:
        raise RuntimeError("Unable to find version string.")
    return match.group(1)
# -- General configuration ------------------------------------------------

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.doctest',
    'sphinx.ext.intersphinx',
    'sphinx.ext.todo',
]

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# The suffix of source filenames.
source_suffix = '.rst'

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = u'attrs'
copyright = u'2015, Hynek Schlawack'

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The full version, including alpha/beta/rc tags, parsed straight from the
# package source so the docs can never go out of sync with the code.
release = find_version("../src/attr/__init__.py")
# The short X.Y version (everything before the last dot of the release).
version = release.rsplit(u".", 1)[0]

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']

# If true, '()' will be appended to :func: etc. cross-reference text.
add_function_parentheses = True

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'


# -- Options for HTML output ----------------------------------------------

# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = "alabaster"
html_theme_options = {
    "font_family": '"Avenir Next", Calibri, "PT Sans", sans-serif',
    "head_font_family": '"Avenir Next", Calibri, "PT Sans", sans-serif',
    "font_size": "18px",
    "page_width": "980px",
}

# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
html_logo = "_static/attrs_logo.svg"

# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']

# If false, no module index is generated.
html_domain_indices = True

# If false, no index is generated.
html_use_index = True

# If true, the index is split into individual pages for each letter.
html_split_index = False

# If true, links to the reST sources are added to the pages.
html_show_sourcelink = False

# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
html_show_sphinx = True

# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
html_show_copyright = True

# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''

# Output file base name for HTML help builder.
htmlhelp_basename = 'attrsdoc'


# -- Options for manual page output ---------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    ('index', 'attrs', u'attrs Documentation',
     [u'Hynek Schlawack'], 1)
]


# -- Options for Texinfo output -------------------------------------------

# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
    ('index', 'attrs', u'attrs Documentation',
     u'Hynek Schlawack', 'attrs', 'One line description of project.',
     'Miscellaneous'),
]

# Map cross-references like :class:`list` to the online Python 3 docs.
intersphinx_mapping = {
    "https://docs.python.org/3": None,
}

# Allow non-local URIs so we can have images in CHANGELOG etc.
suppress_warnings = ['image.nonlocal_uri']
| mpl-2.0 |
melersh/Yasta | flask/testsuite/ext.py | 57 | 4720 | # -*- coding: utf-8 -*-
"""
flask.testsuite.ext
~~~~~~~~~~~~~~~~~~~
Tests the extension import thing.
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
from __future__ import with_statement
import sys
import unittest
from flask.testsuite import FlaskTestCase
class ExtImportHookTestCase(FlaskTestCase):
    """Exercises the ``flask.ext`` import hook, which redirects imports of
    ``flask.ext.foo`` to either the new-style ``flask_foo`` package or the
    old-style ``flaskext.foo`` package, whichever exists.

    NOTE: this is Python-2-only code (``reload`` builtin, ``xrange``).
    """

    def setup(self):
        # we clear this out for various reasons.  The most important one is
        # that a real flaskext could be in there which would disable our
        # fake package.  Secondly we want to make sure that the flaskext
        # import hook does not break on reloading.
        for entry, value in sys.modules.items():
            if (entry.startswith('flask.ext.') or
                entry.startswith('flask_') or
                entry.startswith('flaskext.') or
                entry == 'flaskext') and value is not None:
                sys.modules.pop(entry, None)
        from flask import ext
        reload(ext)

        # reloading must not add more hooks
        import_hooks = 0
        for item in sys.meta_path:
            cls = type(item)
            if cls.__module__ == 'flask.exthook' and \
               cls.__name__ == 'ExtensionImporter':
                import_hooks += 1
        self.assert_equal(import_hooks, 1)

    def teardown(self):
        # the hook must never leave dotted submodule names as attributes
        # on the flask.ext module object itself
        from flask import ext
        for key in ext.__dict__:
            self.assert_('.' not in key)

    def test_flaskext_new_simple_import_normal(self):
        from flask.ext.newext_simple import ext_id
        self.assert_equal(ext_id, 'newext_simple')

    def test_flaskext_new_simple_import_module(self):
        from flask.ext import newext_simple
        self.assert_equal(newext_simple.ext_id, 'newext_simple')
        self.assert_equal(newext_simple.__name__, 'flask_newext_simple')

    def test_flaskext_new_package_import_normal(self):
        from flask.ext.newext_package import ext_id
        self.assert_equal(ext_id, 'newext_package')

    def test_flaskext_new_package_import_module(self):
        from flask.ext import newext_package
        self.assert_equal(newext_package.ext_id, 'newext_package')
        self.assert_equal(newext_package.__name__, 'flask_newext_package')

    def test_flaskext_new_package_import_submodule_function(self):
        from flask.ext.newext_package.submodule import test_function
        self.assert_equal(test_function(), 42)

    def test_flaskext_new_package_import_submodule(self):
        from flask.ext.newext_package import submodule
        self.assert_equal(submodule.__name__, 'flask_newext_package.submodule')
        self.assert_equal(submodule.test_function(), 42)

    def test_flaskext_old_simple_import_normal(self):
        from flask.ext.oldext_simple import ext_id
        self.assert_equal(ext_id, 'oldext_simple')

    def test_flaskext_old_simple_import_module(self):
        from flask.ext import oldext_simple
        self.assert_equal(oldext_simple.ext_id, 'oldext_simple')
        self.assert_equal(oldext_simple.__name__, 'flaskext.oldext_simple')

    def test_flaskext_old_package_import_normal(self):
        from flask.ext.oldext_package import ext_id
        self.assert_equal(ext_id, 'oldext_package')

    def test_flaskext_old_package_import_module(self):
        from flask.ext import oldext_package
        self.assert_equal(oldext_package.ext_id, 'oldext_package')
        self.assert_equal(oldext_package.__name__, 'flaskext.oldext_package')

    def test_flaskext_old_package_import_submodule(self):
        from flask.ext.oldext_package import submodule
        self.assert_equal(submodule.__name__, 'flaskext.oldext_package.submodule')
        self.assert_equal(submodule.test_function(), 42)

    def test_flaskext_old_package_import_submodule_function(self):
        from flask.ext.oldext_package.submodule import test_function
        self.assert_equal(test_function(), 42)

    def test_flaskext_broken_package_no_module_caching(self):
        # a failed extension import must not leave a broken entry in
        # sys.modules -- every attempt should raise again
        for x in xrange(2):
            with self.assert_raises(ImportError):
                import flask.ext.broken

    def test_no_error_swallowing(self):
        # the hook must re-raise the *original* ImportError (message and
        # traceback pointing into the broken extension), not its own
        try:
            import flask.ext.broken
        except ImportError:
            exc_type, exc_value, tb = sys.exc_info()
            self.assert_(exc_type is ImportError)
            self.assert_equal(str(exc_value), 'No module named missing_module')
            self.assert_(tb.tb_frame.f_globals is globals())
            next = tb.tb_next
            self.assert_('flask_broken/__init__.py' in next.tb_frame.f_code.co_filename)
def suite():
    """Assemble the unittest suite for the extension import hook tests."""
    tests = unittest.TestSuite()
    tests.addTest(unittest.makeSuite(ExtImportHookTestCase))
    return tests
| mit |
MMaus/mutils | fmalibs/aviout.py | 2 | 1460 | import time, glob, os
from pylab import *
class AviOut:
    """Capture the current matplotlib figure frame by frame.

    Construct with a string ending in ``.avi`` to record a movie: each
    ``step()`` saves a numbered PNG into a scratch directory and ``stop()``
    encodes them into the AVI with mencoder.  Construct with a number to
    run interactively instead: each ``step()`` shows the figure and pauses
    that many seconds.
    """

    def __init__(self, pth, fps=25):
        self.n = 1
        self.fig = gcf()
        if type(pth) == str:
            pfx = pth[:-4]
            if pth[-4:].lower() != ".avi":
                # was `raise TypeError,"..."` -- py2-only syntax; the call
                # form below is valid on both Python 2 and 3
                raise TypeError("Filename pattern must end with .avi")
            if glob.glob(pfx):
                raise KeyError("A file/dirctory by the name '%s' exists -- bailing out" % pfx)
            # NOTE(review): pfx is interpolated into shell commands here and
            # below; paths containing spaces/shell metacharacters will break
            # (and are unsafe) -- confirm callers only pass simple names.
            os.system('mkdir -p %s' % pfx)
            self.pfx = pfx
            self.pth = pth
            self.fps = fps
            self.step = self._step_AVI
            self.stop = self._stop_AVI
        else:
            self.dur = float(pth)
            self.step = self._step_PAUSE
            self.stop = self._stop_PAUSE
        self.fig.set_visible(False)

    def _step_PAUSE(self):
        # Show the frame, wait self.dur seconds, hide until the next step.
        self.fig.set_visible(True)
        draw()
        time.sleep(self.dur)
        self.fig.set_visible(False)

    def _stop_PAUSE(self):
        # Leave the figure visible when the interactive run ends.
        self.fig.set_visible(True)
        draw()

    def _step_AVI(self):
        # Render the figure and save it as the next numbered frame PNG.
        self.fig.set_visible(True)
        self.n = self.n + 1
        draw()
        savefig("%s/fr-%04d.png" % (self.pfx, self.n))
        self.fig.set_visible(False)

    def _stop_AVI(self):
        """Encode the saved frames into the target AVI, always removing the
        scratch frame directory, and re-raise any encoder failure after
        the cleanup has run."""
        self.fig.set_visible(True)
        draw()
        exc = None
        try:
            os.system("mencoder mf://%s/fr-*.png "
                      "-mf fps=%d:type=png -ovc lavc "
                      "-lavcopts vcodec=mpeg4:mbd=2:trell"
                      " -oac copy -o %s" % (self.pfx, self.fps, self.pth))
        except Exception as ex:
            # Fix: original read `except ex:` -- `ex` was undefined at that
            # point, so any failure raised NameError instead of being caught.
            exc = ex
        os.system('rm -rf %s' % self.pfx)
        if exc is not None:
            raise exc
| gpl-2.0 |
CloudServer/cinder | cinder/tests/unit/monkey_patch_example/__init__.py | 57 | 1112 | # Copyright 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Example Module for testing utils.monkey_patch()."""
# Records the names of decorated functions in call order; inspected by the
# monkey-patch tests.
CALLED_FUNCTION = []


def example_decorator(name, function):
    """decorator for notify which is used from utils.monkey_patch().

    Appends *name* to CALLED_FUNCTION on every invocation, then delegates
    to the wrapped function unchanged.

    :param name: name of the function
    :param function: - object of the function
    :returns: function -- decorated function
    """
    import functools

    # functools.wraps preserves the wrapped function's __name__/__doc__ so
    # the patched function is still introspectable.
    @functools.wraps(function)
    def wrapped_func(*args, **kwarg):
        CALLED_FUNCTION.append(name)
        return function(*args, **kwarg)
    return wrapped_func
| apache-2.0 |
stochasticHydroTools/RotationalDiffusion | read_input/read_clones_file.py | 1 | 1575 | '''
Small module to read a file with the initial locations and orientation
of the rigid bodies.
'''
from __future__ import division, print_function
import numpy as np
import sys
sys.path.append('../')
from quaternion_integrator.quaternion import Quaternion
def read_clones_file(name_file):
    '''
    It reads a file with the initial locations and orientation
    of the rigid bodies.

    Expected format: optional "#" comments and blank lines anywhere; the
    first data line holds the number of bodies; each following data line
    holds "x y z q0 q1 q2 q3" (location plus an orientation quaternion,
    which is normalized before use).

    Input:
    name_file = string.
    Output:
    number_of_bodies = int declared on the first data line (0 if the file
                       contains no data lines).
    locations = locations of rigid bodies, numpy array shape (Nbodies, 3).
    orientations = orientations of rigid bodies, numpy array of Quaternions,
                   shape (Nbodies).
    '''
    comment_symbols = ['#']
    # Robustness fix: a file with no data lines previously left
    # number_of_bodies unbound and raised NameError on return.
    number_of_bodies = 0
    with open(name_file, 'r') as f:
        locations = []
        orientations = []
        i = 0
        for line in f:
            # Strip comments
            if comment_symbols[0] in line:
                line, comment = line.split(comment_symbols[0], 1)
            # Ignore blank lines
            line = line.strip()
            if line != '':
                if i == 0:
                    number_of_bodies = int(line.split()[0])
                else:
                    data = line.split()
                    location = [float(data[0]), float(data[1]), float(data[2])]
                    orientation = [float(data[3]), float(data[4]), float(data[5]), float(data[6])]
                    # normalize the quaternion before constructing it
                    norm_orientation = np.linalg.norm(orientation)
                    q = Quaternion(orientation / norm_orientation)
                    locations.append(location)
                    orientations.append(q)
                i += 1

    # Create and return numpy arrays
    locations = np.array(locations)
    orientations = np.array(orientations)
    return number_of_bodies, locations, orientations
| gpl-3.0 |
micahflee/supercipher | supercipher_gui/supercipher_gui.py | 1 | 6130 | import os, sys, subprocess, inspect, platform, argparse, socket, json
from PyQt4 import QtCore, QtGui
from file_selection import FileSelection
from passphrases import Passphrases
import common
try:
import supercipher
except ImportError:
sys.path.append(os.path.abspath(common.supercipher_gui_dir+"/.."))
import supercipher
from supercipher import strings
class Application(QtGui.QApplication):
    """Thin QApplication subclass that drives the program's Qt event loop."""
    def __init__(self):
        # Set before calling the base constructor -- presumably so X11
        # threading is initialized at startup (TODO confirm against Qt docs).
        self.setAttribute(QtCore.Qt.AA_X11InitThreads, True)
        QtGui.QApplication.__init__(self, sys.argv)
class SuperCipherGui(QtGui.QWidget):
    """Top-level SuperCipher window.

    Starts empty; callers invoke start_choose(), start_encrypt() or
    start_decrypt() to populate it for the desired mode.
    """
    def __init__(self):
        super(SuperCipherGui, self).__init__()
        self.setWindowTitle(strings._('supercipher'))
        # icon
        self.window_icon = QtGui.QIcon("{0}/icon.png".format(common.supercipher_gui_dir))
        self.setWindowIcon(self.window_icon)
    def create_layout(self):
        # lazily create the main vertical layout exactly once; later modes
        # add their widgets to the same layout
        if not hasattr(self, 'main_layout'):
            self.main_layout = QtGui.QVBoxLayout()
            self.setLayout(self.main_layout)
    def start_choose(self):
        """Show the initial encrypt/decrypt chooser buttons."""
        # encrypt button
        self.choose_encrypt_button = QtGui.QPushButton(strings._('gui_encrypt_button'))
        self.choose_encrypt_button.clicked.connect(self.encrypt_clicked)
        self.choose_decrypt_button = QtGui.QPushButton(strings._('gui_decrypt_button'))
        self.choose_decrypt_button.clicked.connect(self.decrypt_clicked)
        # main layout
        self.create_layout()
        self.main_layout.addWidget(self.choose_encrypt_button)
        self.main_layout.addWidget(self.choose_decrypt_button)
        self.show()
    def remove_choose_widgets(self):
        # NOTE(review): removeWidget() detaches the buttons from the layout
        # but does not hide or delete them -- confirm this looks right when
        # switching modes.
        self.main_layout.removeWidget(self.choose_encrypt_button)
        self.main_layout.removeWidget(self.choose_decrypt_button)
    def encrypt_clicked(self):
        self.remove_choose_widgets()
        self.start_encrypt()
    def decrypt_clicked(self):
        self.remove_choose_widgets()
        self.start_decrypt()
    def start_encrypt(self, encrypt_filenames=None, pubkey=None):
        """Build the encrypt-mode UI, pre-populating the file list with any
        filenames passed on the command line."""
        # file selection
        file_selection = FileSelection()
        if encrypt_filenames:
            for filename in encrypt_filenames:
                file_selection.file_list.add_file(filename)
        # passphrases
        passphrases = Passphrases()
        # main layout
        self.create_layout()
        self.main_layout.addLayout(file_selection)
        self.main_layout.addLayout(passphrases)
        self.show()
    def start_decrypt(self, decrypt_filename=None):
        """Build the decrypt-mode UI (currently just a placeholder)."""
        # label
        label = QtGui.QLabel("Decrypt is not implemented yet")
        # main layout
        self.create_layout()
        self.main_layout.addWidget(label)
        self.show()
    def alert(self, msg, icon=QtGui.QMessageBox.Warning):
        """Show a modal message box with the given message and icon."""
        dialog = QtGui.QMessageBox()
        dialog.setWindowTitle(strings._('supercipher'))
        dialog.setWindowIcon(self.window_icon)
        dialog.setText(msg)
        dialog.setIcon(icon)
        dialog.exec_()
def main():
    """Parse command-line arguments, validate them, and launch the
    SuperCipher GUI in chooser, encrypt or decrypt mode."""
    strings.load_strings(supercipher.common.supercipher_dir)

    # start the Qt app
    app = Application()

    # clean up when app quits
    def shutdown():
        # nothing to clean up yet
        pass
    app.connect(app, QtCore.SIGNAL("aboutToQuit()"), shutdown)

    # Fix: create the gui before validation -- the validation code below
    # reports problems through gui.alert(), but `gui` used to be created
    # only after validation, so any validation failure raised NameError.
    gui = SuperCipherGui()

    # parse arguments
    parser = argparse.ArgumentParser()
    parser.add_argument('-e', '--encrypt', metavar='filename', nargs='+', help=strings._('arg_help_encrypt'))
    parser.add_argument('-d', '--decrypt', metavar='filename', dest='decrypt', help=strings._('arg_help_decrypt'))
    parser.add_argument('-p', '--pubkey', metavar='public_key', dest='pubkey', help=strings._('arg_help_pubkey'))
    args = parser.parse_args()
    encrypt_filenames = args.encrypt
    decrypt_filename = args.decrypt
    pubkey = args.pubkey

    # convert filenames to absolute paths
    if encrypt_filenames:
        for i in range(len(encrypt_filenames)):
            encrypt_filenames[i] = os.path.abspath(encrypt_filenames[i])
    if decrypt_filename:
        decrypt_filename = os.path.abspath(decrypt_filename)

    # validation
    if encrypt_filenames and decrypt_filename:
        gui.alert(strings._('validation_dont_choose_two'))
        sys.exit(0)
    if encrypt_filenames:
        action = 'encrypt'
    elif decrypt_filename:
        action = 'decrypt'
    else:
        action = 'none'

    # encrypt validation
    if action == 'encrypt':
        # make sure encrypt_filenames is a list of valid files/folders
        if encrypt_filenames:
            valid = True
            error_msg = ''
            for filename in encrypt_filenames:
                if not os.path.exists(filename):
                    error_msg += strings._('validation_doesnt_exist').format(filename) + '\n\n'
                    valid = False
            if not valid:
                error_msg += strings._('validation_invalid_file')
                gui.alert(error_msg)
                sys.exit(0)

        # if pubkey is passed, make sure the fingerprint is valid
        if pubkey:
            try:
                supercipher.gpg.valid_pubkey(pubkey)
            # the bare names InvalidPubkeyLength etc. were undefined in this
            # module and would have raised NameError; qualified to the gpg
            # module that raises them (TODO confirm they live there)
            except supercipher.gpg.InvalidPubkeyLength:
                gui.alert(strings._('validation_pubkey_length'))
                sys.exit(0)
            except supercipher.gpg.InvalidPubkeyNotHex:
                gui.alert(strings._('validation_pubkey_not_hex'))
                sys.exit(0)
            except supercipher.gpg.MissingPubkey:
                gui.alert(strings._('validation_missing_pubkey'))
                sys.exit(0)

    elif action == 'decrypt':
        # make sure decrypt_filename is a valid file
        if decrypt_filename:
            if not os.path.isfile(decrypt_filename):
                gui.alert(strings._('validation_not_file').format(decrypt_filename))
                sys.exit(0)

    # show the gui in the requested mode
    if action == 'none':
        gui.start_choose()
    elif action == 'encrypt':
        gui.start_encrypt(encrypt_filenames, pubkey)
    elif action == 'decrypt':
        gui.start_decrypt(decrypt_filename)

    # all done
    sys.exit(app.exec_())


if __name__ == '__main__':
    main()
| gpl-3.0 |
infoxchange/lettuce | tests/integration/lib/Django-1.3/tests/regressiontests/forms/tests/widgets.py | 47 | 67270 | # -*- coding: utf-8 -*-
import datetime
from decimal import Decimal
import re
import time
from django.conf import settings
from django.core.files.uploadedfile import SimpleUploadedFile
from django.forms import *
from django.forms.widgets import RadioFieldRenderer
from django.utils import copycompat as copy
from django.utils import formats
from django.utils.safestring import mark_safe
from django.utils.translation import activate, deactivate
from django.utils.unittest import TestCase
class FormsWidgetTestCase(TestCase):
# Each Widget class corresponds to an HTML form widget. A Widget knows how to
# render itself, given a field name and some data. Widgets don't perform
# validation.
def test_textinput(self):
w = TextInput()
self.assertEqual(w.render('email', ''), u'<input type="text" name="email" />')
self.assertEqual(w.render('email', None), u'<input type="text" name="email" />')
self.assertEqual(w.render('email', 'test@example.com'), u'<input type="text" name="email" value="test@example.com" />')
self.assertEqual(w.render('email', 'some "quoted" & ampersanded value'), u'<input type="text" name="email" value="some "quoted" & ampersanded value" />')
self.assertEqual(w.render('email', 'test@example.com', attrs={'class': 'fun'}), u'<input type="text" name="email" value="test@example.com" class="fun" />')
# Note that doctest in Python 2.4 (and maybe 2.5?) doesn't support non-ascii
# characters in output, so we're displaying the repr() here.
self.assertEqual(w.render('email', 'ŠĐĆŽćžšđ', attrs={'class': 'fun'}), u'<input type="text" name="email" value="\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111" class="fun" />')
# You can also pass 'attrs' to the constructor:
w = TextInput(attrs={'class': 'fun'})
self.assertEqual(w.render('email', ''), u'<input type="text" class="fun" name="email" />')
self.assertEqual(w.render('email', 'foo@example.com'), u'<input type="text" class="fun" value="foo@example.com" name="email" />')
# 'attrs' passed to render() get precedence over those passed to the constructor:
w = TextInput(attrs={'class': 'pretty'})
self.assertEqual(w.render('email', '', attrs={'class': 'special'}), u'<input type="text" class="special" name="email" />')
# 'attrs' can be safe-strings if needed)
w = TextInput(attrs={'onBlur': mark_safe("function('foo')")})
self.assertEqual(w.render('email', ''), u'<input onBlur="function(\'foo\')" type="text" name="email" />')
def test_passwordinput(self):
w = PasswordInput()
self.assertEqual(w.render('email', ''), u'<input type="password" name="email" />')
self.assertEqual(w.render('email', None), u'<input type="password" name="email" />')
self.assertEqual(w.render('email', 'secret'), u'<input type="password" name="email" />')
# The render_value argument lets you specify whether the widget should render
# its value. For security reasons, this is off by default.
w = PasswordInput(render_value=True)
self.assertEqual(w.render('email', ''), u'<input type="password" name="email" />')
self.assertEqual(w.render('email', None), u'<input type="password" name="email" />')
self.assertEqual(w.render('email', 'test@example.com'), u'<input type="password" name="email" value="test@example.com" />')
self.assertEqual(w.render('email', 'some "quoted" & ampersanded value'), u'<input type="password" name="email" value="some "quoted" & ampersanded value" />')
self.assertEqual(w.render('email', 'test@example.com', attrs={'class': 'fun'}), u'<input type="password" name="email" value="test@example.com" class="fun" />')
# You can also pass 'attrs' to the constructor:
w = PasswordInput(attrs={'class': 'fun'}, render_value=True)
self.assertEqual(w.render('email', ''), u'<input type="password" class="fun" name="email" />')
self.assertEqual(w.render('email', 'foo@example.com'), u'<input type="password" class="fun" value="foo@example.com" name="email" />')
# 'attrs' passed to render() get precedence over those passed to the constructor:
w = PasswordInput(attrs={'class': 'pretty'}, render_value=True)
self.assertEqual(w.render('email', '', attrs={'class': 'special'}), u'<input type="password" class="special" name="email" />')
self.assertEqual(w.render('email', 'ŠĐĆŽćžšđ', attrs={'class': 'fun'}), u'<input type="password" class="fun" value="\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111" name="email" />')
def test_hiddeninput(self):
w = HiddenInput()
self.assertEqual(w.render('email', ''), u'<input type="hidden" name="email" />')
self.assertEqual(w.render('email', None), u'<input type="hidden" name="email" />')
self.assertEqual(w.render('email', 'test@example.com'), u'<input type="hidden" name="email" value="test@example.com" />')
self.assertEqual(w.render('email', 'some "quoted" & ampersanded value'), u'<input type="hidden" name="email" value="some "quoted" & ampersanded value" />')
self.assertEqual(w.render('email', 'test@example.com', attrs={'class': 'fun'}), u'<input type="hidden" name="email" value="test@example.com" class="fun" />')
# You can also pass 'attrs' to the constructor:
w = HiddenInput(attrs={'class': 'fun'})
self.assertEqual(w.render('email', ''), u'<input type="hidden" class="fun" name="email" />')
self.assertEqual(w.render('email', 'foo@example.com'), u'<input type="hidden" class="fun" value="foo@example.com" name="email" />')
# 'attrs' passed to render() get precedence over those passed to the constructor:
w = HiddenInput(attrs={'class': 'pretty'})
self.assertEqual(w.render('email', '', attrs={'class': 'special'}), u'<input type="hidden" class="special" name="email" />')
self.assertEqual(w.render('email', 'ŠĐĆŽćžšđ', attrs={'class': 'fun'}), u'<input type="hidden" class="fun" value="\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111" name="email" />')
# 'attrs' passed to render() get precedence over those passed to the constructor:
w = HiddenInput(attrs={'class': 'pretty'})
self.assertEqual(w.render('email', '', attrs={'class': 'special'}), u'<input type="hidden" class="special" name="email" />')
# Boolean values are rendered to their string forms ("True" and "False").
w = HiddenInput()
self.assertEqual(w.render('get_spam', False), u'<input type="hidden" name="get_spam" value="False" />')
self.assertEqual(w.render('get_spam', True), u'<input type="hidden" name="get_spam" value="True" />')
def test_multiplehiddeninput(self):
w = MultipleHiddenInput()
self.assertEqual(w.render('email', []), u'')
self.assertEqual(w.render('email', None), u'')
self.assertEqual(w.render('email', ['test@example.com']), u'<input type="hidden" name="email" value="test@example.com" />')
self.assertEqual(w.render('email', ['some "quoted" & ampersanded value']), u'<input type="hidden" name="email" value="some "quoted" & ampersanded value" />')
self.assertEqual(w.render('email', ['test@example.com', 'foo@example.com']), u'<input type="hidden" name="email" value="test@example.com" />\n<input type="hidden" name="email" value="foo@example.com" />')
self.assertEqual(w.render('email', ['test@example.com'], attrs={'class': 'fun'}), u'<input type="hidden" name="email" value="test@example.com" class="fun" />')
self.assertEqual(w.render('email', ['test@example.com', 'foo@example.com'], attrs={'class': 'fun'}), u'<input type="hidden" name="email" value="test@example.com" class="fun" />\n<input type="hidden" name="email" value="foo@example.com" class="fun" />')
# You can also pass 'attrs' to the constructor:
w = MultipleHiddenInput(attrs={'class': 'fun'})
self.assertEqual(w.render('email', []), u'')
self.assertEqual(w.render('email', ['foo@example.com']), u'<input type="hidden" class="fun" value="foo@example.com" name="email" />')
self.assertEqual(w.render('email', ['foo@example.com', 'test@example.com']), u'<input type="hidden" class="fun" value="foo@example.com" name="email" />\n<input type="hidden" class="fun" value="test@example.com" name="email" />')
# 'attrs' passed to render() get precedence over those passed to the constructor:
w = MultipleHiddenInput(attrs={'class': 'pretty'})
self.assertEqual(w.render('email', ['foo@example.com'], attrs={'class': 'special'}), u'<input type="hidden" class="special" value="foo@example.com" name="email" />')
self.assertEqual(w.render('email', ['ŠĐĆŽćžšđ'], attrs={'class': 'fun'}), u'<input type="hidden" class="fun" value="\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111" name="email" />')
# 'attrs' passed to render() get precedence over those passed to the constructor:
w = MultipleHiddenInput(attrs={'class': 'pretty'})
self.assertEqual(w.render('email', ['foo@example.com'], attrs={'class': 'special'}), u'<input type="hidden" class="special" value="foo@example.com" name="email" />')
# Each input gets a separate ID.
w = MultipleHiddenInput()
self.assertEqual(w.render('letters', list('abc'), attrs={'id': 'hideme'}), u'<input type="hidden" name="letters" value="a" id="hideme_0" />\n<input type="hidden" name="letters" value="b" id="hideme_1" />\n<input type="hidden" name="letters" value="c" id="hideme_2" />')
def test_fileinput(self):
# FileInput widgets don't ever show the value, because the old value is of no use
# if you are updating the form or if the provided file generated an error.
w = FileInput()
self.assertEqual(w.render('email', ''), u'<input type="file" name="email" />')
self.assertEqual(w.render('email', None), u'<input type="file" name="email" />')
self.assertEqual(w.render('email', 'test@example.com'), u'<input type="file" name="email" />')
self.assertEqual(w.render('email', 'some "quoted" & ampersanded value'), u'<input type="file" name="email" />')
self.assertEqual(w.render('email', 'test@example.com', attrs={'class': 'fun'}), u'<input type="file" name="email" class="fun" />')
# You can also pass 'attrs' to the constructor:
w = FileInput(attrs={'class': 'fun'})
self.assertEqual(w.render('email', ''), u'<input type="file" class="fun" name="email" />')
self.assertEqual(w.render('email', 'foo@example.com'), u'<input type="file" class="fun" name="email" />')
self.assertEqual(w.render('email', 'ŠĐĆŽćžšđ', attrs={'class': 'fun'}), u'<input type="file" class="fun" name="email" />')
# Test for the behavior of _has_changed for FileInput. The value of data will
# more than likely come from request.FILES. The value of initial data will
# likely be a filename stored in the database. Since its value is of no use to
# a FileInput it is ignored.
w = FileInput()
# No file was uploaded and no initial data.
self.assertFalse(w._has_changed(u'', None))
# A file was uploaded and no initial data.
self.assertTrue(w._has_changed(u'', {'filename': 'resume.txt', 'content': 'My resume'}))
# A file was not uploaded, but there is initial data
self.assertFalse(w._has_changed(u'resume.txt', None))
# A file was uploaded and there is initial data (file identity is not dealt
# with here)
self.assertTrue(w._has_changed('resume.txt', {'filename': 'resume.txt', 'content': 'My resume'}))
    def test_textarea(self):
        """
        Textarea renders a <textarea> (default rows="10" cols="40") whose
        contents are the given value, escaped unless marked safe; attrs
        from render() override constructor attrs.
        """
        w = Textarea()
        self.assertEqual(w.render('msg', ''), u'<textarea rows="10" cols="40" name="msg"></textarea>')
        self.assertEqual(w.render('msg', None), u'<textarea rows="10" cols="40" name="msg"></textarea>')
        self.assertEqual(w.render('msg', 'value'), u'<textarea rows="10" cols="40" name="msg">value</textarea>')
        self.assertEqual(w.render('msg', 'some "quoted" & ampersanded value'), u'<textarea rows="10" cols="40" name="msg">some "quoted" & ampersanded value</textarea>')
        # A value already marked safe is emitted without further escaping.
        self.assertEqual(w.render('msg', mark_safe('pre "quoted" value')), u'<textarea rows="10" cols="40" name="msg">pre "quoted" value</textarea>')
        self.assertEqual(w.render('msg', 'value', attrs={'class': 'pretty', 'rows': 20}), u'<textarea class="pretty" rows="20" cols="40" name="msg">value</textarea>')
        # You can also pass 'attrs' to the constructor:
        w = Textarea(attrs={'class': 'pretty'})
        self.assertEqual(w.render('msg', ''), u'<textarea rows="10" cols="40" name="msg" class="pretty"></textarea>')
        self.assertEqual(w.render('msg', 'example'), u'<textarea rows="10" cols="40" name="msg" class="pretty">example</textarea>')
        # 'attrs' passed to render() get precedence over those passed to the constructor:
        w = Textarea(attrs={'class': 'pretty'})
        self.assertEqual(w.render('msg', '', attrs={'class': 'special'}), u'<textarea rows="10" cols="40" name="msg" class="special"></textarea>')
        self.assertEqual(w.render('msg', 'ŠĐĆŽćžšđ', attrs={'class': 'fun'}), u'<textarea rows="10" cols="40" name="msg" class="fun">\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111</textarea>')
    def test_checkboxinput(self):
        """
        CheckboxInput renders an <input type="checkbox">, adding
        checked="checked" (plus a value attribute for non-boolean truthy
        values) when the value is considered checked. The check can be
        customized via a 'check_test' callable; exceptions it raises leave
        the box unchecked.
        """
        w = CheckboxInput()
        self.assertEqual(w.render('is_cool', ''), u'<input type="checkbox" name="is_cool" />')
        self.assertEqual(w.render('is_cool', None), u'<input type="checkbox" name="is_cool" />')
        self.assertEqual(w.render('is_cool', False), u'<input type="checkbox" name="is_cool" />')
        self.assertEqual(w.render('is_cool', True), u'<input checked="checked" type="checkbox" name="is_cool" />')
        # Using any value that's not in ('', None, False, True) will check the checkbox
        # and set the 'value' attribute.
        self.assertEqual(w.render('is_cool', 'foo'), u'<input checked="checked" type="checkbox" name="is_cool" value="foo" />')
        self.assertEqual(w.render('is_cool', False, attrs={'class': 'pretty'}), u'<input type="checkbox" name="is_cool" class="pretty" />')
        # You can also pass 'attrs' to the constructor:
        w = CheckboxInput(attrs={'class': 'pretty'})
        self.assertEqual(w.render('is_cool', ''), u'<input type="checkbox" class="pretty" name="is_cool" />')
        # 'attrs' passed to render() get precedence over those passed to the constructor:
        w = CheckboxInput(attrs={'class': 'pretty'})
        self.assertEqual(w.render('is_cool', '', attrs={'class': 'special'}), u'<input type="checkbox" class="special" name="is_cool" />')
        # You can pass 'check_test' to the constructor. This is a callable that takes the
        # value and returns True if the box should be checked.
        w = CheckboxInput(check_test=lambda value: value.startswith('hello'))
        self.assertEqual(w.render('greeting', ''), u'<input type="checkbox" name="greeting" />')
        self.assertEqual(w.render('greeting', 'hello'), u'<input checked="checked" type="checkbox" name="greeting" value="hello" />')
        self.assertEqual(w.render('greeting', 'hello there'), u'<input checked="checked" type="checkbox" name="greeting" value="hello there" />')
        self.assertEqual(w.render('greeting', 'hello & goodbye'), u'<input checked="checked" type="checkbox" name="greeting" value="hello & goodbye" />')
        # A subtlety: If the 'check_test' argument cannot handle a value and raises any
        # exception during its __call__, then the exception will be swallowed and the box
        # will not be checked. In this example, the 'check_test' assumes the value has a
        # startswith() method, which fails for the values True, False and None.
        self.assertEqual(w.render('greeting', True), u'<input type="checkbox" name="greeting" />')
        self.assertEqual(w.render('greeting', False), u'<input type="checkbox" name="greeting" />')
        self.assertEqual(w.render('greeting', None), u'<input type="checkbox" name="greeting" />')
        # The CheckboxInput widget will return False if the key is not found in the data
        # dictionary (because HTML form submission doesn't send any result for unchecked
        # checkboxes).
        self.assertFalse(w.value_from_datadict({}, {}, 'testing'))
        self.assertFalse(w._has_changed(None, None))
        self.assertFalse(w._has_changed(None, u''))
        self.assertFalse(w._has_changed(u'', None))
        self.assertFalse(w._has_changed(u'', u''))
        self.assertTrue(w._has_changed(False, u'on'))
        self.assertFalse(w._has_changed(True, u'on'))
        self.assertTrue(w._has_changed(True, u''))
def test_select(self):
w = Select()
self.assertEqual(w.render('beatle', 'J', choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo'))), """<select name="beatle">
<option value="J" selected="selected">John</option>
<option value="P">Paul</option>
<option value="G">George</option>
<option value="R">Ringo</option>
</select>""")
# If the value is None, none of the options are selected:
self.assertEqual(w.render('beatle', None, choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo'))), """<select name="beatle">
<option value="J">John</option>
<option value="P">Paul</option>
<option value="G">George</option>
<option value="R">Ringo</option>
</select>""")
# If the value corresponds to a label (but not to an option value), none of the options are selected:
self.assertEqual(w.render('beatle', 'John', choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo'))), """<select name="beatle">
<option value="J">John</option>
<option value="P">Paul</option>
<option value="G">George</option>
<option value="R">Ringo</option>
</select>""")
# The value is compared to its str():
self.assertEqual(w.render('num', 2, choices=[('1', '1'), ('2', '2'), ('3', '3')]), """<select name="num">
<option value="1">1</option>
<option value="2" selected="selected">2</option>
<option value="3">3</option>
</select>""")
self.assertEqual(w.render('num', '2', choices=[(1, 1), (2, 2), (3, 3)]), """<select name="num">
<option value="1">1</option>
<option value="2" selected="selected">2</option>
<option value="3">3</option>
</select>""")
self.assertEqual(w.render('num', 2, choices=[(1, 1), (2, 2), (3, 3)]), """<select name="num">
<option value="1">1</option>
<option value="2" selected="selected">2</option>
<option value="3">3</option>
</select>""")
# The 'choices' argument can be any iterable:
from itertools import chain
def get_choices():
for i in range(5):
yield (i, i)
self.assertEqual(w.render('num', 2, choices=get_choices()), """<select name="num">
<option value="0">0</option>
<option value="1">1</option>
<option value="2" selected="selected">2</option>
<option value="3">3</option>
<option value="4">4</option>
</select>""")
things = ({'id': 1, 'name': 'And Boom'}, {'id': 2, 'name': 'One More Thing!'})
class SomeForm(Form):
somechoice = ChoiceField(choices=chain((('', '-'*9),), [(thing['id'], thing['name']) for thing in things]))
f = SomeForm()
self.assertEqual(f.as_table(), u'<tr><th><label for="id_somechoice">Somechoice:</label></th><td><select name="somechoice" id="id_somechoice">\n<option value="" selected="selected">---------</option>\n<option value="1">And Boom</option>\n<option value="2">One More Thing!</option>\n</select></td></tr>')
self.assertEqual(f.as_table(), u'<tr><th><label for="id_somechoice">Somechoice:</label></th><td><select name="somechoice" id="id_somechoice">\n<option value="" selected="selected">---------</option>\n<option value="1">And Boom</option>\n<option value="2">One More Thing!</option>\n</select></td></tr>')
f = SomeForm({'somechoice': 2})
self.assertEqual(f.as_table(), u'<tr><th><label for="id_somechoice">Somechoice:</label></th><td><select name="somechoice" id="id_somechoice">\n<option value="">---------</option>\n<option value="1">And Boom</option>\n<option value="2" selected="selected">One More Thing!</option>\n</select></td></tr>')
# You can also pass 'choices' to the constructor:
w = Select(choices=[(1, 1), (2, 2), (3, 3)])
self.assertEqual(w.render('num', 2), """<select name="num">
<option value="1">1</option>
<option value="2" selected="selected">2</option>
<option value="3">3</option>
</select>""")
# If 'choices' is passed to both the constructor and render(), then they'll both be in the output:
self.assertEqual(w.render('num', 2, choices=[(4, 4), (5, 5)]), """<select name="num">
<option value="1">1</option>
<option value="2" selected="selected">2</option>
<option value="3">3</option>
<option value="4">4</option>
<option value="5">5</option>
</select>""")
# Choices are escaped correctly
self.assertEqual(w.render('escape', None, choices=(('bad', 'you & me'), ('good', mark_safe('you > me')))), """<select name="escape">
<option value="1">1</option>
<option value="2">2</option>
<option value="3">3</option>
<option value="bad">you & me</option>
<option value="good">you > me</option>
</select>""")
# Unicode choices are correctly rendered as HTML
self.assertEqual(w.render('email', 'ŠĐĆŽćžšđ', choices=[('ŠĐĆŽćžšđ', 'ŠĐabcĆŽćžšđ'), ('ćžšđ', 'abcćžšđ')]), u'<select name="email">\n<option value="1">1</option>\n<option value="2">2</option>\n<option value="3">3</option>\n<option value="\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111" selected="selected">\u0160\u0110abc\u0106\u017d\u0107\u017e\u0161\u0111</option>\n<option value="\u0107\u017e\u0161\u0111">abc\u0107\u017e\u0161\u0111</option>\n</select>')
# If choices is passed to the constructor and is a generator, it can be iterated
# over multiple times without getting consumed:
w = Select(choices=get_choices())
self.assertEqual(w.render('num', 2), """<select name="num">
<option value="0">0</option>
<option value="1">1</option>
<option value="2" selected="selected">2</option>
<option value="3">3</option>
<option value="4">4</option>
</select>""")
self.assertEqual(w.render('num', 3), """<select name="num">
<option value="0">0</option>
<option value="1">1</option>
<option value="2">2</option>
<option value="3" selected="selected">3</option>
<option value="4">4</option>
</select>""")
# Choices can be nested one level in order to create HTML optgroups:
w.choices=(('outer1', 'Outer 1'), ('Group "1"', (('inner1', 'Inner 1'), ('inner2', 'Inner 2'))))
self.assertEqual(w.render('nestchoice', None), """<select name="nestchoice">
<option value="outer1">Outer 1</option>
<optgroup label="Group "1"">
<option value="inner1">Inner 1</option>
<option value="inner2">Inner 2</option>
</optgroup>
</select>""")
self.assertEqual(w.render('nestchoice', 'outer1'), """<select name="nestchoice">
<option value="outer1" selected="selected">Outer 1</option>
<optgroup label="Group "1"">
<option value="inner1">Inner 1</option>
<option value="inner2">Inner 2</option>
</optgroup>
</select>""")
self.assertEqual(w.render('nestchoice', 'inner1'), """<select name="nestchoice">
<option value="outer1">Outer 1</option>
<optgroup label="Group "1"">
<option value="inner1" selected="selected">Inner 1</option>
<option value="inner2">Inner 2</option>
</optgroup>
</select>""")
def test_nullbooleanselect(self):
w = NullBooleanSelect()
self.assertTrue(w.render('is_cool', True), """<select name="is_cool">
<option value="1">Unknown</option>
<option value="2" selected="selected">Yes</option>
<option value="3">No</option>
</select>""")
self.assertEqual(w.render('is_cool', False), """<select name="is_cool">
<option value="1">Unknown</option>
<option value="2">Yes</option>
<option value="3" selected="selected">No</option>
</select>""")
self.assertEqual(w.render('is_cool', None), """<select name="is_cool">
<option value="1" selected="selected">Unknown</option>
<option value="2">Yes</option>
<option value="3">No</option>
</select>""")
self.assertEqual(w.render('is_cool', '2'), """<select name="is_cool">
<option value="1">Unknown</option>
<option value="2" selected="selected">Yes</option>
<option value="3">No</option>
</select>""")
self.assertEqual(w.render('is_cool', '3'), """<select name="is_cool">
<option value="1">Unknown</option>
<option value="2">Yes</option>
<option value="3" selected="selected">No</option>
</select>""")
self.assertTrue(w._has_changed(False, None))
self.assertTrue(w._has_changed(None, False))
self.assertFalse(w._has_changed(None, None))
self.assertFalse(w._has_changed(False, False))
self.assertTrue(w._has_changed(True, False))
self.assertTrue(w._has_changed(True, None))
self.assertTrue(w._has_changed(True, False))
    def test_selectmultiple(self):
        """
        SelectMultiple renders <select multiple="multiple">; every entry in
        the given list whose str() matches an option value is selected.
        Choices may come from the constructor, render(), or both, and one
        level of nesting produces <optgroup>s.
        """
        w = SelectMultiple()
        self.assertEqual(w.render('beatles', ['J'], choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo'))), """<select multiple="multiple" name="beatles">
<option value="J" selected="selected">John</option>
<option value="P">Paul</option>
<option value="G">George</option>
<option value="R">Ringo</option>
</select>""")
        self.assertEqual(w.render('beatles', ['J', 'P'], choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo'))), """<select multiple="multiple" name="beatles">
<option value="J" selected="selected">John</option>
<option value="P" selected="selected">Paul</option>
<option value="G">George</option>
<option value="R">Ringo</option>
</select>""")
        self.assertEqual(w.render('beatles', ['J', 'P', 'R'], choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo'))), """<select multiple="multiple" name="beatles">
<option value="J" selected="selected">John</option>
<option value="P" selected="selected">Paul</option>
<option value="G">George</option>
<option value="R" selected="selected">Ringo</option>
</select>""")
        # If the value is None, none of the options are selected:
        self.assertEqual(w.render('beatles', None, choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo'))), """<select multiple="multiple" name="beatles">
<option value="J">John</option>
<option value="P">Paul</option>
<option value="G">George</option>
<option value="R">Ringo</option>
</select>""")
        # If the value corresponds to a label (but not to an option value), none of the options are selected:
        self.assertEqual(w.render('beatles', ['John'], choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo'))), """<select multiple="multiple" name="beatles">
<option value="J">John</option>
<option value="P">Paul</option>
<option value="G">George</option>
<option value="R">Ringo</option>
</select>""")
        # If multiple values are given, but some of them are not valid, the valid ones are selected:
        self.assertEqual(w.render('beatles', ['J', 'G', 'foo'], choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo'))), """<select multiple="multiple" name="beatles">
<option value="J" selected="selected">John</option>
<option value="P">Paul</option>
<option value="G" selected="selected">George</option>
<option value="R">Ringo</option>
</select>""")
        # The value is compared to its str():
        self.assertEqual(w.render('nums', [2], choices=[('1', '1'), ('2', '2'), ('3', '3')]), """<select multiple="multiple" name="nums">
<option value="1">1</option>
<option value="2" selected="selected">2</option>
<option value="3">3</option>
</select>""")
        self.assertEqual(w.render('nums', ['2'], choices=[(1, 1), (2, 2), (3, 3)]), """<select multiple="multiple" name="nums">
<option value="1">1</option>
<option value="2" selected="selected">2</option>
<option value="3">3</option>
</select>""")
        self.assertEqual(w.render('nums', [2], choices=[(1, 1), (2, 2), (3, 3)]), """<select multiple="multiple" name="nums">
<option value="1">1</option>
<option value="2" selected="selected">2</option>
<option value="3">3</option>
</select>""")
        # The 'choices' argument can be any iterable:
        def get_choices():
            for i in range(5):
                yield (i, i)
        self.assertEqual(w.render('nums', [2], choices=get_choices()), """<select multiple="multiple" name="nums">
<option value="0">0</option>
<option value="1">1</option>
<option value="2" selected="selected">2</option>
<option value="3">3</option>
<option value="4">4</option>
</select>""")
        # You can also pass 'choices' to the constructor:
        w = SelectMultiple(choices=[(1, 1), (2, 2), (3, 3)])
        self.assertEqual(w.render('nums', [2]), """<select multiple="multiple" name="nums">
<option value="1">1</option>
<option value="2" selected="selected">2</option>
<option value="3">3</option>
</select>""")
        # If 'choices' is passed to both the constructor and render(), then they'll both be in the output:
        self.assertEqual(w.render('nums', [2], choices=[(4, 4), (5, 5)]), """<select multiple="multiple" name="nums">
<option value="1">1</option>
<option value="2" selected="selected">2</option>
<option value="3">3</option>
<option value="4">4</option>
<option value="5">5</option>
</select>""")
        # Choices are escaped correctly
        self.assertEqual(w.render('escape', None, choices=(('bad', 'you & me'), ('good', mark_safe('you > me')))), """<select multiple="multiple" name="escape">
<option value="1">1</option>
<option value="2">2</option>
<option value="3">3</option>
<option value="bad">you & me</option>
<option value="good">you > me</option>
</select>""")
        # Unicode choices are correctly rendered as HTML
        self.assertEqual(w.render('nums', ['ŠĐĆŽćžšđ'], choices=[('ŠĐĆŽćžšđ', 'ŠĐabcĆŽćžšđ'), ('ćžšđ', 'abcćžšđ')]), u'<select multiple="multiple" name="nums">\n<option value="1">1</option>\n<option value="2">2</option>\n<option value="3">3</option>\n<option value="\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111" selected="selected">\u0160\u0110abc\u0106\u017d\u0107\u017e\u0161\u0111</option>\n<option value="\u0107\u017e\u0161\u0111">abc\u0107\u017e\u0161\u0111</option>\n</select>')
        # Test the usage of _has_changed
        self.assertFalse(w._has_changed(None, None))
        self.assertFalse(w._has_changed([], None))
        self.assertTrue(w._has_changed(None, [u'1']))
        self.assertFalse(w._has_changed([1, 2], [u'1', u'2']))
        self.assertTrue(w._has_changed([1, 2], [u'1']))
        self.assertTrue(w._has_changed([1, 2], [u'1', u'3']))
        # Choices can be nested one level in order to create HTML optgroups:
        w.choices = (('outer1', 'Outer 1'), ('Group "1"', (('inner1', 'Inner 1'), ('inner2', 'Inner 2'))))
        self.assertEqual(w.render('nestchoice', None), """<select multiple="multiple" name="nestchoice">
<option value="outer1">Outer 1</option>
<optgroup label="Group "1"">
<option value="inner1">Inner 1</option>
<option value="inner2">Inner 2</option>
</optgroup>
</select>""")
        self.assertEqual(w.render('nestchoice', ['outer1']), """<select multiple="multiple" name="nestchoice">
<option value="outer1" selected="selected">Outer 1</option>
<optgroup label="Group "1"">
<option value="inner1">Inner 1</option>
<option value="inner2">Inner 2</option>
</optgroup>
</select>""")
        self.assertEqual(w.render('nestchoice', ['inner1']), """<select multiple="multiple" name="nestchoice">
<option value="outer1">Outer 1</option>
<optgroup label="Group "1"">
<option value="inner1" selected="selected">Inner 1</option>
<option value="inner2">Inner 2</option>
</optgroup>
</select>""")
        self.assertEqual(w.render('nestchoice', ['outer1', 'inner2']), """<select multiple="multiple" name="nestchoice">
<option value="outer1" selected="selected">Outer 1</option>
<optgroup label="Group "1"">
<option value="inner1">Inner 1</option>
<option value="inner2" selected="selected">Inner 2</option>
</optgroup>
</select>""")
    def test_radioselect(self):
        """
        RadioSelect renders a <ul> of <li><label><input type="radio">…
        entries, one per choice, with the matching value checked. Rendering
        goes through a RadioFieldRenderer, which can be iterated, indexed,
        or replaced (via the 'renderer' argument or a subclass attribute)
        to customize the markup.
        """
        w = RadioSelect()
        self.assertEqual(w.render('beatle', 'J', choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo'))), """<ul>
<li><label><input checked="checked" type="radio" name="beatle" value="J" /> John</label></li>
<li><label><input type="radio" name="beatle" value="P" /> Paul</label></li>
<li><label><input type="radio" name="beatle" value="G" /> George</label></li>
<li><label><input type="radio" name="beatle" value="R" /> Ringo</label></li>
</ul>""")
        # If the value is None, none of the options are checked:
        self.assertEqual(w.render('beatle', None, choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo'))), """<ul>
<li><label><input type="radio" name="beatle" value="J" /> John</label></li>
<li><label><input type="radio" name="beatle" value="P" /> Paul</label></li>
<li><label><input type="radio" name="beatle" value="G" /> George</label></li>
<li><label><input type="radio" name="beatle" value="R" /> Ringo</label></li>
</ul>""")
        # If the value corresponds to a label (but not to an option value), none of the options are checked:
        self.assertEqual(w.render('beatle', 'John', choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo'))), """<ul>
<li><label><input type="radio" name="beatle" value="J" /> John</label></li>
<li><label><input type="radio" name="beatle" value="P" /> Paul</label></li>
<li><label><input type="radio" name="beatle" value="G" /> George</label></li>
<li><label><input type="radio" name="beatle" value="R" /> Ringo</label></li>
</ul>""")
        # The value is compared to its str():
        self.assertEqual(w.render('num', 2, choices=[('1', '1'), ('2', '2'), ('3', '3')]), """<ul>
<li><label><input type="radio" name="num" value="1" /> 1</label></li>
<li><label><input checked="checked" type="radio" name="num" value="2" /> 2</label></li>
<li><label><input type="radio" name="num" value="3" /> 3</label></li>
</ul>""")
        self.assertEqual(w.render('num', '2', choices=[(1, 1), (2, 2), (3, 3)]), """<ul>
<li><label><input type="radio" name="num" value="1" /> 1</label></li>
<li><label><input checked="checked" type="radio" name="num" value="2" /> 2</label></li>
<li><label><input type="radio" name="num" value="3" /> 3</label></li>
</ul>""")
        self.assertEqual(w.render('num', 2, choices=[(1, 1), (2, 2), (3, 3)]), """<ul>
<li><label><input type="radio" name="num" value="1" /> 1</label></li>
<li><label><input checked="checked" type="radio" name="num" value="2" /> 2</label></li>
<li><label><input type="radio" name="num" value="3" /> 3</label></li>
</ul>""")
        # The 'choices' argument can be any iterable:
        def get_choices():
            for i in range(5):
                yield (i, i)
        self.assertEqual(w.render('num', 2, choices=get_choices()), """<ul>
<li><label><input type="radio" name="num" value="0" /> 0</label></li>
<li><label><input type="radio" name="num" value="1" /> 1</label></li>
<li><label><input checked="checked" type="radio" name="num" value="2" /> 2</label></li>
<li><label><input type="radio" name="num" value="3" /> 3</label></li>
<li><label><input type="radio" name="num" value="4" /> 4</label></li>
</ul>""")
        # You can also pass 'choices' to the constructor:
        w = RadioSelect(choices=[(1, 1), (2, 2), (3, 3)])
        self.assertEqual(w.render('num', 2), """<ul>
<li><label><input type="radio" name="num" value="1" /> 1</label></li>
<li><label><input checked="checked" type="radio" name="num" value="2" /> 2</label></li>
<li><label><input type="radio" name="num" value="3" /> 3</label></li>
</ul>""")
        # If 'choices' is passed to both the constructor and render(), then they'll both be in the output:
        self.assertEqual(w.render('num', 2, choices=[(4, 4), (5, 5)]), """<ul>
<li><label><input type="radio" name="num" value="1" /> 1</label></li>
<li><label><input checked="checked" type="radio" name="num" value="2" /> 2</label></li>
<li><label><input type="radio" name="num" value="3" /> 3</label></li>
<li><label><input type="radio" name="num" value="4" /> 4</label></li>
<li><label><input type="radio" name="num" value="5" /> 5</label></li>
</ul>""")
        # RadioSelect uses a RadioFieldRenderer to render the individual radio inputs.
        # You can manipulate that object directly to customize the way the RadioSelect
        # is rendered.
        w = RadioSelect()
        r = w.get_renderer('beatle', 'J', choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo')))
        inp_set1 = []
        inp_set2 = []
        inp_set3 = []
        inp_set4 = []
        for inp in r:
            inp_set1.append(str(inp))
            inp_set2.append('%s<br />' % inp)
            inp_set3.append('<p>%s %s</p>' % (inp.tag(), inp.choice_label))
            inp_set4.append('%s %s %s %s %s' % (inp.name, inp.value, inp.choice_value, inp.choice_label, inp.is_checked()))
        self.assertEqual('\n'.join(inp_set1), """<label><input checked="checked" type="radio" name="beatle" value="J" /> John</label>
<label><input type="radio" name="beatle" value="P" /> Paul</label>
<label><input type="radio" name="beatle" value="G" /> George</label>
<label><input type="radio" name="beatle" value="R" /> Ringo</label>""")
        self.assertEqual('\n'.join(inp_set2), """<label><input checked="checked" type="radio" name="beatle" value="J" /> John</label><br />
<label><input type="radio" name="beatle" value="P" /> Paul</label><br />
<label><input type="radio" name="beatle" value="G" /> George</label><br />
<label><input type="radio" name="beatle" value="R" /> Ringo</label><br />""")
        self.assertEqual('\n'.join(inp_set3), """<p><input checked="checked" type="radio" name="beatle" value="J" /> John</p>
<p><input type="radio" name="beatle" value="P" /> Paul</p>
<p><input type="radio" name="beatle" value="G" /> George</p>
<p><input type="radio" name="beatle" value="R" /> Ringo</p>""")
        self.assertEqual('\n'.join(inp_set4), """beatle J J John True
beatle J P Paul False
beatle J G George False
beatle J R Ringo False""")
        # You can create your own custom renderers for RadioSelect to use.
        class MyRenderer(RadioFieldRenderer):
            def render(self):
                return u'<br />\n'.join([unicode(choice) for choice in self])
        w = RadioSelect(renderer=MyRenderer)
        self.assertEqual(w.render('beatle', 'G', choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo'))), """<label><input type="radio" name="beatle" value="J" /> John</label><br />
<label><input type="radio" name="beatle" value="P" /> Paul</label><br />
<label><input checked="checked" type="radio" name="beatle" value="G" /> George</label><br />
<label><input type="radio" name="beatle" value="R" /> Ringo</label>""")
        # Or you can use custom RadioSelect fields that use your custom renderer.
        class CustomRadioSelect(RadioSelect):
            renderer = MyRenderer
        w = CustomRadioSelect()
        self.assertEqual(w.render('beatle', 'G', choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo'))), """<label><input type="radio" name="beatle" value="J" /> John</label><br />
<label><input type="radio" name="beatle" value="P" /> Paul</label><br />
<label><input checked="checked" type="radio" name="beatle" value="G" /> George</label><br />
<label><input type="radio" name="beatle" value="R" /> Ringo</label>""")
        # A RadioFieldRenderer object also allows index access to individual RadioInput
        w = RadioSelect()
        r = w.get_renderer('beatle', 'J', choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo')))
        self.assertEqual(str(r[1]), '<label><input type="radio" name="beatle" value="P" /> Paul</label>')
        self.assertEqual(str(r[0]), '<label><input checked="checked" type="radio" name="beatle" value="J" /> John</label>')
        self.assertTrue(r[0].is_checked())
        self.assertFalse(r[1].is_checked())
        self.assertEqual((r[1].name, r[1].value, r[1].choice_value, r[1].choice_label), ('beatle', u'J', u'P', u'Paul'))
        # Out-of-range indexing raises IndexError, like a normal sequence.
        try:
            r[10]
            self.fail("This offset should not exist.")
        except IndexError:
            pass
        # Choices are escaped correctly
        w = RadioSelect()
        self.assertEqual(w.render('escape', None, choices=(('bad', 'you & me'), ('good', mark_safe('you > me')))), """<ul>
<li><label><input type="radio" name="escape" value="bad" /> you & me</label></li>
<li><label><input type="radio" name="escape" value="good" /> you > me</label></li>
</ul>""")
        # Unicode choices are correctly rendered as HTML
        w = RadioSelect()
        self.assertEqual(unicode(w.render('email', 'ŠĐĆŽćžšđ', choices=[('ŠĐĆŽćžšđ', 'ŠĐabcĆŽćžšđ'), ('ćžšđ', 'abcćžšđ')])), u'<ul>\n<li><label><input checked="checked" type="radio" name="email" value="\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111" /> \u0160\u0110abc\u0106\u017d\u0107\u017e\u0161\u0111</label></li>\n<li><label><input type="radio" name="email" value="\u0107\u017e\u0161\u0111" /> abc\u0107\u017e\u0161\u0111</label></li>\n</ul>')
# Attributes provided at instantiation are passed to the constituent inputs
w = RadioSelect(attrs={'id':'foo'})
self.assertEqual(w.render('beatle', 'J', choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo'))), """<ul>
<li><label for="foo_0"><input checked="checked" type="radio" id="foo_0" value="J" name="beatle" /> John</label></li>
<li><label for="foo_1"><input type="radio" id="foo_1" value="P" name="beatle" /> Paul</label></li>
<li><label for="foo_2"><input type="radio" id="foo_2" value="G" name="beatle" /> George</label></li>
<li><label for="foo_3"><input type="radio" id="foo_3" value="R" name="beatle" /> Ringo</label></li>
</ul>""")
# Attributes provided at render-time are passed to the constituent inputs
w = RadioSelect()
self.assertEqual(w.render('beatle', 'J', choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo')), attrs={'id':'bar'}), """<ul>
<li><label for="bar_0"><input checked="checked" type="radio" id="bar_0" value="J" name="beatle" /> John</label></li>
<li><label for="bar_1"><input type="radio" id="bar_1" value="P" name="beatle" /> Paul</label></li>
<li><label for="bar_2"><input type="radio" id="bar_2" value="G" name="beatle" /> George</label></li>
<li><label for="bar_3"><input type="radio" id="bar_3" value="R" name="beatle" /> Ringo</label></li>
</ul>""")
def test_checkboxselectmultiple(self):
w = CheckboxSelectMultiple()
self.assertEqual(w.render('beatles', ['J'], choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo'))), """<ul>
<li><label><input checked="checked" type="checkbox" name="beatles" value="J" /> John</label></li>
<li><label><input type="checkbox" name="beatles" value="P" /> Paul</label></li>
<li><label><input type="checkbox" name="beatles" value="G" /> George</label></li>
<li><label><input type="checkbox" name="beatles" value="R" /> Ringo</label></li>
</ul>""")
self.assertEqual(w.render('beatles', ['J', 'P'], choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo'))), """<ul>
<li><label><input checked="checked" type="checkbox" name="beatles" value="J" /> John</label></li>
<li><label><input checked="checked" type="checkbox" name="beatles" value="P" /> Paul</label></li>
<li><label><input type="checkbox" name="beatles" value="G" /> George</label></li>
<li><label><input type="checkbox" name="beatles" value="R" /> Ringo</label></li>
</ul>""")
self.assertEqual(w.render('beatles', ['J', 'P', 'R'], choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo'))), """<ul>
<li><label><input checked="checked" type="checkbox" name="beatles" value="J" /> John</label></li>
<li><label><input checked="checked" type="checkbox" name="beatles" value="P" /> Paul</label></li>
<li><label><input type="checkbox" name="beatles" value="G" /> George</label></li>
<li><label><input checked="checked" type="checkbox" name="beatles" value="R" /> Ringo</label></li>
</ul>""")
# If the value is None, none of the options are selected:
self.assertEqual(w.render('beatles', None, choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo'))), """<ul>
<li><label><input type="checkbox" name="beatles" value="J" /> John</label></li>
<li><label><input type="checkbox" name="beatles" value="P" /> Paul</label></li>
<li><label><input type="checkbox" name="beatles" value="G" /> George</label></li>
<li><label><input type="checkbox" name="beatles" value="R" /> Ringo</label></li>
</ul>""")
# If the value corresponds to a label (but not to an option value), none of the options are selected:
self.assertEqual(w.render('beatles', ['John'], choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo'))), """<ul>
<li><label><input type="checkbox" name="beatles" value="J" /> John</label></li>
<li><label><input type="checkbox" name="beatles" value="P" /> Paul</label></li>
<li><label><input type="checkbox" name="beatles" value="G" /> George</label></li>
<li><label><input type="checkbox" name="beatles" value="R" /> Ringo</label></li>
</ul>""")
# If multiple values are given, but some of them are not valid, the valid ones are selected:
self.assertEqual(w.render('beatles', ['J', 'G', 'foo'], choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo'))), """<ul>
<li><label><input checked="checked" type="checkbox" name="beatles" value="J" /> John</label></li>
<li><label><input type="checkbox" name="beatles" value="P" /> Paul</label></li>
<li><label><input checked="checked" type="checkbox" name="beatles" value="G" /> George</label></li>
<li><label><input type="checkbox" name="beatles" value="R" /> Ringo</label></li>
</ul>""")
# The value is compared to its str():
self.assertEqual(w.render('nums', [2], choices=[('1', '1'), ('2', '2'), ('3', '3')]), """<ul>
<li><label><input type="checkbox" name="nums" value="1" /> 1</label></li>
<li><label><input checked="checked" type="checkbox" name="nums" value="2" /> 2</label></li>
<li><label><input type="checkbox" name="nums" value="3" /> 3</label></li>
</ul>""")
self.assertEqual(w.render('nums', ['2'], choices=[(1, 1), (2, 2), (3, 3)]), """<ul>
<li><label><input type="checkbox" name="nums" value="1" /> 1</label></li>
<li><label><input checked="checked" type="checkbox" name="nums" value="2" /> 2</label></li>
<li><label><input type="checkbox" name="nums" value="3" /> 3</label></li>
</ul>""")
self.assertEqual(w.render('nums', [2], choices=[(1, 1), (2, 2), (3, 3)]), """<ul>
<li><label><input type="checkbox" name="nums" value="1" /> 1</label></li>
<li><label><input checked="checked" type="checkbox" name="nums" value="2" /> 2</label></li>
<li><label><input type="checkbox" name="nums" value="3" /> 3</label></li>
</ul>""")
# The 'choices' argument can be any iterable:
def get_choices():
for i in range(5):
yield (i, i)
self.assertEqual(w.render('nums', [2], choices=get_choices()), """<ul>
<li><label><input type="checkbox" name="nums" value="0" /> 0</label></li>
<li><label><input type="checkbox" name="nums" value="1" /> 1</label></li>
<li><label><input checked="checked" type="checkbox" name="nums" value="2" /> 2</label></li>
<li><label><input type="checkbox" name="nums" value="3" /> 3</label></li>
<li><label><input type="checkbox" name="nums" value="4" /> 4</label></li>
</ul>""")
# You can also pass 'choices' to the constructor:
w = CheckboxSelectMultiple(choices=[(1, 1), (2, 2), (3, 3)])
self.assertEqual(w.render('nums', [2]), """<ul>
<li><label><input type="checkbox" name="nums" value="1" /> 1</label></li>
<li><label><input checked="checked" type="checkbox" name="nums" value="2" /> 2</label></li>
<li><label><input type="checkbox" name="nums" value="3" /> 3</label></li>
</ul>""")
# If 'choices' is passed to both the constructor and render(), then they'll both be in the output:
self.assertEqual(w.render('nums', [2], choices=[(4, 4), (5, 5)]), """<ul>
<li><label><input type="checkbox" name="nums" value="1" /> 1</label></li>
<li><label><input checked="checked" type="checkbox" name="nums" value="2" /> 2</label></li>
<li><label><input type="checkbox" name="nums" value="3" /> 3</label></li>
<li><label><input type="checkbox" name="nums" value="4" /> 4</label></li>
<li><label><input type="checkbox" name="nums" value="5" /> 5</label></li>
</ul>""")
# Choices are escaped correctly
self.assertEqual(w.render('escape', None, choices=(('bad', 'you & me'), ('good', mark_safe('you > me')))), """<ul>
<li><label><input type="checkbox" name="escape" value="1" /> 1</label></li>
<li><label><input type="checkbox" name="escape" value="2" /> 2</label></li>
<li><label><input type="checkbox" name="escape" value="3" /> 3</label></li>
<li><label><input type="checkbox" name="escape" value="bad" /> you & me</label></li>
<li><label><input type="checkbox" name="escape" value="good" /> you > me</label></li>
</ul>""")
# Test the usage of _has_changed
self.assertFalse(w._has_changed(None, None))
self.assertFalse(w._has_changed([], None))
self.assertTrue(w._has_changed(None, [u'1']))
self.assertFalse(w._has_changed([1, 2], [u'1', u'2']))
self.assertTrue(w._has_changed([1, 2], [u'1']))
self.assertTrue(w._has_changed([1, 2], [u'1', u'3']))
self.assertFalse(w._has_changed([2, 1], [u'1', u'2']))
# Unicode choices are correctly rendered as HTML
self.assertEqual(w.render('nums', ['ŠĐĆŽćžšđ'], choices=[('ŠĐĆŽćžšđ', 'ŠĐabcĆŽćžšđ'), ('ćžšđ', 'abcćžšđ')]), u'<ul>\n<li><label><input type="checkbox" name="nums" value="1" /> 1</label></li>\n<li><label><input type="checkbox" name="nums" value="2" /> 2</label></li>\n<li><label><input type="checkbox" name="nums" value="3" /> 3</label></li>\n<li><label><input checked="checked" type="checkbox" name="nums" value="\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111" /> \u0160\u0110abc\u0106\u017d\u0107\u017e\u0161\u0111</label></li>\n<li><label><input type="checkbox" name="nums" value="\u0107\u017e\u0161\u0111" /> abc\u0107\u017e\u0161\u0111</label></li>\n</ul>')
# Each input gets a separate ID
self.assertEqual(CheckboxSelectMultiple().render('letters', list('ac'), choices=zip(list('abc'), list('ABC')), attrs={'id': 'abc'}), """<ul>
<li><label for="abc_0"><input checked="checked" type="checkbox" name="letters" value="a" id="abc_0" /> A</label></li>
<li><label for="abc_1"><input type="checkbox" name="letters" value="b" id="abc_1" /> B</label></li>
<li><label for="abc_2"><input checked="checked" type="checkbox" name="letters" value="c" id="abc_2" /> C</label></li>
</ul>""")
    def test_multi(self):
        """
        MultiWidget renders one sub-widget per value (suffixing the name with
        the index), decompressing a single compressed value when needed, and
        _has_changed checks every sub-widget.
        """
        class MyMultiWidget(MultiWidget):
            def decompress(self, value):
                # Split 'a__b' into ['a', 'b']; None/'' yields two empty slots.
                if value:
                    return value.split('__')
                return ['', '']
            def format_output(self, rendered_widgets):
                return u'<br />'.join(rendered_widgets)
        w = MyMultiWidget(widgets=(TextInput(attrs={'class': 'big'}), TextInput(attrs={'class': 'small'})))
        # A list value maps one item per sub-widget; a compressed string is
        # decompress()ed first -- both render identically.
        self.assertEqual(w.render('name', ['john', 'lennon']), u'<input type="text" class="big" value="john" name="name_0" /><br /><input type="text" class="small" value="lennon" name="name_1" />')
        self.assertEqual(w.render('name', 'john__lennon'), u'<input type="text" class="big" value="john" name="name_0" /><br /><input type="text" class="small" value="lennon" name="name_1" />')
        # Render-time attrs (like 'id') are also suffixed per sub-widget.
        self.assertEqual(w.render('name', 'john__lennon', attrs={'id':'foo'}), u'<input id="foo_0" type="text" class="big" value="john" name="name_0" /><br /><input id="foo_1" type="text" class="small" value="lennon" name="name_1" />')
        w = MyMultiWidget(widgets=(TextInput(attrs={'class': 'big'}), TextInput(attrs={'class': 'small'})), attrs={'id': 'bar'})
        self.assertEqual(w.render('name', ['john', 'lennon']), u'<input id="bar_0" type="text" class="big" value="john" name="name_0" /><br /><input id="bar_1" type="text" class="small" value="lennon" name="name_1" />')
        w = MyMultiWidget(widgets=(TextInput(), TextInput()))
        # test with no initial data
        self.assertTrue(w._has_changed(None, [u'john', u'lennon']))
        # test when the data is the same as initial
        self.assertFalse(w._has_changed(u'john__lennon', [u'john', u'lennon']))
        # test when the first widget's data has changed
        self.assertTrue(w._has_changed(u'john__lennon', [u'alfred', u'lennon']))
        # test when the last widget's data has changed. this ensures that it is not
        # short circuiting while testing the widgets.
        self.assertTrue(w._has_changed(u'john__lennon', [u'john', u'denver']))
    def test_splitdatetime(self):
        """
        SplitDateTimeWidget renders two text inputs (date and time halves),
        accepting either a datetime or a [date, time] pair, and honours
        date_format/time_format both for display and in _has_changed.
        """
        w = SplitDateTimeWidget()
        self.assertEqual(w.render('date', ''), u'<input type="text" name="date_0" /><input type="text" name="date_1" />')
        self.assertEqual(w.render('date', None), u'<input type="text" name="date_0" /><input type="text" name="date_1" />')
        # A datetime is decompressed into its date and time components; an
        # explicit [date, time] pair renders identically.
        self.assertEqual(w.render('date', datetime.datetime(2006, 1, 10, 7, 30)), u'<input type="text" name="date_0" value="2006-01-10" /><input type="text" name="date_1" value="07:30:00" />')
        self.assertEqual(w.render('date', [datetime.date(2006, 1, 10), datetime.time(7, 30)]), u'<input type="text" name="date_0" value="2006-01-10" /><input type="text" name="date_1" value="07:30:00" />')
        # You can also pass 'attrs' to the constructor. In this case, the attrs will be
        # applied to both of the constituent inputs.
        w = SplitDateTimeWidget(attrs={'class': 'pretty'})
        self.assertEqual(w.render('date', datetime.datetime(2006, 1, 10, 7, 30)), u'<input type="text" class="pretty" value="2006-01-10" name="date_0" /><input type="text" class="pretty" value="07:30:00" name="date_1" />')
        # Use 'date_format' and 'time_format' to change the way a value is displayed.
        w = SplitDateTimeWidget(date_format='%d/%m/%Y', time_format='%H:%M')
        self.assertEqual(w.render('date', datetime.datetime(2006, 1, 10, 7, 30)), u'<input type="text" name="date_0" value="10/01/2006" /><input type="text" name="date_1" value="07:30" />')
        # _has_changed compares using the widget's formats: the ISO pair does
        # not match the custom format (changed), the matching pair does not
        # count as changed, and a different minute does.
        self.assertTrue(w._has_changed(datetime.datetime(2008, 5, 6, 12, 40, 00), [u'2008-05-06', u'12:40:00']))
        self.assertFalse(w._has_changed(datetime.datetime(2008, 5, 6, 12, 40, 00), [u'06/05/2008', u'12:40']))
        self.assertTrue(w._has_changed(datetime.datetime(2008, 5, 6, 12, 40, 00), [u'06/05/2008', u'12:41']))
    def test_datetimeinput(self):
        """
        DateTimeInput renders a datetime as a single text input, trimming
        microseconds by default and honouring a custom 'format' both for
        display and in _has_changed.
        """
        w = DateTimeInput()
        self.assertEqual(w.render('date', None), u'<input type="text" name="date" />')
        d = datetime.datetime(2007, 9, 17, 12, 51, 34, 482548)
        self.assertEqual(str(d), '2007-09-17 12:51:34.482548')
        # The microseconds are trimmed on display, by default.
        self.assertEqual(w.render('date', d), u'<input type="text" name="date" value="2007-09-17 12:51:34" />')
        self.assertEqual(w.render('date', datetime.datetime(2007, 9, 17, 12, 51, 34)), u'<input type="text" name="date" value="2007-09-17 12:51:34" />')
        self.assertEqual(w.render('date', datetime.datetime(2007, 9, 17, 12, 51)), u'<input type="text" name="date" value="2007-09-17 12:51:00" />')
        # Use 'format' to change the way a value is displayed.
        w = DateTimeInput(format='%d/%m/%Y %H:%M')
        self.assertEqual(w.render('date', d), u'<input type="text" name="date" value="17/09/2007 12:51" />')
        self.assertFalse(w._has_changed(d, '17/09/2007 12:51'))
        # Make sure a custom format works with _has_changed. The hidden input will use
        # the same format when re-displaying the initial value.
        data = datetime.datetime(2010, 3, 6, 12, 0, 0)
        custom_format = '%d.%m.%Y %H:%M'
        w = DateTimeInput(format=custom_format)
        self.assertFalse(w._has_changed(formats.localize_input(data), data.strftime(custom_format)))
def test_dateinput(self):
w = DateInput()
self.assertEqual(w.render('date', None), u'<input type="text" name="date" />')
d = datetime.date(2007, 9, 17)
self.assertEqual(str(d), '2007-09-17')
self.assertEqual(w.render('date', d), u'<input type="text" name="date" value="2007-09-17" />')
self.assertEqual(w.render('date', datetime.date(2007, 9, 17)), u'<input type="text" name="date" value="2007-09-17" />')
# We should be able to initialize from a unicode value.
self.assertEqual(w.render('date', u'2007-09-17'), u'<input type="text" name="date" value="2007-09-17" />')
# Use 'format' to change the way a value is displayed.
w = DateInput(format='%d/%m/%Y')
self.assertEqual(w.render('date', d), u'<input type="text" name="date" value="17/09/2007" />')
self.assertFalse(w._has_changed(d, '17/09/2007'))
# Make sure a custom format works with _has_changed. The hidden input will use
data = datetime.date(2010, 3, 6)
custom_format = '%d.%m.%Y'
w = DateInput(format=custom_format)
self.assertFalse(w._has_changed(formats.localize_input(data), data.strftime(custom_format)))
def test_timeinput(self):
w = TimeInput()
self.assertEqual(w.render('time', None), u'<input type="text" name="time" />')
t = datetime.time(12, 51, 34, 482548)
self.assertEqual(str(t), '12:51:34.482548')
# The microseconds are trimmed on display, by default.
self.assertEqual(w.render('time', t), u'<input type="text" name="time" value="12:51:34" />')
self.assertEqual(w.render('time', datetime.time(12, 51, 34)), u'<input type="text" name="time" value="12:51:34" />')
self.assertEqual(w.render('time', datetime.time(12, 51)), u'<input type="text" name="time" value="12:51:00" />')
# We should be able to initialize from a unicode value.
self.assertEqual(w.render('time', u'13:12:11'), u'<input type="text" name="time" value="13:12:11" />')
# Use 'format' to change the way a value is displayed.
w = TimeInput(format='%H:%M')
self.assertEqual(w.render('time', t), u'<input type="text" name="time" value="12:51" />')
self.assertFalse(w._has_changed(t, '12:51'))
# Make sure a custom format works with _has_changed. The hidden input will use
data = datetime.time(13, 0)
custom_format = '%I:%M %p'
w = TimeInput(format=custom_format)
self.assertFalse(w._has_changed(formats.localize_input(data), data.strftime(custom_format)))
    def test_splithiddendatetime(self):
        """
        SplitHiddenDateTimeWidget renders the date and time halves of a
        datetime as two hidden inputs, trimming microseconds like its
        visible counterpart.
        """
        from django.forms.widgets import SplitHiddenDateTimeWidget
        w = SplitHiddenDateTimeWidget()
        self.assertEqual(w.render('date', ''), u'<input type="hidden" name="date_0" /><input type="hidden" name="date_1" />')
        d = datetime.datetime(2007, 9, 17, 12, 51, 34, 482548)
        self.assertEqual(str(d), '2007-09-17 12:51:34.482548')
        # Microseconds are trimmed; date and time are split across the inputs.
        self.assertEqual(w.render('date', d), u'<input type="hidden" name="date_0" value="2007-09-17" /><input type="hidden" name="date_1" value="12:51:34" />')
        self.assertEqual(w.render('date', datetime.datetime(2007, 9, 17, 12, 51, 34)), u'<input type="hidden" name="date_0" value="2007-09-17" /><input type="hidden" name="date_1" value="12:51:34" />')
        self.assertEqual(w.render('date', datetime.datetime(2007, 9, 17, 12, 51)), u'<input type="hidden" name="date_0" value="2007-09-17" /><input type="hidden" name="date_1" value="12:51:00" />')
class FormsI18NWidgetsTestCase(TestCase):
    """
    Widget rendering with USE_L10N enabled and the 'de-at' locale active:
    date/time values are formatted per the active locale when the widget is
    marked localized.
    """
    def setUp(self):
        super(FormsI18NWidgetsTestCase, self).setUp()
        # Remember the original setting so tearDown can restore it.
        self.old_use_l10n = getattr(settings, 'USE_L10N', False)
        settings.USE_L10N = True
        activate('de-at')
    def tearDown(self):
        # Undo in reverse order of setUp.
        deactivate()
        settings.USE_L10N = self.old_use_l10n
        super(FormsI18NWidgetsTestCase, self).tearDown()
    def test_splitdatetime(self):
        w = SplitDateTimeWidget(date_format='%d/%m/%Y', time_format='%H:%M')
        # The minute differs (12:41 vs 12:40), so the value has changed.
        self.assertTrue(w._has_changed(datetime.datetime(2008, 5, 6, 12, 40, 00), [u'06.05.2008', u'12:41']))
    def test_datetimeinput(self):
        w = DateTimeInput()
        d = datetime.datetime(2007, 9, 17, 12, 51, 34, 482548)
        # is_localized makes render() format with the active locale
        # (dotted day.month.year for de-at).
        w.is_localized = True
        self.assertEqual(w.render('date', d), u'<input type="text" name="date" value="17.09.2007 12:51:34" />')
    def test_dateinput(self):
        w = DateInput()
        d = datetime.date(2007, 9, 17)
        w.is_localized = True
        self.assertEqual(w.render('date', d), u'<input type="text" name="date" value="17.09.2007" />')
    def test_timeinput(self):
        w = TimeInput()
        t = datetime.time(12, 51, 34, 482548)
        w.is_localized = True
        self.assertEqual(w.render('time', t), u'<input type="text" name="time" value="12:51:34" />')
    def test_splithiddendatetime(self):
        from django.forms.widgets import SplitHiddenDateTimeWidget
        w = SplitHiddenDateTimeWidget()
        w.is_localized = True
        self.assertEqual(w.render('date', datetime.datetime(2007, 9, 17, 12, 51)), u'<input type="hidden" name="date_0" value="17.09.2007" /><input type="hidden" name="date_1" value="12:51:00" />')
class SelectAndTextWidget(MultiWidget):
    """
    MultiWidget subclass combining a RadioSelect and a TextInput.

    The ``choices`` property forwards to the RadioSelect sub-widget so the
    widget can be re-configured after construction (exercised by the
    deepcopy regression test for ticket #12048).
    """
    def __init__(self, choices=()):
        # NOTE: the signature previously used a mutable default (choices=[]);
        # an empty tuple is equivalent for callers but cannot be shared or
        # mutated across instances.
        widgets = [
            RadioSelect(choices=choices),
            TextInput(),
        ]
        super(SelectAndTextWidget, self).__init__(widgets)
    def _set_choices(self, choices):
        """
        When choices are set for this widget, we want to pass those along to the Select widget
        """
        self.widgets[0].choices = choices
    def _get_choices(self):
        """
        The choices for this widget are the Select widget's choices
        """
        return self.widgets[0].choices
    # Read/write property delegating to the first (RadioSelect) sub-widget.
    choices = property(_get_choices, _set_choices)
class WidgetTests(TestCase):
    """Regression tests for MultiWidget/SplitDateTimeWidget behaviour."""
    def test_12048(self):
        # See ticket #12048.
        w1 = SelectAndTextWidget(choices=[1,2,3])
        w2 = copy.deepcopy(w1)
        w2.choices = [4,5,6]
        # w2 ought to be independent of w1, since MultiWidget ought
        # to make a copy of its sub-widgets when it is copied.
        self.assertEqual(w1.choices, [1,2,3])
    def test_13390(self):
        # See ticket #13390
        # An empty optional SplitDateTime field must validate whether the
        # submitted value is a single empty string or a pair of them.
        class SplitDateForm(Form):
            field = DateTimeField(widget=SplitDateTimeWidget, required=False)
        form = SplitDateForm({'field': ''})
        self.assertTrue(form.is_valid())
        form = SplitDateForm({'field': ['', '']})
        self.assertTrue(form.is_valid())
        # The same empty inputs must fail validation when required=True.
        class SplitDateRequiredForm(Form):
            field = DateTimeField(widget=SplitDateTimeWidget, required=True)
        form = SplitDateRequiredForm({'field': ''})
        self.assertFalse(form.is_valid())
        form = SplitDateRequiredForm({'field': ['', '']})
        self.assertFalse(form.is_valid())
class FakeFieldFile(object):
    """
    Minimal stand-in for a FieldFile.

    Exposes just a ``url`` attribute and a unicode representation -- all the
    widget tests require -- without involving storages or the ORM.
    """
    url = 'something'

    def __unicode__(self):
        # Mirror FieldFile: the textual form of the file is its URL.
        return self.url
class ClearableFileInputTests(TestCase):
    """Tests for ClearableFileInput rendering and data extraction."""

    def test_clear_input_renders(self):
        """
        A ClearableFileInput with is_required False and rendered with
        an initial value that is a file renders a clear checkbox.
        """
        widget = ClearableFileInput()
        widget.is_required = False
        self.assertEqual(widget.render('myfile', FakeFieldFile()),
                u'Currently: <a href="something">something</a> <input type="checkbox" name="myfile-clear" id="myfile-clear_id" /> <label for="myfile-clear_id">Clear</label><br />Change: <input type="file" name="myfile" />')

    def test_html_escaped(self):
        """
        A ClearableFileInput should escape name, filename and URL when
        rendering HTML. Refs #15182.

        (The expected values below had been corrupted by HTML-entity
        decoding -- which even left a syntactically invalid string literal;
        the escaped forms are restored here.)
        """
        class StrangeFieldFile(object):
            # URL and filename deliberately contain characters that must be
            # escaped when rendered into HTML.
            url = "something?chapter=1&sect=2&copy=3&lang=en"

            def __unicode__(self):
                return u'''something<div onclick="alert('oops')">.jpg'''

        widget = ClearableFileInput()
        field = StrangeFieldFile()
        output = widget.render('my<div>file', field)
        # The raw URL/filename/name must never appear verbatim in the output;
        # only their HTML-escaped forms may.
        self.assertFalse(field.url in output)
        self.assertTrue(u'href="something?chapter=1&amp;sect=2&amp;copy=3&amp;lang=en"' in output)
        self.assertFalse(unicode(field) in output)
        self.assertTrue(u'something&lt;div onclick=&quot;alert(&#39;oops&#39;)&quot;&gt;.jpg' in output)
        self.assertTrue(u'my&lt;div&gt;file' in output)
        self.assertFalse(u'my<div>file' in output)

    def test_clear_input_renders_only_if_not_required(self):
        """
        A ClearableFileInput with is_required=True does not render a clear
        checkbox.
        """
        widget = ClearableFileInput()
        widget.is_required = True
        self.assertEqual(widget.render('myfile', FakeFieldFile()),
                u'Currently: <a href="something">something</a> <br />Change: <input type="file" name="myfile" />')

    def test_clear_input_renders_only_if_initial(self):
        """
        A ClearableFileInput instantiated with no initial value does not render
        a clear checkbox.
        """
        widget = ClearableFileInput()
        widget.is_required = False
        self.assertEqual(widget.render('myfile', None),
                u'<input type="file" name="myfile" />')

    def test_clear_input_checked_returns_false(self):
        """
        ClearableFileInput.value_from_datadict returns False if the clear
        checkbox is checked, if not required.
        """
        widget = ClearableFileInput()
        widget.is_required = False
        self.assertEqual(widget.value_from_datadict(
                data={'myfile-clear': True},
                files={},
                name='myfile'), False)

    def test_clear_input_checked_returns_false_only_if_not_required(self):
        """
        ClearableFileInput.value_from_datadict never returns False if the field
        is required.
        """
        widget = ClearableFileInput()
        widget.is_required = True
        f = SimpleUploadedFile('something.txt', 'content')
        # The uploaded file wins over the clear checkbox for required fields.
        self.assertEqual(widget.value_from_datadict(
                data={'myfile-clear': True},
                files={'myfile': f},
                name='myfile'), f)
| gpl-3.0 |
noxora/flask-base | flask/lib/python3.4/site-packages/passlib/hash.py | 5 | 3710 | """
passlib.hash - proxy object mapping hash scheme names -> handlers
==================
***** NOTICE *****
==================
This module does not actually contain any hashes. This file
is a stub that replaces itself with a proxy object.
This proxy object (passlib.registry._PasslibRegistryProxy)
handles lazy-loading hashes as they are requested.
The actual implementation of the various hashes is stored elsewhere,
mainly in the submodules of the ``passlib.handlers`` subpackage.
"""
#=============================================================================
# import proxy object and replace this module
#=============================================================================
# XXX: if any platform has problem w/ lazy modules, could support 'non-lazy'
# version which just imports all schemes known to list_crypt_handlers()
from passlib.registry import _proxy
import sys
# Replace this module in sys.modules with the proxy: subsequent attribute
# access on passlib.hash is served by the registry's lazy loader.
sys.modules[__name__] = _proxy
#=============================================================================
# HACK: the following bit of code is unreachable, but its presence seems to
# help make autocomplete work for certain IDEs such as PyCharm.
# this list is automatically regenerated using $SOURCE/admin/regen.py
#=============================================================================
#----------------------------------------------------
# begin autocomplete hack (autogenerated 2016-11-10)
#----------------------------------------------------
if False:
    from passlib.handlers.argon2 import argon2
    from passlib.handlers.bcrypt import bcrypt, bcrypt_sha256
    from passlib.handlers.cisco import cisco_asa, cisco_pix, cisco_type7
    from passlib.handlers.des_crypt import bigcrypt, bsdi_crypt, crypt16, des_crypt
    from passlib.handlers.digests import hex_md4, hex_md5, hex_sha1, hex_sha256, hex_sha512, htdigest
    from passlib.handlers.django import django_bcrypt, django_bcrypt_sha256, django_des_crypt, django_disabled, django_pbkdf2_sha1, django_pbkdf2_sha256, django_salted_md5, django_salted_sha1
    from passlib.handlers.fshp import fshp
    from passlib.handlers.ldap_digests import ldap_bcrypt, ldap_bsdi_crypt, ldap_des_crypt, ldap_md5, ldap_md5_crypt, ldap_plaintext, ldap_salted_md5, ldap_salted_sha1, ldap_sha1, ldap_sha1_crypt, ldap_sha256_crypt, ldap_sha512_crypt
    from passlib.handlers.md5_crypt import apr_md5_crypt, md5_crypt
    from passlib.handlers.misc import plaintext, unix_disabled, unix_fallback
    from passlib.handlers.mssql import mssql2000, mssql2005
    from passlib.handlers.mysql import mysql323, mysql41
    from passlib.handlers.oracle import oracle10, oracle11
    from passlib.handlers.pbkdf2 import atlassian_pbkdf2_sha1, cta_pbkdf2_sha1, dlitz_pbkdf2_sha1, grub_pbkdf2_sha512, ldap_pbkdf2_sha1, ldap_pbkdf2_sha256, ldap_pbkdf2_sha512, pbkdf2_sha1, pbkdf2_sha256, pbkdf2_sha512
    from passlib.handlers.phpass import phpass
    from passlib.handlers.postgres import postgres_md5
    from passlib.handlers.roundup import ldap_hex_md5, ldap_hex_sha1, roundup_plaintext
    from passlib.handlers.scram import scram
    from passlib.handlers.scrypt import scrypt
    from passlib.handlers.sha1_crypt import sha1_crypt
    from passlib.handlers.sha2_crypt import sha256_crypt, sha512_crypt
    from passlib.handlers.sun_md5_crypt import sun_md5_crypt
    from passlib.handlers.windows import bsd_nthash, lmhash, msdcc, msdcc2, nthash
#----------------------------------------------------
# end autocomplete hack
#----------------------------------------------------
#=============================================================================
# eoc
#=============================================================================
| mit |
roadmapper/ansible | test/units/modules/network/fortios/test_fortios_endpoint_control_settings.py | 21 | 10922 | # Copyright 2019 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <https://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json
import pytest
from mock import ANY
from ansible.module_utils.network.fortios.fortios import FortiOSHandler
try:
from ansible.modules.network.fortios import fortios_endpoint_control_settings
except ImportError:
pytest.skip("Could not load required modules for testing", allow_module_level=True)
@pytest.fixture(autouse=True)
def connection_mock(mocker):
    """Autouse fixture: patch the Connection class used by the module under
    test so no real FortiOS device is ever contacted."""
    return mocker.patch('ansible.modules.network.fortios.fortios_endpoint_control_settings.Connection')
# Handler shared by every test in this module.
# NOTE(review): `connection_mock` here is the fixture *function* object, not a
# per-test mock created by pytest -- confirm this stand-in is intentional.
fos_instance = FortiOSHandler(connection_mock)
def test_endpoint_control_settings_creation(mocker):
    """A successful API 'set' reports changed=True/no error and forwards the
    module arguments translated to the hyphenated FortiOS field names."""
    schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
    set_method_result = {'status': 'success', 'http_method': 'POST', 'http_status': 200}
    set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
    input_data = {
        'username': 'admin',
        'state': 'present',
        'endpoint_control_settings': {
            'download_custom_link': 'test_value_3',
            'download_location': 'fortiguard',
            'forticlient_avdb_update_interval': '5',
            'forticlient_dereg_unsupported_client': 'enable',
            'forticlient_ems_rest_api_call_timeout': '7',
            'forticlient_keepalive_interval': '8',
            'forticlient_offline_grace': 'enable',
            'forticlient_offline_grace_interval': '10',
            'forticlient_reg_key': 'test_value_11',
            'forticlient_reg_key_enforce': 'enable',
            'forticlient_reg_timeout': '13',
            'forticlient_sys_update_interval': '14',
            'forticlient_user_avatar': 'enable',
            'forticlient_warning_interval': '16'
        },
        'vdom': 'root'}
    is_error, changed, response = fortios_endpoint_control_settings.fortios_endpoint_control(input_data, fos_instance)
    # snake_case module arguments must be translated to the hyphenated
    # names the FortiOS REST API expects.
    expected_data = {
        'download-custom-link': 'test_value_3',
        'download-location': 'fortiguard',
        'forticlient-avdb-update-interval': '5',
        'forticlient-dereg-unsupported-client': 'enable',
        'forticlient-ems-rest-api-call-timeout': '7',
        'forticlient-keepalive-interval': '8',
        'forticlient-offline-grace': 'enable',
        'forticlient-offline-grace-interval': '10',
        'forticlient-reg-key': 'test_value_11',
        'forticlient-reg-key-enforce': 'enable',
        'forticlient-reg-timeout': '13',
        'forticlient-sys-update-interval': '14',
        'forticlient-user-avatar': 'enable',
        'forticlient-warning-interval': '16'
    }
    set_method_mock.assert_called_with('endpoint-control', 'settings', data=expected_data, vdom='root')
    schema_method_mock.assert_not_called()
    assert not is_error
    assert changed
    assert response['status'] == 'success'
    assert response['http_status'] == 200
def test_endpoint_control_settings_creation_fails(mocker):
    """Failed POST (HTTP 500): module must report an error and no change."""
    schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
    set_method_result = {'status': 'error', 'http_method': 'POST', 'http_status': 500}
    set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
    # Module-facing parameters (Ansible style: snake_case keys).
    input_data = {
        'username': 'admin',
        'state': 'present',
        'endpoint_control_settings': {
            'download_custom_link': 'test_value_3',
            'download_location': 'fortiguard',
            'forticlient_avdb_update_interval': '5',
            'forticlient_dereg_unsupported_client': 'enable',
            'forticlient_ems_rest_api_call_timeout': '7',
            'forticlient_keepalive_interval': '8',
            'forticlient_offline_grace': 'enable',
            'forticlient_offline_grace_interval': '10',
            'forticlient_reg_key': 'test_value_11',
            'forticlient_reg_key_enforce': 'enable',
            'forticlient_reg_timeout': '13',
            'forticlient_sys_update_interval': '14',
            'forticlient_user_avatar': 'enable',
            'forticlient_warning_interval': '16'
        },
        'vdom': 'root'}
    is_error, changed, response = fortios_endpoint_control_settings.fortios_endpoint_control(input_data, fos_instance)
    # Payload expected on the wire (FortiOS API style: hyphenated keys).
    expected_data = {
        'download-custom-link': 'test_value_3',
        'download-location': 'fortiguard',
        'forticlient-avdb-update-interval': '5',
        'forticlient-dereg-unsupported-client': 'enable',
        'forticlient-ems-rest-api-call-timeout': '7',
        'forticlient-keepalive-interval': '8',
        'forticlient-offline-grace': 'enable',
        'forticlient-offline-grace-interval': '10',
        'forticlient-reg-key': 'test_value_11',
        'forticlient-reg-key-enforce': 'enable',
        'forticlient-reg-timeout': '13',
        'forticlient-sys-update-interval': '14',
        'forticlient-user-avatar': 'enable',
        'forticlient-warning-interval': '16'
    }
    set_method_mock.assert_called_with('endpoint-control', 'settings', data=expected_data, vdom='root')
    schema_method_mock.assert_not_called()
    assert is_error
    assert not changed
    assert response['status'] == 'error'
    assert response['http_status'] == 500
def test_endpoint_control_settings_idempotent(mocker):
    """HTTP 404 on an already-absent object: not an error, nothing changed."""
    schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
    set_method_result = {'status': 'error', 'http_method': 'DELETE', 'http_status': 404}
    set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
    # Module-facing parameters (Ansible style: snake_case keys).
    input_data = {
        'username': 'admin',
        'state': 'present',
        'endpoint_control_settings': {
            'download_custom_link': 'test_value_3',
            'download_location': 'fortiguard',
            'forticlient_avdb_update_interval': '5',
            'forticlient_dereg_unsupported_client': 'enable',
            'forticlient_ems_rest_api_call_timeout': '7',
            'forticlient_keepalive_interval': '8',
            'forticlient_offline_grace': 'enable',
            'forticlient_offline_grace_interval': '10',
            'forticlient_reg_key': 'test_value_11',
            'forticlient_reg_key_enforce': 'enable',
            'forticlient_reg_timeout': '13',
            'forticlient_sys_update_interval': '14',
            'forticlient_user_avatar': 'enable',
            'forticlient_warning_interval': '16'
        },
        'vdom': 'root'}
    is_error, changed, response = fortios_endpoint_control_settings.fortios_endpoint_control(input_data, fos_instance)
    # Payload expected on the wire (FortiOS API style: hyphenated keys).
    expected_data = {
        'download-custom-link': 'test_value_3',
        'download-location': 'fortiguard',
        'forticlient-avdb-update-interval': '5',
        'forticlient-dereg-unsupported-client': 'enable',
        'forticlient-ems-rest-api-call-timeout': '7',
        'forticlient-keepalive-interval': '8',
        'forticlient-offline-grace': 'enable',
        'forticlient-offline-grace-interval': '10',
        'forticlient-reg-key': 'test_value_11',
        'forticlient-reg-key-enforce': 'enable',
        'forticlient-reg-timeout': '13',
        'forticlient-sys-update-interval': '14',
        'forticlient-user-avatar': 'enable',
        'forticlient-warning-interval': '16'
    }
    set_method_mock.assert_called_with('endpoint-control', 'settings', data=expected_data, vdom='root')
    schema_method_mock.assert_not_called()
    assert not is_error
    assert not changed
    assert response['status'] == 'error'
    assert response['http_status'] == 404
def test_endpoint_control_settings_filter_foreign_attributes(mocker):
    """Keys outside the module schema must be dropped from the API payload.

    ``random_attribute_not_valid`` is present in the input but must not
    appear in the data sent to FortiOSHandler.set.
    """
    schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
    set_method_result = {'status': 'success', 'http_method': 'POST', 'http_status': 200}
    set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
    # Module-facing parameters; note the foreign key on the first line.
    input_data = {
        'username': 'admin',
        'state': 'present',
        'endpoint_control_settings': {
            'random_attribute_not_valid': 'tag',
            'download_custom_link': 'test_value_3',
            'download_location': 'fortiguard',
            'forticlient_avdb_update_interval': '5',
            'forticlient_dereg_unsupported_client': 'enable',
            'forticlient_ems_rest_api_call_timeout': '7',
            'forticlient_keepalive_interval': '8',
            'forticlient_offline_grace': 'enable',
            'forticlient_offline_grace_interval': '10',
            'forticlient_reg_key': 'test_value_11',
            'forticlient_reg_key_enforce': 'enable',
            'forticlient_reg_timeout': '13',
            'forticlient_sys_update_interval': '14',
            'forticlient_user_avatar': 'enable',
            'forticlient_warning_interval': '16'
        },
        'vdom': 'root'}
    is_error, changed, response = fortios_endpoint_control_settings.fortios_endpoint_control(input_data, fos_instance)
    # Expected payload: hyphenated keys only, foreign attribute filtered out.
    expected_data = {
        'download-custom-link': 'test_value_3',
        'download-location': 'fortiguard',
        'forticlient-avdb-update-interval': '5',
        'forticlient-dereg-unsupported-client': 'enable',
        'forticlient-ems-rest-api-call-timeout': '7',
        'forticlient-keepalive-interval': '8',
        'forticlient-offline-grace': 'enable',
        'forticlient-offline-grace-interval': '10',
        'forticlient-reg-key': 'test_value_11',
        'forticlient-reg-key-enforce': 'enable',
        'forticlient-reg-timeout': '13',
        'forticlient-sys-update-interval': '14',
        'forticlient-user-avatar': 'enable',
        'forticlient-warning-interval': '16'
    }
    set_method_mock.assert_called_with('endpoint-control', 'settings', data=expected_data, vdom='root')
    schema_method_mock.assert_not_called()
    assert not is_error
    assert changed
    assert response['status'] == 'success'
    assert response['http_status'] == 200
| gpl-3.0 |
ismangil/pjproject | tests/pjsua/mod_sipp.py | 19 | 8562 | # $Id$
## Automatic test module for SIPp.
##
## This module will need a test driver for each SIPp scenario:
## - For simple scenario, i.e: make/receive call (including auth), this
## test module can auto-generate a default test driver, i.e: make call
## or apply auto answer. Just name the SIPp scenario using "uas" or
## "uac" prefix accordingly.
## - Custom test driver can be defined in a python script file containing
## a list of the PJSUA instances and another list for PJSUA expects/
## commands. The custom test driver file must use the same filename as
## the SIPp XML scenario. See samples of SIPp scenario + its driver
## in tests/pjsua/scripts-sipp/ folder for detail.
##
## Here are defined macros that can be used in the custom driver:
## - $SIPP_PORT : SIPp binding port
## - $SIPP_URI : SIPp SIP URI
## - $PJSUA_PORT[N] : binding port of PJSUA instance #N
## - $PJSUA_URI[N] : SIP URI of PJSUA instance #N
import ctypes
import time
import imp
import sys
import os
import re
import subprocess
from inc_cfg import *
import inc_const
# flags that test is running in Unix
G_INUNIX = False
if sys.platform.lower().find("win32")!=-1 or sys.platform.lower().find("microsoft")!=-1:
    G_INUNIX = False
else:
    G_INUNIX = True
# /dev/null handle, for redirecting output when SIPP is not in background mode
FDEVNULL = None
# SIPp executable path and param
#SIPP_PATH = '"C:\\devs\\bin\\Sipp_3.2\\sipp.exe"'
SIPP_PATH = 'sipp'
SIPP_PORT = 6000
# -m 1: stop after one call; -i/-p: bind address and port
SIPP_PARAM = "-m 1 -i 127.0.0.1 -p " + str(SIPP_PORT)
SIPP_TIMEOUT = 60
# On BG mode, SIPp doesn't require special terminal
# On non-BG mode, on win, it needs env var: "TERMINFO=c:\cygwin\usr\share\terminfo"
# TODO: on unix with BG mode, waitpid() always fails, need to be fixed
SIPP_BG_MODE = False
#SIPP_BG_MODE = not G_INUNIX
# Will be updated based on the test driver file (a .py file whose the same name as SIPp XML file)
PJSUA_INST_PARAM = []
PJSUA_EXPECTS = []
# Default PJSUA param if test driver is not available:
# - no-tcp as SIPp is on UDP only
# - id, username, and realm: to allow PJSUA sending re-INVITE with auth after receiving 401/407 response
PJSUA_DEF_PARAM = "--null-audio --max-calls=1 --no-tcp --id=sip:a@localhost --username=a --realm=*"
# Get SIPp scenario (XML file); ARGS comes from inc_cfg (star import above).
SIPP_SCEN_XML = ""
if ARGS[1].endswith('.xml'):
    SIPP_SCEN_XML = ARGS[1]
else:
    exit(-99)
# Functions for resolving macros in the test driver
def resolve_pjsua_port(mo):
    """re.sub callback: expand $PJSUA_PORT[N] to instance N's SIP port."""
    return str(PJSUA_INST_PARAM[int(mo.group(1))].sip_port)
def resolve_pjsua_uri(mo):
    """re.sub callback: expand $PJSUA_URI[N] to instance N's URI (brackets stripped)."""
    return PJSUA_INST_PARAM[int(mo.group(1))].uri[1:-1]
def resolve_driver_macros(st):
    """Expand the $SIPP_*/$PJSUA_* macros in a test-driver string.

    :param st: a line from a test driver file, possibly containing
        $SIPP_PORT, $SIPP_URI, $PJSUA_PORT[N] or $PJSUA_URI[N] macros.
    :return: the string with all macros replaced by concrete values.
    """
    # Raw string literals: "\$" in a plain string is an invalid escape
    # sequence (DeprecationWarning, later SyntaxWarning, in Python 3).
    st = re.sub(r"\$SIPP_PORT", str(SIPP_PORT), st)
    st = re.sub(r"\$SIPP_URI", "sip:sipp@127.0.0.1:" + str(SIPP_PORT), st)
    st = re.sub(r"\$PJSUA_PORT\[(\d+)\]", resolve_pjsua_port, st)
    st = re.sub(r"\$PJSUA_URI\[(\d+)\]", resolve_pjsua_uri, st)
    return st
# Init test driver
if os.access(SIPP_SCEN_XML[:-4]+".py", os.R_OK):
    # Load test driver file (the corresponding .py file), if any
    cfg_file = imp.load_source("cfg_file", SIPP_SCEN_XML[:-4]+".py")
    for ua_idx, ua_param in enumerate(cfg_file.PJSUA):
        ua_param = resolve_driver_macros(ua_param)
        PJSUA_INST_PARAM.append(InstanceParam("pjsua"+str(ua_idx), ua_param))
    PJSUA_EXPECTS = cfg_file.PJSUA_EXPECTS
else:
    # Generate default test driver: scenario filename prefix decides the role.
    if os.path.basename(SIPP_SCEN_XML)[0:3] == "uas":
        # auto make call when SIPp is as UAS
        ua_param = PJSUA_DEF_PARAM + " sip:127.0.0.1:" + str(SIPP_PORT)
    else:
        # auto answer when SIPp is as UAC
        ua_param = PJSUA_DEF_PARAM + " --auto-answer=200"
    PJSUA_INST_PARAM.append(InstanceParam("pjsua", ua_param))
# Start SIPp process, returning PID
def start_sipp():
    """Launch SIPp against the first PJSUA instance.

    Returns a subprocess.Popen object in foreground mode (or None on
    immediate failure); in background mode returns the SIPp child PID
    (on win32, a process handle instead of a PID).
    """
    global SIPP_BG_MODE
    sipp_proc = None
    sipp_param = SIPP_PARAM + " -sf " + SIPP_SCEN_XML
    if SIPP_BG_MODE:
        sipp_param = sipp_param + " -bg"
    if SIPP_TIMEOUT:
        sipp_param = sipp_param + " -timeout "+str(SIPP_TIMEOUT)+"s -timeout_error" + " -deadcall_wait "+str(SIPP_TIMEOUT)+"s"
    # add target param
    sipp_param = sipp_param + " 127.0.0.1:" + str(PJSUA_INST_PARAM[0].sip_port)
    # run SIPp
    fullcmd = os.path.normpath(SIPP_PATH) + " " + sipp_param
    print "Running SIPP: " + fullcmd
    if SIPP_BG_MODE:
        sipp_proc = subprocess.Popen(fullcmd, bufsize=0, stdin=subprocess.PIPE, stdout=subprocess.PIPE, shell=G_INUNIX, universal_newlines=False)
    else:
        # redirect output to NULL
        global FDEVNULL
        #FDEVNULL = open(os.devnull, 'w')
        FDEVNULL = open("logs/sipp_output.tmp", 'w')
        sipp_proc = subprocess.Popen(fullcmd, shell=G_INUNIX, stdout=FDEVNULL, stderr=FDEVNULL)
    if not SIPP_BG_MODE:
        if sipp_proc == None or sipp_proc.poll():
            return None
        return sipp_proc
    else:
        # get SIPp child process PID by scraping "PID=[nnn]" from its stdout
        pid = 0
        r = re.compile("PID=\[(\d+)\]", re.I)
        while True:
            line = sipp_proc.stdout.readline()
            pid_r = r.search(line)
            if pid_r:
                pid = int(pid_r.group(1))
                break
            if not sipp_proc.poll():
                break
        if pid != 0:
            # Win specific: get process handle from PID, as on win32, os.waitpid() takes process handle instead of pid
            if (sys.platform == "win32"):
                SYNCHRONIZE = 0x00100000
                PROCESS_QUERY_INFORMATION = 0x0400
                hnd = ctypes.windll.kernel32.OpenProcess(SYNCHRONIZE | PROCESS_QUERY_INFORMATION, False, pid)
                pid = hnd
        return pid
# Wait SIPp process to exit, returning SIPp exit code
def wait_sipp(sipp):
    """Block until SIPp exits and return its exit code.

    *sipp* is the value returned by start_sipp(): a Popen object in
    foreground mode, or a PID/handle in background mode. Returns -99
    when waitpid keeps failing in background mode.
    """
    if not SIPP_BG_MODE:
        global FDEVNULL
        sipp.wait()
        FDEVNULL.close()
        return sipp.returncode
    else:
        print "Waiting SIPp (PID=" + str(sipp) + ") to exit.."
        wait_cnt = 0
        while True:
            try:
                wait_cnt = wait_cnt + 1
                [pid_, ret_code] = os.waitpid(sipp, 0)
                if sipp == pid_:
                    #print "SIPP returned ", ret_code
                    # high byte of the waitpid status carries the exit code
                    ret_code = ret_code >> 8
                    # Win specific: Close process handle
                    if (sys.platform == "win32"):
                        ctypes.windll.kernel32.CloseHandle(sipp)
                    return ret_code
            except os.error:
                if wait_cnt <= 5:
                    print "Retry ("+str(wait_cnt)+") waiting SIPp.."
                else:
                    return -99
# Execute PJSUA flow
def exec_pjsua_expects(t, sipp):
    """Run the PJSUA_EXPECTS script against the running PJSUA instances.

    Consumes PJSUA_EXPECTS (list of [instance-index, expect-pattern,
    command]); returns an error description string, or "" on success.
    """
    # Get all PJSUA instances
    ua = []
    for ua_idx in range(len(PJSUA_INST_PARAM)):
        ua.append(t.process[ua_idx])
    ua_err_st = ""
    while len(PJSUA_EXPECTS):
        expect = PJSUA_EXPECTS.pop(0)
        ua_idx = expect[0]
        expect_st = expect[1]
        send_cmd = resolve_driver_macros(expect[2])
        # Handle exception in pjsua flow, to avoid zombie SIPp process
        try:
            if expect_st != "":
                ua[ua_idx].expect(expect_st, raise_on_error = True)
            if send_cmd != "":
                ua[ua_idx].send(send_cmd)
        except TestError, e:
            ua_err_st = e.desc
            break;
        except:
            ua_err_st = "Unknown error"
            break;
    # Need to poll here for handling these cases:
    # - If there is no PJSUA EXPECT scenario, we must keep polling the stdout,
    #   otherwise PJSUA process may stuck (due to stdout pipe buffer full?).
    # - last PJSUA_EXPECT contains a pjsua command that needs time to
    #   finish, for example "v" (re-INVITE), the SIPp XML scenario may expect
    #   that re-INVITE transaction to be completed and without stdout poll
    #   PJSUA process may stuck.
    # Ideally the poll should be done contiunously until SIPp process is
    # terminated.
    # Update: now pjsua stdout is polled continuously by a dedicated thread,
    # so the poll is no longer needed
    #for ua_idx in range(len(ua)):
    #    ua[ua_idx].expect(inc_const.STDOUT_REFRESH, raise_on_error = False)
    return ua_err_st
def sipp_err_to_str(err_code):
    """Translate a SIPp exit code into a human-readable description."""
    descriptions = {
        0: "All calls were successful",
        1: "At least one call failed",
        97: "exit on internal command. Calls may have been processed",
        99: "Normal exit without calls processed",
        -1: "Fatal error (timeout)",
        -2: "Fatal error binding a socket",
    }
    return descriptions.get(err_code, "Unknown error")
# Test body function
def TEST_FUNC(t):
    """Drive one SIPp-vs-PJSUA test run; raise TestError on any failure."""
    sipp_ret_code = 0
    ua_err_st = ""
    sipp = start_sipp()
    if not sipp:
        raise TestError("Failed starting SIPp")
    ua_err_st = exec_pjsua_expects(t, sipp)
    # always reap SIPp before reporting a PJSUA-side error (no zombie)
    sipp_ret_code = wait_sipp(sipp)
    if ua_err_st != "":
        raise TestError(ua_err_st)
    if sipp_ret_code:
        # sign-extend the exit code to recover SIPp's negative codes
        rc = ctypes.c_byte(sipp_ret_code).value
        raise TestError("SIPp returned error " + str(rc) + ": " + sipp_err_to_str(rc))
# Here where it all comes together: register the test with the framework,
# named after the scenario file (without the .xml extension).
test = TestParam(SIPP_SCEN_XML[:-4],
                 PJSUA_INST_PARAM,
                 TEST_FUNC)
| gpl-2.0 |
claudep/pootle | pootle/core/dateparse.py | 10 | 1145 | # -*- coding: utf-8 -*-
#
# Copyright (C) Pootle contributors.
#
# This file is a part of the Pootle project. It is distributed under the GPL3
# or later license. See the LICENSE file for a copy of the license and the
# AUTHORS file for copyright and authorship information.
from django.utils import dateparse as django_dateparse
def parse_datetime(value):
    """Parse an ISO 8601 string into a ``datetime`` or ``date`` object.

    Django's own date parsing facilities separate date parsing from
    datetime parsing; this wrapper accepts either form in one call.

    :return: a `datetime` or a `date` object, or `None` when the input
        represents neither a valid date nor a valid datetime.
    """
    parsed = None
    try:
        parsed = django_dateparse.parse_datetime(value)
    except ValueError:
        pass
    # Not a valid datetime -- fall back to plain date parsing.
    if parsed is None:
        try:
            parsed = django_dateparse.parse_date(value)
        except ValueError:
            parsed = None
    return parsed
| gpl-3.0 |
zding5/Microblog-Flask | flask/lib/python2.7/site-packages/sqlalchemy/util/deprecations.py | 21 | 4403 | # util/deprecations.py
# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Helpers related to deprecation of functions, methods, classes, other
functionality."""
from .. import exc
import warnings
import re
from .langhelpers import decorator
def warn_deprecated(msg, stacklevel=3):
    """Emit *msg* as an SADeprecationWarning at the given stack level."""
    warnings.warn(msg, exc.SADeprecationWarning, stacklevel=stacklevel)
def warn_pending_deprecation(msg, stacklevel=3):
    """Emit *msg* as an SAPendingDeprecationWarning at the given stack level."""
    warnings.warn(msg, exc.SAPendingDeprecationWarning, stacklevel=stacklevel)
def deprecated(version, message=None, add_deprecation_to_docstring=True):
    """Decorator factory: warn with SADeprecationWarning on each call.

    :param version: version in which the deprecation took effect;
        only used in the docstring header.

    :param message:
      If provided, issue message in the warning.  A sensible default
      is used if not provided.

    :param add_deprecation_to_docstring:
      Default True.  If False, the wrapped function's __doc__ is left
      as-is.  If True, the 'message' is prepended to the docs if
      provided, or sensible default if message is omitted.
    """
    header = (
        ".. deprecated:: %s %s" % (version, message or '')
        if add_deprecation_to_docstring
        else None
    )

    if message is None:
        message = "Call to deprecated function %(func)s"

    def decorate(fn):
        warning_text = message % dict(func=fn.__name__)
        return _decorate_with_warning(
            fn, exc.SADeprecationWarning, warning_text, header)

    return decorate
def pending_deprecation(version, message=None,
                        add_deprecation_to_docstring=True):
    """Decorator factory: warn with SAPendingDeprecationWarning on each call.

    :param version:
      An approximate future version at which point the pending deprecation
      will become deprecated.  Not used in messaging.

    :param message:
      If provided, issue message in the warning.  A sensible default
      is used if not provided.

    :param add_deprecation_to_docstring:
      Default True.  If False, the wrapped function's __doc__ is left
      as-is.  If True, the 'message' is prepended to the docs if
      provided, or sensible default if message is omitted.
    """
    header = (
        ".. deprecated:: %s (pending) %s" % (version, message or '')
        if add_deprecation_to_docstring
        else None
    )

    if message is None:
        message = "Call to deprecated function %(func)s"

    def decorate(fn):
        warning_text = message % dict(func=fn.__name__)
        return _decorate_with_warning(
            fn, exc.SAPendingDeprecationWarning, warning_text, header)

    return decorate
def _sanitize_restructured_text(text):
def repl(m):
type_, name = m.group(1, 2)
if type_ in ("func", "meth"):
name += "()"
return name
return re.sub(r'\:(\w+)\:`~?\.?(.+?)`', repl, text)
def _decorate_with_warning(func, wtype, message, docstring_header=None):
    """Wrap a function with a warnings.warn and augmented docstring."""
    # Strip reST roles so the runtime warning reads as plain text.
    message = _sanitize_restructured_text(message)

    # NOTE(review): `decorator` comes from .langhelpers; presumably it
    # preserves the wrapped function's signature -- confirm there.
    @decorator
    def warned(fn, *args, **kwargs):
        warnings.warn(wtype(message), stacklevel=3)
        return fn(*args, **kwargs)

    doc = func.__doc__ is not None and func.__doc__ or ''
    if docstring_header is not None:
        docstring_header %= dict(func=func.__name__)
        # Inject the ".. deprecated::" header after the summary line.
        doc = inject_docstring_text(doc, docstring_header, 1)

    decorated = warned(func)
    decorated.__doc__ = doc
    return decorated
import textwrap
def _dedent_docstring(text):
split_text = text.split("\n", 1)
if len(split_text) == 1:
return text
else:
firstline, remaining = split_text
if not firstline.startswith(" "):
return firstline + "\n" + textwrap.dedent(remaining)
else:
return textwrap.dedent(text)
def inject_docstring_text(doctext, injecttext, pos):
    """Insert *injecttext* into *doctext* at the *pos*-th blank line.

    Both texts are dedented first; the injected block is preceded by a
    blank line when its first line is non-empty.
    """
    lines = _dedent_docstring(doctext or "").split('\n')
    extra = textwrap.dedent(injecttext).split("\n")
    if extra[0]:
        extra.insert(0, "")
    # candidate insertion points: start of doc plus every blank line
    anchors = [0] + [num for num, line in enumerate(lines) if not line.strip()]
    target = anchors[min(pos, len(anchors) - 1)]
    return "\n".join(lines[:target] + extra + lines[target:])
| mit |
igemsoftware/SYSU-Software2013 | project/Python27/Lib/test/test_richcmp.py | 129 | 11466 | # Tests for rich comparisons
import unittest
from test import test_support
import operator
class Number:
    """Wrapper around an int that implements only the six rich comparisons.

    __cmp__ deliberately fails: the tests assert that rich comparison
    methods are used instead of three-way comparison.
    """
    def __init__(self, x):
        self.x = x
    def __lt__(self, other):
        return self.x < other
    def __le__(self, other):
        return self.x <= other
    def __eq__(self, other):
        return self.x == other
    def __ne__(self, other):
        return self.x != other
    def __gt__(self, other):
        return self.x > other
    def __ge__(self, other):
        return self.x >= other
    def __cmp__(self, other):
        raise test_support.TestFailed, "Number.__cmp__() should not be called"
    def __repr__(self):
        return "Number(%r)" % (self.x, )
class Vector:
    """Sequence whose rich comparisons return element-wise boolean Vectors.

    Unhashable, not usable in boolean context, and __cmp__ must never be
    reached -- the tests verify rich comparison dispatch.
    """
    def __init__(self, data):
        self.data = data
    def __len__(self):
        return len(self.data)
    def __getitem__(self, i):
        return self.data[i]
    def __setitem__(self, i, v):
        self.data[i] = v
    __hash__ = None # Vectors cannot be hashed
    def __nonzero__(self):
        raise TypeError, "Vectors cannot be used in Boolean contexts"
    def __cmp__(self, other):
        raise test_support.TestFailed, "Vector.__cmp__() should not be called"
    def __repr__(self):
        return "Vector(%r)" % (self.data, )
    def __lt__(self, other):
        return Vector([a < b for a, b in zip(self.data, self.__cast(other))])
    def __le__(self, other):
        return Vector([a <= b for a, b in zip(self.data, self.__cast(other))])
    def __eq__(self, other):
        return Vector([a == b for a, b in zip(self.data, self.__cast(other))])
    def __ne__(self, other):
        return Vector([a != b for a, b in zip(self.data, self.__cast(other))])
    def __gt__(self, other):
        return Vector([a > b for a, b in zip(self.data, self.__cast(other))])
    def __ge__(self, other):
        return Vector([a >= b for a, b in zip(self.data, self.__cast(other))])
    def __cast(self, other):
        # accept either a Vector or any same-length sequence
        if isinstance(other, Vector):
            other = other.data
        if len(self.data) != len(other):
            raise ValueError, "Cannot compare vectors of different length"
        return other
# Map each comparison name to three equivalent callables: the operator
# syntax (via lambda), the operator-module function, and its dunder alias.
opmap = {
    "lt": (lambda a,b: a< b, operator.lt, operator.__lt__),
    "le": (lambda a,b: a<=b, operator.le, operator.__le__),
    "eq": (lambda a,b: a==b, operator.eq, operator.__eq__),
    "ne": (lambda a,b: a!=b, operator.ne, operator.__ne__),
    "gt": (lambda a,b: a> b, operator.gt, operator.__gt__),
    "ge": (lambda a,b: a>=b, operator.ge, operator.__ge__)
}
class VectorTest(unittest.TestCase):
    """Rich comparisons that return non-boolean (element-wise) results."""
    def checkfail(self, error, opname, *args):
        for op in opmap[opname]:
            self.assertRaises(error, op, *args)
    def checkequal(self, opname, a, b, expres):
        for op in opmap[opname]:
            realres = op(a, b)
            # can't use assertEqual(realres, expres) here
            self.assertEqual(len(realres), len(expres))
            for i in xrange(len(realres)):
                # results are bool, so we can use "is" here
                self.assertTrue(realres[i] is expres[i])
    def test_mixed(self):
        # check that comparisons involving Vector objects
        # which return rich results (i.e. Vectors with itemwise
        # comparison results) work
        a = Vector(range(2))
        b = Vector(range(3))
        # all comparisons should fail for different length
        for opname in opmap:
            self.checkfail(ValueError, opname, a, b)
        a = range(5)
        b = 5 * [2]
        # try mixed arguments (but not (a, b) as that won't return a bool vector)
        args = [(a, Vector(b)), (Vector(a), b), (Vector(a), Vector(b))]
        for (a, b) in args:
            self.checkequal("lt", a, b, [True, True, False, False, False])
            self.checkequal("le", a, b, [True, True, True, False, False])
            self.checkequal("eq", a, b, [False, False, True, False, False])
            self.checkequal("ne", a, b, [True, True, False, True, True ])
            self.checkequal("gt", a, b, [False, False, False, True, True ])
            self.checkequal("ge", a, b, [False, False, True, True, True ])
        for ops in opmap.itervalues():
            for op in ops:
                # calls __nonzero__, which should fail
                self.assertRaises(TypeError, bool, op(a, b))
class NumberTest(unittest.TestCase):
    """Rich comparisons on Number must agree with plain int comparisons."""
    def test_basic(self):
        # Check that comparisons involving Number objects
        # give the same results give as comparing the
        # corresponding ints
        for a in xrange(3):
            for b in xrange(3):
                for typea in (int, Number):
                    for typeb in (int, Number):
                        if typea==typeb==int:
                            continue # the combination int, int is useless
                        ta = typea(a)
                        tb = typeb(b)
                        for ops in opmap.itervalues():
                            for op in ops:
                                realoutcome = op(a, b)
                                testoutcome = op(ta, tb)
                                self.assertEqual(realoutcome, testoutcome)
    def checkvalue(self, opname, a, b, expres):
        for typea in (int, Number):
            for typeb in (int, Number):
                ta = typea(a)
                tb = typeb(b)
                for op in opmap[opname]:
                    realres = op(ta, tb)
                    # unwrap a Number result back to its int payload
                    realres = getattr(realres, "x", realres)
                    self.assertTrue(realres is expres)
    def test_values(self):
        # check all operators and all comparison results
        self.checkvalue("lt", 0, 0, False)
        self.checkvalue("le", 0, 0, True )
        self.checkvalue("eq", 0, 0, True )
        self.checkvalue("ne", 0, 0, False)
        self.checkvalue("gt", 0, 0, False)
        self.checkvalue("ge", 0, 0, True )
        self.checkvalue("lt", 0, 1, True )
        self.checkvalue("le", 0, 1, True )
        self.checkvalue("eq", 0, 1, False)
        self.checkvalue("ne", 0, 1, True )
        self.checkvalue("gt", 0, 1, False)
        self.checkvalue("ge", 0, 1, False)
        self.checkvalue("lt", 1, 0, False)
        self.checkvalue("le", 1, 0, False)
        self.checkvalue("eq", 1, 0, False)
        self.checkvalue("ne", 1, 0, True )
        self.checkvalue("gt", 1, 0, True )
        self.checkvalue("ge", 1, 0, True )
class MiscTest(unittest.TestCase):
    """Misc rich-comparison corner cases: bad methods, not-operator, recursion."""
    def test_misbehavin(self):
        class Misb:
            def __lt__(self_, other): return 0
            def __gt__(self_, other): return 0
            def __eq__(self_, other): return 0
            def __le__(self_, other): self.fail("This shouldn't happen")
            def __ge__(self_, other): self.fail("This shouldn't happen")
            def __ne__(self_, other): self.fail("This shouldn't happen")
            def __cmp__(self_, other): raise RuntimeError, "expected"
        a = Misb()
        b = Misb()
        self.assertEqual(a<b, 0)
        self.assertEqual(a==b, 0)
        self.assertEqual(a>b, 0)
        # cmp() must route to __cmp__, which raises
        self.assertRaises(RuntimeError, cmp, a, b)
    def test_not(self):
        # Check that exceptions in __nonzero__ are properly
        # propagated by the not operator
        import operator
        class Exc(Exception):
            pass
        class Bad:
            def __nonzero__(self):
                raise Exc
        def do(bad):
            not bad
        for func in (do, operator.not_):
            self.assertRaises(Exc, func, Bad())
    def test_recursion(self):
        # Check that comparison for recursive objects fails gracefully
        from UserList import UserList
        a = UserList()
        b = UserList()
        a.append(b)
        b.append(a)
        self.assertRaises(RuntimeError, operator.eq, a, b)
        self.assertRaises(RuntimeError, operator.ne, a, b)
        self.assertRaises(RuntimeError, operator.lt, a, b)
        self.assertRaises(RuntimeError, operator.le, a, b)
        self.assertRaises(RuntimeError, operator.gt, a, b)
        self.assertRaises(RuntimeError, operator.ge, a, b)
        b.append(17)
        # Even recursive lists of different lengths are different,
        # but they cannot be ordered
        self.assertTrue(not (a == b))
        self.assertTrue(a != b)
        self.assertRaises(RuntimeError, operator.lt, a, b)
        self.assertRaises(RuntimeError, operator.le, a, b)
        self.assertRaises(RuntimeError, operator.gt, a, b)
        self.assertRaises(RuntimeError, operator.ge, a, b)
        a.append(17)
        self.assertRaises(RuntimeError, operator.eq, a, b)
        self.assertRaises(RuntimeError, operator.ne, a, b)
        a.insert(0, 11)
        b.insert(0, 12)
        self.assertTrue(not (a == b))
        self.assertTrue(a != b)
        self.assertTrue(a < b)
class DictTest(unittest.TestCase):
    """Dict equality with keys/values supporting only ==/!= (complex numbers)."""
    def test_dicts(self):
        # Verify that __eq__ and __ne__ work for dicts even if the keys and
        # values don't support anything other than __eq__ and __ne__ (and
        # __hash__). Complex numbers are a fine example of that.
        import random
        imag1a = {}
        for i in range(50):
            imag1a[random.randrange(100)*1j] = random.randrange(100)*1j
        items = imag1a.items()
        random.shuffle(items)
        # same entries, different insertion order
        imag1b = {}
        for k, v in items:
            imag1b[k] = v
        # `k`/`v` leak from the loop above; perturb that one entry
        imag2 = imag1b.copy()
        imag2[k] = v + 1.0
        self.assertTrue(imag1a == imag1a)
        self.assertTrue(imag1a == imag1b)
        self.assertTrue(imag2 == imag2)
        self.assertTrue(imag1a != imag2)
        # ordering comparisons on complex values must raise
        for opname in ("lt", "le", "gt", "ge"):
            for op in opmap[opname]:
                self.assertRaises(TypeError, op, imag1a, imag2)
class ListTest(unittest.TestCase):
    """Rich comparisons on lists, including exception propagation from items."""
    def test_coverage(self):
        # exercise all comparisons for lists
        x = [42]
        self.assertIs(x<x, False)
        self.assertIs(x<=x, True)
        self.assertIs(x==x, True)
        self.assertIs(x!=x, False)
        self.assertIs(x>x, False)
        self.assertIs(x>=x, True)
        y = [42, 42]
        self.assertIs(x<y, True)
        self.assertIs(x<=y, True)
        self.assertIs(x==y, False)
        self.assertIs(x!=y, True)
        self.assertIs(x>y, False)
        self.assertIs(x>=y, False)
    def test_badentry(self):
        # make sure that exceptions for item comparison are properly
        # propagated in list comparisons
        class Exc(Exception):
            pass
        class Bad:
            def __eq__(self, other):
                raise Exc
        x = [Bad()]
        y = [Bad()]
        for op in opmap["eq"]:
            self.assertRaises(Exc, op, x, y)
    def test_goodentry(self):
        # This test exercises the final call to PyObject_RichCompare()
        # in Objects/listobject.c::list_richcompare()
        class Good:
            def __lt__(self, other):
                return True
        x = [Good()]
        y = [Good()]
        for op in opmap["lt"]:
            self.assertIs(op(x, y), True)
def test_main():
    """Run all test cases; DictTest runs under the py3k-warning filter."""
    test_support.run_unittest(VectorTest, NumberTest, MiscTest, ListTest)
    with test_support.check_py3k_warnings(("dict inequality comparisons "
                                           "not supported in 3.x",
                                           DeprecationWarning)):
        test_support.run_unittest(DictTest)
if __name__ == "__main__":
test_main()
| mit |
DevicePilot/synth | synth/devices/helpers/people_names.py | 1 | 6446 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2017 DevicePilot Ltd.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from common.utils import hashIt
female_names=["Amelia","Olivia","Isla","Emily","Poppy","Ava","Isabella","Jessica","Lily","Sophie","Grace","Sophia","Mia","Evie","Ruby","Ella","Scarlett","Isabelle","Chloe","Sienna","Freya","Phoebe","Charlotte","Daisy","Alice","Florence","Eva","Sofia","Millie","Lucy","Evelyn","Elsie","Rosie","Imogen","Lola","Matilda","Elizabeth","Layla","Holly","Lilly","Molly","Erin","Ellie","Maisie","Maya","Abigail","Eliza","Georgia","Jasmine","Esme","Willow","Bella","Annabelle","Ivy","Amber","Emilia","Emma","Summer","Hannah","Eleanor","Harriet","Rose","Amelie","Lexi","Megan","Gracie","Zara","Lacey","Martha","Anna","Violet","Darcey","Maria","Maryam","Brooke","Aisha","Katie","Leah","Thea","Darcie","Hollie","Amy","Mollie","Heidi","Lottie","Bethany","Francesca","Faith","Harper","Nancy","Beatrice","Isabel","Darcy","Lydia","Sarah","Sara","Julia","Victoria","Zoe","Robyn"]
male_names=["Oliver","Jack","Harry","Jacob","Charlie","Thomas","George","Oscar","James","William","Noah","Alfie","Joshua","Muhammad","Henry","Leo","Archie","Ethan","Joseph","Freddie","Samuel","Alexander","Logan","Daniel","Isaac","Max","Mohammed","Benjamin","Mason","Lucas","Edward","Harrison","Jake","Dylan","Riley","Finley","Theo","Sebastian","Adam","Zachary","Arthur","Toby","Jayden","Luke","Harley","Lewis","Tyler","Harvey","Matthew","David","Reuben","Michael","Elijah","Kian","Tommy","Mohammad","Blake","Luca","Theodore","Stanley","Jenson","Nathan","Charles","Frankie","Jude","Teddy","Louie","Louis","Ryan","Hugo","Bobby","Elliott","Dexter","Ollie","Alex","Liam","Kai","Gabriel","Connor","Aaron","Frederick","Callum","Elliot","Albert","Leon","Ronnie","Rory","Jamie","Austin","Seth","Ibrahim","Owen","Caleb","Ellis","Sonny","Robert","Joey","Felix","Finlay","Jackson"]
last_names = ["Adams","Aigner","Allen","Andersen","Anderson","André","Andreassen","Angelopoulos","Antoniou","Athanasiadis","Auer","Babić","Bailey","Baker","Bakker","Barbieri","Barišić","Barnes","Bauer","Baumgartner","Becker","Bell","Bennett","Berg","Berger","Bernard","Bertrand","Bianchi","Binder","Blažević","Bogdanov","Bonnet","Bos","Bošnjak","Božić","Brooks","Brouwer","Brown","Brunner","Bruno","Butler","Campbell","Carter","Caruso","Christensen","Christiansen","Claes","Clark","Clarke","Collins","Colombo","Conti","Cook","Cooper","Costa","Cox","Cruz","Dahl","David","Davies","Davis","De Boer","De Graaf","De Groot","De Jong","De Luca","De Smet","De Vries","De Wit","Dekker","Diaz","Dijkstra","Dimitriadis","Dubois","Dubois","Dupont","Durand","Ebner","Eder","Edwards","Egger","Eriksen","Esposito","Evans","Ferrari","Filipović","Fischer","Fisher","Flores","Fontana","Foster","Fournier","François","Fuchs","Gallo","Garcia","Garcia","Georgiou","Giordano","Girard","Golubev","Gomez","Gonzalez","Goossens","Gray","Greco","Green","Grgić","Gruber","Gutierrez","Haas","Hagen","Hall","Halvorsen","Hansen","Harris","Haugen","Heilig","Hendriks","Henriksen","Hernandez","Hill","Hofer","Hoffmann","Horvat","Howard","Huber","Hughes","Ivanov","Jackson","Jacobs","Jacobsen","James","Jansen","Janssen","Janssens","Jenkins","Jensen","Johannessen","Johansen","Johnsen","Johnson","Jones","Jørgensen","Jukić","Jurić","Karlsen","Kelly","King","Knežević","Koller","Kovač","Kovačević","Kovačić","Kozlov","Kristiansen","Kuznetsov","Lambert","Lang","Larsen","Laurent","Lebedev","Lechner","Lee","Lefebvre","Lefèvre","Lehner","Leitner","Leroy","Lewis","Lombardi","Long","Lopez","Lovrić","Lund","Madsen","Maes","Maier","Mancini","Mariani","Marić","Marino","Marković","Martin","Martinez","Martinez","Matić","Mayer","Mayr","Meijer","Meyer","Mercier","Mertens","Meyer","Michel","Miller","Mitchell","Møller","Moore","Morales","Moreau","Morel","Moretti","Morgan","Morozov","Morris","Mortensen","Moser","Mulder","Müller","Murphy","M
yers","Nelson","Nguyen","Nielsen","Nikolaidis","Nilsen","Novak","Novikov","Olsen","Ortiz","Panagiotopoulos","Papadakis","Papadopoulos","Papantoniou","Parker","Pavić","Pavlov","Pavlović","Pedersen","Peeters","Perez","Perić","Perković","Perry","Peters","Petersen","Peterson","Petit","Petridis","Petrov","Petrović","Pettersen","Phillips","Pichler","Popov","Popović","Poulsen","Powell","Price","Radić","Ramirez","Rasmussen","Reed","Reiter","Reyes","Ricci","Richard","Richardson","Rinaldi","Rivera","Rizzo","Robert","Roberts","Robinson","Rodriguez","Rogers","Romano","Ross","Rossi","Roux","Russell","Russo","Sanchez","Sanders","Santoro","Šarić","Schmid","Schmidt","Schneider","Schulz","Schuster","Schwarz","Scott","Semyonov","Simon","Simon","Smirnov","Smit","Smith","Smits","Sokolov","Solovyov","Sørensen","Steiner","Stewart","Sullivan","Taylor","Thomas","Thompson","Thomsen","Tomić","Torres","Turner","Van den Berg","Van der Meer","Van Dijk","Van Leeuwen","Vasilyev","Vidović","Vincent","Vinogradov","Visser","Vlahos","Volkov","Vorobyov","Vos","Vuković","Wagner","Walker","Wallner","Ward","Watson","Weber","White","Willems","Williams","Wilson","Wimmer","Winkler","Wolf","Wood","Wouters","Wright","Young","Zaytsev"]
def first_name(r):
    """Return a deterministic first name for seed *r*.

    hashIt(r, 1) selects the gendered pool; hashIt(r, len(pool)) indexes it.
    """
    pool = female_names if hashIt(r, 1) else male_names
    return pool[hashIt(r, len(pool))]
def last_name(r):
    """Return a deterministic surname for seed *r*."""
    idx = hashIt(r, len(last_names))
    return last_names[idx]
def full_name(r):
    """Return the deterministic "first last" name pair for seed *r*."""
    return " ".join((first_name(r), last_name(r)))
| mit |
mezz64/home-assistant | homeassistant/components/venstar/climate.py | 16 | 11240 | """Support for Venstar WiFi Thermostats."""
import logging
from venstarcolortouch import VenstarColorTouch
import voluptuous as vol
from homeassistant.components.climate import PLATFORM_SCHEMA, ClimateEntity
from homeassistant.components.climate.const import (
ATTR_HVAC_MODE,
ATTR_TARGET_TEMP_HIGH,
ATTR_TARGET_TEMP_LOW,
CURRENT_HVAC_COOL,
CURRENT_HVAC_HEAT,
CURRENT_HVAC_IDLE,
CURRENT_HVAC_OFF,
FAN_AUTO,
FAN_ON,
HVAC_MODE_AUTO,
HVAC_MODE_COOL,
HVAC_MODE_HEAT,
HVAC_MODE_OFF,
PRESET_AWAY,
PRESET_NONE,
SUPPORT_FAN_MODE,
SUPPORT_PRESET_MODE,
SUPPORT_TARGET_HUMIDITY,
SUPPORT_TARGET_TEMPERATURE,
SUPPORT_TARGET_TEMPERATURE_RANGE,
)
from homeassistant.const import (
ATTR_TEMPERATURE,
CONF_HOST,
CONF_PASSWORD,
CONF_PIN,
CONF_SSL,
CONF_TIMEOUT,
CONF_USERNAME,
PRECISION_HALVES,
STATE_ON,
TEMP_CELSIUS,
TEMP_FAHRENHEIT,
)
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
ATTR_FAN_STATE = "fan_state"
ATTR_HVAC_STATE = "hvac_mode"
CONF_HUMIDIFIER = "humidifier"
DEFAULT_SSL = False
VALID_FAN_STATES = [STATE_ON, HVAC_MODE_AUTO]
VALID_THERMOSTAT_MODES = [HVAC_MODE_HEAT, HVAC_MODE_COOL, HVAC_MODE_OFF, HVAC_MODE_AUTO]
HOLD_MODE_OFF = "off"
HOLD_MODE_TEMPERATURE = "temperature"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_HOST): cv.string,
vol.Optional(CONF_PASSWORD): cv.string,
vol.Optional(CONF_HUMIDIFIER, default=True): cv.boolean,
vol.Optional(CONF_SSL, default=DEFAULT_SSL): cv.boolean,
vol.Optional(CONF_TIMEOUT, default=5): vol.All(
vol.Coerce(int), vol.Range(min=1)
),
vol.Optional(CONF_USERNAME): cv.string,
vol.Optional(CONF_PIN): cv.string,
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up the Venstar thermostat."""
    scheme = "https" if config[CONF_SSL] else "http"
    # Build the API client straight from the validated platform config.
    api = VenstarColorTouch(
        addr=config.get(CONF_HOST),
        timeout=config.get(CONF_TIMEOUT),
        user=config.get(CONF_USERNAME),
        password=config.get(CONF_PASSWORD),
        pin=config.get(CONF_PIN),
        proto=scheme,
    )
    # update_before_add=True so the entity has data before first state write.
    add_entities([VenstarThermostat(api, config.get(CONF_HUMIDIFIER))], True)
class VenstarThermostat(ClimateEntity):
    """Representation of a Venstar thermostat."""
    def __init__(self, client, humidifier):
        """Initialize the thermostat.

        client: VenstarColorTouch API object; all state is read from it
            during update().
        humidifier: bool config option controlling whether humidity
            features are exposed.
        """
        self._client = client
        self._humidifier = humidifier
        # Maps HA HVAC modes onto the client's native mode constants;
        # HVAC_MODE_OFF is handled separately in _set_operation_mode().
        self._mode_map = {
            HVAC_MODE_HEAT: self._client.MODE_HEAT,
            HVAC_MODE_COOL: self._client.MODE_COOL,
            HVAC_MODE_AUTO: self._client.MODE_AUTO,
        }
    def update(self):
        """Update the data from the thermostat."""
        info_success = self._client.update_info()
        sensor_success = self._client.update_sensors()
        # On any failure keep the previously-fetched state and just log.
        if not info_success or not sensor_success:
            _LOGGER.error("Failed to update data")
    @property
    def supported_features(self):
        """Return the list of supported features."""
        features = SUPPORT_TARGET_TEMPERATURE | SUPPORT_FAN_MODE | SUPPORT_PRESET_MODE
        # Auto mode uses a low/high setpoint pair instead of a single target.
        if self._client.mode == self._client.MODE_AUTO:
            features |= SUPPORT_TARGET_TEMPERATURE_RANGE
        # Only expose humidity control when configured on AND the client
        # object actually has humidifier support (hum_active attribute).
        if self._humidifier and hasattr(self._client, "hum_active"):
            features |= SUPPORT_TARGET_HUMIDITY
        return features
    @property
    def name(self):
        """Return the name of the thermostat."""
        return self._client.name
    @property
    def precision(self):
        """Return the precision of the system.
        Venstar temperature values are passed back and forth in the
        API in C or F, with half-degree accuracy.
        """
        return PRECISION_HALVES
    @property
    def temperature_unit(self):
        """Return the unit of measurement, as defined by the API."""
        if self._client.tempunits == self._client.TEMPUNITS_F:
            return TEMP_FAHRENHEIT
        return TEMP_CELSIUS
    @property
    def fan_modes(self):
        """Return the list of available fan modes."""
        return VALID_FAN_STATES
    @property
    def hvac_modes(self):
        """Return the list of available operation modes."""
        return VALID_THERMOSTAT_MODES
    @property
    def current_temperature(self):
        """Return the current temperature."""
        return self._client.get_indoor_temp()
    @property
    def current_humidity(self):
        """Return the current humidity."""
        return self._client.get_indoor_humidity()
    @property
    def hvac_mode(self):
        """Return current operation mode ie. heat, cool, auto."""
        if self._client.mode == self._client.MODE_HEAT:
            return HVAC_MODE_HEAT
        if self._client.mode == self._client.MODE_COOL:
            return HVAC_MODE_COOL
        if self._client.mode == self._client.MODE_AUTO:
            return HVAC_MODE_AUTO
        # Any other client mode is reported as off.
        return HVAC_MODE_OFF
    @property
    def hvac_action(self):
        """Return current operation mode ie. heat, cool, auto."""
        if self._client.state == self._client.STATE_IDLE:
            return CURRENT_HVAC_IDLE
        if self._client.state == self._client.STATE_HEATING:
            return CURRENT_HVAC_HEAT
        if self._client.state == self._client.STATE_COOLING:
            return CURRENT_HVAC_COOL
        return CURRENT_HVAC_OFF
    @property
    def fan_mode(self):
        """Return the current fan mode."""
        if self._client.fan == self._client.FAN_ON:
            return FAN_ON
        return FAN_AUTO
    @property
    def device_state_attributes(self):
        """Return the optional state attributes."""
        return {
            ATTR_FAN_STATE: self._client.fanstate,
            ATTR_HVAC_STATE: self._client.state,
        }
    @property
    def target_temperature(self):
        """Return the target temperature we try to reach."""
        if self._client.mode == self._client.MODE_HEAT:
            return self._client.heattemp
        if self._client.mode == self._client.MODE_COOL:
            return self._client.cooltemp
        # No single target in auto/off modes (see target_temperature_low/high).
        return None
    @property
    def target_temperature_low(self):
        """Return the lower bound temp if auto mode is on."""
        if self._client.mode == self._client.MODE_AUTO:
            return self._client.heattemp
        return None
    @property
    def target_temperature_high(self):
        """Return the upper bound temp if auto mode is on."""
        if self._client.mode == self._client.MODE_AUTO:
            return self._client.cooltemp
        return None
    @property
    def target_humidity(self):
        """Return the humidity we try to reach."""
        return self._client.hum_setpoint
    @property
    def min_humidity(self):
        """Return the minimum humidity. Hardcoded to 0 in API."""
        return 0
    @property
    def max_humidity(self):
        """Return the maximum humidity. Hardcoded to 60 in API."""
        return 60
    @property
    def preset_mode(self):
        """Return current preset."""
        if self._client.away:
            return PRESET_AWAY
        # schedule == 0 means the program schedule is disabled, i.e. a
        # permanent temperature hold is active (see set_preset_mode()).
        if self._client.schedule == 0:
            return HOLD_MODE_TEMPERATURE
        return PRESET_NONE
    @property
    def preset_modes(self):
        """Return valid preset modes."""
        return [PRESET_NONE, PRESET_AWAY, HOLD_MODE_TEMPERATURE]
    def _set_operation_mode(self, operation_mode):
        """Change the operation mode (internal).

        Returns the client's success flag so set_temperature() can skip
        the setpoint write when the mode change failed.
        """
        if operation_mode == HVAC_MODE_HEAT:
            success = self._client.set_mode(self._client.MODE_HEAT)
        elif operation_mode == HVAC_MODE_COOL:
            success = self._client.set_mode(self._client.MODE_COOL)
        elif operation_mode == HVAC_MODE_AUTO:
            success = self._client.set_mode(self._client.MODE_AUTO)
        else:
            # Anything unrecognised (including HVAC_MODE_OFF) turns it off.
            success = self._client.set_mode(self._client.MODE_OFF)
        if not success:
            _LOGGER.error("Failed to change the operation mode")
        return success
    def set_temperature(self, **kwargs):
        """Set a new target temperature."""
        set_temp = True
        operation_mode = kwargs.get(ATTR_HVAC_MODE)
        temp_low = kwargs.get(ATTR_TARGET_TEMP_LOW)
        temp_high = kwargs.get(ATTR_TARGET_TEMP_HIGH)
        temperature = kwargs.get(ATTR_TEMPERATURE)
        # If the call also requests a different HVAC mode, switch modes first
        # and abort the setpoint write if that switch failed.
        if operation_mode and self._mode_map.get(operation_mode) != self._client.mode:
            set_temp = self._set_operation_mode(operation_mode)
        if set_temp:
            # Decide which setpoint(s) to write based on the requested mode
            # (falling back to the client's current mode when none was given).
            if (
                self._mode_map.get(operation_mode, self._client.mode)
                == self._client.MODE_HEAT
            ):
                success = self._client.set_setpoints(temperature, self._client.cooltemp)
            elif (
                self._mode_map.get(operation_mode, self._client.mode)
                == self._client.MODE_COOL
            ):
                success = self._client.set_setpoints(self._client.heattemp, temperature)
            elif (
                self._mode_map.get(operation_mode, self._client.mode)
                == self._client.MODE_AUTO
            ):
                success = self._client.set_setpoints(temp_low, temp_high)
            else:
                success = False
                _LOGGER.error(
                    "The thermostat is currently not in a mode "
                    "that supports target temperature: %s",
                    operation_mode,
                )
            if not success:
                _LOGGER.error("Failed to change the temperature")
    def set_fan_mode(self, fan_mode):
        """Set new target fan mode."""
        if fan_mode == STATE_ON:
            success = self._client.set_fan(self._client.FAN_ON)
        else:
            success = self._client.set_fan(self._client.FAN_AUTO)
        if not success:
            _LOGGER.error("Failed to change the fan mode")
    def set_hvac_mode(self, hvac_mode):
        """Set new target operation mode."""
        self._set_operation_mode(hvac_mode)
    def set_humidity(self, humidity):
        """Set new target humidity."""
        success = self._client.set_hum_setpoint(humidity)
        if not success:
            _LOGGER.error("Failed to change the target humidity level")
    def set_preset_mode(self, preset_mode):
        """Set the hold mode."""
        if preset_mode == PRESET_AWAY:
            success = self._client.set_away(self._client.AWAY_AWAY)
        elif preset_mode == HOLD_MODE_TEMPERATURE:
            # Temperature hold: return from away, then disable the schedule.
            success = self._client.set_away(self._client.AWAY_HOME)
            success = success and self._client.set_schedule(0)
        elif preset_mode == PRESET_NONE:
            # Normal operation: return from away and re-enable the schedule.
            success = self._client.set_away(self._client.AWAY_HOME)
            success = success and self._client.set_schedule(1)
        else:
            _LOGGER.error("Unknown hold mode: %s", preset_mode)
            success = False
        if not success:
            _LOGGER.error("Failed to change the schedule/hold state")
| apache-2.0 |
OpenUpgrade/OpenUpgrade | addons/account/wizard/account_use_model.py | 341 | 3361 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from openerp.osv import fields, osv
from openerp.tools.translate import _
class account_use_model(osv.osv_memory):
    """Wizard that generates journal entries from selected account models."""
    _name = 'account.use.model'
    _description = 'Use model'
    _columns = {
        # Models whose lines will be turned into journal entries.
        'model': fields.many2many('account.model', 'account_use_model_relation', 'account_id', 'model_id', 'Account Model'),
    }
    def view_init(self, cr , uid , fields_list, context=None):
        # Pre-flight validation before the wizard form opens: every model
        # line whose maturity date is based on the partner payment term
        # must actually have a partner set, otherwise entry generation
        # would fail later with a less helpful error.
        account_model_obj = self.pool.get('account.model')
        if context is None:
            context = {}
        if context.get('active_ids',False):
            data_model = account_model_obj.browse(cr, uid, context['active_ids'])
            for model in data_model:
                for line in model.lines_id:
                    if line.date_maturity == 'partner':
                        if not line.partner_id:
                            raise osv.except_osv(_('Error!'), _("Maturity date of entry line generated by model line '%s' is based on partner payment term!"\
                                "\nPlease define partner on it!")%line.name)
        pass
    def create_entries(self, cr, uid, ids, context=None):
        # Generate account moves from the chosen models, then return an
        # act_window that shows the freshly-created moves.
        account_model_obj = self.pool.get('account.model')
        mod_obj = self.pool.get('ir.model.data')
        if context is None:
            context = {}
        data = self.read(cr, uid, ids, context=context)[0]
        # Launched from the wizard form ('model_line' in context): use the
        # models selected on the form; otherwise use the active records.
        record_id = context and context.get('model_line', False) or False
        if record_id:
            model_ids = data['model']
        else:
            model_ids = context['active_ids']
        move_ids = account_model_obj.generate(cr, uid, model_ids, context=context)
        context = dict(context, move_ids=move_ids)
        # Locate the standard account.move form view for the action below.
        model_data_ids = mod_obj.search(cr, uid,[('model','=','ir.ui.view'),('name','=','view_move_form')], context=context)
        resource_id = mod_obj.read(cr, uid, model_data_ids, fields=['res_id'], context=context)[0]['res_id']
        return {
            'domain': "[('id','in', ["+','.join(map(str,context['move_ids']))+"])]",
            'name': 'Entries',
            'view_type': 'form',
            'view_mode': 'tree,form',
            'res_model': 'account.move',
            'views': [(False,'tree'),(resource_id,'form')],
            'type': 'ir.actions.act_window',
        }
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
koningdde/Arduitools | ESP8266/nodemcu-firmware-master/nodemcu-firmware-master/tools/make_server_cert.py | 13 | 1910 | import os
import argparse
import base64
import re
import sys
class Cert(object):
    """One PEM object serialized for the flash blob.

    The on-flash record layout produced by __str__ is:
      * 32-byte name field, NUL-padded (e.g. "CERTIFICATE"),
      * 2-byte little-endian payload length,
      * the raw decoded payload bytes.
    """

    def __init__(self, name, buff):
        self.name = name        # PEM label, e.g. "CERTIFICATE"
        self.len = len(buff)    # payload length in bytes
        self.buff = buff        # decoded DER payload

    def __str__(self):
        # Fixed-width 32-char name field padded with NULs. The previous
        # char-by-char list fill raised IndexError for names of 32+ chars;
        # ljust with truncation keeps the record well-formed instead.
        out_str = self.name[:32].ljust(32, '\0')
        # 16-bit payload length, little-endian.
        out_str += chr(self.len & 0xFF)
        out_str += chr((self.len & 0xFF00) >> 8)
        out_str += self.buff
        return out_str
def main():
    """Convert PEM input file(s) into a C byte-array initializer on stdout.

    NOTE(review): this script uses Python 2 print statements and will not
    run under Python 3 as-is.
    """
    parser = argparse.ArgumentParser(description='Convert PEM file(s) into C source file.')
    parser.add_argument('--section',
                        default='.servercert.flash',
                        help='specify the section for the data (default is .servercert.flash)')
    parser.add_argument('--name',
                        default='tls_server_cert_area',
                        help='specify the variable name for the data (default is tls_server_cert_area)')
    parser.add_argument('file', nargs='+',
                        help='One or more PEM files')
    args = parser.parse_args()
    cert_list = []
    cert_file_list = []
    for cert_file in args.file:
        with open(cert_file, 'r') as f:
            buff = f.read()
        # Exactly one PEM object per input file; the backreference \1
        # ensures the BEGIN/END labels match each other.
        m = re.search(r"-----BEGIN ([A-Z ]+)-----([^-]+?)-----END \1-----", buff, flags=re.DOTALL)
        if not m:
            sys.exit("Input file was not in PEM format")
        if "----BEGIN" in buff[m.end(0):]:
            sys.exit("Input file contains more than one PEM object")
        # Store the object type plus the base64-decoded payload.
        cert_list.append(Cert(m.group(1), base64.b64decode(''.join(m.group(2).split()))))
    print '__attribute__((section("%s"))) unsigned char %s[INTERNAL_FLASH_SECTOR_SIZE] = {' % (args.section, args.name)
    for _cert in cert_list:
        col = 0
        # Emit the serialized record as "0x..," bytes, 16 per output line.
        for ch in str(_cert):
            print ("0x%02x," % ord(ch)),
            if col & 15 == 15:
                print
            col = col + 1
    # Terminate the blob with an 0xff sentinel and close the initializer.
    print '\n0xff};\n'
if __name__ == '__main__':
main()
| gpl-3.0 |
MattDevo/edk2 | AppPkg/Applications/Python/Python-2.7.10/Lib/encodings/iso8859_13.py | 93 | 13834 | """ Python Character Mapping Codec iso8859_13 generated from 'MAPPINGS/ISO8859/8859-13.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):

    def encode(self, input, errors='strict'):
        """Encode *input* through the ISO-8859-13 character map."""
        return codecs.charmap_encode(input, errors, encoding_table)

    def decode(self, input, errors='strict'):
        """Decode *input* through the ISO-8859-13 character map."""
        return codecs.charmap_decode(input, errors, decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
    def encode(self, input, final=False):
        """Encode one chunk; the charmap codec carries no state between calls."""
        encoded, _consumed = codecs.charmap_encode(input, self.errors, encoding_table)
        return encoded
class IncrementalDecoder(codecs.IncrementalDecoder):
    def decode(self, input, final=False):
        """Decode one chunk; the charmap codec carries no state between calls."""
        decoded, _consumed = codecs.charmap_decode(input, self.errors, decoding_table)
        return decoded
class StreamWriter(Codec,codecs.StreamWriter):
    # Stream support is inherited wholesale from Codec + codecs.StreamWriter.
    pass
class StreamReader(Codec,codecs.StreamReader):
    # Stream support is inherited wholesale from Codec + codecs.StreamReader.
    pass
### encodings module API
def getregentry():
    """Return the CodecInfo record registered for this encoding."""
    # One Codec instance is enough: encode/decode only touch the
    # module-level tables and hold no per-call state.
    codec = Codec()
    return codecs.CodecInfo(
        name='iso8859-13',
        encode=codec.encode,
        decode=codec.decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
### Decoding Table
decoding_table = (
u'\x00' # 0x00 -> NULL
u'\x01' # 0x01 -> START OF HEADING
u'\x02' # 0x02 -> START OF TEXT
u'\x03' # 0x03 -> END OF TEXT
u'\x04' # 0x04 -> END OF TRANSMISSION
u'\x05' # 0x05 -> ENQUIRY
u'\x06' # 0x06 -> ACKNOWLEDGE
u'\x07' # 0x07 -> BELL
u'\x08' # 0x08 -> BACKSPACE
u'\t' # 0x09 -> HORIZONTAL TABULATION
u'\n' # 0x0A -> LINE FEED
u'\x0b' # 0x0B -> VERTICAL TABULATION
u'\x0c' # 0x0C -> FORM FEED
u'\r' # 0x0D -> CARRIAGE RETURN
u'\x0e' # 0x0E -> SHIFT OUT
u'\x0f' # 0x0F -> SHIFT IN
u'\x10' # 0x10 -> DATA LINK ESCAPE
u'\x11' # 0x11 -> DEVICE CONTROL ONE
u'\x12' # 0x12 -> DEVICE CONTROL TWO
u'\x13' # 0x13 -> DEVICE CONTROL THREE
u'\x14' # 0x14 -> DEVICE CONTROL FOUR
u'\x15' # 0x15 -> NEGATIVE ACKNOWLEDGE
u'\x16' # 0x16 -> SYNCHRONOUS IDLE
u'\x17' # 0x17 -> END OF TRANSMISSION BLOCK
u'\x18' # 0x18 -> CANCEL
u'\x19' # 0x19 -> END OF MEDIUM
u'\x1a' # 0x1A -> SUBSTITUTE
u'\x1b' # 0x1B -> ESCAPE
u'\x1c' # 0x1C -> FILE SEPARATOR
u'\x1d' # 0x1D -> GROUP SEPARATOR
u'\x1e' # 0x1E -> RECORD SEPARATOR
u'\x1f' # 0x1F -> UNIT SEPARATOR
u' ' # 0x20 -> SPACE
u'!' # 0x21 -> EXCLAMATION MARK
u'"' # 0x22 -> QUOTATION MARK
u'#' # 0x23 -> NUMBER SIGN
u'$' # 0x24 -> DOLLAR SIGN
u'%' # 0x25 -> PERCENT SIGN
u'&' # 0x26 -> AMPERSAND
u"'" # 0x27 -> APOSTROPHE
u'(' # 0x28 -> LEFT PARENTHESIS
u')' # 0x29 -> RIGHT PARENTHESIS
u'*' # 0x2A -> ASTERISK
u'+' # 0x2B -> PLUS SIGN
u',' # 0x2C -> COMMA
u'-' # 0x2D -> HYPHEN-MINUS
u'.' # 0x2E -> FULL STOP
u'/' # 0x2F -> SOLIDUS
u'0' # 0x30 -> DIGIT ZERO
u'1' # 0x31 -> DIGIT ONE
u'2' # 0x32 -> DIGIT TWO
u'3' # 0x33 -> DIGIT THREE
u'4' # 0x34 -> DIGIT FOUR
u'5' # 0x35 -> DIGIT FIVE
u'6' # 0x36 -> DIGIT SIX
u'7' # 0x37 -> DIGIT SEVEN
u'8' # 0x38 -> DIGIT EIGHT
u'9' # 0x39 -> DIGIT NINE
u':' # 0x3A -> COLON
u';' # 0x3B -> SEMICOLON
u'<' # 0x3C -> LESS-THAN SIGN
u'=' # 0x3D -> EQUALS SIGN
u'>' # 0x3E -> GREATER-THAN SIGN
u'?' # 0x3F -> QUESTION MARK
u'@' # 0x40 -> COMMERCIAL AT
u'A' # 0x41 -> LATIN CAPITAL LETTER A
u'B' # 0x42 -> LATIN CAPITAL LETTER B
u'C' # 0x43 -> LATIN CAPITAL LETTER C
u'D' # 0x44 -> LATIN CAPITAL LETTER D
u'E' # 0x45 -> LATIN CAPITAL LETTER E
u'F' # 0x46 -> LATIN CAPITAL LETTER F
u'G' # 0x47 -> LATIN CAPITAL LETTER G
u'H' # 0x48 -> LATIN CAPITAL LETTER H
u'I' # 0x49 -> LATIN CAPITAL LETTER I
u'J' # 0x4A -> LATIN CAPITAL LETTER J
u'K' # 0x4B -> LATIN CAPITAL LETTER K
u'L' # 0x4C -> LATIN CAPITAL LETTER L
u'M' # 0x4D -> LATIN CAPITAL LETTER M
u'N' # 0x4E -> LATIN CAPITAL LETTER N
u'O' # 0x4F -> LATIN CAPITAL LETTER O
u'P' # 0x50 -> LATIN CAPITAL LETTER P
u'Q' # 0x51 -> LATIN CAPITAL LETTER Q
u'R' # 0x52 -> LATIN CAPITAL LETTER R
u'S' # 0x53 -> LATIN CAPITAL LETTER S
u'T' # 0x54 -> LATIN CAPITAL LETTER T
u'U' # 0x55 -> LATIN CAPITAL LETTER U
u'V' # 0x56 -> LATIN CAPITAL LETTER V
u'W' # 0x57 -> LATIN CAPITAL LETTER W
u'X' # 0x58 -> LATIN CAPITAL LETTER X
u'Y' # 0x59 -> LATIN CAPITAL LETTER Y
u'Z' # 0x5A -> LATIN CAPITAL LETTER Z
u'[' # 0x5B -> LEFT SQUARE BRACKET
u'\\' # 0x5C -> REVERSE SOLIDUS
u']' # 0x5D -> RIGHT SQUARE BRACKET
u'^' # 0x5E -> CIRCUMFLEX ACCENT
u'_' # 0x5F -> LOW LINE
u'`' # 0x60 -> GRAVE ACCENT
u'a' # 0x61 -> LATIN SMALL LETTER A
u'b' # 0x62 -> LATIN SMALL LETTER B
u'c' # 0x63 -> LATIN SMALL LETTER C
u'd' # 0x64 -> LATIN SMALL LETTER D
u'e' # 0x65 -> LATIN SMALL LETTER E
u'f' # 0x66 -> LATIN SMALL LETTER F
u'g' # 0x67 -> LATIN SMALL LETTER G
u'h' # 0x68 -> LATIN SMALL LETTER H
u'i' # 0x69 -> LATIN SMALL LETTER I
u'j' # 0x6A -> LATIN SMALL LETTER J
u'k' # 0x6B -> LATIN SMALL LETTER K
u'l' # 0x6C -> LATIN SMALL LETTER L
u'm' # 0x6D -> LATIN SMALL LETTER M
u'n' # 0x6E -> LATIN SMALL LETTER N
u'o' # 0x6F -> LATIN SMALL LETTER O
u'p' # 0x70 -> LATIN SMALL LETTER P
u'q' # 0x71 -> LATIN SMALL LETTER Q
u'r' # 0x72 -> LATIN SMALL LETTER R
u's' # 0x73 -> LATIN SMALL LETTER S
u't' # 0x74 -> LATIN SMALL LETTER T
u'u' # 0x75 -> LATIN SMALL LETTER U
u'v' # 0x76 -> LATIN SMALL LETTER V
u'w' # 0x77 -> LATIN SMALL LETTER W
u'x' # 0x78 -> LATIN SMALL LETTER X
u'y' # 0x79 -> LATIN SMALL LETTER Y
u'z' # 0x7A -> LATIN SMALL LETTER Z
u'{' # 0x7B -> LEFT CURLY BRACKET
u'|' # 0x7C -> VERTICAL LINE
u'}' # 0x7D -> RIGHT CURLY BRACKET
u'~' # 0x7E -> TILDE
u'\x7f' # 0x7F -> DELETE
u'\x80' # 0x80 -> <control>
u'\x81' # 0x81 -> <control>
u'\x82' # 0x82 -> <control>
u'\x83' # 0x83 -> <control>
u'\x84' # 0x84 -> <control>
u'\x85' # 0x85 -> <control>
u'\x86' # 0x86 -> <control>
u'\x87' # 0x87 -> <control>
u'\x88' # 0x88 -> <control>
u'\x89' # 0x89 -> <control>
u'\x8a' # 0x8A -> <control>
u'\x8b' # 0x8B -> <control>
u'\x8c' # 0x8C -> <control>
u'\x8d' # 0x8D -> <control>
u'\x8e' # 0x8E -> <control>
u'\x8f' # 0x8F -> <control>
u'\x90' # 0x90 -> <control>
u'\x91' # 0x91 -> <control>
u'\x92' # 0x92 -> <control>
u'\x93' # 0x93 -> <control>
u'\x94' # 0x94 -> <control>
u'\x95' # 0x95 -> <control>
u'\x96' # 0x96 -> <control>
u'\x97' # 0x97 -> <control>
u'\x98' # 0x98 -> <control>
u'\x99' # 0x99 -> <control>
u'\x9a' # 0x9A -> <control>
u'\x9b' # 0x9B -> <control>
u'\x9c' # 0x9C -> <control>
u'\x9d' # 0x9D -> <control>
u'\x9e' # 0x9E -> <control>
u'\x9f' # 0x9F -> <control>
u'\xa0' # 0xA0 -> NO-BREAK SPACE
u'\u201d' # 0xA1 -> RIGHT DOUBLE QUOTATION MARK
u'\xa2' # 0xA2 -> CENT SIGN
u'\xa3' # 0xA3 -> POUND SIGN
u'\xa4' # 0xA4 -> CURRENCY SIGN
u'\u201e' # 0xA5 -> DOUBLE LOW-9 QUOTATION MARK
u'\xa6' # 0xA6 -> BROKEN BAR
u'\xa7' # 0xA7 -> SECTION SIGN
u'\xd8' # 0xA8 -> LATIN CAPITAL LETTER O WITH STROKE
u'\xa9' # 0xA9 -> COPYRIGHT SIGN
u'\u0156' # 0xAA -> LATIN CAPITAL LETTER R WITH CEDILLA
u'\xab' # 0xAB -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\xac' # 0xAC -> NOT SIGN
u'\xad' # 0xAD -> SOFT HYPHEN
u'\xae' # 0xAE -> REGISTERED SIGN
u'\xc6' # 0xAF -> LATIN CAPITAL LETTER AE
u'\xb0' # 0xB0 -> DEGREE SIGN
u'\xb1' # 0xB1 -> PLUS-MINUS SIGN
u'\xb2' # 0xB2 -> SUPERSCRIPT TWO
u'\xb3' # 0xB3 -> SUPERSCRIPT THREE
u'\u201c' # 0xB4 -> LEFT DOUBLE QUOTATION MARK
u'\xb5' # 0xB5 -> MICRO SIGN
u'\xb6' # 0xB6 -> PILCROW SIGN
u'\xb7' # 0xB7 -> MIDDLE DOT
u'\xf8' # 0xB8 -> LATIN SMALL LETTER O WITH STROKE
u'\xb9' # 0xB9 -> SUPERSCRIPT ONE
u'\u0157' # 0xBA -> LATIN SMALL LETTER R WITH CEDILLA
u'\xbb' # 0xBB -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\xbc' # 0xBC -> VULGAR FRACTION ONE QUARTER
u'\xbd' # 0xBD -> VULGAR FRACTION ONE HALF
u'\xbe' # 0xBE -> VULGAR FRACTION THREE QUARTERS
u'\xe6' # 0xBF -> LATIN SMALL LETTER AE
u'\u0104' # 0xC0 -> LATIN CAPITAL LETTER A WITH OGONEK
u'\u012e' # 0xC1 -> LATIN CAPITAL LETTER I WITH OGONEK
u'\u0100' # 0xC2 -> LATIN CAPITAL LETTER A WITH MACRON
u'\u0106' # 0xC3 -> LATIN CAPITAL LETTER C WITH ACUTE
u'\xc4' # 0xC4 -> LATIN CAPITAL LETTER A WITH DIAERESIS
u'\xc5' # 0xC5 -> LATIN CAPITAL LETTER A WITH RING ABOVE
u'\u0118' # 0xC6 -> LATIN CAPITAL LETTER E WITH OGONEK
u'\u0112' # 0xC7 -> LATIN CAPITAL LETTER E WITH MACRON
u'\u010c' # 0xC8 -> LATIN CAPITAL LETTER C WITH CARON
u'\xc9' # 0xC9 -> LATIN CAPITAL LETTER E WITH ACUTE
u'\u0179' # 0xCA -> LATIN CAPITAL LETTER Z WITH ACUTE
u'\u0116' # 0xCB -> LATIN CAPITAL LETTER E WITH DOT ABOVE
u'\u0122' # 0xCC -> LATIN CAPITAL LETTER G WITH CEDILLA
u'\u0136' # 0xCD -> LATIN CAPITAL LETTER K WITH CEDILLA
u'\u012a' # 0xCE -> LATIN CAPITAL LETTER I WITH MACRON
u'\u013b' # 0xCF -> LATIN CAPITAL LETTER L WITH CEDILLA
u'\u0160' # 0xD0 -> LATIN CAPITAL LETTER S WITH CARON
u'\u0143' # 0xD1 -> LATIN CAPITAL LETTER N WITH ACUTE
u'\u0145' # 0xD2 -> LATIN CAPITAL LETTER N WITH CEDILLA
u'\xd3' # 0xD3 -> LATIN CAPITAL LETTER O WITH ACUTE
u'\u014c' # 0xD4 -> LATIN CAPITAL LETTER O WITH MACRON
u'\xd5' # 0xD5 -> LATIN CAPITAL LETTER O WITH TILDE
u'\xd6' # 0xD6 -> LATIN CAPITAL LETTER O WITH DIAERESIS
u'\xd7' # 0xD7 -> MULTIPLICATION SIGN
u'\u0172' # 0xD8 -> LATIN CAPITAL LETTER U WITH OGONEK
u'\u0141' # 0xD9 -> LATIN CAPITAL LETTER L WITH STROKE
u'\u015a' # 0xDA -> LATIN CAPITAL LETTER S WITH ACUTE
u'\u016a' # 0xDB -> LATIN CAPITAL LETTER U WITH MACRON
u'\xdc' # 0xDC -> LATIN CAPITAL LETTER U WITH DIAERESIS
u'\u017b' # 0xDD -> LATIN CAPITAL LETTER Z WITH DOT ABOVE
u'\u017d' # 0xDE -> LATIN CAPITAL LETTER Z WITH CARON
u'\xdf' # 0xDF -> LATIN SMALL LETTER SHARP S (German)
u'\u0105' # 0xE0 -> LATIN SMALL LETTER A WITH OGONEK
u'\u012f' # 0xE1 -> LATIN SMALL LETTER I WITH OGONEK
u'\u0101' # 0xE2 -> LATIN SMALL LETTER A WITH MACRON
u'\u0107' # 0xE3 -> LATIN SMALL LETTER C WITH ACUTE
u'\xe4' # 0xE4 -> LATIN SMALL LETTER A WITH DIAERESIS
u'\xe5' # 0xE5 -> LATIN SMALL LETTER A WITH RING ABOVE
u'\u0119' # 0xE6 -> LATIN SMALL LETTER E WITH OGONEK
u'\u0113' # 0xE7 -> LATIN SMALL LETTER E WITH MACRON
u'\u010d' # 0xE8 -> LATIN SMALL LETTER C WITH CARON
u'\xe9' # 0xE9 -> LATIN SMALL LETTER E WITH ACUTE
u'\u017a' # 0xEA -> LATIN SMALL LETTER Z WITH ACUTE
u'\u0117' # 0xEB -> LATIN SMALL LETTER E WITH DOT ABOVE
u'\u0123' # 0xEC -> LATIN SMALL LETTER G WITH CEDILLA
u'\u0137' # 0xED -> LATIN SMALL LETTER K WITH CEDILLA
u'\u012b' # 0xEE -> LATIN SMALL LETTER I WITH MACRON
u'\u013c' # 0xEF -> LATIN SMALL LETTER L WITH CEDILLA
u'\u0161' # 0xF0 -> LATIN SMALL LETTER S WITH CARON
u'\u0144' # 0xF1 -> LATIN SMALL LETTER N WITH ACUTE
u'\u0146' # 0xF2 -> LATIN SMALL LETTER N WITH CEDILLA
u'\xf3' # 0xF3 -> LATIN SMALL LETTER O WITH ACUTE
u'\u014d' # 0xF4 -> LATIN SMALL LETTER O WITH MACRON
u'\xf5' # 0xF5 -> LATIN SMALL LETTER O WITH TILDE
u'\xf6' # 0xF6 -> LATIN SMALL LETTER O WITH DIAERESIS
u'\xf7' # 0xF7 -> DIVISION SIGN
u'\u0173' # 0xF8 -> LATIN SMALL LETTER U WITH OGONEK
u'\u0142' # 0xF9 -> LATIN SMALL LETTER L WITH STROKE
u'\u015b' # 0xFA -> LATIN SMALL LETTER S WITH ACUTE
u'\u016b' # 0xFB -> LATIN SMALL LETTER U WITH MACRON
u'\xfc' # 0xFC -> LATIN SMALL LETTER U WITH DIAERESIS
u'\u017c' # 0xFD -> LATIN SMALL LETTER Z WITH DOT ABOVE
u'\u017e' # 0xFE -> LATIN SMALL LETTER Z WITH CARON
u'\u2019' # 0xFF -> RIGHT SINGLE QUOTATION MARK
)
### Encoding table
encoding_table=codecs.charmap_build(decoding_table)
| bsd-2-clause |
dexterx17/nodoSocket | clients/Python-2.7.6/Misc/BeOS-setup.py | 10 | 23664 | # Autodetecting setup.py script for building the Python extensions
#
# Modified for BeOS build. Donn Cave, March 27 2001.
__version__ = "special BeOS after 1.37"
import sys, os
from distutils import sysconfig
from distutils import text_file
from distutils.errors import *
from distutils.core import Extension, setup
from distutils.command.build_ext import build_ext
# This global variable is used to hold the list of modules to be disabled.
disabled_module_list = ['dbm', 'mmap', 'resource', 'nis']
def find_file(filename, std_dirs, paths):
    """Locate *filename* and report which extra directories are needed.

    Returns [] when the file lives in one of *std_dirs* (no additional
    -I/-L directives required), a one-element list [dir] when it is found
    in one of the extra *paths*, and None when it cannot be found at all.
    """
    # Standard system locations first: a hit there needs no extra flags.
    for directory in std_dirs:
        if os.path.exists(os.path.join(directory, filename)):
            return []
    # Fall back to the additional search paths, remembering which one hit.
    for directory in paths:
        if os.path.exists(os.path.join(directory, filename)):
            return [directory]
    return None
def find_library_file(compiler, libname, std_dirs, paths):
    """Like find_file(), but looks for the shared then the static library."""
    for lib_type in ('shared', 'static'):
        filename = compiler.library_filename(libname, lib_type=lib_type)
        result = find_file(filename, std_dirs, paths)
        if result is not None:
            return result
    return None
def module_enabled(extlist, modname):
    """Return the number of extensions in *extlist* named *modname*.

    Callers use the count as a truth value for "is the module present".
    """
    return len([ext for ext in extlist if ext.name == modname])
class PyBuildExt(build_ext):
def build_extensions(self):
# Detect which modules should be compiled
self.detect_modules()
# Remove modules that are present on the disabled list
self.extensions = [ext for ext in self.extensions
if ext.name not in disabled_module_list]
# Fix up the autodetected modules, prefixing all the source files
# with Modules/ and adding Python's include directory to the path.
(srcdir,) = sysconfig.get_config_vars('srcdir')
# Figure out the location of the source code for extension modules
moddir = os.path.join(os.getcwd(), srcdir, 'Modules')
moddir = os.path.normpath(moddir)
srcdir, tail = os.path.split(moddir)
srcdir = os.path.normpath(srcdir)
moddir = os.path.normpath(moddir)
# Fix up the paths for scripts, too
self.distribution.scripts = [os.path.join(srcdir, filename)
for filename in self.distribution.scripts]
for ext in self.extensions[:]:
ext.sources = [ os.path.join(moddir, filename)
for filename in ext.sources ]
ext.include_dirs.append( '.' ) # to get config.h
ext.include_dirs.append( os.path.join(srcdir, './Include') )
# If a module has already been built statically,
# don't build it here
if ext.name in sys.builtin_module_names:
self.extensions.remove(ext)
# Parse Modules/Setup to figure out which modules are turned
# on in the file.
input = text_file.TextFile('Modules/Setup', join_lines=1)
remove_modules = []
while 1:
line = input.readline()
if not line: break
line = line.split()
remove_modules.append( line[0] )
input.close()
for ext in self.extensions[:]:
if ext.name in remove_modules:
self.extensions.remove(ext)
# When you run "make CC=altcc" or something similar, you really want
# those environment variables passed into the setup.py phase. Here's
# a small set of useful ones.
compiler = os.environ.get('CC')
linker_so = os.environ.get('LDSHARED')
args = {}
# unfortunately, distutils doesn't let us provide separate C and C++
# compilers
if compiler is not None:
args['compiler_so'] = compiler
if linker_so is not None:
args['linker_so'] = linker_so + ' -shared'
self.compiler.set_executables(**args)
build_ext.build_extensions(self)
def build_extension(self, ext):
try:
build_ext.build_extension(self, ext)
except (CCompilerError, DistutilsError), why:
self.announce('WARNING: building of extension "%s" failed: %s' %
(ext.name, sys.exc_info()[1]))
def get_platform (self):
# Get value of sys.platform
platform = sys.platform
if platform[:6] =='cygwin':
platform = 'cygwin'
elif platform[:4] =='beos':
platform = 'beos'
return platform
    def detect_modules(self):
        """Autodetect which stdlib extension modules can be built here.

        Builds up ``exts`` (a list of distutils Extension objects) by
        probing the compiler's library/include search paths, then appends
        them to ``self.extensions``.  This variant is BeOS-flavoured: the
        default search paths come from the BELIBRARIES/BEINCLUDES
        environment variables and /boot/... locations.
        """
        # Library search path: $BELIBRARIES (';'-separated) or the BeOS
        # system default, always extended with the per-user config dir.
        try:
            belibs = os.environ['BELIBRARIES'].split(';')
        except KeyError:
            belibs = ['/boot/beos/system/lib']
        belibs.append('/boot/home/config/lib')
        self.compiler.library_dirs.append('/boot/home/config/lib')
        # Header search path: $BEINCLUDES (';'-separated), same pattern.
        try:
            beincl = os.environ['BEINCLUDES'].split(';')
        except KeyError:
            beincl = []
        beincl.append('/boot/home/config/include')
        self.compiler.include_dirs.append('/boot/home/config/include')
        # lib_dirs and inc_dirs are used to search for files;
        # if a file is found in one of those directories, it can
        # be assumed that no additional -I,-L directives are needed.
        lib_dirs = belibs
        inc_dirs = beincl
        exts = []
        platform = self.get_platform()
        # Check for MacOS X, which doesn't need libm.a at all
        math_libs = ['m']
        if platform in ['Darwin1.2', 'beos']:
            math_libs = []
        # XXX Omitted modules: gl, pure, dl, SGI-specific modules
        #
        # The following modules are all pretty straightforward, and compile
        # on pretty much any POSIXish platform.
        #
        # Some modules that are normally always on:
        exts.append( Extension('_weakref', ['_weakref.c']) )
        exts.append( Extension('_symtable', ['symtablemodule.c']) )
        # array objects
        exts.append( Extension('array', ['arraymodule.c']) )
        # complex math library functions
        exts.append( Extension('cmath', ['cmathmodule.c'],
                               libraries=math_libs) )
        # math library functions, e.g. sin()
        exts.append( Extension('math', ['mathmodule.c'],
                               libraries=math_libs) )
        # fast string operations implemented in C
        exts.append( Extension('strop', ['stropmodule.c']) )
        # time operations and variables
        exts.append( Extension('time', ['timemodule.c'],
                               libraries=math_libs) )
        # operator.add() and similar goodies
        exts.append( Extension('operator', ['operator.c']) )
        # access to the built-in codecs and codec registry
        exts.append( Extension('_codecs', ['_codecsmodule.c']) )
        # Python C API test module
        exts.append( Extension('_testcapi', ['_testcapimodule.c']) )
        # static Unicode character database
        exts.append( Extension('unicodedata', ['unicodedata.c']) )
        # access to ISO C locale support
        exts.append( Extension('_locale', ['_localemodule.c']) )
        # Modules with some UNIX dependencies -- on by default:
        # (If you have a really backward UNIX, select and socket may not be
        # supported...)
        # fcntl(2) and ioctl(2)
        exts.append( Extension('fcntl', ['fcntlmodule.c']) )
        # pwd(3)
        exts.append( Extension('pwd', ['pwdmodule.c']) )
        # grp(3)
        exts.append( Extension('grp', ['grpmodule.c']) )
        # posix (UNIX) errno values
        exts.append( Extension('errno', ['errnomodule.c']) )
        # select(2); not on ancient System V
        exts.append( Extension('select', ['selectmodule.c']) )
        # The md5 module implements the RSA Data Security, Inc. MD5
        # Message-Digest Algorithm, described in RFC 1321.  The necessary files
        # md5c.c and md5.h are included here.
        exts.append( Extension('md5', ['md5module.c', 'md5c.c']) )
        # The sha module implements the SHA checksum algorithm.
        # (NIST's Secure Hash Algorithm.)
        exts.append( Extension('sha', ['shamodule.c']) )
        # Helper module for various ascii-encoders
        exts.append( Extension('binascii', ['binascii.c']) )
        # Fred Drake's interface to the Python parser
        exts.append( Extension('parser', ['parsermodule.c']) )
        # cStringIO and cPickle
        exts.append( Extension('cStringIO', ['cStringIO.c']) )
        exts.append( Extension('cPickle', ['cPickle.c']) )
        # Memory-mapped files (also works on Win32).
        exts.append( Extension('mmap', ['mmapmodule.c']) )
        # Lance Ellinghaus's syslog daemon interface
        exts.append( Extension('syslog', ['syslogmodule.c']) )
        # George Neville-Neil's timing module:
        exts.append( Extension('timing', ['timingmodule.c']) )
        #
        # Here ends the simple stuff.  From here on, modules need certain
        # libraries, are platform-specific, or present other surprises.
        #
        # Multimedia modules
        # These don't work for 64-bit platforms!!!
        # These represent audio samples or images as strings:
        # Disabled on 64-bit platforms
        # (Python 2 long literal; sys.maxint is the native int ceiling.)
        if sys.maxint != 9223372036854775807L:
            # Operations on audio samples
            exts.append( Extension('audioop', ['audioop.c']) )
            # Operations on images
            exts.append( Extension('imageop', ['imageop.c']) )
            # Read SGI RGB image files (but coded portably)
            exts.append( Extension('rgbimg', ['rgbimgmodule.c']) )
        # readline
        if self.compiler.find_library_file(lib_dirs, 'readline'):
            readline_libs = ['readline']
            if self.compiler.find_library_file(lib_dirs +
                                               ['/usr/lib/termcap'],
                                               'termcap'):
                readline_libs.append('termcap')
            exts.append( Extension('readline', ['readline.c'],
                                   library_dirs=['/usr/lib/termcap'],
                                   libraries=readline_libs) )
        # The crypt module is now disabled by default because it breaks builds
        # on many systems (where -lcrypt is needed), e.g. Linux (I believe).
        if self.compiler.find_library_file(lib_dirs, 'crypt'):
            libs = ['crypt']
        else:
            libs = []
        exts.append( Extension('crypt', ['cryptmodule.c'], libraries=libs) )
        # socket(2)
        # Detect SSL support for the socket module
        ssl_incs = find_file('openssl/ssl.h', inc_dirs,
                             ['/usr/local/ssl/include',
                              '/usr/contrib/ssl/include/'
                             ]
                             )
        ssl_libs = find_library_file(self.compiler, 'ssl',lib_dirs,
                                     ['/usr/local/ssl/lib',
                                      '/usr/contrib/ssl/lib/'
                                     ] )
        # Only build the SSL-enabled _socket if both headers and libs exist.
        if (ssl_incs is not None and
            ssl_libs is not None):
            exts.append( Extension('_socket', ['socketmodule.c'],
                                   include_dirs = ssl_incs,
                                   library_dirs = ssl_libs,
                                   libraries = ['ssl', 'crypto'],
                                   define_macros = [('USE_SSL',1)] ) )
        else:
            exts.append( Extension('_socket', ['socketmodule.c']) )
        # Modules that provide persistent dictionary-like semantics.  You will
        # probably want to arrange for at least one of them to be available on
        # your machine, though none are defined by default because of library
        # dependencies.  The Python module anydbm.py provides an
        # implementation independent wrapper for these; dumbdbm.py provides
        # similar functionality (but slower of course) implemented in Python.
        # The standard Unix dbm module:
        if platform not in ['cygwin']:
            if (self.compiler.find_library_file(lib_dirs, 'ndbm')):
                exts.append( Extension('dbm', ['dbmmodule.c'],
                                       libraries = ['ndbm'] ) )
            else:
                exts.append( Extension('dbm', ['dbmmodule.c']) )
        # Anthony Baxter's gdbm module.  GNU dbm(3) will require -lgdbm:
        if (self.compiler.find_library_file(lib_dirs, 'gdbm')):
            exts.append( Extension('gdbm', ['gdbmmodule.c'],
                                   libraries = ['gdbm'] ) )
        # Berkeley DB interface.
        #
        # This requires the Berkeley DB code, see
        # ftp://ftp.cs.berkeley.edu/pub/4bsd/db.1.85.tar.gz
        #
        # Edit the variables DB and DBPORT to point to the db top directory
        # and the subdirectory of PORT where you built it.
        #
        # (See http://electricrain.com/greg/python/bsddb3/ for an interface to
        # BSD DB 3.x.)
        dblib = []
        if self.compiler.find_library_file(lib_dirs, 'db'):
            dblib = ['db']
        # Prefer the 1.85-compat header (db_185.h) from db2/db3 over plain db.h.
        db185_incs = find_file('db_185.h', inc_dirs,
                               ['/usr/include/db3', '/usr/include/db2'])
        db_inc = find_file('db.h', inc_dirs, ['/usr/include/db1'])
        if db185_incs is not None:
            exts.append( Extension('bsddb', ['bsddbmodule.c'],
                                   include_dirs = db185_incs,
                                   define_macros=[('HAVE_DB_185_H',1)],
                                   libraries = dblib ) )
        elif db_inc is not None:
            exts.append( Extension('bsddb', ['bsddbmodule.c'],
                                   include_dirs = db_inc,
                                   libraries = dblib) )
        # Unix-only modules
        # NOTE(review): the comment says "Unix-only", but the guard builds
        # termios only when platform == 'win32' -- this looks inverted
        # (upstream setup.py excludes win32 here); confirm before relying on it.
        if platform == 'win32':
            # Steen Lumholt's termios module
            exts.append( Extension('termios', ['termios.c']) )
            # Jeremy Hylton's rlimit interface
            if platform not in ['cygwin']:
                exts.append( Extension('resource', ['resource.c']) )
        # Generic dynamic loading module
        #exts.append( Extension('dl', ['dlmodule.c']) )
        # Sun yellow pages. Some systems have the functions in libc.
        if platform not in ['cygwin']:
            if (self.compiler.find_library_file(lib_dirs, 'nsl')):
                libs = ['nsl']
            else:
                libs = []
            exts.append( Extension('nis', ['nismodule.c'],
                                   libraries = libs) )
        # Curses support, requring the System V version of curses, often
        # provided by the ncurses library.
        if (self.compiler.find_library_file(lib_dirs, 'ncurses')):
            curses_libs = ['ncurses']
            exts.append( Extension('_curses', ['_cursesmodule.c'],
                                   libraries = curses_libs) )
        elif (self.compiler.find_library_file(lib_dirs, 'curses')):
            if (self.compiler.find_library_file(lib_dirs, 'terminfo')):
                curses_libs = ['curses', 'terminfo']
            else:
                curses_libs = ['curses', 'termcap']
            exts.append( Extension('_curses', ['_cursesmodule.c'],
                                   libraries = curses_libs) )
        # If the curses module is enabled, check for the panel module
        if (os.path.exists('Modules/_curses_panel.c') and
            module_enabled(exts, '_curses') and
            self.compiler.find_library_file(lib_dirs, 'panel')):
            exts.append( Extension('_curses_panel', ['_curses_panel.c'],
                                   libraries = ['panel'] + curses_libs) )
        # Lee Busby's SIGFPE modules.
        # The library to link fpectl with is platform specific.
        # Choose *one* of the options below for fpectl:
        if platform == 'irix5':
            # For SGI IRIX (tested on 5.3):
            exts.append( Extension('fpectl', ['fpectlmodule.c'],
                                   libraries=['fpe']) )
        elif 0: # XXX how to detect SunPro?
            # For Solaris with SunPro compiler (tested on Solaris 2.5 with SunPro C 4.2):
            # (Without the compiler you don't have -lsunmath.)
            #fpectl fpectlmodule.c -R/opt/SUNWspro/lib -lsunmath -lm
            pass
        else:
            # For other systems: see instructions in fpectlmodule.c.
            #fpectl fpectlmodule.c ...
            exts.append( Extension('fpectl', ['fpectlmodule.c']) )
        # Andrew Kuchling's zlib module.
        # This require zlib 1.1.3 (or later).
        # See http://www.gzip.org/zlib/
        if (self.compiler.find_library_file(lib_dirs, 'z')):
            exts.append( Extension('zlib', ['zlibmodule.c'],
                                   libraries = ['z']) )
        # Interface to the Expat XML parser
        #
        # Expat is written by James Clark and must be downloaded separately
        # (see below).  The pyexpat module was written by Paul Prescod after a
        # prototype by Jack Jansen.
        #
        # The Expat dist includes Windows .lib and .dll files.  Home page is
        # at http://www.jclark.com/xml/expat.html, the current production
        # release is always ftp://ftp.jclark.com/pub/xml/expat.zip.
        #
        # EXPAT_DIR, below, should point to the expat/ directory created by
        # unpacking the Expat source distribution.
        #
        # Note: the expat build process doesn't yet build a libexpat.a; you
        # can do this manually while we try convince the author to add it.  To
        # do so, cd to EXPAT_DIR, run "make" if you have not done so, then
        # run:
        #
        #    ar cr libexpat.a xmltok/*.o xmlparse/*.o
        #
        expat_defs = []
        expat_incs = find_file('expat.h', inc_dirs, [])
        if expat_incs is not None:
            # expat.h was found
            expat_defs = [('HAVE_EXPAT_H', 1)]
        else:
            expat_incs = find_file('xmlparse.h', inc_dirs, [])
        if (expat_incs is not None and
            self.compiler.find_library_file(lib_dirs, 'expat')):
            exts.append( Extension('pyexpat', ['pyexpat.c'],
                                   define_macros = expat_defs,
                                   libraries = ['expat']) )
        # Platform-specific libraries
        if platform == 'linux2':
            # Linux-specific modules
            exts.append( Extension('linuxaudiodev', ['linuxaudiodev.c']) )
        if platform == 'sunos5':
            # SunOS specific modules
            exts.append( Extension('sunaudiodev', ['sunaudiodev.c']) )
        self.extensions.extend(exts)
        # Call the method for detecting whether _tkinter can be compiled
        self.detect_tkinter(inc_dirs, lib_dirs)
def detect_tkinter(self, inc_dirs, lib_dirs):
# The _tkinter module.
# Assume we haven't found any of the libraries or include files
tcllib = tklib = tcl_includes = tk_includes = None
for version in ['8.4', '8.3', '8.2', '8.1', '8.0']:
tklib = self.compiler.find_library_file(lib_dirs,
'tk' + version )
tcllib = self.compiler.find_library_file(lib_dirs,
'tcl' + version )
if tklib and tcllib:
# Exit the loop when we've found the Tcl/Tk libraries
break
# Now check for the header files
if tklib and tcllib:
# Check for the include files on Debian, where
# they're put in /usr/include/{tcl,tk}X.Y
debian_tcl_include = [ '/usr/include/tcl' + version ]
debian_tk_include = [ '/usr/include/tk' + version ] + debian_tcl_include
tcl_includes = find_file('tcl.h', inc_dirs, debian_tcl_include)
tk_includes = find_file('tk.h', inc_dirs, debian_tk_include)
if (tcllib is None or tklib is None and
tcl_includes is None or tk_includes is None):
# Something's missing, so give up
return
# OK... everything seems to be present for Tcl/Tk.
include_dirs = [] ; libs = [] ; defs = [] ; added_lib_dirs = []
for dir in tcl_includes + tk_includes:
if dir not in include_dirs:
include_dirs.append(dir)
# Check for various platform-specific directories
platform = self.get_platform()
if platform == 'sunos5':
include_dirs.append('/usr/openwin/include')
added_lib_dirs.append('/usr/openwin/lib')
elif os.path.exists('/usr/X11R6/include'):
include_dirs.append('/usr/X11R6/include')
added_lib_dirs.append('/usr/X11R6/lib')
elif os.path.exists('/usr/X11R5/include'):
include_dirs.append('/usr/X11R5/include')
added_lib_dirs.append('/usr/X11R5/lib')
else:
# Assume default location for X11
include_dirs.append('/usr/X11/include')
added_lib_dirs.append('/usr/X11/lib')
# Check for BLT extension
if self.compiler.find_library_file(lib_dirs + added_lib_dirs, 'BLT8.0'):
defs.append( ('WITH_BLT', 1) )
libs.append('BLT8.0')
# Add the Tcl/Tk libraries
libs.append('tk'+version)
libs.append('tcl'+version)
if platform in ['aix3', 'aix4']:
libs.append('ld')
# Finally, link with the X11 libraries
libs.append('X11')
ext = Extension('_tkinter', ['_tkinter.c', 'tkappinit.c'],
define_macros=[('WITH_APPINIT', 1)] + defs,
include_dirs = include_dirs,
libraries = libs,
library_dirs = added_lib_dirs,
)
self.extensions.append(ext)
# XXX handle these, but how to detect?
# *** Uncomment and edit for PIL (TkImaging) extension only:
# -DWITH_PIL -I../Extensions/Imaging/libImaging tkImaging.c \
# *** Uncomment and edit for TOGL extension only:
# -DWITH_TOGL togl.c \
# *** Uncomment these for TOGL extension only:
# -lGL -lGLU -lXext -lXmu \
def main():
    # Entry point: drive distutils with the custom PyBuildExt command,
    # which autodetects and appends the rest of the extension modules.
    setup(name = 'Python standard library',
          version = '%d.%d' % sys.version_info[:2],
          cmdclass = {'build_ext':PyBuildExt},
          # The struct module is defined here, because build_ext won't be
          # called unless there's at least one extension module defined.
          ext_modules=[Extension('struct', ['structmodule.c'])],
          # Scripts to install
          scripts = ['Tools/scripts/pydoc']
          )
# --install-platlib
if __name__ == '__main__':
    # Tell sysconfig this is a build of Python itself before running distutils.
    sysconfig.set_python_build()
    main()
| mit |
amyvmiwei/chromium | third_party/scons/scons-local/SCons/Variables/ListVariable.py | 3 | 4455 | """engine.SCons.Variables.ListVariable
This file defines the option type for SCons implementing 'lists'.
A 'list' option may either be 'all', 'none' or a list of names
separated by comma. After the option has been processed, the option
value holds either the named list elements, all list elements, or no
list elements at all.
Usage example:
list_of_libs = Split('x11 gl qt ical')
opts = Variables()
opts.Add(ListVariable('shared',
'libraries to build as shared libraries',
'all',
elems = list_of_libs))
...
for lib in list_of_libs:
if lib in env['shared']:
env.SharedObject(...)
else:
env.Object(...)
"""
#
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Variables/ListVariable.py 3897 2009/01/13 06:45:54 scons"
# Known Bug: This should behave like a Set-Type, but does not really,
# since elements can occur twice.
__all__ = ['ListVariable',]
import string
import UserList
import SCons.Util
class _ListVariable(UserList.UserList):
    """Value type for a list option (Python 2: UserList/string modules).

    Holds the selected elements plus the sorted universe of allowed
    elements, so __str__ can render the compact forms 'all' and 'none'.
    """
    def __init__(self, initlist=[], allowedElems=[]):
        # NOTE(review): mutable default arguments; tolerable here only
        # because both are copied/filtered rather than mutated in place.
        UserList.UserList.__init__(self, filter(None, initlist))
        self.allowedElems = allowedElems[:]
        self.allowedElems.sort()
    # Ordering/equality between list variables is deliberately undefined.
    def __cmp__(self, other):
        raise NotImplementedError
    def __eq__(self, other):
        raise NotImplementedError
    def __ge__(self, other):
        raise NotImplementedError
    def __gt__(self, other):
        raise NotImplementedError
    def __le__(self, other):
        raise NotImplementedError
    def __lt__(self, other):
        raise NotImplementedError
    def __str__(self):
        # Canonical rendering: 'none', 'all', or comma-joined element names.
        # Sorting self.data here is a side effect of printing.
        if len(self) == 0:
            return 'none'
        self.data.sort()
        if self.data == self.allowedElems:
            return 'all'
        else:
            return string.join(self, ',')
    def prepare_to_store(self):
        # Serialized form written when the option is saved: same as str().
        return self.__str__()
def _converter(val, allowedElems, mapdict):
    """Convert a raw option string into a _ListVariable.

    'none' expands to the empty list and 'all' to every allowed element;
    otherwise *val* is split on commas, each item is translated through
    *mapdict* (identity for unmapped names), and any resulting item not in
    *allowedElems* raises ValueError.
    """
    if val == 'none':
        val = []
    elif val == 'all':
        val = allowedElems
    else:
        val = filter(None, string.split(val, ','))
        val = map(lambda v, m=mapdict: m.get(v, v), val)
        notAllowed = filter(lambda v, aE=allowedElems: not v in aE, val)
        if notAllowed:
            raise ValueError("Invalid value(s) for option: %s" %
                             string.join(notAllowed, ','))
    return _ListVariable(val, allowedElems)
## def _validator(key, val, env):
## """
## """
## #     todo: write validator for pkg list
## return 1
def ListVariable(key, help, default, names, map={}):
    """
    The input parameters describe a 'package list' option, thus they
    are returned with the correct converter and validater appended. The
    result is usable for input to opts.Add() .

    A 'package list' option may either be 'all', 'none' or a
    comma-separated list of package names (see _converter above).
    """
    # NOTE(review): mutable default for 'map'; safe here since it is only
    # read (captured by the converter lambda), never mutated.
    names_str = 'allowed names: %s' % string.join(names, ' ')
    if SCons.Util.is_List(default):
        default = string.join(default, ',')
    help = string.join(
        (help, '(all|none|comma-separated list of names)', names_str),
        '\n    ')
    # Tuple layout expected by opts.Add(): (key, help, default,
    # validator, converter).  The validator slot is intentionally None.
    return (key, help, default,
            None, #_validator,
            lambda val, elems=names, m=map: _converter(val, elems, m))
| bsd-3-clause |
hendradarwin/VTK | ThirdParty/Twisted/twisted/python/threadpool.py | 29 | 8125 | # -*- test-case-name: twisted.test.test_threadpool -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
twisted.python.threadpool: a pool of threads to which we dispatch tasks.
In most cases you can just use C{reactor.callInThread} and friends
instead of creating a thread pool directly.
"""
from __future__ import division, absolute_import
try:
from Queue import Queue
except ImportError:
from queue import Queue
import contextlib
import threading
import copy
from twisted.python import log, context, failure
WorkerStop = object()
class ThreadPool:
    """
    This class (hopefully) generalizes the functionality of a pool of
    threads to which work can be dispatched.

    L{callInThread} and L{stop} should only be called from
    a single thread, unless you make a subclass where L{stop} and
    L{_startSomeWorkers} are synchronized.

    @ivar started: Whether or not the thread pool is currently running.
    @type started: L{bool}

    @ivar threads: List of workers currently running in this thread pool.
    @type threads: L{list}
    """
    min = 5                 # default minimum number of worker threads
    max = 20                # default maximum number of worker threads
    joined = False          # True once stop() has been called
    started = False         # True between start() and stop()
    workers = 0             # count of live worker threads
    name = None             # optional pool name, embedded in thread names
    # Class-attribute indirection points so subclasses/tests can substitute
    # their own thread implementation.
    threadFactory = threading.Thread
    currentThread = staticmethod(threading.currentThread)
    def __init__(self, minthreads=5, maxthreads=20, name=None):
        """
        Create a new threadpool.

        @param minthreads: minimum number of threads in the pool
        @param maxthreads: maximum number of threads in the pool
        """
        assert minthreads >= 0, 'minimum is negative'
        assert minthreads <= maxthreads, 'minimum is greater than maximum'
        # Unbounded FIFO of pending work tuples (or WorkerStop sentinels).
        self.q = Queue(0)
        self.min = minthreads
        self.max = maxthreads
        self.name = name
        self.waiters = []   # workers currently blocked waiting on the queue
        self.threads = []   # all workers started and not yet exited
        self.working = []   # workers currently executing a task
    def start(self):
        """
        Start the threadpool.
        """
        self.joined = False
        self.started = True
        # Start some threads.
        self.adjustPoolsize()
    def startAWorker(self):
        # Spawn one worker thread running _worker(); name encodes the pool.
        self.workers += 1
        name = "PoolThread-%s-%s" % (self.name or id(self), self.workers)
        newThread = self.threadFactory(target=self._worker, name=name)
        self.threads.append(newThread)
        newThread.start()
    def stopAWorker(self):
        # The WorkerStop sentinel makes exactly one worker's loop exit.
        self.q.put(WorkerStop)
        self.workers -= 1
    def __setstate__(self, state):
        # Unpickling: only min/max survive (see __getstate__); rebuild all
        # runtime state (queue, thread lists) via __init__.
        self.__dict__ = state
        ThreadPool.__init__(self, self.min, self.max)
    def __getstate__(self):
        # Pickle only the pool bounds; queues and threads are not picklable.
        state = {}
        state['min'] = self.min
        state['max'] = self.max
        return state
    def _startSomeWorkers(self):
        # Ensure one worker per outstanding task (queued + in progress),
        # capped at self.max.
        neededSize = self.q.qsize() + len(self.working)
        # Create enough, but not too many
        while self.workers < min(self.max, neededSize):
            self.startAWorker()
    def callInThread(self, func, *args, **kw):
        """
        Call a callable object in a separate thread.

        @param func: callable object to be called in separate thread

        @param *args: positional arguments to be passed to C{func}

        @param **kw: keyword args to be passed to C{func}
        """
        self.callInThreadWithCallback(None, func, *args, **kw)
    def callInThreadWithCallback(self, onResult, func, *args, **kw):
        """
        Call a callable object in a separate thread and call C{onResult}
        with the return value, or a L{twisted.python.failure.Failure}
        if the callable raises an exception.

        The callable is allowed to block, but the C{onResult} function
        must not block and should perform as little work as possible.

        A typical action for C{onResult} for a threadpool used with a
        Twisted reactor would be to schedule a
        L{twisted.internet.defer.Deferred} to fire in the main
        reactor thread using C{.callFromThread}.  Note that C{onResult}
        is called inside the separate thread, not inside the reactor thread.

        @param onResult: a callable with the signature C{(success, result)}.
            If the callable returns normally, C{onResult} is called with
            C{(True, result)} where C{result} is the return value of the
            callable.  If the callable throws an exception, C{onResult} is
            called with C{(False, failure)}.

            Optionally, C{onResult} may be C{None}, in which case it is not
            called at all.

        @param func: callable object to be called in separate thread

        @param *args: positional arguments to be passed to C{func}

        @param **kwargs: keyword arguments to be passed to C{func}
        """
        # Silently drop work after stop(); callers are expected not to race.
        if self.joined:
            return
        # Capture the current twisted context so the task runs under it.
        ctx = context.theContextTracker.currentContext().contexts[-1]
        o = (ctx, func, args, kw, onResult)
        self.q.put(o)
        if self.started:
            self._startSomeWorkers()
    @contextlib.contextmanager
    def _workerState(self, stateList, workerThread):
        """
        Manages adding and removing this worker from a list of workers
        in a particular state.

        @param stateList: the list managing workers in this state

        @param workerThread: the thread the worker is running in, used to
            represent the worker in stateList
        """
        stateList.append(workerThread)
        try:
            yield
        finally:
            stateList.remove(workerThread)
    def _worker(self):
        """
        Method used as target of the created threads: retrieve a task to run
        from the threadpool, run it, and proceed to the next task until
        threadpool is stopped.
        """
        ct = self.currentThread()
        o = self.q.get()
        while o is not WorkerStop:
            with self._workerState(self.working, ct):
                ctx, function, args, kwargs, onResult = o
                del o  # drop the tuple reference while the task runs
                try:
                    result = context.call(ctx, function, *args, **kwargs)
                    success = True
                except:
                    success = False
                    if onResult is None:
                        context.call(ctx, log.err)
                        result = None
                    else:
                        result = failure.Failure()
                # Release references before the (possibly long) callback.
                del function, args, kwargs
                if onResult is not None:
                    try:
                        context.call(ctx, onResult, success, result)
                    except:
                        # Errors in the callback itself are only logged.
                        context.call(ctx, log.err)
                del ctx, onResult, result
            # Block for the next task; recorded as a waiter meanwhile.
            with self._workerState(self.waiters, ct):
                o = self.q.get()
        self.threads.remove(ct)
    def stop(self):
        """
        Shutdown the threads in the threadpool.
        """
        self.joined = True
        self.started = False
        threads = copy.copy(self.threads)
        # One WorkerStop sentinel per live worker drains the pool.
        while self.workers:
            self.q.put(WorkerStop)
            self.workers -= 1
        # and let's just make sure
        # FIXME: threads that have died before calling stop() are not joined.
        for thread in threads:
            thread.join()
    def adjustPoolsize(self, minthreads=None, maxthreads=None):
        # Re-bound the pool; None keeps the current bound.
        if minthreads is None:
            minthreads = self.min
        if maxthreads is None:
            maxthreads = self.max
        assert minthreads >= 0, 'minimum is negative'
        assert minthreads <= maxthreads, 'minimum is greater than maximum'
        self.min = minthreads
        self.max = maxthreads
        if not self.started:
            return
        # Kill of some threads if we have too many.
        while self.workers > self.max:
            self.stopAWorker()
        # Start some threads if we have too few.
        while self.workers < self.min:
            self.startAWorker()
        # Start some threads if there is a need.
        self._startSomeWorkers()
    def dumpStats(self):
        # Debug helper: log the raw internal state lists.
        log.msg('queue: %s'   % self.q.queue)
        log.msg('waiters: %s' % self.waiters)
        log.msg('workers: %s' % self.working)
        log.msg('total: %s'   % self.threads)
zorroz/microblog | flask/lib/python2.7/site-packages/pip/util.py | 343 | 24172 | import sys
import shutil
import os
import stat
import re
import posixpath
import zipfile
import tarfile
import subprocess
import textwrap
from pip.exceptions import InstallationError, BadCommand, PipError
from pip.backwardcompat import(WindowsError, string_types, raw_input,
console_to_str, user_site, PermissionError)
from pip.locations import site_packages, running_under_virtualenv, virtualenv_no_global
from pip.log import logger
from pip._vendor import pkg_resources
from pip._vendor.distlib import version
__all__ = ['rmtree', 'display_path', 'backup_dir',
'find_command', 'ask', 'Inf',
'normalize_name', 'splitext',
'format_size', 'is_installable_dir',
'is_svn_page', 'file_contents',
'split_leading_dir', 'has_leading_dir',
'make_path_relative', 'normalize_path',
'renames', 'get_terminal_size', 'get_prog',
'unzip_file', 'untar_file', 'create_download_cache_folder',
'cache_download', 'unpack_file', 'call_subprocess']
def get_prog():
    """Return the program name for usage/help messages.

    When pip was launched as ``python -m pip`` (argv[0] is __main__.py or
    -c), show the interpreter-qualified form; otherwise just 'pip'.
    """
    try:
        argv0 = os.path.basename(sys.argv[0])
        if argv0 in ('__main__.py', '-c'):
            return "%s -m pip" % sys.executable
    except (AttributeError, TypeError, IndexError):
        # sys.argv may be missing or malformed in embedded interpreters.
        pass
    return 'pip'
def rmtree(dir, ignore_errors=False):
    """Remove a directory tree, retrying read-only files via rmtree_errorhandler."""
    shutil.rmtree(dir, ignore_errors=ignore_errors, onerror=rmtree_errorhandler)
def rmtree_errorhandler(func, path, exc_info):
    """On Windows, the files in .svn are read-only, so when rmtree() tries to
    remove them, an exception is thrown.  We catch that here, remove the
    read-only attribute, and hopefully continue without problems."""
    # exc_info is the (type, value, traceback) triple from shutil.rmtree.
    exctype, value = exc_info[:2]
    # Only handle access-denied style errors; the args indices differ per
    # Python version/exception class (error code 5 / errno 13 == EACCES).
    if not ((exctype is WindowsError and value.args[0] == 5) or #others
            (exctype is OSError and value.args[0] == 13) or #python2.4
            (exctype is PermissionError and value.args[3] == 5) #python3.3
            ):
        # Not an access error: re-raise the in-flight exception unchanged.
        raise
    # file type should currently be read only
    if ((os.stat(path).st_mode & stat.S_IREAD) != stat.S_IREAD):
        raise
    # convert to read/write
    os.chmod(path, stat.S_IWRITE)
    # use the original function to repeat the operation
    func(path)
def display_path(path):
    """Gives the display value for a given path, making it relative to cwd
    if possible."""
    path = os.path.normcase(os.path.abspath(path))
    cwd = os.getcwd()
    if path.startswith(cwd + os.path.sep):
        path = '.' + path[len(cwd):]
    return path
def backup_dir(dir, ext='.bak'):
    """Figure out the name of a directory to back up the given dir to
    (adding .bak, .bak2, etc)"""
    n = 1
    candidate = dir + ext
    while os.path.exists(candidate):
        n += 1
        candidate = dir + ext + str(n)
    return candidate
def find_command(cmd, paths=None, pathext=None):
    """Searches the PATH for the given command and returns its path

    Raises BadCommand if *cmd* is not found on any of *paths*
    (default: $PATH), trying Windows executable extensions as needed.
    """
    if paths is None:
        paths = os.environ.get('PATH', '').split(os.pathsep)
    if isinstance(paths, string_types):
        paths = [paths]
    # check if there are funny path extensions for executables, e.g. Windows
    if pathext is None:
        pathext = get_pathext()
    pathext = [ext for ext in pathext.lower().split(os.pathsep) if len(ext)]
    # don't use extensions if the command ends with one of them
    if os.path.splitext(cmd)[1].lower() in pathext:
        pathext = ['']
    # check if we find the command on PATH
    for path in paths:
        # try without extension first
        cmd_path = os.path.join(path, cmd)
        for ext in pathext:
            # then including the extension
            cmd_path_ext = cmd_path + ext
            if os.path.isfile(cmd_path_ext):
                return cmd_path_ext
        # NOTE(review): the extension-qualified names are checked before the
        # bare name, despite the comments suggesting the reverse order.
        if os.path.isfile(cmd_path):
            return cmd_path
    raise BadCommand('Cannot find command %r' % cmd)
def get_pathext(default_pathext=None):
    """Returns the path extensions from environment or a default"""
    if default_pathext is None:
        default_pathext = os.pathsep.join(['.COM', '.EXE', '.BAT', '.CMD'])
    return os.environ.get('PATHEXT', default_pathext)
def ask_path_exists(message, options):
    """Like ask(), but honour a scripted answer from $PIP_EXISTS_ACTION.

    The variable holds whitespace-separated candidate actions; the first
    one that is a valid option is returned without prompting.
    """
    scripted = os.environ.get('PIP_EXISTS_ACTION', '').split()
    for action in scripted:
        if action in options:
            return action
    return ask(message, options)
def ask(message, options):
    """Prompt the user with *message* until one of *options* is typed.

    Raises Exception immediately if $PIP_NO_INPUT is set, so automated
    runs fail loudly instead of hanging on a prompt.
    """
    while True:
        if os.environ.get('PIP_NO_INPUT'):
            raise Exception('No input was expected ($PIP_NO_INPUT set); question: %s' % message)
        answer = raw_input(message).strip().lower()
        if answer in options:
            return answer
        print('Your response (%r) was not one of the expected responses: %s' % (
            answer, ', '.join(options)))
class _Inf(object):
    """A singleton that compares greater than every other object."""
    def __eq__(self, other):
        # Equal only to itself.
        return self is other
    def __ne__(self, other):
        return not self.__eq__(other)
    def __lt__(self, other):
        return False
    def __le__(self, other):
        return False
    def __gt__(self, other):
        return True
    def __ge__(self, other):
        return True
    def __repr__(self):
        return 'Inf'
Inf = _Inf() #this object is not currently used as a sortable in our code
del _Inf
_normalize_re = re.compile(r'[^a-z]', re.I)
def normalize_name(name):
    """Canonicalize a project name: lowercase, every non-letter becomes '-'."""
    lowered = name.lower()
    return _normalize_re.sub('-', lowered)
def format_size(bytes):
    """Render a byte count in pip's human-readable units (MB/kB/bytes)."""
    if bytes > 1000 * 1000:
        return '%.1fMB' % (bytes / 1000.0 / 1000)
    if bytes > 10 * 1000:
        return '%ikB' % (bytes / 1000)
    if bytes > 1000:
        return '%.1fkB' % (bytes / 1000.0)
    return '%ibytes' % bytes
def is_installable_dir(path):
    """Return True if `path` is a directory containing a setup.py file."""
    return os.path.isdir(path) and os.path.isfile(os.path.join(path, 'setup.py'))
def is_svn_page(html):
    """Returns true if the page appears to be the index page of an svn repository"""
    has_title = re.search(r'<title>[^<]*Revision \d+:', html)
    powered = re.search(r'Powered by (?:<a[^>]*?>)?Subversion', html, re.I)
    return has_title and powered
def file_contents(filename):
    """Read *filename* as bytes and decode it as UTF-8."""
    with open(filename, 'rb') as fp:
        return fp.read().decode('utf-8')
def split_leading_dir(path):
    """Split *path* into its first component and the remainder.

    Handles both '/' and '\\' separators; leading separators are stripped
    first.  Returns the two-element split (a list) when a separator is
    present, else the tuple (path, '').
    """
    path = str(path).lstrip('/').lstrip('\\')
    slash = path.find('/')
    backslash = path.find('\\')
    if slash != -1 and (backslash == -1 or slash < backslash):
        return path.split('/', 1)
    if backslash != -1:
        return path.split('\\', 1)
    return path, ''
def has_leading_dir(paths):
    """Returns true if all the paths have the same leading path name
    (i.e., everything is in one subdirectory in an archive)"""
    common = None
    for p in paths:
        prefix, _rest = split_leading_dir(p)
        if not prefix:
            return False
        if common is None:
            common = prefix
        elif prefix != common:
            return False
    return True
def make_path_relative(path, rel_to):
    """Return *path* rewritten relative to the directory *rel_to*.

    >>> make_path_relative('/usr/share/something/a-file.pth',
    ...                    '/usr/share/another-place/src/Directory')
    '../../../something/a-file.pth'
    >>> make_path_relative('/usr/share/a-file.pth', '/usr/share/')
    'a-file.pth'
    """
    filename = os.path.basename(path)
    directory = os.path.normpath(os.path.abspath(os.path.dirname(path)))
    base = os.path.normpath(os.path.abspath(rel_to))
    path_parts = directory.strip(os.path.sep).split(os.path.sep)
    base_parts = base.strip(os.path.sep).split(os.path.sep)
    # Drop the shared leading components, then climb out of what remains
    # of *rel_to* and descend into what remains of *path*.
    while path_parts and base_parts and path_parts[0] == base_parts[0]:
        path_parts.pop(0)
        base_parts.pop(0)
    full_parts = ['..'] * len(base_parts) + path_parts + [filename]
    if full_parts == ['']:
        return '.' + os.path.sep
    return os.path.sep.join(full_parts)
def normalize_path(path):
    """
    Convert a path to its canonical, case-normalized, absolute version.
    """
    expanded = os.path.expanduser(path)
    return os.path.normcase(os.path.realpath(expanded))
def splitext(path):
    """Like os.path.splitext, but treats '.tar.<z>' names as one extension."""
    base, ext = posixpath.splitext(path)
    if base.lower().endswith('.tar'):
        base, tar_part = base[:-4], base[-4:]
        ext = tar_part + ext
    return base, ext
def renames(old, new):
    """Like os.renames(), but works across devices.

    Creates any missing parent directories of *new*, moves *old* into
    place with shutil.move (which copies when a plain rename cannot cross
    devices), then prunes now-empty parent directories of *old*.
    """
    new_dir, new_name = os.path.split(new)
    if new_dir and new_name and not os.path.exists(new_dir):
        os.makedirs(new_dir)
    shutil.move(old, new)
    old_dir, old_name = os.path.split(old)
    if old_dir and old_name:
        try:
            os.removedirs(old_dir)
        except OSError:
            # Parent was not empty; leave it alone.
            pass
def is_local(path):
    """
    Return True if *path* lives inside sys.prefix when running in a
    virtualenv.  Outside a virtualenv every path counts as "local".
    """
    if not running_under_virtualenv():
        return True
    prefix = normalize_path(sys.prefix)
    return normalize_path(path).startswith(prefix)
def dist_is_local(dist):
    """
    Return True if given Distribution object is installed locally
    (i.e. within current virtualenv).
    Always True if we're not in a virtualenv.
    """
    # dist_location() resolves editable installs to their .egg-link file,
    # so editables are judged by where that link lives.
    return is_local(dist_location(dist))
def dist_in_usersite(dist):
    """
    Return True if the given Distribution is installed in the per-user
    site-packages directory (user_site).
    """
    if not user_site:
        return False
    location = normalize_path(dist_location(dist))
    return location.startswith(normalize_path(user_site))
def dist_in_site_packages(dist):
    """
    Return True if given Distribution is installed in distutils.sysconfig.get_python_lib().
    """
    # Both sides are canonicalized so symlinks and case differences don't
    # produce false negatives.
    return normalize_path(dist_location(dist)).startswith(normalize_path(site_packages))
def dist_is_editable(dist):
    """Is distribution an editable install?"""
    #TODO: factor out determining editableness out of FrozenRequirement
    # NOTE(review): import is deferred to call time -- presumably to avoid a
    # circular import between pip and this module; confirm before moving it.
    from pip import FrozenRequirement
    req = FrozenRequirement.from_dist(dist, [])
    return req.editable
def get_installed_distributions(local_only=True,
                                skip=('setuptools', 'pip', 'python', 'distribute'),
                                include_editables=True,
                                editables_only=False):
    """
    Return a list of installed Distribution objects.

    If ``local_only`` is True (default), only return installations
    local to the current virtualenv, if in a virtualenv.

    ``skip`` argument is an iterable of lower-case project names to
    ignore; defaults to ('setuptools', 'pip', 'python'). [FIXME also
    skip virtualenv?]

    If ``include_editables`` is False, don't report editables.

    If ``editables_only`` is True, only report editables.
    """
    if local_only:
        local_test = dist_is_local
    else:
        def local_test(dist):
            return True

    if include_editables:
        def editable_test(dist):
            return True
    else:
        def editable_test(dist):
            return not dist_is_editable(dist)

    if editables_only:
        def editables_only_test(dist):
            return dist_is_editable(dist)
    else:
        def editables_only_test(dist):
            return True

    return [dist for dist in pkg_resources.working_set
            if local_test(dist)
            and dist.key not in skip
            and editable_test(dist)
            and editables_only_test(dist)]
def egg_link_path(dist):
    """
    Return the path of the .egg-link file for *dist*, or None when absent.

    The search order depends on the environment:
    1) not in a virtualenv: site.USER_SITE (when set), then site_packages
    2) virtualenv without global access: site_packages only
    3) virtualenv with global access: site_packages, then site.USER_SITE

    When a link exists in more than one location the first match wins.
    """
    if running_under_virtualenv():
        candidates = [site_packages]
        if not virtualenv_no_global() and user_site:
            candidates.append(user_site)
    else:
        candidates = []
        if user_site:
            candidates.append(user_site)
        candidates.append(site_packages)
    for directory in candidates:
        egglink = os.path.join(directory, dist.project_name) + '.egg-link'
        if os.path.isfile(egglink):
            return egglink
def dist_location(dist):
    """
    Return where *dist* lives in site-packages: the .egg-link file for
    develop-installed packages (whose dist.location points at the source
    tree), otherwise dist.location itself.
    """
    link = egg_link_path(dist)
    return link or dist.location
def get_terminal_size():
    """Return a tuple (width, height) of the terminal in characters.

    Tries the TIOCGWINSZ ioctl on stdin/stdout/stderr, then on the
    controlling terminal, and finally falls back to the LINES/COLUMNS
    environment variables (defaulting to 25 rows by 80 columns).
    """
    def ioctl_GWINSZ(fd):
        # Returns (rows, cols) or None when the fd is not a usable tty.
        try:
            import fcntl
            import termios
            import struct
            cr = struct.unpack('hh', fcntl.ioctl(fd, termios.TIOCGWINSZ,
                                                 '1234'))
        except Exception:
            # Narrowed from a bare except so SystemExit/KeyboardInterrupt
            # still propagate; any ioctl/import failure means "no tty here".
            return None
        # A (0, 0) answer means the driver had no real size to report.
        # (The original code checked this twice; the duplicate is removed.)
        if cr == (0, 0):
            return None
        return cr
    cr = ioctl_GWINSZ(0) or ioctl_GWINSZ(1) or ioctl_GWINSZ(2)
    if not cr:
        try:
            fd = os.open(os.ctermid(), os.O_RDONLY)
            cr = ioctl_GWINSZ(fd)
            os.close(fd)
        except Exception:
            pass
    if not cr:
        # Environment fallback; values may be strings, hence int() below.
        cr = (os.environ.get('LINES', 25), os.environ.get('COLUMNS', 80))
    # cr is (rows, cols); callers get (width, height).
    return int(cr[1]), int(cr[0])
def current_umask():
    """Return the process umask (reading it requires briefly setting it)."""
    previous = os.umask(0)
    os.umask(previous)
    return previous
def unzip_file(filename, location, flatten=True):
    """
    Unzip the file (with path `filename`) to the destination `location`. All
    files are written based on system defaults and umask (i.e. permissions are
    not preserved), except that regular file members with any execute
    permissions (user, group, or world) have "chmod +x" applied after being
    written. Note that for windows, any execute changes using os.chmod are
    no-ops per the python docs.
    """
    if not os.path.exists(location):
        os.makedirs(location)
    zipfp = open(filename, 'rb')
    try:
        zip = zipfile.ZipFile(zipfp)
        # When the archive has a single top-level directory and flatten is
        # requested, strip that leading directory from every member name.
        leading = has_leading_dir(zip.namelist()) and flatten
        for info in zip.infolist():
            name = info.filename
            data = zip.read(name)
            fn = name
            if leading:
                fn = split_leading_dir(name)[1]
            fn = os.path.join(location, fn)
            dir = os.path.dirname(fn)
            if not os.path.exists(dir):
                os.makedirs(dir)
            if fn.endswith('/') or fn.endswith('\\'):
                # A directory
                if not os.path.exists(fn):
                    os.makedirs(fn)
            else:
                fp = open(fn, 'wb')
                try:
                    fp.write(data)
                finally:
                    fp.close()
                # The high 16 bits of external_attr carry the Unix mode bits.
                mode = info.external_attr >> 16
                # if mode and regular file and any execute permissions for user/group/world?
                if mode and stat.S_ISREG(mode) and mode & 0o111:
                    # make dest file have execute for user/group/world (chmod +x)
                    # no-op on windows per python docs
                    os.chmod(fn, (0o777-current_umask() | 0o111))
    finally:
        zipfp.close()
def untar_file(filename, location):
    """
    Untar the file (with path `filename`) to the destination `location`.
    All files are written based on system defaults and umask (i.e. permissions
    are not preserved), except that regular file members with any execute
    permissions (user, group, or world) have "chmod +x" applied after being
    written. Note that for windows, any execute changes using os.chmod are
    no-ops per the python docs.
    """
    if not os.path.exists(location):
        os.makedirs(location)
    # Pick the tarfile mode from the extension; unknown suffixes fall back
    # to transparent compression detection ('r:*').
    if filename.lower().endswith('.gz') or filename.lower().endswith('.tgz'):
        mode = 'r:gz'
    elif filename.lower().endswith('.bz2') or filename.lower().endswith('.tbz'):
        mode = 'r:bz2'
    elif filename.lower().endswith('.tar'):
        mode = 'r'
    else:
        logger.warn('Cannot determine compression type for file %s' % filename)
        mode = 'r:*'
    tar = tarfile.open(filename, mode)
    try:
        # note: python<=2.5 doesnt seem to know about pax headers, filter them
        leading = has_leading_dir([
            member.name for member in tar.getmembers()
            if member.name != 'pax_global_header'
        ])
        for member in tar.getmembers():
            fn = member.name
            if fn == 'pax_global_header':
                continue
            if leading:
                # Strip the single shared top-level directory from the name.
                fn = split_leading_dir(fn)[1]
            path = os.path.join(location, fn)
            if member.isdir():
                if not os.path.exists(path):
                    os.makedirs(path)
            elif member.issym():
                try:
                    # NOTE(review): relies on tarfile's private
                    # _extract_member -- presumably to materialize the
                    # symlink itself; confirm against tarfile internals.
                    tar._extract_member(member, path)
                except:
                    e = sys.exc_info()[1]
                    # Some corrupt tar files seem to produce this
                    # (specifically bad symlinks)
                    logger.warn(
                        'In the tar file %s the member %s is invalid: %s'
                        % (filename, member.name, e))
                    continue
            else:
                try:
                    fp = tar.extractfile(member)
                except (KeyError, AttributeError):
                    e = sys.exc_info()[1]
                    # Some corrupt tar files seem to produce this
                    # (specifically bad symlinks)
                    logger.warn(
                        'In the tar file %s the member %s is invalid: %s'
                        % (filename, member.name, e))
                    continue
                if not os.path.exists(os.path.dirname(path)):
                    os.makedirs(os.path.dirname(path))
                destfp = open(path, 'wb')
                try:
                    shutil.copyfileobj(fp, destfp)
                finally:
                    destfp.close()
                fp.close()
                # member have any execute permissions for user/group/world?
                if member.mode & 0o111:
                    # make dest file have execute for user/group/world
                    # no-op on windows per python docs
                    os.chmod(path, (0o777-current_umask() | 0o111))
    finally:
        tar.close()
def create_download_cache_folder(folder):
    """Create the download-cache directory *folder*, logging the action."""
    # NOTE(review): the indent is lowered just for this one message --
    # presumably so it aligns with top-level log output; confirm intent.
    logger.indent -= 2
    logger.notify('Creating supposed download cache at %s' % folder)
    logger.indent += 2
    os.makedirs(folder)
def cache_download(target_file, temp_location, content_type):
    """Copy a downloaded file into the cache and record its Content-Type
    in a sibling '<target_file>.content-type' file."""
    logger.notify('Storing download in cache at %s' % display_path(target_file))
    shutil.copyfile(temp_location, target_file)
    with open(target_file + '.content-type', 'w') as fp:
        fp.write(content_type)
def unpack_file(filename, location, content_type, link):
    """
    Unpack the archive `filename` into `location`, dispatching on content
    type and file name: zip-style archives (including .whl and .pybundle)
    go through unzip_file, tarballs through untar_file, and an HTML page
    that looks like a Subversion listing is checked out via the Subversion
    backend using `link.url`.  Raises InstallationError when no format
    can be detected.
    """
    filename = os.path.realpath(filename)
    if (content_type == 'application/zip'
        or filename.endswith('.zip')
        or filename.endswith('.pybundle')
        or filename.endswith('.whl')
        or zipfile.is_zipfile(filename)):
        # Wheels and pybundles keep their internal directory layout.
        unzip_file(filename, location, flatten=not filename.endswith(('.pybundle', '.whl')))
    elif (content_type == 'application/x-gzip'
          or tarfile.is_tarfile(filename)
          or splitext(filename)[1].lower() in ('.tar', '.tar.gz', '.tar.bz2', '.tgz', '.tbz')):
        untar_file(filename, location)
    elif (content_type and content_type.startswith('text/html')
          and is_svn_page(file_contents(filename))):
        # We don't really care about this
        from pip.vcs.subversion import Subversion
        Subversion('svn+' + link.url).unpack(location)
    else:
        ## FIXME: handle?
        ## FIXME: magic signatures?
        logger.fatal('Cannot unpack file %s (downloaded from %s, content-type: %s); cannot detect archive format'
                     % (filename, location, content_type))
        raise InstallationError('Cannot determine archive format of %s' % location)
def call_subprocess(cmd, show_stdout=True,
                    filter_stdout=None, cwd=None,
                    raise_on_returncode=True,
                    command_level=logger.DEBUG, command_desc=None,
                    extra_environ=None):
    """
    Run *cmd* (a list of arguments) with stderr merged into stdout.

    When show_stdout is true the child inherits the console; otherwise
    output is captured line by line and may be filtered through
    *filter_stdout*, which returns a log level or a (level, line) tuple.
    A non-zero exit raises InstallationError if raise_on_returncode is
    true, else it is only logged as a warning.  Returns the captured
    output as one string when output was captured, otherwise None.
    """
    if command_desc is None:
        # Build a shell-like description of the command for log messages.
        cmd_parts = []
        for part in cmd:
            if ' ' in part or '\n' in part or '"' in part or "'" in part:
                part = '"%s"' % part.replace('"', '\\"')
            cmd_parts.append(part)
        command_desc = ' '.join(cmd_parts)
    if show_stdout:
        stdout = None
    else:
        stdout = subprocess.PIPE
    logger.log(command_level, "Running command %s" % command_desc)
    env = os.environ.copy()
    if extra_environ:
        env.update(extra_environ)
    try:
        proc = subprocess.Popen(
            cmd, stderr=subprocess.STDOUT, stdin=None, stdout=stdout,
            cwd=cwd, env=env)
    except Exception:
        e = sys.exc_info()[1]
        logger.fatal(
            "Error %s while executing command %s" % (e, command_desc))
        raise
    all_output = []
    if stdout is not None:
        # Captured mode: stream, filter, and log the child's output.
        stdout = proc.stdout
        while 1:
            line = console_to_str(stdout.readline())
            if not line:
                break
            line = line.rstrip()
            all_output.append(line + '\n')
            if filter_stdout:
                level = filter_stdout(line)
                if isinstance(level, tuple):
                    level, line = level
                logger.log(level, line)
                if not logger.stdout_level_matches(level):
                    logger.show_progress()
            else:
                logger.info(line)
    else:
        returned_stdout, returned_stderr = proc.communicate()
        all_output = [returned_stdout or '']
    proc.wait()
    if proc.returncode:
        if raise_on_returncode:
            if all_output:
                logger.notify('Complete output from command %s:' % command_desc)
                logger.notify('\n'.join(all_output) + '\n----------------------------------------')
            raise InstallationError(
                "Command %s failed with error code %s in %s"
                % (command_desc, proc.returncode, cwd))
        else:
            logger.warn(
                "Command %s had error code %s in %s"
                % (command_desc, proc.returncode, cwd))
    if stdout is not None:
        return ''.join(all_output)
def is_prerelease(vers):
    """
    Attempt to determine if this is a pre-release using PEP386/PEP426 rules.
    Will return True if it is a pre-release and False if not. Versions are
    assumed to be a pre-release if they cannot be parsed.
    """
    normalized = version._suggest_normalized_version(vers)
    if normalized is None:
        # Cannot normalize, assume it is a pre-release
        return True
    parsed = version._normalized_key(normalized)
    # A pre-release carries an "a"/"b"/"c"/"rc"/"dev" marker somewhere in
    # its parsed key tuple.
    return any([any([y in set(["a", "b", "c", "rc", "dev"]) for y in x]) for x in parsed])
| bsd-3-clause |
wiltonlazary/arangodb | 3rdParty/V8/V8-5.0.71.39/build/gyp/test/rules/gyptest-all.py | 34 | 2191 | #!/usr/bin/env python
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies simple rules when using an explicit build target of 'all'.
"""
import sys
if sys.platform == 'win32':
  print "This test is currently disabled: https://crbug.com/483696."
  sys.exit(0)
import TestGyp
test = TestGyp.TestGyp()
# A rules section without an accompanying action must make generation fail.
test.run_gyp('no_action_with_rules_fails.gyp', chdir='src/noaction', status=1,
             stderr=None)
test.run_gyp('actions.gyp',
             '-G', 'xcode_ninja_target_pattern=^pull_in_all_actions$',
             chdir='src')
test.relocate('src', 'relocate/src')
test.build('actions.gyp', test.ALL, chdir='relocate/src')
expect = """\
Hello from program.c
Hello from function1.in
Hello from function2.in
"""
# NOTE(review): the xcode generator apparently places built executables
# under each subdirectory, hence the per-generator chdir -- confirm.
if test.format == 'xcode':
  chdir = 'relocate/src/subdir1'
else:
  chdir = 'relocate/src'
test.run_built_executable('program', chdir=chdir, stdout=expect)
expect = """\
Hello from program.c
Hello from function3.in
"""
if test.format == 'xcode':
  chdir = 'relocate/src/subdir3'
else:
  chdir = 'relocate/src'
test.run_built_executable('program2', chdir=chdir, stdout=expect)
# Verify the rule-generated output files and their copies.
test.must_match('relocate/src/subdir2/file1.out', 'Hello from file1.in\n')
test.must_match('relocate/src/subdir2/file2.out', 'Hello from file2.in\n')
test.must_match('relocate/src/subdir2/file1.out2', 'Hello from file1.in\n')
test.must_match('relocate/src/subdir2/file2.out2', 'Hello from file2.in\n')
test.must_match('relocate/src/subdir2/file1.out4', 'Hello from file1.in\n')
test.must_match('relocate/src/subdir2/file2.out4', 'Hello from file2.in\n')
test.must_match('relocate/src/subdir2/file1.copy', 'Hello from file1.in\n')
test.must_match('relocate/src/external/file1.external_rules.out',
                'Hello from file1.in\n')
test.must_match('relocate/src/external/file2.external_rules.out',
                'Hello from file2.in\n')
expect = """\
Hello from program.c
Got 41.
"""
if test.format == 'xcode':
  chdir = 'relocate/src/subdir4'
else:
  chdir = 'relocate/src'
test.run_built_executable('program4', chdir=chdir, stdout=expect)
test.pass_test()
| apache-2.0 |
Jollytown/Garuda | server/garuda/lib/python2.7/site-packages/django/contrib/admin/checks.py | 34 | 38521 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from itertools import chain
from django.contrib.admin.utils import get_fields_from_path, NotRelationField, flatten
from django.core import checks
from django.db import models
from django.db.models.fields import FieldDoesNotExist
from django.forms.models import BaseModelForm, _get_foreign_key, BaseModelFormSet
def check_admin_app(**kwargs):
    """Return the system-check errors recorded by the admin sites module."""
    # NOTE(review): imported lazily; system_check_errors is presumably
    # populated while ModelAdmins are registered -- confirm in admin.sites.
    from django.contrib.admin.sites import system_check_errors
    return system_check_errors
class BaseModelAdminChecks(object):
    """
    Validates the configuration attributes shared by admin classes
    (raw_id_fields, fields, fieldsets, exclude, form, filter_vertical,
    filter_horizontal, radio_fields, prepopulated_fields, view_on_site,
    ordering, readonly_fields).  Each _check_* helper returns a list of
    django.core.checks messages; check() concatenates them all.
    """
    def check(self, cls, model, **kwargs):
        # Aggregate every per-option check; each helper returns a (possibly
        # empty) list of checks.Error instances.
        errors = []
        errors.extend(self._check_raw_id_fields(cls, model))
        errors.extend(self._check_fields(cls, model))
        errors.extend(self._check_fieldsets(cls, model))
        errors.extend(self._check_exclude(cls, model))
        errors.extend(self._check_form(cls, model))
        errors.extend(self._check_filter_vertical(cls, model))
        errors.extend(self._check_filter_horizontal(cls, model))
        errors.extend(self._check_radio_fields(cls, model))
        errors.extend(self._check_prepopulated_fields(cls, model))
        errors.extend(self._check_view_on_site_url(cls, model))
        errors.extend(self._check_ordering(cls, model))
        errors.extend(self._check_readonly_fields(cls, model))
        return errors
    def _check_raw_id_fields(self, cls, model):
        """ Check that `raw_id_fields` only contains field names that are listed
        on the model. """
        if not isinstance(cls.raw_id_fields, (list, tuple)):
            return must_be('a list or tuple', option='raw_id_fields', obj=cls, id='admin.E001')
        else:
            return list(chain(*[
                self._check_raw_id_fields_item(cls, model, field_name, 'raw_id_fields[%d]' % index)
                for index, field_name in enumerate(cls.raw_id_fields)
            ]))
    def _check_raw_id_fields_item(self, cls, model, field_name, label):
        """ Check an item of `raw_id_fields`, i.e. check that field named
        `field_name` exists in model `model` and is a ForeignKey or a
        ManyToManyField. """
        try:
            field = model._meta.get_field(field_name)
        except models.FieldDoesNotExist:
            return refer_to_missing_field(field=field_name, option=label,
                                          model=model, obj=cls, id='admin.E002')
        else:
            if not isinstance(field, (models.ForeignKey, models.ManyToManyField)):
                return must_be('a ForeignKey or ManyToManyField',
                               option=label, obj=cls, id='admin.E003')
            else:
                return []
    def _check_fields(self, cls, model):
        """ Check that `fields` only refer to existing fields, doesn't contain
        duplicates. Check if at most one of `fields` and `fieldsets` is defined.
        """
        if cls.fields is None:
            return []
        elif not isinstance(cls.fields, (list, tuple)):
            return must_be('a list or tuple', option='fields', obj=cls, id='admin.E004')
        elif cls.fieldsets:
            # 'fields' and 'fieldsets' are mutually exclusive options.
            return [
                checks.Error(
                    "Both 'fieldsets' and 'fields' are specified.",
                    hint=None,
                    obj=cls,
                    id='admin.E005',
                )
            ]
        fields = flatten(cls.fields)
        if len(fields) != len(set(fields)):
            return [
                checks.Error(
                    "The value of 'fields' contains duplicate field(s).",
                    hint=None,
                    obj=cls,
                    id='admin.E006',
                )
            ]
        return list(chain(*[
            self._check_field_spec(cls, model, field_name, 'fields')
            for field_name in cls.fields
        ]))
    def _check_fieldsets(self, cls, model):
        """ Check that fieldsets is properly formatted and doesn't contain
        duplicates. """
        if cls.fieldsets is None:
            return []
        elif not isinstance(cls.fieldsets, (list, tuple)):
            return must_be('a list or tuple', option='fieldsets', obj=cls, id='admin.E007')
        else:
            return list(chain(*[
                self._check_fieldsets_item(cls, model, fieldset, 'fieldsets[%d]' % index)
                for index, fieldset in enumerate(cls.fieldsets)
            ]))
    def _check_fieldsets_item(self, cls, model, fieldset, label):
        """ Check an item of `fieldsets`, i.e. check that this is a pair of a
        set name and a dictionary containing "fields" key. """
        if not isinstance(fieldset, (list, tuple)):
            return must_be('a list or tuple', option=label, obj=cls, id='admin.E008')
        elif len(fieldset) != 2:
            return must_be('of length 2', option=label, obj=cls, id='admin.E009')
        elif not isinstance(fieldset[1], dict):
            return must_be('a dictionary', option='%s[1]' % label, obj=cls, id='admin.E010')
        elif 'fields' not in fieldset[1]:
            return [
                checks.Error(
                    "The value of '%s[1]' must contain the key 'fields'." % label,
                    hint=None,
                    obj=cls,
                    id='admin.E011',
                )
            ]
        fields = flatten(fieldset[1]['fields'])
        if len(fields) != len(set(fields)):
            return [
                checks.Error(
                    "There are duplicate field(s) in '%s[1]'." % label,
                    hint=None,
                    obj=cls,
                    id='admin.E012',
                )
            ]
        return list(chain(*[
            self._check_field_spec(cls, model, fieldset_fields, '%s[1]["fields"]' % label)
            for fieldset_fields in fieldset[1]['fields']
        ]))
    def _check_field_spec(self, cls, model, fields, label):
        """ `fields` should be an item of `fields` or an item of
        fieldset[1]['fields'] for any `fieldset` in `fieldsets`. It should be a
        field name or a tuple of field names. """
        if isinstance(fields, tuple):
            return list(chain(*[
                self._check_field_spec_item(cls, model, field_name, "%s[%d]" % (label, index))
                for index, field_name in enumerate(fields)
            ]))
        else:
            return self._check_field_spec_item(cls, model, fields, label)
    def _check_field_spec_item(self, cls, model, field_name, label):
        if field_name in cls.readonly_fields:
            # Stuff can be put in fields that isn't actually a model field if
            # it's in readonly_fields, readonly_fields will handle the
            # validation of such things.
            return []
        else:
            try:
                field = model._meta.get_field(field_name)
            except models.FieldDoesNotExist:
                # If we can't find a field on the model that matches, it could
                # be an extra field on the form.
                return []
            else:
                if (isinstance(field, models.ManyToManyField) and
                        not field.rel.through._meta.auto_created):
                    # M2M fields with an explicit intermediary model cannot be
                    # edited through a plain admin form widget.
                    return [
                        checks.Error(
                            ("The value of '%s' cannot include the ManyToManyField '%s', "
                             "because that field manually specifies a relationship model.")
                            % (label, field_name),
                            hint=None,
                            obj=cls,
                            id='admin.E013',
                        )
                    ]
                else:
                    return []
    def _check_exclude(self, cls, model):
        """ Check that exclude is a sequence without duplicates. """
        if cls.exclude is None:  # default value is None
            return []
        elif not isinstance(cls.exclude, (list, tuple)):
            return must_be('a list or tuple', option='exclude', obj=cls, id='admin.E014')
        elif len(cls.exclude) > len(set(cls.exclude)):
            return [
                checks.Error(
                    "The value of 'exclude' contains duplicate field(s).",
                    hint=None,
                    obj=cls,
                    id='admin.E015',
                )
            ]
        else:
            return []
    def _check_form(self, cls, model):
        """ Check that form subclasses BaseModelForm. """
        if hasattr(cls, 'form') and not issubclass(cls.form, BaseModelForm):
            return must_inherit_from(parent='BaseModelForm', option='form',
                                     obj=cls, id='admin.E016')
        else:
            return []
    def _check_filter_vertical(self, cls, model):
        """ Check that filter_vertical is a sequence of field names. """
        if not hasattr(cls, 'filter_vertical'):
            return []
        elif not isinstance(cls.filter_vertical, (list, tuple)):
            return must_be('a list or tuple', option='filter_vertical', obj=cls, id='admin.E017')
        else:
            return list(chain(*[
                self._check_filter_item(cls, model, field_name, "filter_vertical[%d]" % index)
                for index, field_name in enumerate(cls.filter_vertical)
            ]))
    def _check_filter_horizontal(self, cls, model):
        """ Check that filter_horizontal is a sequence of field names. """
        if not hasattr(cls, 'filter_horizontal'):
            return []
        elif not isinstance(cls.filter_horizontal, (list, tuple)):
            return must_be('a list or tuple', option='filter_horizontal', obj=cls, id='admin.E018')
        else:
            return list(chain(*[
                self._check_filter_item(cls, model, field_name, "filter_horizontal[%d]" % index)
                for index, field_name in enumerate(cls.filter_horizontal)
            ]))
    def _check_filter_item(self, cls, model, field_name, label):
        """ Check one item of `filter_vertical` or `filter_horizontal`, i.e.
        check that given field exists and is a ManyToManyField. """
        try:
            field = model._meta.get_field(field_name)
        except models.FieldDoesNotExist:
            return refer_to_missing_field(field=field_name, option=label,
                                          model=model, obj=cls, id='admin.E019')
        else:
            if not isinstance(field, models.ManyToManyField):
                return must_be('a ManyToManyField', option=label, obj=cls, id='admin.E020')
            else:
                return []
    def _check_radio_fields(self, cls, model):
        """ Check that `radio_fields` is a dictionary. """
        if not hasattr(cls, 'radio_fields'):
            return []
        elif not isinstance(cls.radio_fields, dict):
            return must_be('a dictionary', option='radio_fields', obj=cls, id='admin.E021')
        else:
            # Validate each key (field name) and each value (layout constant).
            return list(chain(*[
                self._check_radio_fields_key(cls, model, field_name, 'radio_fields') +
                self._check_radio_fields_value(cls, model, val, 'radio_fields["%s"]' % field_name)
                for field_name, val in cls.radio_fields.items()
            ]))
    def _check_radio_fields_key(self, cls, model, field_name, label):
        """ Check that a key of `radio_fields` dictionary is name of existing
        field and that the field is a ForeignKey or has `choices` defined. """
        try:
            field = model._meta.get_field(field_name)
        except models.FieldDoesNotExist:
            return refer_to_missing_field(field=field_name, option=label,
                                          model=model, obj=cls, id='admin.E022')
        else:
            if not (isinstance(field, models.ForeignKey) or field.choices):
                return [
                    checks.Error(
                        "The value of '%s' refers to '%s', which is not an instance of ForeignKey, and does not have a 'choices' definition." % (
                            label, field_name
                        ),
                        hint=None,
                        obj=cls,
                        id='admin.E023',
                    )
                ]
            else:
                return []
    def _check_radio_fields_value(self, cls, model, val, label):
        """ Check type of a value of `radio_fields` dictionary. """
        from django.contrib.admin.options import HORIZONTAL, VERTICAL
        if val not in (HORIZONTAL, VERTICAL):
            return [
                checks.Error(
                    "The value of '%s' must be either admin.HORIZONTAL or admin.VERTICAL." % label,
                    hint=None,
                    obj=cls,
                    id='admin.E024',
                )
            ]
        else:
            return []
    def _check_view_on_site_url(self, cls, model):
        # `view_on_site` may be a bool or a callable producing the URL.
        if hasattr(cls, 'view_on_site'):
            if not callable(cls.view_on_site) and not isinstance(cls.view_on_site, bool):
                return [
                    checks.Error(
                        "The value of 'view_on_site' must be a callable or a boolean value.",
                        hint=None,
                        obj=cls,
                        id='admin.E025',
                    )
                ]
            else:
                return []
        else:
            return []
    def _check_prepopulated_fields(self, cls, model):
        """ Check that `prepopulated_fields` is a dictionary containing allowed
        field types. """
        if not hasattr(cls, 'prepopulated_fields'):
            return []
        elif not isinstance(cls.prepopulated_fields, dict):
            return must_be('a dictionary', option='prepopulated_fields', obj=cls, id='admin.E026')
        else:
            return list(chain(*[
                self._check_prepopulated_fields_key(cls, model, field_name, 'prepopulated_fields') +
                self._check_prepopulated_fields_value(cls, model, val, 'prepopulated_fields["%s"]' % field_name)
                for field_name, val in cls.prepopulated_fields.items()
            ]))
    def _check_prepopulated_fields_key(self, cls, model, field_name, label):
        """ Check a key of `prepopulated_fields` dictionary, i.e. check that it
        is a name of existing field and the field is one of the allowed types.
        """
        forbidden_field_types = (
            models.DateTimeField,
            models.ForeignKey,
            models.ManyToManyField
        )
        try:
            field = model._meta.get_field(field_name)
        except models.FieldDoesNotExist:
            return refer_to_missing_field(field=field_name, option=label,
                                          model=model, obj=cls, id='admin.E027')
        else:
            if isinstance(field, forbidden_field_types):
                return [
                    checks.Error(
                        "The value of '%s' refers to '%s', which must not be a DateTimeField, "
                        "ForeignKey or ManyToManyField." % (
                            label, field_name
                        ),
                        hint=None,
                        obj=cls,
                        id='admin.E028',
                    )
                ]
            else:
                return []
    def _check_prepopulated_fields_value(self, cls, model, val, label):
        """ Check a value of `prepopulated_fields` dictionary, i.e. it's an
        iterable of existing fields. """
        if not isinstance(val, (list, tuple)):
            return must_be('a list or tuple', option=label, obj=cls, id='admin.E029')
        else:
            return list(chain(*[
                self._check_prepopulated_fields_value_item(cls, model, subfield_name, "%s[%r]" % (label, index))
                for index, subfield_name in enumerate(val)
            ]))
    def _check_prepopulated_fields_value_item(self, cls, model, field_name, label):
        """ For `prepopulated_fields` equal to {"slug": ("title",)},
        `field_name` is "title". """
        try:
            model._meta.get_field(field_name)
        except models.FieldDoesNotExist:
            return refer_to_missing_field(field=field_name, option=label,
                                          model=model, obj=cls, id='admin.E030')
        else:
            return []
    def _check_ordering(self, cls, model):
        """ Check that ordering refers to existing fields or is random. """
        # ordering = None
        if cls.ordering is None:  # The default value is None
            return []
        elif not isinstance(cls.ordering, (list, tuple)):
            return must_be('a list or tuple', option='ordering', obj=cls, id='admin.E031')
        else:
            return list(chain(*[
                self._check_ordering_item(cls, model, field_name, 'ordering[%d]' % index)
                for index, field_name in enumerate(cls.ordering)
            ]))
    def _check_ordering_item(self, cls, model, field_name, label):
        """ Check that `ordering` refers to existing fields. """
        if field_name == '?' and len(cls.ordering) != 1:
            # '?' (random ordering) must stand alone.
            return [
                checks.Error(
                    ("The value of 'ordering' has the random ordering marker '?', "
                     "but contains other fields as well."),
                    hint='Either remove the "?", or remove the other fields.',
                    obj=cls,
                    id='admin.E032',
                )
            ]
        elif field_name == '?':
            return []
        elif '__' in field_name:
            # Skip ordering in the format field1__field2 (FIXME: checking
            # this format would be nice, but it's a little fiddly).
            return []
        else:
            if field_name.startswith('-'):
                # Strip the descending-order prefix before the lookup.
                field_name = field_name[1:]
            try:
                model._meta.get_field(field_name)
            except models.FieldDoesNotExist:
                return refer_to_missing_field(field=field_name, option=label,
                                              model=model, obj=cls, id='admin.E033')
            else:
                return []
    def _check_readonly_fields(self, cls, model):
        """ Check that readonly_fields refers to proper attribute or field. """
        if cls.readonly_fields == ():
            return []
        elif not isinstance(cls.readonly_fields, (list, tuple)):
            return must_be('a list or tuple', option='readonly_fields', obj=cls, id='admin.E034')
        else:
            return list(chain(*[
                self._check_readonly_fields_item(cls, model, field_name, "readonly_fields[%d]" % index)
                for index, field_name in enumerate(cls.readonly_fields)
            ]))
    def _check_readonly_fields_item(self, cls, model, field_name, label):
        # A readonly "field" may be a callable, an admin attribute, a model
        # attribute, or a real model field -- checked in that order.
        if callable(field_name):
            return []
        elif hasattr(cls, field_name):
            return []
        elif hasattr(model, field_name):
            return []
        else:
            try:
                model._meta.get_field(field_name)
            except models.FieldDoesNotExist:
                return [
                    checks.Error(
                        "The value of '%s' is not a callable, an attribute of '%s', or an attribute of '%s.%s'." % (
                            label, cls.__name__, model._meta.app_label, model._meta.object_name
                        ),
                        hint=None,
                        obj=cls,
                        id='admin.E035',
                    )
                ]
            else:
                return []
class ModelAdminChecks(BaseModelAdminChecks):
def check(self, cls, model, **kwargs):
errors = super(ModelAdminChecks, self).check(cls, model=model, **kwargs)
errors.extend(self._check_save_as(cls, model))
errors.extend(self._check_save_on_top(cls, model))
errors.extend(self._check_inlines(cls, model))
errors.extend(self._check_list_display(cls, model))
errors.extend(self._check_list_display_links(cls, model))
errors.extend(self._check_list_filter(cls, model))
errors.extend(self._check_list_select_related(cls, model))
errors.extend(self._check_list_per_page(cls, model))
errors.extend(self._check_list_max_show_all(cls, model))
errors.extend(self._check_list_editable(cls, model))
errors.extend(self._check_search_fields(cls, model))
errors.extend(self._check_date_hierarchy(cls, model))
return errors
def _check_save_as(self, cls, model):
""" Check save_as is a boolean. """
if not isinstance(cls.save_as, bool):
return must_be('a boolean', option='save_as',
obj=cls, id='admin.E101')
else:
return []
def _check_save_on_top(self, cls, model):
""" Check save_on_top is a boolean. """
if not isinstance(cls.save_on_top, bool):
return must_be('a boolean', option='save_on_top',
obj=cls, id='admin.E102')
else:
return []
def _check_inlines(self, cls, model):
""" Check all inline model admin classes. """
if not isinstance(cls.inlines, (list, tuple)):
return must_be('a list or tuple', option='inlines', obj=cls, id='admin.E103')
else:
return list(chain(*[
self._check_inlines_item(cls, model, item, "inlines[%d]" % index)
for index, item in enumerate(cls.inlines)
]))
def _check_inlines_item(self, cls, model, inline, label):
""" Check one inline model admin. """
inline_label = '.'.join([inline.__module__, inline.__name__])
from django.contrib.admin.options import BaseModelAdmin
if not issubclass(inline, BaseModelAdmin):
return [
checks.Error(
"'%s' must inherit from 'BaseModelAdmin'." % inline_label,
hint=None,
obj=cls,
id='admin.E104',
)
]
elif not inline.model:
return [
checks.Error(
"'%s' must have a 'model' attribute." % inline_label,
hint=None,
obj=cls,
id='admin.E105',
)
]
elif not issubclass(inline.model, models.Model):
return must_be('a Model', option='%s.model' % inline_label,
obj=cls, id='admin.E106')
else:
return inline.check(model)
def _check_list_display(self, cls, model):
""" Check that list_display only contains fields or usable attributes.
"""
if not isinstance(cls.list_display, (list, tuple)):
return must_be('a list or tuple', option='list_display', obj=cls, id='admin.E107')
else:
return list(chain(*[
self._check_list_display_item(cls, model, item, "list_display[%d]" % index)
for index, item in enumerate(cls.list_display)
]))
def _check_list_display_item(self, cls, model, item, label):
if callable(item):
return []
elif hasattr(cls, item):
return []
elif hasattr(model, item):
# getattr(model, item) could be an X_RelatedObjectsDescriptor
try:
field = model._meta.get_field(item)
except models.FieldDoesNotExist:
try:
field = getattr(model, item)
except AttributeError:
field = None
if field is None:
return [
checks.Error(
"The value of '%s' refers to '%s', which is not a callable, an attribute of '%s', or an attribute or method on '%s.%s'." % (
label, item, cls.__name__, model._meta.app_label, model._meta.object_name
),
hint=None,
obj=cls,
id='admin.E108',
)
]
elif isinstance(field, models.ManyToManyField):
return [
checks.Error(
"The value of '%s' must not be a ManyToManyField." % label,
hint=None,
obj=cls,
id='admin.E109',
)
]
else:
return []
else:
try:
model._meta.get_field(item)
except models.FieldDoesNotExist:
return [
# This is a deliberate repeat of E108; there's more than one path
# required to test this condition.
checks.Error(
"The value of '%s' refers to '%s', which is not a callable, an attribute of '%s', or an attribute or method on '%s.%s'." % (
label, item, cls.__name__, model._meta.app_label, model._meta.object_name
),
hint=None,
obj=cls,
id='admin.E108',
)
]
else:
return []
def _check_list_display_links(self, cls, model):
""" Check that list_display_links is a unique subset of list_display.
"""
if cls.list_display_links is None:
return []
elif not isinstance(cls.list_display_links, (list, tuple)):
return must_be('a list, a tuple, or None', option='list_display_links', obj=cls, id='admin.E110')
else:
return list(chain(*[
self._check_list_display_links_item(cls, model, field_name, "list_display_links[%d]" % index)
for index, field_name in enumerate(cls.list_display_links)
]))
def _check_list_display_links_item(self, cls, model, field_name, label):
if field_name not in cls.list_display:
return [
checks.Error(
"The value of '%s' refers to '%s', which is not defined in 'list_display'." % (
label, field_name
),
hint=None,
obj=cls,
id='admin.E111',
)
]
else:
return []
def _check_list_filter(self, cls, model):
if not isinstance(cls.list_filter, (list, tuple)):
return must_be('a list or tuple', option='list_filter', obj=cls, id='admin.E112')
else:
return list(chain(*[
self._check_list_filter_item(cls, model, item, "list_filter[%d]" % index)
for index, item in enumerate(cls.list_filter)
]))
def _check_list_filter_item(self, cls, model, item, label):
"""
Check one item of `list_filter`, i.e. check if it is one of three options:
1. 'field' -- a basic field filter, possibly w/ relationships (e.g.
'field__rel')
2. ('field', SomeFieldListFilter) - a field-based list filter class
3. SomeListFilter - a non-field list filter class
"""
from django.contrib.admin import ListFilter, FieldListFilter
if callable(item) and not isinstance(item, models.Field):
# If item is option 3, it should be a ListFilter...
if not issubclass(item, ListFilter):
return must_inherit_from(parent='ListFilter', option=label,
obj=cls, id='admin.E113')
# ... but not a FieldListFilter.
elif issubclass(item, FieldListFilter):
return [
checks.Error(
"The value of '%s' must not inherit from 'FieldListFilter'." % label,
hint=None,
obj=cls,
id='admin.E114',
)
]
else:
return []
elif isinstance(item, (tuple, list)):
# item is option #2
field, list_filter_class = item
if not issubclass(list_filter_class, FieldListFilter):
return must_inherit_from(parent='FieldListFilter', option='%s[1]' % label,
obj=cls, id='admin.E115')
else:
return []
else:
# item is option #1
field = item
# Validate the field string
try:
get_fields_from_path(model, field)
except (NotRelationField, FieldDoesNotExist):
return [
checks.Error(
"The value of '%s' refers to '%s', which does not refer to a Field." % (label, field),
hint=None,
obj=cls,
id='admin.E116',
)
]
else:
return []
def _check_list_select_related(self, cls, model):
""" Check that list_select_related is a boolean, a list or a tuple. """
if not isinstance(cls.list_select_related, (bool, list, tuple)):
return must_be('a boolean, tuple or list', option='list_select_related',
obj=cls, id='admin.E117')
else:
return []
def _check_list_per_page(self, cls, model):
""" Check that list_per_page is an integer. """
if not isinstance(cls.list_per_page, int):
return must_be('an integer', option='list_per_page', obj=cls, id='admin.E118')
else:
return []
def _check_list_max_show_all(self, cls, model):
""" Check that list_max_show_all is an integer. """
if not isinstance(cls.list_max_show_all, int):
return must_be('an integer', option='list_max_show_all', obj=cls, id='admin.E119')
else:
return []
def _check_list_editable(self, cls, model):
""" Check that list_editable is a sequence of editable fields from
list_display without first element. """
if not isinstance(cls.list_editable, (list, tuple)):
return must_be('a list or tuple', option='list_editable', obj=cls, id='admin.E120')
else:
return list(chain(*[
self._check_list_editable_item(cls, model, item, "list_editable[%d]" % index)
for index, item in enumerate(cls.list_editable)
]))
def _check_list_editable_item(self, cls, model, field_name, label):
try:
field = model._meta.get_field_by_name(field_name)[0]
except models.FieldDoesNotExist:
return refer_to_missing_field(field=field_name, option=label,
model=model, obj=cls, id='admin.E121')
else:
if field_name not in cls.list_display:
return refer_to_missing_field(field=field_name, option=label,
model=model, obj=cls, id='admin.E122')
checks.Error(
"The value of '%s' refers to '%s', which is not contained in 'list_display'." % (
label, field_name
),
hint=None,
obj=cls,
id='admin.E122',
),
elif cls.list_display_links and field_name in cls.list_display_links:
return [
checks.Error(
"The value of '%s' cannot be in both 'list_editable' and 'list_display_links'." % field_name,
hint=None,
obj=cls,
id='admin.E123',
)
]
# Check that list_display_links is set, and that the first values of list_editable and list_display are
# not the same. See ticket #22792 for the use case relating to this.
elif (cls.list_display[0] in cls.list_editable and cls.list_display[0] != cls.list_editable[0] and
cls.list_display_links is not None):
return [
checks.Error(
"The value of '%s' refers to the first field in 'list_display' ('%s'), "
"which cannot be used unless 'list_display_links' is set." % (
label, cls.list_display[0]
),
hint=None,
obj=cls,
id='admin.E124',
)
]
elif not field.editable:
return [
checks.Error(
"The value of '%s' refers to '%s', which is not editable through the admin." % (
label, field_name
),
hint=None,
obj=cls,
id='admin.E125',
)
]
else:
return []
def _check_search_fields(self, cls, model):
""" Check search_fields is a sequence. """
if not isinstance(cls.search_fields, (list, tuple)):
return must_be('a list or tuple', option='search_fields', obj=cls, id='admin.E126')
else:
return []
def _check_date_hierarchy(self, cls, model):
""" Check that date_hierarchy refers to DateField or DateTimeField. """
if cls.date_hierarchy is None:
return []
else:
try:
field = model._meta.get_field(cls.date_hierarchy)
except models.FieldDoesNotExist:
return refer_to_missing_field(option='date_hierarchy',
field=cls.date_hierarchy,
model=model, obj=cls, id='admin.E127')
else:
if not isinstance(field, (models.DateField, models.DateTimeField)):
return must_be('a DateField or DateTimeField', option='date_hierarchy',
obj=cls, id='admin.E128')
else:
return []
class InlineModelAdminChecks(BaseModelAdminChecks):
    """Checks specific to inline admin classes (the admin.E2xx series)."""

    def check(self, cls, parent_model, **kwargs):
        """Run the base admin checks plus every inline-specific check."""
        errors = super(InlineModelAdminChecks, self).check(cls, model=cls.model, **kwargs)
        inline_results = (
            self._check_relation(cls, parent_model),
            self._check_exclude_of_parent_model(cls, parent_model),
            self._check_extra(cls),
            self._check_max_num(cls),
            self._check_min_num(cls),
            self._check_formset(cls),
        )
        for found in inline_results:
            errors.extend(found)
        return errors

    def _check_exclude_of_parent_model(self, cls, parent_model):
        """The foreign key back to the parent model may not be excluded."""
        # Do not perform more specific checks if the base checks result in an
        # error.
        if super(InlineModelAdminChecks, self)._check_exclude(cls, parent_model):
            return []
        # Skip if `fk_name` is invalid.
        if self._check_relation(cls, parent_model):
            return []
        if cls.exclude is None:
            return []

        fk = _get_foreign_key(parent_model, cls.model, fk_name=cls.fk_name)
        if fk.name not in cls.exclude:
            return []
        return [
            checks.Error(
                "Cannot exclude the field '%s', because it is the foreign key "
                "to the parent model '%s.%s'." % (
                    fk.name, parent_model._meta.app_label, parent_model._meta.object_name
                ),
                hint=None,
                obj=cls,
                id='admin.E201',
            )
        ]

    def _check_relation(self, cls, parent_model):
        """Verify that exactly one usable foreign key to the parent exists."""
        try:
            _get_foreign_key(parent_model, cls.model, fk_name=cls.fk_name)
        except ValueError as e:
            return [checks.Error(e.args[0], hint=None, obj=cls, id='admin.E202')]
        return []

    def _check_extra(self, cls):
        """Ensure ``extra`` is an integer."""
        if isinstance(cls.extra, int):
            return []
        return must_be('an integer', option='extra', obj=cls, id='admin.E203')

    def _check_max_num(self, cls):
        """Ensure ``max_num`` is an integer (or None)."""
        if cls.max_num is None or isinstance(cls.max_num, int):
            return []
        return must_be('an integer', option='max_num', obj=cls, id='admin.E204')

    def _check_min_num(self, cls):
        """Ensure ``min_num`` is an integer (or None)."""
        if cls.min_num is None or isinstance(cls.min_num, int):
            return []
        return must_be('an integer', option='min_num', obj=cls, id='admin.E205')

    def _check_formset(self, cls):
        """Ensure ``formset`` inherits from BaseModelFormSet."""
        if issubclass(cls.formset, BaseModelFormSet):
            return []
        return must_inherit_from(parent='BaseModelFormSet', option='formset',
                                 obj=cls, id='admin.E206')
def must_be(type, option, obj, id):
    """Build the standard one-element "'<option>' must be <type>" error list."""
    message = "The value of '%s' must be %s." % (option, type)
    return [
        checks.Error(message, hint=None, obj=obj, id=id),
    ]
def must_inherit_from(parent, option, obj, id):
    """Build a one-element error list saying *option* must subclass *parent*."""
    message = "The value of '%s' must inherit from '%s'." % (option, parent)
    return [
        checks.Error(message, hint=None, obj=obj, id=id),
    ]
def refer_to_missing_field(field, option, model, obj, id):
    """Build a one-element error list for an option naming a missing field."""
    message = (
        "The value of '%s' refers to '%s', which is not an attribute of '%s.%s'." % (
            option, field, model._meta.app_label, model._meta.object_name
        )
    )
    return [
        checks.Error(message, hint=None, obj=obj, id=id),
    ]
| mit |
egabancho/invenio-access | invenio_access/upgrades/access_2015_05_06_accROLE_accACTION_accARGUMENT_id.py | 4 | 3144 | # -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2015 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Upgrade recipe."""
import warnings
from invenio.ext.sqlalchemy import db
from invenio.legacy.dbquery import run_sql
from invenio_upgrader.api import op
from sqlalchemy.engine import reflection
depends_on = ['invenio_release_1_2_0']
def exists_id_column():
    """Check if id column already exists."""
    inspector = reflection.Inspector.from_engine(db.engine)
    table_columns = inspector.get_columns('accROLE_accACTION_accARGUMENT')
    return any(column['name'] == 'id' for column in table_columns)
def info():
    """Return a short human-readable description of this upgrade."""
    # Bug fix: the original ended the literal with `table."""`, which the
    # parser read as the string plus an implicitly-concatenated stray empty
    # string (""). The returned value is unchanged; the typo is gone.
    return "Add new column id to accROLE_accACTION_accARGUMENT table."
def do_upgrade():
    """Implement your upgrades here."""
    # Idempotence guard: a previous run already added the column, so skip
    # (with a warning) instead of failing on a duplicate-column error.
    if exists_id_column():
        warnings.warn(
            """Upgrade skipped. """
            """Column 'id' already exists on accROLE_accACTION_accARGUMENT.""")
        return
    # The ALTER TABLE below relies on MySQL-specific syntax (CHANGE COLUMN,
    # AUTO_INCREMENT), so refuse to run against any other dialect.
    if op.impl.dialect.name != 'mysql':
        warnings.warn("""This upgrade supports only MySQL.""")
        return
    # table accROLE_accACTION_accARGUMENT
    # - drop primary key
    # - add "id" column int(15) unsigned
    # - set "id" as primary key, autoincrement
    # - column id_accROLE, id_accACTION, id_accARGUMENT, argumentlistid server
    # default = None
    # ANSI_QUOTES makes the double-quoted identifiers below legal in MySQL.
    op.execute(
        """
        SET SESSION sql_mode = ANSI_QUOTES;
        ALTER TABLE "accROLE_accACTION_accARGUMENT"
        CHANGE COLUMN "id_accROLE" "id_accROLE" INT(15) UNSIGNED NULL ,
        CHANGE COLUMN "id_accACTION" "id_accACTION" INT(15) UNSIGNED NULL ,
        CHANGE COLUMN "id_accARGUMENT" "id_accARGUMENT" INT(15) NULL ,
        CHANGE COLUMN "argumentlistid" "argumentlistid" MEDIUMINT(8) NULL ,
        ADD COLUMN "id" INT(15) UNSIGNED NOT NULL AUTO_INCREMENT,
        DROP PRIMARY KEY,
        ADD PRIMARY KEY ("id");
        """)
def estimate():
    """Estimate running time of upgrade in seconds (optional)."""
    rows = run_sql(
        """SELECT count(*) FROM "accROLE_accACTION_accARGUMENT" """)
    row_count = int(rows[0][0])
    # Roughly one second per thousand rows, plus one.
    return int(float(row_count) / 1000) + 1
def pre_upgrade():
    """Run pre-upgrade checks (optional)."""
    column_present = exists_id_column()
    if column_present:
        warnings.warn(
            """Column 'id' already exists on accROLE_accACTION_accARGUMENT.""")
    dialect_name = op.impl.dialect.name
    if dialect_name != 'mysql':
        warnings.warn("""This upgrade supports only MySQL.""")
def post_upgrade():
    """Run post-upgrade checks (optional). Nothing to verify here."""
    return None
| gpl-2.0 |
cloudant/bigcouch | couchjs/scons/scons-local-2.0.1/SCons/Script/__init__.py | 61 | 14152 | """SCons.Script
This file implements the main() function used by the scons script.
Architecturally, this *is* the scons script, and will likely only be
called from the external "scons" wrapper. Consequently, anything here
should not be, or be considered, part of the build engine. If it's
something that we expect other software to want to use, it should go in
some other module. If it's specific to the "scons" script invocation,
it goes here.
"""
#
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Script/__init__.py 5134 2010/08/16 23:02:40 bdeegan"
import time
start_time = time.time()
import collections
import os
import sys
# Special chicken-and-egg handling of the "--debug=memoizer" flag:
#
# SCons.Memoize contains a metaclass implementation that affects how
# the other classes are instantiated. The Memoizer may add shim methods
# to classes that have methods that cache computed values in order to
# count and report the hits and misses.
#
# If we wait to enable the Memoization until after we've parsed the
# command line options normally, it will be too late, because the Memoizer
# will have already analyzed the classes that it's Memoizing and decided
# to not add the shims. So we use a special-case, up-front check for
# the "--debug=memoizer" flag and enable Memoizer before we import any
# of the other modules that use it.
# Scan sys.argv and SCONSFLAGS together: the flag may arrive either way,
# and Memoization must be switched on before the other modules are imported.
_args = sys.argv + os.environ.get('SCONSFLAGS', '').split()
if "--debug=memoizer" in _args:
    import SCons.Memoize
    import SCons.Warnings
    try:
        SCons.Memoize.EnableMemoization()
    except SCons.Warnings.Warning:
        # Some warning was thrown. Arrange for it to be displayed
        # or not after warnings are configured.
        import Main
        exc_type, exc_value, tb = sys.exc_info()
        Main.delayed_warnings.append((exc_type, exc_value))
# Don't leave the scratch list lying around in the module namespace.
del _args
import SCons.Action
import SCons.Builder
import SCons.Environment
import SCons.Node.FS
import SCons.Options
import SCons.Platform
import SCons.Scanner
import SCons.SConf
import SCons.Subst
import SCons.Tool
import SCons.Util
import SCons.Variables
import SCons.Defaults
# Main implements the command-line entry point; re-export main() so the
# external "scons" wrapper script can simply call SCons.Script.main().
import Main
main = Main.main
# The following are global class definitions and variables that used to
# live directly in this module back before 0.96.90, when it contained
# a lot of code. Some SConscript files in widely-distributed packages
# (Blender is the specific example) actually reached into SCons.Script
# directly to use some of these. Rather than break those SConscript
# files, we're going to propagate these names into the SCons.Script
# namespace here.
#
# Some of these are commented out because it's *really* unlikely anyone
# used them, but we're going to leave the comment here to try to make
# it obvious what to do if the situation arises.

# Task classes and command-line option helpers re-exported from Main.
BuildTask = Main.BuildTask
CleanTask = Main.CleanTask
QuestionTask = Main.QuestionTask
#PrintHelp = Main.PrintHelp
#SConscriptSettableOptions = Main.SConscriptSettableOptions
AddOption = Main.AddOption
GetOption = Main.GetOption
SetOption = Main.SetOption
Progress = Main.Progress
GetBuildFailures = Main.GetBuildFailures
#keep_going_on_error = Main.keep_going_on_error
#print_dtree = Main.print_dtree
#print_explanations = Main.print_explanations
#print_includes = Main.print_includes
#print_objects = Main.print_objects
#print_time = Main.print_time
#print_tree = Main.print_tree
#memory_stats = Main.memory_stats
#ignore_errors = Main.ignore_errors
#sconscript_time = Main.sconscript_time
#command_time = Main.command_time
#exit_status = Main.exit_status
#profiling = Main.profiling
#repositories = Main.repositories
#
# The SConscript module supplies the call stack used by Return()/Export().
import SConscript
_SConscript = SConscript
call_stack = _SConscript.call_stack
#
# Core engine classes and helpers, re-exported under their public names.
Action = SCons.Action.Action
AddMethod = SCons.Util.AddMethod
AllowSubstExceptions = SCons.Subst.SetAllowableExceptions
Builder = SCons.Builder.Builder
Configure = _SConscript.Configure
Environment = SCons.Environment.Environment
#OptParser = SCons.SConsOptions.OptParser
FindPathDirs = SCons.Scanner.FindPathDirs
Platform = SCons.Platform.Platform
Return = _SConscript.Return
Scanner = SCons.Scanner.Base
Tool = SCons.Tool.Tool
WhereIs = SCons.Util.WhereIs
#
# Typed command-line variable helpers.
BoolVariable = SCons.Variables.BoolVariable
EnumVariable = SCons.Variables.EnumVariable
ListVariable = SCons.Variables.ListVariable
PackageVariable = SCons.Variables.PackageVariable
PathVariable = SCons.Variables.PathVariable

# Deprecated names that will go away some day.
BoolOption = SCons.Options.BoolOption
EnumOption = SCons.Options.EnumOption
ListOption = SCons.Options.ListOption
PackageOption = SCons.Options.PackageOption
PathOption = SCons.Options.PathOption

# Action factories.
Chmod = SCons.Defaults.Chmod
Copy = SCons.Defaults.Copy
Delete = SCons.Defaults.Delete
Mkdir = SCons.Defaults.Mkdir
Move = SCons.Defaults.Move
Touch = SCons.Defaults.Touch

# Pre-made, public scanners.
CScanner = SCons.Tool.CScanner
DScanner = SCons.Tool.DScanner
DirScanner = SCons.Defaults.DirScanner
ProgramScanner = SCons.Tool.ProgramScanner
SourceFileScanner = SCons.Tool.SourceFileScanner

# Functions we might still convert to Environment methods.
CScan = SCons.Defaults.CScan
DefaultEnvironment = SCons.Defaults.DefaultEnvironment

# Other variables we provide.
class TargetList(collections.UserList):
    """A UserList whose mutators can be disabled per-instance.

    SCons rebinds ``_add_Default`` / ``_clear`` to ``_do_nothing`` on some
    instances (see _Add_Targets) so later Default() calls cannot disturb
    targets that came from the command line.
    """

    def _do_nothing(self, *args, **kw):
        """Stand-in used to neutralize _add_Default/_clear on an instance."""
        pass

    def _add_Default(self, list):
        """Append the targets from a Default() call."""
        self.extend(list)

    def _clear(self):
        """Empty the list in place (keeps the same object identity)."""
        del self[:]
# name=value settings from the command line: as a dict (last setting for a
# name wins) and as an ordered list of (name, value) tuples keeping duplicates.
ARGUMENTS = {}
ARGLIST = []
# Targets that will be built; seeded from the command line (_Add_Targets)
# and extended by Default() calls in SConscript files.
BUILD_TARGETS = TargetList()
# Targets named literally on the command line.
COMMAND_LINE_TARGETS = []
# Targets registered via Default() calls.
DEFAULT_TARGETS = []

# BUILD_TARGETS can be modified in the SConscript files. If so, we
# want to treat the modified BUILD_TARGETS list as if they specified
# targets on the command line. To do that, though, we need to know if
# BUILD_TARGETS was modified through "official" APIs or by hand. We do
# this by updating two lists in parallel, the documented BUILD_TARGETS
# list, above, and this internal _build_plus_default targets list which
# should only have "official" API changes. Then Script/Main.py can
# compare these two afterwards to figure out if the user added their
# own targets to BUILD_TARGETS.
_build_plus_default = TargetList()
def _Add_Arguments(alist):
    """Record command-line name=value settings in ARGUMENTS and ARGLIST."""
    for argument in alist:
        # Split only on the first '=' so values may themselves contain '='.
        name, value = argument.split('=', 1)
        ARGUMENTS[name] = value
        ARGLIST.append((name, value))
def _Add_Targets(tlist):
    # Command-line targets are recorded in both tracking lists; afterwards
    # the mutator slots on those instances are disabled so later Default()
    # calls and clears in SConscript files cannot disturb them.
    if tlist:
        COMMAND_LINE_TARGETS.extend(tlist)
        BUILD_TARGETS.extend(tlist)
        BUILD_TARGETS._add_Default = BUILD_TARGETS._do_nothing
        BUILD_TARGETS._clear = BUILD_TARGETS._do_nothing
        _build_plus_default.extend(tlist)
        _build_plus_default._add_Default = _build_plus_default._do_nothing
        _build_plus_default._clear = _build_plus_default._do_nothing
def _Set_Default_Targets_Has_Been_Called(d, fs):
return DEFAULT_TARGETS
def _Set_Default_Targets_Has_Not_Been_Called(d, fs):
if d is None:
d = [fs.Dir('.')]
return d
_Get_Default_Targets = _Set_Default_Targets_Has_Not_Been_Called
def _Set_Default_Targets(env, tlist):
    # Record the targets from a Default() call in the three global lists.
    global DEFAULT_TARGETS
    global _Get_Default_Targets
    # From now on the default-target query returns DEFAULT_TARGETS directly.
    _Get_Default_Targets = _Set_Default_Targets_Has_Been_Called
    for t in tlist:
        if t is None:
            # Default(None) resets all recorded defaults.
            # Delete the elements from the list in-place, don't
            # reassign an empty list to DEFAULT_TARGETS, so that the
            # variables will still point to the same object we point to.
            del DEFAULT_TARGETS[:]
            BUILD_TARGETS._clear()
            _build_plus_default._clear()
        elif isinstance(t, SCons.Node.Node):
            # Already a Node; no name-to-node resolution needed.
            DEFAULT_TARGETS.append(t)
            BUILD_TARGETS._add_Default([t])
            _build_plus_default._add_Default([t])
        else:
            # Resolve strings and other arguments into Nodes first.
            nodes = env.arg2nodes(t, env.fs.Entry)
            DEFAULT_TARGETS.extend(nodes)
            BUILD_TARGETS._add_Default(nodes)
            _build_plus_default._add_Default(nodes)
#
# Accumulated Help() text; None until the first Help() call.
help_text = None

def HelpFunction(text):
    # Append *text* to the global help message (backs the Help() function).
    global help_text
    # NOTE(review): the first branch reads/assigns SCons.Script.help_text
    # (the module attribute) while the else-branch updates the local module
    # global -- presumably these refer to the same binding; confirm before
    # changing either side.
    if SCons.Script.help_text is None:
        SCons.Script.help_text = text
    else:
        help_text = help_text + text
#
# Will be non-zero if we are reading an SConscript file.
sconscript_reading = 0

#
def Variables(files=[], args=ARGUMENTS):
    # Factory for a Variables object seeded with the command-line ARGUMENTS.
    # NOTE(review): the mutable [] default is shared across calls, but it is
    # only passed through (never mutated here), so it is harmless.
    return SCons.Variables.Variables(files, args)

def Options(files=[], args=ARGUMENTS):
    # Deprecated spelling kept for compatibility (see the deprecated
    # Options aliases earlier in this module).
    return SCons.Options.Options(files, args)
# The list of global functions to add to the SConscript name space
# that end up calling corresponding methods or Builders in the
# DefaultEnvironment().
GlobalDefaultEnvironmentFunctions = [
    # Methods from the SConsEnvironment class, above.
    'Default',
    'EnsurePythonVersion',
    'EnsureSConsVersion',
    'Exit',
    'Export',
    'GetLaunchDir',
    'Help',
    'Import',
    #'SConscript', is handled separately, below.
    'SConscriptChdir',
    # Methods from the Environment.Base class.
    'AddPostAction',
    'AddPreAction',
    'Alias',
    'AlwaysBuild',
    'BuildDir',
    'CacheDir',
    'Clean',
    #The Command() method is handled separately, below.
    'Decider',
    'Depends',
    'Dir',
    'NoClean',
    'NoCache',
    'Entry',
    'Execute',
    'File',
    'FindFile',
    'FindInstalledFiles',
    'FindSourceFiles',
    'Flatten',
    'GetBuildPath',
    'Glob',
    'Ignore',
    'Install',
    'InstallAs',
    'Literal',
    'Local',
    'ParseDepends',
    'Precious',
    'Repository',
    'Requires',
    'SConsignFile',
    'SideEffect',
    'SourceCode',
    'SourceSignatures',
    'Split',
    'Tag',
    'TargetSignatures',
    'Value',
    'VariantDir',
]

GlobalDefaultBuilders = [
    # Supported builders.
    'CFile',
    'CXXFile',
    'DVI',
    'Jar',
    'Java',
    'JavaH',
    'Library',
    'M4',
    'MSVSProject',
    'Object',
    'PCH',
    'PDF',
    'PostScript',
    'Program',
    'RES',
    'RMIC',
    'SharedLibrary',
    'SharedObject',
    'StaticLibrary',
    'StaticObject',
    'Tar',
    'TypeLibrary',
    'Zip',
    'Package',
]

# Bind each name at module level to a DefaultEnvironmentCall proxy that
# forwards to the corresponding method on the default Environment.
# (Python 2 exec-statement syntax; this file predates Python 3.)
for name in GlobalDefaultEnvironmentFunctions + GlobalDefaultBuilders:
    exec "%s = _SConscript.DefaultEnvironmentCall(%s)" % (name, repr(name))
del name

# There are a handful of variables that used to live in the
# Script/SConscript.py module that some SConscript files out there were
# accessing directly as SCons.Script.SConscript.*. The problem is that
# "SConscript" in this namespace is no longer a module, it's a global
# function call--or more precisely, an object that implements a global
# function call through the default Environment. Nevertheless, we can
# maintain backwards compatibility for SConscripts that were reaching in
# this way by hanging some attributes off the "SConscript" object here.
SConscript = _SConscript.DefaultEnvironmentCall('SConscript')

# Make SConscript look enough like the module it used to be so
# that pychecker doesn't barf.
SConscript.__name__ = 'SConscript'

SConscript.Arguments = ARGUMENTS
SConscript.ArgList = ARGLIST
SConscript.BuildTargets = BUILD_TARGETS
SConscript.CommandLineTargets = COMMAND_LINE_TARGETS
SConscript.DefaultTargets = DEFAULT_TARGETS

# The global Command() function must be handled differently than the
# global functions for other construction environment methods because
# we want people to be able to use Actions that must expand $TARGET
# and $SOURCE later, when (and if) the Action is invoked to build
# the target(s). We do this with the subst=1 argument, which creates
# a DefaultEnvironmentCall instance that wraps up a normal default
# construction environment that performs variable substitution, not a
# proxy that doesn't.
#
# There's a flaw here, though, because any other $-variables on a command
# line will *also* be expanded, each to a null string, but that should
# only be a problem in the unusual case where someone was passing a '$'
# on a command line and *expected* the $ to get through to the shell
# because they were calling Command() and not env.Command()... This is
# unlikely enough that we're going to leave this as is and cross that
# bridge if someone actually comes to it.
Command = _SConscript.DefaultEnvironmentCall('Command', subst=1)

# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| apache-2.0 |
vrv/tensorflow | tensorflow/contrib/learn/python/learn/ops/embeddings_ops.py | 116 | 3510 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""TensorFlow Ops to work with embeddings.
Note: categorical variables are handled via embeddings in many cases.
For example, in case of words.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.framework import deprecated
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops as array_ops_
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn
from tensorflow.python.ops import variable_scope as vs
@deprecated('2016-12-01', 'Use `tf.embedding_lookup` instead.')
def embedding_lookup(params, ids, name='embedding_lookup'):
  """Provides a N dimensional version of tf.embedding_lookup.

  Ids are flattened to a 1d tensor before being passed to embedding_lookup
  then, they are unflattened to match the original ids shape plus an extra
  leading dimension of the size of the embeddings.

  Args:
    params: List of tensors of size D0 x D1 x ... x Dn-2 x Dn-1.
    ids: N-dimensional tensor of B0 x B1 x .. x Bn-2 x Bn-1.
      Must contain indexes into params.
    name: Optional name for the op.

  Returns:
    A tensor of size B0 x B1 x .. x Bn-2 x Bn-1 x D1 x ... x Dn-2 x Dn-1
    containing the values from the params tensor(s) for indices in ids.

  Raises:
    ValueError: if some parameters are invalid.
  """
  with ops.name_scope(name, 'embedding_lookup', [params, ids]):
    params = ops.convert_to_tensor(params)
    ids = ops.convert_to_tensor(ids)
    shape = array_ops_.shape(ids)
    # Collapse ids to rank 1 (reduce_prod with keep_dims yields a 1-element
    # shape vector) so the underlying lookup sees a plain index vector.
    ids_flat = array_ops_.reshape(
        ids, math_ops.reduce_prod(shape, keep_dims=True))
    embeds_flat = nn.embedding_lookup(params, ids_flat, name)
    # Restore the original ids shape, with one trailing embedding dimension.
    embed_shape = array_ops_.concat([shape, [-1]], 0)
    embeds = array_ops_.reshape(embeds_flat, embed_shape)
    # Propagate the best static shape information available to the graph.
    embeds.set_shape(ids.get_shape().concatenate(params.get_shape()[1:]))
    return embeds
@deprecated('2016-12-01', 'Use `tf.contrib.layers.embed_sequence` instead.')
def categorical_variable(tensor_in, n_classes, embedding_size, name):
  """Creates an embedding for categorical variable with given number of classes.

  Args:
    tensor_in: Input tensor with class identifier (can be batch or
      N-dimensional).
    n_classes: Number of classes.
    embedding_size: Size of embedding vector to represent each class.
    name: Name of this categorical variable.

  Returns:
    Tensor of input shape, with additional dimension for embedding.

  Example:
    Calling categorical_variable([1, 2], 5, 10, "my_cat"), will return 2 x 10
    tensor, where each row is representation of the class.
  """
  with vs.variable_scope(name):
    # One trainable row per class; the variable is created (or reused,
    # depending on the surrounding variable scope) as "<name>_embeddings".
    embeddings = vs.get_variable(name + '_embeddings',
                                 [n_classes, embedding_size])
    return embedding_lookup(embeddings, tensor_in)
| apache-2.0 |
edx-solutions/edx-platform | openedx/core/djangoapps/credentials/tests/test_tasks.py | 4 | 2124 | """
Test credentials tasks
"""
import mock
from django.conf import settings
from django.test import TestCase, override_settings
from openedx.core.djangolib.testing.utils import skip_unless_lms
from student.tests.factories import UserFactory
from ..tasks.v1 import tasks
TASKS_MODULE = 'openedx.core.djangoapps.credentials.tasks.v1.tasks'
def boom():
    """Always raise; used as a mock side_effect to force task retries."""
    raise Exception('boom')
@skip_unless_lms
@mock.patch(TASKS_MODULE + '.get_credentials_api_client')
@override_settings(CREDENTIALS_SERVICE_USERNAME='test-service-username')
class TestSendGradeToCredentialTask(TestCase):
    """
    Tests for the 'send_grade_to_credentials' task.
    """

    def setUp(self):
        super(TestSendGradeToCredentialTask, self).setUp()
        self.user = UserFactory.create(username=settings.CREDENTIALS_SERVICE_USERNAME)

    def test_happy_path(self, mock_get_api_client):
        """
        A successful run posts the grade payload once via the service client.
        """
        credentials_client = mock.MagicMock()
        mock_get_api_client.return_value = credentials_client

        tasks.send_grade_to_credentials.delay('user', 'course-v1:org+course+run', True, 'A', 1.0).get()

        self.assertEqual(mock_get_api_client.call_count, 1)
        self.assertEqual(mock_get_api_client.call_args[0], (self.user,))
        self.assertDictEqual(mock_get_api_client.call_args[1], {'org': 'org'})
        self.assertEqual(credentials_client.grades.post.call_count, 1)
        expected_payload = {
            'username': 'user',
            'course_run': 'course-v1:org+course+run',
            'letter_grade': 'A',
            'percent_grade': 1.0,
            'verified': True,
        }
        self.assertDictEqual(credentials_client.grades.post.call_args[0][0], expected_payload)

    def test_retry(self, mock_get_api_client):
        """
        The task retries up to MAX_RETRIES when an exception occurs.
        """
        mock_get_api_client.side_effect = boom

        async_result = tasks.send_grade_to_credentials.delay(
            'user', 'course-v1:org+course+run', True, 'A', 1.0)

        self.assertRaises(Exception, async_result.get)
        self.assertEqual(mock_get_api_client.call_count, tasks.MAX_RETRIES + 1)
| agpl-3.0 |
xbmc/xbmc-antiquated | xbmc/lib/libPython/Python/Lib/stat.py | 145 | 1667 | """Constants/functions for interpreting results of os.stat() and os.lstat().
Suggested usage: from stat import *
"""
# XXX Strictly spoken, this module may have to be adapted for each POSIX
# implementation; in practice, however, the numeric constants used by
# stat() are almost universal (even for stat() emulations on non-UNIX
# systems like MS-DOS).
# Indices for stat struct members in tuple returned by os.stat()

ST_MODE = 0   # protection bits / file type
ST_INO = 1    # inode number
ST_DEV = 2    # device
ST_NLINK = 3  # number of hard links
ST_UID = 4    # user id of owner
ST_GID = 5    # group id of owner
ST_SIZE = 6   # size in bytes
ST_ATIME = 7  # time of last access
ST_MTIME = 8  # time of last modification
ST_CTIME = 9  # time of last metadata change (platform dependent)

# Extract bits from the mode

def S_IMODE(mode):
    """Return the permission bits of mode (including set-id/sticky bits)."""
    return mode & 07777

def S_IFMT(mode):
    """Return the file-type portion of mode."""
    return mode & 0170000

# Constants used as S_IFMT() for various file types
# (not all are implemented on all systems)

S_IFDIR = 0040000   # directory
S_IFCHR = 0020000   # character device
S_IFBLK = 0060000   # block device
S_IFREG = 0100000   # regular file
S_IFIFO = 0010000   # FIFO / named pipe
S_IFLNK = 0120000   # symbolic link
S_IFSOCK = 0140000  # socket

# Functions to test for each file type

def S_ISDIR(mode):
    """Return True if mode is from a directory."""
    return S_IFMT(mode) == S_IFDIR

def S_ISCHR(mode):
    """Return True if mode is from a character special device."""
    return S_IFMT(mode) == S_IFCHR

def S_ISBLK(mode):
    """Return True if mode is from a block special device."""
    return S_IFMT(mode) == S_IFBLK

def S_ISREG(mode):
    """Return True if mode is from a regular file."""
    return S_IFMT(mode) == S_IFREG

def S_ISFIFO(mode):
    """Return True if mode is from a FIFO (named pipe)."""
    return S_IFMT(mode) == S_IFIFO

def S_ISLNK(mode):
    """Return True if mode is from a symbolic link."""
    return S_IFMT(mode) == S_IFLNK

def S_ISSOCK(mode):
    """Return True if mode is from a socket."""
    return S_IFMT(mode) == S_IFSOCK

# Names for permission bits

S_ISUID = 04000    # set UID bit
S_ISGID = 02000    # set GID bit
S_ENFMT = S_ISGID  # file locking enforcement (shares the S_ISGID value)
S_ISVTX = 01000    # sticky bit
S_IREAD = 00400    # Unix V7 synonym for S_IRUSR
S_IWRITE = 00200   # Unix V7 synonym for S_IWUSR
S_IEXEC = 00100    # Unix V7 synonym for S_IXUSR
S_IRWXU = 00700    # mask for owner permissions
S_IRUSR = 00400    # read by owner
S_IWUSR = 00200    # write by owner
S_IXUSR = 00100    # execute by owner
S_IRWXG = 00070    # mask for group permissions
S_IRGRP = 00040    # read by group
S_IWGRP = 00020    # write by group
S_IXGRP = 00010    # execute by group
S_IRWXO = 00007    # mask for others' permissions
S_IROTH = 00004    # read by others
S_IWOTH = 00002    # write by others
S_IXOTH = 00001    # execute by others
| gpl-2.0 |
sonnyhu/numpy | numpy/lib/stride_tricks.py | 57 | 6761 | """
Utilities that manipulate strides to achieve desirable effects.
An explanation of strides can be found in the "ndarray.rst" file in the
NumPy reference guide.
"""
from __future__ import division, absolute_import, print_function
import numpy as np
__all__ = ['broadcast_to', 'broadcast_arrays']
class DummyArray(object):
    """Minimal carrier object for an ``__array_interface__`` dictionary.

    Holding a reference to *base* keeps the memory described by the
    interface alive for as long as this object (or a view built from
    it) exists.
    """
    def __init__(self, interface, base=None):
        self.base = base
        self.__array_interface__ = interface
def _maybe_view_as_subclass(original_array, new_array):
if type(original_array) is not type(new_array):
# if input was an ndarray subclass and subclasses were OK,
# then view the result as that subclass.
new_array = new_array.view(type=type(original_array))
# Since we have done something akin to a view from original_array, we
# should let the subclass finalize (if it has it implemented, i.e., is
# not None).
if new_array.__array_finalize__:
new_array.__array_finalize__(original_array)
return new_array
def as_strided(x, shape=None, strides=None, subok=False):
    """Create a view of *x* with the given shape and strides.

    No data is copied: the view is built from ``x.__array_interface__``
    with the shape/strides entries overridden, so overlapping or
    out-of-bounds views are possible -- use with care.
    """
    # Normalise the input to an ndarray, keeping the subclass if requested.
    x = np.array(x, copy=False, subok=subok)
    iface = dict(x.__array_interface__)
    for key, value in (('shape', shape), ('strides', strides)):
        if value is not None:
            iface[key] = tuple(value)
    view_arr = np.asarray(DummyArray(iface, base=x))
    if x.dtype.fields is not None and view_arr.dtype.fields is None:
        # The interface round-trip loses structured dtypes of the form
        # [('', 'Vx')]; restore the original dtype in that case.
        view_arr.dtype = x.dtype
    return _maybe_view_as_subclass(x, view_arr)
def _broadcast_to(array, shape, subok, readonly):
    """Shared implementation behind broadcast_to() and broadcast_arrays()."""
    if np.iterable(shape):
        shape = tuple(shape)
    else:
        # Permit a bare integer as the target shape.
        shape = (shape,)
    array = np.array(array, copy=False, subok=subok)
    if array.shape and not shape:
        raise ValueError('cannot broadcast a non-scalar to a scalar array')
    if any(size < 0 for size in shape):
        raise ValueError('all elements of broadcast shape must be non-negative')
    # nditer with an explicit itershape yields a broadcast view of the
    # operand without copying any data.
    it = np.nditer(
        (array,), flags=['multi_index', 'refs_ok', 'zerosize_ok'],
        op_flags=['readonly'], itershape=shape, order='C')
    result = _maybe_view_as_subclass(array, it.itviews[0])
    if not readonly and array.flags.writeable:
        result.flags.writeable = True
    return result
def broadcast_to(array, shape, subok=False):
    """Broadcast an array to a new shape.

    Parameters
    ----------
    array : array_like
        The array to broadcast.
    shape : tuple
        The shape of the desired array.
    subok : bool, optional
        If True, sub-classes are passed through; otherwise the returned
        array is forced to be a base-class array (the default).

    Returns
    -------
    broadcast : array
        A *readonly* view on the original array with the given shape.
        It is typically not contiguous, and several elements of the
        result may share a single memory location.

    Raises
    ------
    ValueError
        If the array is not compatible with the new shape according to
        NumPy's broadcasting rules.

    Notes
    -----
    .. versionadded:: 1.10.0

    Examples
    --------
    >>> x = np.array([1, 2, 3])
    >>> np.broadcast_to(x, (3, 3))
    array([[1, 2, 3],
           [1, 2, 3],
           [1, 2, 3]])
    """
    return _broadcast_to(array, shape, subok=subok, readonly=True)
def _broadcast_shape(*args):
    """Return the shape resulting from broadcasting all *args* together."""
    if not args:
        raise ValueError('must provide at least one argument')
    if len(args) == 1:
        # np.broadcast does not accept a lone argument.
        return np.asarray(args[0]).shape
    # Use the old-style broadcast object: np.nditer does not handle
    # size-0 arrays consistently.  np.broadcast accepts at most 32
    # operands, so fold the remainder in batches of 31.
    b = np.broadcast(*args[:32])
    for pos in range(32, len(args), 31):
        # np.broadcast mistakes np.broadcast instances for scalars, so
        # collapse the running result into a cheap broadcast view first
        # (avoids allocating the full array).
        b = np.broadcast(broadcast_to(0, b.shape), *args[pos:(pos + 31)])
    return b.shape
def broadcast_arrays(*args, **kwargs):
    """
    Broadcast any number of arrays against each other.

    Parameters
    ----------
    `*args` : array_likes
        The arrays to broadcast.
    subok : bool, optional
        If True, then sub-classes will be passed-through, otherwise
        the returned arrays will be forced to be a base-class array (default).

    Returns
    -------
    broadcasted : list of arrays
        These arrays are views on the original arrays.  They are typically
        not contiguous.  Furthermore, more than one element of a
        broadcasted array may refer to a single memory location.  If you
        need to write to the arrays, make copies first.

    Raises
    ------
    TypeError
        If an unexpected keyword argument is passed.

    Examples
    --------
    >>> x = np.array([[1,2,3]])
    >>> y = np.array([[1],[2],[3]])
    >>> np.broadcast_arrays(x, y)
    [array([[1, 2, 3],
           [1, 2, 3],
           [1, 2, 3]]), array([[1, 1, 1],
           [2, 2, 2],
           [3, 3, 3]])]

    Here is a useful idiom for getting contiguous copies instead of
    non-contiguous views.

    >>> [np.array(a) for a in np.broadcast_arrays(x, y)]
    [array([[1, 2, 3],
           [1, 2, 3],
           [1, 2, 3]]), array([[1, 1, 1],
           [2, 2, 2],
           [3, 3, 3]])]
    """
    # nditer is not used here to avoid the limit of 32 arrays.
    # Otherwise, something like the following one-liner would suffice:
    # return np.nditer(args, flags=['multi_index', 'zerosize_ok'],
    #                  order='C').itviews
    subok = kwargs.pop('subok', False)
    if kwargs:
        # BUG FIX: the original called kwargs.pop() with no arguments,
        # which itself raises TypeError before the intended message can
        # be formatted.  Report the first unexpected keyword instead.
        raise TypeError('broadcast_arrays() got an unexpected keyword '
                        'argument {}'.format(list(kwargs.keys())[0]))
    args = [np.array(_m, copy=False, subok=subok) for _m in args]
    shape = _broadcast_shape(*args)
    if all(array.shape == shape for array in args):
        # Common case where nothing needs to be broadcasted.
        return args
    # TODO: consider making the results of broadcast_arrays readonly to match
    # broadcast_to. This will require a deprecation cycle.
    return [_broadcast_to(array, shape, subok=subok, readonly=False)
            for array in args]
| bsd-3-clause |
djbaldey/django | tests/template_tests/syntax_tests/test_if_equal.py | 368 | 9892 | from django.test import SimpleTestCase
from ..utils import setup
class IfEqualTagTests(SimpleTestCase):
    """Tests for the ``{% ifequal %}`` template tag.

    Covers variable/literal comparison, smart splitting of quoted
    arguments, numeric-literal resolution, and filter expressions used
    as arguments.  Missing context variables resolve as equal to each
    other (see test_ifequal10) but not to literals (test_ifequal09).
    """
    @setup({'ifequal01': '{% ifequal a b %}yes{% endifequal %}'})
    def test_ifequal01(self):
        output = self.engine.render_to_string('ifequal01', {'a': 1, 'b': 2})
        self.assertEqual(output, '')
    @setup({'ifequal02': '{% ifequal a b %}yes{% endifequal %}'})
    def test_ifequal02(self):
        output = self.engine.render_to_string('ifequal02', {'a': 1, 'b': 1})
        self.assertEqual(output, 'yes')
    @setup({'ifequal03': '{% ifequal a b %}yes{% else %}no{% endifequal %}'})
    def test_ifequal03(self):
        output = self.engine.render_to_string('ifequal03', {'a': 1, 'b': 2})
        self.assertEqual(output, 'no')
    @setup({'ifequal04': '{% ifequal a b %}yes{% else %}no{% endifequal %}'})
    def test_ifequal04(self):
        output = self.engine.render_to_string('ifequal04', {'a': 1, 'b': 1})
        self.assertEqual(output, 'yes')
    @setup({'ifequal05': '{% ifequal a \'test\' %}yes{% else %}no{% endifequal %}'})
    def test_ifequal05(self):
        output = self.engine.render_to_string('ifequal05', {'a': 'test'})
        self.assertEqual(output, 'yes')
    @setup({'ifequal06': '{% ifequal a \'test\' %}yes{% else %}no{% endifequal %}'})
    def test_ifequal06(self):
        output = self.engine.render_to_string('ifequal06', {'a': 'no'})
        self.assertEqual(output, 'no')
    @setup({'ifequal07': '{% ifequal a "test" %}yes{% else %}no{% endifequal %}'})
    def test_ifequal07(self):
        output = self.engine.render_to_string('ifequal07', {'a': 'test'})
        self.assertEqual(output, 'yes')
    @setup({'ifequal08': '{% ifequal a "test" %}yes{% else %}no{% endifequal %}'})
    def test_ifequal08(self):
        output = self.engine.render_to_string('ifequal08', {'a': 'no'})
        self.assertEqual(output, 'no')
    @setup({'ifequal09': '{% ifequal a "test" %}yes{% else %}no{% endifequal %}'})
    def test_ifequal09(self):
        output = self.engine.render_to_string('ifequal09')
        self.assertEqual(output, 'no')
    @setup({'ifequal10': '{% ifequal a b %}yes{% else %}no{% endifequal %}'})
    def test_ifequal10(self):
        output = self.engine.render_to_string('ifequal10')
        self.assertEqual(output, 'yes')
    # SMART SPLITTING
    @setup({'ifequal-split01': '{% ifequal a "test man" %}yes{% else %}no{% endifequal %}'})
    def test_ifequal_split01(self):
        output = self.engine.render_to_string('ifequal-split01')
        self.assertEqual(output, 'no')
    @setup({'ifequal-split02': '{% ifequal a "test man" %}yes{% else %}no{% endifequal %}'})
    def test_ifequal_split02(self):
        output = self.engine.render_to_string('ifequal-split02', {'a': 'foo'})
        self.assertEqual(output, 'no')
    @setup({'ifequal-split03': '{% ifequal a "test man" %}yes{% else %}no{% endifequal %}'})
    def test_ifequal_split03(self):
        output = self.engine.render_to_string('ifequal-split03', {'a': 'test man'})
        self.assertEqual(output, 'yes')
    @setup({'ifequal-split04': '{% ifequal a \'test man\' %}yes{% else %}no{% endifequal %}'})
    def test_ifequal_split04(self):
        output = self.engine.render_to_string('ifequal-split04', {'a': 'test man'})
        self.assertEqual(output, 'yes')
    @setup({'ifequal-split05': '{% ifequal a \'i "love" you\' %}yes{% else %}no{% endifequal %}'})
    def test_ifequal_split05(self):
        output = self.engine.render_to_string('ifequal-split05', {'a': ''})
        self.assertEqual(output, 'no')
    @setup({'ifequal-split06': '{% ifequal a \'i "love" you\' %}yes{% else %}no{% endifequal %}'})
    def test_ifequal_split06(self):
        output = self.engine.render_to_string('ifequal-split06', {'a': 'i "love" you'})
        self.assertEqual(output, 'yes')
    @setup({'ifequal-split07': '{% ifequal a \'i "love" you\' %}yes{% else %}no{% endifequal %}'})
    def test_ifequal_split07(self):
        output = self.engine.render_to_string('ifequal-split07', {'a': 'i love you'})
        self.assertEqual(output, 'no')
    @setup({'ifequal-split08': r"{% ifequal a 'I\'m happy' %}yes{% else %}no{% endifequal %}"})
    def test_ifequal_split08(self):
        output = self.engine.render_to_string('ifequal-split08', {'a': "I'm happy"})
        self.assertEqual(output, 'yes')
    @setup({'ifequal-split09': r"{% ifequal a 'slash\man' %}yes{% else %}no{% endifequal %}"})
    def test_ifequal_split09(self):
        output = self.engine.render_to_string('ifequal-split09', {'a': 'slash\man'})
        self.assertEqual(output, 'yes')
    @setup({'ifequal-split10': r"{% ifequal a 'slash\man' %}yes{% else %}no{% endifequal %}"})
    def test_ifequal_split10(self):
        output = self.engine.render_to_string('ifequal-split10', {'a': 'slashman'})
        self.assertEqual(output, 'no')
    # NUMERIC RESOLUTION
    @setup({'ifequal-numeric01': '{% ifequal x 5 %}yes{% endifequal %}'})
    def test_ifequal_numeric01(self):
        output = self.engine.render_to_string('ifequal-numeric01', {'x': '5'})
        self.assertEqual(output, '')
    @setup({'ifequal-numeric02': '{% ifequal x 5 %}yes{% endifequal %}'})
    def test_ifequal_numeric02(self):
        output = self.engine.render_to_string('ifequal-numeric02', {'x': 5})
        self.assertEqual(output, 'yes')
    @setup({'ifequal-numeric03': '{% ifequal x 5.2 %}yes{% endifequal %}'})
    def test_ifequal_numeric03(self):
        output = self.engine.render_to_string('ifequal-numeric03', {'x': 5})
        self.assertEqual(output, '')
    @setup({'ifequal-numeric04': '{% ifequal x 5.2 %}yes{% endifequal %}'})
    def test_ifequal_numeric04(self):
        output = self.engine.render_to_string('ifequal-numeric04', {'x': 5.2})
        self.assertEqual(output, 'yes')
    @setup({'ifequal-numeric05': '{% ifequal x 0.2 %}yes{% endifequal %}'})
    def test_ifequal_numeric05(self):
        output = self.engine.render_to_string('ifequal-numeric05', {'x': 0.2})
        self.assertEqual(output, 'yes')
    @setup({'ifequal-numeric06': '{% ifequal x .2 %}yes{% endifequal %}'})
    def test_ifequal_numeric06(self):
        output = self.engine.render_to_string('ifequal-numeric06', {'x': 0.2})
        self.assertEqual(output, 'yes')
    @setup({'ifequal-numeric07': '{% ifequal x 2. %}yes{% endifequal %}'})
    def test_ifequal_numeric07(self):
        # "2." is not a valid numeric literal for the tag, so no match.
        output = self.engine.render_to_string('ifequal-numeric07', {'x': 2})
        self.assertEqual(output, '')
    @setup({'ifequal-numeric08': '{% ifequal x "5" %}yes{% endifequal %}'})
    def test_ifequal_numeric08(self):
        output = self.engine.render_to_string('ifequal-numeric08', {'x': 5})
        self.assertEqual(output, '')
    @setup({'ifequal-numeric09': '{% ifequal x "5" %}yes{% endifequal %}'})
    def test_ifequal_numeric09(self):
        output = self.engine.render_to_string('ifequal-numeric09', {'x': '5'})
        self.assertEqual(output, 'yes')
    @setup({'ifequal-numeric10': '{% ifequal x -5 %}yes{% endifequal %}'})
    def test_ifequal_numeric10(self):
        output = self.engine.render_to_string('ifequal-numeric10', {'x': -5})
        self.assertEqual(output, 'yes')
    @setup({'ifequal-numeric11': '{% ifequal x -5.2 %}yes{% endifequal %}'})
    def test_ifequal_numeric11(self):
        output = self.engine.render_to_string('ifequal-numeric11', {'x': -5.2})
        self.assertEqual(output, 'yes')
    @setup({'ifequal-numeric12': '{% ifequal x +5 %}yes{% endifequal %}'})
    def test_ifequal_numeric12(self):
        output = self.engine.render_to_string('ifequal-numeric12', {'x': 5})
        self.assertEqual(output, 'yes')
    # FILTER EXPRESSIONS AS ARGUMENTS
    @setup({'ifequal-filter01': '{% ifequal a|upper "A" %}x{% endifequal %}'})
    def test_ifequal_filter01(self):
        output = self.engine.render_to_string('ifequal-filter01', {'a': 'a'})
        self.assertEqual(output, 'x')
    @setup({'ifequal-filter02': '{% ifequal "A" a|upper %}x{% endifequal %}'})
    def test_ifequal_filter02(self):
        output = self.engine.render_to_string('ifequal-filter02', {'a': 'a'})
        self.assertEqual(output, 'x')
    @setup({'ifequal-filter03': '{% ifequal a|upper b|upper %}x{% endifequal %}'})
    def test_ifequal_filter03(self):
        output = self.engine.render_to_string('ifequal-filter03', {'a': 'x', 'b': 'X'})
        self.assertEqual(output, 'x')
    @setup({'ifequal-filter04': '{% ifequal x|slice:"1" "a" %}x{% endifequal %}'})
    def test_ifequal_filter04(self):
        output = self.engine.render_to_string('ifequal-filter04', {'x': 'aaa'})
        self.assertEqual(output, 'x')
    @setup({'ifequal-filter05': '{% ifequal x|slice:"1"|upper "A" %}x{% endifequal %}'})
    def test_ifequal_filter05(self):
        output = self.engine.render_to_string('ifequal-filter05', {'x': 'aaa'})
        self.assertEqual(output, 'x')
class IfNotEqualTagTests(SimpleTestCase):
    """Tests for the ``{% ifnotequal %}`` template tag (the negated
    counterpart of ``{% ifequal %}``), with and without an else branch.
    """
    @setup({'ifnotequal01': '{% ifnotequal a b %}yes{% endifnotequal %}'})
    def test_ifnotequal01(self):
        output = self.engine.render_to_string('ifnotequal01', {'a': 1, 'b': 2})
        self.assertEqual(output, 'yes')
    @setup({'ifnotequal02': '{% ifnotequal a b %}yes{% endifnotequal %}'})
    def test_ifnotequal02(self):
        output = self.engine.render_to_string('ifnotequal02', {'a': 1, 'b': 1})
        self.assertEqual(output, '')
    @setup({'ifnotequal03': '{% ifnotequal a b %}yes{% else %}no{% endifnotequal %}'})
    def test_ifnotequal03(self):
        output = self.engine.render_to_string('ifnotequal03', {'a': 1, 'b': 2})
        self.assertEqual(output, 'yes')
    @setup({'ifnotequal04': '{% ifnotequal a b %}yes{% else %}no{% endifnotequal %}'})
    def test_ifnotequal04(self):
        output = self.engine.render_to_string('ifnotequal04', {'a': 1, 'b': 1})
        self.assertEqual(output, 'no')
| bsd-3-clause |
chiragjogi/odoo | addons/website_crm/controllers/main.py | 250 | 5499 | # -*- coding: utf-8 -*-
import base64
import werkzeug
import werkzeug.urls
from openerp import http, SUPERUSER_ID
from openerp.http import request
from openerp.tools.translate import _
class contactus(http.Controller):
    """Public website "Contact us" controller.

    Renders the contact form and converts submissions into ``crm.lead``
    records, attaching any uploaded files to the created lead.
    """
    def generate_google_map_url(self, street, city, city_zip, country_name):
        """Return a static Google Maps image URL centred on the address."""
        url = "http://maps.googleapis.com/maps/api/staticmap?center=%s&sensor=false&zoom=8&size=298x298" % werkzeug.url_quote_plus(
            '%s, %s %s, %s' % (street, city, city_zip, country_name)
        )
        return url
    @http.route(['/page/website.contactus', '/page/contactus'], type='http', auth="public", website=True)
    def contact(self, **kwargs):
        """Render the contact page, pre-filling known lead fields from *kwargs*."""
        values = {}
        for field in ['description', 'partner_name', 'phone', 'contact_name', 'email_from', 'name']:
            if kwargs.get(field):
                values[field] = kwargs.pop(field)
        # Any remaining kwargs are forwarded to the template untouched.
        values.update(kwargs=kwargs.items())
        return request.website.render("website.contactus", values)
    def create_lead(self, request, values, kwargs):
        """Create the crm.lead record; hook point meant to be overridden."""
        cr, context = request.cr, request.context
        return request.registry['crm.lead'].create(cr, SUPERUSER_ID, values, context=dict(context, mail_create_nosubscribe=True))
    def preRenderThanks(self, values, kwargs):
        """Build the rendering context for the thank-you page; overridable."""
        company = request.website.company_id
        return {
            'google_map_url': self.generate_google_map_url(company.street, company.city, company.zip, company.country_id and company.country_id.name_get()[0][1] or ''),
            '_values': values,
            '_kwargs': kwargs,
        }
    def get_contactus_response(self, values, kwargs):
        """Render the post-submission page (view selectable via view_callback)."""
        values = self.preRenderThanks(values, kwargs)
        return request.website.render(kwargs.get("view_callback", "website_crm.contactus_thanks"), values)
    @http.route(['/crm/contactus'], type='http', auth="public", website=True)
    def contactus(self, **kwargs):
        """Handle a contact-form submission and create a crm.lead from it.

        Unknown form fields are appended to the lead description; file
        uploads become ir.attachment records linked to the new lead.
        """
        def dict_to_str(title, dictvar):
            # Format the collected "key: value" strings under a heading,
            # for appending to the lead's description text.
            ret = "\n\n%s" % title
            for field in dictvar:
                ret += "\n%s" % field
            return ret
        _TECHNICAL = ['show_info', 'view_from', 'view_callback']  # Only use for behavior, don't stock it
        _BLACKLIST = ['id', 'create_uid', 'create_date', 'write_uid', 'write_date', 'user_id', 'active']  # Allow in description
        _REQUIRED = ['name', 'contact_name', 'email_from', 'description']  # Could be improved including required from model
        post_file = []  # List of file to add to ir_attachment once we have the ID
        post_description = []  # Info to add after the message
        values = {}
        values['medium_id'] = request.registry['ir.model.data'].xmlid_to_res_id(request.cr, SUPERUSER_ID, 'crm.crm_medium_website')
        values['section_id'] = request.registry['ir.model.data'].xmlid_to_res_id(request.cr, SUPERUSER_ID, 'website.salesteam_website_sales')
        for field_name, field_value in kwargs.items():
            if hasattr(field_value, 'filename'):
                post_file.append(field_value)
            elif field_name in request.registry['crm.lead']._fields and field_name not in _BLACKLIST:
                values[field_name] = field_value
            elif field_name not in _TECHNICAL:  # allow to add some free fields or blacklisted field like ID
                post_description.append("%s: %s" % (field_name, field_value))
        if "name" not in kwargs and values.get("contact_name"):  # if kwarg.name is empty, it's an error, we cannot copy the contact_name
            values["name"] = values.get("contact_name")
        # fields validation : Check that required field from model crm_lead exists
        error = set(field for field in _REQUIRED if not values.get(field))
        if error:
            # Re-render the form with the error set so missing fields can
            # be highlighted.
            values = dict(values, error=error, kwargs=kwargs.items())
            return request.website.render(kwargs.get("view_from", "website.contactus"), values)
        # description is required, so it is always already initialized
        if post_description:
            values['description'] += dict_to_str(_("Custom Fields: "), post_description)
        if kwargs.get("show_info"):
            post_description = []
            environ = request.httprequest.headers.environ
            post_description.append("%s: %s" % ("IP", environ.get("REMOTE_ADDR")))
            post_description.append("%s: %s" % ("USER_AGENT", environ.get("HTTP_USER_AGENT")))
            post_description.append("%s: %s" % ("ACCEPT_LANGUAGE", environ.get("HTTP_ACCEPT_LANGUAGE")))
            post_description.append("%s: %s" % ("REFERER", environ.get("HTTP_REFERER")))
            values['description'] += dict_to_str(_("Environ Fields: "), post_description)
        lead_id = self.create_lead(request, dict(values, user_id=False), kwargs)
        values.update(lead_id=lead_id)
        if lead_id:
            for field_value in post_file:
                attachment_value = {
                    'name': field_value.filename,
                    'res_name': field_value.filename,
                    'res_model': 'crm.lead',
                    'res_id': lead_id,
                    'datas': base64.encodestring(field_value.read()),
                    'datas_fname': field_value.filename,
                }
                request.registry['ir.attachment'].create(request.cr, SUPERUSER_ID, attachment_value, context=request.context)
        return self.get_contactus_response(values, kwargs)
| agpl-3.0 |
mitdbg/aurum-datadiscovery | nearpy/__init__.py | 8 | 1126 | # Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import absolute_import
from nearpy.engine import Engine
| mit |
Jianchun1/zulip | zerver/lib/str_utils.py | 5 | 3687 | """
String Utilities:
This module helps in converting strings from one type to another.
Currently we have strings of 3 semantic types:
1. text strings: These strings are used to represent all textual data,
like people's names, stream names, content of messages, etc.
These strings can contain non-ASCII characters, so its type should be
six.text_type (which is `str` in python 3 and `unicode` in python 2).
2. binary strings: These strings are used to represent binary data.
This should be of type six.binary_type (which is `bytes` in python 3
and `str` in python 2).
3. native strings: These strings are for internal use only. Strings of
this type are not meant to be stored in database, displayed to end
users, etc. Things like exception names, parameter names, attribute
names, etc should be native strings. These strings should only
contain ASCII characters and they should have type `str`.
There are 3 utility functions provided for converting strings from one type
to another - force_text, force_bytes, force_str
Interconversion between text strings and binary strings can be done by
using encode and decode appropriately or by using the utility functions
force_text and force_bytes.
It is recommended to use the utility functions for other string conversions.
"""
import six
from six import text_type, binary_type
from typing import Any, Mapping, Union, TypeVar, Text
NonBinaryStr = TypeVar('NonBinaryStr', str, text_type)
# NonBinaryStr covers text strings and native strings (never raw bytes).
def force_text(s, encoding='utf-8'):
    # type: (Union[Text, binary_type], str) -> Text
    """Coerce *s* to a text string, decoding binary input with *encoding*."""
    if isinstance(s, binary_type):
        return s.decode(encoding)
    if isinstance(s, text_type):
        return s
    raise TypeError("force_text expects a string type")
def force_bytes(s, encoding='utf-8'):
    # type: (Union[Text, binary_type], str) -> binary_type
    """Coerce *s* to a binary string, encoding text input with *encoding*."""
    if isinstance(s, text_type):
        return s.encode(encoding)
    if isinstance(s, binary_type):
        return s
    raise TypeError("force_bytes expects a string type")
def force_str(s, encoding='utf-8'):
    # type: (Union[Text, binary_type], str) -> str
    """Coerce *s* to a native string (bytes on py2, text on py3).

    The native ``str`` check must come first: on py2 ``str`` is the
    binary type, on py3 it is the text type.
    """
    if isinstance(s, str):
        return s
    if isinstance(s, text_type):
        return s.encode(encoding)
    if isinstance(s, binary_type):
        return s.decode(encoding)
    raise TypeError("force_str expects a string type")
def dict_with_str_keys(dct, encoding='utf-8'):
    # type: (Mapping[NonBinaryStr, Any], str) -> Dict[str, Any]
    """Return a shallow copy of *dct* with every key passed through force_str."""
    result = {}
    for key, value in six.iteritems(dct):
        result[force_str(key, encoding)] = value
    return result
class ModelReprMixin(object):
    """
    This mixin provides a python 2 and 3 compatible way of handling string representation of a model.
    When declaring a model, inherit this mixin before django.db.models.Model.
    Define __unicode__ on your model which returns a six.text_type object.
    This mixin will automatically define __str__ and __repr__ (both derive
    from __unicode__ via force_str).
    """
    def __unicode__(self):
        # type: () -> Text
        # Originally raised an exception, but Django (e.g. the ./manage.py shell)
        # was catching the exception and not displaying any sort of error,
        # so a reminder string is returned instead.
        return u"Implement __unicode__ in your subclass of ModelReprMixin!"
    def __str__(self):
        # type: () -> str
        return force_str(self.__unicode__())
    def __repr__(self):
        # type: () -> str
        return force_str(self.__unicode__())
| apache-2.0 |
massifor/distcc | bench/actions.py | 28 | 3161 | # benchmark -- automated system for testing distcc correctness
# and performance on various source trees.
# Copyright (C) 2002, 2003 by Martin Pool
# Copyright 2008 Google Inc.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
# USA.
# Tuples of (name, default, descr)
all_actions = [('download', True, ''),  # each entry: (name, on-by-default, description)
               ('md5check', True, 'check file was downloaded correctly'),
               ('sweep', True, 'remove build directory before unpacking'),
               ('unpack', True, 'unpack source'),
               ('configure', True, ''),
               ('build', True, ''),
               ('clean', True, 'run "make clean" or equivalent'),
               ('scrub', False, 'remove build directory')]
# Actions done once per project (rather than once per build configuration);
# consumed by remove_unnecessary_actions below.
project_actions = ('download', 'md5check')
def action_help():
    """Print the table of known actions, flagging those enabled by default."""
    print "Actions:"
    for action, default, descr in all_actions:
        # Python 2 and-or idiom: '*' when the action defaults to on.
        default_ch = default and '*' or ' '
        print " %c %-20s %s" % (default_ch, action, descr)
    print " (* = on by default)"
# Names of the actions that are enabled by default (second tuple field true).
default_actions = [a[0] for a in all_actions if a[1]]
def parse_opt_actions(optarg):
    """Split a comma-separated action string, validating each name.

    Raises ValueError (Python 2 raise syntax below) for unknown actions.
    """
    opt_actions = optarg.split(',')
    action_names = [a[0] for a in all_actions]
    for oa in opt_actions:
        if oa not in action_names:
            raise ValueError, ("no such action: %s" % `oa`)
    return opt_actions
def remove_unnecessary_actions(opt_actions, force, did_download, did_configure):
    """Filter *opt_actions* down to what still needs to run.

    With force == 0 and both download and configure already done, the
    download/md5check/sweep/unpack/configure steps are skipped; with
    force <= 1 and the download already done, only the download is
    skipped; otherwise nothing is skipped.

    Returns a pair of lists: (actions performed once per project,
    actions performed for every build of the project).
    """
    if force == 0 and did_configure and did_download:
        skip = ('download', 'md5check', 'sweep', 'unpack', 'configure')
    elif force <= 1 and did_download:
        skip = ('download',)
    else:
        skip = ()
    per_project = []
    per_build = []
    for action in opt_actions:
        if action in skip:
            continue
        if action in project_actions:
            per_project.append(action)
        else:
            per_build.append(action)
    return per_project, per_build
| gpl-2.0 |
betoesquivel/CIE | flask/lib/python2.7/site-packages/mako/pyparser.py | 13 | 7737 | # mako/pyparser.py
# Copyright (C) 2006-2014 the Mako authors and contributors <see AUTHORS file>
#
# This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Handles parsing of Python code.
Parsing to AST is done via _ast on Python > 2.5, otherwise the compiler
module is used.
"""
from mako import exceptions, util, compat
from mako.compat import arg_stringname
import operator
if compat.py3k:
    # words that cannot be assigned to (notably
    # smaller than the total keys in __builtins__)
    reserved = set(['True', 'False', 'None', 'print'])
    # the "id" attribute on a function node
    arg_id = operator.attrgetter('arg')
else:
    # words that cannot be assigned to (notably
    # smaller than the total keys in __builtins__)
    reserved = set(['True', 'False', 'None'])
    # the "id" attribute on a function node
    arg_id = operator.attrgetter('id')
# NOTE(review): restore__ast presumably patches/normalizes the stdlib _ast
# module before _ast_util relies on it -- confirm against mako.util.
import _ast
util.restore__ast(_ast)
from mako import _ast_util
def parse(code, mode='exec', **exception_kwargs):
    """Parse *code* into an AST, re-raising any failure as SyntaxException."""
    try:
        return _ast_util.parse(code, '<unknown>', mode)
    except Exception:
        exc = compat.exception_as()
        raise exceptions.SyntaxException(
            "(%s) %s (%r)" % (
                exc.__class__.__name__,
                exc,
                code[0:50]
            ), **exception_kwargs)
class FindIdentifiers(_ast_util.NodeVisitor):
    """AST visitor that records declared and undeclared identifiers.

    Names assigned at the top level are added to
    ``listener.declared_identifiers``; names read before any visible
    declaration go to ``listener.undeclared_identifiers``.  Names bound
    inside nested functions/lambdas are tracked separately on
    ``local_ident_stack`` so they never count as module-level
    declarations.
    """
    def __init__(self, listener, **exception_kwargs):
        self.in_function = False
        self.in_assign_targets = False
        self.local_ident_stack = set()
        self.listener = listener
        self.exception_kwargs = exception_kwargs
    def _add_declared(self, name):
        """Record *name* as declared at the current scope level."""
        if not self.in_function:
            self.listener.declared_identifiers.add(name)
        else:
            self.local_ident_stack.add(name)
    def visit_ClassDef(self, node):
        self._add_declared(node.name)
    def visit_Assign(self, node):
        # flip around the visiting of Assign so the expression gets
        # evaluated first, in the case of a clause like "x=x+5" (x
        # is undeclared)
        self.visit(node.value)
        in_a = self.in_assign_targets
        self.in_assign_targets = True
        for n in node.targets:
            self.visit(n)
        self.in_assign_targets = in_a
    if compat.py3k:
        # ExceptHandler is in Python 2, but this block only works in
        # Python 3 (and is required there)
        def visit_ExceptHandler(self, node):
            if node.name is not None:
                self._add_declared(node.name)
            if node.type is not None:
                self.visit(node.type)
            for statement in node.body:
                self.visit(statement)
    def visit_Lambda(self, node, *args):
        self._visit_function(node, True)
    def visit_FunctionDef(self, node):
        self._add_declared(node.name)
        self._visit_function(node, False)
    def _expand_tuples(self, args):
        """Yield each argument node, flattening py2 tuple-unpacking args."""
        for arg in args:
            if isinstance(arg, _ast.Tuple):
                for n in arg.elts:
                    yield n
            else:
                yield arg
    def _visit_function(self, node, islambda):
        # push function state onto stack.  dont log any more
        # identifiers as "declared" until outside of the function,
        # but keep logging identifiers as "undeclared".  track
        # argument names in each function header so they arent
        # counted as "undeclared"
        inf = self.in_function
        self.in_function = True
        local_ident_stack = self.local_ident_stack
        self.local_ident_stack = local_ident_stack.union([
            arg_id(arg) for arg in self._expand_tuples(node.args.args)
        ])
        if islambda:
            self.visit(node.body)
        else:
            for n in node.body:
                self.visit(n)
        # restore the enclosing scope's state on the way out
        self.in_function = inf
        self.local_ident_stack = local_ident_stack
    def visit_For(self, node):
        # flip around visit: evaluate the iterable before binding the
        # loop target, mirroring runtime evaluation order
        self.visit(node.iter)
        self.visit(node.target)
        for statement in node.body:
            self.visit(statement)
        for statement in node.orelse:
            self.visit(statement)
    def visit_Name(self, node):
        if isinstance(node.ctx, _ast.Store):
            # this is equivalent to visit_AssName in
            # the old compiler module
            self._add_declared(node.id)
        elif node.id not in reserved and node.id \
            not in self.listener.declared_identifiers and node.id \
            not in self.local_ident_stack:
            self.listener.undeclared_identifiers.add(node.id)
    def visit_Import(self, node):
        for name in node.names:
            if name.asname is not None:
                self._add_declared(name.asname)
            else:
                # "import a.b" binds only the top-level package name "a"
                self._add_declared(name.name.split('.')[0])
    def visit_ImportFrom(self, node):
        for name in node.names:
            if name.asname is not None:
                self._add_declared(name.asname)
            else:
                if name.name == '*':
                    raise exceptions.CompileException(
                        "'import *' is not supported, since all identifier "
                        "names must be explicitly declared. Please use the "
                        "form 'from <modulename> import <name1>, <name2>, "
                        "...' instead.", **self.exception_kwargs)
                self._add_declared(name.name)
class FindTuple(_ast_util.NodeVisitor):
    """Visitor that explodes a Tuple node into per-element code objects.

    Each tuple element is wrapped via *code_factory*, its regenerated
    source is appended to ``listener.args``, and its declared/undeclared
    identifiers are merged into *listener*.
    """
    def __init__(self, listener, code_factory, **exception_kwargs):
        self.listener = listener
        self.exception_kwargs = exception_kwargs
        self.code_factory = code_factory
    def visit_Tuple(self, node):
        for n in node.elts:
            p = self.code_factory(n, **self.exception_kwargs)
            self.listener.codeargs.append(p)
            self.listener.args.append(ExpressionGenerator(n).value())
            self.listener.declared_identifiers = \
                self.listener.declared_identifiers.union(
                    p.declared_identifiers)
            self.listener.undeclared_identifiers = \
                self.listener.undeclared_identifiers.union(
                    p.undeclared_identifiers)
class ParseFunc(_ast_util.NodeVisitor):
    """Visitor that copies a FunctionDef's signature onto *listener*.

    Records the function name, positional/keyword-only argument names,
    defaults, and the *args/**kwargs parameter names, handling the
    py2/py3 AST differences via compat.
    """
    def __init__(self, listener, **exception_kwargs):
        self.listener = listener
        self.exception_kwargs = exception_kwargs
    def visit_FunctionDef(self, node):
        self.listener.funcname = node.name
        argnames = [arg_id(arg) for arg in node.args.args]
        if node.args.vararg:
            argnames.append(arg_stringname(node.args.vararg))
        if compat.py2k:
            # kw-only args don't exist in Python 2
            kwargnames = []
        else:
            kwargnames = [arg_id(arg) for arg in node.args.kwonlyargs]
        if node.args.kwarg:
            kwargnames.append(arg_stringname(node.args.kwarg))
        self.listener.argnames = argnames
        self.listener.defaults = node.args.defaults # ast
        self.listener.kwargnames = kwargnames
        if compat.py2k:
            self.listener.kwdefaults = []
        else:
            self.listener.kwdefaults = node.args.kw_defaults
        self.listener.varargs = node.args.vararg
        self.listener.kwargs = node.args.kwarg
class ExpressionGenerator(object):
    """Render an AST expression node back into Python source text."""

    def __init__(self, astnode):
        # SourceGenerator walks the node once at construction time,
        # accumulating output fragments
        gen = _ast_util.SourceGenerator(' ' * 4)
        gen.visit(astnode)
        self.generator = gen

    def value(self):
        """Return the accumulated source text as a single string."""
        return ''.join(self.generator.result)
| mit |
M4sse/chromium.src | tools/telemetry/telemetry/core/platform/profiler/tcpdump_profiler.py | 48 | 4160 | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import signal
import subprocess
import sys
import tempfile
from telemetry.core.platform import profiler
from telemetry.core.platform.profiler import android_prebuilt_profiler_helper
# Options common to every tcpdump invocation: capture on all interfaces
# ('-i any'), don't switch them to promiscuous mode ('-p'), capture full
# packets ('-s 0'), and write to the file name appended after '-w'.
_TCP_DUMP_BASE_OPTS = ['-i', 'any', '-p', '-s', '0', '-w']
class _TCPDumpProfilerAndroid(object):
  """An internal class to collect TCP dumps on android.

  This profiler uses pre-built binaries from AOSP.
  See more details in prebuilt/android/README.txt.
  """

  # On-device capture path; pulled back to the host in CollectProfile().
  _DEVICE_DUMP_FILE = '/sdcard/tcpdump_profiles/capture.pcap'

  def __init__(self, adb, output_path):
    self._adb = adb
    self._output_path = output_path
    # Ensure the destination directory exists before tcpdump tries to
    # open the capture file on the device.
    self._adb.RunShellCommand('mkdir -p ' +
                              os.path.dirname(self._DEVICE_DUMP_FILE))
    # Start tcpdump on the device through 'adb shell'; it keeps running
    # until explicitly killed in CollectProfile().
    self._proc = subprocess.Popen(
        ['adb', '-s', self._adb.device_serial(),
         'shell', android_prebuilt_profiler_helper.GetDevicePath('tcpdump')] +
        _TCP_DUMP_BASE_OPTS +
        [self._DEVICE_DUMP_FILE])

  def CollectProfile(self):
    """Stops the on-device tcpdump and pulls the capture to the host.

    Returns:
      The host-side path of the pulled .pcap file.
    Raises:
      Exception: if no running tcpdump process is found on the device.
    """
    tcpdump_pid = self._adb.ExtractPid('tcpdump')
    if not tcpdump_pid or not tcpdump_pid[0]:
      raise Exception('Unable to find TCPDump. Check your device is rooted '
                      'and tcpdump is installed at ' +
                      android_prebuilt_profiler_helper.GetDevicePath('tcpdump'))
    # Stop the device-side tcpdump, then the local 'adb shell' wrapper.
    self._adb.RunShellCommand('kill -term ' + tcpdump_pid[0])
    self._proc.terminate()
    host_dump = os.path.join(self._output_path,
                             os.path.basename(self._DEVICE_DUMP_FILE))
    self._adb.device().old_interface.Adb().Pull(self._DEVICE_DUMP_FILE,
                                                host_dump)
    print 'TCP dump available at: %s ' % host_dump
    print 'Use Wireshark to open it.'
    return host_dump
class _TCPDumpProfilerLinux(object):
  """An internal class to collect TCP dumps on linux desktop."""

  # Capture file name, created inside output_path.
  _DUMP_FILE = 'capture.pcap'

  def __init__(self, output_path):
    if not os.path.exists(output_path):
      os.makedirs(output_path)
    self._dump_file = os.path.join(output_path, self._DUMP_FILE)
    # Unbuffered temp file that receives tcpdump's stdout+stderr; kept
    # around so the output can be reported if tcpdump exits non-zero.
    self._tmp_output_file = tempfile.NamedTemporaryFile('w', 0)
    try:
      self._proc = subprocess.Popen(
          ['tcpdump'] + _TCP_DUMP_BASE_OPTS + [self._dump_file],
          stdout=self._tmp_output_file, stderr=subprocess.STDOUT)
    except OSError as e:
      raise Exception('Unable to execute TCPDump, please check your '
                      'installation. ' + str(e))

  def CollectProfile(self):
    """Stops tcpdump (SIGINT, as if interrupted by Ctrl-C) and returns
    the capture file path.

    Raises:
      Exception: with tcpdump's captured output, if it exited non-zero.
    """
    self._proc.send_signal(signal.SIGINT)
    exit_code = self._proc.wait()
    try:
      if exit_code:
        raise Exception(
            'tcpdump failed with exit code %d. Output:\n%s' %
            (exit_code, self._GetStdOut()))
    finally:
      # Close (and thereby delete) the temp output file either way.
      self._tmp_output_file.close()
    print 'TCP dump available at: ', self._dump_file
    print 'Use Wireshark to open it.'
    return self._dump_file

  def _GetStdOut(self):
    # Best effort: flush pending output and re-open the temp file by
    # name; returns '' if the file cannot be read.
    self._tmp_output_file.flush()
    try:
      with open(self._tmp_output_file.name) as f:
        return f.read()
    except IOError:
      return ''
class TCPDumpProfiler(profiler.Profiler):
  """A Factory to instantiate the platform-specific profiler."""

  def __init__(self, browser_backend, platform_backend, output_path, state):
    super(TCPDumpProfiler, self).__init__(
        browser_backend, platform_backend, output_path, state)
    if platform_backend.GetOSName() == 'android':
      # Push the prebuilt tcpdump binary to the device before starting
      # the android profiler, which runs it via 'adb shell'.
      android_prebuilt_profiler_helper.InstallOnDevice(
          browser_backend.adb.device(), 'tcpdump')
      self._platform_profiler = _TCPDumpProfilerAndroid(
          browser_backend.adb, output_path)
    else:
      self._platform_profiler = _TCPDumpProfilerLinux(output_path)

  @classmethod
  def name(cls):
    # Identifier used to select this profiler from the command line.
    return 'tcpdump'

  @classmethod
  def is_supported(cls, browser_type):
    # Supported on desktop Linux and Android; explicitly not on ChromeOS.
    if browser_type.startswith('cros'):
      return False
    if sys.platform.startswith('linux'):
      return True
    return browser_type.startswith('android')

  def CollectProfile(self):
    """Delegates to the platform-specific profiler; returns the dump path."""
    return self._platform_profiler.CollectProfile()
| bsd-3-clause |
mbr/simplekv | tests/idgens.py | 3 | 4248 | import os
import re
import tempfile
import uuid
from simplekv.idgen import UUIDDecorator, HashDecorator
from simplekv._compat import text_type
import pytest
# Matches the canonical lowercase 8-4-4-4-12 hex form produced by
# str(uuid.uuid4()).
UUID_REGEXP = re.compile(
    r'^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$'
)
class IDGen(object):
    """Shared fixtures for the id-generator decorator test mixins."""

    # Key templates exercised against the decorators; '{}' marks where
    # the generated id gets substituted.
    _TEMPLATES = [
        u'constant',
        u'foo{}bar',
        u'{}.jpeg',
        u'prefix-{}.hello',
        u'justprefix{}',
    ]

    @pytest.fixture(params=_TEMPLATES)
    def idgen_template(self, request):
        return request.param
class UUIDGen(IDGen):
    """Checks that UUIDDecorator-generated keys are well-formed UUIDs."""

    @pytest.fixture
    def uuidstore(self, store):
        # store wrapped so that put()/put_file() invent UUID keys
        return UUIDDecorator(store)

    @pytest.fixture
    def templated_uuidstore(self, store, idgen_template):
        return UUIDDecorator(store, idgen_template)

    def test_put_generates_uuid_form(self, uuidstore, value):
        assert UUID_REGEXP.match(uuidstore.put(None, value))

    def test_put_file_generates_uuid_form(self, uuidstore):
        assert UUID_REGEXP.match(
            uuidstore.put_file(None, open('/dev/null', 'rb')))
        tmp = tempfile.NamedTemporaryFile(delete=False)
        try:
            tmp.close()
            # filename variant of put_file must also yield a UUID key
            assert UUID_REGEXP.match(uuidstore.put_file(None, tmp.name))
        finally:
            if os.path.exists(tmp.name):
                os.unlink(tmp.name)

    def test_put_generates_valid_uuid(self, uuidstore, value):
        # uuid.UUID raises ValueError on malformed hex input
        uuid.UUID(hex=uuidstore.put(None, value))

    def test_put_file_generates_valid_uuid(self, uuidstore):
        uuid.UUID(hex=uuidstore.put_file(None, open('/dev/null', 'rb')))
        tmp = tempfile.NamedTemporaryFile(delete=False)
        try:
            tmp.close()
            uuid.UUID(hex=uuidstore.put_file(None, tmp.name))
        finally:
            if os.path.exists(tmp.name):
                os.unlink(tmp.name)

    def test_templates_work(self, templated_uuidstore, value, idgen_template):
        # a templated key is decorated text, no longer a bare UUID
        assert not UUID_REGEXP.match(templated_uuidstore.put(None, value))
class HashGen(IDGen):
    """Checks that HashDecorator keys equal the hexdigest of the value."""

    @pytest.fixture
    def hashstore(self, store, hashfunc):
        return HashDecorator(store, hashfunc)

    @pytest.fixture
    def templated_hashstore(self, store, hashfunc, idgen_template):
        return HashDecorator(store, hashfunc, idgen_template)

    @pytest.fixture
    def validate_hash(self, hashfunc):
        # two lowercase hex characters per digest byte
        n_hex_chars = hashfunc().digest_size * 2
        return re.compile(r'^[0-9a-f]{{{}}}$'.format(n_hex_chars)).match

    @pytest.fixture
    def value_hash(self, hashfunc, value):
        return hashfunc(value).hexdigest()

    def test_put_generates_valid_form(self, hashstore, validate_hash, value):
        assert validate_hash(hashstore.put(None, value))

    def test_put_file_generates_valid_form(self, hashstore, validate_hash):
        assert validate_hash(hashstore.put_file(None, open('/dev/null', 'rb')))
        # this is not correct according to our interface
        # /dev/null cannot be claimed by the hashstore
        # key2 = hashstore.put_file(None, '/dev/null')
        # assert validate_hash(key2)

    def test_put_generates_correct_hash(self, hashstore, value_hash, value):
        key = hashstore.put(None, value)
        assert key == value_hash
        assert isinstance(key, text_type)

    def test_put_file_generates_correct_hash(
        self, hashstore, value_hash, value
    ):
        tmp = tempfile.NamedTemporaryFile(delete=False)
        try:
            tmp.write(value)
            tmp.close()
            # both the file-object and the filename variants of put_file
            # must key on the content hash
            with open(tmp.name, 'rb') as f:
                assert hashstore.put_file(None, f) == value_hash
            assert hashstore.put_file(None, tmp.name) == value_hash
        finally:
            if os.path.exists(tmp.name):
                os.unlink(tmp.name)

    def test_templates_work(
        self, templated_hashstore, value, idgen_template, value_hash
    ):
        assert templated_hashstore.put(None, value) == \
            idgen_template.format(value_hash)
| mit |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.