| repo_name | ref | path | copies | content |
|---|---|---|---|---|
andreas-andrade/outputty | refs/heads/develop | outputty/plugin_mysql.py | 2 |
#!/usr/bin/env python
# coding: utf-8
import datetime
from unicodedata import normalize
import MySQLdb
MYSQL_TYPE = {str: 'TEXT', int: 'INT', float: 'FLOAT', datetime.date: 'DATE',
datetime.datetime: 'DATETIME'}
MYSQLDB_TYPE = {getattr(MySQLdb.FIELD_TYPE, x): x \
for x in dir(MySQLdb.FIELD_TYPE) if not x.startswith('_')}
MYSQLDB_TO_PYTHON = {'ENUM': str,
'STRING': str,
'VAR_STRING': str,
'BLOB': bytes,
'LONG_BLOB': bytes,
'MEDIUM_BLOB': bytes,
'TINY_BLOB': bytes,
'DECIMAL': float,
'DOUBLE': float,
'FLOAT': float,
'INT24': int,
'LONG': int,
'LONGLONG': int,
'TINY': int,
'YEAR': int,
'DATE': datetime.date,
'NEWDATE': datetime.date,
'TIME': int,
'TIMESTAMP': int,
'DATETIME': datetime.datetime}
def slug(text, encoding=None, separator='_',
permitted_chars='ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789_',
replace_with_separator=[' ', '-', '_']):
if isinstance(text, str):
text = text.decode(encoding or 'ascii')
clean_text = text.strip()
for char in replace_with_separator:
clean_text = clean_text.replace(char, separator)
double_separator = separator + separator
while double_separator in clean_text:
clean_text = clean_text.replace(double_separator, separator)
ascii_text = normalize('NFKD', clean_text).encode('ascii', 'ignore')
strict_text = [x for x in ascii_text if x in permitted_chars]
return ''.join(strict_text)
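# Quick sanity examples for slug() (derived from the logic above):
#   slug('Date of Birth') -> 'Date_of_Birth'
#   slug(u'Ação')         -> 'Acao'  (NFKD decomposition strips the accents)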
def _get_mysql_config(connection_str):
colon_index = connection_str.index(':')
at_index = connection_str.index('@')
slash_index = connection_str.index('/')
config = {}
config['user'] = connection_str[:colon_index]
config['passwd'] = connection_str[colon_index + 1:at_index]
config['host'] = connection_str[at_index + 1:slash_index]
config['port'] = 3306
if ':' in config['host']:
data = config['host'].split(':')
config['host'] = data[0]
config['port'] = int(data[1])
if connection_str.count('/') == 1:
table_name = None
config['db'] = connection_str[slash_index + 1:]
else:
second_slash_index = connection_str.index('/', slash_index + 1)
config['db'] = connection_str[slash_index + 1:second_slash_index]
table_name = connection_str[second_slash_index + 1:]
return config, table_name
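# Connection strings have the form user:password@host[:port]/database[/table].
# For example, _get_mysql_config('root:s3cret@localhost:3306/mydb/people')
# returns ({'user': 'root', 'passwd': 's3cret', 'host': 'localhost',
# 'port': 3306, 'db': 'mydb'}, 'people').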
def _connect_to_mysql(config):
return MySQLdb.connect(**config)
def read(table, connection_string, limit=None, order_by=None, query=''):
config, table_name = _get_mysql_config(connection_string)
connection = _connect_to_mysql(config)
cursor = connection.cursor()
if query:
sql = query
else:
        sql = 'SELECT * FROM ' + table_name
        if order_by is not None:
            sql += ' ORDER BY ' + order_by
        # ORDER BY must come before LIMIT in MySQL syntax
        if limit is not None:
            sql += ' LIMIT {0[0]}, {0[1]}'.format(limit)
cursor.execute(sql)
column_info = [(x[0], x[1]) for x in cursor.description]
table.headers = [x[0] for x in cursor.description]
table.types = {name: MYSQLDB_TO_PYTHON[MYSQLDB_TYPE[type_]] \
for name, type_ in column_info}
table._rows = [list(row) for row in cursor.fetchall()]
encoding = connection.character_set_name()
for row_index, row in enumerate(table):
for column_index, value in enumerate(row):
if type(value) is str:
table[row_index][column_index] = value.decode(encoding)
cursor.close()
connection.close()
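# Typical use (assuming outputty's Table, which provides headers/types/_rows):
#     table = Table()
#     read(table, 'user:pass@localhost/mydb/people', limit=(0, 10))
# A custom `query` replaces the generated SELECT, including ORDER BY and LIMIT.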
def write(table, connection_string, encoding=None):
config, table_name = _get_mysql_config(connection_string)
connection = _connect_to_mysql(config)
if encoding is None:
db_encoding = connection.character_set_name()
else:
db_encoding = encoding
escape_string = connection.escape_string
# Create table
table._identify_type_of_data()
columns_and_types = []
slug_headers = []
for header in table.headers:
slug_header = slug(header)
slug_headers.append(slug_header)
mysql_type = MYSQL_TYPE[table.types[header]]
columns_and_types.append(slug_header + ' ' + mysql_type)
table_cols = ', '.join(columns_and_types)
sql = 'CREATE TABLE IF NOT EXISTS {} ({})'.format(table_name, table_cols)
connection.query(sql)
# Insert items
columns = ', '.join(slug_headers)
for row in table:
values = []
for index, value in enumerate(row):
if value is None:
value = 'NULL'
else:
value = escape_string(unicode(value).encode(db_encoding))
value = '"' + value + '"'
values.append(value)
sql = 'INSERT INTO {} ({}) VALUES ('.format(table_name, columns)
sql = sql.encode(db_encoding)
sql += ', '.join(values)
sql += ')'
connection.query(sql)
connection.commit()
connection.close()
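# Writing goes the other way (again assuming outputty's Table): populate a
# Table, then write(table, 'user:pass@localhost/mydb/people') infers column
# types, slugs the headers into column names, creates the table if needed and
# inserts every row with values escaped through MySQLdb's escape_string.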
|
abramhindle/UnnaturalCodeFork | refs/heads/master | python/testdata/launchpad/lib/lp/services/identity/model/emailaddress.py | 1 |
# Copyright 2009-2012 Canonical Ltd. This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).
__metaclass__ = type
__all__ = [
'EmailAddress',
'EmailAddressSet',
'HasOwnerMixin',
'UndeletableEmailAddress',
]
import hashlib
import operator
from sqlobject import (
ForeignKey,
StringCol,
)
from zope.interface import implements
from lp.app.validators.email import valid_email
from lp.services.database.enumcol import EnumCol
from lp.services.database.sqlbase import (
quote,
SQLBase,
sqlvalues,
)
from lp.services.identity.interfaces.emailaddress import (
EmailAddressAlreadyTaken,
EmailAddressStatus,
IEmailAddress,
IEmailAddressSet,
InvalidEmailAddress,
)
class HasOwnerMixin:
"""A mixing providing an 'owner' property which returns self.person.
This is to be used on content classes who want to provide IHasOwner but
have the owner stored in an attribute named 'person' rather than 'owner'.
"""
owner = property(operator.attrgetter('person'))
class EmailAddress(SQLBase, HasOwnerMixin):
implements(IEmailAddress)
_table = 'EmailAddress'
_defaultOrder = ['email']
email = StringCol(
dbName='email', notNull=True, unique=True, alternateID=True)
status = EnumCol(dbName='status', schema=EmailAddressStatus, notNull=True)
person = ForeignKey(dbName='person', foreignKey='Person', notNull=False)
def __repr__(self):
return '<EmailAddress at 0x%x <%s> [%s]>' % (
id(self), self.email, self.status)
def destroySelf(self):
"""See `IEmailAddress`."""
# Import this here to avoid circular references.
from lp.registry.interfaces.mailinglist import MailingListStatus
from lp.registry.model.mailinglist import (
MailingListSubscription)
if self.status == EmailAddressStatus.PREFERRED:
raise UndeletableEmailAddress(
"This is a person's preferred email, so it can't be deleted.")
mailing_list = self.person and self.person.mailing_list
if (mailing_list is not None
and mailing_list.status != MailingListStatus.PURGED
and mailing_list.address == self.email):
raise UndeletableEmailAddress(
"This is the email address of a team's mailing list, so it "
"can't be deleted.")
# XXX 2009-05-04 jamesh bug=371567: This function should not
# be responsible for removing subscriptions, since the SSO
# server can't write to that table.
for subscription in MailingListSubscription.selectBy(
email_address=self):
subscription.destroySelf()
super(EmailAddress, self).destroySelf()
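    # The SHA-1 below matches the FOAF mbox_sha1sum convention: the digest of
    # the full 'mailto:' URI, rendered as upper-case hex.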
@property
def rdf_sha1(self):
"""See `IEmailAddress`."""
return hashlib.sha1('mailto:' + self.email).hexdigest().upper()
class EmailAddressSet:
implements(IEmailAddressSet)
def getByPerson(self, person):
"""See `IEmailAddressSet`."""
return EmailAddress.selectBy(person=person, orderBy='email')
def getPreferredEmailForPeople(self, people):
"""See `IEmailAddressSet`."""
return EmailAddress.select("""
EmailAddress.status = %s AND
EmailAddress.person IN %s
""" % sqlvalues(EmailAddressStatus.PREFERRED,
[person.id for person in people]))
def getByEmail(self, email):
"""See `IEmailAddressSet`."""
return EmailAddress.selectOne(
"lower(email) = %s" % quote(email.strip().lower()))
def new(self, email, person=None, status=EmailAddressStatus.NEW):
"""See IEmailAddressSet."""
email = email.strip()
if not valid_email(email):
raise InvalidEmailAddress(
"%s is not a valid email address." % email)
if self.getByEmail(email) is not None:
raise EmailAddressAlreadyTaken(
"The email address '%s' is already registered." % email)
assert status in EmailAddressStatus.items
assert person
return EmailAddress(
email=email,
status=status,
person=person)
class UndeletableEmailAddress(Exception):
"""User attempted to delete an email address which can't be deleted."""
|
ttair/TuxSinbad | refs/heads/master | Tools/BlenderExport/ogrepkg/gui.py | 28 |
"""Graphical user interface system.
Widget properties are classified as mutable or static.
Mutable properties have a clear separation between model and view.
@author Michael Reimpell
"""
# Copyright (C) 2005 Michael Reimpell
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
# epydoc doc format
__docformat__ = "javadoc en"
import os
import Blender
from Blender import Draw
from Blender.BGL import *
import base
from base import *
class Action:
"""Action interface.
Actions encapsulate user requests.
"""
def __init__(self):
"""Constructor.
"""
return
def execute(self):
"""Executes the action.
"""
return
class QuitAction(Action):
"""Quits the windowing system.
"""
def execute(self):
Blender.Draw.Exit()
return
class Size:
"""Size hints.
@cvar INFINITY Infinity value for size hints.
"""
INFINITY = 2147483647
def __init__(self, preferredSize=None, minimumSize=None, maximumSize=None):
"""Constructor.
A size hint is a list of integers <code>[width, height]</code>.
@param preferredSize Default <code>[0,0]</code>.
@param minimumSize Default <code>[0,0]</code>.
@param maximumSize Default <code>[Size.INFINITY, Size.INFINITY]</code>.
"""
self.preferredSize = preferredSize or [0, 0]
if minimumSize:
self.minimumSize = minimumSize
elif ((self.preferredSize[0] < Size.INFINITY) and (self.preferredSize[1] < Size.INFINITY)):
self.minimumSize = self.preferredSize[:]
else:
self.minimumSize = [0, 0]
if preferredSize:
self.maximumSize = maximumSize or self.preferredSize[:]
else:
self.maximumSize = maximumSize or [Size.INFINITY, Size.INFINITY]
return
def getPreferredSize(self):
return self.preferredSize
def getMinimumSize(self):
return self.minimumSize
def getMaximumSize(self):
return self.maximumSize
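# Examples of the defaulting rules above (all size hints are [width, height]):
#   Size([100, 20])           -> min == pref == max == [100, 20] (fixed size)
#   Size([100, 20], [50, 20]) -> min [50, 20], pref/max [100, 20]
#   Size()                    -> min/pref [0, 0], max [INFINITY, INFINITY]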
class Widget:
"""Widget interface.
"""
def __init__(self, parent, size = Size()):
"""Constructor.
Overwrite the constructor to get event numbers for all used
actions via a call to <code>parent._addButtonAction()</code>.
@param name The widget name must be unique.
@param size Size hints.
"""
self.parent = parent
self.size = size
self.parent._addWidget(self)
return
def draw(self, screenRectangle):
"""Draws the widget into an area of the screen.
@param screenRectangle Area of the screen to draw into.
The screenRectangle is a list of the integers
<code>[xl, yl, xu, yu]</code>, where (xl,yl) is
the lower left corner of the area and (xu, yu) is
the upper right corner of the area.
"""
return
def eventFilter(self, event, value):
"""Called from event callback function.
@see Blender.Draw.Register
"""
return
def getSize(self):
"""Size hints of the widget.
@return Size object.
"""
return self.size
def resize(self, size=None):
"""Resizes the widget.
@param size New widget size or <code>None</code> to
inform about resize of child widgets.
"""
if size:
self.size = size
self.parent.resize()
return
def removeFromParent(self):
"""Remove this widget from parent widget.
        Remove a widget from its parent before deleting it. Override
this to also remove all button actions separately with a call
to <code>self.parent._removeButtonAction()</code>. This is not
done in the destructor as Python's garbage collector does not
guarantee to delete objects.
"""
self.parent._removeWidget(self)
return
def _addWidget(self, widget):
"""Adds a child widget.
@param widget Child widget to add.
"""
raise NotImplementedError
return
def _removeWidget(self, widget):
"""Removes a child widget.
@param widget Child widget to remove.
"""
raise NotImplementedError
return
def _addButtonAction(self, action):
"""Registers an action for a button event.
@param action Action to execute on receive of the returned button event number.
@return eventNumber Event number to use for the button that corresponds to that action.
"""
return self.parent._addButtonAction(action)
def _removeButtonAction(self, eventNumber):
"""Action for the given event number will no longer be called.
@param eventNumber Event number for the action.
"""
self.parent._removeButtonAction(eventNumber)
return
class Spacer(Widget):
"""Occupies blank space on the screen.
"""
def __init__(self, parent, size):
Widget.__init__(self, parent, size)
return
class Decorator(Widget):
"""Decorates a child widget.
A decorator does not have a name on its own. It adopts the name
of its child widget.
"""
def __init__(self, parent):
self.childWidget = None
Widget.__init__(self, parent)
return
def draw(self, screenRectangle):
self.childWidget.draw(screenRectangle)
return
def eventFilter(self, event, value):
self.childWidget.eventFilter(event, value)
return
def getSize(self):
if self.childWidget:
size = self.childWidget.getSize()
else:
# no child widget yet
size = Size()
return size
def resize(self, size=None):
if size:
# pass resize request to the child
self.childWidget.resize(size)
else:
# pass child resize notification to the parent
self.parent.resize()
return
def _addWidget(self, widget):
self.childWidget = widget
self.parent.resize()
return
class Activator(Decorator):
"""Enables and disables child widget.
"""
def __init__(self, parent, enabled=1):
Decorator.__init__(self, parent)
self.enabled = enabled
def eventFilter(self, event, value):
if self.enabled:
self.childWidget.eventFilter(event, value)
return
def draw(self, screenRectangle):
if self.enabled:
self.childWidget.draw(screenRectangle)
return
def setEnabled(self, enabled):
self.enabled = enabled
return
def isEnabled(self):
return self.enabled
class ValueModel(Model):
"""Model with a value of arbitrary type.
"""
def __init__(self, value):
Model.__init__(self)
self.value = None
self.setValue(value)
return
def setValue(self, value):
self.value = value
self._notify()
return
def getValue(self):
return self.value
class T(ValueModel):
"""Short name for ValueModel.
@see ValueModel
"""
class BoundedValueModel(ValueModel):
def __init__(self, minimum=0, maximum=0, initial=0):
self.minimum = minimum
self.maximum = maximum
ValueModel.__init__(self, initial)
return
def getMinimum(self):
return self.minimum
def getMaximum(self):
return self.maximum
def setValue(self, value):
if (value != self.value):
if value < self.minimum:
self.value = self.minimum
elif value > self.maximum:
self.value = self.maximum
else:
self.value = value
self._notify()
return
class BoundedRangeModel(BoundedValueModel):
"""Model for a bounded range.
minimum <= value <= value + extend <= maximum
"""
def __init__(self, minimum=0, initial=0, extend=0, maximum=0):
self.extend = 0
BoundedValueModel.__init__(self, minimum, maximum, initial)
self.setExtend(extend)
return
def setMaximum(self, maximum, silent=0):
if (maximum != self.maximum):
self.maximum = maximum
if self.value > self.maximum:
self.value = self.maximum
if ((self.value + self.extend) > self.maximum):
self.extend = self.maximum - self.value
if not silent:
self._notify()
return
def setValue(self, value, silent=0):
if (value != self.value):
if value < self.minimum:
self.value = self.minimum
elif value > self.maximum:
self.value = self.maximum
else:
self.value = value
if ((self.value + self.extend) > self.maximum):
# minimum <= value <= maximum
# ==> maximum - value >= 0
self.extend = self.maximum - self.value
if not silent:
self._notify()
return
def getExtend(self):
return self.extend
def setExtend(self, extend, silent=0):
"""
@param extend positive integer.
"""
if (extend != self.extend):
if ((self.value + extend) > self.maximum):
self.extend = self.maximum - self.value
else:
self.extend = extend
if not silent:
self._notify()
return
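# Clamping example for the invariant minimum <= value <= value + extend <= maximum:
#   m = BoundedRangeModel(minimum=0, initial=0, extend=2, maximum=10)
#   m.setValue(9)   # value becomes 9, extend shrinks to 1 so 9 + 1 <= 10
#   m.setValue(42)  # value clamps to the maximum 10, extend drops to 0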
class BasenameModel(ValueModel):
"""Ensure string is a valid file name.
"""
def __init__(self, basename):
ValueModel.__init__(self, self._ensureIsBasename(basename))
return
def setValue(self, basename):
ValueModel.setValue(self, self._ensureIsBasename(basename))
return
def _ensureIsBasename(self, basename):
return os.path.basename(basename)
class DirnameModel(ValueModel):
"""Ensure string is a valid directory name.
"""
def __init__(self, dirname):
ValueModel.__init__(self, self._ensureIsDirname(dirname))
return
def setValue(self, dirname):
ValueModel.setValue(self, self._ensureIsDirname(dirname))
return
def _ensureIsDirname(self, dirname):
if os.path.isdir(dirname) and os.path.exists(dirname):
            # remove possible trailing separators
name = os.path.dirname(dirname + os.sep)
else:
name = os.path.dirname(dirname)
return name
class RedrawView(View):
def __init__(self, model):
View.__init__(self, model)
return
def update(self):
Blender.Draw.Redraw(1)
return
class ActionWidget(Widget):
"""Widget with single action and tooltip.
"""
def __init__(self, parent, size=Size(), action=Action(), tooltip=None):
"""Constructor.
@param tooltip Optional widget tooltip string ValueModel.
"""
Widget.__init__(self, parent, size)
self.event = self.parent._addButtonAction(action)
self.tooltip = tooltip
if tooltip:
RedrawView(self.tooltip)
return
def removeFromParent(self):
self.parent._removeButtonAction(self.event)
Widget.removeFromParent(self)
return
class ActionTitleWidget(ActionWidget):
"""Widget with single action, title and tooltip.
"""
def __init__(self, parent, size=Size(), action=Action(), title=ValueModel(''), tooltip=None):
"""Constructor.
@param title Widget title string ValueModel.
@param tooltip Optional widget tooltip string ValueModel.
"""
ActionWidget.__init__(self, parent, size, action, tooltip)
self.title = title
RedrawView(self.title)
return
class Button(ActionTitleWidget):
"""Push button.
"""
def __init__(self, parent, size, action, title, tooltip=None):
"""Constructor.
@param action Action to execute when the button is pushed.
"""
ActionTitleWidget.__init__(self, parent, size, action, title, tooltip)
return
def draw(self, rect):
if self.tooltip:
Blender.Draw.PushButton(self.title.getValue(), self.event, rect[0], rect[1], rect[2]-rect[0]-1, rect[3]-rect[1]-1, self.tooltip.getValue())
else:
Blender.Draw.PushButton(self.title.getValue(), self.event, rect[0], rect[1], rect[2]-rect[0]-1, rect[3]-rect[1]-1)
return
class ActionView(View):
"""View with single model and action.
"""
def __init__(self, model):
View.__init__(self, model)
self.valueButton = Blender.Draw.Create(self.model.getValue())
return
def update(self):
Blender.Draw.Redraw(1)
return
class ViewAction(Action):
def __init__(self, view):
self.view = view
return
def execute(self):
self.view.model.setValue(self.view.valueButton.val)
return
class StringView(ActionTitleWidget, ActionView):
def __init__(self, parent, size, model, title=ValueModel(''), tooltip=None):
"""Constructor.
@param model String ValueModel.
"""
ActionView.__init__(self, model)
ActionTitleWidget.__init__(self, parent, size, StringView.ViewAction(self), title, tooltip)
return
def draw(self, rect):
if self.tooltip:
self.valueButton = Blender.Draw.String(self.title.getValue(), self.event, rect[0], rect[1], rect[2]-rect[0]-1, rect[3]-rect[1]-1, self.model.getValue(), 255, self.tooltip.getValue())
else:
self.valueButton = Blender.Draw.String(self.title.getValue(), self.event, rect[0], rect[1], rect[2]-rect[0]-1, rect[3]-rect[1]-1, self.model.getValue(), 255)
return
class ToggleModel(ValueModel):
"""Interface and default implementation for a toggle model.
The toggle value can be <code>True</code> or <code>False</code>.
"""
def toggle(self):
if self.getValue():
self.setValue(False)
else:
self.setValue(True)
return
class ToggleGroup(ValueModel):
"""An exclusive toggle group.
Only one toggle is selected at a time. Returns current active
ToggleModel as value.
"""
def __init__(self):
# key: ToggleModel, value: ToggleGroup.Toggle
self.toggleDict = {}
ValueModel.__init__(self, None)
return
def addToggle(self, model):
"""Adds a toggle to the toggle group.
@param model ToggleModel.
"""
self.toggleDict[model] = ToggleGroup.Toggle(self, model)
if (len(self.toggleDict) == 1):
# always one toggle selected
self.value = model
model.setValue(True)
self._notify()
elif model.getValue():
# enable toggle
self._toggle(model)
return
def removeToggle(self, model):
"""Removes a toggle from the toggle group.
If the removed toggle was the current active,
select the first toggle instead.
@param model ToggleModel.
"""
if model in self.toggleDict.keys():
# remove toggle from group
self.toggleDict[model].detachModel()
del self.toggleDict[model]
# update current selected
if (model == self.getValue()):
if (len(self.toggleDict) > 0):
self.toggleDict.keys()[0].toggle()
else:
self.value = None
self._notify()
else:
raise KeyError
return
def setValue(self, value):
"""Sets a toggle to <code>True</code>.
@param value Key of ToggleModel.
"""
# set value as current active
if (value in self.toggleDict.keys()):
self.value = value
self._notify()
elif value is None:
pass
else:
raise KeyError
return
def _toggle(self, model):
# if self.toggleDict.has_key(model):
if model.getValue():
## selected
if (self.value != model):
# deselect old
oldKey = self.value
self.setValue(model)
if self.toggleDict.has_key(oldKey):
oldKey.setValue(False)
elif (model == self.value):
## current selected deselected
# select again, as always one toggle is selected
model.setValue(True)
return
class Toggle(View):
def __init__(self, group, model):
View.__init__(self, model)
self.group = group
return
def update(self):
self.group._toggle(self.model)
return
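# ToggleGroup keeps exactly one toggle selected: adding the first ToggleModel
# selects it, selecting another one deselects the previous selection, and
# deselecting the current toggle immediately re-selects it (see _toggle above).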
class ToggleView(ActionTitleWidget, ActionView):
def __init__(self, parent, size, model, title=ValueModel(''), tooltip=None):
"""Constructor.
@param model ToggleModel.
"""
View.__init__(self, model)
ActionTitleWidget.__init__(self, parent, size, ToggleView.ViewAction(self), title, tooltip)
def draw(self, rect):
if self.tooltip:
Blender.Draw.Toggle(self.title.getValue(), self.event, rect[0], rect[1], rect[2]-rect[0]-1, rect[3]-rect[1]-1, self.model.getValue(), self.tooltip.getValue())
else:
Blender.Draw.Toggle(self.title.getValue(), self.event, rect[0], rect[1], rect[2]-rect[0]-1, rect[3]-rect[1]-1, self.model.getValue())
return
class ViewAction(Action):
def __init__(self, view):
self.view = view
return
def execute(self):
self.view.model.toggle()
return
class ScrollbarBase(Widget, RedrawView):
"""Scrollbar base class.
This class contains code common to VerticalScrollbar and HorizontalScrollbar.
Don't use this class directly, use VerticalScrollbar or HorizontalScrollbar instead.
"""
def __init__(self, parent, size, model):
"""Constructor.
@param size If l is the desired length of the smaller side, set the larger side to at least 2*l+5.
@param model BoundedRangeModel
"""
Widget.__init__(self, parent, size)
RedrawView.__init__(self, model)
# translate MOUSEX and MOUSEY coordinates into local ones
self.barRect = [0,0,0,0]
self.markerRect = [0,0,0,0]
self.mousePressed = 0
self.mouseFocusX = 0
self.mouseFocusY = 0
self.markerFocus = 0
self.mousePosition = 0
return
def _inc(self, amount=1):
"""limit maximum value to value + extend <= maximum
"""
value = self.model.getValue()
value += amount
if ((value + self.model.getExtend()) <= self.model.getMaximum()):
self.model.setValue(value)
else:
# set to maximum value
self.model.setValue(self.model.getMaximum() - self.model.getExtend())
return
def _dec(self, amount=1):
value = self.model.getValue()
value -= amount
if (self.model.getMinimum() <= value):
self.model.setValue(value)
else:
# set to minimum value
self.model.setValue(self.model.getMinimum())
return
def _addWidget(self, widget):
return
class IncAction(Action):
def __init__(self, scrollbar):
self.scrollbar = scrollbar
return
def execute(self):
self.scrollbar._inc()
return
class DecAction(Action):
def __init__(self, scrollbar):
self.scrollbar = scrollbar
return
def execute(self):
self.scrollbar._dec()
return
class VerticalScrollbar(ScrollbarBase):
"""Vertical scrollbar.
"""
def __init__(self, parent, size, model):
ScrollbarBase.__init__(self, parent, size, model)
self.incButton = Button(self, Size(), ScrollbarBase.IncAction(self), ValueModel("\\/"), ValueModel("Scroll down"))
self.decButton = Button(self, Size(), ScrollbarBase.DecAction(self), ValueModel("/\\"), ValueModel("Scroll up"))
return
def draw(self, rect):
# buttons
buttonSize = rect[2] - rect[0]
# \/
self.incButton.draw([rect[0], rect[1], rect[2], rect[1] + buttonSize])
# /\
self.decButton.draw([rect[0], rect[3]-buttonSize, rect[2], rect[3]])
# bar
# marker and bar are > 3x3 pix each as they have 1 pix border
self.barRect = [rect[0], rect[1] + buttonSize, rect[2], rect[3] - buttonSize]
Blender.BGL.glColor3f(0.13,0.13,0.13) # dark grey
Blender.BGL.glRectf(self.barRect[0], self.barRect[1], self.barRect[2], self.barRect[3])
Blender.BGL.glColor3f(0.78,0.78,0.78) # light grey
Blender.BGL.glRectf(self.barRect[0] + 1, self.barRect[1], self.barRect[2], self.barRect[3] - 1)
Blender.BGL.glColor3f(0.48,0.48,0.48) # grey
Blender.BGL.glRectf(self.barRect[0] + 1, self.barRect[1] + 1, self.barRect[2] - 1, self.barRect[3] - 1)
# marker
# calculate marker size
range = self.model.getMaximum() - self.model.getMinimum()
if range:
step = float(self.barRect[3] - self.barRect[1] - 2)/range
# relative positions
markerStart = step*(self.model.getValue() - self.model.getMinimum())
markerEnd = markerStart + step*self.model.getExtend()
else:
# relative positions
markerStart = 0.0
markerEnd = self.barRect[3] - self.barRect[1] - 2
if ((markerEnd - markerStart) < 3):
# minimal marker size
markerEnd = markerStart + 3
self.markerRect = [self.barRect[0] + 1, \
self.barRect[3] - 1 - markerEnd, \
self.barRect[2] - 1, \
self.barRect[3] - 1 - markerStart]
        # draw marker
Blender.BGL.glColor3f(0.78,0.78,0.78) # light grey
Blender.BGL.glRectf(self.markerRect[0], self.markerRect[1], self.markerRect[2], self.markerRect[3])
Blender.BGL.glColor3f(0.13,0.13,0.13) # dark grey
Blender.BGL.glRectf(self.markerRect[0] + 1, self.markerRect[1], self.markerRect[2], self.markerRect[3] - 1)
        # check if marker has focus
if (self.mouseFocusX and self.mouseFocusY and (self.mousePosition > self.markerRect[1]) and (self.mousePosition < self.markerRect[3])):
Blender.BGL.glColor3f(0.64,0.64,0.64) # marker focus grey
else:
Blender.BGL.glColor3f(0.60,0.60,0.60) # marker grey
Blender.BGL.glRectf(self.markerRect[0] + 1, self.markerRect[1] + 1, self.markerRect[2] - 1, self.markerRect[3] - 1)
return
def eventFilter(self, event, value):
if (value != 0):
# Mouse
if (event == Blender.Draw.MOUSEX):
mousePositionX = value - ScreenManager.getSingleton().getScissorRectangle()[0]
# check if mouse is inside bar
if ((mousePositionX >= self.barRect[0]) and (mousePositionX <= self.barRect[2])):
# redraw if marker got focus
if (not self.mouseFocusX) and self.mouseFocusY:
Blender.Draw.Redraw(1)
self.mouseFocusX = 1
else:
# redraw if marker lost focus
if self.mouseFocusX and self.mouseFocusY:
Blender.Draw.Redraw(1)
self.mouseFocusX = 0
elif (event == Blender.Draw.MOUSEY):
# relative mouse position
self.mousePosition = value - ScreenManager.getSingleton().getScissorRectangle()[1]
# check if mouse is inside bar
if ((self.mousePosition >= self.barRect[1]) and (self.mousePosition <= self.barRect[3])):
self.mouseFocusY = 1
if ((self.mousePosition > self.markerRect[1]) and (self.mousePosition < self.markerRect[3])):
# redraw if marker got focus
if self.mouseFocusX and (not self.markerFocus):
Blender.Draw.Redraw(1)
self.markerFocus = 1
else:
# redraw if marker lost focus
if self.mouseFocusX and self.markerFocus:
Blender.Draw.Redraw(1)
self.markerFocus = 0
# move marker
if (self.mousePressed == 1):
# calculate step from distance to marker
if (self.mousePosition > self.markerRect[3]):
self._dec(1)
Blender.Draw.Draw()
elif (self.mousePosition < self.markerRect[1]):
self._inc(1)
Blender.Draw.Draw()
else:
# redraw if marker lost focus
if self.mouseFocusX and self.markerFocus:
Blender.Draw.Redraw(1)
self.markerFocus = 0
self.mouseFocusY = 0
self.mousePressed = 0
elif ((event == Blender.Draw.LEFTMOUSE) and (self.mouseFocusX == 1) and (self.mouseFocusY == 1)):
self.mousePressed = 1
# move marker
if (self.mousePosition > self.markerRect[3]):
self._dec(1)
elif (self.mousePosition < self.markerRect[1]):
self._inc(1)
elif (event == Blender.Draw.WHEELUPMOUSE):
if self.mouseFocusX and self.mouseFocusY:
self._dec(1)
elif (event == Blender.Draw.WHEELDOWNMOUSE):
if self.mouseFocusX and self.mouseFocusY:
self._inc(1)
else: # released keys and buttons
if (event == Blender.Draw.LEFTMOUSE):
self.mousePressed = 0
return
class HorizontalScrollbar(ScrollbarBase):
"""Horizontal scrollbar.
"""
def __init__(self, parent, size, model):
ScrollbarBase.__init__(self, parent, size, model)
self.incButton = Button(self, Size(), ScrollbarBase.IncAction(self), ValueModel(">"), ValueModel("Scroll right"))
self.decButton = Button(self, Size(), ScrollbarBase.DecAction(self), ValueModel("<"), ValueModel("Scroll left"))
return
def draw(self, rect):
# buttons
buttonSize = rect[3] - rect[1]
# <
self.decButton.draw([rect[0], rect[1], rect[0] + buttonSize, rect[3]])
# >
self.incButton.draw([rect[2] - buttonSize, rect[1], rect[2], rect[3]])
# bar
# marker and bar are > 3x3 pix each as they have 1 pix border
        #TODO: off by one
self.barRect = [rect[0] + buttonSize, rect[1] - 1, rect[2] - buttonSize, rect[3] - 1]
Blender.BGL.glColor3f(0.13,0.13,0.13) # dark grey
Blender.BGL.glRectf(self.barRect[0], self.barRect[1], self.barRect[2], self.barRect[3])
Blender.BGL.glColor3f(0.78,0.78,0.78) # light grey
Blender.BGL.glRectf(self.barRect[0] + 1, self.barRect[1], self.barRect[2], self.barRect[3] - 1)
Blender.BGL.glColor3f(0.48,0.48,0.48) # grey
Blender.BGL.glRectf(self.barRect[0] + 1, self.barRect[1] + 1, self.barRect[2] - 1, self.barRect[3] - 1)
# marker
# calculate marker size
range = self.model.getMaximum() - self.model.getMinimum()
if range:
step = float(self.barRect[2] - self.barRect[0] - 2)/range
# relative positions
markerStart = step*(self.model.getValue() - self.model.getMinimum())
markerEnd = markerStart + step*self.model.getExtend()
else:
# relative positions
markerStart = 0.0
markerEnd = self.barRect[2] - self.barRect[0] - 2
if ((markerEnd - markerStart) < 3):
# minimal marker size
markerEnd = markerStart + 3
self.markerRect = [self.barRect[0] + 1 + markerStart, \
self.barRect[1] + 1, \
self.barRect[0] + markerEnd, \
self.barRect[3] - 1]
        # draw marker
Blender.BGL.glColor3f(0.78,0.78,0.78) # light grey
Blender.BGL.glRectf(self.markerRect[0], self.markerRect[1], self.markerRect[2], self.markerRect[3])
Blender.BGL.glColor3f(0.13,0.13,0.13) # dark grey
Blender.BGL.glRectf(self.markerRect[0] + 1, self.markerRect[1], self.markerRect[2], self.markerRect[3] - 1)
        # check if marker has focus
if (self.mouseFocusX and self.mouseFocusY and (self.mousePosition > self.markerRect[0]) and (self.mousePosition < self.markerRect[2])):
Blender.BGL.glColor3f(0.64,0.64,0.64) # marker focus grey
else:
Blender.BGL.glColor3f(0.60,0.60,0.60) # marker grey
Blender.BGL.glRectf(self.markerRect[0] + 1, self.markerRect[1] + 1, self.markerRect[2] - 1, self.markerRect[3] - 1)
return
def eventFilter(self, event, value):
if (value != 0):
# Mouse
if (event == Blender.Draw.MOUSEY):
mousePositionY = value - ScreenManager.getSingleton().getScissorRectangle()[1]
# check if mouse is inside bar
if ((mousePositionY >= self.barRect[1]) and (mousePositionY <= self.barRect[3])):
# redraw if marker got focus
if (not self.mouseFocusY) and self.mouseFocusX:
Blender.Draw.Redraw(1)
self.mouseFocusY = 1
else:
# redraw if marker lost focus
if self.mouseFocusX and self.mouseFocusY:
Blender.Draw.Redraw(1)
self.mouseFocusY = 0
elif (event == Blender.Draw.MOUSEX):
# relative mouse position
self.mousePosition = value - ScreenManager.getSingleton().getScissorRectangle()[0]
# check if mouse is inside bar
if ((self.mousePosition >= self.barRect[0]) and (self.mousePosition <= self.barRect[2])):
self.mouseFocusX = 1
if ((self.mousePosition > self.markerRect[0]) and (self.mousePosition < self.markerRect[2])):
# redraw if marker got focus
if (not self.markerFocus) and self.mouseFocusY:
Blender.Draw.Redraw(1)
self.markerFocus = 1
else:
# redraw if marker lost focus
if self.mouseFocusX and self.markerFocus:
Blender.Draw.Redraw(1)
self.markerFocus = 0
# move marker
if (self.mousePressed == 1):
# calculate step from distance to marker
if (self.mousePosition > self.markerRect[2]):
self._inc(1)
Blender.Draw.Draw()
elif (self.mousePosition < self.markerRect[0]):
self._dec(1)
Blender.Draw.Draw()
else:
# redraw if marker lost focus
if self.mouseFocusX and self.markerFocus:
Blender.Draw.Redraw(1)
self.markerFocus = 0
self.mouseFocusX = 0
self.mousePressed = 0
elif ((event == Blender.Draw.LEFTMOUSE) and (self.mouseFocusX == 1) and (self.mouseFocusY == 1)):
self.mousePressed = 1
# move marker
if (self.mousePosition > self.markerRect[2]):
self._inc(1)
elif (self.mousePosition < self.markerRect[0]):
self._dec(1)
elif (event == Blender.Draw.WHEELUPMOUSE):
if self.mouseFocusX and self.mouseFocusY:
self._dec(1)
elif (event == Blender.Draw.WHEELDOWNMOUSE):
if self.mouseFocusX and self.mouseFocusY:
self._inc(1)
else: # released keys and buttons
if (event == Blender.Draw.LEFTMOUSE):
self.mousePressed = 0
return
class NumberView(ActionTitleWidget, ActionView):
def __init__(self, parent, size, model, title=ValueModel(''), tooltip=None):
"""Constructor.
@param model BoundedValueModel.
"""
ActionView.__init__(self, model)
ActionTitleWidget.__init__(self, parent, size, StringView.ViewAction(self), title, tooltip)
return
def draw(self, rect):
if self.tooltip:
self.valueButton = Blender.Draw.Number(self.title.getValue(), self.event, \
rect[0], rect[1], rect[2]-rect[0]-1, rect[3]-rect[1]-1, \
self.model.getValue(), self.model.getMinimum(), self.model.getMaximum(), self.tooltip.getValue())
else:
self.valueButton = Blender.Draw.Number(self.title.getValue(), self.event, \
rect[0], rect[1], rect[2]-rect[0]-1, rect[3]-rect[1]-1, \
self.model.getValue(), self.model.getMinimum(), self.model.getMaximum())
return
class SliderView(ActionTitleWidget, ActionView):
def __init__(self, parent, size, model, title=ValueModel(''), tooltip=None):
"""Constructor.
@param model BoundedValueModel.
"""
ActionView.__init__(self, model)
ActionTitleWidget.__init__(self, parent, size, StringView.ViewAction(self), title, tooltip)
return
def draw(self, rect):
if self.tooltip:
self.valueButton = Blender.Draw.Slider(self.title.getValue(), self.event, \
rect[0], rect[1], rect[2]-rect[0]-1, rect[3]-rect[1]-1, \
self.model.getValue(), self.model.getMinimum(), self.model.getMaximum(), 1, self.tooltip.getValue())
else:
self.valueButton = Blender.Draw.Slider(self.title.getValue(), self.event, \
rect[0], rect[1], rect[2]-rect[0]-1, rect[3]-rect[1]-1, \
self.model.getValue(), self.model.getMinimum(), self.model.getMaximum(), 1)
return
class MenuItem(ValueModel):
def __init__(self, text, action=Action()):
"""Constructor.
@param text Item string.
@param action Action to execute on selection.
"""
ValueModel.__init__(self, text)
self.action = action
return
def select(self):
self.action.execute()
return
class MenuTitle(MenuItem):
def __init__(self, text):
MenuItem.__init__(self, text + " %t")
return
def setValue(self, value):
MenuItem.setValue(self, value + " %t")
return
class MenuSeparator(MenuItem):
def __init__(self):
MenuItem.__init__(self, " %l")
return
def setValue(self, value):
raise NotImplementedError
return
class Menu(ActionWidget):
"""Blender menu button.
"""
def __init__(self, parent, size, tooltip=None):
"""Constructor.
@param title Optional title ValueModel.
"""
ActionWidget.__init__(self, parent, size, Menu.SelectAction(self), tooltip)
# cached menu string
self.menuString = ''
# current selected item id
self.current = 0
# id management
# display order of ids in menu
# value: id
self.displayList = []
# key: id, value: MenuItem
self.itemDict = {}
self.valueButton = Blender.Draw.Create(0)
return
def update(self):
"""Update cached menu string.
"""
self.menuString = ''
for itemId in self.displayList:
self.menuString = self.menuString + self.itemDict[itemId].getValue() + " %x" + str(itemId) + "|"
return
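    # The cached string uses Blender's menu syntax: "%x<id>" tags each entry
    # with its button value and "|" separates entries, so two items render as
    # "Open %x1|Save %x2|" (MenuTitle appends " %t", MenuSeparator " %l").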
def insertItem(self, menuItem, position, setCurrent=False):
"""Inserts a menu item into a specific position of the display list.
"""
id = self._getId(menuItem, setCurrent)
self.displayList.insert(position, id)
# recreate menu string
self.update()
return id
def appendItem(self, menuItem, setCurrent=False):
"""Appends an item to the menu.
@param menuItem The MenuItem to add.
@param setCurrent Sets the item to be the current selected.
@return Item identifier.
"""
# get free id
id = self._getId(menuItem, setCurrent)
# append id
self.displayList.append(id)
# create menu string
self.menuString = self.menuString + menuItem.getValue() + " %x" + str(id) + "|"
return id
def removeItem(self, id):
if id in self.displayList:
# remove id
if self.current == id:
self.current = 0
del self.itemDict[id]
self.displayList.remove(id)
# recreate menu string
self.update()
return
def removeAll(self):
"""Removes all menu items.
"""
self.itemDict = {}
self.displayList = []
self.current = 0
self.menuString = ''
return
def draw(self, rect):
if self.tooltip:
self.valueButton = Blender.Draw.Menu(self.menuString, self.event, \
rect[0], rect[1], rect[2]-rect[0]-1, rect[3]-rect[1]-1, self.current, self.tooltip.getValue())
else:
self.valueButton = Blender.Draw.Menu(self.menuString, self.event, \
                rect[0], rect[1], rect[2]-rect[0]-1, rect[3]-rect[1]-1, self.current)
return
def _getId(self, menuItem, setCurrent):
"""Creates an id for the menuItem and optinally set it to current menu string.
"""
# get a free id (button value)
if (len(self.itemDict) == len(self.displayList)):
# Blender's button values start with 1
id = len(self.displayList) + 1
else:
# first unused
id = [(x+1) for x in range(len(self.displayList)) if (x+1) not in self.itemDict.keys()][0]
# assign menu item to that id
self.itemDict[id] = menuItem
# manage current state
if setCurrent:
self.current = id
return id
class SelectAction(Action):
def __init__(self, menu):
self.menu = menu
return
def execute(self):
if self.menu.valueButton.val in self.menu.displayList:
self.menu.itemDict[self.menu.valueButton.val].select()
self.menu.current = self.menu.valueButton.val
return
class Border(Decorator):
"""Fixed border around widgets.
"""
def __init__(self, parent, borderSize=10):
"""Constructor.
@param borderSize Size of the border.
"""
Decorator.__init__(self, parent)
self.borderSize = borderSize
return
def draw(self, screenRectangle):
rect = screenRectangle[:]
rect[0] += self.borderSize
rect[1] += self.borderSize
rect[2] -= self.borderSize
rect[3] -= self.borderSize
self.childWidget.draw(rect)
return
def getSize(self):
prefSize = self.childWidget.getSize().getPreferredSize()[:]
prefSize[0] += 2*self.borderSize
prefSize[1] += 2*self.borderSize
minSize = self.childWidget.getSize().getMinimumSize()[:]
minSize[0] += 2*self.borderSize
minSize[1] += 2*self.borderSize
maxSize = self.childWidget.getSize().getMaximumSize()[:]
maxSize[0] += 2*self.borderSize
maxSize[1] += 2*self.borderSize
return Size(prefSize, minSize, maxSize)
class LabelModel(Model):
def __init__(self, text, fontsize='normal', color=None):
"""Constructor.
@param text Text to display.
@param fontsize 'large', 'normal', 'small' or 'tiny'
@param color List of font color values.
"""
Model.__init__(self)
self.text = text
if fontsize in ['large', 'normal', 'small', 'tiny']:
self.fontsize = fontsize
else:
raise ValueError
self.color = color
return
def setText(self, text):
self.text = text
self._notify()
return
def getText(self):
return self.text
def setFontsize(self, fontsize):
if fontsize in ['large', 'normal', 'small', 'tiny']:
self.fontsize = fontsize
self._notify()
else:
raise ValueError
return
def getFontsize(self):
return self.fontsize
def setColor(self, color):
self.color = color
self._notify()
def getColor(self):
return self.color
class L(LabelModel):
"""Short name for LabelModel.
@see LabelModel
"""
class LabelView(Widget, View):
"""Displays a text string.
"""
_HEIGHTDICT = {'large':14, 'normal':12, 'small':10, 'tiny':8}
_YSHIFT = {'large':5, 'normal':4, 'small':3, 'tiny':2}
def __init__(self, parent, model, size=None):
View.__init__(self, model)
if not size:
self.calculateSize = True
size = self._calculateSize()
else:
self.calculateSize = False
Widget.__init__(self, parent, size)
return
def draw(self, screenRectangle):
range = len(self.model.getText())
while ((range > 0) and (Blender.Draw.GetStringWidth(self.model.getText()[:range], self.model.getFontsize()) > (screenRectangle[2] - screenRectangle[0]))):
range -= 1
if self.model.getColor():
if (len(self.model.getColor()) == 3):
glColor3f(*self.model.getColor())
else:
glColor4f(*self.model.getColor())
else:
# theme font color
theme = Blender.Window.Theme.Get()[0]
glColor4ub(*theme.get('text').text)
glRasterPos2i(screenRectangle[0], screenRectangle[1] + LabelView._YSHIFT[self.model.getFontsize()])
Blender.Draw.Text(self.model.getText()[:range], self.model.getFontsize())
return
def update(self):
if self.calculateSize:
self.size = self._calculateSize()
Blender.Draw.Redraw(1)
return
def _calculateSize(self):
size = [0, 0]
size[0] = Blender.Draw.GetStringWidth(self.model.getText(), self.model.getFontsize())
size[1] = LabelView._HEIGHTDICT[self.model.getFontsize()]
return Size(size, size, size)
class Box(Decorator):
"""Provides a border with an optional title for a child widget.
"""
def __init__(self, parent, label=None, outerBorder=0, innerBorder=0):
"""Constructor.
        @param parent Parent widget.
@param label Optional LabelModel as title.
"""
# borders
self.outerBorder = outerBorder
self.innerBorder = innerBorder
self.model = label
self.view = None
if self.model:
self.model.addView(self)
            # use view to calculate size only
self.view = LabelView(Box.NoParent(), self.model)
Decorator.__init__(self, parent)
return
def draw(self, screenRectangle):
rect = screenRectangle[:]
rect[0] += self.outerBorder
rect[1] += self.outerBorder
rect[2] -= self.outerBorder
rect[3] -= self.outerBorder
if self.model:
# title
[labelWidth, labelHeight] = self.view.getSize().getMinimumSize()
self.view.draw([rect[0] + 7, rect[3] - labelHeight, rect[0] + 7 + labelWidth, rect[3]])
# border
glColor3f(0.0, 0.0, 0.0)
glBegin(GL_LINE_STRIP)
# 5--6 TITLE 1--2
# | |
# 4----------------3
glVertex2i(rect[0] + 9 + labelWidth, rect[3] - int(labelHeight/2.0))
glVertex2i(rect[2], rect[3] - int(labelHeight/2.0))
glVertex2i(rect[2], rect[1])
glVertex2i(rect[0], rect[1])
glVertex2i(rect[0], rect[3] - int(labelHeight/2.0))
glVertex2i(rect[0] + 3, rect[3] - int(labelHeight/2.0))
glEnd()
rect[0] += 1
rect[1] += 1
rect[2] -= 1
rect[3] -= labelHeight
else:
# border only
glColor3f(0.0, 0.0, 0.0)
glBegin(GL_LINE_STRIP)
glVertex2i(rect[0], rect[1])
glVertex2i(rect[0], rect[3])
glVertex2i(rect[2], rect[3])
glVertex2i(rect[2], rect[1])
glVertex2i(rect[0], rect[1])
glEnd()
rect[0] += 1
rect[1] += 1
rect[2] -= 1
rect[3] -= 1
rect[0] += self.innerBorder
rect[1] += self.innerBorder
rect[2] -= self.innerBorder
rect[3] -= self.innerBorder
self.childWidget.draw(rect)
return
def getSize(self):
if self.childWidget:
minSize = self.childWidget.getSize().getMinimumSize()[:]
prefSize = self.childWidget.getSize().getPreferredSize()[:]
maxSize = self.childWidget.getSize().getMaximumSize()[:]
else:
minSize = [0, 0]
prefSize = [0, 0]
maxSize = [0, 0]
# border
minSize[0] += 2 + 2*self.outerBorder + 2*self.innerBorder
minSize[1] += 2 + 2*self.outerBorder + 2*self.innerBorder
prefSize[0] += 2 + 2*self.outerBorder + 2*self.innerBorder
prefSize[1] += 2 + 2*self.outerBorder + 2*self.innerBorder
maxSize[0] += 2 + 2*self.outerBorder + 2*self.innerBorder
maxSize[1] += 2 + 2*self.outerBorder + 2*self.innerBorder
if self.model:
titleSize = self.view.getSize()
# 1+3 +3 +x +3+1 = 11+x
# +---___TITLE___+ y
# | | +
# +--------------+ 1
if (minSize[0] < (titleSize.getMinimumSize()[0] + 9)):
minSize[0] += 9 + titleSize.getMinimumSize()[0]
prefSize[0] += 9 + titleSize.getPreferredSize()[0]
maxSize[0] += 9 + titleSize.getMaximumSize()[0]
minSize[1] += titleSize.getMinimumSize()[1] - 1
prefSize[1] += titleSize.getPreferredSize()[1] - 1
maxSize[1] += titleSize.getMaximumSize()[1] - 1
return Size(prefSize, minSize, maxSize)
class NoParent(Widget):
"""Widget acts as dummy parent.
"""
def __init__(self):
return
def resize(self, size=None):
return
def _addWidget(self, widget):
return
def _removeWidget(self, widget):
return
class OgreFrame(Decorator):
"""Ogre Logo, Title and border.
"""
OGRE_LOGO = Buffer(GL_BYTE, [48,122*4],[[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,32,0,0,0,64,0,0,0,95,0,0,0,127,0,0,0,127,0,0,0,127,0,0,0,127,0,0,0,127,0,0,0,127,0,0,0,127,0,0,0,127,0,0,0,127,0,0,0,127,0,0,0,127,0,0,0,64,0,0,0,64,0,0,0,64,0,0,0,64,0,0,0,64,0,0,0,32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,32,0,0,0,95,0,0,0,127,0,0,0,127,0,1,0,127,0,2,0,127,2,5,2,127,2,5,2,127,4,6,4,127,5,8,5,127,8,11,8,127,8,11,8,127,3,5,3,127,2,3,2,127,0,1,0,127,0,1,0,127,0,1,0,127,0,1,0,127,0,0,0,127,0,0,0,127,0,0,0,127,0,0,0,127,0,0,0,127,0,0,0,64,0,0,0,32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,32,0,0,0,95,0,0,0,127,1,2,1,127,4,6,4,127,10,13,10,127,18,22,18,127,23,28,23,127,24,30,24,127,25,31,25,127,25,31,25,127,26,32,26,127,26,32,26,127,26,32,26,127,25,31,25,127,24,30,24,127,18,23,18,127,3,5,3,127,4,6,4,127,8,11,8,127,9,12,9,127,13,17,13,127,17,22,17,127,15,19,15,127,7,9,7,127,1,2,1,127,0,0,0,127,0,0,0,95,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64,0,0,0,127,2,4,2,127,4,6,4,127,18,22,18,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,18,22,18,127,15,19,15,127,20,26,20,127,25,31,25,127,26,32,26,127,26,32,26,127,25,31,25,127,25,31,25,127,25,31,25,127,26,32,26,127,24,30,24,127,16,20,16,127,4,5,4,127,0,0,0,95,0,0,0,32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,95,1,1,1,127,13,15,13,127,12,15,12,127,24,29,24,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,23,29,23,127,24,30,24,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,23,28,23,127,3,5,3,127,0,0,0,127,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,95,1,1,1,127,19,24,19,127,11,15,11,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,23,28,23,127,17,21,17,127,22,28,22,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,23,28,23,127,3,5,3,127,0,0,0,127,0,0,0,32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,32,0,0,0,127,20,24,20,127,16,20,16,127,20,25,20,127,24,30,24,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,22,28,22,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,23,28,23,127,3,5,3,127,0,0,0,127,0,0,0,32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,64,5,7,5,127,26,32,26,127,15,19,15,127,41,48,41,127,38,45,38,127,24,30,24,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,23,28,23,127,3,4,3,127,0,0,0,127,58,66,58,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,127,20,24,20,127,27,34,27,127,26,32,26,127,47,55,47,127,47,55,47,127,39,46,39,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,11,16,11,127,0,1,0,127,3,3,3,127,94,106,94,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,127,33,39,33,127,45,52,45,127,28,32,28,127,47,55,47,127,44,51,44,127,39,46,39,127,27,33,27,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,21,26,21,127,0,2,0,127,0,0,0,127,23,26,23,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,127,24,28,24,127,33,40,33,127,18,22,18,127,29,35,29,127,25,31,25,127,24,30,24,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,5,8,5,127,1,2,1,127,0,0,0,127,70,79,70,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,94,105,94,127,70,79,70,127,76,86,76,127,90,101,90,127,103,116,103,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,64,0,0,0,127,4,6,4,127,12,16,12,127,22,27,22,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,23,29,23,127,28,34,28,127,35,42,35,127,28,35,28,127,25,31,25,127,23,29,23,127,23,29,23,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,17,21,17,127,0,2,0,127,0,0,0,127,31,36,31,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,100,112,100,127,92,103,92,127,103,116,103,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,100,112,100,127,81,92,81,127,68,77,68,127,65,73,65,127,65,73,65,127,76,86,76,127,78,88,78,127,83,94,83,127,92,103,92,127,85,95,85,127,31,35,31,127,6,7,6,127,6,7,6,127,13,14,13,127,13,14,13,127,19,21,19,127,26,29,26,127,26,29,26,127,48,54,48,127,96,108,96,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,70,78,70,127,3,3,3,127,0,0,0,127,0,0,0,127,0,0,0,127,0,0,0,127,12,13,11,127,23,26,23,127,36,40,36,127,49,55,49,127,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,64,0,0,0,127,2,4,2,127,16,20,16,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,24,30,24,127,26,33,26,127,59,68,59,127,81,91,81,127,87,98,87,127,86,96,86,127,80,90,80,127,71,79,71,127,59,66,59,127,36,41,35,127,23,29,23,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,24,31,24,127,26,32,26,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,5,8,5,127,0,1,0,127,18,20,18,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,91,103,91,127,58,65,58,127,29,33,29,127,6,7,6,127,0,0,0,127,0,0,0,127,1,2,1,127,22,24,22,127,54,61,54,127,94,106,94,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,88,99,88,127,51,58,51,127,18,21,18,127,0,0,0,127,0,0,0,127,0,0,0,127,0,0,0,127,0,0,0,127,0,0,0,127,0,0,0,127,0,0,0,127,0,0,0,127,0,0,0,127,0,0,0,127,0,0,0,127,0,0,0,127,0,0,0,127,0,0,0,127,0,0,0,127,0,0,0,127,0,0,0,127,0,0,0,127,0,0,0,127,17,19,17,127,48,54,48,127,80,91,80,127,102,115,102,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,29,33,29,127,0,0,0,127,41,31,14,127,33,25,11,127,18,14,6,127,2,2,1,127,0,0,0,127,0,0,0,127,0,0,0,127,0,0,0,127,0,0,0,127,0,0,0,127,0,0,0,127,0,0,0,64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,127,2,3,2,127,24,29,24,127,26,32,26,127,24,30,24,127,25,31,25,127,24,30,24,127,24,30,24,127,24,30,24,127,23,29,23,127,34,41,34,127,78,88,78,127,87,98,87,127,86,97,86,127,86,97,86,127,86,97,86,127,86,97,86,127,87,97,87,127,87,97,87,127,84,93,84,127,62,69,62,127,34,40,34,127,24,30,24,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,23,28,23,127,26,30,26,127,36,38,36,127,47,50,46,127,39,42,37,127,34,40,34,127,30,37,30,127,24,30,24,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,15,19,15,127,0,1,0,127,0,0,0,127,102,115,102,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,94,106,94,127,43,48,43,127,4,5,4,127,0,0,0,127,0,0,0,127,0,0,0,127,6,5,2,127,16,12,5,127,2,2,1,127,0,0,0,127,0,0,0,127,7,8,7,127,58,65,58,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,96,108,96,127,41,47,41,127,1,1,1,127,0,0,0,127,0,0,0,127,6,5,2,127,27,21,9,127,42,33,14,127,46,36,16,127,46,36,16,127,33,25,11,127,31,24,11,127,25,19,9,127,16,12,5,127,12,9,4,127,0,0,0,127,107,82,36,127,115,88,38,127,107,82,36,127,107,82,36,127,100,76,33,127,92,71,31,127,88,68,30,127,0,0,0,127,4,3,2,127,0,0,0,127,0,0,0,127,0,0,0,127,13,15,13,127,65,73,65,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,13,14,13,127,0,0,0,127,107,82,36,127,122,94,41,127,122,94,41,127,122,94,41,127,109,84,36,127,96,73,32,127,80,62,27,127,65,50,22,127,52,40,17,127,37,28,12,127,21,16,7,127,2,2,1,127,0,0,0,64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,127,9,11,9,127,48,56,48,127,45,53,45,127,41,48,41,127,33,40,33,127,34,41,34,127,37,44,37,127,54,62,54,127,77,87,77,127,87,97,87,127,87,97,87,127,86,97,86,127,86,97,86,127,86,97,86,127,86,97,86,127,86,97,86,127,86,97,86,127,86,97,86,127,86,97,86,127,86,97,86,127,79,88,79,127,61,69,61,127,25,31,25,127,25,31,25,127,23,28,23,127,19,23,19,127,42,43,41,127,60,60,59,127,61,61,59,127,61,61,59,127,63,63,61,127,35,37,34,127,38,45,38,127,33,39,33,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,15,19,15,127,0,1,0,127,0,0,0,127,102,115,102,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,81,91,81,127,9,11,9,127,0,0,0,127,2,2,1,127,44,34,15,127,86,66,29,127,115,88,38,127,122,94,41,127,122,94,41,127,121,92,40,127,94,72,31,127,39,30,13,127,0,0,0,127,0,0,0,127,40,45,40,127,101,114,101,127,105,118,105,127,105,118,105,127,105,118,105,127,85,95,85,127,11,13,11,127,0,0,0,127,4,3,2,127,50,38,17,127,94,72,31,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,92,71,31,127,0,0,0,127,107,82,36,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,100,76,33,127,2,2,1,127,105,81,35,127,98,75,33,127,60,46,20,127,23,18,8,127,0,0,0,127,1,1,1,127,90,102,90,127,105,118,105,127,105,118,105,127,105,118,105,127,6,7,6,127,0,0,0,127,115,88,38,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,8,6,3,127,0,0,0,95,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,127,3,5,3,127,45,53,45,127,46,54,46,127,46,54,46,127,47,55,47,127,46,54,46,127,68,78,68,127,87,98,87,127,86,97,86,127,86,97,86,127,86,97,86,127,86,97,86,127,86,97,86,127,86,97,86,127,86,97,86,127,86,97,86,127,86,97,86,127,86,97,86,127,86,97,86,127,86,97,86,127,87,98,87,127,67,76,67,127,38,46,38,127,21,26,21,127,50,52,50,127,60,60,59,127,61,61,59,127,60,60,58,127,60,60,58,127,60,60,58,127,61,61,59,127,39,41,38,127,52,59,52,127,67,76,67,127,23,29,23,127,25,31,25,127,25,31,25,127,25,31,25,127,15,19,15,127,0,1,0,127,0,0,0,127,102,115,102,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,59,67,59,127,1,1,1,127,0,0,0,127,35,27,12,127,105,81,35,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,86,66,29,127,8,6,3,127,0,0,0,127,36,40,36,127,105,118,105,127,105,118,105,127,82,92,82,127,7,7,7,127,0,0,0,127,31,24,10,127,107,82,36,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,80,62,27,127,0,0,0,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,82,63,28,127,46,36,16,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,27,21,9,127,0,0,0,127,78,88,78,127,105,118,105,127,105,118,105,127,105,118,105,127,0,0,0,127,0,0,0,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,100,76,33,127,0,0,0,127,0,0,0,64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,127,0,2,0,127,41,49,41,127,46,54,46,127,46,54,46,127,49,56,49,127,77,87,77,127,87,98,87,127,86,97,86,127,86,97,86,127,86,97,86,127,86,97,86,127,86,97,86,127,86,97,86,127,86,97,86,127,86,97,86,127,86,97,86,127,86,97,86,127,86,97,86,127,86,97,86,127,85,96,85,127,55,64,55,127,44,52,44,127,23,28,23,127,17,22,17,127,90,92,90,127,84,84,82,127,60,60,58,127,60,60,58,127,60,60,58,127,60,60,58,127,61,61,59,127,39,41,38,127,54,62,54,127,62,71,62,127,23,29,23,127,25,31,25,127,25,31,25,127,25,31,25,127,15,20,15,127,0,1,0,127,0,0,0,127,102,115,102,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,81,90,81,127,1,1,1,127,0,0,0,127,61,47,21,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,103,79,34,127,12,9,4,127,0,0,0,127,47,52,47,127,93,104,93,127,8,9,8,127,0,0,0,127,52,40,17,127,121,92,40,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,77,59,26,127,0,0,0,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,63,49,21,127,105,81,35,127,122,94,41,127,122,94,41,127,122,94,41,127,100,76,33,127,0,0,0,127,9,11,9,127,101,113,101,127,105,118,105,127,105,118,105,127,105,118,105,127,0,0,0,127,0,0,0,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,69,53,23,127,0,0,0,127,0,0,0,32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,127,0,1,0,127,37,44,37,127,46,54,46,127,49,57,49,127,79,89,79,127,87,97,87,127,86,97,86,127,86,97,86,127,86,97,86,127,86,97,86,127,86,97,86,127,86,97,86,127,86,97,86,127,86,97,86,127,86,97,86,127,86,97,86,127,86,97,86,127,86,97,86,127,86,97,86,127,56,64,56,127,46,53,46,127,25,31,25,127,22,27,22,127,25,31,25,127,44,47,44,127,116,116,115,127,59,59,57,127,60,60,58,127,60,60,58,127,60,60,58,127,61,61,59,127,38,41,37,127,69,78,69,127,45,53,45,127,24,30,24,127,25,31,25,127,25,31,25,127,25,31,25,127,15,20,15,127,0,0,0,127,5,6,5,127,104,117,104,127,105,118,105,127,105,118,105,127,105,118,105,127,93,104,93,127,8,9,8,127,0,0,0,127,61,47,21,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,96,73,32,127,2,2,1,127,0,0,0,127,24,28,24,127,0,0,0,127,37,28,12,127,121,92,40,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,77,59,26,127,10,8,3,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,88,68,30,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,39,30,13,127,0,0,0,127,43,49,43,127,105,118,105,127,105,118,105,127,105,118,105,127,93,105,93,127,0,0,0,127,14,11,5,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,39,30,13,127,0,0,0,127,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,64,0,1,0,127,21,25,21,127,48,57,49,127,82,92,82,127,87,97,87,127,86,97,86,127,86,97,86,127,86,97,86,127,86,97,86,127,86,97,86,127,86,97,86,127,86,97,86,127,86,97,86,127,86,97,86,127,86,97,86,127,86,97,86,127,87,97,87,127,87,98,87,127,60,69,60,127,43,50,43,127,29,36,29,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,116,116,116,127,71,71,70,127,60,60,58,127,60,60,58,127,60,60,58,127,62,62,60,127,30,32,29,127,75,85,75,127,29,36,29,127,25,31,25,127,24,30,24,127,24,30,24,127,23,28,23,127,10,14,10,127,0,0,0,127,40,45,40,127,105,118,105,127,105,118,105,127,105,118,105,127,105,118,105,127,33,38,33,127,0,0,0,127,39,30,13,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,67,52,23,127,0,0,0,127,0,0,0,127,10,8,3,127,113,87,38,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,107,82,36,127,84,65,28,127,71,54,24,127,115,88,38,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,67,51,22,127,16,12,5,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,121,92,40,127,122,94,41,127,122,94,41,127,122,94,41,127,105,81,35,127,2,2,1,127,0,0,0,127,0,0,0,127,18,21,18,127,61,69,61,127,102,115,102,127,92,103,92,127,0,0,0,127,16,12,5,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,60,46,20,127,52,40,17,127,69,53,23,127,86,66,29,127,10,8,3,127,0,0,0,127,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,127,2,5,2,127,49,57,49,127,87,98,87,127,86,97,86,127,86,97,86,127,86,97,86,127,86,97,86,127,86,97,86,127,86,97,86,127,86,97,86,127,86,97,86,127,86,97,86,127,86,97,86,127,87,98,87,127,86,97,86,127,75,84,75,127,53,61,53,127,34,41,34,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,22,28,22,127,96,97,96,127,93,93,92,127,59,59,58,127,60,60,58,127,60,60,58,127,61,61,59,127,34,39,34,127,74,84,74,127,23,29,23,127,25,31,25,127,37,39,34,127,47,47,41,127,44,45,39,127,17,18,16,127,0,0,0,127,52,59,52,127,105,118,105,127,105,118,105,127,105,118,105,127,81,92,81,127,0,0,0,127,8,6,3,127,111,85,37,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,121,92,40,127,50,38,17,127,16,12,5,127,33,25,11,127,103,79,34,127,122,94,41,127,122,94,41,127,122,94,41,127,121,92,40,127,23,18,8,127,0,0,0,127,69,53,23,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,77,59,26,127,27,21,9,127,0,0,0,127,0,0,0,127,0,0,0,127,92,71,31,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,61,47,21,127,18,14,6,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,117,90,39,127,88,68,29,127,54,41,18,127,14,11,5,127,0,0,0,127,0,0,0,127,17,18,17,127,68,76,68,127,0,0,0,127,21,16,7,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,31,24,11,127,0,0,0,127,0,0,0,127,0,0,0,127,0,0,0,127,0,0,0,127,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,95,0,0,0,127,37,43,37,127,89,100,89,127,86,97,86,127,86,97,86,127,86,97,86,127,86,97,86,127,86,97,86,127,86,97,86,127,86,97,86,127,87,97,87,127,88,99,88,127,82,92,82,127,61,69,61,127,36,42,36,127,27,32,27,127,23,29,23,127,23,29,23,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,23,29,23,127,78,80,76,127,102,102,102,127,58,58,57,127,60,60,58,127,60,60,58,127,58,58,56,127,40,47,40,127,56,64,56,127,24,29,23,127,44,45,40,127,49,49,43,127,49,49,43,127,46,46,41,127,41,42,37,127,0,0,0,127,38,43,38,127,105,118,105,127,105,118,105,127,105,118,105,127,33,37,33,127,0,0,0,127,61,47,21,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,77,59,26,127,0,0,0,127,0,0,0,127,0,0,0,127,12,9,4,127,113,87,38,127,122,94,41,127,122,94,41,127,122,94,41,127,84,65,28,127,4,3,2,127,115,88,38,127,122,94,41,127,122,94,41,127,122,94,41,127,121,92,40,127,42,33,14,127,0,0,0,127,119,91,40,127,102,78,34,127,75,57,25,127,52,40,17,127,88,68,29,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,61,47,21,127,31,24,11,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,121,92,40,127,84,65,28,127,19,15,7,127,0,0,0,127,4,5,4,127,0,0,0,127,31,24,11,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,111,85,37,127,115,88,38,127,122,94,41,127,122,94,41,127,48,37,16,127,0,0,0,127,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,32,0,0,0,127,6,7,5,127,67,75,67,127,89,100,89,127,87,97,87,127,87,97,87,127,87,98,87,127,88,99,88,127,88,98,88,127,80,90,80,127,62,71,62,127,45,52,45,127,39,46,39,127,57,65,57,127,65,74,65,127,59,67,59,127,54,61,54,127,55,61,55,127,28,34,28,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,24,30,24,127,64,67,64,127,109,109,108,127,58,58,57,127,60,60,58,127,61,60,59,127,50,50,47,127,47,55,47,127,33,39,33,127,44,44,39,127,48,48,42,127,48,48,42,127,28,30,25,127,36,37,31,127,48,48,42,127,1,2,1,127,36,41,36,127,105,118,105,127,105,118,105,127,99,111,99,127,4,5,4,127,2,2,1,127,113,87,38,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,65,50,22,127,0,0,0,127,30,34,30,127,27,30,27,127,0,0,0,127,67,51,22,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,58,44,19,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,71,54,24,127,0,0,0,127,18,14,6,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,54,41,18,127,31,24,11,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,121,92,40,127,56,43,19,127,0,0,0,127,0,0,0,127,31,24,11,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,37,28,12,127,0,0,0,127,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,95,0,0,0,127,2,3,2,127,28,32,28,127,58,65,58,127,56,64,56,127,50,57,50,127,46,54,46,127,42,49,42,127,43,50,43,127,62,71,62,127,80,90,80,127,87,98,87,127,87,98,87,127,87,97,87,127,87,98,87,127,86,97,87,127,78,85,78,127,46,52,46,127,24,30,24,127,25,31,25,127,25,31,25,127,25,31,25,127,24,30,24,127,64,67,64,127,104,104,104,127,58,58,57,127,60,60,58,127,62,61,60,127,34,38,33,127,37,43,37,127,50,51,44,127,48,48,42,127,48,48,42,127,23,27,22,127,32,36,30,127,95,95,82,127,43,45,39,127,0,0,0,127,45,51,45,127,105,118,105,127,105,118,105,127,71,80,71,127,0,0,0,127,35,27,12,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,103,79,35,127,2,2,1,127,0,0,0,127,11,13,11,127,0,0,0,127,65,50,22,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,121,92,40,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,23,18,8,127,0,0,0,127,35,27,12,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,46,36,16,127,41,31,14,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,119,91,40,127,37,28,12,127,50,38,17,127,73,56,24,127,107,82,36,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,69,53,23,127,0,0,0,127,44,34,15,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,27,21,9,127,0,0,0,127,0,0,0,64,0,0,0,64,0,0,0,64,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64,0,0,0,127,0,0,0,127,0,0,0,127,0,0,0,127,0,0,0,127,8,10,8,127,51,59,51,127,84,95,84,127,87,98,87,127,86,97,86,127,86,97,86,127,86,97,86,127,86,97,86,127,86,97,86,127,86,97,86,127,86,97,87,127,63,71,63,127,23,29,23,127,25,31,25,127,25,31,25,127,25,31,25,127,23,29,23,127,76,78,75,127,100,100,99,127,58,58,57,127,61,60,59,127,53,54,51,127,24,30,24,127,29,33,28,127,77,76,63,127,47,48,42,127,29,32,27,127,24,30,24,127,30,35,29,127,90,91,84,127,28,29,25,127,0,0,0,127,77,86,76,127,105,118,105,127,105,118,105,127,44,50,44,127,0,0,0,127,69,53,23,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,81,62,27,127,4,3,2,127,0,0,0,127,12,9,4,127,107,82,36,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,4,3,2,127,0,0,0,127,54,41,18,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,46,36,16,127,46,36,16,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,100,76,33,127,0,0,0,127,0,0,0,127,0,0,0,127,0,0,0,127,48,37,16,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,42,33,14,127,46,36,16,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,16,12,5,127,0,0,0,127,0,0,0,127,4,3,2,127,6,5,2,127,0,0,0,95,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64,0,0,0,64,0,0,0,95,1,1,1,127,60,68,60,127,87,98,87,127,86,97,86,127,86,97,86,127,86,97,86,127,86,97,86,127,86,97,86,127,86,97,86,127,86,97,86,127,87,97,87,127,73,82,73,127,24,30,24,127,25,31,25,127,25,31,25,127,25,31,25,127,22,28,22,127,89,92,89,127,87,87,86,127,59,59,58,127,60,59,58,127,31,35,31,127,25,31,25,127,43,45,38,127,74,74,62,127,43,43,38,127,22,28,22,127,25,31,25,127,24,30,24,127,26,32,26,127,13,14,12,127,0,0,0,127,100,113,100,127,105,118,105,127,105,118,105,127,21,24,21,127,0,0,0,127,98,75,33,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,113,87,38,127,92,71,31,127,117,90,39,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,19,15,7,127,0,0,0,127,71,54,24,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,39,30,13,127,50,38,17,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,82,63,28,127,0,0,0,127,23,26,23,127,38,42,38,127,5,7,5,127,0,0,0,127,96,73,32,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,111,85,37,127,54,41,18,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,82,63,28,127,16,12,5,127,16,12,5,127,16,12,5,127,12,9,4,127,46,35,16,127,82,63,28,127,117,90,39,127,46,36,16,127,0,0,0,127,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,32,0,0,0,127,33,38,33,127,89,99,89,127,86,97,86,127,86,97,86,127,86,97,86,127,86,97,86,127,86,97,86,127,86,97,86,127,86,97,86,127,86,97,86,127,84,94,84,127,28,35,28,127,25,31,25,127,25,31,25,127,25,31,25,127,22,28,22,127,100,101,100,127,73,73,71,127,61,60,59,127,35,38,35,127,24,30,24,127,24,30,24,127,48,51,41,127,69,69,57,127,36,37,32,127,24,30,24,127,28,34,28,127,25,31,25,127,25,31,25,127,17,21,17,127,0,0,0,127,80,90,80,127,105,118,105,127,105,118,105,127,6,7,6,127,0,0,0,127,115,88,38,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,56,43,19,127,0,0,0,127,88,68,29,127,117,90,39,127,107,82,36,127,92,71,31,127,80,62,27,127,69,53,23,127,60,46,20,127,46,36,16,127,33,25,11,127,23,18,8,127,4,3,2,127,61,47,21,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,65,50,22,127,0,0,0,127,20,22,20,127,26,30,26,127,0,0,0,127,2,2,1,127,109,84,36,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,100,76,33,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,77,59,26,127,21,16,7,127,60,46,20,127,94,72,31,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,54,41,18,127,0,0,0,127,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,95,6,7,6,127,81,91,81,127,86,97,86,127,86,97,86,127,86,97,86,127,86,97,86,127,86,97,86,127,86,97,86,127,86,97,86,127,86,97,86,127,88,98,88,127,60,68,60,127,27,33,27,127,24,30,24,127,25,31,25,127,22,28,22,127,91,91,91,127,57,58,56,127,31,36,31,127,24,30,24,127,25,31,25,127,25,31,25,127,27,31,26,127,70,71,58,127,41,42,36,127,37,43,37,127,66,74,66,127,23,29,23,127,25,31,25,127,19,22,19,127,0,0,0,127,75,84,75,127,105,118,105,127,102,114,102,127,0,0,0,127,4,3,2,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,117,90,39,127,31,24,10,127,2,2,1,127,0,0,0,127,0,0,0,127,0,0,0,127,0,0,0,127,0,0,0,127,0,0,0,127,0,0,0,127,0,0,0,127,0,0,0,127,4,3,2,127,61,47,21,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,46,36,16,127,0,0,0,127,0,0,0,127,0,0,0,127,8,6,3,127,73,56,24,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,121,92,40,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,61,47,21,127,0,0,0,127,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,32,0,0,0,127,45,52,45,127,87,98,88,127,86,97,86,127,86,97,86,127,86,97,86,127,86,97,86,127,86,97,86,127,86,97,86,127,86,97,86,127,86,97,86,127,82,92,82,127,46,54,46,127,34,41,34,127,25,31,25,127,25,31,25,127,26,30,26,127,24,30,24,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,24,30,24,127,33,37,31,127,48,48,42,127,43,43,38,127,66,74,65,127,23,29,23,127,25,31,25,127,20,25,20,127,0,0,0,127,70,78,70,127,105,118,105,127,92,103,92,127,0,0,0,127,16,12,5,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,86,66,29,127,48,37,16,127,31,24,11,127,16,12,5,127,23,18,8,127,33,25,11,127,52,40,17,127,71,54,24,127,96,73,32,127,117,90,39,127,63,49,21,127,73,56,24,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,88,68,29,127,77,59,26,127,77,59,26,127,90,69,30,127,117,90,39,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,73,56,24,127,0,0,0,127,0,0,0,32],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,95,25,28,25,127,88,99,88,127,86,97,86,127,86,97,86,127,86,97,86,127,86,97,86,127,86,97,86,127,86,97,86,127,86,97,86,127,87,98,87,127,70,79,70,127,46,54,46,127,47,55,47,127,45,52,45,127,30,37,30,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,24,30,24,127,44,52,44,127,72,81,72,127,70,79,70,127,23,29,23,127,25,31,25,127,21,25,21,127,0,0,0,127,66,73,65,127,105,118,105,127,92,103,92,127,0,0,0,127,16,12,5,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,80,62,27,127,77,59,26,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,77,59,26,127,0,0,0,127,0,0,0,64],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,32,0,1,0,127,64,72,64,127,87,97,87,127,86,97,86,127,86,97,86,127,87,97,87,127,86,97,86,127,86,96,86,127,85,95,85,127,71,80,71,127,47,55,47,127,46,54,46,127,46,54,46,127,46,54,46,127,47,55,47,127,31,38,31,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,23,29,23,127,59,67,59,127,77,87,77,127,58,66,58,127,25,31,25,127,25,31,25,127,22,27,22,127,0,0,0,127,48,54,48,127,105,118,105,127,92,103,92,127,0,0,0,127,16,12,5,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,98,75,33,127,80,62,27,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,92,71,31,127,0,0,0,127,0,0,0,64],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,127,14,16,14,127,88,99,88,127,88,98,88,127,88,98,88,127,72,82,72,127,51,59,51,127,52,61,52,127,55,63,55,127,47,55,47,127,45,53,45,127,45,53,45,127,46,54,46,127,46,54,46,127,46,54,46,127,45,53,45,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,37,44,37,127,76,86,76,127,73,82,73,127,32,39,32,127,23,29,23,127,2,2,2,127,30,34,30,95,105,118,105,64,98,111,98,64,0,0,0,95,4,3,2,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,115,88,38,127,92,71,31,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,119,91,40,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,98,75,33,127,0,0,0,127,0,0,0,64],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,127,21,24,21,127,55,62,55,127,51,57,50,127,64,72,64,127,86,96,86,127,85,95,85,127,84,94,84,127,86,96,86,127,84,95,84,127,82,92,82,127,75,85,75,127,52,60,52,127,46,54,46,127,46,54,46,127,45,53,45,127,26,32,26,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,29,36,29,127,28,34,28,127,24,30,24,127,62,71,62,127,88,99,88,127,66,75,66,127,24,30,24,127,8,11,8,127,0,0,0,64,0,0,0,0,0,0,0,0,0,0,0,64,0,0,0,127,105,81,35,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,98,75,33,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,121,92,40,127,100,76,33,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,107,82,36,127,0,0,0,127,0,0,0,64],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64,0,0,0,127,31,36,31,127,35,40,35,127,33,36,32,127,31,34,31,127,47,55,47,127,51,59,51,127,47,55,47,127,39,46,39,127,29,36,29,127,37,43,37,127,52,60,52,127,77,87,77,127,49,58,49,127,46,54,46,127,40,48,40,127,24,30,24,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,29,35,29,127,80,90,80,127,59,67,59,127,24,30,24,127,24,30,24,127,76,86,76,127,87,98,87,127,39,46,39,127,17,22,17,127,0,0,0,127,0,0,0,0,0,0,0,0,0,0,0,32,0,0,0,127,75,57,25,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,79,60,26,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,113,87,38,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,71,55,24,127,103,79,35,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,117,90,39,127,0,0,0,127,0,0,0,64],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,127,41,48,41,127,69,79,69,127,39,45,39,127,47,54,47,127,77,87,77,127,86,97,86,127,88,97,87,127,87,97,86,127,82,93,83,127,57,65,57,127,25,31,25,127,24,30,24,127,26,32,26,127,26,32,26,127,26,32,26,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,75,85,75,127,87,98,87,127,67,75,67,127,23,29,23,127,23,29,23,127,56,64,56,127,85,95,85,127,75,84,75,127,24,30,24,127,3,3,3,127,0,0,0,64,0,0,0,0,0,0,0,0,0,0,0,127,29,22,10,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,119,91,40,127,8,6,3,127,109,84,36,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,119,91,40,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,98,75,33,127,6,5,2,127,107,82,36,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,0,0,0,127,0,0,0,64],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64,0,0,0,127,12,15,12,127,45,53,46,127,48,56,48,127,65,72,63,127,98,81,79,127,123,119,119,127,117,108,108,127,94,79,76,127,88,88,80,127,64,73,64,127,24,30,24,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,35,41,35,127,86,96,86,127,87,98,87,127,61,69,61,127,23,29,23,127,24,30,24,127,46,53,46,127,84,94,84,127,87,98,87,127,55,63,55,127,10,12,10,127,0,0,0,95,0,0,0,0,0,0,0,0,0,0,0,64,0,0,0,127,92,71,31,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,75,57,25,127,0,0,0,127,52,40,17,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,98,75,33,127,12,9,4,127,0,0,0,127,109,84,36,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,14,11,5,127,0,0,0,95],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64,0,0,0,127,22,26,22,127,30,37,30,127,23,29,23,127,41,40,35,127,91,73,72,127,113,103,103,127,100,75,75,127,87,58,58,127,83,72,66,127,54,63,55,127,23,29,23,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,30,25,127,34,41,34,127,69,78,69,127,81,91,81,127,34,41,34,127,25,31,25,127,23,29,23,127,61,69,61,127,82,92,82,127,75,85,75,127,82,92,82,127,24,29,24,127,1,1,1,127,0,0,0,32,0,0,0,0,0,0,0,0,0,0,0,127,23,18,8,127,119,91,40,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,121,92,40,127,17,14,6,127,0,0,0,127,2,2,1,127,96,73,32,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,75,58,25,127,6,5,2,127,0,0,0,127,0,0,0,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,18,14,6,127,0,0,0,127],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,32,0,0,0,127,24,29,24,127,48,56,48,127,28,34,28,127,24,30,24,127,25,31,25,127,36,37,32,127,68,55,52,127,82,63,62,127,80,52,52,127,81,82,74,127,28,34,28,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,24,30,24,127,23,29,23,127,25,31,25,127,24,30,24,127,25,31,25,127,24,29,24,127,56,64,56,127,87,97,87,127,70,79,70,127,88,99,88,127,49,57,49,127,10,12,10,127,0,0,0,95,0,0,0,0,0,0,0,0,0,0,0,32,0,0,0,127,44,34,15,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,67,52,23,127,0,0,0,127,0,0,0,95,0,0,0,127,12,9,4,127,109,84,36,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,100,76,33,127,33,25,11,127,0,0,0,127,0,0,0,127,0,0,0,95,0,0,0,127,107,82,36,127,117,90,39,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,31,24,11,127,0,0,0,127],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,95,14,16,14,127,81,91,81,127,72,81,72,127,43,51,43,127,23,29,23,127,24,30,24,127,23,30,24,127,23,30,23,127,25,31,25,127,26,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,26,32,26,127,32,39,32,127,30,37,30,127,24,30,24,127,25,31,25,127,25,31,25,127,25,32,25,127,83,93,83,127,77,86,77,127,87,97,87,127,80,90,80,127,22,27,22,127,1,1,1,127,0,0,0,32,0,0,0,0,0,0,0,0,0,0,0,64,0,0,0,127,46,35,15,127,121,92,40,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,96,73,32,127,4,3,2,127,0,0,0,95,0,0,0,0,0,0,0,32,0,0,0,127,12,9,4,127,98,75,33,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,96,73,32,127,40,31,14,127,2,2,1,127,0,0,0,127,0,0,0,64,0,0,0,0,0,0,0,32,0,0,0,127,0,0,0,127,0,0,0,127,2,2,1,127,16,12,5,127,25,19,9,127,33,25,11,127,46,36,16,127,56,43,19,127,61,47,21,127,77,59,26,127,84,65,28,127,92,71,31,127,107,82,36,127,115,88,38,127,122,94,41,127,122,94,41,127,39,30,13,127,0,0,0,127],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,32,0,0,0,127,18,21,18,127,83,93,83,127,89,100,89,127,71,81,71,127,54,61,54,127,37,44,37,127,24,30,24,127,23,29,23,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,24,30,24,127,42,50,42,127,70,79,70,127,87,98,87,127,74,83,74,127,28,35,28,127,25,31,25,127,24,30,24,127,42,49,42,127,76,86,76,127,86,97,86,127,88,99,88,127,41,49,41,127,11,14,11,127,0,0,0,95,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64,0,0,0,127,27,21,9,127,105,81,35,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,98,75,33,127,12,9,4,127,0,0,0,95,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,32,0,0,0,127,2,2,1,127,58,44,19,127,113,87,38,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,105,81,35,127,63,49,21,127,21,16,7,127,0,0,0,127,0,0,0,127,0,0,0,64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,32,0,0,0,64,0,0,0,64,0,0,0,64,0,0,0,127,0,0,0,127,0,0,0,127,0,0,0,127,0,0,0,127,0,0,0,127,0,0,0,127,0,0,0,127,0,0,0,127,0,0,0,127,0,0,0,127,0,0,0,127,16,12,5,127,6,5,2,127,0,0,0,95],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,32,0,0,0,127,13,17,13,127,61,70,61,127,85,96,85,127,89,100,89,127,88,98,88,127,77,87,77,127,60,67,60,127,26,32,26,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,25,31,25,127,12,16,12,127,12,15,12,127,40,46,40,127,80,90,80,127,80,89,80,127,34,40,34,127,24,30,24,127,23,29,23,127,51,59,51,127,88,99,88,127,86,97,86,127,76,85,76,127,22,27,22,127,1,2,1,127,0,0,0,32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,32,0,0,0,127,4,3,2,127,59,46,20,127,111,85,37,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,119,91,40,127,65,50,22,127,4,3,2,127,0,0,0,127,0,0,0,32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,95,0,0,0,127,4,3,2,127,44,34,15,127,80,62,27,127,111,85,37,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,122,94,41,127,121,92,40,127,100,76,33,127,75,57,25,127,48,37,16,127,18,13,6,127,0,0,0,127,0,0,0,127,0,0,0,127,0,0,0,64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64,0,0,0,64,0,0,0,64,0,0,0,64,0,0,0,64,0,0,0,64,0,0,0,127,0,0,0,64,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64,0,0,0,127,19,23,19,127,46,53,46,127,64,72,64,127,80,90,80,127,85,96,85,127,74,84,74,127,28,34,28,127,25,31,25,127,25,31,25,127,25,30,25,127,25,31,25,127,25,31,25,127,25,31,25,127,17,21,17,127,1,3,1,127,0,1,0,127,0,0,0,127,9,11,9,127,51,59,52,127,82,93,83,127,45,52,45,127,23,29,23,127,24,30,24,127,59,67,59,127,88,99,88,127,85,96,85,127,30,37,30,127,12,15,12,127,0,0,0,95,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,32,0,0,0,95,0,0,0,127,4,3,2,127,42,33,14,127,82,63,28,127,107,82,36,127,103,79,35,127,84,65,28,127,54,41,18,127,12,9,4,127,0,0,0,127,0,0,0,95,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,32,0,0,0,95,0,0,0,127,0,0,0,127,0,0,0,127,10,8,3,127,25,19,9,127,31,24,11,127,31,24,11,127,31,24,11,127,31,24,11,127,18,14,6,127,35,27,12,127,105,81,35,127,80,62,27,127,54,41,18,127,29,22,10,127,6,5,2,127,0,0,0,127,0,0,0,127,0,0,0,127,0,0,0,95,0,0,0,64,0,0,0,32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,32,0,0,0,127,8,10,8,127,33,39,33,127,44,51,44,127,46,53,46,127,44,52,44,127,39,46,39,127,25,30,25,127,25,31,25,127,25,31,25,127,24,30,24,127,15,19,15,127,5,7,5,127,0,1,0,127,0,0,0,127,0,0,0,95,0,0,0,64,0,0,0,64,0,1,0,127,21,24,21,127,66,74,66,127,57,66,57,127,24,30,24,127,23,29,23,127,52,60,52,127,40,47,40,127,24,30,24,127,23,28,23,127,1,2,1,127,0,0,0,32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,32,0,0,0,95,0,0,0,127,0,0,0,127,0,0,0,127,0,0,0,127,0,0,0,127,0,0,0,127,0,0,0,95,0,0,0,32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,32,0,0,0,64,0,0,0,95,0,0,0,127,0,0,0,127,0,0,0,127,0,0,0,127,0,0,0,127,0,0,0,127,0,0,0,127,0,0,0,127,0,0,0,127,0,0,0,127,0,0,0,127,0,0,0,64,0,0,0,64,0,0,0,32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,32,0,0,0,95,0,0,0,127,11,13,11,127,23,28,23,127,33,39,33,127,36,43,36,127,23,29,23,127,20,26,20,127,11,15,11,127,3,4,3,127,0,1,0,127,0,0,0,127,0,0,0,95,0,0,0,32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,32,0,0,0,95,3,5,3,127,37,41,37,127,58,66,58,127,27,33,27,127,24,30,24,127,26,32,26,127,25,31,25,127,25,31,25,127,8,9,8,127,0,0,0,64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64,0,0,0,64,0,0,0,64,0,0,0,64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64,0,0,0,64,0,0,0,32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,32,0,0,0,95,0,0,0,127,0,0,0,127,0,0,0,127,0,1,0,127,0,0,0,127,0,0,0,127,0,0,0,127,0,0,0,64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,32,0,0,0,127,12,15,12,127,42,49,42,127,32,39,32,127,24,30,24,127,25,31,25,127,25,31,25,127,18,22,18,127,0,0,0,127,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64,0,0,0,64,0,0,0,64,0,0,0,64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,64,2,2,2,127,23,27,23,127,37,43,37,127,26,33,26,127,25,31,25,127,24,30,24,127,4,4,4,127,0,0,0,32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,32,0,0,0,95,4,5,4,127,24,28,23,127,29,35,29,127,25,31,25,127,12,16,12,127,0,0,0,95,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,32,0,0,0,95,4,4,4,127,11,14,11,127,16,20,16,127,0,0,0,127,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,32,0,0,0,64,0,0,0,127,0,0,0,32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]])
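# The array above (referenced below as OgreFrame.OGRE_LOGO) holds the OGRE
# logo as 122x48 RGBA pixels in signed GL_BYTE form (127 = full intensity);
# draw() blits it via glDrawPixels.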
def __init__(self, parent, title):
"""Constructor.
@param title Header title.
"""
Decorator.__init__(self, parent)
self.title = title
self.border = 10
return
def draw(self, screenRectangle):
"""Draws border, title bar and logo, then the child widget.
"""
rect = screenRectangle[:]
rect[0] += self.border
rect[1] += self.border
rect[2] -= self.border
rect[3] -= self.border
# title bar background
glColor3ub(210, 236, 210)
glRecti(rect[0], rect[3]-41, rect[2], rect[3]-17)
# title text, drawn twice with a one pixel offset to fake a bold face
glColor3ub(50, 62, 50)
glRasterPos2i(rect[0]+126, rect[3]-34)
Blender.Draw.Text(self.title)
glRasterPos2i(rect[0]+127, rect[3]-34)
Blender.Draw.Text(self.title)
# logo, alpha-blended over the background
glRasterPos2i(rect[0]+1, rect[3]-48)
glEnable(GL_BLEND)
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA)
glDrawPixels(122, 48, GL_RGBA, GL_BYTE, OgreFrame.OGRE_LOGO)
rect[3] -= 48 + self.border
glDisable(GL_BLEND)
# child
self.childWidget.draw(rect)
return
def getSize(self):
"""Returns the child size enlarged by border, title bar and logo.
"""
width = 2*self.border + Blender.Draw.GetStringWidth(self.title) + 85
height = 48 + 3*self.border
minSize = self.childWidget.getSize().getMinimumSize()[:]
minSize[0] += 2*self.border
minSize[1] += 3*self.border + 48
if minSize[0] < width:
minSize[0] = width
if minSize[1] < height:
minSize[1] = height
prefSize = self.childWidget.getSize().getPreferredSize()[:]
prefSize[0] += 2*self.border
prefSize[1] += 3*self.border + 48
if prefSize[0] < width:
prefSize[0] = width
if prefSize[1] < height:
prefSize[1] = height
maxSize = self.childWidget.getSize().getMaximumSize()[:]
return Size(prefSize, minSize, maxSize)
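# A minimal usage sketch (comments only, illustrative; assumes the Widget
# and screen classes defined elsewhere in this module). The frame wraps a
# single child widget and reserves space for the border, the title bar and
# the 48 pixel high logo before handing the rest to the child:
#
#   frame = OgreFrame(screen, "OGRE Mesh Exporter")  # hypothetical title
#   layout = VerticalLayout(frame)                   # registers as the child
#   frame.draw([0, 0, 400, 300])                     # child gets the inset rect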
class DirectionalLayout(Widget):
"""Common layout functionality for horizontal and vertical layout.
"""
def __init__(self, parent):
"""Constructor.
"""
Widget.__init__(self, parent, Size([0, 0]))
self.widgetList = []
return
def resize(self, size=None):
self.size = Size([0, 0])
for widget in self.widgetList:
self._addWidgetSize(widget)
self.parent.resize()
return
def eventFilter(self, event, value):
for widget in self.widgetList:
widget.eventFilter(event, value)
return
def _addWidget(self, widget):
"""Adds a child widget to this layout.
        The child widget is appended after all other widgets. This
method gets called by the constructor of the child widget.
"""
self.widgetList.append(widget)
self._addWidgetSize(widget)
self.parent.resize()
return
def _removeWidget(self, widget):
"""Removes a child widget from this layout.
This method gets called by the <code>removeFromParent()</code>
method of the corresponding child widget.
"""
if widget in self.widgetList:
self.widgetList.remove(widget)
self.resize()
return
def _addWidgetSize(self, widget):
raise NotImplementedError
return
class HorizontalLayout(DirectionalLayout):
"""Widget that manages horizontally stacked child widgets.
"""
def __init__(self, parent, aligned = False):
"""Constructor.
"""
DirectionalLayout.__init__(self, parent)
self.aligned = aligned
return
def draw(self, screenRectangle):
        # split width among the child widgets
minimumSize = self.size.getMinimumSize()
width = screenRectangle[2]- screenRectangle[0]
additionalWidth = width - minimumSize[0]
stretchWidgetList = []
extraWidth = 0
lastExtraWidth = 0
        # get widgets with unlimited preferred width
if (additionalWidth > 0):
stretchWidgetList = [w for w in self.widgetList if w.getSize().getPreferredSize()[0] >= Size.INFINITY]
if (len(stretchWidgetList) > 0):
                # give equal extra width to widgets with unlimited preferred width
extraWidth = additionalWidth / len(stretchWidgetList)
lastExtraWidth = extraWidth + additionalWidth - (extraWidth * len(stretchWidgetList))
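                # illustrative arithmetic: 30 spare pixels over 4 stretch
                # widgets gives each 30/4 = 7 (integer division) and the last
                # one 7 + (30 - 4*7) = 9, so no pixels are lost to rounding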
# draw widgets with minimum or minimum plus extra size
x = screenRectangle[0]
dx = 0
if (self.aligned): Blender.Draw.BeginAlign()
for widget in self.widgetList:
dx = widget.getSize().getMinimumSize()[0]
if (widget in stretchWidgetList):
if (widget is stretchWidgetList[-1]):
dx += lastExtraWidth
else:
dx += extraWidth
widget.draw([x, screenRectangle[1], x+dx, screenRectangle[3]])
x += dx
if (self.aligned): Blender.Draw.EndAlign()
return
def _addWidgetSize(self, widget):
"""Adds size of a widget but does not notify parent.
"""
wMinSize = widget.getSize().getMinimumSize()
wMaxSize = widget.getSize().getMaximumSize()
wPrefSize = widget.getSize().getPreferredSize()
minSize = self.getSize().getMinimumSize()
maxSize = self.getSize().getMaximumSize()
prefSize = self.getSize().getPreferredSize()
# add in x direction
minSize[0] += wMinSize[0]
maxSize[0] += wMaxSize[0]
if (prefSize[0] < Size.INFINITY):
if (wPrefSize[0] < Size.INFINITY):
prefSize[0] += wPrefSize[0]
else:
prefSize[0] = Size.INFINITY
# maximum in y direction
if (wMinSize[1] > minSize[1]):
minSize[1] = wMinSize[1]
if (wPrefSize[1] > prefSize[1]):
prefSize[1] = wPrefSize[1]
if (wMaxSize[1] > maxSize[1]):
maxSize[1] = wMaxSize[1]
self.size = Size(prefSize, minSize, maxSize)
return
class VerticalLayout(DirectionalLayout):
"""Widget that manages vertically stacked child widgets.
"""
def __init__(self, parent, aligned = False):
"""Constructor.
"""
DirectionalLayout.__init__(self, parent)
self.aligned = aligned
return
def draw(self, screenRectangle):
# split height for the child widgets
minimumSize = self.getSize().getMinimumSize()
height = screenRectangle[3]- screenRectangle[1]
additionalHeight = height - minimumSize[1]
stretchWidgetList = []
extraHeight = 0
lastExtraHeight = 0
# get widgets with unlimited preferred height
if (additionalHeight > 0):
stretchWidgetList = [w for w in self.widgetList if w.getSize().getPreferredSize()[1] >= Size.INFINITY]
if (len(stretchWidgetList) > 0):
# give equal extra height to widgets with unlimited preferred height
extraHeight = additionalHeight / len(stretchWidgetList)
lastExtraHeight = extraHeight + additionalHeight - (extraHeight * len(stretchWidgetList))
# draw widgets with minimum or minimum plus extra size
y = screenRectangle[3]
dy = 0
if (self.aligned): Blender.Draw.BeginAlign()
for widget in self.widgetList:
dy = widget.getSize().getMinimumSize()[1]
if (widget in stretchWidgetList):
if (widget is stretchWidgetList[-1]):
dy += lastExtraHeight
else:
dy += extraHeight
widget.draw([screenRectangle[0], y-dy, screenRectangle[2], y])
y -= dy
if (self.aligned): Blender.Draw.EndAlign()
return
def _addWidgetSize(self, widget):
"""Adds size of a widget but does not notify parent.
"""
wMinSize = widget.getSize().getMinimumSize()
wMaxSize = widget.getSize().getMaximumSize()
wPrefSize = widget.getSize().getPreferredSize()
minSize = self.getSize().getMinimumSize()
maxSize = self.getSize().getMaximumSize()
prefSize = self.getSize().getPreferredSize()
# add in y direction
minSize[1] += wMinSize[1]
maxSize[1] += wMaxSize[1]
if (prefSize[1] < Size.INFINITY):
if (wPrefSize[1] < Size.INFINITY):
prefSize[1] += wPrefSize[1]
else:
prefSize[1] = Size.INFINITY
# maximum in x direction
if (wMinSize[0] > minSize[0]):
minSize[0] = wMinSize[0]
if (wPrefSize[0] > prefSize[0]):
prefSize[0] = wPrefSize[0]
if (wMaxSize[0] > maxSize[0]):
maxSize[0] = wMaxSize[0]
self.size = Size(prefSize, minSize, maxSize)
return
class AlternativesLayout(Widget):
"""Displays one widget out of a given set of alternatives.
"""
def __init__(self, parent):
"""Constructor.
"""
Widget.__init__(self, parent, Size())
self.widgetList = []
self.current = None
return
def getCurrent(self):
"""Returns name of current choosen widget.
"""
return self.current
def setCurrent(self, widget):
"""Sets current active widget to alternative called name.
@param widget A previously added widget or <code>None</code>.
"""
if widget is None:
self.current = None
self.size = Size()
self.parent.resize()
elif widget in self.widgetList:
self.current = widget
self.size = widget.getSize()
self.parent.resize()
return
def removeAll(self):
self.setCurrent(None)
self.widgetList = []
return
def draw(self, screenRectangle):
if self.current:
self.current.draw(screenRectangle)
return
def eventFilter(self, event, value):
if self.current:
self.current.eventFilter(event, value)
return
def _addWidget(self, widget):
"""Adds a child widget to this layout.
        The widget becomes one of the selectable alternatives; it is not
        displayed until chosen via <code>setCurrent()</code>. This method
        gets called by the constructor of the child widget.
"""
self.widgetList.append(widget)
return
def _removeWidget(self, widget):
"""Removes a child widget from this layout.
This method gets called by the <code>removeFromParent()</code>
method of the corresponding child widget.
"""
if widget in self.widgetList:
if (widget == self.current):
self.setCurrent(None)
self.widgetList.remove(widget)
return
class WidgetListLayout(Widget):
"""Displays a list of vertically stacked widgets using a scrollbar if necessary.
"""
def __init__(self, parent, size, scrollbarWidth=20):
"""Constructor.
@param size Minimum size should exceed 3*scrollbarWidth in both directions
"""
Widget.__init__(self, parent, size)
# mousewheel scrolling
self.listRect = [0, 0, 0, 0]
# list of child widgets
self.widgetList = []
# list of current displayed widgets
self.visibleList = []
        # scroll model shared with the scrollbar; the Mediator below forwards
        # button registration to this widget's parent
self.boundedRange = BoundedRangeModel()
self.scrollbarWidth = scrollbarWidth
self.scrollbar = VerticalScrollbar(WidgetListLayout.Mediator(self), Size([self.scrollbarWidth, self.scrollbarWidth]), self.boundedRange)
# mousewheel scrolling
self.mouseFocusX = 0
self.mouseFocusY = 0
# unused widget space
self.remainingHeight = 0
self.autoScroll = False
return
def setAutoScroll(self, autoScroll=True):
"""Scroll to newly added widgets.
"""
self.autoScroll = autoScroll
return
def _addWidget(self, widget):
self.widgetList.append(widget)
self.boundedRange.setMaximum(self.boundedRange.getMaximum() + 1)
if self.autoScroll:
# scroll into visible area
# Avoid call to Blender.Draw.Draw() to get the current
# scrollbar extend, as widget may be disabled
if( widget.getSize().getMinimumSize()[1] > self.remainingHeight):
self.boundedRange.setValue(self.boundedRange.getMaximum() - self.boundedRange.getExtend())
return
def _removeWidget(self, widget):
if widget in self.widgetList:
if widget in self.visibleList:
self.visibleList.remove(widget)
self.widgetList.remove(widget)
self.boundedRange.setMaximum(self.boundedRange.getMaximum() - 1)
return
    def removeAll(self):
        self.widgetList = []
        # reset the scroll model in place so the scrollbar keeps a valid reference
        self.boundedRange.setValue(0)
        self.boundedRange.setMaximum(0)
        return
def draw(self, rect):
self.listRect = [rect[0] + 2, rect[1] + 2, rect[2] - self.scrollbarWidth - 2, rect[3] - 2]
remainingHeight = self.listRect[3] - self.listRect[1]
# Box
glColor3f(0.0, 0.0, 0.0)
glBegin(GL_LINE_STRIP)
glVertex2i(rect[0], rect[3])
glVertex2i(rect[2], rect[3])
glVertex2i(rect[2], rect[1])
glVertex2i(rect[0], rect[1])
glVertex2i(rect[0], rect[3])
glEnd()
# Widgets
self.visibleList = []
if len(self.widgetList):
listIndex = self.boundedRange.getValue()
widgetRect = self.listRect[:]
while ((listIndex < len(self.widgetList)) \
and (remainingHeight >= self.widgetList[listIndex].getSize().getMinimumSize()[1])):
widgetRect[3] = self.listRect[1] + remainingHeight
remainingHeight -= self.widgetList[listIndex].getSize().getMinimumSize()[1]
widgetRect[1] = self.listRect[1] + remainingHeight
self.widgetList[listIndex].draw(widgetRect)
self.visibleList.append(self.widgetList[listIndex])
listIndex += 1
self.boundedRange.setExtend(len(self.visibleList), 1)
# Scrollbar
self.scrollbar.draw([rect[2]-self.scrollbarWidth-1, rect[1]+1, rect[2]-1, rect[3]-1])
self.remainingHeight = remainingHeight
return
def eventFilter(self, event, value):
for widget in self.visibleList:
widget.eventFilter(event, value)
# mousewheel scrolling
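        # MOUSEX/MOUSEY values arrive in window coordinates; subtracting the
        # scissor rectangle origin converts them into the local space of listRect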
if (event == Blender.Draw.MOUSEX):
mousePositionX = value - ScreenManager.getSingleton().getScissorRectangle()[0]
if (mousePositionX >= self.listRect[0]) and (mousePositionX <= self.listRect[2]):
self.mouseFocusX = 1
else:
self.mouseFocusX = 0
elif (event == Blender.Draw.MOUSEY):
mousePositionY = value - ScreenManager.getSingleton().getScissorRectangle()[1]
if (mousePositionY >= self.listRect[1]) and (mousePositionY <= self.listRect[3]):
self.mouseFocusY = 1
else:
self.mouseFocusY = 0
elif (event == Blender.Draw.WHEELUPMOUSE) \
and self.mouseFocusX and self.mouseFocusY:
self.scrollbar._dec(1)
elif (event == Blender.Draw.WHEELDOWNMOUSE) \
and self.mouseFocusX and self.mouseFocusY:
self.scrollbar._inc(1)
# scrollbar
self.scrollbar.eventFilter(event, value)
return
class Mediator(Widget):
def __init__(self, parent):
self.parent = parent
return
def resize(self, size=None):
self.parent.resize()
return
def _addWidget(self, widget):
return
def _removeWidget(self, widget):
return
def _addButtonAction(self, action):
return self.parent._addButtonAction(action)
def _removeButtonAction(self, eventNumber):
self.parent._removeButtonAction(eventNumber)
return
class AddWidgetListLayout(WidgetListLayout):
"""WidgetList with an additional button in the last row.
The first widget added to this layout is used as add-button. The
    add-button is not passed to the constructor, so that the button action
    can hold a reference to the already constructed parent layout.
"""
def __init__(self, parent, size, scrollbarWidth=20):
"""Constructor.
"""
WidgetListLayout.__init__(self, parent, size, scrollbarWidth)
# additional button
self.addButton = None
return
    def removeAll(self):
        """Remove all widgets but the add-button.
        """
        self.widgetList = []
        # reset the scroll model in place; the maximum stays 1 for the add-button
        self.boundedRange.setValue(0)
        self.boundedRange.setMaximum(1)
        return
def draw(self, rect):
self.listRect = [rect[0] + 2, rect[1] + 2, rect[2] - self.scrollbarWidth - 2, rect[3] - 2]
self.visibleList = []
remainingHeight = self.listRect[3] - self.listRect[1]
widgetRect = self.listRect[:]
if len(self.widgetList):
listIndex = self.boundedRange.getValue()
while ((listIndex < len(self.widgetList)) \
and (remainingHeight >= self.widgetList[listIndex].getSize().getMinimumSize()[1])):
widgetRect[3] = self.listRect[1] + remainingHeight
remainingHeight -= self.widgetList[listIndex].getSize().getMinimumSize()[1]
widgetRect[1] = self.listRect[1] + remainingHeight
self.widgetList[listIndex].draw(widgetRect)
self.visibleList.append(self.widgetList[listIndex])
listIndex += 1
self.boundedRange.setExtend(len(self.visibleList), 1)
# add button
if remainingHeight >= self.addButton.getSize().getMinimumSize()[1]:
# draw button
widgetRect[3] = self.listRect[1] + remainingHeight
remainingHeight -= self.addButton.getSize().getMinimumSize()[1]
widgetRect[1] = self.listRect[1] + remainingHeight
widgetRect[2] = widgetRect[0] + self.addButton.getSize().getMinimumSize()[0]
self.addButton.draw(widgetRect)
self.boundedRange.setExtend(self.boundedRange.getExtend() + 1, 1)
# Scrollbar
self.scrollbar.draw([rect[2]-self.scrollbarWidth-1, rect[1]+1, rect[2]-1, rect[3]-1])
self.remainingHeight = remainingHeight
return
def _addWidget(self, widget):
if self.addButton:
self.widgetList.append(widget)
self.boundedRange.setMaximum(self.boundedRange.getMaximum() + 1)
# scroll into visible area
# Avoid call to Blender.Draw.Draw() to get the current
# scrollbar extend, as widget may be disabled
if self.autoScroll:
if((widget.getSize().getMinimumSize()[1] + self.addButton.getSize().getMinimumSize()[1]) > self.remainingHeight):
self.boundedRange.setValue(self.boundedRange.getMaximum() - self.boundedRange.getExtend())
else:
self.addButton = widget
self.boundedRange.setMaximum(self.boundedRange.getMaximum() + 1)
return
class LogView(Decorator, View):
"""Shows the log messages.
"""
_COLOR = {Log.INFO:[0.0, 0.0, 0.0], Log.WARNING:[1.0, 1.0, 0.0], Log.ERROR:[1.0, 0.0, 0.0]}
def __init__(self, parent, size, scrollbarWidth=20, viewPrevious=True):
Decorator.__init__(self, parent)
WidgetListLayout(self, size, scrollbarWidth)
View.__init__(self, Log.getSingleton())
self.labelSize = Size([self.getSize().getMinimumSize()[0], LabelView._HEIGHTDICT['normal'] + 2])
# last considered log message
self.iMessages = 0
for entry in Log.getSingleton().getMessageList():
if viewPrevious:
self._addLogEntry(entry)
self.iMessages += 1
# first line to display
if viewPrevious:
self.firstLine = 0
else:
self.firstLine = self.iMessages
self.childWidget.setAutoScroll(True)
return
def update(self):
if (self.iMessages < len(Log.getSingleton().getMessageList())):
for entry in Log.getSingleton().getMessageList()[self.iMessages:]:
self._addLogEntry(entry)
self.iMessages += 1
Blender.Draw.Draw()
return
def _addLogEntry(self, entry):
LabelView(self.childWidget, LabelModel(entry[1], color=LogView._COLOR[entry[0]]), size=self.labelSize)
return
class Screen:
"""Represents the complete script window.
    It handles drawing and events and can consist of several user
    interface components. A screen has a single child widget.
"""
def __init__(self):
"""Constructor.
"""
# buttonHandler event number management
self.nButtonEvent = 0
# buttonEventDict key: iButtonEvent, value: Action
self.buttonEventDict = {}
# root widget of the screen
self.widget = None
Widget(self)
# scissor rectangle
self.scissorRectangle = [0, 0, 0, 0]
return
def activate(self):
"""Makes this the current active screen.
This method registers itself at the ScreenManager.
"""
ScreenManager.getSingleton().activate(self)
return
def deactivate(self):
"""Deactivates this screen.
"""
ScreenManager.getSingleton().deactivate(self)
return
def resize(self):
"""Resize notification from child widget.
"""
Blender.Draw.Redraw(1)
return
def getScissorRectangle(self):
return self.scissorRectangle
def _addWidget(self, widget):
"""Adds a child widget.
@param widget Child widget to add.
"""
if self.widget:
self.widget.removeFromParent()
self.widget = widget
return
def _removeWidget(self, widget):
"""Removes a child widget.
@param widget Child widget to remove.
"""
if (self.widget == widget):
self.widget = None
Widget(self)
return
def _addButtonAction(self, action):
"""Registers an action for a button event.
@param action Action to execute on receive of the returned button event number.
@return Event number to use for the button that corresponds to that action.
"""
# workaround for Blender 2.37 event 8 bug:
shiftEvents = 100
# get a free event number
if (len(self.buttonEventDict) == self.nButtonEvent):
self.nButtonEvent += 1
eventNumber = self.nButtonEvent + shiftEvents
else:
eventNumber = [(x+1+shiftEvents) for x in range(self.nButtonEvent) if (x+1+shiftEvents) not in self.buttonEventDict.keys()][0]
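        # e.g. the first ever registration yields event number 101, the next
        # 102; if 101 is later freed, the comprehension above re-issues it first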
# assign action to that event
self.buttonEventDict[eventNumber] = action
return eventNumber
def _removeButtonAction(self, eventNumber):
"""Action for the given event number will no longer be called.
@param eventNumber Event number for the action.
"""
if self.buttonEventDict.has_key(eventNumber):
del self.buttonEventDict[eventNumber]
return
# callbacks for Blender.Draw.Register
def _draw(self):
"""Draws the screen.
Callback function for Blender.Draw.Register
"""
# clear background
theme = Blender.Window.Theme.Get()[0]
bgColor = [color/255.0 for color in theme.get('buts').back]
glClearColor(*bgColor)
glClear(GL_COLOR_BUFFER_BIT)
# scissor box: [lower-left-x, lower-left-y, width, height]
scissorBox = Blender.BGL.Buffer(GL_INT, 4)
Blender.BGL.glGetIntegerv(Blender.BGL.GL_SCISSOR_BOX, scissorBox)
self.scissorRectangle = [scissorBox[0], scissorBox[1], scissorBox[0] + scissorBox[2], scissorBox[1] + scissorBox[3]]
# size of the script window
size = list(Blender.Window.GetAreaSize())
minimumSize = self.widget.getSize().getMinimumSize()
if size[0] < minimumSize[0]:
size[0] = minimumSize[0]
if size[1] < minimumSize[1]:
size[1] = minimumSize[1]
screenRect = [0, 0, size[0]-1, size[1]-1]
# draw widgets
self.widget.draw(screenRect)
return
def _eventHandler(self, event, value):
"""Handles keyboard and mouse input events.
Callback function for Blender.Draw.Register
"""
self.widget.eventFilter(event, value)
return
def _buttonHandler(self, event):
"""Handles draw button events.
Callback function for Blender.Draw.Register
"""
if self.buttonEventDict.has_key(event):
self.buttonEventDict[event].execute()
return
class ScreenManager(Singleton):
"""Manages screens.
"""
def __init__(self):
Singleton.__init__(self)
# current active screen is on top
self.screenStack = []
return
def activate(self, screen):
"""Activates a screen.
This method calls Blender.Draw.Register to register a screen to
be responsible for windowing.
"""
self.screenStack.append(screen)
Blender.Draw.Register(screen._draw, screen._eventHandler, screen._buttonHandler)
Blender.Draw.Draw()
return
def deactivate(self, screen):
"""Deactivates a screen.
If the screen is the current displayed screen, the next screen on the stack
of activated screens will be reactivated. If there is no screen left, an empty
screen will be displayed.
"""
if screen in self.screenStack:
position = self.screenStack.index(screen)
self.screenStack.remove(screen)
if (position == len(self.screenStack)):
# screen was current active
if len(self.screenStack):
screen = self.screenStack.pop()
self.activate(screen)
else:
# empty screen
Blender.Draw.Register()
Blender.Draw.Draw()
return
def getScissorRectangle(self):
if len(self.screenStack):
scissorRectangle = self.screenStack[-1].getScissorRectangle()
else:
scissorRectangle = [0, 0, 0, 0]
return scissorRectangle
|
svdwulp/da-programming-1
|
refs/heads/master
|
game/blackjack.py
|
1
|
"""Blackjack
Multiplayer version of a simplified blackjack game.
Rules:
- cards are dealt from a single, shuffled deck of 52 standard 'playing cards'
- when the deck runs out, a fresh deck is shuffled and used
  (note: this means a player can receive the same card twice,
  e.g. the ace of spades, in a single round, but it still allows for
  some level of card counting)
- card values:
- number cards have face value (2=2, 3=3, etc.)
- face cards all count for 10
- aces count for either 1 or 11, whichever is more favorable for the player
- colors (hearts, diamonds, spades and clubs) are irrelevant while
calculating hand value
- the game is played for a preset number of rounds
- each player will play for an individual win, loss or draw during each
round of the game and:
- gain 1 point for each round won
- lose 1 point for each round lost
  - neither gain nor lose a point for each round resulting in a draw
- each round runs as follows:
  - each player is dealt a start hand of two cards, visible to everyone
- the dealer is handed a start hand of two cards of which
only the first is visible
  - in turn, each player plays their hand:
- the aim for the player is to reach a total hand value of 21,
which results in an immediate win of the round
    - when a player's hand value exceeds 21 (bust), the player immediately
loses the round
    - while the player's hand value is below 21, the player can choose to
'hit' or 'stand':
- when a player chooses to 'hit', another card is added to their hand
- when a player chooses to 'stand', they receive no more cards
- when all players have played their turns, if any player has chosen to
'stand', the dealer will play their hand:
- the dealer will start by revealing their second card
  - if the dealer's hand value is below 21, the dealer will repeatedly add
    cards to their hand until their hand value exceeds 16
  - if the dealer's hand exceeds 21 (bust), all 'standing' players win
  - if the dealer did not bust their hand:
    - a player wins if their hand value exceeds the dealer's
    - a player draws if their hand value equals the dealer's
    - a player loses if their hand value is below the dealer's
"""
import random
card_symbols = (
[str(i) for i in range(2, 11)] +
list("JQKA")
)
card_values = dict(
zip(
card_symbols,
[i for i in range(2, 11)] + [10, 10, 10, 11]
)
)
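# For reference: '2'..'10' map to their face values, 'J'/'Q'/'K' to 10 and
# 'A' to 11 (demoted to 1 by Hand.calculate_value when the hand would bust).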
card_colors = list("hdsc")
class Hand(list):
def __init__(self, *args):
list.__init__(self, *args)
def calculate_value(self):
values = []
for symbol, color in self:
values.append(card_values[symbol])
while sum(values) > 21 and 11 in values:
values[values.index(11)] = 1
return sum(values)
@classmethod
def single_card_str(cls, card):
return "{}{}".format(card[0], card[1])
def __str__(self):
return "[{}]".format(
", ".join(Hand.single_card_str(card) for card in self)
)
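# A quick illustration of the ace rule (hypothetical hands):
#   Hand([("A", "h"), ("A", "s"), ("9", "d")]).calculate_value()  # -> 21
#   (11 + 11 + 9 would bust, so one ace is demoted: 1 + 11 + 9 = 21)
#   Hand([("K", "c"), ("A", "h")]).calculate_value()              # -> 21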
class Player(object):
def __init__(self, name, callback):
self.name = name
self.hand = Hand()
self.points = 0
self.callback = callback
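        # the callback is invoked as callback(event, payload); for "decision"
        # events a truthy return value means 'hit', anything falsy means 'stand'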
def decision(self, hand, dealer):
try:
decision = bool(self.callback("decision", (hand, dealer)))
        except Exception:
            # a misbehaving callback defaults to 'stand'
            decision = False
return decision
def card_seen(self, player, card):
self.callback("card_seen", (player, card))
    def message(self, msg):
        self.callback("message", (msg,))
    def result(self, result):
        self.callback("result", (result,))
def __str__(self):
return self.name
class Deck(list):
def __init__(self, *args):
list.__init__(self, *args)
self.add_deck()
def add_deck(self):
self.extend([
(value, color)
for value in card_values
for color in card_colors
])
random.shuffle(self)
def draw(self):
if len(self) == 0:
self.add_deck()
return self.pop()
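# Deck.draw() transparently shuffles in a fresh 52-card deck when it runs out,
# so the same card (e.g. the ace of spades) can appear twice in one round,
# exactly as the module docstring describes.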
class Dealer(object):
def __init__(self, players):
self.name = "Dealer"
self.deck = Deck()
self.players = players
self.hand = Hand()
def show_card(self, player, card):
for p in self.players:
p.card_seen(player.name, self.hand.single_card_str(card))
def draw(self, player, show_card=True):
card = self.deck.draw()
if show_card:
self.show_card(player, card)
return card
def emit_message(self, msg):
for player in self.players:
player.message(msg)
def reset_hands(self):
self.hand = Hand()
for player in self.players:
player.hand = Hand()
def play_round(self):
self.emit_message("Round starts")
self.reset_hands()
# deal initial hand to all players and self
for player in self.players:
player.hand.append(self.draw(player))
self.hand.append(self.draw(self))
for player in self.players:
player.hand.append(self.draw(player))
self.emit_message("{} got {}".format(player.name, player.hand))
# do not show second dealer card
self.hand.append(self.draw(self, False))
self.emit_message(
"Dealer got {}".format(
self.hand.single_card_str(self.hand[0])
)
)
if self.play_all_players():
self.play_dealer_hand()
else:
# reveal second dealer card when not playing
self.show_card(self, self.hand[1])
self.emit_message("Round ends")
def play_all_players(self):
# play all players hands
players_left = False
for player in self.players:
self.play_player(player)
if player.hand.calculate_value() < 21:
players_left = True
return players_left
def play_dealer_hand(self):
self.emit_message("Dealer shows hand: {}".format(self.hand))
self.show_card(self, self.hand[1])
hand_value = self.hand.calculate_value()
self.emit_message("Dealer hand value = {}".format(hand_value))
while hand_value < 17:
self.hand.append(self.draw(self))
self.emit_message("Dealer hit, hand now: {}".format(self.hand))
hand_value = self.hand.calculate_value()
self.emit_message("Dealer hand value = {}".format(hand_value))
if hand_value <= 21:
self.emit_message("Dealer stands")
for player in self.players:
player_hand_value = player.hand.calculate_value()
if player_hand_value >= 21:
continue
if player_hand_value < hand_value:
self.do_player_lose(
player,
"dealer {} vs player {}".format(
hand_value, player_hand_value
)
)
elif player_hand_value > hand_value:
self.do_player_win(
player,
"dealer {} vs player {}".format(
hand_value, player_hand_value
)
)
else:
self.do_player_draw(
player,
"dealer {} vs player {}".format(
hand_value, player_hand_value
)
)
else:
self.emit_message("Dealer busted")
for player in self.players:
player_hand_value = player.hand.calculate_value()
if player_hand_value >= 21:
continue
self.do_player_win(player, "dealer busted")
def do_player_win(self, player, reason=None):
player.points += 1
player.result("win")
if reason:
self.emit_message("{} wins (reason: {})".format(player, reason))
else:
self.emit_message("{} wins".format(player))
def do_player_lose(self, player, reason=None):
player.points -= 1
player.result("loss")
if reason:
self.emit_message("{} loses (reason: {})".format(player, reason))
else:
self.emit_message("{} loses".format(player))
def do_player_draw(self, player, reason=None):
# player.points += 0
player.result("draw")
if reason:
self.emit_message("{} draws (reason: {})".format(player, reason))
else:
self.emit_message("{} draws".format(player))
def play_player(self, player):
hand_value = player.hand.calculate_value()
if hand_value == 21:
self.do_player_win(player, "hit 21")
return
decision = player.decision(player.hand.copy(), self.hand[0])
if not decision:
self.emit_message("{} decided to stand".format(player))
while decision:
player.hand.append(self.draw(player))
self.emit_message(
"{} decided to hit, hand now: {}".format(
player, player.hand
)
)
hand_value = player.hand.calculate_value()
if hand_value == 21:
self.do_player_win(player, "hit 21")
return
elif hand_value > 21:
self.do_player_lose(player, "busted")
return
decision = player.decision(player.hand.copy(), self.hand[0])
if not decision:
self.emit_message("{} decided to stand".format(player))
if hand_value == 21:
self.do_player_win(player, "hit 21")
elif hand_value > 21:
self.do_player_lose(player, "busted")
|
sambyers/o365_fmc
|
refs/heads/master
|
.venv/lib/python3.6/site-packages/pip/commands/search.py
|
343
|
from __future__ import absolute_import
import logging
import sys
import textwrap
from pip.basecommand import Command, SUCCESS
from pip.compat import OrderedDict
from pip.download import PipXmlrpcTransport
from pip.models import PyPI
from pip.utils import get_terminal_size
from pip.utils.logging import indent_log
from pip.exceptions import CommandError
from pip.status_codes import NO_MATCHES_FOUND
from pip._vendor.packaging.version import parse as parse_version
from pip._vendor import pkg_resources
from pip._vendor.six.moves import xmlrpc_client
logger = logging.getLogger(__name__)
class SearchCommand(Command):
"""Search for PyPI packages whose name or summary contains <query>."""
name = 'search'
usage = """
%prog [options] <query>"""
summary = 'Search PyPI for packages.'
def __init__(self, *args, **kw):
super(SearchCommand, self).__init__(*args, **kw)
self.cmd_opts.add_option(
'-i', '--index',
dest='index',
metavar='URL',
default=PyPI.pypi_url,
help='Base URL of Python Package Index (default %default)')
self.parser.insert_option_group(0, self.cmd_opts)
def run(self, options, args):
if not args:
raise CommandError('Missing required argument (search query).')
query = args
pypi_hits = self.search(query, options)
hits = transform_hits(pypi_hits)
terminal_width = None
if sys.stdout.isatty():
terminal_width = get_terminal_size()[0]
print_results(hits, terminal_width=terminal_width)
if pypi_hits:
return SUCCESS
return NO_MATCHES_FOUND
def search(self, query, options):
index_url = options.index
with self._build_session(options) as session:
transport = PipXmlrpcTransport(index_url, session)
pypi = xmlrpc_client.ServerProxy(index_url, transport)
hits = pypi.search({'name': query, 'summary': query}, 'or')
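            # e.g. `pip search requests` reaches this point with query=['requests'],
            # asking PyPI to match name or summary against the query terms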
return hits
def transform_hits(hits):
"""
The list from pypi is really a list of versions. We want a list of
packages with the list of versions stored inline. This converts the
list from pypi into one we can use.
"""
packages = OrderedDict()
for hit in hits:
name = hit['name']
summary = hit['summary']
version = hit['version']
if name not in packages.keys():
packages[name] = {
'name': name,
'summary': summary,
'versions': [version],
}
else:
packages[name]['versions'].append(version)
# if this is the highest version, replace summary and score
if version == highest_version(packages[name]['versions']):
packages[name]['summary'] = summary
return list(packages.values())
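# Illustrative example (made-up hits): two results for 'foo' with versions
# '1.0' and '2.0' collapse into a single entry whose summary comes from 2.0:
#   {'name': 'foo', 'summary': <summary of 2.0>, 'versions': ['1.0', '2.0']}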
def print_results(hits, name_column_width=None, terminal_width=None):
if not hits:
return
if name_column_width is None:
name_column_width = max([
len(hit['name']) + len(hit.get('versions', ['-'])[-1])
for hit in hits
]) + 4
installed_packages = [p.project_name for p in pkg_resources.working_set]
for hit in hits:
name = hit['name']
summary = hit['summary'] or ''
version = hit.get('versions', ['-'])[-1]
if terminal_width is not None:
target_width = terminal_width - name_column_width - 5
if target_width > 10:
# wrap and indent summary to fit terminal
summary = textwrap.wrap(summary, target_width)
summary = ('\n' + ' ' * (name_column_width + 3)).join(summary)
line = '%-*s - %s' % (name_column_width,
'%s (%s)' % (name, version), summary)
try:
logger.info(line)
if name in installed_packages:
dist = pkg_resources.get_distribution(name)
with indent_log():
latest = highest_version(hit['versions'])
if dist.version == latest:
logger.info('INSTALLED: %s (latest)', dist.version)
else:
logger.info('INSTALLED: %s', dist.version)
logger.info('LATEST: %s', latest)
except UnicodeEncodeError:
pass
def highest_version(versions):
return max(versions, key=parse_version)
|
bplancher/odoo
|
refs/heads/9.0
|
openerp/addons/base/tests/test_ir_actions.py
|
40
|
import unittest
from openerp.exceptions import ValidationError
import openerp.tests.common as common
from openerp.tools import mute_logger
class TestServerActionsBase(common.TransactionCase):
def setUp(self):
super(TestServerActionsBase, self).setUp()
cr, uid = self.cr, self.uid
# Models
self.ir_actions_server = self.registry('ir.actions.server')
self.ir_actions_client = self.registry('ir.actions.client')
self.ir_values = self.registry('ir.values')
self.ir_model = self.registry('ir.model')
self.ir_model_fields = self.registry('ir.model.fields')
self.res_partner = self.registry('res.partner')
self.res_country = self.registry('res.country')
# Data on which we will run the server action
self.test_country_id = self.res_country.create(cr, uid, {
'name': 'TestingCountry',
'code': 'TY',
'address_format': 'SuperFormat',
})
self.test_country = self.res_country.browse(cr, uid, self.test_country_id)
self.test_partner_id = self.res_partner.create(cr, uid, {
'name': 'TestingPartner',
'city': 'OrigCity',
'country_id': self.test_country_id,
})
self.test_partner = self.res_partner.browse(cr, uid, self.test_partner_id)
self.context = {
'active_id': self.test_partner_id,
'active_model': 'res.partner',
}
# Model data
self.res_partner_model_id = self.ir_model.search(cr, uid, [('model', '=', 'res.partner')])[0]
self.res_partner_name_field_id = self.ir_model_fields.search(cr, uid, [('model', '=', 'res.partner'), ('name', '=', 'name')])[0]
self.res_partner_city_field_id = self.ir_model_fields.search(cr, uid, [('model', '=', 'res.partner'), ('name', '=', 'city')])[0]
self.res_partner_country_field_id = self.ir_model_fields.search(cr, uid, [('model', '=', 'res.partner'), ('name', '=', 'country_id')])[0]
self.res_partner_parent_field_id = self.ir_model_fields.search(cr, uid, [('model', '=', 'res.partner'), ('name', '=', 'parent_id')])[0]
self.res_country_model_id = self.ir_model.search(cr, uid, [('model', '=', 'res.country')])[0]
self.res_country_name_field_id = self.ir_model_fields.search(cr, uid, [('model', '=', 'res.country'), ('name', '=', 'name')])[0]
self.res_country_code_field_id = self.ir_model_fields.search(cr, uid, [('model', '=', 'res.country'), ('name', '=', 'code')])[0]
        # create a server action to run on res.partner
self.act_id = self.ir_actions_server.create(cr, uid, {
'name': 'TestAction',
'condition': 'True',
'model_id': self.res_partner_model_id,
'state': 'code',
'code': 'obj.write({"comment": "MyComment"})',
})
class TestServerActions(TestServerActionsBase):
def test_00_action(self):
cr, uid = self.cr, self.uid
# Do: eval 'True' condition
self.ir_actions_server.run(cr, uid, [self.act_id], self.context)
self.test_partner.refresh()
self.assertEqual(self.test_partner.comment, 'MyComment', 'ir_actions_server: invalid condition check')
self.test_partner.write({'comment': False})
        # Do: eval a False condition, which should be considered True (void = True)
self.ir_actions_server.write(cr, uid, [self.act_id], {'condition': False})
self.ir_actions_server.run(cr, uid, [self.act_id], self.context)
self.test_partner.refresh()
self.assertEqual(self.test_partner.comment, 'MyComment', 'ir_actions_server: invalid condition check')
# Do: create contextual action
self.ir_actions_server.create_action(cr, uid, [self.act_id])
# Test: ir_values created
ir_values_ids = self.ir_values.search(cr, uid, [('name', '=', 'Run TestAction')])
self.assertEqual(len(ir_values_ids), 1, 'ir_actions_server: create_action should have created an entry in ir_values')
ir_value = self.ir_values.browse(cr, uid, ir_values_ids[0])
self.assertEqual(ir_value.value, 'ir.actions.server,%s' % self.act_id, 'ir_actions_server: created ir_values should reference the server action')
self.assertEqual(ir_value.model, 'res.partner', 'ir_actions_server: created ir_values should be linked to the action base model')
# Do: remove contextual action
self.ir_actions_server.unlink_action(cr, uid, [self.act_id])
# Test: ir_values removed
ir_values_ids = self.ir_values.search(cr, uid, [('name', '=', 'Run TestAction')])
self.assertEqual(len(ir_values_ids), 0, 'ir_actions_server: unlink_action should remove the ir_values record')
def test_10_code(self):
cr, uid = self.cr, self.uid
self.ir_actions_server.write(cr, uid, self.act_id, {
'state': 'code',
'code': """partner_name = obj.name + '_code'
self.pool["res.partner"].create(cr, uid, {"name": partner_name}, context=context)
workflow"""
})
run_res = self.ir_actions_server.run(cr, uid, [self.act_id], context=self.context)
self.assertFalse(run_res, 'ir_actions_server: code server action correctly finished should return False')
pids = self.res_partner.search(cr, uid, [('name', 'ilike', 'TestingPartner_code')])
self.assertEqual(len(pids), 1, 'ir_actions_server: 1 new partner should have been created')
def test_20_trigger(self):
cr, uid = self.cr, self.uid
# Data: code server action (at this point code-based actions should work)
act_id2 = self.ir_actions_server.create(cr, uid, {
'name': 'TestAction2',
'type': 'ir.actions.server',
'condition': 'True',
'model_id': self.res_partner_model_id,
'state': 'code',
'code': 'obj.write({"comment": "MyComment"})',
})
act_id3 = self.ir_actions_server.create(cr, uid, {
'name': 'TestAction3',
'type': 'ir.actions.server',
'condition': 'True',
'model_id': self.res_country_model_id,
'state': 'code',
'code': 'obj.write({"code": "ZZ"})',
})
# Data: create workflows
partner_wf_id = self.registry('workflow').create(cr, uid, {
'name': 'TestWorkflow',
'osv': 'res.partner',
'on_create': True,
})
partner_act1_id = self.registry('workflow.activity').create(cr, uid, {
'name': 'PartnerStart',
'wkf_id': partner_wf_id,
'flow_start': True
})
partner_act2_id = self.registry('workflow.activity').create(cr, uid, {
'name': 'PartnerTwo',
'wkf_id': partner_wf_id,
'kind': 'function',
'action': 'True',
'action_id': act_id2,
})
partner_trs1_id = self.registry('workflow.transition').create(cr, uid, {
'signal': 'partner_trans',
'act_from': partner_act1_id,
'act_to': partner_act2_id
})
country_wf_id = self.registry('workflow').create(cr, uid, {
'name': 'TestWorkflow',
'osv': 'res.country',
'on_create': True,
})
country_act1_id = self.registry('workflow.activity').create(cr, uid, {
'name': 'CountryStart',
'wkf_id': country_wf_id,
'flow_start': True
})
country_act2_id = self.registry('workflow.activity').create(cr, uid, {
'name': 'CountryTwo',
'wkf_id': country_wf_id,
'kind': 'function',
'action': 'True',
'action_id': act_id3,
})
country_trs1_id = self.registry('workflow.transition').create(cr, uid, {
'signal': 'country_trans',
'act_from': country_act1_id,
'act_to': country_act2_id
})
# Data: re-create country and partner to benefit from the workflows
self.test_country_id = self.res_country.create(cr, uid, {
'name': 'TestingCountry2',
'code': 'T2',
})
self.test_country = self.res_country.browse(cr, uid, self.test_country_id)
self.test_partner_id = self.res_partner.create(cr, uid, {
'name': 'TestingPartner2',
'country_id': self.test_country_id,
})
self.test_partner = self.res_partner.browse(cr, uid, self.test_partner_id)
self.context = {
'active_id': self.test_partner_id,
'active_model': 'res.partner',
}
# Run the action on partner object itself ('base')
self.ir_actions_server.write(cr, uid, [self.act_id], {
'state': 'trigger',
'use_relational_model': 'base',
'wkf_model_id': self.res_partner_model_id,
'wkf_transition_id': partner_trs1_id,
})
self.ir_actions_server.run(cr, uid, [self.act_id], self.context)
self.test_partner.refresh()
self.assertEqual(self.test_partner.comment, 'MyComment', 'ir_actions_server: incorrect signal trigger')
# Run the action on related country object ('relational')
self.ir_actions_server.write(cr, uid, [self.act_id], {
'use_relational_model': 'relational',
'wkf_model_id': self.res_country_model_id,
'wkf_field_id': self.res_partner_country_field_id,
'wkf_transition_id': country_trs1_id,
})
self.ir_actions_server.run(cr, uid, [self.act_id], self.context)
self.test_country.refresh()
self.assertEqual(self.test_country.code, 'ZZ', 'ir_actions_server: incorrect signal trigger')
        # Clear workflow cache, otherwise openerp will try to create workflows even if they have been deleted
from openerp.workflow import clear_cache
clear_cache(cr, uid)
def test_30_client(self):
cr, uid = self.cr, self.uid
client_action_id = self.registry('ir.actions.client').create(cr, uid, {
'name': 'TestAction2',
'tag': 'Test',
})
self.ir_actions_server.write(cr, uid, [self.act_id], {
'state': 'client_action',
'action_id': client_action_id,
})
res = self.ir_actions_server.run(cr, uid, [self.act_id], context=self.context)
self.assertEqual(res['name'], 'TestAction2', 'ir_actions_server: incorrect return result for a client action')
def test_40_crud_create(self):
cr, uid = self.cr, self.uid
_city = 'TestCity'
_name = 'TestNew'
# Do: create a new record in the same model and link it
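        # (fields_lines below uses Odoo's x2many command tuples: (0, 0, vals)
        # creates a line, (6, 0, ids) replaces the whole set and [[5]] clears
        # all existing lines)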
self.ir_actions_server.write(cr, uid, [self.act_id], {
'state': 'object_create',
'use_create': 'new',
'link_new_record': True,
'link_field_id': self.res_partner_parent_field_id,
'fields_lines': [(0, 0, {'col1': self.res_partner_name_field_id, 'value': _name}),
(0, 0, {'col1': self.res_partner_city_field_id, 'value': _city})],
})
run_res = self.ir_actions_server.run(cr, uid, [self.act_id], context=self.context)
self.assertFalse(run_res, 'ir_actions_server: create record action correctly finished should return False')
# Test: new partner created
pids = self.res_partner.search(cr, uid, [('name', 'ilike', _name)])
self.assertEqual(len(pids), 1, 'ir_actions_server: TODO')
partner = self.res_partner.browse(cr, uid, pids[0])
self.assertEqual(partner.city, _city, 'ir_actions_server: TODO')
# Test: new partner linked
self.test_partner.refresh()
self.assertEqual(self.test_partner.parent_id.id, pids[0], 'ir_actions_server: TODO')
# Do: copy current record
self.ir_actions_server.write(cr, uid, [self.act_id], {'fields_lines': [[5]]})
self.ir_actions_server.write(cr, uid, [self.act_id], {
'state': 'object_create',
'use_create': 'copy_current',
'link_new_record': False,
'fields_lines': [(0, 0, {'col1': self.res_partner_name_field_id, 'value': 'TestCopyCurrent'}),
(0, 0, {'col1': self.res_partner_city_field_id, 'value': 'TestCity'})],
})
run_res = self.ir_actions_server.run(cr, uid, [self.act_id], context=self.context)
self.assertFalse(run_res, 'ir_actions_server: create record action correctly finished should return False')
# Test: new partner created
pids = self.res_partner.search(cr, uid, [('name', 'ilike', 'TestingPartner (copy)')]) # currently res_partner overrides default['name'] whatever its value
self.assertEqual(len(pids), 1, 'ir_actions_server: TODO')
partner = self.res_partner.browse(cr, uid, pids[0])
self.assertEqual(partner.city, 'TestCity', 'ir_actions_server: TODO')
self.assertEqual(partner.country_id.id, self.test_partner.country_id.id, 'ir_actions_server: TODO')
# Do: create a new record in another model
self.ir_actions_server.write(cr, uid, [self.act_id], {'fields_lines': [[5]]})
self.ir_actions_server.write(cr, uid, [self.act_id], {
'state': 'object_create',
'use_create': 'new_other',
'crud_model_id': self.res_country_model_id,
'link_new_record': False,
'fields_lines': [(0, 0, {'col1': self.res_country_name_field_id, 'value': 'obj.name', 'type': 'equation'}),
(0, 0, {'col1': self.res_country_code_field_id, 'value': 'obj.name[0:2]', 'type': 'equation'})],
})
run_res = self.ir_actions_server.run(cr, uid, [self.act_id], context=self.context)
self.assertFalse(run_res, 'ir_actions_server: create record action correctly finished should return False')
# Test: new country created
cids = self.res_country.search(cr, uid, [('name', 'ilike', 'TestingPartner')])
self.assertEqual(len(cids), 1, 'ir_actions_server: TODO')
country = self.res_country.browse(cr, uid, cids[0])
self.assertEqual(country.code, 'TE', 'ir_actions_server: TODO')
# Do: copy a record in another model
self.ir_actions_server.write(cr, uid, [self.act_id], {'fields_lines': [[5]]})
self.ir_actions_server.write(cr, uid, [self.act_id], {
'state': 'object_create',
'use_create': 'copy_other',
'crud_model_id': self.res_country_model_id,
'link_new_record': False,
'ref_object': 'res.country,%s' % self.test_country_id,
'fields_lines': [(0, 0, {'col1': self.res_country_name_field_id, 'value': 'NewCountry', 'type': 'value'}),
(0, 0, {'col1': self.res_country_code_field_id, 'value': 'NY', 'type': 'value'})],
})
run_res = self.ir_actions_server.run(cr, uid, [self.act_id], context=self.context)
self.assertFalse(run_res, 'ir_actions_server: create record action correctly finished should return False')
# Test: new country created
cids = self.res_country.search(cr, uid, [('name', 'ilike', 'NewCountry')])
self.assertEqual(len(cids), 1, 'ir_actions_server: TODO')
country = self.res_country.browse(cr, uid, cids[0])
self.assertEqual(country.code, 'NY', 'ir_actions_server: TODO')
self.assertEqual(country.address_format, 'SuperFormat', 'ir_actions_server: TODO')
def test_50_crud_write(self):
cr, uid = self.cr, self.uid
_name = 'TestNew'
# Do: create a new record in the same model and link it
self.ir_actions_server.write(cr, uid, [self.act_id], {
'state': 'object_write',
'use_write': 'current',
'fields_lines': [(0, 0, {'col1': self.res_partner_name_field_id, 'value': _name})],
})
run_res = self.ir_actions_server.run(cr, uid, [self.act_id], context=self.context)
self.assertFalse(run_res, 'ir_actions_server: create record action correctly finished should return False')
# Test: new partner created
pids = self.res_partner.search(cr, uid, [('name', 'ilike', _name)])
self.assertEqual(len(pids), 1, 'ir_actions_server: TODO')
partner = self.res_partner.browse(cr, uid, pids[0])
self.assertEqual(partner.city, 'OrigCity', 'ir_actions_server: TODO')
# Do: copy current record
self.ir_actions_server.write(cr, uid, [self.act_id], {'fields_lines': [[5]]})
self.ir_actions_server.write(cr, uid, [self.act_id], {
'use_write': 'other',
'crud_model_id': self.res_country_model_id,
'ref_object': 'res.country,%s' % self.test_country_id,
'fields_lines': [(0, 0, {'col1': self.res_country_name_field_id, 'value': 'obj.name', 'type': 'equation'})],
})
run_res = self.ir_actions_server.run(cr, uid, [self.act_id], context=self.context)
self.assertFalse(run_res, 'ir_actions_server: create record action correctly finished should return False')
# Test: new country created
cids = self.res_country.search(cr, uid, [('name', 'ilike', 'TestNew')])
self.assertEqual(len(cids), 1, 'ir_actions_server: TODO')
# Do: copy a record in another model
self.ir_actions_server.write(cr, uid, [self.act_id], {'fields_lines': [[5]]})
self.ir_actions_server.write(cr, uid, [self.act_id], {
'use_write': 'expression',
'crud_model_id': self.res_country_model_id,
'write_expression': 'object.country_id',
'fields_lines': [(0, 0, {'col1': self.res_country_name_field_id, 'value': 'NewCountry', 'type': 'value'})],
})
run_res = self.ir_actions_server.run(cr, uid, [self.act_id], context=self.context)
self.assertFalse(run_res, 'ir_actions_server: create record action correctly finished should return False')
# Test: new country created
cids = self.res_country.search(cr, uid, [('name', 'ilike', 'NewCountry')])
self.assertEqual(len(cids), 1, 'ir_actions_server: TODO')
@mute_logger('openerp.addons.base.ir.ir_model', 'openerp.models')
def test_60_multi(self):
cr, uid = self.cr, self.uid
# Data: 2 server actions that will be nested
act1_id = self.ir_actions_server.create(cr, uid, {
'name': 'Subaction1',
'sequence': 1,
'model_id': self.res_partner_model_id,
'state': 'code',
'code': 'action = {"type": "ir.actions.act_window"}',
})
act2_id = self.ir_actions_server.create(cr, uid, {
'name': 'Subaction2',
'sequence': 2,
'model_id': self.res_partner_model_id,
'state': 'object_create',
'use_create': 'copy_current',
})
act3_id = self.ir_actions_server.create(cr, uid, {
'name': 'Subaction3',
'sequence': 3,
'model_id': self.res_partner_model_id,
'state': 'code',
'code': 'action = {"type": "ir.actions.act_url"}',
})
self.ir_actions_server.write(cr, uid, [self.act_id], {
'state': 'multi',
'child_ids': [(6, 0, [act1_id, act2_id, act3_id])],
})
# Do: run the action
res = self.ir_actions_server.run(cr, uid, [self.act_id], context=self.context)
# Test: new partner created
pids = self.res_partner.search(cr, uid, [('name', 'ilike', 'TestingPartner (copy)')]) # currently res_partner overrides default['name'] whatever its value
self.assertEqual(len(pids), 1, 'ir_actions_server: TODO')
# Test: action returned
self.assertEqual(res.get('type'), 'ir.actions.act_url')
# Test loops
with self.assertRaises(ValidationError):
self.ir_actions_server.write(cr, uid, [self.act_id], {
'child_ids': [(6, 0, [self.act_id])]
})
if __name__ == '__main__':
unittest.main()
|
kvar/ansible
|
refs/heads/seas_master_2.9.5
|
lib/ansible/modules/network/fortios/fortios_log_syslogd3_setting.py
|
14
|
#!/usr/bin/python
from __future__ import (absolute_import, division, print_function)
# Copyright 2019 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
__metaclass__ = type
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: fortios_log_syslogd3_setting
short_description: Global settings for remote syslog server in Fortinet's FortiOS and FortiGate.
description:
- This module is able to configure a FortiGate or FortiOS (FOS) device by allowing the
user to set and modify log_syslogd3 feature and setting category.
      Examples include all parameters, and values need to be adjusted to datasources before usage.
Tested with FOS v6.0.5
version_added: "2.8"
author:
- Miguel Angel Munoz (@mamunozgonzalez)
- Nicolas Thomas (@thomnico)
notes:
- Requires fortiosapi library developed by Fortinet
- Run as a local_action in your playbook
requirements:
- fortiosapi>=0.9.8
options:
host:
description:
- FortiOS or FortiGate IP address.
type: str
required: false
username:
description:
- FortiOS or FortiGate username.
type: str
required: false
password:
description:
- FortiOS or FortiGate password.
type: str
default: ""
vdom:
description:
- Virtual domain, among those defined previously. A vdom is a
virtual instance of the FortiGate that can be configured and
used as a different unit.
type: str
default: root
https:
description:
- Indicates if the requests towards FortiGate must use HTTPS protocol.
type: bool
default: true
ssl_verify:
description:
            - Ensures the FortiGate certificate is verified by a proper CA.
type: bool
default: true
version_added: 2.9
log_syslogd3_setting:
description:
- Global settings for remote syslog server.
default: null
type: dict
suboptions:
certificate:
description:
- Certificate used to communicate with Syslog server. Source certificate.local.name.
type: str
custom_field_name:
description:
- Custom field name for CEF format logging.
type: list
suboptions:
custom:
description:
- Field custom name.
type: str
id:
description:
- Entry ID.
required: true
type: int
name:
description:
- Field name.
type: str
enc_algorithm:
description:
- Enable/disable reliable syslogging with TLS encryption.
type: str
choices:
- high-medium
- high
- low
- disable
facility:
description:
- Remote syslog facility.
type: str
choices:
- kernel
- user
- mail
- daemon
- auth
- syslog
- lpr
- news
- uucp
- cron
- authpriv
- ftp
- ntp
- audit
- alert
- clock
- local0
- local1
- local2
- local3
- local4
- local5
- local6
- local7
format:
description:
- Log format.
type: str
choices:
- default
- csv
- cef
mode:
description:
- Remote syslog logging over UDP/Reliable TCP.
type: str
choices:
- udp
- legacy-reliable
- reliable
port:
description:
- Server listen port.
type: int
server:
description:
- Address of remote syslog server.
type: str
source_ip:
description:
- Source IP address of syslog.
type: str
ssl_min_proto_version:
description:
- Minimum supported protocol version for SSL/TLS connections .
type: str
choices:
- default
- SSLv3
- TLSv1
- TLSv1-1
- TLSv1-2
status:
description:
- Enable/disable remote syslog logging.
type: str
choices:
- enable
- disable
'''
EXAMPLES = '''
- hosts: localhost
vars:
host: "192.168.122.40"
username: "admin"
password: ""
vdom: "root"
ssl_verify: "False"
tasks:
- name: Global settings for remote syslog server.
fortios_log_syslogd3_setting:
host: "{{ host }}"
username: "{{ username }}"
password: "{{ password }}"
vdom: "{{ vdom }}"
https: "False"
log_syslogd3_setting:
certificate: "<your_own_value> (source certificate.local.name)"
custom_field_name:
-
custom: "<your_own_value>"
id: "6"
name: "default_name_7"
enc_algorithm: "high-medium"
facility: "kernel"
format: "default"
mode: "udp"
port: "12"
server: "192.168.100.40"
source_ip: "84.230.14.43"
ssl_min_proto_version: "default"
status: "enable"
'''
RETURN = '''
build:
description: Build number of the fortigate image
returned: always
type: str
sample: '1547'
http_method:
description: Last method used to provision the content into FortiGate
returned: always
type: str
sample: 'PUT'
http_status:
description: Last result given by FortiGate on last operation applied
returned: always
type: str
sample: "200"
mkey:
description: Master key (id) used in the last call to FortiGate
returned: success
type: str
sample: "id"
name:
description: Name of the table used to fulfill the request
returned: always
type: str
sample: "urlfilter"
path:
description: Path of the table used to fulfill the request
returned: always
type: str
sample: "webfilter"
revision:
description: Internal revision number
returned: always
type: str
sample: "17.0.2.10658"
serial:
description: Serial number of the unit
returned: always
type: str
sample: "FGVMEVYYQT3AB5352"
status:
description: Indication of the operation's result
returned: always
type: str
sample: "success"
vdom:
description: Virtual domain used
returned: always
type: str
sample: "root"
version:
description: Version of the FortiGate
returned: always
type: str
sample: "v5.6.3"
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.connection import Connection
from ansible.module_utils.network.fortios.fortios import FortiOSHandler
from ansible.module_utils.network.fortimanager.common import FAIL_SOCKET_MSG
def login(data, fos):
host = data['host']
username = data['username']
password = data['password']
ssl_verify = data['ssl_verify']
fos.debug('on')
if 'https' in data and not data['https']:
fos.https('off')
else:
fos.https('on')
fos.login(host, username, password, verify=ssl_verify)
def filter_log_syslogd3_setting_data(json):
option_list = ['certificate', 'custom_field_name', 'enc_algorithm',
'facility', 'format', 'mode',
'port', 'server', 'source_ip',
'ssl_min_proto_version', 'status']
dictionary = {}
for attribute in option_list:
if attribute in json and json[attribute] is not None:
dictionary[attribute] = json[attribute]
return dictionary
def underscore_to_hyphen(data):
    if isinstance(data, list):
        # store converted elements back so nested dicts are not lost
        for index, elem in enumerate(data):
            data[index] = underscore_to_hyphen(elem)
    elif isinstance(data, dict):
        new_data = {}
        for k, v in data.items():
            new_data[k.replace('_', '-')] = underscore_to_hyphen(v)
        data = new_data
    return data
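# Illustrative conversion (hypothetical payload):
#   {'source_ip': '1.2.3.4', 'custom_field_name': [{'id': 1}]}
#   -> {'source-ip': '1.2.3.4', 'custom-field-name': [{'id': 1}]}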
def log_syslogd3_setting(data, fos):
vdom = data['vdom']
log_syslogd3_setting_data = data['log_syslogd3_setting']
filtered_data = underscore_to_hyphen(filter_log_syslogd3_setting_data(log_syslogd3_setting_data))
return fos.set('log.syslogd3',
'setting',
data=filtered_data,
vdom=vdom)
def is_successful_status(status):
    return status['status'] == "success" or \
        (status['http_method'] == "DELETE" and status['http_status'] == 404)
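# A DELETE answered with HTTP 404 counts as success here: the object to remove
# was already absent.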
def fortios_log_syslogd3(data, fos):
if data['log_syslogd3_setting']:
resp = log_syslogd3_setting(data, fos)
return not is_successful_status(resp), \
resp['status'] == "success", \
resp
def main():
fields = {
"host": {"required": False, "type": "str"},
"username": {"required": False, "type": "str"},
"password": {"required": False, "type": "str", "default": "", "no_log": True},
"vdom": {"required": False, "type": "str", "default": "root"},
"https": {"required": False, "type": "bool", "default": True},
"ssl_verify": {"required": False, "type": "bool", "default": True},
"log_syslogd3_setting": {
"required": False, "type": "dict", "default": None,
"options": {
"certificate": {"required": False, "type": "str"},
"custom_field_name": {"required": False, "type": "list",
"options": {
"custom": {"required": False, "type": "str"},
"id": {"required": True, "type": "int"},
"name": {"required": False, "type": "str"}
}},
"enc_algorithm": {"required": False, "type": "str",
"choices": ["high-medium", "high", "low",
"disable"]},
"facility": {"required": False, "type": "str",
"choices": ["kernel", "user", "mail",
"daemon", "auth", "syslog",
"lpr", "news", "uucp",
"cron", "authpriv", "ftp",
"ntp", "audit", "alert",
"clock", "local0", "local1",
"local2", "local3", "local4",
"local5", "local6", "local7"]},
"format": {"required": False, "type": "str",
"choices": ["default", "csv", "cef"]},
"mode": {"required": False, "type": "str",
"choices": ["udp", "legacy-reliable", "reliable"]},
"port": {"required": False, "type": "int"},
"server": {"required": False, "type": "str"},
"source_ip": {"required": False, "type": "str"},
"ssl_min_proto_version": {"required": False, "type": "str",
"choices": ["default", "SSLv3", "TLSv1",
"TLSv1-1", "TLSv1-2"]},
"status": {"required": False, "type": "str",
"choices": ["enable", "disable"]}
}
}
}
module = AnsibleModule(argument_spec=fields,
supports_check_mode=False)
# legacy_mode refers to using fortiosapi instead of HTTPAPI
legacy_mode = 'host' in module.params and module.params['host'] is not None and \
'username' in module.params and module.params['username'] is not None and \
'password' in module.params and module.params['password'] is not None
if not legacy_mode:
if module._socket_path:
connection = Connection(module._socket_path)
fos = FortiOSHandler(connection)
is_error, has_changed, result = fortios_log_syslogd3(module.params, fos)
else:
module.fail_json(**FAIL_SOCKET_MSG)
else:
try:
from fortiosapi import FortiOSAPI
except ImportError:
module.fail_json(msg="fortiosapi module is required")
fos = FortiOSAPI()
login(module.params, fos)
is_error, has_changed, result = fortios_log_syslogd3(module.params, fos)
fos.logout()
if not is_error:
module.exit_json(changed=has_changed, meta=result)
else:
module.fail_json(msg="Error in repo", meta=result)
if __name__ == '__main__':
main()
|
pdellaert/ansible
|
refs/heads/devel
|
lib/ansible/modules/database/postgresql/postgresql_schema.py
|
10
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2016, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: postgresql_schema
short_description: Add or remove PostgreSQL schema
description:
- Add or remove PostgreSQL schema.
version_added: '2.3'
options:
name:
description:
- Name of the schema to add or remove.
required: true
type: str
aliases:
- schema
database:
description:
- Name of the database to connect to and add or remove the schema.
type: str
default: postgres
aliases:
- db
- login_db
owner:
description:
- Name of the role to set as owner of the schema.
type: str
session_role:
version_added: '2.8'
description:
- Switch to session_role after connecting.
- The specified session_role must be a role that the current login_user is a member of.
- Permissions checking for SQL commands is carried out as though the session_role
were the one that had logged in originally.
type: str
state:
description:
- The schema state.
type: str
default: present
choices: [ absent, present ]
cascade_drop:
description:
- Drop schema with CASCADE to remove child objects.
type: bool
default: false
version_added: '2.8'
ssl_mode:
description:
- Determines whether or with what priority a secure SSL TCP/IP connection will be negotiated with the server.
- See https://www.postgresql.org/docs/current/static/libpq-ssl.html for more information on the modes.
- Default of C(prefer) matches libpq default.
type: str
default: prefer
choices: [ allow, disable, prefer, require, verify-ca, verify-full ]
version_added: '2.8'
ca_cert:
description:
- Specifies the name of a file containing SSL certificate authority (CA) certificate(s).
- If the file exists, the server's certificate will be verified to be signed by one of these authorities.
type: str
aliases: [ ssl_rootcert ]
version_added: '2.8'
seealso:
- name: PostgreSQL schemas
description: General information about PostgreSQL schemas.
link: https://www.postgresql.org/docs/current/ddl-schemas.html
- name: CREATE SCHEMA reference
description: Complete reference of the CREATE SCHEMA command documentation.
link: https://www.postgresql.org/docs/current/sql-createschema.html
- name: ALTER SCHEMA reference
description: Complete reference of the ALTER SCHEMA command documentation.
link: https://www.postgresql.org/docs/current/sql-alterschema.html
- name: DROP SCHEMA reference
description: Complete reference of the DROP SCHEMA command documentation.
link: https://www.postgresql.org/docs/current/sql-dropschema.html
author:
- Flavien Chantelot (@Dorn-) <contact@flavien.io>
- Thomas O'Donnell (@andytom)
extends_documentation_fragment: postgres
'''
EXAMPLES = r'''
- name: Create a new schema with name acme in test database
postgresql_schema:
db: test
name: acme
- name: Create a new schema acme with a user bob who will own it
postgresql_schema:
name: acme
owner: bob
- name: Drop schema "acme" with cascade
postgresql_schema:
name: acme
state: absent
cascade_drop: yes
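# A sketch combining documented options; the role name and certificate path
# below are illustrative placeholders.
- name: Create schema acme owned by bob over a CA-verified SSL connection
  postgresql_schema:
    db: test
    name: acme
    owner: bob
    ssl_mode: verify-ca
    ca_cert: /etc/ssl/certs/root.crt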
'''
RETURN = r'''
schema:
description: Name of the schema.
returned: success, changed
type: str
sample: "acme"
queries:
description: List of executed queries.
returned: always
type: list
sample: ["CREATE SCHEMA \"acme\""]
'''
import traceback
try:
from psycopg2.extras import DictCursor
except ImportError:
# psycopg2 is checked by connect_to_db()
# from ansible.module_utils.postgres
pass
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.postgres import (
connect_to_db,
get_conn_params,
postgres_common_argument_spec,
)
from ansible.module_utils.database import SQLParseError, pg_quote_identifier
from ansible.module_utils._text import to_native
executed_queries = []
class NotSupportedError(Exception):
pass
# ===========================================
# PostgreSQL module specific support methods.
#
def set_owner(cursor, schema, owner):
query = "ALTER SCHEMA %s OWNER TO %s" % (
pg_quote_identifier(schema, 'schema'),
pg_quote_identifier(owner, 'role'))
cursor.execute(query)
executed_queries.append(query)
return True
def get_schema_info(cursor, schema):
query = ("SELECT schema_owner AS owner "
"FROM information_schema.schemata "
"WHERE schema_name = '%s'" % schema)
cursor.execute(query)
return cursor.fetchone()
def schema_exists(cursor, schema):
query = ("SELECT schema_name FROM information_schema.schemata "
"WHERE schema_name = '%s'" % schema)
cursor.execute(query)
return cursor.rowcount == 1
def schema_delete(cursor, schema, cascade):
if schema_exists(cursor, schema):
query = "DROP SCHEMA %s" % pg_quote_identifier(schema, 'schema')
if cascade:
query += " CASCADE"
cursor.execute(query)
executed_queries.append(query)
return True
else:
return False
def schema_create(cursor, schema, owner):
if not schema_exists(cursor, schema):
query_fragments = ['CREATE SCHEMA %s' % pg_quote_identifier(schema, 'schema')]
if owner:
query_fragments.append('AUTHORIZATION %s' % pg_quote_identifier(owner, 'role'))
query = ' '.join(query_fragments)
cursor.execute(query)
executed_queries.append(query)
return True
else:
schema_info = get_schema_info(cursor, schema)
if owner and owner != schema_info['owner']:
return set_owner(cursor, schema, owner)
else:
return False
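# For instance, schema_create(cursor, 'acme', 'bob') issues
# 'CREATE SCHEMA "acme" AUTHORIZATION "bob"' when the schema is missing, and
# only an ALTER SCHEMA ... OWNER TO statement when it already exists under a
# different owner.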
def schema_matches(cursor, schema, owner):
if not schema_exists(cursor, schema):
return False
else:
schema_info = get_schema_info(cursor, schema)
if owner and owner != schema_info['owner']:
return False
else:
return True
# ===========================================
# Module execution.
#
def main():
argument_spec = postgres_common_argument_spec()
argument_spec.update(
schema=dict(type="str", required=True, aliases=['name']),
owner=dict(type="str", default=""),
database=dict(type="str", default="postgres", aliases=["db", "login_db"]),
cascade_drop=dict(type="bool", default=False),
state=dict(type="str", default="present", choices=["absent", "present"]),
session_role=dict(type="str"),
)
module = AnsibleModule(
argument_spec=argument_spec,
supports_check_mode=True,
)
schema = module.params["schema"]
owner = module.params["owner"]
state = module.params["state"]
cascade_drop = module.params["cascade_drop"]
changed = False
conn_params = get_conn_params(module, module.params)
db_connection = connect_to_db(module, conn_params, autocommit=True)
cursor = db_connection.cursor(cursor_factory=DictCursor)
try:
if module.check_mode:
if state == "absent":
changed = not schema_exists(cursor, schema)
elif state == "present":
changed = not schema_matches(cursor, schema, owner)
module.exit_json(changed=changed, schema=schema)
if state == "absent":
try:
changed = schema_delete(cursor, schema, cascade_drop)
except SQLParseError as e:
module.fail_json(msg=to_native(e), exception=traceback.format_exc())
elif state == "present":
try:
changed = schema_create(cursor, schema, owner)
except SQLParseError as e:
module.fail_json(msg=to_native(e), exception=traceback.format_exc())
except NotSupportedError as e:
module.fail_json(msg=to_native(e), exception=traceback.format_exc())
except SystemExit:
# Avoid catching this on Python 2.4
raise
except Exception as e:
module.fail_json(msg="Database query failed: %s" % to_native(e), exception=traceback.format_exc())
db_connection.close()
module.exit_json(changed=changed, schema=schema, queries=executed_queries)
if __name__ == '__main__':
main()
|
ColOfAbRiX/ansible
|
refs/heads/devel
|
lib/ansible/modules/cloud/webfaction/webfaction_db.py
|
23
|
#!/usr/bin/python
#
# Create a webfaction database using Ansible and the Webfaction API
#
# ------------------------------------------
#
# (c) Quentin Stafford-Fraser 2015, with contributions gratefully acknowledged from:
# * Andy Baker
# * Federico Tarantini
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: webfaction_db
short_description: Add or remove a database on Webfaction
description:
- Add or remove a database on a Webfaction host. Further documentation at http://github.com/quentinsf/ansible-webfaction.
author: Quentin Stafford-Fraser (@quentinsf)
version_added: "2.0"
notes:
- "You can run playbooks that use this on a local machine, or on a Webfaction host, or elsewhere, since the scripts use the remote webfaction API - the location is not important. However, running them on multiple hosts I(simultaneously) is best avoided. If you don't specify I(localhost) as your host, you may want to add C(serial: 1) to the plays."
- See `the webfaction API <http://docs.webfaction.com/xmlrpc-api/>`_ for more info.
options:
name:
description:
- The name of the database
required: true
state:
description:
- Whether the database should exist
required: false
choices: ['present', 'absent']
default: "present"
type:
description:
- The type of database to create.
required: true
choices: ['mysql', 'postgresql']
password:
description:
- The password for the new database user.
required: false
default: None
login_name:
description:
- The webfaction account to use
required: true
login_password:
description:
- The webfaction password to use
required: true
machine:
description:
- The machine name to use (optional for accounts with only one machine)
required: false
'''
EXAMPLES = '''
# This will also create a default DB user with the same
# name as the database, and the specified password.
- name: Create a database
webfaction_db:
name: "{{webfaction_user}}_db1"
password: mytestsql
type: mysql
login_name: "{{webfaction_user}}"
login_password: "{{webfaction_passwd}}"
machine: "{{webfaction_machine}}"
# Note that, for symmetry's sake, deleting a database using
# 'state: absent' will also delete the matching user.
'''
import socket
import xmlrpclib
webfaction = xmlrpclib.ServerProxy('https://api.webfaction.com/')
def main():
module = AnsibleModule(
argument_spec = dict(
name = dict(required=True),
state = dict(required=False, choices=['present', 'absent'], default='present'),
type = dict(required=True),
password = dict(required=False, default=None),
login_name = dict(required=True),
login_password = dict(required=True),
machine = dict(required=False, default=False),
),
supports_check_mode=True
)
db_name = module.params['name']
db_state = module.params['state']
db_type = module.params['type']
db_passwd = module.params['password']
if module.params['machine']:
session_id, account = webfaction.login(
module.params['login_name'],
module.params['login_password'],
module.params['machine']
)
else:
session_id, account = webfaction.login(
module.params['login_name'],
module.params['login_password']
)
db_list = webfaction.list_dbs(session_id)
db_map = dict([(i['name'], i) for i in db_list])
existing_db = db_map.get(db_name)
user_list = webfaction.list_db_users(session_id)
user_map = dict([(i['username'], i) for i in user_list])
existing_user = user_map.get(db_name)
result = {}
# Here's where the real stuff happens
if db_state == 'present':
# Does a database with this name already exist?
if existing_db:
# Yes, but of a different type - fail
if existing_db['db_type'] != db_type:
module.fail_json(msg="Database already exists but is a different type. Please fix by hand.")
# If it exists with the right type, we don't change anything.
module.exit_json(
changed = False,
)
if not module.check_mode:
# If this isn't a dry run, create the db
# and default user.
result.update(
webfaction.create_db(
session_id, db_name, db_type, db_passwd
)
)
elif db_state == 'absent':
# If this isn't a dry run...
if not module.check_mode:
if not (existing_db or existing_user):
module.exit_json(changed = False,)
if existing_db:
# Delete the db if it exists
result.update(
webfaction.delete_db(session_id, db_name, db_type)
)
if existing_user:
# Delete the default db user if it exists
result.update(
webfaction.delete_db_user(session_id, db_name, db_type)
)
else:
module.fail_json(msg="Unknown state specified: {}".format(db_state))
module.exit_json(
changed = True,
result = result
)
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()
|
PhobosXIII/qc
|
refs/heads/master
|
coordination/models.py
|
1
|
from datetime import timedelta
from itertools import groupby
from ckeditor.fields import RichTextField
from django.conf import settings
from django.contrib.auth.models import User
from django.core.validators import MinValueValidator, MaxValueValidator
from django.db import models
from django.utils import timezone
from django.utils.html import strip_tags
from coordination.utils import get_timedelta_with_now, time_in_minutes, generate_random_username, \
generate_random_password, get_timedelta
def mission_file_name(instance, filename):
ext = filename.split('.')[-1].lower()
filename = "{0}-{1}.{2}".format(instance.order_number, timezone.now().strftime("%d-%m-%Y-%H-%M-%S"), ext)
return '/'.join(['mission_imgs', str(instance.quest.creator.pk), str(instance.quest.pk), filename])
class Quest(models.Model):
LINEAR = 'L'
NONLINEAR = 'NL'
LINE_NONLINEAR = 'LNL'
MULTILINEAR = 'ML'
NOT_STARTED = 'NTS'
STARTED = 'STR'
ENDED = 'END'
TYPES = (
(LINEAR, 'Линейный'),
(NONLINEAR, 'Нелинейный'),
(LINE_NONLINEAR, 'Линейно-нелинейный'),
(MULTILINEAR, 'Многолинейный'),
)
STATUSES = (
(NOT_STARTED, 'Не запущен'),
(STARTED, 'Запущен'),
(ENDED, 'Завершен'),
)
title = models.CharField('название', max_length=255)
start = models.DateTimeField('старт', null=True, blank=True)
description = RichTextField('описание', blank=True)
type = models.CharField('тип', max_length=3, choices=TYPES, default=LINEAR)
status = models.CharField('статус', max_length=3, choices=STATUSES, default=NOT_STARTED)
is_published = models.BooleanField('опубликован', default=False)
creator = models.ForeignKey(settings.AUTH_USER_MODEL, verbose_name='создатель', related_name='creator')
members = models.ManyToManyField(settings.AUTH_USER_MODEL, through='Membership', related_name='members')
game_over = models.DateTimeField('конец игры', null=True, blank=True)
parent = models.ForeignKey('self', on_delete=models.CASCADE, editable=False, null=True, blank=True)
order_number = models.PositiveSmallIntegerField('номер линии', default=1,
help_text='Влияет на порядок отображения линий.',
validators=[MinValueValidator(1), MaxValueValidator(99)])
class Meta:
verbose_name = 'квест'
verbose_name_plural = 'квесты'
ordering = ['start']
@property
def published(self):
quest = self
if self.parent:
quest = self.parent
return quest.is_published
@property
def not_started(self):
quest = self
if self.parent:
quest = self.parent
return quest.status == Quest.NOT_STARTED
@property
def started(self):
quest = self
if self.parent:
quest = self.parent
return quest.status == Quest.STARTED
@property
def ended(self):
quest = self
if self.parent:
quest = self.parent
return quest.status == Quest.ENDED
@property
def linear(self):
return self.type == self.LINEAR
@property
def nonlinear(self):
return self.type == self.NONLINEAR
@property
def line_nonlinear(self):
return self.type == self.LINE_NONLINEAR
@property
def multilinear(self):
return self.type == self.MULTILINEAR
@property
def is_game_over(self):
quest = self
if self.parent:
quest = self.parent
if quest.game_over:
return timezone.now() >= quest.game_over
else:
return False
@property
def rest_quest(self):
quest = self
if self.parent:
quest = self.parent
if quest.started and not quest.is_game_over:
return get_timedelta(quest.game_over)
else:
return None
def __str__(self):
return self.title
def save(self, *args, **kwargs):
is_create = not self.pk
super(Quest, self).save(*args, **kwargs)
if is_create:
if not self.parent:
Membership.objects.create(quest=self, user=self.creator, role=Membership.ORGANIZER)
name = 'agent{0}'.format(self.pk)
username = generate_random_username(name)
password = generate_random_password()
agent = User.objects.create_user(username=username, password=password, first_name=name, last_name=password)
Membership.objects.create(quest=self, user=agent, role=Membership.AGENT)
Mission.objects.create(quest=self, name_in_table='Старт', order_number=0)
Mission.objects.create(quest=self, name_in_table='Финиш', order_number=1, is_finish=True)
def begin(self):
if self.not_started:
self.status = self.STARTED
elif self.started:
self.status = self.NOT_STARTED
self.save()
def end(self):
if self.started:
self.status = self.ENDED
elif self.ended:
self.status = self.STARTED
self.save()
def publish(self):
self.is_published = not self.is_published
self.save()
def lines(self):
return Quest.objects.filter(parent=self).order_by('order_number')
def missions(self):
if self.multilinear:
return Mission.objects.filter(quest__in=self.lines()).order_by('quest__order_number', 'order_number')
else:
return Mission.objects.filter(quest=self).order_by('order_number')
def current_missions(self):
return CurrentMission.objects.filter(mission__quest=self)
def current_missions_multilinear(self, player):
return CurrentMission.objects.filter(mission__quest__in=self.lines(), player=player)
def next_mission_number(self):
if self.parent:
return len(self.missions())
else:
return len(self.missions()) - 1
def start_mission(self):
return Mission.objects.get(quest=self, order_number=0)
def finish_mission(self):
return Mission.objects.filter(quest=self, is_finish=True).first()
def messages(self):
return Message.objects.filter(quest=self)
def organizers(self):
return self.members.filter(membership__role=Membership.ORGANIZER)
def players(self):
return self.members.filter(membership__role=Membership.PLAYER).order_by('first_name')
def agents(self):
return self.members.filter(membership__role=Membership.AGENT)
def players_ext(self):
all_missions = self.missions().filter(order_number__gt=0, is_finish=False)
players = self.members.filter(membership__role=Membership.PLAYER)
for player in players:
player.last_time = Keylog.last_time(self, player)
player.points = Keylog.total_points(self, player)
missions = Mission.completed_missions(self, player)
other_missions = [i for i in all_missions if i not in missions]
player.num_missions = len(missions)
if self.multilinear:
player.missions = self.multiline_missions_str(missions)
player.other_missions = self.multiline_missions_str(other_missions)
else:
player.missions = ', '.join(str(i.table_name) for i in missions)
player.other_missions = ', '.join(str(i.table_name) for i in other_missions)
return players
def missions_ext(self):
all_players = self.members.filter(membership__role=Membership.PLAYER)
missions = self.missions().filter(order_number__gt=0, is_finish=False)
for mission in missions:
keylogs = mission.right_keylogs()
players = [i.player for i in keylogs]
mission.players = ', '.join(str(i) for i in players)
other_players = [i for i in all_players if i not in players]
mission.other_players = ', '.join(str(i) for i in other_players)
mission.num_players = len(keylogs)
return missions
@staticmethod
def coming_quests():
now = timezone.now() - timedelta(hours=6)
return Quest.objects.filter(is_published=True, parent__isnull=True, start__gte=now)
@staticmethod
def my_quests(user):
return Quest.objects.filter(membership__user=user, parent__isnull=True)
@staticmethod
def multiline_missions_str(missions):
line_format = "{0}: {1}"
missions = sorted(missions, key=lambda x: x.quest.order_number)
iter = groupby(missions, key=lambda x: x.quest)
quest_missions = []
for quest, missions in iter:
line_str = ', '.join(str(i.table_name) for i in missions)
quest_missions.append(line_format.format(quest.title, line_str))
missions_str = ' ___ '.join(i for i in quest_missions)
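        # e.g. two quest lines yield "Line A: 1. k1, 2. k2 ___ Line B: 1. k3"
        # (titles and keys here are hypothetical; segments join with ' ___ ').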
return missions_str
class OrganizerManager(models.Manager):
def get_queryset(self):
return super(OrganizerManager, self).get_queryset().filter(role=Membership.ORGANIZER)
class PlayerManager(models.Manager):
def get_queryset(self):
return super(PlayerManager, self).get_queryset().filter(role=Membership.PLAYER)
class AgentManager(models.Manager):
def get_queryset(self):
return super(AgentManager, self).get_queryset().filter(role=Membership.AGENT)
class Membership(models.Model):
ORGANIZER = 'O'
PLAYER = 'P'
AGENT = 'A'
ROLES = (
(ORGANIZER, 'Организатор'),
(PLAYER, 'Игрок'),
(AGENT, 'Агент'),
)
quest = models.ForeignKey(Quest, verbose_name='квест', on_delete=models.CASCADE)
user = models.ForeignKey(settings.AUTH_USER_MODEL, verbose_name='пользователь', on_delete=models.CASCADE)
role = models.CharField('роль', max_length=1, choices=ROLES, default=PLAYER)
objects = models.Manager()
organizers = OrganizerManager()
players = PlayerManager()
agents = AgentManager()
class Meta:
verbose_name = 'Участник квеста'
verbose_name_plural = 'Участники квеста'
unique_together = ('quest', 'user')
@property
def organizer(self):
return self.role == self.ORGANIZER
@property
def player(self):
return self.role == self.PLAYER
@property
def agent(self):
return self.role == self.AGENT
class Mission(models.Model):
quest = models.ForeignKey(Quest, verbose_name='квест')
name = models.CharField('название', max_length=100, blank=True,
help_text='В основном для сюжетных игр, например, Панофобия, Колдунья и т.д. '
'Отображается игрокам в координации.')
name_in_table = models.CharField('название в табличке', max_length=100, blank=True,
help_text='Как правило ответ на задание. Отображается в итоговой табличке.')
text = RichTextField('текст задания', blank=True)
picture = models.ImageField('картинка', upload_to=mission_file_name, blank=True)
key = models.CharField('ключ', max_length=30, blank=True)
order_number = models.PositiveSmallIntegerField('номер задания',
validators=[MinValueValidator(0), MaxValueValidator(99)])
is_finish = models.BooleanField(u'финиш', default=False)
points = models.PositiveSmallIntegerField('баллы', default=0)
class Meta:
verbose_name = 'задание'
verbose_name_plural = 'задания'
ordering = ['quest__title', 'order_number', 'name']
@property
def is_start(self):
return self.order_number == 0
def __str__(self):
if self.is_start:
return 'Старт'
elif self.is_finish:
if self.quest.line_nonlinear:
return 'Финиш{0}'.format(". " + self.name if self.name else "")
else:
return 'Финиш'
else:
return 'Задание {0}{1}{2}'.format(self.order_number,
". " + self.name if self.name else "",
" (" + self.name_in_table + ")" if self.name_in_table else "")
@property
def short_name(self):
if self.is_start or self.is_finish:
return self.__str__()
else:
return 'Задание {0}{1}'.format(self.order_number, ". " + self.name if self.name else "")
@property
def medium_name(self):
if self.is_start or self.is_finish:
return self.__str__()
else:
return '{0}{1}{2}'.format(self.order_number, ". " + self.name if self.name else "",
" (" + self.name_in_table + ")" if self.name_in_table else "")
@property
def table_name(self):
if self.is_start or self.is_finish:
return self.__str__()
else:
return '{0}{1}'.format(self.order_number, ". " + self.name_in_table if self.name_in_table else "")
@property
def total_hints_time(self):
hint = Hint.objects.filter(mission=self).order_by('order_number').last()
if hint is not None:
return hint.abs_delay
else:
return 0
def save(self, *args, **kwargs):
super(Mission, self).save(*args, **kwargs)
if not self.is_start and not self.is_finish:
Mission.update_finish_number(self.quest)
def hints(self):
return Hint.objects.filter(mission=self)
@staticmethod
def hints_in_nl(quest, missions):
display_hints = []
rest_hints = []
if quest.nonlinear:
minutes = time_in_minutes(get_timedelta_with_now(quest.start))
for mission in missions:
hints = mission.hints()
for hint in hints:
if hint.abs_delay <= minutes:
display_hints.append(hint)
else:
rest_hints.append(hint)
return display_hints, rest_hints
def next_hint_number(self):
return len(self.hints()) + 1
def is_completed(self, player):
keylog = Keylog.objects.filter(mission=self, player=player, is_right=True).first()
return keylog is not None
def is_current(self, player):
quest = self.quest
if quest.nonlinear:
member = quest.membership_set.filter(user=player).first()
result = member and member.player
else:
current_mission = CurrentMission.objects.filter(mission=self, player=player).first()
result = current_mission is not None
return result
def right_keylogs(self):
keylogs = Keylog.objects.filter(mission=self, is_right=True)
return keylogs.order_by('player', 'mission__order_number').distinct('player', 'mission__order_number')
def as_json(self):
return {
"name": self.short_name,
"text": self.text
}
@staticmethod
def update_finish_number(quest):
missions = quest.missions()
order_number = missions.filter(is_finish=False).last().order_number + 1
missions.filter(is_finish=True).update(order_number=order_number)
@staticmethod
def completed_missions(quest, player):
keylogs = Keylog.get_keylogs(quest, player, True)
keylogs = keylogs.order_by('fix_time', 'mission__id').distinct('fix_time', 'mission__id')
return [i.mission for i in keylogs]
class Hint(models.Model):
mission = models.ForeignKey(Mission, verbose_name='задание')
text = RichTextField('текст подсказки')
delay = models.PositiveSmallIntegerField('время отправления',
validators=[MinValueValidator(1), MaxValueValidator(360)])
order_number = models.PositiveSmallIntegerField('номер подсказки',
validators=[MinValueValidator(1), MaxValueValidator(99)])
class Meta:
verbose_name = 'подсказка'
verbose_name_plural = 'подсказки'
ordering = ['order_number']
def __str__(self):
return 'Подсказка {0}'.format(self.order_number)
@property
def abs_delay(self):
hints = Hint.objects.filter(mission=self.mission, order_number__lte=self.order_number)
aggregation = hints.aggregate(abs_delay=models.Sum('delay'))
return aggregation.get('abs_delay', self.delay)
@property
def time_in_nl(self):
time = None
quest = self.mission.quest
if quest.nonlinear:
time = quest.start + timedelta(minutes=self.abs_delay)
return time
def as_json(self):
return {
"title": self.__str__(),
"delay": self.delay,
"text": self.text
}
@staticmethod
def as_json_array(hints):
array = []
for hint in hints:
array.append(hint.as_json())
return array
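# Illustration: Hint.abs_delay above is a running total of `delay` over hints
# with order_number <= this hint's, so per-hint delays of [10, 15, 20] minutes
# yield absolute send offsets of 10, 25 and 45 minutes.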
class CurrentMission(models.Model):
player = models.ForeignKey(settings.AUTH_USER_MODEL, verbose_name='игрок')
mission = models.ForeignKey(Mission, verbose_name='задание')
start_time = models.DateTimeField('время начала задания', default=timezone.now)
class Meta:
verbose_name = 'текущее задание'
verbose_name_plural = 'текущие задания'
ordering = ['-mission__order_number', 'start_time']
def __str__(self):
return '{0} - {1}'.format(self.player, self.mission)
@property
def alarm(self):
if self.mission.is_start or self.mission.is_finish:
return False
minutes = time_in_minutes(get_timedelta_with_now(self.start_time))
threshold = self.mission.total_hints_time + 30
return minutes >= threshold
def display_hints(self):
display_hints = []
minutes = time_in_minutes(get_timedelta_with_now(self.start_time))
hints = self.mission.hints()
for hint in hints:
if hint.abs_delay <= minutes:
display_hints.append(hint)
return display_hints
def next_hint_time(self):
next_hint_time = None
minutes = time_in_minutes(get_timedelta_with_now(self.start_time))
hints = self.mission.hints()
for hint in hints:
if hint.abs_delay > minutes:
next_hint_time = self.start_time + timedelta(minutes=hint.abs_delay)
break
return next_hint_time
class Keylog(models.Model):
player = models.ForeignKey(settings.AUTH_USER_MODEL, verbose_name='игрок')
mission = models.ForeignKey(Mission, verbose_name='задание')
key = models.CharField('ключ', max_length=30)
fix_time = models.DateTimeField('время ключа')
is_right = models.BooleanField('правильный ключ', default=False)
points = models.PositiveSmallIntegerField('баллы', default=0)
class Meta:
verbose_name = 'история ключей'
verbose_name_plural = 'история ключей'
def __str__(self):
return self.key
@staticmethod
def right_keylogs(missions):
keylogs = Keylog.objects.filter(mission__in=missions, is_right=True)
return keylogs.order_by('player', 'mission__order_number').distinct('player', 'mission__order_number')
@staticmethod
def wrong_keylogs(player, mission):
return Keylog.objects.filter(player=player, mission=mission, is_right=False)
@staticmethod
def wrong_keylogs_format(player, mission):
wrong_keys = Keylog.wrong_keylogs(player, mission)
return ', '.join(str(i) for i in wrong_keys)
@staticmethod
def total_points(quest, player):
keylogs = Keylog.get_keylogs(quest, player, True)
keylogs = keylogs.order_by('mission__id').distinct('mission__id')
total_points = 0
for keylog in keylogs:
total_points += keylog.points
return total_points
@staticmethod
def last_time(quest, player):
keylog = None
keylogs = Keylog.get_keylogs(quest, player, True)
if keylogs:
keylog = keylogs.order_by('-fix_time').first()
return keylog.fix_time if keylog else timezone.now()
@staticmethod
def get_keylogs(quest, player, is_right):
if quest.multilinear:
keylogs = Keylog.objects.filter(mission__quest__in=quest.lines(), player=player, is_right=is_right)
else:
keylogs = Keylog.objects.filter(mission__quest=quest, player=player, is_right=is_right)
return keylogs
class Message(models.Model):
quest = models.ForeignKey(Quest, verbose_name='квест')
text = RichTextField('текст сообщения')
is_show = models.BooleanField('отображать', default=True)
class Meta:
verbose_name = 'сообщение'
verbose_name_plural = 'сообщения'
def strip_text(self):
return strip_tags(self.text)
def show(self):
self.is_show = not self.is_show
self.save()
def as_json(self):
return {
"text": self.text
}
@staticmethod
def as_json_array(messages):
array = []
for message in messages:
array.append(message.as_json())
return array
|
alisidd/tensorflow
|
refs/heads/asgd-dc
|
tensorflow/python/training/server_lib_same_variables_no_clear_test.py
|
125
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tf.GrpcServer."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.client import session
from tensorflow.python.framework import ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
from tensorflow.python.training import server_lib
class SameVariablesNoClearTest(test.TestCase):
# Verifies behavior of multiple variables with multiple sessions connecting to
# the same server.
# TODO(b/34465411): Starting multiple servers with different configurations
# in the same test is flaky. Move this test case back into
# "server_lib_test.py" when this is no longer the case.
def testSameVariablesNoClear(self):
server = server_lib.Server.create_local_server()
with session.Session(server.target) as sess_1:
v0 = variables.Variable([[2, 1]], name="v0")
v1 = variables.Variable([[1], [2]], name="v1")
v2 = math_ops.matmul(v0, v1)
sess_1.run([v0.initializer, v1.initializer])
self.assertAllEqual([[4]], sess_1.run(v2))
with session.Session(server.target) as sess_2:
new_v0 = ops.get_default_graph().get_tensor_by_name("v0:0")
new_v1 = ops.get_default_graph().get_tensor_by_name("v1:0")
new_v2 = math_ops.matmul(new_v0, new_v1)
self.assertAllEqual([[4]], sess_2.run(new_v2))
if __name__ == "__main__":
test.main()
|
Ivaylo-Popov/Theano-Lights
|
refs/heads/master
|
models/lm_draw.py
|
11
|
import theano
import theano.tensor as T
from theano.sandbox.rng_mrg import MRG_RandomStreams
from theano.tensor.nnet.conv import conv2d
from theano.tensor.signal.downsample import max_pool_2d
from theano.tensor.shared_randomstreams import RandomStreams
import numpy as np
from toolbox import *
from modelbase import *
class LM_draw(ModelLMBase):
def __init__(self, data, hp):
super(LM_draw, self).__init__(self.__class__.__name__, data, hp)
self.n_h = 1024
self.n_zpt = 256
self.dropout = 0.0
self.params = Parameters()
self.hiddenstates = Parameters()
n_tokens = self.data['n_tokens']
gates = 4
with self.hiddenstates:
b1_h = shared_zeros((self.hp.batch_size, self.n_h))
b1_c = shared_zeros((self.hp.batch_size, self.n_h))
b2_h = shared_zeros((self.hp.batch_size, self.n_h))
b2_c = shared_zeros((self.hp.batch_size, self.n_h))
if hp.load_model and os.path.isfile(self.filename):
self.params.load(self.filename)
else:
with self.params:
W_emb = shared_normal((n_tokens, self.n_h), scale=hp.init_scale)
W1 = shared_normal((self.n_h*2, self.n_h*gates), scale=hp.init_scale*1.5)
V1 = shared_normal((self.n_h, self.n_h*gates), scale=hp.init_scale*1.5)
b1 = shared_zeros((self.n_h*gates,))
Wmu = shared_normal((self.n_h, self.n_zpt), scale=hp.init_scale)
Wsi = shared_normal((self.n_h, self.n_zpt), scale=hp.init_scale)
bmu = shared_zeros((self.n_zpt,))
bsi = shared_zeros((self.n_zpt,))
W2 = shared_normal((self.n_zpt, self.n_h*gates), scale=hp.init_scale*1.5)
V2 = shared_normal((self.n_h, self.n_h*gates), scale=hp.init_scale*1.5)
b2 = shared_zeros((self.n_h*gates,))
def lstm(X, h, c, W, U, b):
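        # Standard LSTM cell: gate pre-activations are computed in a single
        # matrix product and sliced by column: [0, n_h) input gate,
        # [n_h, 2*n_h) forget gate, [2*n_h, 3*n_h) output gate, and the
        # remaining columns form the candidate cell update.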
g_on = T.dot(X,W) + T.dot(h,U) + b
i_on = T.nnet.sigmoid(g_on[:,:self.n_h])
f_on = T.nnet.sigmoid(g_on[:,self.n_h:2*self.n_h])
o_on = T.nnet.sigmoid(g_on[:,2*self.n_h:3*self.n_h])
c = f_on * c + i_on * T.tanh(g_on[:,3*self.n_h:])
h = o_on * T.tanh(c)
return h, c
def model(x, p, p_dropout, noise):
input_size = x.shape[1]
h0 = p.W_emb[x] # (seq_len, batch_size, emb_size)
h0 = dropout(h0, p_dropout)
cost, h1, c1, h2, c2 = [0., b1_h, b1_c, b2_h, b2_c]
eps = srnd.normal((self.hp.seq_size, input_size, self.n_zpt), dtype=theano.config.floatX)
for t in xrange(0, self.hp.seq_size):
if t >= self.hp.warmup_size:
pyx = softmax(T.dot(h2, T.transpose(p.W_emb)))
cost += T.sum(T.nnet.categorical_crossentropy(pyx, theano_one_hot(x[t], n_tokens)))
h_x = concatenate([h0[t], h2], axis=1)
h1, c1 = lstm(h_x, h1, c1, p.W1, p.V1, p.b1)
h1 = dropout(h1, p_dropout)
mu_encoder = T.dot(h1, p.Wmu) + p.bmu
if noise:
log_sigma_encoder = 0.5*(T.dot(h1, p.Wsi) + p.bsi)
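                    # KL divergence of the diagonal Gaussian q(z|x) from the
                    # N(0, I) prior, down-weighted by 0.01; z below is the
                    # reparameterised sample mu + eps * sigma.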
cost += -0.5* T.sum(1 + 2*log_sigma_encoder - mu_encoder**2 - T.exp(2*log_sigma_encoder)) * 0.01
z = mu_encoder + eps[t]*T.exp(log_sigma_encoder)
else:
z = mu_encoder
h2, c2 = lstm(z, h2, c2, p.W2, p.V2, p.b2)
h2 = dropout(h2, p_dropout)
h_updates = [(b1_h, h1), (b1_c, c1), (b2_h, h2), (b2_c, c2)]
return cost, h_updates
cost, h_updates = model(self.X, self.params, self.dropout, True)
te_cost, te_h_updates = model(self.X, self.params, 0.0, False)
def generate(seed_idx, p):
spx = T.zeros((self.hp.seq_size, n_tokens))
h1, c1, h2, c2 = [T.zeros((self.n_h)),
T.zeros((self.n_h)),
T.zeros((self.n_h)),
T.zeros((self.n_h))]
spx = T.set_subtensor(spx[0, seed_idx], 1)
#for t in xrange(0, self.hp.seq_size):
# if t > 0:
# pyx = softmax(T.dot(h2, T.transpose(p.W_emb)))
# spx = T.set_subtensor(spx[t,:], srnd.multinomial(pvals=pyx)[0])
# h1, c1 = lstm(p.W_emb[T.cast(spx[t], dtype='int32')], h1, c1, p.W1, p.V1, p.b1)
# h2, c2 = lstm(h1, h2, c2, p.W2, p.V2, p.b2)
return spx
spx = generate(self.seed_idx, self.params)
self.compile(cost, te_cost, h_updates, te_h_updates, spx)
|
testalt/electrum-NMC
|
refs/heads/master
|
setup-release.py
|
1
|
"""
py2app/py2exe build script for Electrum-NMC
Usage (Mac OS X):
python setup.py py2app
Usage (Windows):
python setup.py py2exe
"""
from setuptools import setup
import os
import re
import shutil
import sys
from lib.util import print_error
from lib.version import ELECTRUM_VERSION as version
name = "Electrum-NMC"
mainscript = 'electrum-NMC'
if sys.version_info[:3] < (2, 6, 0):
print_error("Error: " + name + " requires Python version >= 2.6.0...")
sys.exit(1)
if sys.platform == 'darwin':
from plistlib import Plist
plist = Plist.fromFile('Info.plist')
plist.update(dict(CFBundleIconFile='electrum.icns'))
shutil.copy(mainscript, mainscript + '.py')
mainscript += '.py'
extra_options = dict(
setup_requires=['py2app'],
app=[mainscript],
options=dict(py2app=dict(argv_emulation=True,
includes=['PyQt4.QtCore', 'PyQt4.QtGui', 'PyQt4.QtWebKit', 'PyQt4.QtNetwork', 'sip'],
packages=['lib', 'gui', 'plugins'],
iconfile='electrum.icns',
plist=plist,
resources=["data", "icons"])),
)
elif sys.platform == 'win32':
extra_options = dict(
setup_requires=['py2exe'],
app=[mainscript],
)
else:
extra_options = dict(
# Normally unix-like platforms will use "setup.py install"
# and install the main script as such
scripts=[mainscript],
)
setup(
name=name,
version=version,
**extra_options
)
from distutils import dir_util
if sys.platform == 'darwin':
# Remove the copied py file
os.remove(mainscript)
resource = "dist/" + name + ".app/Contents/Resources/"
dir_util.copy_tree("locale", resource + "locale/")
# Try to locate qt_menu
# Let's try the port version first!
if os.path.isfile("/opt/local/lib/Resources/qt_menu.nib"):
qt_menu_location = "/opt/local/lib/Resources/qt_menu.nib"
else:
# No dice? Then let's try the brew version
if os.path.exists("/usr/local/Cellar"):
qt_menu_location = os.popen("find /usr/local/Cellar -name qt_menu.nib | tail -n 1").read()
# no brew, check /opt/local
else:
qt_menu_location = os.popen("find /opt/local -name qt_menu.nib | tail -n 1").read()
qt_menu_location = re.sub('\n', '', qt_menu_location)
if (len(qt_menu_location) == 0):
print "Sorry couldn't find your qt_menu.nib this probably won't work"
else:
print "Found your qib: " + qt_menu_location
# Need to include a copy of qt_menu.nib
shutil.copytree(qt_menu_location, resource + "qt_menu.nib")
# Need to touch qt.conf to avoid loading 2 sets of Qt libraries
fname = resource + "qt.conf"
with file(fname, 'a'):
os.utime(fname, None)
|
Xperia-Nicki/android_platform_sony_nicki
|
refs/heads/master
|
external/webkit/Tools/Scripts/webkitpy/common/config/build.py
|
15
|
# Copyright (C) 2010 Apple Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Functions relating to building WebKit"""
import re
def _should_file_trigger_build(target_platform, file):
# The directories and patterns lists below map directory names or
# regexp patterns to the bot platforms for which they should trigger a
# build. Mapping to the empty list means that no builds should be
# triggered on any platforms. Earlier directories/patterns take
    # precedence over later ones.
# FIXME: The patterns below have only been verified to be correct on
# the platforms listed below. We should implement this for other platforms
# and start using it for their bots. Someone familiar with each platform
# will have to figure out what the right set of directories/patterns is for
# that platform.
assert(target_platform in ("mac-leopard", "mac-snowleopard", "win"))
directories = [
# Directories that shouldn't trigger builds on any bots.
("Examples", []),
("PerformanceTests", []),
("Source/WebCore/manual-tests", []),
("Tools/BuildSlaveSupport/build.webkit.org-config/public_html", []),
("Websites", []),
("android", []),
("brew", []),
("efl", []),
("haiku", []),
("iphone", []),
("opengl", []),
("opentype", []),
("openvg", []),
("wince", []),
("wx", []),
# Directories that should trigger builds on only some bots.
("Source/JavaScriptGlue", ["mac"]),
("Source/WebCore/image-decoders", ["chromium"]),
("LayoutTests/platform/mac", ["mac", "win"]),
("cairo", ["gtk", "wincairo"]),
("cf", ["chromium-mac", "mac", "qt", "win"]),
("chromium", ["chromium"]),
("cocoa", ["chromium-mac", "mac"]),
("curl", ["gtk", "wincairo"]),
("gobject", ["gtk"]),
("gpu", ["chromium", "mac"]),
("gstreamer", ["gtk"]),
("gtk", ["gtk"]),
("mac", ["chromium-mac", "mac"]),
("mac-leopard", ["mac-leopard"]),
("mac-snowleopard", ["mac", "win"]),
("mac-wk2", ["mac-snowleopard", "win"]),
("objc", ["mac"]),
("qt", ["qt"]),
("skia", ["chromium"]),
("soup", ["gtk"]),
("v8", ["chromium"]),
("win", ["chromium-win", "win"]),
]
patterns = [
# Patterns that shouldn't trigger builds on any bots.
(r"(?:^|/)ChangeLog.*$", []),
(r"(?:^|/)Makefile$", []),
(r"/ARM", []),
(r"/CMake.*", []),
(r"/LICENSE[^/]+$", []),
(r"ARM(?:v7)?\.(?:cpp|h)$", []),
(r"MIPS\.(?:cpp|h)$", []),
(r"WinCE\.(?:cpp|h|mm)$", []),
(r"\.(?:bkl|mk)$", []),
# Patterns that should trigger builds on only some bots.
(r"(?:^|/)GNUmakefile\.am$", ["gtk"]),
(r"/\w+Chromium\w*\.(?:cpp|h|mm)$", ["chromium"]),
(r"Mac\.(?:cpp|h|mm)$", ["mac"]),
(r"\.(?:vcproj|vsprops|sln)$", ["win"]),
(r"\.exp(?:\.in)?$", ["mac"]),
(r"\.gypi?", ["chromium"]),
(r"\.order$", ["mac"]),
(r"\.pr[io]$", ["qt"]),
(r"\.vcproj/", ["win"]),
(r"\.xcconfig$", ["mac"]),
(r"\.xcodeproj/", ["mac"]),
]
base_platform = target_platform.split("-")[0]
# See if the file is in one of the known directories.
for directory, platforms in directories:
if re.search(r"(?:^|/)%s/" % directory, file):
return target_platform in platforms or base_platform in platforms
# See if the file matches a known pattern.
for pattern, platforms in patterns:
if re.search(pattern, file):
return target_platform in platforms or base_platform in platforms
# See if the file is a platform-specific test result.
match = re.match("LayoutTests/platform/(?P<platform>[^/]+)/", file)
if match:
# See if the file is a test result for this platform, our base
# platform, or one of our sub-platforms.
return match.group("platform") in (target_platform, base_platform) or match.group("platform").startswith("%s-" % target_platform)
# The file isn't one we know about specifically, so we should assume we
# have to build.
return True
def should_build(target_platform, changed_files):
"""Returns true if the changed files affect the given platform, and
thus a build should be performed. target_platform should be one of the
platforms used in the build.webkit.org master's config.json file."""
return any(_should_file_trigger_build(target_platform, file) for file in changed_files)
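# Illustrative calls (hypothetical changed-file lists):
#   should_build("mac-snowleopard", ["Source/WebCore/mac/Foo.mm"])  # True:
#       the "mac" directory maps to the mac bots
#   should_build("win", ["ChangeLog"])  # False: ChangeLog triggers no bots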
|
JioCloud/python-openstackclient
|
refs/heads/master
|
openstackclient/compute/v2/security_group.py
|
2
|
# Copyright 2012 OpenStack Foundation
# Copyright 2013 Nebula Inc
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""Compute v2 Security Group action implementations"""
import logging
import six
from cliff import command
from cliff import lister
from cliff import show
from keystoneclient.openstack.common.apiclient import exceptions as ksc_exc
from novaclient.v1_1 import security_group_rules
from openstackclient.common import parseractions
from openstackclient.common import utils
def _xform_security_group_rule(sgroup):
info = {}
info.update(sgroup)
from_port = info.pop('from_port')
to_port = info.pop('to_port')
if isinstance(from_port, int) and isinstance(to_port, int):
port_range = {'port_range': "%u:%u" % (from_port, to_port)}
elif from_port is None and to_port is None:
port_range = {'port_range': ""}
else:
port_range = {'port_range': "%s:%s" % (from_port, to_port)}
info.update(port_range)
if 'cidr' in info['ip_range']:
info['ip_range'] = info['ip_range']['cidr']
else:
info['ip_range'] = ''
if info['ip_protocol'] == 'icmp':
info['port_range'] = ''
elif info['ip_protocol'] is None:
info['ip_protocol'] = ''
return info
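# Sketch of the transformation on a hypothetical rule:
#   {'from_port': 137, 'to_port': 139, 'ip_protocol': 'tcp',
#    'ip_range': {'cidr': '10.0.0.0/8'}}
# becomes
#   {'port_range': '137:139', 'ip_protocol': 'tcp', 'ip_range': '10.0.0.0/8'}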
class CreateSecurityGroup(show.ShowOne):
"""Create a new security group"""
log = logging.getLogger(__name__ + ".CreateSecurityGroup")
def get_parser(self, prog_name):
parser = super(CreateSecurityGroup, self).get_parser(prog_name)
parser.add_argument(
"name",
metavar="<name>",
help="New security group name",
)
parser.add_argument(
"--description",
metavar="<description>",
help="Security group description",
)
return parser
def take_action(self, parsed_args):
self.log.debug("take_action(%s)", parsed_args)
compute_client = self.app.client_manager.compute
data = compute_client.security_groups.create(
parsed_args.name,
parsed_args.description,
)
info = {}
info.update(data._info)
return zip(*sorted(six.iteritems(info)))
class DeleteSecurityGroup(command.Command):
"""Delete a security group"""
log = logging.getLogger(__name__ + '.DeleteSecurityGroup')
def get_parser(self, prog_name):
parser = super(DeleteSecurityGroup, self).get_parser(prog_name)
parser.add_argument(
'group',
metavar='<group>',
help='Name or ID of security group to delete',
)
return parser
def take_action(self, parsed_args):
self.log.debug('take_action(%s)', parsed_args)
compute_client = self.app.client_manager.compute
data = utils.find_resource(
compute_client.security_groups,
parsed_args.group,
)
compute_client.security_groups.delete(data.id)
return
class ListSecurityGroup(lister.Lister):
"""List all security groups"""
log = logging.getLogger(__name__ + ".ListSecurityGroup")
def get_parser(self, prog_name):
parser = super(ListSecurityGroup, self).get_parser(prog_name)
parser.add_argument(
'--all-projects',
action='store_true',
default=False,
help='Display information from all projects (admin only)',
)
return parser
def take_action(self, parsed_args):
def _get_project(project_id):
try:
return getattr(project_hash[project_id], 'name', project_id)
except KeyError:
return project_id
self.log.debug("take_action(%s)", parsed_args)
compute_client = self.app.client_manager.compute
columns = (
"ID",
"Name",
"Description",
)
column_headers = columns
if parsed_args.all_projects:
# TODO(dtroyer): Translate Project_ID to Project (name)
columns = columns + ('Tenant ID',)
column_headers = column_headers + ('Project',)
search = {'all_tenants': parsed_args.all_projects}
data = compute_client.security_groups.list(search_opts=search)
project_hash = {}
try:
projects = self.app.client_manager.identity.projects.list()
except ksc_exc.Forbidden:
# This fails when the user is not an admin, just move along
pass
else:
for project in projects:
project_hash[project.id] = project
return (column_headers,
(utils.get_item_properties(
s, columns,
formatters={'Tenant ID': _get_project},
) for s in data))
class SetSecurityGroup(show.ShowOne):
"""Set security group properties"""
log = logging.getLogger(__name__ + '.SetSecurityGroup')
def get_parser(self, prog_name):
parser = super(SetSecurityGroup, self).get_parser(prog_name)
parser.add_argument(
'group',
metavar='<group>',
help='Name or ID of security group to change',
)
parser.add_argument(
'--name',
metavar='<new-name>',
help='New security group name',
)
parser.add_argument(
"--description",
metavar="<description>",
help="New security group name",
)
return parser
def take_action(self, parsed_args):
self.log.debug('take_action(%s)', parsed_args)
compute_client = self.app.client_manager.compute
data = utils.find_resource(
compute_client.security_groups,
parsed_args.group,
)
if parsed_args.name:
data.name = parsed_args.name
if parsed_args.description:
data.description = parsed_args.description
info = {}
info.update(compute_client.security_groups.update(
data,
data.name,
data.description,
)._info)
if info:
return zip(*sorted(six.iteritems(info)))
else:
return ({}, {})
class ShowSecurityGroup(show.ShowOne):
"""Show a specific security group"""
log = logging.getLogger(__name__ + '.ShowSecurityGroup')
def get_parser(self, prog_name):
parser = super(ShowSecurityGroup, self).get_parser(prog_name)
parser.add_argument(
'group',
metavar='<group>',
            help='Name or ID of security group to show',
)
return parser
def take_action(self, parsed_args):
self.log.debug('take_action(%s)', parsed_args)
compute_client = self.app.client_manager.compute
info = {}
info.update(utils.find_resource(
compute_client.security_groups,
parsed_args.group,
)._info)
rules = []
for r in info['rules']:
rules.append(utils.format_dict(_xform_security_group_rule(r)))
# Format rules into a list of strings
info.update(
{'rules': rules}
)
# Map 'tenant_id' column to 'project_id'
info.update(
{'project_id': info.pop('tenant_id')}
)
return zip(*sorted(six.iteritems(info)))
class CreateSecurityGroupRule(show.ShowOne):
"""Create a new security group rule"""
log = logging.getLogger(__name__ + ".CreateSecurityGroupRule")
def get_parser(self, prog_name):
parser = super(CreateSecurityGroupRule, self).get_parser(prog_name)
parser.add_argument(
'group',
metavar='<group>',
help='Create rule in this security group',
)
parser.add_argument(
"--proto",
metavar="<proto>",
default="tcp",
help="IP protocol (icmp, tcp, udp; default: tcp)",
)
parser.add_argument(
"--src-ip",
metavar="<ip-address>",
default="0.0.0.0/0",
help="Source IP (may use CIDR notation; default: 0.0.0.0/0)",
)
parser.add_argument(
"--dst-port",
metavar="<port-range>",
action=parseractions.RangeAction,
help="Destination port, may be a range: 137:139 (default: 0; "
"only required for proto tcp and udp)",
)
return parser
def take_action(self, parsed_args):
self.log.debug("take_action(%s)", parsed_args)
compute_client = self.app.client_manager.compute
group = utils.find_resource(
compute_client.security_groups,
parsed_args.group,
)
from_port, to_port = parsed_args.dst_port
data = compute_client.security_group_rules.create(
group.id,
parsed_args.proto,
from_port,
to_port,
parsed_args.src_ip,
)
info = _xform_security_group_rule(data._info)
return zip(*sorted(six.iteritems(info)))
class DeleteSecurityGroupRule(command.Command):
"""Delete a security group rule"""
log = logging.getLogger(__name__ + '.DeleteSecurityGroupRule')
def get_parser(self, prog_name):
parser = super(DeleteSecurityGroupRule, self).get_parser(prog_name)
parser.add_argument(
'group',
metavar='<group>',
            help='Delete rule from this security group',
)
parser.add_argument(
"--proto",
metavar="<proto>",
default="tcp",
help="IP protocol (icmp, tcp, udp; default: tcp)",
)
parser.add_argument(
"--src-ip",
metavar="<ip-address>",
default="0.0.0.0/0",
help="Source IP (may use CIDR notation; default: 0.0.0.0/0)",
)
parser.add_argument(
"--dst-port",
metavar="<port-range>",
action=parseractions.RangeAction,
help="Destination port, may be a range: 137:139 (default: 0; "
"only required for proto tcp and udp)",
)
return parser
def take_action(self, parsed_args):
self.log.debug('take_action(%s)', parsed_args)
compute_client = self.app.client_manager.compute
group = utils.find_resource(
compute_client.security_groups,
parsed_args.group,
)
from_port, to_port = parsed_args.dst_port
# sigh...delete by ID?
compute_client.security_group_rules.delete(
group.id,
parsed_args.proto,
from_port,
to_port,
parsed_args.src_ip,
)
return
class ListSecurityGroupRule(lister.Lister):
"""List all security group rules"""
log = logging.getLogger(__name__ + ".ListSecurityGroupRule")
def get_parser(self, prog_name):
parser = super(ListSecurityGroupRule, self).get_parser(prog_name)
parser.add_argument(
'group',
metavar='<group>',
            help='List rules in this security group',
)
return parser
def take_action(self, parsed_args):
self.log.debug("take_action(%s)", parsed_args)
compute_client = self.app.client_manager.compute
group = utils.find_resource(
compute_client.security_groups,
parsed_args.group,
)
# Argh, the rules are not Resources...
rules = []
for rule in group.rules:
rules.append(security_group_rules.SecurityGroupRule(
compute_client.security_group_rules,
_xform_security_group_rule(rule),
))
columns = column_headers = (
"ID",
"IP Protocol",
"IP Range",
"Port Range",
)
return (column_headers,
(utils.get_item_properties(
s, columns,
) for s in rules))
|
gaddman/ansible
|
refs/heads/devel
|
lib/ansible/modules/network/junos/junos_vrf.py
|
27
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2017, Ansible by Red Hat, inc
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'network'}
DOCUMENTATION = """
---
module: junos_vrf
version_added: "2.4"
author: "Ganesh Nalawade (@ganeshrn)"
short_description: Manage the VRF definitions on Juniper JUNOS devices
description:
- This module provides declarative management of VRF definitions on
Juniper JUNOS devices. It allows playbooks to manage individual or
the entire VRF collection.
options:
name:
description:
      - The name of the VRF definition to be managed on the remote JUNOS
        device. The VRF definition name is an ASCII string name used
        to uniquely identify the VRF. This argument is mutually exclusive
        with the C(aggregate) argument.
description:
description:
- Provides a short description of the VRF definition in the
current active configuration. The VRF definition value accepts
alphanumeric characters used to provide additional information
about the VRF.
rd:
description:
- The router-distinguisher value uniquely identifies the VRF to
        routing processes on the remote JUNOS system. The RD value takes
the form of C(A:B) where C(A) and C(B) are both numeric values.
interfaces:
description:
- Identifies the set of interfaces that
should be configured in the VRF. Interfaces must be routed
interfaces in order to be placed into a VRF.
target:
description:
- It configures VRF target community configuration. The target value takes
the form of C(target:A:B) where C(A) and C(B) are both numeric values.
table_label:
description:
- Causes JUNOS to allocate a VPN label per VRF rather than per VPN FEC.
        This allows forwarding of traffic to directly connected subnets, CoS
        egress filtering, etc.
type: bool
aggregate:
description:
- The set of VRF definition objects to be configured on the remote
        JUNOS device. The list entries can either be the VRF name or a hash
of VRF definitions and attributes. This argument is mutually
exclusive with the C(name) argument.
state:
description:
- Configures the state of the VRF definition
as it relates to the device operational configuration. When set
to I(present), the VRF should be configured in the device active
configuration and when set to I(absent) the VRF should not be
        in the device active configuration.
default: present
choices: ['present', 'absent']
active:
description:
      - Specifies whether the configuration is active or deactivated.
default: True
type: bool
requirements:
- ncclient (>=v0.5.2)
notes:
- This module requires the netconf system service be enabled on
the remote device being managed.
- Tested against vSRX JUNOS version 15.1X49-D15.4, vqfx-10000 JUNOS Version 15.1X53-D60.4.
- Recommended connection is C(netconf). See L(the Junos OS Platform Options,../network/user_guide/platform_junos.html).
- This module also works with C(local) connections for legacy playbooks.
extends_documentation_fragment: junos
"""
EXAMPLES = """
- name: Configure vrf configuration
junos_vrf:
name: test-1
description: test-vrf-1
interfaces:
- ge-0/0/3
- ge-0/0/2
rd: 192.0.2.1:10
target: target:65514:113
state: present
- name: Remove vrf configuration
junos_vrf:
name: test-1
description: test-vrf-1
interfaces:
- ge-0/0/3
- ge-0/0/2
rd: 192.0.2.1:10
target: target:65514:113
state: absent
- name: Deactivate vrf configuration
junos_vrf:
name: test-1
description: test-vrf-1
interfaces:
- ge-0/0/3
- ge-0/0/2
rd: 192.0.2.1:10
target: target:65514:113
active: False
- name: Activate vrf configuration
junos_vrf:
name: test-1
description: test-vrf-1
interfaces:
- ge-0/0/3
- ge-0/0/2
rd: 192.0.2.1:10
target: target:65514:113
active: True
- name: Create vrf using aggregate
junos_vrf:
aggregate:
- name: test-1
description: test-vrf-1
interfaces:
- ge-0/0/3
- ge-0/0/2
rd: 192.0.2.1:10
target: target:65514:113
- name: test-2
description: test-vrf-2
interfaces:
- ge-0/0/4
- ge-0/0/5
rd: 192.0.2.2:10
target: target:65515:114
state: present
"""
RETURN = """
diff.prepared:
description: Configuration difference before and after applying change.
returned: when configuration is changed and diff option is enabled.
type: string
sample: >
[edit routing-instances]
+ test-1 {
+ description test-vrf-1;
+ instance-type vrf;
+ interface ge-0/0/2.0;
+ interface ge-0/0/3.0;
+ route-distinguisher 192.0.2.1:10;
+ vrf-target target:65514:113;
+ }
"""
import collections
from copy import deepcopy
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.common.utils import remove_default_spec
from ansible.module_utils.network.junos.junos import junos_argument_spec, tostring
from ansible.module_utils.network.junos.junos import load_config, map_params_to_obj, map_obj_to_ele, to_param_list
from ansible.module_utils.network.junos.junos import commit_configuration, discard_changes, locked_config
USE_PERSISTENT_CONNECTION = True
def main():
""" main entry point for module execution
"""
element_spec = dict(
name=dict(),
description=dict(),
rd=dict(type='list'),
interfaces=dict(type='list'),
target=dict(type='list'),
state=dict(default='present', choices=['present', 'absent']),
active=dict(default=True, type='bool'),
table_label=dict(default=True, type='bool')
)
aggregate_spec = deepcopy(element_spec)
aggregate_spec['name'] = dict(required=True)
# remove default in aggregate spec, to handle common arguments
remove_default_spec(aggregate_spec)
argument_spec = dict(
aggregate=dict(type='list', elements='dict', options=aggregate_spec),
)
argument_spec.update(element_spec)
argument_spec.update(junos_argument_spec)
required_one_of = [['aggregate', 'name']]
mutually_exclusive = [['aggregate', 'name']]
module = AnsibleModule(argument_spec=argument_spec,
supports_check_mode=True,
required_one_of=required_one_of,
mutually_exclusive=mutually_exclusive)
warnings = list()
result = {'changed': False}
if warnings:
result['warnings'] = warnings
top = 'routing-instances/instance'
param_to_xpath_map = collections.OrderedDict()
param_to_xpath_map.update([
('name', {'xpath': 'name', 'is_key': True}),
('description', 'description'),
('type', 'instance-type'),
('rd', 'route-distinguisher/rd-type'),
('interfaces', 'interface/name'),
('target', 'vrf-target/community'),
('table_label', {'xpath': 'vrf-table-label', 'tag_only': True}),
])
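    # Illustrative sketch (an assumption, not part of the module): for
    # name='test-1' and rd='192.0.2.1:10' the map above drives
    # map_params_to_obj/map_obj_to_ele to build XML roughly shaped like:
    #   <routing-instances><instance>
    #     <name>test-1</name>
    #     <route-distinguisher><rd-type>192.0.2.1:10</rd-type></route-distinguisher>
    #     <vrf-table-label/>
    #   </instance></routing-instances>
    # The exact element layout comes from the junos module_utils helpers.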
params = to_param_list(module)
requests = list()
for param in params:
# if key doesn't exist in the item, get it from module.params
for key in param:
if param.get(key) is None:
param[key] = module.params[key]
item = param.copy()
item['type'] = 'vrf'
want = map_params_to_obj(module, param_to_xpath_map, param=item)
requests.append(map_obj_to_ele(module, want, top, param=item))
with locked_config(module):
for req in requests:
diff = load_config(module, tostring(req), warnings, action='merge')
commit = not module.check_mode
if diff:
if commit:
commit_configuration(module)
else:
discard_changes(module)
result['changed'] = True
if module._diff:
result['diff'] = {'prepared': diff}
module.exit_json(**result)
if __name__ == "__main__":
main()
|
MalloyPower/parsing-python
|
refs/heads/master
|
front-end/testsuite-python-lib/Python-2.7/Lib/distutils/msvc9compiler.py
|
2
|
"""distutils.msvc9compiler
Contains MSVCCompiler, an implementation of the abstract CCompiler class
for Microsoft Visual Studio 2008.
The module is compatible with VS 2005 and VS 2008. You can find legacy support
for older versions of VS in distutils.msvccompiler.
"""
# Written by Perry Stoll
# hacked by Robin Becker and Thomas Heller to do a better job of
# finding DevStudio (through the registry)
# ported to VS2005 and VS 2008 by Christian Heimes
__revision__ = "$Id: msvc9compiler.py 82130 2010-06-21 15:27:46Z benjamin.peterson $"
import os
import subprocess
import sys
import re
from distutils.errors import (DistutilsExecError, DistutilsPlatformError,
CompileError, LibError, LinkError)
from distutils.ccompiler import CCompiler, gen_lib_options
from distutils import log
from distutils.util import get_platform
import _winreg
RegOpenKeyEx = _winreg.OpenKeyEx
RegEnumKey = _winreg.EnumKey
RegEnumValue = _winreg.EnumValue
RegError = _winreg.error
HKEYS = (_winreg.HKEY_USERS,
_winreg.HKEY_CURRENT_USER,
_winreg.HKEY_LOCAL_MACHINE,
_winreg.HKEY_CLASSES_ROOT)
NATIVE_WIN64 = (sys.platform == 'win32' and sys.maxsize > 2**32)
if NATIVE_WIN64:
# Visual C++ is a 32-bit application, so we need to look in
# the corresponding registry branch, if we're running a
# 64-bit Python on Win64
VS_BASE = r"Software\Wow6432Node\Microsoft\VisualStudio\%0.1f"
VSEXPRESS_BASE = r"Software\Wow6432Node\Microsoft\VCExpress\%0.1f"
WINSDK_BASE = r"Software\Wow6432Node\Microsoft\Microsoft SDKs\Windows"
NET_BASE = r"Software\Wow6432Node\Microsoft\.NETFramework"
else:
VS_BASE = r"Software\Microsoft\VisualStudio\%0.1f"
VSEXPRESS_BASE = r"Software\Microsoft\VCExpress\%0.1f"
WINSDK_BASE = r"Software\Microsoft\Microsoft SDKs\Windows"
NET_BASE = r"Software\Microsoft\.NETFramework"
# A map keyed by get_platform() return values to values accepted by
# 'vcvarsall.bat'. Note a cross-compile may combine these (e.g., 'x86_amd64' is
# the param to cross-compile on x86 targeting amd64.)
PLAT_TO_VCVARS = {
'win32' : 'x86',
'win-amd64' : 'amd64',
'win-ia64' : 'ia64',
}
class Reg:
"""Helper class to read values from the registry
"""
def get_value(cls, path, key):
for base in HKEYS:
d = cls.read_values(base, path)
if d and key in d:
return d[key]
raise KeyError(key)
get_value = classmethod(get_value)
def read_keys(cls, base, key):
"""Return list of registry keys."""
try:
handle = RegOpenKeyEx(base, key)
except RegError:
return None
L = []
i = 0
while True:
try:
k = RegEnumKey(handle, i)
except RegError:
break
L.append(k)
i += 1
return L
read_keys = classmethod(read_keys)
def read_values(cls, base, key):
"""Return dict of registry keys and values.
All names are converted to lowercase.
"""
try:
handle = RegOpenKeyEx(base, key)
except RegError:
return None
d = {}
i = 0
while True:
try:
name, value, type = RegEnumValue(handle, i)
except RegError:
break
name = name.lower()
d[cls.convert_mbcs(name)] = cls.convert_mbcs(value)
i += 1
return d
read_values = classmethod(read_values)
def convert_mbcs(s):
dec = getattr(s, "decode", None)
if dec is not None:
try:
s = dec("mbcs")
except UnicodeError:
pass
return s
convert_mbcs = staticmethod(convert_mbcs)
class MacroExpander:
def __init__(self, version):
self.macros = {}
self.vsbase = VS_BASE % version
self.load_macros(version)
def set_macro(self, macro, path, key):
self.macros["$(%s)" % macro] = Reg.get_value(path, key)
def load_macros(self, version):
self.set_macro("VCInstallDir", self.vsbase + r"\Setup\VC", "productdir")
self.set_macro("VSInstallDir", self.vsbase + r"\Setup\VS", "productdir")
self.set_macro("FrameworkDir", NET_BASE, "installroot")
try:
if version >= 8.0:
self.set_macro("FrameworkSDKDir", NET_BASE,
"sdkinstallrootv2.0")
else:
raise KeyError("sdkinstallrootv2.0")
except KeyError:
raise DistutilsPlatformError(
"""Python was built with Visual Studio 2008;
extensions must be built with a compiler that can generate compatible binaries.
Visual Studio 2008 was not found on this system. If you have Cygwin installed,
you can try compiling with MingW32, by passing "-c mingw32" to setup.py.""")
if version >= 9.0:
self.set_macro("FrameworkVersion", self.vsbase, "clr version")
self.set_macro("WindowsSdkDir", WINSDK_BASE, "currentinstallfolder")
else:
p = r"Software\Microsoft\NET Framework Setup\Product"
for base in HKEYS:
try:
h = RegOpenKeyEx(base, p)
except RegError:
continue
key = RegEnumKey(h, 0)
d = Reg.get_value(base, r"%s\%s" % (p, key))
self.macros["$(FrameworkVersion)"] = d["version"]
def sub(self, s):
for k, v in self.macros.items():
s = s.replace(k, v)
return s
def get_build_version():
"""Return the version of MSVC that was used to build Python.
For Python 2.3 and up, the version number is included in
sys.version. For earlier versions, assume the compiler is MSVC 6.
"""
prefix = "MSC v."
i = sys.version.find(prefix)
if i == -1:
return 6
i = i + len(prefix)
s, rest = sys.version[i:].split(" ", 1)
majorVersion = int(s[:-2]) - 6
minorVersion = int(s[2:3]) / 10.0
# I don't think paths are affected by minor version in version 6
if majorVersion == 6:
minorVersion = 0
if majorVersion >= 6:
return majorVersion + minorVersion
# else we don't know what version of the compiler this is
return None
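# Illustrative sketch of the parsing above: a Python built with VS 2008 has
# "MSC v.1500" in sys.version, so s == "1500", majorVersion == 15 - 6 == 9,
# minorVersion == 0 / 10.0 == 0.0, and get_build_version() returns 9.0.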
def normalize_and_reduce_paths(paths):
"""Return a list of normalized paths with duplicates removed.
The current order of paths is maintained.
"""
# Paths are normalized so things like: /a and /a/ aren't both preserved.
reduced_paths = []
for p in paths:
np = os.path.normpath(p)
# XXX(nnorwitz): O(n**2), if reduced_paths gets long perhaps use a set.
if np not in reduced_paths:
reduced_paths.append(np)
return reduced_paths
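# Minimal sketch of the intended behavior (illustrative values):
#   normalize_and_reduce_paths(['C:\\a\\', 'C:\\a', 'C:\\b'])
#   -> ['C:\\a', 'C:\\b']    # 'C:\a' and 'C:\a\' normalize to the same path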
def removeDuplicates(variable):
"""Remove duplicate values of an environment variable.
"""
oldList = variable.split(os.pathsep)
newList = []
for i in oldList:
if i not in newList:
newList.append(i)
newVariable = os.pathsep.join(newList)
return newVariable
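# Minimal sketch (illustrative, assuming os.pathsep == ';' on Windows):
#   removeDuplicates('C:\\x;C:\\y;C:\\x')  ->  'C:\\x;C:\\y'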
def find_vcvarsall(version):
"""Find the vcvarsall.bat file
At first it tries to find the productdir of VS 2008 in the registry. If
that fails it falls back to the VS90COMNTOOLS env var.
"""
vsbase = VS_BASE % version
try:
productdir = Reg.get_value(r"%s\Setup\VC" % vsbase,
"productdir")
except KeyError:
productdir = None
# trying Express edition
if productdir is None:
vsbase = VSEXPRESS_BASE % version
try:
productdir = Reg.get_value(r"%s\Setup\VC" % vsbase,
"productdir")
except KeyError:
productdir = None
log.debug("Unable to find productdir in registry")
if not productdir or not os.path.isdir(productdir):
toolskey = "VS%0.f0COMNTOOLS" % version
toolsdir = os.environ.get(toolskey, None)
if toolsdir and os.path.isdir(toolsdir):
productdir = os.path.join(toolsdir, os.pardir, os.pardir, "VC")
productdir = os.path.abspath(productdir)
if not os.path.isdir(productdir):
log.debug("%s is not a valid directory" % productdir)
return None
else:
log.debug("Env var %s is not set or invalid" % toolskey)
if not productdir:
log.debug("No productdir found")
return None
vcvarsall = os.path.join(productdir, "vcvarsall.bat")
if os.path.isfile(vcvarsall):
return vcvarsall
log.debug("Unable to find vcvarsall.bat")
return None
def query_vcvarsall(version, arch="x86"):
"""Launch vcvarsall.bat and read the settings from its environment
"""
vcvarsall = find_vcvarsall(version)
interesting = set(("include", "lib", "libpath", "path"))
result = {}
if vcvarsall is None:
raise DistutilsPlatformError("Unable to find vcvarsall.bat")
log.debug("Calling 'vcvarsall.bat %s' (version=%s)", arch, version)
popen = subprocess.Popen('"%s" %s & set' % (vcvarsall, arch),
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, stderr = popen.communicate()
if popen.wait() != 0:
raise DistutilsPlatformError(stderr.decode("mbcs"))
stdout = stdout.decode("mbcs")
for line in stdout.split("\n"):
line = Reg.convert_mbcs(line)
if '=' not in line:
continue
line = line.strip()
key, value = line.split('=', 1)
key = key.lower()
if key in interesting:
if value.endswith(os.pathsep):
value = value[:-1]
result[key] = removeDuplicates(value)
if len(result) != len(interesting):
raise ValueError(str(list(result.keys())))
return result
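# Illustrative sketch: on success the result maps each "interesting" name to a
# de-duplicated os.pathsep-joined value (the paths below are placeholders):
#   query_vcvarsall(9.0, 'x86')
#   -> {'include': 'C:\\...\\VC\\include;...', 'lib': '...',
#       'libpath': '...', 'path': '...'}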
# More globals
VERSION = get_build_version()
if VERSION < 8.0:
raise DistutilsPlatformError("VC %0.1f is not supported by this module" % VERSION)
# MACROS = MacroExpander(VERSION)
class MSVCCompiler(CCompiler) :
"""Concrete class that implements an interface to Microsoft Visual C++,
as defined by the CCompiler abstract class."""
compiler_type = 'msvc'
# Just set this so CCompiler's constructor doesn't barf. We currently
# don't use the 'set_executables()' bureaucracy provided by CCompiler,
# as it really isn't necessary for this sort of single-compiler class.
# Would be nice to have a consistent interface with UnixCCompiler,
# though, so it's worth thinking about.
executables = {}
# Private class data (need to distinguish C from C++ source for compiler)
_c_extensions = ['.c']
_cpp_extensions = ['.cc', '.cpp', '.cxx']
_rc_extensions = ['.rc']
_mc_extensions = ['.mc']
# Needed for the filename generation methods provided by the
# base class, CCompiler.
src_extensions = (_c_extensions + _cpp_extensions +
_rc_extensions + _mc_extensions)
res_extension = '.res'
obj_extension = '.obj'
static_lib_extension = '.lib'
shared_lib_extension = '.dll'
static_lib_format = shared_lib_format = '%s%s'
exe_extension = '.exe'
def __init__(self, verbose=0, dry_run=0, force=0):
CCompiler.__init__ (self, verbose, dry_run, force)
self.__version = VERSION
self.__root = r"Software\Microsoft\VisualStudio"
# self.__macros = MACROS
self.__paths = []
# target platform (.plat_name is consistent with 'bdist')
self.plat_name = None
self.__arch = None # deprecated name
self.initialized = False
def initialize(self, plat_name=None):
# multi-init means we would need to check platform same each time...
assert not self.initialized, "don't init multiple times"
if plat_name is None:
plat_name = get_platform()
# sanity check for platforms to prevent obscure errors later.
ok_plats = 'win32', 'win-amd64', 'win-ia64'
if plat_name not in ok_plats:
raise DistutilsPlatformError("--plat-name must be one of %s" %
(ok_plats,))
if "DISTUTILS_USE_SDK" in os.environ and "MSSdk" in os.environ and self.find_exe("cl.exe"):
# Assume that the SDK set up everything alright; don't try to be
# smarter
self.cc = "cl.exe"
self.linker = "link.exe"
self.lib = "lib.exe"
self.rc = "rc.exe"
self.mc = "mc.exe"
else:
# On x86, 'vcvars32.bat amd64' creates an env that doesn't work;
# to cross compile, you use 'x86_amd64'.
# On AMD64, 'vcvars32.bat amd64' is a native build env; to cross
            # compile use 'x86' (i.e., it runs the x86 compiler directly)
# No idea how itanium handles this, if at all.
if plat_name == get_platform() or plat_name == 'win32':
# native build or cross-compile to win32
plat_spec = PLAT_TO_VCVARS[plat_name]
else:
# cross compile from win32 -> some 64bit
plat_spec = PLAT_TO_VCVARS[get_platform()] + '_' + \
PLAT_TO_VCVARS[plat_name]
vc_env = query_vcvarsall(VERSION, plat_spec)
# take care to only use strings in the environment.
self.__paths = vc_env['path'].encode('mbcs').split(os.pathsep)
os.environ['lib'] = vc_env['lib'].encode('mbcs')
os.environ['include'] = vc_env['include'].encode('mbcs')
if len(self.__paths) == 0:
raise DistutilsPlatformError("Python was built with %s, "
"and extensions need to be built with the same "
"version of the compiler, but it isn't installed."
% self.__product)
self.cc = self.find_exe("cl.exe")
self.linker = self.find_exe("link.exe")
self.lib = self.find_exe("lib.exe")
self.rc = self.find_exe("rc.exe") # resource compiler
self.mc = self.find_exe("mc.exe") # message compiler
#self.set_path_env_var('lib')
#self.set_path_env_var('include')
# extend the MSVC path with the current path
try:
for p in os.environ['path'].split(';'):
self.__paths.append(p)
except KeyError:
pass
self.__paths = normalize_and_reduce_paths(self.__paths)
os.environ['path'] = ";".join(self.__paths)
self.preprocess_options = None
if self.__arch == "x86":
self.compile_options = [ '/nologo', '/Ox', '/MD', '/W3',
'/DNDEBUG']
self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3',
'/Z7', '/D_DEBUG']
else:
# Win64
self.compile_options = [ '/nologo', '/Ox', '/MD', '/W3', '/GS-' ,
'/DNDEBUG']
self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3', '/GS-',
'/Z7', '/D_DEBUG']
self.ldflags_shared = ['/DLL', '/nologo', '/INCREMENTAL:NO']
if self.__version >= 7:
self.ldflags_shared_debug = [
'/DLL', '/nologo', '/INCREMENTAL:no', '/DEBUG', '/pdb:None'
]
self.ldflags_static = [ '/nologo']
self.initialized = True
# -- Worker methods ------------------------------------------------
def object_filenames(self,
source_filenames,
strip_dir=0,
output_dir=''):
# Copied from ccompiler.py, extended to return .res as 'object'-file
# for .rc input file
if output_dir is None: output_dir = ''
obj_names = []
for src_name in source_filenames:
(base, ext) = os.path.splitext (src_name)
base = os.path.splitdrive(base)[1] # Chop off the drive
base = base[os.path.isabs(base):] # If abs, chop off leading /
if ext not in self.src_extensions:
# Better to raise an exception instead of silently continuing
# and later complain about sources and targets having
# different lengths
raise CompileError ("Don't know how to compile %s" % src_name)
if strip_dir:
base = os.path.basename (base)
if ext in self._rc_extensions:
obj_names.append (os.path.join (output_dir,
base + self.res_extension))
elif ext in self._mc_extensions:
obj_names.append (os.path.join (output_dir,
base + self.res_extension))
else:
obj_names.append (os.path.join (output_dir,
base + self.obj_extension))
return obj_names
def compile(self, sources,
output_dir=None, macros=None, include_dirs=None, debug=0,
extra_preargs=None, extra_postargs=None, depends=None):
if not self.initialized:
self.initialize()
compile_info = self._setup_compile(output_dir, macros, include_dirs,
sources, depends, extra_postargs)
macros, objects, extra_postargs, pp_opts, build = compile_info
compile_opts = extra_preargs or []
compile_opts.append ('/c')
if debug:
compile_opts.extend(self.compile_options_debug)
else:
compile_opts.extend(self.compile_options)
for obj in objects:
try:
src, ext = build[obj]
except KeyError:
continue
if debug:
# pass the full pathname to MSVC in debug mode,
# this allows the debugger to find the source file
# without asking the user to browse for it
src = os.path.abspath(src)
if ext in self._c_extensions:
input_opt = "/Tc" + src
elif ext in self._cpp_extensions:
input_opt = "/Tp" + src
elif ext in self._rc_extensions:
# compile .RC to .RES file
input_opt = src
output_opt = "/fo" + obj
try:
self.spawn([self.rc] + pp_opts +
[output_opt] + [input_opt])
except DistutilsExecError, msg:
raise CompileError(msg)
continue
elif ext in self._mc_extensions:
# Compile .MC to .RC file to .RES file.
# * '-h dir' specifies the directory for the
# generated include file
# * '-r dir' specifies the target directory of the
# generated RC file and the binary message resource
# it includes
#
# For now (since there are no options to change this),
# we use the source-directory for the include file and
# the build directory for the RC file and message
# resources. This works at least for win32all.
h_dir = os.path.dirname(src)
rc_dir = os.path.dirname(obj)
try:
# first compile .MC to .RC and .H file
self.spawn([self.mc] +
['-h', h_dir, '-r', rc_dir] + [src])
base, _ = os.path.splitext (os.path.basename (src))
rc_file = os.path.join (rc_dir, base + '.rc')
# then compile .RC to .RES file
self.spawn([self.rc] +
["/fo" + obj] + [rc_file])
except DistutilsExecError, msg:
raise CompileError(msg)
continue
else:
# how to handle this file?
raise CompileError("Don't know how to compile %s to %s"
% (src, obj))
output_opt = "/Fo" + obj
try:
self.spawn([self.cc] + compile_opts + pp_opts +
[input_opt, output_opt] +
extra_postargs)
except DistutilsExecError, msg:
raise CompileError(msg)
return objects
def create_static_lib(self,
objects,
output_libname,
output_dir=None,
debug=0,
target_lang=None):
if not self.initialized:
self.initialize()
(objects, output_dir) = self._fix_object_args(objects, output_dir)
output_filename = self.library_filename(output_libname,
output_dir=output_dir)
if self._need_link(objects, output_filename):
lib_args = objects + ['/OUT:' + output_filename]
if debug:
pass # XXX what goes here?
try:
self.spawn([self.lib] + lib_args)
except DistutilsExecError, msg:
raise LibError(msg)
else:
log.debug("skipping %s (up-to-date)", output_filename)
def link(self,
target_desc,
objects,
output_filename,
output_dir=None,
libraries=None,
library_dirs=None,
runtime_library_dirs=None,
export_symbols=None,
debug=0,
extra_preargs=None,
extra_postargs=None,
build_temp=None,
target_lang=None):
if not self.initialized:
self.initialize()
(objects, output_dir) = self._fix_object_args(objects, output_dir)
fixed_args = self._fix_lib_args(libraries, library_dirs,
runtime_library_dirs)
(libraries, library_dirs, runtime_library_dirs) = fixed_args
if runtime_library_dirs:
self.warn ("I don't know what to do with 'runtime_library_dirs': "
+ str (runtime_library_dirs))
lib_opts = gen_lib_options(self,
library_dirs, runtime_library_dirs,
libraries)
if output_dir is not None:
output_filename = os.path.join(output_dir, output_filename)
if self._need_link(objects, output_filename):
if target_desc == CCompiler.EXECUTABLE:
if debug:
ldflags = self.ldflags_shared_debug[1:]
else:
ldflags = self.ldflags_shared[1:]
else:
if debug:
ldflags = self.ldflags_shared_debug
else:
ldflags = self.ldflags_shared
export_opts = []
for sym in (export_symbols or []):
export_opts.append("/EXPORT:" + sym)
ld_args = (ldflags + lib_opts + export_opts +
objects + ['/OUT:' + output_filename])
# The MSVC linker generates .lib and .exp files, which cannot be
# suppressed by any linker switches. The .lib files may even be
# needed! Make sure they are generated in the temporary build
# directory. Since they have different names for debug and release
# builds, they can go into the same directory.
build_temp = os.path.dirname(objects[0])
if export_symbols is not None:
(dll_name, dll_ext) = os.path.splitext(
os.path.basename(output_filename))
implib_file = os.path.join(
build_temp,
self.library_filename(dll_name))
ld_args.append ('/IMPLIB:' + implib_file)
# Embedded manifests are recommended - see MSDN article titled
# "How to: Embed a Manifest Inside a C/C++ Application"
# (currently at http://msdn2.microsoft.com/en-us/library/ms235591(VS.80).aspx)
# Ask the linker to generate the manifest in the temp dir, so
# we can embed it later.
temp_manifest = os.path.join(
build_temp,
os.path.basename(output_filename) + ".manifest")
ld_args.append('/MANIFESTFILE:' + temp_manifest)
if extra_preargs:
ld_args[:0] = extra_preargs
if extra_postargs:
ld_args.extend(extra_postargs)
self.mkpath(os.path.dirname(output_filename))
try:
self.spawn([self.linker] + ld_args)
except DistutilsExecError, msg:
raise LinkError(msg)
# embed the manifest
# XXX - this is somewhat fragile - if mt.exe fails, distutils
# will still consider the DLL up-to-date, but it will not have a
# manifest. Maybe we should link to a temp file? OTOH, that
# implies a build environment error that shouldn't go undetected.
if target_desc == CCompiler.EXECUTABLE:
mfid = 1
else:
mfid = 2
self._remove_visual_c_ref(temp_manifest)
out_arg = '-outputresource:%s;%s' % (output_filename, mfid)
try:
self.spawn(['mt.exe', '-nologo', '-manifest',
temp_manifest, out_arg])
except DistutilsExecError, msg:
raise LinkError(msg)
else:
log.debug("skipping %s (up-to-date)", output_filename)
def _remove_visual_c_ref(self, manifest_file):
try:
# Remove references to the Visual C runtime, so they will
# fall through to the Visual C dependency of Python.exe.
# This way, when installed for a restricted user (e.g.
# runtimes are not in WinSxS folder, but in Python's own
# folder), the runtimes do not need to be in every folder
# with .pyd's.
manifest_f = open(manifest_file)
try:
manifest_buf = manifest_f.read()
finally:
manifest_f.close()
pattern = re.compile(
r"""<assemblyIdentity.*?name=("|')Microsoft\."""\
r"""VC\d{2}\.CRT("|').*?(/>|</assemblyIdentity>)""",
re.DOTALL)
manifest_buf = re.sub(pattern, "", manifest_buf)
pattern = "<dependentAssembly>\s*</dependentAssembly>"
manifest_buf = re.sub(pattern, "", manifest_buf)
manifest_f = open(manifest_file, 'w')
try:
manifest_f.write(manifest_buf)
finally:
manifest_f.close()
except IOError:
pass
# -- Miscellaneous methods -----------------------------------------
    # These are all used by the 'gen_lib_options()' function, in
# ccompiler.py.
def library_dir_option(self, dir):
return "/LIBPATH:" + dir
def runtime_library_dir_option(self, dir):
raise DistutilsPlatformError(
"don't know how to set runtime library search path for MSVC++")
def library_option(self, lib):
return self.library_filename(lib)
def find_library_file(self, dirs, lib, debug=0):
# Prefer a debugging library if found (and requested), but deal
# with it if we don't have one.
if debug:
try_names = [lib + "_d", lib]
else:
try_names = [lib]
for dir in dirs:
for name in try_names:
libfile = os.path.join(dir, self.library_filename (name))
if os.path.exists(libfile):
return libfile
else:
# Oops, didn't find it in *any* of 'dirs'
return None
# Helper methods for using the MSVC registry settings
def find_exe(self, exe):
"""Return path to an MSVC executable program.
Tries to find the program in several places: first, one of the
MSVC program search paths from the registry; next, the directories
in the PATH environment variable. If any of those work, return an
absolute path that is known to exist. If none of them work, just
return the original program name, 'exe'.
"""
for p in self.__paths:
fn = os.path.join(os.path.abspath(p), exe)
if os.path.isfile(fn):
return fn
# didn't find it; try existing path
for p in os.environ['Path'].split(';'):
fn = os.path.join(os.path.abspath(p),exe)
if os.path.isfile(fn):
return fn
return exe
|
c3nav/c3nav
|
refs/heads/master
|
src/c3nav/mapdata/render/engines/wavefront.py
|
1
|
import os
from itertools import chain
import numpy as np
from c3nav.mapdata.render.engines import register_engine
from c3nav.mapdata.render.engines.base3d import Base3DEngine
@register_engine
class WavefrontEngine(Base3DEngine):
filetype = 'obj'
def _normal_normal(self, normal):
return normal / (np.absolute(normal).max())
def render(self, filename=None):
facets = np.vstack(chain(*(chain(*v.values()) for v in self.vertices.values())))
vertices = tuple(set(tuple(vertex) for vertex in facets.reshape((-1, 3))))
vertices_lookup = {vertex: i for i, vertex in enumerate(vertices, start=1)}
normals = np.cross(facets[:, 1] - facets[:, 0], facets[:, 2] - facets[:, 1]).reshape((-1, 3))
normals = normals / np.amax(np.absolute(normals), axis=1).reshape((-1, 1))
normals = tuple(set(tuple(normal) for normal in normals))
normals_lookup = {normal: i for i, normal in enumerate(normals, start=1)}
materials = b''
materials_filename = filename + '.mtl'
for name, color in self.colors.items():
materials += ((b'newmtl %s\n' % name.encode()) +
(b'Ka %.2f %.2f %.2f\n' % color[:3]) +
(b'Kd %.2f %.2f %.2f\n' % color[:3]) +
b'Ks 0.00 0.00 0.00\n' +
(b'd %.2f\n' % color[3]) +
b'illum 2\n')
result = b'mtllib %s\n' % os.path.split(materials_filename)[-1].encode()
result += b'o c3navExport\n'
result += b''.join((b'v %.3f %.3f %.3f\n' % vertex) for vertex in vertices)
result += b''.join((b'vn %.6f %.6f %.6f\n' % normal) for normal in normals)
for group, subgroups in self.groups.items():
result += b'\n# ' + group.encode() + b'\n'
for subgroup in subgroups:
result += b'\n# ' + subgroup.encode() + b'\n'
for i, vertices in enumerate(self.vertices[subgroup].values()):
if not vertices:
continue
for j, facets in enumerate(vertices):
if not facets.size:
continue
normals = np.cross(facets[:, 1] - facets[:, 0], facets[:, 2] - facets[:, 1]).reshape((-1, 3))
normals = normals / np.amax(np.absolute(normals), axis=1).reshape((-1, 1))
normals = tuple(normals_lookup[tuple(normal)] for normal in normals)
result += ((b'g %s_%d_%d\n' % (subgroup.encode(), i, j)) +
(b'usemtl %s\n' % subgroup.encode()) +
b's off\n' +
b''.join((b'f %d//%d %d//%d %d//%d\n' % (vertices_lookup[tuple(a)], normals[k],
vertices_lookup[tuple(b)], normals[k],
vertices_lookup[tuple(c)], normals[k],)
for k, (a, b, c) in enumerate(facets)))
)
return result, (materials_filename, materials)
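        # Illustrative sketch of the emitted Wavefront OBJ text (values are
        # placeholders, not real output):
        #   mtllib model.obj.mtl
        #   o c3navExport
        #   v 1.000 2.000 0.000
        #   vn 0.000000 0.000000 1.000000
        #   g subgroup_0_0
        #   usemtl subgroup
        #   s off
        #   f 1//1 2//1 3//1
        # Each "f a//n b//n c//n" triangle indexes the shared vertex and
        # normal tables built above (1-based, as OBJ requires).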
|
riklaunim/django-custom-multisite
|
refs/heads/master
|
tests/modeltests/choices/tests.py
|
150
|
from __future__ import absolute_import
from django.test import TestCase
from .models import Person
class ChoicesTests(TestCase):
def test_display(self):
a = Person.objects.create(name='Adrian', gender='M')
s = Person.objects.create(name='Sara', gender='F')
self.assertEqual(a.gender, 'M')
self.assertEqual(s.gender, 'F')
self.assertEqual(a.get_gender_display(), 'Male')
self.assertEqual(s.get_gender_display(), 'Female')
# If the value for the field doesn't correspond to a valid choice,
# the value itself is provided as a display value.
a.gender = ''
self.assertEqual(a.get_gender_display(), '')
a.gender = 'U'
self.assertEqual(a.get_gender_display(), 'U')
|
BaesFr/Sick-Beard
|
refs/heads/development
|
lib/hachoir_metadata/metadata_item.py
|
90
|
from lib.hachoir_core.tools import makeUnicode, normalizeNewline
from lib.hachoir_core.error import HACHOIR_ERRORS
from lib.hachoir_metadata import config
from lib.hachoir_metadata.setter import normalizeString
MIN_PRIORITY = 100
MAX_PRIORITY = 999
QUALITY_FASTEST = 0.0
QUALITY_FAST = 0.25
QUALITY_NORMAL = 0.5
QUALITY_GOOD = 0.75
QUALITY_BEST = 1.0
class DataValue:
def __init__(self, value, text):
self.value = value
self.text = text
class Data:
def __init__(self, key, priority, description,
text_handler=None, type=None, filter=None, conversion=None):
"""
        text_handler is only used if the value is neither str nor unicode;
        prototype: def text_handler(value) -> str/unicode
"""
assert MIN_PRIORITY <= priority <= MAX_PRIORITY
assert isinstance(description, unicode)
self.metadata = None
self.key = key
self.description = description
self.values = []
if type and not isinstance(type, (tuple, list)):
type = (type,)
self.type = type
self.text_handler = text_handler
self.filter = filter
self.priority = priority
self.conversion = conversion
def _createItem(self, value, text=None):
if text is None:
if isinstance(value, unicode):
text = value
elif self.text_handler:
text = self.text_handler(value)
assert isinstance(text, unicode)
else:
text = makeUnicode(value)
return DataValue(value, text)
def add(self, value):
if isinstance(value, tuple):
if len(value) != 2:
raise ValueError("Data.add() only accept tuple of 2 elements: (value,text)")
value, text = value
else:
text = None
# Skip value 'None'
if value is None:
return
if isinstance(value, (str, unicode)):
value = normalizeString(value)
if not value:
return
        # Convert the value: use the conversion callback if set; otherwise
        # plain str values are decoded to unicode using charset ISO-8859-1
if self.conversion:
try:
new_value = self.conversion(self.metadata, self.key, value)
except HACHOIR_ERRORS, err:
self.metadata.warning("Error during conversion of %r value: %s" % (
self.key, err))
return
if new_value is None:
dest_types = " or ".join(str(item.__name__) for item in self.type)
self.metadata.warning("Unable to convert %s=%r (%s) to %s" % (
self.key, value, type(value).__name__, dest_types))
return
if isinstance(new_value, tuple):
if text:
value = new_value[0]
else:
value, text = new_value
else:
value = new_value
elif isinstance(value, str):
value = unicode(value, "ISO-8859-1")
if self.type and not isinstance(value, self.type):
dest_types = " or ".join(str(item.__name__) for item in self.type)
self.metadata.warning("Key %r: value %r type (%s) is not %s" % (
self.key, value, type(value).__name__, dest_types))
return
# Skip empty strings
if isinstance(value, unicode):
value = normalizeNewline(value)
if config.MAX_STR_LENGTH \
and config.MAX_STR_LENGTH < len(value):
value = value[:config.MAX_STR_LENGTH] + "(...)"
# Skip duplicates
if value in self:
return
# Use filter
if self.filter and not self.filter(value):
self.metadata.warning("Skip value %s=%r (filter)" % (self.key, value))
return
        # For strings, if you have "verylongtext" and "verylo",
        # keep the longer value
if isinstance(value, unicode):
for index, item in enumerate(self.values):
item = item.value
if not isinstance(item, unicode):
continue
if value.startswith(item):
# Find longer value, replace the old one
self.values[index] = self._createItem(value, text)
return
if item.startswith(value):
# Find truncated value, skip it
return
# Add new value
self.values.append(self._createItem(value, text))
def __len__(self):
return len(self.values)
def __getitem__(self, index):
return self.values[index]
def __contains__(self, value):
for item in self.values:
if value == item.value:
return True
return False
def __cmp__(self, other):
return cmp(self.priority, other.priority)
|
GbalsaC/bitnamiP
|
refs/heads/master
|
venv/lib/python2.7/site-packages/sympy/polys/constructor.py
|
5
|
"""Tools for constructing domains for expressions. """
from sympy.polys.polyutils import parallel_dict_from_basic
from sympy.polys.polyoptions import build_options
from sympy.polys.domains import ZZ, QQ, RR, EX
from sympy.assumptions import ask, Q
from sympy.core import S, sympify
def _construct_simple(coeffs, opt):
"""Handle simple domains, e.g.: ZZ, QQ, RR and algebraic domains. """
result, rationals, reals, algebraics = {}, False, False, False
if opt.extension is True:
is_algebraic = lambda coeff: ask(Q.algebraic(coeff))
else:
is_algebraic = lambda coeff: False
# XXX: add support for a + b*I coefficients
for coeff in coeffs:
if coeff.is_Rational:
if not coeff.is_Integer:
rationals = True
elif coeff.is_Float:
if not algebraics:
reals = True
else:
# there are both reals and algebraics -> EX
return False
elif is_algebraic(coeff):
if not reals:
algebraics = True
else:
# there are both algebraics and reals -> EX
return False
else:
# this is a composite domain, e.g. ZZ[X], EX
return None
if algebraics:
domain, result = _construct_algebraic(coeffs, opt)
else:
if reals:
domain = RR
else:
if opt.field or rationals:
domain = QQ
else:
domain = ZZ
result = []
for coeff in coeffs:
result.append(domain.from_sympy(coeff))
return domain, result
def _construct_algebraic(coeffs, opt):
"""We know that coefficients are algebraic so construct the extension. """
from sympy.polys.numberfields import primitive_element
result, exts = [], set([])
for coeff in coeffs:
if coeff.is_Rational:
coeff = (None, 0, QQ.from_sympy(coeff))
else:
a = coeff.as_coeff_add()[0]
coeff -= a
b = coeff.as_coeff_mul()[0]
coeff /= b
exts.add(coeff)
a = QQ.from_sympy(a)
b = QQ.from_sympy(b)
coeff = (coeff, b, a)
result.append(coeff)
exts = list(exts)
g, span, H = primitive_element(exts, ex=True, polys=True)
root = sum([ s*ext for s, ext in zip(span, exts) ])
domain, g = QQ.algebraic_field((g, root)), g.rep.rep
for i, (coeff, a, b) in enumerate(result):
if coeff is not None:
coeff = a*domain.dtype.from_list(H[exts.index(coeff)], g, QQ) + b
else:
coeff = domain.dtype.from_list([b], g, QQ)
result[i] = coeff
return domain, result
def _construct_composite(coeffs, opt):
"""Handle composite domains, e.g.: ZZ[X], QQ[X], ZZ(X), QQ(X). """
numers, denoms = [], []
for coeff in coeffs:
numer, denom = coeff.as_numer_denom()
numers.append(numer)
denoms.append(denom)
polys, gens = parallel_dict_from_basic(numers + denoms) # XXX: sorting
if any(gen.is_number for gen in gens):
        return None # generators are number-like so it's better to use EX
n = len(gens)
k = len(polys)//2
numers = polys[:k]
denoms = polys[k:]
if opt.field:
fractions = True
else:
fractions, zeros = False, (0,)*n
for denom in denoms:
if len(denom) > 1 or zeros not in denom:
fractions = True
break
coeffs = set([])
if not fractions:
for numer, denom in zip(numers, denoms):
denom = denom[zeros]
for monom, coeff in numer.iteritems():
coeff /= denom
coeffs.add(coeff)
numer[monom] = coeff
else:
for numer, denom in zip(numers, denoms):
coeffs.update(numer.values())
coeffs.update(denom.values())
rationals, reals = False, False
for coeff in coeffs:
if coeff.is_Rational:
if not coeff.is_Integer:
rationals = True
elif coeff.is_Float:
reals = True
break
if reals:
ground = RR
elif rationals:
ground = QQ
else:
ground = ZZ
result = []
if not fractions:
domain = ground.poly_ring(*gens)
for numer in numers:
for monom, coeff in numer.iteritems():
numer[monom] = ground.from_sympy(coeff)
result.append(domain(numer))
else:
domain = ground.frac_field(*gens)
for numer, denom in zip(numers, denoms):
for monom, coeff in numer.iteritems():
numer[monom] = ground.from_sympy(coeff)
for monom, coeff in denom.iteritems():
denom[monom] = ground.from_sympy(coeff)
result.append(domain((numer, denom)))
return domain, result
def _construct_expression(coeffs, opt):
"""The last resort case, i.e. use the expression domain. """
domain, result = EX, []
for coeff in coeffs:
result.append(domain.from_sympy(coeff))
return domain, result
def construct_domain(obj, **args):
"""Construct a minimal domain for the list of coefficients. """
opt = build_options(args)
if hasattr(obj, '__iter__'):
if isinstance(obj, dict):
monoms, coeffs = zip(*obj.items())
else:
coeffs = obj
else:
coeffs = [obj]
coeffs = map(sympify, coeffs)
result = _construct_simple(coeffs, opt)
if result is not None:
if result is not False:
domain, coeffs = result
else:
domain, coeffs = _construct_expression(coeffs, opt)
else:
if opt.composite:
result = _construct_composite(coeffs, opt)
else:
result = None
if result is not None:
domain, coeffs = result
else:
domain, coeffs = _construct_expression(coeffs, opt)
if hasattr(obj, '__iter__'):
if isinstance(obj, dict):
return domain, dict(zip(monoms, coeffs))
else:
return domain, coeffs
else:
return domain, coeffs[0]
|
abhishekjairath/codeyard
|
refs/heads/master
|
commit/lib/python2.7/site-packages/pkg_resources.py
|
134
|
"""
Package resource API
--------------------
A resource is a logical file contained within a package, or a logical
subdirectory thereof. The package resource API expects resource names
to have their path parts separated with ``/``, *not* whatever the local
path separator is. Do not use os.path operations to manipulate resource
names being passed into the API.
The package resource API is designed to work with normal filesystem packages,
.egg files, and unpacked .egg files. It can also work in a limited way with
.zip files and with custom PEP 302 loaders that support the ``get_data()``
method.
"""
import sys
import os
import time
import re
import imp
import zipfile
import zipimport
import warnings
import stat
import functools
import pkgutil
import token
import symbol
import operator
import platform
from pkgutil import get_importer
try:
from urlparse import urlparse, urlunparse
except ImportError:
from urllib.parse import urlparse, urlunparse
try:
frozenset
except NameError:
from sets import ImmutableSet as frozenset
try:
basestring
next = lambda o: o.next()
from cStringIO import StringIO as BytesIO
except NameError:
basestring = str
from io import BytesIO
def execfile(fn, globs=None, locs=None):
if globs is None:
globs = globals()
if locs is None:
locs = globs
exec(compile(open(fn).read(), fn, 'exec'), globs, locs)
# capture these to bypass sandboxing
from os import utime
try:
from os import mkdir, rename, unlink
WRITE_SUPPORT = True
except ImportError:
# no write support, probably under GAE
WRITE_SUPPORT = False
from os import open as os_open
from os.path import isdir, split
# Avoid try/except due to potential problems with delayed import mechanisms.
if sys.version_info >= (3, 3) and sys.implementation.name == "cpython":
import importlib._bootstrap as importlib_bootstrap
else:
importlib_bootstrap = None
try:
import parser
except ImportError:
pass
def _bypass_ensure_directory(name, mode=0x1FF): # 0777
# Sandbox-bypassing version of ensure_directory()
if not WRITE_SUPPORT:
raise IOError('"os.mkdir" not supported on this platform.')
dirname, filename = split(name)
if dirname and filename and not isdir(dirname):
_bypass_ensure_directory(dirname)
mkdir(dirname, mode)
_state_vars = {}
def _declare_state(vartype, **kw):
g = globals()
for name, val in kw.items():
g[name] = val
_state_vars[name] = vartype
def __getstate__():
state = {}
g = globals()
for k, v in _state_vars.items():
state[k] = g['_sget_'+v](g[k])
return state
def __setstate__(state):
g = globals()
for k, v in state.items():
g['_sset_'+_state_vars[k]](k, g[k], v)
return state
def _sget_dict(val):
return val.copy()
def _sset_dict(key, ob, state):
ob.clear()
ob.update(state)
def _sget_object(val):
return val.__getstate__()
def _sset_object(key, ob, state):
ob.__setstate__(state)
_sget_none = _sset_none = lambda *args: None
def get_supported_platform():
"""Return this platform's maximum compatible version.
distutils.util.get_platform() normally reports the minimum version
of Mac OS X that would be required to *use* extensions produced by
distutils. But what we want when checking compatibility is to know the
version of Mac OS X that we are *running*. To allow usage of packages that
explicitly require a newer version of Mac OS X, we must also know the
current version of the OS.
If this condition occurs for any other platform with a version in its
platform strings, this function should be extended accordingly.
"""
plat = get_build_platform()
m = macosVersionString.match(plat)
if m is not None and sys.platform == "darwin":
try:
plat = 'macosx-%s-%s' % ('.'.join(_macosx_vers()[:2]), m.group(3))
except ValueError:
pass # not Mac OS X
return plat
__all__ = [
# Basic resource access and distribution/entry point discovery
'require', 'run_script', 'get_provider', 'get_distribution',
'load_entry_point', 'get_entry_map', 'get_entry_info', 'iter_entry_points',
'resource_string', 'resource_stream', 'resource_filename',
'resource_listdir', 'resource_exists', 'resource_isdir',
# Environmental control
'declare_namespace', 'working_set', 'add_activation_listener',
'find_distributions', 'set_extraction_path', 'cleanup_resources',
'get_default_cache',
# Primary implementation classes
'Environment', 'WorkingSet', 'ResourceManager',
'Distribution', 'Requirement', 'EntryPoint',
# Exceptions
'ResolutionError','VersionConflict','DistributionNotFound','UnknownExtra',
'ExtractionError',
# Parsing functions and string utilities
'parse_requirements', 'parse_version', 'safe_name', 'safe_version',
'get_platform', 'compatible_platforms', 'yield_lines', 'split_sections',
'safe_extra', 'to_filename', 'invalid_marker', 'evaluate_marker',
# filesystem utilities
'ensure_directory', 'normalize_path',
# Distribution "precedence" constants
'EGG_DIST', 'BINARY_DIST', 'SOURCE_DIST', 'CHECKOUT_DIST', 'DEVELOP_DIST',
# "Provider" interfaces, implementations, and registration/lookup APIs
'IMetadataProvider', 'IResourceProvider', 'FileMetadata',
'PathMetadata', 'EggMetadata', 'EmptyProvider', 'empty_provider',
'NullProvider', 'EggProvider', 'DefaultProvider', 'ZipProvider',
'register_finder', 'register_namespace_handler', 'register_loader_type',
'fixup_namespace_packages', 'get_importer',
# Deprecated/backward compatibility only
'run_main', 'AvailableDistributions',
]
class ResolutionError(Exception):
"""Abstract base for dependency resolution errors"""
def __repr__(self):
return self.__class__.__name__+repr(self.args)
class VersionConflict(ResolutionError):
"""An already-installed version conflicts with the requested version"""
class DistributionNotFound(ResolutionError):
"""A requested distribution was not found"""
class UnknownExtra(ResolutionError):
"""Distribution doesn't have an "extra feature" of the given name"""
_provider_factories = {}
PY_MAJOR = sys.version[:3]
EGG_DIST = 3
BINARY_DIST = 2
SOURCE_DIST = 1
CHECKOUT_DIST = 0
DEVELOP_DIST = -1
def register_loader_type(loader_type, provider_factory):
"""Register `provider_factory` to make providers for `loader_type`
`loader_type` is the type or class of a PEP 302 ``module.__loader__``,
and `provider_factory` is a function that, passed a *module* object,
returns an ``IResourceProvider`` for that module.
"""
_provider_factories[loader_type] = provider_factory
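# Illustrative sketch (this mirrors how pkg_resources itself wires things up
# elsewhere in this module):
#   register_loader_type(zipimport.zipimporter, ZipProvider)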
def get_provider(moduleOrReq):
"""Return an IResourceProvider for the named module or requirement"""
if isinstance(moduleOrReq,Requirement):
return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0]
try:
module = sys.modules[moduleOrReq]
except KeyError:
__import__(moduleOrReq)
module = sys.modules[moduleOrReq]
loader = getattr(module, '__loader__', None)
return _find_adapter(_provider_factories, loader)(module)
def _macosx_vers(_cache=[]):
if not _cache:
import platform
version = platform.mac_ver()[0]
# fallback for MacPorts
if version == '':
import plistlib
plist = '/System/Library/CoreServices/SystemVersion.plist'
if os.path.exists(plist):
if hasattr(plistlib, 'readPlist'):
plist_content = plistlib.readPlist(plist)
if 'ProductVersion' in plist_content:
version = plist_content['ProductVersion']
_cache.append(version.split('.'))
return _cache[0]
def _macosx_arch(machine):
return {'PowerPC':'ppc', 'Power_Macintosh':'ppc'}.get(machine,machine)
def get_build_platform():
"""Return this platform's string for platform-specific distributions
XXX Currently this is the same as ``distutils.util.get_platform()``, but it
needs some hacks for Linux and Mac OS X.
"""
try:
# Python 2.7 or >=3.2
from sysconfig import get_platform
except ImportError:
from distutils.util import get_platform
plat = get_platform()
if sys.platform == "darwin" and not plat.startswith('macosx-'):
try:
version = _macosx_vers()
machine = os.uname()[4].replace(" ", "_")
return "macosx-%d.%d-%s" % (int(version[0]), int(version[1]),
_macosx_arch(machine))
except ValueError:
# if someone is running a non-Mac darwin system, this will fall
# through to the default implementation
pass
return plat
macosVersionString = re.compile(r"macosx-(\d+)\.(\d+)-(.*)")
darwinVersionString = re.compile(r"darwin-(\d+)\.(\d+)\.(\d+)-(.*)")
get_platform = get_build_platform # XXX backward compat
def compatible_platforms(provided,required):
"""Can code for the `provided` platform run on the `required` platform?
Returns true if either platform is ``None``, or the platforms are equal.
XXX Needs compatibility checks for Linux and other unixy OSes.
"""
if provided is None or required is None or provided==required:
return True # easy case
# Mac OS X special cases
reqMac = macosVersionString.match(required)
if reqMac:
provMac = macosVersionString.match(provided)
# is this a Mac package?
if not provMac:
# this is backwards compatibility for packages built before
# setuptools 0.6. All packages built after this point will
# use the new macosx designation.
provDarwin = darwinVersionString.match(provided)
if provDarwin:
dversion = int(provDarwin.group(1))
macosversion = "%s.%s" % (reqMac.group(1), reqMac.group(2))
if dversion == 7 and macosversion >= "10.3" or \
dversion == 8 and macosversion >= "10.4":
#import warnings
#warnings.warn("Mac eggs should be rebuilt to "
# "use the macosx designation instead of darwin.",
# category=DeprecationWarning)
return True
return False # egg isn't macosx or legacy darwin
# are they the same major version and machine type?
if provMac.group(1) != reqMac.group(1) or \
provMac.group(3) != reqMac.group(3):
return False
# is the required OS major update >= the provided one?
if int(provMac.group(2)) > int(reqMac.group(2)):
return False
return True
# XXX Linux and other platforms' special cases should go here
return False
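# Illustrative sketch of the rules above:
#   compatible_platforms('macosx-10.3-ppc', 'macosx-10.4-ppc')  # -> True
#   compatible_platforms('macosx-10.5-ppc', 'macosx-10.4-ppc')  # -> False
#   compatible_platforms('win32', 'win32')                      # -> True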
def run_script(dist_spec, script_name):
"""Locate distribution `dist_spec` and run its `script_name` script"""
ns = sys._getframe(1).f_globals
name = ns['__name__']
ns.clear()
ns['__name__'] = name
require(dist_spec)[0].run_script(script_name, ns)
run_main = run_script # backward compatibility
def get_distribution(dist):
"""Return a current distribution object for a Requirement or string"""
if isinstance(dist,basestring): dist = Requirement.parse(dist)
if isinstance(dist,Requirement): dist = get_provider(dist)
if not isinstance(dist,Distribution):
raise TypeError("Expected string, Requirement, or Distribution", dist)
return dist
def load_entry_point(dist, group, name):
"""Return `name` entry point of `group` for `dist` or raise ImportError"""
return get_distribution(dist).load_entry_point(group, name)
def get_entry_map(dist, group=None):
"""Return the entry point map for `group`, or the full entry map"""
return get_distribution(dist).get_entry_map(group)
def get_entry_info(dist, group, name):
"""Return the EntryPoint object for `group`+`name`, or ``None``"""
return get_distribution(dist).get_entry_info(group, name)
class IMetadataProvider:
def has_metadata(name):
"""Does the package's distribution contain the named metadata?"""
def get_metadata(name):
"""The named metadata resource as a string"""
def get_metadata_lines(name):
"""Yield named metadata resource as list of non-blank non-comment lines
Leading and trailing whitespace is stripped from each line, and lines
with ``#`` as the first non-blank character are omitted."""
def metadata_isdir(name):
"""Is the named metadata a directory? (like ``os.path.isdir()``)"""
def metadata_listdir(name):
"""List of metadata names in the directory (like ``os.listdir()``)"""
def run_script(script_name, namespace):
"""Execute the named script in the supplied namespace dictionary"""
class IResourceProvider(IMetadataProvider):
"""An object that provides access to package resources"""
def get_resource_filename(manager, resource_name):
"""Return a true filesystem path for `resource_name`
`manager` must be an ``IResourceManager``"""
def get_resource_stream(manager, resource_name):
"""Return a readable file-like object for `resource_name`
`manager` must be an ``IResourceManager``"""
def get_resource_string(manager, resource_name):
"""Return a string containing the contents of `resource_name`
`manager` must be an ``IResourceManager``"""
def has_resource(resource_name):
"""Does the package contain the named resource?"""
def resource_isdir(resource_name):
"""Is the named resource a directory? (like ``os.path.isdir()``)"""
def resource_listdir(resource_name):
"""List of resource names in the directory (like ``os.listdir()``)"""
class WorkingSet(object):
"""A collection of active distributions on sys.path (or a similar list)"""
def __init__(self, entries=None):
"""Create working set from list of path entries (default=sys.path)"""
self.entries = []
self.entry_keys = {}
self.by_key = {}
self.callbacks = []
if entries is None:
entries = sys.path
for entry in entries:
self.add_entry(entry)
def add_entry(self, entry):
"""Add a path item to ``.entries``, finding any distributions on it
``find_distributions(entry, True)`` is used to find distributions
corresponding to the path entry, and they are added. `entry` is
always appended to ``.entries``, even if it is already present.
(This is because ``sys.path`` can contain the same value more than
once, and the ``.entries`` of the ``sys.path`` WorkingSet should always
equal ``sys.path``.)
"""
self.entry_keys.setdefault(entry, [])
self.entries.append(entry)
for dist in find_distributions(entry, True):
self.add(dist, entry, False)
def __contains__(self,dist):
"""True if `dist` is the active distribution for its project"""
return self.by_key.get(dist.key) == dist
def find(self, req):
"""Find a distribution matching requirement `req`
If there is an active distribution for the requested project, this
returns it as long as it meets the version requirement specified by
`req`. But, if there is an active distribution for the project and it
does *not* meet the `req` requirement, ``VersionConflict`` is raised.
If there is no active distribution for the requested project, ``None``
is returned.
"""
dist = self.by_key.get(req.key)
if dist is not None and dist not in req:
raise VersionConflict(dist,req) # XXX add more info
else:
return dist
def iter_entry_points(self, group, name=None):
"""Yield entry point objects from `group` matching `name`
If `name` is None, yields all entry points in `group` from all
distributions in the working set, otherwise only ones matching
both `group` and `name` are yielded (in distribution order).
"""
for dist in self:
entries = dist.get_entry_map(group)
if name is None:
for ep in entries.values():
yield ep
elif name in entries:
yield entries[name]
def run_script(self, requires, script_name):
"""Locate distribution for `requires` and run `script_name` script"""
ns = sys._getframe(1).f_globals
name = ns['__name__']
ns.clear()
ns['__name__'] = name
self.require(requires)[0].run_script(script_name, ns)
def __iter__(self):
"""Yield distributions for non-duplicate projects in the working set
The yield order is the order in which the items' path entries were
added to the working set.
"""
seen = {}
for item in self.entries:
if item not in self.entry_keys:
# workaround a cache issue
continue
for key in self.entry_keys[item]:
if key not in seen:
seen[key]=1
yield self.by_key[key]
def add(self, dist, entry=None, insert=True, replace=False):
"""Add `dist` to working set, associated with `entry`
If `entry` is unspecified, it defaults to the ``.location`` of `dist`.
On exit from this routine, `entry` is added to the end of the working
set's ``.entries`` (if it wasn't already present).
`dist` is only added to the working set if it's for a project that
doesn't already have a distribution in the set, unless `replace=True`.
If it's added, any callbacks registered with the ``subscribe()`` method
will be called.
"""
if insert:
dist.insert_on(self.entries, entry)
if entry is None:
entry = dist.location
keys = self.entry_keys.setdefault(entry,[])
keys2 = self.entry_keys.setdefault(dist.location,[])
if not replace and dist.key in self.by_key:
return # ignore hidden distros
self.by_key[dist.key] = dist
if dist.key not in keys:
keys.append(dist.key)
if dist.key not in keys2:
keys2.append(dist.key)
self._added_new(dist)
def resolve(self, requirements, env=None, installer=None,
replace_conflicting=False):
"""List all distributions needed to (recursively) meet `requirements`
`requirements` must be a sequence of ``Requirement`` objects. `env`,
if supplied, should be an ``Environment`` instance. If
not supplied, it defaults to all distributions available within any
entry or distribution in the working set. `installer`, if supplied,
will be invoked with each requirement that cannot be met by an
already-installed distribution; it should return a ``Distribution`` or
``None``.
Unless `replace_conflicting=True`, raises a VersionConflict exception if
any requirements are found on the path that have the correct name but
the wrong version. Otherwise, if an `installer` is supplied it will be
invoked to obtain the correct version of the requirement and activate
it.
"""
requirements = list(requirements)[::-1] # set up the stack
processed = {} # set of processed requirements
best = {} # key -> dist
to_activate = []
while requirements:
req = requirements.pop(0) # process dependencies breadth-first
if req in processed:
# Ignore cyclic or redundant dependencies
continue
dist = best.get(req.key)
if dist is None:
# Find the best distribution and add it to the map
dist = self.by_key.get(req.key)
if dist is None or (dist not in req and replace_conflicting):
ws = self
if env is None:
if dist is None:
env = Environment(self.entries)
else:
# Use an empty environment and workingset to avoid
# any further conflicts with the conflicting
# distribution
env = Environment([])
ws = WorkingSet([])
dist = best[req.key] = env.best_match(req, ws, installer)
if dist is None:
#msg = ("The '%s' distribution was not found on this "
# "system, and is required by this application.")
#raise DistributionNotFound(msg % req)
# unfortunately, zc.buildout uses a str(err)
                    # to get the name of the distribution here.
raise DistributionNotFound(req)
to_activate.append(dist)
if dist not in req:
# Oops, the "best" so far conflicts with a dependency
raise VersionConflict(dist,req) # XXX put more info here
requirements.extend(dist.requires(req.extras)[::-1])
processed[req] = True
return to_activate # return list of distros to activate
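    # Minimal usage sketch (the requirement string is illustrative):
    #   needed = working_set.resolve(parse_requirements("example-dist>=1.0"))
    #   for dist in needed:
    #       working_set.add(dist)
    # This is essentially what require() below does in one call.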
def find_plugins(self, plugin_env, full_env=None, installer=None,
fallback=True):
"""Find all activatable distributions in `plugin_env`
Example usage::
distributions, errors = working_set.find_plugins(
Environment(plugin_dirlist)
)
            list(map(working_set.add, distributions)) # add plugins+libs to sys.path
            print('Could not load', errors) # display errors
The `plugin_env` should be an ``Environment`` instance that contains
only distributions that are in the project's "plugin directory" or
directories. The `full_env`, if supplied, should be an ``Environment``
contains all currently-available distributions. If `full_env` is not
supplied, one is created automatically from the ``WorkingSet`` this
method is called on, which will typically mean that every directory on
``sys.path`` will be scanned for distributions.
`installer` is a standard installer callback as used by the
``resolve()`` method. The `fallback` flag indicates whether we should
attempt to resolve older versions of a plugin if the newest version
cannot be resolved.
This method returns a 2-tuple: (`distributions`, `error_info`), where
`distributions` is a list of the distributions found in `plugin_env`
that were loadable, along with any other distributions that are needed
to resolve their dependencies. `error_info` is a dictionary mapping
unloadable plugin distributions to an exception instance describing the
error that occurred. Usually this will be a ``DistributionNotFound`` or
``VersionConflict`` instance.
"""
plugin_projects = list(plugin_env)
plugin_projects.sort() # scan project names in alphabetic order
error_info = {}
distributions = {}
if full_env is None:
env = Environment(self.entries)
env += plugin_env
else:
env = full_env + plugin_env
shadow_set = self.__class__([])
list(map(shadow_set.add, self)) # put all our entries in shadow_set
for project_name in plugin_projects:
for dist in plugin_env[project_name]:
req = [dist.as_requirement()]
try:
resolvees = shadow_set.resolve(req, env, installer)
except ResolutionError:
v = sys.exc_info()[1]
error_info[dist] = v # save error info
if fallback:
continue # try the next older version of project
else:
break # give up on this project, keep going
else:
list(map(shadow_set.add, resolvees))
distributions.update(dict.fromkeys(resolvees))
# success, no need to try any more versions of this project
break
distributions = list(distributions)
distributions.sort()
return distributions, error_info
def require(self, *requirements):
"""Ensure that distributions matching `requirements` are activated
`requirements` must be a string or a (possibly-nested) sequence
thereof, specifying the distributions and versions required. The
return value is a sequence of the distributions that needed to be
activated to fulfill the requirements; all relevant distributions are
included, even if they were already activated in this working set.
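        Example usage (a sketch; the requirement string is hypothetical)::
            needed = working_set.require('FooBar>=1.2')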
"""
needed = self.resolve(parse_requirements(requirements))
for dist in needed:
self.add(dist)
return needed
def subscribe(self, callback):
"""Invoke `callback` for all distributions (including existing ones)"""
if callback in self.callbacks:
return
self.callbacks.append(callback)
for dist in self:
callback(dist)
def _added_new(self, dist):
for callback in self.callbacks:
callback(dist)
def __getstate__(self):
return (
self.entries[:], self.entry_keys.copy(), self.by_key.copy(),
self.callbacks[:]
)
def __setstate__(self, e_k_b_c):
entries, keys, by_key, callbacks = e_k_b_c
self.entries = entries[:]
self.entry_keys = keys.copy()
self.by_key = by_key.copy()
self.callbacks = callbacks[:]
class Environment(object):
"""Searchable snapshot of distributions on a search path"""
    def __init__(self, search_path=None, platform=get_supported_platform(),
            python=PY_MAJOR):
"""Snapshot distributions available on a search path
Any distributions found on `search_path` are added to the environment.
`search_path` should be a sequence of ``sys.path`` items. If not
supplied, ``sys.path`` is used.
`platform` is an optional string specifying the name of the platform
that platform-specific distributions must be compatible with. If
unspecified, it defaults to the current platform. `python` is an
optional string naming the desired version of Python (e.g. ``'3.3'``);
it defaults to the current version.
You may explicitly set `platform` (and/or `python`) to ``None`` if you
wish to map *all* distributions, not just those compatible with the
running platform or Python version.
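        Example usage (a sketch; ``'plugins'`` is a hypothetical directory)::
            env = Environment()            # snapshot of sys.path
            env = Environment(['plugins']) # snapshot of one directory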
"""
self._distmap = {}
self._cache = {}
self.platform = platform
self.python = python
self.scan(search_path)
def can_add(self, dist):
"""Is distribution `dist` acceptable for this environment?
The distribution must match the platform and python version
requirements specified when this environment was created, or False
is returned.
"""
return (self.python is None or dist.py_version is None
or dist.py_version==self.python) \
and compatible_platforms(dist.platform,self.platform)
def remove(self, dist):
"""Remove `dist` from the environment"""
self._distmap[dist.key].remove(dist)
def scan(self, search_path=None):
"""Scan `search_path` for distributions usable in this environment
Any distributions found are added to the environment.
`search_path` should be a sequence of ``sys.path`` items. If not
supplied, ``sys.path`` is used. Only distributions conforming to
the platform/python version defined at initialization are added.
"""
if search_path is None:
search_path = sys.path
for item in search_path:
for dist in find_distributions(item):
self.add(dist)
def __getitem__(self,project_name):
"""Return a newest-to-oldest list of distributions for `project_name`
"""
try:
return self._cache[project_name]
except KeyError:
project_name = project_name.lower()
if project_name not in self._distmap:
return []
if project_name not in self._cache:
dists = self._cache[project_name] = self._distmap[project_name]
_sort_dists(dists)
return self._cache[project_name]
def add(self,dist):
"""Add `dist` if we ``can_add()`` it and it isn't already added"""
if self.can_add(dist) and dist.has_version():
dists = self._distmap.setdefault(dist.key,[])
if dist not in dists:
dists.append(dist)
if dist.key in self._cache:
_sort_dists(self._cache[dist.key])
def best_match(self, req, working_set, installer=None):
"""Find distribution best matching `req` and usable on `working_set`
This calls the ``find(req)`` method of the `working_set` to see if a
suitable distribution is already active. (This may raise
``VersionConflict`` if an unsuitable version of the project is already
active in the specified `working_set`.) If a suitable distribution
isn't active, this method returns the newest distribution in the
environment that meets the ``Requirement`` in `req`. If no suitable
distribution is found, and `installer` is supplied, then the result of
calling the environment's ``obtain(req, installer)`` method will be
returned.
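        Example usage (a sketch; assumes ``env`` is this ``Environment`` and
        ``working_set`` is a ``WorkingSet``)::
            req = Requirement.parse('FooBar>=1.2')
            dist = env.best_match(req, working_set)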
"""
dist = working_set.find(req)
if dist is not None:
return dist
for dist in self[req.key]:
if dist in req:
return dist
return self.obtain(req, installer) # try and download/install
def obtain(self, requirement, installer=None):
"""Obtain a distribution matching `requirement` (e.g. via download)
Obtain a distro that matches requirement (e.g. via download). In the
base ``Environment`` class, this routine just returns
``installer(requirement)``, unless `installer` is None, in which case
None is returned instead. This method is a hook that allows subclasses
to attempt other ways of obtaining a distribution before falling back
to the `installer` argument."""
if installer is not None:
return installer(requirement)
def __iter__(self):
"""Yield the unique project names of the available distributions"""
for key in self._distmap.keys():
if self[key]: yield key
def __iadd__(self, other):
"""In-place addition of a distribution or environment"""
if isinstance(other,Distribution):
self.add(other)
elif isinstance(other,Environment):
for project in other:
for dist in other[project]:
self.add(dist)
else:
raise TypeError("Can't add %r to environment" % (other,))
return self
def __add__(self, other):
"""Add an environment or distribution to an environment"""
new = self.__class__([], platform=None, python=None)
for env in self, other:
new += env
return new
AvailableDistributions = Environment # XXX backward compatibility
class ExtractionError(RuntimeError):
"""An error occurred extracting a resource
The following attributes are available from instances of this exception:
manager
The resource manager that raised this exception
cache_path
The base directory for resource extraction
original_error
The exception instance that caused extraction to fail
"""
class ResourceManager:
"""Manage resource extraction and packages"""
extraction_path = None
def __init__(self):
self.cached_files = {}
def resource_exists(self, package_or_requirement, resource_name):
"""Does the named resource exist?"""
return get_provider(package_or_requirement).has_resource(resource_name)
def resource_isdir(self, package_or_requirement, resource_name):
"""Is the named resource an existing directory?"""
return get_provider(package_or_requirement).resource_isdir(
resource_name
)
def resource_filename(self, package_or_requirement, resource_name):
"""Return a true filesystem path for specified resource"""
return get_provider(package_or_requirement).get_resource_filename(
self, resource_name
)
def resource_stream(self, package_or_requirement, resource_name):
"""Return a readable file-like object for specified resource"""
return get_provider(package_or_requirement).get_resource_stream(
self, resource_name
)
def resource_string(self, package_or_requirement, resource_name):
"""Return specified resource as a string"""
return get_provider(package_or_requirement).get_resource_string(
self, resource_name
)
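    # Example use of the resource API (a sketch; 'mypkg' and
    # 'data/defaults.cfg' are hypothetical names):
    #     manager = ResourceManager()
    #     if manager.resource_exists('mypkg', 'data/defaults.cfg'):
    #         raw = manager.resource_string('mypkg', 'data/defaults.cfg')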
def resource_listdir(self, package_or_requirement, resource_name):
"""List the contents of the named resource directory"""
return get_provider(package_or_requirement).resource_listdir(
resource_name
)
def extraction_error(self):
"""Give an error message for problems extracting file(s)"""
old_exc = sys.exc_info()[1]
cache_path = self.extraction_path or get_default_cache()
err = ExtractionError("""Can't extract file(s) to egg cache
The following error occurred while trying to extract file(s) to the Python egg
cache:
%s
The Python egg cache directory is currently set to:
%s
Perhaps your account does not have write access to this directory? You can
change the cache directory by setting the PYTHON_EGG_CACHE environment
variable to point to an accessible directory.
""" % (old_exc, cache_path)
)
err.manager = self
err.cache_path = cache_path
err.original_error = old_exc
raise err
def get_cache_path(self, archive_name, names=()):
"""Return absolute location in cache for `archive_name` and `names`
The parent directory of the resulting path will be created if it does
not already exist. `archive_name` should be the base filename of the
enclosing egg (which may not be the name of the enclosing zipfile!),
including its ".egg" extension. `names`, if provided, should be a
sequence of path name parts "under" the egg's extraction location.
This method should only be called by resource providers that need to
obtain an extraction location, and only for names they intend to
extract, as it tracks the generated names for possible cleanup later.
"""
extract_path = self.extraction_path or get_default_cache()
target_path = os.path.join(extract_path, archive_name+'-tmp', *names)
try:
_bypass_ensure_directory(target_path)
except:
self.extraction_error()
self._warn_unsafe_extraction_path(extract_path)
self.cached_files[target_path] = 1
return target_path
@staticmethod
def _warn_unsafe_extraction_path(path):
"""
If the default extraction path is overridden and set to an insecure
location, such as /tmp, it opens up an opportunity for an attacker to
replace an extracted file with an unauthorized payload. Warn the user
if a known insecure location is used.
See Distribute #375 for more details.
"""
if os.name == 'nt' and not path.startswith(os.environ['windir']):
# On Windows, permissions are generally restrictive by default
# and temp directories are not writable by other users, so
# bypass the warning.
return
mode = os.stat(path).st_mode
if mode & stat.S_IWOTH or mode & stat.S_IWGRP:
msg = ("%s is writable by group/others and vulnerable to attack "
"when "
"used with get_resource_filename. Consider a more secure "
"location (set with .set_extraction_path or the "
"PYTHON_EGG_CACHE environment variable)." % path)
warnings.warn(msg, UserWarning)
def postprocess(self, tempname, filename):
"""Perform any platform-specific postprocessing of `tempname`
This is where Mac header rewrites should be done; other platforms don't
have anything special they should do.
Resource providers should call this method ONLY after successfully
extracting a compressed resource. They must NOT call it on resources
that are already in the filesystem.
`tempname` is the current (temporary) name of the file, and `filename`
is the name it will be renamed to by the caller after this routine
returns.
"""
if os.name == 'posix':
# Make the resource executable
            mode = ((os.stat(tempname).st_mode) | 0x16D) & 0xFFF # 0x16D==0555, 0xFFF==07777 (octal)
os.chmod(tempname, mode)
def set_extraction_path(self, path):
"""Set the base path where resources will be extracted to, if needed.
If you do not call this routine before any extractions take place, the
path defaults to the return value of ``get_default_cache()``. (Which
is based on the ``PYTHON_EGG_CACHE`` environment variable, with various
platform-specific fallbacks. See that routine's documentation for more
details.)
Resources are extracted to subdirectories of this path based upon
information given by the ``IResourceProvider``. You may set this to a
temporary directory, but then you must call ``cleanup_resources()`` to
delete the extracted files when done. There is no guarantee that
``cleanup_resources()`` will be able to remove all extracted files.
(Note: you may not change the extraction path for a given resource
manager once resources have been extracted, unless you first call
``cleanup_resources()``.)
"""
if self.cached_files:
raise ValueError(
"Can't change extraction path, files already extracted"
)
self.extraction_path = path
def cleanup_resources(self, force=False):
"""
Delete all extracted resource files and directories, returning a list
of the file and directory names that could not be successfully removed.
This function does not have any concurrency protection, so it should
generally only be called when the extraction path is a temporary
directory exclusive to a single process. This method is not
automatically called; you must call it explicitly or register it as an
``atexit`` function if you wish to ensure cleanup of a temporary
directory used for extractions.
"""
# XXX
def get_default_cache():
"""Determine the default cache location
This returns the ``PYTHON_EGG_CACHE`` environment variable, if set.
Otherwise, on Windows, it returns a "Python-Eggs" subdirectory of the
"Application Data" directory. On all other systems, it's "~/.python-eggs".
"""
try:
return os.environ['PYTHON_EGG_CACHE']
except KeyError:
pass
if os.name!='nt':
return os.path.expanduser('~/.python-eggs')
app_data = 'Application Data' # XXX this may be locale-specific!
app_homes = [
(('APPDATA',), None), # best option, should be locale-safe
(('USERPROFILE',), app_data),
(('HOMEDRIVE','HOMEPATH'), app_data),
(('HOMEPATH',), app_data),
(('HOME',), None),
(('WINDIR',), app_data), # 95/98/ME
]
for keys, subdir in app_homes:
dirname = ''
for key in keys:
if key in os.environ:
dirname = os.path.join(dirname, os.environ[key])
else:
break
else:
if subdir:
dirname = os.path.join(dirname,subdir)
return os.path.join(dirname, 'Python-Eggs')
else:
raise RuntimeError(
"Please set the PYTHON_EGG_CACHE enviroment variable"
)
def safe_name(name):
"""Convert an arbitrary string to a standard distribution name
Any runs of non-alphanumeric/. characters are replaced with a single '-'.
"""
return re.sub('[^A-Za-z0-9.]+', '-', name)
def safe_version(version):
"""Convert an arbitrary string to a standard version string
Spaces become dots, and all other non-alphanumeric characters become
dashes, with runs of multiple dashes condensed to a single dash.
"""
version = version.replace(' ','.')
return re.sub('[^A-Za-z0-9.]+', '-', version)
def safe_extra(extra):
"""Convert an arbitrary string to a standard 'extra' name
Any runs of non-alphanumeric characters are replaced with a single '_',
and the result is always lowercased.
"""
return re.sub('[^A-Za-z0-9.]+', '_', extra).lower()
def to_filename(name):
"""Convert a project or version name to its filename-escaped form
Any '-' characters are currently replaced with '_'.
"""
return name.replace('-','_')
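# Worked examples for the name-normalization helpers above (a sketch; the
# inputs are hypothetical):
#     safe_name('My Project!')  -> 'My-Project-'
#     safe_version('1.0 beta')  -> '1.0.beta'
#     safe_extra('Feature One') -> 'feature_one'
#     to_filename('my-project') -> 'my_project'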
class MarkerEvaluation(object):
values = {
'os_name': lambda: os.name,
'sys_platform': lambda: sys.platform,
'python_full_version': lambda: sys.version.split()[0],
'python_version': lambda:'%s.%s' % (sys.version_info[0], sys.version_info[1]),
'platform_version': platform.version,
'platform_machine': platform.machine,
'python_implementation': platform.python_implementation,
}
@classmethod
def is_invalid_marker(cls, text):
"""
Validate text as a PEP 426 environment marker; return an exception
if invalid or False otherwise.
"""
try:
cls.evaluate_marker(text)
except SyntaxError:
return cls.normalize_exception(sys.exc_info()[1])
return False
@staticmethod
def normalize_exception(exc):
"""
Given a SyntaxError from a marker evaluation, normalize the error message:
- Remove indications of filename and line number.
- Replace platform-specific error messages with standard error messages.
"""
subs = {
'unexpected EOF while parsing': 'invalid syntax',
'parenthesis is never closed': 'invalid syntax',
}
exc.filename = None
exc.lineno = None
exc.msg = subs.get(exc.msg, exc.msg)
return exc
@classmethod
def and_test(cls, nodelist):
# MUST NOT short-circuit evaluation, or invalid syntax can be skipped!
return functools.reduce(operator.and_, [cls.interpret(nodelist[i]) for i in range(1,len(nodelist),2)])
@classmethod
def test(cls, nodelist):
# MUST NOT short-circuit evaluation, or invalid syntax can be skipped!
return functools.reduce(operator.or_, [cls.interpret(nodelist[i]) for i in range(1,len(nodelist),2)])
@classmethod
def atom(cls, nodelist):
t = nodelist[1][0]
if t == token.LPAR:
if nodelist[2][0] == token.RPAR:
raise SyntaxError("Empty parentheses")
return cls.interpret(nodelist[2])
raise SyntaxError("Language feature not supported in environment markers")
@classmethod
def comparison(cls, nodelist):
if len(nodelist)>4:
raise SyntaxError("Chained comparison not allowed in environment markers")
comp = nodelist[2][1]
cop = comp[1]
if comp[0] == token.NAME:
if len(nodelist[2]) == 3:
if cop == 'not':
cop = 'not in'
else:
cop = 'is not'
try:
cop = cls.get_op(cop)
except KeyError:
raise SyntaxError(repr(cop)+" operator not allowed in environment markers")
return cop(cls.evaluate(nodelist[1]), cls.evaluate(nodelist[3]))
@classmethod
def get_op(cls, op):
ops = {
symbol.test: cls.test,
symbol.and_test: cls.and_test,
symbol.atom: cls.atom,
symbol.comparison: cls.comparison,
'not in': lambda x, y: x not in y,
'in': lambda x, y: x in y,
'==': operator.eq,
'!=': operator.ne,
}
if hasattr(symbol, 'or_test'):
ops[symbol.or_test] = cls.test
return ops[op]
@classmethod
def evaluate_marker(cls, text, extra=None):
"""
Evaluate a PEP 426 environment marker on CPython 2.4+.
Return a boolean indicating the marker result in this environment.
Raise SyntaxError if marker is invalid.
This implementation uses the 'parser' module, which is not implemented on
Jython and has been superseded by the 'ast' module in Python 2.6 and
later.
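        Example (a sketch; the result depends on the running interpreter)::
            MarkerEvaluation.evaluate_marker("os_name == 'posix'")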
"""
return cls.interpret(parser.expr(text).totuple(1)[1])
@classmethod
def _markerlib_evaluate(cls, text):
"""
Evaluate a PEP 426 environment marker using markerlib.
Return a boolean indicating the marker result in this environment.
Raise SyntaxError if marker is invalid.
"""
import _markerlib
# markerlib implements Metadata 1.2 (PEP 345) environment markers.
# Translate the variables to Metadata 2.0 (PEP 426).
env = _markerlib.default_environment()
for key in env.keys():
new_key = key.replace('.', '_')
env[new_key] = env.pop(key)
try:
result = _markerlib.interpret(text, env)
except NameError:
e = sys.exc_info()[1]
raise SyntaxError(e.args[0])
return result
if 'parser' not in globals():
# Fall back to less-complete _markerlib implementation if 'parser' module
# is not available.
evaluate_marker = _markerlib_evaluate
@classmethod
def interpret(cls, nodelist):
while len(nodelist)==2: nodelist = nodelist[1]
try:
op = cls.get_op(nodelist[0])
except KeyError:
raise SyntaxError("Comparison or logical expression expected")
return op(nodelist)
@classmethod
def evaluate(cls, nodelist):
while len(nodelist)==2: nodelist = nodelist[1]
kind = nodelist[0]
name = nodelist[1]
if kind==token.NAME:
try:
op = cls.values[name]
except KeyError:
raise SyntaxError("Unknown name %r" % name)
return op()
if kind==token.STRING:
s = nodelist[1]
if s[:1] not in "'\"" or s.startswith('"""') or s.startswith("'''") \
or '\\' in s:
raise SyntaxError(
"Only plain strings allowed in environment markers")
return s[1:-1]
raise SyntaxError("Language feature not supported in environment markers")
invalid_marker = MarkerEvaluation.is_invalid_marker
evaluate_marker = MarkerEvaluation.evaluate_marker
class NullProvider:
"""Try to implement resources and metadata for arbitrary PEP 302 loaders"""
egg_name = None
egg_info = None
loader = None
def __init__(self, module):
self.loader = getattr(module, '__loader__', None)
self.module_path = os.path.dirname(getattr(module, '__file__', ''))
def get_resource_filename(self, manager, resource_name):
return self._fn(self.module_path, resource_name)
def get_resource_stream(self, manager, resource_name):
return BytesIO(self.get_resource_string(manager, resource_name))
def get_resource_string(self, manager, resource_name):
return self._get(self._fn(self.module_path, resource_name))
def has_resource(self, resource_name):
return self._has(self._fn(self.module_path, resource_name))
def has_metadata(self, name):
return self.egg_info and self._has(self._fn(self.egg_info,name))
if sys.version_info <= (3,):
def get_metadata(self, name):
if not self.egg_info:
return ""
return self._get(self._fn(self.egg_info,name))
else:
def get_metadata(self, name):
if not self.egg_info:
return ""
return self._get(self._fn(self.egg_info,name)).decode("utf-8")
def get_metadata_lines(self, name):
return yield_lines(self.get_metadata(name))
def resource_isdir(self,resource_name):
return self._isdir(self._fn(self.module_path, resource_name))
def metadata_isdir(self,name):
return self.egg_info and self._isdir(self._fn(self.egg_info,name))
def resource_listdir(self,resource_name):
return self._listdir(self._fn(self.module_path,resource_name))
def metadata_listdir(self,name):
if self.egg_info:
return self._listdir(self._fn(self.egg_info,name))
return []
def run_script(self,script_name,namespace):
script = 'scripts/'+script_name
if not self.has_metadata(script):
raise ResolutionError("No script named %r" % script_name)
script_text = self.get_metadata(script).replace('\r\n','\n')
script_text = script_text.replace('\r','\n')
script_filename = self._fn(self.egg_info,script)
namespace['__file__'] = script_filename
if os.path.exists(script_filename):
execfile(script_filename, namespace, namespace)
else:
from linecache import cache
cache[script_filename] = (
len(script_text), 0, script_text.split('\n'), script_filename
)
script_code = compile(script_text,script_filename,'exec')
exec(script_code, namespace, namespace)
def _has(self, path):
raise NotImplementedError(
"Can't perform this operation for unregistered loader type"
)
def _isdir(self, path):
raise NotImplementedError(
"Can't perform this operation for unregistered loader type"
)
def _listdir(self, path):
raise NotImplementedError(
"Can't perform this operation for unregistered loader type"
)
def _fn(self, base, resource_name):
if resource_name:
return os.path.join(base, *resource_name.split('/'))
return base
def _get(self, path):
if hasattr(self.loader, 'get_data'):
return self.loader.get_data(path)
raise NotImplementedError(
"Can't perform this operation for loaders without 'get_data()'"
)
register_loader_type(object, NullProvider)
class EggProvider(NullProvider):
"""Provider based on a virtual filesystem"""
def __init__(self,module):
NullProvider.__init__(self,module)
self._setup_prefix()
def _setup_prefix(self):
# we assume here that our metadata may be nested inside a "basket"
# of multiple eggs; that's why we use module_path instead of .archive
path = self.module_path
old = None
while path!=old:
if path.lower().endswith('.egg'):
self.egg_name = os.path.basename(path)
self.egg_info = os.path.join(path, 'EGG-INFO')
self.egg_root = path
break
old = path
path, base = os.path.split(path)
class DefaultProvider(EggProvider):
"""Provides access to package resources in the filesystem"""
def _has(self, path):
return os.path.exists(path)
def _isdir(self,path):
return os.path.isdir(path)
def _listdir(self,path):
return os.listdir(path)
def get_resource_stream(self, manager, resource_name):
return open(self._fn(self.module_path, resource_name), 'rb')
def _get(self, path):
stream = open(path, 'rb')
try:
return stream.read()
finally:
stream.close()
register_loader_type(type(None), DefaultProvider)
if importlib_bootstrap is not None:
register_loader_type(importlib_bootstrap.SourceFileLoader, DefaultProvider)
class EmptyProvider(NullProvider):
"""Provider that returns nothing for all requests"""
_isdir = _has = lambda self,path: False
_get = lambda self,path: ''
_listdir = lambda self,path: []
module_path = None
def __init__(self):
pass
empty_provider = EmptyProvider()
def build_zipmanifest(path):
"""
This builds a similar dictionary to the zipimport directory
caches. However instead of tuples, ZipInfo objects are stored.
The translation of the tuple is as follows:
    * [0] - zipinfo.filename; on stock Pythons this needs "/" --> os.sep,
            while on PyPy it is already os.sep (one reason why distribute
            did work in some cases on PyPy and win32).
* [1] - zipinfo.compress_type
* [2] - zipinfo.compress_size
* [3] - zipinfo.file_size
* [4] - len(utf-8 encoding of filename) if zipinfo & 0x800
len(ascii encoding of filename) otherwise
* [5] - (zipinfo.date_time[0] - 1980) << 9 |
zipinfo.date_time[1] << 5 | zipinfo.date_time[2]
* [6] - (zipinfo.date_time[3] - 1980) << 11 |
zipinfo.date_time[4] << 5 | (zipinfo.date_time[5] // 2)
* [7] - zipinfo.CRC
"""
zipinfo = dict()
zfile = zipfile.ZipFile(path)
    # ZipFile has no __exit__ on Python 3.1, so close it explicitly
try:
for zitem in zfile.namelist():
zpath = zitem.replace('/', os.sep)
zipinfo[zpath] = zfile.getinfo(zitem)
assert zipinfo[zpath] is not None
finally:
zfile.close()
return zipinfo
class ZipProvider(EggProvider):
"""Resource support for zips and eggs"""
eagers = None
def __init__(self, module):
EggProvider.__init__(self,module)
self.zipinfo = build_zipmanifest(self.loader.archive)
self.zip_pre = self.loader.archive+os.sep
def _zipinfo_name(self, fspath):
# Convert a virtual filename (full path to file) into a zipfile subpath
# usable with the zipimport directory cache for our target archive
if fspath.startswith(self.zip_pre):
return fspath[len(self.zip_pre):]
raise AssertionError(
"%s is not a subpath of %s" % (fspath,self.zip_pre)
)
def _parts(self,zip_path):
# Convert a zipfile subpath into an egg-relative path part list
fspath = self.zip_pre+zip_path # pseudo-fs path
if fspath.startswith(self.egg_root+os.sep):
return fspath[len(self.egg_root)+1:].split(os.sep)
raise AssertionError(
"%s is not a subpath of %s" % (fspath,self.egg_root)
)
def get_resource_filename(self, manager, resource_name):
if not self.egg_name:
raise NotImplementedError(
"resource_filename() only supported for .egg, not .zip"
)
# no need to lock for extraction, since we use temp names
zip_path = self._resource_to_zip(resource_name)
eagers = self._get_eager_resources()
if '/'.join(self._parts(zip_path)) in eagers:
for name in eagers:
self._extract_resource(manager, self._eager_to_zip(name))
return self._extract_resource(manager, zip_path)
@staticmethod
def _get_date_and_size(zip_stat):
size = zip_stat.file_size
date_time = zip_stat.date_time + (0, 0, -1) # ymdhms+wday, yday, dst
#1980 offset already done
timestamp = time.mktime(date_time)
return timestamp, size
def _extract_resource(self, manager, zip_path):
if zip_path in self._index():
for name in self._index()[zip_path]:
last = self._extract_resource(
manager, os.path.join(zip_path, name)
)
return os.path.dirname(last) # return the extracted directory name
timestamp, size = self._get_date_and_size(self.zipinfo[zip_path])
if not WRITE_SUPPORT:
raise IOError('"os.rename" and "os.unlink" are not supported '
'on this platform')
try:
real_path = manager.get_cache_path(
self.egg_name, self._parts(zip_path)
)
if self._is_current(real_path, zip_path):
return real_path
outf, tmpnam = _mkstemp(".$extract", dir=os.path.dirname(real_path))
os.write(outf, self.loader.get_data(zip_path))
os.close(outf)
utime(tmpnam, (timestamp,timestamp))
manager.postprocess(tmpnam, real_path)
try:
rename(tmpnam, real_path)
except os.error:
if os.path.isfile(real_path):
if self._is_current(real_path, zip_path):
# the file became current since it was checked above,
# so proceed.
return real_path
elif os.name=='nt': # Windows, del old file and retry
unlink(real_path)
rename(tmpnam, real_path)
return real_path
raise
except os.error:
manager.extraction_error() # report a user-friendly error
return real_path
def _is_current(self, file_path, zip_path):
"""
Return True if the file_path is current for this zip_path
"""
timestamp, size = self._get_date_and_size(self.zipinfo[zip_path])
if not os.path.isfile(file_path):
return False
stat = os.stat(file_path)
if stat.st_size!=size or stat.st_mtime!=timestamp:
return False
# check that the contents match
zip_contents = self.loader.get_data(zip_path)
f = open(file_path, 'rb')
file_contents = f.read()
f.close()
return zip_contents == file_contents
def _get_eager_resources(self):
if self.eagers is None:
eagers = []
for name in ('native_libs.txt', 'eager_resources.txt'):
if self.has_metadata(name):
eagers.extend(self.get_metadata_lines(name))
self.eagers = eagers
return self.eagers
def _index(self):
try:
return self._dirindex
except AttributeError:
ind = {}
for path in self.zipinfo:
parts = path.split(os.sep)
while parts:
parent = os.sep.join(parts[:-1])
if parent in ind:
ind[parent].append(parts[-1])
break
else:
ind[parent] = [parts.pop()]
self._dirindex = ind
return ind
def _has(self, fspath):
zip_path = self._zipinfo_name(fspath)
return zip_path in self.zipinfo or zip_path in self._index()
def _isdir(self,fspath):
return self._zipinfo_name(fspath) in self._index()
def _listdir(self,fspath):
return list(self._index().get(self._zipinfo_name(fspath), ()))
def _eager_to_zip(self,resource_name):
return self._zipinfo_name(self._fn(self.egg_root,resource_name))
def _resource_to_zip(self,resource_name):
return self._zipinfo_name(self._fn(self.module_path,resource_name))
register_loader_type(zipimport.zipimporter, ZipProvider)
class FileMetadata(EmptyProvider):
"""Metadata handler for standalone PKG-INFO files
Usage::
metadata = FileMetadata("/path/to/PKG-INFO")
This provider rejects all data and metadata requests except for PKG-INFO,
which is treated as existing, and will be the contents of the file at
the provided location.
"""
def __init__(self,path):
self.path = path
def has_metadata(self,name):
return name=='PKG-INFO'
def get_metadata(self,name):
if name=='PKG-INFO':
f = open(self.path,'rU')
metadata = f.read()
f.close()
return metadata
raise KeyError("No metadata except PKG-INFO is available")
def get_metadata_lines(self,name):
return yield_lines(self.get_metadata(name))
class PathMetadata(DefaultProvider):
"""Metadata provider for egg directories
Usage::
# Development eggs:
egg_info = "/path/to/PackageName.egg-info"
base_dir = os.path.dirname(egg_info)
metadata = PathMetadata(base_dir, egg_info)
dist_name = os.path.splitext(os.path.basename(egg_info))[0]
dist = Distribution(basedir,project_name=dist_name,metadata=metadata)
# Unpacked egg directories:
egg_path = "/path/to/PackageName-ver-pyver-etc.egg"
metadata = PathMetadata(egg_path, os.path.join(egg_path,'EGG-INFO'))
dist = Distribution.from_filename(egg_path, metadata=metadata)
"""
def __init__(self, path, egg_info):
self.module_path = path
self.egg_info = egg_info
class EggMetadata(ZipProvider):
"""Metadata provider for .egg files"""
def __init__(self, importer):
"""Create a metadata provider from a zipimporter"""
self.zipinfo = build_zipmanifest(importer.archive)
self.zip_pre = importer.archive+os.sep
self.loader = importer
if importer.prefix:
self.module_path = os.path.join(importer.archive, importer.prefix)
else:
self.module_path = importer.archive
self._setup_prefix()
_declare_state('dict', _distribution_finders = {})
def register_finder(importer_type, distribution_finder):
"""Register `distribution_finder` to find distributions in sys.path items
`importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
handler), and `distribution_finder` is a callable that, passed a path
item and the importer instance, yields ``Distribution`` instances found on
that path item. See ``pkg_resources.find_on_path`` for an example."""
_distribution_finders[importer_type] = distribution_finder
def find_distributions(path_item, only=False):
"""Yield distributions accessible via `path_item`"""
importer = get_importer(path_item)
finder = _find_adapter(_distribution_finders, importer)
return finder(importer, path_item, only)
def find_eggs_in_zip(importer, path_item, only=False):
"""
Find eggs in zip files; possibly multiple nested eggs.
"""
if importer.archive.endswith('.whl'):
# wheels are not supported with this finder
# they don't have PKG-INFO metadata, and won't ever contain eggs
return
metadata = EggMetadata(importer)
if metadata.has_metadata('PKG-INFO'):
yield Distribution.from_filename(path_item, metadata=metadata)
if only:
return # don't yield nested distros
for subitem in metadata.resource_listdir('/'):
if subitem.endswith('.egg'):
subpath = os.path.join(path_item, subitem)
for dist in find_eggs_in_zip(zipimport.zipimporter(subpath), subpath):
yield dist
register_finder(zipimport.zipimporter, find_eggs_in_zip)
def find_nothing(importer, path_item, only=False):
return ()
register_finder(object,find_nothing)
def find_on_path(importer, path_item, only=False):
"""Yield distributions accessible on a sys.path directory"""
path_item = _normalize_cached(path_item)
if os.path.isdir(path_item) and os.access(path_item, os.R_OK):
if path_item.lower().endswith('.egg'):
# unpacked egg
yield Distribution.from_filename(
path_item, metadata=PathMetadata(
path_item, os.path.join(path_item,'EGG-INFO')
)
)
else:
# scan for .egg and .egg-info in directory
for entry in os.listdir(path_item):
lower = entry.lower()
if lower.endswith('.egg-info') or lower.endswith('.dist-info'):
fullpath = os.path.join(path_item, entry)
if os.path.isdir(fullpath):
# egg-info directory, allow getting metadata
metadata = PathMetadata(path_item, fullpath)
else:
metadata = FileMetadata(fullpath)
yield Distribution.from_location(
path_item,entry,metadata,precedence=DEVELOP_DIST
)
elif not only and lower.endswith('.egg'):
for dist in find_distributions(os.path.join(path_item, entry)):
yield dist
elif not only and lower.endswith('.egg-link'):
entry_file = open(os.path.join(path_item, entry))
try:
entry_lines = entry_file.readlines()
finally:
entry_file.close()
for line in entry_lines:
if not line.strip(): continue
for item in find_distributions(os.path.join(path_item,line.rstrip())):
yield item
break
register_finder(pkgutil.ImpImporter,find_on_path)
if importlib_bootstrap is not None:
register_finder(importlib_bootstrap.FileFinder, find_on_path)
_declare_state('dict', _namespace_handlers={})
_declare_state('dict', _namespace_packages={})
def register_namespace_handler(importer_type, namespace_handler):
"""Register `namespace_handler` to declare namespace packages
`importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
handler), and `namespace_handler` is a callable like this::
def namespace_handler(importer,path_entry,moduleName,module):
# return a path_entry to use for child packages
Namespace handlers are only called if the importer object has already
agreed that it can handle the relevant path item, and they should only
return a subpath if the module __path__ does not already contain an
equivalent subpath. For an example namespace handler, see
``pkg_resources.file_ns_handler``.
"""
_namespace_handlers[importer_type] = namespace_handler
def _handle_ns(packageName, path_item):
"""Ensure that named package includes a subpath of path_item (if needed)"""
importer = get_importer(path_item)
if importer is None:
return None
loader = importer.find_module(packageName)
if loader is None:
return None
module = sys.modules.get(packageName)
if module is None:
module = sys.modules[packageName] = imp.new_module(packageName)
module.__path__ = []
_set_parent_ns(packageName)
elif not hasattr(module,'__path__'):
raise TypeError("Not a package:", packageName)
handler = _find_adapter(_namespace_handlers, importer)
subpath = handler(importer, path_item, packageName, module)
if subpath is not None:
path = module.__path__
path.append(subpath)
loader.load_module(packageName)
for path_item in path:
if path_item not in module.__path__:
module.__path__.append(path_item)
return subpath
def declare_namespace(packageName):
"""Declare that package 'packageName' is a namespace package"""
imp.acquire_lock()
try:
if packageName in _namespace_packages:
return
path, parent = sys.path, None
if '.' in packageName:
parent = '.'.join(packageName.split('.')[:-1])
declare_namespace(parent)
if parent not in _namespace_packages:
__import__(parent)
try:
path = sys.modules[parent].__path__
except AttributeError:
raise TypeError("Not a package:", parent)
# Track what packages are namespaces, so when new path items are added,
# they can be updated
_namespace_packages.setdefault(parent,[]).append(packageName)
_namespace_packages.setdefault(packageName,[])
for path_item in path:
# Ensure all the parent's path items are reflected in the child,
# if they apply
_handle_ns(packageName, path_item)
finally:
imp.release_lock()
def fixup_namespace_packages(path_item, parent=None):
"""Ensure that previously-declared namespace packages include path_item"""
imp.acquire_lock()
try:
for package in _namespace_packages.get(parent,()):
subpath = _handle_ns(package, path_item)
if subpath: fixup_namespace_packages(subpath,package)
finally:
imp.release_lock()
def file_ns_handler(importer, path_item, packageName, module):
"""Compute an ns-package subpath for a filesystem or zipfile importer"""
subpath = os.path.join(path_item, packageName.split('.')[-1])
normalized = _normalize_cached(subpath)
for item in module.__path__:
if _normalize_cached(item)==normalized:
break
else:
# Only return the path if it's not already there
return subpath
register_namespace_handler(pkgutil.ImpImporter,file_ns_handler)
register_namespace_handler(zipimport.zipimporter,file_ns_handler)
if importlib_bootstrap is not None:
register_namespace_handler(importlib_bootstrap.FileFinder, file_ns_handler)
def null_ns_handler(importer, path_item, packageName, module):
return None
register_namespace_handler(object,null_ns_handler)
def normalize_path(filename):
"""Normalize a file/dir name for comparison purposes"""
return os.path.normcase(os.path.realpath(filename))
def _normalize_cached(filename,_cache={}):
try:
return _cache[filename]
except KeyError:
_cache[filename] = result = normalize_path(filename)
return result
def _set_parent_ns(packageName):
parts = packageName.split('.')
name = parts.pop()
if parts:
parent = '.'.join(parts)
setattr(sys.modules[parent], name, sys.modules[packageName])
def yield_lines(strs):
"""Yield non-empty/non-comment lines of a ``basestring`` or sequence"""
if isinstance(strs,basestring):
for s in strs.splitlines():
s = s.strip()
if s and not s.startswith('#'): # skip blank lines/comments
yield s
else:
for ss in strs:
for s in yield_lines(ss):
yield s
LINE_END = re.compile(r"\s*(#.*)?$").match # whitespace and comment
CONTINUE = re.compile(r"\s*\\\s*(#.*)?$").match # line continuation
DISTRO = re.compile(r"\s*((\w|[-.])+)").match # Distribution or extra
VERSION = re.compile(r"\s*(<=?|>=?|==|!=)\s*((\w|[-.])+)").match # ver. info
COMMA = re.compile(r"\s*,").match # comma between items
OBRACKET = re.compile(r"\s*\[").match
CBRACKET = re.compile(r"\s*\]").match
MODULE = re.compile(r"\w+(\.\w+)*$").match
EGG_NAME = re.compile(
r"(?P<name>[^-]+)"
r"( -(?P<ver>[^-]+) (-py(?P<pyver>[^-]+) (-(?P<plat>.+))? )? )?",
re.VERBOSE | re.IGNORECASE
).match
component_re = re.compile(r'(\d+ | [a-z]+ | \.| -)', re.VERBOSE)
replace = {'pre':'c', 'preview':'c','-':'final-','rc':'c','dev':'@'}.get
def _parse_version_parts(s):
for part in component_re.split(s):
part = replace(part,part)
if not part or part=='.':
continue
if part[:1] in '0123456789':
yield part.zfill(8) # pad for numeric comparison
else:
yield '*'+part
yield '*final' # ensure that alpha/beta/candidate are before final
def parse_version(s):
"""Convert a version string to a chronologically-sortable key
This is a rough cross between distutils' StrictVersion and LooseVersion;
if you give it versions that would work with StrictVersion, then it behaves
the same; otherwise it acts like a slightly-smarter LooseVersion. It is
*possible* to create pathological version coding schemes that will fool
this parser, but they should be very rare in practice.
The returned value will be a tuple of strings. Numeric portions of the
version are padded to 8 digits so they will compare numerically, but
without relying on how numbers compare relative to strings. Dots are
dropped, but dashes are retained. Trailing zeros between alpha segments
or dashes are suppressed, so that e.g. "2.4.0" is considered the same as
"2.4". Alphanumeric parts are lower-cased.
    The algorithm assumes that strings like "-" and any alpha string that
    alphabetically follows "final" represent a "patch level". So, "2.4-1"
is assumed to be a branch or patch of "2.4", and therefore "2.4.1" is
considered newer than "2.4-1", which in turn is newer than "2.4".
Strings like "a", "b", "c", "alpha", "beta", "candidate" and so on (that
come before "final" alphabetically) are assumed to be pre-release versions,
so that the version "2.4" is considered newer than "2.4a1".
Finally, to handle miscellaneous cases, the strings "pre", "preview", and
"rc" are treated as if they were "c", i.e. as though they were release
candidates, and therefore are not as new as a version string that does not
    contain them, and "dev" is replaced with an '@' so that it sorts lower
    than any other pre-release tag.
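    Examples following the rules above (a sketch)::
        parse_version('2.4.0') == parse_version('2.4')   # trailing zero dropped
        parse_version('2.4') > parse_version('2.4a1')    # pre-release sorts lower
        parse_version('2.4.1') > parse_version('2.4-1')  # patch level of 2.4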
"""
parts = []
for part in _parse_version_parts(s.lower()):
if part.startswith('*'):
if part<'*final': # remove '-' before a prerelease tag
while parts and parts[-1]=='*final-': parts.pop()
# remove trailing zeros from each series of numeric parts
while parts and parts[-1]=='00000000':
parts.pop()
parts.append(part)
return tuple(parts)
class EntryPoint(object):
"""Object representing an advertised importable object"""
def __init__(self, name, module_name, attrs=(), extras=(), dist=None):
if not MODULE(module_name):
raise ValueError("Invalid module name", module_name)
self.name = name
self.module_name = module_name
self.attrs = tuple(attrs)
self.extras = Requirement.parse(("x[%s]" % ','.join(extras))).extras
self.dist = dist
def __str__(self):
s = "%s = %s" % (self.name, self.module_name)
if self.attrs:
s += ':' + '.'.join(self.attrs)
if self.extras:
s += ' [%s]' % ','.join(self.extras)
return s
def __repr__(self):
return "EntryPoint.parse(%r)" % str(self)
def load(self, require=True, env=None, installer=None):
if require: self.require(env, installer)
entry = __import__(self.module_name, globals(),globals(), ['__name__'])
for attr in self.attrs:
try:
entry = getattr(entry,attr)
except AttributeError:
raise ImportError("%r has no %r attribute" % (entry,attr))
return entry
def require(self, env=None, installer=None):
if self.extras and not self.dist:
raise UnknownExtra("Can't require() without a distribution", self)
list(map(working_set.add,
working_set.resolve(self.dist.requires(self.extras),env,installer)))
@classmethod
def parse(cls, src, dist=None):
"""Parse a single entry point from string `src`
Entry point syntax follows the form::
name = some.module:some.attr [extra1,extra2]
The entry name and module name are required, but the ``:attrs`` and
``[extras]`` parts are optional
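        Example usage (a sketch; all names are hypothetical)::
            ep = EntryPoint.parse("main = mypkg.cli:run [extra1]")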
"""
try:
attrs = extras = ()
name,value = src.split('=',1)
if '[' in value:
value,extras = value.split('[',1)
req = Requirement.parse("x["+extras)
if req.specs: raise ValueError
extras = req.extras
if ':' in value:
value,attrs = value.split(':',1)
if not MODULE(attrs.rstrip()):
raise ValueError
attrs = attrs.rstrip().split('.')
except ValueError:
raise ValueError(
"EntryPoint must be in 'name=module:attrs [extras]' format",
src
)
else:
return cls(name.strip(), value.strip(), attrs, extras, dist)
@classmethod
def parse_group(cls, group, lines, dist=None):
"""Parse an entry point group"""
if not MODULE(group):
raise ValueError("Invalid group name", group)
this = {}
for line in yield_lines(lines):
ep = cls.parse(line, dist)
if ep.name in this:
raise ValueError("Duplicate entry point", group, ep.name)
this[ep.name]=ep
return this
@classmethod
def parse_map(cls, data, dist=None):
"""Parse a map of entry point groups"""
if isinstance(data,dict):
data = data.items()
else:
data = split_sections(data)
maps = {}
for group, lines in data:
if group is None:
if not lines:
continue
raise ValueError("Entry points must be listed in groups")
group = group.strip()
if group in maps:
raise ValueError("Duplicate group name", group)
maps[group] = cls.parse_group(group, lines, dist)
return maps
def _remove_md5_fragment(location):
if not location:
return ''
parsed = urlparse(location)
if parsed[-1].startswith('md5='):
return urlunparse(parsed[:-1] + ('',))
return location
class Distribution(object):
"""Wrap an actual or potential sys.path entry w/metadata"""
PKG_INFO = 'PKG-INFO'
def __init__(self, location=None, metadata=None, project_name=None,
version=None, py_version=PY_MAJOR, platform=None,
precedence=EGG_DIST):
self.project_name = safe_name(project_name or 'Unknown')
if version is not None:
self._version = safe_version(version)
self.py_version = py_version
self.platform = platform
self.location = location
self.precedence = precedence
self._provider = metadata or empty_provider
@classmethod
def from_location(cls,location,basename,metadata=None,**kw):
project_name, version, py_version, platform = [None]*4
basename, ext = os.path.splitext(basename)
if ext.lower() in _distributionImpl:
            # .dist-info distributions get much of their metadata differently
match = EGG_NAME(basename)
if match:
project_name, version, py_version, platform = match.group(
'name','ver','pyver','plat'
)
cls = _distributionImpl[ext.lower()]
return cls(
location, metadata, project_name=project_name, version=version,
py_version=py_version, platform=platform, **kw
)
hashcmp = property(
lambda self: (
getattr(self,'parsed_version',()),
self.precedence,
self.key,
_remove_md5_fragment(self.location),
self.py_version,
self.platform
)
)
def __hash__(self): return hash(self.hashcmp)
def __lt__(self, other):
return self.hashcmp < other.hashcmp
def __le__(self, other):
return self.hashcmp <= other.hashcmp
def __gt__(self, other):
return self.hashcmp > other.hashcmp
def __ge__(self, other):
return self.hashcmp >= other.hashcmp
def __eq__(self, other):
if not isinstance(other, self.__class__):
# It's not a Distribution, so they are not equal
return False
return self.hashcmp == other.hashcmp
def __ne__(self, other):
return not self == other
# These properties have to be lazy so that we don't have to load any
# metadata until/unless it's actually needed. (i.e., some distributions
# may not know their name or version without loading PKG-INFO)
@property
def key(self):
try:
return self._key
except AttributeError:
self._key = key = self.project_name.lower()
return key
@property
def parsed_version(self):
try:
return self._parsed_version
except AttributeError:
self._parsed_version = pv = parse_version(self.version)
return pv
@property
def version(self):
try:
return self._version
except AttributeError:
for line in self._get_metadata(self.PKG_INFO):
if line.lower().startswith('version:'):
self._version = safe_version(line.split(':',1)[1].strip())
return self._version
else:
raise ValueError(
"Missing 'Version:' header and/or %s file" % self.PKG_INFO, self
)
@property
def _dep_map(self):
try:
return self.__dep_map
except AttributeError:
dm = self.__dep_map = {None: []}
for name in 'requires.txt', 'depends.txt':
for extra,reqs in split_sections(self._get_metadata(name)):
if extra:
if ':' in extra:
extra, marker = extra.split(':',1)
if invalid_marker(marker):
reqs=[] # XXX warn
elif not evaluate_marker(marker):
reqs=[]
extra = safe_extra(extra) or None
dm.setdefault(extra,[]).extend(parse_requirements(reqs))
return dm
def requires(self,extras=()):
"""List of Requirements needed for this distro if `extras` are used"""
dm = self._dep_map
deps = []
deps.extend(dm.get(None,()))
for ext in extras:
try:
deps.extend(dm[safe_extra(ext)])
except KeyError:
raise UnknownExtra(
"%s has no such extra feature %r" % (self, ext)
)
return deps
def _get_metadata(self,name):
if self.has_metadata(name):
for line in self.get_metadata_lines(name):
yield line
def activate(self,path=None):
"""Ensure distribution is importable on `path` (default=sys.path)"""
if path is None: path = sys.path
self.insert_on(path)
if path is sys.path:
fixup_namespace_packages(self.location)
list(map(declare_namespace, self._get_metadata('namespace_packages.txt')))
def egg_name(self):
"""Return what this distribution's standard .egg filename should be"""
filename = "%s-%s-py%s" % (
to_filename(self.project_name), to_filename(self.version),
self.py_version or PY_MAJOR
)
if self.platform:
filename += '-'+self.platform
return filename
def __repr__(self):
if self.location:
return "%s (%s)" % (self,self.location)
else:
return str(self)
def __str__(self):
try: version = getattr(self,'version',None)
except ValueError: version = None
version = version or "[unknown version]"
return "%s %s" % (self.project_name,version)
def __getattr__(self,attr):
"""Delegate all unrecognized public attributes to .metadata provider"""
if attr.startswith('_'):
raise AttributeError(attr)
return getattr(self._provider, attr)
@classmethod
def from_filename(cls,filename,metadata=None, **kw):
return cls.from_location(
_normalize_cached(filename), os.path.basename(filename), metadata,
**kw
)
def as_requirement(self):
"""Return a ``Requirement`` that matches this distribution exactly"""
return Requirement.parse('%s==%s' % (self.project_name, self.version))
def load_entry_point(self, group, name):
"""Return the `name` entry point of `group` or raise ImportError"""
ep = self.get_entry_info(group,name)
if ep is None:
raise ImportError("Entry point %r not found" % ((group,name),))
return ep.load()
def get_entry_map(self, group=None):
"""Return the entry point map for `group`, or the full entry map"""
try:
ep_map = self._ep_map
except AttributeError:
ep_map = self._ep_map = EntryPoint.parse_map(
self._get_metadata('entry_points.txt'), self
)
if group is not None:
return ep_map.get(group,{})
return ep_map
def get_entry_info(self, group, name):
"""Return the EntryPoint object for `group`+`name`, or ``None``"""
return self.get_entry_map(group).get(name)
def insert_on(self, path, loc = None):
"""Insert self.location in path before its nearest parent directory"""
loc = loc or self.location
if not loc:
return
nloc = _normalize_cached(loc)
bdir = os.path.dirname(nloc)
npath= [(p and _normalize_cached(p) or p) for p in path]
for p, item in enumerate(npath):
if item==nloc:
break
elif item==bdir and self.precedence==EGG_DIST:
# if it's an .egg, give it precedence over its directory
if path is sys.path:
self.check_version_conflict()
path.insert(p, loc)
npath.insert(p, nloc)
break
else:
if path is sys.path:
self.check_version_conflict()
path.append(loc)
return
# p is the spot where we found or inserted loc; now remove duplicates
while 1:
try:
np = npath.index(nloc, p+1)
except ValueError:
break
else:
del npath[np], path[np]
p = np # ha!
return
def check_version_conflict(self):
if self.key=='setuptools':
return # ignore the inevitable setuptools self-conflicts :(
nsp = dict.fromkeys(self._get_metadata('namespace_packages.txt'))
loc = normalize_path(self.location)
for modname in self._get_metadata('top_level.txt'):
if (modname not in sys.modules or modname in nsp
or modname in _namespace_packages):
continue
if modname in ('pkg_resources', 'setuptools', 'site'):
continue
fn = getattr(sys.modules[modname], '__file__', None)
if fn and (normalize_path(fn).startswith(loc) or
fn.startswith(self.location)):
continue
issue_warning(
"Module %s was already imported from %s, but %s is being added"
" to sys.path" % (modname, fn, self.location),
)
def has_version(self):
try:
self.version
except ValueError:
issue_warning("Unbuilt egg for "+repr(self))
return False
return True
def clone(self,**kw):
"""Copy this distribution, substituting in any changed keyword args"""
for attr in (
'project_name', 'version', 'py_version', 'platform', 'location',
'precedence'
):
kw.setdefault(attr, getattr(self,attr,None))
kw.setdefault('metadata', self._provider)
return self.__class__(**kw)
@property
def extras(self):
return [dep for dep in self._dep_map if dep]
class DistInfoDistribution(Distribution):
"""Wrap an actual or potential sys.path entry w/metadata, .dist-info style"""
PKG_INFO = 'METADATA'
EQEQ = re.compile(r"([\(,])\s*(\d.*?)\s*([,\)])")
@property
def _parsed_pkg_info(self):
"""Parse and cache metadata"""
try:
return self._pkg_info
except AttributeError:
from email.parser import Parser
self._pkg_info = Parser().parsestr(self.get_metadata(self.PKG_INFO))
return self._pkg_info
@property
def _dep_map(self):
try:
return self.__dep_map
except AttributeError:
self.__dep_map = self._compute_dependencies()
return self.__dep_map
def _preparse_requirement(self, requires_dist):
"""Convert 'Foobar (1); baz' to ('Foobar ==1', 'baz')
Split environment marker, add == prefix to version specifiers as
necessary, and remove parenthesis.
"""
parts = requires_dist.split(';', 1) + ['']
distvers = parts[0].strip()
mark = parts[1].strip()
distvers = re.sub(self.EQEQ, r"\1==\2\3", distvers)
distvers = distvers.replace('(', '').replace(')', '')
return (distvers, mark)
def _compute_dependencies(self):
"""Recompute this distribution's dependencies."""
from _markerlib import compile as compile_marker
dm = self.__dep_map = {None: []}
reqs = []
# Including any condition expressions
for req in self._parsed_pkg_info.get_all('Requires-Dist') or []:
distvers, mark = self._preparse_requirement(req)
parsed = next(parse_requirements(distvers))
parsed.marker_fn = compile_marker(mark)
reqs.append(parsed)
def reqs_for_extra(extra):
for req in reqs:
if req.marker_fn(override={'extra':extra}):
yield req
common = frozenset(reqs_for_extra(None))
dm[None].extend(common)
for extra in self._parsed_pkg_info.get_all('Provides-Extra') or []:
extra = safe_extra(extra.strip())
dm[extra] = list(frozenset(reqs_for_extra(extra)) - common)
return dm
_distributionImpl = {
'.egg': Distribution,
'.egg-info': Distribution,
'.dist-info': DistInfoDistribution,
}
def issue_warning(*args,**kw):
level = 1
g = globals()
try:
# find the first stack frame that is *not* code in
# the pkg_resources module, to use for the warning
while sys._getframe(level).f_globals is g:
level += 1
except ValueError:
pass
from warnings import warn
warn(stacklevel = level+1, *args, **kw)
def parse_requirements(strs):
"""Yield ``Requirement`` objects for each specification in `strs`
`strs` must be an instance of ``basestring``, or a (possibly-nested)
iterable thereof.
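    Example usage (a sketch; the requirement string is hypothetical)::
        reqs = list(parse_requirements("FooBar>=1.2,<2.0"))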
"""
# create a steppable iterator, so we can handle \-continuations
lines = iter(yield_lines(strs))
def scan_list(ITEM,TERMINATOR,line,p,groups,item_name):
items = []
while not TERMINATOR(line,p):
if CONTINUE(line,p):
try:
line = next(lines)
p = 0
except StopIteration:
raise ValueError(
"\\ must not appear on the last nonblank line"
)
match = ITEM(line,p)
if not match:
raise ValueError("Expected "+item_name+" in",line,"at",line[p:])
items.append(match.group(*groups))
p = match.end()
match = COMMA(line,p)
if match:
p = match.end() # skip the comma
elif not TERMINATOR(line,p):
raise ValueError(
"Expected ',' or end-of-list in",line,"at",line[p:]
)
match = TERMINATOR(line,p)
if match: p = match.end() # skip the terminator, if any
return line, p, items
for line in lines:
match = DISTRO(line)
if not match:
raise ValueError("Missing distribution spec", line)
project_name = match.group(1)
p = match.end()
extras = []
match = OBRACKET(line,p)
if match:
p = match.end()
line, p, extras = scan_list(
DISTRO, CBRACKET, line, p, (1,), "'extra' name"
)
line, p, specs = scan_list(VERSION,LINE_END,line,p,(1,2),"version spec")
specs = [(op,safe_version(val)) for op,val in specs]
yield Requirement(project_name, specs, extras)
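# Hedged usage sketch (not in the original source):
#     >>> [str(r) for r in parse_requirements("FooBar>=1.2\nBaz[extra1]")]
#     ['FooBar>=1.2', 'Baz[extra1]']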
def _sort_dists(dists):
tmp = [(dist.hashcmp,dist) for dist in dists]
tmp.sort()
dists[::-1] = [d for hc,d in tmp]
class Requirement:
def __init__(self, project_name, specs, extras):
"""DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!"""
self.unsafe_name, project_name = project_name, safe_name(project_name)
self.project_name, self.key = project_name, project_name.lower()
index = [(parse_version(v),state_machine[op],op,v) for op,v in specs]
index.sort()
self.specs = [(op,ver) for parsed,trans,op,ver in index]
self.index, self.extras = index, tuple(map(safe_extra,extras))
self.hashCmp = (
self.key, tuple([(op,parsed) for parsed,trans,op,ver in index]),
frozenset(self.extras)
)
self.__hash = hash(self.hashCmp)
def __str__(self):
specs = ','.join([''.join(s) for s in self.specs])
extras = ','.join(self.extras)
if extras: extras = '[%s]' % extras
return '%s%s%s' % (self.project_name, extras, specs)
def __eq__(self,other):
return isinstance(other,Requirement) and self.hashCmp==other.hashCmp
def __contains__(self,item):
if isinstance(item,Distribution):
if item.key != self.key: return False
if self.index: item = item.parsed_version # only get if we need it
elif isinstance(item,basestring):
item = parse_version(item)
last = None
compare = lambda a, b: (a > b) - (a < b) # -1, 0, 1
for parsed,trans,op,ver in self.index:
action = trans[compare(item,parsed)] # Indexing: 0, 1, -1
if action=='F':
return False
elif action=='T':
return True
elif action=='+':
last = True
elif action=='-' or last is None: last = False
if last is None: last = True # no rules encountered
return last
def __hash__(self):
return self.__hash
def __repr__(self): return "Requirement.parse(%r)" % str(self)
@staticmethod
def parse(s):
reqs = list(parse_requirements(s))
if reqs:
if len(reqs)==1:
return reqs[0]
raise ValueError("Expected only one requirement", s)
raise ValueError("No requirements found", s)
state_machine = {
# =><
'<': '--T',
'<=': 'T-T',
'>': 'F+F',
'>=': 'T+F',
'==': 'T..',
'!=': 'F++',
}
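# Hedged reading of the table above (not in the original source): each value
# is indexed by compare(candidate, spec) -> 0 ('='), 1 ('>'), -1 ('<'),
# matching the '=><' header comment. 'T'/'F' answer True/False immediately
# inside Requirement.__contains__; '+'/'-' record a tentative answer and let
# the remaining specs decide.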
def _get_mro(cls):
"""Get an mro for a type or classic class"""
if not isinstance(cls,type):
class cls(cls,object): pass
return cls.__mro__[1:]
return cls.__mro__
def _find_adapter(registry, ob):
"""Return an adapter factory for `ob` from `registry`"""
for t in _get_mro(getattr(ob, '__class__', type(ob))):
if t in registry:
return registry[t]
def ensure_directory(path):
"""Ensure that the parent directory of `path` exists"""
dirname = os.path.dirname(path)
if not os.path.isdir(dirname):
os.makedirs(dirname)
def split_sections(s):
"""Split a string or iterable thereof into (section,content) pairs
Each ``section`` is a stripped version of the section header ("[section]")
and each ``content`` is a list of stripped lines excluding blank lines and
comment-only lines. If there are any such lines before the first section
header, they're returned in a first ``section`` of ``None``.
"""
section = None
content = []
for line in yield_lines(s):
if line.startswith("["):
if line.endswith("]"):
if section or content:
yield section, content
section = line[1:-1].strip()
content = []
else:
raise ValueError("Invalid section heading", line)
else:
content.append(line)
# wrap up last segment
yield section, content
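# Hedged usage sketch (not in the original source); blank and comment-only
# lines are dropped by yield_lines:
#     >>> list(split_sections("a\n[sec]\nb\n\n# comment\nc"))
#     [(None, ['a']), ('sec', ['b', 'c'])]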
def _mkstemp(*args,**kw):
from tempfile import mkstemp
old_open = os.open
try:
os.open = os_open # temporarily bypass sandboxing
return mkstemp(*args,**kw)
finally:
os.open = old_open # and then put it back
# Set up global resource manager (deliberately not state-saved)
_manager = ResourceManager()
def _initialize(g):
for name in dir(_manager):
if not name.startswith('_'):
g[name] = getattr(_manager, name)
_initialize(globals())
# Prepare the master working set and make the ``require()`` API available
_declare_state('object', working_set = WorkingSet())
try:
# Does the main program list any requirements?
from __main__ import __requires__
except ImportError:
pass # No: just use the default working set based on sys.path
else:
# Yes: ensure the requirements are met, by prefixing sys.path if necessary
try:
working_set.require(__requires__)
except VersionConflict: # try it without defaults already on sys.path
working_set = WorkingSet([]) # by starting with an empty path
for dist in working_set.resolve(
parse_requirements(__requires__), Environment()
):
working_set.add(dist)
for entry in sys.path: # add any missing entries from sys.path
if entry not in working_set.entries:
working_set.add_entry(entry)
sys.path[:] = working_set.entries # then copy back to sys.path
require = working_set.require
iter_entry_points = working_set.iter_entry_points
add_activation_listener = working_set.subscribe
run_script = working_set.run_script
run_main = run_script # backward compatibility
# Activate all distributions already on sys.path, and ensure that
# all distributions added to the working set in the future (e.g. by
# calling ``require()``) will get activated as well.
add_activation_listener(lambda dist: dist.activate())
working_set.entries=[]
list(map(working_set.add_entry,sys.path)) # match order
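# Hedged usage sketch (not in the original source) of the module-level API
# bound just above; 'FooBar' and the entry-point group are hypothetical.
#     require('FooBar>=1.2')                       # make the distribution importable
#     for ep in iter_entry_points('console_scripts'):
#         print ep.name                            # Python 2 syntax, as in this module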
|
ryano144/intellij-community
|
refs/heads/master
|
python/testData/intentions/beforeImportFromToImport.py
|
83
|
from sys import *
for x in argv: print x
print version
|
capergroup/bayou
|
refs/heads/master
|
src/main/python/bayou/lda/__init__.py
|
12133432
| |
aosingh/lexpy
|
refs/heads/master
|
lexpy/_constants.py
|
12133432
| |
gunchleoc/django
|
refs/heads/master
|
tests/inline_formsets/__init__.py
|
12133432
| |
dahlstrom-g/intellij-community
|
refs/heads/master
|
python/testData/debug/qt/test_pyside2_3.py
|
13
|
from PySide2 import QtCore
import sys
class Runnable(QtCore.QRunnable):
def run(self):
app = QtCore.QCoreApplication.instance()
for i in range(3):
print("ping %d" % i)
app.quit()
app = QtCore.QCoreApplication([])
runnable = Runnable()
QtCore.QThreadPool.globalInstance().start(runnable)
sys.exit(app.exec_())
|
srimai/odoo
|
refs/heads/8.0
|
addons/mail/mail_thread.py
|
14
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2009-today OpenERP SA (<http://www.openerp.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import base64
from collections import OrderedDict
import datetime
import dateutil
import email
try:
import simplejson as json
except ImportError:
import json
from lxml import etree
import logging
import pytz
import re
import socket
import time
import xmlrpclib
from email.message import Message
from email.utils import formataddr
from urllib import urlencode
from openerp import api, tools
from openerp import SUPERUSER_ID
from openerp.addons.mail.mail_message import decode
from openerp.osv import fields, osv, orm
from openerp.osv.orm import BaseModel
from openerp.tools.safe_eval import safe_eval as eval
from openerp.tools.translate import _
_logger = logging.getLogger(__name__)
mail_header_msgid_re = re.compile('<[^<>]+>')
def decode_header(message, header, separator=' '):
return separator.join(map(decode, filter(None, message.get_all(header, []))))
class mail_thread(osv.AbstractModel):
''' mail_thread model is meant to be inherited by any model that needs to
act as a discussion topic on which messages can be attached. Public
methods are prefixed with ``message_`` in order to avoid name
collisions with methods of the models that will inherit from this class.
``mail.thread`` defines fields used to handle and display the
communication history. ``mail.thread`` also manages followers of
inheriting classes. All features and expected behavior are managed
by mail.thread. Widgets have been designed for the 7.0 and later
versions of OpenERP.
Inheriting classes are not required to implement any method, as the
default implementation will work for any model. However, it is common
to override at least the ``message_new`` and ``message_update``
methods (calling ``super``) to add model-specific behavior at
creation and update of a thread when processing incoming emails.
Options:
- _mail_flat_thread: if set to True, all messages without parent_id
are automatically attached to the first message posted on the
resource. If set to False, the display of Chatter is done using
threads, and no parent_id is automatically set.
'''
_name = 'mail.thread'
_description = 'Email Thread'
_mail_flat_thread = True
_mail_post_access = 'write'
# Automatic logging system if mail installed
# _track = {
# 'field': {
# 'module.subtype_xml': lambda self, cr, uid, obj, context=None: obj[state] == done,
# 'module.subtype_xml2': lambda self, cr, uid, obj, context=None: obj[state] != done,
# },
# 'field2': {
# ...
# },
# }
# where
# :param string field: field name
# :param module.subtype_xml: xml_id of a mail.message.subtype (i.e. mail.mt_comment)
# :param obj: is a browse_record
# :param function lambda: returns whether the tracking should record using this subtype
_track = {}
# Mass mailing feature
_mail_mass_mailing = False
def get_empty_list_help(self, cr, uid, help, context=None):
""" Override of BaseModel.get_empty_list_help() to generate an help message
that adds alias information. """
model = context.get('empty_list_help_model')
res_id = context.get('empty_list_help_id')
ir_config_parameter = self.pool.get("ir.config_parameter")
catchall_domain = ir_config_parameter.get_param(cr, SUPERUSER_ID, "mail.catchall.domain", context=context)
document_name = context.get('empty_list_help_document_name', _('document'))
alias = None
if catchall_domain and model and res_id: # specific res_id -> find its alias (i.e. section_id specified)
object_id = self.pool.get(model).browse(cr, uid, res_id, context=context)
# check that the alias effectively creates new records
if object_id.alias_id and object_id.alias_id.alias_name and \
object_id.alias_id.alias_model_id and \
object_id.alias_id.alias_model_id.model == self._name and \
object_id.alias_id.alias_force_thread_id == 0:
alias = object_id.alias_id
if not alias and catchall_domain and model: # no res_id or res_id not linked to an alias -> generic help message, take a generic alias of the model
alias_obj = self.pool.get('mail.alias')
alias_ids = alias_obj.search(cr, uid, [("alias_parent_model_id.model", "=", model), ("alias_name", "!=", False), ('alias_force_thread_id', '=', False), ('alias_parent_thread_id', '=', False)], context=context, order='id ASC')
if alias_ids and len(alias_ids) == 1:
alias = alias_obj.browse(cr, uid, alias_ids[0], context=context)
add_arrow = not help or help.find("oe_view_nocontent_create") == -1
if alias:
email_link = "<a href='mailto:%(email)s'>%(email)s</a>" % {'email': alias.name_get()[0][1]}
if add_arrow:
return _("""<p class='oe_view_nocontent_create'>
Click here to add new %(document)s or send an email to: %(email)s.
</p>
%(static_help)s"""
) % {
'document': document_name, 'email': email_link, 'static_help': help or ''
}
return _("""%(static_help)s
<p>
You could also add a new %(document)s by sending an email to: %(email)s.
</p>""") % {
'document': document_name, 'email': email_link, 'static_help': help or ''
}
if add_arrow:
return _("<p class='oe_view_nocontent_create'>Click here to add new %(document)s</p>%(static_help)s") % {
'document': document_name, 'static_help': help or ''
}
return help
def _get_message_data(self, cr, uid, ids, name, args, context=None):
""" Computes:
- message_unread: has uid unread message for the document
- message_summary: html snippet summarizing the Chatter for kanban views """
res = dict((id, dict(message_unread=False, message_unread_count=0, message_summary=' ')) for id in ids)
user_pid = self.pool.get('res.users').read(cr, uid, [uid], ['partner_id'], context=context)[0]['partner_id'][0]
# search for unread messages, directly in SQL to improve performance
cr.execute(""" SELECT m.res_id FROM mail_message m
RIGHT JOIN mail_notification n
ON (n.message_id = m.id AND n.partner_id = %s AND (n.is_read = False or n.is_read IS NULL))
WHERE m.model = %s AND m.res_id in %s""",
(user_pid, self._name, tuple(ids),))
for result in cr.fetchall():
res[result[0]]['message_unread'] = True
res[result[0]]['message_unread_count'] += 1
for id in ids:
if res[id]['message_unread_count']:
title = res[id]['message_unread_count'] > 1 and _("You have %d unread messages") % res[id]['message_unread_count'] or _("You have one unread message")
res[id]['message_summary'] = "<span class='oe_kanban_mail_new' title='%s'><span class='oe_e'>9</span> %d %s</span>" % (title, res[id].pop('message_unread_count'), _("New"))
res[id].pop('message_unread_count', None)
return res
def read_followers_data(self, cr, uid, follower_ids, context=None):
result = []
for follower in self.pool.get('res.partner').browse(cr, uid, follower_ids, context=context):
is_editable = self.pool['res.users'].has_group(cr, uid, 'base.group_no_one')
is_uid = uid in map(lambda x: x.id, follower.user_ids)
data = (follower.id,
follower.name,
{'is_editable': is_editable, 'is_uid': is_uid},
)
result.append(data)
return result
def _get_subscription_data(self, cr, uid, ids, name, args, user_pid=None, context=None):
""" Computes:
- message_subtype_data: data about document subtypes: which are
available, which are followed if any """
res = dict((id, dict(message_subtype_data='')) for id in ids)
if user_pid is None:
user_pid = self.pool.get('res.users').read(cr, uid, [uid], ['partner_id'], context=context)[0]['partner_id'][0]
# find current model subtypes, add them to a dictionary
subtype_obj = self.pool.get('mail.message.subtype')
subtype_ids = subtype_obj.search(
cr, uid, [
'&', ('hidden', '=', False), '|', ('res_model', '=', self._name), ('res_model', '=', False)
], context=context)
subtype_dict = OrderedDict(
(subtype.name, {
'default': subtype.default,
'followed': False,
'parent_model': subtype.parent_id and subtype.parent_id.res_model or self._name,
'id': subtype.id}
) for subtype in subtype_obj.browse(cr, uid, subtype_ids, context=context))
for id in ids:
res[id]['message_subtype_data'] = subtype_dict.copy()
# find the document followers, update the data
fol_obj = self.pool.get('mail.followers')
fol_ids = fol_obj.search(cr, uid, [
('partner_id', '=', user_pid),
('res_id', 'in', ids),
('res_model', '=', self._name),
], context=context)
for fol in fol_obj.browse(cr, uid, fol_ids, context=context):
thread_subtype_dict = res[fol.res_id]['message_subtype_data']
for subtype in [st for st in fol.subtype_ids if st.name in thread_subtype_dict]:
thread_subtype_dict[subtype.name]['followed'] = True
res[fol.res_id]['message_subtype_data'] = thread_subtype_dict
return res
def _search_message_unread(self, cr, uid, obj=None, name=None, domain=None, context=None):
return [('message_ids.to_read', '=', True)]
def _get_followers(self, cr, uid, ids, name, arg, context=None):
fol_obj = self.pool.get('mail.followers')
fol_ids = fol_obj.search(cr, SUPERUSER_ID, [('res_model', '=', self._name), ('res_id', 'in', ids)])
res = dict((id, dict(message_follower_ids=[], message_is_follower=False)) for id in ids)
user_pid = self.pool.get('res.users').read(cr, uid, [uid], ['partner_id'], context=context)[0]['partner_id'][0]
for fol in fol_obj.browse(cr, SUPERUSER_ID, fol_ids):
res[fol.res_id]['message_follower_ids'].append(fol.partner_id.id)
if fol.partner_id.id == user_pid:
res[fol.res_id]['message_is_follower'] = True
return res
def _set_followers(self, cr, uid, id, name, value, arg, context=None):
if not value:
return
partner_obj = self.pool.get('res.partner')
fol_obj = self.pool.get('mail.followers')
# read the old set of followers, and determine the new set of followers
fol_ids = fol_obj.search(cr, SUPERUSER_ID, [('res_model', '=', self._name), ('res_id', '=', id)])
old = set(fol.partner_id.id for fol in fol_obj.browse(cr, SUPERUSER_ID, fol_ids))
new = set(old)
for command in value or []:
if isinstance(command, (int, long)):
new.add(command)
elif command[0] == 0:
new.add(partner_obj.create(cr, uid, command[2], context=context))
elif command[0] == 1:
partner_obj.write(cr, uid, [command[1]], command[2], context=context)
new.add(command[1])
elif command[0] == 2:
partner_obj.unlink(cr, uid, [command[1]], context=context)
new.discard(command[1])
elif command[0] == 3:
new.discard(command[1])
elif command[0] == 4:
new.add(command[1])
elif command[0] == 5:
new.clear()
elif command[0] == 6:
new = set(command[2])
# remove partners that are no longer followers
self.message_unsubscribe(cr, uid, [id], list(old-new), context=context)
# add new followers
self.message_subscribe(cr, uid, [id], list(new-old), context=context)
def _search_followers(self, cr, uid, obj, name, args, context):
"""Search function for message_follower_ids
Do not use with operator 'not in'. Use message_is_follower instead.
"""
fol_obj = self.pool.get('mail.followers')
res = []
for field, operator, value in args:
assert field == name
# TOFIX make it work with not in
assert operator != "not in", "Do not search message_follower_ids with 'not in'"
fol_ids = fol_obj.search(cr, SUPERUSER_ID, [('res_model', '=', self._name), ('partner_id', operator, value)])
res_ids = [fol.res_id for fol in fol_obj.browse(cr, SUPERUSER_ID, fol_ids)]
res.append(('id', 'in', res_ids))
return res
def _search_is_follower(self, cr, uid, obj, name, args, context):
"""Search function for message_is_follower"""
res = []
for field, operator, value in args:
assert field == name
partner_id = self.pool.get('res.users').browse(cr, uid, uid, context=context).partner_id.id
if (operator == '=' and value) or (operator == '!=' and not value): # is a follower
res_ids = self.search(cr, uid, [('message_follower_ids', 'in', [partner_id])], context=context)
else: # is not a follower or unknown domain
mail_ids = self.search(cr, uid, [('message_follower_ids', 'in', [partner_id])], context=context)
res_ids = self.search(cr, uid, [('id', 'not in', mail_ids)], context=context)
res.append(('id', 'in', res_ids))
return res
_columns = {
'message_is_follower': fields.function(_get_followers, type='boolean',
fnct_search=_search_is_follower, string='Is a Follower', multi='_get_followers,'),
'message_follower_ids': fields.function(_get_followers, fnct_inv=_set_followers,
fnct_search=_search_followers, type='many2many', priority=-10,
obj='res.partner', string='Followers', multi='_get_followers'),
'message_ids': fields.one2many('mail.message', 'res_id',
domain=lambda self: [('model', '=', self._name)],
auto_join=True,
string='Messages',
help="Messages and communication history"),
'message_last_post': fields.datetime('Last Message Date',
help='Date of the last message posted on the record.'),
'message_unread': fields.function(_get_message_data,
fnct_search=_search_message_unread, multi="_get_message_data",
type='boolean', string='Unread Messages',
help="If checked new messages require your attention."),
'message_summary': fields.function(_get_message_data, method=True,
type='text', string='Summary', multi="_get_message_data",
help="Holds the Chatter summary (number of messages, ...). "\
"This summary is directly in html format in order to "\
"be inserted in kanban views."),
}
def _get_user_chatter_options(self, cr, uid, context=None):
options = {
'display_log_button': False
}
is_employee = self.pool['res.users'].has_group(cr, uid, 'base.group_user')
if is_employee:
options['display_log_button'] = True
return options
def fields_view_get(self, cr, uid, view_id=None, view_type='form', context=None, toolbar=False, submenu=False):
res = super(mail_thread, self).fields_view_get(cr, uid, view_id=view_id, view_type=view_type, context=context, toolbar=toolbar, submenu=submenu)
if view_type == 'form':
doc = etree.XML(res['arch'])
for node in doc.xpath("//field[@name='message_ids']"):
options = json.loads(node.get('options', '{}'))
options.update(self._get_user_chatter_options(cr, uid, context=context))
node.set('options', json.dumps(options))
res['arch'] = etree.tostring(doc)
return res
#------------------------------------------------------
# CRUD overrides for automatic subscription and logging
#------------------------------------------------------
def create(self, cr, uid, values, context=None):
""" Chatter override :
- subscribe uid
- subscribe followers of parent
- log a creation message
"""
if context is None:
context = {}
if context.get('tracking_disable'):
return super(mail_thread, self).create(
cr, uid, values, context=context)
# subscribe uid unless asked not to
if not context.get('mail_create_nosubscribe'):
pid = self.pool['res.users'].browse(cr, SUPERUSER_ID, uid).partner_id.id
message_follower_ids = values.get('message_follower_ids') or [] # webclient can send None or False
message_follower_ids.append([4, pid])
values['message_follower_ids'] = message_follower_ids
thread_id = super(mail_thread, self).create(cr, uid, values, context=context)
# automatic logging unless asked not to (mainly for various testing purpose)
if not context.get('mail_create_nolog'):
ir_model_pool = self.pool['ir.model']
ids = ir_model_pool.search(cr, uid, [('model', '=', self._name)], context=context)
name = ir_model_pool.read(cr, uid, ids, ['name'], context=context)[0]['name']
self.message_post(cr, uid, thread_id, body=_('%s created') % name, context=context)
# auto_subscribe: take values and defaults into account
create_values = dict(values)
for key, val in context.iteritems():
if key.startswith('default_') and key[8:] not in create_values:
create_values[key[8:]] = val
self.message_auto_subscribe(cr, uid, [thread_id], create_values.keys(), context=context, values=create_values)
# track values
track_ctx = dict(context)
if 'lang' not in track_ctx:
track_ctx['lang'] = self.pool.get('res.users').browse(cr, uid, uid, context=context).lang
if not context.get('mail_notrack'):
tracked_fields = self._get_tracked_fields(cr, uid, values.keys(), context=track_ctx)
if tracked_fields:
initial_values = {thread_id: dict.fromkeys(tracked_fields, False)}
self.message_track(cr, uid, [thread_id], tracked_fields, initial_values, context=track_ctx)
return thread_id
def write(self, cr, uid, ids, values, context=None):
if context is None:
context = {}
if isinstance(ids, (int, long)):
ids = [ids]
if context.get('tracking_disable'):
return super(mail_thread, self).write(
cr, uid, ids, values, context=context)
# Track initial values of tracked fields
track_ctx = dict(context)
if 'lang' not in track_ctx:
track_ctx['lang'] = self.pool.get('res.users').browse(cr, uid, uid, context=context).lang
tracked_fields = None
if not context.get('mail_notrack'):
tracked_fields = self._get_tracked_fields(cr, uid, values.keys(), context=track_ctx)
if tracked_fields:
records = self.browse(cr, uid, ids, context=track_ctx)
initial_values = dict((record.id, dict((key, getattr(record, key)) for key in tracked_fields))
for record in records)
# Perform write
result = super(mail_thread, self).write(cr, uid, ids, values, context=context)
# Perform the tracking
if tracked_fields:
self.message_track(cr, uid, ids, tracked_fields, initial_values, context=track_ctx)
# update followers
self.message_auto_subscribe(cr, uid, ids, values.keys(), context=context, values=values)
return result
def unlink(self, cr, uid, ids, context=None):
""" Override unlink to delete messages and followers. This cannot be
cascaded, because the link is done through (res_model, res_id). """
msg_obj = self.pool.get('mail.message')
fol_obj = self.pool.get('mail.followers')
if isinstance(ids, (int, long)):
ids = [ids]
# delete messages and notifications
msg_ids = msg_obj.search(cr, uid, [('model', '=', self._name), ('res_id', 'in', ids)], context=context)
msg_obj.unlink(cr, uid, msg_ids, context=context)
# delete
res = super(mail_thread, self).unlink(cr, uid, ids, context=context)
# delete followers
fol_ids = fol_obj.search(cr, SUPERUSER_ID, [('res_model', '=', self._name), ('res_id', 'in', ids)], context=context)
fol_obj.unlink(cr, SUPERUSER_ID, fol_ids, context=context)
return res
def copy_data(self, cr, uid, id, default=None, context=None):
# avoid tracking multiple temporary changes during copy
context = dict(context or {}, mail_notrack=True)
return super(mail_thread, self).copy_data(cr, uid, id, default=default, context=context)
#------------------------------------------------------
# Automatically log tracked fields
#------------------------------------------------------
def _get_tracked_fields(self, cr, uid, updated_fields, context=None):
""" Return a structure of tracked fields for the current model.
:param list updated_fields: modified field names
:return dict: a dict mapping field name to description, containing
always tracked fields and modified on_change fields
"""
tracked_fields = []
for name, field in self._fields.items():
visibility = getattr(field, 'track_visibility', False)
if visibility == 'always' or (visibility == 'onchange' and name in updated_fields) or name in self._track:
tracked_fields.append(name)
if tracked_fields:
return self.fields_get(cr, uid, tracked_fields, context=context)
return {}
def message_track(self, cr, uid, ids, tracked_fields, initial_values, context=None):
def convert_for_display(value, col_info):
if not value and col_info['type'] == 'boolean':
return 'False'
if not value:
return ''
if col_info['type'] == 'many2one':
return value.name_get()[0][1]
if col_info['type'] == 'selection':
return dict(col_info['selection'])[value]
return value
def format_message(message_description, tracked_values):
message = ''
if message_description:
message = '<span>%s</span>' % message_description
for name, change in tracked_values.items():
message += '<div> • <b>%s</b>: ' % change.get('col_info')
if change.get('old_value'):
message += '%s → ' % change.get('old_value')
message += '%s</div>' % change.get('new_value')
return message
if not tracked_fields:
return True
for browse_record in self.browse(cr, uid, ids, context=context):
initial = initial_values[browse_record.id]
changes = set()
tracked_values = {}
# generate tracked_values data structure: {'col_name': {col_info, new_value, old_value}}
for col_name, col_info in tracked_fields.items():
field = self._fields[col_name]
initial_value = initial[col_name]
record_value = getattr(browse_record, col_name)
if record_value == initial_value and getattr(field, 'track_visibility', None) == 'always':
tracked_values[col_name] = dict(
col_info=col_info['string'],
new_value=convert_for_display(record_value, col_info),
)
elif record_value != initial_value and (record_value or initial_value): # because browse null != False
if getattr(field, 'track_visibility', None) in ['always', 'onchange']:
tracked_values[col_name] = dict(
col_info=col_info['string'],
old_value=convert_for_display(initial_value, col_info),
new_value=convert_for_display(record_value, col_info),
)
if col_name in tracked_fields:
changes.add(col_name)
if not changes:
continue
# find subtypes and post messages or log if no subtype found
subtypes = []
# Passing this context key allows leaving the subtype empty, so that no email is sent because partners_to_notify from mail_message._notify will be empty
if not context.get('mail_track_log_only'):
for field, track_info in self._track.items():
if field not in changes:
continue
for subtype, method in track_info.items():
if method(self, cr, uid, browse_record, context):
subtypes.append(subtype)
posted = False
for subtype in subtypes:
subtype_rec = self.pool.get('ir.model.data').xmlid_to_object(cr, uid, subtype, context=context)
if not (subtype_rec and subtype_rec.exists()):
_logger.debug('subtype %s not found' % subtype)
continue
message = format_message(subtype_rec.description if subtype_rec.description else subtype_rec.name, tracked_values)
self.message_post(cr, uid, browse_record.id, body=message, subtype=subtype, context=context)
posted = True
if not posted:
message = format_message('', tracked_values)
self.message_post(cr, uid, browse_record.id, body=message, context=context)
return True
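# Hedged example (not part of the original addon) of the HTML snippet that
# format_message above produces; field label and values are hypothetical:
#     <span>Stage Changed</span><div> • <b>Stage</b>: Draft → Done</div>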
#------------------------------------------------------
# mail.message wrappers and tools
#------------------------------------------------------
def _needaction_domain_get(self, cr, uid, context=None):
if self._needaction:
return [('message_unread', '=', True)]
return []
def _garbage_collect_attachments(self, cr, uid, context=None):
""" Garbage collect lost mail attachments. Those are attachments
- linked to res_model 'mail.compose.message', the composer wizard
- with res_id 0, because they were created outside of an existing
wizard (typically user input through Chatter or reports
created on-the-fly by the templates)
- unused since at least one day (create_date and write_date)
"""
limit_date = datetime.datetime.utcnow() - datetime.timedelta(days=1)
limit_date_str = datetime.datetime.strftime(limit_date, tools.DEFAULT_SERVER_DATETIME_FORMAT)
ir_attachment_obj = self.pool.get('ir.attachment')
attach_ids = ir_attachment_obj.search(cr, uid, [
('res_model', '=', 'mail.compose.message'),
('res_id', '=', 0),
('create_date', '<', limit_date_str),
('write_date', '<', limit_date_str),
], context=context)
ir_attachment_obj.unlink(cr, uid, attach_ids, context=context)
return True
@api.cr_uid_ids_context
def check_mail_message_access(self, cr, uid, mids, operation, model_obj=None, context=None):
""" mail.message check permission rules for related document. This method is
meant to be inherited in order to implement addons-specific behavior.
A common behavior would be to allow creating messages when having read
access rule on the document, for portal document such as issues. """
if not model_obj:
model_obj = self
if hasattr(self, '_mail_post_access'):
create_allow = self._mail_post_access
else:
create_allow = 'write'
if operation in ['write', 'unlink']:
check_operation = 'write'
elif operation == 'create' and create_allow in ['create', 'read', 'write', 'unlink']:
check_operation = create_allow
elif operation == 'create':
check_operation = 'write'
else:
check_operation = operation
model_obj.check_access_rights(cr, uid, check_operation)
model_obj.check_access_rule(cr, uid, mids, check_operation, context=context)
def _get_inbox_action_xml_id(self, cr, uid, context=None):
""" When redirecting towards the Inbox, choose which action xml_id has
to be fetched. This method is meant to be inherited, at least in portal
because portal users have a different Inbox action than classic users. """
return ('mail', 'action_mail_inbox_feeds')
def message_redirect_action(self, cr, uid, context=None):
""" For a given message, return an action that either
- opens the form view of the related document if model, res_id, and
read access to the document
- opens the Inbox with a default search on the conversation if model,
res_id
- opens the Inbox with context propagated
"""
if context is None:
context = {}
# default action is the Inbox action
self.pool.get('res.users').browse(cr, SUPERUSER_ID, uid, context=context)
act_model, act_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, *self._get_inbox_action_xml_id(cr, uid, context=context))
action = self.pool.get(act_model).read(cr, uid, [act_id], [])[0]
params = context.get('params')
msg_id = model = res_id = None
if params:
msg_id = params.get('message_id')
model = params.get('model')
res_id = params.get('res_id', params.get('id')) # signup automatically generated id instead of res_id
if not msg_id and not (model and res_id):
return action
if msg_id and not (model and res_id):
msg = self.pool.get('mail.message').browse(cr, uid, msg_id, context=context)
if msg.exists():
model, res_id = msg.model, msg.res_id
# if model + res_id found: try to redirect to the document or fallback on the Inbox
if model and res_id:
model_obj = self.pool.get(model)
if model_obj.check_access_rights(cr, uid, 'read', raise_exception=False):
try:
model_obj.check_access_rule(cr, uid, [res_id], 'read', context=context)
action = model_obj.get_access_action(cr, uid, res_id, context=context)
except (osv.except_osv, orm.except_orm):
pass
action.update({
'context': {
'search_default_model': model,
'search_default_res_id': res_id,
}
})
return action
def _get_access_link(self, cr, uid, mail, partner, context=None):
# the parameters to encode for the query and fragment part of url
query = {'db': cr.dbname}
fragment = {
'login': partner.user_ids[0].login,
'action': 'mail.action_mail_redirect',
}
if mail.notification:
fragment['message_id'] = mail.mail_message_id.id
elif mail.model and mail.res_id:
fragment.update(model=mail.model, res_id=mail.res_id)
return "/web?%s#%s" % (urlencode(query), urlencode(fragment))
#------------------------------------------------------
# Email specific
#------------------------------------------------------
def message_get_default_recipients(self, cr, uid, ids, context=None):
if context and context.get('thread_model') and context['thread_model'] in self.pool and context['thread_model'] != self._name:
if hasattr(self.pool[context['thread_model']], 'message_get_default_recipients'):
sub_ctx = dict(context)
sub_ctx.pop('thread_model')
return self.pool[context['thread_model']].message_get_default_recipients(cr, uid, ids, context=sub_ctx)
res = {}
for record in self.browse(cr, SUPERUSER_ID, ids, context=context):
recipient_ids, email_to, email_cc = set(), False, False
if 'partner_id' in self._fields and record.partner_id:
recipient_ids.add(record.partner_id.id)
elif 'email_from' in self._fields and record.email_from:
email_to = record.email_from
elif 'email' in self._fields:
email_to = record.email
res[record.id] = {'partner_ids': list(recipient_ids), 'email_to': email_to, 'email_cc': email_cc}
return res
def message_get_reply_to(self, cr, uid, ids, default=None, context=None):
""" Returns the preferred reply-to email address that is basically
the alias of the document, if it exists. """
if context is None:
context = {}
model_name = context.get('thread_model') or self._name
alias_domain = self.pool['ir.config_parameter'].get_param(cr, uid, "mail.catchall.domain", context=context)
res = dict.fromkeys(ids, False)
# alias domain: check for aliases and catchall
aliases = {}
doc_names = {}
if alias_domain:
if model_name and model_name != 'mail.thread':
alias_ids = self.pool['mail.alias'].search(
cr, SUPERUSER_ID, [
('alias_parent_model_id.model', '=', model_name),
('alias_parent_thread_id', 'in', ids),
('alias_name', '!=', False)
], context=context)
aliases.update(
dict((alias.alias_parent_thread_id, '%s@%s' % (alias.alias_name, alias_domain))
for alias in self.pool['mail.alias'].browse(cr, SUPERUSER_ID, alias_ids, context=context)))
doc_names.update(
dict((ng_res[0], ng_res[1])
for ng_res in self.pool[model_name].name_get(cr, SUPERUSER_ID, aliases.keys(), context=context)))
# left ids: use catchall
left_ids = set(ids).difference(set(aliases.keys()))
if left_ids:
catchall_alias = self.pool['ir.config_parameter'].get_param(cr, uid, "mail.catchall.alias", context=context)
if catchall_alias:
aliases.update(dict((res_id, '%s@%s' % (catchall_alias, alias_domain)) for res_id in left_ids))
# compute name of reply-to
company_name = self.pool['res.users'].browse(cr, SUPERUSER_ID, uid, context=context).company_id.name
for res_id in aliases.keys():
email_name = '%s%s' % (company_name, doc_names.get(res_id) and (' ' + doc_names[res_id]) or '')
email_addr = aliases[res_id]
res[res_id] = formataddr((email_name, email_addr))
left_ids = set(ids).difference(set(aliases.keys()))
if left_ids and default:
res.update(dict((res_id, default) for res_id in left_ids))
return res
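# Hedged example (not part of the original addon) of a computed reply-to
# entry; the company, document name and alias are hypothetical:
#     {42: 'YourCompany Project X <project-x@mycompany.example>'}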
def message_get_email_values(self, cr, uid, id, notif_mail=None, context=None):
""" Get specific notification email values to store on the notification
mail_mail. Void method, inherit it to add custom values. """
res = dict()
return res
#------------------------------------------------------
# Mail gateway
#------------------------------------------------------
def message_capable_models(self, cr, uid, context=None):
""" Used by the plugin addon, based for plugin_outlook and others. """
ret_dict = {}
for model_name in self.pool.obj_list():
model = self.pool[model_name]
if hasattr(model, "message_process") and hasattr(model, "message_post"):
ret_dict[model_name] = model._description
return ret_dict
def _message_find_partners(self, cr, uid, message, header_fields=['From'], context=None):
""" Find partners related to some header fields of the message.
:param string message: an email.message instance """
s = ', '.join([decode(message.get(h)) for h in header_fields if message.get(h)])
return filter(lambda x: x, self._find_partner_from_emails(cr, uid, None, tools.email_split(s), context=context))
def message_route_verify(self, cr, uid, message, message_dict, route, update_author=True, assert_model=True, create_fallback=True, allow_private=False, context=None):
""" Verify route validity. Check and rules:
1 - if thread_id -> check that document effectively exists; otherwise
fallback on a message_new by resetting thread_id
2 - check that message_update exists if thread_id is set; or at least
that message_new exist
[ - find author_id if update_author is set]
3 - if there is an alias, check alias_contact:
'followers' and thread_id:
check on target document that the author is in the followers
'followers' and alias_parent_thread_id:
check on alias parent document that the author is in the
followers
'partners': check that author_id is set
"""
assert isinstance(route, (list, tuple)), 'A route should be a list or a tuple'
assert len(route) == 5, 'A route should contain 5 elements: model, thread_id, custom_values, uid, alias record'
message_id = message.get('Message-Id')
email_from = decode_header(message, 'From')
author_id = message_dict.get('author_id')
model, thread_id, alias = route[0], route[1], route[4]
model_pool = None
def _create_bounce_email():
mail_mail = self.pool.get('mail.mail')
mail_id = mail_mail.create(cr, uid, {
'body_html': '<div><p>Hello,</p>'
'<p>The following email sent to %s cannot be accepted because this is '
'a private email address. Only allowed people can contact us at this address.</p></div>'
'<blockquote>%s</blockquote>' % (message.get('to'), message_dict.get('body')),
'subject': 'Re: %s' % message.get('subject'),
'email_to': message.get('from'),
'auto_delete': True,
}, context=context)
mail_mail.send(cr, uid, [mail_id], context=context)
def _warn(message):
_logger.warning('Routing mail with Message-Id %s: route %s: %s',
message_id, route, message)
# Wrong model
if model and not model in self.pool:
if assert_model:
assert model in self.pool, 'Routing: unknown target model %s' % model
_warn('unknown target model %s' % model)
return ()
elif model:
model_pool = self.pool[model]
# Private message: should not contain any thread_id
if not model and thread_id:
if assert_model:
if thread_id:
raise ValueError('Routing: posting a message without model should be with a null res_id (private message), received %s.' % thread_id)
_warn('posting a message without model should be with a null res_id (private message), received %s resetting thread_id' % thread_id)
thread_id = 0
# Private message: should have a parent_id (only answers)
if not model and not message_dict.get('parent_id'):
if assert_model:
if not message_dict.get('parent_id'):
raise ValueError('Routing: posting a message without model should be with a parent_id (private message).')
_warn('posting a message without model should be with a parent_id (private message), skipping')
return False
# Existing Document: check if exists; if not, fallback on create if allowed
if thread_id and not model_pool.exists(cr, uid, thread_id):
if create_fallback:
_warn('reply to missing document (%s,%s), fall back on new document creation' % (model, thread_id))
thread_id = None
elif assert_model:
assert model_pool.exists(cr, uid, thread_id), 'Routing: reply to missing document (%s,%s)' % (model, thread_id)
else:
_warn('reply to missing document (%s,%s), skipping' % (model, thread_id))
return False
# Existing Document: check model accepts the mailgateway
if thread_id and model and not hasattr(model_pool, 'message_update'):
if create_fallback:
_warn('model %s does not accept document update, fall back on document creation' % model)
thread_id = None
elif assert_model:
assert hasattr(model_pool, 'message_update'), 'Routing: model %s does not accept document update, crashing' % model
else:
_warn('model %s does not accept document update, skipping' % model)
return False
# New Document: check model accepts the mailgateway
if not thread_id and model and not hasattr(model_pool, 'message_new'):
if assert_model:
if not hasattr(model_pool, 'message_new'):
raise ValueError(
'Model %s does not accept document creation, crashing' % model
)
_warn('model %s does not accept document creation, skipping' % model)
return False
# Update message author if asked
# We do it now because we need it for aliases (contact settings)
if not author_id and update_author:
author_ids = self._find_partner_from_emails(cr, uid, thread_id, [email_from], model=model, context=context)
if author_ids:
author_id = author_ids[0]
message_dict['author_id'] = author_id
# Alias: check alias_contact settings
if alias and alias.alias_contact == 'followers' and (thread_id or alias.alias_parent_thread_id):
if thread_id:
obj = self.pool[model].browse(cr, uid, thread_id, context=context)
else:
obj = self.pool[alias.alias_parent_model_id.model].browse(cr, uid, alias.alias_parent_thread_id, context=context)
if not author_id or not author_id in [fol.id for fol in obj.message_follower_ids]:
_warn('alias %s restricted to internal followers, skipping' % alias.alias_name)
_create_bounce_email()
return False
elif alias and alias.alias_contact == 'partners' and not author_id:
_warn('alias %s does not accept unknown author, skipping' % alias.alias_name)
_create_bounce_email()
return False
if not model and not thread_id and not alias and not allow_private:
return ()
return (model, thread_id, route[2], route[3], None if context.get('drop_alias', False) else route[4])
def message_route(self, cr, uid, message, message_dict, model=None, thread_id=None,
custom_values=None, context=None):
"""Attempt to figure out the correct target model, thread_id,
custom_values and user_id to use for an incoming message.
Multiple values may be returned, if a message had multiple
recipients matching existing mail.aliases, for example.
The following heuristics are used, in this order:
1. If the message replies to an existing thread_id, and
properly contains the thread model in the 'In-Reply-To'
header, use this model/thread_id pair, and ignore
custom_value (not needed as no creation will take place)
2. Look for a mail.alias entry matching the message
recipient, and use the corresponding model, thread_id,
custom_values and user_id.
3. Fallback to the ``model``, ``thread_id`` and ``custom_values``
provided.
4. If all the above fails, raise an exception.
:param string message: an email.message instance
:param dict message_dict: dictionary holding message variables
:param string model: the fallback model to use if the message
does not match any of the currently configured mail aliases
(may be None if a matching alias is supposed to be present)
:type dict custom_values: optional dictionary of default field values
to pass to ``message_new`` if a new record needs to be created.
Ignored if the thread record already exists, and also if a
matching mail.alias was found (aliases define their own defaults)
:param int thread_id: optional ID of the record/thread from ``model``
to which this mail should be attached. Only used if the message
does not reply to an existing thread and does not match any mail alias.
:return: list of [model, thread_id, custom_values, user_id, alias]
:raises: ValueError, TypeError
"""
if not isinstance(message, Message):
raise TypeError('message must be an email.message.Message at this point')
mail_msg_obj = self.pool['mail.message']
mail_alias = self.pool.get('mail.alias')
fallback_model = model
# Get email.message.Message variables for future processing
message_id = message.get('Message-Id')
email_from = decode_header(message, 'From')
email_to = decode_header(message, 'To')
references = decode_header(message, 'References')
in_reply_to = decode_header(message, 'In-Reply-To').strip()
thread_references = references or in_reply_to
# 0. First check if this is a bounce message or not.
# See http://datatracker.ietf.org/doc/rfc3462/?include_text=1
# As not all MTAs respect this RFC (googlemail is one of them),
# we also need to verify whether the message comes from "mailer-daemon"
localpart = (tools.email_split(email_from) or [''])[0].split('@', 1)[0].lower()
if message.get_content_type() == 'multipart/report' or localpart == 'mailer-daemon':
_logger.info("Not routing bounce email from %s to %s with Message-Id %s",
email_from, email_to, message_id)
return []
# 1. message is a reply to an existing message (exact match of message_id)
ref_match = thread_references and tools.reference_re.search(thread_references)
msg_references = mail_header_msgid_re.findall(thread_references)
mail_message_ids = mail_msg_obj.search(cr, uid, [('message_id', 'in', msg_references)], context=context)
if ref_match and mail_message_ids:
original_msg = mail_msg_obj.browse(cr, SUPERUSER_ID, mail_message_ids[0], context=context)
model, thread_id = original_msg.model, original_msg.res_id
alias_ids = mail_alias.search(cr, uid, [('alias_name', '=', (tools.email_split(email_to) or [''])[0].split('@', 1)[0].lower())])
alias = None
if alias_ids:
alias = mail_alias.browse(cr, uid, [alias_ids[0]], context=context)
route = self.message_route_verify(
cr, uid, message, message_dict,
(model, thread_id, custom_values, uid, alias),
update_author=True, assert_model=False, create_fallback=True, context=dict(context, drop_alias=True))
if route:
_logger.info(
'Routing mail from %s to %s with Message-Id %s: direct reply to msg: model: %s, thread_id: %s, custom_values: %s, uid: %s',
email_from, email_to, message_id, model, thread_id, custom_values, uid)
return [route]
elif route is False:
return []
# 2. message is a reply to an existing thread (6.1 compatibility)
if ref_match:
reply_thread_id = int(ref_match.group(1))
reply_model = ref_match.group(2) or fallback_model
reply_hostname = ref_match.group(3)
local_hostname = socket.gethostname()
# do not match forwarded emails from another OpenERP system (thread_id collision!)
if local_hostname == reply_hostname:
thread_id, model = reply_thread_id, reply_model
if thread_id and model in self.pool:
model_obj = self.pool[model]
compat_mail_msg_ids = mail_msg_obj.search(
cr, uid, [
('message_id', '=', False),
('model', '=', model),
('res_id', '=', thread_id),
], context=context)
if compat_mail_msg_ids and model_obj.exists(cr, uid, thread_id) and hasattr(model_obj, 'message_update'):
route = self.message_route_verify(
cr, uid, message, message_dict,
(model, thread_id, custom_values, uid, None),
update_author=True, assert_model=True, create_fallback=True, context=context)
if route:
# parent is invalid for a compat-reply
message_dict.pop('parent_id', None)
_logger.info(
'Routing mail from %s to %s with Message-Id %s: direct thread reply (compat-mode) to model: %s, thread_id: %s, custom_values: %s, uid: %s',
email_from, email_to, message_id, model, thread_id, custom_values, uid)
return [route]
elif route is False:
return []
# 3. Reply to a private message
if in_reply_to:
mail_message_ids = mail_msg_obj.search(cr, uid, [
('message_id', '=', in_reply_to),
'!', ('message_id', 'ilike', 'reply_to')
], limit=1, context=context)
if mail_message_ids:
mail_message = mail_msg_obj.browse(cr, uid, mail_message_ids[0], context=context)
route = self.message_route_verify(cr, uid, message, message_dict,
(mail_message.model, mail_message.res_id, custom_values, uid, None),
update_author=True, assert_model=True, create_fallback=True, allow_private=True, context=context)
if route:
_logger.info(
'Routing mail from %s to %s with Message-Id %s: direct reply to a private message: %s, custom_values: %s, uid: %s',
email_from, email_to, message_id, mail_message.id, custom_values, uid)
return [route]
elif route is False:
return []
# no route found for a matching reference (or reply), so parent is invalid
message_dict.pop('parent_id', None)
# 4. Look for a matching mail.alias entry
# Delivered-To is a safe bet in most modern MTAs, but we have to fallback on To + Cc values
# for all the odd MTAs out there, as there is no standard header for the envelope's `rcpt_to` value.
rcpt_tos = \
','.join([decode_header(message, 'Delivered-To'),
decode_header(message, 'To'),
decode_header(message, 'Cc'),
decode_header(message, 'Resent-To'),
decode_header(message, 'Resent-Cc')])
local_parts = [e.split('@')[0] for e in tools.email_split(rcpt_tos)]
if local_parts:
alias_ids = mail_alias.search(cr, uid, [('alias_name', 'in', local_parts)])
if alias_ids:
routes = []
for alias in mail_alias.browse(cr, uid, alias_ids, context=context):
user_id = alias.alias_user_id.id
if not user_id:
# TDE note: this could cause crashes, because there is no guarantee that the user
# that sent the email has the right to create or modify a new document
# Fallback on user_id = uid
# Note: recognized partners will be added as followers anyway
# user_id = self._message_find_user_id(cr, uid, message, context=context)
user_id = uid
_logger.info('No matching user_id for the alias %s', alias.alias_name)
route = (alias.alias_model_id.model, alias.alias_force_thread_id, eval(alias.alias_defaults), user_id, alias)
route = self.message_route_verify(cr, uid, message, message_dict, route,
update_author=True, assert_model=True, create_fallback=True, context=context)
if route:
_logger.info(
'Routing mail from %s to %s with Message-Id %s: direct alias match: %r',
email_from, email_to, message_id, route)
routes.append(route)
return routes
# 5. Fallback to the provided parameters, if they work
if not thread_id:
# Legacy: fallback to matching [ID] in the Subject
match = tools.res_re.search(decode_header(message, 'Subject'))
thread_id = match and match.group(1)
# Convert into int (bug spotted in 7.0 because of str)
try:
thread_id = int(thread_id)
except (ValueError, TypeError):
thread_id = False
route = self.message_route_verify(cr, uid, message, message_dict,
(fallback_model, thread_id, custom_values, uid, None),
update_author=True, assert_model=True, context=context)
if route:
_logger.info(
'Routing mail from %s to %s with Message-Id %s: fallback to model:%s, thread_id:%s, custom_values:%s, uid:%s',
email_from, email_to, message_id, fallback_model, thread_id, custom_values, uid)
return [route]
# ValueError if no routes found and if no bounce occurred
raise ValueError(
'No possible route found for incoming message from %s to %s (Message-Id %s:). '
'Create an appropriate mail.alias or force the destination model.' %
(email_from, email_to, message_id)
)
def message_route_process(self, cr, uid, message, message_dict, routes, context=None):
# postpone setting message_dict.partner_ids after message_post, to avoid double notifications
context = dict(context or {})
partner_ids = message_dict.pop('partner_ids', [])
thread_id = False
for model, thread_id, custom_values, user_id, alias in routes or ():
if self._name == 'mail.thread':
context['thread_model'] = model
if model:
model_pool = self.pool[model]
if not (thread_id and hasattr(model_pool, 'message_update') or hasattr(model_pool, 'message_new')):
raise ValueError(
"Undeliverable mail with Message-Id %s, model %s does not accept incoming emails" %
(message_dict['message_id'], model)
)
# disable subscriptions during message_new/update to avoid having the system user running the
# email gateway become a follower of all inbound messages
nosub_ctx = dict(context, mail_create_nosubscribe=True, mail_create_nolog=True)
if thread_id and hasattr(model_pool, 'message_update'):
model_pool.message_update(cr, user_id, [thread_id], message_dict, context=nosub_ctx)
else:
# if a new thread is created, parent is irrelevant
message_dict.pop('parent_id', None)
thread_id = model_pool.message_new(cr, user_id, message_dict, custom_values, context=nosub_ctx)
else:
if thread_id:
raise ValueError("Posting a message without model should be with a null res_id, to create a private message.")
model_pool = self.pool.get('mail.thread')
if not hasattr(model_pool, 'message_post'):
context['thread_model'] = model
model_pool = self.pool['mail.thread']
new_msg_id = model_pool.message_post(cr, uid, [thread_id], context=context, subtype='mail.mt_comment', **message_dict)
if partner_ids:
# postponed after message_post, because this is an external message and we don't want to create
# duplicate emails due to notifications
self.pool.get('mail.message').write(cr, uid, [new_msg_id], {'partner_ids': partner_ids}, context=context)
return thread_id
def message_process(self, cr, uid, model, message, custom_values=None,
save_original=False, strip_attachments=False,
thread_id=None, context=None):
""" Process an incoming RFC2822 email message, relying on
``mail.message.parse()`` for the parsing operation,
and ``message_route()`` to figure out the target model.
Once the target model is known, its ``message_new`` method
is called with the new message (if the thread record did not exist)
or its ``message_update`` method (if it did).
There is a special case where the target model is False: a reply
to a private message. In this case, we skip the message_new /
message_update step, to just post a new message using mail_thread
message_post.
:param string model: the fallback model to use if the message
does not match any of the currently configured mail aliases
(may be None if a matching alias is supposed to be present)
:param message: source of the RFC2822 message
:type message: string or xmlrpclib.Binary
:type dict custom_values: optional dictionary of field values
to pass to ``message_new`` if a new record needs to be created.
Ignored if the thread record already exists, and also if a
matching mail.alias was found (aliases define their own defaults)
:param bool save_original: whether to keep a copy of the original
email source attached to the message after it is imported.
:param bool strip_attachments: whether to strip all attachments
before processing the message, in order to save some space.
:param int thread_id: optional ID of the record/thread from ``model``
to which this mail should be attached. When provided, this
overrides the automatic detection based on the message
headers.
"""
if context is None:
context = {}
# extract message bytes - we are forced to pass the message as binary because
# we don't know its encoding until we parse its headers and hence can't
# convert it to utf-8 for transport between the mailgate script and here.
if isinstance(message, xmlrpclib.Binary):
message = str(message.data)
# Warning: message_from_string doesn't always work correctly on unicode,
# we must use utf-8 strings here :-(
if isinstance(message, unicode):
message = message.encode('utf-8')
msg_txt = email.message_from_string(message)
# parse the message, verify we are not in a loop by checking message_id is not duplicated
msg = self.message_parse(cr, uid, msg_txt, save_original=save_original, context=context)
if strip_attachments:
msg.pop('attachments', None)
if msg.get('message_id'): # should always be True as message_parse generates one if missing
existing_msg_ids = self.pool.get('mail.message').search(cr, SUPERUSER_ID, [
('message_id', '=', msg.get('message_id')),
], context=context)
if existing_msg_ids:
_logger.info('Ignored mail from %s to %s with Message-Id %s: found duplicated Message-Id during processing',
msg.get('from'), msg.get('to'), msg.get('message_id'))
return False
# find possible routes for the message
routes = self.message_route(cr, uid, msg_txt, msg, model, thread_id, custom_values, context=context)
thread_id = self.message_route_process(cr, uid, msg_txt, msg, routes, context=context)
return thread_id
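# Hedged usage sketch (not part of the original addon): how a mail gateway
# script typically feeds a raw RFC2822 message to this method; the file name
# is hypothetical and model=None lets alias routing pick the target model.
#     raw = open('incoming.eml').read()
#     self.pool['mail.thread'].message_process(cr, SUPERUSER_ID, None, raw, save_original=True)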
def message_new(self, cr, uid, msg_dict, custom_values=None, context=None):
"""Called by ``message_process`` when a new message is received
for a given thread model, if the message did not belong to
an existing thread.
The default behavior is to create a new record of the corresponding
model (based on some very basic info extracted from the message).
Additional behavior may be implemented by overriding this method.
:param dict msg_dict: a map containing the email details and
attachments. See ``message_process`` and
``mail.message.parse`` for details.
:param dict custom_values: optional dictionary of additional
field values to pass to create()
when creating the new thread record.
Be careful, these values may override
any other values coming from the message.
:param dict context: if a ``thread_model`` value is present
in the context, its value will be used
to determine the model of the record
to create (instead of the current model).
:rtype: int
:return: the id of the newly created thread object
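        Override sketch (hypothetical model ``my_model`` and helper
        ``_guess_partner``, shown for illustration only)::

            def message_new(self, cr, uid, msg_dict, custom_values=None, context=None):
                # pre-fill a field from the parsed message before delegating
                defaults = {'partner_id': self._guess_partner(cr, uid, msg_dict, context=context)}
                defaults.update(custom_values or {})
                return super(my_model, self).message_new(
                    cr, uid, msg_dict, custom_values=defaults, context=context)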
"""
if context is None:
context = {}
data = {}
if isinstance(custom_values, dict):
data = custom_values.copy()
model = context.get('thread_model') or self._name
model_pool = self.pool[model]
fields = model_pool.fields_get(cr, uid, context=context)
if 'name' in fields and not data.get('name'):
data['name'] = msg_dict.get('subject', '')
res_id = model_pool.create(cr, uid, data, context=context)
return res_id
def message_update(self, cr, uid, ids, msg_dict, update_vals=None, context=None):
"""Called by ``message_process`` when a new message is received
for an existing thread. The default behavior is to update the record
with update_vals taken from the incoming email.
Additional behavior may be implemented by overriding this
method.
:param dict msg_dict: a map containing the email details and
attachments. See ``message_process`` and
``mail.message.parse()`` for details.
:param dict update_vals: a dict containing values to update records
given their ids; if the dict is None or is
void, no write operation is performed.
"""
if update_vals:
self.write(cr, uid, ids, update_vals, context=context)
return True
def _message_extract_payload(self, message, save_original=False):
"""Extract body as HTML and attachments from the mail message"""
attachments = []
body = u''
if save_original:
attachments.append(('original_email.eml', message.as_string()))
# Be careful, content-type may contain tricky content like in the
# following example so test the MIME type with startswith()
#
# Content-Type: multipart/related;
# boundary="_004_3f1e4da175f349248b8d43cdeb9866f1AMSPR06MB343eurprd06pro_";
# type="text/html"
if not message.is_multipart() or message.get('content-type', '').startswith("text/"):
encoding = message.get_content_charset()
body = message.get_payload(decode=True)
body = tools.ustr(body, encoding, errors='replace')
if message.get_content_type() == 'text/plain':
# text/plain -> <pre/>
body = tools.append_content_to_html(u'', body, preserve=True)
else:
alternative = False
mixed = False
html = u''
for part in message.walk():
if part.get_content_type() == 'multipart/alternative':
alternative = True
if part.get_content_type() == 'multipart/mixed':
mixed = True
if part.get_content_maintype() == 'multipart':
continue # skip container
                # part.get_filename returns the decoded value if it can decode it, the coded value otherwise.
                # The original get_filename is not able to decode iso-8859-1 (for instance); therefore,
                # iso-encoded attachment names cannot be decoded properly with get_filename.
                # The code below partially copies the original get_filename method, but handles more encodings.
                filename = part.get_param('filename', None, 'content-disposition')
                if not filename:
                    filename = part.get_param('name', None)
                if filename:
                    if isinstance(filename, tuple):
                        # RFC2231
                        filename = email.utils.collapse_rfc2231_value(filename).strip()
                    else:
                        filename = decode(filename)
encoding = part.get_content_charset() # None if attachment
# 1) Explicit Attachments -> attachments
if filename or part.get('content-disposition', '').strip().startswith('attachment'):
attachments.append((filename or 'attachment', part.get_payload(decode=True)))
continue
# 2) text/plain -> <pre/>
if part.get_content_type() == 'text/plain' and (not alternative or not body):
body = tools.append_content_to_html(body, tools.ustr(part.get_payload(decode=True),
encoding, errors='replace'), preserve=True)
# 3) text/html -> raw
elif part.get_content_type() == 'text/html':
                    # multipart/alternative has one text part and one html part; keep only the html one.
                    # multipart/mixed allows several html parts; append the html content.
append_content = not alternative or (html and mixed)
html = tools.ustr(part.get_payload(decode=True), encoding, errors='replace')
if not append_content:
body = html
else:
body = tools.append_content_to_html(body, html, plaintext=False)
# 4) Anything else -> attachment
else:
attachments.append((filename or 'attachment', part.get_payload(decode=True)))
return body, attachments
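    # Illustrative sketch (not part of the original module): a simple
    # multipart/alternative message goes through the 'else' branch above;
    # the text/plain content is appended first, then replaced by the
    # text/html part:
    #
    #   raw = ("Content-Type: multipart/alternative; boundary=b\n\n"
    #          "--b\nContent-Type: text/plain\n\nhello\n"
    #          "--b\nContent-Type: text/html\n\n<p>hello</p>\n--b--\n")
    #   msg = email.message_from_string(raw)
    #   body, attachments = self._message_extract_payload(msg)
    #   # body holds the html part (u'<p>hello</p>'), attachments == []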
def message_parse(self, cr, uid, message, save_original=False, context=None):
"""Parses a string or email.message.Message representing an
RFC-2822 email, and returns a generic dict holding the
message details.
:param message: the message to parse
:type message: email.message.Message | string | unicode
:param bool save_original: whether the returned dict
should include an ``original`` attachment containing
the source of the message
:rtype: dict
:return: A dict with the following structure, where each
field may not be present if missing in original
message::
{ 'message_id': msg_id,
'subject': subject,
'from': from,
'to': to,
'cc': cc,
'body': unified_body,
'attachments': [('file1', 'bytes'),
                                 ('file2', 'bytes')]
}
"""
msg_dict = {
'type': 'email',
}
if not isinstance(message, Message):
if isinstance(message, unicode):
# Warning: message_from_string doesn't always work correctly on unicode,
# we must use utf-8 strings here :-(
message = message.encode('utf-8')
message = email.message_from_string(message)
message_id = message['message-id']
if not message_id:
            # Very unusual situation, but we should be fault-tolerant here
message_id = "<%s@localhost>" % time.time()
_logger.debug('Parsing Message without message-id, generating a random one: %s', message_id)
msg_dict['message_id'] = message_id
if message.get('Subject'):
msg_dict['subject'] = decode(message.get('Subject'))
# Envelope fields not stored in mail.message but made available for message_new()
msg_dict['from'] = decode(message.get('from'))
msg_dict['to'] = decode(message.get('to'))
msg_dict['cc'] = decode(message.get('cc'))
msg_dict['email_from'] = decode(message.get('from'))
partner_ids = self._message_find_partners(cr, uid, message, ['To', 'Cc'], context=context)
msg_dict['partner_ids'] = [(4, partner_id) for partner_id in partner_ids]
if message.get('Date'):
try:
date_hdr = decode(message.get('Date'))
parsed_date = dateutil.parser.parse(date_hdr, fuzzy=True)
if parsed_date.utcoffset() is None:
# naive datetime, so we arbitrarily decide to make it
# UTC, there's no better choice. Should not happen,
# as RFC2822 requires timezone offset in Date headers.
stored_date = parsed_date.replace(tzinfo=pytz.utc)
else:
stored_date = parsed_date.astimezone(tz=pytz.utc)
except Exception:
_logger.warning('Failed to parse Date header %r in incoming mail '
'with message-id %r, assuming current date/time.',
message.get('Date'), message_id)
stored_date = datetime.datetime.now()
msg_dict['date'] = stored_date.strftime(tools.DEFAULT_SERVER_DATETIME_FORMAT)
if message.get('In-Reply-To'):
parent_ids = self.pool.get('mail.message').search(cr, uid, [('message_id', '=', decode(message['In-Reply-To'].strip()))])
if parent_ids:
msg_dict['parent_id'] = parent_ids[0]
if message.get('References') and 'parent_id' not in msg_dict:
msg_list = mail_header_msgid_re.findall(decode(message['References']))
parent_ids = self.pool.get('mail.message').search(cr, uid, [('message_id', 'in', [x.strip() for x in msg_list])])
if parent_ids:
msg_dict['parent_id'] = parent_ids[0]
msg_dict['body'], msg_dict['attachments'] = self._message_extract_payload(message, save_original=save_original)
return msg_dict
#------------------------------------------------------
    # Recipient suggestion
#------------------------------------------------------
def _message_add_suggested_recipient(self, cr, uid, result, obj, partner=None, email=None, reason='', context=None):
""" Called by message_get_suggested_recipients, to add a suggested
recipient in the result dictionary. The form is :
partner_id, partner_name<partner_email> or partner_name, reason """
if email and not partner:
# get partner info from email
partner_info = self.message_partner_info_from_emails(cr, uid, obj.id, [email], context=context)[0]
if partner_info.get('partner_id'):
partner = self.pool.get('res.partner').browse(cr, SUPERUSER_ID, [partner_info['partner_id']], context=context)[0]
if email and email in [val[1] for val in result[obj.id]]: # already existing email -> skip
return result
if partner and partner in obj.message_follower_ids: # recipient already in the followers -> skip
return result
if partner and partner.id in [val[0] for val in result[obj.id]]: # already existing partner ID -> skip
return result
if partner and partner.email: # complete profile: id, name <email>
result[obj.id].append((partner.id, '%s<%s>' % (partner.name, partner.email), reason))
elif partner: # incomplete profile: id, name
result[obj.id].append((partner.id, '%s' % (partner.name), reason))
else: # unknown partner, we are probably managing an email address
result[obj.id].append((False, email, reason))
return result
def message_get_suggested_recipients(self, cr, uid, ids, context=None):
""" Returns suggested recipients for ids. Those are a list of
tuple (partner_id, partner_name, reason), to be managed by Chatter. """
result = dict((res_id, []) for res_id in ids)
if 'user_id' in self._fields:
for obj in self.browse(cr, SUPERUSER_ID, ids, context=context): # SUPERUSER because of a read on res.users that would crash otherwise
if not obj.user_id or not obj.user_id.partner_id:
continue
self._message_add_suggested_recipient(cr, uid, result, obj, partner=obj.user_id.partner_id, reason=self._fields['user_id'].string, context=context)
return result
def _find_partner_from_emails(self, cr, uid, id, emails, model=None, context=None, check_followers=True):
""" Utility method to find partners from email addresses. The rules are :
1 - check in document (model | self, id) followers
2 - try to find a matching partner that is also an user
3 - try to find a matching partner
:param list emails: list of email addresses
:param string model: model to fetch related record; by default self
is used.
:param boolean check_followers: check in document followers
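            Example (sketch with made-up values): ``_find_partner_from_emails(cr,
            uid, 5, ['"Bob" <bob@example.com>'])`` returns a list with one entry
            per input email, each entry being a partner id or False.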
"""
partner_obj = self.pool['res.partner']
partner_ids = []
obj = None
if id and (model or self._name != 'mail.thread') and check_followers:
if model:
obj = self.pool[model].browse(cr, uid, id, context=context)
else:
obj = self.browse(cr, uid, id, context=context)
for contact in emails:
partner_id = False
email_address = tools.email_split(contact)
if not email_address:
partner_ids.append(partner_id)
continue
email_address = email_address[0]
# first try: check in document's followers
if obj:
for follower in obj.message_follower_ids:
if follower.email == email_address:
partner_id = follower.id
# second try: check in partners that are also users
# Escape special SQL characters in email_address to avoid invalid matches
email_address = (email_address.replace('\\', '\\\\').replace('%', '\\%').replace('_', '\\_'))
email_brackets = "<%s>" % email_address
if not partner_id:
# exact, case-insensitive match
ids = partner_obj.search(cr, SUPERUSER_ID,
[('email', '=ilike', email_address),
('user_ids', '!=', False)],
limit=1, context=context)
if not ids:
# if no match with addr-spec, attempt substring match within name-addr pair
ids = partner_obj.search(cr, SUPERUSER_ID,
[('email', 'ilike', email_brackets),
('user_ids', '!=', False)],
limit=1, context=context)
if ids:
partner_id = ids[0]
# third try: check in partners
if not partner_id:
# exact, case-insensitive match
ids = partner_obj.search(cr, SUPERUSER_ID,
[('email', '=ilike', email_address)],
limit=1, context=context)
if not ids:
# if no match with addr-spec, attempt substring match within name-addr pair
ids = partner_obj.search(cr, SUPERUSER_ID,
[('email', 'ilike', email_brackets)],
limit=1, context=context)
if ids:
partner_id = ids[0]
partner_ids.append(partner_id)
return partner_ids
def message_partner_info_from_emails(self, cr, uid, id, emails, link_mail=False, context=None):
""" Convert a list of emails into a list partner_ids and a list
new_partner_ids. The return value is non conventional because
it is meant to be used by the mail widget.
:return dict: partner_ids and new_partner_ids """
mail_message_obj = self.pool.get('mail.message')
partner_ids = self._find_partner_from_emails(cr, uid, id, emails, context=context)
result = list()
for idx in range(len(emails)):
email_address = emails[idx]
partner_id = partner_ids[idx]
partner_info = {'full_name': email_address, 'partner_id': partner_id}
result.append(partner_info)
            # link existing mails sent from this email address to the newly found partner id
if link_mail and partner_info['partner_id']:
# Escape special SQL characters in email_address to avoid invalid matches
email_address = (email_address.replace('\\', '\\\\').replace('%', '\\%').replace('_', '\\_'))
email_brackets = "<%s>" % email_address
message_ids = mail_message_obj.search(cr, SUPERUSER_ID, [
'|',
('email_from', '=ilike', email_address),
('email_from', 'ilike', email_brackets),
('author_id', '=', False)
], context=context)
if message_ids:
mail_message_obj.write(cr, SUPERUSER_ID, message_ids, {'author_id': partner_info['partner_id']}, context=context)
return result
def _message_preprocess_attachments(self, cr, uid, attachments, attachment_ids, attach_model, attach_res_id, context=None):
""" Preprocess attachments for mail_thread.message_post() or mail_mail.create().
:param list attachments: list of attachment tuples in the form ``(name,content)``,
where content is NOT base64 encoded
        :param list attachment_ids: a list of attachment ids, not in to-many command form
:param str attach_model: the model of the attachments parent record
:param integer attach_res_id: the id of the attachments parent record
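        Example (sketch with made-up ids): ``attachments=[('report.txt',
        'raw bytes')]`` combined with ``attachment_ids=[12]`` yields commands
        such as ``[(4, 12), (0, 0, {'name': 'report.txt', ...})]``, ready for
        the many2many attachment field of mail.message.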
"""
Attachment = self.pool['ir.attachment']
m2m_attachment_ids = []
if attachment_ids:
filtered_attachment_ids = Attachment.search(cr, SUPERUSER_ID, [
('res_model', '=', 'mail.compose.message'),
('create_uid', '=', uid),
('id', 'in', attachment_ids)], context=context)
if filtered_attachment_ids:
Attachment.write(cr, SUPERUSER_ID, filtered_attachment_ids, {'res_model': attach_model, 'res_id': attach_res_id}, context=context)
m2m_attachment_ids += [(4, id) for id in attachment_ids]
        # Handle the attachments parameter: a list of (name, content) tuples
for name, content in attachments:
if isinstance(content, unicode):
content = content.encode('utf-8')
data_attach = {
'name': name,
'datas': base64.b64encode(str(content)),
'datas_fname': name,
'description': name,
'res_model': attach_model,
'res_id': attach_res_id,
}
m2m_attachment_ids.append((0, 0, data_attach))
return m2m_attachment_ids
@api.cr_uid_ids_context
def message_post(self, cr, uid, thread_id, body='', subject=None, type='notification',
subtype=None, parent_id=False, attachments=None, context=None,
content_subtype='html', **kwargs):
""" Post a new message in an existing thread, returning the new
mail.message ID.
:param int thread_id: thread ID to post into, or list with one ID;
if False/0, mail.message model will also be set as False
:param str body: body of the message, usually raw HTML that will
be sanitized
:param str type: see mail_message.type field
        :param str content_subtype: if 'plaintext', convert the body into HTML
:param int parent_id: handle reply to a previous message by adding the
parent partners to the message in case of private discussion
        :param list attachments: list of attachment tuples in the form
            ``(name,content)``, where content is NOT base64 encoded
Extra keyword arguments will be used as default column values for the
new mail.message record. Special cases:
        - attachment_ids: assumed not to be attached to any document yet; attach them
            to the related document. Should only be set by Chatter.
:return int: ID of newly created mail.message
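        Usage sketch (made-up record and partner ids, for illustration)::

            self.pool['crm.lead'].message_post(
                cr, uid, 42, body='<p>Ping</p>', type='comment',
                subtype='mail.mt_comment', partner_ids=[(4, 7)])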
"""
if context is None:
context = {}
if attachments is None:
attachments = {}
mail_message = self.pool.get('mail.message')
ir_attachment = self.pool.get('ir.attachment')
assert (not thread_id) or \
isinstance(thread_id, (int, long)) or \
(isinstance(thread_id, (list, tuple)) and len(thread_id) == 1), \
"Invalid thread_id; should be 0, False, an ID or a list with one ID"
if isinstance(thread_id, (list, tuple)):
thread_id = thread_id[0]
# if we're processing a message directly coming from the gateway, the destination model was
# set in the context.
model = False
if thread_id:
model = context.get('thread_model', False) if self._name == 'mail.thread' else self._name
if model and model != self._name and hasattr(self.pool[model], 'message_post'):
del context['thread_model']
return self.pool[model].message_post(cr, uid, thread_id, body=body, subject=subject, type=type, subtype=subtype, parent_id=parent_id, attachments=attachments, context=context, content_subtype=content_subtype, **kwargs)
        # 0: Find the message's author, because we need it for private discussion
author_id = kwargs.get('author_id')
if author_id is None: # keep False values
author_id = self.pool.get('mail.message')._get_default_author(cr, uid, context=context)
        # 1: Handle content subtype: if plaintext, convert it into HTML
if content_subtype == 'plaintext':
body = tools.plaintext2html(body)
        # 2: Private message: add recipients (recipients and author of the parent message), minus the current author
        #   + legacy-code management (! we manage only 4 and 6 commands)
partner_ids = set()
kwargs_partner_ids = kwargs.pop('partner_ids', [])
for partner_id in kwargs_partner_ids:
if isinstance(partner_id, (list, tuple)) and partner_id[0] == 4 and len(partner_id) == 2:
partner_ids.add(partner_id[1])
if isinstance(partner_id, (list, tuple)) and partner_id[0] == 6 and len(partner_id) == 3:
partner_ids |= set(partner_id[2])
elif isinstance(partner_id, (int, long)):
partner_ids.add(partner_id)
else:
pass # we do not manage anything else
if parent_id and not model:
parent_message = mail_message.browse(cr, uid, parent_id, context=context)
private_followers = set([partner.id for partner in parent_message.partner_ids])
if parent_message.author_id:
private_followers.add(parent_message.author_id.id)
private_followers -= set([author_id])
partner_ids |= private_followers
# 3. Attachments
# - HACK TDE FIXME: Chatter: attachments linked to the document (not done JS-side), load the message
attachment_ids = self._message_preprocess_attachments(cr, uid, attachments, kwargs.pop('attachment_ids', []), model, thread_id, context)
# 4: mail.message.subtype
subtype_id = False
if subtype:
if '.' not in subtype:
subtype = 'mail.%s' % subtype
subtype_id = self.pool.get('ir.model.data').xmlid_to_res_id(cr, uid, subtype)
# automatically subscribe recipients if asked to
if context.get('mail_post_autofollow') and thread_id and partner_ids:
partner_to_subscribe = partner_ids
if context.get('mail_post_autofollow_partner_ids'):
partner_to_subscribe = filter(lambda item: item in context.get('mail_post_autofollow_partner_ids'), partner_ids)
self.message_subscribe(cr, uid, [thread_id], list(partner_to_subscribe), context=context)
# _mail_flat_thread: automatically set free messages to the first posted message
if self._mail_flat_thread and model and not parent_id and thread_id:
message_ids = mail_message.search(cr, uid, ['&', ('res_id', '=', thread_id), ('model', '=', model), ('type', '=', 'email')], context=context, order="id ASC", limit=1)
if not message_ids:
                message_ids = mail_message.search(cr, uid, ['&', ('res_id', '=', thread_id), ('model', '=', model)], context=context, order="id ASC", limit=1)
parent_id = message_ids and message_ids[0] or False
# we want to set a parent: force to set the parent_id to the oldest ancestor, to avoid having more than 1 level of thread
elif parent_id:
message_ids = mail_message.search(cr, SUPERUSER_ID, [('id', '=', parent_id), ('parent_id', '!=', False)], context=context)
# avoid loops when finding ancestors
processed_list = []
if message_ids:
message = mail_message.browse(cr, SUPERUSER_ID, message_ids[0], context=context)
while (message.parent_id and message.parent_id.id not in processed_list):
processed_list.append(message.parent_id.id)
message = message.parent_id
parent_id = message.id
values = kwargs
values.update({
'author_id': author_id,
'model': model,
'res_id': model and thread_id or False,
'body': body,
'subject': subject or False,
'type': type,
'parent_id': parent_id,
'attachment_ids': attachment_ids,
'subtype_id': subtype_id,
'partner_ids': [(4, pid) for pid in partner_ids],
})
# Avoid warnings about non-existing fields
for x in ('from', 'to', 'cc'):
values.pop(x, None)
# Post the message
msg_id = mail_message.create(cr, uid, values, context=context)
# Post-process: subscribe author, update message_last_post
if model and model != 'mail.thread' and thread_id and subtype_id:
# done with SUPERUSER_ID, because on some models users can post only with read access, not necessarily write access
self.write(cr, SUPERUSER_ID, [thread_id], {'message_last_post': fields.datetime.now()}, context=context)
message = mail_message.browse(cr, uid, msg_id, context=context)
if message.author_id and model and thread_id and type != 'notification' and not context.get('mail_create_nosubscribe'):
self.message_subscribe(cr, uid, [thread_id], [message.author_id.id], context=context)
return msg_id
#------------------------------------------------------
# Followers API
#------------------------------------------------------
def message_get_subscription_data(self, cr, uid, ids, user_pid=None, context=None):
""" Wrapper to get subtypes data. """
return self._get_subscription_data(cr, uid, ids, None, None, user_pid=user_pid, context=context)
def message_subscribe_users(self, cr, uid, ids, user_ids=None, subtype_ids=None, context=None):
""" Wrapper on message_subscribe, using users. If user_ids is not
provided, subscribe uid instead. """
if user_ids is None:
user_ids = [uid]
partner_ids = [user.partner_id.id for user in self.pool.get('res.users').browse(cr, uid, user_ids, context=context)]
result = self.message_subscribe(cr, uid, ids, partner_ids, subtype_ids=subtype_ids, context=context)
if partner_ids and result:
self.pool['ir.ui.menu'].clear_cache()
return result
def message_subscribe(self, cr, uid, ids, partner_ids, subtype_ids=None, context=None):
""" Add partners to the records followers. """
if context is None:
context = {}
# not necessary for computation, but saves an access right check
if not partner_ids:
return True
mail_followers_obj = self.pool.get('mail.followers')
subtype_obj = self.pool.get('mail.message.subtype')
user_pid = self.pool.get('res.users').browse(cr, uid, uid, context=context).partner_id.id
if set(partner_ids) == set([user_pid]):
try:
self.check_access_rights(cr, uid, 'read')
self.check_access_rule(cr, uid, ids, 'read')
except (osv.except_osv, orm.except_orm):
return False
else:
self.check_access_rights(cr, uid, 'write')
self.check_access_rule(cr, uid, ids, 'write')
existing_pids_dict = {}
fol_ids = mail_followers_obj.search(cr, SUPERUSER_ID, ['&', '&', ('res_model', '=', self._name), ('res_id', 'in', ids), ('partner_id', 'in', partner_ids)])
for fol in mail_followers_obj.browse(cr, SUPERUSER_ID, fol_ids, context=context):
existing_pids_dict.setdefault(fol.res_id, set()).add(fol.partner_id.id)
# subtype_ids specified: update already subscribed partners
if subtype_ids and fol_ids:
mail_followers_obj.write(cr, SUPERUSER_ID, fol_ids, {'subtype_ids': [(6, 0, subtype_ids)]}, context=context)
# subtype_ids not specified: do not update already subscribed partner, fetch default subtypes for new partners
if subtype_ids is None:
subtype_ids = subtype_obj.search(
cr, uid, [
('default', '=', True), '|', ('res_model', '=', self._name), ('res_model', '=', False)], context=context)
for id in ids:
existing_pids = existing_pids_dict.get(id, set())
new_pids = set(partner_ids) - existing_pids
# subscribe new followers
for new_pid in new_pids:
mail_followers_obj.create(
cr, SUPERUSER_ID, {
'res_model': self._name,
'res_id': id,
'partner_id': new_pid,
'subtype_ids': [(6, 0, subtype_ids)],
}, context=context)
return True
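    # Usage sketch (illustrative ids only): subscribe partners 7 and 8 to
    # record 1 with their default subtypes, or force an explicit subtype list:
    #   self.pool['project.task'].message_subscribe(cr, uid, [1], [7, 8])
    #   self.pool['project.task'].message_subscribe(
    #       cr, uid, [1], [7, 8], subtype_ids=[3])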
def message_unsubscribe_users(self, cr, uid, ids, user_ids=None, context=None):
""" Wrapper on message_subscribe, using users. If user_ids is not
provided, unsubscribe uid instead. """
if user_ids is None:
user_ids = [uid]
partner_ids = [user.partner_id.id for user in self.pool.get('res.users').browse(cr, uid, user_ids, context=context)]
result = self.message_unsubscribe(cr, uid, ids, partner_ids, context=context)
if partner_ids and result:
self.pool['ir.ui.menu'].clear_cache()
return result
def message_unsubscribe(self, cr, uid, ids, partner_ids, context=None):
""" Remove partners from the records followers. """
# not necessary for computation, but saves an access right check
if not partner_ids:
return True
user_pid = self.pool.get('res.users').read(cr, uid, uid, ['partner_id'], context=context)['partner_id'][0]
if set(partner_ids) == set([user_pid]):
self.check_access_rights(cr, uid, 'read')
self.check_access_rule(cr, uid, ids, 'read')
else:
self.check_access_rights(cr, uid, 'write')
self.check_access_rule(cr, uid, ids, 'write')
fol_obj = self.pool['mail.followers']
fol_ids = fol_obj.search(
cr, SUPERUSER_ID, [
('res_model', '=', self._name),
('res_id', 'in', ids),
('partner_id', 'in', partner_ids)
], context=context)
return fol_obj.unlink(cr, SUPERUSER_ID, fol_ids, context=context)
def _message_get_auto_subscribe_fields(self, cr, uid, updated_fields, auto_follow_fields=None, context=None):
""" Returns the list of relational fields linking to res.users that should
trigger an auto subscribe. The default list checks for the fields
- called 'user_id'
- linking to res.users
- with track_visibility set
            In OpenERP V7, this is sufficient for all major addons such as opportunity,
            project, issue, recruitment and sale.
Override this method if a custom behavior is needed about fields
that automatically subscribe users.
"""
if auto_follow_fields is None:
auto_follow_fields = ['user_id']
user_field_lst = []
for name, field in self._fields.items():
if name in auto_follow_fields and name in updated_fields and getattr(field, 'track_visibility', False) and field.comodel_name == 'res.users':
user_field_lst.append(name)
return user_field_lst
def _message_auto_subscribe_notify(self, cr, uid, ids, partner_ids, context=None):
""" Send notifications to the partners automatically subscribed to the thread
Override this method if a custom behavior is needed about partners
that should be notified or messages that should be sent
"""
        # find the first email message of each record and notify the newly subscribed partners so that they get a notification
if partner_ids:
for record_id in ids:
message_obj = self.pool.get('mail.message')
msg_ids = message_obj.search(cr, SUPERUSER_ID, [
('model', '=', self._name),
('res_id', '=', record_id),
('type', '=', 'email')], limit=1, context=context)
if not msg_ids:
msg_ids = message_obj.search(cr, SUPERUSER_ID, [
('model', '=', self._name),
('res_id', '=', record_id)], limit=1, context=context)
if msg_ids:
notification_obj = self.pool.get('mail.notification')
notification_obj._notify(cr, uid, msg_ids[0], partners_to_notify=partner_ids, context=context)
message = message_obj.browse(cr, uid, msg_ids[0], context=context)
if message.parent_id:
partner_ids_to_parent_notify = set(partner_ids).difference(partner.id for partner in message.parent_id.notified_partner_ids)
for partner_id in partner_ids_to_parent_notify:
notification_obj.create(cr, uid, {
'message_id': message.parent_id.id,
'partner_id': partner_id,
'is_read': True,
}, context=context)
def message_auto_subscribe(self, cr, uid, ids, updated_fields, context=None, values=None):
""" Handle auto subscription. Two methods for auto subscription exist:
- tracked res.users relational fields, such as user_id fields. Those fields
must be relation fields toward a res.users record, and must have the
              track_visibility attribute set.
- using subtypes parent relationship: check if the current model being
modified has an header record (such as a project for tasks) whose followers
can be added as followers of the current records. Example of structure
with project and task:
- st_project_1.parent_id = st_task_1
- st_project_1.res_model = 'project.project'
- st_project_1.relation_field = 'project_id'
- st_task_1.model = 'project.task'
:param list updated_fields: list of updated fields to track
:param dict values: updated values; if None, the first record will be browsed
to get the values. Added after releasing 7.0, therefore
                           not merged with updated_fields argument.
"""
subtype_obj = self.pool.get('mail.message.subtype')
follower_obj = self.pool.get('mail.followers')
new_followers = dict()
# fetch auto_follow_fields: res.users relation fields whose changes are tracked for subscription
user_field_lst = self._message_get_auto_subscribe_fields(cr, uid, updated_fields, context=context)
# fetch header subtypes
header_subtype_ids = subtype_obj.search(cr, uid, ['|', ('res_model', '=', False), ('parent_id.res_model', '=', self._name)], context=context)
subtypes = subtype_obj.browse(cr, uid, header_subtype_ids, context=context)
# if no change in tracked field or no change in tracked relational field: quit
relation_fields = set([subtype.relation_field for subtype in subtypes if subtype.relation_field is not False])
if not any(relation in updated_fields for relation in relation_fields) and not user_field_lst:
return True
# legacy behavior: if values is not given, compute the values by browsing
# @TDENOTE: remove me in 8.0
        if values is None:
            values = {}
            record = self.browse(cr, uid, ids[0], context=context)
for updated_field in updated_fields:
field_value = getattr(record, updated_field)
if isinstance(field_value, BaseModel):
field_value = field_value.id
values[updated_field] = field_value
# find followers of headers, update structure for new followers
headers = set()
for subtype in subtypes:
if subtype.relation_field and values.get(subtype.relation_field):
headers.add((subtype.res_model, values.get(subtype.relation_field)))
if headers:
header_domain = ['|'] * (len(headers) - 1)
for header in headers:
header_domain += ['&', ('res_model', '=', header[0]), ('res_id', '=', header[1])]
header_follower_ids = follower_obj.search(
cr, SUPERUSER_ID,
header_domain,
context=context
)
for header_follower in follower_obj.browse(cr, SUPERUSER_ID, header_follower_ids, context=context):
for subtype in header_follower.subtype_ids:
if subtype.parent_id and subtype.parent_id.res_model == self._name:
new_followers.setdefault(header_follower.partner_id.id, set()).add(subtype.parent_id.id)
elif subtype.res_model is False:
new_followers.setdefault(header_follower.partner_id.id, set()).add(subtype.id)
# add followers coming from res.users relational fields that are tracked
user_ids = [values[name] for name in user_field_lst if values.get(name)]
user_pids = [user.partner_id.id for user in self.pool.get('res.users').browse(cr, SUPERUSER_ID, user_ids, context=context)]
for partner_id in user_pids:
new_followers.setdefault(partner_id, None)
for pid, subtypes in new_followers.items():
subtypes = list(subtypes) if subtypes is not None else None
self.message_subscribe(cr, uid, ids, [pid], subtypes, context=context)
self._message_auto_subscribe_notify(cr, uid, ids, user_pids, context=context)
return True
#------------------------------------------------------
# Thread state
#------------------------------------------------------
def message_mark_as_unread(self, cr, uid, ids, context=None):
""" Set as unread. """
partner_id = self.pool.get('res.users').browse(cr, uid, uid, context=context).partner_id.id
        cr.execute('''
            UPDATE mail_notification SET
                is_read=false
            WHERE
                message_id IN (SELECT id FROM mail_message WHERE res_id=ANY(%s) AND model=%s LIMIT 1) AND
                partner_id = %s
        ''', (ids, self._name, partner_id))
self.pool.get('mail.notification').invalidate_cache(cr, uid, ['is_read'], context=context)
return True
def message_mark_as_read(self, cr, uid, ids, context=None):
""" Set as read. """
partner_id = self.pool.get('res.users').browse(cr, uid, uid, context=context).partner_id.id
cr.execute('''
UPDATE mail_notification SET
is_read=true
WHERE
message_id IN (SELECT id FROM mail_message WHERE res_id=ANY(%s) AND model=%s) AND
partner_id = %s
''', (ids, self._name, partner_id))
self.pool.get('mail.notification').invalidate_cache(cr, uid, ['is_read'], context=context)
return True
#------------------------------------------------------
# Thread suggestion
#------------------------------------------------------
def get_suggested_thread(self, cr, uid, removed_suggested_threads=None, context=None):
"""Return a list of suggested threads, sorted by the numbers of followers"""
if context is None:
context = {}
        # TDE HACK: originally by MAT from portal/mail_mail.py, but not working until the inheritance graph bug is solved in trunk
# TDE FIXME: relocate in portal when it won't be necessary to reload the hr.employee model in an additional bridge module
if 'is_portal' in self.pool['res.groups']._fields:
user = self.pool.get('res.users').browse(cr, SUPERUSER_ID, uid, context=context)
if any(group.is_portal for group in user.groups_id):
return []
threads = []
if removed_suggested_threads is None:
removed_suggested_threads = []
thread_ids = self.search(cr, uid, [('id', 'not in', removed_suggested_threads), ('message_is_follower', '=', False)], context=context)
for thread in self.browse(cr, uid, thread_ids, context=context):
data = {
'id': thread.id,
'popularity': len(thread.message_follower_ids),
'name': thread.name,
'image_small': thread.image_small
}
threads.append(data)
return sorted(threads, key=lambda x: (x['popularity'], x['id']), reverse=True)[:3]
def message_change_thread(self, cr, uid, id, new_res_id, new_model, context=None):
"""
        Transfer the messages of a mail thread from one model to another
        :param id : the old res_id of the mail.message
        :param new_res_id : the new res_id of the mail.message
        :param new_model : the name of the new model of the mail.message
        Example : self.pool.get("crm.lead").message_change_thread(cr, uid, 2, 4, "project.issue", context)
        will transfer the thread of the lead (id=2) to the issue (id=4)
"""
        # get the subtype id of the comment message
subtype_res_id = self.pool.get('ir.model.data').xmlid_to_res_id(cr, uid, 'mail.mt_comment', raise_if_not_found=True)
        # get the ids of the comment and non-comment messages of the thread
message_obj = self.pool.get('mail.message')
msg_ids_comment = message_obj.search(cr, uid, [
('model', '=', self._name),
('res_id', '=', id),
('subtype_id', '=', subtype_res_id)], context=context)
msg_ids_not_comment = message_obj.search(cr, uid, [
('model', '=', self._name),
('res_id', '=', id),
('subtype_id', '!=', subtype_res_id)], context=context)
# update the messages
message_obj.write(cr, uid, msg_ids_comment, {"res_id" : new_res_id, "model" : new_model}, context=context)
message_obj.write(cr, uid, msg_ids_not_comment, {"res_id" : new_res_id, "model" : new_model, "subtype_id" : None}, context=context)
return True
|
shakamunyi/sahara
|
refs/heads/master
|
sahara/service/api/v2/images.py
|
3
|
# Copyright (c) 2016 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from sahara import conductor as c
from sahara.utils.openstack import base as b
from sahara.utils.openstack import images as sahara_images
conductor = c.API
# Image Registry
def get_images(name, tags):
return b.execute_with_retries(
sahara_images.image_manager().list_registered, name, tags)
def get_image(**kwargs):
if len(kwargs) == 1 and 'id' in kwargs:
return b.execute_with_retries(
sahara_images.image_manager().get, kwargs['id'])
else:
return b.execute_with_retries(
sahara_images.image_manager().find, **kwargs)
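# Usage sketch (illustrative values): get_image(id=image_id) fetches by id;
# any other keyword combination, e.g. get_image(name='fedora-heat'), falls
# through to the manager's find(**kwargs).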
def get_registered_image(id):
return b.execute_with_retries(
sahara_images.image_manager().get_registered_image, id)
def register_image(image_id, username, description=None):
manager = sahara_images.image_manager()
b.execute_with_retries(
manager.set_image_info, image_id, username, description)
return b.execute_with_retries(manager.get, image_id)
def unregister_image(image_id):
manager = sahara_images.image_manager()
b.execute_with_retries(manager.unset_image_info, image_id)
return b.execute_with_retries(manager.get, image_id)
def get_image_tags(image_id):
return b.execute_with_retries(
sahara_images.image_manager().get, image_id).tags
def set_image_tags(image_id, tags):
manager = sahara_images.image_manager()
image_obj = b.execute_with_retries(manager.get, image_id)
org_tags = frozenset(image_obj.tags)
new_tags = frozenset(tags)
to_add = list(new_tags - org_tags)
to_remove = list(org_tags - new_tags)
if to_add:
b.execute_with_retries(manager.tag, image_id, to_add)
if to_remove:
b.execute_with_retries(manager.untag, image_id, to_remove)
return b.execute_with_retries(manager.get, image_id)
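# Usage sketch (made-up image id and tags): set_image_tags computes the delta
# against the currently stored tags, so tags already present are left alone
# and stale ones are removed:
#   set_image_tags(image_id, ['vanilla', '2.7.1'])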
def remove_image_tags(image_id):
manager = sahara_images.image_manager()
image_obj = b.execute_with_retries(manager.get, image_id)
tags = image_obj.tags
b.execute_with_retries(manager.untag, image_id, tags)
return b.execute_with_retries(manager.get, image_id)
|
greytide/greytide
|
refs/heads/master
|
bot/config.py
|
207
|
# Configuration for the minibot.py bot starts here
server = "irc.rizon.net"
port = 6667
channels = ["#asdfgbus", "#botbus"]
defaultchannel = "#asdfgbus"
nick = "minibot-testing-ss13"
altnick = "minibot-testing-ss13_"
name = "minibot"
ident = "minibot"
realname = "minibot"
password = "CHANGETHIS"
# Configuration ends here
|
arjoly/scikit-learn
|
refs/heads/master
|
sklearn/feature_extraction/setup.py
|
314
|
import os
def configuration(parent_package='', top_path=None):
import numpy
from numpy.distutils.misc_util import Configuration
config = Configuration('feature_extraction', parent_package, top_path)
libraries = []
if os.name == 'posix':
libraries.append('m')
config.add_extension('_hashing',
sources=['_hashing.c'],
include_dirs=[numpy.get_include()],
libraries=libraries)
return config
|
zdary/intellij-community
|
refs/heads/master
|
python/helpers/pycharm/teamcity/flake8_plugin.py
|
31
|
try:
from flake8.formatting import base # noqa
except ImportError:
from teamcity.flake8_v2_plugin import * # noqa
else:
from teamcity.flake8_v3_plugin import * # noqa
|
kumarshivam675/Mobile10X-Hack
|
refs/heads/master
|
build/lib.linux-x86_64-2.7/yowsup/structs/__init__.py
|
70
|
from .protocolentity import ProtocolEntity
from .protocoltreenode import ProtocolTreeNode
|
seba3c/scamera
|
refs/heads/master
|
notifications/admin.py
|
1
|
from django.contrib import admin
from notifications.models import NotificationUserProfile
from notifications.telegram.models import TelegramNotificationHandler, TelegramBot
admin.site.register(NotificationUserProfile)
admin.site.register(TelegramBot)
admin.site.register(TelegramNotificationHandler)
|
jflater/Ederson-hpc
|
refs/heads/master
|
Scripts/primer_compliment.py
|
1
|
"""Find reverse complement of reverse primer, print forward primer"""
# Input file should be in .fa format, with both forward and reverse primer
# the word "reverse" in record.name of each reverse primer
import sys
import screed
from Bio.Seq import Seq
def main():
"""this is the main, see above"""
for record in screed.open(sys.argv[1]):
seq = record.sequence
my_seq = Seq(seq)
if "reverse" in record.name:
print ">" + record.name + "_complement"
print my_seq.reverse_complement()
if "forward" in record.name:
print ">" + record.name + "_complement"
print my_seq.reverse_complement()
if __name__ == '__main__':
main()
|
hzlf/openbroadcast
|
refs/heads/master
|
website/apps/alibrary/migrations/0155_auto__add_namevariation.py
|
1
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'NameVariation'
db.create_table('alibrary_namevariation', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('name', self.gf('django.db.models.fields.CharField')(max_length=200, db_index=True)),
('artist', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['alibrary.Artist'])),
))
db.send_create_signal('alibrary', ['NameVariation'])
def backwards(self, orm):
# Deleting model 'NameVariation'
db.delete_table('alibrary_namevariation')
models = {
'actstream.action': {
'Meta': {'ordering': "('-timestamp',)", 'object_name': 'Action'},
'action_object_content_type': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'action_object'", 'null': 'True', 'to': "orm['contenttypes.ContentType']"}),
'action_object_object_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'actor_content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'actor'", 'to': "orm['contenttypes.ContentType']"}),
'actor_object_id': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'data': ('jsonfield.fields.JSONField', [], {'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'public': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'target_content_type': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'target'", 'null': 'True', 'to': "orm['contenttypes.ContentType']"}),
'target_object_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'verb': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'alibrary.agency': {
'Meta': {'ordering': "('name',)", 'object_name': 'Agency'},
'address': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'artists': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'agencies'", 'to': "orm['alibrary.Artist']", 'through': "orm['alibrary.AgencyArtist']", 'blank': 'True', 'symmetrical': 'False', 'null': 'True'}),
'country': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['l10n.Country']", 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'agencies_creator'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['auth.User']"}),
'd_tags': ('tagging.fields.TagField', [], {'max_length': '1024', 'null': 'True'}),
'description': ('lib.fields.extra.MarkdownTextField', [], {'null': 'True', 'blank': 'True'}),
'description_html': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
'fax': ('phonenumber_field.modelfields.PhoneNumberField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'legacy_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'migrated': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '400'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'agencies_owner'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['auth.User']"}),
'parent': ('mptt.fields.TreeForeignKey', [], {'blank': 'True', 'related_name': "'agency_children'", 'null': 'True', 'to': "orm['alibrary.Agency']"}),
'phone': ('phonenumber_field.modelfields.PhoneNumberField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'publisher': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'agencies_publisher'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['auth.User']"}),
'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'slug': ('django_extensions.db.fields.AutoSlugField', [], {'allow_duplicates': 'False', 'max_length': '50', 'separator': "u'-'", 'blank': 'True', 'populate_from': "'name'", 'overwrite': 'True'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'type': ('django.db.models.fields.CharField', [], {'default': "'unknown'", 'max_length': '12'}),
'updated': ('django.db.models.fields.DateField', [], {'auto_now': 'True', 'blank': 'True'}),
'uuid': ('django.db.models.fields.CharField', [], {'max_length': '36', 'blank': 'True'})
},
'alibrary.agencyartist': {
'Meta': {'object_name': 'AgencyArtist'},
'agency': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['alibrary.Agency']"}),
'artist': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['alibrary.Artist']"}),
'countries': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'agency_countries'", 'symmetrical': 'False', 'to': "orm['l10n.Country']"}),
'exclusive': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'scopes': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'agency_scopes'", 'symmetrical': 'False', 'to': "orm['alibrary.AgencyScope']"})
},
'alibrary.agencyscope': {
'Meta': {'object_name': 'AgencyScope'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '300'})
},
'alibrary.apilookup': {
'Meta': {'ordering': "('created',)", 'object_name': 'APILookup'},
'api_data': ('django.db.models.fields.TextField', [], {'default': "'{}'", 'null': 'True', 'blank': 'True'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'processed': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'max_length': '2'}),
'provider': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '50'}),
'ressource_id': ('django.db.models.fields.CharField', [], {'max_length': '500', 'null': 'True', 'blank': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'uri': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'})
},
'alibrary.artist': {
'Meta': {'ordering': "('name',)", 'object_name': 'Artist'},
'aliases': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'aliases_rel_+'", 'null': 'True', 'to': "orm['alibrary.Artist']"}),
'biography': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'booking_contact': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'country': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['l10n.Country']", 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'artists_creator'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['auth.User']"}),
'd_tags': ('tagging.fields.TagField', [], {'max_length': '1024', 'null': 'True'}),
'date_end': ('django_date_extensions.fields.ApproximateDateField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
'date_start': ('django_date_extensions.fields.ApproximateDateField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
'disable_editing': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'disable_link': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'disambiguation': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'enable_comments': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'excerpt': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'folder': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'artist_folder'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['filer.Folder']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ipi_code': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'blank': 'True'}),
'isni_code': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'blank': 'True'}),
'legacy_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'listed': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'main_image': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'artist_main_image'", 'null': 'True', 'to': "orm['filer.Image']"}),
'members': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['alibrary.Artist']", 'through': "orm['alibrary.ArtistMembership']", 'symmetrical': 'False'}),
'migrated': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'db_index': 'True'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'artists_owner'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['auth.User']"}),
'professions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['alibrary.Profession']", 'through': "orm['alibrary.ArtistProfessions']", 'symmetrical': 'False'}),
'publisher': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'artists_publisher'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['auth.User']"}),
'real_name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'slug': ('django_extensions.db.fields.AutoSlugField', [], {'allow_duplicates': 'False', 'max_length': '50', 'separator': "u'-'", 'blank': 'True', 'populate_from': "'name'", 'overwrite': 'True'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '120', 'null': 'True', 'blank': 'True'}),
'updated': ('django.db.models.fields.DateField', [], {'auto_now': 'True', 'blank': 'True'}),
'uuid': ('django.db.models.fields.CharField', [], {'max_length': '36', 'blank': 'True'})
},
'alibrary.artistmembership': {
'Meta': {'object_name': 'ArtistMembership'},
'child': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'artist_child'", 'to': "orm['alibrary.Artist']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'artist_parent'", 'to': "orm['alibrary.Artist']"}),
'profession': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'artist_membership_profession'", 'null': 'True', 'to': "orm['alibrary.Profession']"})
},
'alibrary.artistplugin': {
'Meta': {'object_name': 'ArtistPlugin', 'db_table': "'cmsplugin_artistplugin'", '_ormbases': ['cms.CMSPlugin']},
'artist': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['alibrary.Artist']"}),
'cmsplugin_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['cms.CMSPlugin']", 'unique': 'True', 'primary_key': 'True'})
},
'alibrary.artistprofessions': {
'Meta': {'object_name': 'ArtistProfessions'},
'artist': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['alibrary.Artist']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'profession': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['alibrary.Profession']"})
},
'alibrary.daypart': {
'Meta': {'ordering': "('day', 'time_start')", 'object_name': 'Daypart'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'day': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'max_length': '1', 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'time_end': ('django.db.models.fields.TimeField', [], {}),
'time_start': ('django.db.models.fields.TimeField', [], {})
},
'alibrary.distributor': {
'Meta': {'ordering': "('name',)", 'object_name': 'Distributor'},
'address': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'code': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'country': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['l10n.Country']", 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'distributors_creator'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['auth.User']"}),
'd_tags': ('tagging.fields.TagField', [], {'max_length': '1024', 'null': 'True'}),
'description': ('lib.fields.extra.MarkdownTextField', [], {'null': 'True', 'blank': 'True'}),
'description_html': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
'fax': ('phonenumber_field.modelfields.PhoneNumberField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'first_placeholder': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Placeholder']", 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'labels': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'distributors'", 'to': "orm['alibrary.Label']", 'through': "orm['alibrary.DistributorLabel']", 'blank': 'True', 'symmetrical': 'False', 'null': 'True'}),
'legacy_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'migrated': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '400'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'distributors_owner'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['auth.User']"}),
'parent': ('mptt.fields.TreeForeignKey', [], {'blank': 'True', 'related_name': "'label_children'", 'null': 'True', 'to': "orm['alibrary.Distributor']"}),
'phone': ('phonenumber_field.modelfields.PhoneNumberField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'publisher': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'distributors_publisher'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['auth.User']"}),
'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'slug': ('django_extensions.db.fields.AutoSlugField', [], {'allow_duplicates': 'False', 'max_length': '50', 'separator': "u'-'", 'blank': 'True', 'populate_from': "'name'", 'overwrite': 'True'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'type': ('django.db.models.fields.CharField', [], {'default': "'unknown'", 'max_length': '12'}),
'updated': ('django.db.models.fields.DateField', [], {'auto_now': 'True', 'blank': 'True'}),
'uuid': ('django.db.models.fields.CharField', [], {'max_length': '36', 'blank': 'True'})
},
'alibrary.distributorlabel': {
'Meta': {'object_name': 'DistributorLabel'},
'countries': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'distribution_countries'", 'symmetrical': 'False', 'to': "orm['l10n.Country']"}),
'distributor': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['alibrary.Distributor']"}),
'exclusive': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['alibrary.Label']"})
},
'alibrary.format': {
'Meta': {'ordering': "('format', 'version')", 'object_name': 'Format'},
'created': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'default_price': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1', 'max_length': '6', 'null': 'True', 'blank': 'True'}),
'excerpt': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'format': ('django.db.models.fields.CharField', [], {'max_length': '4'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'updated': ('django.db.models.fields.DateField', [], {'auto_now': 'True', 'blank': 'True'}),
'version': ('django.db.models.fields.CharField', [], {'default': "'base'", 'max_length': '10'})
},
'alibrary.label': {
'Meta': {'ordering': "('name',)", 'object_name': 'Label'},
'address': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'country': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['l10n.Country']", 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'labels_creator'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['auth.User']"}),
'd_tags': ('tagging.fields.TagField', [], {'max_length': '1024', 'null': 'True'}),
'date_end': ('django_date_extensions.fields.ApproximateDateField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
'date_start': ('django_date_extensions.fields.ApproximateDateField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
'description': ('lib.fields.extra.MarkdownTextField', [], {'null': 'True', 'blank': 'True'}),
'description_html': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'disable_editing': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'disable_link': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
'fax': ('phonenumber_field.modelfields.PhoneNumberField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'first_placeholder': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Placeholder']", 'null': 'True'}),
'folder': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'label_folder'", 'null': 'True', 'to': "orm['filer.Folder']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'labelcode': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'legacy_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'listed': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'main_image': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'label_main_image'", 'null': 'True', 'to': "orm['filer.Image']"}),
'migrated': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '400'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'labels_owner'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['auth.User']"}),
'parent': ('mptt.fields.TreeForeignKey', [], {'blank': 'True', 'related_name': "'label_children'", 'null': 'True', 'to': "orm['alibrary.Label']"}),
'phone': ('phonenumber_field.modelfields.PhoneNumberField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'publisher': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'labels_publisher'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['auth.User']"}),
'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'slug': ('django_extensions.db.fields.AutoSlugField', [], {'allow_duplicates': 'False', 'max_length': '50', 'separator': "u'-'", 'blank': 'True', 'populate_from': "'name'", 'overwrite': 'True'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'type': ('django.db.models.fields.CharField', [], {'default': "'unknown'", 'max_length': '12'}),
'updated': ('django.db.models.fields.DateField', [], {'auto_now': 'True', 'blank': 'True'}),
'uuid': ('django.db.models.fields.CharField', [], {'max_length': '36', 'blank': 'True'})
},
'alibrary.license': {
'Meta': {'ordering': "('name',)", 'object_name': 'License'},
'created': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'iconset': ('django.db.models.fields.CharField', [], {'max_length': '36', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_default': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '36', 'null': 'True', 'blank': 'True'}),
'legacy_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'link': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'migrated': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'parent': ('mptt.fields.TreeForeignKey', [], {'blank': 'True', 'related_name': "'license_children'", 'null': 'True', 'to': "orm['alibrary.License']"}),
'restricted': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '100'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'updated': ('django.db.models.fields.DateField', [], {'auto_now': 'True', 'blank': 'True'}),
'uuid': ('django.db.models.fields.CharField', [], {'default': "'aebfb430-6461-4ef0-907c-8e6412b7acd1'", 'max_length': '36'})
},
'alibrary.licensetranslation': {
'Meta': {'ordering': "('language_code',)", 'unique_together': "(('language_code', 'master'),)", 'object_name': 'LicenseTranslation', 'db_table': "'alibrary_license_translation'"},
'excerpt': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language_code': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '15', 'blank': 'True'}),
'license_text': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'master': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'translations'", 'to': "orm['alibrary.License']"}),
'name_translated': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'alibrary.media': {
'Meta': {'ordering': "('mediamumber', 'tracknumber')", 'object_name': 'Media'},
'artist': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'media_artist'", 'null': 'True', 'to': "orm['alibrary.Artist']"}),
'base_bitrate': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'base_duration': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'base_filesize': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'base_format': ('django.db.models.fields.CharField', [], {'max_length': '12', 'null': 'True', 'blank': 'True'}),
'base_samplerate': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'conversion_status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'max_length': '2'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'media_creator'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['auth.User']"}),
'd_tags': ('tagging.fields.TagField', [], {'max_length': '1024', 'null': 'True'}),
'danceability': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'duration': ('django.db.models.fields.PositiveIntegerField', [], {'max_length': '12', 'null': 'True', 'blank': 'True'}),
'echonest_duration': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'echonest_id': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'echoprint_status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'max_length': '2'}),
'energy': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'extra_artists': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['alibrary.Artist']", 'null': 'True', 'through': "orm['alibrary.MediaExtraartists']", 'blank': 'True'}),
'filename': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'folder': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'isrc': ('django.db.models.fields.CharField', [], {'max_length': '12', 'null': 'True', 'blank': 'True'}),
'key': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'legacy_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'license': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'media_license'", 'null': 'True', 'to': "orm['alibrary.License']"}),
'liveness': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'lock': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'max_length': '1'}),
'loudness': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'master': ('django.db.models.fields.files.FileField', [], {'max_length': '1024', 'null': 'True', 'blank': 'True'}),
'master_sha1': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '64', 'null': 'True', 'blank': 'True'}),
'mediamumber': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'max_length': '12'}),
'mediatype': ('django.db.models.fields.CharField', [], {'default': "'track'", 'max_length': '12'}),
'migrated': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'db_index': 'True'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'media_owner'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['auth.User']"}),
'processed': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'max_length': '2'}),
'publish_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'publisher': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'media_publisher'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['auth.User']"}),
'release': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'media_release'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['alibrary.Release']"}),
'sections': ('django.db.models.fields.TextField', [], {'default': "'{}'", 'null': 'True', 'blank': 'True'}),
'slug': ('django_extensions.db.fields.AutoSlugField', [], {'allow_duplicates': 'False', 'max_length': '50', 'separator': "u'-'", 'blank': 'True', 'populate_from': "'name'", 'overwrite': 'True'}),
'speechiness': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'start_of_fade_out': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'tempo': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'tracknumber': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'max_length': '12'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'uuid': ('django.db.models.fields.CharField', [], {'max_length': '36', 'blank': 'True'})
},
'alibrary.mediaextraartists': {
'Meta': {'ordering': "('artist__name', 'profession__name')", 'object_name': 'MediaExtraartists'},
'artist': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'extraartist_artist'", 'to': "orm['alibrary.Artist']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'media': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'extraartist_media'", 'to': "orm['alibrary.Media']"}),
'profession': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'media_extraartist_profession'", 'null': 'True', 'to': "orm['alibrary.Profession']"})
},
'alibrary.mediaformat': {
'Meta': {'ordering': "('name',)", 'object_name': 'Mediaformat'},
'excerpt': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'in_listing': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'alibrary.mediaplugin': {
'Meta': {'object_name': 'MediaPlugin', 'db_table': "'cmsplugin_mediaplugin'", '_ormbases': ['cms.CMSPlugin']},
'cmsplugin_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['cms.CMSPlugin']", 'unique': 'True', 'primary_key': 'True'}),
'headline': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'media': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['alibrary.Media']"})
},
'alibrary.namevariation': {
'Meta': {'ordering': "('name',)", 'object_name': 'NameVariation'},
'artist': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['alibrary.Artist']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'db_index': 'True'})
},
'alibrary.playlist': {
'Meta': {'ordering': "('-updated',)", 'object_name': 'Playlist'},
'broadcast_status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'broadcast_status_messages': ('django.db.models.fields.TextField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'd_tags': ('tagging.fields.TagField', [], {'max_length': '1024', 'null': 'True'}),
'dayparts': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'daypart_plalists'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['alibrary.Daypart']"}),
'description': ('lib.fields.extra.MarkdownTextField', [], {'null': 'True', 'blank': 'True'}),
'description_html': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'duration': ('django.db.models.fields.IntegerField', [], {'default': '0', 'max_length': '12', 'null': 'True'}),
'edit_mode': ('django.db.models.fields.PositiveIntegerField', [], {'default': '2'}),
'enable_comments': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_current': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'items': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['alibrary.PlaylistItem']", 'null': 'True', 'through': "orm['alibrary.PlaylistItemPlaylist']", 'blank': 'True'}),
'legacy_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'main_image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'migrated': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'rotation': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'seasons': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'season_plalists'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['alibrary.Season']"}),
'series': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['alibrary.Series']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'series_number': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'slug': ('django_extensions.db.fields.AutoSlugField', [], {'allow_duplicates': 'False', 'max_length': '50', 'separator': "u'-'", 'blank': 'True', 'populate_from': "'name'", 'overwrite': 'True'}),
'status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'target_duration': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'null': 'True'}),
'type': ('django.db.models.fields.CharField', [], {'default': "'basket'", 'max_length': '12', 'null': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'uuid': ('django.db.models.fields.CharField', [], {'max_length': '36', 'blank': 'True'}),
'weather': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'weather_plalists'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['alibrary.Weather']"})
},
'alibrary.playlistitem': {
'Meta': {'object_name': 'PlaylistItem'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'uuid': ('django.db.models.fields.CharField', [], {'max_length': '36', 'blank': 'True'})
},
'alibrary.playlistitemplaylist': {
'Meta': {'object_name': 'PlaylistItemPlaylist'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'cue_in': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'max_length': '12'}),
'cue_out': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'max_length': '12'}),
'fade_cross': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'max_length': '12'}),
'fade_in': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'max_length': '12'}),
'fade_out': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'max_length': '12'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'item': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['alibrary.PlaylistItem']"}),
'playlist': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['alibrary.Playlist']"}),
'position': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'max_length': '12'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'uuid': ('django.db.models.fields.CharField', [], {'max_length': '36', 'blank': 'True'})
},
'alibrary.playlistmedia': {
'Meta': {'object_name': 'PlaylistMedia'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'cue_in': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'max_length': '12'}),
'cue_out': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'max_length': '12'}),
'fade_cross': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'max_length': '12'}),
'fade_in': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'max_length': '12'}),
'fade_out': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'max_length': '12'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'media': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['alibrary.Media']"}),
'playlist': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['alibrary.Playlist']"}),
'position': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'max_length': '12'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'uuid': ('django.db.models.fields.CharField', [], {'max_length': '36', 'blank': 'True'})
},
'alibrary.profession': {
'Meta': {'ordering': "('name',)", 'object_name': 'Profession'},
'created': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'excerpt': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'in_listing': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'updated': ('django.db.models.fields.DateField', [], {'auto_now': 'True', 'blank': 'True'})
},
'alibrary.relation': {
'Meta': {'ordering': "('url',)", 'object_name': 'Relation'},
'action': ('django.db.models.fields.CharField', [], {'default': "'information'", 'max_length': '50'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'created': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'object_id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'blank': 'True'}),
'service': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'updated': ('django.db.models.fields.DateField', [], {'auto_now': 'True', 'blank': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '512'})
},
'alibrary.release': {
'Meta': {'ordering': "('-created',)", 'object_name': 'Release'},
'album_artists': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'release_albumartists'", 'to': "orm['alibrary.Artist']", 'through': "orm['alibrary.ReleaseAlbumartists']", 'blank': 'True', 'symmetrical': 'False', 'null': 'True'}),
'asin': ('django.db.models.fields.CharField', [], {'max_length': '150', 'blank': 'True'}),
'barcode': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'blank': 'True'}),
'catalognumber': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'cover_image': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'release_cover_image'", 'null': 'True', 'to': "orm['filer.Image']"}),
'created': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'releases_creator'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['auth.User']"}),
'd_tags': ('tagging.fields.TagField', [], {'max_length': '1024', 'null': 'True'}),
'description': ('lib.fields.extra.MarkdownTextField', [], {'null': 'True', 'blank': 'True'}),
'description_html': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'enable_comments': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'excerpt': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'extra_artists': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['alibrary.Artist']", 'null': 'True', 'through': "orm['alibrary.ReleaseExtraartists']", 'blank': 'True'}),
'folder': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'release_folder'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['filer.Folder']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'release_label'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['alibrary.Label']"}),
'legacy_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'license': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'release_license'", 'null': 'True', 'to': "orm['alibrary.License']"}),
'main_format': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['alibrary.Mediaformat']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'main_image': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'release_main_image'", 'null': 'True', 'to': "orm['filer.Image']"}),
'media': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'releases'", 'to': "orm['alibrary.Media']", 'through': "orm['alibrary.ReleaseMedia']", 'blank': 'True', 'symmetrical': 'False', 'null': 'True'}),
'migrated': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'db_index': 'True'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'releases_owner'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['auth.User']"}),
'placeholder_1': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Placeholder']", 'null': 'True'}),
'pressings': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'max_length': '12'}),
'publish_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'publisher': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'releases_publisher'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['auth.User']"}),
'release_country': ('django_countries.fields.CountryField', [], {'max_length': '2', 'null': 'True', 'blank': 'True'}),
'releasedate': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'releasedate_approx': ('django_date_extensions.fields.ApproximateDateField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
'releasestatus': ('django.db.models.fields.CharField', [], {'max_length': '60', 'blank': 'True'}),
'releasetype': ('django.db.models.fields.CharField', [], {'default': "'other'", 'max_length': '24'}),
'slug': ('django_extensions.db.fields.AutoSlugField', [], {'allow_duplicates': 'False', 'max_length': '50', 'separator': "u'-'", 'blank': 'True', 'populate_from': "'name'", 'overwrite': 'True'}),
'totaltracks': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'updated': ('django.db.models.fields.DateField', [], {'auto_now': 'True', 'blank': 'True'}),
'uuid': ('django.db.models.fields.CharField', [], {'max_length': '36', 'blank': 'True'})
},
'alibrary.releasealbumartists': {
'Meta': {'ordering': "('position',)", 'object_name': 'ReleaseAlbumartists'},
'artist': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'release_albumartist_artist'", 'to': "orm['alibrary.Artist']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'join_phrase': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '12', 'null': 'True', 'blank': 'True'}),
'position': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'release': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'release_albumartist_release'", 'to': "orm['alibrary.Release']"})
},
'alibrary.releaseextraartists': {
'Meta': {'object_name': 'ReleaseExtraartists'},
'artist': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'release_extraartist_artist'", 'to': "orm['alibrary.Artist']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'profession': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'release_extraartist_profession'", 'null': 'True', 'to': "orm['alibrary.Profession']"}),
'release': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'release_extraartist_release'", 'to': "orm['alibrary.Release']"})
},
'alibrary.releasemedia': {
'Meta': {'object_name': 'ReleaseMedia'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'media': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['alibrary.Media']"}),
'position': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'release': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['alibrary.Release']"})
},
'alibrary.releaseplugin': {
'Meta': {'object_name': 'ReleasePlugin', 'db_table': "'cmsplugin_releaseplugin'", '_ormbases': ['cms.CMSPlugin']},
'cmsplugin_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['cms.CMSPlugin']", 'unique': 'True', 'primary_key': 'True'}),
'release': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['alibrary.Release']"})
},
'alibrary.releaserelations': {
'Meta': {'object_name': 'ReleaseRelations'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'relation': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'release_relation_relation'", 'to': "orm['alibrary.Relation']"}),
'release': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'release_relation_release'", 'to': "orm['alibrary.Release']"})
},
'alibrary.season': {
'Meta': {'ordering': "('-name',)", 'object_name': 'Season'},
'date_end': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'date_start': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'name_de': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'name_en': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'name_fr': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'})
},
'alibrary.series': {
'Meta': {'ordering': "('-name',)", 'object_name': 'Series'},
'description': ('lib.fields.extra.MarkdownTextField', [], {'null': 'True', 'blank': 'True'}),
'description_html': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'slug': ('django_extensions.db.fields.AutoSlugField', [], {'allow_duplicates': 'False', 'max_length': '50', 'separator': "u'-'", 'blank': 'True', 'populate_from': "'name'", 'overwrite': 'True'}),
'uuid': ('django.db.models.fields.CharField', [], {'max_length': '36', 'blank': 'True'})
},
'alibrary.service': {
'Meta': {'ordering': "('name',)", 'object_name': 'Service'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'pattern': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'})
},
'alibrary.weather': {
'Meta': {'ordering': "('-name',)", 'object_name': 'Weather'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'name_de': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'name_en': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'name_fr': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'})
},
'arating.vote': {
'Meta': {'unique_together': "(('user', 'content_type', 'object_id'),)", 'object_name': 'Vote'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'vote': ('django.db.models.fields.SmallIntegerField', [], {})
},
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'cms.cmsplugin': {
'Meta': {'object_name': 'CMSPlugin'},
'changed_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'creation_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.CMSPlugin']", 'null': 'True', 'blank': 'True'}),
'placeholder': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Placeholder']", 'null': 'True'}),
'plugin_type': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
'position': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'})
},
'cms.placeholder': {
'Meta': {'object_name': 'Placeholder'},
'default_width': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'slot': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'filer.file': {
'Meta': {'object_name': 'File'},
'_file_size': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'file': ('django.db.models.fields.files.FileField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'folder': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'all_files'", 'null': 'True', 'to': "orm['filer.Folder']"}),
'has_all_mandatory_data': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_public': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'modified_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'original_filename': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'owned_files'", 'null': 'True', 'to': "orm['auth.User']"}),
'polymorphic_ctype': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'polymorphic_filer.file_set'", 'null': 'True', 'to': "orm['contenttypes.ContentType']"}),
'sha1': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '40', 'blank': 'True'}),
'uploaded_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'})
},
'filer.folder': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('parent', 'name'),)", 'object_name': 'Folder'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'modified_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'filer_owned_folders'", 'null': 'True', 'to': "orm['auth.User']"}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['filer.Folder']"}),
'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'uploaded_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'})
},
'filer.image': {
'Meta': {'object_name': 'Image', '_ormbases': ['filer.File']},
'_height': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'_width': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'author': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'date_taken': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'default_alt_text': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'default_caption': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'file_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['filer.File']", 'unique': 'True', 'primary_key': 'True'}),
'must_always_publish_author_credit': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'must_always_publish_copyright': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'subject_location': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '64', 'null': 'True', 'blank': 'True'})
},
'l10n.country': {
'Meta': {'ordering': "('name',)", 'object_name': 'Country'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'admin_area': ('django.db.models.fields.CharField', [], {'max_length': '2', 'null': 'True', 'blank': 'True'}),
'continent': ('django.db.models.fields.CharField', [], {'max_length': '2'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'iso2_code': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '2'}),
'iso3_code': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '3'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'numcode': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'printable_name': ('django.db.models.fields.CharField', [], {'max_length': '128'})
}
}
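    # `models` above is South's frozen ORM snapshot: each key is 'app.modelname'
    # and each field maps to (field class dotted path, positional args, kwargs),
    # so the migration can be replayed against the schema exactly as frozen.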
complete_apps = ['alibrary']
|
mixturemodel-flow/tensorflow
|
refs/heads/master
|
tensorflow/python/grappler/model_analyzer.py
|
38
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
"""Provides a proper python API for the symbols exported through swig."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python import pywrap_tensorflow as tf_wrap
from tensorflow.python.framework import errors
def GenerateModelReport(metagraph):
"""Report what's known statically about each node in the provided metagraph.
Args:
metagraph: A TensorFlow MetaGraphDef.
Returns:
A string containing the report.
"""
with errors.raise_exception_on_not_ok_status():
ret_from_swig = tf_wrap.GenerateModelReport(metagraph.SerializeToString())
return ret_from_swig
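# A minimal usage sketch (assumed caller code, not part of the grappler API
# itself): export a MetaGraphDef from a small TF 1.x graph and print the report.
if __name__ == "__main__":
  import tensorflow as tf
  with tf.Graph().as_default():
    a = tf.constant([1.0, 2.0], name="a")     # value and shape known statically
    b = tf.add(a, a, name="b")
    metagraph = tf.train.export_meta_graph()  # MetaGraphDef of the current graph
  print(GenerateModelReport(metagraph))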
|
luotao1/Paddle
|
refs/heads/develop
|
python/paddle/fluid/tests/unittests/npu/test_sgd_op_npu.py
|
2
|
# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
import unittest
import sys
sys.path.append("..")
from op_test import OpTest
import paddle
import paddle.fluid as fluid
paddle.enable_static()
SEED = 2021
@unittest.skipIf(not paddle.is_compiled_with_npu(),
"core is not compiled with NPU")
class TestSGD(OpTest):
def setUp(self):
self.set_npu()
self.place = paddle.NPUPlace(0)
self.op_type = "sgd"
self.conf()
w = np.random.random((self.h, self.w)).astype("float32")
g = np.random.random((self.h, self.w)).astype("float32")
lr = np.array([0.1]).astype("float32")
self.inputs = {'Param': w, 'Grad': g, 'LearningRate': lr}
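        # Vanilla SGD reference: param_out = param - lr * grad; the NPU kernel's
        # output is verified against this NumPy computation.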
self.outputs = {'ParamOut': w - lr * g}
def set_npu(self):
self.__class__.use_npu = True
def init_dtype(self):
self.dtype = np.float32
def conf(self):
self.h = 12
self.w = 15
def test_check_output(self):
self.check_output_with_place(self.place, check_dygraph=False)
@unittest.skipIf(not paddle.is_compiled_with_npu(),
"core is not compiled with NPU")
class TestNet(unittest.TestCase):
def _test(self, run_npu=True):
main_prog = paddle.static.Program()
startup_prog = paddle.static.Program()
main_prog.random_seed = SEED
startup_prog.random_seed = SEED
np.random.seed(SEED)
a_np = np.random.random(size=(32, 32)).astype('float32')
b_np = np.random.random(size=(32, 32)).astype('float32')
label_np = np.random.randint(2, size=(32, 1)).astype('int64')
with paddle.static.program_guard(main_prog, startup_prog):
a = paddle.static.data(name="a", shape=[32, 32], dtype='float32')
b = paddle.static.data(name="b", shape=[32, 32], dtype='float32')
label = paddle.static.data(
name="label", shape=[32, 1], dtype='int64')
            added = paddle.add(a, b)  # avoid shadowing the builtin `sum`
            z = paddle.pow(added, 2.0)
fc_1 = fluid.layers.fc(input=z, size=128)
prediction = fluid.layers.fc(input=fc_1, size=2, act='softmax')
cost = fluid.layers.cross_entropy(input=prediction, label=label)
loss = fluid.layers.reduce_mean(cost)
sgd = fluid.optimizer.SGD(learning_rate=0.01)
sgd.minimize(loss)
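            # at this point main_prog holds the forward pass, the backward pass
            # appended by minimize(), and the SGD parameter-update ops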
if run_npu:
place = paddle.NPUPlace(0)
else:
place = paddle.CPUPlace()
exe = paddle.static.Executor(place)
exe.run(startup_prog)
print("Start run on {}".format(place))
for epoch in range(100):
pred_res, loss_res = exe.run(
main_prog,
feed={"a": a_np,
"b": b_np,
"label": label_np},
fetch_list=[prediction, loss])
if epoch % 10 == 0:
print("Epoch {} | Prediction[0]: {}, Loss: {}".format(
epoch, pred_res[0], loss_res))
return pred_res, loss_res
def test_npu(self):
cpu_pred, cpu_loss = self._test(False)
npu_pred, npu_loss = self._test(True)
self.assertTrue(np.allclose(npu_pred, cpu_pred))
self.assertTrue(np.allclose(npu_loss, cpu_loss))
if __name__ == '__main__':
unittest.main()
|
lmazuel/azure-sdk-for-python
|
refs/heads/master
|
azure-mgmt-compute/azure/mgmt/compute/v2016_04_30_preview/models/snapshot_paged.py
|
2
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.paging import Paged
class SnapshotPaged(Paged):
"""
    A paging container for iterating over a list of :class:`Snapshot <azure.mgmt.compute.v2016_04_30_preview.models.Snapshot>` objects
"""
_attribute_map = {
'next_link': {'key': 'nextLink', 'type': 'str'},
'current_page': {'key': 'value', 'type': '[Snapshot]'}
}
def __init__(self, *args, **kwargs):
super(SnapshotPaged, self).__init__(*args, **kwargs)
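# A minimal consumption sketch (assumed caller code, not part of this module):
# the Paged base class follows `nextLink` transparently during iteration.
#
#   from azure.mgmt.compute import ComputeManagementClient
#   client = ComputeManagementClient(credentials, subscription_id)
#   for snapshot in client.snapshots.list():   # returns a SnapshotPaged
#       print(snapshot.name)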
|
linkmax91/bitquant
|
refs/heads/master
|
web/home/ipython/examples/bitcoin-pricer.py
|
1
|
# -*- coding: utf-8 -*-
# <nbformat>3.0</nbformat>
# <codecell>
#!/usr/bin/python
# Copyright (c) 2014 Bitquant Research Laboratories (Asia) Ltd.
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from __future__ import print_function
from wsgiref.handlers import CGIHandler
from flask import Flask, Response, request
from werkzeug.utils import secure_filename
import subprocess
import sys
import shutil
import os
import json
import getpass
import login
import traceback
import fcntl
import time
import crypt
import pandas
import datetime
import pytz
import dateutil.relativedelta  # relativedelta is used below; importing the bare package does not load it
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
# <codecell>
#me = getpass.getuser()
#sys.path.append(os.path.join("/home", me, "git/bitquant/web/home/ipython/examples"))
try:
script_dir = os.path.dirname(os.path.realpath(__file__))
os.chdir(script_dir)
except:
pass
from BitcoinAverager import PriceCompositor
app = Flask(__name__)
all_exchanges = ['bitfinexUSD','bitstampUSD','itbitUSD',
'itbitEUR','krakenEUR','itbitSGD','anxhkHKD',
'okcoinCNY', 'btcnCNY']
compositor = PriceCompositor(all_exchanges)
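# PriceCompositor (from BitcoinAverager) aggregates per-exchange trade data and,
# via set_params()/generate() below, converts prices into a chosen base currency
# over fixed time intervals.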
@app.route('/')
def index():
return average_form()
@app.route('/average-form')
def average_form():
retval = """
<form method=POST action="generate-data">
Start time (yyyy/mm/dd): <input name="year" size=4 value="2014">/
<input name="month" size=2 value="02">/<input name="day" size=2 value="01"> <input name="hour" size=2 value="00">:<input name="minute" size=2 value="00">:<input name="second" size=2 value="00"><br>
Time zone: <input name="tz" size=20 value="Europe/London"><br>
Time interval: <input name=interval_length value="3" size=3>
<select name="interval_type">
<option value="month">month(s)</option>
<option value="week">week(s)</option>
<option value="day">day(s)</option>
<option value="hour" selected>hour(s)</option>
<option value="minute">minute(s)</option>
<option value="second">second(s)</option>
</select><br>
Intervals: <input name=intervals value="20" size=3><p>
Exchanges:<br>
"""
for i in all_exchanges:
if i not in compositor.averager:
continue
index_range = compositor.averager[i].index_range()
if 'CNY' in i:
c = ''
else:
c = 'checked'
retval += '<input %s type="checkbox" name=exchanges value="%s">%s' % (c, i, i)
try:
retval += " - %s UTC to %s UTC - %d rows<br>" % ( time.strftime('%Y-%m-%d %H:%M:%S', time.gmtime(index_range[0])),
time.strftime('%Y-%m-%d %H:%M:%S', time.gmtime(index_range[1])), index_range[2])
except:
retval += "error"
retval += """
<p>
Include:
<br>
Base Currency: <input name="base_currency" value="GBP" size="3">
<br>
Format: <select name="format">
<option value="text/html">HTML</option>
<option value="text/csv">CSV</option>
<option value="text/json">JSON</option>
</select>
<p>
<input type="checkbox" name="table" value="True" checked>Table<br>
<input type="checkbox" name="plot" value="True">Plot<br>
Price plot fields (comma_separated): <input name="price_plot_fields" value="price"><br>
Volume plot fields (comma separated): <input name="volume_plot_fields" value="volume"><br>
<input type="submit" />
</form>
<p>
<a href="/cgi-bin/bittrader/bitcoinaverager.py/reload" target="_blank">Reload data (this will take a few minutes)</a>
"""
return retval
@app.route('/generate-data', methods = ['POST'])
def generate_data():
import cStringIO
year = request.form['year']
month = request.form['month']
day = request.form['day']
hour = request.form['hour']
minute = request.form['minute']
second = request.form['second']
time_zone = request.form['tz']
interval_length = int(request.form['interval_length'])
interval_type = request.form['interval_type']
intervals = int(request.form['intervals'])
base_currency = request.form.get('base_currency', 'GBP')
time_table = (request.form.get('time_table', 'True') == 'True')
currency_table = (request.form.get('currency_table', 'True') == 'True')
conversion_table = (request.form.get('conversion_table', 'True') == 'True')
exchange_table = (request.form.get('exchange_table', 'True') == 'True')
converted_prices = (request.form.get('converted_prices', 'True') == 'True')
show_table = (request.form.get('table', '') == 'True')
plot = (request.form.get('plot', '') == 'True')
price_plot_fields = request.form.get('price_plot_fields', '')
volume_plot_fields = request.form.get('volume_plot_fields', '')
format = request.form.get('format', "text/html")
local_tz = pytz.timezone(time_zone)
start_date = local_tz.localize(datetime.datetime(int(year),
int(month),
int(day),
int(hour),
int(minute),
int(second)))
time_delta = None
if interval_type == "month":
time_delta = dateutil.relativedelta.relativedelta(months=interval_length)
elif interval_type == "week":
time_delta = dateutil.relativedelta.relativedelta(weeks=interval_length)
elif interval_type == "day":
time_delta = dateutil.relativedelta.relativedelta(days=interval_length)
elif interval_type == "hour":
time_delta = dateutil.relativedelta.relativedelta(hours=interval_length)
elif interval_type == "minute":
time_delta = dateutil.relativedelta.relativedelta(minutes=interval_length)
elif interval_type == "seconds":
time_delta = dateutil.relativedelta.relativedelta(seconds=interval_length)
else:
return "invalid interval_type"
exchanges = request.form.getlist("exchanges")
compositor.set_params(exchanges, base_currency)
table = compositor.generate(start_date,
time_delta,
intervals,
times=time_table,
currency=currency_table,
exchange=exchange_table,
rates=conversion_table,
converted_prices=converted_prices)
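    # `table` is a pandas DataFrame; it is rendered below via to_html/to_csv/to_json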
output = cStringIO.StringIO()
if format == "text/html":
if show_table:
table.to_html(output, classes=["data","compact", "stripe"])
if plot:
sio = cStringIO.StringIO()
plt.figure(figsize=(6, 6))
ax1 = plt.subplot2grid((8,1), (0,0), rowspan=7)
ax2 = plt.subplot2grid((8,1), (7,0))
ax1.xaxis.set_ticklabels([])
table[[x.strip() for x in price_plot_fields.split(",")]].plot(ax=ax1)
table[[x.strip() for x in volume_plot_fields.split(",")]].plot(ax=ax2)
plt.savefig(sio,format='png')
output.write('<img src="data:image/png;base64,%s"/>' % \
sio.getvalue().encode("base64").strip())
sio.close()
elif format == "text/csv":
table.to_csv(output)
elif format == "text/json":
table.to_json(output, orient='split', date_format='iso')
else:
return "invalid format"
string = output.getvalue()
output.close()
if format == "text/html":
string = string.replace('border="1"', '')
header = """
<head>
<script src="//code.jquery.com/jquery-1.11.1.min.js"></script>
<script src="//cdn.datatables.net/1.10.2/js/jquery.dataTables.min.js"></script>
<script src="//cdn.datatables.net/fixedcolumns/3.0.0/js/dataTables.fixedColumns.min.js"></script>
<script>
$(document).ready(function() {
var table = $('.data').dataTable({
"bFilter": false,
"scrollY": "350px",
"scrollX": true,
"scrollCollapse": true,
"paging": false,
"columns" : [
"""
col_items = []
for col in compositor.col_format():
for j in range(col[1]):
if col[0] != "index" and col[0] != "sum":
visible = ", visible: " + '$("#' + col[0] + '_table").prop("checked")'
else:
visible = ""
col_items.append('{className : "' + col[0] + '"' + visible + '}')
header += ",\n".join(col_items) + """
]
});
new $.fn.dataTable.FixedColumns( table );
$( ".toggle" ).change(function() {
item = $(this).attr("item");
if ($(this).prop("checked")) {
table.DataTable().columns(item).visible(true, false);
} else {
table.DataTable().columns(item).visible(false, false);
}
table.DataTable().draw(false);
new $.fn.dataTable.FixedColumns( table );
});
});
</script>
<style type="text/css">
.data .index {
white-space: nowrap
}
.times {
white-space: nowrap
}
td {
text-align: right
}
th {
text-align: center
}
</style>
<link href="//cdn.datatables.net/1.10.2/css/jquery.dataTables.css" type="text/css" rel="stylesheet">
<link href="//cdn.datatables.net/fixedcolumns/3.0.0/css/dataTables.fixedColumns.min.css" rel="stylesheet">
</head>
<input type="checkbox" class="toggle" item=".currency" id="currency_table" name="currency_table" value="True">Itemize by currency
<input type="checkbox" class="toggle" item=".exchange" id="exchange_table" name="exchange_table" value="True">Itemize by exchange
<input type="checkbox" class="toggle" item=".converted" id="converted_table" name="converted_prices" value="True">Currency converted prices
<input type="checkbox" class="toggle" item=".rates" id="rates_table" name="conversion_table" value="True">Currency rates
<input type="checkbox" class="toggle" item=".times" id="times_table" name="time_table" value="True">Time/Epoch information
"""
return Response(header+string, mimetype=format)
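# A minimal dict-dispatch sketch of the interval_type chain in the handler
# above; INTERVAL_KWARGS and make_time_delta are hypothetical names, not part
# of the original app.
INTERVAL_KWARGS = {"month": "months", "week": "weeks", "day": "days",
                   "hour": "hours", "minute": "minutes", "seconds": "seconds"}

def make_time_delta(interval_type, interval_length):
    # Returns None for an unknown interval_type so the caller can report it.
    kwarg = INTERVAL_KWARGS.get(interval_type)
    if kwarg is None:
        return None
    return dateutil.relativedelta.relativedelta(**{kwarg: interval_length})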
@app.route("/reload")
def reload():
    # Rebind the module-level compositor; without `global`, this would build a
    # throwaway local instance and the running app would never see the reload.
    global compositor
    compositor = PriceCompositor()
    compositor.reload()
    return "reloaded"
# <codecell>
import tornado
import tornado.wsgi
import tornado.httpserver
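# Note: WSGIContainer runs the synchronous Flask app on Tornado's single
# IOLoop thread, so one slow request blocks all others; acceptable for a
# single-user, notebook-style deployment like this one.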
container = tornado.wsgi.WSGIContainer(app)
http_server = tornado.httpserver.HTTPServer(container)
http_server.listen(9010)
tornado.ioloop.IOLoop.instance().start()
# <codecell>
|
rowhit/h2o-2
|
refs/heads/master
|
py/testdir_multi_jvm/test_GLM2_catdata.py
|
9
|
import unittest, time, sys, copy
sys.path.extend(['.','..','../..','py'])
import h2o, h2o_cmd, h2o_glm, h2o_browse as h2b, h2o_import as h2i
class Basic(unittest.TestCase):
def tearDown(self):
h2o.check_sandbox_for_errors()
@classmethod
def setUpClass(cls):
h2o.init(3,java_heap_GB=4)
@classmethod
def tearDownClass(cls):
h2o.tear_down_cloud()
def test_GLM2_catdata_hosts(self):
# these are still in /home/kevin/scikit/datasets/logreg
# FIX! just two for now..
csvFilenameList = [
"1_100kx7_logreg.data.gz",
"2_100kx7_logreg.data.gz"
]
# pop open a browser on the cloud
### h2b.browseTheCloud()
        # save the first, for all comparisons, to avoid slow drift with each iteration
validation1 = {}
for csvFilename in csvFilenameList:
csvPathname = csvFilename
parseResult = h2i.import_parse(bucket='smalldata', path=csvPathname, schema='put')
print "\n" + csvPathname
start = time.time()
            # FIX! why can't I include 0 here? it keeps getting 'unable to solve' if 0 is included
# 0 by itself is okay?
kwargs = {'response': 7, 'family': "binomial", 'n_folds': 3, 'lambda': 1e-4}
timeoutSecs = 200
glm = h2o_cmd.runGLM(parseResult=parseResult, timeoutSecs=timeoutSecs, **kwargs)
h2o_glm.simpleCheckGLM(self, glm, 'C7', **kwargs)
print "glm end on ", csvPathname, 'took', time.time() - start, 'seconds'
### h2b.browseJsonHistoryAsUrlLastMatch("GLM")
            # compare this glm to the first one. Since the files are replications,
            # the results should be similar.
validation = glm['glm_model']['submodels'][0]['validation']
if validation1:
h2o_glm.compareToFirstGlm(self, 'auc', validation, validation1)
else:
validation1 = copy.deepcopy(validation)
if __name__ == '__main__':
h2o.unit_main()
|
NischalLal/Humpty-Dumpty-SriGanesh
|
refs/heads/master
|
my_website/urls.py
|
1
|
"""my_website URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.contrib import admin
urlpatterns = [
url(r'^admin/', admin.site.urls),
    url(r'^', include('myblog.urls')),  # include() prefixes must not end in '$'
]
|
AndrewGrossman/django
|
refs/heads/master
|
tests/utils_tests/test_lorem_ipsum.py
|
246
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import unittest
from django.utils.lorem_ipsum import paragraphs, words
class WebdesignTest(unittest.TestCase):
def test_words(self):
self.assertEqual(words(7), 'lorem ipsum dolor sit amet consectetur adipisicing')
def test_paragraphs(self):
self.assertEqual(paragraphs(1),
['Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.'])
|
chriskiehl/Gooey
|
refs/heads/master
|
gooey/tests/integration/runner.py
|
2
|
import os
import time
from concurrent import futures
from gooey.gui.util.freeze import getResourcePath
from gooey.python_bindings import config_generator
from gooey.util.functional import merge
def run_integration(module, assertionFunction, **kwargs):
"""
Integration test harness.
    WXPython is *super* finicky when it comes to integration tests. It needs
    the main Python thread for its app loop, which means the assertions have
    to run on a separate thread. This causes further strangeness in how
    unittest and WXPython interact. In short, each test must live in its own
    module, import its own wx instance, and run in its own "space."
    So long as the above is satisfied, integration tests run reliably.
"""
from gooey.gui import application
options = merge({
'image_dir': '::gooey/default',
'language_dir': getResourcePath('languages'),
'show_success_modal': False
}, kwargs)
module_path = os.path.abspath(module.__file__)
parser = module.get_parser()
build_spec = config_generator.create_from_parser(parser, module_path, **options)
time.sleep(2)
app = application.build_app(build_spec=build_spec)
executor = futures.ThreadPoolExecutor(max_workers=1)
# executor runs in parallel and will submit a wx.Destroy request
# when done making its assertions
testResult = executor.submit(assertionFunction, app, build_spec)
# main loop blocks the main thread
app.MainLoop()
# .result() blocks as well while we wait for the thread to finish
# any waiting it may be doing.
testResult.result()
del app
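# Hedged usage sketch (not part of the original file): a caller passes a test
# module exposing get_parser() plus an assertion callback, e.g.
#
#     def assertions(app, build_spec):   # runs on the worker thread
#         # ...assert against the live app, then trigger its destruction...
#
#     run_integration(my_test_module, assertions)   # my_test_module is hypothetical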
|
kalxas/QGIS
|
refs/heads/master
|
tests/src/python/test_qgsprocessingrecentalgorithmslog.py
|
45
|
# -*- coding: utf-8 -*-
"""QGIS Unit tests for QgsProcessingRecentAlgorithmLog.
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = 'Nyall Dawson'
__date__ = '2018-07'
__copyright__ = 'Copyright 2018, The QGIS Project'
from qgis.PyQt.QtCore import QCoreApplication
from qgis.core import QgsSettings
from qgis.gui import QgsProcessingRecentAlgorithmLog, QgsGui
from qgis.testing import start_app, unittest
from qgis.PyQt.QtTest import QSignalSpy
start_app()
class TestQgsProcessingRecentAlgorithmLog(unittest.TestCase):
@classmethod
def setUpClass(cls):
"""Run before all tests"""
QCoreApplication.setOrganizationName("QGIS_Test")
QCoreApplication.setOrganizationDomain("QGIS_TestPyQgsNewGeoPackageLayerDialog.com")
QCoreApplication.setApplicationName("QGIS_TestPyQgsNewGeoPackageLayerDialog")
QgsSettings().clear()
def test_log(self):
log = QgsProcessingRecentAlgorithmLog()
self.assertFalse(log.recentAlgorithmIds())
spy = QSignalSpy(log.changed)
log.push('test')
self.assertEqual(log.recentAlgorithmIds(), ['test'])
self.assertEqual(len(spy), 1)
log.push('test')
self.assertEqual(log.recentAlgorithmIds(), ['test'])
self.assertEqual(len(spy), 1)
log.push('test2')
self.assertEqual(log.recentAlgorithmIds(), ['test2', 'test'])
self.assertEqual(len(spy), 2)
log.push('test')
self.assertEqual(log.recentAlgorithmIds(), ['test', 'test2'])
self.assertEqual(len(spy), 3)
log.push('test3')
self.assertEqual(log.recentAlgorithmIds(), ['test3', 'test', 'test2'])
self.assertEqual(len(spy), 4)
log.push('test4')
self.assertEqual(log.recentAlgorithmIds(), ['test4', 'test3', 'test', 'test2'])
self.assertEqual(len(spy), 5)
log.push('test5')
self.assertEqual(log.recentAlgorithmIds(), ['test5', 'test4', 'test3', 'test', 'test2'])
self.assertEqual(len(spy), 6)
log.push('test6')
self.assertEqual(log.recentAlgorithmIds(), ['test6', 'test5', 'test4', 'test3', 'test'])
self.assertEqual(len(spy), 7)
log.push('test3')
self.assertEqual(log.recentAlgorithmIds(), ['test3', 'test6', 'test5', 'test4', 'test'])
self.assertEqual(len(spy), 8)
log.push('test3')
self.assertEqual(log.recentAlgorithmIds(), ['test3', 'test6', 'test5', 'test4', 'test'])
self.assertEqual(len(spy), 8)
# test that log has been saved to QgsSettings
log2 = QgsProcessingRecentAlgorithmLog()
self.assertEqual(log2.recentAlgorithmIds(), ['test3', 'test6', 'test5', 'test4', 'test'])
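        # Taken together, the assertions above show that the log caps at five
        # entries (oldest evicted), that re-pushing an id moves it to the
        # front, and that state round-trips through QgsSettings.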
def test_gui_instance(self):
self.assertIsNotNone(QgsGui.instance().processingRecentAlgorithmLog())
if __name__ == '__main__':
unittest.main()
|
jamesorr/mocsy
|
refs/heads/master
|
src/GSW/make_saar_data.py
|
1
|
#!/usr/bin/env python
# $id$
"""
Make gsw_mod_saar_data.f90 from the current gsw_data_v3_0.nc. This is a developer
utility and not a part of the public distribution, but its end-product is.
Note that it generates gsw_mod_saar_data.f90 but will not overwrite it if it exists.
General concept: we don't want end-users of this distribution to require having
netcdf installed, nor do we want to incur the I/O overhead every time this
library is used. So we simply generate static data from the netcdf file.
"""
import math, os, sys
from netCDF4 import Dataset
def float2string(val, sformat, addcomma):
if math.isnan(val):
str_val = "9e90_r8"
else:
str_val = sformat % val
if str_val.find(".") < 0 and str_val.find("e") < 0 :
str_val += "."
str_val += "_r8"
if addcomma:
str_val += ", "
    return str_val
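# Illustrative expected outputs for float2string (hypothetical examples, not
# from the source):
#   float2string(float('nan'), "%.17g", True)  -> "9e90_r8, "
#   float2string(2.0, "%.17g", False)          -> "2._r8"
#   float2string(0.5, "%.17g", True)           -> "0.5_r8, "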
def write_variable_real(var_name, dims, v):
ndims = len(dims)
# list dimensions in reverse order (nz,ny,nx)
if ndims == 1:
fortran_dims = "(%s)" % v.dimensions[0]
elif ndims == 2:
fortran_dims = "(%s,%s)" % v.dimensions[::-1]
elif ndims == 3:
fortran_dims = "(%s,%s,%s)" % v.dimensions[::-1]
out.write("real (r8), dimension%s :: %s\n" % (fortran_dims, var_name))
buf = " "
maxlen = 78
numformat = "%.17g"
if ndims == 1:
out.write("data %s / &\n" % var_name)
lastx = dims[0]-1
#
# The following construct (and variations below) transfer the
# netcdf variable into a memory-resident buffer all at once.
# Anything else is not advised.
#
vv = v[:]
for val, x in [(vv[cx],cx) for cx in range(dims[0])]:
sval = float2string(val,numformat,(x != lastx))
if len(buf)+len(sval) > maxlen:
out.write(buf+"&\n")
buf = " "
buf += sval
if buf:
out.write(buf+" &\n")
out.write(" /\n\n")
elif ndims == 2:
out.write("data %s / &\n" % var_name)
lastx = dims[0]-1
lasty = dims[1]-1
vv = v[:][:]
for x in range(dims[0]):
for val,y in [(vv[x][cy],cy) for cy in range(dims[1])]:
sval = float2string(val,numformat,(x != lastx or y != lasty))
if len(buf)+len(sval) > maxlen:
out.write(buf+"&\n")
buf = " "
buf += sval
if buf:
out.write(buf+" &\n")
out.write(" /\n\n")
else:
#
# For 3d real arrays we construct separate data statements for short
# array sections (one row per statement) to avoid continuation line
# limits (gFortran is unlimited, but iFort has a 511 line limit).
#
lastz = dims[2]-1
vv = v[:][:][:]
for x in range(dims[0]):
for y in range(dims[1]):
out.write("data %s(:,%d,%d) / &\n" % (var_name, y+1, x+1))
for val,z in [(vv[x][y][cz],cz) for cz in range(dims[2])]:
sval = float2string(val,numformat,(z != lastz))
if len(buf)+len(sval) > maxlen:
out.write(buf+"&\n")
buf = " "
buf += sval
if buf:
out.write(buf+" &\n")
buf = " "
out.write(" /\n")
out.write("\n")
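# For orientation, a 1-d variable such as p_ref with the (hypothetical) values
# 0, 10, 20 would be emitted by write_variable_real roughly as:
#
#     real (r8), dimension(nz) :: p_ref
#     data p_ref / &
#       0._r8, 10._r8, 20._r8 &
#      /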
def write_variable_int(var_name, dims, v):
ndims = len(dims)
# list dimensions in reverse order (nz,ny,nx)
if ndims == 1:
fortran_dims = "(%s)" % v.dimensions[0]
elif ndims == 2:
fortran_dims = "(%s,%s)" % v.dimensions[::-1]
elif ndims == 3:
fortran_dims = "(%s,%s,%s)" % v.dimensions[::-1]
out.write("integer, dimension%s :: %s\n" % (fortran_dims, var_name))
out.write("data %s / &\n" % var_name)
buf = " "
maxlen = 78
nan = "999"
if ndims == 1:
lastx = dims[0]-1
#
# The following construct (and variations below) transfer the
# netcdf variable into a memory-resident buffer all at once.
# Anything else is not advised.
#
vv = v[:]
for val, x in [(vv[cx],cx) for cx in range(dims[0])]:
if math.isnan(val):
sval = nan
else:
sval = "%d" % val
if x != lastx:
sval += ", "
if len(buf)+len(sval) > maxlen:
out.write(buf+"&\n")
buf = " "
buf += sval
elif ndims == 2:
lastx = dims[0]-1
lasty = dims[1]-1
vv = v[:][:]
for x in range(dims[0]):
for val,y in [(vv[x][cy],cy) for cy in range(dims[1])]:
if math.isnan(val):
sval = nan
else:
sval = "%d" % val
if x != lastx or y != lasty:
sval += ", "
if len(buf)+len(sval) > maxlen:
out.write(buf+"&\n")
buf = " "
buf += sval
else:
lastx = dims[0]-1
lasty = dims[1]-1
lastz = dims[2]-1
vv = v[:][:][:]
for x in range(dims[0]):
for y in range(dims[1]):
for val,z in [(vv[x][y][cz],cz) for cz in range(dims[2])]:
if math.isnan(val):
sval = nan
else:
sval = "%d" % val
if x != lastx or y != lasty or z != lastz:
sval += ", "
if len(buf)+len(sval) > maxlen:
out.write(buf+"&\n")
buf = " "
buf += sval
if buf:
out.write(buf+" &\n")
out.write(" /\n\n")
rootgrp = Dataset('gsw_data_v3_0.nc', 'r')
v = rootgrp.variables
d = rootgrp.dimensions
nx = len(d['nx'])
ny = len(d['ny'])
nz = len(d['nz'])
version_date = rootgrp.version_date
version_number = rootgrp.version_number
vars_real = [["p_ref", "", [nz]], ["lats_ref", "", [ny]],
["longs_ref", "", [nx]], ["saar_ref", "SAAR_ref", [nx,ny,nz]],
["delta_sa_ref", "deltaSA_ref", [nx,ny,nz]]]
vars_int = [["ndepth_ref", "", [nx,ny]]]
try:
fd = os.open("gsw_mod_saar_data.f90", os.O_CREAT|os.O_EXCL|os.O_RDWR, 0644)
except OSError:
print str(sys.exc_info()[1])
print "Will not overwrite gsw_mod_saar_data.f90. Exiting."
sys.exit(1)
out = os.fdopen(fd, "w")
out.write("!\n!** $Id$\n!** Extracted from gsw_data_v3_0.nc\n!\n")
out.write("""
!==========================================================================
module gsw_mod_saar_data
!==========================================================================
use gsw_mod_kinds
implicit none
integer, dimension(4) :: deli = (/0,1,1,0/), delj = (/0,0,1,1/)
integer, parameter :: npan = 6
real (r8), dimension(npan) :: longs_pan, lats_pan
data longs_pan /260.00_r8, 272.59_r8, 276.50_r8, 278.65_r8, 280.73_r8, 292.0_r8/
data lats_pan / 19.55_r8, 13.97_r8, 9.60_r8, 8.10_r8, 9.33_r8, 3.4_r8/
""")
out.write("integer, parameter :: nx = %d\n"% nx)
out.write("integer, parameter :: ny = %d\n"% ny)
out.write("integer, parameter :: nz = %d\n"% nz)
out.write("\n")
out.write("character(*), parameter :: gsw_version_date = \"%s\"\n" % version_date)
out.write("character(*), parameter :: gsw_version_number = \"%s\"\n\n" % version_number)
for var_label, var_name, dims in [var for var in vars_real]:
if not var_name:
var_name = var_label
write_variable_real(var_label, dims, v[var_name])
for var_label, var_name, dims in [var for var in vars_int]:
if not var_name:
var_name = var_label
write_variable_int(var_label, dims, v[var_name])
out.write("""
contains
subroutine gsw_get_version (version_date, version_number)
implicit none
character(*), intent(out) :: version_date, version_number
version_date = gsw_version_date
version_number = gsw_version_number
end subroutine gsw_get_version
end module gsw_mod_saar_data
!--------------------------------------------------------------------------
""")
out.close()
sys.exit(0)
|
brchiu/tensorflow
|
refs/heads/master
|
tensorflow/contrib/learn/python/learn/learn_io/numpy_io.py
|
28
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Methods to allow dict of numpy arrays (deprecated).
This module and all its submodules are deprecated. See
[contrib/learn/README.md](https://www.tensorflow.org/code/tensorflow/contrib/learn/README.md)
for migration instructions.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.estimator.inputs.numpy_io import numpy_input_fn as core_numpy_input_fn
from tensorflow.python.util.deprecation import deprecated
@deprecated(None, 'Use tf.estimator.inputs.numpy_input_fn.')
def numpy_input_fn(x,
y=None,
batch_size=128,
num_epochs=1,
shuffle=True,
queue_capacity=1000,
num_threads=1):
"""This input_fn diffs from the core version with default `shuffle`."""
return core_numpy_input_fn(x=x,
y=y,
batch_size=batch_size,
shuffle=shuffle,
num_epochs=num_epochs,
queue_capacity=queue_capacity,
num_threads=num_threads)
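# Hedged usage sketch (not part of this module; mirrors the numpy_input_fn
# signature defined above):
#
#     import numpy as np
#     x = {'age': np.arange(4)}          # dict of feature name -> ndarray
#     y = np.arange(4)
#     input_fn = numpy_input_fn(x, y, batch_size=2, num_epochs=1, shuffle=False)
#     # input_fn is then passed to an Estimator, e.g. estimator.train(input_fn=input_fn)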
|
jgoclawski/django
|
refs/heads/master
|
tests/validators/tests.py
|
163
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import io
import os
import re
import types
from datetime import datetime, timedelta
from unittest import TestCase
from django.core.exceptions import ValidationError
from django.core.validators import (
BaseValidator, EmailValidator, MaxLengthValidator, MaxValueValidator,
MinLengthValidator, MinValueValidator, RegexValidator, URLValidator,
int_list_validator, validate_comma_separated_integer_list, validate_email,
validate_integer, validate_ipv4_address, validate_ipv6_address,
validate_ipv46_address, validate_slug, validate_unicode_slug,
)
from django.test import SimpleTestCase
from django.test.utils import str_prefix
from django.utils._os import upath
NOW = datetime.now()
EXTENDED_SCHEMES = ['http', 'https', 'ftp', 'ftps', 'git', 'file']
TEST_DATA = [
# (validator, value, expected),
(validate_integer, '42', None),
(validate_integer, '-42', None),
(validate_integer, -42, None),
(validate_integer, -42.5, ValidationError),
(validate_integer, None, ValidationError),
(validate_integer, 'a', ValidationError),
(validate_integer, '\n42', ValidationError),
(validate_integer, '42\n', ValidationError),
(validate_email, 'email@here.com', None),
(validate_email, 'weirder-email@here.and.there.com', None),
(validate_email, 'email@[127.0.0.1]', None),
(validate_email, 'email@[2001:dB8::1]', None),
(validate_email, 'email@[2001:dB8:0:0:0:0:0:1]', None),
(validate_email, 'email@[::fffF:127.0.0.1]', None),
(validate_email, 'example@valid-----hyphens.com', None),
(validate_email, 'example@valid-with-hyphens.com', None),
(validate_email, 'test@domain.with.idn.tld.उदाहरण.परीक्षा', None),
(validate_email, 'email@localhost', None),
(EmailValidator(whitelist=['localdomain']), 'email@localdomain', None),
(validate_email, '"test@test"@example.com', None),
(validate_email, 'example@atm.%s' % ('a' * 63), None),
(validate_email, 'example@%s.atm' % ('a' * 63), None),
(validate_email, 'example@%s.%s.atm' % ('a' * 63, 'b' * 10), None),
(validate_email, 'example@atm.%s' % ('a' * 64), ValidationError),
(validate_email, 'example@%s.atm.%s' % ('b' * 64, 'a' * 63), ValidationError),
(validate_email, None, ValidationError),
(validate_email, '', ValidationError),
(validate_email, 'abc', ValidationError),
(validate_email, 'abc@', ValidationError),
(validate_email, 'abc@bar', ValidationError),
(validate_email, 'a @x.cz', ValidationError),
(validate_email, 'abc@.com', ValidationError),
(validate_email, 'something@@somewhere.com', ValidationError),
(validate_email, 'email@127.0.0.1', ValidationError),
(validate_email, 'email@[127.0.0.256]', ValidationError),
(validate_email, 'email@[2001:db8::12345]', ValidationError),
(validate_email, 'email@[2001:db8:0:0:0:0:1]', ValidationError),
(validate_email, 'email@[::ffff:127.0.0.256]', ValidationError),
(validate_email, 'example@invalid-.com', ValidationError),
(validate_email, 'example@-invalid.com', ValidationError),
(validate_email, 'example@invalid.com-', ValidationError),
(validate_email, 'example@inv-.alid-.com', ValidationError),
(validate_email, 'example@inv-.-alid.com', ValidationError),
(validate_email, 'test@example.com\n\n<script src="x.js">', ValidationError),
# Quoted-string format (CR not allowed)
(validate_email, '"\\\011"@here.com', None),
(validate_email, '"\\\012"@here.com', ValidationError),
(validate_email, 'trailingdot@shouldfail.com.', ValidationError),
# Max length of domain name labels is 63 characters per RFC 1034.
(validate_email, 'a@%s.us' % ('a' * 63), None),
(validate_email, 'a@%s.us' % ('a' * 64), ValidationError),
# Trailing newlines in username or domain not allowed
(validate_email, 'a@b.com\n', ValidationError),
(validate_email, 'a\n@b.com', ValidationError),
(validate_email, '"test@test"\n@example.com', ValidationError),
(validate_email, 'a@[127.0.0.1]\n', ValidationError),
(validate_slug, 'slug-ok', None),
(validate_slug, 'longer-slug-still-ok', None),
(validate_slug, '--------', None),
(validate_slug, 'nohyphensoranything', None),
(validate_slug, 'a', None),
(validate_slug, '1', None),
(validate_slug, 'a1', None),
(validate_slug, '', ValidationError),
(validate_slug, ' text ', ValidationError),
(validate_slug, ' ', ValidationError),
(validate_slug, 'some@mail.com', ValidationError),
(validate_slug, '你好', ValidationError),
(validate_slug, '你 好', ValidationError),
(validate_slug, '\n', ValidationError),
(validate_slug, 'trailing-newline\n', ValidationError),
(validate_unicode_slug, 'slug-ok', None),
(validate_unicode_slug, 'longer-slug-still-ok', None),
(validate_unicode_slug, '--------', None),
(validate_unicode_slug, 'nohyphensoranything', None),
(validate_unicode_slug, 'a', None),
(validate_unicode_slug, '1', None),
(validate_unicode_slug, 'a1', None),
(validate_unicode_slug, '你好', None),
(validate_unicode_slug, '', ValidationError),
(validate_unicode_slug, ' text ', ValidationError),
(validate_unicode_slug, ' ', ValidationError),
(validate_unicode_slug, 'some@mail.com', ValidationError),
(validate_unicode_slug, '\n', ValidationError),
(validate_unicode_slug, '你 好', ValidationError),
(validate_unicode_slug, 'trailing-newline\n', ValidationError),
(validate_ipv4_address, '1.1.1.1', None),
(validate_ipv4_address, '255.0.0.0', None),
(validate_ipv4_address, '0.0.0.0', None),
(validate_ipv4_address, '256.1.1.1', ValidationError),
(validate_ipv4_address, '25.1.1.', ValidationError),
(validate_ipv4_address, '25,1,1,1', ValidationError),
(validate_ipv4_address, '25.1 .1.1', ValidationError),
(validate_ipv4_address, '1.1.1.1\n', ValidationError),
# validate_ipv6_address uses django.utils.ipv6, which
# is tested in much greater detail in its own testcase
(validate_ipv6_address, 'fe80::1', None),
(validate_ipv6_address, '::1', None),
(validate_ipv6_address, '1:2:3:4:5:6:7:8', None),
(validate_ipv6_address, '1:2', ValidationError),
(validate_ipv6_address, '::zzz', ValidationError),
(validate_ipv6_address, '12345::', ValidationError),
(validate_ipv46_address, '1.1.1.1', None),
(validate_ipv46_address, '255.0.0.0', None),
(validate_ipv46_address, '0.0.0.0', None),
(validate_ipv46_address, 'fe80::1', None),
(validate_ipv46_address, '::1', None),
(validate_ipv46_address, '1:2:3:4:5:6:7:8', None),
(validate_ipv46_address, '256.1.1.1', ValidationError),
(validate_ipv46_address, '25.1.1.', ValidationError),
(validate_ipv46_address, '25,1,1,1', ValidationError),
(validate_ipv46_address, '25.1 .1.1', ValidationError),
(validate_ipv46_address, '1:2', ValidationError),
(validate_ipv46_address, '::zzz', ValidationError),
(validate_ipv46_address, '12345::', ValidationError),
(validate_comma_separated_integer_list, '1', None),
(validate_comma_separated_integer_list, '12', None),
(validate_comma_separated_integer_list, '1,2', None),
(validate_comma_separated_integer_list, '1,2,3', None),
(validate_comma_separated_integer_list, '10,32', None),
(validate_comma_separated_integer_list, '', ValidationError),
(validate_comma_separated_integer_list, 'a', ValidationError),
(validate_comma_separated_integer_list, 'a,b,c', ValidationError),
(validate_comma_separated_integer_list, '1, 2, 3', ValidationError),
(validate_comma_separated_integer_list, ',', ValidationError),
(validate_comma_separated_integer_list, '1,2,3,', ValidationError),
(validate_comma_separated_integer_list, '1,2,', ValidationError),
(validate_comma_separated_integer_list, ',1', ValidationError),
(validate_comma_separated_integer_list, '1,,2', ValidationError),
(int_list_validator(sep='.'), '1.2.3', None),
(int_list_validator(sep='.'), '1,2,3', ValidationError),
(int_list_validator(sep='.'), '1.2.3\n', ValidationError),
(MaxValueValidator(10), 10, None),
(MaxValueValidator(10), -10, None),
(MaxValueValidator(10), 0, None),
(MaxValueValidator(NOW), NOW, None),
(MaxValueValidator(NOW), NOW - timedelta(days=1), None),
(MaxValueValidator(0), 1, ValidationError),
(MaxValueValidator(NOW), NOW + timedelta(days=1), ValidationError),
(MinValueValidator(-10), -10, None),
(MinValueValidator(-10), 10, None),
(MinValueValidator(-10), 0, None),
(MinValueValidator(NOW), NOW, None),
(MinValueValidator(NOW), NOW + timedelta(days=1), None),
(MinValueValidator(0), -1, ValidationError),
(MinValueValidator(NOW), NOW - timedelta(days=1), ValidationError),
(MaxLengthValidator(10), '', None),
(MaxLengthValidator(10), 10 * 'x', None),
(MaxLengthValidator(10), 15 * 'x', ValidationError),
(MinLengthValidator(10), 15 * 'x', None),
(MinLengthValidator(10), 10 * 'x', None),
(MinLengthValidator(10), '', ValidationError),
(URLValidator(EXTENDED_SCHEMES), 'file://localhost/path', None),
(URLValidator(EXTENDED_SCHEMES), 'git://example.com/', None),
(URLValidator(EXTENDED_SCHEMES), 'git://-invalid.com', ValidationError),
# Trailing newlines not accepted
(URLValidator(), 'http://www.djangoproject.com/\n', ValidationError),
(URLValidator(), 'http://[::ffff:192.9.5.5]\n', ValidationError),
# Trailing junk does not take forever to reject
(URLValidator(), 'http://www.asdasdasdasdsadfm.com.br ', ValidationError),
(URLValidator(), 'http://www.asdasdasdasdsadfm.com.br z', ValidationError),
(BaseValidator(True), True, None),
(BaseValidator(True), False, ValidationError),
(RegexValidator(), '', None),
(RegexValidator(), 'x1x2', None),
(RegexValidator('[0-9]+'), 'xxxxxx', ValidationError),
(RegexValidator('[0-9]+'), '1234', None),
(RegexValidator(re.compile('[0-9]+')), '1234', None),
(RegexValidator('.*'), '', None),
(RegexValidator(re.compile('.*')), '', None),
(RegexValidator('.*'), 'xxxxx', None),
(RegexValidator('x'), 'y', ValidationError),
(RegexValidator(re.compile('x')), 'y', ValidationError),
(RegexValidator('x', inverse_match=True), 'y', None),
(RegexValidator(re.compile('x'), inverse_match=True), 'y', None),
(RegexValidator('x', inverse_match=True), 'x', ValidationError),
(RegexValidator(re.compile('x'), inverse_match=True), 'x', ValidationError),
(RegexValidator('x', flags=re.IGNORECASE), 'y', ValidationError),
(RegexValidator('a'), 'A', ValidationError),
(RegexValidator('a', flags=re.IGNORECASE), 'A', None),
]
def create_path(filename):
return os.path.abspath(os.path.join(os.path.dirname(upath(__file__)), filename))
# Add valid and invalid URL tests.
# This only tests the validator without extended schemes.
with io.open(create_path('valid_urls.txt'), encoding='utf8') as f:
for url in f:
TEST_DATA.append((URLValidator(), url.strip(), None))
with io.open(create_path('invalid_urls.txt'), encoding='utf8') as f:
for url in f:
TEST_DATA.append((URLValidator(), url.strip(), ValidationError))
def create_simple_test_method(validator, expected, value, num):
if expected is not None and issubclass(expected, Exception):
test_mask = 'test_%s_raises_error_%d'
def test_func(self):
# assertRaises not used, so as to be able to produce an error message
# containing the tested value
try:
validator(value)
except expected:
pass
else:
self.fail("%s not raised when validating '%s'" % (
expected.__name__, value))
else:
test_mask = 'test_%s_%d'
def test_func(self):
try:
self.assertEqual(expected, validator(value))
except ValidationError as e:
self.fail("Validation of '%s' failed. Error message was: %s" % (
value, str(e)))
if isinstance(validator, types.FunctionType):
val_name = validator.__name__
else:
val_name = validator.__class__.__name__
test_name = test_mask % (val_name, num)
return test_name, test_func
# Dynamically assemble a test class with the contents of TEST_DATA
class TestSimpleValidators(SimpleTestCase):
def test_single_message(self):
v = ValidationError('Not Valid')
self.assertEqual(str(v), str_prefix("[%(_)s'Not Valid']"))
self.assertEqual(repr(v), str_prefix("ValidationError([%(_)s'Not Valid'])"))
def test_message_list(self):
v = ValidationError(['First Problem', 'Second Problem'])
self.assertEqual(str(v), str_prefix("[%(_)s'First Problem', %(_)s'Second Problem']"))
self.assertEqual(repr(v), str_prefix("ValidationError([%(_)s'First Problem', %(_)s'Second Problem'])"))
def test_message_dict(self):
v = ValidationError({'first': ['First Problem']})
self.assertEqual(str(v), str_prefix("{%(_)s'first': [%(_)s'First Problem']}"))
self.assertEqual(repr(v), str_prefix("ValidationError({%(_)s'first': [%(_)s'First Problem']})"))
def test_regex_validator_flags(self):
try:
RegexValidator(re.compile('a'), flags=re.IGNORECASE)
except TypeError:
pass
else:
self.fail("TypeError not raised when flags and pre-compiled regex in RegexValidator")
def test_max_length_validator_message(self):
v = MaxLengthValidator(16, message='"%(value)s" has more than %(limit_value)d characters.')
with self.assertRaisesMessage(ValidationError, '"djangoproject.com" has more than 16 characters.'):
v('djangoproject.com')
test_counter = 0
for validator, value, expected in TEST_DATA:
name, method = create_simple_test_method(validator, expected, value, test_counter)
setattr(TestSimpleValidators, name, method)
test_counter += 1
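# The loop above materialises one method per TEST_DATA row on
# TestSimpleValidators, named e.g. test_validate_integer_0 or
# test_MaxValueValidator_raises_error_<n> per the test_mask patterns, so a
# failure names the exact validator/value pair.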
class TestValidatorEquality(TestCase):
"""
Tests that validators have valid equality operators (#21638)
"""
def test_regex_equality(self):
self.assertEqual(
RegexValidator(r'^(?:[a-z0-9\.\-]*)://'),
RegexValidator(r'^(?:[a-z0-9\.\-]*)://'),
)
self.assertNotEqual(
RegexValidator(r'^(?:[a-z0-9\.\-]*)://'),
RegexValidator(r'^(?:[0-9\.\-]*)://'),
)
self.assertEqual(
RegexValidator(r'^(?:[a-z0-9\.\-]*)://', "oh noes", "invalid"),
RegexValidator(r'^(?:[a-z0-9\.\-]*)://', "oh noes", "invalid"),
)
self.assertNotEqual(
RegexValidator(r'^(?:[a-z0-9\.\-]*)://', "oh", "invalid"),
RegexValidator(r'^(?:[a-z0-9\.\-]*)://', "oh noes", "invalid"),
)
self.assertNotEqual(
RegexValidator(r'^(?:[a-z0-9\.\-]*)://', "oh noes", "invalid"),
RegexValidator(r'^(?:[a-z0-9\.\-]*)://'),
)
self.assertNotEqual(
RegexValidator('', flags=re.IGNORECASE),
RegexValidator(''),
)
self.assertNotEqual(
RegexValidator(''),
RegexValidator('', inverse_match=True),
)
def test_regex_equality_nocache(self):
pattern = r'^(?:[a-z0-9\.\-]*)://'
left = RegexValidator(pattern)
re.purge()
right = RegexValidator(pattern)
self.assertEqual(
left,
right,
)
def test_regex_equality_blank(self):
self.assertEqual(
RegexValidator(),
RegexValidator(),
)
def test_email_equality(self):
self.assertEqual(
EmailValidator(),
EmailValidator(),
)
self.assertNotEqual(
EmailValidator(message="BAD EMAIL"),
EmailValidator(),
)
self.assertEqual(
EmailValidator(message="BAD EMAIL", code="bad"),
EmailValidator(message="BAD EMAIL", code="bad"),
)
def test_basic_equality(self):
self.assertEqual(
MaxValueValidator(44),
MaxValueValidator(44),
)
self.assertNotEqual(
MaxValueValidator(44),
MinValueValidator(44),
)
self.assertNotEqual(
MinValueValidator(45),
MinValueValidator(11),
)
|
iresprite/json-chunked-stream-client
|
refs/heads/master
|
test_singleQuoteBlock.py
|
1
|
from unittest import TestCase
import ByteStreamHandler
class TestSingleQuoteBlock(TestCase):
def setUp(self):
stack = [ByteStreamHandler.State()]
self.single_quote_block = ByteStreamHandler.SingleQuoteBlock(stack)
def test_handle(self):
self.assertIsInstance(self.single_quote_block.handle("\'", []), ByteStreamHandler.EmptyState)
self.assertIsInstance(self.single_quote_block.handle("\\", []), ByteStreamHandler.EscapedChar)
def test_closed_quote_block(self):
state = ByteStreamHandler.State()
        sq_state = ByteStreamHandler.SingleQuoteBlock([])
        stack = [state, sq_state]
self.assertEqual(self.single_quote_block.closed_quote_block(stack), state)
self.assertIsInstance(self.single_quote_block.closed_quote_block([]), ByteStreamHandler.EmptyState)
|
p4datasystems/CarnotKE
|
refs/heads/master
|
jyhton/lib-python/2.7/test/badsyntax_future9.py
|
202
|
"""This is a test"""
from __future__ import nested_scopes, braces
def f(x):
def g(y):
return x + y
return g
print f(2)(4)
|
robmagee/django-cms
|
refs/heads/develop
|
cms/south_migrations/0067_auto__add_field_aliaspluginmodel_alias_placeholder__chg_field_aliasplu.py
|
59
|
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'AliasPluginModel.alias_placeholder'
db.add_column(u'cms_aliaspluginmodel', 'alias_placeholder',
self.gf('django.db.models.fields.related.ForeignKey')(related_name='alias_placeholder', null=True, to=orm['cms.Placeholder']),
keep_default=False)
# Changing field 'AliasPluginModel.plugin'
db.alter_column(u'cms_aliaspluginmodel', 'plugin_id', self.gf('django.db.models.fields.related.ForeignKey')(null=True, to=orm['cms.CMSPlugin']))
def backwards(self, orm):
# Deleting field 'AliasPluginModel.alias_placeholder'
db.delete_column(u'cms_aliaspluginmodel', 'alias_placeholder_id')
# Changing field 'AliasPluginModel.plugin'
db.alter_column(u'cms_aliaspluginmodel', 'plugin_id', self.gf('django.db.models.fields.related.ForeignKey')(default=1, to=orm['cms.CMSPlugin']))
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'cms.aliaspluginmodel': {
'Meta': {'object_name': 'AliasPluginModel', '_ormbases': ['cms.CMSPlugin']},
'alias_placeholder': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'alias_placeholder'", 'null': 'True', 'to': "orm['cms.Placeholder']"}),
u'cmsplugin_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['cms.CMSPlugin']", 'unique': 'True', 'primary_key': 'True'}),
'plugin': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'alias_reference'", 'null': 'True', 'to': "orm['cms.CMSPlugin']"})
},
'cms.cmsplugin': {
'Meta': {'object_name': 'CMSPlugin'},
'changed_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'creation_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.CMSPlugin']", 'null': 'True', 'blank': 'True'}),
'placeholder': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Placeholder']", 'null': 'True'}),
'plugin_type': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
'position': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'})
},
'cms.globalpagepermission': {
'Meta': {'object_name': 'GlobalPagePermission'},
'can_add': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_change': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_change_advanced_settings': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'can_change_permissions': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'can_delete': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_move_page': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_publish': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_recover_page': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_view': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.Group']", 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'sites': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': u"orm['sites.Site']", 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
'cms.page': {
'Meta': {'ordering': "('tree_id', 'lft')", 'unique_together': "(('publisher_is_draft', 'application_namespace'), ('reverse_id', 'site', 'publisher_is_draft'))", 'object_name': 'Page'},
'application_namespace': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'application_urls': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '200', 'null': 'True', 'blank': 'True'}),
'changed_by': ('django.db.models.fields.CharField', [], {'max_length': '70'}),
'changed_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.CharField', [], {'max_length': '70'}),
'creation_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'in_navigation': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
'is_home': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'languages': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'limit_visibility_in_menu': ('django.db.models.fields.SmallIntegerField', [], {'default': 'None', 'null': 'True', 'db_index': 'True', 'blank': 'True'}),
'login_required': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'navigation_extenders': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '80', 'null': 'True', 'blank': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['cms.Page']"}),
'placeholders': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['cms.Placeholder']", 'symmetrical': 'False'}),
'publication_date': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'publication_end_date': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'publisher_is_draft': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
'publisher_public': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'publisher_draft'", 'unique': 'True', 'null': 'True', 'to': "orm['cms.Page']"}),
'reverse_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '40', 'null': 'True', 'blank': 'True'}),
'revision_id': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'djangocms_pages'", 'to': u"orm['sites.Site']"}),
'soft_root': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'template': ('django.db.models.fields.CharField', [], {'default': "'INHERIT'", 'max_length': '100'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'xframe_options': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'cms.pagepermission': {
'Meta': {'object_name': 'PagePermission'},
'can_add': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_change': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_change_advanced_settings': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'can_change_permissions': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'can_delete': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_move_page': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_publish': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_view': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'grant_on': ('django.db.models.fields.IntegerField', [], {'default': '5'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.Group']", 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'page': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Page']", 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
'cms.pageuser': {
'Meta': {'object_name': 'PageUser', '_ormbases': [u'auth.User']},
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'created_users'", 'to': u"orm['auth.User']"}),
u'user_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['auth.User']", 'unique': 'True', 'primary_key': 'True'})
},
'cms.pageusergroup': {
'Meta': {'object_name': 'PageUserGroup', '_ormbases': [u'auth.Group']},
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'created_usergroups'", 'to': u"orm['auth.User']"}),
u'group_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['auth.Group']", 'unique': 'True', 'primary_key': 'True'})
},
'cms.placeholder': {
'Meta': {'object_name': 'Placeholder'},
'default_width': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'slot': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'})
},
'cms.placeholderreference': {
'Meta': {'object_name': 'PlaceholderReference', '_ormbases': ['cms.CMSPlugin']},
u'cmsplugin_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['cms.CMSPlugin']", 'unique': 'True', 'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'placeholder_ref': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Placeholder']", 'null': 'True'})
},
'cms.staticplaceholder': {
'Meta': {'unique_together': "(('code', 'site'),)", 'object_name': 'StaticPlaceholder'},
'code': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'creation_method': ('django.db.models.fields.CharField', [], {'default': "'code'", 'max_length': '20', 'blank': 'True'}),
'dirty': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'draft': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'static_draft'", 'null': 'True', 'to': "orm['cms.Placeholder']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255', 'blank': 'True'}),
'public': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'static_public'", 'null': 'True', 'to': "orm['cms.Placeholder']"}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['sites.Site']", 'null': 'True', 'blank': 'True'})
},
'cms.title': {
'Meta': {'unique_together': "(('language', 'page'),)", 'object_name': 'Title'},
'creation_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'has_url_overwrite': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'}),
'menu_title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'meta_description': ('django.db.models.fields.TextField', [], {'max_length': '155', 'null': 'True', 'blank': 'True'}),
'page': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'title_set'", 'to': "orm['cms.Page']"}),
'page_title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'path': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'published': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'publisher_is_draft': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
'publisher_public': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'publisher_draft'", 'unique': 'True', 'null': 'True', 'to': "orm['cms.Title']"}),
'publisher_state': ('django.db.models.fields.SmallIntegerField', [], {'default': '0', 'db_index': 'True'}),
'redirect': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'cms.usersettings': {
'Meta': {'object_name': 'UserSettings'},
'clipboard': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Placeholder']", 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'djangocms_usersettings'", 'unique': 'True', 'to': u"orm['auth.User']"})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'sites.site': {
'Meta': {'ordering': "('domain',)", 'object_name': 'Site', 'db_table': "'django_site'"},
'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
}
}
complete_apps = ['cms']
|
free-electrons/custom_tests_tool
|
refs/heads/master
|
tests/test_crawlers.py
|
1
|
import requests
import requests_mock
import time
from nose.tools import assert_equal, assert_raises
from crawlers import FreeElectronsCrawler, KernelCICrawler
from crawlers import RemoteAccessError, RemoteEmptyError
class TestKernelCICrawler(object):
BASE_URL = 'https://storage.kernelci.org/'
RELEASE_URL = 'https://api.kernelci.org/build?limit=1&job=%s&field=kernel&field=created_on&sort=created_on&git_branch=%s'
DEFAULT_API_TOKEN = 'foobar42'
DEFAULT_ARCH = 'arm'
DEFAULT_BRANCH = 'master'
DEFAULT_DEFCONFIG = 'test_defconfig'
DEFAULT_DTB = 'test'
DEFAULT_IMAGE = 'zImage'
DEFAULT_MODULES = 'modules.tar.xz'
DEFAULT_RELEASE = 'version-deadcoffee-4.2'
DEFAULT_TREE = 'mainline'
@requests_mock.mock()
def test_check_release(self, mock):
url = self.RELEASE_URL % (self.DEFAULT_TREE, self.DEFAULT_BRANCH)
response = {
'result': [{'kernel': self.DEFAULT_RELEASE,
'created_on': {'$date': time.time()*1000}}],
}
cfg = {
'api_token': self.DEFAULT_API_TOKEN,
}
headers = {
'Authorization': self.DEFAULT_API_TOKEN,
}
mock.get(url, json=response, request_headers=headers)
crawler = KernelCICrawler(cfg)
assert_equal(self.DEFAULT_RELEASE,
crawler._get_latest_release(self.DEFAULT_TREE,
self.DEFAULT_BRANCH))
@requests_mock.mock()
def test_check_release_error(self, mock):
url = self.RELEASE_URL % (self.DEFAULT_TREE, self.DEFAULT_BRANCH)
cfg = {
'api_token': self.DEFAULT_API_TOKEN,
}
mock.get(url, status_code=404)
crawler = KernelCICrawler(cfg)
assert_raises(RemoteEmptyError, crawler._get_latest_release,
self.DEFAULT_TREE, self.DEFAULT_BRANCH)
@requests_mock.mock()
def test_check_release_missing_result(self, mock):
url = self.RELEASE_URL % (self.DEFAULT_TREE, self.DEFAULT_BRANCH)
response = {
}
cfg = {
'api_token': self.DEFAULT_API_TOKEN,
}
headers = {
'Authorization': self.DEFAULT_API_TOKEN,
}
mock.get(url, json=response, request_headers=headers)
crawler = KernelCICrawler(cfg)
assert_raises(RemoteEmptyError, crawler._get_latest_release,
self.DEFAULT_TREE, self.DEFAULT_BRANCH)
@requests_mock.mock()
def test_check_release_empty_result(self, mock):
url = self.RELEASE_URL % (self.DEFAULT_TREE, self.DEFAULT_BRANCH)
response = {
'result': list(),
}
cfg = {
'api_token': self.DEFAULT_API_TOKEN,
}
headers = {
'Authorization': self.DEFAULT_API_TOKEN,
}
mock.get(url, json=response, request_headers=headers)
crawler = KernelCICrawler(cfg)
assert_raises(RemoteEmptyError, crawler._get_latest_release,
self.DEFAULT_TREE, self.DEFAULT_BRANCH)
@requests_mock.mock()
def test_check_release_missing_result_kernel(self, mock):
url = self.RELEASE_URL % (self.DEFAULT_TREE, self.DEFAULT_BRANCH)
response = {
'result': [{'test': 'test'}],
}
cfg = {
'api_token': self.DEFAULT_API_TOKEN,
}
headers = {
'Authorization': self.DEFAULT_API_TOKEN,
}
mock.get(url, json=response, request_headers=headers)
crawler = KernelCICrawler(cfg)
assert_raises(RemoteEmptyError, crawler._get_latest_release,
self.DEFAULT_TREE, self.DEFAULT_BRANCH)
@requests_mock.mock()
def test_check_release_url(self, mock):
url = self.RELEASE_URL % (self.DEFAULT_TREE, self.DEFAULT_BRANCH)
base_url = '%s/%s/%s/%s/%s/%s' % (self.BASE_URL,
self.DEFAULT_TREE,
self.DEFAULT_BRANCH,
self.DEFAULT_RELEASE,
self.DEFAULT_ARCH,
self.DEFAULT_DEFCONFIG)
response = {
'result': [{'kernel': self.DEFAULT_RELEASE,
'created_on': {'$date': time.time()*1000}}],
}
cfg = {
'api_token': self.DEFAULT_API_TOKEN,
}
headers = {
'Authorization': self.DEFAULT_API_TOKEN,
}
mock.get(url, json=response, request_headers=headers)
crawler = KernelCICrawler(cfg)
assert_equal(base_url, crawler._get_base_url(self.DEFAULT_TREE,
self.DEFAULT_BRANCH,
self.DEFAULT_ARCH,
self.DEFAULT_DEFCONFIG))
@requests_mock.mock()
def test_check_artifacts_all(self, mock):
release_url = self.RELEASE_URL % (self.DEFAULT_TREE,
self.DEFAULT_BRANCH)
config_url = '%s/%s/%s/%s/%s/%s' % (self.BASE_URL,
self.DEFAULT_TREE,
self.DEFAULT_BRANCH,
self.DEFAULT_RELEASE,
self.DEFAULT_ARCH,
self.DEFAULT_DEFCONFIG)
kernel_url = '%s/%s' % (config_url, self.DEFAULT_IMAGE)
modules_url = '%s/%s' % (config_url, self.DEFAULT_MODULES)
dtb_url = '%s/dtbs/%s.dtb' % (config_url, self.DEFAULT_DTB)
release_response = {
'result': [{'kernel': self.DEFAULT_RELEASE,
'created_on': {'$date': time.time()*1000}}],
}
release_headers = {
'Authorization': self.DEFAULT_API_TOKEN,
}
board = {
'arch': self.DEFAULT_ARCH,
'dt': self.DEFAULT_DTB,
'name': 'test'
}
cfg = {
'api_token': self.DEFAULT_API_TOKEN,
}
mock.get(release_url, json=release_response,
request_headers=release_headers)
mock.get(config_url)
mock.get(kernel_url)
mock.get(modules_url)
mock.get(dtb_url)
crawler = KernelCICrawler(cfg)
items = crawler.crawl(board, self.DEFAULT_TREE,
self.DEFAULT_BRANCH,
self.DEFAULT_DEFCONFIG)
assert_equal(kernel_url, items['kernel'])
assert_equal(dtb_url, items['dtb'])
assert_equal(modules_url, items['modules'])
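    # As the assertions above show, crawl() resolves a dict of artifact URLs
    # ('kernel', 'dtb', 'modules'); the tests below check that any missing
    # artifact (config dir, kernel, modules, dtb) surfaces as RemoteEmptyError.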
@requests_mock.mock()
def test_check_artifacts_all_missing_config(self, mock):
release_url = self.RELEASE_URL % (
self.DEFAULT_TREE, self.DEFAULT_BRANCH)
config_url = '%s/%s/%s/%s/%s/%s' % (self.BASE_URL,
self.DEFAULT_TREE,
self.DEFAULT_BRANCH,
self.DEFAULT_RELEASE,
self.DEFAULT_ARCH,
self.DEFAULT_DEFCONFIG)
release_response = {
'result': [{'kernel': self.DEFAULT_RELEASE,
'created_on': {'$date': time.time()*1000}}],
}
release_headers = {
'Authorization': self.DEFAULT_API_TOKEN,
}
board = {
'arch': self.DEFAULT_ARCH,
'dt': self.DEFAULT_DTB,
'name': 'test'
}
cfg = {
'api_token': self.DEFAULT_API_TOKEN,
}
mock.get(release_url, json=release_response,
request_headers=release_headers)
mock.get(config_url, status_code=404)
crawler = KernelCICrawler(cfg)
assert_raises(RemoteEmptyError, crawler.crawl,
board, self.DEFAULT_TREE, self.DEFAULT_BRANCH,
self.DEFAULT_DEFCONFIG)
@requests_mock.mock()
def test_check_artifacts_all_missing_kernel(self, mock):
release_url = self.RELEASE_URL % (
self.DEFAULT_TREE, self.DEFAULT_BRANCH)
config_url = '%s/%s/%s/%s/%s/%s' % (self.BASE_URL,
self.DEFAULT_TREE,
self.DEFAULT_BRANCH,
self.DEFAULT_RELEASE,
self.DEFAULT_ARCH,
self.DEFAULT_DEFCONFIG)
kernel_url = '%s/%s' % (config_url, self.DEFAULT_IMAGE)
release_response = {
'result': [{'kernel': self.DEFAULT_RELEASE,
'created_on': {'$date': time.time()*1000}}],
}
release_headers = {
'Authorization': self.DEFAULT_API_TOKEN,
}
board = {
'arch': self.DEFAULT_ARCH,
'dt': self.DEFAULT_DTB,
'name': 'test'
}
cfg = {
'api_token': self.DEFAULT_API_TOKEN,
}
mock.get(release_url, json=release_response,
request_headers=release_headers)
mock.get(config_url)
mock.get(kernel_url, status_code=404)
crawler = KernelCICrawler(cfg)
assert_raises(RemoteEmptyError, crawler.crawl,
board, self.DEFAULT_TREE, self.DEFAULT_BRANCH,
self.DEFAULT_DEFCONFIG)
@requests_mock.mock()
def test_check_artifacts_all_missing_modules(self, mock):
release_url = self.RELEASE_URL % (
self.DEFAULT_TREE, self.DEFAULT_BRANCH)
config_url = '%s/%s/%s/%s/%s/%s' % (self.BASE_URL,
self.DEFAULT_TREE,
self.DEFAULT_BRANCH,
self.DEFAULT_RELEASE,
self.DEFAULT_ARCH,
self.DEFAULT_DEFCONFIG)
kernel_url = '%s/%s' % (config_url, self.DEFAULT_IMAGE)
modules_url = '%s/%s' % (config_url, self.DEFAULT_MODULES)
release_response = {
'result': [{'kernel': self.DEFAULT_RELEASE,
'created_on': {'$date': time.time()*1000}}],
}
release_headers = {
'Authorization': self.DEFAULT_API_TOKEN,
}
board = {
'arch': self.DEFAULT_ARCH,
'dt': self.DEFAULT_DTB,
'name': 'test'
}
cfg = {
'api_token': self.DEFAULT_API_TOKEN,
}
mock.get(release_url, json=release_response,
request_headers=release_headers)
mock.get(config_url)
mock.get(kernel_url)
mock.get(modules_url, status_code=404)
crawler = KernelCICrawler(cfg)
assert_raises(RemoteEmptyError, crawler.crawl,
board, self.DEFAULT_TREE, self.DEFAULT_BRANCH,
self.DEFAULT_DEFCONFIG)
@requests_mock.mock()
def test_check_artifacts_all_missing_dtb(self, mock):
release_url = self.RELEASE_URL % (
self.DEFAULT_TREE, self.DEFAULT_BRANCH)
config_url = '%s/%s/%s/%s/%s/%s' % (self.BASE_URL,
self.DEFAULT_TREE,
self.DEFAULT_BRANCH,
self.DEFAULT_RELEASE,
self.DEFAULT_ARCH,
self.DEFAULT_DEFCONFIG)
kernel_url = '%s/%s' % (config_url, self.DEFAULT_IMAGE)
modules_url = '%s/%s' % (config_url, self.DEFAULT_MODULES)
dtb_url = '%s/dtbs/%s.dtb' % (config_url, self.DEFAULT_DTB)
release_response = {
'result': [{'kernel': self.DEFAULT_RELEASE,
'created_on': {'$date': time.time()*1000}}],
}
release_headers = {
'Authorization': self.DEFAULT_API_TOKEN,
}
board = {
'arch': self.DEFAULT_ARCH,
'dt': self.DEFAULT_DTB,
'name': 'test'
}
cfg = {
'api_token': self.DEFAULT_API_TOKEN,
}
mock.get(release_url, json=release_response,
request_headers=release_headers)
mock.get(config_url)
mock.get(kernel_url)
mock.get(modules_url)
mock.get(dtb_url, status_code=404)
crawler = KernelCICrawler(cfg)
assert_raises(RemoteEmptyError, crawler.crawl,
board, self.DEFAULT_TREE, self.DEFAULT_BRANCH,
self.DEFAULT_DEFCONFIG)
class TestFECrawler(object):
BASE_URL = 'http://lava.bootlin.com/downloads/builds/'
DEFAULT_API_TOKEN = 'foobar42'
DEFAULT_ARCH = 'arm'
DEFAULT_BRANCH = 'master'
DEFAULT_DEFCONFIG = 'test_defconfig'
DEFAULT_DTB = 'test'
DEFAULT_IMAGE = 'zImage'
DEFAULT_MODULES = 'modules.tar.xz'
DEFAULT_RELEASE = 'version-deadcoffee-4.2'
DEFAULT_TREE = 'mainline'
@requests_mock.mock()
def test_check_release(self, mock):
url = '%s/%s/%s/latest' % (self.BASE_URL,
self.DEFAULT_TREE,
self.DEFAULT_BRANCH)
cfg = {
'api_token': self.DEFAULT_API_TOKEN,
}
mock.get(url, text=self.DEFAULT_RELEASE, headers={'last-modified':
time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime())})
crawler = FreeElectronsCrawler(cfg)
assert_equal(self.DEFAULT_RELEASE,
crawler._get_latest_release(self.DEFAULT_TREE,
self.DEFAULT_BRANCH))
@requests_mock.mock()
def test_check_release_error(self, mock):
url = '%s/%s/%s/latest' % (self.BASE_URL,
self.DEFAULT_TREE,
self.DEFAULT_BRANCH)
cfg = {
'api_token': self.DEFAULT_API_TOKEN,
}
mock.get(url, status_code=404)
crawler = FreeElectronsCrawler(cfg)
assert_raises(RemoteEmptyError, crawler._get_latest_release,
self.DEFAULT_TREE, self.DEFAULT_BRANCH)
@requests_mock.mock()
def test_check_release_url(self, mock):
url = '%s/%s/%s/latest' % (self.BASE_URL,
self.DEFAULT_TREE,
self.DEFAULT_BRANCH)
base_url = '%s/%s/%s/%s/%s/%s' % (self.BASE_URL,
self.DEFAULT_TREE,
self.DEFAULT_BRANCH,
self.DEFAULT_RELEASE,
self.DEFAULT_ARCH,
self.DEFAULT_DEFCONFIG)
cfg = {
'api_token': self.DEFAULT_API_TOKEN,
}
mock.get(url, text=self.DEFAULT_RELEASE, headers={'last-modified':
time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime())})
crawler = FreeElectronsCrawler(cfg)
assert_equal(base_url, crawler._get_base_url(self.DEFAULT_TREE,
self.DEFAULT_BRANCH,
self.DEFAULT_ARCH,
self.DEFAULT_DEFCONFIG))
@requests_mock.mock()
def test_check_artifacts_all(self, mock):
release_url = '%s/%s/%s/latest' % (self.BASE_URL,
self.DEFAULT_TREE,
self.DEFAULT_BRANCH)
config_url = '%s/%s/%s/%s/%s/%s' % (self.BASE_URL,
self.DEFAULT_TREE,
self.DEFAULT_BRANCH,
self.DEFAULT_RELEASE,
self.DEFAULT_ARCH,
self.DEFAULT_DEFCONFIG)
kernel_url = '%s/%s' % (config_url, self.DEFAULT_IMAGE)
modules_url = '%s/%s' % (config_url, self.DEFAULT_MODULES)
dtb_url = '%s/dtbs/%s.dtb' % (config_url, self.DEFAULT_DTB)
board = {
'arch': self.DEFAULT_ARCH,
'dt': self.DEFAULT_DTB,
'name': 'test'
}
cfg = {
'api_token': self.DEFAULT_API_TOKEN,
}
mock.get(release_url, text=self.DEFAULT_RELEASE, headers={'last-modified':
time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime())})
mock.get(config_url)
mock.get(kernel_url)
mock.get(modules_url)
mock.get(dtb_url)
crawler = FreeElectronsCrawler(cfg)
items = crawler.crawl(board, self.DEFAULT_TREE,
self.DEFAULT_BRANCH,
self.DEFAULT_DEFCONFIG)
assert_equal(kernel_url, items['kernel'])
assert_equal(dtb_url, items['dtb'])
assert_equal(modules_url, items['modules'])
@requests_mock.mock()
def test_check_artifacts_all_missing_config(self, mock):
release_url = '%s/%s/%s/latest' % (self.BASE_URL,
self.DEFAULT_TREE,
self.DEFAULT_BRANCH)
config_url = '%s/%s/%s/%s/%s/%s' % (self.BASE_URL,
self.DEFAULT_TREE,
self.DEFAULT_BRANCH,
self.DEFAULT_RELEASE,
self.DEFAULT_ARCH,
self.DEFAULT_DEFCONFIG)
board = {
'arch': self.DEFAULT_ARCH,
'dt': self.DEFAULT_DTB,
'name': 'test'
}
cfg = {
'api_token': self.DEFAULT_API_TOKEN,
}
mock.get(release_url, text=self.DEFAULT_RELEASE, headers={'last-modified':
time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime())})
mock.get(config_url, status_code=404)
crawler = FreeElectronsCrawler(cfg)
assert_raises(RemoteEmptyError, crawler.crawl,
board, self.DEFAULT_TREE, self.DEFAULT_BRANCH,
self.DEFAULT_DEFCONFIG)
@requests_mock.mock()
def test_check_artifacts_all_missing_kernel(self, mock):
release_url = '%s/%s/%s/latest' % (self.BASE_URL,
self.DEFAULT_TREE,
self.DEFAULT_BRANCH)
config_url = '%s/%s/%s/%s/%s/%s' % (self.BASE_URL,
self.DEFAULT_TREE,
self.DEFAULT_BRANCH,
self.DEFAULT_RELEASE,
self.DEFAULT_ARCH,
self.DEFAULT_DEFCONFIG)
kernel_url = '%s/%s' % (config_url, self.DEFAULT_IMAGE)
board = {
'arch': self.DEFAULT_ARCH,
'dt': self.DEFAULT_DTB,
'name': 'test'
}
cfg = {
'api_token': self.DEFAULT_API_TOKEN,
}
mock.get(release_url, text=self.DEFAULT_RELEASE, headers={'last-modified':
time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime())})
mock.get(config_url)
mock.get(kernel_url, status_code=404)
crawler = FreeElectronsCrawler(cfg)
assert_raises(RemoteEmptyError, crawler.crawl,
board, self.DEFAULT_TREE, self.DEFAULT_BRANCH,
self.DEFAULT_DEFCONFIG)
@requests_mock.mock()
def test_check_artifacts_all_missing_modules(self, mock):
release_url = '%s/%s/%s/latest' % (self.BASE_URL,
self.DEFAULT_TREE,
self.DEFAULT_BRANCH)
config_url = '%s/%s/%s/%s/%s/%s' % (self.BASE_URL,
self.DEFAULT_TREE,
self.DEFAULT_BRANCH,
self.DEFAULT_RELEASE,
self.DEFAULT_ARCH,
self.DEFAULT_DEFCONFIG)
kernel_url = '%s/%s' % (config_url, self.DEFAULT_IMAGE)
modules_url = '%s/%s' % (config_url, self.DEFAULT_MODULES)
board = {
'arch': self.DEFAULT_ARCH,
'dt': self.DEFAULT_DTB,
'name': 'test'
}
cfg = {
'api_token': self.DEFAULT_API_TOKEN,
}
mock.get(release_url, text=self.DEFAULT_RELEASE, headers={'last-modified':
time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime())})
mock.get(config_url)
mock.get(kernel_url)
mock.get(modules_url, status_code=404)
crawler = FreeElectronsCrawler(cfg)
assert_raises(RemoteEmptyError, crawler.crawl,
board, self.DEFAULT_TREE, self.DEFAULT_BRANCH,
self.DEFAULT_DEFCONFIG)
@requests_mock.mock()
def test_check_artifacts_all_missing_dtb(self, mock):
release_url = '%s/%s/%s/latest' % (self.BASE_URL,
self.DEFAULT_TREE,
self.DEFAULT_BRANCH)
config_url = '%s/%s/%s/%s/%s/%s' % (self.BASE_URL,
self.DEFAULT_TREE,
self.DEFAULT_BRANCH,
self.DEFAULT_RELEASE,
self.DEFAULT_ARCH,
self.DEFAULT_DEFCONFIG)
kernel_url = '%s/%s' % (config_url, self.DEFAULT_IMAGE)
modules_url = '%s/%s' % (config_url, self.DEFAULT_MODULES)
dtb_url = '%s/dtbs/%s.dtb' % (config_url, self.DEFAULT_DTB)
board = {
'arch': self.DEFAULT_ARCH,
'dt': self.DEFAULT_DTB,
'name': 'test'
}
cfg = {
'api_token': self.DEFAULT_API_TOKEN,
}
mock.get(release_url, text=self.DEFAULT_RELEASE, headers={'last-modified':
time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime())})
mock.get(config_url)
mock.get(kernel_url)
mock.get(modules_url)
mock.get(dtb_url, status_code=404)
crawler = FreeElectronsCrawler(cfg)
assert_raises(RemoteEmptyError, crawler.crawl,
board, self.DEFAULT_TREE, self.DEFAULT_BRANCH,
self.DEFAULT_DEFCONFIG)
|
brendandahl/servo
|
refs/heads/master
|
tests/wpt/css-tests/tools/pywebsocket/src/mod_pywebsocket/mux.py
|
636
|
# Copyright 2012, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""This file provides classes and helper functions for multiplexing extension.
Specification:
http://tools.ietf.org/html/draft-ietf-hybi-websocket-multiplexing-06
"""
import collections
import copy
import email
import email.parser
import logging
import math
import struct
import threading
import traceback
from mod_pywebsocket import common
from mod_pywebsocket import handshake
from mod_pywebsocket import util
from mod_pywebsocket._stream_base import BadOperationException
from mod_pywebsocket._stream_base import ConnectionTerminatedException
from mod_pywebsocket._stream_base import InvalidFrameException
from mod_pywebsocket._stream_hybi import Frame
from mod_pywebsocket._stream_hybi import Stream
from mod_pywebsocket._stream_hybi import StreamOptions
from mod_pywebsocket._stream_hybi import create_binary_frame
from mod_pywebsocket._stream_hybi import create_closing_handshake_body
from mod_pywebsocket._stream_hybi import create_header
from mod_pywebsocket._stream_hybi import create_length_header
from mod_pywebsocket._stream_hybi import parse_frame
from mod_pywebsocket.handshake import hybi
_CONTROL_CHANNEL_ID = 0
_DEFAULT_CHANNEL_ID = 1
_MUX_OPCODE_ADD_CHANNEL_REQUEST = 0
_MUX_OPCODE_ADD_CHANNEL_RESPONSE = 1
_MUX_OPCODE_FLOW_CONTROL = 2
_MUX_OPCODE_DROP_CHANNEL = 3
_MUX_OPCODE_NEW_CHANNEL_SLOT = 4
_MAX_CHANNEL_ID = 2 ** 29 - 1
_INITIAL_NUMBER_OF_CHANNEL_SLOTS = 64
_INITIAL_QUOTA_FOR_CLIENT = 8 * 1024
_HANDSHAKE_ENCODING_IDENTITY = 0
_HANDSHAKE_ENCODING_DELTA = 1
# We need only this status code for now.
_HTTP_BAD_RESPONSE_MESSAGES = {
common.HTTP_STATUS_BAD_REQUEST: 'Bad Request',
}
# DropChannel reason code
# TODO(bashi): Define all reason codes defined in the -05 draft.
_DROP_CODE_NORMAL_CLOSURE = 1000
_DROP_CODE_INVALID_ENCAPSULATING_MESSAGE = 2001
_DROP_CODE_CHANNEL_ID_TRUNCATED = 2002
_DROP_CODE_ENCAPSULATED_FRAME_IS_TRUNCATED = 2003
_DROP_CODE_UNKNOWN_MUX_OPCODE = 2004
_DROP_CODE_INVALID_MUX_CONTROL_BLOCK = 2005
_DROP_CODE_CHANNEL_ALREADY_EXISTS = 2006
_DROP_CODE_NEW_CHANNEL_SLOT_VIOLATION = 2007
_DROP_CODE_UNKNOWN_REQUEST_ENCODING = 2010
_DROP_CODE_SEND_QUOTA_VIOLATION = 3005
_DROP_CODE_SEND_QUOTA_OVERFLOW = 3006
_DROP_CODE_ACKNOWLEDGED = 3008
_DROP_CODE_BAD_FRAGMENTATION = 3009
class MuxUnexpectedException(Exception):
"""Exception in handling multiplexing extension."""
pass
# Temporary
class MuxNotImplementedException(Exception):
"""Raised when a flow enters unimplemented code path."""
pass
class LogicalConnectionClosedException(Exception):
"""Raised when logical connection is gracefully closed."""
pass
class PhysicalConnectionError(Exception):
"""Raised when there is a physical connection error."""
def __init__(self, drop_code, message=''):
super(PhysicalConnectionError, self).__init__(
'code=%d, message=%r' % (drop_code, message))
self.drop_code = drop_code
self.message = message
class LogicalChannelError(Exception):
"""Raised when there is a logical channel error."""
def __init__(self, channel_id, drop_code, message=''):
super(LogicalChannelError, self).__init__(
'channel_id=%d, code=%d, message=%r' % (
channel_id, drop_code, message))
self.channel_id = channel_id
self.drop_code = drop_code
self.message = message
def _encode_channel_id(channel_id):
if channel_id < 0:
raise ValueError('Channel id %d must not be negative' % channel_id)
if channel_id < 2 ** 7:
return chr(channel_id)
if channel_id < 2 ** 14:
return struct.pack('!H', 0x8000 + channel_id)
if channel_id < 2 ** 21:
first = chr(0xc0 + (channel_id >> 16))
return first + struct.pack('!H', channel_id & 0xffff)
if channel_id < 2 ** 29:
return struct.pack('!L', 0xe0000000 + channel_id)
raise ValueError('Channel id %d is too large' % channel_id)
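# A minimal sketch of the variable-length channel id encoding implemented
# above; the byte strings below are derived from the branches of
# _encode_channel_id and are purely illustrative:
#
#   _encode_channel_id(5)          == '\x05'              # 1 byte,  id < 2**7
#   _encode_channel_id(300)        == '\x81\x2c'          # 2 bytes, id < 2**14
#   _encode_channel_id(0x12345)    == '\xc1\x23\x45'      # 3 bytes, id < 2**21
#   _encode_channel_id(0x10000000) == '\xf0\x00\x00\x00'  # 4 bytes, id < 2**29
#
# The leading bits of the first byte (0b10..., 0b110..., 0b111...) select the
# width, mirroring read_channel_id() in _MuxFramePayloadParser below.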
def _encode_number(number):
return create_length_header(number, False)
def _create_add_channel_response(channel_id, encoded_handshake,
encoding=0, rejected=False):
if encoding != 0 and encoding != 1:
raise ValueError('Invalid encoding %d' % encoding)
first_byte = ((_MUX_OPCODE_ADD_CHANNEL_RESPONSE << 5) |
(rejected << 4) | encoding)
block = (chr(first_byte) +
_encode_channel_id(channel_id) +
_encode_number(len(encoded_handshake)) +
encoded_handshake)
return block
def _create_drop_channel(channel_id, code=None, message=''):
if len(message) > 0 and code is None:
raise ValueError('Code must be specified if message is specified')
first_byte = _MUX_OPCODE_DROP_CHANNEL << 5
block = chr(first_byte) + _encode_channel_id(channel_id)
if code is None:
block += _encode_number(0) # Reason size
else:
reason = struct.pack('!H', code) + message
reason_size = _encode_number(len(reason))
block += reason_size + reason
return block
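# A hedged sketch of the DropChannel wire layout produced above, assuming
# _encode_number() (i.e. create_length_header(n, False)) emits the standard
# single-byte WebSocket length encoding for small values:
#
#   _create_drop_channel(2, code=1000, message='bye')
#       == '\x60'         # opcode 3 (DropChannel) << 5, no flags
#        + '\x02'         # channel id 2
#        + '\x05'         # reason size: 2-byte code + 3-byte message
#        + '\x03\xe8bye'  # code 1000 packed as !H, then the message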
def _create_flow_control(channel_id, replenished_quota):
first_byte = _MUX_OPCODE_FLOW_CONTROL << 5
block = (chr(first_byte) +
_encode_channel_id(channel_id) +
_encode_number(replenished_quota))
return block
def _create_new_channel_slot(slots, send_quota):
if slots < 0 or send_quota < 0:
raise ValueError('slots and send_quota must be non-negative.')
first_byte = _MUX_OPCODE_NEW_CHANNEL_SLOT << 5
block = (chr(first_byte) +
_encode_number(slots) +
_encode_number(send_quota))
return block
def _create_fallback_new_channel_slot():
first_byte = (_MUX_OPCODE_NEW_CHANNEL_SLOT << 5) | 1 # Set the F flag
block = (chr(first_byte) + _encode_number(0) + _encode_number(0))
return block
def _parse_request_text(request_text):
request_line, header_lines = request_text.split('\r\n', 1)
words = request_line.split(' ')
if len(words) != 3:
raise ValueError('Bad Request-Line syntax %r' % request_line)
[command, path, version] = words
if version != 'HTTP/1.1':
raise ValueError('Bad request version %r' % version)
# email.parser.Parser() parses RFC 2822 (RFC 822) style headers.
    # RFC 6455 refers to RFC 2616 for handshake parsing, and RFC 2616 refers
    # to RFC 822.
headers = email.parser.Parser().parsestr(header_lines)
return command, path, version, headers
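# For illustration, a minimal (hypothetical) encoded handshake and what the
# parser above returns for it:
#
#   command, path, version, headers = _parse_request_text(
#       'GET /chat HTTP/1.1\r\n'
#       'Host: example.com\r\n'
#       '\r\n')
#   # command == 'GET', path == '/chat', version == 'HTTP/1.1'
#   # headers['Host'] == 'example.com'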
class _ControlBlock(object):
"""A structure that holds parsing result of multiplexing control block.
Control block specific attributes will be added by _MuxFramePayloadParser.
(e.g. encoded_handshake will be added for AddChannelRequest and
AddChannelResponse)
"""
def __init__(self, opcode):
self.opcode = opcode
class _MuxFramePayloadParser(object):
"""A class that parses multiplexed frame payload."""
def __init__(self, payload):
self._data = payload
self._read_position = 0
self._logger = util.get_class_logger(self)
def read_channel_id(self):
"""Reads channel id.
Raises:
ValueError: when the payload doesn't contain
valid channel id.
"""
remaining_length = len(self._data) - self._read_position
pos = self._read_position
if remaining_length == 0:
raise ValueError('Invalid channel id format')
channel_id = ord(self._data[pos])
channel_id_length = 1
if channel_id & 0xe0 == 0xe0:
if remaining_length < 4:
raise ValueError('Invalid channel id format')
channel_id = struct.unpack('!L',
self._data[pos:pos+4])[0] & 0x1fffffff
channel_id_length = 4
elif channel_id & 0xc0 == 0xc0:
if remaining_length < 3:
raise ValueError('Invalid channel id format')
channel_id = (((channel_id & 0x1f) << 16) +
struct.unpack('!H', self._data[pos+1:pos+3])[0])
channel_id_length = 3
elif channel_id & 0x80 == 0x80:
if remaining_length < 2:
raise ValueError('Invalid channel id format')
channel_id = struct.unpack('!H',
self._data[pos:pos+2])[0] & 0x3fff
channel_id_length = 2
self._read_position += channel_id_length
return channel_id
def read_inner_frame(self):
"""Reads an inner frame.
Raises:
PhysicalConnectionError: when the inner frame is invalid.
"""
if len(self._data) == self._read_position:
raise PhysicalConnectionError(
_DROP_CODE_ENCAPSULATED_FRAME_IS_TRUNCATED)
bits = ord(self._data[self._read_position])
self._read_position += 1
fin = (bits & 0x80) == 0x80
rsv1 = (bits & 0x40) == 0x40
rsv2 = (bits & 0x20) == 0x20
rsv3 = (bits & 0x10) == 0x10
opcode = bits & 0xf
payload = self.remaining_data()
# Consume rest of the message which is payload data of the original
# frame.
self._read_position = len(self._data)
return fin, rsv1, rsv2, rsv3, opcode, payload
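    # The first byte consumed by read_inner_frame() above packs the header
    # exactly like a hybi frame header without length or mask fields, e.g.
    # (illustrative values):
    #
    #   0x81 -> fin=1, rsv1..3=0, opcode=0x1 (text, final fragment)
    #   0x02 -> fin=0, rsv1..3=0, opcode=0x2 (binary, to be continued)
    #
    # Everything after that byte is the inner frame's payload; the physical
    # frame already carries the length, so no inner length field is needed.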
def _read_number(self):
if self._read_position + 1 > len(self._data):
raise ValueError(
'Cannot read the first byte of number field')
number = ord(self._data[self._read_position])
if number & 0x80 == 0x80:
raise ValueError(
'The most significant bit of the first byte of number should '
'be unset')
self._read_position += 1
pos = self._read_position
if number == 127:
if pos + 8 > len(self._data):
raise ValueError('Invalid number field')
self._read_position += 8
number = struct.unpack('!Q', self._data[pos:pos+8])[0]
if number > 0x7FFFFFFFFFFFFFFF:
raise ValueError('Encoded number(%d) >= 2^63' % number)
if number <= 0xFFFF:
raise ValueError(
'%d should not be encoded by 9 bytes encoding' % number)
return number
if number == 126:
if pos + 2 > len(self._data):
raise ValueError('Invalid number field')
self._read_position += 2
number = struct.unpack('!H', self._data[pos:pos+2])[0]
if number <= 125:
raise ValueError(
'%d should not be encoded by 3 bytes encoding' % number)
        return number
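    # A sketch of the number field format accepted by _read_number() above.
    # It mirrors the WebSocket payload length encoding, with the MSB of the
    # first byte required to be unset (values derived from the code above):
    #
    #   '\x05'                       -> 5    (single byte, value <= 125)
    #   '\x7e\x01\x2c'               -> 300  (126 marker + 2-byte extension)
    #   '\x7f' + 8 big-endian bytes  -> values up to 2**63 - 1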
def _read_size_and_contents(self):
"""Reads data that consists of followings:
- the size of the contents encoded the same way as payload length
of the WebSocket Protocol with 1 bit padding at the head.
- the contents.
"""
try:
size = self._read_number()
except ValueError, e:
raise PhysicalConnectionError(_DROP_CODE_INVALID_MUX_CONTROL_BLOCK,
str(e))
pos = self._read_position
if pos + size > len(self._data):
raise PhysicalConnectionError(
_DROP_CODE_INVALID_MUX_CONTROL_BLOCK,
'Cannot read %d bytes data' % size)
self._read_position += size
return self._data[pos:pos+size]
def _read_add_channel_request(self, first_byte, control_block):
reserved = (first_byte >> 2) & 0x7
if reserved != 0:
raise PhysicalConnectionError(
_DROP_CODE_INVALID_MUX_CONTROL_BLOCK,
'Reserved bits must be unset')
# Invalid encoding will be handled by MuxHandler.
encoding = first_byte & 0x3
try:
control_block.channel_id = self.read_channel_id()
except ValueError, e:
raise PhysicalConnectionError(_DROP_CODE_INVALID_MUX_CONTROL_BLOCK)
control_block.encoding = encoding
encoded_handshake = self._read_size_and_contents()
control_block.encoded_handshake = encoded_handshake
return control_block
def _read_add_channel_response(self, first_byte, control_block):
reserved = (first_byte >> 2) & 0x3
if reserved != 0:
raise PhysicalConnectionError(
_DROP_CODE_INVALID_MUX_CONTROL_BLOCK,
'Reserved bits must be unset')
control_block.accepted = (first_byte >> 4) & 1
control_block.encoding = first_byte & 0x3
try:
control_block.channel_id = self.read_channel_id()
except ValueError, e:
raise PhysicalConnectionError(_DROP_CODE_INVALID_MUX_CONTROL_BLOCK)
control_block.encoded_handshake = self._read_size_and_contents()
return control_block
def _read_flow_control(self, first_byte, control_block):
reserved = first_byte & 0x1f
if reserved != 0:
raise PhysicalConnectionError(
_DROP_CODE_INVALID_MUX_CONTROL_BLOCK,
'Reserved bits must be unset')
try:
control_block.channel_id = self.read_channel_id()
control_block.send_quota = self._read_number()
except ValueError, e:
raise PhysicalConnectionError(_DROP_CODE_INVALID_MUX_CONTROL_BLOCK,
str(e))
return control_block
def _read_drop_channel(self, first_byte, control_block):
reserved = first_byte & 0x1f
if reserved != 0:
raise PhysicalConnectionError(
_DROP_CODE_INVALID_MUX_CONTROL_BLOCK,
'Reserved bits must be unset')
try:
control_block.channel_id = self.read_channel_id()
except ValueError, e:
raise PhysicalConnectionError(_DROP_CODE_INVALID_MUX_CONTROL_BLOCK)
reason = self._read_size_and_contents()
if len(reason) == 0:
control_block.drop_code = None
control_block.drop_message = ''
elif len(reason) >= 2:
control_block.drop_code = struct.unpack('!H', reason[:2])[0]
control_block.drop_message = reason[2:]
else:
raise PhysicalConnectionError(
_DROP_CODE_INVALID_MUX_CONTROL_BLOCK,
                'Received DropChannel that contains only a 1-byte reason')
return control_block
def _read_new_channel_slot(self, first_byte, control_block):
reserved = first_byte & 0x1e
if reserved != 0:
raise PhysicalConnectionError(
_DROP_CODE_INVALID_MUX_CONTROL_BLOCK,
'Reserved bits must be unset')
control_block.fallback = first_byte & 1
try:
control_block.slots = self._read_number()
control_block.send_quota = self._read_number()
except ValueError, e:
raise PhysicalConnectionError(_DROP_CODE_INVALID_MUX_CONTROL_BLOCK,
str(e))
return control_block
def read_control_blocks(self):
"""Reads control block(s).
Raises:
PhysicalConnectionError: when the payload contains invalid control
block(s).
StopIteration: when no control blocks left.
"""
while self._read_position < len(self._data):
first_byte = ord(self._data[self._read_position])
self._read_position += 1
opcode = (first_byte >> 5) & 0x7
control_block = _ControlBlock(opcode=opcode)
if opcode == _MUX_OPCODE_ADD_CHANNEL_REQUEST:
yield self._read_add_channel_request(first_byte, control_block)
elif opcode == _MUX_OPCODE_ADD_CHANNEL_RESPONSE:
yield self._read_add_channel_response(
first_byte, control_block)
elif opcode == _MUX_OPCODE_FLOW_CONTROL:
yield self._read_flow_control(first_byte, control_block)
elif opcode == _MUX_OPCODE_DROP_CHANNEL:
yield self._read_drop_channel(first_byte, control_block)
elif opcode == _MUX_OPCODE_NEW_CHANNEL_SLOT:
yield self._read_new_channel_slot(first_byte, control_block)
else:
raise PhysicalConnectionError(
_DROP_CODE_UNKNOWN_MUX_OPCODE,
'Invalid opcode %d' % opcode)
assert self._read_position == len(self._data)
raise StopIteration
def remaining_data(self):
"""Returns remaining data."""
return self._data[self._read_position:]
class _LogicalRequest(object):
"""Mimics mod_python request."""
def __init__(self, channel_id, command, path, protocol, headers,
connection):
"""Constructs an instance.
Args:
channel_id: the channel id of the logical channel.
command: HTTP request command.
            path: HTTP request path.
            protocol: HTTP request protocol (e.g. 'HTTP/1.1').
headers: HTTP headers.
connection: _LogicalConnection instance.
"""
self.channel_id = channel_id
self.method = command
self.uri = path
self.protocol = protocol
self.headers_in = headers
self.connection = connection
self.server_terminated = False
self.client_terminated = False
def is_https(self):
"""Mimics request.is_https(). Returns False because this method is
used only by old protocols (hixie and hybi00).
"""
return False
class _LogicalConnection(object):
"""Mimics mod_python mp_conn."""
# For details, see the comment of set_read_state().
STATE_ACTIVE = 1
STATE_GRACEFULLY_CLOSED = 2
STATE_TERMINATED = 3
def __init__(self, mux_handler, channel_id):
"""Constructs an instance.
Args:
mux_handler: _MuxHandler instance.
channel_id: channel id of this connection.
"""
self._mux_handler = mux_handler
self._channel_id = channel_id
self._incoming_data = ''
# - Protects _waiting_write_completion
# - Signals the thread waiting for completion of write by mux handler
self._write_condition = threading.Condition()
self._waiting_write_completion = False
self._read_condition = threading.Condition()
self._read_state = self.STATE_ACTIVE
def get_local_addr(self):
"""Getter to mimic mp_conn.local_addr."""
return self._mux_handler.physical_connection.get_local_addr()
local_addr = property(get_local_addr)
def get_remote_addr(self):
"""Getter to mimic mp_conn.remote_addr."""
return self._mux_handler.physical_connection.get_remote_addr()
remote_addr = property(get_remote_addr)
def get_memorized_lines(self):
"""Gets memorized lines. Not supported."""
raise MuxUnexpectedException('_LogicalConnection does not support '
'get_memorized_lines')
def write(self, data):
"""Writes data. mux_handler sends data asynchronously. The caller will
be suspended until write done.
Args:
data: data to be written.
Raises:
MuxUnexpectedException: when called before finishing the previous
write.
"""
try:
self._write_condition.acquire()
if self._waiting_write_completion:
raise MuxUnexpectedException(
'Logical connection %d is already waiting the completion '
'of write' % self._channel_id)
self._waiting_write_completion = True
self._mux_handler.send_data(self._channel_id, data)
self._write_condition.wait()
# TODO(tyoshino): Raise an exception if woke up by on_writer_done.
finally:
self._write_condition.release()
def write_control_data(self, data):
"""Writes data via the control channel. Don't wait finishing write
because this method can be called by mux dispatcher.
Args:
data: data to be written.
"""
self._mux_handler.send_control_data(data)
def on_write_data_done(self):
"""Called when sending data is completed."""
try:
self._write_condition.acquire()
if not self._waiting_write_completion:
raise MuxUnexpectedException(
'Invalid call of on_write_data_done for logical '
'connection %d' % self._channel_id)
self._waiting_write_completion = False
self._write_condition.notify()
finally:
self._write_condition.release()
def on_writer_done(self):
"""Called by the mux handler when the writer thread has finished."""
try:
self._write_condition.acquire()
self._waiting_write_completion = False
self._write_condition.notify()
finally:
self._write_condition.release()
def append_frame_data(self, frame_data):
"""Appends incoming frame data. Called when mux_handler dispatches
frame data to the corresponding application.
Args:
frame_data: incoming frame data.
"""
self._read_condition.acquire()
self._incoming_data += frame_data
self._read_condition.notify()
self._read_condition.release()
def read(self, length):
"""Reads data. Blocks until enough data has arrived via physical
connection.
Args:
length: length of data to be read.
Raises:
LogicalConnectionClosedException: when closing handshake for this
logical channel has been received.
ConnectionTerminatedException: when the physical connection has
closed, or an error is caused on the reader thread.
"""
self._read_condition.acquire()
while (self._read_state == self.STATE_ACTIVE and
len(self._incoming_data) < length):
self._read_condition.wait()
try:
if self._read_state == self.STATE_GRACEFULLY_CLOSED:
raise LogicalConnectionClosedException(
'Logical channel %d has closed.' % self._channel_id)
elif self._read_state == self.STATE_TERMINATED:
raise ConnectionTerminatedException(
                    'Receiving %d bytes failed. Logical channel (%d) closed' %
(length, self._channel_id))
value = self._incoming_data[:length]
self._incoming_data = self._incoming_data[length:]
finally:
self._read_condition.release()
return value
def set_read_state(self, new_state):
"""Sets the state of this connection. Called when an event for this
connection has occurred.
Args:
            new_state: state to be set. new_state must be one of the following:
- STATE_GRACEFULLY_CLOSED: when closing handshake for this
connection has been received.
                - STATE_TERMINATED: when the physical connection has closed or
                    DropChannel of this connection has been received.
"""
self._read_condition.acquire()
self._read_state = new_state
self._read_condition.notify()
self._read_condition.release()
class _InnerMessage(object):
"""Holds the result of _InnerMessageBuilder.build().
"""
def __init__(self, opcode, payload):
self.opcode = opcode
self.payload = payload
class _InnerMessageBuilder(object):
"""A class that holds the context of inner message fragmentation and
builds a message from fragmented inner frame(s).
"""
def __init__(self):
self._control_opcode = None
self._pending_control_fragments = []
self._message_opcode = None
self._pending_message_fragments = []
self._frame_handler = self._handle_first
def _handle_first(self, frame):
if frame.opcode == common.OPCODE_CONTINUATION:
raise InvalidFrameException('Sending invalid continuation opcode')
if common.is_control_opcode(frame.opcode):
return self._process_first_fragmented_control(frame)
else:
return self._process_first_fragmented_message(frame)
def _process_first_fragmented_control(self, frame):
self._control_opcode = frame.opcode
self._pending_control_fragments.append(frame.payload)
if not frame.fin:
self._frame_handler = self._handle_fragmented_control
return None
return self._reassemble_fragmented_control()
def _process_first_fragmented_message(self, frame):
self._message_opcode = frame.opcode
self._pending_message_fragments.append(frame.payload)
if not frame.fin:
self._frame_handler = self._handle_fragmented_message
return None
return self._reassemble_fragmented_message()
def _handle_fragmented_control(self, frame):
if frame.opcode != common.OPCODE_CONTINUATION:
raise InvalidFrameException(
'Sending invalid opcode %d while sending fragmented control '
'message' % frame.opcode)
self._pending_control_fragments.append(frame.payload)
if not frame.fin:
return None
return self._reassemble_fragmented_control()
def _reassemble_fragmented_control(self):
opcode = self._control_opcode
payload = ''.join(self._pending_control_fragments)
self._control_opcode = None
self._pending_control_fragments = []
if self._message_opcode is not None:
self._frame_handler = self._handle_fragmented_message
else:
self._frame_handler = self._handle_first
return _InnerMessage(opcode, payload)
def _handle_fragmented_message(self, frame):
# Sender can interleave a control message while sending fragmented
# messages.
if common.is_control_opcode(frame.opcode):
if self._control_opcode is not None:
raise MuxUnexpectedException(
'Should not reach here(Bug in builder)')
return self._process_first_fragmented_control(frame)
if frame.opcode != common.OPCODE_CONTINUATION:
raise InvalidFrameException(
'Sending invalid opcode %d while sending fragmented message' %
frame.opcode)
self._pending_message_fragments.append(frame.payload)
if not frame.fin:
return None
return self._reassemble_fragmented_message()
def _reassemble_fragmented_message(self):
opcode = self._message_opcode
payload = ''.join(self._pending_message_fragments)
self._message_opcode = None
self._pending_message_fragments = []
self._frame_handler = self._handle_first
return _InnerMessage(opcode, payload)
def build(self, frame):
"""Build an inner message. Returns an _InnerMessage instance when
the given frame is the last fragmented frame. Returns None otherwise.
Args:
frame: an inner frame.
Raises:
            InvalidFrameException: when an invalid opcode is received (e.g. a
                non-continuation data opcode arrives while the fin flag of
                the previous inner frame was not set).
"""
return self._frame_handler(frame)
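# A hedged sketch of how _InnerMessageBuilder.build() consumes a fragmented
# text message (Frame and the opcode constants as used elsewhere in this
# module; the payload values are made up):
#
#   builder = _InnerMessageBuilder()
#   builder.build(Frame(fin=0, opcode=common.OPCODE_TEXT, payload='He'))
#   # -> None (more fragments expected)
#   builder.build(Frame(fin=0, opcode=common.OPCODE_CONTINUATION, payload='ll'))
#   # -> None
#   builder.build(Frame(fin=1, opcode=common.OPCODE_CONTINUATION, payload='o'))
#   # -> _InnerMessage(opcode=common.OPCODE_TEXT, payload='Hello')
#
# A fragmented control message may be interleaved between the data fragments;
# it is buffered separately and reassembled on its own.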
class _LogicalStream(Stream):
"""Mimics the Stream class. This class interprets multiplexed WebSocket
frames.
"""
def __init__(self, request, stream_options, send_quota, receive_quota):
"""Constructs an instance.
Args:
request: _LogicalRequest instance.
stream_options: StreamOptions instance.
send_quota: Initial send quota.
receive_quota: Initial receive quota.
"""
# Physical stream is responsible for masking.
stream_options.unmask_receive = False
Stream.__init__(self, request, stream_options)
self._send_closed = False
self._send_quota = send_quota
# - Protects _send_closed and _send_quota
# - Signals the thread waiting for send quota replenished
self._send_condition = threading.Condition()
# The opcode of the first frame in messages.
self._message_opcode = common.OPCODE_TEXT
# True when the last message was fragmented.
self._last_message_was_fragmented = False
self._receive_quota = receive_quota
self._write_inner_frame_semaphore = threading.Semaphore()
self._inner_message_builder = _InnerMessageBuilder()
def _create_inner_frame(self, opcode, payload, end=True):
frame = Frame(fin=end, opcode=opcode, payload=payload)
for frame_filter in self._options.outgoing_frame_filters:
frame_filter.filter(frame)
if len(payload) != len(frame.payload):
raise MuxUnexpectedException(
                'Mux extension must not be used after extensions which change '
                'the frame boundary')
first_byte = ((frame.fin << 7) | (frame.rsv1 << 6) |
(frame.rsv2 << 5) | (frame.rsv3 << 4) | frame.opcode)
return chr(first_byte) + frame.payload
def _write_inner_frame(self, opcode, payload, end=True):
payload_length = len(payload)
write_position = 0
try:
            # An inner frame will be fragmented if there is not enough send
# quota. This semaphore ensures that fragmented inner frames are
# sent in order on the logical channel.
# Note that frames that come from other logical channels or
# multiplexing control blocks can be inserted between fragmented
# inner frames on the physical channel.
self._write_inner_frame_semaphore.acquire()
# Consume an octet quota when this is the first fragmented frame.
if opcode != common.OPCODE_CONTINUATION:
try:
self._send_condition.acquire()
while (not self._send_closed) and self._send_quota == 0:
self._send_condition.wait()
if self._send_closed:
raise BadOperationException(
'Logical connection %d is closed' %
self._request.channel_id)
self._send_quota -= 1
finally:
self._send_condition.release()
while write_position < payload_length:
try:
self._send_condition.acquire()
while (not self._send_closed) and self._send_quota == 0:
self._logger.debug(
'No quota. Waiting FlowControl message for %d.' %
self._request.channel_id)
self._send_condition.wait()
if self._send_closed:
raise BadOperationException(
'Logical connection %d is closed' %
                            self._request.channel_id)
remaining = payload_length - write_position
write_length = min(self._send_quota, remaining)
inner_frame_end = (
end and
(write_position + write_length == payload_length))
inner_frame = self._create_inner_frame(
opcode,
payload[write_position:write_position+write_length],
inner_frame_end)
self._send_quota -= write_length
self._logger.debug('Consumed quota=%d, remaining=%d' %
(write_length, self._send_quota))
finally:
self._send_condition.release()
# Writing data will block the worker so we need to release
# _send_condition before writing.
self._logger.debug('Sending inner frame: %r' % inner_frame)
self._request.connection.write(inner_frame)
write_position += write_length
opcode = common.OPCODE_CONTINUATION
except ValueError, e:
raise BadOperationException(e)
finally:
self._write_inner_frame_semaphore.release()
def replenish_send_quota(self, send_quota):
"""Replenish send quota."""
try:
self._send_condition.acquire()
if self._send_quota + send_quota > 0x7FFFFFFFFFFFFFFF:
self._send_quota = 0
raise LogicalChannelError(
self._request.channel_id, _DROP_CODE_SEND_QUOTA_OVERFLOW)
self._send_quota += send_quota
self._logger.debug('Replenished send quota for channel id %d: %d' %
(self._request.channel_id, self._send_quota))
finally:
self._send_condition.notify()
self._send_condition.release()
def consume_receive_quota(self, amount):
"""Consumes receive quota. Returns False on failure."""
if self._receive_quota < amount:
            self._logger.debug('Quota violation on channel id %d: %d < %d' %
(self._request.channel_id,
self._receive_quota, amount))
return False
self._receive_quota -= amount
return True
def send_message(self, message, end=True, binary=False):
"""Override Stream.send_message."""
if self._request.server_terminated:
raise BadOperationException(
'Requested send_message after sending out a closing handshake')
if binary and isinstance(message, unicode):
raise BadOperationException(
                'Message for binary frame must be an instance of str')
if binary:
opcode = common.OPCODE_BINARY
else:
opcode = common.OPCODE_TEXT
message = message.encode('utf-8')
for message_filter in self._options.outgoing_message_filters:
message = message_filter.filter(message, end, binary)
if self._last_message_was_fragmented:
if opcode != self._message_opcode:
raise BadOperationException('Message types are different in '
'frames for the same message')
opcode = common.OPCODE_CONTINUATION
else:
self._message_opcode = opcode
self._write_inner_frame(opcode, message, end)
self._last_message_was_fragmented = not end
def _receive_frame(self):
"""Overrides Stream._receive_frame.
        In addition to calling Stream._receive_frame, this method adds the
        amount of payload to the receive quota and sends FlowControl to the
        client.
We need to do it here because Stream.receive_message() handles
control frames internally.
"""
opcode, payload, fin, rsv1, rsv2, rsv3 = Stream._receive_frame(self)
amount = len(payload)
        # Replenish one extra octet when receiving the first fragmented frame.
if opcode != common.OPCODE_CONTINUATION:
amount += 1
self._receive_quota += amount
frame_data = _create_flow_control(self._request.channel_id,
amount)
self._logger.debug('Sending flow control for %d, replenished=%d' %
(self._request.channel_id, amount))
self._request.connection.write_control_data(frame_data)
return opcode, payload, fin, rsv1, rsv2, rsv3
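    # Quota bookkeeping sketch (illustrative numbers): receiving a 100-byte
    # text frame replenishes 100 + 1 = 101 octets -- the payload plus one
    # octet for the non-continuation header -- mirroring the cost charged on
    # the sending side in _write_inner_frame(). A continuation frame
    # replenishes its payload size only. The resulting block, e.g.
    #
    #   _create_flow_control(channel_id=3, replenished_quota=101)
    #
    # is written via the control channel, so it bypasses the logical
    # channel's own send quota.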
def _get_message_from_frame(self, frame):
"""Overrides Stream._get_message_from_frame.
"""
try:
inner_message = self._inner_message_builder.build(frame)
except InvalidFrameException:
raise LogicalChannelError(
self._request.channel_id, _DROP_CODE_BAD_FRAGMENTATION)
if inner_message is None:
return None
self._original_opcode = inner_message.opcode
return inner_message.payload
def receive_message(self):
"""Overrides Stream.receive_message."""
# Just call Stream.receive_message(), but catch
# LogicalConnectionClosedException, which is raised when the logical
# connection has closed gracefully.
try:
return Stream.receive_message(self)
except LogicalConnectionClosedException, e:
self._logger.debug('%s', e)
return None
def _send_closing_handshake(self, code, reason):
"""Overrides Stream._send_closing_handshake."""
body = create_closing_handshake_body(code, reason)
self._logger.debug('Sending closing handshake for %d: (%r, %r)' %
(self._request.channel_id, code, reason))
self._write_inner_frame(common.OPCODE_CLOSE, body, end=True)
self._request.server_terminated = True
def send_ping(self, body=''):
"""Overrides Stream.send_ping"""
self._logger.debug('Sending ping on logical channel %d: %r' %
(self._request.channel_id, body))
self._write_inner_frame(common.OPCODE_PING, body, end=True)
self._ping_queue.append(body)
def _send_pong(self, body):
"""Overrides Stream._send_pong"""
self._logger.debug('Sending pong on logical channel %d: %r' %
(self._request.channel_id, body))
self._write_inner_frame(common.OPCODE_PONG, body, end=True)
def close_connection(self, code=common.STATUS_NORMAL_CLOSURE, reason=''):
"""Overrides Stream.close_connection."""
# TODO(bashi): Implement
self._logger.debug('Closing logical connection %d' %
self._request.channel_id)
self._request.server_terminated = True
def stop_sending(self):
"""Stops accepting new send operation (_write_inner_frame)."""
self._send_condition.acquire()
self._send_closed = True
self._send_condition.notify()
self._send_condition.release()
class _OutgoingData(object):
"""A structure that holds data to be sent via physical connection and
origin of the data.
"""
def __init__(self, channel_id, data):
self.channel_id = channel_id
self.data = data
class _PhysicalConnectionWriter(threading.Thread):
"""A thread that is responsible for writing data to physical connection.
TODO(bashi): Make sure there is no thread-safety problem when the reader
thread reads data from the same socket at a time.
"""
def __init__(self, mux_handler):
"""Constructs an instance.
Args:
mux_handler: _MuxHandler instance.
"""
threading.Thread.__init__(self)
self._logger = util.get_class_logger(self)
self._mux_handler = mux_handler
self.setDaemon(True)
# When set, make this thread stop accepting new data, flush pending
# data and exit.
self._stop_requested = False
# The close code of the physical connection.
self._close_code = common.STATUS_NORMAL_CLOSURE
# Deque for passing write data. It's protected by _deque_condition
# until _stop_requested is set.
self._deque = collections.deque()
# - Protects _deque, _stop_requested and _close_code
# - Signals threads waiting for them to be available
self._deque_condition = threading.Condition()
def put_outgoing_data(self, data):
"""Puts outgoing data.
Args:
data: _OutgoingData instance.
Raises:
BadOperationException: when the thread has been requested to
terminate.
"""
try:
self._deque_condition.acquire()
if self._stop_requested:
raise BadOperationException('Cannot write data anymore')
self._deque.append(data)
self._deque_condition.notify()
finally:
self._deque_condition.release()
def _write_data(self, outgoing_data):
message = (_encode_channel_id(outgoing_data.channel_id) +
outgoing_data.data)
try:
self._mux_handler.physical_stream.send_message(
message=message, end=True, binary=True)
except Exception, e:
util.prepend_message_to_exception(
'Failed to send message to %r: ' %
(self._mux_handler.physical_connection.remote_addr,), e)
raise
# TODO(bashi): It would be better to block the thread that sends
# control data as well.
if outgoing_data.channel_id != _CONTROL_CHANNEL_ID:
self._mux_handler.notify_write_data_done(outgoing_data.channel_id)
def run(self):
try:
self._deque_condition.acquire()
while not self._stop_requested:
if len(self._deque) == 0:
self._deque_condition.wait()
continue
outgoing_data = self._deque.popleft()
self._deque_condition.release()
self._write_data(outgoing_data)
self._deque_condition.acquire()
# Flush deque.
#
# At this point, self._deque_condition is always acquired.
try:
while len(self._deque) > 0:
outgoing_data = self._deque.popleft()
self._write_data(outgoing_data)
finally:
self._deque_condition.release()
# Close physical connection.
try:
            # Don't wait for the response here. The response will be read
# by the reader thread.
self._mux_handler.physical_stream.close_connection(
self._close_code, wait_response=False)
except Exception, e:
util.prepend_message_to_exception(
'Failed to close the physical connection: %r' % e)
raise
finally:
self._mux_handler.notify_writer_done()
def stop(self, close_code=common.STATUS_NORMAL_CLOSURE):
"""Stops the writer thread."""
self._deque_condition.acquire()
self._stop_requested = True
self._close_code = close_code
self._deque_condition.notify()
self._deque_condition.release()
class _PhysicalConnectionReader(threading.Thread):
"""A thread that is responsible for reading data from physical connection.
"""
def __init__(self, mux_handler):
"""Constructs an instance.
Args:
mux_handler: _MuxHandler instance.
"""
threading.Thread.__init__(self)
self._logger = util.get_class_logger(self)
self._mux_handler = mux_handler
self.setDaemon(True)
def run(self):
while True:
try:
physical_stream = self._mux_handler.physical_stream
message = physical_stream.receive_message()
if message is None:
break
                # The code below runs only when a data message is received.
opcode = physical_stream.get_last_received_opcode()
if opcode != common.OPCODE_BINARY:
self._mux_handler.fail_physical_connection(
_DROP_CODE_INVALID_ENCAPSULATING_MESSAGE,
'Received a text message on physical connection')
break
except ConnectionTerminatedException, e:
self._logger.debug('%s', e)
break
try:
self._mux_handler.dispatch_message(message)
except PhysicalConnectionError, e:
self._mux_handler.fail_physical_connection(
e.drop_code, e.message)
break
except LogicalChannelError, e:
self._mux_handler.fail_logical_channel(
e.channel_id, e.drop_code, e.message)
except Exception, e:
self._logger.debug(traceback.format_exc())
break
self._mux_handler.notify_reader_done()
class _Worker(threading.Thread):
"""A thread that is responsible for running the corresponding application
handler.
"""
def __init__(self, mux_handler, request):
"""Constructs an instance.
Args:
mux_handler: _MuxHandler instance.
request: _LogicalRequest instance.
"""
threading.Thread.__init__(self)
self._logger = util.get_class_logger(self)
self._mux_handler = mux_handler
self._request = request
self.setDaemon(True)
def run(self):
self._logger.debug('Logical channel worker started. (id=%d)' %
self._request.channel_id)
try:
# Non-critical exceptions will be handled by dispatcher.
self._mux_handler.dispatcher.transfer_data(self._request)
except LogicalChannelError, e:
self._mux_handler.fail_logical_channel(
e.channel_id, e.drop_code, e.message)
finally:
self._mux_handler.notify_worker_done(self._request.channel_id)
class _MuxHandshaker(hybi.Handshaker):
"""Opening handshake processor for multiplexing."""
_DUMMY_WEBSOCKET_KEY = 'dGhlIHNhbXBsZSBub25jZQ=='
def __init__(self, request, dispatcher, send_quota, receive_quota):
"""Constructs an instance.
Args:
request: _LogicalRequest instance.
dispatcher: Dispatcher instance (dispatch.Dispatcher).
send_quota: Initial send quota.
receive_quota: Initial receive quota.
"""
hybi.Handshaker.__init__(self, request, dispatcher)
self._send_quota = send_quota
self._receive_quota = receive_quota
# Append headers which should not be included in handshake field of
# AddChannelRequest.
        # TODO(bashi): Decide whether we should raise an exception when
        # these headers are already included.
request.headers_in[common.UPGRADE_HEADER] = (
common.WEBSOCKET_UPGRADE_TYPE)
request.headers_in[common.SEC_WEBSOCKET_VERSION_HEADER] = (
str(common.VERSION_HYBI_LATEST))
request.headers_in[common.SEC_WEBSOCKET_KEY_HEADER] = (
self._DUMMY_WEBSOCKET_KEY)
def _create_stream(self, stream_options):
"""Override hybi.Handshaker._create_stream."""
self._logger.debug('Creating logical stream for %d' %
self._request.channel_id)
return _LogicalStream(
self._request, stream_options, self._send_quota,
self._receive_quota)
def _create_handshake_response(self, accept):
"""Override hybi._create_handshake_response."""
response = []
response.append('HTTP/1.1 101 Switching Protocols\r\n')
# Upgrade and Sec-WebSocket-Accept should be excluded.
response.append('%s: %s\r\n' % (
common.CONNECTION_HEADER, common.UPGRADE_CONNECTION_TYPE))
if self._request.ws_protocol is not None:
response.append('%s: %s\r\n' % (
common.SEC_WEBSOCKET_PROTOCOL_HEADER,
self._request.ws_protocol))
if (self._request.ws_extensions is not None and
len(self._request.ws_extensions) != 0):
response.append('%s: %s\r\n' % (
common.SEC_WEBSOCKET_EXTENSIONS_HEADER,
common.format_extensions(self._request.ws_extensions)))
response.append('\r\n')
return ''.join(response)
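    # For illustration, with no subprotocol or extensions negotiated the
    # response built above reduces to the following text (assuming the
    # common.* constants carry their conventional values 'Connection' and
    # 'Upgrade'):
    #
    #   'HTTP/1.1 101 Switching Protocols\r\n'
    #   'Connection: Upgrade\r\n'
    #   '\r\n'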
def _send_handshake(self, accept):
"""Override hybi.Handshaker._send_handshake."""
# Don't send handshake response for the default channel
if self._request.channel_id == _DEFAULT_CHANNEL_ID:
return
handshake_response = self._create_handshake_response(accept)
frame_data = _create_add_channel_response(
self._request.channel_id,
handshake_response)
self._logger.debug('Sending handshake response for %d: %r' %
(self._request.channel_id, frame_data))
self._request.connection.write_control_data(frame_data)
class _LogicalChannelData(object):
"""A structure that holds information about logical channel.
"""
def __init__(self, request, worker):
self.request = request
self.worker = worker
self.drop_code = _DROP_CODE_NORMAL_CLOSURE
self.drop_message = ''
class _HandshakeDeltaBase(object):
"""A class that holds information for delta-encoded handshake."""
def __init__(self, headers):
self._headers = headers
def create_headers(self, delta=None):
"""Creates request headers for an AddChannelRequest that has
delta-encoded handshake.
Args:
            delta: headers to be overridden or removed.
"""
headers = copy.copy(self._headers)
if delta:
for key, value in delta.items():
# The spec requires that a header with an empty value is
# removed from the delta base.
if len(value) == 0 and headers.has_key(key):
del headers[key]
else:
headers[key] = value
return headers
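# A small sketch of delta-encoded handshake reconstruction; the header names
# are made up purely for illustration:
#
#   base = _HandshakeDeltaBase({'Host': 'example.com', 'X-Foo': 'bar'})
#   base.create_headers({'X-Foo': '', 'X-Baz': 'qux'})
#   # -> {'Host': 'example.com', 'X-Baz': 'qux'}
#
# An empty value removes the header from the delta base; any other value
# overrides or adds it.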
class _MuxHandler(object):
"""Multiplexing handler. When a handler starts, it launches three
threads; the reader thread, the writer thread, and a worker thread.
The reader thread reads data from the physical stream, i.e., the
ws_stream object of the underlying websocket connection. The reader
thread interprets multiplexed frames and dispatches them to logical
channels. Methods of this class are mostly called by the reader thread.
The writer thread sends multiplexed frames which are created by
logical channels via the physical connection.
    The worker thread launched at start-up handles the
    "Implicitly Opened Connection". If the multiplexing handler receives
an AddChannelRequest and accepts it, the handler will launch a new worker
thread and dispatch the request to it.
"""
def __init__(self, request, dispatcher):
"""Constructs an instance.
Args:
request: mod_python request of the physical connection.
dispatcher: Dispatcher instance (dispatch.Dispatcher).
"""
self.original_request = request
self.dispatcher = dispatcher
self.physical_connection = request.connection
self.physical_stream = request.ws_stream
self._logger = util.get_class_logger(self)
self._logical_channels = {}
self._logical_channels_condition = threading.Condition()
# Holds client's initial quota
self._channel_slots = collections.deque()
self._handshake_base = None
self._worker_done_notify_received = False
self._reader = None
self._writer = None
def start(self):
"""Starts the handler.
Raises:
            MuxUnexpectedException: when the handler has already started, or
                when the opening handshake of the default channel fails.
"""
if self._reader or self._writer:
raise MuxUnexpectedException('MuxHandler already started')
self._reader = _PhysicalConnectionReader(self)
self._writer = _PhysicalConnectionWriter(self)
self._reader.start()
self._writer.start()
# Create "Implicitly Opened Connection".
logical_connection = _LogicalConnection(self, _DEFAULT_CHANNEL_ID)
headers = copy.copy(self.original_request.headers_in)
# Add extensions for logical channel.
headers[common.SEC_WEBSOCKET_EXTENSIONS_HEADER] = (
common.format_extensions(
self.original_request.mux_processor.extensions()))
self._handshake_base = _HandshakeDeltaBase(headers)
logical_request = _LogicalRequest(
_DEFAULT_CHANNEL_ID,
self.original_request.method,
self.original_request.uri,
self.original_request.protocol,
self._handshake_base.create_headers(),
logical_connection)
# Client's send quota for the implicitly opened connection is zero,
# but we will send FlowControl later so set the initial quota to
# _INITIAL_QUOTA_FOR_CLIENT.
self._channel_slots.append(_INITIAL_QUOTA_FOR_CLIENT)
send_quota = self.original_request.mux_processor.quota()
if not self._do_handshake_for_logical_request(
logical_request, send_quota=send_quota):
raise MuxUnexpectedException(
'Failed handshake on the default channel id')
self._add_logical_channel(logical_request)
# Send FlowControl for the implicitly opened connection.
frame_data = _create_flow_control(_DEFAULT_CHANNEL_ID,
_INITIAL_QUOTA_FOR_CLIENT)
logical_request.connection.write_control_data(frame_data)
def add_channel_slots(self, slots, send_quota):
"""Adds channel slots.
Args:
slots: number of slots to be added.
send_quota: initial send quota for slots.
"""
self._channel_slots.extend([send_quota] * slots)
# Send NewChannelSlot to client.
frame_data = _create_new_channel_slot(slots, send_quota)
self.send_control_data(frame_data)
def wait_until_done(self, timeout=None):
"""Waits until all workers are done. Returns False when timeout has
occurred. Returns True on success.
Args:
timeout: timeout in sec.
"""
self._logical_channels_condition.acquire()
try:
while len(self._logical_channels) > 0:
self._logger.debug('Waiting workers(%d)...' %
len(self._logical_channels))
self._worker_done_notify_received = False
self._logical_channels_condition.wait(timeout)
if not self._worker_done_notify_received:
self._logger.debug('Waiting worker(s) timed out')
return False
finally:
self._logical_channels_condition.release()
# Flush pending outgoing data
self._writer.stop()
self._writer.join()
return True
def notify_write_data_done(self, channel_id):
"""Called by the writer thread when a write operation has done.
Args:
            channel_id: target channel id.
"""
try:
self._logical_channels_condition.acquire()
if channel_id in self._logical_channels:
channel_data = self._logical_channels[channel_id]
channel_data.request.connection.on_write_data_done()
else:
self._logger.debug('Seems that logical channel for %d has gone'
% channel_id)
finally:
self._logical_channels_condition.release()
def send_control_data(self, data):
"""Sends data via the control channel.
Args:
data: data to be sent.
"""
self._writer.put_outgoing_data(_OutgoingData(
channel_id=_CONTROL_CHANNEL_ID, data=data))
def send_data(self, channel_id, data):
"""Sends data via given logical channel. This method is called by
worker threads.
Args:
            channel_id: target channel id.
data: data to be sent.
"""
self._writer.put_outgoing_data(_OutgoingData(
channel_id=channel_id, data=data))
def _send_drop_channel(self, channel_id, code=None, message=''):
frame_data = _create_drop_channel(channel_id, code, message)
self._logger.debug(
'Sending drop channel for channel id %d' % channel_id)
self.send_control_data(frame_data)
def _send_error_add_channel_response(self, channel_id, status=None):
if status is None:
status = common.HTTP_STATUS_BAD_REQUEST
if status in _HTTP_BAD_RESPONSE_MESSAGES:
message = _HTTP_BAD_RESPONSE_MESSAGES[status]
else:
self._logger.debug('Response message for %d is not found' % status)
message = '???'
response = 'HTTP/1.1 %d %s\r\n\r\n' % (status, message)
frame_data = _create_add_channel_response(channel_id,
encoded_handshake=response,
encoding=0, rejected=True)
self.send_control_data(frame_data)
def _create_logical_request(self, block):
if block.channel_id == _CONTROL_CHANNEL_ID:
# TODO(bashi): Raise PhysicalConnectionError with code 2006
# instead of MuxUnexpectedException.
raise MuxUnexpectedException(
                'Received the control channel id (0) as the target channel '
'id for AddChannel')
if block.encoding > _HANDSHAKE_ENCODING_DELTA:
raise PhysicalConnectionError(
_DROP_CODE_UNKNOWN_REQUEST_ENCODING)
method, path, version, headers = _parse_request_text(
block.encoded_handshake)
if block.encoding == _HANDSHAKE_ENCODING_DELTA:
headers = self._handshake_base.create_headers(headers)
connection = _LogicalConnection(self, block.channel_id)
request = _LogicalRequest(block.channel_id, method, path, version,
headers, connection)
return request
def _do_handshake_for_logical_request(self, request, send_quota=0):
try:
receive_quota = self._channel_slots.popleft()
except IndexError:
raise LogicalChannelError(
request.channel_id, _DROP_CODE_NEW_CHANNEL_SLOT_VIOLATION)
handshaker = _MuxHandshaker(request, self.dispatcher,
send_quota, receive_quota)
try:
handshaker.do_handshake()
except handshake.VersionException, e:
self._logger.info('%s', e)
self._send_error_add_channel_response(
request.channel_id, status=common.HTTP_STATUS_BAD_REQUEST)
return False
except handshake.HandshakeException, e:
# TODO(bashi): Should we _Fail the Logical Channel_ with 3001
# instead?
self._logger.info('%s', e)
self._send_error_add_channel_response(request.channel_id,
status=e.status)
return False
except handshake.AbortedByUserException, e:
self._logger.info('%s', e)
self._send_error_add_channel_response(request.channel_id)
return False
return True
def _add_logical_channel(self, logical_request):
try:
self._logical_channels_condition.acquire()
if logical_request.channel_id in self._logical_channels:
self._logger.debug('Channel id %d already exists' %
logical_request.channel_id)
raise PhysicalConnectionError(
_DROP_CODE_CHANNEL_ALREADY_EXISTS,
'Channel id %d already exists' %
logical_request.channel_id)
worker = _Worker(self, logical_request)
channel_data = _LogicalChannelData(logical_request, worker)
self._logical_channels[logical_request.channel_id] = channel_data
worker.start()
finally:
self._logical_channels_condition.release()
def _process_add_channel_request(self, block):
try:
logical_request = self._create_logical_request(block)
except ValueError, e:
self._logger.debug('Failed to create logical request: %r' % e)
self._send_error_add_channel_response(
block.channel_id, status=common.HTTP_STATUS_BAD_REQUEST)
return
if self._do_handshake_for_logical_request(logical_request):
if block.encoding == _HANDSHAKE_ENCODING_IDENTITY:
# Update handshake base.
# TODO(bashi): Make sure this is the right place to update
# handshake base.
self._handshake_base = _HandshakeDeltaBase(
logical_request.headers_in)
self._add_logical_channel(logical_request)
else:
self._send_error_add_channel_response(
block.channel_id, status=common.HTTP_STATUS_BAD_REQUEST)
def _process_flow_control(self, block):
try:
self._logical_channels_condition.acquire()
if not block.channel_id in self._logical_channels:
return
channel_data = self._logical_channels[block.channel_id]
channel_data.request.ws_stream.replenish_send_quota(
block.send_quota)
finally:
self._logical_channels_condition.release()
def _process_drop_channel(self, block):
self._logger.debug(
'DropChannel received for %d: code=%r, reason=%r' %
(block.channel_id, block.drop_code, block.drop_message))
try:
self._logical_channels_condition.acquire()
if not block.channel_id in self._logical_channels:
return
channel_data = self._logical_channels[block.channel_id]
channel_data.drop_code = _DROP_CODE_ACKNOWLEDGED
# Close the logical channel
channel_data.request.connection.set_read_state(
_LogicalConnection.STATE_TERMINATED)
channel_data.request.ws_stream.stop_sending()
finally:
self._logical_channels_condition.release()
def _process_control_blocks(self, parser):
for control_block in parser.read_control_blocks():
opcode = control_block.opcode
self._logger.debug('control block received, opcode: %d' % opcode)
if opcode == _MUX_OPCODE_ADD_CHANNEL_REQUEST:
self._process_add_channel_request(control_block)
elif opcode == _MUX_OPCODE_ADD_CHANNEL_RESPONSE:
raise PhysicalConnectionError(
_DROP_CODE_INVALID_MUX_CONTROL_BLOCK,
'Received AddChannelResponse')
elif opcode == _MUX_OPCODE_FLOW_CONTROL:
self._process_flow_control(control_block)
elif opcode == _MUX_OPCODE_DROP_CHANNEL:
self._process_drop_channel(control_block)
elif opcode == _MUX_OPCODE_NEW_CHANNEL_SLOT:
raise PhysicalConnectionError(
_DROP_CODE_INVALID_MUX_CONTROL_BLOCK,
'Received NewChannelSlot')
else:
raise MuxUnexpectedException(
'Unexpected opcode %r' % opcode)
def _process_logical_frame(self, channel_id, parser):
self._logger.debug('Received a frame. channel id=%d' % channel_id)
try:
self._logical_channels_condition.acquire()
if not channel_id in self._logical_channels:
# We must ignore the message for an inactive channel.
return
channel_data = self._logical_channels[channel_id]
fin, rsv1, rsv2, rsv3, opcode, payload = parser.read_inner_frame()
consuming_byte = len(payload)
if opcode != common.OPCODE_CONTINUATION:
consuming_byte += 1
if not channel_data.request.ws_stream.consume_receive_quota(
consuming_byte):
# The client violates quota. Close logical channel.
raise LogicalChannelError(
channel_id, _DROP_CODE_SEND_QUOTA_VIOLATION)
header = create_header(opcode, len(payload), fin, rsv1, rsv2, rsv3,
mask=False)
frame_data = header + payload
channel_data.request.connection.append_frame_data(frame_data)
finally:
self._logical_channels_condition.release()
def dispatch_message(self, message):
"""Dispatches message. The reader thread calls this method.
Args:
message: a message that contains encapsulated frame.
Raises:
PhysicalConnectionError: if the message contains physical
connection level errors.
LogicalChannelError: if the message contains logical channel
level errors.
"""
parser = _MuxFramePayloadParser(message)
try:
channel_id = parser.read_channel_id()
except ValueError, e:
raise PhysicalConnectionError(_DROP_CODE_CHANNEL_ID_TRUNCATED)
if channel_id == _CONTROL_CHANNEL_ID:
self._process_control_blocks(parser)
else:
self._process_logical_frame(channel_id, parser)
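# Illustrative sketch, not part of the original module: the reader thread
# is assumed to feed every message received on the physical connection
# into dispatch_message(), e.g.
#
#     message = physical_stream.receive_message()  # hypothetical reader loop
#     mux_handler.dispatch_message(message)
#
# Control blocks (channel id 0) and logical frames then fan out to
# _process_control_blocks() and _process_logical_frame() as defined above.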
def notify_worker_done(self, channel_id):
"""Called when a worker has finished.
Args:
channel_id: channel id corresponded with the worker.
"""
self._logger.debug('Worker for channel id %d terminated' % channel_id)
try:
self._logical_channels_condition.acquire()
if not channel_id in self._logical_channels:
raise MuxUnexpectedException(
'Channel id %d not found' % channel_id)
channel_data = self._logical_channels.pop(channel_id)
finally:
self._worker_done_notify_received = True
self._logical_channels_condition.notify()
self._logical_channels_condition.release()
if not channel_data.request.server_terminated:
self._send_drop_channel(
channel_id, code=channel_data.drop_code,
message=channel_data.drop_message)
def notify_reader_done(self):
"""This method is called by the reader thread when the reader has
finished.
"""
self._logger.debug(
'Terminating all logical connections waiting for incoming data '
'...')
self._logical_channels_condition.acquire()
for channel_data in self._logical_channels.values():
try:
channel_data.request.connection.set_read_state(
_LogicalConnection.STATE_TERMINATED)
except Exception:
self._logger.debug(traceback.format_exc())
self._logical_channels_condition.release()
def notify_writer_done(self):
"""This method is called by the writer thread when the writer has
finished.
"""
self._logger.debug(
'Terminating all logical connections waiting for write '
'completion ...')
self._logical_channels_condition.acquire()
for channel_data in self._logical_channels.values():
try:
channel_data.request.connection.on_writer_done()
except Exception:
self._logger.debug(traceback.format_exc())
self._logical_channels_condition.release()
def fail_physical_connection(self, code, message):
"""Fail the physical connection.
Args:
code: drop reason code.
message: drop message.
"""
self._logger.debug('Failing the physical connection...')
self._send_drop_channel(_CONTROL_CHANNEL_ID, code, message)
self._writer.stop(common.STATUS_INTERNAL_ENDPOINT_ERROR)
def fail_logical_channel(self, channel_id, code, message):
"""Fail a logical channel.
Args:
channel_id: channel id.
code: drop reason code.
message: drop message.
"""
self._logger.debug('Failing logical channel %d...' % channel_id)
try:
self._logical_channels_condition.acquire()
if channel_id in self._logical_channels:
channel_data = self._logical_channels[channel_id]
# Close the logical channel. notify_worker_done() will be
# called later and it will send DropChannel.
channel_data.drop_code = code
channel_data.drop_message = message
channel_data.request.connection.set_read_state(
_LogicalConnection.STATE_TERMINATED)
channel_data.request.ws_stream.stop_sending()
else:
self._send_drop_channel(channel_id, code, message)
finally:
self._logical_channels_condition.release()
def use_mux(request):
return hasattr(request, 'mux_processor') and (
request.mux_processor.is_active())
def start(request, dispatcher):
mux_handler = _MuxHandler(request, dispatcher)
mux_handler.start()
mux_handler.add_channel_slots(_INITIAL_NUMBER_OF_CHANNEL_SLOTS,
_INITIAL_QUOTA_FOR_CLIENT)
mux_handler.wait_until_done()
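# Hypothetical integration sketch (everything except use_mux() and start()
# is an assumption): a handshake handler that negotiated the mux extension
# would hand the request over roughly like this:
#
#     if use_mux(request):
#         start(request, dispatcher)  # blocks until all logical channels end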
# vi:sts=4 sw=4 et
|
oliverdrake/ucbc
|
refs/heads/master
|
main/migrations/0001_initial.py
|
2
|
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'UserRole'
db.create_table('main_userrole', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('name', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)),
('user', self.gf('django.db.models.fields.related.ForeignKey')(related_name='roles', to=orm['auth.User'])),
))
db.send_create_signal('main', ['UserRole'])
def backwards(self, orm):
# Deleting model 'UserRole'
db.delete_table('main_userrole')
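# Usage sketch (standard South workflow, assumed rather than taken from
# this repository):
#
#     ./manage.py migrate main 0001          # applies forwards()
#     ./manage.py migrate main zero          # reverts via backwards()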
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'user_set'", 'blank': 'True', 'to': "orm['auth.Group']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'user_set'", 'blank': 'True', 'to': "orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'main.userrole': {
'Meta': {'object_name': 'UserRole'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'roles'", 'to': "orm['auth.User']"})
}
}
complete_apps = ['main']
|
asnir/airflow
|
refs/heads/master
|
tests/contrib/hooks/test_zendesk_hook.py
|
37
|
# -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import unittest
import mock
from airflow.hooks.zendesk_hook import ZendeskHook
from zdesk import RateLimitError
class TestZendeskHook(unittest.TestCase):
@mock.patch("airflow.hooks.zendesk_hook.time")
def test_sleeps_for_correct_interval(self, mocked_time):
sleep_time = 10
# To break out of the otherwise infinite tries
mocked_time.sleep = mock.Mock(side_effect=ValueError, return_value=3)
conn_mock = mock.Mock()
mock_response = mock.Mock()
mock_response.headers.get.return_value = sleep_time
conn_mock.call = mock.Mock(
side_effect=RateLimitError(msg="some message", code="some code",
response=mock_response))
zendesk_hook = ZendeskHook("conn_id")
zendesk_hook.get_conn = mock.Mock(return_value=conn_mock)
with self.assertRaises(ValueError):
zendesk_hook.call("some_path", get_all_pages=False)
mocked_time.sleep.assert_called_with(sleep_time)
@mock.patch("airflow.hooks.zendesk_hook.Zendesk")
def test_returns_single_page_if_get_all_pages_false(self, _):
zendesk_hook = ZendeskHook("conn_id")
mock_connection = mock.Mock()
mock_connection.host = "some_host"
zendesk_hook.get_connection = mock.Mock(return_value=mock_connection)
zendesk_hook.get_conn()
mock_conn = mock.Mock()
mock_call = mock.Mock(
return_value={'next_page': 'https://some_host/something', 'path':
[]})
mock_conn.call = mock_call
zendesk_hook.get_conn = mock.Mock(return_value=mock_conn)
zendesk_hook.call("path", get_all_pages=False)
mock_call.assert_called_once_with("path", None)
@mock.patch("airflow.hooks.zendesk_hook.Zendesk")
def test_returns_multiple_pages_if_get_all_pages_true(self, _):
zendesk_hook = ZendeskHook("conn_id")
mock_connection = mock.Mock()
mock_connection.host = "some_host"
zendesk_hook.get_connection = mock.Mock(return_value=mock_connection)
zendesk_hook.get_conn()
mock_conn = mock.Mock()
mock_call = mock.Mock(
return_value={'next_page': 'https://some_host/something', 'path': []})
mock_conn.call = mock_call
zendesk_hook.get_conn = mock.Mock(return_value=mock_conn)
zendesk_hook.call("path", get_all_pages=True)
assert mock_call.call_count == 2
@mock.patch("airflow.hooks.zendesk_hook.Zendesk")
def test_zdesk_is_inited_correctly(self, mock_zendesk):
conn_mock = mock.Mock()
conn_mock.host = "conn_host"
conn_mock.login = "conn_login"
conn_mock.password = "conn_pass"
zendesk_hook = ZendeskHook("conn_id")
zendesk_hook.get_connection = mock.Mock(return_value=conn_mock)
zendesk_hook.get_conn()
mock_zendesk.assert_called_with('https://conn_host', 'conn_login',
'conn_pass', True)
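# The tests above share one pattern: stub out ZendeskHook.get_conn with a
# mock so no network traffic happens. A minimal sketch of that pattern
# (fake_conn and its return value are hypothetical stand-ins):
#
#     hook = ZendeskHook("conn_id")
#     fake_conn = mock.Mock()
#     fake_conn.call = mock.Mock(return_value={'next_page': '', 'path': []})
#     hook.get_conn = mock.Mock(return_value=fake_conn)
#     hook.call("path", get_all_pages=False)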
|
waheedahmed/edx-platform
|
refs/heads/master
|
cms/djangoapps/contentstore/views/transcripts_ajax.py
|
140
|
"""
Actions manager for transcripts ajax calls.
+++++++++++++++++++++++++++++++++++++++++++
Module does not support rollback (pressing the "Cancel" button in Studio).
All user changes are saved immediately.
"""
import copy
import os
import logging
import json
import requests
from django.http import HttpResponse, Http404
from django.core.exceptions import PermissionDenied
from django.contrib.auth.decorators import login_required
from django.conf import settings
from django.utils.translation import ugettext as _
from opaque_keys import InvalidKeyError
from xmodule.contentstore.content import StaticContent
from xmodule.exceptions import NotFoundError
from xmodule.modulestore.django import modulestore
from opaque_keys.edx.keys import UsageKey
from xmodule.contentstore.django import contentstore
from xmodule.modulestore.exceptions import ItemNotFoundError
from util.json_request import JsonResponse
from xmodule.video_module.transcripts_utils import (
generate_subs_from_source,
generate_srt_from_sjson, remove_subs_from_store,
download_youtube_subs, get_transcripts_from_youtube,
copy_or_rename_transcript,
manage_video_subtitles_save,
GetTranscriptsFromYouTubeException,
TranscriptsRequestValidationException,
youtube_video_transcript_name,
)
from student.auth import has_course_author_access
__all__ = [
'upload_transcripts',
'download_transcripts',
'check_transcripts',
'choose_transcripts',
'replace_transcripts',
'rename_transcripts',
'save_transcripts',
]
log = logging.getLogger(__name__)
def error_response(response, message, status_code=400):
"""
Simplify similar actions: log the message and return a JsonResponse with the message included in the response.
By default, returns a 400 (Bad Request) response.
"""
log.debug(message)
response['status'] = message
return JsonResponse(response, status_code)
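# Usage sketch (hypothetical caller): views build a mutable response dict
# up front and fall back to error_response() on any failure, e.g.
#
#     response = {'status': 'Unknown server error'}
#     if 'locator' not in request.POST:
#         return error_response(response, 'POST data without "locator" form data.')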
@login_required
def upload_transcripts(request):
"""
Upload transcripts for current module.
returns: response dict::
status: 'Success' and HTTP 200 or 'Error' and HTTP 400.
subs: Value of uploaded and saved html5 sub field in video item.
"""
response = {
'status': 'Unknown server error',
'subs': '',
}
locator = request.POST.get('locator')
if not locator:
return error_response(response, 'POST data without "locator" form data.')
try:
item = _get_item(request, request.POST)
except (InvalidKeyError, ItemNotFoundError):
return error_response(response, "Can't find item by locator.")
if 'transcript-file' not in request.FILES:
return error_response(response, 'POST data without "file" form data.')
video_list = request.POST.get('video_list')
if not video_list:
return error_response(response, 'POST data without video names.')
try:
video_list = json.loads(video_list)
except ValueError:
return error_response(response, 'Invalid video_list JSON.')
# Used utf-8-sig encoding type instead of utf-8 to remove BOM(Byte Order Mark), e.g. U+FEFF
source_subs_filedata = request.FILES['transcript-file'].read().decode('utf-8-sig')
source_subs_filename = request.FILES['transcript-file'].name
if '.' not in source_subs_filename:
return error_response(response, "Undefined file extension.")
basename = os.path.basename(source_subs_filename)
source_subs_name = os.path.splitext(basename)[0]
source_subs_ext = os.path.splitext(basename)[1][1:]
if item.category != 'video':
return error_response(response, 'Transcripts are supported only for "video" modules.')
# Allow upload only if any video link is presented
if video_list:
sub_attr = source_subs_name
try:
# Generate and save for 1.0 speed, will create subs_sub_attr.srt.sjson subtitles file in storage.
generate_subs_from_source({1: sub_attr}, source_subs_ext, source_subs_filedata, item)
for video_dict in video_list:
video_name = video_dict['video']
# We create transcripts for every video source, in case some of the video sources are deleted in the future.
# Updates item.sub with `video_name` on success.
copy_or_rename_transcript(video_name, sub_attr, item, user=request.user)
response['subs'] = item.sub
response['status'] = 'Success'
except Exception as ex:
return error_response(response, ex.message)
else:
return error_response(response, 'Empty video sources.')
return JsonResponse(response)
@login_required
def download_transcripts(request):
"""
Returns the requested transcripts file to the user.
Raises Http404 if unsuccessful.
"""
locator = request.GET.get('locator')
if not locator:
log.debug('GET data without "locator" property.')
raise Http404
try:
item = _get_item(request, request.GET)
except (InvalidKeyError, ItemNotFoundError):
log.debug("Can't find item by locator.")
raise Http404
subs_id = request.GET.get('subs_id')
if not subs_id:
log.debug('GET data without "subs_id" property.')
raise Http404
if item.category != 'video':
log.debug('Transcripts are supported only for "video" modules.')
raise Http404
filename = 'subs_{0}.srt.sjson'.format(subs_id)
content_location = StaticContent.compute_location(item.location.course_key, filename)
try:
sjson_transcripts = contentstore().find(content_location)
log.debug("Downloading subs for %s id", subs_id)
str_subs = generate_srt_from_sjson(json.loads(sjson_transcripts.data), speed=1.0)
if not str_subs:
log.debug('generate_srt_from_sjson produces no subtitles')
raise Http404
response = HttpResponse(str_subs, content_type='application/x-subrip')
response['Content-Disposition'] = 'attachment; filename="{0}.srt"'.format(subs_id)
return response
except NotFoundError:
log.debug("Can't find content in storage for %s subs", subs_id)
raise Http404
@login_required
def check_transcripts(request):
"""
Check state of transcripts availability.
request.GET['data'] has key `videos`, which can contain any of the following::
[
{u'type': u'youtube', u'video': u'OEoXaMPEzfM', u'mode': u'youtube'},
{u'type': u'html5', u'video': u'video1', u'mode': u'mp4'},
{u'type': u'html5', u'video': u'video2', u'mode': u'webm'}
]
`type` is youtube or html5
`video` is html5 or youtube video_id
`mode` is youtube, mp4 or webm
Returns transcripts_presence dict::
html5_local: list of html5 ids, if subtitles exist locally for them;
is_youtube_mode: bool, True if we have a youtube_id; since youtube mode has higher priority, this flag reflects it;
youtube_local: bool, if youtube transcripts exist locally;
youtube_server: bool, if youtube transcripts exist on server;
youtube_diff: bool, if youtube transcripts exist on youtube server, and are different from local youtube ones;
current_item_subs: string, value of item.sub field;
status: string, 'Error' or 'Success';
subs: string, new value of item.sub field, that should be set in module;
command: string, tells the front-end what to do and what to show to the user.
"""
transcripts_presence = {
'html5_local': [],
'html5_equal': False,
'is_youtube_mode': False,
'youtube_local': False,
'youtube_server': False,
'youtube_diff': True,
'current_item_subs': None,
'status': 'Error',
}
try:
__, videos, item = _validate_transcripts_data(request)
except TranscriptsRequestValidationException as e:
return error_response(transcripts_presence, e.message)
transcripts_presence['status'] = 'Success'
filename = 'subs_{0}.srt.sjson'.format(item.sub)
content_location = StaticContent.compute_location(item.location.course_key, filename)
try:
local_transcripts = contentstore().find(content_location).data
transcripts_presence['current_item_subs'] = item.sub
except NotFoundError:
pass
# Check for youtube transcripts presence
youtube_id = videos.get('youtube', None)
if youtube_id:
transcripts_presence['is_youtube_mode'] = True
# youtube local
filename = 'subs_{0}.srt.sjson'.format(youtube_id)
content_location = StaticContent.compute_location(item.location.course_key, filename)
try:
local_transcripts = contentstore().find(content_location).data
transcripts_presence['youtube_local'] = True
except NotFoundError:
log.debug("Can't find transcripts in storage for youtube id: %s", youtube_id)
# youtube server
youtube_text_api = copy.deepcopy(settings.YOUTUBE['TEXT_API'])
youtube_text_api['params']['v'] = youtube_id
youtube_transcript_name = youtube_video_transcript_name(youtube_text_api)
if youtube_transcript_name:
youtube_text_api['params']['name'] = youtube_transcript_name
youtube_response = requests.get('http://' + youtube_text_api['url'], params=youtube_text_api['params'])
if youtube_response.status_code == 200 and youtube_response.text:
transcripts_presence['youtube_server'] = True
# check youtube local and server transcripts for equality
if transcripts_presence['youtube_server'] and transcripts_presence['youtube_local']:
try:
youtube_server_subs = get_transcripts_from_youtube(
youtube_id,
settings,
item.runtime.service(item, "i18n")
)
if json.loads(local_transcripts) == youtube_server_subs: # check transcripts for equality
transcripts_presence['youtube_diff'] = False
except GetTranscriptsFromYouTubeException:
pass
# Check for html5 local transcripts presence
html5_subs = []
for html5_id in videos['html5']:
filename = 'subs_{0}.srt.sjson'.format(html5_id)
content_location = StaticContent.compute_location(item.location.course_key, filename)
try:
html5_subs.append(contentstore().find(content_location).data)
transcripts_presence['html5_local'].append(html5_id)
except NotFoundError:
log.debug("Can't find transcripts in storage for non-youtube video_id: %s", html5_id)
if len(html5_subs) == 2: # check html5 transcripts for equality
transcripts_presence['html5_equal'] = json.loads(html5_subs[0]) == json.loads(html5_subs[1])
command, subs_to_use = _transcripts_logic(transcripts_presence, videos)
transcripts_presence.update({
'command': command,
'subs': subs_to_use,
})
return JsonResponse(transcripts_presence)
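# Illustrative response payload (values are hypothetical; the shape follows
# the transcripts_presence dict built above):
#
#     {
#         "html5_local": [], "html5_equal": false,
#         "is_youtube_mode": true, "youtube_local": true,
#         "youtube_server": true, "youtube_diff": false,
#         "current_item_subs": "OEoXaMPEzfM", "status": "Success",
#         "command": "found", "subs": "OEoXaMPEzfM"
#     }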
def _transcripts_logic(transcripts_presence, videos):
"""
From the `transcripts_presence` content, figure out what to show to the user.
returns: `command` and `subs`.
`command`: string, tells the front-end what to do and what to show to the user.
`subs`: string, new value of the item.sub field that should be set in the module.
`command` is one of::
replace: replace local youtube subtitles with the server ones
found: subtitles are found
import: import subtitles from youtube server
choose: choose one from two html5 subtitles
not found: subtitles are not found
"""
command = None
# new value of item.sub field, that should be set in module.
subs = ''
# youtube transcripts have higher priority than html5 by design
if (
transcripts_presence['youtube_diff'] and
transcripts_presence['youtube_local'] and
transcripts_presence['youtube_server']): # youtube server and local exist
command = 'replace'
subs = videos['youtube']
elif transcripts_presence['youtube_local']: # only youtube local exist
command = 'found'
subs = videos['youtube']
elif transcripts_presence['youtube_server']: # only youtube server exist
command = 'import'
else: # html5 part
if transcripts_presence['html5_local']: # can be 1 or 2 html5 videos
if len(transcripts_presence['html5_local']) == 1 or transcripts_presence['html5_equal']:
command = 'found'
subs = transcripts_presence['html5_local'][0]
else:
command = 'choose'
subs = transcripts_presence['html5_local'][0]
else: # html5 sources have no subtitles
# check if item sub has subtitles
if transcripts_presence['current_item_subs'] and not transcripts_presence['is_youtube_mode']:
log.debug("Command is use existing %s subs", transcripts_presence['current_item_subs'])
command = 'use_existing'
else:
command = 'not_found'
log.debug(
"Resulted command: %s, current transcripts: %s, youtube mode: %s",
command,
transcripts_presence['current_item_subs'],
transcripts_presence['is_youtube_mode']
)
return command, subs
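# Decision summary, derived from the branches above (youtube wins over
# html5 by design):
#
#     youtube_local  youtube_server  youtube_diff  ->  command
#     True           True            True              replace
#     True           False or equal  -                 found
#     False          True            -                 import
#     neither: one html5 sub (or two equal ones) -> found; two differing
#     html5 subs -> choose; none -> use_existing or not_found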
@login_required
def choose_transcripts(request):
"""
Replaces the html5 subtitles presented for both html5 sources with the chosen one.
The code removes the rejected html5 subtitles and updates the sub attribute with the chosen html5_id.
It does nothing with youtube ids.
Returns: status `Success` and resulted item.sub value or status `Error` and HTTP 400.
"""
response = {
'status': 'Error',
'subs': '',
}
try:
data, videos, item = _validate_transcripts_data(request)
except TranscriptsRequestValidationException as e:
return error_response(response, e.message)
html5_id = data.get('html5_id') # html5_id chosen by user
# find rejected html5_id and remove appropriate subs from store
html5_id_to_remove = [x for x in videos['html5'] if x != html5_id]
if html5_id_to_remove:
remove_subs_from_store(html5_id_to_remove, item)
if item.sub != html5_id: # update sub value
item.sub = html5_id
item.save_with_metadata(request.user)
response = {
'status': 'Success',
'subs': item.sub,
}
return JsonResponse(response)
@login_required
def replace_transcripts(request):
"""
Replaces all transcripts with youtube ones.
Downloads subtitles from youtube and replaces all transcripts with downloaded ones.
Returns: status `Success` and resulted item.sub value or status `Error` and HTTP 400.
"""
response = {'status': 'Error', 'subs': ''}
try:
__, videos, item = _validate_transcripts_data(request)
except TranscriptsRequestValidationException as e:
return error_response(response, e.message)
youtube_id = videos['youtube']
if not youtube_id:
return error_response(response, 'YouTube id {} is not present in the request data.'.format(youtube_id))
try:
download_youtube_subs(youtube_id, item, settings)
except GetTranscriptsFromYouTubeException as e:
return error_response(response, e.message)
item.sub = youtube_id
item.save_with_metadata(request.user)
response = {
'status': 'Success',
'subs': item.sub,
}
return JsonResponse(response)
def _validate_transcripts_data(request):
"""
Validates that the request contains all data needed for transcripts processing.
Returns tuple of 3 elements::
data: dict, loaded json from request,
videos: parsed `data` to useful format,
item: video item from storage
Raises `TranscriptsRequestValidationException` if validation is unsuccessful
or `PermissionDenied` if user has no access.
"""
data = json.loads(request.GET.get('data', '{}'))
if not data:
raise TranscriptsRequestValidationException(_('Incoming video data is empty.'))
try:
item = _get_item(request, data)
except (InvalidKeyError, ItemNotFoundError):
raise TranscriptsRequestValidationException(_("Can't find item by locator."))
if item.category != 'video':
raise TranscriptsRequestValidationException(_('Transcripts are supported only for "video" modules.'))
# parse data from request.GET['data']['videos'] into a useful format
videos = {'youtube': '', 'html5': {}}
for video_data in data.get('videos'):
if video_data['type'] == 'youtube':
videos['youtube'] = video_data['video']
else: # do not add same html5 videos
if videos['html5'].get('video') != video_data['video']:
videos['html5'][video_data['video']] = video_data['mode']
return data, videos, item
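# Worked example (hypothetical input): for
#     data = {'locator': '...', 'videos': [
#         {'type': 'youtube', 'video': 'OEoXaMPEzfM', 'mode': 'youtube'},
#         {'type': 'html5', 'video': 'video1', 'mode': 'mp4'}]}
# the parsed result is
#     videos == {'youtube': 'OEoXaMPEzfM', 'html5': {'video1': 'mp4'}}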
@login_required
def rename_transcripts(request):
"""
Creates copies of the existing subtitles under the new names of the HTML5 sources.
Old subtitles are not deleted for now, because we do not have rollback functionality.
If it succeeds, item.sub will be chosen randomly from the html5 video sources provided by the front-end.
"""
response = {'status': 'Error', 'subs': ''}
try:
__, videos, item = _validate_transcripts_data(request)
except TranscriptsRequestValidationException as e:
return error_response(response, e.message)
old_name = item.sub
for new_name in videos['html5'].keys(): # copy subtitles for every HTML5 source
try:
# updates item.sub with new_name if it is successful.
copy_or_rename_transcript(new_name, old_name, item, user=request.user)
except NotFoundError:
# The subtitles file `item.sub` is not present in the system. Nothing to copy or rename.
return error_response(response, "Can't find transcripts in storage for {}".format(old_name))
response['status'] = 'Success'
response['subs'] = item.sub # item.sub has been changed, it is not equal to old_name.
log.debug("Updated item.sub to %s", item.sub)
return JsonResponse(response)
@login_required
def save_transcripts(request):
"""
Saves video module with updated values of fields.
Returns: status `Success` or status `Error` and HTTP 400.
"""
response = {'status': 'Error'}
data = json.loads(request.GET.get('data', '{}'))
if not data:
return error_response(response, 'Incoming video data is empty.')
try:
item = _get_item(request, data)
except (InvalidKeyError, ItemNotFoundError):
return error_response(response, "Can't find item by locator.")
metadata = data.get('metadata')
if metadata is not None:
new_sub = metadata.get('sub')
for metadata_key, value in metadata.items():
setattr(item, metadata_key, value)
item.save_with_metadata(request.user) # item becomes updated with new values
if new_sub:
manage_video_subtitles_save(item, request.user)
else:
# If `new_sub` is empty, it means that user explicitly does not want to use
# transcripts for current video ids and we remove all transcripts from storage.
current_subs = data.get('current_subs')
if current_subs is not None:
for sub in current_subs:
remove_subs_from_store(sub, item)
response['status'] = 'Success'
return JsonResponse(response)
def _get_item(request, data):
"""
Obtains from 'data' the locator for an item.
Next, gets that item from the modulestore (allowing any errors to raise up).
Finally, verifies that the user has access to the item.
Returns the item.
"""
usage_key = UsageKey.from_string(data.get('locator'))
# This is placed before has_course_author_access() to validate the location,
# because has_course_author_access() raises an error if the location is invalid.
item = modulestore().get_item(usage_key)
# use the item's course_key, because the usage_key might not have the run
if not has_course_author_access(request.user, item.location.course_key):
raise PermissionDenied()
return item
|
sahmed95/sympy
|
refs/heads/master
|
sympy/polys/tests/test_ring_series.py
|
12
|
from sympy.polys.domains import QQ, EX, RR
from sympy.polys.rings import ring
from sympy.polys.ring_series import (_invert_monoms, rs_integrate,
rs_trunc, rs_mul, rs_square, rs_pow, _has_constant_term, rs_hadamard_exp,
rs_series_from_list, rs_exp, rs_log, rs_newton, rs_series_inversion,
rs_compose_add, rs_asin, rs_atan, rs_atanh, rs_tan, rs_cot, rs_sin, rs_cos,
rs_cos_sin, rs_sinh, rs_cosh, rs_tanh, _tan1, rs_fun, rs_nth_root,
rs_LambertW, rs_series_reversion, rs_is_puiseux, rs_series)
from sympy.utilities.pytest import raises
from sympy.core.compatibility import range
from sympy.core.symbol import symbols
from sympy.functions import (sin, cos, exp, tan, cot, atan, asin, atanh,
tanh, log, sqrt)
from sympy.core.numbers import Rational
from sympy.core import expand
def is_close(a, b):
tol = 10**(-10)
assert abs(a - b) < tol
def test_ring_series1():
R, x = ring('x', QQ)
p = x**4 + 2*x**3 + 3*x + 4
assert _invert_monoms(p) == 4*x**4 + 3*x**3 + 2*x + 1
assert rs_hadamard_exp(p) == x**4/24 + x**3/3 + 3*x + 4
R, x = ring('x', QQ)
p = x**4 + 2*x**3 + 3*x + 4
assert rs_integrate(p, x) == x**5/5 + x**4/2 + 3*x**2/2 + 4*x
R, x, y = ring('x, y', QQ)
p = x**2*y**2 + x + 1
assert rs_integrate(p, x) == x**3*y**2/3 + x**2/2 + x
assert rs_integrate(p, y) == x**2*y**3/3 + x*y + y
def test_trunc():
R, x, y, t = ring('x, y, t', QQ)
p = (y + t*x)**4
p1 = rs_trunc(p, x, 3)
assert p1 == y**4 + 4*y**3*t*x + 6*y**2*t**2*x**2
def test_mul_trunc():
R, x, y, t = ring('x, y, t', QQ)
p = 1 + t*x + t*y
for i in range(2):
p = rs_mul(p, p, t, 3)
assert p == 6*x**2*t**2 + 12*x*y*t**2 + 6*y**2*t**2 + 4*x*t + 4*y*t + 1
p = 1 + t*x + t*y + t**2*x*y
p1 = rs_mul(p, p, t, 2)
assert p1 == 1 + 2*t*x + 2*t*y
R1, z = ring('z', QQ)
def test1(p):
p2 = rs_mul(p, z, x, 2)
raises(ValueError, lambda: test1(p))
p1 = 2 + 2*x + 3*x**2
p2 = 3 + x**2
assert rs_mul(p1, p2, x, 4) == 2*x**3 + 11*x**2 + 6*x + 6
def test_square_trunc():
R, x, y, t = ring('x, y, t', QQ)
p = (1 + t*x + t*y)*2
p1 = rs_mul(p, p, x, 3)
p2 = rs_square(p, x, 3)
assert p1 == p2
p = 1 + x + x**2 + x**3
assert rs_square(p, x, 4) == 4*x**3 + 3*x**2 + 2*x + 1
def test_pow_trunc():
R, x, y, z = ring('x, y, z', QQ)
p0 = y + x*z
p = p0**16
for xx in (x, y, z):
p1 = rs_trunc(p, xx, 8)
p2 = rs_pow(p0, 16, xx, 8)
assert p1 == p2
p = 1 + x
p1 = rs_pow(p, 3, x, 2)
assert p1 == 1 + 3*x
assert rs_pow(p, 0, x, 2) == 1
assert rs_pow(p, -2, x, 2) == 1 - 2*x
p = x + y
assert rs_pow(p, 3, y, 3) == x**3 + 3*x**2*y + 3*x*y**2
assert rs_pow(1 + x, Rational(2, 3), x, 4) == 4*x**3/81 - x**2/9 + 2*x/3 + 1
def test_has_constant_term():
R, x, y, z = ring('x, y, z', QQ)
p = y + x*z
assert _has_constant_term(p, x)
p = x + x**4
assert not _has_constant_term(p, x)
p = 1 + x + x**4
assert _has_constant_term(p, x)
p = x + y + x*z
def test_inversion():
R, x = ring('x', QQ)
p = 2 + x + 2*x**2
n = 5
p1 = rs_series_inversion(p, x, n)
assert rs_trunc(p*p1, x, n) == 1
R, x, y = ring('x, y', QQ)
p = 2 + x + 2*x**2 + y*x + x**2*y
p1 = rs_series_inversion(p, x, n)
assert rs_trunc(p*p1, x, n) == 1
R, x, y = ring('x, y', QQ)
p = 1 + x + y
def test2(p):
p1 = rs_series_inversion(p, x, 4)
raises(NotImplementedError, lambda: test2(p))
p = R.zero
def test3(p):
p1 = rs_series_inversion(p, x, 3)
raises(ZeroDivisionError, lambda: test3(p))
def test_series_reversion():
R, x, y = ring('x, y', QQ)
p = rs_tan(x, x, 10)
r1 = rs_series_reversion(p, x, 8, y)
r2 = rs_atan(y, y, 8)
assert rs_series_reversion(p, x, 8, y) == rs_atan(y, y, 8)
p = rs_sin(x, x, 10)
assert rs_series_reversion(p, x, 8, y) == 5*y**7/112 + 3*y**5/40 + \
y**3/6 + y
def test_series_from_list():
R, x = ring('x', QQ)
p = 1 + 2*x + x**2 + 3*x**3
c = [1, 2, 0, 4, 4]
r = rs_series_from_list(p, c, x, 5)
pc = R.from_list(list(reversed(c)))
r1 = rs_trunc(pc.compose(x, p), x, 5)
assert r == r1
R, x, y = ring('x, y', QQ)
c = [1, 3, 5, 7]
p1 = rs_series_from_list(x + y, c, x, 3, concur=0)
p2 = rs_trunc((1 + 3*(x+y) + 5*(x+y)**2 + 7*(x+y)**3), x, 3)
assert p1 == p2
R, x = ring('x', QQ)
h = 25
p = rs_exp(x, x, h) - 1
p1 = rs_series_from_list(p, c, x, h)
p2 = 0
for i, cx in enumerate(c):
p2 += cx*rs_pow(p, i, x, h)
assert p1 == p2
def test_log():
R, x = ring('x', QQ)
p = 1 + x
p1 = rs_log(p, x, 4)
assert p1 == x - x**2/2 + x**3/3
p = 1 + x + 2*x**2/3
p1 = rs_log(p, x, 9)
assert p1 == -17*x**8/648 + 13*x**7/189 - 11*x**6/162 - x**5/45 + \
7*x**4/36 - x**3/3 + x**2/6 + x
p2 = rs_series_inversion(p, x, 9)
p3 = rs_log(p2, x, 9)
assert p3 == -p1
R, x, y = ring('x, y', QQ)
p = 1 + x + 2*y*x**2
p1 = rs_log(p, x, 6)
assert p1 == (4*x**5*y**2 - 2*x**5*y - 2*x**4*y**2 + x**5/5 + 2*x**4*y -
x**4/4 - 2*x**3*y + x**3/3 + 2*x**2*y - x**2/2 + x)
# Constant term in series
a = symbols('a')
R, x, y = ring('x, y', EX)
assert rs_log(x + a, x, 5) == -EX(1/(4*a**4))*x**4 + EX(1/(3*a**3))*x**3 \
- EX(1/(2*a**2))*x**2 + EX(1/a)*x + EX(log(a))
assert rs_log(x + x**2*y + a, x, 4) == -EX(a**(-2))*x**3*y + \
EX(1/(3*a**3))*x**3 + EX(1/a)*x**2*y - EX(1/(2*a**2))*x**2 + \
EX(1/a)*x + EX(log(a))
p = x + x**2 + 3
assert rs_log(p, x, 10).compose(x, 5) == EX(log(3) + 19281291595/9920232)
def test_exp():
R, x = ring('x', QQ)
p = x + x**4
for h in [10, 30]:
q = rs_series_inversion(1 + p, x, h) - 1
p1 = rs_exp(q, x, h)
q1 = rs_log(p1, x, h)
assert q1 == q
p1 = rs_exp(p, x, 30)
assert p1.coeff(x**29) == QQ(74274246775059676726972369, 353670479749588078181744640000)
prec = 21
p = rs_log(1 + x, x, prec)
p1 = rs_exp(p, x, prec)
assert p1 == x + 1
# Constant term in series
a = symbols('a')
R, x, y = ring('x, y', QQ[exp(a), a])
assert rs_exp(x + a, x, 5) == exp(a)*x**4/24 + exp(a)*x**3/6 + \
exp(a)*x**2/2 + exp(a)*x + exp(a)
assert rs_exp(x + x**2*y + a, x, 5) == exp(a)*x**4*y**2/2 + \
exp(a)*x**4*y/2 + exp(a)*x**4/24 + exp(a)*x**3*y + \
exp(a)*x**3/6 + exp(a)*x**2*y + exp(a)*x**2/2 + exp(a)*x + exp(a)
R, x, y = ring('x, y', EX)
assert rs_exp(x + a, x, 5) == EX(exp(a)/24)*x**4 + EX(exp(a)/6)*x**3 + \
EX(exp(a)/2)*x**2 + EX(exp(a))*x + EX(exp(a))
assert rs_exp(x + x**2*y + a, x, 5) == EX(exp(a)/2)*x**4*y**2 + \
EX(exp(a)/2)*x**4*y + EX(exp(a)/24)*x**4 + EX(exp(a))*x**3*y + \
EX(exp(a)/6)*x**3 + EX(exp(a))*x**2*y + EX(exp(a)/2)*x**2 + \
EX(exp(a))*x + EX(exp(a))
def test_newton():
R, x = ring('x', QQ)
p = x**2 - 2
r = rs_newton(p, x, 4)
f = [1, 0, -2]
assert r == 8*x**4 + 4*x**2 + 2
def test_compose_add():
R, x = ring('x', QQ)
p1 = x**3 - 1
p2 = x**2 - 2
assert rs_compose_add(p1, p2) == x**6 - 6*x**4 - 2*x**3 + 12*x**2 - 12*x - 7
def test_fun():
R, x, y = ring('x, y', QQ)
p = x*y + x**2*y**3 + x**5*y
assert rs_fun(p, rs_tan, x, 10) == rs_tan(p, x, 10)
assert rs_fun(p, _tan1, x, 10) == _tan1(p, x, 10)
def test_nth_root():
R, x, y = ring('x, y', QQ)
r1 = rs_nth_root(1 + x**2*y, 4, x, 10)
assert rs_nth_root(1 + x**2*y, 4, x, 10) == -77*x**8*y**4/2048 + \
7*x**6*y**3/128 - 3*x**4*y**2/32 + x**2*y/4 + 1
assert rs_nth_root(1 + x*y + x**2*y**3, 3, x, 5) == -x**4*y**6/9 + \
5*x**4*y**5/27 - 10*x**4*y**4/243 - 2*x**3*y**4/9 + 5*x**3*y**3/81 + \
x**2*y**3/3 - x**2*y**2/9 + x*y/3 + 1
assert rs_nth_root(8*x, 3, x, 3) == 2*x**QQ(1, 3)
assert rs_nth_root(8*x + x**2 + x**3, 3, x, 3) == x**QQ(4,3)/12 + 2*x**QQ(1,3)
r = rs_nth_root(8*x + x**2*y + x**3, 3, x, 4)
assert r == -x**QQ(7,3)*y**2/288 + x**QQ(7,3)/12 + x**QQ(4,3)*y/12 + 2*x**QQ(1,3)
# Constant term in series
a = symbols('a')
R, x, y = ring('x, y', EX)
assert rs_nth_root(x + a, 3, x, 4) == EX(5/(81*a**QQ(8, 3)))*x**3 - \
EX(1/(9*a**QQ(5, 3)))*x**2 + EX(1/(3*a**QQ(2, 3)))*x + EX(a**QQ(1, 3))
assert rs_nth_root(x**QQ(2, 3) + x**2*y + 5, 2, x, 3) == -EX(sqrt(5)/100)*\
x**QQ(8, 3)*y - EX(sqrt(5)/16000)*x**QQ(8, 3) + EX(sqrt(5)/10)*x**2*y + \
EX(sqrt(5)/2000)*x**2 - EX(sqrt(5)/200)*x**QQ(4, 3) + \
EX(sqrt(5)/10)*x**QQ(2, 3) + EX(sqrt(5))
def test_atan():
R, x, y = ring('x, y', QQ)
assert rs_atan(x, x, 9) == -x**7/7 + x**5/5 - x**3/3 + x
assert rs_atan(x*y + x**2*y**3, x, 9) == 2*x**8*y**11 - x**8*y**9 + \
2*x**7*y**9 - x**7*y**7/7 - x**6*y**9/3 + x**6*y**7 - x**5*y**7 + \
x**5*y**5/5 - x**4*y**5 - x**3*y**3/3 + x**2*y**3 + x*y
# Constant term in series
a = symbols('a')
R, x, y = ring('x, y', EX)
assert rs_atan(x + a, x, 5) == -EX((a**3 - a)/(a**8 + 4*a**6 + 6*a**4 + \
4*a**2 + 1))*x**4 + EX((3*a**2 - 1)/(3*a**6 + 9*a**4 + \
9*a**2 + 3))*x**3 - EX(a/(a**4 + 2*a**2 + 1))*x**2 + \
EX(1/(a**2 + 1))*x + EX(atan(a))
assert rs_atan(x + x**2*y + a, x, 4) == -EX(2*a/(a**4 + 2*a**2 + 1)) \
*x**3*y + EX((3*a**2 - 1)/(3*a**6 + 9*a**4 + 9*a**2 + 3))*x**3 + \
EX(1/(a**2 + 1))*x**2*y - EX(a/(a**4 + 2*a**2 + 1))*x**2 + EX(1/(a**2 \
+ 1))*x + EX(atan(a))
def test_asin():
R, x, y = ring('x, y', QQ)
assert rs_asin(x + x*y, x, 5) == x**3*y**3/6 + x**3*y**2/2 + x**3*y/2 + \
x**3/6 + x*y + x
assert rs_asin(x*y + x**2*y**3, x, 6) == x**5*y**7/2 + 3*x**5*y**5/40 + \
x**4*y**5/2 + x**3*y**3/6 + x**2*y**3 + x*y
def test_tan():
R, x, y = ring('x, y', QQ)
assert rs_tan(x, x, 9) == \
x + x**3/3 + 2*x**5/15 + 17*x**7/315
assert rs_tan(x*y + x**2*y**3, x, 9) == 4*x**8*y**11/3 + 17*x**8*y**9/45 + \
4*x**7*y**9/3 + 17*x**7*y**7/315 + x**6*y**9/3 + 2*x**6*y**7/3 + \
x**5*y**7 + 2*x**5*y**5/15 + x**4*y**5 + x**3*y**3/3 + x**2*y**3 + x*y
# Constant term in series
a = symbols('a')
R, x, y = ring('x, y', QQ[tan(a), a])
assert rs_tan(x + a, x, 5) == (tan(a)**5 + 5*tan(a)**3/3 + \
2*tan(a)/3)*x**4 + (tan(a)**4 + 4*tan(a)**2/3 + 1/3)*x**3 + \
(tan(a)**3 + tan(a))*x**2 + (tan(a)**2 + 1)*x + tan(a)
assert rs_tan(x + x**2*y + a, x, 4) == (2*tan(a)**3 + 2*tan(a))*x**3*y + \
(tan(a)**4 + 4/3*tan(a)**2 + 1/3)*x**3 + (tan(a)**2 + 1)*x**2*y + \
(tan(a)**3 + tan(a))*x**2 + (tan(a)**2 + 1)*x + tan(a)
R, x, y = ring('x, y', EX)
assert rs_tan(x + a, x, 5) == EX(tan(a)**5 + 5*tan(a)**3/3 + \
2*tan(a)/3)*x**4 + EX(tan(a)**4 + 4*tan(a)**2/3 + EX(1)/3)*x**3 + \
EX(tan(a)**3 + tan(a))*x**2 + EX(tan(a)**2 + 1)*x + EX(tan(a))
assert rs_tan(x + x**2*y + a, x, 4) == EX(2*tan(a)**3 + \
2*tan(a))*x**3*y + EX(tan(a)**4 + 4*tan(a)**2/3 + EX(1)/3)*x**3 + \
EX(tan(a)**2 + 1)*x**2*y + EX(tan(a)**3 + tan(a))*x**2 + \
EX(tan(a)**2 + 1)*x + EX(tan(a))
p = x + x**2 + 5
assert rs_atan(p, x, 10).compose(x, 10) == EX(atan(5) + 67701870330562640/ \
668083460499)
def test_cot():
R, x, y = ring('x, y', QQ)
assert rs_cot(x**6 + x**7, x, 8) == x**-6 - x**-5 + x**-4 - x**-3 + \
x**-2 - x**-1 + 1 - x + x**2 - x**3 + x**4 - x**5 + 2*x**6/3 - 4*x**7/3
assert rs_cot(x + x**2*y, x, 5) == -x**4*y**5 - x**4*y/15 + x**3*y**4 - \
x**3/45 - x**2*y**3 - x**2*y/3 + x*y**2 - x/3 - y + x**-1
def test_sin():
R, x, y = ring('x, y', QQ)
assert rs_sin(x, x, 9) == \
x - x**3/6 + x**5/120 - x**7/5040
assert rs_sin(x*y + x**2*y**3, x, 9) == x**8*y**11/12 - \
x**8*y**9/720 + x**7*y**9/12 - x**7*y**7/5040 - x**6*y**9/6 + \
x**6*y**7/24 - x**5*y**7/2 + x**5*y**5/120 - x**4*y**5/2 - \
x**3*y**3/6 + x**2*y**3 + x*y
# Constant term in series
a = symbols('a')
R, x, y = ring('x, y', QQ[sin(a), cos(a), a])
assert rs_sin(x + a, x, 5) == sin(a)*x**4/24 - cos(a)*x**3/6 - \
sin(a)*x**2/2 + cos(a)*x + sin(a)
assert rs_sin(x + x**2*y + a, x, 5) == -sin(a)*x**4*y**2/2 - \
cos(a)*x**4*y/2 + sin(a)*x**4/24 - sin(a)*x**3*y - cos(a)*x**3/6 + \
cos(a)*x**2*y - sin(a)*x**2/2 + cos(a)*x + sin(a)
R, x, y = ring('x, y', EX)
assert rs_sin(x + a, x, 5) == EX(sin(a)/24)*x**4 - EX(cos(a)/6)*x**3 - \
EX(sin(a)/2)*x**2 + EX(cos(a))*x + EX(sin(a))
assert rs_sin(x + x**2*y + a, x, 5) == -EX(sin(a)/2)*x**4*y**2 - \
EX(cos(a)/2)*x**4*y + EX(sin(a)/24)*x**4 - EX(sin(a))*x**3*y - \
EX(cos(a)/6)*x**3 + EX(cos(a))*x**2*y - EX(sin(a)/2)*x**2 + \
EX(cos(a))*x + EX(sin(a))
def test_cos():
R, x, y = ring('x, y', QQ)
assert rs_cos(x, x, 9) == \
x**8/40320 - x**6/720 + x**4/24 - x**2/2 + 1
assert rs_cos(x*y + x**2*y**3, x, 9) == x**8*y**12/24 - \
x**8*y**10/48 + x**8*y**8/40320 + x**7*y**10/6 - \
x**7*y**8/120 + x**6*y**8/4 - x**6*y**6/720 + x**5*y**6/6 - \
x**4*y**6/2 + x**4*y**4/24 - x**3*y**4 - x**2*y**2/2 + 1
# Constant term in series
a = symbols('a')
R, x, y = ring('x, y', QQ[sin(a), cos(a), a])
assert rs_cos(x + a, x, 5) == cos(a)*x**4/24 + sin(a)*x**3/6 - \
cos(a)*x**2/2 - sin(a)*x + cos(a)
assert rs_cos(x + x**2*y + a, x, 5) == -cos(a)*x**4*y**2/2 + \
sin(a)*x**4*y/2 + cos(a)*x**4/24 - cos(a)*x**3*y + sin(a)*x**3/6 - \
sin(a)*x**2*y - cos(a)*x**2/2 - sin(a)*x + cos(a)
R, x, y = ring('x, y', EX)
assert rs_cos(x + a, x, 5) == EX(cos(a)/24)*x**4 + EX(sin(a)/6)*x**3 - \
EX(cos(a)/2)*x**2 - EX(sin(a))*x + EX(cos(a))
assert rs_cos(x + x**2*y + a, x, 5) == -EX(cos(a)/2)*x**4*y**2 + \
EX(sin(a)/2)*x**4*y + EX(cos(a)/24)*x**4 - EX(cos(a))*x**3*y + \
EX(sin(a)/6)*x**3 - EX(sin(a))*x**2*y - EX(cos(a)/2)*x**2 - \
EX(sin(a))*x + EX(cos(a))
def test_cos_sin():
R, x, y = ring('x, y', QQ)
cos, sin = rs_cos_sin(x, x, 9)
assert cos == rs_cos(x, x, 9)
assert sin == rs_sin(x, x, 9)
cos, sin = rs_cos_sin(x + x*y, x, 5)
assert cos == rs_cos(x + x*y, x, 5)
assert sin == rs_sin(x + x*y, x, 5)
def test_atanh():
R, x, y = ring('x, y', QQ)
assert rs_atanh(x, x, 9) == x**7/7 + x**5/5 + x**3/3 + x
assert rs_atanh(x*y + x**2*y**3, x, 9) == 2*x**8*y**11 + x**8*y**9 + \
2*x**7*y**9 + x**7*y**7/7 + x**6*y**9/3 + x**6*y**7 + x**5*y**7 + \
x**5*y**5/5 + x**4*y**5 + x**3*y**3/3 + x**2*y**3 + x*y
# Constant term in series
a = symbols('a')
R, x, y = ring('x, y', EX)
assert rs_atanh(x + a, x, 5) == EX((a**3 + a)/(a**8 - 4*a**6 + 6*a**4 - \
4*a**2 + 1))*x**4 - EX((3*a**2 + 1)/(3*a**6 - 9*a**4 + \
9*a**2 - 3))*x**3 + EX(a/(a**4 - 2*a**2 + 1))*x**2 - EX(1/(a**2 - \
1))*x + EX(atanh(a))
assert rs_atanh(x + x**2*y + a, x, 4) == EX(2*a/(a**4 - 2*a**2 + \
1))*x**3*y - EX((3*a**2 + 1)/(3*a**6 - 9*a**4 + 9*a**2 - 3))*x**3 - \
EX(1/(a**2 - 1))*x**2*y + EX(a/(a**4 - 2*a**2 + 1))*x**2 - \
EX(1/(a**2 - 1))*x + EX(atanh(a))
p = x + x**2 + 5
assert rs_atanh(p, x, 10).compose(x, 10) == EX(-733442653682135/5079158784 \
+ atanh(5))
def test_sinh():
R, x, y = ring('x, y', QQ)
assert rs_sinh(x, x, 9) == x**7/5040 + x**5/120 + x**3/6 + x
assert rs_sinh(x*y + x**2*y**3, x, 9) == x**8*y**11/12 + \
x**8*y**9/720 + x**7*y**9/12 + x**7*y**7/5040 + x**6*y**9/6 + \
x**6*y**7/24 + x**5*y**7/2 + x**5*y**5/120 + x**4*y**5/2 + \
x**3*y**3/6 + x**2*y**3 + x*y
def test_cosh():
R, x, y = ring('x, y', QQ)
assert rs_cosh(x, x, 9) == x**8/40320 + x**6/720 + x**4/24 + \
x**2/2 + 1
assert rs_cosh(x*y + x**2*y**3, x, 9) == x**8*y**12/24 + \
x**8*y**10/48 + x**8*y**8/40320 + x**7*y**10/6 + \
x**7*y**8/120 + x**6*y**8/4 + x**6*y**6/720 + x**5*y**6/6 + \
x**4*y**6/2 + x**4*y**4/24 + x**3*y**4 + x**2*y**2/2 + 1
def test_tanh():
R, x, y = ring('x, y', QQ)
assert rs_tanh(x, x, 9) == -17*x**7/315 + 2*x**5/15 - x**3/3 + x
assert rs_tanh(x*y + x**2*y**3, x, 9) == 4*x**8*y**11/3 - \
17*x**8*y**9/45 + 4*x**7*y**9/3 - 17*x**7*y**7/315 - x**6*y**9/3 + \
2*x**6*y**7/3 - x**5*y**7 + 2*x**5*y**5/15 - x**4*y**5 - \
x**3*y**3/3 + x**2*y**3 + x*y
# Constant term in series
a = symbols('a')
R, x, y = ring('x, y', EX)
assert rs_tanh(x + a, x, 5) == EX(tanh(a)**5 - 5*tanh(a)**3/3 + \
2*tanh(a)/3)*x**4 + EX(-tanh(a)**4 + 4*tanh(a)**2/3 - QQ(1, 3))*x**3 + \
EX(tanh(a)**3 - tanh(a))*x**2 + EX(-tanh(a)**2 + 1)*x + EX(tanh(a))
p = rs_tanh(x + x**2*y + a, x, 4)
assert (p.compose(x, 10)).compose(y, 5) == EX(-1000*tanh(a)**4 + \
10100*tanh(a)**3 + 2470*tanh(a)**2/3 - 10099*tanh(a) + QQ(530, 3))
def test_RR():
rs_funcs = [rs_sin, rs_cos, rs_tan, rs_cot, rs_atan, rs_tanh]
sympy_funcs = [sin, cos, tan, cot, atan, tanh]
R, x, y = ring('x, y', RR)
a = symbols('a')
for rs_func, sympy_func in zip(rs_funcs, sympy_funcs):
p = rs_func(2 + x, x, 5).compose(x, 5)
q = sympy_func(2 + a).series(a, 0, 5).removeO()
is_close(p.as_expr(), q.subs(a, 5).n())
p = rs_nth_root(2 + x, 5, x, 5).compose(x, 5)
q = ((2 + a)**QQ(1, 5)).series(a, 0, 5).removeO()
is_close(p.as_expr(), q.subs(a, 5).n())
def test_is_regular():
R, x, y = ring('x, y', QQ)
p = 1 + 2*x + x**2 + 3*x**3
assert not rs_is_puiseux(p, x)
p = x + x**QQ(1,5)*y
assert rs_is_puiseux(p, x)
assert not rs_is_puiseux(p, y)
p = x + x**2*y**QQ(1,5)*y
assert not rs_is_puiseux(p, x)
def test_puiseux():
R, x, y = ring('x, y', QQ)
p = x**QQ(2,5) + x**QQ(2,3) + x
r = rs_series_inversion(p, x, 1)
r1 = -x**QQ(14,15) + x**QQ(4,5) - 3*x**QQ(11,15) + x**QQ(2,3) + \
2*x**QQ(7,15) - x**QQ(2,5) - x**QQ(1,5) + x**QQ(2,15) - x**QQ(-2,15) \
+ x**QQ(-2,5)
assert r == r1
r = rs_nth_root(1 + p, 3, x, 1)
assert r == -x**QQ(4,5)/9 + x**QQ(2,3)/3 + x**QQ(2,5)/3 + 1
r = rs_log(1 + p, x, 1)
assert r == -x**QQ(4,5)/2 + x**QQ(2,3) + x**QQ(2,5)
r = rs_LambertW(p, x, 1)
assert r == -x**QQ(4,5) + x**QQ(2,3) + x**QQ(2,5)
r = rs_exp(p, x, 1)
assert r == x**QQ(4,5)/2 + x**QQ(2,3) + x**QQ(2,5) + 1
p1 = x + x**QQ(1,5)*y
r = rs_exp(p1, x, 1)
assert r == x**QQ(4,5)*y**4/24 + x**QQ(3,5)*y**3/6 + x**QQ(2,5)*y**2/2 + \
x**QQ(1,5)*y + 1
r = rs_atan(p, x, 2)
assert r == -x**QQ(9,5) - x**QQ(26,15) - x**QQ(22,15) - x**QQ(6,5)/3 + \
x + x**QQ(2,3) + x**QQ(2,5)
r = rs_atan(p1, x, 2)
assert r == x**QQ(9,5)*y**9/9 + x**QQ(9,5)*y**4 - x**QQ(7,5)*y**7/7 - \
x**QQ(7,5)*y**2 + x*y**5/5 + x - x**QQ(3,5)*y**3/3 + x**QQ(1,5)*y
r = rs_asin(p, x, 2)
assert r == x**QQ(9,5)/2 + x**QQ(26,15)/2 + x**QQ(22,15)/2 + \
x**QQ(6,5)/6 + x + x**QQ(2,3) + x**QQ(2,5)
r = rs_tan(p, x, 2)
assert r == x**QQ(9,5) + x**QQ(26,15) + x**QQ(22,15) + x**QQ(6,5)/3 + \
x + x**QQ(2,3) + x**QQ(2,5)
r = rs_cot(p, x, 1)
assert r == -x**QQ(14,15) + x**QQ(4,5) - 3*x**QQ(11,15) + \
2*x**QQ(2,3)/3 + 2*x**QQ(7,15) - 4*x**QQ(2,5)/3 - x**QQ(1,5) + \
x**QQ(2,15) - x**QQ(-2,15) + x**QQ(-2,5)
r = rs_sin(p, x, 2)
assert r == -x**QQ(9,5)/2 - x**QQ(26,15)/2 - x**QQ(22,15)/2 - \
x**QQ(6,5)/6 + x + x**QQ(2,3) + x**QQ(2,5)
r = rs_cos(p, x, 2)
assert r == x**QQ(28,15)/6 - x**QQ(5,3) + x**QQ(8,5)/24 - x**QQ(7,5) - \
x**QQ(4,3)/2 - x**QQ(16,15) - x**QQ(4,5)/2 + 1
r = rs_cos_sin(p, x, 2)
assert r[0] == x**QQ(28,15)/6 - x**QQ(5,3) + x**QQ(8,5)/24 - x**QQ(7,5) - \
x**QQ(4,3)/2 - x**QQ(16,15) - x**QQ(4,5)/2 + 1
assert r[1] == -x**QQ(9,5)/2 - x**QQ(26,15)/2 - x**QQ(22,15)/2 - \
x**QQ(6,5)/6 + x + x**QQ(2,3) + x**QQ(2,5)
r = rs_atanh(p, x, 2)
assert r == x**QQ(9,5) + x**QQ(26,15) + x**QQ(22,15) + x**QQ(6,5)/3 + x + \
x**QQ(2,3) + x**QQ(2,5)
r = rs_sinh(p, x, 2)
assert r == x**QQ(9,5)/2 + x**QQ(26,15)/2 + x**QQ(22,15)/2 + \
x**QQ(6,5)/6 + x + x**QQ(2,3) + x**QQ(2,5)
r = rs_cosh(p, x, 2)
assert r == x**QQ(28,15)/6 + x**QQ(5,3) + x**QQ(8,5)/24 + x**QQ(7,5) + \
x**QQ(4,3)/2 + x**QQ(16,15) + x**QQ(4,5)/2 + 1
r = rs_tanh(p, x, 2)
assert r == -x**QQ(9,5) - x**QQ(26,15) - x**QQ(22,15) - x**QQ(6,5)/3 + \
x + x**QQ(2,3) + x**QQ(2,5)
def test1():
R, x = ring('x', QQ)
r = rs_sin(x, x, 15)*x**(-5)
assert r == x**8/6227020800 - x**6/39916800 + x**4/362880 - x**2/5040 + \
QQ(1,120) - x**-2/6 + x**-4
p = rs_sin(x, x, 10)
r = rs_nth_root(p, 2, x, 10)
assert r == -67*x**QQ(17,2)/29030400 - x**QQ(13,2)/24192 + \
x**QQ(9,2)/1440 - x**QQ(5,2)/12 + x**QQ(1,2)
p = rs_sin(x, x, 10)
r = rs_nth_root(p, 7, x, 10)
r = rs_pow(r, 5, x, 10)
assert r == -97*x**QQ(61,7)/124467840 - x**QQ(47,7)/16464 + \
11*x**QQ(33,7)/3528 - 5*x**QQ(19,7)/42 + x**QQ(5,7)
r = rs_exp(x**QQ(1,2), x, 10)
assert r == x**QQ(19,2)/121645100408832000 + x**9/6402373705728000 + \
x**QQ(17,2)/355687428096000 + x**8/20922789888000 + \
x**QQ(15,2)/1307674368000 + x**7/87178291200 + \
x**QQ(13,2)/6227020800 + x**6/479001600 + x**QQ(11,2)/39916800 + \
x**5/3628800 + x**QQ(9,2)/362880 + x**4/40320 + x**QQ(7,2)/5040 + \
x**3/720 + x**QQ(5,2)/120 + x**2/24 + x**QQ(3,2)/6 + x/2 + \
x**QQ(1,2) + 1
def test_puiseux2():
R, y = ring('y', QQ)
S, x = ring('x', R)
p = x + x**QQ(1,5)*y
r = rs_atan(p, x, 3)
assert r == (y**13/13 + y**8 + 2*y**3)*x**QQ(13,5) - (y**11/11 + y**6 +
y)*x**QQ(11,5) + (y**9/9 + y**4)*x**QQ(9,5) - (y**7/7 +
y**2)*x**QQ(7,5) + (y**5/5 + 1)*x - y**3*x**QQ(3,5)/3 + y*x**QQ(1,5)
def test_rs_series():
x, a, b, c = symbols('x, a, b, c')
assert rs_series(a, a, 5).as_expr() == a
assert rs_series(sin(1/a), a, 5).as_expr() == sin(1/a)
assert rs_series(sin(a), a, 5).as_expr() == (sin(a).series(a, 0,
5)).removeO()
assert rs_series(sin(a) + cos(a), a, 5).as_expr() == ((sin(a) +
cos(a)).series(a, 0, 5)).removeO()
assert rs_series(sin(a)*cos(a), a, 5).as_expr() == ((sin(a)*
cos(a)).series(a, 0, 5)).removeO()
p = (sin(a) - a)*(cos(a**2) + a**4/2)
assert expand(rs_series(p, a, 10).as_expr()) == expand(p.series(a, 0,
10).removeO())
p = sin(a**2/2 + a/3) + cos(a/5)*sin(a/2)**3
assert expand(rs_series(p, a, 5).as_expr()) == expand(p.series(a, 0,
5).removeO())
p = sin(x**2 + a)*(cos(x**3 - 1) - a - a**2)
assert expand(rs_series(p, a, 5).as_expr()) == expand(p.series(a, 0,
5).removeO())
p = sin(a**2 - a/3 + 2)**5*exp(a**3 - a/2)
assert expand(rs_series(p, a, 10).as_expr()) == expand(p.series(a, 0,
10).removeO())
p = sin(a + b + c)
assert expand(rs_series(p, a, 5).as_expr()) == expand(p.series(a, 0,
5).removeO())
p = tan(sin(a**2 + 4) + b + c)
assert expand(rs_series(p, a, 6).as_expr()) == expand(p.series(a, 0,
6).removeO())
|
joerocklin/gem5
|
refs/heads/master
|
ext/ply/test/lex_dup2.py
|
174
|
# lex_dup2.py
#
# Duplicated rule specifiers
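# NOTE: t_NUMBER is deliberately defined twice below so that lex's
# duplicate-rule detection can be exercised by the test suite.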
import sys
if ".." not in sys.path: sys.path.insert(0,"..")
import ply.lex as lex
tokens = [
"PLUS",
"MINUS",
"NUMBER",
]
t_PLUS = r'\+'
t_MINUS = r'-'
def t_NUMBER(t):
r'\d+'
pass
def t_NUMBER(t):
r'\d+'
pass
def t_error(t):
pass
lex.lex()
|
akretion/stock-logistics-warehouse
|
refs/heads/8.0
|
__unported__/stock_orderpoint_creator/orderpoint_template.py
|
23
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Author: Yannick Vaucher (Camptocamp)
# Copyright 2012 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
""" Template of order point object """
from openerp.osv.orm import Model, fields
from base_product_config_template import BaseProductConfigTemplate
class OrderpointTemplate(BaseProductConfigTemplate, Model):
""" Template for orderpoints """
_name = 'stock.warehouse.orderpoint.template'
_inherit = 'stock.warehouse.orderpoint'
_table = 'stock_warehouse_orderpoint_template'
_clean_mode = 'deactivate'
_columns = {
'product_id': fields.many2one('product.product',
'Product',
required=False,
ondelete='cascade',
domain=[('type','=','product')]),
}
def _get_ids_2_clean(self, cursor, uid, template_br, product_ids, context=None):
""" hook to select model specific objects to clean
return must return a list of id"""
model_obj = self._get_model()
ids_to_del = model_obj.search(cursor, uid,
[('product_id', 'in', product_ids)])
return ids_to_del
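# A hypothetical override in another template model (sketch only, not part
# of this module) would narrow the records to clean, e.g.:
#
#     def _get_ids_2_clean(self, cursor, uid, template_br, product_ids,
#                          context=None):
#         model_obj = self._get_model()
#         return model_obj.search(cursor, uid,
#                                 [('product_id', 'in', product_ids),
#                                  ('active', '=', True)])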
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
xHeliotrope/injustice_dropper
|
refs/heads/master
|
env/lib/python3.4/site-packages/phonenumbers/shortdata/region_UA.py
|
11
|
"""Auto-generated file, do not edit by hand. UA metadata"""
from ..phonemetadata import NumberFormat, PhoneNumberDesc, PhoneMetadata
PHONE_METADATA_UA = PhoneMetadata(id='UA', country_code=None, international_prefix=None,
general_desc=PhoneNumberDesc(national_number_pattern='1\\d{2}', possible_number_pattern='\\d{3}'),
toll_free=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
premium_rate=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
emergency=PhoneNumberDesc(national_number_pattern='1(?:0[123]|12)', possible_number_pattern='\\d{3}', example_number='112'),
short_code=PhoneNumberDesc(national_number_pattern='1(?:0[123]|12)', possible_number_pattern='\\d{3}', example_number='112'),
standard_rate=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
carrier_specific=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
short_data=True)
|
shuangshuangwang/spark
|
refs/heads/master
|
python/pyspark/ml/pipeline.py
|
1
|
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
from pyspark import keyword_only, since, SparkContext
from pyspark.ml.base import Estimator, Model, Transformer
from pyspark.ml.param import Param, Params
from pyspark.ml.util import MLReadable, MLWritable, JavaMLWriter, JavaMLReader, \
DefaultParamsReader, DefaultParamsWriter, MLWriter, MLReader, JavaMLWritable
from pyspark.ml.wrapper import JavaParams, JavaWrapper
from pyspark.ml.common import inherit_doc, _java2py, _py2java
@inherit_doc
class Pipeline(Estimator, MLReadable, MLWritable):
"""
A simple pipeline, which acts as an estimator. A Pipeline consists
of a sequence of stages, each of which is either an
:py:class:`Estimator` or a :py:class:`Transformer`. When
:py:meth:`Pipeline.fit` is called, the stages are executed in
order. If a stage is an :py:class:`Estimator`, its
:py:meth:`Estimator.fit` method will be called on the input
dataset to fit a model. Then the model, which is a transformer,
will be used to transform the dataset as the input to the next
stage. If a stage is a :py:class:`Transformer`, its
:py:meth:`Transformer.transform` method will be called to produce
the dataset for the next stage. The fitted model from a
:py:class:`Pipeline` is a :py:class:`PipelineModel`, which
consists of fitted models and transformers, corresponding to the
pipeline stages. If stages is an empty list, the pipeline acts as an
identity transformer.
.. versionadded:: 1.3.0
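    Examples
    --------
    A minimal sketch (assumes an active :py:class:`SparkSession` named
    ``spark``; the data and column names are illustrative only)::
        >>> from pyspark.ml.feature import HashingTF, Tokenizer
        >>> from pyspark.ml.classification import LogisticRegression
        >>> df = spark.createDataFrame(
        ...     [(0, "a b c", 1.0), (1, "d e f", 0.0)], ["id", "text", "label"])
        >>> tokenizer = Tokenizer(inputCol="text", outputCol="words")
        >>> hashingTF = HashingTF(inputCol="words", outputCol="features")
        >>> lr = LogisticRegression(maxIter=10)
        >>> model = Pipeline(stages=[tokenizer, hashingTF, lr]).fit(df)
        >>> result = model.transform(df)  # adds "words", "features", prediction columns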
"""
stages = Param(Params._dummy(), "stages", "a list of pipeline stages")
@keyword_only
def __init__(self, *, stages=None):
"""
__init__(self, \\*, stages=None)
"""
super(Pipeline, self).__init__()
kwargs = self._input_kwargs
self.setParams(**kwargs)
def setStages(self, value):
"""
Set pipeline stages.
.. versionadded:: 1.3.0
Parameters
----------
value : list
of :py:class:`pyspark.ml.Transformer`
or :py:class:`pyspark.ml.Estimator`
Returns
-------
:py:class:`Pipeline`
the pipeline instance
"""
return self._set(stages=value)
@since("1.3.0")
def getStages(self):
"""
Get pipeline stages.
"""
return self.getOrDefault(self.stages)
@keyword_only
@since("1.3.0")
def setParams(self, *, stages=None):
"""
setParams(self, \\*, stages=None)
Sets params for Pipeline.
"""
kwargs = self._input_kwargs
return self._set(**kwargs)
def _fit(self, dataset):
stages = self.getStages()
for stage in stages:
            if not isinstance(stage, (Estimator, Transformer)):
raise TypeError(
"Cannot recognize a pipeline stage of type %s." % type(stage))
indexOfLastEstimator = -1
for i, stage in enumerate(stages):
if isinstance(stage, Estimator):
indexOfLastEstimator = i
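        # Stages up to and including the last Estimator are executed eagerly:
        # Transformers transform the running dataset and Estimators are fitted
        # on it. Stages after the last Estimator are Transformers that can be
        # collected as-is without touching the dataset.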
transformers = []
for i, stage in enumerate(stages):
if i <= indexOfLastEstimator:
if isinstance(stage, Transformer):
transformers.append(stage)
dataset = stage.transform(dataset)
else: # must be an Estimator
model = stage.fit(dataset)
transformers.append(model)
if i < indexOfLastEstimator:
dataset = model.transform(dataset)
else:
transformers.append(stage)
return PipelineModel(transformers)
def copy(self, extra=None):
"""
Creates a copy of this instance.
.. versionadded:: 1.4.0
Parameters
----------
extra : dict, optional
extra parameters
Returns
-------
:py:class:`Pipeline`
new instance
"""
if extra is None:
extra = dict()
that = Params.copy(self, extra)
stages = [stage.copy(extra) for stage in that.getStages()]
return that.setStages(stages)
@since("2.0.0")
def write(self):
"""Returns an MLWriter instance for this ML instance."""
allStagesAreJava = PipelineSharedReadWrite.checkStagesForJava(self.getStages())
if allStagesAreJava:
return JavaMLWriter(self)
return PipelineWriter(self)
@classmethod
@since("2.0.0")
def read(cls):
"""Returns an MLReader instance for this class."""
return PipelineReader(cls)
@classmethod
def _from_java(cls, java_stage):
"""
Given a Java Pipeline, create and return a Python wrapper of it.
Used for ML persistence.
"""
# Create a new instance of this stage.
py_stage = cls()
# Load information from java_stage to the instance.
py_stages = [JavaParams._from_java(s) for s in java_stage.getStages()]
py_stage.setStages(py_stages)
py_stage._resetUid(java_stage.uid())
return py_stage
def _to_java(self):
"""
Transfer this instance to a Java Pipeline. Used for ML persistence.
Returns
-------
py4j.java_gateway.JavaObject
Java object equivalent to this instance.
"""
gateway = SparkContext._gateway
cls = SparkContext._jvm.org.apache.spark.ml.PipelineStage
java_stages = gateway.new_array(cls, len(self.getStages()))
for idx, stage in enumerate(self.getStages()):
java_stages[idx] = stage._to_java()
_java_obj = JavaParams._new_java_obj("org.apache.spark.ml.Pipeline", self.uid)
_java_obj.setStages(java_stages)
return _java_obj
def _make_java_param_pair(self, param, value):
"""
Makes a Java param pair.
"""
sc = SparkContext._active_spark_context
param = self._resolveParam(param)
java_param = sc._jvm.org.apache.spark.ml.param.Param(param.parent, param.name, param.doc)
if isinstance(value, Params) and hasattr(value, "_to_java"):
# Convert JavaEstimator/JavaTransformer object or Estimator/Transformer object which
# implements `_to_java` method (such as OneVsRest, Pipeline object) to java object.
# used in the case of an estimator having another estimator as a parameter
# the reason why this is not in _py2java in common.py is that importing
# Estimator and Model in common.py results in a circular import with inherit_doc
java_value = value._to_java()
else:
java_value = _py2java(sc, value)
return java_param.w(java_value)
def _transfer_param_map_to_java(self, pyParamMap):
"""
Transforms a Python ParamMap into a Java ParamMap.
"""
paramMap = JavaWrapper._new_java_obj("org.apache.spark.ml.param.ParamMap")
for param in self.params:
if param in pyParamMap:
pair = self._make_java_param_pair(param, pyParamMap[param])
paramMap.put([pair])
return paramMap
def _transfer_param_map_from_java(self, javaParamMap):
"""
Transforms a Java ParamMap into a Python ParamMap.
"""
sc = SparkContext._active_spark_context
paramMap = dict()
for pair in javaParamMap.toList():
param = pair.param()
if self.hasParam(str(param.name())):
java_obj = pair.value()
if sc._jvm.Class.forName("org.apache.spark.ml.PipelineStage").isInstance(java_obj):
# Note: JavaParams._from_java support both JavaEstimator/JavaTransformer class
# and Estimator/Transformer class which implements `_from_java` static method
# (such as OneVsRest, Pipeline class).
py_obj = JavaParams._from_java(java_obj)
else:
py_obj = _java2py(sc, java_obj)
paramMap[self.getParam(param.name())] = py_obj
return paramMap
@inherit_doc
class PipelineWriter(MLWriter):
"""
(Private) Specialization of :py:class:`MLWriter` for :py:class:`Pipeline` types
"""
def __init__(self, instance):
super(PipelineWriter, self).__init__()
self.instance = instance
def saveImpl(self, path):
stages = self.instance.getStages()
PipelineSharedReadWrite.validateStages(stages)
PipelineSharedReadWrite.saveImpl(self.instance, stages, self.sc, path)
@inherit_doc
class PipelineReader(MLReader):
"""
(Private) Specialization of :py:class:`MLReader` for :py:class:`Pipeline` types
"""
def __init__(self, cls):
super(PipelineReader, self).__init__()
self.cls = cls
def load(self, path):
metadata = DefaultParamsReader.loadMetadata(path, self.sc)
if 'language' not in metadata['paramMap'] or metadata['paramMap']['language'] != 'Python':
return JavaMLReader(self.cls).load(path)
else:
uid, stages = PipelineSharedReadWrite.load(metadata, self.sc, path)
return Pipeline(stages=stages)._resetUid(uid)
@inherit_doc
class PipelineModelWriter(MLWriter):
"""
(Private) Specialization of :py:class:`MLWriter` for :py:class:`PipelineModel` types
"""
def __init__(self, instance):
super(PipelineModelWriter, self).__init__()
self.instance = instance
def saveImpl(self, path):
stages = self.instance.stages
PipelineSharedReadWrite.validateStages(stages)
PipelineSharedReadWrite.saveImpl(self.instance, stages, self.sc, path)
@inherit_doc
class PipelineModelReader(MLReader):
"""
(Private) Specialization of :py:class:`MLReader` for :py:class:`PipelineModel` types
"""
def __init__(self, cls):
super(PipelineModelReader, self).__init__()
self.cls = cls
def load(self, path):
metadata = DefaultParamsReader.loadMetadata(path, self.sc)
if 'language' not in metadata['paramMap'] or metadata['paramMap']['language'] != 'Python':
return JavaMLReader(self.cls).load(path)
else:
uid, stages = PipelineSharedReadWrite.load(metadata, self.sc, path)
return PipelineModel(stages=stages)._resetUid(uid)
@inherit_doc
class PipelineModel(Model, MLReadable, MLWritable):
"""
Represents a compiled pipeline with transformers and fitted models.
.. versionadded:: 1.3.0
"""
def __init__(self, stages):
super(PipelineModel, self).__init__()
self.stages = stages
def _transform(self, dataset):
for t in self.stages:
dataset = t.transform(dataset)
return dataset
def copy(self, extra=None):
"""
Creates a copy of this instance.
.. versionadded:: 1.4.0
:param extra: extra parameters
:returns: new instance
"""
if extra is None:
extra = dict()
stages = [stage.copy(extra) for stage in self.stages]
return PipelineModel(stages)
@since("2.0.0")
def write(self):
"""Returns an MLWriter instance for this ML instance."""
allStagesAreJava = PipelineSharedReadWrite.checkStagesForJava(self.stages)
if allStagesAreJava:
return JavaMLWriter(self)
return PipelineModelWriter(self)
@classmethod
@since("2.0.0")
def read(cls):
"""Returns an MLReader instance for this class."""
return PipelineModelReader(cls)
@classmethod
def _from_java(cls, java_stage):
"""
Given a Java PipelineModel, create and return a Python wrapper of it.
Used for ML persistence.
"""
# Load information from java_stage to the instance.
py_stages = [JavaParams._from_java(s) for s in java_stage.stages()]
# Create a new instance of this stage.
py_stage = cls(py_stages)
py_stage._resetUid(java_stage.uid())
return py_stage
def _to_java(self):
"""
Transfer this instance to a Java PipelineModel. Used for ML persistence.
        Returns
        -------
        py4j.java_gateway.JavaObject
            Java object equivalent to this instance.
"""
gateway = SparkContext._gateway
cls = SparkContext._jvm.org.apache.spark.ml.Transformer
java_stages = gateway.new_array(cls, len(self.stages))
for idx, stage in enumerate(self.stages):
java_stages[idx] = stage._to_java()
_java_obj =\
JavaParams._new_java_obj("org.apache.spark.ml.PipelineModel", self.uid, java_stages)
return _java_obj
@inherit_doc
class PipelineSharedReadWrite():
"""
Functions for :py:class:`MLReader` and :py:class:`MLWriter` shared between
:py:class:`Pipeline` and :py:class:`PipelineModel`
.. versionadded:: 2.3.0
"""
@staticmethod
def checkStagesForJava(stages):
return all(isinstance(stage, JavaMLWritable) for stage in stages)
@staticmethod
def validateStages(stages):
"""
Check that all stages are Writable
"""
for stage in stages:
if not isinstance(stage, MLWritable):
raise ValueError("Pipeline write will fail on this pipeline " +
"because stage %s of type %s is not MLWritable",
stage.uid, type(stage))
@staticmethod
def saveImpl(instance, stages, sc, path):
"""
Save metadata and stages for a :py:class:`Pipeline` or :py:class:`PipelineModel`
- save metadata to path/metadata
- save stages to stages/IDX_UID
"""
stageUids = [stage.uid for stage in stages]
jsonParams = {'stageUids': stageUids, 'language': 'Python'}
DefaultParamsWriter.saveMetadata(instance, path, sc, paramMap=jsonParams)
stagesDir = os.path.join(path, "stages")
for index, stage in enumerate(stages):
stage.write().save(PipelineSharedReadWrite
.getStagePath(stage.uid, index, len(stages), stagesDir))
@staticmethod
def load(metadata, sc, path):
"""
Load metadata and stages for a :py:class:`Pipeline` or :py:class:`PipelineModel`
Returns
-------
tuple
(UID, list of stages)
"""
stagesDir = os.path.join(path, "stages")
stageUids = metadata['paramMap']['stageUids']
stages = []
for index, stageUid in enumerate(stageUids):
stagePath = \
PipelineSharedReadWrite.getStagePath(stageUid, index, len(stageUids), stagesDir)
stage = DefaultParamsReader.loadParamsInstance(stagePath, sc)
stages.append(stage)
return (metadata['uid'], stages)
@staticmethod
def getStagePath(stageUid, stageIdx, numStages, stagesDir):
"""
Get path for saving the given stage.
"""
stageIdxDigits = len(str(numStages))
stageDir = str(stageIdx).zfill(stageIdxDigits) + "_" + stageUid
stagePath = os.path.join(stagesDir, stageDir)
return stagePath
|
ax003d/openerp
|
refs/heads/master
|
openerp/addons/hr_attendance/res_config.py
|
434
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Business Applications
# Copyright (C) 2004-2012 OpenERP S.A. (<http://openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
class hr_attendance_config_settings(osv.osv_memory):
_inherit = 'hr.config.settings'
_columns = {
'group_hr_attendance': fields.boolean('Track attendances for all employees',
implied_group='base.group_hr_attendance',
help="Allocates attendance group to all users."),
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
renyi533/tensorflow
|
refs/heads/master
|
tensorflow/python/kernel_tests/eig_op_test.py
|
2
|
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorflow.ops.linalg_ops.eig."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes as dtypes_lib
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import linalg_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.platform import test
def _AddTest(test_class, op_name, testcase_name, fn):
test_name = "_".join(["test", op_name, testcase_name])
if hasattr(test_class, test_name):
raise RuntimeError("Test %s defined more than once" % test_name)
setattr(test_class, test_name, fn)
class EigTest(test.TestCase):
@test_util.run_deprecated_v1
def testWrongDimensions(self):
    # The input to eig should be a tensor of
    # at least rank 2.
scalar = constant_op.constant(1.)
with self.assertRaises(ValueError):
linalg_ops.eig(scalar)
vector = constant_op.constant([1., 2.])
with self.assertRaises(ValueError):
linalg_ops.eig(vector)
@test_util.run_deprecated_v1
def testConcurrentExecutesWithoutError(self):
all_ops = []
with self.session(use_gpu=True) as sess:
for compute_v_ in True, False:
matrix1 = random_ops.random_normal([5, 5], seed=42)
matrix2 = random_ops.random_normal([5, 5], seed=42)
if compute_v_:
e1, v1 = linalg_ops.eig(matrix1)
e2, v2 = linalg_ops.eig(matrix2)
all_ops += [e1, v1, e2, v2]
else:
e1 = linalg_ops.eigvals(matrix1)
e2 = linalg_ops.eigvals(matrix2)
all_ops += [e1, e2]
val = self.evaluate(all_ops)
self.assertAllEqual(val[0], val[2])
# The algorithm is slightly different for compute_v being True and False,
# so require approximate equality only here.
self.assertAllClose(val[2], val[4])
self.assertAllEqual(val[4], val[5])
self.assertAllEqual(val[1], val[3])
def testMatrixThatFailsWhenFlushingDenormsToZero(self):
# Test a 32x32 matrix which is known to fail if denorm floats are flushed to
# zero.
matrix = np.genfromtxt(
test.test_src_dir_path(
"python/kernel_tests/testdata/"
"self_adjoint_eig_fail_if_denorms_flushed.txt")).astype(np.float32)
self.assertEqual(matrix.shape, (32, 32))
matrix_tensor = constant_op.constant(matrix)
with self.session(use_gpu=True) as sess:
(e, v) = self.evaluate(linalg_ops.self_adjoint_eig(matrix_tensor))
self.assertEqual(e.size, 32)
self.assertAllClose(
np.matmul(v, v.transpose()), np.eye(32, dtype=np.float32), atol=2e-3)
self.assertAllClose(matrix,
np.matmul(np.matmul(v, np.diag(e)), v.transpose()))
def SortEigenValues(e):
perm = np.argsort(e.real + e.imag, -1)
return np.take(e, perm, -1)
def SortEigenDecomposition(e, v):
if v.ndim < 2:
return e, v
else:
perm = np.argsort(e.real + e.imag, -1)
return np.take(e, perm, -1), np.take(v, perm, -1)
def EquilibrateEigenVectorPhases(x, y):
"""Equilibrate the phase of the Eigenvectors in the columns of `x` and `y`.
Eigenvectors are only unique up to an arbitrary phase. This function rotates x
such that it matches y. Precondition: The columns of x and y differ by a
multiplicative complex phase factor only.
Args:
x: `np.ndarray` with Eigenvectors
y: `np.ndarray` with Eigenvectors
Returns:
`np.ndarray` containing an equilibrated version of x.
"""
phases = np.sum(np.conj(x) * y, -2, keepdims=True)
phases /= np.abs(phases)
return phases * x
def _GetEigTest(dtype_, shape_, compute_v_):
def CompareEigenVectors(self, x, y, tol):
x = EquilibrateEigenVectorPhases(x, y)
self.assertAllClose(x, y, atol=tol)
def CompareEigenDecompositions(self, x_e, x_v, y_e, y_v, tol):
num_batches = int(np.prod(x_e.shape[:-1]))
n = x_e.shape[-1]
x_e = np.reshape(x_e, [num_batches] + [n])
x_v = np.reshape(x_v, [num_batches] + [n, n])
y_e = np.reshape(y_e, [num_batches] + [n])
y_v = np.reshape(y_v, [num_batches] + [n, n])
for i in range(num_batches):
x_ei, x_vi = SortEigenDecomposition(x_e[i, :], x_v[i, :, :])
y_ei, y_vi = SortEigenDecomposition(y_e[i, :], y_v[i, :, :])
self.assertAllClose(x_ei, y_ei, atol=tol, rtol=tol)
CompareEigenVectors(self, x_vi, y_vi, tol)
def Test(self):
np.random.seed(1)
n = shape_[-1]
batch_shape = shape_[:-2]
np_dtype = dtype_.as_numpy_dtype
    # Most matrices are diagonalizable.  # TODO
a = np.random.uniform(
low=-1.0, high=1.0, size=n * n).reshape([n, n]).astype(np_dtype)
if dtype_.is_complex:
a += 1j * np.random.uniform(
low=-1.0, high=1.0, size=n * n).reshape([n, n]).astype(np_dtype)
a = np.tile(a, batch_shape + (1, 1))
if dtype_ in (dtypes_lib.float32, dtypes_lib.complex64):
atol = 1e-4
else:
atol = 1e-12
np_e, np_v = np.linalg.eig(a)
with self.session(use_gpu=True):
if compute_v_:
tf_e, tf_v = linalg_ops.eig(constant_op.constant(a))
# Check that V*diag(E)*V^(-1) is close to A.
a_ev = math_ops.matmul(
math_ops.matmul(tf_v, array_ops.matrix_diag(tf_e)),
linalg_ops.matrix_inverse(tf_v))
self.assertAllClose(self.evaluate(a_ev), a, atol=atol)
# Compare to numpy.linalg.eig.
CompareEigenDecompositions(self, np_e, np_v, self.evaluate(tf_e),
self.evaluate(tf_v), atol)
else:
tf_e = linalg_ops.eigvals(constant_op.constant(a))
self.assertAllClose(
SortEigenValues(np_e),
SortEigenValues(self.evaluate(tf_e)),
atol=atol)
return Test
if __name__ == "__main__":
dtypes_to_test = [dtypes_lib.float32, dtypes_lib.float64]
if not test.is_built_with_rocm():
# ROCm does not support BLAS operations for complex types
dtypes_to_test += [dtypes_lib.complex64, dtypes_lib.complex128]
for compute_v in True, False:
for dtype in dtypes_to_test:
for size in 1, 2, 5, 10:
        for batch_dims in [(), (3,)] + [(3, 2)] * (size < 10):
shape = batch_dims + (size, size)
name = "%s_%s_%s" % (dtype.name, "_".join(map(str, shape)), compute_v)
_AddTest(EigTest, "Eig", name, _GetEigTest(dtype, shape, compute_v))
# No gradient yet
test.main()
|
deepaklukose/grpc
|
refs/heads/master
|
src/python/grpcio/grpc/_grpcio_metadata.py
|
2
|
# Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# AUTO-GENERATED FROM `$REPO_ROOT/templates/src/python/grpcio/grpc/_grpcio_metadata.py.template`!!!
__version__ = """1.9.0.dev0"""
|
geekaia/edx-platform
|
refs/heads/master
|
common/djangoapps/student/management/commands/emaillist.py
|
68
|
from django.core.management.base import BaseCommand
from django.contrib.auth.models import User
class Command(BaseCommand):
    help = 'Extract an e-mail list of all active students.'
    def handle(self, *args, **options):
        #text = open(args[0]).read()
        #subject = open(args[1]).read()
        # Filter on the database side instead of checking each user in Python.
        for user in User.objects.filter(is_active=True):
            print user.email
|
TeamEOS/external_chromium_org
|
refs/heads/lp5.0
|
tools/site_compare/scrapers/chrome/__init__.py
|
179
|
#!/usr/bin/env python
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Selects the appropriate scraper for Chrome."""
def GetScraper(version):
"""Returns the scraper module for the given version.
Args:
version: version string of Chrome, or None for most recent
Returns:
    scraper module for the given version
"""
if version is None:
version = "0.1.101.0"
parsed_version = [int(x) for x in version.split(".")]
  # Versions above 0.1.97.0 use the newer scraper; compare the version
  # components lexicographically rather than element-wise.
  if parsed_version > [0, 1, 97, 0]:
scraper_version = "chrome011010"
else:
scraper_version = "chrome01970"
return __import__(scraper_version, globals(), locals(), [''])
# if invoked rather than imported, test
if __name__ == "__main__":
print GetScraper("0.1.101.0").version
|
ajenta/dj-oydiv
|
refs/heads/master
|
dj_oydiv/config/__init__.py
|
1
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals, absolute_import
import logging
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from . import default_settings
log = logging.getLogger(__name__)
__all__ = ['config']
PREFIX = 'OYDIV_'
class _Config(object):
"""
    Allow our app to have some preconfigured defaults that are overridden
in the django settings file.
"""
def __getattr__(self, attr):
try:
return getattr(settings, PREFIX + attr)
except AttributeError:
try:
return getattr(default_settings, attr)
except AttributeError:
raise ImproperlyConfigured(
"The setting '{}' must be defined in django settings.".format(PREFIX + attr)
)
config = _Config()
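# Usage sketch (hypothetical setting name): ``config.API_TIMEOUT`` first looks
# up ``OYDIV_API_TIMEOUT`` in the Django settings, then falls back to
# ``API_TIMEOUT`` in ``default_settings``, and raises ImproperlyConfigured if
# neither defines it.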
|
sihart25/Vimto
|
refs/heads/master
|
vimto/local_apps/polls/StationHandler.py
|
1
|
from django.conf import settings
MEDIA_ROOT = getattr(settings, "MEDIA_ROOT", None)
STATIONS = {}
SECTIONS = []
#########################################################################
# Station and Section Classes
class Station():
def __init__(self, num, lat, lng, nme):
self.num = num
self.nme = nme
self.lat = lat
self.lng = lng
class Section():
def __init__(self, name, startnum, endnum, dirn, minlat, minlng, maxlat, maxlng,
startlat, startlng, endlat, endlng):
self.name = name
self.startnum = startnum
self.endnum = endnum
self.section_direction = dirn
self.minlat = minlat
self.minlng = minlng
self.maxlat = maxlat
self.maxlng = maxlng
self.startlat = startlat
self.startlng = startlng
self.endlat = endlat
self.endlng = endlng
self.dlat = endlat - startlat
self.dlng = endlng - startlng
self.ReadPoints(self.name+".csv")
# self.CreateIteractvePts(self.name+".csv")
def ReadPoints(self, filename):
path = MEDIA_ROOT + "Vimto/Edited/ConfigData/"
print(""+path+filename)
self.GPSList = []
with open((path+filename), 'r') as f:
for line in f.readlines():
lat, lng = line.strip().split(' ')
self.GPSList.append(float(lat))
self.GPSList.append(float(lng))
# print(self.GPSList)
def CreateIteractvePts(self, filename):
path = MEDIA_ROOT + "Vimto/Edited/ConfigData/"
# print(""+path+filename)
import csv
with open(path+filename, 'w', newline='') as csvfile:
spamwriter = csv.writer(csvfile, delimiter=' ',
quotechar='|', quoting=csv.QUOTE_MINIMAL)
numspaces = 200
dlat = (self.endlat - self.startlat)/numspaces
dlng = (self.endlng - self.startlng)/numspaces
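            # Linearly interpolate numspaces evenly spaced points between the
            # section's start and end coordinates.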
for i in range(0, numspaces):
# print("", i, "value of loop")
lat = self.startlat+(dlat*i)
lng = self.startlng+(dlng*i)
spamwriter.writerow([lat, lng])
STATIONS[0] = Station(0, 52.478732, -1.899240, 'Grand Central New Street')
STATIONS[1] = Station(1, 52.479932, -1.897152, 'Corporation Street')
STATIONS[2] = Station(2, 52.481887, -1.896281, 'Bull Street')
STATIONS[3] = Station(3, 52.483556, -1.899273, 'Birmingham, Snow Hill')
STATIONS[4] = Station(4, 52.486663, -1.903911, 'St. Pauls')
STATIONS[5] = Station(5, 52.489686, -1.913182, 'Jewellery Quarter')
STATIONS[6] = Station(6, 52.496907, -1.931041, 'Soho, Benson Road')
STATIONS[7] = Station(7, 52.498867, -1.938733, 'Winson Green, Outer Circle')
STATIONS[8] = Station(8, 52.502373, -1.951652, 'Handsworth, Booth Street')
STATIONS[9] = Station(9, 52.505723, -1.964655, 'The Hawthorns')
STATIONS[10] = Station(10, 52.508779, -1.982677, 'Kenrick Park')
STATIONS[11] = Station(11, 52.511784, -1.988073, 'Trinity Way')
STATIONS[12] = Station(12, 52.516604, -1.994781, 'West Bromwich Central')
STATIONS[13] = Station(13, 52.518639, -1.999724, 'Lodge Road, West Bromwich Town Hall')
STATIONS[14] = Station(14, 52.520463, -2.004534, 'Dartmouth Street ')
STATIONS[15] = Station(15, 52.525122, -2.008589, 'Dudley Street, Guns Village')
STATIONS[16] = Station(16, 52.531002, -2.011202, 'Black Lake')
STATIONS[17] = Station(17, 52.548893, -2.025621, 'Wednesbury, Great Western Street')
STATIONS[18] = Station(18, 52.549482, -2.030593, 'Wednesbury Parkway')
STATIONS[19] = Station(19, 52.555668, -2.057287, 'Bradley Lane')
STATIONS[20] = Station(20, 52.559794, -2.065500, 'Loxdale')
STATIONS[21] = Station(21, 52.565810, -2.074917, 'Bilston Central')
STATIONS[22] = Station(22, 52.567891, -2.080698, 'The Crescent')
STATIONS[23] = Station(23, 52.571903, -2.098076, 'Priestfield')
STATIONS[24] = Station(24, 52.581056, -2.116881, 'The Royal')
STATIONS[25] = Station(25, 52.584081, -2.124243, 'Wolverhampton, St. Georges')
def ReadSectionGPS():
# for key, value in STATIONS.items():
# print ("This is :" + str(key) + ":" + str(value.num) + ":" + value.nme)
i = 0
# start at second element
for key, value in STATIONS.items():
        if key > 0:
            # Bounding box of the two consecutive stations.
            minlat = min(STATIONS[i].lat, value.lat)
            minlng = min(STATIONS[i].lng, value.lng)
            maxlat = max(STATIONS[i].lat, value.lat)
            maxlng = max(STATIONS[i].lng, value.lng)
x = Section("Section" + str(STATIONS[i].num)+"-" + str(value.num), i, value.num, 'BW',
minlat, minlng, maxlat, maxlng,
STATIONS[i].lat, STATIONS[i].lng, value.lat, value.lng)
SECTIONS.append(x)
# print ("This is count:%s" % x.name)
i += 1
for x in SECTIONS:
print ("Section:%s" % x.name)
# print (x.GPSList)
|
resmo/ansible
|
refs/heads/devel
|
lib/ansible/modules/cloud/azure/azure_rm_mariadbserver_info.py
|
29
|
#!/usr/bin/python
#
# Copyright (c) 2017 Zim Kalinowski, <zikalino@microsoft.com>
# Copyright (c) 2019 Matti Ranta, (@techknowlogick)
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: azure_rm_mariadbserver_info
version_added: "2.9"
short_description: Get Azure MariaDB Server facts
description:
- Get facts of MariaDB Server.
options:
resource_group:
description:
- The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal.
required: True
type: str
name:
description:
- The name of the server.
type: str
tags:
description:
- Limit results by providing a list of tags. Format tags as 'key' or 'key:value'.
type: list
extends_documentation_fragment:
- azure
author:
- Zim Kalinowski (@zikalino)
- Matti Ranta (@techknowlogick)
'''
EXAMPLES = '''
- name: Get instance of MariaDB Server
azure_rm_mariadbserver_info:
resource_group: myResourceGroup
name: server_name
- name: List instances of MariaDB Server
azure_rm_mariadbserver_info:
resource_group: myResourceGroup
'''
RETURN = '''
servers:
description:
- A list of dictionaries containing facts for MariaDB servers.
returned: always
type: complex
contains:
id:
description:
- Resource ID.
returned: always
type: str
sample: /subscriptions/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx/resourceGroups/myResourceGroup/providers/Microsoft.DBforMariaDB/servers/myabdud1223
resource_group:
description:
- Resource group name.
returned: always
type: str
sample: myResourceGroup
name:
description:
- Resource name.
returned: always
type: str
sample: myabdud1223
location:
description:
- The location the resource resides in.
returned: always
type: str
sample: eastus
sku:
description:
- The SKU of the server.
returned: always
type: complex
contains:
name:
description:
- The name of the SKU.
returned: always
type: str
sample: GP_Gen4_2
tier:
description:
- The tier of the particular SKU.
returned: always
type: str
sample: GeneralPurpose
capacity:
description:
- The scale capacity.
returned: always
type: int
sample: 2
storage_mb:
description:
- The maximum storage allowed for a server.
returned: always
type: int
sample: 128000
enforce_ssl:
description:
- Enable SSL enforcement.
returned: always
type: bool
sample: False
admin_username:
description:
- The administrator's login name of a server.
returned: always
type: str
sample: serveradmin
version:
description:
- Server version.
returned: always
type: str
sample: "9.6"
user_visible_state:
description:
- A state of a server that is visible to user.
returned: always
type: str
sample: Ready
fully_qualified_domain_name:
description:
- The fully qualified domain name of a server.
returned: always
type: str
sample: myabdud1223.mys.database.azure.com
tags:
description:
- Tags assigned to the resource. Dictionary of string:string pairs.
type: dict
sample: { tag1: abc }
'''
from ansible.module_utils.azure_rm_common import AzureRMModuleBase
try:
from msrestazure.azure_exceptions import CloudError
from azure.mgmt.rdbms.mariadb import MariaDBManagementClient
from msrest.serialization import Model
except ImportError:
# This is handled in azure_rm_common
pass
class AzureRMMariaDbServerInfo(AzureRMModuleBase):
def __init__(self):
# define user inputs into argument
self.module_arg_spec = dict(
resource_group=dict(
type='str',
required=True
),
name=dict(
type='str'
),
tags=dict(
type='list'
)
)
# store the results of the module operation
self.results = dict(
changed=False
)
self.resource_group = None
self.name = None
self.tags = None
super(AzureRMMariaDbServerInfo, self).__init__(self.module_arg_spec, supports_tags=False)
def exec_module(self, **kwargs):
is_old_facts = self.module._name == 'azure_rm_mariadbserver_facts'
if is_old_facts:
self.module.deprecate("The 'azure_rm_mariadbserver_facts' module has been renamed to 'azure_rm_mariadbserver_info'", version='2.13')
for key in self.module_arg_spec:
setattr(self, key, kwargs[key])
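        # Fetch a single server when both resource_group and name are given;
        # otherwise list every server in the resource group.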
if (self.resource_group is not None and
self.name is not None):
self.results['servers'] = self.get()
elif (self.resource_group is not None):
self.results['servers'] = self.list_by_resource_group()
return self.results
def get(self):
response = None
results = []
try:
response = self.mariadb_client.servers.get(resource_group_name=self.resource_group,
server_name=self.name)
self.log("Response : {0}".format(response))
        except CloudError:
self.log('Could not get facts for MariaDB Server.')
if response and self.has_tags(response.tags, self.tags):
results.append(self.format_item(response))
return results
def list_by_resource_group(self):
response = None
results = []
try:
response = self.mariadb_client.servers.list_by_resource_group(resource_group_name=self.resource_group)
self.log("Response : {0}".format(response))
        except CloudError:
self.log('Could not get facts for MariaDB Servers.')
if response is not None:
for item in response:
if self.has_tags(item.tags, self.tags):
results.append(self.format_item(item))
return results
def format_item(self, item):
d = item.as_dict()
d = {
'id': d['id'],
'resource_group': self.resource_group,
'name': d['name'],
'sku': d['sku'],
'location': d['location'],
'storage_mb': d['storage_profile']['storage_mb'],
'version': d['version'],
'enforce_ssl': (d['ssl_enforcement'] == 'Enabled'),
'admin_username': d['administrator_login'],
'user_visible_state': d['user_visible_state'],
'fully_qualified_domain_name': d['fully_qualified_domain_name'],
'tags': d.get('tags')
}
return d
def main():
AzureRMMariaDbServerInfo()
if __name__ == '__main__':
main()
|
shedskin/shedskin
|
refs/heads/master
|
tests/185.py
|
6
|
# file.next
print file('run.py').next().strip()
# re.groups returns tuple
import re
m = re.match(r"(\d+)\.?(\d+)?", "24")
groups = m.groups()
print groups
# overloading __getitem__ problem
class Vector3f:
def __getitem__(self, key):
return 19
v = Vector3f()
print v[0]
# more string formatting
print '!'+('%06d%6r%6.2f' % (18,'hoi', 1.17))+'!'
print '!'+('%0*d%*s%*.*f' % (6,18,6,'hoi',8,2,1.171))+'!'
# and/or funtest (already worked)
hoppa = (17, 18)
a, b = hoppa or (19,20)
print a, b
hoppa = None
a, b = hoppa or (19,20)
print a, b
x = [1,2]
y = [3,4,5]
c = x and y or None
print c
y = None
z = None
c = x and y or z
print c
# TI problem (seeding bool)
def rungame(strategy, verbose):
strategy()
def s_bestinfo():
z = [0]
print z
def s_worstinfo():
z = [0]
print z
def eval_strategy(strategy):
rungame(strategy, False)
def main():
eval_strategy(s_bestinfo)
eval_strategy(s_worstinfo)
main()
# test
import signal
|
sujeetv/incubator-hawq
|
refs/heads/master
|
src/bin/gpupgrade/setcatversion.py
|
9
|
import subprocess
releases = {"3.0": "200703112",
"3.1": "200712072",
"3.2": "200808253",
"3.3": "200902041"}
def release2catverno(rno):
    if rno not in releases:
raise Exception("unknown version %s" % rno)
return releases[rno]
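# For example, release2catverno("3.2") returns "200808253".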
def stop_cluster():
    p = subprocess.Popen(['gpstop', '-a'], shell=False, close_fds=True,
                         stdout=subprocess.PIPE, stderr=subprocess.PIPE)
result = p.communicate()
if p.returncode != 0:
raise Exception("could not stop cluster: " + result[0] + result[1])
def get_control_data(datadir):
'''
Parse the output of pg_controldata run on data directory, returning
catalog version and state
'''
cmd = ['pg_controldata', datadir]
p = subprocess.Popen(cmd, shell=False, close_fds=True,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
result = p.communicate()
if p.returncode != 0:
raise Exception("error running " + ' '.join(cmd) + ": " + result[0] + result[1])
out = result[0].strip()
ver = ""
state = ""
for line in out.split('\n'):
s = line.split(':')
if s[0] == 'Catalog version number':
ver = s[1].strip()
elif s[0] == 'Database cluster state':
state = s[1].strip()
return [ver, state]
def setcatversion(datadir, frm, to):
'''
Set catalog version to 'to' from 'frm'. Check that the system is down
and actually set to the previous version.
'''
(ver, state) = get_control_data(datadir)
frmcatverno = release2catverno(frm)
if ver != frmcatverno:
raise Exception("Expected version %s but found %s" % (frmcatverno, ver))
cmd = ['/Users/swm/greenplum-db-devel/bin/lib/gpmodcatversion', '--catversion', to, datadir]
p = subprocess.Popen(cmd,
shell=False, close_fds=True,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
result = p.communicate()
if p.returncode != 0:
raise Exception("could not update catalog to %s" % to)
if __name__ == '__main__':
paths = ['/Users/swm/greenplum-db-devel/upg/upgradetest-1',
'/Users/swm/greenplum-db-devel/upg/upgradetest1',
'/Users/swm/greenplum-db-devel/upg/upgradetest0']
for p in paths:
setcatversion(p, '3.2', '3.3')
|
signed/intellij-community
|
refs/heads/master
|
python/lib/Lib/site-packages/django/utils/regex_helper.py
|
361
|
"""
Functions for reversing a regular expression (used in reverse URL resolving).
Used internally by Django and not intended for external use.
This is not, and is not intended to be, a complete reg-exp decompiler. It
should be good enough for a large class of URLs, however.
"""
# Mapping of an escape character to a representative of that class. So, e.g.,
# "\w" is replaced by "x" in a reverse URL. A value of None means to ignore
# this sequence. Any missing key is mapped to itself.
ESCAPE_MAPPINGS = {
"A": None,
"b": None,
"B": None,
"d": u"0",
"D": u"x",
"s": u" ",
"S": u"x",
"w": u"x",
"W": u"!",
"Z": None,
}
class Choice(list):
"""
Used to represent multiple possibilities at this point in a pattern string.
We use a distinguished type, rather than a list, so that the usage in the
code is clear.
"""
class Group(list):
"""
Used to represent a capturing group in the pattern string.
"""
class NonCapture(list):
"""
Used to represent a non-capturing group in the pattern string.
"""
def normalize(pattern):
"""
Given a reg-exp pattern, normalizes it to a list of forms that suffice for
reverse matching. This does the following:
(1) For any repeating sections, keeps the minimum number of occurrences
permitted (this means zero for optional groups).
(2) If an optional group includes parameters, include one occurrence of
that group (along with the zero occurrence case from step (1)).
(3) Select the first (essentially an arbitrary) element from any character
class. Select an arbitrary character for any unordered class (e.g. '.'
or '\w') in the pattern.
    (4) Ignore comments and any of the reg-exp flags that won't change
        what we construct ("iLmsu"). "(?x)" is an error, however.
    (5) Raise an error on all other non-capturing (?...) forms (e.g.
        look-ahead and look-behind matches) and any disjunctive ('|')
        constructs.
Django's URLs for forward resolving are either all positional arguments or
all keyword arguments. That is assumed here, as well. Although reverse
resolving can be done using positional args when keyword args are
specified, the two cannot be mixed in the same reverse() call.
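    For example (a rough sketch of the output; types follow the Python 2
    conventions used in this module)::
        normalize(r'^article/(?P<slug>\w+)/$')
        # -> [(u'article/%(slug)s/', ['slug'])]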
"""
# Do a linear scan to work out the special features of this pattern. The
# idea is that we scan once here and collect all the information we need to
# make future decisions.
result = []
non_capturing_groups = []
consume_next = True
pattern_iter = next_char(iter(pattern))
num_args = 0
# A "while" loop is used here because later on we need to be able to peek
# at the next character and possibly go around without consuming another
# one at the top of the loop.
try:
ch, escaped = pattern_iter.next()
except StopIteration:
return zip([u''], [[]])
try:
while True:
if escaped:
result.append(ch)
elif ch == '.':
# Replace "any character" with an arbitrary representative.
result.append(u".")
elif ch == '|':
                # FIXME: One day we should do this, but not in 1.0.
raise NotImplementedError
elif ch == "^":
pass
elif ch == '$':
break
elif ch == ')':
# This can only be the end of a non-capturing group, since all
# other unescaped parentheses are handled by the grouping
# section later (and the full group is handled there).
#
                # We regroup everything inside the non-capturing group so that
                # it
# can be quantified, if necessary.
start = non_capturing_groups.pop()
inner = NonCapture(result[start:])
result = result[:start] + [inner]
elif ch == '[':
# Replace ranges with the first character in the range.
ch, escaped = pattern_iter.next()
result.append(ch)
ch, escaped = pattern_iter.next()
while escaped or ch != ']':
ch, escaped = pattern_iter.next()
elif ch == '(':
# Some kind of group.
ch, escaped = pattern_iter.next()
if ch != '?' or escaped:
# A positional group
name = "_%d" % num_args
num_args += 1
result.append(Group(((u"%%(%s)s" % name), name)))
walk_to_end(ch, pattern_iter)
else:
ch, escaped = pattern_iter.next()
if ch in "iLmsu#":
# All of these are ignorable. Walk to the end of the
# group.
walk_to_end(ch, pattern_iter)
elif ch == ':':
# Non-capturing group
non_capturing_groups.append(len(result))
elif ch != 'P':
# Anything else, other than a named group, is something
# we cannot reverse.
raise ValueError("Non-reversible reg-exp portion: '(?%s'" % ch)
else:
ch, escaped = pattern_iter.next()
if ch != '<':
raise ValueError("Non-reversible reg-exp portion: '(?P%s'" % ch)
                    # We are in a named capturing group. Extract the name and
# then skip to the end.
name = []
ch, escaped = pattern_iter.next()
while ch != '>':
name.append(ch)
ch, escaped = pattern_iter.next()
param = ''.join(name)
result.append(Group(((u"%%(%s)s" % param), param)))
walk_to_end(ch, pattern_iter)
elif ch in "*?+{":
                # Quantifiers affect the previous item in the result list.
count, ch = get_quantifier(ch, pattern_iter)
if ch:
                    # We had to look ahead, but it wasn't needed to compute
                    # the quantifier, so use this character next time around
                    # the main loop.
consume_next = False
if count == 0:
if contains(result[-1], Group):
# If we are quantifying a capturing group (or
# something containing such a group) and the minimum is
# zero, we must also handle the case of one occurrence
# being present. All the quantifiers (except {0,0},
# which we conveniently ignore) that have a 0 minimum
# also allow a single occurrence.
result[-1] = Choice([None, result[-1]])
else:
result.pop()
elif count > 1:
result.extend([result[-1]] * (count - 1))
else:
# Anything else is a literal.
result.append(ch)
if consume_next:
ch, escaped = pattern_iter.next()
else:
consume_next = True
except StopIteration:
pass
except NotImplementedError:
# A case of using the disjunctive form. No results for you!
return zip([u''], [[]])
return zip(*flatten_result(result))
def next_char(input_iter):
"""
    An iterator that yields the next character from "input_iter", respecting
    escape sequences. An escaped character is replaced by a representative of
    its class (e.g. \w -> "x"). If the escaped character is one that is
    skipped, it is not returned (the next character is returned instead).
    Yields the next character, along with a boolean indicating whether it is
    an escaped (replaced) character or not.
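    For example, the input r"a\wb" yields ('a', False), ('x', True),
    ('b', False).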
"""
for ch in input_iter:
if ch != '\\':
yield ch, False
continue
ch = input_iter.next()
representative = ESCAPE_MAPPINGS.get(ch, ch)
if representative is None:
continue
yield representative, True
def walk_to_end(ch, input_iter):
"""
The iterator is currently inside a capturing group. We want to walk to the
close of this group, skipping over any nested groups and handling escaped
parentheses correctly.
"""
if ch == '(':
nesting = 1
else:
nesting = 0
for ch, escaped in input_iter:
if escaped:
continue
elif ch == '(':
nesting += 1
elif ch == ')':
if not nesting:
return
nesting -= 1
def get_quantifier(ch, input_iter):
"""
Parse a quantifier from the input, where "ch" is the first character in the
quantifier.
    Returns the minimum number of occurrences permitted by the quantifier and
either None or the next character from the input_iter if the next character
is not part of the quantifier.
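    For example, with ch == '{' and remaining input "2,3}x" this returns
    (2, 'x'); with ch == '*' followed by 'a' it returns (0, 'a').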
"""
if ch in '*?+':
try:
ch2, escaped = input_iter.next()
except StopIteration:
ch2 = None
if ch2 == '?':
ch2 = None
if ch == '+':
return 1, ch2
return 0, ch2
quant = []
while ch != '}':
ch, escaped = input_iter.next()
quant.append(ch)
quant = quant[:-1]
values = ''.join(quant).split(',')
# Consume the trailing '?', if necessary.
try:
ch, escaped = input_iter.next()
except StopIteration:
ch = None
if ch == '?':
ch = None
return int(values[0]), ch
def contains(source, inst):
"""
    Returns True if the "source" contains an instance of "inst"; False
    otherwise.
"""
if isinstance(source, inst):
return True
if isinstance(source, NonCapture):
for elt in source:
if contains(elt, inst):
return True
return False
def flatten_result(source):
"""
Turns the given source sequence into a list of reg-exp possibilities and
their arguments. Returns a list of strings and a list of argument lists.
Each of the two lists will be of the same length.
"""
if source is None:
return [u''], [[]]
if isinstance(source, Group):
if source[1] is None:
params = []
else:
params = [source[1]]
return [source[0]], [params]
result = [u'']
result_args = [[]]
pos = last = 0
for pos, elt in enumerate(source):
if isinstance(elt, basestring):
continue
piece = u''.join(source[last:pos])
if isinstance(elt, Group):
piece += elt[0]
param = elt[1]
else:
param = None
last = pos + 1
for i in range(len(result)):
result[i] += piece
if param:
result_args[i].append(param)
if isinstance(elt, (Choice, NonCapture)):
if isinstance(elt, NonCapture):
elt = [elt]
inner_result, inner_args = [], []
for item in elt:
res, args = flatten_result(item)
inner_result.extend(res)
inner_args.extend(args)
new_result = []
new_args = []
for item, args in zip(result, result_args):
for i_item, i_args in zip(inner_result, inner_args):
new_result.append(item + i_item)
new_args.append(args[:] + i_args)
result = new_result
result_args = new_args
if pos >= last:
piece = u''.join(source[last:])
for i in range(len(result)):
result[i] += piece
return result, result_args
|
caot/intellij-community
|
refs/heads/master
|
python/lib/Lib/site-packages/django/contrib/auth/signals.py
|
334
|
from django.dispatch import Signal
user_logged_in = Signal(providing_args=['request', 'user'])
user_logged_out = Signal(providing_args=['request', 'user'])
|
MrSurly/micropython
|
refs/heads/master
|
tests/basics/list1.py
|
17
|
# basic list functionality
x = [1, 2, 3 * 4]
print(x)
x[0] = 4
print(x)
x[1] += -4
print(x)
x.append(5)
print(x)
f = x.append
f(4)
print(x)
x.extend([100, 200])
print(x)
x.extend(range(3))
print(x)
x += [2, 1]
print(x)
# unsupported type on RHS of add
try:
[] + None
except TypeError:
print('TypeError')
|
ejeschke/ginga
|
refs/heads/master
|
ginga/util/io_rgb.py
|
3
|
#
# io_rgb.py -- RGB image file handling.
#
# This is open-source software licensed under a BSD license.
# Please see the file LICENSE.txt for details.
#
import sys
import time
import mimetypes
from io import BytesIO
import numpy as np
from ginga.BaseImage import Header, ImageError
from ginga.util import iohelper, rgb_cms
from ginga.util.io import io_base
from ginga.misc import Bunch
from ginga import trcalc
try:
# do we have opencv available?
import cv2
have_opencv = True
except ImportError:
have_opencv = False
try:
# do we have Python Imaging Library available?
import PIL.Image as PILimage
from PIL.ExifTags import TAGS
have_pil = True
except ImportError:
have_pil = False
# piexif library for getting metadata, in the case that we don't have PIL
try:
import piexif
have_exif = True
except ImportError:
have_exif = False
# For testing...
#have_pil = False
#have_exif = False
#have_opencv = False
def load_file(filepath, idx=None, logger=None, **kwargs):
"""
Load an object from a RGB file.
"""
opener = RGBFileHandler(logger)
return opener.load_file(filepath, **kwargs)
class BaseRGBFileHandler(io_base.BaseIOHandler):
name = 'RGB'
def __init__(self, logger):
super(BaseRGBFileHandler, self).__init__(logger)
self._path = None
self.clr_mgr = rgb_cms.ColorManager(self.logger)
def load_file(self, filespec, dstobj=None, **kwargs):
info = iohelper.get_fileinfo(filespec)
if not info.ondisk:
raise ValueError("File does not appear to be on disk: %s" % (
info.url))
filepath = info.filepath
if dstobj is None:
# Put here to avoid circular import
from ginga.RGBImage import RGBImage
dstobj = RGBImage(logger=self.logger)
header = Header()
metadata = {'header': header, 'path': filepath}
data_np = self.imload(filepath, metadata)
dstobj.set_data(data_np, metadata=metadata)
if dstobj.name is not None:
dstobj.set(name=dstobj.name)
else:
name = iohelper.name_image_from_path(filepath, idx=None)
dstobj.set(name=name)
if 'order' in metadata:
dstobj.order = metadata['order']
dstobj.set(path=filepath, idx=None, image_loader=self.load_file)
return dstobj
def open_file(self, filespec, **kwargs):
self._path = filespec
return self
def close(self):
self._path = None
def __len__(self):
return 1
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.close()
return False
def load_idx_cont(self, idx_spec, loader_cont_fn, **kwargs):
# TODO: raise an error if idx_spec doesn't match a single image
idx = 0
if idx_spec is not None and idx_spec != '':
idx = int(idx_spec)
data_obj = self.load_idx(idx, **kwargs)
# call continuation function
loader_cont_fn(data_obj)
def imload(self, filepath, metadata):
"""Load an image file, guessing the format, and return a numpy
array containing an RGB image. If EXIF keywords can be read
they are returned in the metadata.
"""
start_time = time.time()
typ, enc = mimetypes.guess_type(filepath)
if not typ:
typ = 'image/jpeg'
typ, subtyp = typ.split('/')
self.logger.debug("MIME type is %s/%s" % (typ, subtyp))
data_np = self._imload(filepath, metadata)
end_time = time.time()
self.logger.debug("loading time %.4f sec" % (end_time - start_time))
return data_np
def imresize(self, data, new_wd, new_ht, method='bilinear'):
"""Scale an image in numpy array _data_ to the specified width and
height. A smooth scaling is preferred.
"""
start_time = time.time()
newdata = self._imresize(data, new_wd, new_ht, method=method)
end_time = time.time()
self.logger.debug("scaling time %.4f sec" % (end_time - start_time))
return newdata
def get_thumb(self, filepath):
if not have_pil:
raise Exception("Install PIL to use this method")
if not have_exif:
raise Exception("Install piexif to use this method")
try:
info = piexif.load(filepath)
buf = info['thumbnail']
except Exception as e:
return None
image = PILimage.open(BytesIO(buf))
data_np = np.array(image)
return data_np
def piexif_getexif(self, filepath, kwds):
if have_exif:
try:
info = piexif.load(filepath)
if info is not None:
# TODO: is there a more efficient way to do this than
# iterating in python?
for ifd in ["0th", "Exif", "GPS", "Interop", "1st"]:
if ifd in info:
for tag, value in info[ifd].items():
kwd = piexif.TAGS[ifd][tag].get('name', tag)
kwds[kwd] = value
except Exception as e:
self.logger.warning("Failed to get image metadata: %s" % (str(e)))
else:
self.logger.warning("Please install 'piexif' module to get image metadata")
def get_buffer(self, data_np, header, format, output=None):
"""Get image as a buffer in (format).
Format should be 'jpeg', 'png', etc.
"""
if not have_pil:
raise Exception("Install PIL to use this method")
image = PILimage.fromarray(data_np)
buf = output
if buf is None:
buf = BytesIO()
image.save(buf, format)
return buf
def get_directory(self):
return self.hdu_db
def get_info_idx(self, idx):
return self.hdu_db[idx]
class OpenCvFileHandler(BaseRGBFileHandler):
name = 'OpenCv'
def open_file(self, filespec, **kwargs):
info = iohelper.get_fileinfo(filespec)
if not info.ondisk:
raise ImageError("File does not appear to be on disk: %s" % (
info.url))
self.fileinfo = info
filepath = info.filepath
self._path = filepath
self.rgb_f = cv2.VideoCapture(filepath)
# self.rgb_f.set(cv2.CAP_PROP_CONVERT_RGB, False)
idx = 0
extver_db = {}
self.hdu_info = []
self.hdu_db = {}
numframes = int(self.rgb_f.get(cv2.CAP_PROP_FRAME_COUNT))
self.logger.info("number of frames: {}".format(numframes))
naxispath = [numframes]
idx = 0
name = "frame{}".format(idx)
extver = 0
# prepare a record of pertinent info about the HDU for
# lookups by numerical index or (NAME, EXTVER)
d = Bunch.Bunch(index=idx, name=name, extver=extver,
dtype='uint8', htype='N/A')
self.hdu_info.append(d)
# different ways of accessing this HDU:
# by numerical index
self.hdu_db[idx] = d
# by (hduname, extver)
key = (name, extver)
if key not in self.hdu_db:
self.hdu_db[key] = d
self.extver_db = extver_db
return self
def close(self):
self._path = None
self.rgb_f = None
def __len__(self):
return len(self.hdu_info)
def load_idx(self, idx, **kwargs):
if self.rgb_f is None:
raise ValueError("Please call open_file() first!")
if idx is None:
idx = 0
self.rgb_f.set(cv2.CAP_PROP_POS_FRAMES, idx)
okay, data_np = self.rgb_f.read()
if not okay:
raise ValueError("Error reading index {}".format(idx))
metadata = {}
data_np = self._process_opencv_array(data_np, metadata,
self.fileinfo.filepath)
from ginga.RGBImage import RGBImage
data_obj = RGBImage(data_np=data_np, logger=self.logger,
order=metadata['order'], metadata=metadata)
data_obj.io = self
name = self.fileinfo.name + '[{}]'.format(idx)
data_obj.set(name=name, path=self.fileinfo.filepath, idx=idx)
return data_obj
def save_file_as(self, filepath, data_np, header):
# TODO: save keyword metadata!
if not have_opencv:
raise ImageError("Install 'opencv' to be able "
"to save images")
# First choice is OpenCv, because it supports high-bit depth
# multiband images
cv2.imwrite(filepath, data_np)
def _imload(self, filepath, metadata):
if not have_opencv:
raise ImageError("Install 'opencv' to be able to load images")
# OpenCv supports high-bit depth multiband images if you read like
# this
data_np = cv2.imread(filepath,
cv2.IMREAD_ANYDEPTH + cv2.IMREAD_ANYCOLOR)
return self._process_opencv_array(data_np, metadata, filepath)
def _process_opencv_array(self, data_np, metadata, filepath):
# opencv returns BGR images, whereas PIL and others return RGB
if len(data_np.shape) >= 3 and data_np.shape[2] >= 3:
#data_np = data_np[..., :: -1]
if data_np.shape[2] == 3:
order = 'BGR'
dst_order = 'RGB'
else:
order = 'BGRA'
dst_order = 'RGBA'
data_np = trcalc.reorder_image(dst_order, data_np, order)
metadata['order'] = dst_order
kwds = metadata.get('header', None)
if kwds is None:
kwds = Header()
metadata['header'] = kwds
# OpenCv doesn't "do" image metadata, so we punt to piexif
# library (if installed)
self.piexif_getexif(filepath, kwds)
# OpenCv added a feature to do auto-orientation when loading
# (see https://github.com/opencv/opencv/issues/4344)
# So reset these values to prevent auto-orientation from
# happening later
kwds['Orientation'] = 1
kwds['Image Orientation'] = 1
# convert to working color profile, if can
if self.clr_mgr.can_profile():
data_np = self.clr_mgr.profile_to_working_numpy(data_np, kwds)
return data_np
def _imresize(self, data, new_wd, new_ht, method='bilinear'):
# TODO: take into account the method parameter
if not have_opencv:
raise ImageError("Install 'opencv' to be able "
"to resize RGB images")
# First choice is OpenCv, because it supports high-bit depth
# multiband images
newdata = cv2.resize(data, dsize=(new_wd, new_ht),
interpolation=cv2.INTER_CUBIC)
return newdata
class PillowFileHandler(BaseRGBFileHandler):
name = 'Pillow'
def open_file(self, filespec, **kwargs):
info = iohelper.get_fileinfo(filespec)
if not info.ondisk:
raise ImageError("File does not appear to be on disk: %s" % (
info.url))
self.fileinfo = info
filepath = info.filepath
self._path = filepath
self.rgb_f = PILimage.open(filepath)
idx = 0
extver_db = {}
self.hdu_info = []
self.hdu_db = {}
numframes = getattr(self.rgb_f, 'n_frames', 1)
self.logger.info("number of frames: {}".format(numframes))
for idx in range(numframes):
name = "frame{}".format(idx)
extver = 0
# prepare a record of pertinent info about the HDU for
# lookups by numerical index or (NAME, EXTVER)
d = Bunch.Bunch(index=idx, name=name, extver=extver,
dtype='uint8', htype='N/A')
self.hdu_info.append(d)
# different ways of accessing this HDU:
# by numerical index
self.hdu_db[idx] = d
# by (hduname, extver)
key = (name, extver)
if key not in self.hdu_db:
self.hdu_db[key] = d
self.extver_db = extver_db
return self
def close(self):
self._path = None
self.rgb_f = None
def __len__(self):
return len(self.hdu_info)
def save_file_as(self, filepath, data_np, header):
# TODO: save keyword metadata!
if not have_pil:
raise ImageError("Install 'pillow' to be able "
"to save images")
img = PILimage.fromarray(data_np)
# pillow is not happy saving images to JPG with an alpha channel
img = img.convert('RGB')
img.save(filepath)
def load_idx(self, idx, **kwargs):
if self.rgb_f is None:
raise ValueError("Please call open_file() first!")
# "seek" functionality does not seem to be working for all the
# versions of Pillow we are encountering
#self.rgb_f.seek(idx)
image = self.rgb_f
kwds = {}
try:
self._get_header(image, kwds)
except Exception as e:
self.logger.warning("Failed to get image metadata: %s" % (str(e)))
metadata = dict(header=kwds)
# convert to working color profile, if can
if self.clr_mgr.can_profile():
image = self.clr_mgr.profile_to_working_pil(image, kwds)
# convert from PIL to numpy
data_np = np.array(image)
from ginga.RGBImage import RGBImage
data_obj = RGBImage(data_np=data_np, logger=self.logger,
order=image.mode)
data_obj.io = self
name = self.fileinfo.name + '[{}]'.format(idx)
data_obj.set(name=name, path=self.fileinfo.filepath, idx=idx,
header=kwds)
return data_obj
def _get_header(self, image, kwds):
if hasattr(image, '_getexif'):
info = image._getexif()
if info is not None:
for tag, value in info.items():
kwd = TAGS.get(tag, tag)
kwds[kwd] = value
elif have_exif:
self.piexif_getexif(image.info["exif"], kwds)
else:
raise Exception("Please install 'piexif' module to get image metadata")
def _imload(self, filepath, metadata):
if not have_pil:
raise ImageError("Install 'pillow' to be able "
"to load RGB images")
image = PILimage.open(filepath)
kwds = metadata.get('header', None)
if kwds is None:
kwds = Header()
metadata['header'] = kwds
try:
self._get_header(image, kwds)
except Exception as e:
self.logger.warning("Failed to get image metadata: {!r}".format(e))
# convert to working color profile, if can
if self.clr_mgr.can_profile():
image = self.clr_mgr.profile_to_working_pil(image, kwds)
# convert from PIL to numpy
data_np = np.array(image)
metadata['order'] = image.mode
return data_np
def _imresize(self, data, new_wd, new_ht, method='bilinear'):
# TODO: take into account the method parameter
if not have_pil:
raise ImageError("Install 'pillow' to be able "
"to resize RGB images")
img = PILimage.fromarray(data)
img = img.resize((new_wd, new_ht), PILimage.BICUBIC)
newdata = np.array(img)
return newdata
class PPMFileHandler(BaseRGBFileHandler):
name = 'PPM'
def _imload(self, filepath, metadata):
return open_ppm(filepath)
# UTILITY FUNCTIONS
def open_ppm(filepath):
infile = open(filepath, 'rb')
# Get type: PPM or PGM
header = infile.readline()
ptype = header.strip().upper()
    if ptype == b'P5':
        depth = 1
    elif ptype == b'P6':
        depth = 3
    else:
        raise ValueError("Unsupported PNM type: %r (expected P5 or P6)" % ptype)
#print header
# Get image dimensions
header = infile.readline().strip()
while header.startswith(b'#') or len(header) == 0:
header = infile.readline().strip()
#print(header)
width, height = [int(x) for x in header.split()]
header = infile.readline()
# Get unit size
maxval = int(header)
    if maxval <= 255:
        dtype = np.uint8
    elif maxval <= 65535:
        dtype = np.uint16
    else:
        raise ValueError("Unsupported PNM maxval: %d" % maxval)
#print width, height, maxval
# read image
if depth > 1:
arr = np.fromfile(infile, dtype=dtype).reshape((height, width, depth))
else:
arr = np.fromfile(infile, dtype=dtype).reshape((height, width))
    infile.close()
    if sys.byteorder == 'little':
        arr = arr.byteswap()
    return arr
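# A minimal round-trip sketch for open_ppm() above: write a tiny
# hypothetical binary P6 file and read it back as a numpy array.
def _ppm_roundtrip_sketch(path='/tmp/example.ppm'):
    data = np.arange(2 * 2 * 3, dtype=np.uint8).reshape((2, 2, 3))
    with open(path, 'wb') as f:
        f.write(b'P6\n2 2\n255\n')   # type, dimensions, maxval
        f.write(data.tobytes())
    arr = open_ppm(path)
    assert arr.shape == (2, 2, 3)
    return arr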
from collections.abc import Sequence, Iterator
class VideoAccess(Sequence, Iterator):
def __init__(self):
        super(VideoAccess, self).__init__()
self.rgb_f = None
self.idx = -1
self.shape = (0, 0, 0)
def open(self, filepath):
self.rgb_f = cv2.VideoCapture(filepath)
# self.rgb_f.set(cv2.CAP_PROP_CONVERT_RGB, False)
self.idx = 0
# Get width and height of frames and resize window
width = int(self.rgb_f.get(cv2.CAP_PROP_FRAME_WIDTH))
height = int(self.rgb_f.get(cv2.CAP_PROP_FRAME_HEIGHT))
depth = int(self.rgb_f.get(cv2.CAP_PROP_FRAME_COUNT))
self.shape = (width, height, depth)
return self
def read(self, idx):
self.rgb_f.set(cv2.CAP_PROP_POS_FRAMES, idx)
okay, data_np = self.rgb_f.read()
if not okay:
raise ValueError("Error reading index {}".format(idx))
data_np = data_np[..., :: -1]
return data_np
def __next__(self):
self.idx += 1
if self.idx == self.shape[2]:
raise StopIteration("Reached the end of frames")
return self.read(self.idx)
def __getitem__(self, idx):
return self.read(idx)
def __len__(self):
return self.shape[2]
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
# hopefully this closes the object
self.rgb_f = None
return False
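# A minimal usage sketch for VideoAccess above; 'example.mp4' is a
# hypothetical path, and OpenCV must be installed for this to work.
def _video_access_sketch(path='example.mp4'):
    with VideoAccess().open(path) as video:
        first_frame = video[0]    # random access through __getitem__
        num_frames = len(video)   # frame count from CAP_PROP_FRAME_COUNT
    return first_frame.shape, num_frames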
RGBFileHandler = PillowFileHandler
# END
|
halvertoluke/edx-platform
|
refs/heads/default_branch
|
common/lib/xmodule/xmodule/library_root_xblock.py
|
42
|
"""
'library' XBlock (LibraryRoot)
"""
import logging
from xmodule.studio_editable import StudioEditableModule
from xblock.fields import Scope, String, List, Boolean
from xblock.fragment import Fragment
from xblock.core import XBlock
log = logging.getLogger(__name__)
# Make '_' a no-op so we can scrape strings. Using lambda instead of
# `django.utils.translation.ugettext_noop` because Django cannot be imported in this file
_ = lambda text: text
class LibraryRoot(XBlock):
"""
The LibraryRoot is the root XBlock of a content library. All other blocks in
the library are its children. It contains metadata such as the library's
display_name.
"""
display_name = String(
help=_("Enter the name of the library as it should appear in Studio."),
default="Library",
display_name=_("Library Display Name"),
scope=Scope.settings
)
advanced_modules = List(
display_name=_("Advanced Module List"),
help=_("Enter the names of the advanced components to use in your library."),
scope=Scope.settings,
xml_node=True,
)
show_children_previews = Boolean(
display_name="Hide children preview",
help="Choose if preview of library contents is shown",
scope=Scope.user_state,
default=True
)
has_children = True
has_author_view = True
def __unicode__(self):
return u"Library: {}".format(self.display_name)
def __str__(self):
return unicode(self).encode('utf-8')
def author_view(self, context):
"""
Renders the Studio preview view.
"""
fragment = Fragment()
self.render_children(context, fragment, can_reorder=False, can_add=True)
return fragment
def render_children(self, context, fragment, can_reorder=False, can_add=False): # pylint: disable=unused-argument
"""
Renders the children of the module with HTML appropriate for Studio. Reordering is not supported.
"""
contents = []
paging = context.get('paging', None)
children_count = len(self.children) # pylint: disable=no-member
item_start, item_end = 0, children_count
# TODO sort children
if paging:
page_number = paging.get('page_number', 0)
raw_page_size = paging.get('page_size', None)
page_size = raw_page_size if raw_page_size is not None else children_count
item_start, item_end = page_size * page_number, page_size * (page_number + 1)
children_to_show = self.children[item_start:item_end] # pylint: disable=no-member
force_render = context.get('force_render', None)
for child_key in children_to_show:
# Children must have a separate context from the library itself. Make a copy.
child_context = context.copy()
child_context['show_preview'] = self.show_children_previews
child_context['can_edit_visibility'] = False
child = self.runtime.get_block(child_key)
child_view_name = StudioEditableModule.get_preview_view_name(child)
if unicode(child.location) == force_render:
child_context['show_preview'] = True
if child_context['show_preview']:
rendered_child = self.runtime.render_child(child, child_view_name, child_context)
else:
rendered_child = self.runtime.render_child_placeholder(child, child_view_name, child_context)
fragment.add_frag_resources(rendered_child)
contents.append({
'id': unicode(child.location),
'content': rendered_child.content,
})
fragment.add_content(
self.runtime.render_template("studio_render_paged_children_view.html", {
'items': contents,
'xblock_context': context,
'can_add': can_add,
'first_displayed': item_start,
'total_children': children_count,
'displayed_children': len(children_to_show),
'previews': self.show_children_previews
})
)
@property
def display_org_with_default(self):
"""
Org display names are not implemented. This just provides API compatibility with CourseDescriptor.
Always returns the raw 'org' field from the key.
"""
return self.scope_ids.usage_id.course_key.org
@property
def display_number_with_default(self):
"""
Display numbers are not implemented. This just provides API compatibility with CourseDescriptor.
Always returns the raw 'library' field from the key.
"""
return self.scope_ids.usage_id.course_key.library
@XBlock.json_handler
def trigger_previews(self, request_body, suffix): # pylint: disable=unused-argument
""" Enable or disable previews in studio for library children. """
self.show_children_previews = request_body.get('showChildrenPreviews', self.show_children_previews)
return {'showChildrenPreviews': self.show_children_previews}
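# A minimal standalone sketch (hypothetical values) of the paging arithmetic
# used by render_children() above: the displayed slice is one page of children.
def _page_bounds(page_number, page_size, children_count):
    size = page_size if page_size is not None else children_count
    return size * page_number, size * (page_number + 1)
# e.g. _page_bounds(2, 10, 35) == (20, 30): children 20..29 are shown.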
|
LittleLama/Sick-Beard-BoxCar2
|
refs/heads/development
|
lib/hachoir_parser/program/java.py
|
90
|
"""
Compiled Java classes parser.
Author: Thomas de Grenier de Latour (TGL) <degrenier@easyconnect.fr>
Creation: 2006/11/01
Last-update: 2006/11/06
Introduction:
* This parser is for compiled Java classes, aka .class files. What is nice
with this format is that it is well documented in the official Java VM specs.
* Some fields, and most field sets, have dynamic sizes, and there is no offset
to directly jump from a header to a given section, or anything like that.
It means that accessing a field at the end of the file requires that you've
already parsed almost the whole file. That's not very efficient, but it's
okay given the usual size of .class files (usually a few KB).
* Most fields are just indexes of some "constant pool" entries, which holds
most constant data of the class. And constant pool entries reference other
constant pool entries, etc. Hence, a raw display of these fields only shows
integers and is not really understandable. Because of that, this parser
comes with two important custom field classes:
- CPInfo are constant pool entries. They have a type ("Utf8", "Methodref",
etc.), and some contents fields depending on this type. They also have a
"__str__()" method, which returns a syntetic view of this contents.
- CPIndex are constant pool indexes (UInt16). It is possible to specify
what type of CPInfo they are allowed to point to. They also have a
custom display method, usually printing something like "-> foo", where
foo is the str() of their target CPInfo.
References:
* The Java Virtual Machine Specification, 2nd edition, chapter 4, in HTML:
http://java.sun.com/docs/books/vmspec/2nd-edition/html/ClassFile.doc.html
=> That's the spec I've been implementing so far. I think it is format
version 46.0 (JDK 1.2).
* The Java Virtual Machine Specification, 2nd edition, chapter 4, in PDF:
http://java.sun.com/docs/books/vmspec/2nd-edition/ClassFileFormat.pdf
=> don't trust the URL, this PDF version is more recent than the HTML one.
It highlights some recent additions to the format (I don't know the
exact version though), which are not yet implemented in this parser.
* The Java Virtual Machine Specification, chapter 4:
http://java.sun.com/docs/books/vmspec/html/ClassFile.doc.html
=> describes an older format, probably version 45.3 (JDK 1.1).
TODO/FIXME:
* Google for some existing free .class file parsers, to get more info on
the various formats differences, etc.
* Write/compile some good test cases.
* Rework pretty-printing of CPIndex fields. This str() thing stinks.
* Add support of formats other than 46.0 (45.3 seems to already be ok, but
there are things to add for later formats).
* Make parsing robust: currently, the parser will die on asserts as soon as
something seems wrong. It should rather be tolerant, print errors/warnings,
and try its best to continue. Check how error-handling is done in other
parsers.
* Gettextize the whole thing.
* Check whether Float32/64 are really the same as Java floats/double. PEP-0754
says that handling of +/-infinity and NaN is very implementation-dependent.
Also check how these values are displayed.
* Make the parser edition-proof. For instance, editing a constant-pool string
should update the length field of its entry, etc. Sounds like a huge amount of work.
"""
from lib.hachoir_parser import Parser
from lib.hachoir_core.field import (
ParserError, FieldSet, StaticFieldSet,
Enum, RawBytes, PascalString16, Float32, Float64,
Int8, UInt8, Int16, UInt16, Int32, UInt32, Int64,
Bit, NullBits )
from lib.hachoir_core.endian import BIG_ENDIAN
from lib.hachoir_core.text_handler import textHandler, hexadecimal
from lib.hachoir_core.tools import paddingSize
###############################################################################
def parse_flags(flags, flags_dict, show_unknown_flags=True, separator=" "):
"""
Parses an integer representing a set of flags. The known flags are
    stored with their bit-mask in a dictionary. Returns a string.
"""
flags_list = []
mask = 0x01
while mask <= flags:
if flags & mask:
if mask in flags_dict:
flags_list.append(flags_dict[mask])
elif show_unknown_flags:
flags_list.append("???")
mask = mask << 1
return separator.join(flags_list)
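# A minimal usage sketch of parse_flags() above, with a hypothetical
# bit-mask -> name dictionary (0x0009 decodes as public|static):
def _parse_flags_sketch():
    access_flags = {0x0001: "public", 0x0008: "static", 0x0010: "final"}
    return parse_flags(0x0009, access_flags)   # -> "public static"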
###############################################################################
code_to_type_name = {
'B': "byte",
'C': "char",
'D': "double",
'F': "float",
'I': "int",
'J': "long",
'S': "short",
'Z': "boolean",
'V': "void",
}
def eat_descriptor(descr):
"""
Read head of a field/method descriptor. Returns a pair of strings, where
the first one is a human-readable string representation of the first found
    type, and the second one is the remaining tail of the descriptor.
"""
array_dim = 0
while descr[0] == '[':
array_dim += 1
descr = descr[1:]
    if descr[0] == 'L':
        end = descr.find(';')
        if end == -1:
            raise ParserError("Not a valid descriptor string: " + descr)
        type = descr[1:end]
        descr = descr[end:]
    else:
        try:
            type = code_to_type_name[descr[0]]
        except KeyError:
            raise ParserError("Not a valid descriptor string: %s" % descr)
return (type.replace("/", ".") + array_dim * "[]", descr[1:])
def parse_field_descriptor(descr, name=None):
"""
Parse a field descriptor (single type), and returns it as human-readable
string representation.
"""
assert descr
(type, tail) = eat_descriptor(descr)
assert not tail
if name:
return type + " " + name
else:
return type
def parse_method_descriptor(descr, name=None):
"""
Parse a method descriptor (params type and return type), and returns it
as human-readable string representation.
"""
assert descr and (descr[0] == '(')
descr = descr[1:]
params_list = []
while descr[0] != ')':
(param, descr) = eat_descriptor(descr)
params_list.append(param)
(type, tail) = eat_descriptor(descr[1:])
assert not tail
params = ", ".join(params_list)
if name:
return "%s %s(%s)" % (type, name, params)
else:
return "%s (%s)" % (type, params)
def parse_any_descriptor(descr, name=None):
"""
Parse either a field or method descriptor, and returns it as human-
readable string representation.
"""
assert descr
if descr[0] == '(':
return parse_method_descriptor(descr, name)
else:
return parse_field_descriptor(descr, name)
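# A minimal self-check sketch (not part of the parser) exercising the
# descriptor helpers above on hypothetical descriptor strings.
def _descriptor_sketch():
    assert parse_field_descriptor("[[J", "matrix") == "long[][] matrix"
    assert parse_method_descriptor("(I[Ljava/lang/String;)V", "main") \
        == "void main(int, java.lang.String[])"
    assert parse_any_descriptor("(F)F") == "float (float)"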
###############################################################################
class FieldArray(FieldSet):
"""
Holds a fixed length array of fields which all have the same type. This
type may be variable-length. Each field will be named "foo[x]" (with x
starting at 0).
"""
def __init__(self, parent, name, elements_class, length,
**elements_extra_args):
"""Create a FieldArray of <length> fields of class <elements_class>,
named "<name>[x]". The **elements_extra_args will be passed to the
constructor of each field when yielded."""
FieldSet.__init__(self, parent, name)
self.array_elements_class = elements_class
self.array_length = length
self.array_elements_extra_args = elements_extra_args
def createFields(self):
for i in range(0, self.array_length):
yield self.array_elements_class(self, "%s[%d]" % (self.name, i),
**self.array_elements_extra_args)
class ConstantPool(FieldSet):
"""
ConstantPool is similar to a FieldArray of CPInfo fields, but:
- numbering starts at 1 instead of zero
- some indexes are skipped (after Long or Double entries)
"""
def __init__(self, parent, name, length):
FieldSet.__init__(self, parent, name)
self.constant_pool_length = length
def createFields(self):
i = 1
while i < self.constant_pool_length:
name = "%s[%d]" % (self.name, i)
yield CPInfo(self, name)
i += 1
if self[name].constant_type in ("Long", "Double"):
i += 1
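# A minimal standalone sketch (plain Python, hypothetical entry types) of the
# numbering rule implemented by ConstantPool.createFields() above: indexes
# start at 1, and Long/Double entries consume two indexes each.
def _pool_index_sketch():
    entry_types = ["Utf8", "Long", "Utf8"]
    index, indexes = 1, []
    for entry_type in entry_types:
        indexes.append(index)
        index += 2 if entry_type in ("Long", "Double") else 1
    return indexes   # -> [1, 2, 4]: index 3 is skipped after the Long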
###############################################################################
class CPIndex(UInt16):
"""
Holds index of a constant pool entry.
"""
def __init__(self, parent, name, description=None, target_types=None,
target_text_handler=(lambda x: x), allow_zero=False):
"""
Initialize a CPIndex.
        - target_types is the tuple of expected types for the target CPInfo
(if None, then there will be no type check)
- target_text_handler is a string transformation function used for
pretty printing the target str() result
- allow_zero states whether null index is allowed (sometimes, constant
          pool index is optional)
"""
UInt16.__init__(self, parent, name, description)
if isinstance(target_types, str):
self.target_types = (target_types,)
else:
self.target_types = target_types
self.allow_zero = allow_zero
self.target_text_handler = target_text_handler
self.getOriginalDisplay = lambda: self.value
def createDisplay(self):
cp_entry = self.get_cp_entry()
if self.allow_zero and not cp_entry:
return "ZERO"
assert cp_entry
return "-> " + self.target_text_handler(str(cp_entry))
def get_cp_entry(self):
"""
Returns the target CPInfo field.
"""
assert self.value < self["/constant_pool_count"].value
if self.allow_zero and not self.value: return None
cp_entry = self["/constant_pool/constant_pool[%d]" % self.value]
assert isinstance(cp_entry, CPInfo)
if self.target_types:
assert cp_entry.constant_type in self.target_types
return cp_entry
###############################################################################
class JavaOpcode(FieldSet):
OPSIZE = 0
def __init__(self, parent, name, op, desc):
FieldSet.__init__(self, parent, name)
        if self.OPSIZE != 0:
            self._size = self.OPSIZE * 8
self.op = op
self.desc = desc
def createDisplay(self):
return self.op
def createDescription(self):
return self.desc
def createValue(self):
return self.createDisplay()
class OpcodeNoArgs(JavaOpcode):
OPSIZE = 1
def createFields(self):
yield UInt8(self, "opcode")
class OpcodeCPIndex(JavaOpcode):
OPSIZE = 3
def createFields(self):
yield UInt8(self, "opcode")
yield CPIndex(self, "index")
def createDisplay(self):
return "%s(%i)"%(self.op, self["index"].value)
class OpcodeCPIndexShort(JavaOpcode):
OPSIZE = 2
def createFields(self):
yield UInt8(self, "opcode")
yield UInt8(self, "index")
def createDisplay(self):
return "%s(%i)"%(self.op, self["index"].value)
class OpcodeIndex(JavaOpcode):
OPSIZE = 2
def createFields(self):
yield UInt8(self, "opcode")
yield UInt8(self, "index")
def createDisplay(self):
return "%s(%i)"%(self.op, self["index"].value)
class OpcodeShortJump(JavaOpcode):
OPSIZE = 3
def createFields(self):
yield UInt8(self, "opcode")
yield Int16(self, "offset")
def createDisplay(self):
return "%s(%s)"%(self.op, self["offset"].value)
class OpcodeLongJump(JavaOpcode):
OPSIZE = 5
def createFields(self):
yield UInt8(self, "opcode")
yield Int32(self, "offset")
def createDisplay(self):
return "%s(%s)"%(self.op, self["offset"].value)
class OpcodeSpecial_bipush(JavaOpcode):
OPSIZE = 2
def createFields(self):
yield UInt8(self, "opcode")
yield Int8(self, "value")
def createDisplay(self):
return "%s(%s)"%(self.op, self["value"].value)
class OpcodeSpecial_sipush(JavaOpcode):
OPSIZE = 3
def createFields(self):
yield UInt8(self, "opcode")
yield Int16(self, "value")
def createDisplay(self):
return "%s(%s)"%(self.op, self["value"].value)
class OpcodeSpecial_iinc(JavaOpcode):
OPSIZE = 3
def createFields(self):
yield UInt8(self, "opcode")
yield UInt8(self, "index")
yield Int8(self, "value")
def createDisplay(self):
return "%s(%i,%i)"%(self.op, self["index"].value, self["value"].value)
class OpcodeSpecial_wide(JavaOpcode):
def createFields(self):
yield UInt8(self, "opcode")
new_op = UInt8(self, "new_opcode")
yield new_op
op = new_op._description = JavaBytecode.OPCODE_TABLE.get(new_op.value, ["reserved", None, "Reserved"])[0]
yield UInt16(self, "index")
if op == "iinc":
yield Int16(self, "value")
self.createDisplay = lambda self: "%s(%i,%i)"%(self.op, self["index"].value, self["value"].value)
else:
self.createDisplay = lambda self: "%s(%i)"%(self.op, self["index"].value)
class OpcodeSpecial_invokeinterface(JavaOpcode):
OPSIZE = 5
def createFields(self):
yield UInt8(self, "opcode")
yield CPIndex(self, "index")
yield UInt8(self, "count")
yield UInt8(self, "zero", "Must be zero.")
def createDisplay(self):
return "%s(%i,%i,%i)"%(self.op, self["index"].value, self["count"].value, self["zero"].value)
class OpcodeSpecial_newarray(JavaOpcode):
OPSIZE = 2
def createFields(self):
yield UInt8(self, "opcode")
        yield Enum(UInt8(self, "atype"), {4: "boolean",
                                          5: "char",
                                          6: "float",
                                          7: "double",
                                          8: "byte",
                                          9: "short",
                                          10: "int",
                                          11: "long"})
def createDisplay(self):
return "%s(%s)"%(self.op, self["atype"].createDisplay())
class OpcodeSpecial_multianewarray(JavaOpcode):
OPSIZE = 4
def createFields(self):
yield UInt8(self, "opcode")
yield CPIndex(self, "index")
yield UInt8(self, "dimensions")
def createDisplay(self):
return "%s(%i,%i)"%(self.op, self["index"].value, self["dimensions"].value)
class OpcodeSpecial_tableswitch(JavaOpcode):
def createFields(self):
yield UInt8(self, "opcode")
pad = paddingSize(self.address+8, 32)
if pad:
yield NullBits(self, "padding", pad)
yield Int32(self, "default")
low = Int32(self, "low")
yield low
high = Int32(self, "high")
yield high
for i in range(high.value-low.value+1):
yield Int32(self, "offset[]")
def createDisplay(self):
return "%s(%i,%i,%i,...)"%(self.op, self["default"].value, self["low"].value, self["high"].value)
class OpcodeSpecial_lookupswitch(JavaOpcode):
def createFields(self):
yield UInt8(self, "opcode")
pad = paddingSize(self.address+8, 32)
if pad:
yield NullBits(self, "padding", pad)
yield Int32(self, "default")
n = Int32(self, "npairs")
yield n
for i in range(n.value):
yield Int32(self, "match[]")
yield Int32(self, "offset[]")
def createDisplay(self):
return "%s(%i,%i,...)"%(self.op, self["default"].value, self["npairs"].value)
class JavaBytecode(FieldSet):
OPCODE_TABLE = {
0x00: ("nop", OpcodeNoArgs, "performs no operation. Stack: [No change]"),
0x01: ("aconst_null", OpcodeNoArgs, "pushes a 'null' reference onto the stack. Stack: -> null"),
0x02: ("iconst_m1", OpcodeNoArgs, "loads the int value -1 onto the stack. Stack: -> -1"),
0x03: ("iconst_0", OpcodeNoArgs, "loads the int value 0 onto the stack. Stack: -> 0"),
0x04: ("iconst_1", OpcodeNoArgs, "loads the int value 1 onto the stack. Stack: -> 1"),
0x05: ("iconst_2", OpcodeNoArgs, "loads the int value 2 onto the stack. Stack: -> 2"),
0x06: ("iconst_3", OpcodeNoArgs, "loads the int value 3 onto the stack. Stack: -> 3"),
0x07: ("iconst_4", OpcodeNoArgs, "loads the int value 4 onto the stack. Stack: -> 4"),
0x08: ("iconst_5", OpcodeNoArgs, "loads the int value 5 onto the stack. Stack: -> 5"),
0x09: ("lconst_0", OpcodeNoArgs, "pushes the long 0 onto the stack. Stack: -> 0L"),
0x0a: ("lconst_1", OpcodeNoArgs, "pushes the long 1 onto the stack. Stack: -> 1L"),
0x0b: ("fconst_0", OpcodeNoArgs, "pushes '0.0f' onto the stack. Stack: -> 0.0f"),
0x0c: ("fconst_1", OpcodeNoArgs, "pushes '1.0f' onto the stack. Stack: -> 1.0f"),
0x0d: ("fconst_2", OpcodeNoArgs, "pushes '2.0f' onto the stack. Stack: -> 2.0f"),
0x0e: ("dconst_0", OpcodeNoArgs, "pushes the constant '0.0' onto the stack. Stack: -> 0.0"),
0x0f: ("dconst_1", OpcodeNoArgs, "pushes the constant '1.0' onto the stack. Stack: -> 1.0"),
0x10: ("bipush", OpcodeSpecial_bipush, "pushes the signed 8-bit integer argument onto the stack. Stack: -> value"),
0x11: ("sipush", OpcodeSpecial_sipush, "pushes the signed 16-bit integer argument onto the stack. Stack: -> value"),
0x12: ("ldc", OpcodeCPIndexShort, "pushes a constant from a constant pool (String, int, float or class type) onto the stack. Stack: -> value"),
0x13: ("ldc_w", OpcodeCPIndex, "pushes a constant from a constant pool (String, int, float or class type) onto the stack. Stack: -> value"),
0x14: ("ldc2_w", OpcodeCPIndex, "pushes a constant from a constant pool (double or long) onto the stack. Stack: -> value"),
0x15: ("iload", OpcodeIndex, "loads an int 'value' from a local variable '#index'. Stack: -> value"),
0x16: ("lload", OpcodeIndex, "loads a long value from a local variable '#index'. Stack: -> value"),
0x17: ("fload", OpcodeIndex, "loads a float 'value' from a local variable '#index'. Stack: -> value"),
0x18: ("dload", OpcodeIndex, "loads a double 'value' from a local variable '#index'. Stack: -> value"),
0x19: ("aload", OpcodeIndex, "loads a reference onto the stack from a local variable '#index'. Stack: -> objectref"),
0x1a: ("iload_0", OpcodeNoArgs, "loads an int 'value' from variable 0. Stack: -> value"),
0x1b: ("iload_1", OpcodeNoArgs, "loads an int 'value' from variable 1. Stack: -> value"),
0x1c: ("iload_2", OpcodeNoArgs, "loads an int 'value' from variable 2. Stack: -> value"),
0x1d: ("iload_3", OpcodeNoArgs, "loads an int 'value' from variable 3. Stack: -> value"),
0x1e: ("lload_0", OpcodeNoArgs, "load a long value from a local variable 0. Stack: -> value"),
0x1f: ("lload_1", OpcodeNoArgs, "load a long value from a local variable 1. Stack: -> value"),
0x20: ("lload_2", OpcodeNoArgs, "load a long value from a local variable 2. Stack: -> value"),
0x21: ("lload_3", OpcodeNoArgs, "load a long value from a local variable 3. Stack: -> value"),
0x22: ("fload_0", OpcodeNoArgs, "loads a float 'value' from local variable 0. Stack: -> value"),
0x23: ("fload_1", OpcodeNoArgs, "loads a float 'value' from local variable 1. Stack: -> value"),
0x24: ("fload_2", OpcodeNoArgs, "loads a float 'value' from local variable 2. Stack: -> value"),
0x25: ("fload_3", OpcodeNoArgs, "loads a float 'value' from local variable 3. Stack: -> value"),
0x26: ("dload_0", OpcodeNoArgs, "loads a double from local variable 0. Stack: -> value"),
0x27: ("dload_1", OpcodeNoArgs, "loads a double from local variable 1. Stack: -> value"),
0x28: ("dload_2", OpcodeNoArgs, "loads a double from local variable 2. Stack: -> value"),
0x29: ("dload_3", OpcodeNoArgs, "loads a double from local variable 3. Stack: -> value"),
0x2a: ("aload_0", OpcodeNoArgs, "loads a reference onto the stack from local variable 0. Stack: -> objectref"),
0x2b: ("aload_1", OpcodeNoArgs, "loads a reference onto the stack from local variable 1. Stack: -> objectref"),
0x2c: ("aload_2", OpcodeNoArgs, "loads a reference onto the stack from local variable 2. Stack: -> objectref"),
0x2d: ("aload_3", OpcodeNoArgs, "loads a reference onto the stack from local variable 3. Stack: -> objectref"),
0x2e: ("iaload", OpcodeNoArgs, "loads an int from an array. Stack: arrayref, index -> value"),
0x2f: ("laload", OpcodeNoArgs, "load a long from an array. Stack: arrayref, index -> value"),
0x30: ("faload", OpcodeNoArgs, "loads a float from an array. Stack: arrayref, index -> value"),
0x31: ("daload", OpcodeNoArgs, "loads a double from an array. Stack: arrayref, index -> value"),
0x32: ("aaload", OpcodeNoArgs, "loads onto the stack a reference from an array. Stack: arrayref, index -> value"),
0x33: ("baload", OpcodeNoArgs, "loads a byte or Boolean value from an array. Stack: arrayref, index -> value"),
0x34: ("caload", OpcodeNoArgs, "loads a char from an array. Stack: arrayref, index -> value"),
0x35: ("saload", OpcodeNoArgs, "load short from array. Stack: arrayref, index -> value"),
0x36: ("istore", OpcodeIndex, "store int 'value' into variable '#index'. Stack: value ->"),
0x37: ("lstore", OpcodeIndex, "store a long 'value' in a local variable '#index'. Stack: value ->"),
0x38: ("fstore", OpcodeIndex, "stores a float 'value' into a local variable '#index'. Stack: value ->"),
0x39: ("dstore", OpcodeIndex, "stores a double 'value' into a local variable '#index'. Stack: value ->"),
0x3a: ("astore", OpcodeIndex, "stores a reference into a local variable '#index'. Stack: objectref ->"),
0x3b: ("istore_0", OpcodeNoArgs, "store int 'value' into variable 0. Stack: value ->"),
0x3c: ("istore_1", OpcodeNoArgs, "store int 'value' into variable 1. Stack: value ->"),
0x3d: ("istore_2", OpcodeNoArgs, "store int 'value' into variable 2. Stack: value ->"),
0x3e: ("istore_3", OpcodeNoArgs, "store int 'value' into variable 3. Stack: value ->"),
0x3f: ("lstore_0", OpcodeNoArgs, "store a long 'value' in a local variable 0. Stack: value ->"),
0x40: ("lstore_1", OpcodeNoArgs, "store a long 'value' in a local variable 1. Stack: value ->"),
0x41: ("lstore_2", OpcodeNoArgs, "store a long 'value' in a local variable 2. Stack: value ->"),
0x42: ("lstore_3", OpcodeNoArgs, "store a long 'value' in a local variable 3. Stack: value ->"),
0x43: ("fstore_0", OpcodeNoArgs, "stores a float 'value' into local variable 0. Stack: value ->"),
0x44: ("fstore_1", OpcodeNoArgs, "stores a float 'value' into local variable 1. Stack: value ->"),
0x45: ("fstore_2", OpcodeNoArgs, "stores a float 'value' into local variable 2. Stack: value ->"),
0x46: ("fstore_3", OpcodeNoArgs, "stores a float 'value' into local variable 3. Stack: value ->"),
0x47: ("dstore_0", OpcodeNoArgs, "stores a double into local variable 0. Stack: value ->"),
0x48: ("dstore_1", OpcodeNoArgs, "stores a double into local variable 1. Stack: value ->"),
0x49: ("dstore_2", OpcodeNoArgs, "stores a double into local variable 2. Stack: value ->"),
0x4a: ("dstore_3", OpcodeNoArgs, "stores a double into local variable 3. Stack: value ->"),
0x4b: ("astore_0", OpcodeNoArgs, "stores a reference into local variable 0. Stack: objectref ->"),
0x4c: ("astore_1", OpcodeNoArgs, "stores a reference into local variable 1. Stack: objectref ->"),
0x4d: ("astore_2", OpcodeNoArgs, "stores a reference into local variable 2. Stack: objectref ->"),
0x4e: ("astore_3", OpcodeNoArgs, "stores a reference into local variable 3. Stack: objectref ->"),
0x4f: ("iastore", OpcodeNoArgs, "stores an int into an array. Stack: arrayref, index, value ->"),
0x50: ("lastore", OpcodeNoArgs, "store a long to an array. Stack: arrayref, index, value ->"),
0x51: ("fastore", OpcodeNoArgs, "stores a float in an array. Stack: arreyref, index, value ->"),
0x52: ("dastore", OpcodeNoArgs, "stores a double into an array. Stack: arrayref, index, value ->"),
0x53: ("aastore", OpcodeNoArgs, "stores into a reference to an array. Stack: arrayref, index, value ->"),
0x54: ("bastore", OpcodeNoArgs, "stores a byte or Boolean value into an array. Stack: arrayref, index, value ->"),
0x55: ("castore", OpcodeNoArgs, "stores a char into an array. Stack: arrayref, index, value ->"),
0x56: ("sastore", OpcodeNoArgs, "store short to array. Stack: arrayref, index, value ->"),
0x57: ("pop", OpcodeNoArgs, "discards the top value on the stack. Stack: value ->"),
0x58: ("pop2", OpcodeNoArgs, "discards the top two values on the stack (or one value, if it is a double or long). Stack: {value2, value1} ->"),
0x59: ("dup", OpcodeNoArgs, "duplicates the value on top of the stack. Stack: value -> value, value"),
0x5a: ("dup_x1", OpcodeNoArgs, "inserts a copy of the top value into the stack two values from the top. Stack: value2, value1 -> value1, value2, value1"),
0x5b: ("dup_x2", OpcodeNoArgs, "inserts a copy of the top value into the stack two (if value2 is double or long it takes up the entry of value3, too) or three values (if value2 is neither double nor long) from the top. Stack: value3, value2, value1 -> value1, value3, value2, value1"),
0x5c: ("dup2", OpcodeNoArgs, "duplicate top two stack words (two values, if value1 is not double nor long; a single value, if value1 is double or long). Stack: {value2, value1} -> {value2, value1}, {value2, value1}"),
0x5d: ("dup2_x1", OpcodeNoArgs, "duplicate two words and insert beneath third word. Stack: value3, {value2, value1} -> {value2, value1}, value3, {value2, value1}"),
0x5e: ("dup2_x2", OpcodeNoArgs, "duplicate two words and insert beneath fourth word. Stack: {value4, value3}, {value2, value1} -> {value2, value1}, {value4, value3}, {value2, value1}"),
0x5f: ("swap", OpcodeNoArgs, "swaps two top words on the stack (note that value1 and value2 must not be double or long). Stack: value2, value1 -> value1, value2"),
0x60: ("iadd", OpcodeNoArgs, "adds two ints together. Stack: value1, value2 -> result"),
0x61: ("ladd", OpcodeNoArgs, "add two longs. Stack: value1, value2 -> result"),
0x62: ("fadd", OpcodeNoArgs, "adds two floats. Stack: value1, value2 -> result"),
0x63: ("dadd", OpcodeNoArgs, "adds two doubles. Stack: value1, value2 -> result"),
0x64: ("isub", OpcodeNoArgs, "int subtract. Stack: value1, value2 -> result"),
0x65: ("lsub", OpcodeNoArgs, "subtract two longs. Stack: value1, value2 -> result"),
0x66: ("fsub", OpcodeNoArgs, "subtracts two floats. Stack: value1, value2 -> result"),
0x67: ("dsub", OpcodeNoArgs, "subtracts a double from another. Stack: value1, value2 -> result"),
0x68: ("imul", OpcodeNoArgs, "multiply two integers. Stack: value1, value2 -> result"),
0x69: ("lmul", OpcodeNoArgs, "multiplies two longs. Stack: value1, value2 -> result"),
0x6a: ("fmul", OpcodeNoArgs, "multiplies two floats. Stack: value1, value2 -> result"),
0x6b: ("dmul", OpcodeNoArgs, "multiplies two doubles. Stack: value1, value2 -> result"),
0x6c: ("idiv", OpcodeNoArgs, "divides two integers. Stack: value1, value2 -> result"),
0x6d: ("ldiv", OpcodeNoArgs, "divide two longs. Stack: value1, value2 -> result"),
0x6e: ("fdiv", OpcodeNoArgs, "divides two floats. Stack: value1, value2 -> result"),
0x6f: ("ddiv", OpcodeNoArgs, "divides two doubles. Stack: value1, value2 -> result"),
0x70: ("irem", OpcodeNoArgs, "logical int remainder. Stack: value1, value2 -> result"),
0x71: ("lrem", OpcodeNoArgs, "remainder of division of two longs. Stack: value1, value2 -> result"),
0x72: ("frem", OpcodeNoArgs, "gets the remainder from a division between two floats. Stack: value1, value2 -> result"),
0x73: ("drem", OpcodeNoArgs, "gets the remainder from a division between two doubles. Stack: value1, value2 -> result"),
0x74: ("ineg", OpcodeNoArgs, "negate int. Stack: value -> result"),
0x75: ("lneg", OpcodeNoArgs, "negates a long. Stack: value -> result"),
0x76: ("fneg", OpcodeNoArgs, "negates a float. Stack: value -> result"),
0x77: ("dneg", OpcodeNoArgs, "negates a double. Stack: value -> result"),
0x78: ("ishl", OpcodeNoArgs, "int shift left. Stack: value1, value2 -> result"),
0x79: ("lshl", OpcodeNoArgs, "bitwise shift left of a long 'value1' by 'value2' positions. Stack: value1, value2 -> result"),
0x7a: ("ishr", OpcodeNoArgs, "int shift right. Stack: value1, value2 -> result"),
0x7b: ("lshr", OpcodeNoArgs, "bitwise shift right of a long 'value1' by 'value2' positions. Stack: value1, value2 -> result"),
0x7c: ("iushr", OpcodeNoArgs, "int shift right. Stack: value1, value2 -> result"),
0x7d: ("lushr", OpcodeNoArgs, "bitwise shift right of a long 'value1' by 'value2' positions, unsigned. Stack: value1, value2 -> result"),
0x7e: ("iand", OpcodeNoArgs, "performs a logical and on two integers. Stack: value1, value2 -> result"),
0x7f: ("land", OpcodeNoArgs, "bitwise and of two longs. Stack: value1, value2 -> result"),
0x80: ("ior", OpcodeNoArgs, "logical int or. Stack: value1, value2 -> result"),
0x81: ("lor", OpcodeNoArgs, "bitwise or of two longs. Stack: value1, value2 -> result"),
0x82: ("ixor", OpcodeNoArgs, "int xor. Stack: value1, value2 -> result"),
0x83: ("lxor", OpcodeNoArgs, "bitwise exclusive or of two longs. Stack: value1, value2 -> result"),
0x84: ("iinc", OpcodeSpecial_iinc, "increment local variable '#index' by signed byte 'const'. Stack: [No change]"),
0x85: ("i2l", OpcodeNoArgs, "converts an int into a long. Stack: value -> result"),
0x86: ("i2f", OpcodeNoArgs, "converts an int into a float. Stack: value -> result"),
0x87: ("i2d", OpcodeNoArgs, "converts an int into a double. Stack: value -> result"),
0x88: ("l2i", OpcodeNoArgs, "converts a long to an int. Stack: value -> result"),
0x89: ("l2f", OpcodeNoArgs, "converts a long to a float. Stack: value -> result"),
0x8a: ("l2d", OpcodeNoArgs, "converts a long to a double. Stack: value -> result"),
0x8b: ("f2i", OpcodeNoArgs, "converts a float to an int. Stack: value -> result"),
0x8c: ("f2l", OpcodeNoArgs, "converts a float to a long. Stack: value -> result"),
0x8d: ("f2d", OpcodeNoArgs, "converts a float to a double. Stack: value -> result"),
0x8e: ("d2i", OpcodeNoArgs, "converts a double to an int. Stack: value -> result"),
0x8f: ("d2l", OpcodeNoArgs, "converts a double to a long. Stack: value -> result"),
0x90: ("d2f", OpcodeNoArgs, "converts a double to a float. Stack: value -> result"),
0x91: ("i2b", OpcodeNoArgs, "converts an int into a byte. Stack: value -> result"),
0x92: ("i2c", OpcodeNoArgs, "converts an int into a character. Stack: value -> result"),
0x93: ("i2s", OpcodeNoArgs, "converts an int into a short. Stack: value -> result"),
0x94: ("lcmp", OpcodeNoArgs, "compares two longs values. Stack: value1, value2 -> result"),
0x95: ("fcmpl", OpcodeNoArgs, "compares two floats. Stack: value1, value2 -> result"),
0x96: ("fcmpg", OpcodeNoArgs, "compares two floats. Stack: value1, value2 -> result"),
0x97: ("dcmpl", OpcodeNoArgs, "compares two doubles. Stack: value1, value2 -> result"),
0x98: ("dcmpg", OpcodeNoArgs, "compares two doubles. Stack: value1, value2 -> result"),
0x99: ("ifeq", OpcodeShortJump, "if 'value' is 0, branch to the 16-bit instruction offset argument. Stack: value ->"),
0x9a: ("ifne", OpcodeShortJump, "if 'value' is not 0, branch to the 16-bit instruction offset argument. Stack: value ->"),
0x9c: ("ifge", OpcodeShortJump, "if 'value' is greater than or equal to 0, branch to the 16-bit instruction offset argument. Stack: value ->"),
0x9d: ("ifgt", OpcodeShortJump, "if 'value' is greater than 0, branch to the 16-bit instruction offset argument. Stack: value ->"),
0x9e: ("ifle", OpcodeShortJump, "if 'value' is less than or equal to 0, branch to the 16-bit instruction offset argument. Stack: value ->"),
0x9f: ("if_icmpeq", OpcodeShortJump, "if ints are equal, branch to the 16-bit instruction offset argument. Stack: value1, value2 ->"),
0xa0: ("if_icmpne", OpcodeShortJump, "if ints are not equal, branch to the 16-bit instruction offset argument. Stack: value1, value2 ->"),
0xa1: ("if_icmplt", OpcodeShortJump, "if 'value1' is less than 'value2', branch to the 16-bit instruction offset argument. Stack: value1, value2 ->"),
0xa2: ("if_icmpge", OpcodeShortJump, "if 'value1' is greater than or equal to 'value2', branch to the 16-bit instruction offset argument. Stack: value1, value2 ->"),
0xa3: ("if_icmpgt", OpcodeShortJump, "if 'value1' is greater than 'value2', branch to the 16-bit instruction offset argument. Stack: value1, value2 ->"),
0xa4: ("if_icmple", OpcodeShortJump, "if 'value1' is less than or equal to 'value2', branch to the 16-bit instruction offset argument. Stack: value1, value2 ->"),
0xa5: ("if_acmpeq", OpcodeShortJump, "if references are equal, branch to the 16-bit instruction offset argument. Stack: value1, value2 ->"),
0xa6: ("if_acmpne", OpcodeShortJump, "if references are not equal, branch to the 16-bit instruction offset argument. Stack: value1, value2 ->"),
0xa7: ("goto", OpcodeShortJump, "goes to the 16-bit instruction offset argument. Stack: [no change]"),
0xa8: ("jsr", OpcodeShortJump, "jump to subroutine at the 16-bit instruction offset argument and place the return address on the stack. Stack: -> address"),
0xa9: ("ret", OpcodeIndex, "continue execution from address taken from a local variable '#index'. Stack: [No change]"),
0xaa: ("tableswitch", OpcodeSpecial_tableswitch, "continue execution from an address in the table at offset 'index'. Stack: index ->"),
0xab: ("lookupswitch", OpcodeSpecial_lookupswitch, "a target address is looked up from a table using a key and execution continues from the instruction at that address. Stack: key ->"),
0xac: ("ireturn", OpcodeNoArgs, "returns an integer from a method. Stack: value -> [empty]"),
0xad: ("lreturn", OpcodeNoArgs, "returns a long value. Stack: value -> [empty]"),
0xae: ("freturn", OpcodeNoArgs, "returns a float. Stack: value -> [empty]"),
0xaf: ("dreturn", OpcodeNoArgs, "returns a double from a method. Stack: value -> [empty]"),
0xb0: ("areturn", OpcodeNoArgs, "returns a reference from a method. Stack: objectref -> [empty]"),
0xb1: ("return", OpcodeNoArgs, "return void from method. Stack: -> [empty]"),
0xb2: ("getstatic", OpcodeCPIndex, "gets a static field 'value' of a class, where the field is identified by field reference in the constant pool. Stack: -> value"),
0xb3: ("putstatic", OpcodeCPIndex, "set static field to 'value' in a class, where the field is identified by a field reference in constant pool. Stack: value ->"),
0xb4: ("getfield", OpcodeCPIndex, "gets a field 'value' of an object 'objectref', where the field is identified by field reference <argument> in the constant pool. Stack: objectref -> value"),
0xb5: ("putfield", OpcodeCPIndex, "set field to 'value' in an object 'objectref', where the field is identified by a field reference <argument> in constant pool. Stack: objectref, value ->"),
0xb6: ("invokevirtual", OpcodeCPIndex, "invoke virtual method on object 'objectref', where the method is identified by method reference <argument> in constant pool. Stack: objectref, [arg1, arg2, ...] ->"),
0xb7: ("invokespecial", OpcodeCPIndex, "invoke instance method on object 'objectref', where the method is identified by method reference <argument> in constant pool. Stack: objectref, [arg1, arg2, ...] ->"),
0xb8: ("invokestatic", OpcodeCPIndex, "invoke a static method, where the method is identified by method reference <argument> in the constant pool. Stack: [arg1, arg2, ...] ->"),
0xb9: ("invokeinterface", OpcodeSpecial_invokeinterface, "invokes an interface method on object 'objectref', where the interface method is identified by method reference <argument> in constant pool. Stack: objectref, [arg1, arg2, ...] ->"),
0xba: ("xxxunusedxxx", OpcodeNoArgs, "this opcode is reserved for historical reasons. Stack: "),
0xbb: ("new", OpcodeCPIndex, "creates new object of type identified by class reference <argument> in constant pool. Stack: -> objectref"),
0xbc: ("newarray", OpcodeSpecial_newarray, "creates new array with 'count' elements of primitive type given in the argument. Stack: count -> arrayref"),
0xbd: ("anewarray", OpcodeCPIndex, "creates a new array of references of length 'count' and component type identified by the class reference <argument> in the constant pool. Stack: count -> arrayref"),
0xbe: ("arraylength", OpcodeNoArgs, "gets the length of an array. Stack: arrayref -> length"),
0xbf: ("athrow", OpcodeNoArgs, "throws an error or exception (notice that the rest of the stack is cleared, leaving only a reference to the Throwable). Stack: objectref -> [empty], objectref"),
0xc0: ("checkcast", OpcodeCPIndex, "checks whether an 'objectref' is of a certain type, the class reference of which is in the constant pool. Stack: objectref -> objectref"),
0xc1: ("instanceof", OpcodeCPIndex, "determines if an object 'objectref' is of a given type, identified by class reference <argument> in constant pool. Stack: objectref -> result"),
0xc2: ("monitorenter", OpcodeNoArgs, "enter monitor for object (\"grab the lock\" - start of synchronized() section). Stack: objectref -> "),
0xc3: ("monitorexit", OpcodeNoArgs, "exit monitor for object (\"release the lock\" - end of synchronized() section). Stack: objectref -> "),
0xc4: ("wide", OpcodeSpecial_wide, "execute 'opcode', where 'opcode' is either iload, fload, aload, lload, dload, istore, fstore, astore, lstore, dstore, or ret, but assume the 'index' is 16 bit; or execute iinc, where the 'index' is 16 bits and the constant to increment by is a signed 16 bit short. Stack: [same as for corresponding instructions]"),
0xc5: ("multianewarray", OpcodeSpecial_multianewarray, "create a new array of 'dimensions' dimensions with elements of type identified by class reference in constant pool; the sizes of each dimension is identified by 'count1', ['count2', etc]. Stack: count1, [count2,...] -> arrayref"),
0xc6: ("ifnull", OpcodeShortJump, "if 'value' is null, branch to the 16-bit instruction offset argument. Stack: value ->"),
0xc7: ("ifnonnull", OpcodeShortJump, "if 'value' is not null, branch to the 16-bit instruction offset argument. Stack: value ->"),
0xc8: ("goto_w", OpcodeLongJump, "goes to another instruction at the 32-bit branch offset argument. Stack: [no change]"),
0xc9: ("jsr_w", OpcodeLongJump, "jump to subroutine at the 32-bit branch offset argument and place the return address on the stack. Stack: -> address"),
0xca: ("breakpoint", OpcodeNoArgs, "reserved for breakpoints in Java debuggers; should not appear in any class file."),
0xfe: ("impdep1", OpcodeNoArgs, "reserved for implementation-dependent operations within debuggers; should not appear in any class file."),
0xff: ("impdep2", OpcodeNoArgs, "reserved for implementation-dependent operations within debuggers; should not appear in any class file.")}
def __init__(self, parent, name, length):
FieldSet.__init__(self, parent, name)
self._size = length*8
def createFields(self):
while self.current_size < self.size:
bytecode = ord(self.parent.stream.readBytes(self.absolute_address+self.current_size, 1))
op, cls, desc = self.OPCODE_TABLE.get(bytecode,["<reserved_opcode>", OpcodeNoArgs, "Reserved opcode."])
yield cls(self, "bytecode[]", op, desc)
###############################################################################
class CPInfo(FieldSet):
"""
Holds a constant pool entry. Entries all have a type, and various contents
fields depending on their type.
"""
def createFields(self):
yield Enum(UInt8(self, "tag"), self.root.CONSTANT_TYPES)
if self["tag"].value not in self.root.CONSTANT_TYPES:
raise ParserError("Java: unknown constant type (%s)" % self["tag"].value)
self.constant_type = self.root.CONSTANT_TYPES[self["tag"].value]
if self.constant_type == "Utf8":
yield PascalString16(self, "bytes", charset="UTF-8")
elif self.constant_type == "Integer":
yield Int32(self, "bytes")
elif self.constant_type == "Float":
yield Float32(self, "bytes")
elif self.constant_type == "Long":
yield Int64(self, "bytes")
elif self.constant_type == "Double":
yield Float64(self, "bytes")
elif self.constant_type == "Class":
yield CPIndex(self, "name_index", "Class or interface name", target_types="Utf8")
elif self.constant_type == "String":
yield CPIndex(self, "string_index", target_types="Utf8")
elif self.constant_type == "Fieldref":
yield CPIndex(self, "class_index", "Field class or interface name", target_types="Class")
yield CPIndex(self, "name_and_type_index", target_types="NameAndType")
elif self.constant_type == "Methodref":
yield CPIndex(self, "class_index", "Method class name", target_types="Class")
yield CPIndex(self, "name_and_type_index", target_types="NameAndType")
elif self.constant_type == "InterfaceMethodref":
yield CPIndex(self, "class_index", "Method interface name", target_types="Class")
yield CPIndex(self, "name_and_type_index", target_types="NameAndType")
elif self.constant_type == "NameAndType":
yield CPIndex(self, "name_index", target_types="Utf8")
yield CPIndex(self, "descriptor_index", target_types="Utf8")
        else:
            raise ParserError("Not a valid constant pool element type: %s"
                              % self["tag"].value)
def __str__(self):
"""
Returns a human-readable string representation of the constant pool
entry. It is used for pretty-printing of the CPIndex fields pointing
to it.
"""
if self.constant_type == "Utf8":
return self["bytes"].value
elif self.constant_type in ("Integer", "Float", "Long", "Double"):
return self["bytes"].display
elif self.constant_type == "Class":
class_name = str(self["name_index"].get_cp_entry())
return class_name.replace("/",".")
elif self.constant_type == "String":
return str(self["string_index"].get_cp_entry())
elif self.constant_type == "Fieldref":
return "%s (from %s)" % (self["name_and_type_index"], self["class_index"])
elif self.constant_type == "Methodref":
return "%s (from %s)" % (self["name_and_type_index"], self["class_index"])
elif self.constant_type == "InterfaceMethodref":
return "%s (from %s)" % (self["name_and_type_index"], self["class_index"])
elif self.constant_type == "NameAndType":
return parse_any_descriptor(
str(self["descriptor_index"].get_cp_entry()),
name=str(self["name_index"].get_cp_entry()))
        else:
            # FIXME: Return "<error>" instead of raising an exception?
            raise ParserError("Not a valid constant pool element type: %s"
                              % self["tag"].value)
###############################################################################
# field_info {
# u2 access_flags;
# u2 name_index;
# u2 descriptor_index;
# u2 attributes_count;
# attribute_info attributes[attributes_count];
# }
class FieldInfo(FieldSet):
def createFields(self):
# Access flags (16 bits)
yield NullBits(self, "reserved[]", 8)
yield Bit(self, "transient")
yield Bit(self, "volatile")
yield NullBits(self, "reserved[]", 1)
yield Bit(self, "final")
yield Bit(self, "static")
yield Bit(self, "protected")
yield Bit(self, "private")
yield Bit(self, "public")
yield CPIndex(self, "name_index", "Field name", target_types="Utf8")
yield CPIndex(self, "descriptor_index", "Field descriptor", target_types="Utf8",
target_text_handler=parse_field_descriptor)
yield UInt16(self, "attributes_count", "Number of field attributes")
if self["attributes_count"].value > 0:
yield FieldArray(self, "attributes", AttributeInfo,
self["attributes_count"].value)
###############################################################################
# method_info {
# u2 access_flags;
# u2 name_index;
# u2 descriptor_index;
# u2 attributes_count;
# attribute_info attributes[attributes_count];
# }
class MethodInfo(FieldSet):
def createFields(self):
# Access flags (16 bits)
yield NullBits(self, "reserved[]", 4)
yield Bit(self, "strict")
yield Bit(self, "abstract")
yield NullBits(self, "reserved[]", 1)
yield Bit(self, "native")
yield NullBits(self, "reserved[]", 2)
yield Bit(self, "synchronized")
yield Bit(self, "final")
yield Bit(self, "static")
yield Bit(self, "protected")
yield Bit(self, "private")
yield Bit(self, "public")
yield CPIndex(self, "name_index", "Method name", target_types="Utf8")
yield CPIndex(self, "descriptor_index", "Method descriptor",
target_types="Utf8",
target_text_handler=parse_method_descriptor)
yield UInt16(self, "attributes_count", "Number of method attributes")
if self["attributes_count"].value > 0:
yield FieldArray(self, "attributes", AttributeInfo,
self["attributes_count"].value)
###############################################################################
# attribute_info {
# u2 attribute_name_index;
# u4 attribute_length;
# u1 info[attribute_length];
# }
# [...]
class AttributeInfo(FieldSet):
def __init__(self, *args):
FieldSet.__init__(self, *args)
self._size = (self["attribute_length"].value + 6) * 8
def createFields(self):
yield CPIndex(self, "attribute_name_index", "Attribute name", target_types="Utf8")
yield UInt32(self, "attribute_length", "Length of the attribute")
attr_name = str(self["attribute_name_index"].get_cp_entry())
# ConstantValue_attribute {
# u2 attribute_name_index;
# u4 attribute_length;
# u2 constantvalue_index;
# }
if attr_name == "ConstantValue":
if self["attribute_length"].value != 2:
raise ParserError("Java: Invalid attribute %s length (%s)" \
% (self.path, self["attribute_length"].value))
yield CPIndex(self, "constantvalue_index",
target_types=("Long","Float","Double","Integer","String"))
# Code_attribute {
# u2 attribute_name_index;
# u4 attribute_length;
# u2 max_stack;
# u2 max_locals;
# u4 code_length;
# u1 code[code_length];
# u2 exception_table_length;
# { u2 start_pc;
# u2 end_pc;
# u2 handler_pc;
# u2 catch_type;
# } exception_table[exception_table_length];
# u2 attributes_count;
# attribute_info attributes[attributes_count];
# }
elif attr_name == "Code":
yield UInt16(self, "max_stack")
yield UInt16(self, "max_locals")
yield UInt32(self, "code_length")
if self["code_length"].value > 0:
yield JavaBytecode(self, "code", self["code_length"].value)
yield UInt16(self, "exception_table_length")
if self["exception_table_length"].value > 0:
yield FieldArray(self, "exception_table", ExceptionTableEntry,
self["exception_table_length"].value)
yield UInt16(self, "attributes_count")
if self["attributes_count"].value > 0:
yield FieldArray(self, "attributes", AttributeInfo,
self["attributes_count"].value)
# Exceptions_attribute {
# u2 attribute_name_index;
# u4 attribute_length;
# u2 number_of_exceptions;
# u2 exception_index_table[number_of_exceptions];
# }
elif (attr_name == "Exceptions"):
yield UInt16(self, "number_of_exceptions")
yield FieldArray(self, "exception_index_table", CPIndex,
self["number_of_exceptions"].value, target_types="Class")
assert self["attribute_length"].value == \
2 + self["number_of_exceptions"].value * 2
# InnerClasses_attribute {
# u2 attribute_name_index;
# u4 attribute_length;
# u2 number_of_classes;
# { u2 inner_class_info_index;
# u2 outer_class_info_index;
# u2 inner_name_index;
# u2 inner_class_access_flags;
# } classes[number_of_classes];
# }
elif (attr_name == "InnerClasses"):
yield UInt16(self, "number_of_classes")
if self["number_of_classes"].value > 0:
yield FieldArray(self, "classes", InnerClassesEntry,
self["number_of_classes"].value)
assert self["attribute_length"].value == \
2 + self["number_of_classes"].value * 8
# Synthetic_attribute {
# u2 attribute_name_index;
# u4 attribute_length;
# }
elif (attr_name == "Synthetic"):
assert self["attribute_length"].value == 0
# SourceFile_attribute {
# u2 attribute_name_index;
# u4 attribute_length;
# u2 sourcefile_index;
# }
elif (attr_name == "SourceFile"):
assert self["attribute_length"].value == 2
yield CPIndex(self, "sourcefile_index", target_types="Utf8")
# LineNumberTable_attribute {
# u2 attribute_name_index;
# u4 attribute_length;
# u2 line_number_table_length;
# { u2 start_pc;
# u2 line_number;
# } line_number_table[line_number_table_length];
# }
elif (attr_name == "LineNumberTable"):
yield UInt16(self, "line_number_table_length")
if self["line_number_table_length"].value > 0:
yield FieldArray(self, "line_number_table",
LineNumberTableEntry,
self["line_number_table_length"].value)
assert self["attribute_length"].value == \
2 + self["line_number_table_length"].value * 4
# LocalVariableTable_attribute {
# u2 attribute_name_index;
# u4 attribute_length;
# u2 local_variable_table_length;
# { u2 start_pc;
# u2 length;
# u2 name_index;
# u2 descriptor_index;
# u2 index;
# } local_variable_table[local_variable_table_length];
# }
elif (attr_name == "LocalVariableTable"):
yield UInt16(self, "local_variable_table_length")
if self["local_variable_table_length"].value > 0:
yield FieldArray(self, "local_variable_table",
LocalVariableTableEntry,
self["local_variable_table_length"].value)
assert self["attribute_length"].value == \
2 + self["local_variable_table_length"].value * 10
# Deprecated_attribute {
# u2 attribute_name_index;
# u4 attribute_length;
# }
elif (attr_name == "Deprecated"):
assert self["attribute_length"].value == 0
        # Unknown attribute type. They are allowed by the JVM specs, but we
# can't say much about them...
elif self["attribute_length"].value > 0:
yield RawBytes(self, "info", self["attribute_length"].value)
class ExceptionTableEntry(FieldSet):
static_size = 48 + CPIndex.static_size
def createFields(self):
yield textHandler(UInt16(self, "start_pc"), hexadecimal)
yield textHandler(UInt16(self, "end_pc"), hexadecimal)
yield textHandler(UInt16(self, "handler_pc"), hexadecimal)
yield CPIndex(self, "catch_type", target_types="Class")
class InnerClassesEntry(StaticFieldSet):
format = (
(CPIndex, "inner_class_info_index",
{"target_types": "Class", "allow_zero": True}),
(CPIndex, "outer_class_info_index",
{"target_types": "Class", "allow_zero": True}),
(CPIndex, "inner_name_index",
{"target_types": "Utf8", "allow_zero": True}),
# Inner class access flags (16 bits)
(NullBits, "reserved[]", 5),
(Bit, "abstract"),
(Bit, "interface"),
(NullBits, "reserved[]", 3),
(Bit, "super"),
(Bit, "final"),
(Bit, "static"),
(Bit, "protected"),
(Bit, "private"),
(Bit, "public"),
)
class LineNumberTableEntry(StaticFieldSet):
format = (
(UInt16, "start_pc"),
(UInt16, "line_number")
)
class LocalVariableTableEntry(StaticFieldSet):
format = (
(UInt16, "start_pc"),
(UInt16, "length"),
(CPIndex, "name_index", {"target_types": "Utf8"}),
(CPIndex, "descriptor_index", {"target_types": "Utf8",
"target_text_handler": parse_field_descriptor}),
(UInt16, "index")
)
###############################################################################
# ClassFile {
# u4 magic;
# u2 minor_version;
# u2 major_version;
# u2 constant_pool_count;
# cp_info constant_pool[constant_pool_count-1];
# u2 access_flags;
# u2 this_class;
# u2 super_class;
# u2 interfaces_count;
# u2 interfaces[interfaces_count];
# u2 fields_count;
# field_info fields[fields_count];
# u2 methods_count;
# method_info methods[methods_count];
# u2 attributes_count;
# attribute_info attributes[attributes_count];
# }
class JavaCompiledClassFile(Parser):
"""
Root of the .class parser.
"""
endian = BIG_ENDIAN
PARSER_TAGS = {
"id": "java_class",
"category": "program",
"file_ext": ("class",),
"mime": (u"application/java-vm",),
"min_size": (32 + 3*16),
"description": "Compiled Java class"
}
MAGIC = 0xCAFEBABE
KNOWN_VERSIONS = {
"45.3": "JDK 1.1",
"46.0": "JDK 1.2",
"47.0": "JDK 1.3",
"48.0": "JDK 1.4",
"49.0": "JDK 1.5",
"50.0": "JDK 1.6"
}
# Constants go here since they will probably depend on the detected format
# version at some point. Though, if they happen to be really backward
# compatible, they may become module globals.
CONSTANT_TYPES = {
1: "Utf8",
3: "Integer",
4: "Float",
5: "Long",
6: "Double",
7: "Class",
8: "String",
9: "Fieldref",
10: "Methodref",
11: "InterfaceMethodref",
12: "NameAndType"
}
def validate(self):
if self["magic"].value != self.MAGIC:
return "Wrong magic signature!"
version = "%d.%d" % (self["major_version"].value, self["minor_version"].value)
if version not in self.KNOWN_VERSIONS:
return "Unknown version (%s)" % version
return True
def createDescription(self):
version = "%d.%d" % (self["major_version"].value, self["minor_version"].value)
if version in self.KNOWN_VERSIONS:
return "Compiled Java class, %s" % self.KNOWN_VERSIONS[version]
else:
return "Compiled Java class, version %s" % version
def createFields(self):
yield textHandler(UInt32(self, "magic", "Java compiled class signature"),
hexadecimal)
yield UInt16(self, "minor_version", "Class format minor version")
yield UInt16(self, "major_version", "Class format major version")
yield UInt16(self, "constant_pool_count", "Size of the constant pool")
if self["constant_pool_count"].value > 1:
#yield FieldArray(self, "constant_pool", CPInfo,
# (self["constant_pool_count"].value - 1), first_index=1)
# Mmmh... can't use FieldArray actually, because ConstantPool
# requires some specific hacks (skipping some indexes after Long
# and Double entries).
yield ConstantPool(self, "constant_pool",
(self["constant_pool_count"].value))
        # Class access flags (16 bits)
yield NullBits(self, "reserved[]", 5)
yield Bit(self, "abstract")
yield Bit(self, "interface")
yield NullBits(self, "reserved[]", 3)
yield Bit(self, "super")
yield Bit(self, "final")
yield Bit(self, "static")
yield Bit(self, "protected")
yield Bit(self, "private")
yield Bit(self, "public")
yield CPIndex(self, "this_class", "Class name", target_types="Class")
yield CPIndex(self, "super_class", "Super class name", target_types="Class")
yield UInt16(self, "interfaces_count", "Number of implemented interfaces")
if self["interfaces_count"].value > 0:
yield FieldArray(self, "interfaces", CPIndex,
self["interfaces_count"].value, target_types="Class")
yield UInt16(self, "fields_count", "Number of fields")
if self["fields_count"].value > 0:
yield FieldArray(self, "fields", FieldInfo,
self["fields_count"].value)
yield UInt16(self, "methods_count", "Number of methods")
if self["methods_count"].value > 0:
yield FieldArray(self, "methods", MethodInfo,
self["methods_count"].value)
yield UInt16(self, "attributes_count", "Number of attributes")
if self["attributes_count"].value > 0:
yield FieldArray(self, "attributes", AttributeInfo,
self["attributes_count"].value)
# vim: set expandtab tabstop=4 shiftwidth=4 autoindent smartindent:
|
ghjm/ansible
|
refs/heads/devel
|
test/integration/targets/ignore_unreachable/fake_connectors/bad_exec.py
|
35
|
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import ansible.plugins.connection.local as ansible_local
from ansible.errors import AnsibleConnectionFailure
from ansible.utils.display import Display
display = Display()
class Connection(ansible_local.Connection):
def exec_command(self, cmd, in_data=None, sudoable=True):
display.debug('Intercepted call to exec remote command')
raise AnsibleConnectionFailure('BADLOCAL Error: this is supposed to fail')
|
ctsit/redi-dropper-client
|
refs/heads/master
|
app/tests/test_api.py
|
1
|
"""
Goal: Simulate api calls
Authors:
Patrick White <pfwhite9@gmail.com> <pfwhite@ufl.edu>
"""
from __future__ import print_function
from flask import url_for
# `Decoder` (used in __get_file_list_data below) was undefined in the
# original; json.JSONDecoder is assumed here.
from json import JSONDecoder as Decoder
from .base_test_with_data import BaseTestCaseWithData
from redidropper.main import app
from redidropper.main import db
from redidropper.models.user_entity import UserEntity
class TestAPI(BaseTestCaseWithData):
""" This is the class that tests the api """
def __login(self, email):
return self.client.post("/", data={
'email': email,
'password': 'garbagegarbage'
})
def test_save_user_no_login(self):
response = self.client.post("/api/save_user", data={})
self.assertEqual(response._status_code, 302)
def test_save_user_no_admin_login(self):
res_login = self.__login("tech@example.com")
response = self.client.post("/api/save_user", data={})
#TODO: fix the 302 error to be a 403 forbidden error
self.assertEqual(response._status_code, 302)
#self.assertEqual(response._status_code, 403)
def test_save_user(self):
""" Verify that we can save a new user"""
res_login = self.__login("admin@example.com")
#build request
new_user = {
'email': "test@test.com",
'first': "john",
'last': "doe",
'minitial': "f",
'roles': ["admin", "technician"],
'isEdit': False,
}
existing_user = UserEntity.query.filter_by(email=new_user['email'])
        if existing_user.count() == 0:
response = self.client.post("/api/save_user", data=new_user)
self.assertEqual(response._status_code, 200)
created_user = UserEntity.query.filter_by(email=new_user['email'])
self.assertEqual(created_user.count(), 1)
else:
self.fail('user already existed')
print('save user test')
def test_edit_user(self):
""" Verify that we can edit an existing user"""
res_login = self.__login("admin@example.com")
my_user = {
'email': "test@test.com",
'first': "john",
'last': "doe",
'minitial': "f",
'roles': ["admin", "technician"],
'isEdit': False,
'usrId': 3,
}
sres = self.client.post("/api/save_user", data=my_user)
existing_user = UserEntity.query.filter_by(email=my_user['email'])
        if existing_user.count() == 1:
edited_user = my_user
edited_user['first'] = 'bill'
response = self.client.post("/api/edit_user", data=edited_user)
self.assertEqual(response._status_code, 200)
#see if changed
after_edit_user = UserEntity.query.filter_by(email=my_user['email'])
self.assertEqual(after_edit_user.one().first, 'bill')
else:
self.fail('user not existing')
print('edit user test')
def __get_file_list_data(self, response):
d = Decoder()
data = d.decode(response.data)
return data.get('data').get('subject_event_files')
|
tedelhourani/ansible
|
refs/heads/devel
|
lib/ansible/plugins/lookup/keyring.py
|
82
|
# (c) 2016, Samuel Boucher <boucher.samuel.c@gmail.com>
# (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = """
lookup: keyring
author:
- Samuel Boucher <boucher.samuel.c@gmail.com>
version_added: "2.3"
requirements:
- keyring (python library)
short_description: grab secrets from the OS keyring
description:
- Allows you to access data stored in the OS provided keyring/keychain.
"""
EXAMPLES = """
- name: output secrets to screen (BAD IDEA)
debug:
msg: "Password: {{item}}"
with_keyring:
- 'servicename username'
- name: access mysql with password from keyring
mysql_db: login_password={{lookup('keyring','mysql joe')}} login_user=joe
"""
RETURN = """
_raw:
description: secrets stored
"""
HAS_KEYRING = True
from ansible.errors import AnsibleError
try:
import keyring
except ImportError:
HAS_KEYRING = False
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
from ansible.plugins.lookup import LookupBase
class LookupModule(LookupBase):
def run(self, terms, **kwargs):
if not HAS_KEYRING:
raise AnsibleError(u"Can't LOOKUP(keyring): missing required python library 'keyring'")
display.vvvv(u"keyring: %s" % keyring.get_keyring())
ret = []
for term in terms:
            parts = term.split()
            (servicename, username) = (parts[0], parts[1])
display.vvvv(u"username: %s, servicename: %s " % (username, servicename))
password = keyring.get_password(servicename, username)
if password is None:
raise AnsibleError(u"servicename: %s for user %s not found" % (servicename, username))
ret.append(password.rstrip())
return ret
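# Hedged demonstration of the underlying keyring library calls this plugin
# wraps (the service and user names below are made up for illustration);
# real playbooks use lookup('keyring', ...) as shown in EXAMPLES above.
if __name__ == '__main__':
    import keyring as demo_keyring
    demo_keyring.set_password('demo-service', 'demo-user', 's3cret')
    print(demo_keyring.get_password('demo-service', 'demo-user'))  # s3cret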
|
lach76/scancode-toolkit
|
refs/heads/develop
|
tests/cluecode/data/ics/markdown-markdown-extensions/tables.py
|
13
|
Content Cell | Content Cell
Content Cell | Content Cell
Copyright 2009 - [Waylan Limberg](http://achinghead.com)
"""
import markdown
|
Kazade/NeHe-Website
|
refs/heads/master
|
google_appengine/google/appengine/ext/remote_api/throttle.py
|
13
|
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Client-side transfer throttling for use with remote_api_stub.
This module is used to configure rate limiting for programs accessing
AppEngine services through remote_api.
See the Throttle class for more information.
An example with throttling:
---
from google.appengine.ext import db
from google.appengine.ext.remote_api import remote_api_stub
from google.appengine.ext.remote_api import throttle
from myapp import models
import getpass
import threading
def auth_func():
return (raw_input('Username:'), getpass.getpass('Password:'))
remote_api_stub.ConfigureRemoteDatastore('my-app', '/remote_api', auth_func)
full_throttle = throttle.DefaultThrottle(multiplier=1.0)
throttle.ThrottleRemoteDatastore(full_throttle)
# Register any threads that will be using the datastore with the throttler
full_throttle.Register(threading.currentThread())
# Now you can access the remote datastore just as if your code was running on
# App Engine, and you don't need to worry about exceeding quota limits!
houses = models.House.all().fetch(100)
for a_house in houses:
a_house.doors += 1
db.put(houses)
---
This example limits usage to the default free quota levels. The multiplier
kwarg to throttle.DefaultThrottle can be used to scale the throttle levels
higher or lower.
Throttles can also be constructed directly for more control over the limits
for different operations. See the Throttle class and the constants following
it for details.
"""
import logging
import os
import threading
import time
import urllib2
import urlparse
if os.environ.get('APPENGINE_RUNTIME') == 'python27':
from google.appengine.api import apiproxy_stub_map
else:
from google.appengine.api import apiproxy_stub_map
from google.appengine.ext.remote_api import remote_api_stub
from google.appengine.tools import appengine_rpc
logger = logging.getLogger('google.appengine.ext.remote_api.throttle')
MINIMUM_THROTTLE_SLEEP_DURATION = 0.001
class Error(Exception):
"""Base class for errors in this module."""
class ThreadNotRegisteredError(Error):
"""An unregistered thread has accessed the throttled datastore stub."""
class UnknownThrottleNameError(Error):
"""A transfer was added for an unknown throttle name."""
def InterruptibleSleep(sleep_time):
"""Puts thread to sleep, checking this threads exit_flag four times a second.
Args:
sleep_time: Time to sleep.
"""
slept = 0.0
epsilon = .0001
thread = threading.currentThread()
while slept < sleep_time - epsilon:
remaining = sleep_time - slept
this_sleep_time = min(remaining, 0.25)
time.sleep(this_sleep_time)
slept += this_sleep_time
if hasattr(thread, 'exit_flag') and thread.exit_flag:
return
class Throttle(object):
"""A base class for upload rate throttling.
  Transferring a large number of entities too quickly could trigger
quota limits and cause the transfer process to halt. In order to
stay within the application's quota, we throttle the data transfer
to a specified limit (across all transfer threads).
This class tracks a moving average of some aspect of the transfer
rate (bandwidth, records per second, http connections per
second). It keeps two windows of counts of bytes transferred, on a
per-thread basis. One block is the "current" block, and the other is
the "prior" block. It will rotate the counts from current to prior
when ROTATE_PERIOD has passed. Thus, the current block will
represent from 0 seconds to ROTATE_PERIOD seconds of activity
(determined by: time.time() - self.last_rotate). The prior block
will always represent a full ROTATE_PERIOD.
Sleeping is performed just before a transfer of another block, and is
based on the counts transferred *before* the next transfer. It really
  does not matter how much will be transferred, only that, for all the
  data transferred SO FAR, we have interspersed enough pauses to
ensure the aggregate transfer rate is within the specified limit.
These counts are maintained on a per-thread basis, so we do not require
any interlocks around incrementing the counts. There IS an interlock on
the rotation of the counts because we do not want multiple threads to
multiply-rotate the counts.
There are various race conditions in the computation and collection
of these counts. We do not require precise values, but simply to
keep the overall transfer within the bandwidth limits. If a given
pause is a little short, or a little long, then the aggregate delays
will be correct.
"""
ROTATE_PERIOD = 600
def __init__(self,
get_time=time.time,
thread_sleep=InterruptibleSleep,
layout=None):
self.get_time = get_time
self.thread_sleep = thread_sleep
self.start_time = get_time()
self.transferred = {}
self.prior_block = {}
self.totals = {}
self.throttles = {}
self.last_rotate = {}
self.rotate_mutex = {}
if layout:
self.AddThrottles(layout)
def AddThrottle(self, name, limit):
self.throttles[name] = limit
self.transferred[name] = {}
self.prior_block[name] = {}
self.totals[name] = {}
self.last_rotate[name] = self.get_time()
self.rotate_mutex[name] = threading.Lock()
def AddThrottles(self, layout):
for key, value in layout.iteritems():
self.AddThrottle(key, value)
def Register(self, thread):
"""Register this thread with the throttler."""
thread_id = id(thread)
for throttle_name in self.throttles.iterkeys():
self.transferred[throttle_name][thread_id] = 0
self.prior_block[throttle_name][thread_id] = 0
self.totals[throttle_name][thread_id] = 0
def VerifyThrottleName(self, throttle_name):
if throttle_name not in self.throttles:
raise UnknownThrottleNameError('%s is not a registered throttle' %
throttle_name)
def AddTransfer(self, throttle_name, token_count):
"""Add a count to the amount this thread has transferred.
Each time a thread transfers some data, it should call this method to
note the amount sent. The counts may be rotated if sufficient time
has passed since the last rotation.
Args:
throttle_name: The name of the throttle to add to.
token_count: The number to add to the throttle counter.
"""
self.VerifyThrottleName(throttle_name)
transferred = self.transferred[throttle_name]
try:
transferred[id(threading.currentThread())] += token_count
except KeyError:
thread = threading.currentThread()
raise ThreadNotRegisteredError(
'Unregistered thread accessing throttled datastore stub: id = %s\n'
'name = %s' % (id(thread), thread.getName()))
if self.last_rotate[throttle_name] + self.ROTATE_PERIOD < self.get_time():
self._RotateCounts(throttle_name)
def Sleep(self, throttle_name=None):
"""Possibly sleep in order to limit the transfer rate.
Note that we sleep based on *prior* transfers rather than what we
may be about to transfer. The next transfer could put us under/over
and that will be rectified *after* that transfer. Net result is that
the average transfer rate will remain within bounds. Spiky behavior
or uneven rates among the threads could possibly bring the transfer
rate above the requested limit for short durations.
Args:
throttle_name: The name of the throttle to sleep on. If None or
omitted, then sleep on all throttles.
"""
if throttle_name is None:
for throttle_name in self.throttles:
self.Sleep(throttle_name=throttle_name)
return
self.VerifyThrottleName(throttle_name)
thread = threading.currentThread()
while True:
duration = self.get_time() - self.last_rotate[throttle_name]
total = 0
for count in self.prior_block[throttle_name].values():
total += count
if total:
duration += self.ROTATE_PERIOD
for count in self.transferred[throttle_name].values():
total += count
sleep_time = self._SleepTime(total, self.throttles[throttle_name],
duration)
if sleep_time < MINIMUM_THROTTLE_SLEEP_DURATION:
break
logger.debug('[%s] Throttling on %s. Sleeping for %.1f ms '
'(duration=%.1f ms, total=%d)',
thread.getName(), throttle_name,
sleep_time * 1000, duration * 1000, total)
self.thread_sleep(sleep_time)
if thread.exit_flag:
break
self._RotateCounts(throttle_name)
def _SleepTime(self, total, limit, duration):
"""Calculate the time to sleep on a throttle.
Args:
total: The total amount transferred.
limit: The amount per second that is allowed to be sent.
duration: The amount of time taken to send the total.
Returns:
A float for the amount of time to sleep.
"""
if not limit:
return 0.0
return max(0.0, (total / limit) - duration)
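  # Worked example (illustrative): with limit=15 requests/sec and total=60
  # requests recorded, the data "deserves" 60/15 = 4 seconds of wall time;
  # if only duration=2.0 seconds have elapsed, _SleepTime returns
  # max(0.0, 4 - 2.0) = 2.0, the pause that restores the average rate.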
def _RotateCounts(self, throttle_name):
"""Rotate the transfer counters.
If sufficient time has passed, then rotate the counters from active to
the prior-block of counts.
This rotation is interlocked to ensure that multiple threads do not
over-rotate the counts.
Args:
throttle_name: The name of the throttle to rotate.
"""
self.VerifyThrottleName(throttle_name)
self.rotate_mutex[throttle_name].acquire()
try:
next_rotate_time = self.last_rotate[throttle_name] + self.ROTATE_PERIOD
if next_rotate_time >= self.get_time():
return
for name, count in self.transferred[throttle_name].items():
self.prior_block[throttle_name][name] = count
self.transferred[throttle_name][name] = 0
self.totals[throttle_name][name] += count
self.last_rotate[throttle_name] = self.get_time()
finally:
self.rotate_mutex[throttle_name].release()
def TotalTransferred(self, throttle_name):
"""Return the total transferred, and over what period.
Args:
throttle_name: The name of the throttle to total.
Returns:
A tuple of the total count and running time for the given throttle name.
"""
total = 0
for count in self.totals[throttle_name].values():
total += count
for count in self.transferred[throttle_name].values():
total += count
return total, self.get_time() - self.start_time
BANDWIDTH_UP = 'http-bandwidth-up'
BANDWIDTH_DOWN = 'http-bandwidth-down'
REQUESTS = 'http-requests'
HTTPS_BANDWIDTH_UP = 'https-bandwidth-up'
HTTPS_BANDWIDTH_DOWN = 'https-bandwidth-down'
HTTPS_REQUESTS = 'https-requests'
DATASTORE_CALL_COUNT = 'datastore-call-count'
ENTITIES_FETCHED = 'entities-fetched'
ENTITIES_MODIFIED = 'entities-modified'
INDEX_MODIFICATIONS = 'index-modifications'
DEFAULT_LIMITS = {
BANDWIDTH_UP: 100000,
BANDWIDTH_DOWN: 100000,
REQUESTS: 15,
HTTPS_BANDWIDTH_UP: 100000,
HTTPS_BANDWIDTH_DOWN: 100000,
HTTPS_REQUESTS: 15,
DATASTORE_CALL_COUNT: 120,
ENTITIES_FETCHED: 400,
ENTITIES_MODIFIED: 400,
INDEX_MODIFICATIONS: 1600,
}
NO_LIMITS = {
BANDWIDTH_UP: None,
BANDWIDTH_DOWN: None,
REQUESTS: None,
HTTPS_BANDWIDTH_UP: None,
HTTPS_BANDWIDTH_DOWN: None,
HTTPS_REQUESTS: None,
DATASTORE_CALL_COUNT: None,
ENTITIES_FETCHED: None,
ENTITIES_MODIFIED: None,
INDEX_MODIFICATIONS: None,
}
def DefaultThrottle(multiplier=1.0):
"""Return a Throttle instance with multiplier * the quota limits."""
layout = dict([(name, multiplier * limit)
for (name, limit) in DEFAULT_LIMITS.iteritems()])
return Throttle(layout=layout)
class ThrottleHandler(urllib2.BaseHandler):
"""A urllib2 handler for http and https requests that adds to a throttle."""
def __init__(self, throttle):
"""Initialize a ThrottleHandler.
Args:
throttle: A Throttle instance to call for bandwidth and http/https request
throttling.
"""
self.throttle = throttle
def AddRequest(self, throttle_name, req):
"""Add to bandwidth throttle for given request.
Args:
throttle_name: The name of the bandwidth throttle to add to.
req: The request whose size will be added to the throttle.
"""
size = 0
for key, value in req.headers.iteritems():
size += len('%s: %s\n' % (key, value))
for key, value in req.unredirected_hdrs.iteritems():
size += len('%s: %s\n' % (key, value))
(unused_scheme,
unused_host_port, url_path,
unused_query, unused_fragment) = urlparse.urlsplit(req.get_full_url())
size += len('%s %s HTTP/1.1\n' % (req.get_method(), url_path))
data = req.get_data()
if data:
size += len(data)
self.throttle.AddTransfer(throttle_name, size)
def AddResponse(self, throttle_name, res):
"""Add to bandwidth throttle for given response.
Args:
throttle_name: The name of the bandwidth throttle to add to.
res: The response whose size will be added to the throttle.
"""
content = res.read()
def ReturnContent():
return content
res.read = ReturnContent
size = len(content)
headers = res.info()
for key, value in headers.items():
size += len('%s: %s\n' % (key, value))
self.throttle.AddTransfer(throttle_name, size)
def http_request(self, req):
"""Process an HTTP request.
If the throttle is over quota, sleep first. Then add request size to
throttle before returning it to be sent.
Args:
req: A urllib2.Request object.
Returns:
The request passed in.
"""
self.throttle.Sleep(BANDWIDTH_UP)
self.throttle.Sleep(BANDWIDTH_DOWN)
self.AddRequest(BANDWIDTH_UP, req)
return req
def https_request(self, req):
"""Process an HTTPS request.
If the throttle is over quota, sleep first. Then add request size to
throttle before returning it to be sent.
Args:
req: A urllib2.Request object.
Returns:
The request passed in.
"""
self.throttle.Sleep(HTTPS_BANDWIDTH_UP)
self.throttle.Sleep(HTTPS_BANDWIDTH_DOWN)
self.AddRequest(HTTPS_BANDWIDTH_UP, req)
return req
def http_response(self, unused_req, res):
"""Process an HTTP response.
The size of the response is added to the bandwidth throttle and the request
throttle is incremented by one.
Args:
unused_req: The urllib2 request for this response.
res: A urllib2 response object.
Returns:
The response passed in.
"""
self.AddResponse(BANDWIDTH_DOWN, res)
self.throttle.AddTransfer(REQUESTS, 1)
return res
def https_response(self, unused_req, res):
"""Process an HTTPS response.
The size of the response is added to the bandwidth throttle and the request
throttle is incremented by one.
Args:
unused_req: The urllib2 request for this response.
res: A urllib2 response object.
Returns:
The response passed in.
"""
self.AddResponse(HTTPS_BANDWIDTH_DOWN, res)
self.throttle.AddTransfer(HTTPS_REQUESTS, 1)
return res
class ThrottledHttpRpcServer(appengine_rpc.HttpRpcServer):
"""Provides a simplified RPC-style interface for HTTP requests.
This RPC server uses a Throttle to prevent exceeding quotas.
"""
def __init__(self, throttle, *args, **kwargs):
"""Initialize a ThrottledHttpRpcServer.
Also sets request_manager.rpc_server to the ThrottledHttpRpcServer instance.
Args:
      throttle: A Throttle instance.
args: Positional arguments to pass through to
appengine_rpc.HttpRpcServer.__init__
kwargs: Keyword arguments to pass through to
appengine_rpc.HttpRpcServer.__init__
"""
self.throttle = throttle
appengine_rpc.HttpRpcServer.__init__(self, *args, **kwargs)
def _GetOpener(self):
"""Returns an OpenerDirector that supports cookies and ignores redirects.
Returns:
A urllib2.OpenerDirector object.
"""
opener = appengine_rpc.HttpRpcServer._GetOpener(self)
opener.add_handler(ThrottleHandler(self.throttle))
return opener
def ThrottledHttpRpcServerFactory(throttle, throttle_class=None):
"""Create a factory to produce ThrottledHttpRpcServer for a given throttle.
Args:
throttle: A Throttle instance to use for the ThrottledHttpRpcServer.
throttle_class: A class to use instead of the default
ThrottledHttpRpcServer.
Returns:
A factory to produce a ThrottledHttpRpcServer.
"""
def MakeRpcServer(*args, **kwargs):
"""Factory to produce a ThrottledHttpRpcServer.
Args:
args: Positional args to pass to ThrottledHttpRpcServer.
kwargs: Keyword args to pass to ThrottledHttpRpcServer.
Returns:
A ThrottledHttpRpcServer instance.
"""
kwargs['account_type'] = 'HOSTED_OR_GOOGLE'
kwargs['save_cookies'] = True
if throttle_class:
rpc_server = throttle_class(throttle, *args, **kwargs)
else:
rpc_server = ThrottledHttpRpcServer(throttle, *args, **kwargs)
return rpc_server
return MakeRpcServer
class Throttler(object):
def PrehookHandler(self, service, call, request, response):
handler = getattr(self, '_Prehook_' + call, None)
if handler:
handler(request, response)
def PosthookHandler(self, service, call, request, response):
handler = getattr(self, '_Posthook_' + call, None)
if handler:
handler(request, response)
def SleepHandler(*throttle_names):
def SleepOnThrottles(self, request, response):
if throttle_names:
for throttle_name in throttle_names:
self._DatastoreThrottler__throttle.Sleep(throttle_name)
else:
self._DatastoreThrottler__throttle.Sleep()
return SleepOnThrottles
class DatastoreThrottler(Throttler):
def __init__(self, throttle):
Throttler.__init__(self)
self.__throttle = throttle
def AddCost(self, cost_proto):
"""Add costs from the Cost protobuf."""
self.__throttle.AddTransfer(INDEX_MODIFICATIONS, cost_proto.index_writes())
self.__throttle.AddTransfer(ENTITIES_MODIFIED, cost_proto.entity_writes())
self.__throttle.AddTransfer(BANDWIDTH_UP, cost_proto.entity_write_bytes())
_Prehook_Put = SleepHandler(ENTITIES_MODIFIED,
INDEX_MODIFICATIONS,
BANDWIDTH_UP)
def _Posthook_Put(self, request, response):
self.AddCost(response.cost())
_Prehook_Get = SleepHandler(ENTITIES_FETCHED)
def _Posthook_Get(self, request, response):
self.__throttle.AddTransfer(ENTITIES_FETCHED, response.entity_size())
_Prehook_RunQuery = SleepHandler(ENTITIES_FETCHED)
def _Posthook_RunQuery(self, request, response):
if not response.keys_only():
self.__throttle.AddTransfer(ENTITIES_FETCHED, response.result_size())
_Prehook_Next = SleepHandler(ENTITIES_FETCHED)
def _Posthook_Next(self, request, response):
if not response.keys_only():
self.__throttle.AddTransfer(ENTITIES_FETCHED, response.result_size())
_Prehook_Delete = SleepHandler(ENTITIES_MODIFIED, INDEX_MODIFICATIONS)
def _Posthook_Delete(self, request, response):
self.AddCost(response.cost())
_Prehook_Commit = SleepHandler()
def _Posthook_Commit(self, request, response):
self.AddCost(response.cost())
def ThrottleRemoteDatastore(throttle, remote_datastore_stub=None):
"""Install the given throttle for the remote datastore stub.
Args:
throttle: A Throttle instance to limit datastore access rates
    remote_datastore_stub: The datastore stub instance to throttle, for
testing purposes.
"""
if not remote_datastore_stub:
remote_datastore_stub = apiproxy_stub_map.apiproxy.GetStub('datastore_v3')
if not isinstance(remote_datastore_stub, remote_api_stub.RemoteDatastoreStub):
raise remote_api_stub.ConfigurationError('remote_api is not configured.')
throttler = DatastoreThrottler(throttle)
remote_datastore_stub._PreHookHandler = throttler.PrehookHandler
remote_datastore_stub._PostHookHandler = throttler.PosthookHandler
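# Minimal self-contained sketch (an illustration, not part of the module):
# driving a Throttle directly with a custom layout, as the class docstring
# describes. The throttle names reuse the module-level constants above.
if __name__ == '__main__':
  demo_throttle = Throttle(layout={REQUESTS: 5, BANDWIDTH_UP: 50000})
  demo_throttle.Register(threading.currentThread())
  demo_throttle.AddTransfer(REQUESTS, 3)
  total, elapsed = demo_throttle.TotalTransferred(REQUESTS)
  print 'Recorded %d requests over %.3f seconds.' % (total, elapsed)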
|
sepehr125/pybrain
|
refs/heads/master
|
pybrain/rl/environments/twoplayergames/pente.py
|
25
|
__author__ = 'Tom Schaul, tom@idsia.ch'
from pybrain.rl.environments.twoplayergames.gomoku import GomokuGame
class PenteGame(GomokuGame):
""" The game of Pente.
The rules are similar to Go-Moku, except that it is now possible to capture
stones, in pairs, by putting stones at both ends of a pair of the opponent.
The game is won by the first player who either has 5 connected stones, or
has captured 5 pairs.
"""
def reset(self):
GomokuGame.reset(self)
self.pairsTaken = {self.BLACK: 0, self.WHITE: 0}
center = (self.size[0] // 2, self.size[1] // 2)
self._setStone(-self.startcolor, center)
self.movesDone += 1
def getKilling(self, c):
""" return all legal positions for a color that immediately kill the opponent. """
res = GomokuGame.getKilling(self, c)
for p in self.getLegals(c):
k = self._killsWhich(c, p)
            if self.pairsTaken[c] + len(k) // 2 >= 5:
res.append(p)
return res
def _killsWhich(self, c, pos):
""" placing a stone of color c at pos would kill which enemy stones? """
res = []
for dir in [(0, 1), (1, 0), (1, 1), (1, -1)]:
for d in [-1, 1]:
killcands = []
for i in [1, 2, 3]:
next = (pos[0] + dir[0] * i * d, pos[1] + dir[1] * i * d)
if (next[0] < 0 or next[0] >= self.size[0]
or next[1] < 0 or next[1] >= self.size[1]):
break
if i == 3 and self.b[next] == c:
res += killcands
break
if i != 3 and self.b[next] != -c:
break
killcands.append(next)
return res
def doMove(self, c, pos):
""" the action is a (color, position) tuple, for the next stone to move.
returns True if the move was legal. """
self.movesDone += 1
if not self.isLegal(c, pos):
return False
elif self._fiveRow(c, pos):
self.winner = c
self.b[pos] = 'x'
return True
else:
tokill = self._killsWhich(c, pos)
            if self.pairsTaken[c] + len(tokill) // 2 >= 5:
self.winner = c
self.b[pos] = 'x'
return True
self._setStone(c, pos, tokill)
            if self.movesDone == (self.size[0] * self.size[1]
                                  + 2 * (self.pairsTaken[self.BLACK] + self.pairsTaken[self.WHITE])):
                # DRAW
                self.winner = self.DRAW
            # A legal, non-winning move also succeeds; the original fell
            # through and returned None here, contradicting the docstring.
            return True
def _setStone(self, c, pos, tokill=None):
""" set stone, and potentially kill stones. """
        if tokill is None:
tokill = self._killsWhich(c, pos)
GomokuGame._setStone(self, c, pos)
for p in tokill:
self.b[p] = self.EMPTY
self.pairsTaken[c] += len(tokill) // 2
def __str__(self):
s = GomokuGame.__str__(self)
s += 'Black captured:' + str(self.pairsTaken[self.BLACK]) + ', white captured:' + str(self.pairsTaken[self.WHITE]) + '.'
return s
|
davidhax0r/Twoooly-For-Twitter
|
refs/heads/master
|
lib/python2.7/site-packages/pip/_vendor/requests/packages/urllib3/response.py
|
316
|
# urllib3/response.py
# Copyright 2008-2013 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
#
# This module is part of urllib3 and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
import logging
import zlib
import io
from .exceptions import DecodeError
from .packages.six import string_types as basestring, binary_type
from .util import is_fp_closed
log = logging.getLogger(__name__)
class DeflateDecoder(object):
def __init__(self):
self._first_try = True
self._data = binary_type()
self._obj = zlib.decompressobj()
def __getattr__(self, name):
return getattr(self._obj, name)
def decompress(self, data):
if not self._first_try:
return self._obj.decompress(data)
self._data += data
try:
return self._obj.decompress(data)
except zlib.error:
self._first_try = False
self._obj = zlib.decompressobj(-zlib.MAX_WBITS)
try:
return self.decompress(self._data)
finally:
self._data = None
def _get_decoder(mode):
if mode == 'gzip':
return zlib.decompressobj(16 + zlib.MAX_WBITS)
return DeflateDecoder()
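# Side note (illustrative, not part of urllib3): DeflateDecoder first tries
# RFC 1950 zlib-framed input and, on failure, restarts with raw deflate
# (negative window bits), which misconfigured servers send for
# "Content-Encoding: deflate". Both framings decode to the same bytes:
#   DeflateDecoder().decompress(zlib.compress(b'hello'))          # framed
#   raw = zlib.compressobj(9, zlib.DEFLATED, -zlib.MAX_WBITS)
#   DeflateDecoder().decompress(raw.compress(b'hello') + raw.flush())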
class HTTPResponse(io.IOBase):
"""
HTTP Response container.
    Backwards-compatible with httplib's HTTPResponse but the response ``body`` is
loaded and decoded on-demand when the ``data`` property is accessed.
Extra parameters for behaviour not present in httplib.HTTPResponse:
:param preload_content:
If True, the response's body will be preloaded during construction.
:param decode_content:
        If True, the body will be decoded according to the response's
        content-encoding header (e.g. 'gzip' or 'deflate'); if False, the
        raw, still-encoded data is returned instead.
:param original_response:
When this HTTPResponse wrapper is generated from an httplib.HTTPResponse
object, it's convenient to include the original for debug purposes. It's
otherwise unused.
"""
CONTENT_DECODERS = ['gzip', 'deflate']
REDIRECT_STATUSES = [301, 302, 303, 307, 308]
def __init__(self, body='', headers=None, status=0, version=0, reason=None,
strict=0, preload_content=True, decode_content=True,
original_response=None, pool=None, connection=None):
self.headers = headers or {}
self.status = status
self.version = version
self.reason = reason
self.strict = strict
self.decode_content = decode_content
self._decoder = None
self._body = body if body and isinstance(body, basestring) else None
self._fp = None
self._original_response = original_response
self._fp_bytes_read = 0
self._pool = pool
self._connection = connection
if hasattr(body, 'read'):
self._fp = body
if preload_content and not self._body:
self._body = self.read(decode_content=decode_content)
def get_redirect_location(self):
"""
Should we redirect and where to?
:returns: Truthy redirect location string if we got a redirect status
code and valid location. ``None`` if redirect status and no
location. ``False`` if not a redirect status code.
"""
if self.status in self.REDIRECT_STATUSES:
return self.headers.get('location')
return False
def release_conn(self):
if not self._pool or not self._connection:
return
self._pool._put_conn(self._connection)
self._connection = None
@property
def data(self):
        # For backwards-compat with urllib3 0.4 and earlier.
if self._body:
return self._body
if self._fp:
return self.read(cache_content=True)
def tell(self):
"""
        Obtain the number of bytes pulled over the wire so far. May differ
        from the amount of content returned by :meth:`HTTPResponse.read` if
        bytes are encoded on the wire (e.g., compressed).
"""
return self._fp_bytes_read
def read(self, amt=None, decode_content=None, cache_content=False):
"""
Similar to :meth:`httplib.HTTPResponse.read`, but with two additional
parameters: ``decode_content`` and ``cache_content``.
:param amt:
How much of the content to read. If specified, caching is skipped
because it doesn't make sense to cache partial content as the full
response.
:param decode_content:
If True, will attempt to decode the body based on the
'content-encoding' header.
:param cache_content:
If True, will save the returned data such that the same result is
            returned regardless of the state of the underlying file object. This
is useful if you want the ``.data`` property to continue working
after having ``.read()`` the file object. (Overridden if ``amt`` is
set.)
"""
# Note: content-encoding value should be case-insensitive, per RFC 2616
# Section 3.5
content_encoding = self.headers.get('content-encoding', '').lower()
if self._decoder is None:
if content_encoding in self.CONTENT_DECODERS:
self._decoder = _get_decoder(content_encoding)
if decode_content is None:
decode_content = self.decode_content
if self._fp is None:
return
flush_decoder = False
try:
if amt is None:
# cStringIO doesn't like amt=None
data = self._fp.read()
flush_decoder = True
else:
cache_content = False
data = self._fp.read(amt)
if amt != 0 and not data: # Platform-specific: Buggy versions of Python.
# Close the connection when no data is returned
#
# This is redundant to what httplib/http.client _should_
# already do. However, versions of python released before
# December 15, 2012 (http://bugs.python.org/issue16298) do not
# properly close the connection in all cases. There is no harm
# in redundantly calling close.
self._fp.close()
flush_decoder = True
self._fp_bytes_read += len(data)
try:
if decode_content and self._decoder:
data = self._decoder.decompress(data)
except (IOError, zlib.error) as e:
raise DecodeError(
"Received response with content-encoding: %s, but "
"failed to decode it." % content_encoding,
e)
if flush_decoder and decode_content and self._decoder:
buf = self._decoder.decompress(binary_type())
data += buf + self._decoder.flush()
if cache_content:
self._body = data
return data
finally:
if self._original_response and self._original_response.isclosed():
self.release_conn()
def stream(self, amt=2**16, decode_content=None):
"""
A generator wrapper for the read() method. A call will block until
``amt`` bytes have been read from the connection or until the
connection is closed.
:param amt:
            How much of the content to read. The generator will yield up to
            this much data per iteration, but may return less. This is particularly
likely when using compressed data. However, the empty string will
never be returned.
:param decode_content:
If True, will attempt to decode the body based on the
'content-encoding' header.
"""
while not is_fp_closed(self._fp):
data = self.read(amt=amt, decode_content=decode_content)
if data:
yield data
@classmethod
def from_httplib(ResponseCls, r, **response_kw):
"""
Given an :class:`httplib.HTTPResponse` instance ``r``, return a
corresponding :class:`urllib3.response.HTTPResponse` object.
Remaining parameters are passed to the HTTPResponse constructor, along
with ``original_response=r``.
"""
# Normalize headers between different versions of Python
headers = {}
for k, v in r.getheaders():
# Python 3: Header keys are returned capitalised
k = k.lower()
has_value = headers.get(k)
if has_value: # Python 3: Repeating header keys are unmerged.
v = ', '.join([has_value, v])
headers[k] = v
# HTTPResponse objects in Python 3 don't have a .strict attribute
strict = getattr(r, 'strict', 0)
return ResponseCls(body=r,
headers=headers,
status=r.status,
version=r.version,
reason=r.reason,
strict=strict,
original_response=r,
**response_kw)
# Backwards-compatibility methods for httplib.HTTPResponse
def getheaders(self):
return self.headers
def getheader(self, name, default=None):
return self.headers.get(name, default)
# Overrides from io.IOBase
def close(self):
if not self.closed:
self._fp.close()
@property
def closed(self):
if self._fp is None:
return True
elif hasattr(self._fp, 'closed'):
return self._fp.closed
elif hasattr(self._fp, 'isclosed'): # Python 2
return self._fp.isclosed()
else:
return True
def fileno(self):
if self._fp is None:
raise IOError("HTTPResponse has no file to get a fileno from")
elif hasattr(self._fp, "fileno"):
return self._fp.fileno()
else:
raise IOError("The file-like object this HTTPResponse is wrapped "
"around has no file descriptor")
def flush(self):
if self._fp is not None and hasattr(self._fp, 'flush'):
return self._fp.flush()
def readable(self):
return True
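# Usage sketch (hedged; a BytesIO object stands in for the socket-backed
# file object urllib3 normally wraps, so this runs without any network):
if __name__ == '__main__':
    from io import BytesIO
    demo = HTTPResponse(body=BytesIO(b'hello world'), status=200,
                        preload_content=False)
    for chunk in demo.stream(amt=5):
        print(chunk)  # b'hello', then b' worl', then b'd'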
|
chamaken/cpylmnl
|
refs/heads/master
|
tests/cpylmnl/linux/netlink/__init__.py
|
12133432
| |
rsouza/pypln.web
|
refs/heads/develop
|
pypln/web/backend_adapter/models.py
|
12133432
| |
ocadotechnology/django-tastypie
|
refs/heads/master
|
tests/validation/api/__init__.py
|
12133432
| |
thnee/ansible
|
refs/heads/devel
|
lib/ansible/module_utils/oracle/oci_utils.py
|
29
|
# Copyright (c) 2017, 2018, 2019 Oracle and/or its affiliates.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import
import logging
import logging.config
import os
import tempfile
from datetime import datetime
from operator import eq
import time
try:
import yaml
import oci
from oci.constants import HEADER_NEXT_PAGE
from oci.exceptions import (
InvalidConfig,
InvalidPrivateKey,
MissingPrivateKeyPassphrase,
ConfigFileNotFound,
ServiceError,
MaximumWaitTimeExceeded,
)
from oci.identity.identity_client import IdentityClient
from oci.object_storage.models import CreateBucketDetails
from oci.object_storage.models import UpdateBucketDetails
from oci.retry import RetryStrategyBuilder
from oci.util import to_dict, Sentinel
HAS_OCI_PY_SDK = True
except ImportError:
HAS_OCI_PY_SDK = False
from ansible.module_utils._text import to_bytes
from ansible.module_utils.six import iteritems
__version__ = "1.6.0-dev"
MAX_WAIT_TIMEOUT_IN_SECONDS = 1200
# If a resource is in one of these states it would be considered inactive
DEAD_STATES = [
"TERMINATING",
"TERMINATED",
"FAULTY",
"FAILED",
"DELETING",
"DELETED",
"UNKNOWN_ENUM_VALUE",
"DETACHING",
"DETACHED",
]
# If a resource is in one of these states it would be considered available
DEFAULT_READY_STATES = [
"AVAILABLE",
"ACTIVE",
"RUNNING",
"PROVISIONED",
"ATTACHED",
"ASSIGNED",
"SUCCEEDED",
"PENDING_PROVIDER",
]
# If a resource is in one of these states, it would be considered deleted
DEFAULT_TERMINATED_STATES = ["TERMINATED", "DETACHED", "DELETED"]
def get_common_arg_spec(supports_create=False, supports_wait=False):
"""
Return the common set of module arguments for all OCI cloud modules.
:param supports_create: Variable to decide whether to add options related to idempotency of create operation.
:param supports_wait: Variable to decide whether to add options related to waiting for completion.
:return: A dict with applicable module options.
"""
# Note: This method is used by most OCI ansible resource modules during initialization. When making changes to this
# method, ensure that no `oci` python sdk dependencies are introduced in this method. This ensures that the modules
# can check for absence of OCI Python SDK and fail with an appropriate message. Introducing an OCI dependency in
# this method would break that error handling logic.
common_args = dict(
config_file_location=dict(type="str"),
config_profile_name=dict(type="str", default="DEFAULT"),
api_user=dict(type="str"),
api_user_fingerprint=dict(type="str", no_log=True),
api_user_key_file=dict(type="str"),
api_user_key_pass_phrase=dict(type="str", no_log=True),
auth_type=dict(
type="str",
required=False,
choices=["api_key", "instance_principal"],
default="api_key",
),
tenancy=dict(type="str"),
region=dict(type="str"),
)
if supports_create:
common_args.update(
key_by=dict(type="list"),
force_create=dict(type="bool", default=False),
)
if supports_wait:
common_args.update(
wait=dict(type="bool", default=True),
wait_timeout=dict(
type="int", default=MAX_WAIT_TIMEOUT_IN_SECONDS
),
wait_until=dict(type="str"),
)
return common_args
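# Hedged illustration of how a resource module would typically consume the
# spec above (the module-specific option names here are made up):
#   module_args = get_common_arg_spec(supports_create=True, supports_wait=True)
#   module_args.update(display_name=dict(type="str"),
#                      compartment_id=dict(type="str", required=True))
#   module = AnsibleModule(argument_spec=module_args)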
def get_facts_module_arg_spec(filter_by_name=False):
# Note: This method is used by most OCI ansible fact modules during initialization. When making changes to this
# method, ensure that no `oci` python sdk dependencies are introduced in this method. This ensures that the modules
# can check for absence of OCI Python SDK and fail with an appropriate message. Introducing an OCI dependency in
# this method would break that error handling logic.
facts_module_arg_spec = get_common_arg_spec()
if filter_by_name:
facts_module_arg_spec.update(name=dict(type="str"))
else:
facts_module_arg_spec.update(display_name=dict(type="str"))
return facts_module_arg_spec
def get_oci_config(module, service_client_class=None):
"""Return the OCI configuration to use for all OCI API calls. The effective OCI configuration is derived by merging
any overrides specified for configuration attributes through Ansible module options or environment variables. The
order of precedence for deriving the effective configuration dict is:
1. If a config file is provided, use that to setup the initial config dict.
2. If a config profile is specified, use that config profile to setup the config dict.
3. For each authentication attribute, check if an override is provided either through
a. Ansible Module option
b. Environment variable
and override the value in the config dict in that order."""
config = {}
config_file = module.params.get("config_file_location")
_debug("Config file through module options - {0} ".format(config_file))
if not config_file:
if "OCI_CONFIG_FILE" in os.environ:
config_file = os.environ["OCI_CONFIG_FILE"]
_debug(
"Config file through OCI_CONFIG_FILE environment variable - {0}".format(
config_file
)
)
else:
config_file = "~/.oci/config"
_debug("Config file (fallback) - {0} ".format(config_file))
config_profile = module.params.get("config_profile_name")
if not config_profile:
if "OCI_CONFIG_PROFILE" in os.environ:
config_profile = os.environ["OCI_CONFIG_PROFILE"]
else:
config_profile = "DEFAULT"
try:
config = oci.config.from_file(
file_location=config_file, profile_name=config_profile
)
except (
ConfigFileNotFound,
InvalidConfig,
InvalidPrivateKey,
MissingPrivateKeyPassphrase,
) as ex:
if not _is_instance_principal_auth(module):
# When auth_type is not instance_principal, config file is required
module.fail_json(msg=str(ex))
else:
_debug(
"Ignore {0} as the auth_type is set to instance_principal".format(
str(ex)
)
)
# if instance_principal auth is used, an empty 'config' map is used below.
config["additional_user_agent"] = "Oracle-Ansible/{0}".format(__version__)
# Merge any overrides through other IAM options
_merge_auth_option(
config,
module,
module_option_name="api_user",
env_var_name="OCI_USER_ID",
config_attr_name="user",
)
_merge_auth_option(
config,
module,
module_option_name="api_user_fingerprint",
env_var_name="OCI_USER_FINGERPRINT",
config_attr_name="fingerprint",
)
_merge_auth_option(
config,
module,
module_option_name="api_user_key_file",
env_var_name="OCI_USER_KEY_FILE",
config_attr_name="key_file",
)
_merge_auth_option(
config,
module,
module_option_name="api_user_key_pass_phrase",
env_var_name="OCI_USER_KEY_PASS_PHRASE",
config_attr_name="pass_phrase",
)
_merge_auth_option(
config,
module,
module_option_name="tenancy",
env_var_name="OCI_TENANCY",
config_attr_name="tenancy",
)
_merge_auth_option(
config,
module,
module_option_name="region",
env_var_name="OCI_REGION",
config_attr_name="region",
)
# Redirect calls to home region for IAM service.
do_not_redirect = module.params.get(
"do_not_redirect_to_home_region", False
) or os.environ.get("OCI_IDENTITY_DO_NOT_REDIRECT_TO_HOME_REGION")
if service_client_class == IdentityClient and not do_not_redirect:
_debug("Region passed for module invocation - {0} ".format(config["region"]))
identity_client = IdentityClient(config)
region_subscriptions = identity_client.list_region_subscriptions(
config["tenancy"]
).data
# Replace the region in the config with the home region.
[config["region"]] = [
rs.region_name for rs in region_subscriptions if rs.is_home_region is True
]
_debug(
"Setting region in the config to home region - {0} ".format(
config["region"]
)
)
return config
def create_service_client(module, service_client_class):
"""
Creates a service client using the common module options provided by the user.
:param module: An AnsibleModule that represents user provided options for a Task
:param service_client_class: A class that represents a client to an OCI Service
:return: A fully configured client
"""
config = get_oci_config(module, service_client_class)
kwargs = {}
if _is_instance_principal_auth(module):
try:
signer = oci.auth.signers.InstancePrincipalsSecurityTokenSigner()
except Exception as ex:
message = (
"Failed retrieving certificates from localhost. Instance principal based authentication is only"
"possible from within OCI compute instances. Exception: {0}".format(
str(ex)
)
)
module.fail_json(msg=message)
kwargs["signer"] = signer
# XXX: Validate configuration -- this may be redundant, as all Client constructors perform a validation
try:
oci.config.validate_config(config, **kwargs)
except oci.exceptions.InvalidConfig as ic:
module.fail_json(
msg="Invalid OCI configuration. Exception: {0}".format(str(ic))
)
# Create service client class with the signer
client = service_client_class(config, **kwargs)
return client
def _is_instance_principal_auth(module):
# check if auth type is overridden via module params
instance_principal_auth = (
"auth_type" in module.params
and module.params["auth_type"] == "instance_principal"
)
if not instance_principal_auth:
instance_principal_auth = (
"OCI_ANSIBLE_AUTH_TYPE" in os.environ
and os.environ["OCI_ANSIBLE_AUTH_TYPE"] == "instance_principal"
)
return instance_principal_auth
def _merge_auth_option(
config, module, module_option_name, env_var_name, config_attr_name
):
"""Merge the values for an authentication attribute from ansible module options and
environment variables with the values specified in a configuration file"""
_debug("Merging {0}".format(module_option_name))
auth_attribute = module.params.get(module_option_name)
_debug(
"\t Ansible module option {0} = {1}".format(module_option_name, auth_attribute)
)
if not auth_attribute:
if env_var_name in os.environ:
auth_attribute = os.environ[env_var_name]
_debug(
"\t Environment variable {0} = {1}".format(env_var_name, auth_attribute)
)
# An authentication attribute has been provided through an env-variable or an ansible
# option and must override the corresponding attribute's value specified in the
# config file [profile].
if auth_attribute:
_debug(
"Updating config attribute {0} -> {1} ".format(
config_attr_name, auth_attribute
)
)
config.update({config_attr_name: auth_attribute})
def bucket_details_factory(bucket_details_type, module):
bucket_details = None
if bucket_details_type == "create":
bucket_details = CreateBucketDetails()
elif bucket_details_type == "update":
bucket_details = UpdateBucketDetails()
bucket_details.compartment_id = module.params["compartment_id"]
bucket_details.name = module.params["name"]
bucket_details.public_access_type = module.params["public_access_type"]
bucket_details.metadata = module.params["metadata"]
return bucket_details
def filter_resources(all_resources, filter_params):
if not filter_params:
return all_resources
filtered_resources = []
filtered_resources.extend(
[
resource
for resource in all_resources
for key, value in filter_params.items()
if getattr(resource, key) == value
]
)
return filtered_resources
def list_all_resources(target_fn, **kwargs):
"""
Return all resources after paging through all results returned by target_fn. If a `display_name` or `name` is
provided as a kwarg, then only resources matching the specified name are returned.
:param target_fn: The target OCI SDK paged function to call
:param kwargs: All arguments that the OCI SDK paged function expects
:return: List of all objects returned by target_fn
:raises ServiceError: When the Service returned an Error response
:raises MaximumWaitTimeExceededError: When maximum wait time is exceeded while invoking target_fn
"""
filter_params = None
try:
response = call_with_backoff(target_fn, **kwargs)
except ValueError as ex:
if "unknown kwargs" in str(ex):
if "display_name" in kwargs:
if kwargs["display_name"]:
filter_params = {"display_name": kwargs["display_name"]}
del kwargs["display_name"]
elif "name" in kwargs:
if kwargs["name"]:
filter_params = {"name": kwargs["name"]}
del kwargs["name"]
response = call_with_backoff(target_fn, **kwargs)
existing_resources = response.data
while response.has_next_page:
kwargs.update(page=response.headers.get(HEADER_NEXT_PAGE))
response = call_with_backoff(target_fn, **kwargs)
existing_resources += response.data
# If the underlying SDK Service list* method doesn't support filtering by name or display_name, filter the resources
# and return the matching list of resources
return filter_resources(existing_resources, filter_params)
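# Typical call shape (illustrative; client construction is elided and the
# compartment OCID is a placeholder):
#   instances = list_all_resources(compute_client.list_instances,
#                                  compartment_id="ocid1.compartment.oc1..xxx",
#                                  display_name="my-instance")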
def _debug(s):
get_logger("oci_utils").debug(s)
def get_logger(module_name):
oci_logging = setup_logging()
return oci_logging.getLogger(module_name)
def setup_logging(
default_level="INFO",
):
"""Setup logging configuration"""
env_log_path = "LOG_PATH"
env_log_level = "LOG_LEVEL"
default_log_path = tempfile.gettempdir()
log_path = os.getenv(env_log_path, default_log_path)
log_level_str = os.getenv(env_log_level, default_level)
log_level = logging.getLevelName(log_level_str)
log_file_path = os.path.join(log_path, "oci_ansible_module.log")
logging.basicConfig(filename=log_file_path, filemode="a", level=log_level)
return logging
def check_and_update_attributes(
target_instance, attr_name, input_value, existing_value, changed
):
"""
    This function checks the difference between two resource attributes of literal types and sets the attribute
value in the target instance type holding the attribute.
:param target_instance: The instance which contains the attribute whose values to be compared
:param attr_name: Name of the attribute whose value required to be compared
:param input_value: The value of the attribute provided by user
:param existing_value: The value of the attribute in the existing resource
:param changed: Flag to indicate whether there is any difference between the values
:return: Returns a boolean value indicating whether there is any difference between the values
"""
if input_value is not None and not eq(input_value, existing_value):
changed = True
target_instance.__setattr__(attr_name, input_value)
else:
target_instance.__setattr__(attr_name, existing_value)
return changed
def check_and_update_resource(
resource_type,
get_fn,
kwargs_get,
update_fn,
primitive_params_update,
kwargs_non_primitive_update,
module,
update_attributes,
client=None,
sub_attributes_of_update_model=None,
wait_applicable=True,
states=None,
):
"""
This function handles update operation on a resource. It checks whether update is required and accordingly returns
the resource and the changed status.
:param wait_applicable: Indicates if the resource support wait
:param client: The resource Client class to use to perform the wait checks. This param must be specified if
wait_applicable is True
:param resource_type: The type of the resource. e.g. "private_ip"
:param get_fn: Function used to get the resource. e.g. virtual_network_client.get_private_ip
:param kwargs_get: Dictionary containing the arguments to be used to call get function.
e.g. {"private_ip_id": module.params["private_ip_id"]}
:param update_fn: Function used to update the resource. e.g virtual_network_client.update_private_ip
:param primitive_params_update: List of primitive parameters used for update function. e.g. ['private_ip_id']
:param kwargs_non_primitive_update: Dictionary containing the non-primitive arguments to be used to call get
function with key as the non-primitive argument type & value as the name of the non-primitive argument to be passed
to the update function. e.g. {UpdatePrivateIpDetails: "update_private_ip_details"}
:param module: Instance of AnsibleModule
:param update_attributes: Attributes in update model.
:param states: List of lifecycle states to watch for while waiting after create_fn is called.
e.g. [module.params['wait_until'], "FAULTY"]
:param sub_attributes_of_update_model: Dictionary of non-primitive sub-attributes of update model. for example,
{'services': [ServiceIdRequestDetails()]} as in UpdateServiceGatewayDetails.
:return: Returns a dictionary containing the "changed" status and the resource.
"""
try:
result = dict(changed=False)
attributes_to_update, resource = get_attr_to_update(
get_fn, kwargs_get, module, update_attributes
)
if attributes_to_update:
kwargs_update = get_kwargs_update(
attributes_to_update,
kwargs_non_primitive_update,
module,
primitive_params_update,
sub_attributes_of_update_model,
)
resource = call_with_backoff(update_fn, **kwargs_update).data
if wait_applicable:
if client is None:
module.fail_json(
msg="wait_applicable is True, but client is not specified."
)
resource = wait_for_resource_lifecycle_state(
client, module, True, kwargs_get, get_fn, None, resource, states
)
result["changed"] = True
result[resource_type] = to_dict(resource)
return result
except ServiceError as ex:
module.fail_json(msg=ex.message)
def get_kwargs_update(
attributes_to_update,
kwargs_non_primitive_update,
module,
primitive_params_update,
sub_attributes_of_update_model=None,
):
kwargs_update = dict()
for param in primitive_params_update:
kwargs_update[param] = module.params[param]
for param in kwargs_non_primitive_update:
update_object = param()
for key in update_object.attribute_map:
if key in attributes_to_update:
if (
sub_attributes_of_update_model
and key in sub_attributes_of_update_model
):
setattr(update_object, key, sub_attributes_of_update_model[key])
else:
setattr(update_object, key, module.params[key])
kwargs_update[kwargs_non_primitive_update[param]] = update_object
return kwargs_update
def is_dictionary_subset(sub, super_dict):
"""
This function checks if `sub` dictionary is a subset of `super` dictionary.
:param sub: subset dictionary, for example user_provided_attr_value.
:param super_dict: super dictionary, for example resources_attr_value.
:return: True if sub is contained in super.
"""
for key in sub:
if sub[key] != super_dict[key]:
return False
return True
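# For example: is_dictionary_subset({"service_id": "ocid1..a"},
# {"service_id": "ocid1..a", "service_name": "svc"}) returns True -- every
# key the caller supplied matches, and extra server-side keys are ignored.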
def are_lists_equal(s, t):
if s is None and t is None:
return True
    if s is None or t is None or len(s) != len(t):
        return False
if len(s) == 0:
return True
s = to_dict(s)
t = to_dict(t)
if type(s[0]) == dict:
# Handle list of dicts. Dictionary returned by the API may have additional keys. For example, a get call on
# service gateway has an attribute `services` which is a list of `ServiceIdResponseDetails`. This has a key
# `service_name` which is not provided in the list of `services` by a user while making an update call; only
# `service_id` is provided by the user in the update call.
sorted_s = sort_list_of_dictionary(s)
sorted_t = sort_list_of_dictionary(t)
for index, d in enumerate(sorted_s):
if not is_dictionary_subset(d, sorted_t[index]):
return False
return True
else:
# Handle lists of primitive types.
try:
for elem in s:
t.remove(elem)
except ValueError:
return False
return not t
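# For primitive element types this is an order-insensitive (multiset)
# comparison: are_lists_equal([1, 2, 2], [2, 1, 2]) is True, while
# are_lists_equal([1, 2], [1, 2, 2]) is False.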
def get_attr_to_update(get_fn, kwargs_get, module, update_attributes):
try:
resource = call_with_backoff(get_fn, **kwargs_get).data
except ServiceError as ex:
module.fail_json(msg=ex.message)
attributes_to_update = []
for attr in update_attributes:
resources_attr_value = getattr(resource, attr, None)
user_provided_attr_value = module.params.get(attr, None)
unequal_list_attr = (
type(resources_attr_value) == list or type(user_provided_attr_value) == list
) and not are_lists_equal(user_provided_attr_value, resources_attr_value)
unequal_attr = type(resources_attr_value) != list and to_dict(
resources_attr_value
) != to_dict(user_provided_attr_value)
if unequal_list_attr or unequal_attr:
# only update if the user has explicitly provided a value for this attribute
# otherwise, no update is necessary because the user hasn't expressed a particular
# value for that attribute
if module.params.get(attr, None):
attributes_to_update.append(attr)
return attributes_to_update, resource
def get_taggable_arg_spec(supports_create=False, supports_wait=False):
"""
Returns an arg_spec that is valid for taggable OCI resources.
:return: A dict that represents an ansible arg spec that builds over the common_arg_spec and adds free-form and
defined tags.
"""
tag_arg_spec = get_common_arg_spec(supports_create, supports_wait)
tag_arg_spec.update(
dict(freeform_tags=dict(type="dict"), defined_tags=dict(type="dict"))
)
return tag_arg_spec
def add_tags_to_model_from_module(model, module):
"""
Adds free-form and defined tags from an ansible module to a resource model
:param model: A resource model instance that supports 'freeform_tags' and 'defined_tags' as attributes
:param module: An AnsibleModule representing the options provided by the user
:return: The updated model class with the tags specified by the user.
"""
freeform_tags = module.params.get("freeform_tags", None)
defined_tags = module.params.get("defined_tags", None)
return add_tags_to_model_class(model, freeform_tags, defined_tags)
def add_tags_to_model_class(model, freeform_tags, defined_tags):
"""
Add free-form and defined tags to a resource model.
:param model: A resource model instance that supports 'freeform_tags' and 'defined_tags' as attributes
:param freeform_tags: A dict representing the freeform_tags to be applied to the model
:param defined_tags: A dict representing the defined_tags to be applied to the model
:return: The updated model class with the tags specified by the user
"""
try:
if freeform_tags is not None:
_debug("Model {0} set freeform tags to {1}".format(model, freeform_tags))
model.__setattr__("freeform_tags", freeform_tags)
if defined_tags is not None:
_debug("Model {0} set defined tags to {1}".format(model, defined_tags))
model.__setattr__("defined_tags", defined_tags)
except AttributeError as ae:
_debug("Model {0} doesn't support tags. Error {1}".format(model, ae))
return model
def check_and_create_resource(
resource_type,
create_fn,
kwargs_create,
list_fn,
kwargs_list,
module,
model,
existing_resources=None,
exclude_attributes=None,
dead_states=None,
default_attribute_values=None,
supports_sort_by_time_created=True,
):
"""
    This function checks whether there is a resource with the same attributes as specified in the module options. If not,
it creates and returns the resource.
:param resource_type: Type of the resource to be created.
:param create_fn: Function used in the module to handle create operation. The function should return a dict with
keys as resource & changed.
:param kwargs_create: Dictionary of parameters for create operation.
:param list_fn: List function in sdk to list all the resources of type resource_type.
:param kwargs_list: Dictionary of parameters for list operation.
:param module: Instance of AnsibleModule
:param model: Model used to create a resource.
:param exclude_attributes: The attributes which should not be used to distinguish the resource. e.g. display_name,
dns_label.
    :param dead_states: List of states which can't transition to any of the usable states of the resource. This defaults
    to ["TERMINATING", "TERMINATED", "FAULTY", "FAILED", "DELETING", "DELETED", "UNKNOWN_ENUM_VALUE"]
:param default_attribute_values: A dictionary containing default values for attributes.
:return: A dictionary containing the resource & the "changed" status. e.g. {"vcn":{x:y}, "changed":True}
"""
if module.params.get("force_create", None):
_debug("Force creating {0}".format(resource_type))
result = call_with_backoff(create_fn, **kwargs_create)
return result
# Get the existing resources list sorted by creation time in descending order. Return the latest matching resource
# in case of multiple resource matches.
if exclude_attributes is None:
exclude_attributes = {}
if default_attribute_values is None:
default_attribute_values = {}
try:
if existing_resources is None:
if supports_sort_by_time_created:
kwargs_list["sort_by"] = "TIMECREATED"
existing_resources = list_all_resources(list_fn, **kwargs_list)
except ValueError:
# list_fn doesn't support sort_by, so remove the sort_by key in kwargs_list and retry
kwargs_list.pop("sort_by", None)
try:
existing_resources = list_all_resources(list_fn, **kwargs_list)
# Handle errors like 404 due to bad arguments to the list_all_resources call.
except ServiceError as ex:
module.fail_json(msg=ex.message)
except ServiceError as ex:
module.fail_json(msg=ex.message)
result = dict()
attributes_to_consider = _get_attributes_to_consider(
exclude_attributes, model, module
)
if "defined_tags" not in default_attribute_values:
default_attribute_values["defined_tags"] = {}
resource_matched = None
_debug(
"Trying to find a match within {0} existing resources".format(
len(existing_resources)
)
)
for resource in existing_resources:
if _is_resource_active(resource, dead_states):
_debug(
"Comparing user specified values {0} against an existing resource's "
"values {1}".format(module.params, to_dict(resource))
)
if does_existing_resource_match_user_inputs(
to_dict(resource),
module,
attributes_to_consider,
exclude_attributes,
default_attribute_values,
):
resource_matched = to_dict(resource)
break
if resource_matched:
_debug("Resource with same attributes found: {0}.".format(resource_matched))
result[resource_type] = resource_matched
result["changed"] = False
else:
_debug("No matching resource found. Attempting to create a new resource.")
result = call_with_backoff(create_fn, **kwargs_create)
return result
def _get_attributes_to_consider(exclude_attributes, model, module):
"""
    Determine the attributes used to detect whether an existing resource already matches the requested resource state
:param exclude_attributes: Attributes to not consider for matching
:param model: The model class used to create the Resource
:param module: An instance of AnsibleModule that contains user's desires around a resource's state
:return: A list of attributes that needs to be matched
"""
    # If a user explicitly requests us to match only against a set of attributes (using 'key_by'), use that as
    # the list of attributes to consider for matching.
if "key_by" in module.params and module.params["key_by"] is not None:
attributes_to_consider = module.params["key_by"]
else:
# Consider all attributes except freeform_tags as freeform tags do not distinguish a resource.
attributes_to_consider = list(model.attribute_map)
if "freeform_tags" in attributes_to_consider:
attributes_to_consider.remove("freeform_tags")
        # Temporarily removing node_count as the existing resource does not reflect it
if "node_count" in attributes_to_consider:
attributes_to_consider.remove("node_count")
_debug("attributes to consider: {0}".format(attributes_to_consider))
return attributes_to_consider
def _is_resource_active(resource, dead_states):
if dead_states is None:
dead_states = DEAD_STATES
if "lifecycle_state" not in resource.attribute_map:
return True
return resource.lifecycle_state not in dead_states
def is_attr_assigned_default(default_attribute_values, attr, assigned_value):
if not default_attribute_values:
return False
if attr in default_attribute_values:
default_val_for_attr = default_attribute_values.get(attr, None)
if isinstance(default_val_for_attr, dict):
# When default value for a resource's attribute is empty dictionary, check if the corresponding value of the
# existing resource's attribute is also empty.
if not default_val_for_attr:
return not assigned_value
# only compare keys that are in default_attribute_values[attr]
# this is to ensure forward compatibility when the API returns new keys that are not known during
# the time when the module author provided default values for the attribute
keys = {}
            for k, v in iteritems(assigned_value):
if k in default_val_for_attr:
keys[k] = v
return default_val_for_attr == keys
# non-dict, normal comparison
return default_val_for_attr == assigned_value
else:
# module author has not provided a default value for attr
return True
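# Illustrative usage sketch (not part of the original module; the attribute
# names are hypothetical): for dict-valued defaults, only the keys known to the
# module author are compared, so extra keys returned by newer API versions do
# not break the match.
def _example_is_attr_assigned_default():
    # An empty dict default matches only an empty assigned value.
    empty_default_matches = is_attr_assigned_default(
        {"defined_tags": {}}, "defined_tags", {}
    )  # -> True
    # The API-only key "b" is ignored because it is absent from the default.
    forward_compatible_match = is_attr_assigned_default(
        {"options": {"a": 1}}, "options", {"a": 1, "b": 2}
    )  # -> True
    return empty_default_matches, forward_compatible_match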
def create_resource(resource_type, create_fn, kwargs_create, module):
"""
Create an OCI resource
:param resource_type: Type of the resource to be created. e.g.: "vcn"
:param create_fn: Function in the SDK to create the resource. e.g. virtual_network_client.create_vcn
:param kwargs_create: Dictionary containing arguments to be used to call the create function create_fn
:param module: Instance of AnsibleModule
"""
result = dict(changed=False)
try:
resource = to_dict(call_with_backoff(create_fn, **kwargs_create).data)
_debug("Created {0}, {1}".format(resource_type, resource))
result["changed"] = True
result[resource_type] = resource
return result
except (ServiceError, TypeError) as ex:
module.fail_json(msg=str(ex))
def does_existing_resource_match_user_inputs(
existing_resource,
module,
attributes_to_compare,
exclude_attributes,
default_attribute_values=None,
):
"""
Check if 'attributes_to_compare' in an existing_resource match the desired state provided by a user in 'module'.
:param existing_resource: A dictionary representing an existing resource's values.
:param module: The AnsibleModule representing the options provided by the user.
    :param attributes_to_compare: A list of attributes of a resource that are used to compare if an existing resource
    matches the desired state of the resource expressed by the user in 'module'.
    :param exclude_attributes: The attributes, provided by the module author, which should not be used to match the
    resource. This dictionary typically includes: (a) attributes which are initialized with dynamic default values
    like 'display_name', 'security_list_ids' for subnets and (b) attributes that don't have any defaults like
    'dns_label' in VCNs. The attributes are the dictionary's keys, and 'True' is the value for each key.
    :param default_attribute_values: A dictionary containing default values for attributes.
    :return: True if the values for the list of attributes are the same in the existing_resource and module instances.
"""
if not default_attribute_values:
default_attribute_values = {}
for attr in attributes_to_compare:
attribute_with_default_metadata = None
if attr in existing_resource:
resources_value_for_attr = existing_resource[attr]
# Check if the user has explicitly provided the value for attr.
user_provided_value_for_attr = _get_user_provided_value(module, attr)
if user_provided_value_for_attr is not None:
res = [True]
check_if_user_value_matches_resources_attr(
attr,
resources_value_for_attr,
user_provided_value_for_attr,
exclude_attributes,
default_attribute_values,
res,
)
if not res[0]:
_debug(
"Mismatch on attribute '{0}'. User provided value is {1} & existing resource's value"
"is {2}.".format(
attr, user_provided_value_for_attr, resources_value_for_attr
)
)
return False
else:
                # If the user has not explicitly provided the value for attr and attr is in exclude_list, we can
                # consider this as a 'pass'. For example, if an attribute 'display_name' is not specified by the
                # user and that attribute is in the 'exclude_list' according to the module author (not the user),
                # then exclude it from the comparison.
if (
exclude_attributes.get(attr) is None
and resources_value_for_attr is not None
):
if module.argument_spec.get(attr):
attribute_with_default_metadata = module.argument_spec.get(attr)
default_attribute_value = attribute_with_default_metadata.get(
"default", None
)
if default_attribute_value is not None:
if existing_resource[attr] != default_attribute_value:
return False
# Check if attr has a value that is not default. For example, a custom `security_list_id`
# is assigned to the subnet's attribute `security_list_ids`. If the attribute is assigned a
# value that is not the default, then it must be considered a mismatch and false returned.
elif not is_attr_assigned_default(
default_attribute_values, attr, existing_resource[attr]
):
return False
else:
_debug(
"Attribute {0} is in the create model of resource {1}"
"but doesn't exist in the get model of the resource".format(
attr, existing_resource.__class__
)
)
return True
def tuplize(d):
"""
This function takes a dictionary and converts it to a list of tuples recursively.
:param d: A dictionary.
:return: List of tuples.
"""
list_of_tuples = []
key_list = sorted(list(d.keys()))
for key in key_list:
if type(d[key]) == list:
# Convert a value which is itself a list of dict to a list of tuples.
if d[key] and type(d[key][0]) == dict:
sub_tuples = []
for sub_dict in d[key]:
sub_tuples.append(tuplize(sub_dict))
                # While creating a tuple for a {key: value} pair, make the first element of the tuple a boolean
                # (`True` if the value is None) so that attributes with None values sort last and None is never
                # compared directly against another type.
list_of_tuples.append((sub_tuples is None, key, sub_tuples))
else:
list_of_tuples.append((d[key] is None, key, d[key]))
elif type(d[key]) == dict:
tupled_value = tuplize(d[key])
list_of_tuples.append((tupled_value is None, key, tupled_value))
else:
list_of_tuples.append((d[key] is None, key, d[key]))
return list_of_tuples
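# Illustrative usage sketch (not part of the original module): tuplize sorts
# keys, recurses into dicts and lists of dicts, and prefixes each tuple with an
# "is None" flag so None values sort last.
def _example_tuplize():
    d = {"b": [1, 2], "a": {"y": None, "x": 3}}
    return tuplize(d)
    # -> [(False, 'a', [(False, 'x', 3), (True, 'y', None)]), (False, 'b', [1, 2])]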
def get_key_for_comparing_dict(d):
tuple_form_of_d = tuplize(d)
return tuple_form_of_d
def sort_dictionary(d):
"""
This function sorts values of a dictionary recursively.
:param d: A dictionary.
:return: Dictionary with sorted elements.
"""
sorted_d = {}
for key in d:
if type(d[key]) == list:
if d[key] and type(d[key][0]) == dict:
sorted_value = sort_list_of_dictionary(d[key])
sorted_d[key] = sorted_value
else:
sorted_d[key] = sorted(d[key])
elif type(d[key]) == dict:
sorted_d[key] = sort_dictionary(d[key])
else:
sorted_d[key] = d[key]
return sorted_d
def sort_list_of_dictionary(list_of_dict):
"""
    This function sorts a list of dictionaries. It first sorts each value of the dictionary and then sorts the list of
    individually sorted dictionaries. For sorting, each dictionary's tuple equivalent is used.
    :param list_of_dict: List of dictionaries.
    :return: A sorted list of dictionaries.
"""
list_with_sorted_dict = []
for d in list_of_dict:
sorted_d = sort_dictionary(d)
list_with_sorted_dict.append(sorted_d)
return sorted(list_with_sorted_dict, key=get_key_for_comparing_dict)
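# Illustrative usage sketch (not part of the original module): each dictionary
# is sorted internally, then the list is ordered by the dictionaries' tuple
# equivalents, giving a deterministic order for the deep list comparisons above.
def _example_sort_list_of_dictionary():
    return sort_list_of_dictionary([{"a": 1, "b": 2}, {"a": 0, "b": 9}])
    # -> [{'a': 0, 'b': 9}, {'a': 1, 'b': 2}]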
def check_if_user_value_matches_resources_attr(
attribute_name,
resources_value_for_attr,
user_provided_value_for_attr,
exclude_attributes,
default_attribute_values,
res,
):
if isinstance(default_attribute_values.get(attribute_name), dict):
default_attribute_values = default_attribute_values.get(attribute_name)
if isinstance(exclude_attributes.get(attribute_name), dict):
exclude_attributes = exclude_attributes.get(attribute_name)
if isinstance(resources_value_for_attr, list) or isinstance(
user_provided_value_for_attr, list
):
# Perform a deep equivalence check for a List attribute
if exclude_attributes.get(attribute_name):
return
if (
user_provided_value_for_attr is None
and default_attribute_values.get(attribute_name) is not None
):
user_provided_value_for_attr = default_attribute_values.get(attribute_name)
if resources_value_for_attr is None and user_provided_value_for_attr is None:
return
        if resources_value_for_attr is None or user_provided_value_for_attr is None:
            res[0] = False
            return
if (
resources_value_for_attr is not None
and user_provided_value_for_attr is not None
and len(resources_value_for_attr) != len(user_provided_value_for_attr)
):
res[0] = False
return
if (
user_provided_value_for_attr
and type(user_provided_value_for_attr[0]) == dict
):
# Process a list of dict
sorted_user_provided_value_for_attr = sort_list_of_dictionary(
user_provided_value_for_attr
)
sorted_resources_value_for_attr = sort_list_of_dictionary(
resources_value_for_attr
)
else:
sorted_user_provided_value_for_attr = sorted(user_provided_value_for_attr)
sorted_resources_value_for_attr = sorted(resources_value_for_attr)
# Walk through the sorted list values of the resource's value for this attribute, and compare against user
# provided values.
for index, resources_value_for_attr_part in enumerate(
sorted_resources_value_for_attr
):
check_if_user_value_matches_resources_attr(
attribute_name,
resources_value_for_attr_part,
sorted_user_provided_value_for_attr[index],
exclude_attributes,
default_attribute_values,
res,
)
elif isinstance(resources_value_for_attr, dict):
# Perform a deep equivalence check for dict typed attributes
if not resources_value_for_attr and user_provided_value_for_attr:
res[0] = False
for key in resources_value_for_attr:
if (
user_provided_value_for_attr is not None
and user_provided_value_for_attr
):
check_if_user_value_matches_resources_attr(
key,
resources_value_for_attr.get(key),
user_provided_value_for_attr.get(key),
exclude_attributes,
default_attribute_values,
res,
)
else:
if exclude_attributes.get(key) is None:
if default_attribute_values.get(key) is not None:
user_provided_value_for_attr = default_attribute_values.get(key)
check_if_user_value_matches_resources_attr(
key,
resources_value_for_attr.get(key),
user_provided_value_for_attr,
exclude_attributes,
default_attribute_values,
res,
)
else:
res[0] = is_attr_assigned_default(
default_attribute_values,
attribute_name,
resources_value_for_attr.get(key),
)
elif resources_value_for_attr != user_provided_value_for_attr:
if (
exclude_attributes.get(attribute_name) is None
and default_attribute_values.get(attribute_name) is not None
):
# As the user has not specified a value for an optional attribute, if the existing resource's
# current state has a DEFAULT value for that attribute, we must not consider this incongruence
# an issue and continue with other checks. If the existing resource's value for the attribute
# is not the default value, then the existing resource is not a match.
if not is_attr_assigned_default(
default_attribute_values, attribute_name, resources_value_for_attr
):
res[0] = False
elif user_provided_value_for_attr is not None:
res[0] = False
def are_dicts_equal(
option_name,
existing_resource_dict,
user_provided_dict,
exclude_list,
default_attribute_values,
):
if not user_provided_dict:
# User has not provided a value for the map option. In this case, the user hasn't expressed an intent around
# this optional attribute. Check if existing_resource_dict matches default.
# For example, source_details attribute in volume is optional and does not have any defaults.
return is_attr_assigned_default(
default_attribute_values, option_name, existing_resource_dict
)
# If the existing resource has an empty dict, while the user has provided entries, dicts are not equal
if not existing_resource_dict and user_provided_dict:
return False
    # check if all keys of an existing resource's dict attribute match the user-provided dict's entries
for sub_attr in existing_resource_dict:
# If user has provided value for sub-attribute, then compare it with corresponding key in existing resource.
if sub_attr in user_provided_dict:
if existing_resource_dict[sub_attr] != user_provided_dict[sub_attr]:
_debug(
"Failed to match: Existing resource's attr {0} sub-attr {1} value is {2}, while user "
"provided value is {3}".format(
option_name,
sub_attr,
existing_resource_dict[sub_attr],
user_provided_dict.get(sub_attr, None),
)
)
return False
# If sub_attr not provided by user, check if the sub-attribute value of existing resource matches default value.
else:
if not should_dict_attr_be_excluded(option_name, sub_attr, exclude_list):
default_value_for_dict_attr = default_attribute_values.get(
option_name, None
)
if default_value_for_dict_attr:
# if a default value for the sub-attr was provided by the module author, fail if the existing
# resource's value for the sub-attr is not the default
if not is_attr_assigned_default(
default_value_for_dict_attr,
sub_attr,
existing_resource_dict[sub_attr],
):
return False
else:
# No default value specified by module author for sub_attr
_debug(
"Consider as match: Existing resource's attr {0} sub-attr {1} value is {2}, while user did"
"not provide a value for it. The module author also has not provided a default value for it"
"or marked it for exclusion. So ignoring this attribute during matching and continuing with"
"other checks".format(
option_name, sub_attr, existing_resource_dict[sub_attr]
)
)
return True
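# Illustrative usage sketch (not part of the original module; the option and
# sub-attribute names are hypothetical): sub-attributes the user provided must
# match exactly, while omitted sub-attributes with no default and no exclusion
# entry are ignored during matching.
def _example_are_dicts_equal():
    existing = {"source_type": "image", "boot_volume_size_in_gbs": 50}
    user = {"source_type": "image"}
    return are_dicts_equal("source_details", existing, user, [], {})  # -> True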
def should_dict_attr_be_excluded(map_option_name, option_key, exclude_list):
"""An entry for the Exclude list for excluding a map's key is specifed as a dict with the map option name as the
key, and the value as a list of keys to be excluded within that map. For example, if the keys "k1" and "k2" of a map
option named "m1" needs to be excluded, the exclude list must have an entry {'m1': ['k1','k2']} """
for exclude_item in exclude_list:
if isinstance(exclude_item, dict):
if map_option_name in exclude_item:
if option_key in exclude_item[map_option_name]:
return True
return False
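# Illustrative usage sketch (not part of the original module), matching the
# entry format described in the docstring above.
def _example_should_dict_attr_be_excluded():
    exclude_list = ["display_name", {"m1": ["k1", "k2"]}]
    return (
        should_dict_attr_be_excluded("m1", "k1", exclude_list),  # -> True
        should_dict_attr_be_excluded("m1", "k3", exclude_list),  # -> False
    )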
def create_and_wait(
resource_type,
client,
create_fn,
kwargs_create,
get_fn,
get_param,
module,
states=None,
wait_applicable=True,
kwargs_get=None,
):
"""
A utility function to create a resource and wait for the resource to get into the state as specified in the module
options.
:param wait_applicable: Specifies if wait for create is applicable for this resource
:param resource_type: Type of the resource to be created. e.g. "vcn"
:param client: OCI service client instance to call the service periodically to retrieve data.
e.g. VirtualNetworkClient()
:param create_fn: Function in the SDK to create the resource. e.g. virtual_network_client.create_vcn
:param kwargs_create: Dictionary containing arguments to be used to call the create function create_fn.
:param get_fn: Function in the SDK to get the resource. e.g. virtual_network_client.get_vcn
:param get_param: Name of the argument in the SDK get function. e.g. "vcn_id"
:param module: Instance of AnsibleModule.
:param states: List of lifecycle states to watch for while waiting after create_fn is called.
e.g. [module.params['wait_until'], "FAULTY"]
:param kwargs_get: Dictionary containing arguments to be used to call a multi-argument `get` function
:return: A dictionary containing the resource & the "changed" status. e.g. {"vcn":{x:y}, "changed":True}
"""
try:
return create_or_update_resource_and_wait(
resource_type,
create_fn,
kwargs_create,
module,
wait_applicable,
get_fn,
get_param,
states,
client,
kwargs_get,
)
except MaximumWaitTimeExceeded as ex:
module.fail_json(msg=str(ex))
except ServiceError as ex:
module.fail_json(msg=ex.message)
def update_and_wait(
resource_type,
client,
update_fn,
kwargs_update,
get_fn,
get_param,
module,
states=None,
wait_applicable=True,
kwargs_get=None,
):
"""
    A utility function to update a resource and wait for the resource to get into the state as specified in the module
    options. It is analogous to create_and_wait: apart from the update function and its arguments, the behavior is the same.
:param wait_applicable: Specifies if wait for create is applicable for this resource
:param resource_type: Type of the resource to be created. e.g. "vcn"
:param client: OCI service client instance to call the service periodically to retrieve data.
e.g. VirtualNetworkClient()
:param update_fn: Function in the SDK to update the resource. e.g. virtual_network_client.update_vcn
:param kwargs_update: Dictionary containing arguments to be used to call the update function update_fn.
:param get_fn: Function in the SDK to get the resource. e.g. virtual_network_client.get_vcn
:param get_param: Name of the argument in the SDK get function. e.g. "vcn_id"
:param module: Instance of AnsibleModule.
:param kwargs_get: Dictionary containing arguments to be used to call the get function which requires multiple arguments.
:param states: List of lifecycle states to watch for while waiting after update_fn is called.
e.g. [module.params['wait_until'], "FAULTY"]
:return: A dictionary containing the resource & the "changed" status. e.g. {"vcn":{x:y}, "changed":True}
"""
try:
return create_or_update_resource_and_wait(
resource_type,
update_fn,
kwargs_update,
module,
wait_applicable,
get_fn,
get_param,
states,
client,
kwargs_get=kwargs_get,
)
except MaximumWaitTimeExceeded as ex:
module.fail_json(msg=str(ex))
except ServiceError as ex:
module.fail_json(msg=ex.message)
def create_or_update_resource_and_wait(
resource_type,
function,
kwargs_function,
module,
wait_applicable,
get_fn,
get_param,
states,
client,
update_target_resource_id_in_get_param=False,
kwargs_get=None,
):
"""
A utility function to create or update a resource and wait for the resource to get into the state as specified in
the module options.
:param resource_type: Type of the resource to be created. e.g. "vcn"
:param function: Function in the SDK to create or update the resource.
:param kwargs_function: Dictionary containing arguments to be used to call the create or update function
:param module: Instance of AnsibleModule.
:param wait_applicable: Specifies if wait for create is applicable for this resource
:param get_fn: Function in the SDK to get the resource. e.g. virtual_network_client.get_vcn
:param get_param: Name of the argument in the SDK get function. e.g. "vcn_id"
:param states: List of lifecycle states to watch for while waiting after create_fn is called.
e.g. [module.params['wait_until'], "FAULTY"]
:param client: OCI service client instance to call the service periodically to retrieve data.
e.g. VirtualNetworkClient()
:param kwargs_get: Dictionary containing arguments to be used to call the get function which requires multiple arguments.
:return: A dictionary containing the resource & the "changed" status. e.g. {"vcn":{x:y}, "changed":True}
"""
result = create_resource(resource_type, function, kwargs_function, module)
resource = result[resource_type]
result[resource_type] = wait_for_resource_lifecycle_state(
client,
module,
wait_applicable,
kwargs_get,
get_fn,
get_param,
resource,
states,
resource_type,
)
return result
def wait_for_resource_lifecycle_state(
client,
module,
wait_applicable,
kwargs_get,
get_fn,
get_param,
resource,
states,
resource_type=None,
):
"""
A utility function to wait for the resource to get into the state as specified in
the module options.
:param client: OCI service client instance to call the service periodically to retrieve data.
e.g. VirtualNetworkClient
:param module: Instance of AnsibleModule.
:param wait_applicable: Specifies if wait for create is applicable for this resource
:param kwargs_get: Dictionary containing arguments to be used to call the get function which requires multiple arguments.
:param get_fn: Function in the SDK to get the resource. e.g. virtual_network_client.get_vcn
:param get_param: Name of the argument in the SDK get function. e.g. "vcn_id"
:param resource_type: Type of the resource to be created. e.g. "vcn"
:param states: List of lifecycle states to watch for while waiting after create_fn is called.
e.g. [module.params['wait_until'], "FAULTY"]
:return: A dictionary containing the resource & the "changed" status. e.g. {"vcn":{x:y}, "changed":True}
"""
if wait_applicable and module.params.get("wait", None):
if resource_type == "compartment":
            # An immediate attempt to retrieve a compartment right after it is created fails with a 404
            # ('Authorization failed or requested resource not found'). This is because it takes a few
            # seconds for the permissions on a new compartment to be ready, so wait a few seconds before
            # attempting a get call on the compartment.
_debug(
"Pausing execution for permission on the newly created compartment to be ready."
)
time.sleep(15)
if kwargs_get:
_debug(
"Waiting for resource to reach READY state. get_args: {0}".format(
kwargs_get
)
)
response_get = call_with_backoff(get_fn, **kwargs_get)
else:
_debug(
"Waiting for resource with id {0} to reach READY state.".format(
resource["id"]
)
)
response_get = call_with_backoff(get_fn, **{get_param: resource["id"]})
if states is None:
states = module.params.get("wait_until") or DEFAULT_READY_STATES
resource = to_dict(
oci.wait_until(
client,
response_get,
evaluate_response=lambda r: r.data.lifecycle_state in states,
max_wait_seconds=module.params.get(
"wait_timeout", MAX_WAIT_TIMEOUT_IN_SECONDS
),
).data
)
return resource
def wait_on_work_request(client, response, module):
try:
if module.params.get("wait", None):
_debug(
"Waiting for work request with id {0} to reach SUCCEEDED state.".format(
response.data.id
)
)
wait_response = oci.wait_until(
client,
response,
evaluate_response=lambda r: r.data.status == "SUCCEEDED",
max_wait_seconds=module.params.get(
"wait_timeout", MAX_WAIT_TIMEOUT_IN_SECONDS
),
)
else:
_debug(
"Waiting for work request with id {0} to reach ACCEPTED state.".format(
response.data.id
)
)
wait_response = oci.wait_until(
client,
response,
evaluate_response=lambda r: r.data.status == "ACCEPTED",
max_wait_seconds=module.params.get(
"wait_timeout", MAX_WAIT_TIMEOUT_IN_SECONDS
),
)
except MaximumWaitTimeExceeded as ex:
_debug(str(ex))
module.fail_json(msg=str(ex))
except ServiceError as ex:
_debug(str(ex))
module.fail_json(msg=str(ex))
return wait_response.data
def delete_and_wait(
resource_type,
client,
get_fn,
kwargs_get,
delete_fn,
kwargs_delete,
module,
states=None,
wait_applicable=True,
process_work_request=False,
):
"""A utility function to delete a resource and wait for the resource to get into the state as specified in the
module options.
:param wait_applicable: Specifies if wait for delete is applicable for this resource
:param resource_type: Type of the resource to be deleted. e.g. "vcn"
:param client: OCI service client instance to call the service periodically to retrieve data.
e.g. VirtualNetworkClient()
:param get_fn: Function in the SDK to get the resource. e.g. virtual_network_client.get_vcn
:param kwargs_get: Dictionary of arguments for get function get_fn. e.g. {"vcn_id": module.params["id"]}
:param delete_fn: Function in the SDK to delete the resource. e.g. virtual_network_client.delete_vcn
:param kwargs_delete: Dictionary of arguments for delete function delete_fn. e.g. {"vcn_id": module.params["id"]}
:param module: Instance of AnsibleModule.
:param states: List of lifecycle states to watch for while waiting after delete_fn is called. If nothing is passed,
defaults to ["TERMINATED", "DETACHED", "DELETED"].
:param process_work_request: Whether a work request is generated on an API call and if it needs to be handled.
:return: A dictionary containing the resource & the "changed" status. e.g. {"vcn":{x:y}, "changed":True}
"""
states_set = set(["DETACHING", "DETACHED", "DELETING", "DELETED", "TERMINATING", "TERMINATED"])
result = dict(changed=False)
result[resource_type] = dict()
try:
resource = to_dict(call_with_backoff(get_fn, **kwargs_get).data)
if resource:
if "lifecycle_state" not in resource or resource["lifecycle_state"] not in states_set:
response = call_with_backoff(delete_fn, **kwargs_delete)
if process_work_request:
wr_id = response.headers.get("opc-work-request-id")
get_wr_response = call_with_backoff(
client.get_work_request, work_request_id=wr_id
)
result["work_request"] = to_dict(
wait_on_work_request(client, get_wr_response, module)
)
# Set changed to True as work request has been created to delete the resource.
result["changed"] = True
resource = to_dict(call_with_backoff(get_fn, **kwargs_get).data)
else:
_debug("Deleted {0}, {1}".format(resource_type, resource))
result["changed"] = True
if wait_applicable and module.params.get("wait", None):
if states is None:
states = (
module.params.get("wait_until")
or DEFAULT_TERMINATED_STATES
)
try:
wait_response = oci.wait_until(
client,
get_fn(**kwargs_get),
evaluate_response=lambda r: r.data.lifecycle_state
in states,
max_wait_seconds=module.params.get(
"wait_timeout", MAX_WAIT_TIMEOUT_IN_SECONDS
),
succeed_on_not_found=True,
)
except MaximumWaitTimeExceeded as ex:
module.fail_json(msg=str(ex))
except ServiceError as ex:
if ex.status != 404:
module.fail_json(msg=ex.message)
else:
                            # The resource was not found while waiting for it to reach a terminated state; treat it as deleted.
_debug(
"API returned Status:404(Not Found) while waiting for resource to get into"
" terminated state."
)
resource["lifecycle_state"] = "DELETED"
result[resource_type] = resource
return result
# oci.wait_until() returns an instance of oci.util.Sentinel in case the resource is not found.
if type(wait_response) is not Sentinel:
resource = to_dict(wait_response.data)
else:
resource["lifecycle_state"] = "DELETED"
result[resource_type] = resource
else:
_debug(
"Resource {0} with {1} already deleted. So returning changed=False".format(
resource_type, kwargs_get
)
)
except ServiceError as ex:
# DNS API throws a 400 InvalidParameter when a zone id is provided for zone_name_or_id and if the zone
# resource is not available, instead of the expected 404. So working around this for now.
if type(client) == oci.dns.DnsClient:
if ex.status == 400 and ex.code == "InvalidParameter":
_debug(
"Resource {0} with {1} already deleted. So returning changed=False".format(
resource_type, kwargs_get
)
)
elif ex.status != 404:
module.fail_json(msg=ex.message)
result[resource_type] = dict()
return result
def are_attrs_equal(current_resource, module, attributes):
"""
Check if the specified attributes are equal in the specified 'model' and 'module'. This is used to check if an OCI
Model instance already has the values specified by an Ansible user while invoking an OCI Ansible module and if a
resource needs to be updated.
:param current_resource: A resource model instance
:param module: The AnsibleModule representing the options provided by the user
:param attributes: A list of attributes that would need to be compared in the model and the module instances.
    :return: True if the values for the list of attributes are the same in the model and module instances
"""
for attr in attributes:
curr_value = getattr(current_resource, attr, None)
user_provided_value = _get_user_provided_value(module, attribute_name=attr)
if user_provided_value is not None:
if curr_value != user_provided_value:
_debug(
"are_attrs_equal - current resource's attribute "
+ attr
+ " value is "
+ str(curr_value)
+ " and this doesn't match user provided value of "
+ str(user_provided_value)
)
return False
return True
def _get_user_provided_value(module, attribute_name):
"""
Returns the user provided value for "attribute_name". We consider aliases in the module.
"""
user_provided_value = module.params.get(attribute_name, None)
if user_provided_value is None:
# If the attribute_name is set as an alias for some option X and user has provided value in the playbook using
# option X, then user provided value for attribute_name is equal to value for X.
# Get option name for attribute_name from module.aliases.
# module.aliases is a dictionary with key as alias name and its value as option name.
option_alias_for_attribute = module.aliases.get(attribute_name, None)
if option_alias_for_attribute is not None:
user_provided_value = module.params.get(option_alias_for_attribute, None)
return user_provided_value
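# Illustrative usage sketch (not part of the original module): a minimal
# stand-in for AnsibleModule showing how an alias resolves to the option the
# user actually set. Only `params` and `aliases` are consulted.
def _example_get_user_provided_value():
    class FakeModule(object):
        params = {"display_name": None, "name": "my-vcn"}
        aliases = {"display_name": "name"}
    # "display_name" itself is unset, but it is an alias of "name", so the
    # user's value for "name" is returned.
    return _get_user_provided_value(FakeModule(), "display_name")  # -> "my-vcn"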
def update_model_with_user_options(curr_model, update_model, module):
"""
    Update the 'update_model' with user-provided values from 'module' for attributes whose values differ
    from those in the 'curr_model'.
:param curr_model: A resource model instance representing the state of the current resource
:param update_model: An instance of the update resource model for the current resource's type
:param module: An AnsibleModule representing the options provided by the user
:return: An updated 'update_model' instance filled with values that would need to be updated in the current resource
state to satisfy the user's requested state.
"""
attributes = update_model.attribute_map.keys()
for attr in attributes:
curr_value_for_attr = getattr(curr_model, attr, None)
user_provided_value = _get_user_provided_value(module, attribute_name=attr)
if curr_value_for_attr != user_provided_value:
if user_provided_value is not None:
# Only update if a user has specified a value for an option
_debug(
"User requested {0} for attribute {1}, whereas the current value is {2}. So adding it "
"to the update model".format(
user_provided_value, attr, curr_value_for_attr
)
)
setattr(update_model, attr, user_provided_value)
else:
# Always set current values of the resource in the update model if there is no request for change in
# values
setattr(update_model, attr, curr_value_for_attr)
return update_model
def _get_retry_strategy():
retry_strategy_builder = RetryStrategyBuilder(
max_attempts_check=True,
max_attempts=10,
retry_max_wait_between_calls_seconds=30,
retry_base_sleep_time_seconds=3,
backoff_type=oci.retry.BACKOFF_FULL_JITTER_EQUAL_ON_THROTTLE_VALUE,
)
retry_strategy_builder.add_service_error_check(
service_error_retry_config={
429: [],
400: ["QuotaExceeded", "LimitExceeded"],
409: ["Conflict"],
},
service_error_retry_on_any_5xx=True,
)
return retry_strategy_builder.get_retry_strategy()
def call_with_backoff(fn, **kwargs):
if "retry_strategy" not in kwargs:
kwargs["retry_strategy"] = _get_retry_strategy()
try:
return fn(**kwargs)
except TypeError as te:
if "unexpected keyword argument" in str(te):
# to handle older SDKs that did not support retry_strategy
del kwargs["retry_strategy"]
return fn(**kwargs)
else:
# A validation error raised by the SDK, throw it back
raise
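# Illustrative usage sketch (not part of the original module): a plain function
# that does not accept `retry_strategy` triggers the TypeError fallback above,
# which is how call_with_backoff stays compatible with older SDK clients.
def _example_call_with_backoff():
    def get_fake_resource(resource_id):
        return {"id": resource_id}
    return call_with_backoff(get_fake_resource, resource_id="ocid1.fake")
    # -> {'id': 'ocid1.fake'}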
def generic_hash(obj):
"""
Compute a hash of all the fields in the object
:param obj: Object whose hash needs to be computed
:return: a hash value for the object
"""
    total = 0
    for field in obj.attribute_map.keys():
        field_value = getattr(obj, field)
        if isinstance(field_value, list):
            for value in field_value:
                total = total + hash(value)
        elif isinstance(field_value, dict):
            for k, v in field_value.items():
                total = total + hash(hash(k) + hash(":") + hash(v))
        else:
            total = total + hash(getattr(obj, field))
    return total
def generic_eq(s, other):
if other is None:
return False
return s.__dict__ == other.__dict__
def generate_subclass(parent_class):
"""Make a class hash-able by generating a subclass with a __hash__ method that returns the sum of all fields within
the parent class"""
dict_of_method_in_subclass = {
"__init__": parent_class.__init__,
"__hash__": generic_hash,
"__eq__": generic_eq,
}
subclass_name = "GeneratedSub" + parent_class.__name__
generated_sub_class = type(
subclass_name, (parent_class,), dict_of_method_in_subclass
)
return generated_sub_class
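# Illustrative usage sketch (not part of the original module): a hypothetical
# stand-in model with an `attribute_map`, showing that instances with equal
# field values hash identically and are de-duplicated by set().
def _example_generate_subclass():
    class FakeModel(object):
        attribute_map = {"name": "name"}
        def __init__(self):
            self.name = None
    HashedModel = generate_subclass(FakeModel)
    a, b = HashedModel(), HashedModel()
    a.name, b.name = "x", "x"
    return len(set([a, b]))  # -> 1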
def create_hashed_instance(class_type):
hashed_class = generate_subclass(class_type)
return hashed_class()
def get_hashed_object_list(class_type, object_with_values, attributes_class_type=None):
if object_with_values is None:
return None
hashed_class_instances = []
for object_with_value in object_with_values:
hashed_class_instances.append(
get_hashed_object(class_type, object_with_value, attributes_class_type)
)
return hashed_class_instances
def get_hashed_object(
class_type, object_with_value, attributes_class_type=None, supported_attributes=None
):
"""
    Convert any class instance into a hashable one so that the
    instances are eligible for the various comparison
    operations available on set() objects.
    :param class_type: Any class type whose instances need to be hashable
    :param object_with_value: Instance of the class type with values which
    would be set in the resulting instance
    :param attributes_class_type: A list of class types of attributes, if an attribute is a custom class instance
    :param supported_attributes: A list of attributes which should be considered while populating the instance
    with the values in the object. This helps avoid new attributes of the class_type that are not yet
    supported by the current implementation.
    :return: A hashable instance with the same state as the provided object_with_value
"""
if object_with_value is None:
return None
HashedClass = generate_subclass(class_type)
hashed_class_instance = HashedClass()
if supported_attributes:
class_attributes = list(
set(hashed_class_instance.attribute_map) & set(supported_attributes)
)
else:
class_attributes = hashed_class_instance.attribute_map
for attribute in class_attributes:
attribute_value = getattr(object_with_value, attribute)
if attributes_class_type:
for attribute_class_type in attributes_class_type:
if isinstance(attribute_value, attribute_class_type):
attribute_value = get_hashed_object(
attribute_class_type, attribute_value
)
hashed_class_instance.__setattr__(attribute, attribute_value)
return hashed_class_instance
def update_class_type_attr_difference(
update_class_details, existing_instance, attr_name, attr_class, input_attr_value
):
"""
    Checks the difference and updates an attribute which is represented by a class
    instance. Not applicable if the attribute type is a primitive value.
    For example, if class A has an attribute x whose value is an instance of another
    class X (A.x = X()), then this method applies.
    :param update_class_details The instance which should be updated if there is a change in
    the attribute value
    :param existing_instance The instance whose attribute value is compared with the input
    attribute value
    :param attr_name Name of the attribute whose value should be compared
    :param attr_class Class type of the attribute
    :param input_attr_value The value of the input attribute which should replace the current
    value in case of a mismatch
    :return: A boolean value indicating whether the attribute value has been replaced
"""
changed = False
# Here existing attribute values is an instance
existing_attr_value = get_hashed_object(
attr_class, getattr(existing_instance, attr_name)
)
if input_attr_value is None:
update_class_details.__setattr__(attr_name, existing_attr_value)
else:
changed = not input_attr_value.__eq__(existing_attr_value)
if changed:
update_class_details.__setattr__(attr_name, input_attr_value)
else:
update_class_details.__setattr__(attr_name, existing_attr_value)
return changed
def get_existing_resource(target_fn, module, **kwargs):
"""
Returns the requested resource if it exists based on the input arguments.
:param target_fn The function which should be used to find the requested resource
    :param module Instance of AnsibleModule
:param kwargs A map of arguments consisting of values based on which requested resource should be searched
:return: Instance of requested resource
"""
existing_resource = None
try:
response = call_with_backoff(target_fn, **kwargs)
existing_resource = response.data
except ServiceError as ex:
if ex.status != 404:
module.fail_json(msg=ex.message)
return existing_resource
def get_attached_instance_info(
module, lookup_attached_instance, list_attachments_fn, list_attachments_args
):
config = get_oci_config(module)
identity_client = create_service_client(module, IdentityClient)
volume_attachments = []
if lookup_attached_instance:
# Get all the compartments in the tenancy
compartments = to_dict(
identity_client.list_compartments(
config.get("tenancy"), compartment_id_in_subtree=True
).data
)
# For each compartment, get the volume attachments for the compartment_id with the other args in
# list_attachments_args.
for compartment in compartments:
list_attachments_args["compartment_id"] = compartment["id"]
try:
volume_attachments += list_all_resources(
list_attachments_fn, **list_attachments_args
)
# Pass ServiceError due to authorization issue in accessing volume attachments of a compartment
except ServiceError as ex:
if ex.status == 404:
pass
else:
volume_attachments = list_all_resources(
list_attachments_fn, **list_attachments_args
)
volume_attachments = to_dict(volume_attachments)
    # volume_attachments may include attachments in DETACHING or DETACHED state. Return the attachment that is in
    # ATTACHING or ATTACHED state.
return next(
(
volume_attachment
for volume_attachment in volume_attachments
if volume_attachment["lifecycle_state"] in ["ATTACHING", "ATTACHED"]
),
None,
)
def check_mode(fn):
def wrapper(*args, **kwargs):
if os.environ.get("OCI_ANSIBLE_EXPERIMENTAL", None):
return fn(*args, **kwargs)
return None
return wrapper
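# Illustrative usage sketch (not part of the original module): the decorated
# function only runs when the OCI_ANSIBLE_EXPERIMENTAL environment variable is
# set; otherwise the wrapper short-circuits and returns None.
def _example_check_mode():
    @check_mode
    def experimental_op():
        return "ran"
    return experimental_op()  # -> "ran" if OCI_ANSIBLE_EXPERIMENTAL is set, else None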
def check_and_return_component_list_difference(
input_component_list, existing_components, purge_components, delete_components=False
):
if input_component_list:
existing_components, changed = get_component_list_difference(
input_component_list,
existing_components,
purge_components,
delete_components,
)
else:
existing_components = []
changed = True
return existing_components, changed
def get_component_list_difference(
input_component_list, existing_components, purge_components, delete_components=False
):
if delete_components:
if existing_components is None:
return None, False
component_differences = set(existing_components).intersection(
set(input_component_list)
)
if component_differences:
return list(set(existing_components) - component_differences), True
else:
return None, False
if existing_components is None:
return input_component_list, True
if purge_components:
components_differences = set(input_component_list).symmetric_difference(
set(existing_components)
)
if components_differences:
return input_component_list, True
components_differences = set(input_component_list).difference(
set(existing_components)
)
if components_differences:
return list(components_differences) + existing_components, True
return None, False
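# Illustrative usage sketch (not part of the original module): without purging,
# new components are appended to the existing ones; with purge_components=True,
# any difference replaces the existing list with the input list.
def _example_get_component_list_difference():
    merged, changed = get_component_list_difference(["a", "b"], ["b", "c"], False)
    # merged == ['a', 'b', 'c'], changed is True
    replaced, changed = get_component_list_difference(["a", "b"], ["b", "c"], True)
    # replaced == ['a', 'b'], changed is True
    return merged, replaced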
def write_to_file(path, content):
with open(to_bytes(path), "wb") as dest_file:
dest_file.write(content)
def get_target_resource_from_list(
module, list_resource_fn, target_resource_id=None, **kwargs
):
"""
    Returns a resource filtered by identifier from a list of resources. This method should be
    used as an alternative to a 'get resource' method when the resource API does not provide
    one. This method returns a wrapper that only partially mimics a response object, so it
    should not be used as an input to the 'wait_until' utility.
:param module The AnsibleModule representing the options provided by the user
:param list_resource_fn The function which lists all the resources
:param target_resource_id The identifier of the resource which should be filtered from the list
:param kwargs A map of arguments consisting of values based on which requested resource should be searched
:return: A custom wrapper which partially wraps a response object where the data field contains the target
resource, if found.
"""
class ResponseWrapper:
def __init__(self, data):
self.data = data
try:
resources = list_all_resources(list_resource_fn, **kwargs)
if resources is not None:
for resource in resources:
if resource.id == target_resource_id:
                    # Return an object that mimics an OCI response, as oci_utils methods assume a
                    # Response-ish object
return ResponseWrapper(data=resource)
return ResponseWrapper(data=None)
except ServiceError as ex:
module.fail_json(msg=ex.message)
|
philuu12/PYTHON_4_NTWK_ENGRS
|
refs/heads/master
|
wk4_hw/Solution_wk4/ex1_paramiko.py
|
1
|
#!/usr/bin/env python
'''
Use Paramiko to retrieve the entire 'show version' output.
'''
import paramiko
import time
from getpass import getpass
MAX_BUFFER = 65535
def clear_buffer(remote_conn):
'''
Clear any data in the receive buffer
'''
if remote_conn.recv_ready():
return remote_conn.recv(MAX_BUFFER)
def disable_paging(remote_conn, cmd='terminal length 0'):
'''
Disable output paging (i.e. --More--)
'''
cmd = cmd.strip()
remote_conn.send(cmd + '\n')
time.sleep(1)
clear_buffer(remote_conn)
def send_command(remote_conn, cmd='', delay=1):
'''
Send command down the channel. Retrieve and return the output.
'''
if cmd != '':
cmd = cmd.strip()
remote_conn.send(cmd + '\n')
time.sleep(delay)
if remote_conn.recv_ready():
return remote_conn.recv(MAX_BUFFER)
else:
return ''
def main():
'''
Use Paramiko to retrieve the entire 'show version' output.
'''
ip_addr = raw_input("Enter IP address: ")
username = 'pyclass'
password = getpass()
port = 8022
remote_conn_pre = paramiko.SSHClient()
remote_conn_pre.load_system_host_keys()
remote_conn_pre.connect(ip_addr, port=port, username=username, password=password,
look_for_keys=False, allow_agent=False)
remote_conn = remote_conn_pre.invoke_shell()
time.sleep(1)
clear_buffer(remote_conn)
disable_paging(remote_conn)
output = send_command(remote_conn, cmd='show version')
print '\n>>>>'
print output
print '>>>>\n'
if __name__ == "__main__":
main()
|
tangfeixiong/nova
|
refs/heads/stable/juno
|
nova/tests/unit/db/__init__.py
|
179
|
# Copyright (c) 2010 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
:mod:`db` -- Stubs for DB API
=============================
"""
|
jmetzen/scikit-learn
|
refs/heads/master
|
examples/exercises/plot_iris_exercise.py
|
323
|
"""
================================
SVM Exercise
================================
A tutorial exercise for using different SVM kernels.
This exercise is used in the :ref:`using_kernels_tut` part of the
:ref:`supervised_learning_tut` section of the :ref:`stat_learn_tut_index`.
"""
print(__doc__)
import numpy as np
import matplotlib.pyplot as plt
from sklearn import datasets, svm
iris = datasets.load_iris()
X = iris.data
y = iris.target
X = X[y != 0, :2]
y = y[y != 0]
n_sample = len(X)
np.random.seed(0)
order = np.random.permutation(n_sample)
X = X[order]
y = y[order].astype(float)
n_train = int(0.9 * n_sample)
X_train = X[:n_train]
y_train = y[:n_train]
X_test = X[n_train:]
y_test = y[n_train:]
# fit the model
for fig_num, kernel in enumerate(('linear', 'rbf', 'poly')):
clf = svm.SVC(kernel=kernel, gamma=10)
clf.fit(X_train, y_train)
plt.figure(fig_num)
plt.clf()
plt.scatter(X[:, 0], X[:, 1], c=y, zorder=10, cmap=plt.cm.Paired)
# Circle out the test data
plt.scatter(X_test[:, 0], X_test[:, 1], s=80, facecolors='none', zorder=10)
plt.axis('tight')
x_min = X[:, 0].min()
x_max = X[:, 0].max()
y_min = X[:, 1].min()
y_max = X[:, 1].max()
XX, YY = np.mgrid[x_min:x_max:200j, y_min:y_max:200j]
Z = clf.decision_function(np.c_[XX.ravel(), YY.ravel()])
# Put the result into a color plot
Z = Z.reshape(XX.shape)
plt.pcolormesh(XX, YY, Z > 0, cmap=plt.cm.Paired)
plt.contour(XX, YY, Z, colors=['k', 'k', 'k'], linestyles=['--', '-', '--'],
levels=[-.5, 0, .5])
plt.title(kernel)
plt.show()
|
iodoom-gitorious/thelinkers-iodoom3
|
refs/heads/master
|
neo/sys/linux/pk4/id_utils.py
|
19
|
# a collection of utility functions to manipulate pak files
import os, zipfile, md5, pdb
# sorts in reverse alphabetical order like doom does for searching
def list_paks( path ):
    # filter to .pk4 files without mutating the list while iterating over it
    files = [ i for i in os.listdir( path ) if i[-4:] == '.pk4' ]
files.sort()
files.reverse()
return files
def list_files_in_pak( pak ):
files = []
zippy = zipfile.ZipFile( pak )
files += zippy.namelist()
files.sort()
return files
# no sorting, blunt list of everything
def list_files_in_paks( path ):
files = []
zippies = list_paks( path )
for fname in zippies:
print fname
zippy = zipfile.ZipFile( os.path.join( path, fname ) )
files += zippy.namelist()
# sort and remove dupes
dico = {}
for f in files:
dico[ f ] = 1
files = dico.keys()
files.sort()
return files
# build a dictionary of names -> ( pak name, md5 ) from a path of pk4s
def md5_in_paks( path ):
ret = {}
zippies = list_paks( path )
for fname in zippies:
print fname
zippy = zipfile.ZipFile( os.path.join( path, fname ) )
for file in zippy.namelist():
if ( ret.has_key( file ) ):
continue
data = zippy.read( file )
m = md5.new()
m.update( data )
ret[ file ] = ( fname, m.hexdigest() )
return ret
# find which files need to be updated in a set of paks from an expanded list
# returns ( updated, not_found, {} )
# ignores directories
# by default, no case match is done
# if case match is set, return ( updated, not_found, { zip case -> FS case } )
# updated will contain the zip case name
def list_updated_files( pak_path, base_path, case_match = False ):
not_found = []
updated = []
case_table = {}
pak_md5 = md5_in_paks( pak_path )
for file in pak_md5.keys():
if ( file[-1] == '/' ):
continue
path = os.path.join( base_path, file )
if ( case_match ):
ret = ifind( base_path, file )
if ( not ret[ 0 ] ):
not_found.append( file )
continue
else:
case_table[ path ] = ret[ 1 ]
path = os.path.join( base_path, ret[ 1 ] )
try:
f = open( path )
data = f.read()
f.close()
except:
if ( case_match ):
raise "internal error: ifind success but later read failed"
not_found.append( file )
else:
m = md5.new()
m.update( data )
if ( m.hexdigest() != pak_md5[ file ][ 1 ] ):
print file
updated.append( file )
return ( updated, not_found, case_table )
# find which files are missing in the expanded path, and extract the directories
# returns ( files, dirs, missing )
def status_files_for_path( path, infiles ):
files = []
dirs = []
missing = []
for i in infiles:
test_path = os.path.join( path, i )
if ( os.path.isfile( test_path ) ):
files.append( i )
elif ( os.path.isdir( test_path ) ):
dirs.append( i )
else:
missing.append( i )
return ( files, dirs, missing )
# build a pak from a base path and a list of files
def build_pak( pak, path, files ):
zippy = zipfile.ZipFile( pak, 'w', zipfile.ZIP_DEFLATED )
for i in files:
source_path = os.path.join( path, i )
print source_path
zippy.write( source_path, i )
zippy.close()
# process the list of files after a run to update media
# dds/ -> verify all the .dds are present in zip ( case insensitive )
# .wav -> verify that all .wav have a .ogg version in zip ( case insensitive )
# .tga not in dds/ -> try to find a .dds for them
# work from a list of files, and a path to the base pak files
# files: text files with files line by line
# pak_path: the path to the pak files to compare against
# returns: ( [ missing ], [ bad ] )
# bad are files the function didn't know what to do about ( bug )
# missing are lowercased names of all the files that were not matched in the build
# the dds/ ones are all forced to .dds extension
# missing .wav are returned in the missing list both as .wav and .ogg
# ( that's handy when you need to fetch next )
def check_files_against_build( files, pak_path ):
pak_list = list_files_in_paks( pak_path )
# make it lowercase
tmp = []
for i in pak_list:
tmp.append( i.lower() )
pak_list = tmp
# read the files and make them lowercase
f = open( files )
check_files = f.readlines()
f.close()
tmp = []
for i in check_files:
s = i.lower()
s = s.replace( '\n', '' )
s = s.replace( '\r', '' )
tmp.append( s )
check_files = tmp
# start processing
bad = []
missing = []
for i in check_files:
if ( i[ :4 ] == 'dds/' ):
if ( i[ len(i)-4: ] == '.tga' ):
i = i[ :-4 ] + '.dds'
elif ( i[ len(i)-4: ] != '.dds' ):
print 'File not understood: ' + i
bad.append( i )
continue
try:
pak_list.index( i )
except:
print 'Not found: ' + i
missing.append( i )
elif ( i[ len(i)-4: ] == '.wav' ):
i = i[ :-4 ] + '.ogg'
try:
pak_list.index( i )
except:
print 'Not found: ' + i
missing.append( i )
missing.append( i[ :-4 ] + '.wav' )
elif ( i[ len(i)-4: ] == '.tga' ):
# tga, not from dds/
try:
pak_list.index( i )
except:
print 'Not found: ' + i
missing.append( i )
i = 'dds/' + i[ :-4 ] + '.dds'
print 'Add dds : ' + i
missing.append( i )
else:
try:
pak_list.index( i )
except:
print 'Not found: ' + i
missing.append( i )
return ( missing, bad )
# match a path to a file in a case insensitive way
# return ( True/False, 'walked up to' )
def ifind( base, path ):
refpath = path
path = os.path.normpath( path )
path = os.path.normcase( path )
# early out just in case
if ( os.path.exists( path ) ):
return ( True, path )
head = path
components = []
while ( len( head ) ):
( head, chunk ) = os.path.split( head )
components.append( chunk )
#print 'head: %s - components: %s' % ( head, repr( components ) )
components.reverse()
level = 0
for root, dirs, files in os.walk( base, topdown = True ):
if ( level < len( components ) - 1 ):
#print 'filter dirs: %s' % repr( dirs )
dirs_del = []
for i in dirs:
if ( not i.lower() == components[ level ].lower() ):
dirs_del.append( i )
for i in dirs_del:
dirs.remove( i )
level += 1
# we assume there is never going to be 2 dirs with only case difference
if ( len( dirs ) != 1 ):
#print '%s: ifind failed dirs matching at %s - dirs: %s' % ( refpath, root, repr( dirs ) )
return ( False, root[ len( base ) + 1: ] )
else:
# must find the file here
for i in files:
if ( i.lower() == components[-1].lower() ):
return ( True, os.path.join( root, i )[ len( base ) + 1: ] )
return ( False, root[ len( base ) + 1: ] )
# do case insensitive FS search on files list
# return [ cased files, not found (unmodified ) ]
def ifind_list( base, files ):
cased = []
notfound = []
for i in files:
ret = ifind( base, i )
if ( ret[ 0 ] ):
cased.append( ret[ 1 ] )
else:
notfound.append( i )
return [ cased, notfound ]
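# a minimal usage sketch ( hypothetical base and mixed-case path ):
#   ( cased, notfound ) = ifind_list( '/usr/local/games/game', [ 'Textures/Foo.TGA' ] )
#   # cased holds the paths as cased on disk, notfound what ifind missed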
|
detiber/openshift-ansible-contrib
|
refs/heads/master
|
reference-architecture/vmware-ansible/playbooks/library/vmware_resource_pool.py
|
8
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2017, Davis Phillips davis.phillips@gmail.com
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: vmware_resource_pool
short_description: Add/remove resource pools to/from vCenter
description:
- This module can be used to add/remove a resource pool to/from vCenter
version_added: 2.3
author: "Davis Phillips (@dav1x)"
notes:
- Tested on vSphere 6.5
requirements:
- "python >= 2.6"
- PyVmomi
options:
datacenter:
description:
        - Name of the datacenter to add the host to
required: True
cluster:
description:
        - Name of the cluster to add the host to
required: True
resource_pool:
description:
- Resource pool name to manage
required: True
hostname:
description:
- ESXi hostname to manage
required: True
username:
description:
- ESXi username
required: True
password:
description:
- ESXi password
required: True
cpu_expandable_reservations:
description:
- In a resource pool with an expandable reservation, the reservation on a resource pool can grow beyond the specified value.
default: True
cpu_reservation:
description:
- Amount of resource that is guaranteed available to the virtual machine or resource pool.
default: 0
cpu_limit:
description:
- The utilization of a virtual machine/resource pool will not exceed this limit, even if there are available resources.
default: -1 (No limit)
cpu_shares:
description:
        - CPU shares are used in case of resource contention.
choices:
- high
- custom
- low
- normal
        default: normal
mem_expandable_reservations:
description:
- In a resource pool with an expandable reservation, the reservation on a resource pool can grow beyond the specified value.
default: True
mem_reservation:
description:
- Amount of resource that is guaranteed available to the virtual machine or resource pool.
default: 0
mem_limit:
description:
- The utilization of a virtual machine/resource pool will not exceed this limit, even if there are available resources.
default: -1 (No limit)
mem_shares:
description:
- Memory shares are used in case of resource contention.
choices:
- high
- custom
- low
- normal
        default: normal
state:
description:
- Add or remove the resource pool
default: 'present'
choices:
- 'present'
- 'absent'
extends_documentation_fragment: vmware.documentation
'''
EXAMPLES = '''
# Create a resource pool
- name: Add resource pool to vCenter
vmware_resource_pool:
hostname: vcsa_host
username: vcsa_user
password: vcsa_pass
datacenter: datacenter
cluster: cluster
resource_pool: resource_pool
mem_shares: normal
mem_limit: -1
mem_reservation: 0
mem_expandable_reservations: True
cpu_shares: normal
cpu_limit: -1
cpu_reservation: 0
cpu_expandable_reservations: True
state: present
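
# A removal sketch (hypothetical credentials, reusing the values from the
# example above; only state differs):
- name: Remove resource pool from vCenter
  vmware_resource_pool:
    hostname: vcsa_host
    username: vcsa_user
    password: vcsa_pass
    datacenter: datacenter
    cluster: cluster
    resource_pool: resource_pool
    state: absent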
'''
RETURN = """
instance:
    description: metadata about the new resource pool
returned: always
type: dict
sample: None
"""
try:
from pyVmomi import vim, vmodl
HAS_PYVMOMI = True
except ImportError:
HAS_PYVMOMI = False
from ansible.module_utils.vmware import get_all_objs, connect_to_api, vmware_argument_spec, find_datacenter_by_name, \
find_cluster_by_name_datacenter, wait_for_task
from ansible.module_utils.basic import AnsibleModule
class VMwareResourcePool(object):
def __init__(self, module):
self.module = module
self.datacenter = module.params['datacenter']
self.cluster = module.params['cluster']
self.resource_pool = module.params['resource_pool']
self.hostname = module.params['hostname']
self.username = module.params['username']
self.password = module.params['password']
self.state = module.params['state']
self.mem_shares = module.params['mem_shares']
self.mem_limit = module.params['mem_limit']
self.mem_reservation = module.params['mem_reservation']
        self.mem_expandable_reservations = module.params['mem_expandable_reservations']
self.cpu_shares = module.params['cpu_shares']
self.cpu_limit = module.params['cpu_limit']
self.cpu_reservation = module.params['cpu_reservation']
self.cpu_expandable_reservations = module.params['cpu_expandable_reservations']
self.dc_obj = None
self.cluster_obj = None
self.host_obj = None
self.resource_pool_obj = None
self.content = connect_to_api(module)
def find_host_by_cluster_datacenter(self):
self.dc_obj = find_datacenter_by_name(self.content, self.datacenter)
self.cluster_obj = find_cluster_by_name_datacenter(self.dc_obj, self.cluster)
for host in self.cluster_obj.host:
if host.name == self.hostname:
return host, self.cluster
return None, self.cluster
def select_resource_pool(self, host):
pool_obj = None
resource_pools = get_all_objs(self.content, [vim.ResourcePool])
pool_selections = self.get_obj(
[vim.ResourcePool],
self.resource_pool,
return_all = True
)
if pool_selections:
for p in pool_selections:
if p in resource_pools:
pool_obj = p
break
return pool_obj
def get_obj(self, vimtype, name, return_all = False):
obj = list()
container = self.content.viewManager.CreateContainerView(
self.content.rootFolder, vimtype, True)
for c in container.view:
if name in [c.name, c._GetMoId()]:
if return_all is False:
return c
else:
obj.append(c)
if len(obj) > 0:
return obj
else:
# for backwards-compat
return None
def process_state(self):
try:
rp_states = {
'absent': {
'present': self.state_remove_rp,
'absent': self.state_exit_unchanged,
},
'present': {
'present': self.state_exit_unchanged,
'absent': self.state_add_rp,
}
}
rp_states[self.state][self.check_rp_state()]()
except vmodl.RuntimeFault as runtime_fault:
self.module.fail_json(msg = runtime_fault.msg)
except vmodl.MethodFault as method_fault:
self.module.fail_json(msg = method_fault.msg)
except Exception as e:
self.module.fail_json(msg = str(e))
def state_exit_unchanged(self):
self.module.exit_json(changed = False)
def state_remove_rp(self):
changed = True
result = None
resource_pool = self.select_resource_pool(self.host_obj)
try:
task = self.resource_pool_obj.Destroy()
success, result = wait_for_task(task)
        except Exception:
            self.module.fail_json(msg = "Failed to remove resource pool '%s' '%s'" % (self.resource_pool, resource_pool))
self.module.exit_json(changed = changed, result = str(result))
def state_add_rp(self):
changed = True
result = None
root_resource_pool = None
rp_spec=vim.ResourceConfigSpec()
cpu_alloc=vim.ResourceAllocationInfo()
cpu_alloc.expandableReservation = self.cpu_expandable_reservations
cpu_alloc.limit = int(self.cpu_limit)
cpu_alloc.reservation = int(self.cpu_reservation)
cpu_alloc_shares = vim.SharesInfo()
cpu_alloc_shares.level = self.cpu_shares
cpu_alloc.shares = cpu_alloc_shares
rp_spec.cpuAllocation = cpu_alloc
mem_alloc = vim.ResourceAllocationInfo()
mem_alloc.limit = int(self.mem_limit)
mem_alloc.expandableReservation = self.mem_expandable_reservations
mem_alloc.reservation = int(self.mem_reservation)
mem_alloc_shares = vim.SharesInfo()
mem_alloc_shares.level = self.mem_shares
mem_alloc.shares = mem_alloc_shares
rp_spec.memoryAllocation = mem_alloc
self.dc_obj = find_datacenter_by_name(self.content, self.datacenter)
self.cluster_obj = find_cluster_by_name_datacenter(self.dc_obj, self.cluster)
        root_resource_pool = self.cluster_obj.resourcePool
        task = root_resource_pool.CreateResourcePool(self.resource_pool, rp_spec)
self.module.exit_json(changed = changed)
def check_rp_state(self):
self.host_obj, self.cluster_obj = self.find_host_by_cluster_datacenter()
self.resource_pool_obj = self.select_resource_pool(self.host_obj)
if self.resource_pool_obj is None:
return 'absent'
else:
return 'present'
def main():
argument_spec = vmware_argument_spec()
argument_spec.update(dict(datacenter = dict(required = True, type = 'str'),
cluster = dict(required = True, type = 'str'),
resource_pool = dict(required=True, type='str'),
hostname = dict(required = True, type = 'str'),
username = dict(required = True, type = 'str'),
password = dict(required = True, type = 'str', no_log = True),
                              mem_shares = dict(type = 'str', default = "normal", choices = ['high', 'custom', 'normal', 'low']),
                              mem_limit = dict(type = 'int', default = -1),
                              mem_reservation = dict(type = 'int', default = 0),
                              mem_expandable_reservations = dict(type = 'bool', default = True),
                              cpu_shares = dict(type = 'str', default = "normal", choices = ['high', 'custom', 'normal', 'low']),
                              cpu_limit = dict(type = 'int', default = -1),
                              cpu_reservation = dict(type = 'int', default = 0),
                              cpu_expandable_reservations = dict(type = 'bool', default = True),
state = dict(default = 'present', choices = ['present', 'absent'], type = 'str')))
module = AnsibleModule(argument_spec = argument_spec, supports_check_mode = True)
if not HAS_PYVMOMI:
module.fail_json(msg = 'pyvmomi is required for this module')
vmware_rp = VMwareResourcePool(module)
vmware_rp.process_state()
if __name__ == '__main__':
main()
|
varunagrawal/azure-services
|
refs/heads/master
|
varunagrawal/VarunWeb/env/Lib/site-packages/django/contrib/gis/tests/geoadmin/urls.py
|
383
|
from django.conf.urls import patterns, include
from django.contrib import admin
urlpatterns = patterns('',
(r'^admin/', include(admin.site.urls)),
)
|
fengyie007/shadowsocks
|
refs/heads/master
|
shadowsocks/shell.py
|
2
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2015 clowwindy
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import, division, print_function, \
with_statement
import os
import json
import sys
import getopt
import logging
import traceback
from functools import wraps
from shadowsocks.common import to_bytes, to_str, IPNetwork
from shadowsocks import cryptor
VERBOSE_LEVEL = 5
verbose = 0
def check_python():
info = sys.version_info
if info[0] == 2 and not info[1] >= 6:
print('Python 2.6+ required')
sys.exit(1)
elif info[0] == 3 and not info[1] >= 3:
print('Python 3.3+ required')
sys.exit(1)
elif info[0] not in [2, 3]:
print('Python version not supported')
sys.exit(1)
def print_exception(e):
global verbose
logging.error(e)
if verbose > 0:
import traceback
traceback.print_exc()
def exception_handle(self_, err_msg=None, exit_code=None,
destroy=False, conn_err=False):
# self_: if function passes self as first arg
def process_exception(e, self=None):
print_exception(e)
if err_msg:
logging.error(err_msg)
if exit_code:
sys.exit(1)
if not self_:
return
if conn_err:
addr, port = self._client_address[0], self._client_address[1]
logging.error('%s when handling connection from %s:%d' %
(e, addr, port))
if self._config['verbose']:
traceback.print_exc()
if destroy:
self.destroy()
def decorator(func):
if self_:
@wraps(func)
def wrapper(self, *args, **kwargs):
try:
func(self, *args, **kwargs)
except Exception as e:
process_exception(e, self)
else:
@wraps(func)
def wrapper(*args, **kwargs):
try:
func(*args, **kwargs)
except Exception as e:
process_exception(e)
return wrapper
return decorator
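# A minimal usage sketch for the decorator above (the handler method and its
# class are hypothetical, not part of this module):
#
#     @exception_handle(self_=True, conn_err=True, destroy=True)
#     def handle_event(self, sock, fd, event):
#         ...  # exceptions are logged with the client address, then destroy()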
def print_shadowsocks():
version = ''
try:
import pkg_resources
version = pkg_resources.get_distribution('shadowsocks').version
except Exception:
pass
print('Shadowsocks %s' % version)
def find_config():
config_path = 'config.json'
if os.path.exists(config_path):
return config_path
config_path = os.path.join(os.path.dirname(__file__), '../', 'config.json')
if os.path.exists(config_path):
return config_path
return None
def check_config(config, is_local):
if config.get('daemon', None) == 'stop':
# no need to specify configuration for daemon stop
return
if is_local:
if config.get('server', None) is None:
logging.error('server addr not specified')
print_local_help()
sys.exit(2)
else:
config['server'] = to_str(config['server'])
if config.get('tunnel_remote', None) is None:
logging.error('tunnel_remote addr not specified')
print_local_help()
sys.exit(2)
else:
config['tunnel_remote'] = to_str(config['tunnel_remote'])
else:
config['server'] = to_str(config.get('server', '0.0.0.0'))
try:
config['forbidden_ip'] = \
IPNetwork(config.get('forbidden_ip', '127.0.0.0/8,::1/128'))
except Exception as e:
logging.error(e)
sys.exit(2)
if is_local and not config.get('password', None):
logging.error('password not specified')
print_help(is_local)
sys.exit(2)
if not is_local and not config.get('password', None) \
and not config.get('port_password', None) \
and not config.get('manager_address'):
logging.error('password or port_password not specified')
print_help(is_local)
sys.exit(2)
if 'local_port' in config:
config['local_port'] = int(config['local_port'])
if 'server_port' in config and type(config['server_port']) != list:
config['server_port'] = int(config['server_port'])
if 'tunnel_remote_port' in config:
config['tunnel_remote_port'] = int(config['tunnel_remote_port'])
if 'tunnel_port' in config:
config['tunnel_port'] = int(config['tunnel_port'])
if config.get('local_address', '') in [b'0.0.0.0']:
logging.warn('warning: local set to listen on 0.0.0.0, it\'s not safe')
if config.get('server', '') in ['127.0.0.1', 'localhost']:
logging.warn('warning: server set to listen on %s:%s, are you sure?' %
(to_str(config['server']), config['server_port']))
if (config.get('method', '') or '').lower() == 'table':
logging.warn('warning: table is not safe; please use a safer cipher, '
'like AES-256-CFB')
if (config.get('method', '') or '').lower() == 'rc4':
logging.warn('warning: RC4 is not safe; please use a safer cipher, '
'like AES-256-CFB')
if config.get('timeout', 300) < 100:
logging.warn('warning: your timeout %d seems too short' %
int(config.get('timeout')))
if config.get('timeout', 300) > 600:
logging.warn('warning: your timeout %d seems too long' %
int(config.get('timeout')))
if config.get('password') in [b'mypassword']:
logging.error('DON\'T USE DEFAULT PASSWORD! Please change it in your '
'config.json!')
sys.exit(1)
if config.get('user', None) is not None:
if os.name != 'posix':
logging.error('user can be used only on Unix')
sys.exit(1)
if config.get('dns_server', None) is not None:
if type(config['dns_server']) != list:
config['dns_server'] = to_str(config['dns_server'])
logging.info('Specified DNS server: %s' % config['dns_server'])
cryptor.try_cipher(config['password'], config['method'])
def get_config(is_local):
global verbose
logging.basicConfig(level=logging.INFO,
format='%(levelname)-s: %(message)s')
if is_local:
shortopts = 'hd:s:b:p:k:l:m:c:t:vqa'
longopts = ['help', 'fast-open', 'pid-file=', 'log-file=', 'user=',
'version']
else:
shortopts = 'hd:s:p:k:m:c:t:vqa'
longopts = ['help', 'fast-open', 'pid-file=', 'log-file=', 'workers=',
'forbidden-ip=', 'user=', 'manager-address=', 'version',
'prefer-ipv6']
try:
config_path = find_config()
optlist, args = getopt.getopt(sys.argv[1:], shortopts, longopts)
for key, value in optlist:
if key == '-c':
config_path = value
if config_path:
logging.info('loading config from %s' % config_path)
with open(config_path, 'rb') as f:
try:
config = parse_json_in_str(f.read().decode('utf8'))
except ValueError as e:
                    logging.error('found an error in config.json: %s', e)
sys.exit(1)
else:
config = {}
v_count = 0
for key, value in optlist:
if key == '-p':
config['server_port'] = int(value)
elif key == '-k':
config['password'] = to_bytes(value)
elif key == '-l':
config['local_port'] = int(value)
elif key == '-s':
config['server'] = to_str(value)
elif key == '-m':
config['method'] = to_str(value)
elif key == '-b':
config['local_address'] = to_str(value)
elif key == '-v':
v_count += 1
# '-vv' turns on more verbose mode
config['verbose'] = v_count
elif key == '-a':
config['one_time_auth'] = True
elif key == '-t':
config['timeout'] = int(value)
elif key == '--fast-open':
config['fast_open'] = True
elif key == '--workers':
config['workers'] = int(value)
elif key == '--manager-address':
config['manager_address'] = value
elif key == '--user':
config['user'] = to_str(value)
elif key == '--forbidden-ip':
config['forbidden_ip'] = to_str(value).split(',')
elif key in ('-h', '--help'):
if is_local:
print_local_help()
else:
print_server_help()
sys.exit(0)
elif key == '--version':
print_shadowsocks()
sys.exit(0)
elif key == '-d':
config['daemon'] = to_str(value)
elif key == '--pid-file':
config['pid-file'] = to_str(value)
elif key == '--log-file':
config['log-file'] = to_str(value)
elif key == '-q':
v_count -= 1
config['verbose'] = v_count
elif key == '--prefer-ipv6':
config['prefer_ipv6'] = True
except getopt.GetoptError as e:
print(e, file=sys.stderr)
print_help(is_local)
sys.exit(2)
if not config:
logging.error('config not specified')
print_help(is_local)
sys.exit(2)
config['password'] = to_bytes(config.get('password', b''))
config['method'] = to_str(config.get('method', 'aes-256-cfb'))
config['port_password'] = config.get('port_password', None)
config['timeout'] = int(config.get('timeout', 300))
config['fast_open'] = config.get('fast_open', False)
config['workers'] = config.get('workers', 1)
config['pid-file'] = config.get('pid-file', '/var/run/shadowsocks.pid')
config['log-file'] = config.get('log-file', '/var/log/shadowsocks.log')
config['verbose'] = config.get('verbose', False)
config['local_address'] = to_str(config.get('local_address', '127.0.0.1'))
config['local_port'] = config.get('local_port', 1080)
config['one_time_auth'] = config.get('one_time_auth', False)
config['prefer_ipv6'] = config.get('prefer_ipv6', False)
config['server_port'] = config.get('server_port', 8388)
config['tunnel_remote'] = to_str(config.get('tunnel_remote', '8.8.8.8'))
config['tunnel_remote_port'] = config.get('tunnel_remote_port', 53)
config['tunnel_port'] = config.get('tunnel_port', 53)
config['dns_server'] = config.get('dns_server', None)
logging.getLogger('').handlers = []
logging.addLevelName(VERBOSE_LEVEL, 'VERBOSE')
if config['verbose'] >= 2:
level = VERBOSE_LEVEL
elif config['verbose'] == 1:
level = logging.DEBUG
elif config['verbose'] == -1:
level = logging.WARN
elif config['verbose'] <= -2:
level = logging.ERROR
else:
level = logging.INFO
verbose = config['verbose']
logging.basicConfig(level=level,
format='%(asctime)s %(levelname)-8s %(message)s',
datefmt='%Y-%m-%d %H:%M:%S')
check_config(config, is_local)
return config
def print_help(is_local):
if is_local:
print_local_help()
else:
print_server_help()
def print_local_help():
print('''usage: sslocal [OPTION]...
A fast tunnel proxy that helps you bypass firewalls.
You can supply configurations via either config file or command line arguments.
Proxy options:
-c CONFIG path to config file
-s SERVER_ADDR server address
-p SERVER_PORT server port, default: 8388
-b LOCAL_ADDR local binding address, default: 127.0.0.1
-l LOCAL_PORT local port, default: 1080
-k PASSWORD password
-m METHOD encryption method, default: aes-256-cfb
Sodium:
chacha20-poly1305, chacha20-ietf-poly1305,
*xchacha20-ietf-poly1305,
sodium:aes-256-gcm,
salsa20, chacha20, chacha20-ietf.
OpenSSL:(* v1.1)
*aes-128-ocb, *aes-192-ocb, *aes-256-ocb,
aes-128-gcm, aes-192-gcm, aes-256-gcm,
aes-128-cfb, aes-192-cfb, aes-256-cfb,
aes-128-ctr, aes-192-ctr, aes-256-ctr,
camellia-128-cfb, camellia-192-cfb,
camellia-256-cfb,
bf-cfb, cast5-cfb, des-cfb, idea-cfb,
rc2-cfb, seed-cfb,
rc4, rc4-md5, table.
-t TIMEOUT timeout in seconds, default: 300
-a ONE_TIME_AUTH one time auth
--fast-open use TCP_FASTOPEN, requires Linux 3.7+
General options:
-h, --help show this help message and exit
-d start/stop/restart daemon mode
--pid-file PID_FILE pid file for daemon mode
--log-file LOG_FILE log file for daemon mode
--user USER username to run as
-v, -vv verbose mode
-q, -qq quiet mode, only show warnings/errors
--version show version information
Online help: <https://github.com/shadowsocks/shadowsocks>
''')
def print_server_help():
print('''usage: ssserver [OPTION]...
A fast tunnel proxy that helps you bypass firewalls.
You can supply configurations via either config file or command line arguments.
Proxy options:
-c CONFIG path to config file
-s SERVER_ADDR server address, default: 0.0.0.0
-p SERVER_PORT server port, default: 8388
-k PASSWORD password
-m METHOD encryption method, default: aes-256-cfb
Sodium:
chacha20-poly1305, chacha20-ietf-poly1305,
*xchacha20-ietf-poly1305,
sodium:aes-256-gcm,
salsa20, chacha20, chacha20-ietf.
OpenSSL:(* v1.1)
*aes-128-ocb, *aes-192-ocb, *aes-256-ocb,
aes-128-gcm, aes-192-gcm, aes-256-gcm,
aes-128-cfb, aes-192-cfb, aes-256-cfb,
aes-128-ctr, aes-192-ctr, aes-256-ctr,
camellia-128-cfb, camellia-192-cfb,
camellia-256-cfb,
bf-cfb, cast5-cfb, des-cfb, idea-cfb,
rc2-cfb, seed-cfb,
rc4, rc4-md5, table.
-t TIMEOUT timeout in seconds, default: 300
-a ONE_TIME_AUTH one time auth
--fast-open use TCP_FASTOPEN, requires Linux 3.7+
--workers WORKERS number of workers, available on Unix/Linux
  --forbidden-ip IPLIST comma separated IP list forbidden to connect
--manager-address ADDR optional server manager UDP address, see wiki
--prefer-ipv6 resolve ipv6 address first
General options:
-h, --help show this help message and exit
-d start/stop/restart daemon mode
--pid-file PID_FILE pid file for daemon mode
--log-file LOG_FILE log file for daemon mode
--user USER username to run as
-v, -vv verbose mode
-q, -qq quiet mode, only show warnings/errors
--version show version information
Online help: <https://github.com/shadowsocks/shadowsocks>
''')
def _decode_list(data):
rv = []
for item in data:
if hasattr(item, 'encode'):
item = item.encode('utf-8')
elif isinstance(item, list):
item = _decode_list(item)
elif isinstance(item, dict):
item = _decode_dict(item)
rv.append(item)
return rv
def _decode_dict(data):
rv = {}
for key, value in data.items():
if hasattr(value, 'encode'):
value = value.encode('utf-8')
elif isinstance(value, list):
value = _decode_list(value)
elif isinstance(value, dict):
value = _decode_dict(value)
rv[key] = value
return rv
def parse_json_in_str(data):
# parse json and convert everything from unicode to str
return json.loads(data, object_hook=_decode_dict)
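# A minimal sketch of the decode helpers above (hypothetical config snippet):
#   parse_json_in_str('{"server": "example.com", "server_port": 8388}')
#   # -> {'server': 'example.com', 'server_port': 8388}, with string values
#   # coming back as str on py2 (bytes on py3)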
|
msebire/intellij-community
|
refs/heads/master
|
python/testData/inspections/PyCompatibilityInspection/underscoresInNumericLiterals.py
|
1
|
# hex
<warning descr="Python version 2.6, 2.7, 3.4, 3.5 do not support underscores in numeric literals">0xCAFE_F00D</warning>
# oct
<warning descr="Python version 2.6, 2.7, 3.4, 3.5 do not support underscores in numeric literals">0o1_23</warning>
<error descr="Python version 3.6 does not support this syntax. It requires '0o' prefix for octal literals"><warning descr="Python version 2.6, 2.7, 3.4, 3.5 do not support underscores in numeric literals"><warning descr="Python version 3.4, 3.5, 3.7 do not support this syntax. It requires '0o' prefix for octal literals">01_23</warning></warning></error>
# bin
<warning descr="Python version 2.6, 2.7, 3.4, 3.5 do not support underscores in numeric literals">0b_0011_1111_0100_1110</warning>
# dec
<warning descr="Python version 2.6, 2.7, 3.4, 3.5 do not support underscores in numeric literals">10_000_000</warning>
# pointfloat
<warning descr="Python version 2.6, 2.7, 3.4, 3.5 do not support underscores in numeric literals">10_00.00_23</warning>
<warning descr="Python version 2.6, 2.7, 3.4, 3.5 do not support underscores in numeric literals">10_00.</warning>
# exponentfloat
<warning descr="Python version 2.6, 2.7, 3.4, 3.5 do not support underscores in numeric literals">10_00.00_23e1_2</warning>
<warning descr="Python version 2.6, 2.7, 3.4, 3.5 do not support underscores in numeric literals">10_00.00_23E1_2</warning>
<warning descr="Python version 2.6, 2.7, 3.4, 3.5 do not support underscores in numeric literals">10_00.e1_2</warning>
<warning descr="Python version 2.6, 2.7, 3.4, 3.5 do not support underscores in numeric literals">10_00.E1_2</warning>
<warning descr="Python version 2.6, 2.7, 3.4, 3.5 do not support underscores in numeric literals">10_00.00_23e+1_2</warning>
<warning descr="Python version 2.6, 2.7, 3.4, 3.5 do not support underscores in numeric literals">10_00.00_23E+1_2</warning>
<warning descr="Python version 2.6, 2.7, 3.4, 3.5 do not support underscores in numeric literals">10_00.e+1_2</warning>
<warning descr="Python version 2.6, 2.7, 3.4, 3.5 do not support underscores in numeric literals">10_00.E+1_2</warning>
<warning descr="Python version 2.6, 2.7, 3.4, 3.5 do not support underscores in numeric literals">10_00.00_23e-1_2</warning>
<warning descr="Python version 2.6, 2.7, 3.4, 3.5 do not support underscores in numeric literals">10_00.00_23E-1_2</warning>
<warning descr="Python version 2.6, 2.7, 3.4, 3.5 do not support underscores in numeric literals">10_00.e-1_2</warning>
<warning descr="Python version 2.6, 2.7, 3.4, 3.5 do not support underscores in numeric literals">10_00.E-1_2</warning>
<warning descr="Python version 2.6, 2.7, 3.4, 3.5 do not support underscores in numeric literals">10_0000_23e1_2</warning>
<warning descr="Python version 2.6, 2.7, 3.4, 3.5 do not support underscores in numeric literals">10_0000_23E1_2</warning>
<warning descr="Python version 2.6, 2.7, 3.4, 3.5 do not support underscores in numeric literals">10_00e1_2</warning>
<warning descr="Python version 2.6, 2.7, 3.4, 3.5 do not support underscores in numeric literals">10_00E1_2</warning>
<warning descr="Python version 2.6, 2.7, 3.4, 3.5 do not support underscores in numeric literals">10_0000_23e+1_2</warning>
<warning descr="Python version 2.6, 2.7, 3.4, 3.5 do not support underscores in numeric literals">10_0000_23E+1_2</warning>
<warning descr="Python version 2.6, 2.7, 3.4, 3.5 do not support underscores in numeric literals">10_00e+1_2</warning>
<warning descr="Python version 2.6, 2.7, 3.4, 3.5 do not support underscores in numeric literals">10_00E+1_2</warning>
<warning descr="Python version 2.6, 2.7, 3.4, 3.5 do not support underscores in numeric literals">10_0000_23e-1_2</warning>
<warning descr="Python version 2.6, 2.7, 3.4, 3.5 do not support underscores in numeric literals">10_0000_23E-1_2</warning>
<warning descr="Python version 2.6, 2.7, 3.4, 3.5 do not support underscores in numeric literals">10_00e-1_2</warning>
<warning descr="Python version 2.6, 2.7, 3.4, 3.5 do not support underscores in numeric literals">10_00E-1_2</warning>
# imag
<warning descr="Python version 2.6, 2.7, 3.4, 3.5 do not support underscores in numeric literals">10_00.00_23j</warning>
<warning descr="Python version 2.6, 2.7, 3.4, 3.5 do not support underscores in numeric literals">10_00.00_23J</warning>
<warning descr="Python version 2.6, 2.7, 3.4, 3.5 do not support underscores in numeric literals">10_00.00_23e1_2j</warning>
<warning descr="Python version 2.6, 2.7, 3.4, 3.5 do not support underscores in numeric literals">10_00.00_23e1_2J</warning>
<warning descr="Python version 2.6, 2.7, 3.4, 3.5 do not support underscores in numeric literals">10_000_000j</warning>
<warning descr="Python version 2.6, 2.7, 3.4, 3.5 do not support underscores in numeric literals">10_000_000J</warning>
|
abhattad4/Digi-Menu
|
refs/heads/master
|
digimenu2/django/core/serializers/xml_serializer.py
|
95
|
"""
XML serializer.
"""
from __future__ import unicode_literals
from xml.dom import pulldom
from xml.sax import handler
from xml.sax.expatreader import ExpatParser as _ExpatParser
from django.apps import apps
from django.conf import settings
from django.core.serializers import base
from django.db import DEFAULT_DB_ALIAS, models
from django.utils.encoding import smart_text
from django.utils.xmlutils import SimplerXMLGenerator
class Serializer(base.Serializer):
"""
Serializes a QuerySet to XML.
"""
def indent(self, level):
if self.options.get('indent', None) is not None:
self.xml.ignorableWhitespace('\n' + ' ' * self.options.get('indent', None) * level)
def start_serialization(self):
"""
Start serialization -- open the XML document and the root element.
"""
self.xml = SimplerXMLGenerator(self.stream, self.options.get("encoding", settings.DEFAULT_CHARSET))
self.xml.startDocument()
self.xml.startElement("django-objects", {"version": "1.0"})
def end_serialization(self):
"""
End serialization -- end the document.
"""
self.indent(0)
self.xml.endElement("django-objects")
self.xml.endDocument()
def start_object(self, obj):
"""
Called as each object is handled.
"""
if not hasattr(obj, "_meta"):
raise base.SerializationError("Non-model object (%s) encountered during serialization" % type(obj))
self.indent(1)
attrs = {"model": smart_text(obj._meta)}
if not self.use_natural_primary_keys or not hasattr(obj, 'natural_key'):
obj_pk = obj._get_pk_val()
if obj_pk is not None:
attrs['pk'] = smart_text(obj_pk)
self.xml.startElement("object", attrs)
def end_object(self, obj):
"""
Called after handling all fields for an object.
"""
self.indent(1)
self.xml.endElement("object")
def handle_field(self, obj, field):
"""
Called to handle each field on an object (except for ForeignKeys and
ManyToManyFields)
"""
self.indent(2)
self.xml.startElement("field", {
"name": field.name,
"type": field.get_internal_type()
})
# Get a "string version" of the object's data.
if getattr(obj, field.name) is not None:
self.xml.characters(field.value_to_string(obj))
else:
self.xml.addQuickElement("None")
self.xml.endElement("field")
def handle_fk_field(self, obj, field):
"""
Called to handle a ForeignKey (we need to treat them slightly
differently from regular fields).
"""
self._start_relational_field(field)
related_att = getattr(obj, field.get_attname())
if related_att is not None:
if self.use_natural_foreign_keys and hasattr(field.rel.to, 'natural_key'):
related = getattr(obj, field.name)
# If related object has a natural key, use it
related = related.natural_key()
# Iterable natural keys are rolled out as subelements
for key_value in related:
self.xml.startElement("natural", {})
self.xml.characters(smart_text(key_value))
self.xml.endElement("natural")
else:
self.xml.characters(smart_text(related_att))
else:
self.xml.addQuickElement("None")
self.xml.endElement("field")
def handle_m2m_field(self, obj, field):
"""
Called to handle a ManyToManyField. Related objects are only
serialized as references to the object's PK (i.e. the related *data*
is not dumped, just the relation).
"""
if field.rel.through._meta.auto_created:
self._start_relational_field(field)
if self.use_natural_foreign_keys and hasattr(field.rel.to, 'natural_key'):
# If the objects in the m2m have a natural key, use it
def handle_m2m(value):
natural = value.natural_key()
# Iterable natural keys are rolled out as subelements
self.xml.startElement("object", {})
for key_value in natural:
self.xml.startElement("natural", {})
self.xml.characters(smart_text(key_value))
self.xml.endElement("natural")
self.xml.endElement("object")
else:
def handle_m2m(value):
self.xml.addQuickElement("object", attrs={
'pk': smart_text(value._get_pk_val())
})
for relobj in getattr(obj, field.name).iterator():
handle_m2m(relobj)
self.xml.endElement("field")
def _start_relational_field(self, field):
"""
Helper to output the <field> element for relational fields
"""
self.indent(2)
self.xml.startElement("field", {
"name": field.name,
"rel": field.rel.__class__.__name__,
"to": smart_text(field.rel.to._meta),
})
class Deserializer(base.Deserializer):
"""
Deserialize XML.
"""
def __init__(self, stream_or_string, **options):
super(Deserializer, self).__init__(stream_or_string, **options)
self.event_stream = pulldom.parse(self.stream, self._make_parser())
self.db = options.pop('using', DEFAULT_DB_ALIAS)
self.ignore = options.pop('ignorenonexistent', False)
def _make_parser(self):
"""Create a hardened XML parser (no custom/external entities)."""
return DefusedExpatParser()
def __next__(self):
for event, node in self.event_stream:
if event == "START_ELEMENT" and node.nodeName == "object":
self.event_stream.expandNode(node)
return self._handle_object(node)
raise StopIteration
def _handle_object(self, node):
"""
Convert an <object> node to a DeserializedObject.
"""
# Look up the model using the model loading mechanism. If this fails,
# bail.
Model = self._get_model_from_node(node, "model")
# Start building a data dictionary from the object.
data = {}
if node.hasAttribute('pk'):
data[Model._meta.pk.attname] = Model._meta.pk.to_python(
node.getAttribute('pk'))
# Also start building a dict of m2m data (this is saved as
# {m2m_accessor_attribute : [list_of_related_objects]})
m2m_data = {}
field_names = {f.name for f in Model._meta.get_fields()}
# Deserialize each field.
for field_node in node.getElementsByTagName("field"):
# If the field is missing the name attribute, bail (are you
# sensing a pattern here?)
field_name = field_node.getAttribute("name")
if not field_name:
raise base.DeserializationError("<field> node is missing the 'name' attribute")
# Get the field from the Model. This will raise a
# FieldDoesNotExist if, well, the field doesn't exist, which will
# be propagated correctly unless ignorenonexistent=True is used.
if self.ignore and field_name not in field_names:
continue
field = Model._meta.get_field(field_name)
# As is usually the case, relation fields get the special treatment.
if field.rel and isinstance(field.rel, models.ManyToManyRel):
m2m_data[field.name] = self._handle_m2m_field_node(field_node, field)
elif field.rel and isinstance(field.rel, models.ManyToOneRel):
data[field.attname] = self._handle_fk_field_node(field_node, field)
else:
if field_node.getElementsByTagName('None'):
value = None
else:
value = field.to_python(getInnerText(field_node).strip())
data[field.name] = value
obj = base.build_instance(Model, data, self.db)
# Return a DeserializedObject so that the m2m data has a place to live.
return base.DeserializedObject(obj, m2m_data)
def _handle_fk_field_node(self, node, field):
"""
Handle a <field> node for a ForeignKey
"""
# Check if there is a child node named 'None', returning None if so.
if node.getElementsByTagName('None'):
return None
else:
if hasattr(field.rel.to._default_manager, 'get_by_natural_key'):
keys = node.getElementsByTagName('natural')
if keys:
# If there are 'natural' subelements, it must be a natural key
field_value = [getInnerText(k).strip() for k in keys]
obj = field.rel.to._default_manager.db_manager(self.db).get_by_natural_key(*field_value)
obj_pk = getattr(obj, field.rel.field_name)
# If this is a natural foreign key to an object that
# has a FK/O2O as the foreign key, use the FK value
if field.rel.to._meta.pk.rel:
obj_pk = obj_pk.pk
else:
# Otherwise, treat like a normal PK
field_value = getInnerText(node).strip()
obj_pk = field.rel.to._meta.get_field(field.rel.field_name).to_python(field_value)
return obj_pk
else:
field_value = getInnerText(node).strip()
return field.rel.to._meta.get_field(field.rel.field_name).to_python(field_value)
def _handle_m2m_field_node(self, node, field):
"""
Handle a <field> node for a ManyToManyField.
"""
if hasattr(field.rel.to._default_manager, 'get_by_natural_key'):
def m2m_convert(n):
keys = n.getElementsByTagName('natural')
if keys:
# If there are 'natural' subelements, it must be a natural key
field_value = [getInnerText(k).strip() for k in keys]
obj_pk = field.rel.to._default_manager.db_manager(self.db).get_by_natural_key(*field_value).pk
else:
# Otherwise, treat like a normal PK value.
obj_pk = field.rel.to._meta.pk.to_python(n.getAttribute('pk'))
return obj_pk
else:
m2m_convert = lambda n: field.rel.to._meta.pk.to_python(n.getAttribute('pk'))
return [m2m_convert(c) for c in node.getElementsByTagName("object")]
def _get_model_from_node(self, node, attr):
"""
Helper to look up a model from a <object model=...> or a <field
rel=... to=...> node.
"""
model_identifier = node.getAttribute(attr)
if not model_identifier:
raise base.DeserializationError(
"<%s> node is missing the required '%s' attribute"
% (node.nodeName, attr))
try:
return apps.get_model(model_identifier)
except (LookupError, TypeError):
raise base.DeserializationError(
"<%s> node has invalid model identifier: '%s'"
% (node.nodeName, model_identifier))
def getInnerText(node):
"""
Get all the inner text of a DOM node (recursively).
"""
# inspired by http://mail.python.org/pipermail/xml-sig/2005-March/011022.html
inner_text = []
for child in node.childNodes:
if child.nodeType == child.TEXT_NODE or child.nodeType == child.CDATA_SECTION_NODE:
inner_text.append(child.data)
elif child.nodeType == child.ELEMENT_NODE:
inner_text.extend(getInnerText(child))
else:
pass
return "".join(inner_text)
# Below code based on Christian Heimes' defusedxml
class DefusedExpatParser(_ExpatParser):
"""
An expat parser hardened against XML bomb attacks.
    Forbids DTDs and external entity references.
"""
def __init__(self, *args, **kwargs):
_ExpatParser.__init__(self, *args, **kwargs)
self.setFeature(handler.feature_external_ges, False)
self.setFeature(handler.feature_external_pes, False)
def start_doctype_decl(self, name, sysid, pubid, has_internal_subset):
raise DTDForbidden(name, sysid, pubid)
def entity_decl(self, name, is_parameter_entity, value, base,
sysid, pubid, notation_name):
raise EntitiesForbidden(name, value, base, sysid, pubid, notation_name)
def unparsed_entity_decl(self, name, base, sysid, pubid, notation_name):
# expat 1.2
raise EntitiesForbidden(name, None, base, sysid, pubid, notation_name)
def external_entity_ref_handler(self, context, base, sysid, pubid):
raise ExternalReferenceForbidden(context, base, sysid, pubid)
def reset(self):
_ExpatParser.reset(self)
parser = self._parser
parser.StartDoctypeDeclHandler = self.start_doctype_decl
parser.EntityDeclHandler = self.entity_decl
parser.UnparsedEntityDeclHandler = self.unparsed_entity_decl
parser.ExternalEntityRefHandler = self.external_entity_ref_handler
class DefusedXmlException(ValueError):
"""Base exception."""
def __repr__(self):
return str(self)
class DTDForbidden(DefusedXmlException):
"""Document type definition is forbidden."""
def __init__(self, name, sysid, pubid):
super(DTDForbidden, self).__init__()
self.name = name
self.sysid = sysid
self.pubid = pubid
def __str__(self):
tpl = "DTDForbidden(name='{}', system_id={!r}, public_id={!r})"
return tpl.format(self.name, self.sysid, self.pubid)
class EntitiesForbidden(DefusedXmlException):
"""Entity definition is forbidden."""
def __init__(self, name, value, base, sysid, pubid, notation_name):
super(EntitiesForbidden, self).__init__()
self.name = name
self.value = value
self.base = base
self.sysid = sysid
self.pubid = pubid
self.notation_name = notation_name
def __str__(self):
tpl = "EntitiesForbidden(name='{}', system_id={!r}, public_id={!r})"
return tpl.format(self.name, self.sysid, self.pubid)
class ExternalReferenceForbidden(DefusedXmlException):
"""Resolving an external reference is forbidden."""
def __init__(self, context, base, sysid, pubid):
super(ExternalReferenceForbidden, self).__init__()
self.context = context
self.base = base
self.sysid = sysid
self.pubid = pubid
def __str__(self):
tpl = "ExternalReferenceForbidden(system_id='{}', public_id={})"
return tpl.format(self.sysid, self.pubid)
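# A minimal sketch of the hardening above (hypothetical document; reuses the
# pulldom import at the top of this module):
#
#     stream = pulldom.parseString("<?xml version='1.0'?><!DOCTYPE x []><x/>",
#                                  DefusedExpatParser())
#     for event, node in stream:  # raises DTDForbidden at the doctype decl
#         pass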
|
guileschool/beagleboard
|
refs/heads/master
|
u-boot/tools/patman/patman.py
|
4
|
#!/usr/bin/env python
#
# Copyright (c) 2011 The Chromium OS Authors.
#
# SPDX-License-Identifier: GPL-2.0+
#
"""See README for more information"""
from optparse import OptionParser
import os
import re
import sys
import unittest
# Our modules
try:
from patman import checkpatch, command, gitutil, patchstream, \
project, settings, terminal, test
except ImportError:
import checkpatch
import command
import gitutil
import patchstream
import project
import settings
import terminal
import test
parser = OptionParser()
parser.add_option('-H', '--full-help', action='store_true', dest='full_help',
default=False, help='Display the README file')
parser.add_option('-c', '--count', dest='count', type='int',
default=-1, help='Automatically create patches from top n commits')
parser.add_option('-i', '--ignore-errors', action='store_true',
dest='ignore_errors', default=False,
help='Send patches email even if patch errors are found')
parser.add_option('-m', '--no-maintainers', action='store_false',
dest='add_maintainers', default=True,
help="Don't cc the file maintainers automatically")
parser.add_option('-n', '--dry-run', action='store_true', dest='dry_run',
default=False, help="Do a dry run (create but don't email patches)")
parser.add_option('-p', '--project', default=project.DetectProject(),
help="Project name; affects default option values and "
"aliases [default: %default]")
parser.add_option('-r', '--in-reply-to', type='string', action='store',
help="Message ID that this series is in reply to")
parser.add_option('-s', '--start', dest='start', type='int',
default=0, help='Commit to start creating patches from (0 = HEAD)')
parser.add_option('-t', '--ignore-bad-tags', action='store_true',
default=False, help='Ignore bad tags / aliases')
parser.add_option('--test', action='store_true', dest='test',
default=False, help='run tests')
parser.add_option('-v', '--verbose', action='store_true', dest='verbose',
default=False, help='Verbose output of errors and warnings')
parser.add_option('--cc-cmd', dest='cc_cmd', type='string', action='store',
default=None, help='Output cc list for patch file (used by git)')
parser.add_option('--no-check', action='store_false', dest='check_patch',
default=True,
help="Don't check for patch compliance")
parser.add_option('--no-tags', action='store_false', dest='process_tags',
                  default=True, help="Don't process subject tags as aliases")
parser.add_option('-T', '--thread', action='store_true', dest='thread',
default=False, help='Create patches as a single thread')
parser.usage += """
Create patches from commits in a branch, check them and email them as
specified by tags you place in the commits. Use -n to do a dry run first."""
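# A typical invocation sketch (hypothetical branch state; flags as declared
# above): 'patman -n -c 5' creates patches from the top 5 commits, checks
# them, and shows what would be emailed without actually sending anything.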
# Parse options twice: first to get the project and second to handle
# defaults properly (which depends on project).
(options, args) = parser.parse_args()
settings.Setup(parser, options.project, '')
(options, args) = parser.parse_args()
if __name__ != "__main__":
pass
# Run our meagre tests
elif options.test:
import doctest
import func_test
sys.argv = [sys.argv[0]]
result = unittest.TestResult()
for module in (test.TestPatch, func_test.TestFunctional):
suite = unittest.TestLoader().loadTestsFromTestCase(module)
suite.run(result)
for module in ['gitutil', 'settings']:
suite = doctest.DocTestSuite(module)
suite.run(result)
# TODO: Surely we can just 'print' result?
print(result)
for test, err in result.errors:
print(err)
for test, err in result.failures:
print(err)
# Called from git with a patch filename as argument
# Printout a list of additional CC recipients for this patch
elif options.cc_cmd:
fd = open(options.cc_cmd, 'r')
    re_line = re.compile(r'(\S*) (.*)')
for line in fd.readlines():
match = re_line.match(line)
if match and match.group(1) == args[0]:
for cc in match.group(2).split(', '):
cc = cc.strip()
if cc:
print(cc)
fd.close()
elif options.full_help:
pager = os.getenv('PAGER')
if not pager:
pager = 'more'
fname = os.path.join(os.path.dirname(os.path.realpath(sys.argv[0])),
'README')
command.Run(pager, fname)
# Process commits, produce patches files, check them, email them
else:
gitutil.Setup()
if options.count == -1:
# Work out how many patches to send if we can
options.count = gitutil.CountCommitsToBranch() - options.start
col = terminal.Color()
if not options.count:
        msg = 'No commits found to process - please use -c flag'
        sys.exit(col.Color(col.RED, msg))
# Read the metadata from the commits
if options.count:
series = patchstream.GetMetaData(options.start, options.count)
cover_fname, args = gitutil.CreatePatches(options.start, options.count,
series)
# Fix up the patch files to our liking, and insert the cover letter
patchstream.FixPatches(series, args)
if cover_fname and series.get('cover'):
patchstream.InsertCoverLetter(cover_fname, series, options.count)
# Do a few checks on the series
series.DoChecks()
# Check the patches, and run them through 'git am' just to be sure
if options.check_patch:
ok = checkpatch.CheckPatches(options.verbose, args)
else:
ok = True
cc_file = series.MakeCcFile(options.process_tags, cover_fname,
not options.ignore_bad_tags,
options.add_maintainers)
# Email the patches out (giving the user time to check / cancel)
cmd = ''
its_a_go = ok or options.ignore_errors
if its_a_go:
cmd = gitutil.EmailPatches(series, cover_fname, args,
options.dry_run, not options.ignore_bad_tags, cc_file,
in_reply_to=options.in_reply_to, thread=options.thread)
else:
print(col.Color(col.RED, "Not sending emails due to errors/warnings"))
# For a dry run, just show our actions as a sanity check
if options.dry_run:
series.ShowActions(args, cmd, options.process_tags)
if not its_a_go:
print(col.Color(col.RED, "Email would not be sent"))
os.remove(cc_file)
|
aselle/tensorflow
|
refs/heads/master
|
tensorflow/contrib/distributions/python/kernel_tests/geometric_test.py
|
68
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for the Geometric distribution."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from scipy import stats
from tensorflow.contrib.distributions.python.ops import geometric
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import array_ops
from tensorflow.python.platform import test
# In all tests that follow, we use scipy.stats.geom, which
# represents the "Shifted" Geometric distribution. Hence, loc=-1 is passed
# in to each scipy function for testing.
class GeometricTest(test.TestCase):
def testGeometricShape(self):
with self.test_session():
probs = constant_op.constant([.1] * 5)
geom = geometric.Geometric(probs=probs)
self.assertEqual([5,], geom.batch_shape_tensor().eval())
self.assertAllEqual([], geom.event_shape_tensor().eval())
self.assertEqual(tensor_shape.TensorShape([5]), geom.batch_shape)
self.assertEqual(tensor_shape.TensorShape([]), geom.event_shape)
def testInvalidP(self):
invalid_ps = [-.01, -0.01, -2.]
with self.test_session():
with self.assertRaisesOpError("Condition x >= 0"):
geom = geometric.Geometric(probs=invalid_ps, validate_args=True)
geom.probs.eval()
invalid_ps = [1.1, 3., 5.]
with self.test_session():
with self.assertRaisesOpError("Condition x <= y"):
geom = geometric.Geometric(probs=invalid_ps, validate_args=True)
geom.probs.eval()
def testGeomLogPmf(self):
with self.test_session():
batch_size = 6
probs = constant_op.constant([.2] * batch_size)
probs_v = .2
x = np.array([2., 3., 4., 5., 6., 7.], dtype=np.float32)
geom = geometric.Geometric(probs=probs)
expected_log_prob = stats.geom.logpmf(x, probs_v, loc=-1)
log_prob = geom.log_prob(x)
self.assertEqual([6,], log_prob.get_shape())
self.assertAllClose(expected_log_prob, log_prob.eval())
pmf = geom.prob(x)
self.assertEqual([6,], pmf.get_shape())
self.assertAllClose(np.exp(expected_log_prob), pmf.eval())
def testGeometricLogPmf_validate_args(self):
with self.test_session():
batch_size = 6
probs = constant_op.constant([.9] * batch_size)
x = array_ops.placeholder(dtypes.float32, shape=[6])
feed_dict = {x: [2.5, 3.2, 4.3, 5.1, 6., 7.]}
geom = geometric.Geometric(probs=probs, validate_args=True)
with self.assertRaisesOpError("Condition x == y"):
log_prob = geom.log_prob(x)
log_prob.eval(feed_dict=feed_dict)
with self.assertRaisesOpError("Condition x >= 0"):
log_prob = geom.log_prob(np.array([-1.], dtype=np.float32))
log_prob.eval()
geom = geometric.Geometric(probs=probs)
log_prob = geom.log_prob(x)
self.assertEqual([6,], log_prob.get_shape())
pmf = geom.prob(x)
self.assertEqual([6,], pmf.get_shape())
def testGeometricLogPmfMultidimensional(self):
with self.test_session():
batch_size = 6
probs = constant_op.constant([[.2, .3, .5]] * batch_size)
probs_v = np.array([.2, .3, .5])
x = np.array([[2., 3., 4., 5., 6., 7.]], dtype=np.float32).T
geom = geometric.Geometric(probs=probs)
expected_log_prob = stats.geom.logpmf(x, probs_v, loc=-1)
log_prob = geom.log_prob(x)
log_prob_values = log_prob.eval()
self.assertEqual([6, 3], log_prob.get_shape())
self.assertAllClose(expected_log_prob, log_prob_values)
pmf = geom.prob(x)
pmf_values = pmf.eval()
self.assertEqual([6, 3], pmf.get_shape())
self.assertAllClose(np.exp(expected_log_prob), pmf_values)
def testGeometricCDF(self):
with self.test_session():
batch_size = 6
probs = constant_op.constant([[.2, .4, .5]] * batch_size)
probs_v = np.array([.2, .4, .5])
x = np.array([[2., 3., 4., 5.5, 6., 7.]], dtype=np.float32).T
geom = geometric.Geometric(probs=probs)
expected_cdf = stats.geom.cdf(x, probs_v, loc=-1)
cdf = geom.cdf(x)
self.assertEqual([6, 3], cdf.get_shape())
self.assertAllClose(expected_cdf, cdf.eval())
def testGeometricEntropy(self):
with self.test_session():
probs_v = np.array([.1, .3, .25], dtype=np.float32)
geom = geometric.Geometric(probs=probs_v)
expected_entropy = stats.geom.entropy(probs_v, loc=-1)
self.assertEqual([3], geom.entropy().get_shape())
self.assertAllClose(expected_entropy, geom.entropy().eval())
def testGeometricMean(self):
with self.test_session():
probs_v = np.array([.1, .3, .25])
geom = geometric.Geometric(probs=probs_v)
expected_means = stats.geom.mean(probs_v, loc=-1)
self.assertEqual([3], geom.mean().get_shape())
self.assertAllClose(expected_means, geom.mean().eval())
def testGeometricVariance(self):
with self.test_session():
probs_v = np.array([.1, .3, .25])
geom = geometric.Geometric(probs=probs_v)
expected_vars = stats.geom.var(probs_v, loc=-1)
self.assertEqual([3], geom.variance().get_shape())
self.assertAllClose(expected_vars, geom.variance().eval())
def testGeometricStddev(self):
with self.test_session():
probs_v = np.array([.1, .3, .25])
geom = geometric.Geometric(probs=probs_v)
expected_stddevs = stats.geom.std(probs_v, loc=-1)
self.assertEqual([3], geom.stddev().get_shape())
self.assertAllClose(geom.stddev().eval(), expected_stddevs)
def testGeometricMode(self):
with self.test_session():
probs_v = np.array([.1, .3, .25])
geom = geometric.Geometric(probs=probs_v)
self.assertEqual([3,], geom.mode().get_shape())
self.assertAllClose([0.] * 3, geom.mode().eval())
def testGeometricSample(self):
with self.test_session():
probs_v = [.3, .9]
probs = constant_op.constant(probs_v)
n = constant_op.constant(100000)
geom = geometric.Geometric(probs=probs)
samples = geom.sample(n, seed=12345)
self.assertEqual([100000, 2], samples.get_shape())
sample_values = samples.eval()
self.assertFalse(np.any(sample_values < 0.0))
for i in range(2):
self.assertAllClose(sample_values[:, i].mean(),
stats.geom.mean(probs_v[i], loc=-1),
rtol=.02)
self.assertAllClose(sample_values[:, i].var(),
stats.geom.var(probs_v[i], loc=-1),
rtol=.02)
def testGeometricSampleMultiDimensional(self):
with self.test_session():
batch_size = 2
probs_v = [.3, .9]
probs = constant_op.constant([probs_v] * batch_size)
geom = geometric.Geometric(probs=probs)
n = 400000
samples = geom.sample(n, seed=12345)
self.assertEqual([n, batch_size, 2], samples.get_shape())
sample_values = samples.eval()
self.assertFalse(np.any(sample_values < 0.0))
for i in range(2):
self.assertAllClose(sample_values[:, 0, i].mean(),
stats.geom.mean(probs_v[i], loc=-1),
rtol=.02)
self.assertAllClose(sample_values[:, 0, i].var(),
stats.geom.var(probs_v[i], loc=-1),
rtol=.02)
self.assertAllClose(sample_values[:, 1, i].mean(),
stats.geom.mean(probs_v[i], loc=-1),
rtol=.02)
self.assertAllClose(sample_values[:, 1, i].var(),
stats.geom.var(probs_v[i], loc=-1),
rtol=.02)
def testGeometricAtBoundary(self):
with self.test_session():
geom = geometric.Geometric(probs=1., validate_args=True)
x = np.array([0., 2., 3., 4., 5., 6., 7.], dtype=np.float32)
expected_log_prob = stats.geom.logpmf(x, [1.], loc=-1)
# Scipy incorrectly returns nan.
expected_log_prob[np.isnan(expected_log_prob)] = 0.
log_prob = geom.log_prob(x)
self.assertEqual([7,], log_prob.get_shape())
self.assertAllClose(expected_log_prob, log_prob.eval())
pmf = geom.prob(x)
self.assertEqual([7,], pmf.get_shape())
self.assertAllClose(np.exp(expected_log_prob), pmf.eval())
expected_log_cdf = stats.geom.logcdf(x, 1., loc=-1)
log_cdf = geom.log_cdf(x)
self.assertEqual([7,], log_cdf.get_shape())
self.assertAllClose(expected_log_cdf, log_cdf.eval())
cdf = geom.cdf(x)
self.assertEqual([7,], cdf.get_shape())
self.assertAllClose(np.exp(expected_log_cdf), cdf.eval())
expected_mean = stats.geom.mean(1., loc=-1)
self.assertEqual([], geom.mean().get_shape())
self.assertAllClose(expected_mean, geom.mean().eval())
expected_variance = stats.geom.var(1., loc=-1)
self.assertEqual([], geom.variance().get_shape())
self.assertAllClose(expected_variance, geom.variance().eval())
with self.assertRaisesOpError("Entropy is undefined"):
geom.entropy().eval()
if __name__ == "__main__":
test.main()
|
gogobook/Spirit
|
refs/heads/master
|
spirit/core/utils/models_fields.py
|
12
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db.models.fields import SlugField
from django.utils.text import slugify
from django.utils.encoding import smart_text
__all__ = ['AutoSlugField', ]
class AutoSlugField(SlugField):
"""
Auto populates itself from another field.
It behaves like a regular SlugField.
When populate_from is provided it'll populate itself on creation,
only if a slug was not provided.
"""
def __init__(self, *args, **kwargs):
self.populate_from = kwargs.pop('populate_from', None)
super(AutoSlugField, self).__init__(*args, **kwargs)
def pre_save(self, instance, add):
default = super(AutoSlugField, self).pre_save(instance, add)
if default or not add or not self.populate_from:
return default
value = getattr(instance, self.populate_from)
if value is None:
return default
slug = slugify(smart_text(value))[:self.max_length].strip('-')
# Update the model’s attribute
setattr(instance, self.attname, slug)
return slug
def deconstruct(self):
name, path, args, kwargs = super(AutoSlugField, self).deconstruct()
if self.populate_from is not None:
kwargs['populate_from'] = self.populate_from
return name, path, args, kwargs
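# A minimal usage sketch (hypothetical model, not part of this module):
#
#     from django.db import models
#
#     class Topic(models.Model):
#         title = models.CharField(max_length=255)
#         slug = AutoSlugField(populate_from='title', max_length=50, blank=True)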
|