| repo_name (string, 5-100 chars) | ref (string, 12-67 chars) | path (string, 4-244 chars) | copies (string, 1-8 chars) | content (string, 0-1.05M chars, ⌀ = may be null) |
|---|---|---|---|---|
darkleons/odoo | refs/heads/master | addons/product/report/__init__.py | 452 |
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import product_pricelist
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
shinglyu/ns3-h264-svc | refs/heads/master | src/visualizer/visualizer/higcontainer.py | 189 |
import gtk
import gobject
try:
from gazpacho.widgets.base.base import SimpleContainerAdaptor
except ImportError:
pass
#root_library = 'hig'
class HIGContainer(gtk.Bin):
__gtype_name__ = 'HIGContainer'
__gproperties__ = {
'title': (str, 'Group Title', 'the group title',
'', gobject.PARAM_READWRITE|gobject.PARAM_CONSTRUCT),
}
def __init__(self, title=None):
self.__title_text = None
gtk.widget_push_composite_child()
self.__title = gobject.new(gtk.Label, visible=True, xalign=0, yalign=0.5)
self.__indent = gobject.new(gtk.Label, visible=True, label=' ')
gtk.widget_pop_composite_child()
gtk.Bin.__init__(self)
self.__title.set_parent(self)
self.__indent.set_parent(self)
if title is not None:
self.props.title = title
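# Layout note (descriptive): do_size_request/do_size_allocate below put
# the bold title on the top row and the child underneath it, offset
# vertically by a fixed 6-pixel gap and horizontally by the width of the
# two-space indent label.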
def do_size_request(self, requisition):
title_req = gtk.gdk.Rectangle(0, 0, *self.__title.size_request())
indent_req = gtk.gdk.Rectangle(0, 0, *self.__indent.size_request())
if self.child is None:
child_req = gtk.gdk.Rectangle()
else:
child_req = gtk.gdk.Rectangle(0, 0, *self.child.size_request())
requisition.height = (title_req.height + 6 +
max(child_req.height, indent_req.height))
requisition.width = max(title_req.width, indent_req.width + child_req.width)
def do_size_allocate(self, allocation):
self.allocation = allocation
## title
title_req = gtk.gdk.Rectangle(0, 0, *self.__title.get_child_requisition())
title_alloc = gtk.gdk.Rectangle()
title_alloc.x = allocation.x
title_alloc.y = allocation.y
title_alloc.width = min(title_req.width, allocation.width)
title_alloc.height = min(title_req.height, allocation.height)
self.__title.size_allocate(title_alloc)
## child
if self.child is None:
return
indent_req = gtk.gdk.Rectangle(0, 0, *self.__indent.get_child_requisition())
child_req = gtk.gdk.Rectangle(0, 0, *self.child.get_child_requisition())
child_alloc = gtk.gdk.Rectangle()
child_alloc.x = allocation.x + indent_req.width
child_alloc.y = allocation.y + title_alloc.height + 6
child_alloc.width = allocation.width - indent_req.width
child_alloc.height = allocation.height - 6 - title_alloc.height
self.child.size_allocate(child_alloc)
def do_forall(self, internal, callback, data):
if internal:
callback(self.__title, data)
callback(self.__indent, data)
if self.child is not None:
callback(self.child, data)
def do_set_property(self, pspec, value):
if pspec.name == 'title':
self.__title.set_markup('<span weight="bold">%s</span>' %
gobject.markup_escape_text(value))
self.__title_text = value
else:
raise AttributeError, 'unknown property %s' % pspec.name
def do_get_property(self, pspec):
if pspec.name == 'title':
return self.__title_text
else:
raise AttributeError, 'unknown property %s' % pspec.name
if __name__ == '__main__':
frame = gtk.Frame()
group = gobject.new(HIGContainer, title="Hello")
frame.add(group)
check = gtk.CheckButton("foobar")
group.add(check)
w = gtk.Window()
w.add(frame)
w.show_all()
w.connect("destroy", lambda w: gtk.main_quit())
gtk.main()
|
todaychi/hue | refs/heads/master | desktop/core/ext-py/requests-2.10.0/requests/packages/chardet/jpcntx.py | 1776 |
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .compat import wrap_ord
NUM_OF_CATEGORY = 6
DONT_KNOW = -1
ENOUGH_REL_THRESHOLD = 100
MAX_REL_THRESHOLD = 1000
MINIMUM_DATA_THRESHOLD = 4
# This is the hiragana 2-char sequence table; the number in each cell represents its frequency category
jp2CharContext = (
(0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1),
(2,4,0,4,0,3,0,4,0,3,4,4,4,2,4,3,3,4,3,2,3,3,4,2,3,3,3,2,4,1,4,3,3,1,5,4,3,4,3,4,3,5,3,0,3,5,4,2,0,3,1,0,3,3,0,3,3,0,1,1,0,4,3,0,3,3,0,4,0,2,0,3,5,5,5,5,4,0,4,1,0,3,4),
(0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2),
(0,4,0,5,0,5,0,4,0,4,5,4,4,3,5,3,5,1,5,3,4,3,4,4,3,4,3,3,4,3,5,4,4,3,5,5,3,5,5,5,3,5,5,3,4,5,5,3,1,3,2,0,3,4,0,4,2,0,4,2,1,5,3,2,3,5,0,4,0,2,0,5,4,4,5,4,5,0,4,0,0,4,4),
(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
(0,3,0,4,0,3,0,3,0,4,5,4,3,3,3,3,4,3,5,4,4,3,5,4,4,3,4,3,4,4,4,4,5,3,4,4,3,4,5,5,4,5,5,1,4,5,4,3,0,3,3,1,3,3,0,4,4,0,3,3,1,5,3,3,3,5,0,4,0,3,0,4,4,3,4,3,3,0,4,1,1,3,4),
(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
(0,4,0,3,0,3,0,4,0,3,4,4,3,2,2,1,2,1,3,1,3,3,3,3,3,4,3,1,3,3,5,3,3,0,4,3,0,5,4,3,3,5,4,4,3,4,4,5,0,1,2,0,1,2,0,2,2,0,1,0,0,5,2,2,1,4,0,3,0,1,0,4,4,3,5,4,3,0,2,1,0,4,3),
(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
(0,3,0,5,0,4,0,2,1,4,4,2,4,1,4,2,4,2,4,3,3,3,4,3,3,3,3,1,4,2,3,3,3,1,4,4,1,1,1,4,3,3,2,0,2,4,3,2,0,3,3,0,3,1,1,0,0,0,3,3,0,4,2,2,3,4,0,4,0,3,0,4,4,5,3,4,4,0,3,0,0,1,4),
(1,4,0,4,0,4,0,4,0,3,5,4,4,3,4,3,5,4,3,3,4,3,5,4,4,4,4,3,4,2,4,3,3,1,5,4,3,2,4,5,4,5,5,4,4,5,4,4,0,3,2,2,3,3,0,4,3,1,3,2,1,4,3,3,4,5,0,3,0,2,0,4,5,5,4,5,4,0,4,0,0,5,4),
(0,5,0,5,0,4,0,3,0,4,4,3,4,3,3,3,4,0,4,4,4,3,4,3,4,3,3,1,4,2,4,3,4,0,5,4,1,4,5,4,4,5,3,2,4,3,4,3,2,4,1,3,3,3,2,3,2,0,4,3,3,4,3,3,3,4,0,4,0,3,0,4,5,4,4,4,3,0,4,1,0,1,3),
(0,3,1,4,0,3,0,2,0,3,4,4,3,1,4,2,3,3,4,3,4,3,4,3,4,4,3,2,3,1,5,4,4,1,4,4,3,5,4,4,3,5,5,4,3,4,4,3,1,2,3,1,2,2,0,3,2,0,3,1,0,5,3,3,3,4,3,3,3,3,4,4,4,4,5,4,2,0,3,3,2,4,3),
(0,2,0,3,0,1,0,1,0,0,3,2,0,0,2,0,1,0,2,1,3,3,3,1,2,3,1,0,1,0,4,2,1,1,3,3,0,4,3,3,1,4,3,3,0,3,3,2,0,0,0,0,1,0,0,2,0,0,0,0,0,4,1,0,2,3,2,2,2,1,3,3,3,4,4,3,2,0,3,1,0,3,3),
(0,4,0,4,0,3,0,3,0,4,4,4,3,3,3,3,3,3,4,3,4,2,4,3,4,3,3,2,4,3,4,5,4,1,4,5,3,5,4,5,3,5,4,0,3,5,5,3,1,3,3,2,2,3,0,3,4,1,3,3,2,4,3,3,3,4,0,4,0,3,0,4,5,4,4,5,3,0,4,1,0,3,4),
(0,2,0,3,0,3,0,0,0,2,2,2,1,0,1,0,0,0,3,0,3,0,3,0,1,3,1,0,3,1,3,3,3,1,3,3,3,0,1,3,1,3,4,0,0,3,1,1,0,3,2,0,0,0,0,1,3,0,1,0,0,3,3,2,0,3,0,0,0,0,0,3,4,3,4,3,3,0,3,0,0,2,3),
(2,3,0,3,0,2,0,1,0,3,3,4,3,1,3,1,1,1,3,1,4,3,4,3,3,3,0,0,3,1,5,4,3,1,4,3,2,5,5,4,4,4,4,3,3,4,4,4,0,2,1,1,3,2,0,1,2,0,0,1,0,4,1,3,3,3,0,3,0,1,0,4,4,4,5,5,3,0,2,0,0,4,4),
(0,2,0,1,0,3,1,3,0,2,3,3,3,0,3,1,0,0,3,0,3,2,3,1,3,2,1,1,0,0,4,2,1,0,2,3,1,4,3,2,0,4,4,3,1,3,1,3,0,1,0,0,1,0,0,0,1,0,0,0,0,4,1,1,1,2,0,3,0,0,0,3,4,2,4,3,2,0,1,0,0,3,3),
(0,1,0,4,0,5,0,4,0,2,4,4,2,3,3,2,3,3,5,3,3,3,4,3,4,2,3,0,4,3,3,3,4,1,4,3,2,1,5,5,3,4,5,1,3,5,4,2,0,3,3,0,1,3,0,4,2,0,1,3,1,4,3,3,3,3,0,3,0,1,0,3,4,4,4,5,5,0,3,0,1,4,5),
(0,2,0,3,0,3,0,0,0,2,3,1,3,0,4,0,1,1,3,0,3,4,3,2,3,1,0,3,3,2,3,1,3,0,2,3,0,2,1,4,1,2,2,0,0,3,3,0,0,2,0,0,0,1,0,0,0,0,2,2,0,3,2,1,3,3,0,2,0,2,0,0,3,3,1,2,4,0,3,0,2,2,3),
(2,4,0,5,0,4,0,4,0,2,4,4,4,3,4,3,3,3,1,2,4,3,4,3,4,4,5,0,3,3,3,3,2,0,4,3,1,4,3,4,1,4,4,3,3,4,4,3,1,2,3,0,4,2,0,4,1,0,3,3,0,4,3,3,3,4,0,4,0,2,0,3,5,3,4,5,2,0,3,0,0,4,5),
(0,3,0,4,0,1,0,1,0,1,3,2,2,1,3,0,3,0,2,0,2,0,3,0,2,0,0,0,1,0,1,1,0,0,3,1,0,0,0,4,0,3,1,0,2,1,3,0,0,0,0,0,0,3,0,0,0,0,0,0,0,4,2,2,3,1,0,3,0,0,0,1,4,4,4,3,0,0,4,0,0,1,4),
(1,4,1,5,0,3,0,3,0,4,5,4,4,3,5,3,3,4,4,3,4,1,3,3,3,3,2,1,4,1,5,4,3,1,4,4,3,5,4,4,3,5,4,3,3,4,4,4,0,3,3,1,2,3,0,3,1,0,3,3,0,5,4,4,4,4,4,4,3,3,5,4,4,3,3,5,4,0,3,2,0,4,4),
(0,2,0,3,0,1,0,0,0,1,3,3,3,2,4,1,3,0,3,1,3,0,2,2,1,1,0,0,2,0,4,3,1,0,4,3,0,4,4,4,1,4,3,1,1,3,3,1,0,2,0,0,1,3,0,0,0,0,2,0,0,4,3,2,4,3,5,4,3,3,3,4,3,3,4,3,3,0,2,1,0,3,3),
(0,2,0,4,0,3,0,2,0,2,5,5,3,4,4,4,4,1,4,3,3,0,4,3,4,3,1,3,3,2,4,3,0,3,4,3,0,3,4,4,2,4,4,0,4,5,3,3,2,2,1,1,1,2,0,1,5,0,3,3,2,4,3,3,3,4,0,3,0,2,0,4,4,3,5,5,0,0,3,0,2,3,3),
(0,3,0,4,0,3,0,1,0,3,4,3,3,1,3,3,3,0,3,1,3,0,4,3,3,1,1,0,3,0,3,3,0,0,4,4,0,1,5,4,3,3,5,0,3,3,4,3,0,2,0,1,1,1,0,1,3,0,1,2,1,3,3,2,3,3,0,3,0,1,0,1,3,3,4,4,1,0,1,2,2,1,3),
(0,1,0,4,0,4,0,3,0,1,3,3,3,2,3,1,1,0,3,0,3,3,4,3,2,4,2,0,1,0,4,3,2,0,4,3,0,5,3,3,2,4,4,4,3,3,3,4,0,1,3,0,0,1,0,0,1,0,0,0,0,4,2,3,3,3,0,3,0,0,0,4,4,4,5,3,2,0,3,3,0,3,5),
(0,2,0,3,0,0,0,3,0,1,3,0,2,0,0,0,1,0,3,1,1,3,3,0,0,3,0,0,3,0,2,3,1,0,3,1,0,3,3,2,0,4,2,2,0,2,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,2,1,2,0,1,0,1,0,0,0,1,3,1,2,0,0,0,1,0,0,1,4),
(0,3,0,3,0,5,0,1,0,2,4,3,1,3,3,2,1,1,5,2,1,0,5,1,2,0,0,0,3,3,2,2,3,2,4,3,0,0,3,3,1,3,3,0,2,5,3,4,0,3,3,0,1,2,0,2,2,0,3,2,0,2,2,3,3,3,0,2,0,1,0,3,4,4,2,5,4,0,3,0,0,3,5),
(0,3,0,3,0,3,0,1,0,3,3,3,3,0,3,0,2,0,2,1,1,0,2,0,1,0,0,0,2,1,0,0,1,0,3,2,0,0,3,3,1,2,3,1,0,3,3,0,0,1,0,0,0,0,0,2,0,0,0,0,0,2,3,1,2,3,0,3,0,1,0,3,2,1,0,4,3,0,1,1,0,3,3),
(0,4,0,5,0,3,0,3,0,4,5,5,4,3,5,3,4,3,5,3,3,2,5,3,4,4,4,3,4,3,4,5,5,3,4,4,3,4,4,5,4,4,4,3,4,5,5,4,2,3,4,2,3,4,0,3,3,1,4,3,2,4,3,3,5,5,0,3,0,3,0,5,5,5,5,4,4,0,4,0,1,4,4),
(0,4,0,4,0,3,0,3,0,3,5,4,4,2,3,2,5,1,3,2,5,1,4,2,3,2,3,3,4,3,3,3,3,2,5,4,1,3,3,5,3,4,4,0,4,4,3,1,1,3,1,0,2,3,0,2,3,0,3,0,0,4,3,1,3,4,0,3,0,2,0,4,4,4,3,4,5,0,4,0,0,3,4),
(0,3,0,3,0,3,1,2,0,3,4,4,3,3,3,0,2,2,4,3,3,1,3,3,3,1,1,0,3,1,4,3,2,3,4,4,2,4,4,4,3,4,4,3,2,4,4,3,1,3,3,1,3,3,0,4,1,0,2,2,1,4,3,2,3,3,5,4,3,3,5,4,4,3,3,0,4,0,3,2,2,4,4),
(0,2,0,1,0,0,0,0,0,1,2,1,3,0,0,0,0,0,2,0,1,2,1,0,0,1,0,0,0,0,3,0,0,1,0,1,1,3,1,0,0,0,1,1,0,1,1,0,0,0,0,0,2,0,0,0,0,0,0,0,0,1,1,2,2,0,3,4,0,0,0,1,1,0,0,1,0,0,0,0,0,1,1),
(0,1,0,0,0,1,0,0,0,0,4,0,4,1,4,0,3,0,4,0,3,0,4,0,3,0,3,0,4,1,5,1,4,0,0,3,0,5,0,5,2,0,1,0,0,0,2,1,4,0,1,3,0,0,3,0,0,3,1,1,4,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0),
(1,4,0,5,0,3,0,2,0,3,5,4,4,3,4,3,5,3,4,3,3,0,4,3,3,3,3,3,3,2,4,4,3,1,3,4,4,5,4,4,3,4,4,1,3,5,4,3,3,3,1,2,2,3,3,1,3,1,3,3,3,5,3,3,4,5,0,3,0,3,0,3,4,3,4,4,3,0,3,0,2,4,3),
(0,1,0,4,0,0,0,0,0,1,4,0,4,1,4,2,4,0,3,0,1,0,1,0,0,0,0,0,2,0,3,1,1,1,0,3,0,0,0,1,2,1,0,0,1,1,1,1,0,1,0,0,0,1,0,0,3,0,0,0,0,3,2,0,2,2,0,1,0,0,0,2,3,2,3,3,0,0,0,0,2,1,0),
(0,5,1,5,0,3,0,3,0,5,4,4,5,1,5,3,3,0,4,3,4,3,5,3,4,3,3,2,4,3,4,3,3,0,3,3,1,4,4,3,4,4,4,3,4,5,5,3,2,3,1,1,3,3,1,3,1,1,3,3,2,4,5,3,3,5,0,4,0,3,0,4,4,3,5,3,3,0,3,4,0,4,3),
(0,5,0,5,0,3,0,2,0,4,4,3,5,2,4,3,3,3,4,4,4,3,5,3,5,3,3,1,4,0,4,3,3,0,3,3,0,4,4,4,4,5,4,3,3,5,5,3,2,3,1,2,3,2,0,1,0,0,3,2,2,4,4,3,1,5,0,4,0,3,0,4,3,1,3,2,1,0,3,3,0,3,3),
(0,4,0,5,0,5,0,4,0,4,5,5,5,3,4,3,3,2,5,4,4,3,5,3,5,3,4,0,4,3,4,4,3,2,4,4,3,4,5,4,4,5,5,0,3,5,5,4,1,3,3,2,3,3,1,3,1,0,4,3,1,4,4,3,4,5,0,4,0,2,0,4,3,4,4,3,3,0,4,0,0,5,5),
(0,4,0,4,0,5,0,1,1,3,3,4,4,3,4,1,3,0,5,1,3,0,3,1,3,1,1,0,3,0,3,3,4,0,4,3,0,4,4,4,3,4,4,0,3,5,4,1,0,3,0,0,2,3,0,3,1,0,3,1,0,3,2,1,3,5,0,3,0,1,0,3,2,3,3,4,4,0,2,2,0,4,4),
(2,4,0,5,0,4,0,3,0,4,5,5,4,3,5,3,5,3,5,3,5,2,5,3,4,3,3,4,3,4,5,3,2,1,5,4,3,2,3,4,5,3,4,1,2,5,4,3,0,3,3,0,3,2,0,2,3,0,4,1,0,3,4,3,3,5,0,3,0,1,0,4,5,5,5,4,3,0,4,2,0,3,5),
(0,5,0,4,0,4,0,2,0,5,4,3,4,3,4,3,3,3,4,3,4,2,5,3,5,3,4,1,4,3,4,4,4,0,3,5,0,4,4,4,4,5,3,1,3,4,5,3,3,3,3,3,3,3,0,2,2,0,3,3,2,4,3,3,3,5,3,4,1,3,3,5,3,2,0,0,0,0,4,3,1,3,3),
(0,1,0,3,0,3,0,1,0,1,3,3,3,2,3,3,3,0,3,0,0,0,3,1,3,0,0,0,2,2,2,3,0,0,3,2,0,1,2,4,1,3,3,0,0,3,3,3,0,1,0,0,2,1,0,0,3,0,3,1,0,3,0,0,1,3,0,2,0,1,0,3,3,1,3,3,0,0,1,1,0,3,3),
(0,2,0,3,0,2,1,4,0,2,2,3,1,1,3,1,1,0,2,0,3,1,2,3,1,3,0,0,1,0,4,3,2,3,3,3,1,4,2,3,3,3,3,1,0,3,1,4,0,1,1,0,1,2,0,1,1,0,1,1,0,3,1,3,2,2,0,1,0,0,0,2,3,3,3,1,0,0,0,0,0,2,3),
(0,5,0,4,0,5,0,2,0,4,5,5,3,3,4,3,3,1,5,4,4,2,4,4,4,3,4,2,4,3,5,5,4,3,3,4,3,3,5,5,4,5,5,1,3,4,5,3,1,4,3,1,3,3,0,3,3,1,4,3,1,4,5,3,3,5,0,4,0,3,0,5,3,3,1,4,3,0,4,0,1,5,3),
(0,5,0,5,0,4,0,2,0,4,4,3,4,3,3,3,3,3,5,4,4,4,4,4,4,5,3,3,5,2,4,4,4,3,4,4,3,3,4,4,5,5,3,3,4,3,4,3,3,4,3,3,3,3,1,2,2,1,4,3,3,5,4,4,3,4,0,4,0,3,0,4,4,4,4,4,1,0,4,2,0,2,4),
(0,4,0,4,0,3,0,1,0,3,5,2,3,0,3,0,2,1,4,2,3,3,4,1,4,3,3,2,4,1,3,3,3,0,3,3,0,0,3,3,3,5,3,3,3,3,3,2,0,2,0,0,2,0,0,2,0,0,1,0,0,3,1,2,2,3,0,3,0,2,0,4,4,3,3,4,1,0,3,0,0,2,4),
(0,0,0,4,0,0,0,0,0,0,1,0,1,0,2,0,0,0,0,0,1,0,2,0,1,0,0,0,0,0,3,1,3,0,3,2,0,0,0,1,0,3,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,4,0,2,0,0,0,0,0,0,2),
(0,2,1,3,0,2,0,2,0,3,3,3,3,1,3,1,3,3,3,3,3,3,4,2,2,1,2,1,4,0,4,3,1,3,3,3,2,4,3,5,4,3,3,3,3,3,3,3,0,1,3,0,2,0,0,1,0,0,1,0,0,4,2,0,2,3,0,3,3,0,3,3,4,2,3,1,4,0,1,2,0,2,3),
(0,3,0,3,0,1,0,3,0,2,3,3,3,0,3,1,2,0,3,3,2,3,3,2,3,2,3,1,3,0,4,3,2,0,3,3,1,4,3,3,2,3,4,3,1,3,3,1,1,0,1,1,0,1,0,1,0,1,0,0,0,4,1,1,0,3,0,3,1,0,2,3,3,3,3,3,1,0,0,2,0,3,3),
(0,0,0,0,0,0,0,0,0,0,3,0,2,0,3,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,3,0,3,0,3,1,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,2,0,2,3,0,0,0,0,0,0,0,0,3),
(0,2,0,3,1,3,0,3,0,2,3,3,3,1,3,1,3,1,3,1,3,3,3,1,3,0,2,3,1,1,4,3,3,2,3,3,1,2,2,4,1,3,3,0,1,4,2,3,0,1,3,0,3,0,0,1,3,0,2,0,0,3,3,2,1,3,0,3,0,2,0,3,4,4,4,3,1,0,3,0,0,3,3),
(0,2,0,1,0,2,0,0,0,1,3,2,2,1,3,0,1,1,3,0,3,2,3,1,2,0,2,0,1,1,3,3,3,0,3,3,1,1,2,3,2,3,3,1,2,3,2,0,0,1,0,0,0,0,0,0,3,0,1,0,0,2,1,2,1,3,0,3,0,0,0,3,4,4,4,3,2,0,2,0,0,2,4),
(0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,2,2,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,3,1,0,0,0,0,0,0,0,3),
(0,3,0,3,0,2,0,3,0,3,3,3,2,3,2,2,2,0,3,1,3,3,3,2,3,3,0,0,3,0,3,2,2,0,2,3,1,4,3,4,3,3,2,3,1,5,4,4,0,3,1,2,1,3,0,3,1,1,2,0,2,3,1,3,1,3,0,3,0,1,0,3,3,4,4,2,1,0,2,1,0,2,4),
(0,1,0,3,0,1,0,2,0,1,4,2,5,1,4,0,2,0,2,1,3,1,4,0,2,1,0,0,2,1,4,1,1,0,3,3,0,5,1,3,2,3,3,1,0,3,2,3,0,1,0,0,0,0,0,0,1,0,0,0,0,4,0,1,0,3,0,2,0,1,0,3,3,3,4,3,3,0,0,0,0,2,3),
(0,0,0,1,0,0,0,0,0,0,2,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,1,0,0,1,0,0,0,0,0,3),
(0,1,0,3,0,4,0,3,0,2,4,3,1,0,3,2,2,1,3,1,2,2,3,1,1,1,2,1,3,0,1,2,0,1,3,2,1,3,0,5,5,1,0,0,1,3,2,1,0,3,0,0,1,0,0,0,0,0,3,4,0,1,1,1,3,2,0,2,0,1,0,2,3,3,1,2,3,0,1,0,1,0,4),
(0,0,0,1,0,3,0,3,0,2,2,1,0,0,4,0,3,0,3,1,3,0,3,0,3,0,1,0,3,0,3,1,3,0,3,3,0,0,1,2,1,1,1,0,1,2,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,2,2,1,2,0,0,2,0,0,0,0,2,3,3,3,3,0,0,0,0,1,4),
(0,0,0,3,0,3,0,0,0,0,3,1,1,0,3,0,1,0,2,0,1,0,0,0,0,0,0,0,1,0,3,0,2,0,2,3,0,0,2,2,3,1,2,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,2,0,0,0,0,2,3),
(2,4,0,5,0,5,0,4,0,3,4,3,3,3,4,3,3,3,4,3,4,4,5,4,5,5,5,2,3,0,5,5,4,1,5,4,3,1,5,4,3,4,4,3,3,4,3,3,0,3,2,0,2,3,0,3,0,0,3,3,0,5,3,2,3,3,0,3,0,3,0,3,4,5,4,5,3,0,4,3,0,3,4),
(0,3,0,3,0,3,0,3,0,3,3,4,3,2,3,2,3,0,4,3,3,3,3,3,3,3,3,0,3,2,4,3,3,1,3,4,3,4,4,4,3,4,4,3,2,4,4,1,0,2,0,0,1,1,0,2,0,0,3,1,0,5,3,2,1,3,0,3,0,1,2,4,3,2,4,3,3,0,3,2,0,4,4),
(0,3,0,3,0,1,0,0,0,1,4,3,3,2,3,1,3,1,4,2,3,2,4,2,3,4,3,0,2,2,3,3,3,0,3,3,3,0,3,4,1,3,3,0,3,4,3,3,0,1,1,0,1,0,0,0,4,0,3,0,0,3,1,2,1,3,0,4,0,1,0,4,3,3,4,3,3,0,2,0,0,3,3),
(0,3,0,4,0,1,0,3,0,3,4,3,3,0,3,3,3,1,3,1,3,3,4,3,3,3,0,0,3,1,5,3,3,1,3,3,2,5,4,3,3,4,5,3,2,5,3,4,0,1,0,0,0,0,0,2,0,0,1,1,0,4,2,2,1,3,0,3,0,2,0,4,4,3,5,3,2,0,1,1,0,3,4),
(0,5,0,4,0,5,0,2,0,4,4,3,3,2,3,3,3,1,4,3,4,1,5,3,4,3,4,0,4,2,4,3,4,1,5,4,0,4,4,4,4,5,4,1,3,5,4,2,1,4,1,1,3,2,0,3,1,0,3,2,1,4,3,3,3,4,0,4,0,3,0,4,4,4,3,3,3,0,4,2,0,3,4),
(1,4,0,4,0,3,0,1,0,3,3,3,1,1,3,3,2,2,3,3,1,0,3,2,2,1,2,0,3,1,2,1,2,0,3,2,0,2,2,3,3,4,3,0,3,3,1,2,0,1,1,3,1,2,0,0,3,0,1,1,0,3,2,2,3,3,0,3,0,0,0,2,3,3,4,3,3,0,1,0,0,1,4),
(0,4,0,4,0,4,0,0,0,3,4,4,3,1,4,2,3,2,3,3,3,1,4,3,4,0,3,0,4,2,3,3,2,2,5,4,2,1,3,4,3,4,3,1,3,3,4,2,0,2,1,0,3,3,0,0,2,0,3,1,0,4,4,3,4,3,0,4,0,1,0,2,4,4,4,4,4,0,3,2,0,3,3),
(0,0,0,1,0,4,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,3,2,0,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,2),
(0,2,0,3,0,4,0,4,0,1,3,3,3,0,4,0,2,1,2,1,1,1,2,0,3,1,1,0,1,0,3,1,0,0,3,3,2,0,1,1,0,0,0,0,0,1,0,2,0,2,2,0,3,1,0,0,1,0,1,1,0,1,2,0,3,0,0,0,0,1,0,0,3,3,4,3,1,0,1,0,3,0,2),
(0,0,0,3,0,5,0,0,0,0,1,0,2,0,3,1,0,1,3,0,0,0,2,0,0,0,1,0,0,0,1,1,0,0,4,0,0,0,2,3,0,1,4,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,1,0,0,0,0,0,0,0,2,0,0,3,0,0,0,0,0,3),
(0,2,0,5,0,5,0,1,0,2,4,3,3,2,5,1,3,2,3,3,3,0,4,1,2,0,3,0,4,0,2,2,1,1,5,3,0,0,1,4,2,3,2,0,3,3,3,2,0,2,4,1,1,2,0,1,1,0,3,1,0,1,3,1,2,3,0,2,0,0,0,1,3,5,4,4,4,0,3,0,0,1,3),
(0,4,0,5,0,4,0,4,0,4,5,4,3,3,4,3,3,3,4,3,4,4,5,3,4,5,4,2,4,2,3,4,3,1,4,4,1,3,5,4,4,5,5,4,4,5,5,5,2,3,3,1,4,3,1,3,3,0,3,3,1,4,3,4,4,4,0,3,0,4,0,3,3,4,4,5,0,0,4,3,0,4,5),
(0,4,0,4,0,3,0,3,0,3,4,4,4,3,3,2,4,3,4,3,4,3,5,3,4,3,2,1,4,2,4,4,3,1,3,4,2,4,5,5,3,4,5,4,1,5,4,3,0,3,2,2,3,2,1,3,1,0,3,3,3,5,3,3,3,5,4,4,2,3,3,4,3,3,3,2,1,0,3,2,1,4,3),
(0,4,0,5,0,4,0,3,0,3,5,5,3,2,4,3,4,0,5,4,4,1,4,4,4,3,3,3,4,3,5,5,2,3,3,4,1,2,5,5,3,5,5,2,3,5,5,4,0,3,2,0,3,3,1,1,5,1,4,1,0,4,3,2,3,5,0,4,0,3,0,5,4,3,4,3,0,0,4,1,0,4,4),
(1,3,0,4,0,2,0,2,0,2,5,5,3,3,3,3,3,0,4,2,3,4,4,4,3,4,0,0,3,4,5,4,3,3,3,3,2,5,5,4,5,5,5,4,3,5,5,5,1,3,1,0,1,0,0,3,2,0,4,2,0,5,2,3,2,4,1,3,0,3,0,4,5,4,5,4,3,0,4,2,0,5,4),
(0,3,0,4,0,5,0,3,0,3,4,4,3,2,3,2,3,3,3,3,3,2,4,3,3,2,2,0,3,3,3,3,3,1,3,3,3,0,4,4,3,4,4,1,1,4,4,2,0,3,1,0,1,1,0,4,1,0,2,3,1,3,3,1,3,4,0,3,0,1,0,3,1,3,0,0,1,0,2,0,0,4,4),
(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
(0,3,0,3,0,2,0,3,0,1,5,4,3,3,3,1,4,2,1,2,3,4,4,2,4,4,5,0,3,1,4,3,4,0,4,3,3,3,2,3,2,5,3,4,3,2,2,3,0,0,3,0,2,1,0,1,2,0,0,0,0,2,1,1,3,1,0,2,0,4,0,3,4,4,4,5,2,0,2,0,0,1,3),
(0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,1,1,0,0,1,1,0,0,0,4,2,1,1,0,1,0,3,2,0,0,3,1,1,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,1,0,0,0,2,0,0,0,1,4,0,4,2,1,0,0,0,0,0,1),
(0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,1,0,1,0,0,0,0,3,1,0,0,0,2,0,2,1,0,0,1,2,1,0,1,1,0,0,3,0,0,0,0,0,0,0,0,0,0,0,1,3,1,0,0,0,0,0,1,0,0,2,1,0,0,0,0,0,0,0,0,2),
(0,4,0,4,0,4,0,3,0,4,4,3,4,2,4,3,2,0,4,4,4,3,5,3,5,3,3,2,4,2,4,3,4,3,1,4,0,2,3,4,4,4,3,3,3,4,4,4,3,4,1,3,4,3,2,1,2,1,3,3,3,4,4,3,3,5,0,4,0,3,0,4,3,3,3,2,1,0,3,0,0,3,3),
(0,4,0,3,0,3,0,3,0,3,5,5,3,3,3,3,4,3,4,3,3,3,4,4,4,3,3,3,3,4,3,5,3,3,1,3,2,4,5,5,5,5,4,3,4,5,5,3,2,2,3,3,3,3,2,3,3,1,2,3,2,4,3,3,3,4,0,4,0,2,0,4,3,2,2,1,2,0,3,0,0,4,1),
)
class JapaneseContextAnalysis:
def __init__(self):
self.reset()
def reset(self):
self._mTotalRel = 0 # total sequence received
# category counters, each integer counts sequences in its category
self._mRelSample = [0] * NUM_OF_CATEGORY
# if last byte in current buffer is not the last byte of a character,
# we need to know how many bytes to skip in next buffer
self._mNeedToSkipCharNum = 0
self._mLastCharOrder = -1 # The order of previous char
# If this flag is set to True, detection is done and conclusion has
# been made
self._mDone = False
def feed(self, aBuf, aLen):
if self._mDone:
return
# The buffer we got is byte oriented, and a character may span more
# than one buffer. In case the last one or two bytes in the previous
# buffer were not complete, we record how many bytes are needed to
# complete that character and skip those bytes here. We could record
# those bytes as well and analyse the character once it is complete,
# but since one character will not make much difference, simply
# skipping it simplifies our logic and improves performance.
i = self._mNeedToSkipCharNum
while i < aLen:
order, charLen = self.get_order(aBuf[i:i + 2])
i += charLen
if i > aLen:
self._mNeedToSkipCharNum = i - aLen
self._mLastCharOrder = -1
else:
if (order != -1) and (self._mLastCharOrder != -1):
self._mTotalRel += 1
if self._mTotalRel > MAX_REL_THRESHOLD:
self._mDone = True
break
self._mRelSample[jp2CharContext[self._mLastCharOrder][order]] += 1
self._mLastCharOrder = order
def got_enough_data(self):
return self._mTotalRel > ENOUGH_REL_THRESHOLD
def get_confidence(self):
# This is just one way to calculate confidence. It works well for me.
if self._mTotalRel > MINIMUM_DATA_THRESHOLD:
return (self._mTotalRel - self._mRelSample[0]) / self._mTotalRel
else:
return DONT_KNOW
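# Worked example (illustrative): if _mTotalRel is 120 and 30 of those
# bigrams landed in category 0 (improbable sequences), the confidence is
# (120 - 30) / 120 = 0.75.
# get_order below is a base-class stub; the Shift_JIS/EUC-JP subclasses
# override it to return (hiragana_order, char_byte_length).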
def get_order(self, aBuf):
return -1, 1
class SJISContextAnalysis(JapaneseContextAnalysis):
def __init__(self):
self.charset_name = "SHIFT_JIS"
def get_charset_name(self):
return self.charset_name
def get_order(self, aBuf):
if not aBuf:
return -1, 1
# find out current char's byte length
first_char = wrap_ord(aBuf[0])
if ((0x81 <= first_char <= 0x9F) or (0xE0 <= first_char <= 0xFC)):
charLen = 2
if (first_char == 0x87) or (0xFA <= first_char <= 0xFC):
self.charset_name = "CP932"
else:
charLen = 1
# return its order if it is hiragana
if len(aBuf) > 1:
second_char = wrap_ord(aBuf[1])
if (first_char == 202) and (0x9F <= second_char <= 0xF1):
return second_char - 0x9F, charLen
return -1, charLen
class EUCJPContextAnalysis(JapaneseContextAnalysis):
def get_order(self, aBuf):
if not aBuf:
return -1, 1
# find out current char's byte length
first_char = wrap_ord(aBuf[0])
if (first_char == 0x8E) or (0xA1 <= first_char <= 0xFE):
charLen = 2
elif first_char == 0x8F:
charLen = 3
else:
charLen = 1
# return its order if it is hiragana
if len(aBuf) > 1:
second_char = wrap_ord(aBuf[1])
if (first_char == 0xA4) and (0xA1 <= second_char <= 0xF3):
return second_char - 0xA1, charLen
return -1, charLen
# flake8: noqa
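# Minimal usage sketch (illustrative only; assumes EUC-JP encoded input):
#   analyser = EUCJPContextAnalysis()
#   data = u'\u306f\u3044'.encode('euc-jp')  # hiragana "hai"
#   analyser.feed(data, len(data))
#   if analyser.got_enough_data():
#       confidence = analyser.get_confidence()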
|
microcom/odoo | refs/heads/9.0 | addons/website_gengo/__init__.py | 1023 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import controllers
import models
|
popazerty/test | refs/heads/master | lib/python/Tools/RedirectOutput.py | 48 |
import sys
from enigma import ePythonOutput
class EnigmaOutput:
def write(self, data):
if isinstance(data, unicode):
data = data.encode("UTF-8")
ePythonOutput(data)
def flush(self):
pass
sys.stdout = sys.stderr = EnigmaOutput()
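# Descriptive note: once this module is imported, everything written to
# sys.stdout or sys.stderr is routed through ePythonOutput into the
# enigma debug log.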
|
kormat/kazoo | refs/heads/master | kazoo/recipe/partitioner.py | 9 |
"""Zookeeper Partitioner Implementation
:Maintainer: None
:Status: Unknown
:class:`SetPartitioner` implements a partitioning scheme using
Zookeeper for dividing up resources amongst members of a party.
This is useful when there is a set of resources that should each be
accessed by only a single process at a time, and that multiple
processes across a cluster want to divide up among themselves.
Example Use-Case
----------------
- Multiple workers across a cluster need to divide up a list of queues
so that no two workers own the same queue.
"""
import logging
import os
import socket
from functools import partial
from kazoo.exceptions import KazooException, LockTimeout
from kazoo.protocol.states import KazooState
from kazoo.recipe.watchers import PatientChildrenWatch
log = logging.getLogger(__name__)
class PartitionState(object):
"""High level partition state values
.. attribute:: ALLOCATING
The set needs to be partitioned, and may require an existing
partition set to be released before acquiring a new partition
of the set.
.. attribute:: ACQUIRED
The set has been partitioned and acquired.
.. attribute:: RELEASE
The set needs to be repartitioned, and the current partitions
must be released before a new allocation can be made.
.. attribute:: FAILURE
The set partition has failed. This occurs when the maximum
time to partition the set is exceeded or the Zookeeper session
is lost. The partitioner is unusable after this state and must
be recreated.
"""
ALLOCATING = "ALLOCATING"
ACQUIRED = "ACQUIRED"
RELEASE = "RELEASE"
FAILURE = "FAILURE"
class SetPartitioner(object):
"""Partitions a set amongst members of a party
This class will partition a set amongst members of a party such
that each member will be given zero or more items of the set and
each set item will be given to a single member. When new members
enter or leave the party, the set will be re-partitioned amongst
the members.
When the :class:`SetPartitioner` enters the
:attr:`~PartitionState.FAILURE` state, it is unrecoverable
and a new :class:`SetPartitioner` should be created.
Example:
.. code-block:: python
from kazoo.client import KazooClient
client = KazooClient()
qp = client.SetPartitioner(
path='/work_queues', set=('queue-1', 'queue-2', 'queue-3'))
while 1:
if qp.failed:
raise Exception("Lost or unable to acquire partition")
elif qp.release:
qp.release_set()
elif qp.acquired:
for partition in qp:
pass  # Do something with each partition
elif qp.allocating:
qp.wait_for_acquire()
**State Transitions**
When created, the :class:`SetPartitioner` enters the
:attr:`PartitionState.ALLOCATING` state.
:attr:`~PartitionState.ALLOCATING` ->
:attr:`~PartitionState.ACQUIRED`
Set was partitioned successfully, the partition list assigned
is accessible via list/iter methods or calling list() on the
:class:`SetPartitioner` instance.
:attr:`~PartitionState.ALLOCATING` ->
:attr:`~PartitionState.FAILURE`
Allocating the set failed either due to a Zookeeper session
expiration, or failure to acquire the items of the set within
the timeout period.
:attr:`~PartitionState.ACQUIRED` ->
:attr:`~PartitionState.RELEASE`
The members of the party have changed, and the set needs to be
repartitioned. :meth:`SetPartitioner.release` should be called
as soon as possible.
:attr:`~PartitionState.ACQUIRED` ->
:attr:`~PartitionState.FAILURE`
The current partition was lost due to a Zookeeper session
expiration.
:attr:`~PartitionState.RELEASE` ->
:attr:`~PartitionState.ALLOCATING`
The current partition was released and is being re-allocated.
"""
def __init__(self, client, path, set, partition_func=None,
identifier=None, time_boundary=30, max_reaction_time=1,
state_change_event=None):
"""Create a :class:`~SetPartitioner` instance
:param client: A :class:`~kazoo.client.KazooClient` instance.
:param path: The partition path to use.
:param set: The set of items to partition.
:param partition_func: A function to use to decide how to
partition the set.
:param identifier: An identifier to use for this member of the
party when participating. Defaults to the
hostname + process id.
:param time_boundary: How long the party members must be stable
before allocation can complete.
:param max_reaction_time: Maximum reaction time for party
membership changes.
:param state_change_event: An optional Event object that will be set
on every state change.
"""
# Used to differentiate two states with the same names in time
self.state_id = 0
self.state = PartitionState.ALLOCATING
self.state_change_event = state_change_event or \
client.handler.event_object()
self._client = client
self._path = path
self._set = set
self._partition_set = []
self._partition_func = partition_func or self._partitioner
self._identifier = identifier or '%s-%s' % (
socket.getfqdn(), os.getpid())
self._locks = []
self._lock_path = '/'.join([path, 'locks'])
self._party_path = '/'.join([path, 'party'])
self._time_boundary = time_boundary
self._max_reaction_time = max_reaction_time
self._acquire_event = client.handler.event_object()
# Create basic path nodes
client.ensure_path(path)
client.ensure_path(self._lock_path)
client.ensure_path(self._party_path)
# Join the party
self._party = client.ShallowParty(self._party_path,
identifier=self._identifier)
self._party.join()
self._state_change = client.handler.rlock_object()
client.add_listener(self._establish_sessionwatch)
# Now watch the party and set the callback on the async result
# so we know when we're ready
self._child_watching(self._allocate_transition, async=True)
def __iter__(self):
"""Return the partitions in this partition set"""
for partition in self._partition_set:
yield partition
@property
def failed(self):
"""Corresponds to the :attr:`PartitionState.FAILURE` state"""
return self.state == PartitionState.FAILURE
@property
def release(self):
"""Corresponds to the :attr:`PartitionState.RELEASE` state"""
return self.state == PartitionState.RELEASE
@property
def allocating(self):
"""Corresponds to the :attr:`PartitionState.ALLOCATING`
state"""
return self.state == PartitionState.ALLOCATING
@property
def acquired(self):
"""Corresponds to the :attr:`PartitionState.ACQUIRED` state"""
return self.state == PartitionState.ACQUIRED
def wait_for_acquire(self, timeout=30):
"""Wait for the set to be partitioned and acquired
:param timeout: How long to wait before returning.
:type timeout: int
"""
self._acquire_event.wait(timeout)
def release_set(self):
"""Call to release the set
This method begins the step of allocating once the set has
been released.
"""
self._release_locks()
if self._locks: # pragma: nocover
# This shouldn't happen, it means we couldn't release our
# locks, abort
self._fail_out()
return
else:
with self._state_change:
if self.failed:
return
self._set_state(PartitionState.ALLOCATING)
self._child_watching(self._allocate_transition, async=True)
def finish(self):
"""Call to release the set and leave the party"""
self._release_locks()
self._fail_out()
def _fail_out(self):
with self._state_change:
self._set_state(PartitionState.FAILURE)
if self._party.participating:
try:
self._party.leave()
except KazooException: # pragma: nocover
pass
def _allocate_transition(self, result):
"""Called when in allocating mode, and the children settled"""
# Did we get an exception waiting for children to settle?
if result.exception: # pragma: nocover
self._fail_out()
return
children, async_result = result.get()
children_changed = self._client.handler.event_object()
def updated(result):
with self._state_change:
children_changed.set()
if self.acquired:
self._set_state(PartitionState.RELEASE)
with self._state_change:
# We can lose connection during processing the event
if not self.allocating:
return
# Remember the state ID to check later for race conditions
state_id = self.state_id
# updated() will be called when children change
async_result.rawlink(updated)
# Check whether the state has changed during the lock acquisition
# and abort the process if so.
def abort_if_needed():
if self.state_id == state_id:
if children_changed.is_set():
# The party has changed. Repartitioning...
self._abort_lock_acquisition()
return True
else:
return False
else:
if self.allocating or self.acquired:
# The connection was lost and user initiated a new
# allocation process. Abort it to eliminate race
# conditions with locks.
with self._state_change:
self._set_state(PartitionState.RELEASE)
return True
# Split up the set
partition_set = self._partition_func(
self._identifier, list(self._party), self._set)
# Proceed to acquire locks for the working set as needed
for member in partition_set:
lock = self._client.Lock(self._lock_path + '/' + str(member))
while True:
try:
# We mustn't lock without a timeout, because in that case we
# could deadlock if the party state changes during lock
# acquisition.
lock.acquire(timeout=self._max_reaction_time)
except LockTimeout:
if abort_if_needed():
return
except KazooException:
return self.finish()
else:
break
self._locks.append(lock)
if abort_if_needed():
return
# All locks acquired. Time for state transition.
with self._state_change:
if self.state_id == state_id and not children_changed.is_set():
self._partition_set = partition_set
self._set_state(PartitionState.ACQUIRED)
self._acquire_event.set()
return
if not abort_if_needed():
# This mustn't happen. Means a logical error.
self._fail_out()
def _release_locks(self):
"""Attempt to completely remove all the locks"""
self._acquire_event.clear()
for lock in self._locks[:]:
try:
lock.release()
except KazooException: # pragma: nocover
# We proceed to remove as many as possible, and leave
# the ones we couldn't remove
pass
else:
self._locks.remove(lock)
def _abort_lock_acquisition(self):
"""Called during lock acquisition if a party change occurs"""
self._release_locks()
if self._locks:
# This shouldn't happen, it means we couldn't release our
# locks, abort
self._fail_out()
return
self._child_watching(self._allocate_transition, async=True)
def _child_watching(self, func=None, async=False):
"""Called when children are being watched to stabilize
This actually returns immediately; the child watcher spins up a
new thread/greenlet and waits for the party to stabilize before
any callbacks might run.
"""
watcher = PatientChildrenWatch(self._client, self._party_path,
self._time_boundary)
asy = watcher.start()
if func is not None:
# We spin up the function in a separate thread/greenlet
# to ensure that the rawlinks it might use won't be
# blocked
if async:
func = partial(self._client.handler.spawn, func)
asy.rawlink(func)
return asy
def _establish_sessionwatch(self, state):
"""Register ourself to listen for session events, we shut down
if we become lost"""
with self._state_change:
if self.failed:
pass
elif state == KazooState.LOST:
self._client.handler.spawn(self._fail_out)
elif not self.release:
self._set_state(PartitionState.RELEASE)
return state == KazooState.LOST
def _partitioner(self, identifier, members, partitions):
# Ensure consistent order of partitions/members
all_partitions = sorted(partitions)
workers = sorted(members)
i = workers.index(identifier)
# Now return the partition list starting at our location and
# skipping the other workers
return all_partitions[i::len(workers)]
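# Worked example (illustrative): with partitions ['a', 'b', 'c', 'd', 'e']
# and workers ['w1', 'w2'], worker 'w2' has index 1 and receives
# ['b', 'd'] -- every len(workers)-th partition starting at its index.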
def _set_state(self, state):
self.state = state
self.state_id += 1
self.state_change_event.set()
|
ojengwa/django-1 | refs/heads/master | tests/m2m_and_m2o/models.py | 128 |
"""
Many-to-many and many-to-one relationships to the same table
Make sure to set ``related_name`` if you use relationships to the same table.
"""
from __future__ import unicode_literals
from django.db import models
from django.utils import six
from django.utils.encoding import python_2_unicode_compatible
class User(models.Model):
username = models.CharField(max_length=20)
@python_2_unicode_compatible
class Issue(models.Model):
num = models.IntegerField()
cc = models.ManyToManyField(User, blank=True, related_name='test_issue_cc')
client = models.ForeignKey(User, related_name='test_issue_client')
def __str__(self):
return six.text_type(self.num)
class Meta:
ordering = ('num',)
class UnicodeReferenceModel(models.Model):
others = models.ManyToManyField("UnicodeReferenceModel")
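# Illustrative note: with the explicit related_name values above, both
# reverse accessors exist without clashing, e.g. user.test_issue_cc.all()
# and user.test_issue_client.all().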
|
stelligent/ansible-modules-core | refs/heads/devel | files/fetch.py | 94 |
# this is a virtual module that is entirely implemented server side
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: fetch
short_description: Fetches a file from remote nodes
description:
- This module works like M(copy), but in reverse. It is used for fetching
files from remote machines and storing them locally in a file tree,
organized by hostname. Note that this module is written to transfer
log files that might not be present, so a missing remote file won't
be an error unless fail_on_missing is set to 'yes'.
version_added: "0.2"
options:
src:
description:
- The file on the remote system to fetch. This I(must) be a file, not a
directory. Recursive fetching may be supported in a later release.
required: true
default: null
aliases: []
dest:
description:
- A directory to save the file into. For example, if the I(dest)
directory is C(/backup) a I(src) file named C(/etc/profile) on host
C(host.example.com), would be saved into
C(/backup/host.example.com/etc/profile)
required: true
default: null
fail_on_missing:
version_added: "1.1"
description:
- Makes the task fail when the source file is missing.
required: false
choices: [ "yes", "no" ]
default: "no"
validate_checksum:
version_added: "1.4"
description:
- Verify that the source and destination checksums match after the files are fetched.
required: false
choices: [ "yes", "no" ]
default: "yes"
aliases: [ "validate_md5" ]
flat:
version_added: "1.2"
description:
- Allows you to override the default behavior of appending
hostname/path/to/file to the destination. If dest ends with '/', it
will use the basename of the source file, similar to the copy module.
Obviously this is only handy if the filenames are unique.
requirements: []
author:
- "Ansible Core Team"
- "Michael DeHaan"
'''
EXAMPLES = '''
# Store file into /tmp/fetched/host.example.com/tmp/somefile
- fetch: src=/tmp/somefile dest=/tmp/fetched
# Specifying a path directly
- fetch: src=/tmp/somefile dest=/tmp/prefix-{{ ansible_hostname }} flat=yes
# Specifying a destination path
- fetch: src=/tmp/uniquefile dest=/tmp/special/ flat=yes
# Storing in a path relative to the playbook
- fetch: src=/tmp/uniquefile dest=special/prefix-{{ ansible_hostname }} flat=yes
'''
|
iRGBit/Dato-Core | refs/heads/master | src/unity/python/doc/scripts/doxypy-0.4.2.py | 15 |
#!/usr/bin/env python
__applicationName__ = "doxypy"
__blurb__ = """
doxypy is an input filter for Doxygen. It preprocesses python
files so that docstrings of classes and functions are reformatted
into Doxygen-conformant documentation blocks.
"""
__doc__ = __blurb__ + \
"""
In order to make Doxygen preprocess files through doxypy, simply
add the following lines to your Doxyfile:
FILTER_SOURCE_FILES = YES
INPUT_FILTER = "python /path/to/doxypy.py"
"""
__version__ = "0.4.2"
__date__ = "14th October 2009"
__website__ = "http://code.foosel.org/doxypy"
__author__ = (
"Philippe 'demod' Neumann (doxypy at demod dot org)",
"Gina 'foosel' Haeussge (gina at foosel dot net)"
)
__licenseName__ = "GPL v2"
__license__ = """This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import sys
import re
from optparse import OptionParser, OptionGroup
class FSM(object):
"""Implements a finite state machine.
Transitions are given as 4-tuples, consisting of an origin state, a target
state, a condition for the transition (given as a reference to a function
which gets called with a given piece of input) and a pointer to a function
to be called upon the execution of the given transition.
"""
"""
@var transitions holds the transitions
@var current_state holds the current state
@var current_input holds the current input
@var current_transition hold the currently active transition
"""
def __init__(self, start_state=None, transitions=[]):
self.transitions = transitions
self.current_state = start_state
self.current_input = None
self.current_transition = None
def setStartState(self, state):
self.current_state = state
def addTransition(self, from_state, to_state, condition, callback):
self.transitions.append([from_state, to_state, condition, callback])
def makeTransition(self, input):
"""Makes a transition based on the given input.
@param input input to parse by the FSM
"""
for transition in self.transitions:
[from_state, to_state, condition, callback] = transition
if from_state == self.current_state:
match = condition(input)
if match:
self.current_state = to_state
self.current_input = input
self.current_transition = transition
if options.debug:
print >>sys.stderr, "# FSM: executing (%s -> %s) for line '%s'" % (from_state, to_state, input)
callback(match)
return
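# Illustrative FSM usage (hypothetical states and conditions, not part of
# doxypy itself):
#   fsm = FSM("IDLE", [])
#   fsm.addTransition("IDLE", "RUN", lambda line: line == "go",
#                     lambda match: None)
#   fsm.makeTransition("go")  # current_state becomes "RUN"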
class Doxypy(object):
def __init__(self):
string_prefixes = "[uU]?[rR]?"
self.start_single_comment_re = re.compile("^\s*%s(''')" % string_prefixes)
self.end_single_comment_re = re.compile("(''')\s*$")
self.start_double_comment_re = re.compile("^\s*%s(\"\"\")" % string_prefixes)
self.end_double_comment_re = re.compile("(\"\"\")\s*$")
self.single_comment_re = re.compile("^\s*%s(''').*(''')\s*$" % string_prefixes)
self.double_comment_re = re.compile("^\s*%s(\"\"\").*(\"\"\")\s*$" % string_prefixes)
self.defclass_re = re.compile("^(\s*)(def .+:|class .+:)")
self.empty_re = re.compile("^\s*$")
self.hashline_re = re.compile("^\s*#.*$")
self.importline_re = re.compile("^\s*(import |from .+ import)")
self.multiline_defclass_start_re = re.compile("^(\s*)(def|class)(\s.*)?$")
self.multiline_defclass_end_re = re.compile(":\s*$")
## Transition list format
# ["FROM", "TO", condition, action]
transitions = [
### FILEHEAD
# single line comments
["FILEHEAD", "FILEHEAD", self.single_comment_re.search, self.appendCommentLine],
["FILEHEAD", "FILEHEAD", self.double_comment_re.search, self.appendCommentLine],
# multiline comments
["FILEHEAD", "FILEHEAD_COMMENT_SINGLE", self.start_single_comment_re.search, self.appendCommentLine],
["FILEHEAD_COMMENT_SINGLE", "FILEHEAD", self.end_single_comment_re.search, self.appendCommentLine],
["FILEHEAD_COMMENT_SINGLE", "FILEHEAD_COMMENT_SINGLE", self.catchall, self.appendCommentLine],
["FILEHEAD", "FILEHEAD_COMMENT_DOUBLE", self.start_double_comment_re.search, self.appendCommentLine],
["FILEHEAD_COMMENT_DOUBLE", "FILEHEAD", self.end_double_comment_re.search, self.appendCommentLine],
["FILEHEAD_COMMENT_DOUBLE", "FILEHEAD_COMMENT_DOUBLE", self.catchall, self.appendCommentLine],
# other lines
["FILEHEAD", "FILEHEAD", self.empty_re.search, self.appendFileheadLine],
["FILEHEAD", "FILEHEAD", self.hashline_re.search, self.appendFileheadLine],
["FILEHEAD", "FILEHEAD", self.importline_re.search, self.appendFileheadLine],
["FILEHEAD", "DEFCLASS", self.defclass_re.search, self.resetCommentSearch],
["FILEHEAD", "DEFCLASS_MULTI", self.multiline_defclass_start_re.search, self.resetCommentSearch],
["FILEHEAD", "DEFCLASS_BODY", self.catchall, self.appendFileheadLine],
### DEFCLASS
# single line comments
["DEFCLASS", "DEFCLASS_BODY", self.single_comment_re.search, self.appendCommentLine],
["DEFCLASS", "DEFCLASS_BODY", self.double_comment_re.search, self.appendCommentLine],
# multiline comments
["DEFCLASS", "COMMENT_SINGLE", self.start_single_comment_re.search, self.appendCommentLine],
["COMMENT_SINGLE", "DEFCLASS_BODY", self.end_single_comment_re.search, self.appendCommentLine],
["COMMENT_SINGLE", "COMMENT_SINGLE", self.catchall, self.appendCommentLine],
["DEFCLASS", "COMMENT_DOUBLE", self.start_double_comment_re.search, self.appendCommentLine],
["COMMENT_DOUBLE", "DEFCLASS_BODY", self.end_double_comment_re.search, self.appendCommentLine],
["COMMENT_DOUBLE", "COMMENT_DOUBLE", self.catchall, self.appendCommentLine],
# other lines
["DEFCLASS", "DEFCLASS", self.empty_re.search, self.appendDefclassLine],
["DEFCLASS", "DEFCLASS", self.defclass_re.search, self.resetCommentSearch],
["DEFCLASS", "DEFCLASS_MULTI", self.multiline_defclass_start_re.search, self.resetCommentSearch],
["DEFCLASS", "DEFCLASS_BODY", self.catchall, self.stopCommentSearch],
### DEFCLASS_BODY
["DEFCLASS_BODY", "DEFCLASS", self.defclass_re.search, self.startCommentSearch],
["DEFCLASS_BODY", "DEFCLASS_MULTI", self.multiline_defclass_start_re.search, self.startCommentSearch],
["DEFCLASS_BODY", "DEFCLASS_BODY", self.catchall, self.appendNormalLine],
### DEFCLASS_MULTI
["DEFCLASS_MULTI", "DEFCLASS", self.multiline_defclass_end_re.search, self.appendDefclassLine],
["DEFCLASS_MULTI", "DEFCLASS_MULTI", self.catchall, self.appendDefclassLine],
]
self.fsm = FSM("FILEHEAD", transitions)
self.outstream = sys.stdout
self.output = []
self.comment = []
self.filehead = []
self.defclass = []
self.indent = ""
def __closeComment(self):
"""Appends any open comment block and triggering block to the output."""
if options.autobrief:
if len(self.comment) == 1 \
or (len(self.comment) > 2 and self.comment[1].strip() == ''):
self.comment[0] = self.__docstringSummaryToBrief(self.comment[0])
if self.comment:
block = self.makeCommentBlock()
self.output.extend(block)
if self.defclass:
self.output.extend(self.defclass)
def __docstringSummaryToBrief(self, line):
"""Adds \\brief to the docstrings summary line.
A \\brief is prepended, provided no other doxygen command is at the
start of the line.
"""
stripped = line.strip()
if stripped and not stripped[0] in ('@', '\\'):
return "\\brief " + line
else:
return line
def __flushBuffer(self):
"""Flushes the current outputbuffer to the outstream."""
if self.output:
try:
if options.debug:
print >>sys.stderr, "# OUTPUT: ", self.output
print >>self.outstream, "\n".join(self.output)
self.outstream.flush()
except IOError:
# Fix for FS#33. Catches "broken pipe" when doxygen closes
# stdout prematurely upon usage of INPUT_FILTER, INLINE_SOURCES
# and FILTER_SOURCE_FILES.
pass
self.output = []
def catchall(self, input):
"""The catchall-condition, always returns true."""
return True
def resetCommentSearch(self, match):
"""Restarts a new comment search for a different triggering line.
Closes the current commentblock and starts a new comment search.
"""
if options.debug:
print >>sys.stderr, "# CALLBACK: resetCommentSearch"
self.__closeComment()
self.startCommentSearch(match)
def startCommentSearch(self, match):
"""Starts a new comment search.
Saves the triggering line, resets the current comment and saves
the current indentation.
"""
if options.debug:
print >>sys.stderr, "# CALLBACK: startCommentSearch"
self.defclass = [self.fsm.current_input]
self.comment = []
self.indent = match.group(1)
def stopCommentSearch(self, match):
"""Stops a comment search.
Closes the current commentblock, resets the triggering line and
appends the current line to the output.
"""
if options.debug:
print >>sys.stderr, "# CALLBACK: stopCommentSearch"
self.__closeComment()
self.defclass = []
self.output.append(self.fsm.current_input)
def appendFileheadLine(self, match):
"""Appends a line in the FILEHEAD state.
Closes the open comment block, resets it and appends the current line.
"""
if options.debug:
print >>sys.stderr, "# CALLBACK: appendFileheadLine"
self.__closeComment()
self.comment = []
self.output.append(self.fsm.current_input)
def appendCommentLine(self, match):
"""Appends a comment line.
The comment delimiter is removed from multiline starts and ends, as
well as from single-line comments.
"""
if options.debug:
print >>sys.stderr, "# CALLBACK: appendCommentLine"
(from_state, to_state, condition, callback) = self.fsm.current_transition
# single line comment
if (from_state == "DEFCLASS" and to_state == "DEFCLASS_BODY") \
or (from_state == "FILEHEAD" and to_state == "FILEHEAD"):
# remove comment delimiter from begin and end of the line
activeCommentDelim = match.group(1)
line = self.fsm.current_input
self.comment.append(line[line.find(activeCommentDelim)+len(activeCommentDelim):line.rfind(activeCommentDelim)])
if (to_state == "DEFCLASS_BODY"):
self.__closeComment()
self.defclass = []
# multiline start
elif from_state == "DEFCLASS" or from_state == "FILEHEAD":
# remove comment delimiter from begin of the line
activeCommentDelim = match.group(1)
line = self.fsm.current_input
self.comment.append(line[line.find(activeCommentDelim)+len(activeCommentDelim):])
# multiline end
elif to_state == "DEFCLASS_BODY" or to_state == "FILEHEAD":
# remove comment delimiter from end of the line
activeCommentDelim = match.group(1)
line = self.fsm.current_input
self.comment.append(line[0:line.rfind(activeCommentDelim)])
if (to_state == "DEFCLASS_BODY"):
self.__closeComment()
self.defclass = []
# in multiline comment
else:
# just append the comment line
self.comment.append(self.fsm.current_input)
def appendNormalLine(self, match):
"""Appends a line to the output."""
if options.debug:
print >>sys.stderr, "# CALLBACK: appendNormalLine"
self.output.append(self.fsm.current_input)
def appendDefclassLine(self, match):
"""Appends a line to the triggering block."""
if options.debug:
print >>sys.stderr, "# CALLBACK: appendDefclassLine"
self.defclass.append(self.fsm.current_input)
def makeCommentBlock(self):
"""Indents the current comment block with respect to the current
indentation level.
@returns a list of indented comment lines
"""
doxyStart = "##"
commentLines = self.comment
commentLines = map(lambda x: "%s# %s" % (self.indent, x), commentLines)
l = [self.indent + doxyStart]
l.extend(commentLines)
return l
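# Example (illustrative): with indent "    " and comment lines
# ["Summary.", "Detail."], makeCommentBlock returns
# ["    ##", "    # Summary.", "    # Detail."].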
def parse(self, input):
"""Parses a python file given as input string and returns the doxygen-
compatible representation.
@param input the python code to parse
@returns the modified python code
"""
lines = input.split("\n")
for line in lines:
self.fsm.makeTransition(line)
if self.fsm.current_state == "DEFCLASS":
self.__closeComment()
return "\n".join(self.output)
def parseFile(self, filename):
"""Parses a python file given as input string and returns the doxygen-
compatible representation.
@param input the python code to parse
@returns the modified python code
"""
f = open(filename, 'r')
for line in f:
self.parseLine(line.rstrip('\r\n'))
if self.fsm.current_state == "DEFCLASS":
self.__closeComment()
self.__flushBuffer()
f.close()
def parseLine(self, line):
"""Parse one line of python and flush the resulting output to the
outstream.
@param line the python code line to parse
"""
self.fsm.makeTransition(line)
self.__flushBuffer()
def optParse():
"""Parses commandline options."""
parser = OptionParser(prog=__applicationName__, version="%prog " + __version__)
parser.set_usage("%prog [options] filename")
parser.add_option("--autobrief",
action="store_true", dest="autobrief",
help="use the docstring summary line as \\brief description"
)
parser.add_option("--debug",
action="store_true", dest="debug",
help="enable debug output on stderr"
)
## parse options
global options
(options, filename) = parser.parse_args()
if not filename:
print >>sys.stderr, "No filename given."
sys.exit(-1)
return filename[0]
def main():
"""Starts the parser on the file given by the filename as the first
argument on the commandline.
"""
filename = optParse()
fsm = Doxypy()
fsm.parseFile(filename)
if __name__ == "__main__":
main()
|
KISSMonX/micropython | refs/heads/master | tests/bytecode/mp-tests/class2.py | 22 |
class A:
x = 1
y = x + z
A()
|
zabeelbashir/py-gsmmodem | refs/heads/master | test/test_serial_comms.py | 1 |
#!/usr/bin/env python
""" Test suite for gsmmodem.serial_comms """
from __future__ import print_function
import sys, time, unittest, logging
from copy import copy
from . import compat # For Python 2.6 compatibility
import gsmmodem.serial_comms
from gsmmodem.exceptions import TimeoutException
class MockSerialPackage(object):
""" Fake serial package for the GsmModem/SerialComms classes to import during tests """
class Serial():
_REPONSE_TIME = 0.02
""" Mock serial object for use by the GsmModem class during tests """
def __init__(self, *args, **kwargs):
# The default value to read/"return" if responseSequence isn't set up, or None for nothing
#self.defaultResponse = 'OK\r\n'
self.responseSequence = []
self.flushResponseSequence = True
self.writeQueue = []
self._alive = True
self._readQueue = []
self.writeCallbackFunc = None
def read(self, timeout=None):
if len(self._readQueue) > 0:
return self._readQueue.pop(0)
elif len(self.writeQueue) > 0:
self._setupReadValue(self.writeQueue.pop(0))
if len(self._readQueue) > 0:
return self._readQueue.pop(0)
elif self.flushResponseSequence and len(self.responseSequence) > 0:
self._setupReadValue(None)
if timeout != None:
time.sleep(0.001)
# time.sleep(min(timeout, self._REPONSE_TIME))
# if timeout > self._REPONSE_TIME and len(self.writeQueue) == 0:
# time.sleep(timeout - self._REPONSE_TIME)
return ''
else:
while self._alive:
if len(self.writeQueue) > 0:
self._setupReadValue(self.writeQueue.pop(0))
if len(self._readQueue) > 0:
return self._readQueue.pop(0)
# time.sleep(self._REPONSE_TIME)
time.sleep(0.05)
def _setupReadValue(self, command):
if len(self._readQueue) == 0:
if len(self.responseSequence) > 0:
value = self.responseSequence.pop(0)
if type(value) in (float, int):
time.sleep(value)
if len(self.responseSequence) > 0:
self._setupReadValue(command)
else:
self._readQueue = list(value)
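# Illustrative: responseSequence = ['AT\r\n', 0.5, 'OK\r\n'] queues
# 'AT\r\n' first; once that is consumed, the numeric entry simulates a
# 0.5 s delay before 'OK\r\n' is queued.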
def write(self, data):
if self.writeCallbackFunc != None:
self.writeCallbackFunc(data)
self.writeQueue.append(data)
def close(self):
pass
def inWaiting(self):
rqLen = len(self._readQueue)
for item in self.responseSequence:
if type(item) in (int, float):
break
else:
rqLen += len(item)
return rqLen
class SerialException(Exception):
""" Mock Serial Exception """
class TestNotifications(unittest.TestCase):
""" Tests reading unsolicited notifications from the serial devices """
def setUp(self):
self.mockSerial = MockSerialPackage()
gsmmodem.serial_comms.serial = self.mockSerial
self.tests = (['ABC\r\n'],
[' blah blah blah \r\n', '12345\r\n'])
def test_callback(self):
""" Tests if the notification callback method is correctly called """
for test in self.tests:
callbackCalled = [False]
def callback(data):
callbackCalled[0] = True
self.assertIsInstance(data, list)
self.assertEqual(len(data), len(test))
for i in range(len(test)):
self.assertEqual(data[i], test[i][:-2])
serialComms = gsmmodem.serial_comms.SerialComms('-- PORT IGNORED DURING TESTS --', notifyCallbackFunc=callback)
serialComms.connect()
# Fake a notification
serialComms.serial.responseSequence = copy(test)
# Wait a bit for the event to be picked up
while len(serialComms.serial._readQueue) > 0 or len(serialComms.serial.responseSequence) > 0:
time.sleep(0.05)
self.assertTrue(callbackCalled[0], 'Notification callback function not called')
serialComms.close()
def test_noCallback(self):
""" Tests notifications when no callback method was specified (nothing should happen) """
for test in self.tests:
serialComms = gsmmodem.serial_comms.SerialComms('-- PORT IGNORED DURING TESTS --')
serialComms.connect()
# Fake a notification
serialComms.serial.responseSequence = copy(test)
# Wait a bit for the event to be picked up
while len(serialComms.serial._readQueue) > 0 or len(serialComms.serial.responseSequence) > 0:
time.sleep(0.05)
serialComms.close()
class TestSerialException(unittest.TestCase):
""" Tests SerialException handling """
def setUp(self):
self.mockSerial = MockSerialPackage()
gsmmodem.serial_comms.serial = self.mockSerial
self.serialComms = gsmmodem.serial_comms.SerialComms('-- PORT IGNORED DURING TESTS --')
self.serialComms.connect()
def tearDown(self):
self.serialComms.close()
def test_readLoopException(self):
""" Tests handling a SerialException from inside the read loop thread """
self.assertTrue(self.serialComms.alive)
exceptionRaised = [False]
callbackCalled = [False]
def brokenRead(*args, **kwargs):
exceptionRaised[0] = True
raise MockSerialPackage.SerialException()
self.serialComms.serial.read = brokenRead
def errorCallback(ex):
callbackCalled[0] = True
self.assertIsInstance(ex, MockSerialPackage.SerialException)
self.serialComms.fatalErrorCallback = errorCallback
# Let the serial comms object attempt to read something
self.serialComms.serial.responseSequence = ['12345\r\n']
while not exceptionRaised[0]:
time.sleep(0.05)
self.assertFalse(self.serialComms.alive)
time.sleep(0.05)
self.assertTrue(callbackCalled[0], 'Error callback not called on fatal error')
class TestWrite(unittest.TestCase):
""" Tests writing to the serial device """
def setUp(self):
self.mockSerial = MockSerialPackage()
gsmmodem.serial_comms.serial = self.mockSerial
self.serialComms = gsmmodem.serial_comms.SerialComms('-- PORT IGNORED DURING TESTS --')
self.serialComms.connect()
def tearDown(self):
self.serialComms.close()
def test_write(self):
""" Tests basic writing operations """
tests = ((['OK\r\n'], ['OK']),
(['ERROR\r\n'], ['ERROR']),
(['first line\r\n', 'second line\r\n', 'OK\r\n'], ['first line', 'second line', 'OK']),
# Some Huawei modems issue this response instead of ERROR for unknown commands; ensure we detect it correctly
(['COMMAND NOT SUPPORT\r\n'], ['COMMAND NOT SUPPORT']))
for actual, expected in tests:
self.serialComms.serial.responseSequence = actual
self.serialComms.serial.flushResponseSequence = True
response = self.serialComms.write('test\r')
self.assertEqual(response, expected)
# Now write without expecting a response
response = self.serialComms.write('test2\r', waitForResponse=False)
self.assertEqual(response, None)
def test_writeTimeout(self):
""" Tests that the serial comms write timeout parameter """
# Serial comms will not response (no response sequence specified)
self.assertRaises(TimeoutException, self.serialComms.write, 'test\r', waitForResponse=True, timeout=0.1)
if __name__ == "__main__":
logging.basicConfig(format='%(levelname)s: %(message)s', level=logging.DEBUG)
unittest.main()
|
rallylee/gem5
|
refs/heads/master
|
src/mem/slicc/symbols/__init__.py
|
82
|
# Copyright (c) 2009 The Hewlett-Packard Development Company
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Nathan Binkert
from slicc.symbols.Action import Action
from slicc.symbols.Event import Event
from slicc.symbols.Func import Func
from slicc.symbols.State import State
from slicc.symbols.RequestType import RequestType
from slicc.symbols.StateMachine import StateMachine
from slicc.symbols.Symbol import Symbol
from slicc.symbols.SymbolTable import SymbolTable
from slicc.symbols.Transition import Transition
from slicc.symbols.Type import Type
from slicc.symbols.Var import Var
|
icomms/wqmanager
|
refs/heads/master
|
apps/ui/admin.py
|
1
|
from ui.models import LogEntry
from django.contrib import admin
class LogEntryAdmin(admin.ModelAdmin):
list_display = ('user', 'timestamp', 'event_type', 'processing_time_ms')
search_fields = ('user', 'event_type')
list_filter = ['user', 'event_type']
readonly_fields = ('user', 'timestamp', 'event_type', 'processing_time_ms', 'post_data')
admin.site.register(LogEntry, LogEntryAdmin)
|
lorenmanu/TFG-GESTION-OFERTAS-DEMANDAS
|
refs/heads/master
|
app/db_converter.py
|
1
|
#!/usr/bin/env python
"""
Fixes a MySQL dump (made with the flags below) so that it can be imported
directly into a new PostgreSQL database.
Create the dump using:
mysqldump --compatible=postgresql --default-character-set=utf8 -r databasename.mysql -u root databasename
"""
import re
import sys
import os
import time
import subprocess
def parse(input_filename, output_filename):
"Feed it a file, and it'll output a fixed one"
# State storage
if input_filename == "-":
num_lines = -1
else:
num_lines = int(subprocess.check_output(["wc", "-l", input_filename]).strip().split()[0])
tables = {}
current_table = None
creation_lines = []
enum_types = []
foreign_key_lines = []
fulltext_key_lines = []
sequence_lines = []
cast_lines = []
num_inserts = 0
started = time.time()
# Open output file and write header. Logging file handle will be stdout
# unless we're writing output to stdout, in which case NO PROGRESS FOR YOU.
if output_filename == "-":
output = sys.stdout
logging = open(os.devnull, "w")
else:
output = open(output_filename, "w")
logging = sys.stdout
if input_filename == "-":
input_fh = sys.stdin
else:
input_fh = open(input_filename)
output.write("-- Converted by db_converter\n")
output.write("START TRANSACTION;\n")
output.write("SET standard_conforming_strings=off;\n")
output.write("SET escape_string_warning=off;\n")
output.write("SET CONSTRAINTS ALL DEFERRED;\n\n")
for i, line in enumerate(input_fh):
        time_taken = time.time() - started
        percentage_done = (i + 1) / float(num_lines) if num_lines > 0 else 0
        secs_left = (time_taken / percentage_done) - time_taken if percentage_done else 0
        logging.write("\rLine %i (of %s: %.2f%%) [%s tables] [%s inserts] [ETA: %i min %i sec]" % (
            i + 1,
            num_lines,
            percentage_done * 100,
            len(tables),
            num_inserts,
            secs_left // 60,
            secs_left % 60,
        ))
logging.flush()
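        # Protect literal backslashes with a sentinel while rewriting MySQL's
        # \' escapes into PostgreSQL's doubled '' quoting, then restore them.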
line = line.decode("utf8").strip().replace(r"\\", "WUBWUBREALSLASHWUB").replace(r"\'", "''").replace("WUBWUBREALSLASHWUB", r"\\")
# Ignore comment lines
        if (line.startswith("--") or line.startswith("/*") or
                line.startswith("LOCK TABLES") or line.startswith("DROP TABLE") or
                line.startswith("UNLOCK TABLES") or not line):
continue
# Outside of anything handling
if current_table is None:
# Start of a table creation statement?
if line.startswith("CREATE TABLE"):
current_table = line.split('"')[1]
tables[current_table] = {"columns": []}
creation_lines = []
# Inserting data into a table?
elif line.startswith("INSERT INTO"):
output.write(line.encode("utf8").replace("'0000-00-00 00:00:00'", "NULL") + "\n")
num_inserts += 1
# ???
else:
print "\n ! Unknown line in main body: %s" % line
# Inside-create-statement handling
else:
# Is it a column?
if line.startswith('"'):
useless, name, definition = line.strip(",").split('"',2)
try:
type, extra = definition.strip().split(" ", 1)
# This must be a tricky enum
if ')' in extra:
type, extra = definition.strip().split(")")
except ValueError:
type = definition.strip()
extra = ""
extra = re.sub("CHARACTER SET [\w\d]+\s*", "", extra.replace("unsigned", ""))
extra = re.sub("COLLATE [\w\d]+\s*", "", extra.replace("unsigned", ""))
# See if it needs type conversion
final_type = None
set_sequence = None
if type.startswith("tinyint("):
type = "int4"
set_sequence = True
final_type = "boolean"
elif type.startswith("int("):
type = "integer"
set_sequence = True
elif type.startswith("bigint("):
type = "bigint"
set_sequence = True
elif type == "longtext":
type = "text"
elif type == "mediumtext":
type = "text"
elif type == "tinytext":
type = "text"
elif type.startswith("varchar("):
size = int(type.split("(")[1].rstrip(")"))
type = "varchar(%s)" % (size * 2)
elif type.startswith("smallint("):
type = "int2"
set_sequence = True
elif type == "datetime":
type = "timestamp with time zone"
elif type == "double":
type = "double precision"
elif type.endswith("blob"):
type = "bytea"
elif type.startswith("enum(") or type.startswith("set("):
types_str = type.split("(")[1].rstrip(")").rstrip('"')
types_arr = [type_str.strip('\'') for type_str in types_str.split(",")]
                # Considered using values to make a name, but it's dodgy
# enum_name = '_'.join(types_arr)
enum_name = "{0}_{1}".format(current_table, name)
if enum_name not in enum_types:
output.write("CREATE TYPE {0} AS ENUM ({1}); \n".format(enum_name, types_str));
enum_types.append(enum_name)
type = enum_name
if final_type:
cast_lines.append("ALTER TABLE \"%s\" ALTER COLUMN \"%s\" DROP DEFAULT, ALTER COLUMN \"%s\" TYPE %s USING CAST(\"%s\" as %s)" % (current_table, name, name, final_type, name, final_type))
# ID fields need sequences [if they are integers?]
if name == "id" and set_sequence is True:
sequence_lines.append("CREATE SEQUENCE %s_id_seq" % (current_table))
sequence_lines.append("SELECT setval('%s_id_seq', max(id)) FROM %s" % (current_table, current_table))
sequence_lines.append("ALTER TABLE \"%s\" ALTER COLUMN \"id\" SET DEFAULT nextval('%s_id_seq')" % (current_table, current_table))
# Record it
creation_lines.append('"%s" %s %s' % (name, type, extra))
tables[current_table]['columns'].append((name, type, extra))
# Is it a constraint or something?
elif line.startswith("PRIMARY KEY"):
creation_lines.append(line.rstrip(","))
elif line.startswith("CONSTRAINT"):
foreign_key_lines.append("ALTER TABLE \"%s\" ADD CONSTRAINT %s DEFERRABLE INITIALLY DEFERRED" % (current_table, line.split("CONSTRAINT")[1].strip().rstrip(",")))
foreign_key_lines.append("CREATE INDEX ON \"%s\" %s" % (current_table, line.split("FOREIGN KEY")[1].split("REFERENCES")[0].strip().rstrip(",")))
elif line.startswith("UNIQUE KEY"):
creation_lines.append("UNIQUE (%s)" % line.split("(")[1].split(")")[0])
elif line.startswith("FULLTEXT KEY"):
fulltext_keys = " || ' ' || ".join( line.split('(')[-1].split(')')[0].replace('"', '').split(',') )
fulltext_key_lines.append("CREATE INDEX ON %s USING gin(to_tsvector('english', %s))" % (current_table, fulltext_keys))
elif line.startswith("KEY"):
pass
# Is it the end of the table?
elif line == ");":
output.write("CREATE TABLE \"%s\" (\n" % current_table)
                for j, creation_line in enumerate(creation_lines):
                    output.write(" %s%s\n" % (creation_line, "," if j != (len(creation_lines) - 1) else ""))
output.write(');\n\n')
current_table = None
# ???
else:
print "\n ! Unknown line inside table creation: %s" % line
# Finish file
output.write("\n-- Post-data save --\n")
output.write("COMMIT;\n")
output.write("START TRANSACTION;\n")
# Write typecasts out
output.write("\n-- Typecasts --\n")
for line in cast_lines:
output.write("%s;\n" % line)
# Write FK constraints out
output.write("\n-- Foreign keys --\n")
for line in foreign_key_lines:
output.write("%s;\n" % line)
# Write sequences out
output.write("\n-- Sequences --\n")
for line in sequence_lines:
output.write("%s;\n" % line)
    # Write full-text index keys out
output.write("\n-- Full Text keys --\n")
for line in fulltext_key_lines:
output.write("%s;\n" % line)
# Finish file
output.write("\n")
output.write("COMMIT;\n")
print ""
if __name__ == "__main__":
parse(sys.argv[1], sys.argv[2])
|
steveb/tablib
|
refs/heads/develop
|
tablib/packages/odf/meta.py
|
98
|
# -*- coding: utf-8 -*-
# Copyright (C) 2006-2007 Søren Roug, European Environment Agency
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Contributor(s):
#
from namespaces import METANS
from element import Element
# Autogenerated
def AutoReload(**args):
return Element(qname = (METANS,'auto-reload'), **args)
def CreationDate(**args):
return Element(qname = (METANS,'creation-date'), **args)
def DateString(**args):
return Element(qname = (METANS,'date-string'), **args)
def DocumentStatistic(**args):
return Element(qname = (METANS,'document-statistic'), **args)
def EditingCycles(**args):
return Element(qname = (METANS,'editing-cycles'), **args)
def EditingDuration(**args):
return Element(qname = (METANS,'editing-duration'), **args)
def Generator(**args):
return Element(qname = (METANS,'generator'), **args)
def HyperlinkBehaviour(**args):
return Element(qname = (METANS,'hyperlink-behaviour'), **args)
def InitialCreator(**args):
return Element(qname = (METANS,'initial-creator'), **args)
def Keyword(**args):
return Element(qname = (METANS,'keyword'), **args)
def PrintDate(**args):
return Element(qname = (METANS,'print-date'), **args)
def PrintedBy(**args):
return Element(qname = (METANS,'printed-by'), **args)
def Template(**args):
return Element(qname = (METANS,'template'), **args)
def UserDefined(**args):
return Element(qname = (METANS,'user-defined'), **args)
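# Example usage (an illustrative sketch; assumes odfpy's Element accepts a
# 'text' keyword, as its other element factories do):
#   g = Generator(text='MyApp/1.0')
# builds a <meta:generator> element for the document's meta section.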
|
diogo149/treeano
|
refs/heads/master
|
examples/resnet/cifar10_cnn.py
|
2
|
from __future__ import division, absolute_import
from __future__ import print_function, unicode_literals
import itertools
import numpy as np
import theano
import theano.tensor as T
import treeano
import treeano.nodes as tn
import canopy
import canopy.sandbox.datasets
from treeano.sandbox.nodes import batch_normalization as bn
from treeano.sandbox.nodes import resnet
fX = theano.config.floatX
BATCH_SIZE = 256
train, valid, test = canopy.sandbox.datasets.cifar10()
groups = 3
blocks_per_group = 5
num_layers = 2
num_filters = 16
nodes = [
tn.InputNode("x", shape=(None, 3, 32, 32)),
tn.Conv2DNode("conv1", num_filters=num_filters),
bn.BatchNormalizationNode("bn1"),
tn.ReLUNode("relu1"),
]
for group in range(groups):
for block in range(blocks_per_group):
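        # The first block of every group after the first doubles the filter
        # count and uses a projection shortcut to match the new dimensions.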
if group != 0 and block == 0:
num_filters *= 2
nodes.append(resnet.residual_block_conv_2d(
"resblock_%d_%d" % (group, block),
num_filters=num_filters,
num_layers=num_layers,
increase_dim="projection"))
else:
nodes.append(resnet.residual_block_conv_2d(
"resblock_%d_%d" % (group, block),
num_filters=num_filters,
num_layers=num_layers))
nodes += [
tn.GlobalMeanPool2DNode("global_pool"),
tn.DenseNode("logit", num_units=10),
tn.SoftmaxNode("pred"),
]
model = tn.HyperparameterNode(
"model",
tn.SequentialNode("seq", nodes),
filter_size=(3, 3),
inits=[treeano.inits.OrthogonalInit()],
pad="same",
)
with_updates = tn.HyperparameterNode(
"with_updates",
tn.AdamNode(
"adam",
{"subtree": model,
"cost": tn.TotalCostNode("cost", {
"pred": tn.ReferenceNode("pred_ref", reference="model"),
"target": tn.InputNode("y", shape=(None,), dtype="int32")},
)}),
cost_function=treeano.utils.categorical_crossentropy_i32,
)
network = with_updates.network()
network.build() # build eagerly to share weights
valid_fn = canopy.handled_fn(
network,
[canopy.handlers.time_call(key="valid_time"),
canopy.handlers.override_hyperparameters(dropout_probability=0),
canopy.handlers.batch_pad(BATCH_SIZE, keys=["x", "y"]),
canopy.handlers.chunk_variables(batch_size=BATCH_SIZE,
variables=["x", "y"])],
{"x": "x", "y": "y"},
{"valid_cost": "cost", "pred": "pred"})
def validate(in_dict, results_dict):
valid_out = valid_fn(valid)
valid_y = valid["y"]
probabilities = valid_out.pop("pred")[:len(valid_y)]
predicted_classes = np.argmax(probabilities, axis=1)
valid_out["valid_accuracy"] = (valid_y == predicted_classes).mean()
results_dict.update(valid_out)
train_fn = canopy.handled_fn(
network,
[canopy.handlers.time_call(key="total_time"),
canopy.handlers.call_after_every(1, validate),
canopy.handlers.time_call(key="train_time"),
canopy.handlers.batch_pad(BATCH_SIZE, keys=["x", "y"]),
canopy.handlers.chunk_variables(batch_size=BATCH_SIZE,
variables=["x", "y"])],
{"x": "x", "y": "y"},
{"train_cost": "cost"},
include_updates=True)
# ################################# training #################################
print("Starting training...")
canopy.evaluate_until(fn=train_fn,
gen=itertools.repeat(train),
max_iters=200)
|
dfalt974/SickRage
|
refs/heads/master
|
lib/libtrakt/exceptions.py
|
55
|
class traktException(Exception):
pass
class traktAuthException(traktException):
pass
class traktServerBusy(traktException):
pass
|
mcopik/Elemental
|
refs/heads/master
|
python/lapack_like/util.py
|
3
|
#
# Copyright (c) 2009-2015, Jack Poulson
# All rights reserved.
#
# This file is part of Elemental and is under the BSD 2-Clause License,
# which can be found in the LICENSE file in the root directory, or at
# http://opensource.org/licenses/BSD-2-Clause
#
from ..core import *
import ctypes
# Median
# ======
lib.ElMedian_i.argtypes = [c_void_p,POINTER(iType)]
lib.ElMedian_s.argtypes = [c_void_p,POINTER(sType)]
lib.ElMedian_d.argtypes = [c_void_p,POINTER(dType)]
lib.ElMedianDist_i.argtypes = [c_void_p,POINTER(iType)]
lib.ElMedianDist_s.argtypes = [c_void_p,POINTER(sType)]
lib.ElMedianDist_d.argtypes = [c_void_p,POINTER(dType)]
def Median(x):
  median = TagToType(x.tag)()
args = [x.obj,pointer(median)]
if type(x) is Matrix:
if x.tag == iTag: lib.ElMedian_i(*args)
elif x.tag == sTag: lib.ElMedian_s(*args)
elif x.tag == dTag: lib.ElMedian_d(*args)
else: DataExcept()
elif type(x) is DistMatrix:
if x.tag == iTag: lib.ElMedianDist_i(*args)
elif x.tag == sTag: lib.ElMedianDist_s(*args)
elif x.tag == dTag: lib.ElMedianDist_d(*args)
else: DataExcept()
else: TypeExcept()
return median
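# Example (an illustrative sketch): for a real-valued Matrix or DistMatrix x,
#   med = Median(x)
# returns the median as a ctypes scalar matching x's datatype tag.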
# Sort
# ====
lib.ElSort_i.argtypes = [c_void_p,c_uint]
lib.ElSort_s.argtypes = [c_void_p,c_uint]
lib.ElSort_d.argtypes = [c_void_p,c_uint]
lib.ElSortDist_i.argtypes = [c_void_p,c_uint]
lib.ElSortDist_s.argtypes = [c_void_p,c_uint]
lib.ElSortDist_d.argtypes = [c_void_p,c_uint]
def Sort(X,sort=ASCENDING):
args = [X.obj,sort]
if type(X) is Matrix:
if X.tag == iTag: lib.ElSort_i(*args)
elif X.tag == sTag: lib.ElSort_s(*args)
elif X.tag == dTag: lib.ElSort_d(*args)
else: DataExcept()
elif type(X) is DistMatrix:
if X.tag == iTag: lib.ElSortDist_i(*args)
elif X.tag == sTag: lib.ElSortDist_s(*args)
elif X.tag == dTag: lib.ElSortDist_d(*args)
else: DataExcept()
else: TypeExcept()
lib.ElTaggedSort_i.argtypes = [c_void_p,c_uint,POINTER(iType)]
lib.ElTaggedSort_s.argtypes = [c_void_p,c_uint,POINTER(sType)]
lib.ElTaggedSort_d.argtypes = [c_void_p,c_uint,POINTER(dType)]
lib.ElTaggedSortDist_i.argtypes = [c_void_p,c_uint,POINTER(iType)]
lib.ElTaggedSortDist_s.argtypes = [c_void_p,c_uint,POINTER(sType)]
lib.ElTaggedSortDist_d.argtypes = [c_void_p,c_uint,POINTER(dType)]
def TaggedSort(x,sort):
taggedOrder = (TagToType(x.tag)*x.Height())()
args = [x.obj,sort,taggedOrder]
if type(x) is Matrix:
if x.tag == iTag: lib.ElTaggedSort_i(*args)
elif x.tag == sTag: lib.ElTaggedSort_s(*args)
elif x.tag == dTag: lib.ElTaggedSort_d(*args)
else: DataExcept()
elif type(x) is DistMatrix:
if x.tag == iTag: lib.ElTaggedSortDist_i(*args)
elif x.tag == sTag: lib.ElTaggedSortDist_s(*args)
elif x.tag == dTag: lib.ElTaggedSortDist_d(*args)
else: DataExcept()
else: TypeExcept()
return taggedOrder
|
jelugbo/hebs_repo
|
refs/heads/master
|
common/test/acceptance/pages/common/logout.py
|
162
|
"""
Logout Page.
"""
from bok_choy.page_object import PageObject
from . import BASE_URL
class LogoutPage(PageObject):
"""
Logout page to logout current logged in user.
"""
url = BASE_URL + "/logout"
def is_browser_on_page(self):
return self.q(css='.cta-login').present
|
0jpq0/kbengine
|
refs/heads/master
|
kbe/src/lib/python/Lib/logging/__init__.py
|
63
|
# Copyright 2001-2014 by Vinay Sajip. All Rights Reserved.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose and without fee is hereby granted,
# provided that the above copyright notice appear in all copies and that
# both that copyright notice and this permission notice appear in
# supporting documentation, and that the name of Vinay Sajip
# not be used in advertising or publicity pertaining to distribution
# of the software without specific, written prior permission.
# VINAY SAJIP DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING
# ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL
# VINAY SAJIP BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR
# ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER
# IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""
Logging package for Python. Based on PEP 282 and comments thereto in
comp.lang.python.
Copyright (C) 2001-2014 Vinay Sajip. All Rights Reserved.
To use, simply 'import logging' and log away!
"""
import sys, os, time, io, traceback, warnings, weakref, collections
from string import Template
__all__ = ['BASIC_FORMAT', 'BufferingFormatter', 'CRITICAL', 'DEBUG', 'ERROR',
'FATAL', 'FileHandler', 'Filter', 'Formatter', 'Handler', 'INFO',
'LogRecord', 'Logger', 'LoggerAdapter', 'NOTSET', 'NullHandler',
'StreamHandler', 'WARN', 'WARNING', 'addLevelName', 'basicConfig',
'captureWarnings', 'critical', 'debug', 'disable', 'error',
'exception', 'fatal', 'getLevelName', 'getLogger', 'getLoggerClass',
'info', 'log', 'makeLogRecord', 'setLoggerClass', 'warn', 'warning',
'getLogRecordFactory', 'setLogRecordFactory', 'lastResort']
try:
import threading
except ImportError: #pragma: no cover
threading = None
__author__ = "Vinay Sajip <vinay_sajip@red-dove.com>"
__status__ = "production"
# The following module attributes are no longer updated.
__version__ = "0.5.1.2"
__date__ = "07 February 2010"
#---------------------------------------------------------------------------
# Miscellaneous module data
#---------------------------------------------------------------------------
#
#_startTime is used as the base when calculating the relative time of events
#
_startTime = time.time()
#
#raiseExceptions is used to see if exceptions during handling should be
#propagated
#
raiseExceptions = True
#
# If you don't want threading information in the log, set this to zero
#
logThreads = True
#
# If you don't want multiprocessing information in the log, set this to zero
#
logMultiprocessing = True
#
# If you don't want process information in the log, set this to zero
#
logProcesses = True
#---------------------------------------------------------------------------
# Level related stuff
#---------------------------------------------------------------------------
#
# Default levels and level names, these can be replaced with any positive set
# of values having corresponding names. There is a pseudo-level, NOTSET, which
# is only really there as a lower limit for user-defined levels. Handlers and
# loggers are initialized with NOTSET so that they will log all messages, even
# at user-defined levels.
#
CRITICAL = 50
FATAL = CRITICAL
ERROR = 40
WARNING = 30
WARN = WARNING
INFO = 20
DEBUG = 10
NOTSET = 0
_levelToName = {
CRITICAL: 'CRITICAL',
ERROR: 'ERROR',
WARNING: 'WARNING',
INFO: 'INFO',
DEBUG: 'DEBUG',
NOTSET: 'NOTSET',
}
_nameToLevel = {
'CRITICAL': CRITICAL,
'ERROR': ERROR,
'WARN': WARNING,
'WARNING': WARNING,
'INFO': INFO,
'DEBUG': DEBUG,
'NOTSET': NOTSET,
}
def getLevelName(level):
"""
Return the textual representation of logging level 'level'.
If the level is one of the predefined levels (CRITICAL, ERROR, WARNING,
INFO, DEBUG) then you get the corresponding string. If you have
associated levels with names using addLevelName then the name you have
associated with 'level' is returned.
If a numeric value corresponding to one of the defined levels is passed
in, the corresponding string representation is returned.
Otherwise, the string "Level %s" % level is returned.
"""
# See Issue #22386 for the reason for this convoluted expression
return _levelToName.get(level, _nameToLevel.get(level, ("Level %s" % level)))
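# For example (illustrative):
#   getLevelName(INFO)    -> 'INFO'
#   getLevelName('INFO')  -> 20    (the reverse lookup noted above)
#   getLevelName(35)      -> 'Level 35'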
def addLevelName(level, levelName):
"""
Associate 'levelName' with 'level'.
This is used when converting levels to text during message formatting.
"""
_acquireLock()
try: #unlikely to cause an exception, but you never know...
_levelToName[level] = levelName
_nameToLevel[levelName] = level
finally:
_releaseLock()
if hasattr(sys, '_getframe'):
currentframe = lambda: sys._getframe(3)
else: #pragma: no cover
def currentframe():
"""Return the frame object for the caller's stack frame."""
try:
raise Exception
except Exception:
return sys.exc_info()[2].tb_frame.f_back
#
# _srcfile is used when walking the stack to check when we've got the first
# caller stack frame, by skipping frames whose filename is that of this
# module's source. It therefore should contain the filename of this module's
# source file.
#
# Ordinarily we would use __file__ for this, but frozen modules don't always
# have __file__ set, for some reason (see Issue #21736). Thus, we get the
# filename from a handy code object from a function defined in this module.
# (There's no particular reason for picking addLevelName.)
#
_srcfile = os.path.normcase(addLevelName.__code__.co_filename)
# _srcfile is only used in conjunction with sys._getframe().
# To provide compatibility with older versions of Python, set _srcfile
# to None if _getframe() is not available; this value will prevent
# findCaller() from being called. You can also do this if you want to avoid
# the overhead of fetching caller information, even when _getframe() is
# available.
#if not hasattr(sys, '_getframe'):
# _srcfile = None
def _checkLevel(level):
if isinstance(level, int):
rv = level
elif str(level) == level:
if level not in _nameToLevel:
raise ValueError("Unknown level: %r" % level)
rv = _nameToLevel[level]
else:
raise TypeError("Level not an integer or a valid string: %r" % level)
return rv
#---------------------------------------------------------------------------
# Thread-related stuff
#---------------------------------------------------------------------------
#
#_lock is used to serialize access to shared data structures in this module.
#This needs to be an RLock because fileConfig() creates and configures
#Handlers, and so might arbitrary user threads. Since Handler code updates the
#shared dictionary _handlers, it needs to acquire the lock. But if configuring,
#the lock would already have been acquired - so we need an RLock.
#The same argument applies to Loggers and Manager.loggerDict.
#
if threading:
_lock = threading.RLock()
else: #pragma: no cover
_lock = None
def _acquireLock():
"""
Acquire the module-level lock for serializing access to shared data.
This should be released with _releaseLock().
"""
if _lock:
_lock.acquire()
def _releaseLock():
"""
Release the module-level lock acquired by calling _acquireLock().
"""
if _lock:
_lock.release()
#---------------------------------------------------------------------------
# The logging record
#---------------------------------------------------------------------------
class LogRecord(object):
"""
A LogRecord instance represents an event being logged.
LogRecord instances are created every time something is logged. They
contain all the information pertinent to the event being logged. The
main information passed in is in msg and args, which are combined
using str(msg) % args to create the message field of the record. The
record also includes information such as when the record was created,
the source line where the logging call was made, and any exception
information to be logged.
"""
def __init__(self, name, level, pathname, lineno,
msg, args, exc_info, func=None, sinfo=None, **kwargs):
"""
Initialize a logging record with interesting information.
"""
ct = time.time()
self.name = name
self.msg = msg
#
# The following statement allows passing of a dictionary as a sole
# argument, so that you can do something like
# logging.debug("a %(a)d b %(b)s", {'a':1, 'b':2})
# Suggested by Stefan Behnel.
# Note that without the test for args[0], we get a problem because
# during formatting, we test to see if the arg is present using
# 'if self.args:'. If the event being logged is e.g. 'Value is %d'
# and if the passed arg fails 'if self.args:' then no formatting
# is done. For example, logger.warning('Value is %d', 0) would log
# 'Value is %d' instead of 'Value is 0'.
# For the use case of passing a dictionary, this should not be a
# problem.
# Issue #21172: a request was made to relax the isinstance check
# to hasattr(args[0], '__getitem__'). However, the docs on string
# formatting still seem to suggest a mapping object is required.
# Thus, while not removing the isinstance check, it does now look
# for collections.Mapping rather than, as before, dict.
if (args and len(args) == 1 and isinstance(args[0], collections.Mapping)
and args[0]):
args = args[0]
self.args = args
self.levelname = getLevelName(level)
self.levelno = level
self.pathname = pathname
try:
self.filename = os.path.basename(pathname)
self.module = os.path.splitext(self.filename)[0]
except (TypeError, ValueError, AttributeError):
self.filename = pathname
self.module = "Unknown module"
self.exc_info = exc_info
self.exc_text = None # used to cache the traceback text
self.stack_info = sinfo
self.lineno = lineno
self.funcName = func
self.created = ct
self.msecs = (ct - int(ct)) * 1000
self.relativeCreated = (self.created - _startTime) * 1000
if logThreads and threading:
self.thread = threading.get_ident()
self.threadName = threading.current_thread().name
else: # pragma: no cover
self.thread = None
self.threadName = None
if not logMultiprocessing: # pragma: no cover
self.processName = None
else:
self.processName = 'MainProcess'
mp = sys.modules.get('multiprocessing')
if mp is not None:
# Errors may occur if multiprocessing has not finished loading
# yet - e.g. if a custom import hook causes third-party code
# to run when multiprocessing calls import. See issue 8200
# for an example
try:
self.processName = mp.current_process().name
except Exception: #pragma: no cover
pass
if logProcesses and hasattr(os, 'getpid'):
self.process = os.getpid()
else:
self.process = None
def __str__(self):
return '<LogRecord: %s, %s, %s, %s, "%s">'%(self.name, self.levelno,
self.pathname, self.lineno, self.msg)
def getMessage(self):
"""
Return the message for this LogRecord.
Return the message for this LogRecord after merging any user-supplied
arguments with the message.
"""
msg = str(self.msg)
if self.args:
msg = msg % self.args
return msg
#
# Determine which class to use when instantiating log records.
#
_logRecordFactory = LogRecord
def setLogRecordFactory(factory):
"""
Set the factory to be used when instantiating a log record.
:param factory: A callable which will be called to instantiate
a log record.
"""
global _logRecordFactory
_logRecordFactory = factory
def getLogRecordFactory():
"""
Return the factory to be used when instantiating a log record.
"""
return _logRecordFactory
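# Example (an illustrative sketch): wrap the current factory to stamp every
# record with an extra attribute ('hostname' here is hypothetical):
#   _old_factory = getLogRecordFactory()
#   def _factory(*args, **kwargs):
#       record = _old_factory(*args, **kwargs)
#       record.hostname = 'example-host'
#       return record
#   setLogRecordFactory(_factory)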
def makeLogRecord(dict):
"""
    Make a LogRecord whose attributes are defined by the specified dictionary.
This function is useful for converting a logging event received over
a socket connection (which is sent as a dictionary) into a LogRecord
instance.
"""
rv = _logRecordFactory(None, None, "", 0, "", (), None, None)
rv.__dict__.update(dict)
return rv
#---------------------------------------------------------------------------
# Formatter classes and functions
#---------------------------------------------------------------------------
class PercentStyle(object):
default_format = '%(message)s'
asctime_format = '%(asctime)s'
asctime_search = '%(asctime)'
def __init__(self, fmt):
self._fmt = fmt or self.default_format
def usesTime(self):
return self._fmt.find(self.asctime_search) >= 0
def format(self, record):
return self._fmt % record.__dict__
class StrFormatStyle(PercentStyle):
default_format = '{message}'
asctime_format = '{asctime}'
asctime_search = '{asctime'
def format(self, record):
return self._fmt.format(**record.__dict__)
class StringTemplateStyle(PercentStyle):
default_format = '${message}'
asctime_format = '${asctime}'
asctime_search = '${asctime}'
def __init__(self, fmt):
self._fmt = fmt or self.default_format
self._tpl = Template(self._fmt)
def usesTime(self):
fmt = self._fmt
return fmt.find('$asctime') >= 0 or fmt.find(self.asctime_format) >= 0
def format(self, record):
return self._tpl.substitute(**record.__dict__)
BASIC_FORMAT = "%(levelname)s:%(name)s:%(message)s"
_STYLES = {
'%': (PercentStyle, BASIC_FORMAT),
'{': (StrFormatStyle, '{levelname}:{name}:{message}'),
'$': (StringTemplateStyle, '${levelname}:${name}:${message}'),
}
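# Example (illustrative): each style can express the same basic format.
#   Formatter('%(levelname)s:%(name)s:%(message)s')           # '%' style
#   Formatter('{levelname}:{name}:{message}', style='{')     # str.format
#   Formatter('${levelname}:${name}:${message}', style='$')  # string.Template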
class Formatter(object):
"""
Formatter instances are used to convert a LogRecord to text.
Formatters need to know how a LogRecord is constructed. They are
responsible for converting a LogRecord to (usually) a string which can
be interpreted by either a human or an external system. The base Formatter
    allows a formatting string to be specified. If none is supplied, the
    default value of "%(message)s" is used.
The Formatter can be initialized with a format string which makes use of
knowledge of the LogRecord attributes - e.g. the default value mentioned
above makes use of the fact that the user's message and arguments are pre-
formatted into a LogRecord's message attribute. Currently, the useful
attributes in a LogRecord are described by:
%(name)s Name of the logger (logging channel)
%(levelno)s Numeric logging level for the message (DEBUG, INFO,
WARNING, ERROR, CRITICAL)
%(levelname)s Text logging level for the message ("DEBUG", "INFO",
"WARNING", "ERROR", "CRITICAL")
%(pathname)s Full pathname of the source file where the logging
call was issued (if available)
%(filename)s Filename portion of pathname
%(module)s Module (name portion of filename)
%(lineno)d Source line number where the logging call was issued
(if available)
%(funcName)s Function name
%(created)f Time when the LogRecord was created (time.time()
return value)
%(asctime)s Textual time when the LogRecord was created
%(msecs)d Millisecond portion of the creation time
%(relativeCreated)d Time in milliseconds when the LogRecord was created,
relative to the time the logging module was loaded
(typically at application startup time)
%(thread)d Thread ID (if available)
%(threadName)s Thread name (if available)
%(process)d Process ID (if available)
%(message)s The result of record.getMessage(), computed just as
the record is emitted
"""
converter = time.localtime
def __init__(self, fmt=None, datefmt=None, style='%'):
"""
Initialize the formatter with specified format strings.
Initialize the formatter either with the specified format string, or a
default as described above. Allow for specialized date formatting with
the optional datefmt argument (if omitted, you get the ISO8601 format).
Use a style parameter of '%', '{' or '$' to specify that you want to
use one of %-formatting, :meth:`str.format` (``{}``) formatting or
:class:`string.Template` formatting in your format string.
        .. versionchanged:: 3.2
Added the ``style`` parameter.
"""
if style not in _STYLES:
raise ValueError('Style must be one of: %s' % ','.join(
_STYLES.keys()))
self._style = _STYLES[style][0](fmt)
self._fmt = self._style._fmt
self.datefmt = datefmt
default_time_format = '%Y-%m-%d %H:%M:%S'
default_msec_format = '%s,%03d'
def formatTime(self, record, datefmt=None):
"""
Return the creation time of the specified LogRecord as formatted text.
This method should be called from format() by a formatter which
wants to make use of a formatted time. This method can be overridden
in formatters to provide for any specific requirement, but the
basic behaviour is as follows: if datefmt (a string) is specified,
it is used with time.strftime() to format the creation time of the
record. Otherwise, the ISO8601 format is used. The resulting
string is returned. This function uses a user-configurable function
to convert the creation time to a tuple. By default, time.localtime()
is used; to change this for a particular formatter instance, set the
'converter' attribute to a function with the same signature as
time.localtime() or time.gmtime(). To change it for all formatters,
for example if you want all logging times to be shown in GMT,
set the 'converter' attribute in the Formatter class.
"""
ct = self.converter(record.created)
if datefmt:
s = time.strftime(datefmt, ct)
else:
t = time.strftime(self.default_time_format, ct)
s = self.default_msec_format % (t, record.msecs)
return s
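    # Example (illustrative): to render all of this formatter's times in UTC,
    # override the converter on the instance:
    #   fmt = Formatter('%(asctime)s %(message)s')
    #   fmt.converter = time.gmtime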
def formatException(self, ei):
"""
Format and return the specified exception information as a string.
This default implementation just uses
traceback.print_exception()
"""
sio = io.StringIO()
tb = ei[2]
# See issues #9427, #1553375. Commented out for now.
#if getattr(self, 'fullstack', False):
# traceback.print_stack(tb.tb_frame.f_back, file=sio)
traceback.print_exception(ei[0], ei[1], tb, None, sio)
s = sio.getvalue()
sio.close()
if s[-1:] == "\n":
s = s[:-1]
return s
def usesTime(self):
"""
Check if the format uses the creation time of the record.
"""
return self._style.usesTime()
def formatMessage(self, record):
return self._style.format(record)
def formatStack(self, stack_info):
"""
This method is provided as an extension point for specialized
formatting of stack information.
The input data is a string as returned from a call to
:func:`traceback.print_stack`, but with the last trailing newline
removed.
The base implementation just returns the value passed in.
"""
return stack_info
def format(self, record):
"""
Format the specified record as text.
The record's attribute dictionary is used as the operand to a
string formatting operation which yields the returned string.
Before formatting the dictionary, a couple of preparatory steps
are carried out. The message attribute of the record is computed
using LogRecord.getMessage(). If the formatting string uses the
        time (as determined by a call to usesTime()), formatTime() is
called to format the event time. If there is exception information,
it is formatted using formatException() and appended to the message.
"""
record.message = record.getMessage()
if self.usesTime():
record.asctime = self.formatTime(record, self.datefmt)
s = self.formatMessage(record)
if record.exc_info:
# Cache the traceback text to avoid converting it multiple times
# (it's constant anyway)
if not record.exc_text:
record.exc_text = self.formatException(record.exc_info)
if record.exc_text:
if s[-1:] != "\n":
s = s + "\n"
s = s + record.exc_text
if record.stack_info:
if s[-1:] != "\n":
s = s + "\n"
s = s + self.formatStack(record.stack_info)
return s
#
# The default formatter to use when no other is specified
#
_defaultFormatter = Formatter()
class BufferingFormatter(object):
"""
A formatter suitable for formatting a number of records.
"""
def __init__(self, linefmt=None):
"""
Optionally specify a formatter which will be used to format each
individual record.
"""
if linefmt:
self.linefmt = linefmt
else:
self.linefmt = _defaultFormatter
def formatHeader(self, records):
"""
Return the header string for the specified records.
"""
return ""
def formatFooter(self, records):
"""
Return the footer string for the specified records.
"""
return ""
def format(self, records):
"""
Format the specified records and return the result as a string.
"""
rv = ""
if len(records) > 0:
rv = rv + self.formatHeader(records)
for record in records:
rv = rv + self.linefmt.format(record)
rv = rv + self.formatFooter(records)
return rv
#---------------------------------------------------------------------------
# Filter classes and functions
#---------------------------------------------------------------------------
class Filter(object):
"""
Filter instances are used to perform arbitrary filtering of LogRecords.
Loggers and Handlers can optionally use Filter instances to filter
records as desired. The base filter class only allows events which are
below a certain point in the logger hierarchy. For example, a filter
initialized with "A.B" will allow events logged by loggers "A.B",
"A.B.C", "A.B.C.D", "A.B.D" etc. but not "A.BB", "B.A.B" etc. If
initialized with the empty string, all events are passed.
"""
def __init__(self, name=''):
"""
Initialize a filter.
Initialize with the name of the logger which, together with its
children, will have its events allowed through the filter. If no
name is specified, allow every event.
"""
self.name = name
self.nlen = len(name)
def filter(self, record):
"""
Determine if the specified record is to be logged.
Is the specified record to be logged? Returns 0 for no, nonzero for
yes. If deemed appropriate, the record may be modified in-place.
"""
if self.nlen == 0:
return True
elif self.name == record.name:
return True
elif record.name.find(self.name, 0, self.nlen) != 0:
return False
return (record.name[self.nlen] == ".")
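# Example (illustrative): Filter("A.B") passes records from "A.B" and its
# children but rejects sibling names:
#   f = Filter("A.B")
#   f.filter(makeLogRecord({'name': 'A.B.C'}))  # True
#   f.filter(makeLogRecord({'name': 'A.BB'}))   # False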
class Filterer(object):
"""
A base class for loggers and handlers which allows them to share
common code.
"""
def __init__(self):
"""
Initialize the list of filters to be an empty list.
"""
self.filters = []
def addFilter(self, filter):
"""
Add the specified filter to this handler.
"""
if not (filter in self.filters):
self.filters.append(filter)
def removeFilter(self, filter):
"""
Remove the specified filter from this handler.
"""
if filter in self.filters:
self.filters.remove(filter)
def filter(self, record):
"""
Determine if a record is loggable by consulting all the filters.
The default is to allow the record to be logged; any filter can veto
this and the record is then dropped. Returns a zero value if a record
is to be dropped, else non-zero.
        .. versionchanged:: 3.2
Allow filters to be just callables.
"""
rv = True
for f in self.filters:
if hasattr(f, 'filter'):
result = f.filter(record)
else:
result = f(record) # assume callable - will raise if not
if not result:
rv = False
break
return rv
#---------------------------------------------------------------------------
# Handler classes and functions
#---------------------------------------------------------------------------
_handlers = weakref.WeakValueDictionary() #map of handler names to handlers
_handlerList = [] # added to allow handlers to be removed in reverse of order initialized
def _removeHandlerRef(wr):
"""
Remove a handler reference from the internal cleanup list.
"""
# This function can be called during module teardown, when globals are
# set to None. It can also be called from another thread. So we need to
# pre-emptively grab the necessary globals and check if they're None,
# to prevent race conditions and failures during interpreter shutdown.
acquire, release, handlers = _acquireLock, _releaseLock, _handlerList
if acquire and release and handlers:
acquire()
try:
if wr in handlers:
handlers.remove(wr)
finally:
release()
def _addHandlerRef(handler):
"""
Add a handler to the internal cleanup list using a weak reference.
"""
_acquireLock()
try:
_handlerList.append(weakref.ref(handler, _removeHandlerRef))
finally:
_releaseLock()
class Handler(Filterer):
"""
Handler instances dispatch logging events to specific destinations.
The base handler class. Acts as a placeholder which defines the Handler
interface. Handlers can optionally use Formatter instances to format
records as desired. By default, no formatter is specified; in this case,
the 'raw' message as determined by record.message is logged.
"""
def __init__(self, level=NOTSET):
"""
Initializes the instance - basically setting the formatter to None
and the filter list to empty.
"""
Filterer.__init__(self)
self._name = None
self.level = _checkLevel(level)
self.formatter = None
# Add the handler to the global _handlerList (for cleanup on shutdown)
_addHandlerRef(self)
self.createLock()
def get_name(self):
return self._name
def set_name(self, name):
_acquireLock()
try:
if self._name in _handlers:
del _handlers[self._name]
self._name = name
if name:
_handlers[name] = self
finally:
_releaseLock()
name = property(get_name, set_name)
def createLock(self):
"""
Acquire a thread lock for serializing access to the underlying I/O.
"""
if threading:
self.lock = threading.RLock()
else: #pragma: no cover
self.lock = None
def acquire(self):
"""
Acquire the I/O thread lock.
"""
if self.lock:
self.lock.acquire()
def release(self):
"""
Release the I/O thread lock.
"""
if self.lock:
self.lock.release()
def setLevel(self, level):
"""
Set the logging level of this handler. level must be an int or a str.
"""
self.level = _checkLevel(level)
def format(self, record):
"""
Format the specified record.
If a formatter is set, use it. Otherwise, use the default formatter
for the module.
"""
if self.formatter:
fmt = self.formatter
else:
fmt = _defaultFormatter
return fmt.format(record)
def emit(self, record):
"""
Do whatever it takes to actually log the specified logging record.
This version is intended to be implemented by subclasses and so
raises a NotImplementedError.
"""
raise NotImplementedError('emit must be implemented '
'by Handler subclasses')
def handle(self, record):
"""
Conditionally emit the specified logging record.
Emission depends on filters which may have been added to the handler.
Wrap the actual emission of the record with acquisition/release of
the I/O thread lock. Returns whether the filter passed the record for
emission.
"""
rv = self.filter(record)
if rv:
self.acquire()
try:
self.emit(record)
finally:
self.release()
return rv
def setFormatter(self, fmt):
"""
Set the formatter for this handler.
"""
self.formatter = fmt
def flush(self):
"""
Ensure all logging output has been flushed.
This version does nothing and is intended to be implemented by
subclasses.
"""
pass
def close(self):
"""
Tidy up any resources used by the handler.
This version removes the handler from an internal map of handlers,
_handlers, which is used for handler lookup by name. Subclasses
should ensure that this gets called from overridden close()
methods.
"""
#get the module data lock, as we're updating a shared structure.
_acquireLock()
try: #unlikely to raise an exception, but you never know...
if self._name and self._name in _handlers:
del _handlers[self._name]
finally:
_releaseLock()
def handleError(self, record):
"""
Handle errors which occur during an emit() call.
This method should be called from handlers when an exception is
encountered during an emit() call. If raiseExceptions is false,
exceptions get silently ignored. This is what is mostly wanted
for a logging system - most users will not care about errors in
the logging system, they are more interested in application errors.
You could, however, replace this with a custom handler if you wish.
The record which was being processed is passed in to this method.
"""
if raiseExceptions and sys.stderr: # see issue 13807
t, v, tb = sys.exc_info()
try:
sys.stderr.write('--- Logging error ---\n')
traceback.print_exception(t, v, tb, None, sys.stderr)
sys.stderr.write('Call stack:\n')
# Walk the stack frame up until we're out of logging,
# so as to print the calling context.
frame = tb.tb_frame
while (frame and os.path.dirname(frame.f_code.co_filename) ==
__path__[0]):
frame = frame.f_back
if frame:
traceback.print_stack(frame, file=sys.stderr)
else:
# couldn't find the right stack frame, for some reason
sys.stderr.write('Logged from file %s, line %s\n' % (
record.filename, record.lineno))
# Issue 18671: output logging message and arguments
try:
sys.stderr.write('Message: %r\n'
'Arguments: %s\n' % (record.msg,
record.args))
except Exception:
sys.stderr.write('Unable to print the message and arguments'
' - possible formatting error.\nUse the'
' traceback above to help find the error.\n'
)
except OSError: #pragma: no cover
pass # see issue 5971
finally:
del t, v, tb
class StreamHandler(Handler):
"""
A handler class which writes logging records, appropriately formatted,
to a stream. Note that this class does not close the stream, as
sys.stdout or sys.stderr may be used.
"""
terminator = '\n'
def __init__(self, stream=None):
"""
Initialize the handler.
If stream is not specified, sys.stderr is used.
"""
Handler.__init__(self)
if stream is None:
stream = sys.stderr
self.stream = stream
def flush(self):
"""
Flushes the stream.
"""
self.acquire()
try:
if self.stream and hasattr(self.stream, "flush"):
self.stream.flush()
finally:
self.release()
def emit(self, record):
"""
Emit a record.
If a formatter is specified, it is used to format the record.
The record is then written to the stream with a trailing newline. If
exception information is present, it is formatted using
traceback.print_exception and appended to the stream. If the stream
has an 'encoding' attribute, it is used to determine how to do the
output to the stream.
"""
try:
msg = self.format(record)
stream = self.stream
stream.write(msg)
stream.write(self.terminator)
self.flush()
except Exception:
self.handleError(record)
class FileHandler(StreamHandler):
"""
A handler class which writes formatted logging records to disk files.
"""
def __init__(self, filename, mode='a', encoding=None, delay=False):
"""
Open the specified file and use it as the stream for logging.
"""
#keep the absolute path, otherwise derived classes which use this
#may come a cropper when the current directory changes
self.baseFilename = os.path.abspath(filename)
self.mode = mode
self.encoding = encoding
self.delay = delay
if delay:
#We don't open the stream, but we still need to call the
#Handler constructor to set level, formatter, lock etc.
Handler.__init__(self)
self.stream = None
else:
StreamHandler.__init__(self, self._open())
def close(self):
"""
Closes the stream.
"""
self.acquire()
try:
if self.stream:
self.flush()
if hasattr(self.stream, "close"):
self.stream.close()
self.stream = None
# Issue #19523: call unconditionally to
# prevent a handler leak when delay is set
StreamHandler.close(self)
finally:
self.release()
def _open(self):
"""
Open the current base file with the (original) mode and encoding.
Return the resulting stream.
"""
return open(self.baseFilename, self.mode, encoding=self.encoding)
def emit(self, record):
"""
Emit a record.
If the stream was not opened because 'delay' was specified in the
constructor, open it before calling the superclass's emit.
"""
if self.stream is None:
self.stream = self._open()
StreamHandler.emit(self, record)
class _StderrHandler(StreamHandler):
"""
This class is like a StreamHandler using sys.stderr, but always uses
whatever sys.stderr is currently set to rather than the value of
sys.stderr at handler construction time.
"""
def __init__(self, level=NOTSET):
"""
Initialize the handler.
"""
Handler.__init__(self, level)
@property
def stream(self):
return sys.stderr
_defaultLastResort = _StderrHandler(WARNING)
lastResort = _defaultLastResort
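# lastResort is consulted when a record must be handled but no handlers could
# be found for the logger; set it to None to disable this fallback entirely.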
#---------------------------------------------------------------------------
# Manager classes and functions
#---------------------------------------------------------------------------
class PlaceHolder(object):
"""
PlaceHolder instances are used in the Manager logger hierarchy to take
the place of nodes for which no loggers have been defined. This class is
intended for internal use only and not as part of the public API.
"""
def __init__(self, alogger):
"""
Initialize with the specified logger being a child of this placeholder.
"""
self.loggerMap = { alogger : None }
def append(self, alogger):
"""
Add the specified logger as a child of this placeholder.
"""
if alogger not in self.loggerMap:
self.loggerMap[alogger] = None
#
# Determine which class to use when instantiating loggers.
#
_loggerClass = None
def setLoggerClass(klass):
"""
Set the class to be used when instantiating a logger. The class should
define __init__() such that only a name argument is required, and the
__init__() should call Logger.__init__()
"""
if klass != Logger:
if not issubclass(klass, Logger):
raise TypeError("logger not derived from logging.Logger: "
+ klass.__name__)
global _loggerClass
_loggerClass = klass
def getLoggerClass():
"""
Return the class to be used when instantiating a logger.
"""
return _loggerClass
class Manager(object):
"""
There is [under normal circumstances] just one Manager instance, which
holds the hierarchy of loggers.
"""
def __init__(self, rootnode):
"""
Initialize the manager with the root node of the logger hierarchy.
"""
self.root = rootnode
self.disable = 0
self.emittedNoHandlerWarning = False
self.loggerDict = {}
self.loggerClass = None
self.logRecordFactory = None
def getLogger(self, name):
"""
Get a logger with the specified name (channel name), creating it
if it doesn't yet exist. This name is a dot-separated hierarchical
name, such as "a", "a.b", "a.b.c" or similar.
If a PlaceHolder existed for the specified name [i.e. the logger
didn't exist but a child of it did], replace it with the created
logger and fix up the parent/child references which pointed to the
placeholder to now point to the logger.
"""
rv = None
if not isinstance(name, str):
raise TypeError('A logger name must be a string')
_acquireLock()
try:
if name in self.loggerDict:
rv = self.loggerDict[name]
if isinstance(rv, PlaceHolder):
ph = rv
rv = (self.loggerClass or _loggerClass)(name)
rv.manager = self
self.loggerDict[name] = rv
self._fixupChildren(ph, rv)
self._fixupParents(rv)
else:
rv = (self.loggerClass or _loggerClass)(name)
rv.manager = self
self.loggerDict[name] = rv
self._fixupParents(rv)
finally:
_releaseLock()
return rv
def setLoggerClass(self, klass):
"""
Set the class to be used when instantiating a logger with this Manager.
"""
if klass != Logger:
if not issubclass(klass, Logger):
raise TypeError("logger not derived from logging.Logger: "
+ klass.__name__)
self.loggerClass = klass
def setLogRecordFactory(self, factory):
"""
Set the factory to be used when instantiating a log record with this
Manager.
"""
self.logRecordFactory = factory
def _fixupParents(self, alogger):
"""
Ensure that there are either loggers or placeholders all the way
from the specified logger to the root of the logger hierarchy.
"""
name = alogger.name
i = name.rfind(".")
rv = None
while (i > 0) and not rv:
substr = name[:i]
if substr not in self.loggerDict:
self.loggerDict[substr] = PlaceHolder(alogger)
else:
obj = self.loggerDict[substr]
if isinstance(obj, Logger):
rv = obj
else:
assert isinstance(obj, PlaceHolder)
obj.append(alogger)
i = name.rfind(".", 0, i - 1)
if not rv:
rv = self.root
alogger.parent = rv
def _fixupChildren(self, ph, alogger):
"""
Ensure that children of the placeholder ph are connected to the
specified logger.
"""
name = alogger.name
namelen = len(name)
for c in ph.loggerMap.keys():
            # The if means: if not c.parent.name.startswith(name)
if c.parent.name[:namelen] != name:
alogger.parent = c.parent
c.parent = alogger
#---------------------------------------------------------------------------
# Logger classes and functions
#---------------------------------------------------------------------------
class Logger(Filterer):
"""
Instances of the Logger class represent a single logging channel. A
"logging channel" indicates an area of an application. Exactly how an
"area" is defined is up to the application developer. Since an
application can have any number of areas, logging channels are identified
by a unique string. Application areas can be nested (e.g. an area
of "input processing" might include sub-areas "read CSV files", "read
XLS files" and "read Gnumeric files"). To cater for this natural nesting,
channel names are organized into a namespace hierarchy where levels are
separated by periods, much like the Java or Python package namespace. So
in the instance given above, channel names might be "input" for the upper
level, and "input.csv", "input.xls" and "input.gnu" for the sub-levels.
There is no arbitrary limit to the depth of nesting.
"""
def __init__(self, name, level=NOTSET):
"""
Initialize the logger with a name and an optional level.
"""
Filterer.__init__(self)
self.name = name
self.level = _checkLevel(level)
self.parent = None
self.propagate = True
self.handlers = []
self.disabled = False
def setLevel(self, level):
"""
Set the logging level of this logger. level must be an int or a str.
"""
self.level = _checkLevel(level)
def debug(self, msg, *args, **kwargs):
"""
Log 'msg % args' with severity 'DEBUG'.
To pass exception information, use the keyword argument exc_info with
a true value, e.g.
logger.debug("Houston, we have a %s", "thorny problem", exc_info=1)
"""
if self.isEnabledFor(DEBUG):
self._log(DEBUG, msg, args, **kwargs)
def info(self, msg, *args, **kwargs):
"""
Log 'msg % args' with severity 'INFO'.
To pass exception information, use the keyword argument exc_info with
a true value, e.g.
logger.info("Houston, we have a %s", "interesting problem", exc_info=1)
"""
if self.isEnabledFor(INFO):
self._log(INFO, msg, args, **kwargs)
def warning(self, msg, *args, **kwargs):
"""
Log 'msg % args' with severity 'WARNING'.
To pass exception information, use the keyword argument exc_info with
a true value, e.g.
logger.warning("Houston, we have a %s", "bit of a problem", exc_info=1)
"""
if self.isEnabledFor(WARNING):
self._log(WARNING, msg, args, **kwargs)
def warn(self, msg, *args, **kwargs):
warnings.warn("The 'warn' method is deprecated, "
"use 'warning' instead", DeprecationWarning, 2)
self.warning(msg, *args, **kwargs)
def error(self, msg, *args, **kwargs):
"""
Log 'msg % args' with severity 'ERROR'.
To pass exception information, use the keyword argument exc_info with
a true value, e.g.
logger.error("Houston, we have a %s", "major problem", exc_info=1)
"""
if self.isEnabledFor(ERROR):
self._log(ERROR, msg, args, **kwargs)
def exception(self, msg, *args, **kwargs):
"""
Convenience method for logging an ERROR with exception information.
"""
kwargs['exc_info'] = True
self.error(msg, *args, **kwargs)
def critical(self, msg, *args, **kwargs):
"""
Log 'msg % args' with severity 'CRITICAL'.
To pass exception information, use the keyword argument exc_info with
a true value, e.g.
logger.critical("Houston, we have a %s", "major disaster", exc_info=1)
"""
if self.isEnabledFor(CRITICAL):
self._log(CRITICAL, msg, args, **kwargs)
fatal = critical
def log(self, level, msg, *args, **kwargs):
"""
Log 'msg % args' with the integer severity 'level'.
To pass exception information, use the keyword argument exc_info with
a true value, e.g.
logger.log(level, "We have a %s", "mysterious problem", exc_info=1)
"""
if not isinstance(level, int):
if raiseExceptions:
raise TypeError("level must be an integer")
else:
return
if self.isEnabledFor(level):
self._log(level, msg, args, **kwargs)
def findCaller(self, stack_info=False):
"""
Find the stack frame of the caller so that we can note the source
file name, line number and function name.
"""
f = currentframe()
#On some versions of IronPython, currentframe() returns None if
#IronPython isn't run with -X:Frames.
if f is not None:
f = f.f_back
rv = "(unknown file)", 0, "(unknown function)", None
while hasattr(f, "f_code"):
co = f.f_code
filename = os.path.normcase(co.co_filename)
if filename == _srcfile:
f = f.f_back
continue
sinfo = None
if stack_info:
sio = io.StringIO()
sio.write('Stack (most recent call last):\n')
traceback.print_stack(f, file=sio)
sinfo = sio.getvalue()
if sinfo[-1] == '\n':
sinfo = sinfo[:-1]
sio.close()
rv = (co.co_filename, f.f_lineno, co.co_name, sinfo)
break
return rv
def makeRecord(self, name, level, fn, lno, msg, args, exc_info,
func=None, extra=None, sinfo=None):
"""
A factory method which can be overridden in subclasses to create
specialized LogRecords.
"""
rv = _logRecordFactory(name, level, fn, lno, msg, args, exc_info, func,
sinfo)
if extra is not None:
for key in extra:
if (key in ["message", "asctime"]) or (key in rv.__dict__):
raise KeyError("Attempt to overwrite %r in LogRecord" % key)
rv.__dict__[key] = extra[key]
return rv
def _log(self, level, msg, args, exc_info=None, extra=None, stack_info=False):
"""
Low-level logging routine which creates a LogRecord and then calls
all the handlers of this logger to handle the record.
"""
sinfo = None
if _srcfile:
#IronPython doesn't track Python frames, so findCaller raises an
#exception on some versions of IronPython. We trap it here so that
#IronPython can use logging.
try:
fn, lno, func, sinfo = self.findCaller(stack_info)
except ValueError: # pragma: no cover
fn, lno, func = "(unknown file)", 0, "(unknown function)"
else: # pragma: no cover
fn, lno, func = "(unknown file)", 0, "(unknown function)"
if exc_info:
if not isinstance(exc_info, tuple):
exc_info = sys.exc_info()
record = self.makeRecord(self.name, level, fn, lno, msg, args,
exc_info, func, extra, sinfo)
self.handle(record)
def handle(self, record):
"""
Call the handlers for the specified record.
This method is used for unpickled records received from a socket, as
well as those created locally. Logger-level filtering is applied.
"""
if (not self.disabled) and self.filter(record):
self.callHandlers(record)
def addHandler(self, hdlr):
"""
Add the specified handler to this logger.
"""
_acquireLock()
try:
if not (hdlr in self.handlers):
self.handlers.append(hdlr)
finally:
_releaseLock()
def removeHandler(self, hdlr):
"""
Remove the specified handler from this logger.
"""
_acquireLock()
try:
if hdlr in self.handlers:
self.handlers.remove(hdlr)
finally:
_releaseLock()
def hasHandlers(self):
"""
See if this logger has any handlers configured.
Loop through all handlers for this logger and its parents in the
logger hierarchy. Return True if a handler was found, else False.
Stop searching up the hierarchy whenever a logger with the "propagate"
attribute set to zero is found - that will be the last logger which
is checked for the existence of handlers.
"""
c = self
rv = False
while c:
if c.handlers:
rv = True
break
if not c.propagate:
break
else:
c = c.parent
return rv
def callHandlers(self, record):
"""
Pass a record to all relevant handlers.
Loop through all handlers for this logger and its parents in the
logger hierarchy. If no handler was found, output a one-off error
message to sys.stderr. Stop searching up the hierarchy whenever a
logger with the "propagate" attribute set to zero is found - that
will be the last logger whose handlers are called.
"""
c = self
found = 0
while c:
for hdlr in c.handlers:
found = found + 1
if record.levelno >= hdlr.level:
hdlr.handle(record)
if not c.propagate:
c = None #break out
else:
c = c.parent
if (found == 0):
if lastResort:
if record.levelno >= lastResort.level:
lastResort.handle(record)
elif raiseExceptions and not self.manager.emittedNoHandlerWarning:
sys.stderr.write("No handlers could be found for logger"
" \"%s\"\n" % self.name)
self.manager.emittedNoHandlerWarning = True
def getEffectiveLevel(self):
"""
Get the effective level for this logger.
Loop through this logger and its parents in the logger hierarchy,
looking for a non-zero logging level. Return the first one found.
"""
logger = self
while logger:
if logger.level:
return logger.level
logger = logger.parent
return NOTSET
def isEnabledFor(self, level):
"""
Is this logger enabled for level 'level'?
"""
if self.manager.disable >= level:
return False
return level >= self.getEffectiveLevel()
def getChild(self, suffix):
"""
Get a logger which is a descendant to this one.
This is a convenience method, such that
logging.getLogger('abc').getChild('def.ghi')
is the same as
logging.getLogger('abc.def.ghi')
It's useful, for example, when the parent logger is named using
__name__ rather than a literal string.
"""
if self.root is not self:
suffix = '.'.join((self.name, suffix))
return self.manager.getLogger(suffix)
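# A small sketch (hypothetical, never called) of the dotted-name hierarchy
# described in the Logger docstring: getChild() composes names relative to an
# existing logger, which is handy when the parent was named via __name__.
def _example_logger_hierarchy():  # pragma: no cover
    parent = getLogger("input")
    child = parent.getChild("csv")
    assert child is getLogger("input.csv")
    assert child.parent is parent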
class RootLogger(Logger):
"""
A root logger is not that different to any other logger, except that
it must have a logging level and there is only one instance of it in
the hierarchy.
"""
def __init__(self, level):
"""
Initialize the logger with the name "root".
"""
Logger.__init__(self, "root", level)
_loggerClass = Logger
class LoggerAdapter(object):
"""
An adapter for loggers which makes it easier to specify contextual
information in logging output.
"""
def __init__(self, logger, extra):
"""
Initialize the adapter with a logger and a dict-like object which
provides contextual information. This constructor signature allows
easy stacking of LoggerAdapters, if so desired.
You can effectively pass keyword arguments as shown in the
following example:
adapter = LoggerAdapter(someLogger, dict(p1=v1, p2="v2"))
"""
self.logger = logger
self.extra = extra
def process(self, msg, kwargs):
"""
Process the logging message and keyword arguments passed in to
a logging call to insert contextual information. You can either
manipulate the message itself, the keyword args or both. Return
the message and kwargs modified (or not) to suit your needs.
Normally, you'll only need to override this one method in a
LoggerAdapter subclass for your specific needs.
"""
kwargs["extra"] = self.extra
return msg, kwargs
#
# Boilerplate convenience methods
#
def debug(self, msg, *args, **kwargs):
"""
Delegate a debug call to the underlying logger.
"""
self.log(DEBUG, msg, *args, **kwargs)
def info(self, msg, *args, **kwargs):
"""
Delegate an info call to the underlying logger.
"""
self.log(INFO, msg, *args, **kwargs)
def warning(self, msg, *args, **kwargs):
"""
Delegate a warning call to the underlying logger.
"""
self.log(WARNING, msg, *args, **kwargs)
def warn(self, msg, *args, **kwargs):
warnings.warn("The 'warn' method is deprecated, "
"use 'warning' instead", DeprecationWarning, 2)
self.warning(msg, *args, **kwargs)
def error(self, msg, *args, **kwargs):
"""
Delegate an error call to the underlying logger.
"""
self.log(ERROR, msg, *args, **kwargs)
def exception(self, msg, *args, **kwargs):
"""
Delegate an exception call to the underlying logger.
"""
kwargs["exc_info"] = True
self.log(ERROR, msg, *args, **kwargs)
def critical(self, msg, *args, **kwargs):
"""
Delegate a critical call to the underlying logger.
"""
self.log(CRITICAL, msg, *args, **kwargs)
def log(self, level, msg, *args, **kwargs):
"""
Delegate a log call to the underlying logger, after adding
contextual information from this adapter instance.
"""
if self.isEnabledFor(level):
msg, kwargs = self.process(msg, kwargs)
self.logger._log(level, msg, args, **kwargs)
def isEnabledFor(self, level):
"""
Is this logger enabled for level 'level'?
"""
if self.logger.manager.disable >= level:
return False
return level >= self.getEffectiveLevel()
def setLevel(self, level):
"""
Set the specified level on the underlying logger.
"""
self.logger.setLevel(level)
def getEffectiveLevel(self):
"""
Get the effective level for the underlying logger.
"""
return self.logger.getEffectiveLevel()
def hasHandlers(self):
"""
See if the underlying logger has any handlers.
"""
return self.logger.hasHandlers()
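# A minimal sketch (hypothetical, never called) of the contextual-information
# pattern described above: process() injects the adapter's dict as the
# 'extra' keyword, so every record emitted gains an 'ip' attribute.
def _example_logger_adapter():  # pragma: no cover
    adapter = LoggerAdapter(getLogger("app"), {"ip": "127.0.0.1"})
    adapter.info("request received from %s", "client")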
root = RootLogger(WARNING)
Logger.root = root
Logger.manager = Manager(Logger.root)
#---------------------------------------------------------------------------
# Configuration classes and functions
#---------------------------------------------------------------------------
def basicConfig(**kwargs):
"""
Do basic configuration for the logging system.
This function does nothing if the root logger already has handlers
configured. It is a convenience method intended for use by simple scripts
to do one-shot configuration of the logging package.
The default behaviour is to create a StreamHandler which writes to
sys.stderr, set a formatter using the BASIC_FORMAT format string, and
add the handler to the root logger.
A number of optional keyword arguments may be specified, which can alter
the default behaviour.
filename Specifies that a FileHandler be created, using the specified
filename, rather than a StreamHandler.
filemode Specifies the mode to open the file, if filename is specified
(if filemode is unspecified, it defaults to 'a').
format Use the specified format string for the handler.
datefmt Use the specified date/time format.
style If a format string is specified, use this to specify the
type of format string (possible values '%', '{', '$', for
%-formatting, :meth:`str.format` and :class:`string.Template`
- defaults to '%').
level Set the root logger level to the specified level.
stream Use the specified stream to initialize the StreamHandler. Note
that this argument is incompatible with 'filename' - if both
are present, 'stream' is ignored.
handlers If specified, this should be an iterable of already created
handlers, which will be added to the root handler. Any handler
in the list which does not have a formatter assigned will be
assigned the formatter created in this function.
Note that you could specify a stream created using open(filename, mode)
rather than passing the filename and mode in. However, it should be
remembered that StreamHandler does not close its stream (since it may be
using sys.stdout or sys.stderr), whereas FileHandler closes its stream
when the handler is closed.
.. versionchanged:: 3.2
Added the ``style`` parameter.
.. versionchanged:: 3.3
       Added the ``handlers`` parameter. A ``ValueError`` is now raised for
       incompatible arguments (e.g. ``handlers`` specified together with
       ``filename``/``filemode``, or ``filename``/``filemode`` specified
       together with ``stream``, or ``handlers`` specified together with
       ``stream``).
"""
# Add thread safety in case someone mistakenly calls
# basicConfig() from multiple threads
_acquireLock()
try:
if len(root.handlers) == 0:
handlers = kwargs.get("handlers")
if handlers is None:
if "stream" in kwargs and "filename" in kwargs:
raise ValueError("'stream' and 'filename' should not be "
"specified together")
else:
if "stream" in kwargs or "filename" in kwargs:
raise ValueError("'stream' or 'filename' should not be "
"specified together with 'handlers'")
if handlers is None:
filename = kwargs.get("filename")
if filename:
mode = kwargs.get("filemode", 'a')
h = FileHandler(filename, mode)
else:
stream = kwargs.get("stream")
h = StreamHandler(stream)
handlers = [h]
dfs = kwargs.get("datefmt", None)
style = kwargs.get("style", '%')
if style not in _STYLES:
raise ValueError('Style must be one of: %s' % ','.join(
_STYLES.keys()))
fs = kwargs.get("format", _STYLES[style][1])
fmt = Formatter(fs, dfs, style)
for h in handlers:
if h.formatter is None:
h.setFormatter(fmt)
root.addHandler(h)
level = kwargs.get("level")
if level is not None:
root.setLevel(level)
finally:
_releaseLock()
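# A minimal sketch (hypothetical, never called) of the keyword arguments
# documented above; style='{' selects str.format-style format strings.
def _example_basic_config():  # pragma: no cover
    basicConfig(level=DEBUG,
                format='{asctime} {levelname} {message}',
                style='{')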
#---------------------------------------------------------------------------
# Utility functions at module level.
# Basically delegate everything to the root logger.
#---------------------------------------------------------------------------
def getLogger(name=None):
"""
Return a logger with the specified name, creating it if necessary.
If no name is specified, return the root logger.
"""
if name:
return Logger.manager.getLogger(name)
else:
return root
def critical(msg, *args, **kwargs):
"""
Log a message with severity 'CRITICAL' on the root logger. If the logger
has no handlers, call basicConfig() to add a console handler with a
pre-defined format.
"""
if len(root.handlers) == 0:
basicConfig()
root.critical(msg, *args, **kwargs)
fatal = critical
def error(msg, *args, **kwargs):
"""
Log a message with severity 'ERROR' on the root logger. If the logger has
no handlers, call basicConfig() to add a console handler with a pre-defined
format.
"""
if len(root.handlers) == 0:
basicConfig()
root.error(msg, *args, **kwargs)
def exception(msg, *args, **kwargs):
"""
Log a message with severity 'ERROR' on the root logger, with exception
information. If the logger has no handlers, basicConfig() is called to add
a console handler with a pre-defined format.
"""
kwargs['exc_info'] = True
error(msg, *args, **kwargs)
def warning(msg, *args, **kwargs):
"""
Log a message with severity 'WARNING' on the root logger. If the logger has
no handlers, call basicConfig() to add a console handler with a pre-defined
format.
"""
if len(root.handlers) == 0:
basicConfig()
root.warning(msg, *args, **kwargs)
def warn(msg, *args, **kwargs):
warnings.warn("The 'warn' function is deprecated, "
"use 'warning' instead", DeprecationWarning, 2)
warning(msg, *args, **kwargs)
def info(msg, *args, **kwargs):
"""
Log a message with severity 'INFO' on the root logger. If the logger has
no handlers, call basicConfig() to add a console handler with a pre-defined
format.
"""
if len(root.handlers) == 0:
basicConfig()
root.info(msg, *args, **kwargs)
def debug(msg, *args, **kwargs):
"""
Log a message with severity 'DEBUG' on the root logger. If the logger has
no handlers, call basicConfig() to add a console handler with a pre-defined
format.
"""
if len(root.handlers) == 0:
basicConfig()
root.debug(msg, *args, **kwargs)
def log(level, msg, *args, **kwargs):
"""
Log 'msg % args' with the integer severity 'level' on the root logger. If
the logger has no handlers, call basicConfig() to add a console handler
with a pre-defined format.
"""
if len(root.handlers) == 0:
basicConfig()
root.log(level, msg, *args, **kwargs)
def disable(level):
"""
Disable all logging calls of severity 'level' and below.
"""
root.manager.disable = level
def shutdown(handlerList=_handlerList):
"""
Perform any cleanup actions in the logging system (e.g. flushing
buffers).
Should be called at application exit.
"""
for wr in reversed(handlerList[:]):
#errors might occur, for example, if files are locked
#we just ignore them if raiseExceptions is not set
try:
h = wr()
if h:
try:
h.acquire()
h.flush()
h.close()
except (OSError, ValueError):
# Ignore errors which might be caused
# because handlers have been closed but
# references to them are still around at
# application exit.
pass
finally:
h.release()
except: # ignore everything, as we're shutting down
if raiseExceptions:
raise
#else, swallow
#Let's try and shutdown automatically on application exit...
import atexit
atexit.register(shutdown)
# Null handler
class NullHandler(Handler):
"""
This handler does nothing. It's intended to be used to avoid the
"No handlers could be found for logger XXX" one-off warning. This is
important for library code, which may contain code to log events. If a user
of the library does not configure logging, the one-off warning might be
produced; to avoid this, the library developer simply needs to instantiate
a NullHandler and add it to the top-level logger of the library module or
package.
"""
def handle(self, record):
"""Stub."""
def emit(self, record):
"""Stub."""
def createLock(self):
self.lock = None
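# A minimal sketch (hypothetical, never called) of the library-author usage
# described in the NullHandler docstring: attach one to the package's
# top-level logger so unconfigured users never see the one-off warning.
def _example_null_handler():  # pragma: no cover
    getLogger("mylibrary").addHandler(NullHandler())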
# Warnings integration
_warnings_showwarning = None
def _showwarning(message, category, filename, lineno, file=None, line=None):
"""
    Implementation of showwarning which redirects to logging, which will first
check to see if the file parameter is None. If a file is specified, it will
delegate to the original warnings implementation of showwarning. Otherwise,
it will call warnings.formatwarning and will log the resulting string to a
warnings logger named "py.warnings" with level logging.WARNING.
"""
if file is not None:
if _warnings_showwarning is not None:
_warnings_showwarning(message, category, filename, lineno, file, line)
else:
s = warnings.formatwarning(message, category, filename, lineno, line)
logger = getLogger("py.warnings")
if not logger.handlers:
logger.addHandler(NullHandler())
logger.warning("%s", s)
def captureWarnings(capture):
"""
If capture is true, redirect all warnings to the logging package.
If capture is False, ensure that warnings are not redirected to logging
but to their original destinations.
"""
global _warnings_showwarning
if capture:
if _warnings_showwarning is None:
_warnings_showwarning = warnings.showwarning
warnings.showwarning = _showwarning
else:
if _warnings_showwarning is not None:
warnings.showwarning = _warnings_showwarning
_warnings_showwarning = None
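# A minimal sketch (hypothetical, never called) of the round trip: enable
# capture, emit a warning (routed to the "py.warnings" logger at WARNING
# level), then restore the original warnings behaviour.
def _example_capture_warnings():  # pragma: no cover
    captureWarnings(True)
    warnings.warn("this goes to the 'py.warnings' logger")
    captureWarnings(False)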
|
mdsafwan/Deal-My-Stuff
|
refs/heads/master
|
login/migrations/0007_auto_20151115_1408.py
|
1
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('login', '0006_auto_20151115_1149'),
]
operations = [
migrations.AlterModelOptions(
name='user_login',
options={'managed': True},
),
migrations.AlterModelTable(
name='user_login',
table='user_login',
),
]
|
CorverDevelopment/Pandora
|
refs/heads/master
|
src/pandora/monitor.py
|
1
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
from os import path
from watchdog.events import PatternMatchingEventHandler
from watchdog.observers import Observer
from watchdog.observers.polling import PollingObserver
import inspect
import datetime as dt
import itertools
import sys
import time
class Monitor(object):
"""Monitoring your project files.
**Parameters**
:param basedir: The directory to start watching from.
Defaults to `'.'`.
**Attributes**
:ivar observer: The watchdog.Observer instance.
:ivar handlers: All the attached handlers.
:ivar running: Boolean flag, to control the running while loop.
**Basic Usage** ::
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# -*- filename: monitor.py
from pandora import Monitor
import logging
monitor = Monitor()
logging.basicConfig(
level=logging.INFO,
format='%(asctime)-15s %(levelname)-6s %(message)s',
datefmt='%Y-%m-%d %H:%M:%S')
logger = logging.getLogger(__name__)
@monitor.listen(['monitor.py'], on_startup=False)
def reload(event=None, source=None):
logger.info('Should reload the monitor.')
monitor.restart_process()
monitor.run()
"""
def __init__(self, basedir='.', polling=False):
"""Initialize the Monitor."""
if not polling:
self.observer = Observer()
else:
self.observer = PollingObserver()
self.handlers = []
self.basedir = basedir
self.running = False
def stop(self):
"""Stop the observer manually."""
self.running = False
self.observer.stop()
self.observer.join()
def run(self, delay=1):
"""Start the observer and run the handlers when files change.
This will catch the `KeyboardInterrupt` (`control-c`) exception. It
will stop and join the observer process. After this your script
continues.
"""
self.observer.start()
self.running = True
tpl = 'Waiting for changes [{:4s}]'
items = [tpl.format('.' * i) for i in range(1, 5)]
spinner = itertools.cycle(items)
try:
while self.running:
now = dt.datetime.now()
sys.stdout.write(now.strftime('%H:%M:%S - '))
sys.stdout.write(next(spinner))
sys.stdout.flush()
time.sleep(delay)
sys.stdout.write('\b' * (len(items[0]) + 11))
sys.stdout.flush()
for handler in self.handlers:
handler.run()
except KeyboardInterrupt: # pragma: no cover
self.observer.stop()
self.observer.join()
def listen(self, patterns, on_startup=False):
"""A decorator to attach a handler to the observer.
        The patterns are looped through, and the `basedir` is prepended
        to each pattern. If a pattern starts with an exclamation mark (!),
        it is marked as an `exclude` pattern.
**Parameters**
        :param patterns: A list of patterns to match files against.
Example: `['*.py', '*.ini', '!monitor.py']`.
        :param on_startup: Should the handler be run as soon as the
                           monitor starts?
Defaults to `False`.
:type patterns: list
:type on_startup: bool
        :return: A wrapper function, to be used as a decorator.
:rtype: callable
"""
include = []
exclude = []
for pattern in patterns:
if pattern[0] == '!':
pattern = pattern[1:]
into = exclude
else:
into = include
into.append(path.join(self.basedir, pattern))
if not len(include):
include = None
if not len(exclude):
exclude = None
def wrapper(callback):
handler = Handler(include, exclude, callback, on_startup)
self.handlers.append(handler)
self.observer.schedule(handler, self.basedir, recursive=True)
return callback
return wrapper
def restart_process(self): # pragma: no cover
"""Restart the current python process.
        This is particularly handy if your monitor `.py` file has changed. It
        simply calls :meth:`stop` and restarts itself.
"""
from os import execl
import sys
self.stop()
execl(sys.executable, sys.executable, *sys.argv)
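# A minimal sketch (hypothetical, never executed) of the pattern syntax that
# listen() parses above: plain entries are includes, a leading '!' marks an
# exclude, and on_startup=True runs the handler on the first iteration.
def _example_listen():  # pragma: no cover
    monitor = Monitor(basedir='.')

    @monitor.listen(['*.py', '!monitor.py'], on_startup=True)
    def rebuild(event=None, source=None):
        print('change detected:', event, source)

    monitor.run()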
class Handler(PatternMatchingEventHandler):
"""The actual `watchdog` event handler.
    The handler will run the attached callback on the `any_event`. This means
    that if `anything` happens to the file, the `must_run` flag will be set
    and the monitor will run the handler on the next iteration.
    By using the `must_run` flag and letting the monitor call the handler
    on every `1 second` iteration, we make sure that the callback is not run
    2 or 3 times in a row.
**Parameters**
:param include: Files matching this list are included.
Example: `['*.py', '*.ini']`.
:param exclude: Files matching this list are excluded.
Example: `['*/__init__.py']`.
    :param callback: A callable that will be run after the event.
    :param on_startup: Should the handler be run as soon as the
                       monitor starts?
:type include: list
:type exclude: list
:type callback: callable
:type on_startup: bool
**Attributes**
:ivar must_run: A flag that will be set to `True` when the `any_event`
is triggered by `watchdog`.
"""
def __init__(self, include, exclude, callback, on_startup):
"""Initialize the Handler."""
self.name = callback.__name__
self.must_run = False
self.changed_src_path = None
self.changed_event = None
self.callback = callback
self.callback_args = inspect.getargspec(self.callback).args
super(Handler, self).__init__(include, exclude)
if on_startup:
self.must_run = True
def on_any_event(self, event):
"""Toggle the `must_run` attribute to `True`.
**Parameters**
:param event: The `watchdog` event.
:type event: watchdog.events.FileSystemEvent
"""
if not self.must_run:
self.changed_src_path = event.src_path
self.changed_event = event.event_type
self.must_run = True
def run(self):
"""Check the `must_run` attribute and executes the callback."""
if self.must_run:
if self.changed_src_path:
now = dt.datetime.now()
sys.stdout.write('{:s} - {:>15s} - {:7s} {}\n'.format(
now.strftime('%H:%M:%S'), self.callback.__name__,
self.changed_event,
self.changed_src_path))
sys.stdout.flush()
args = dict()
if 'event' in self.callback_args:
args['event'] = self.changed_event
if 'source' in self.callback_args:
args['source'] = self.changed_src_path
self.callback(**args)
self.must_run = False
self.changed_src_path = None
def __repr__(self):
patterns = []
if self._patterns:
patterns.append(', '.join('"%s"' % p for p in
self._patterns))
if self._ignore_patterns:
patterns.append(', '.join('"!%s"' % p for p in
self._ignore_patterns))
return '<Handler {:s} [{:s}]>'.format(
self.name, ', '.join(patterns))
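# A minimal sketch (hypothetical, never called) of the must_run debounce
# described in the Handler docstring: many events between two monitor
# iterations still produce a single callback invocation.
def _example_debounce():  # pragma: no cover
    handler = Handler(['*.py'], None, lambda: None, on_startup=False)
    handler.must_run = True   # as toggled by on_any_event
    handler.run()             # invokes the callback once and clears the flag
    assert handler.must_run is False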
|
lukeroge/Ralybot
|
refs/heads/python3.4
|
plugins/steam_store.py
|
34
|
import re
import requests
from bs4 import BeautifulSoup
from cloudbot import hook
from cloudbot.util import web, formatting
# CONSTANTS
steam_re = re.compile(r'.*://store.steampowered.com/app/([0-9]+)?.*', re.I)
API_URL = "http://store.steampowered.com/api/appdetails/"
STORE_URL = "http://store.steampowered.com/app/{}/"
# OTHER FUNCTIONS
def format_game(app_id, show_url=True):
"""
Takes a Steam Store app ID and returns a formatted string with data about that app ID
:type app_id: string
:return: string
"""
params = {'appids': app_id}
try:
request = requests.get(API_URL, params=params, timeout=15)
request.raise_for_status()
except (requests.exceptions.HTTPError, requests.exceptions.ConnectionError) as e:
return "Could not get game info: {}".format(e)
data = request.json()
game = data[app_id]["data"]
# basic info
out = ["\x02{}\x02".format(game["name"])]
desc = " ".join(formatting.strip_html(game["about_the_game"]).split())
out.append(formatting.truncate(desc, 75))
# genres
try:
genres = ", ".join([g['description'] for g in game["genres"]])
out.append("\x02{}\x02".format(genres))
except KeyError:
# some things have no genre
pass
# release date
if game['release_date']['coming_soon']:
out.append("coming \x02{}\x02".format(game['release_date']['date']))
else:
out.append("released \x02{}\x02".format(game['release_date']['date']))
# pricing
if game['is_free']:
out.append("\x02free\x02")
elif not game.get("price_overview"):
# game has no pricing, it's probably not released yet
pass
else:
price = game['price_overview']
        # the steam API sends prices as an int like "1999" for $19.99, so we divmod to get the actual price
if price['final'] == price['initial']:
out.append("\x02$%d.%02d\x02" % divmod(price['final'], 100))
else:
price_now = "$%d.%02d" % divmod(price['final'], 100)
price_original = "$%d.%02d" % divmod(price['initial'], 100)
out.append("\x02{}\x02 (was \x02{}\x02)".format(price_now, price_original))
if show_url:
url = web.try_shorten(STORE_URL.format(game['steam_appid']))
out.append(url)
return " - ".join(out)
# HOOK FUNCTIONS
@hook.command()
def steam(text):
"""<query> - Search for specified game/trailer/DLC"""
params = {'term': text.strip().lower()}
try:
request = requests.get("http://store.steampowered.com/search/", params=params)
request.raise_for_status()
except (requests.exceptions.HTTPError, requests.exceptions.ConnectionError) as e:
return "Could not get game info: {}".format(e)
soup = BeautifulSoup(request.text, from_encoding="utf-8")
result = soup.find('a', {'class': 'search_result_row'})
if not result:
return "No game found."
app_id = result['data-ds-appid']
return format_game(app_id)
@hook.regex(steam_re)
def steam_url(match):
app_id = match.group(1)
return format_game(app_id, show_url=False)
|
supersven/intellij-community
|
refs/heads/master
|
python/testData/copyPaste/IndentIncrease.src.py
|
83
|
<selection>def foo():
pass</selection>
|
yongshengwang/builthue
|
refs/heads/master
|
desktop/core/ext-py/pytz-2014.2/pytz/lazy.py
|
514
|
from threading import RLock
try:
from UserDict import DictMixin
except ImportError:
from collections import Mapping as DictMixin
# With lazy loading, we might end up with multiple threads triggering
# it at the same time. We need a lock.
_fill_lock = RLock()
class LazyDict(DictMixin):
"""Dictionary populated on first use."""
data = None
def __getitem__(self, key):
if self.data is None:
_fill_lock.acquire()
try:
if self.data is None:
self._fill()
finally:
_fill_lock.release()
return self.data[key.upper()]
def __contains__(self, key):
if self.data is None:
_fill_lock.acquire()
try:
if self.data is None:
self._fill()
finally:
_fill_lock.release()
return key in self.data
def __iter__(self):
if self.data is None:
_fill_lock.acquire()
try:
if self.data is None:
self._fill()
finally:
_fill_lock.release()
return iter(self.data)
def __len__(self):
if self.data is None:
_fill_lock.acquire()
try:
if self.data is None:
self._fill()
finally:
_fill_lock.release()
return len(self.data)
def keys(self):
if self.data is None:
_fill_lock.acquire()
try:
if self.data is None:
self._fill()
finally:
_fill_lock.release()
return self.data.keys()
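# A minimal sketch (hypothetical, never used) of a LazyDict subclass: the
# double-checked locking above guarantees _fill runs exactly once, on
# whichever access happens first, even with several threads racing.
class _ExampleLazyDict(LazyDict):
    def _fill(self):
        self.data = {'UTC': 'Coordinated Universal Time'}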
class LazyList(list):
"""List populated on first use."""
_props = [
'__str__', '__repr__', '__unicode__',
'__hash__', '__sizeof__', '__cmp__',
'__lt__', '__le__', '__eq__', '__ne__', '__gt__', '__ge__',
'append', 'count', 'index', 'extend', 'insert', 'pop', 'remove',
'reverse', 'sort', '__add__', '__radd__', '__iadd__', '__mul__',
'__rmul__', '__imul__', '__contains__', '__len__', '__nonzero__',
'__getitem__', '__setitem__', '__delitem__', '__iter__',
'__reversed__', '__getslice__', '__setslice__', '__delslice__']
def __new__(cls, fill_iter=None):
if fill_iter is None:
return list()
# We need a new class as we will be dynamically messing with its
# methods.
class LazyList(list):
pass
fill_iter = [fill_iter]
def lazy(name):
def _lazy(self, *args, **kw):
_fill_lock.acquire()
try:
if len(fill_iter) > 0:
list.extend(self, fill_iter.pop())
for method_name in cls._props:
delattr(LazyList, method_name)
finally:
_fill_lock.release()
return getattr(list, name)(self, *args, **kw)
return _lazy
for name in cls._props:
setattr(LazyList, name, lazy(name))
new_list = LazyList()
return new_list
# Not all versions of Python declare the same magic methods.
# Filter out properties that don't exist in this version of Python
# from the list.
LazyList._props = [prop for prop in LazyList._props if hasattr(list, prop)]
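# A minimal sketch (hypothetical, never called) of the deferred fill: the
# first list operation pops the stored iterator, extends the list, then
# strips the lazy wrappers so later calls hit plain list methods.
def _example_lazy_list():  # pragma: no cover
    lazy_list = LazyList(iter([1, 2, 3]))
    assert len(lazy_list) == 3  # the first access triggers the fill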
class LazySet(set):
"""Set populated on first use."""
_props = (
'__str__', '__repr__', '__unicode__',
'__hash__', '__sizeof__', '__cmp__',
'__lt__', '__le__', '__eq__', '__ne__', '__gt__', '__ge__',
'__contains__', '__len__', '__nonzero__',
'__getitem__', '__setitem__', '__delitem__', '__iter__',
'__sub__', '__and__', '__xor__', '__or__',
'__rsub__', '__rand__', '__rxor__', '__ror__',
'__isub__', '__iand__', '__ixor__', '__ior__',
'add', 'clear', 'copy', 'difference', 'difference_update',
'discard', 'intersection', 'intersection_update', 'isdisjoint',
'issubset', 'issuperset', 'pop', 'remove',
'symmetric_difference', 'symmetric_difference_update',
'union', 'update')
def __new__(cls, fill_iter=None):
if fill_iter is None:
return set()
class LazySet(set):
pass
fill_iter = [fill_iter]
def lazy(name):
def _lazy(self, *args, **kw):
_fill_lock.acquire()
try:
if len(fill_iter) > 0:
for i in fill_iter.pop():
set.add(self, i)
for method_name in cls._props:
delattr(LazySet, method_name)
finally:
_fill_lock.release()
return getattr(set, name)(self, *args, **kw)
return _lazy
for name in cls._props:
setattr(LazySet, name, lazy(name))
new_set = LazySet()
return new_set
# Not all versions of Python declare the same magic methods.
# Filter out properties that don't exist in this version of Python
# from the list.
LazySet._props = [prop for prop in LazySet._props if hasattr(set, prop)]
|
anbangr/trusted-nova
|
refs/heads/master
|
nova/virt/xenapi/vm_utils.py
|
4
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2010 Citrix Systems, Inc.
# Copyright 2011 Piston Cloud Computing, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Helper methods for operations related to the management of VM records and
their attributes like VDIs, VIFs, as well as their lookup functions.
"""
import contextlib
import cPickle as pickle
import decimal
import json
import os
import re
import time
import urllib
import urlparse
import uuid
from xml.dom import minidom
from xml.parsers import expat
from eventlet import greenthread
from nova import exception
from nova import flags
from nova.image import glance
from nova import log as logging
from nova.openstack.common import cfg
from nova import utils
from nova.compute import instance_types
from nova.compute import power_state
from nova.virt.disk import api as disk
from nova.virt import xenapi
from nova.virt.xenapi import volume_utils
LOG = logging.getLogger(__name__)
xenapi_vm_utils_opts = [
cfg.StrOpt('default_os_type',
default='linux',
help='Default OS type'),
cfg.IntOpt('block_device_creation_timeout',
default=10,
help='time to wait for a block device to be created'),
cfg.IntOpt('max_kernel_ramdisk_size',
default=16 * 1024 * 1024,
help='maximum size in bytes of kernel or ramdisk images'),
cfg.StrOpt('sr_matching_filter',
default='other-config:i18n-key=local-storage',
help='Filter for finding the SR to be used to install guest '
'instances on. The default value is the Local Storage in '
'default XenServer/XCP installations. To select an SR '
'with a different matching criteria, you could set it to '
'other-config:my_favorite_sr=true. On the other hand, to '
'fall back on the Default SR, as displayed by XenCenter, '
'set this flag to: default-sr:true'),
cfg.BoolOpt('xenapi_sparse_copy',
default=True,
help='Whether to use sparse_copy for copying data on a '
'resize down (False will use standard dd). This speeds '
'up resizes down considerably since large runs of zeros '
'won\'t have to be rsynced')
]
FLAGS = flags.FLAGS
FLAGS.register_opts(xenapi_vm_utils_opts)
XENAPI_POWER_STATE = {
'Halted': power_state.SHUTDOWN,
'Running': power_state.RUNNING,
'Paused': power_state.PAUSED,
'Suspended': power_state.SUSPENDED,
'Crashed': power_state.CRASHED}
SECTOR_SIZE = 512
MBR_SIZE_SECTORS = 63
MBR_SIZE_BYTES = MBR_SIZE_SECTORS * SECTOR_SIZE
KERNEL_DIR = '/boot/guest'
class ImageType:
"""Enumeration class for distinguishing different image types
| 0 - kernel image (goes on dom0's filesystem)
| 1 - ramdisk image (goes on dom0's filesystem)
| 2 - disk image (local SR, partitioned by objectstore plugin)
| 3 - raw disk image (local SR, NOT partitioned by plugin)
| 4 - vhd disk image (local SR, NOT inspected by XS, PV assumed for
| linux, HVM assumed for Windows)
| 5 - ISO disk image (local SR, NOT partitioned by plugin)
"""
KERNEL = 0
RAMDISK = 1
DISK = 2
DISK_RAW = 3
DISK_VHD = 4
DISK_ISO = 5
_ids = (KERNEL, RAMDISK, DISK, DISK_RAW, DISK_VHD, DISK_ISO)
KERNEL_STR = "kernel"
RAMDISK_STR = "ramdisk"
DISK_STR = "os"
DISK_RAW_STR = "os_raw"
DISK_VHD_STR = "vhd"
DISK_ISO_STR = "iso"
_strs = (KERNEL_STR, RAMDISK_STR, DISK_STR, DISK_RAW_STR, DISK_VHD_STR,
DISK_ISO_STR)
@classmethod
def to_string(cls, image_type):
return dict(zip(ImageType._ids, ImageType._strs)).get(image_type)
@classmethod
def from_string(cls, image_type_str):
return dict(zip(ImageType._strs, ImageType._ids)).get(image_type_str)
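# A small sketch (hypothetical, never called) of the two classmethods above,
# which translate between the numeric IDs and their string labels.
def _example_image_type():  # pragma: no cover
    assert ImageType.to_string(ImageType.DISK_VHD) == ImageType.DISK_VHD_STR
    assert ImageType.from_string("vhd") == ImageType.DISK_VHD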
class VMHelper(xenapi.HelperBase):
"""
The class that wraps the helper methods together.
"""
@classmethod
def create_vm(cls, session, instance, kernel, ramdisk,
use_pv_kernel=False):
"""Create a VM record. Returns a Deferred that gives the new
VM reference.
the use_pv_kernel flag indicates whether the guest is HVM or PV
There are 3 scenarios:
1. Using paravirtualization, kernel passed in
2. Using paravirtualization, kernel within the image
3. Using hardware virtualization
"""
inst_type_id = instance.instance_type_id
instance_type = instance_types.get_instance_type(inst_type_id)
mem = str(long(instance_type['memory_mb']) * 1024 * 1024)
vcpus = str(instance_type['vcpus'])
rec = {
'actions_after_crash': 'destroy',
'actions_after_reboot': 'restart',
'actions_after_shutdown': 'destroy',
'affinity': '',
'blocked_operations': {},
'ha_always_run': False,
'ha_restart_priority': '',
'HVM_boot_params': {},
'HVM_boot_policy': '',
'is_a_template': False,
'memory_dynamic_min': mem,
'memory_dynamic_max': mem,
'memory_static_min': '0',
'memory_static_max': mem,
'memory_target': mem,
'name_description': '',
'name_label': instance.name,
'other_config': {'allowvssprovider': str(False),
'nova_uuid': str(instance.uuid), },
'PCI_bus': '',
'platform': {'acpi': 'true', 'apic': 'true', 'pae': 'true',
'viridian': 'true', 'timeoffset': '0'},
'PV_args': '',
'PV_bootloader': '',
'PV_bootloader_args': '',
'PV_kernel': '',
'PV_legacy_args': '',
'PV_ramdisk': '',
'recommendations': '',
'tags': [],
'user_version': '0',
'VCPUs_at_startup': vcpus,
'VCPUs_max': vcpus,
'VCPUs_params': {},
'xenstore_data': {}}
# Complete VM configuration record according to the image type
# non-raw/raw with PV kernel/raw in HVM mode
if use_pv_kernel:
rec['platform']['nx'] = 'false'
if instance.kernel_id:
# 1. Kernel explicitly passed in, use that
rec['PV_args'] = 'root=/dev/xvda1'
rec['PV_kernel'] = kernel
rec['PV_ramdisk'] = ramdisk
else:
# 2. Use kernel within the image
rec['PV_bootloader'] = 'pygrub'
else:
# 3. Using hardware virtualization
rec['platform']['nx'] = 'true'
rec['HVM_boot_params'] = {'order': 'dc'}
rec['HVM_boot_policy'] = 'BIOS order'
LOG.debug(_('Created VM %s...'), instance.name)
vm_ref = session.call_xenapi('VM.create', rec)
instance_name = instance.name
LOG.debug(_('Created VM %(instance_name)s as %(vm_ref)s.') % locals())
return vm_ref
@classmethod
def ensure_free_mem(cls, session, instance):
inst_type_id = instance.instance_type_id
instance_type = instance_types.get_instance_type(inst_type_id)
mem = long(instance_type['memory_mb']) * 1024 * 1024
#get free memory from host
host = session.get_xenapi_host()
host_free_mem = long(session.call_xenapi("host.compute_free_memory",
host))
return host_free_mem >= mem
@classmethod
def find_vbd_by_number(cls, session, vm_ref, number):
"""Get the VBD reference from the device number"""
vbd_refs = session.call_xenapi("VM.get_VBDs", vm_ref)
if vbd_refs:
for vbd_ref in vbd_refs:
try:
vbd_rec = session.call_xenapi("VBD.get_record", vbd_ref)
if vbd_rec['userdevice'] == str(number):
return vbd_ref
except cls.XenAPI.Failure, exc:
LOG.exception(exc)
raise volume_utils.StorageError(
_('VBD not found in instance %s') % vm_ref)
@classmethod
def unplug_vbd(cls, session, vbd_ref):
"""Unplug VBD from VM"""
try:
vbd_ref = session.call_xenapi('VBD.unplug', vbd_ref)
except cls.XenAPI.Failure, exc:
LOG.exception(exc)
if exc.details[0] != 'DEVICE_ALREADY_DETACHED':
raise volume_utils.StorageError(
_('Unable to unplug VBD %s') % vbd_ref)
@classmethod
def destroy_vbd(cls, session, vbd_ref):
"""Destroy VBD from host database"""
try:
session.call_xenapi('VBD.destroy', vbd_ref)
except cls.XenAPI.Failure, exc:
LOG.exception(exc)
raise volume_utils.StorageError(
_('Unable to destroy VBD %s') % vbd_ref)
@classmethod
def create_vbd(cls, session, vm_ref, vdi_ref, userdevice,
vbd_type='disk', read_only=False, bootable=False):
"""Create a VBD record and returns its reference."""
vbd_rec = {}
vbd_rec['VM'] = vm_ref
vbd_rec['VDI'] = vdi_ref
vbd_rec['userdevice'] = str(userdevice)
vbd_rec['bootable'] = bootable
vbd_rec['mode'] = read_only and 'RO' or 'RW'
vbd_rec['type'] = vbd_type
vbd_rec['unpluggable'] = True
vbd_rec['empty'] = False
vbd_rec['other_config'] = {}
vbd_rec['qos_algorithm_type'] = ''
vbd_rec['qos_algorithm_params'] = {}
vbd_rec['qos_supported_algorithms'] = []
LOG.debug(_('Creating %(vbd_type)s-type VBD for VM %(vm_ref)s,'
' VDI %(vdi_ref)s ... ') % locals())
vbd_ref = session.call_xenapi('VBD.create', vbd_rec)
LOG.debug(_('Created VBD %(vbd_ref)s for VM %(vm_ref)s,'
' VDI %(vdi_ref)s.') % locals())
return vbd_ref
@classmethod
def destroy_vdi(cls, session, vdi_ref):
try:
session.call_xenapi('VDI.destroy', vdi_ref)
except cls.XenAPI.Failure, exc:
LOG.exception(exc)
raise volume_utils.StorageError(
_('Unable to destroy VDI %s') % vdi_ref)
@classmethod
def create_vdi(cls, session, sr_ref, name_label, virtual_size, read_only):
"""Create a VDI record and returns its reference."""
vdi_ref = session.call_xenapi("VDI.create",
{'name_label': name_label,
'name_description': '',
'SR': sr_ref,
'virtual_size': str(virtual_size),
'type': 'User',
'sharable': False,
'read_only': read_only,
'xenstore_data': {},
'other_config': {},
'sm_config': {},
'tags': []})
LOG.debug(_('Created VDI %(vdi_ref)s (%(name_label)s,'
' %(virtual_size)s, %(read_only)s) on %(sr_ref)s.')
% locals())
return vdi_ref
@classmethod
def copy_vdi(cls, session, sr_ref, vdi_to_copy_ref):
"""Copy a VDI and return the new VDIs reference."""
vdi_ref = session.call_xenapi('VDI.copy', vdi_to_copy_ref, sr_ref)
LOG.debug(_('Copied VDI %(vdi_ref)s from VDI '
'%(vdi_to_copy_ref)s on %(sr_ref)s.') % locals())
return vdi_ref
@classmethod
def clone_vdi(cls, session, vdi_to_clone_ref):
"""Clones a VDI and return the new VDIs reference."""
vdi_ref = session.call_xenapi('VDI.clone', vdi_to_clone_ref)
LOG.debug(_('Cloned VDI %(vdi_ref)s from VDI '
'%(vdi_to_clone_ref)s') % locals())
return vdi_ref
@classmethod
def set_vdi_name_label(cls, session, vdi_uuid, name_label):
vdi_ref = session.call_xenapi("VDI.get_by_uuid", vdi_uuid)
session.call_xenapi("VDI.set_name_label", vdi_ref, name_label)
@classmethod
def get_vdi_for_vm_safely(cls, session, vm_ref):
"""Retrieves the primary VDI for a VM"""
vbd_refs = session.call_xenapi("VM.get_VBDs", vm_ref)
for vbd in vbd_refs:
vbd_rec = session.call_xenapi("VBD.get_record", vbd)
# Convention dictates the primary VDI will be userdevice 0
if vbd_rec['userdevice'] == '0':
vdi_rec = session.call_xenapi("VDI.get_record", vbd_rec['VDI'])
return vbd_rec['VDI'], vdi_rec
        raise exception.Error(_("No primary VDI found for "
                                "%(vm_ref)s") % locals())
@classmethod
def create_snapshot(cls, session, instance, vm_ref, label):
"""Creates Snapshot (Template) VM, Snapshot VBD, Snapshot VDI,
Snapshot VHD"""
LOG.debug(_("Snapshotting VM %(vm_ref)s with label '%(label)s'...")
% locals())
vm_vdi_ref, vm_vdi_rec = cls.get_vdi_for_vm_safely(session, vm_ref)
sr_ref = vm_vdi_rec["SR"]
original_parent_uuid = get_vhd_parent_uuid(session, vm_vdi_ref)
template_vm_ref = session.call_xenapi('VM.snapshot', vm_ref, label)
template_vdi_rec = cls.get_vdi_for_vm_safely(session,
template_vm_ref)[1]
template_vdi_uuid = template_vdi_rec["uuid"]
LOG.debug(_('Created snapshot %(template_vm_ref)s from'
' VM %(vm_ref)s.') % locals())
parent_uuid, base_uuid = _wait_for_vhd_coalesce(
session, instance, sr_ref, vm_vdi_ref, original_parent_uuid)
template_vdi_uuids = {'base': base_uuid,
'image': parent_uuid,
'snap': template_vdi_uuid}
return template_vm_ref, template_vdi_uuids
@classmethod
def get_sr_path(cls, session):
"""Return the path to our storage repository
This is used when we're dealing with VHDs directly, either by taking
snapshots or by restoring an image in the DISK_VHD format.
"""
sr_ref = cls.safe_find_sr(session)
sr_rec = session.call_xenapi("SR.get_record", sr_ref)
sr_uuid = sr_rec["uuid"]
return os.path.join(FLAGS.xenapi_sr_base_path, sr_uuid)
@classmethod
def find_cached_image(cls, session, image_id, sr_ref):
"""Returns the vdi-ref of the cached image."""
for vdi_ref, vdi_rec in _get_all_vdis_in_sr(session, sr_ref):
if ('image-id' in vdi_rec['other_config'] and
vdi_rec['other_config']['image-id'] == image_id):
return vdi_ref
return None
@classmethod
def upload_image(cls, context, session, instance, vdi_uuids, image_id):
""" Requests that the Glance plugin bundle the specified VDIs and
push them into Glance using the specified human-friendly name.
"""
# NOTE(sirp): Currently we only support uploading images as VHD, there
# is no RAW equivalent (yet)
LOG.debug(_("Asking xapi to upload %(vdi_uuids)s as"
" ID %(image_id)s") % locals())
glance_host, glance_port = glance.pick_glance_api_server()
properties = {}
properties['auto_disk_config'] = instance.auto_disk_config
properties['os_type'] = instance.os_type or FLAGS.default_os_type
params = {'vdi_uuids': vdi_uuids,
'image_id': image_id,
'glance_host': glance_host,
'glance_port': glance_port,
'sr_path': cls.get_sr_path(session),
'auth_token': getattr(context, 'auth_token', None),
'properties': properties}
kwargs = {'params': pickle.dumps(params)}
session.call_plugin('glance', 'upload_vhd', kwargs)
@classmethod
def resize_disk(cls, session, vdi_ref, instance_type):
# Copy VDI over to something we can resize
# NOTE(jerdfelt): Would be nice to just set vdi_ref to read/write
sr_ref = cls.safe_find_sr(session)
copy_ref = session.call_xenapi('VDI.copy', vdi_ref, sr_ref)
try:
# Resize partition and filesystem down
cls.auto_configure_disk(session=session,
vdi_ref=copy_ref,
new_gb=instance_type['root_gb'])
# Create new VDI
new_ref = cls.fetch_blank_disk(session,
instance_type['id'])
new_uuid = session.call_xenapi('VDI.get_uuid', new_ref)
# Manually copy contents over
virtual_size = instance_type['root_gb'] * 1024 * 1024 * 1024
_copy_partition(session, copy_ref, new_ref, 1, virtual_size)
return new_ref, new_uuid
finally:
cls.destroy_vdi(session, copy_ref)
@classmethod
def auto_configure_disk(cls, session, vdi_ref, new_gb):
"""Partition and resize FS to match the size specified by
instance_types.root_gb.
This is a fail-safe to prevent accidentally destroying data on a disk
erroneously marked as auto_disk_config=True.
The criteria for allowing resize are:
1. 'auto_disk_config' must be true for the instance (and image).
(If we've made it here, then auto_disk_config=True.)
2. The disk must have only one partition.
3. The file-system on the one partition must be ext3 or ext4.
"""
with vdi_attached_here(session, vdi_ref, read_only=False) as dev:
partitions = _get_partitions(dev)
if len(partitions) != 1:
return
_num, start, old_sectors, ptype = partitions[0]
if ptype in ('ext3', 'ext4'):
new_sectors = new_gb * 1024 * 1024 * 1024 / SECTOR_SIZE
_resize_part_and_fs(dev, start, old_sectors, new_sectors)
@classmethod
def _generate_disk(cls, session, instance, vm_ref, userdevice, name,
size_mb, fs_type):
"""
Steps to programmatically generate a disk:
1. Create VDI of desired size
2. Attach VDI to compute worker
3. Create partition
4. Create VBD between instance VM and VDI
"""
# 1. Create VDI
sr_ref = cls.safe_find_sr(session)
name_label = '%s-%s' % (instance.name, name)
ONE_MEG = 1024 * 1024
virtual_size = size_mb * ONE_MEG
vdi_ref = cls.create_vdi(
session, sr_ref, name_label, virtual_size, read_only=False)
try:
# 2. Attach VDI to compute worker (VBD hotplug)
with vdi_attached_here(session, vdi_ref, read_only=False) as dev:
# 3. Create partition
dev_path = utils.make_dev_path(dev)
utils.execute('parted', '--script', dev_path,
'mklabel', 'msdos', run_as_root=True)
partition_start = 0
partition_end = size_mb
utils.execute('parted', '--script', dev_path,
'mkpart', 'primary',
str(partition_start),
str(partition_end),
run_as_root=True)
partition_path = utils.make_dev_path(dev, partition=1)
if fs_type == 'linux-swap':
utils.execute('mkswap', partition_path,
run_as_root=True)
elif fs_type is not None:
utils.execute('mkfs', '-t', fs_type, partition_path,
run_as_root=True)
# 4. Create VBD between instance VM and swap VDI
cls.create_vbd(session, vm_ref, vdi_ref, userdevice,
bootable=False)
except Exception:
with utils.save_and_reraise_exception():
cls.destroy_vdi(session, vdi_ref)
@classmethod
def generate_swap(cls, session, instance, vm_ref, userdevice, swap_mb):
# NOTE(jk0): We use a FAT32 filesystem for the Windows swap
# partition because that is what parted supports.
is_windows = instance.os_type == "windows"
fs_type = "vfat" if is_windows else "linux-swap"
cls._generate_disk(session, instance, vm_ref, userdevice,
'swap', swap_mb, fs_type)
@classmethod
def generate_ephemeral(cls, session, instance, vm_ref, userdevice,
size_gb):
cls._generate_disk(session, instance, vm_ref, userdevice,
'ephemeral', size_gb * 1024,
FLAGS.default_ephemeral_format)
@classmethod
def fetch_blank_disk(cls, session, instance_type_id):
# Size the blank harddrive to suit the machine type:
one_gig = 1024 * 1024 * 1024
req_type = instance_types.get_instance_type(instance_type_id)
req_size = req_type['root_gb']
LOG.debug("Creating blank HD of size %(req_size)d gigs"
% locals())
vdi_size = one_gig * req_size
LOG.debug("ISO vm create: Looking for the SR")
sr_ref = cls.safe_find_sr(session)
vdi_ref = cls.create_vdi(session, sr_ref, 'blank HD', vdi_size, False)
return vdi_ref
@classmethod
def create_kernel_image(cls, context, session, instance, image, user_id,
project_id, image_type):
"""Creates kernel/ramdisk file from the image stored in the cache.
If the image is not present in the cache, it streams it from glance.
Returns: A list of dictionaries that describe VDIs
"""
filename = ""
if FLAGS.cache_images:
args = {}
args['cached-image'] = image
args['new-image-uuid'] = str(uuid.uuid4())
filename = session.call_plugin('glance', 'create_kernel_ramdisk',
args)
if filename == "":
return cls.fetch_image(context, session, instance, image,
user_id, project_id, image_type)
else:
return [dict(vdi_type=ImageType.to_string(image_type),
vdi_uuid=None,
file=filename)]
@classmethod
def create_image(cls, context, session, instance, image, user_id,
project_id, image_type):
"""Creates VDI from the image stored in the local cache. If the image
is not present in the cache, it streams it from glance.
Returns: A list of dictionaries that describe VDIs
"""
        if not FLAGS.cache_images or image_type == ImageType.DISK_ISO:
# If caching is disabled, we do not have to keep a copy of the
# image. Fetch the image from glance.
return cls.fetch_image(context, session,
instance, instance.image_ref,
instance.user_id, instance.project_id,
image_type)
sr_ref = cls.safe_find_sr(session)
sr_type = session.call_xenapi('SR.get_record', sr_ref)["type"]
vdi_return_list = []
if FLAGS.use_cow_images and sr_type != "ext":
LOG.warning(_("Fast cloning is only supported on default local SR "
"of type ext. SR on this system was found to be of "
"type %(sr_type)s. Ignoring the cow flag.")
% locals())
vdi_ref = cls.find_cached_image(session, image, sr_ref)
if vdi_ref is None:
vdis = cls.fetch_image(context, session, instance, image, user_id,
project_id, image_type)
vdi_ref = session.call_xenapi('VDI.get_by_uuid',
vdis[0]['vdi_uuid'])
session.call_xenapi('VDI.add_to_other_config',
vdi_ref, "image-id", str(image))
session.call_xenapi('VDI.set_name_label',
vdi_ref, "Cached glance image")
for vdi in vdis:
if vdi["vdi_type"] == "swap":
session.call_xenapi('VDI.add_to_other_config',
vdi_ref, "swap-disk",
str(vdi['vdi_uuid']))
if FLAGS.use_cow_images and sr_type == 'ext':
new_vdi_ref = cls.clone_vdi(session, vdi_ref)
else:
new_vdi_ref = cls.copy_vdi(session, sr_ref, vdi_ref)
# Set the name label for the image we just created and remove image id
# field from other-config.
session.call_xenapi('VDI.set_name_label', new_vdi_ref, instance.name)
session.call_xenapi('VDI.remove_from_other_config',
new_vdi_ref, "image-id")
vdi_return_list.append(dict(
vdi_type=("os" if image_type == ImageType.DISK_VHD
else ImageType.to_string(image_type)),
vdi_uuid=session.call_xenapi('VDI.get_uuid', new_vdi_ref),
file=None))
# Create a swap disk if the glance image had one associated with it.
vdi_rec = session.call_xenapi('VDI.get_record', vdi_ref)
if 'swap-disk' in vdi_rec['other_config']:
swap_disk_uuid = vdi_rec['other_config']['swap-disk']
swap_vdi_ref = session.call_xenapi('VDI.get_by_uuid',
swap_disk_uuid)
new_swap_vdi_ref = cls.copy_vdi(session, sr_ref, swap_vdi_ref)
new_swap_vdi_uuid = session.call_xenapi('VDI.get_uuid',
new_swap_vdi_ref)
session.call_xenapi('VDI.set_name_label', new_swap_vdi_ref,
instance.name + "-swap")
vdi_return_list.append(dict(vdi_type="swap",
vdi_uuid=new_swap_vdi_uuid,
file=None))
return vdi_return_list
@classmethod
def fetch_image(cls, context, session, instance, image, _user_id,
_project_id, image_type):
"""Fetch image from glance based on image type.
Returns: A single filename if image_type is KERNEL or RAMDISK
A list of dictionaries that describe VDIs, otherwise
"""
if image_type == ImageType.DISK_VHD:
return cls._fetch_image_glance_vhd(context,
session, instance, image, image_type)
else:
return cls._fetch_image_glance_disk(context,
session, instance, image, image_type)
@classmethod
def _retry_glance_download_vhd(cls, context, session, image):
# NOTE(sirp): The Glance plugin runs under Python 2.4
# which does not have the `uuid` module. To work around this,
# we generate the uuids here (under Python 2.6+) and
# pass them as arguments
uuid_stack = [str(uuid.uuid4()) for i in xrange(3)]
max_attempts = FLAGS.glance_num_retries + 1
sleep_time = 0.5
for attempt_num in xrange(1, max_attempts + 1):
glance_host, glance_port = glance.pick_glance_api_server()
params = {'image_id': image,
'glance_host': glance_host,
'glance_port': glance_port,
'uuid_stack': uuid_stack,
'sr_path': cls.get_sr_path(session),
'num_retries': 0,
'auth_token': getattr(context, 'auth_token', None)}
kwargs = {'params': pickle.dumps(params)}
LOG.info(_('download_vhd %(image)s '
'attempt %(attempt_num)d/%(max_attempts)d '
'from %(glance_host)s:%(glance_port)s') % locals())
try:
result = session.call_plugin('glance', 'download_vhd', kwargs)
return json.loads(result)
except cls.XenAPI.Failure as exc:
_type, _method, error = exc.details[:3]
if error == 'RetryableError':
LOG.error(_('download_vhd failed: %r') %
(exc.details[3:],))
else:
raise
time.sleep(sleep_time)
sleep_time = min(2 * sleep_time, 15)
raise exception.CouldNotFetchImage(image=image)
@classmethod
def _fetch_image_glance_vhd(cls, context, session, instance, image,
_image_type):
"""Tell glance to download an image and put the VHDs into the SR
Returns: A list of dictionaries that describe VDIs
"""
LOG.debug(_("Asking xapi to fetch vhd image %(image)s")
% locals())
sr_ref = cls.safe_find_sr(session)
vdis = cls._retry_glance_download_vhd(context, session, image)
# 'download_vhd' will return a list of dictionaries describing VDIs.
# The dictionary will contain 'vdi_type' and 'vdi_uuid' keys.
# 'vdi_type' can be 'os' or 'swap' right now.
for vdi in vdis:
LOG.debug(_("xapi 'download_vhd' returned VDI of "
"type '%(vdi_type)s' with UUID '%(vdi_uuid)s'") % vdi)
cls.scan_sr(session, sr_ref)
# Pull out the UUID of the first VDI (which is the os VDI)
os_vdi_uuid = vdis[0]['vdi_uuid']
# Set the name-label to ease debugging
vdi_ref = session.call_xenapi("VDI.get_by_uuid", os_vdi_uuid)
primary_name_label = instance.name
session.call_xenapi("VDI.set_name_label", vdi_ref, primary_name_label)
cls._check_vdi_size(context, session, instance, os_vdi_uuid)
return vdis
@classmethod
def _get_vdi_chain_size(cls, session, vdi_uuid):
"""Compute the total size of a VDI chain, starting with the specified
VDI UUID.
This will walk the VDI chain to the root, add the size of each VDI into
the total.
"""
size_bytes = 0
for vdi_rec in walk_vdi_chain(session, vdi_uuid):
cur_vdi_uuid = vdi_rec['uuid']
vdi_size_bytes = int(vdi_rec['physical_utilisation'])
LOG.debug(_('vdi_uuid=%(cur_vdi_uuid)s vdi_size_bytes='
'%(vdi_size_bytes)d') % locals())
size_bytes += vdi_size_bytes
return size_bytes
@classmethod
def _check_vdi_size(cls, context, session, instance, vdi_uuid):
size_bytes = cls._get_vdi_chain_size(session, vdi_uuid)
# FIXME(jk0): this was copied directly from compute.manager.py, let's
# refactor this to a common area
instance_type_id = instance['instance_type_id']
instance_type = instance_types.get_instance_type(instance_type_id)
allowed_size_gb = instance_type['root_gb']
allowed_size_bytes = allowed_size_gb * 1024 * 1024 * 1024
LOG.debug(_("image_size_bytes=%(size_bytes)d, allowed_size_bytes="
"%(allowed_size_bytes)d") % locals())
if size_bytes > allowed_size_bytes:
LOG.info(_("Image size %(size_bytes)d exceeded"
" instance_type allowed size "
"%(allowed_size_bytes)d")
% locals())
raise exception.ImageTooLarge()
@classmethod
def _fetch_image_glance_disk(cls, context, session, instance, image,
image_type):
"""Fetch the image from Glance
NOTE:
Unlike _fetch_image_glance_vhd, this method does not use the Glance
plugin; instead, it streams the disks through domU to the VDI
directly.
        Returns: A single filename if image_type is KERNEL or RAMDISK
A list of dictionaries that describe VDIs, otherwise
"""
instance_id = instance.id
# FIXME(sirp): Since the Glance plugin seems to be required for the
# VHD disk, it may be worth using the plugin for both VHD and RAW and
# DISK restores
LOG.debug(_("Fetching image %(image)s") % locals())
LOG.debug(_("Image Type: %s"), ImageType.to_string(image_type))
if image_type == ImageType.DISK_ISO:
sr_ref = cls.safe_find_iso_sr(session)
LOG.debug(_("ISO: Found sr possibly containing the ISO image"))
else:
sr_ref = cls.safe_find_sr(session)
glance_client, image_id = glance.get_glance_client(context, image)
glance_client.set_auth_token(getattr(context, 'auth_token', None))
meta, image_file = glance_client.get_image(image_id)
virtual_size = int(meta['size'])
vdi_size = virtual_size
LOG.debug(_("Size for image %(image)s:" +
"%(virtual_size)d") % locals())
if image_type == ImageType.DISK:
# Make room for MBR.
vdi_size += MBR_SIZE_BYTES
elif (image_type in (ImageType.KERNEL, ImageType.RAMDISK) and
vdi_size > FLAGS.max_kernel_ramdisk_size):
max_size = FLAGS.max_kernel_ramdisk_size
raise exception.Error(
_("Kernel/Ramdisk image is too large: %(vdi_size)d bytes, "
"max %(max_size)d bytes") % locals())
name_label = instance.name
vdi_ref = cls.create_vdi(session, sr_ref, name_label, vdi_size, False)
# From this point we have a VDI on Xen host;
# If anything goes wrong, we need to remember its uuid.
try:
filename = None
vdi_uuid = session.call_xenapi("VDI.get_uuid", vdi_ref)
with vdi_attached_here(session, vdi_ref, read_only=False) as dev:
_stream_disk(dev, image_type, virtual_size, image_file)
if image_type in (ImageType.KERNEL, ImageType.RAMDISK):
# We need to invoke a plugin for copying the
# content of the VDI into the proper path.
LOG.debug(_("Copying VDI %s to /boot/guest on dom0"), vdi_ref)
fn = "copy_kernel_vdi"
args = {}
args['vdi-ref'] = vdi_ref
# Let the plugin copy the correct number of bytes.
args['image-size'] = str(vdi_size)
if FLAGS.cache_images:
args['cached-image'] = image
filename = session.call_plugin('glance', fn, args)
# Remove the VDI as it is not needed anymore.
cls.destroy_vdi(session, vdi_ref)
LOG.debug(_("Kernel/Ramdisk VDI %s destroyed"), vdi_ref)
return [dict(vdi_type=ImageType.to_string(image_type),
vdi_uuid=None,
file=filename)]
else:
return [dict(vdi_type=ImageType.to_string(image_type),
vdi_uuid=vdi_uuid,
file=None)]
except (cls.XenAPI.Failure, IOError, OSError) as e:
# We look for XenAPI and OS failures.
LOG.exception(_("instance %s: Failed to fetch glance image"),
instance_id)
            e.args = e.args + ([dict(vdi_type=ImageType.to_string(image_type),
                                     vdi_uuid=vdi_uuid,
                                     file=filename)],)
raise e
@classmethod
def determine_disk_image_type(cls, image_meta):
"""Disk Image Types are used to determine where the kernel will reside
within an image. To figure out which type we're dealing with, we use
the following rules:
1. If we're using Glance, we can use the image_type field to
determine the image_type
2. If we're not using Glance, then we need to deduce this based on
whether a kernel_id is specified.
"""
disk_format = image_meta['disk_format']
disk_format_map = {
'ami': 'DISK',
'aki': 'KERNEL',
'ari': 'RAMDISK',
'raw': 'DISK_RAW',
'vhd': 'DISK_VHD',
'iso': 'DISK_ISO',
}
try:
image_type_str = disk_format_map[disk_format]
except KeyError:
raise exception.InvalidDiskFormat(disk_format=disk_format)
image_type = getattr(ImageType, image_type_str)
image_ref = image_meta['id']
msg = _("Detected %(image_type_str)s format for image %(image_ref)s")
LOG.debug(msg % locals())
return image_type
@classmethod
def determine_is_pv(cls, session, vdi_ref, disk_image_type, os_type):
"""
Determine whether the VM will use a paravirtualized kernel or if it
will use hardware virtualization.
1. Glance (VHD): then we use `os_type`, raise if not set
2. Glance (DISK_RAW): use Pygrub to figure out if pv kernel is
available
3. Glance (DISK): pv is assumed
4. Glance (DISK_ISO): no pv is assumed
"""
LOG.debug(_("Looking up vdi %s for PV kernel"), vdi_ref)
if disk_image_type == ImageType.DISK_VHD:
# 1. VHD
if os_type == 'windows':
is_pv = False
else:
is_pv = True
elif disk_image_type == ImageType.DISK_RAW:
# 2. RAW
with vdi_attached_here(session, vdi_ref, read_only=True) as dev:
is_pv = _is_vdi_pv(dev)
elif disk_image_type == ImageType.DISK:
# 3. Disk
is_pv = True
elif disk_image_type == ImageType.DISK_ISO:
# 4. ISO
is_pv = False
else:
raise exception.Error(_("Unknown image format %(disk_image_type)s")
% locals())
return is_pv
@classmethod
def set_vm_name_label(cls, session, vm_ref, name_label):
session.call_xenapi("VM.set_name_label", vm_ref, name_label)
@classmethod
def list_vms(cls, session):
for vm_ref, vm_rec in cls.get_all_refs_and_recs(session, 'VM'):
if (vm_rec["resident_on"] != session.get_xenapi_host() or
vm_rec["is_a_template"] or vm_rec["is_control_domain"]):
continue
else:
yield vm_ref, vm_rec
@classmethod
def lookup(cls, session, name_label):
"""Look the instance up and return it if available"""
vm_refs = session.call_xenapi("VM.get_by_name_label", name_label)
n = len(vm_refs)
if n == 0:
return None
elif n > 1:
raise exception.InstanceExists(name=name_label)
else:
return vm_refs[0]
@classmethod
def lookup_vm_vdis(cls, session, vm_ref):
"""Look for the VDIs that are attached to the VM"""
# Firstly we get the VBDs, then the VDIs.
# TODO(Armando): do we leave the read-only devices?
vbd_refs = session.call_xenapi("VM.get_VBDs", vm_ref)
vdi_refs = []
if vbd_refs:
for vbd_ref in vbd_refs:
try:
vdi_ref = session.call_xenapi("VBD.get_VDI", vbd_ref)
# Test valid VDI
record = session.call_xenapi("VDI.get_record", vdi_ref)
LOG.debug(_('VDI %s is still available'), record['uuid'])
                except cls.XenAPI.Failure as exc:
LOG.exception(exc)
else:
vdi_refs.append(vdi_ref)
if len(vdi_refs) > 0:
return vdi_refs
else:
return None
@classmethod
def preconfigure_instance(cls, session, instance, vdi_ref, network_info):
"""Makes alterations to the image before launching as part of spawn.
"""
        # As mounting the image VDI is expensive, we only want to do it once,
# if at all, so determine whether it's required first, and then do
# everything
mount_required = False
key, net, metadata = _prepare_injectables(instance, network_info)
mount_required = key or net or metadata
if not mount_required:
return
with vdi_attached_here(session, vdi_ref, read_only=False) as dev:
_mounted_processing(dev, key, net, metadata)
@classmethod
def lookup_kernel_ramdisk(cls, session, vm):
vm_rec = session.call_xenapi("VM.get_record", vm)
if 'PV_kernel' in vm_rec and 'PV_ramdisk' in vm_rec:
return (vm_rec['PV_kernel'], vm_rec['PV_ramdisk'])
else:
return (None, None)
@classmethod
def is_snapshot(cls, session, vm):
vm_rec = session.call_xenapi("VM.get_record", vm)
if 'is_a_template' in vm_rec and 'is_a_snapshot' in vm_rec:
return vm_rec['is_a_template'] and vm_rec['is_a_snapshot']
else:
return False
@classmethod
def compile_info(cls, record):
"""Fill record with VM status information"""
LOG.info(_("(VM_UTILS) xenserver vm state -> |%s|"),
record['power_state'])
LOG.info(_("(VM_UTILS) xenapi power_state -> |%s|"),
XENAPI_POWER_STATE[record['power_state']])
return {'state': XENAPI_POWER_STATE[record['power_state']],
'max_mem': long(record['memory_static_max']) >> 10,
'mem': long(record['memory_dynamic_max']) >> 10,
'num_cpu': record['VCPUs_max'],
'cpu_time': 0}
@classmethod
def compile_diagnostics(cls, record):
"""Compile VM diagnostics data"""
try:
diags = {}
vm_uuid = record["uuid"]
xml = get_rrd(get_rrd_server(), vm_uuid)
if xml:
rrd = minidom.parseString(xml)
for i, node in enumerate(rrd.firstChild.childNodes):
# We don't want all of the extra garbage
                    if 3 <= i <= 11:
ref = node.childNodes
# Name and Value
if len(ref) > 6:
_ref_zero = ref[0].firstChild.data
diags[_ref_zero] = ref[6].firstChild.data
return diags
except expat.ExpatError as e:
LOG.exception(_('Unable to parse rrd of %(vm_uuid)s') % locals())
return {"Unable to retrieve diagnostics": e}
@classmethod
def compile_metrics(cls, start_time, stop_time=None):
"""Compile bandwidth usage, cpu, and disk metrics for all VMs on
this host"""
start_time = int(start_time)
xml = get_rrd_updates(get_rrd_server(), start_time)
if xml:
doc = minidom.parseString(xml)
return parse_rrd_update(doc, start_time, stop_time)
raise exception.CouldNotFetchMetrics()
@classmethod
def scan_sr(cls, session, sr_ref=None):
"""Scans the SR specified by sr_ref"""
if sr_ref:
LOG.debug(_("Re-scanning SR %s"), sr_ref)
session.call_xenapi('SR.scan', sr_ref)
@classmethod
def scan_default_sr(cls, session):
"""Looks for the system default SR and triggers a re-scan"""
cls.scan_sr(session, cls.find_sr(session))
@classmethod
def safe_find_sr(cls, session):
"""Same as find_sr except raises a NotFound exception if SR cannot be
determined
"""
sr_ref = cls.find_sr(session)
if sr_ref is None:
raise exception.StorageRepositoryNotFound()
return sr_ref
@classmethod
def find_sr(cls, session):
"""Return the storage repository to hold VM images"""
host = session.get_xenapi_host()
try:
tokens = FLAGS.sr_matching_filter.split(':')
filter_criteria = tokens[0]
filter_pattern = tokens[1]
except IndexError:
# oops, flag is invalid
LOG.warning(_("Flag sr_matching_filter '%s' does not respect "
"formatting convention"), FLAGS.sr_matching_filter)
return None
if filter_criteria == 'other-config':
key, value = filter_pattern.split('=', 1)
for sr_ref, sr_rec in cls.get_all_refs_and_recs(session, 'SR'):
if not (key in sr_rec['other_config'] and
sr_rec['other_config'][key] == value):
continue
for pbd_ref in sr_rec['PBDs']:
pbd_rec = cls.get_rec(session, 'PBD', pbd_ref)
if pbd_rec and pbd_rec['host'] == host:
return sr_ref
elif filter_criteria == 'default-sr' and filter_pattern == 'true':
pool_ref = session.call_xenapi('pool.get_all')[0]
return session.call_xenapi('pool.get_default_SR', pool_ref)
# No SR found!
LOG.warning(_("XenAPI is unable to find a Storage Repository to "
"install guest instances on. Please check your "
"configuration and/or configure the flag "
"'sr_matching_filter'"))
return None
@classmethod
def safe_find_iso_sr(cls, session):
"""Same as find_iso_sr except raises a NotFound exception if SR
cannot be determined
"""
sr_ref = cls.find_iso_sr(session)
if sr_ref is None:
raise exception.NotFound(_('Cannot find SR of content-type ISO'))
return sr_ref
@classmethod
def find_iso_sr(cls, session):
"""Return the storage repository to hold ISO images"""
host = session.get_xenapi_host()
for sr_ref, sr_rec in cls.get_all_refs_and_recs(session, 'SR'):
LOG.debug(_("ISO: looking at SR %(sr_rec)s") % locals())
            if sr_rec['content_type'] != 'iso':
LOG.debug(_("ISO: not iso content"))
continue
            if 'i18n-key' not in sr_rec['other_config']:
LOG.debug(_("ISO: iso content_type, no 'i18n-key' key"))
continue
            if sr_rec['other_config']['i18n-key'] != 'local-storage-iso':
LOG.debug(_("ISO: iso content_type, i18n-key value not "
"'local-storage-iso'"))
continue
LOG.debug(_("ISO: SR MATCHing our criteria"))
for pbd_ref in sr_rec['PBDs']:
LOG.debug(_("ISO: ISO, looking to see if it is host local"))
pbd_rec = cls.get_rec(session, 'PBD', pbd_ref)
if not pbd_rec:
LOG.debug(_("ISO: PBD %(pbd_ref)s disappeared") % locals())
continue
pbd_rec_host = pbd_rec['host']
LOG.debug(_("ISO: PBD matching, want %(pbd_rec)s, " +
"have %(host)s") % locals())
if pbd_rec_host == host:
LOG.debug(_("ISO: SR with local PBD"))
return sr_ref
return None
def get_rrd_server():
"""Return server's scheme and address to use for retrieving RRD XMLs."""
xs_url = urlparse.urlparse(FLAGS.xenapi_connection_url)
return [xs_url.scheme, xs_url.netloc]
def get_rrd(server, vm_uuid):
"""Return the VM RRD XML as a string"""
try:
xml = urllib.urlopen("%s://%s:%s@%s/vm_rrd?uuid=%s" % (
server[0],
FLAGS.xenapi_connection_username,
FLAGS.xenapi_connection_password,
server[1],
vm_uuid))
return xml.read()
except IOError:
LOG.exception(_('Unable to obtain RRD XML for VM %(vm_uuid)s with '
'server details: %(server)s.') % locals())
return None
def get_rrd_updates(server, start_time):
"""Return the RRD updates XML as a string"""
try:
xml = urllib.urlopen("%s://%s:%s@%s/rrd_updates?start=%s" % (
server[0],
FLAGS.xenapi_connection_username,
FLAGS.xenapi_connection_password,
server[1],
start_time))
return xml.read()
except IOError:
LOG.exception(_('Unable to obtain RRD XML updates with '
'server details: %(server)s.') % locals())
return None
def parse_rrd_meta(doc):
data = {}
meta = doc.getElementsByTagName('meta')[0]
for tag in ('start', 'end', 'step'):
data[tag] = int(meta.getElementsByTagName(tag)[0].firstChild.data)
legend = meta.getElementsByTagName('legend')[0]
data['legend'] = [child.firstChild.data for child in legend.childNodes]
return data
def parse_rrd_data(doc):
dnode = doc.getElementsByTagName('data')[0]
return [dict(
time=int(child.getElementsByTagName('t')[0].firstChild.data),
values=[decimal.Decimal(valnode.firstChild.data)
for valnode in child.getElementsByTagName('v')])
for child in dnode.childNodes]
def parse_rrd_update(doc, start, until=None):
sum_data = {}
meta = parse_rrd_meta(doc)
data = parse_rrd_data(doc)
for col, collabel in enumerate(meta['legend']):
_datatype, _objtype, uuid, name = collabel.split(':')
vm_data = sum_data.get(uuid, dict())
if name.startswith('vif'):
vm_data[name] = integrate_series(data, col, start, until)
else:
vm_data[name] = average_series(data, col, until)
sum_data[uuid] = vm_data
return sum_data
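def _example_legend_split(collabel='AVERAGE:vm:fake-uuid:vif_0_tx'):
    """Hypothetical sketch with a made-up label: RRD legend entries are
    colon-separated datatype:objtype:uuid:name fields, and
    parse_rrd_update() integrates vif_* metrics while averaging the rest.
    """
    _datatype, _objtype, uuid, name = collabel.split(':')
    return uuid, name, name.startswith('vif')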
def average_series(data, col, until=None):
vals = [row['values'][col] for row in data
if (not until or (row['time'] <= until)) and
row['values'][col].is_finite()]
if vals:
try:
return (sum(vals) / len(vals)).quantize(decimal.Decimal('1.0000'))
except decimal.InvalidOperation:
# (mdragon) Xenserver occasionally returns odd values in
# data that will throw an error on averaging (see bug 918490)
# These are hard to find, since, whatever those values are,
            # Decimal seems to think they are a valid number, sort of.
            # We *think* we've got the cases covered, but just in
# case, log and return NaN, so we don't break reporting of
# other statistics.
LOG.error(_("Invalid statistics data from Xenserver: %s")
% str(vals))
return decimal.Decimal('NaN')
else:
return decimal.Decimal('0.0000')
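def _example_average_series():
    """Hypothetical worked example: rows shaped like parse_rrd_data()
    output average to Decimal('1.5000') for column 0."""
    rows = [{'time': 10, 'values': [decimal.Decimal('1.0')]},
            {'time': 20, 'values': [decimal.Decimal('2.0')]}]
    return average_series(rows, 0)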
def integrate_series(data, col, start, until=None):
total = decimal.Decimal('0.0000')
prev_time = int(start)
prev_val = None
for row in reversed(data):
if not until or (row['time'] <= until):
time = row['time']
val = row['values'][col]
if val.is_nan():
val = decimal.Decimal('0.0000')
if prev_val is None:
prev_val = val
if prev_val >= val:
total += ((val * (time - prev_time)) +
(decimal.Decimal('0.5000') * (prev_val - val) *
(time - prev_time)))
else:
total += ((prev_val * (time - prev_time)) +
(decimal.Decimal('0.5000') * (val - prev_val) *
(time - prev_time)))
prev_time = time
prev_val = val
return total.quantize(decimal.Decimal('1.0000'))
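def _example_integrate_series():
    """Hypothetical worked example of the trapezoidal integration above.
    Rows arrive newest-first; the value is treated as flat at 1.0 from the
    start time t=0 to t=10 (area 10.0), then ramps 1.0 -> 2.0 over
    t=10..20 (area 15.0), giving Decimal('25.0000')."""
    rows = [{'time': 20, 'values': [decimal.Decimal('2.0')]},
            {'time': 10, 'values': [decimal.Decimal('1.0')]}]
    return integrate_series(rows, 0, 0)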
def _get_all_vdis_in_sr(session, sr_ref):
for vdi_ref in session.call_xenapi('SR.get_VDIs', sr_ref):
try:
vdi_rec = session.call_xenapi('VDI.get_record', vdi_ref)
yield vdi_ref, vdi_rec
except VMHelper.XenAPI.Failure:
continue
#TODO(sirp): This code comes from XS5.6 pluginlib.py, we should refactor to
# use that implementation
def get_vhd_parent(session, vdi_rec):
"""
Returns the VHD parent of the given VDI record, as a (ref, rec) pair.
Returns None if we're at the root of the tree.
"""
if 'vhd-parent' in vdi_rec['sm_config']:
parent_uuid = vdi_rec['sm_config']['vhd-parent']
parent_ref = session.call_xenapi("VDI.get_by_uuid", parent_uuid)
parent_rec = session.call_xenapi("VDI.get_record", parent_ref)
vdi_uuid = vdi_rec['uuid']
LOG.debug(_("VHD %(vdi_uuid)s has parent %(parent_ref)s") % locals())
return parent_ref, parent_rec
else:
return None
def get_vhd_parent_uuid(session, vdi_ref):
vdi_rec = session.call_xenapi("VDI.get_record", vdi_ref)
ret = get_vhd_parent(session, vdi_rec)
if ret:
_parent_ref, parent_rec = ret
return parent_rec["uuid"]
else:
return None
def walk_vdi_chain(session, vdi_uuid):
"""Yield vdi_recs for each element in a VDI chain"""
# TODO(jk0): perhaps make get_vhd_parent use this
while True:
vdi_ref = session.call_xenapi("VDI.get_by_uuid", vdi_uuid)
vdi_rec = session.call_xenapi("VDI.get_record", vdi_ref)
yield vdi_rec
parent_uuid = vdi_rec['sm_config'].get('vhd-parent')
if parent_uuid:
vdi_uuid = parent_uuid
else:
break
def _wait_for_vhd_coalesce(session, instance, sr_ref, vdi_ref,
original_parent_uuid):
""" Spin until the parent VHD is coalesced into its parent VHD
Before coalesce:
* original_parent_vhd
* parent_vhd
snapshot
After coalesce:
* parent_vhd
snapshot
"""
def _another_child_vhd():
if not original_parent_uuid:
return False
# Search for any other vdi which parents to original parent and is not
# in the active vm/instance vdi chain.
vdi_uuid = session.call_xenapi('VDI.get_record', vdi_ref)['uuid']
parent_vdi_uuid = get_vhd_parent_uuid(session, vdi_ref)
for _ref, rec in _get_all_vdis_in_sr(session, sr_ref):
if ((rec['uuid'] != vdi_uuid) and
(rec['uuid'] != parent_vdi_uuid) and
(rec['sm_config'].get('vhd-parent') == original_parent_uuid)):
# Found another vhd which too parents to original parent.
return True
# Found no other vdi with the same parent.
return False
# Check if original parent has any other child. If so, coalesce will
# not take place.
if _another_child_vhd():
parent_uuid = get_vhd_parent_uuid(session, vdi_ref)
parent_ref = session.call_xenapi("VDI.get_by_uuid", parent_uuid)
base_uuid = get_vhd_parent_uuid(session, parent_ref)
return parent_uuid, base_uuid
max_attempts = FLAGS.xenapi_vhd_coalesce_max_attempts
for i in xrange(max_attempts):
VMHelper.scan_sr(session, sr_ref)
parent_uuid = get_vhd_parent_uuid(session, vdi_ref)
if original_parent_uuid and (parent_uuid != original_parent_uuid):
LOG.debug(_("Parent %(parent_uuid)s doesn't match original parent"
" %(original_parent_uuid)s, waiting for coalesce...")
% locals())
else:
parent_ref = session.call_xenapi("VDI.get_by_uuid", parent_uuid)
base_uuid = get_vhd_parent_uuid(session, parent_ref)
return parent_uuid, base_uuid
greenthread.sleep(FLAGS.xenapi_vhd_coalesce_poll_interval)
msg = (_("VHD coalesce attempts exceeded (%(max_attempts)d)"
", giving up...") % locals())
raise exception.Error(msg)
def remap_vbd_dev(dev):
"""Return the appropriate location for a plugged-in VBD device
Ubuntu Maverick moved xvd? -> sd?. This is considered a bug and will be
fixed in future versions:
https://bugs.launchpad.net/ubuntu/+source/linux/+bug/684875
For now, we work around it by just doing a string replace.
"""
# NOTE(sirp): This hack can go away when we pull support for Maverick
should_remap = FLAGS.xenapi_remap_vbd_dev
if not should_remap:
return dev
old_prefix = 'xvd'
new_prefix = FLAGS.xenapi_remap_vbd_dev_prefix
remapped_dev = dev.replace(old_prefix, new_prefix)
return remapped_dev
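def _example_remap_vbd_dev(dev='xvda'):
    """Hypothetical illustration of the Maverick workaround above: with
    the default remap prefix of 'sd', 'xvda' is accessed as 'sda'."""
    return dev.replace('xvd', 'sd')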
def _wait_for_device(dev):
"""Wait for device node to appear"""
for i in xrange(0, FLAGS.block_device_creation_timeout):
dev_path = utils.make_dev_path(dev)
if os.path.exists(dev_path):
return
time.sleep(1)
raise volume_utils.StorageError(
_('Timeout waiting for device %s to be created') % dev)
@contextlib.contextmanager
def vdi_attached_here(session, vdi_ref, read_only=False):
this_vm_ref = get_this_vm_ref(session)
vbd_ref = VMHelper.create_vbd(session, this_vm_ref, vdi_ref,
'autodetect', read_only=read_only,
bootable=False)
try:
LOG.debug(_('Plugging VBD %s ... '), vbd_ref)
session.call_xenapi("VBD.plug", vbd_ref)
try:
LOG.debug(_('Plugging VBD %s done.'), vbd_ref)
orig_dev = session.call_xenapi("VBD.get_device", vbd_ref)
LOG.debug(_('VBD %(vbd_ref)s plugged as %(orig_dev)s') % locals())
dev = remap_vbd_dev(orig_dev)
if dev != orig_dev:
LOG.debug(_('VBD %(vbd_ref)s plugged into wrong dev, '
'remapping to %(dev)s') % locals())
if dev != 'autodetect':
# NOTE(johannes): Unit tests will end up with a device called
# 'autodetect' which obviously won't exist. It's not ideal,
# but the alternatives were much messier
_wait_for_device(dev)
yield dev
finally:
LOG.debug(_('Destroying VBD for VDI %s ... '), vdi_ref)
vbd_unplug_with_retry(session, vbd_ref)
finally:
try:
VMHelper.destroy_vbd(session, vbd_ref)
except volume_utils.StorageError:
# destroy_vbd() will log error
pass
LOG.debug(_('Destroying VBD for VDI %s done.'), vdi_ref)
def vbd_unplug_with_retry(session, vbd_ref):
"""Call VBD.unplug on the given VBD, with a retry if we get
DEVICE_DETACH_REJECTED. For reasons which I don't understand, we're
seeing the device still in use, even when all processes using the device
should be dead."""
while True:
try:
VMHelper.unplug_vbd(session, vbd_ref)
LOG.debug(_('VBD.unplug successful first time.'))
return
        except VMHelper.XenAPI.Failure as e:
if (len(e.details) > 0 and
e.details[0] == 'DEVICE_DETACH_REJECTED'):
LOG.debug(_('VBD.unplug rejected: retrying...'))
greenthread.sleep(1)
LOG.debug(_('Not sleeping anymore!'))
elif (len(e.details) > 0 and
e.details[0] == 'DEVICE_ALREADY_DETACHED'):
LOG.debug(_('VBD.unplug successful eventually.'))
return
else:
LOG.error(_('Ignoring XenAPI.Failure in VBD.unplug: %s'),
e)
return
def get_this_vm_uuid():
with file('/sys/hypervisor/uuid') as f:
return f.readline().strip()
def get_this_vm_ref(session):
return session.call_xenapi("VM.get_by_uuid", get_this_vm_uuid())
def _is_vdi_pv(dev):
LOG.debug(_("Running pygrub against %s"), dev)
dev_path = utils.make_dev_path(dev)
output = os.popen('pygrub -qn %s' % dev_path)
for line in output.readlines():
#try to find kernel string
m = re.search('(?<=kernel:)/.*(?:>)', line)
if m and m.group(0).find('xen') != -1:
LOG.debug(_("Found Xen kernel %s") % m.group(0))
return True
LOG.debug(_("No Xen kernel found. Booting HVM."))
return False
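def _example_pygrub_match(line='kernel:/boot/vmlinuz-2.6.32-xenU>'):
    """Hypothetical sample pygrub output line: the lookbehind keeps the
    path after 'kernel:', and a path mentioning 'xen' marks a PV kernel.
    """
    m = re.search('(?<=kernel:)/.*(?:>)', line)
    return bool(m and m.group(0).find('xen') != -1)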
def _get_partitions(dev):
"""Return partition information (num, size, type) for a device."""
dev_path = utils.make_dev_path(dev)
out, _err = utils.execute('parted', '--script', '--machine',
dev_path, 'unit s', 'print',
run_as_root=True)
lines = [line for line in out.split('\n') if line]
partitions = []
LOG.debug(_("Partitions:"))
for line in lines[2:]:
num, start, end, size, ptype = line.split(':')[:5]
start = int(start.rstrip('s'))
end = int(end.rstrip('s'))
size = int(size.rstrip('s'))
LOG.debug(_(" %(num)s: %(ptype)s %(size)d sectors") % locals())
partitions.append((num, start, size, ptype))
return partitions
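def _example_parse_parted_line(line='1:63s:41943039s:41942977s:ext3::boot;'):
    """Hypothetical sample of one 'parted --script --machine' output line
    as parsed above: number, start, end and size in sectors (note the 's'
    suffix), followed by the partition type."""
    num, start, end, size, ptype = line.split(':')[:5]
    return (num, int(start.rstrip('s')), int(end.rstrip('s')),
            int(size.rstrip('s')), ptype)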
def _stream_disk(dev, image_type, virtual_size, image_file):
offset = 0
if image_type == ImageType.DISK:
offset = MBR_SIZE_BYTES
_write_partition(virtual_size, dev)
dev_path = utils.make_dev_path(dev)
with utils.temporary_chown(dev_path):
with open(dev_path, 'wb') as f:
f.seek(offset)
for chunk in image_file:
f.write(chunk)
def _write_partition(virtual_size, dev):
dev_path = utils.make_dev_path(dev)
primary_first = MBR_SIZE_SECTORS
primary_last = MBR_SIZE_SECTORS + (virtual_size / SECTOR_SIZE) - 1
LOG.debug(_('Writing partition table %(primary_first)d %(primary_last)d'
' to %(dev_path)s...') % locals())
def execute(*cmd, **kwargs):
return utils.execute(*cmd, **kwargs)
execute('parted', '--script', dev_path, 'mklabel', 'msdos',
run_as_root=True)
execute('parted', '--script', dev_path, 'mkpart', 'primary',
'%ds' % primary_first,
'%ds' % primary_last,
run_as_root=True)
LOG.debug(_('Writing partition table %s done.'), dev_path)
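def _example_partition_bounds(virtual_size=1024 * 1024 * 1024):
    """Hypothetical check of the sector arithmetic above, assuming the
    module defaults of 512-byte sectors and a 63-sector MBR: a 1 GiB
    image spans sectors 63..2097214."""
    first = MBR_SIZE_SECTORS
    last = MBR_SIZE_SECTORS + (virtual_size / SECTOR_SIZE) - 1
    return first, last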
def _resize_part_and_fs(dev, start, old_sectors, new_sectors):
"""Resize partition and fileystem.
This assumes we are dealing with a single primary partition and using
ext3 or ext4.
"""
size = new_sectors - start
end = new_sectors - 1
dev_path = utils.make_dev_path(dev)
partition_path = utils.make_dev_path(dev, partition=1)
# Replay journal if FS wasn't cleanly unmounted
# Exit Code 1 = File system errors corrected
# 2 = File system errors corrected, system needs a reboot
utils.execute('e2fsck', '-f', '-y', partition_path, run_as_root=True,
check_exit_code=[0, 1, 2])
# Remove ext3 journal (making it ext2)
utils.execute('tune2fs', '-O ^has_journal', partition_path,
run_as_root=True)
if new_sectors < old_sectors:
# Resizing down, resize filesystem before partition resize
utils.execute('resize2fs', partition_path, '%ds' % size,
run_as_root=True)
utils.execute('parted', '--script', dev_path, 'rm', '1',
run_as_root=True)
utils.execute('parted', '--script', dev_path, 'mkpart',
'primary',
'%ds' % start,
'%ds' % end,
run_as_root=True)
if new_sectors > old_sectors:
# Resizing up, resize filesystem after partition resize
utils.execute('resize2fs', partition_path, run_as_root=True)
# Add back journal
utils.execute('tune2fs', '-j', partition_path, run_as_root=True)
def _sparse_copy(src_path, dst_path, virtual_size, block_size=4096):
"""Copy data, skipping long runs of zeros to create a sparse file."""
start_time = time.time()
EMPTY_BLOCK = '\0' * block_size
bytes_read = 0
skipped_bytes = 0
left = virtual_size
LOG.debug(_("Starting sparse_copy src=%(src_path)s dst=%(dst_path)s "
"virtual_size=%(virtual_size)d block_size=%(block_size)d"),
locals())
# NOTE(sirp): we need read/write access to the devices; since we don't have
# the luxury of shelling out to a sudo'd command, we temporarily take
# ownership of the devices.
with utils.temporary_chown(src_path):
with utils.temporary_chown(dst_path):
with open(src_path, "r") as src:
with open(dst_path, "w") as dst:
data = src.read(min(block_size, left))
while data:
if data == EMPTY_BLOCK:
dst.seek(block_size, os.SEEK_CUR)
left -= block_size
bytes_read += block_size
skipped_bytes += block_size
else:
dst.write(data)
data_len = len(data)
left -= data_len
bytes_read += data_len
if left <= 0:
break
data = src.read(min(block_size, left))
duration = time.time() - start_time
compression_pct = float(skipped_bytes) / bytes_read * 100
LOG.debug(_("Finished sparse_copy in %(duration).2f secs, "
"%(compression_pct).2f%% reduction in size"), locals())
def _copy_partition(session, src_ref, dst_ref, partition, virtual_size):
# Part of disk taken up by MBR
virtual_size -= MBR_SIZE_BYTES
with vdi_attached_here(session, src_ref, read_only=True) as src:
src_path = utils.make_dev_path(src, partition=partition)
with vdi_attached_here(session, dst_ref, read_only=False) as dst:
dst_path = utils.make_dev_path(dst, partition=partition)
_write_partition(virtual_size, dst)
if FLAGS.xenapi_sparse_copy:
_sparse_copy(src_path, dst_path, virtual_size)
else:
num_blocks = virtual_size / SECTOR_SIZE
utils.execute('dd',
'if=%s' % src_path,
'of=%s' % dst_path,
'count=%d' % num_blocks,
run_as_root=True)
def _mount_filesystem(dev_path, mount_dir):
    """Mounts the device specified by dev_path in mount_dir"""
    try:
        _out, err = utils.execute('mount',
                                  '-t', 'ext2,ext3,ext4,reiserfs',
                                  dev_path, mount_dir, run_as_root=True)
except exception.ProcessExecutionError as e:
err = str(e)
return err
def _find_guest_agent(base_dir, agent_rel_path):
"""
tries to locate a guest agent at the path
    specified by agent_rel_path
"""
agent_path = os.path.join(base_dir, agent_rel_path)
if os.path.isfile(agent_path):
# The presence of the guest agent
# file indicates that this instance can
# reconfigure the network from xenstore data,
# so manipulation of files in /etc is not
# required
LOG.info(_('XenServer tools installed in this '
'image are capable of network injection. '
                   'Networking files will not be '
                   'manipulated'))
return True
xe_daemon_filename = os.path.join(base_dir,
'usr', 'sbin', 'xe-daemon')
if os.path.isfile(xe_daemon_filename):
LOG.info(_('XenServer tools are present '
'in this image but are not capable '
'of network injection'))
else:
LOG.info(_('XenServer tools are not '
'installed in this image'))
return False
def _mounted_processing(device, key, net, metadata):
"""Callback which runs with the image VDI attached"""
# NB: Partition 1 hardcoded
dev_path = utils.make_dev_path(device, partition=1)
with utils.tempdir() as tmpdir:
# Mount only Linux filesystems, to avoid disturbing NTFS images
err = _mount_filesystem(dev_path, tmpdir)
if not err:
try:
# This try block ensures that the umount occurs
if not _find_guest_agent(tmpdir, FLAGS.xenapi_agent_path):
LOG.info(_('Manipulating interface files '
'directly'))
# for xenapi, we don't 'inject' admin_password here,
# it's handled at instance startup time
disk.inject_data_into_fs(tmpdir,
key, net, None, metadata,
utils.execute)
finally:
utils.execute('umount', dev_path, run_as_root=True)
else:
LOG.info(_('Failed to mount filesystem (expected for '
'non-linux instances): %s') % err)
def _prepare_injectables(inst, networks_info):
"""
prepares the ssh key and the network configuration file to be
injected into the disk image
"""
    # do the import here - Cheetah.Template will be loaded
    # only if injection is performed
from Cheetah import Template as t
template = t.Template
template_data = open(FLAGS.injected_network_template).read()
metadata = inst['metadata']
key = str(inst['key_data'])
net = None
if networks_info:
ifc_num = -1
interfaces_info = []
have_injected_networks = False
for (network_ref, info) in networks_info:
ifc_num += 1
if not network_ref['injected']:
continue
have_injected_networks = True
ip_v4 = ip_v6 = None
if 'ips' in info and len(info['ips']) > 0:
ip_v4 = info['ips'][0]
if 'ip6s' in info and len(info['ip6s']) > 0:
ip_v6 = info['ip6s'][0]
if len(info['dns']) > 0:
dns = info['dns'][0]
else:
dns = ''
interface_info = {'name': 'eth%d' % ifc_num,
'address': ip_v4 and ip_v4['ip'] or '',
'netmask': ip_v4 and ip_v4['netmask'] or '',
'gateway': info['gateway'],
'broadcast': info['broadcast'],
'dns': dns,
'address_v6': ip_v6 and ip_v6['ip'] or '',
'netmask_v6': ip_v6 and ip_v6['netmask'] or '',
'gateway_v6': ip_v6 and info['gateway_v6'] or '',
'use_ipv6': FLAGS.use_ipv6}
interfaces_info.append(interface_info)
if have_injected_networks:
net = str(template(template_data,
searchList=[{'interfaces': interfaces_info,
'use_ipv6': FLAGS.use_ipv6}]))
return key, net, metadata
|
hurricup/intellij-community
|
refs/heads/master
|
python/testData/completion/className/stringLiteralAfterDot/mypackage/__init__.py
|
166
|
class Shazam:
pass
|
ibab/tensorflow
|
refs/heads/master
|
tensorflow/contrib/ctc/__init__.py
|
14
|
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Ops for CTC (Connectionist Temporal Classification).
@@ctc_loss
@@ctc_greedy_decoder
@@ctc_beam_search_decoder
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# pylint: disable=unused-import,wildcard-import
from tensorflow.contrib.ctc.ctc_ops import *
|
janeen666/mi-instrument
|
refs/heads/master
|
mi/dataset/driver/cg_stc_eng/stc/cg_stc_eng_stc_common_driver.py
|
7
|
#!/usr/local/bin/python2.7
##
# OOIPLACEHOLDER
#
# Copyright 2014 Raytheon Co.
##
from mi.core.log import get_logger
from mi.dataset.parser.cg_stc_eng_stc import CgStcEngStcParser
from mi.dataset.dataset_driver import DataSetDriver
class CgStcEngDriver:
def __init__(self, source_file_path, particle_data_handler, parser_config):
self._source_file_path = source_file_path
self._particle_data_handler = particle_data_handler
self._parser_config = parser_config
def process(self):
log = get_logger()
def exception_callback(exception):
log.debug("ERROR: %r", exception)
self._particle_data_handler.setParticleDataCaptureFailure()
with open(self._source_file_path, 'rb') as stream_handle:
parser = CgStcEngStcParser(self._parser_config, None, stream_handle,
lambda state, ingested: None,
lambda data: None, exception_callback)
driver = DataSetDriver(parser, self._particle_data_handler)
driver.processFileStream()
return self._particle_data_handler
|
usc-isi/essex-baremetal-support
|
refs/heads/master
|
nova/notifier/list_notifier.py
|
5
|
# Copyright 2011 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova import exception
from nova import flags
from nova import log as logging
from nova.openstack.common import cfg
from nova import utils
list_notifier_drivers_opt = cfg.MultiStrOpt('list_notifier_drivers',
default=['nova.notifier.no_op_notifier'],
help='List of drivers to send notifications')
FLAGS = flags.FLAGS
FLAGS.register_opt(list_notifier_drivers_opt)
LOG = logging.getLogger(__name__)
drivers = None
class ImportFailureNotifier(object):
"""Noisily re-raises some exception over-and-over when notify is called."""
def __init__(self, exception):
self.exception = exception
def notify(self, message):
raise self.exception
def _get_drivers():
"""Instantiates and returns drivers based on the flag values."""
global drivers
if not drivers:
drivers = []
for notification_driver in FLAGS.list_notifier_drivers:
try:
drivers.append(utils.import_object(notification_driver))
except exception.ClassNotFound as e:
drivers.append(ImportFailureNotifier(e))
return drivers
def notify(message):
"""Passes notification to multiple notifiers in a list."""
for driver in _get_drivers():
try:
driver.notify(message)
except Exception as e:
LOG.exception(_("Problem '%(e)s' attempting to send to "
"notification driver %(driver)s." % locals()))
def _reset_drivers():
"""Used by unit tests to reset the drivers."""
global drivers
drivers = None
|
hehongliang/tensorflow
|
refs/heads/master
|
tensorflow/python/debug/wrappers/grpc_wrapper.py
|
29
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Debugger wrapper session that sends debug data to file:// URLs."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import signal
import sys
import traceback
import six
# Google-internal import(s).
from tensorflow.python.debug.lib import common
from tensorflow.python.debug.wrappers import framework
def publish_traceback(debug_server_urls,
graph,
feed_dict,
fetches,
old_graph_version):
"""Publish traceback and source code if graph version is new.
`graph.version` is compared with `old_graph_version`. If the former is higher
  (i.e., newer), the graph traceback and the associated source code are sent to
the debug server at the specified gRPC URLs.
Args:
debug_server_urls: A single gRPC debug server URL as a `str` or a `list` of
debug server URLs.
graph: A Python `tf.Graph` object.
feed_dict: Feed dictionary given to the `Session.run()` call.
fetches: Fetches from the `Session.run()` call.
old_graph_version: Old graph version to compare to.
Returns:
If `graph.version > old_graph_version`, the new graph version as an `int`.
Else, the `old_graph_version` is returned.
"""
# TODO(cais): Consider moving this back to the top, after grpc becomes a
# pip dependency of tensorflow or tf_debug.
# pylint:disable=g-import-not-at-top
from tensorflow.python.debug.lib import source_remote
# pylint:enable=g-import-not-at-top
if graph.version > old_graph_version:
run_key = common.get_run_key(feed_dict, fetches)
source_remote.send_graph_tracebacks(
debug_server_urls, run_key, traceback.extract_stack(), graph,
send_source=True)
return graph.version
else:
return old_graph_version
class GrpcDebugWrapperSession(framework.NonInteractiveDebugWrapperSession):
"""Debug Session wrapper that send debug data to gRPC stream(s)."""
def __init__(self,
sess,
grpc_debug_server_addresses,
watch_fn=None,
thread_name_filter=None,
log_usage=True):
"""Constructor of DumpingDebugWrapperSession.
Args:
sess: The TensorFlow `Session` object being wrapped.
grpc_debug_server_addresses: (`str` or `list` of `str`) Single or a list
of the gRPC debug server addresses, in the format of
<host:port>, with or without the "grpc://" prefix. For example:
"localhost:7000",
["localhost:7000", "192.168.0.2:8000"]
watch_fn: (`Callable`) A Callable that can be used to define per-run
debug ops and watched tensors. See the doc of
`NonInteractiveDebugWrapperSession.__init__()` for details.
thread_name_filter: Regular-expression white list for threads on which the
wrapper session will be active. See doc of `BaseDebugWrapperSession` for
more details.
log_usage: (`bool`) whether the usage of this class is to be logged.
Raises:
TypeError: If `grpc_debug_server_addresses` is not a `str` or a `list`
of `str`.
"""
if log_usage:
pass # No logging for open-source.
framework.NonInteractiveDebugWrapperSession.__init__(
self, sess, watch_fn=watch_fn, thread_name_filter=thread_name_filter)
if isinstance(grpc_debug_server_addresses, str):
self._grpc_debug_server_urls = [
self._normalize_grpc_url(grpc_debug_server_addresses)]
elif isinstance(grpc_debug_server_addresses, list):
self._grpc_debug_server_urls = []
for address in grpc_debug_server_addresses:
if not isinstance(address, str):
raise TypeError(
"Expected type str in list grpc_debug_server_addresses, "
"received type %s" % type(address))
self._grpc_debug_server_urls.append(self._normalize_grpc_url(address))
else:
raise TypeError(
"Expected type str or list in grpc_debug_server_addresses, "
"received type %s" % type(grpc_debug_server_addresses))
def prepare_run_debug_urls(self, fetches, feed_dict):
"""Implementation of abstract method in superclass.
See doc of `NonInteractiveDebugWrapperSession.prepare_run_debug_urls()`
for details.
Args:
fetches: Same as the `fetches` argument to `Session.run()`
feed_dict: Same as the `feed_dict` argument to `Session.run()`
Returns:
      debug_urls: (`str` or `list` of `str`) gRPC debug URLs to be used in
        this `Session.run()` call.
"""
return self._grpc_debug_server_urls
def _normalize_grpc_url(self, address):
return (common.GRPC_URL_PREFIX + address
if not address.startswith(common.GRPC_URL_PREFIX) else address)
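def _example_wrap_session(sess, address="localhost:7000"):
  """Hypothetical usage sketch: wrap an existing `tf.Session` so debug
  data streams to a gRPC debug server; the address may be given with or
  without the "grpc://" prefix."""
  return GrpcDebugWrapperSession(sess, address)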
def _signal_handler(unused_signal, unused_frame):
while True:
response = six.moves.input(
"\nSIGINT received. Quit program? (Y/n): ").strip()
if response in ("", "Y", "y"):
sys.exit(0)
elif response in ("N", "n"):
break
def register_signal_handler():
try:
signal.signal(signal.SIGINT, _signal_handler)
except ValueError:
# This can happen if we are not in the MainThread.
pass
class TensorBoardDebugWrapperSession(GrpcDebugWrapperSession):
"""A tfdbg Session wrapper that can be used with TensorBoard Debugger Plugin.
This wrapper is the same as `GrpcDebugWrapperSession`, except that it uses a
predefined `watch_fn` that
1) uses `DebugIdentity` debug ops with the `gated_grpc` attribute set to
`True` to allow the interactive enabling and disabling of tensor
breakpoints.
2) watches all tensors in the graph.
This saves the need for the user to define a `watch_fn`.
"""
def __init__(self,
sess,
grpc_debug_server_addresses,
thread_name_filter=None,
send_traceback_and_source_code=True,
log_usage=True):
"""Constructor of TensorBoardDebugWrapperSession.
Args:
sess: The `tf.Session` instance to be wrapped.
grpc_debug_server_addresses: gRPC address(es) of debug server(s), as a
`str` or a `list` of `str`s. E.g., "localhost:2333",
"grpc://localhost:2333", ["192.168.0.7:2333", "192.168.0.8:2333"].
thread_name_filter: Optional filter for thread names.
send_traceback_and_source_code: Whether traceback of graph elements and
the source code are to be sent to the debug server(s).
log_usage: Whether the usage of this class is to be logged (if
applicable).
"""
def _gated_grpc_watch_fn(fetches, feeds):
del fetches, feeds # Unused.
return framework.WatchOptions(
debug_ops=["DebugIdentity(gated_grpc=true)"])
super(TensorBoardDebugWrapperSession, self).__init__(
sess,
grpc_debug_server_addresses,
watch_fn=_gated_grpc_watch_fn,
thread_name_filter=thread_name_filter,
log_usage=log_usage)
self._send_traceback_and_source_code = send_traceback_and_source_code
# Keeps track of the latest version of Python graph object that has been
# sent to the debug servers.
self._sent_graph_version = -1
register_signal_handler()
def run(self,
fetches,
feed_dict=None,
options=None,
run_metadata=None,
callable_runner=None,
callable_runner_args=None,
callable_options=None):
if self._send_traceback_and_source_code:
self._sent_graph_version = publish_traceback(
self._grpc_debug_server_urls, self.graph, feed_dict, fetches,
self._sent_graph_version)
return super(TensorBoardDebugWrapperSession, self).run(
fetches,
feed_dict=feed_dict,
options=options,
run_metadata=run_metadata,
callable_runner=callable_runner,
callable_runner_args=callable_runner_args,
callable_options=callable_options)
|
kvar/ansible
|
refs/heads/seas_master_2.9.5
|
test/integration/targets/module_utils/module_utils/qux1/quux.py
|
298
|
data = 'qux1'
|
AALEKH/server
|
refs/heads/10.2-redis-plugin
|
storage/tokudb/mysql-test/tokudb/t/fast_update_blobs.py
|
73
|
#!/usr/bin/env python
import sys
import random
import string
def main():
print "# generated by tokudb_fast_update_blobs.py"
print "source include/have_tokudb.inc;"
print "source include/have_innodb.inc;"
print "set default_storage_engine='tokudb';"
print "disable_warnings;"
print "drop table if exists t;"
print "enable_warnings;"
nrows = 10
blob_types = [ 'tinyblob', 'blob', 'mediumblob', 'longblob', 'text' ]
for a in blob_types:
for b in blob_types:
for c in blob_types:
for n in [ 'null', 'not null' ]:
test_blobs([ a, b, c ] , n, nrows)
return 0
def test_blobs(cols, n, nrows):
print "create table tt (id bigint unsigned primary key,"
# print " f0 int %s," % (n)
for i in range(len(cols)):
if i < len(cols)-1:
print " b%d %s %s," % (i, cols[i], n)
else:
print " b%d %s %s" % (i, cols[i], n)
print ") engine=tokudb;"
for id in range(1,nrows):
if n == 'null':
print "insert into tt (id) values (%d);" % (id)
else:
print "insert into tt values (%d,'','','');" % (id)
print "create table ti like tt;"
print "alter table ti engine=innodb;"
print "insert into ti select * from tt;"
for id in range(1,nrows):
for i in range(3):
long_str = ''.join(random.choice(string.ascii_uppercase + string.digits) for x in range(random.randint(1,32)))
print "update noar tt set b%d='%s' where id=%d;" % (i, long_str, id)
print "update noar ti set b%d='%s' where id=%d;" % (i, long_str, id)
print "let $diff_tables = test.tt, test.ti;"
print "source include/diff_tables.inc;"
print "drop table tt, ti;"
sys.exit(main())
|
Fireblend/chromium-crosswalk
|
refs/heads/master
|
third_party/cython/src/Cython/Compiler/FusedNode.py
|
90
|
import copy
from Cython.Compiler import (ExprNodes, PyrexTypes, MemoryView,
ParseTreeTransforms, StringEncoding,
Errors)
from Cython.Compiler.ExprNodes import CloneNode, ProxyNode, TupleNode
from Cython.Compiler.Nodes import (FuncDefNode, CFuncDefNode, StatListNode,
DefNode)
class FusedCFuncDefNode(StatListNode):
"""
This node replaces a function with fused arguments. It deep-copies the
function for every permutation of fused types, and allocates a new local
scope for it. It keeps track of the original function in self.node, and
the entry of the original function in the symbol table is given the
'fused_cfunction' attribute which points back to us.
Then when a function lookup occurs (to e.g. call it), the call can be
dispatched to the right function.
node FuncDefNode the original function
nodes [FuncDefNode] list of copies of node with different specific types
py_func DefNode the fused python function subscriptable from
Python space
__signatures__ A DictNode mapping signature specialization strings
to PyCFunction nodes
resulting_fused_function PyCFunction for the fused DefNode that delegates
to specializations
fused_func_assignment Assignment of the fused function to the function name
defaults_tuple TupleNode of defaults (letting PyCFunctionNode build
defaults would result in many different tuples)
specialized_pycfuncs List of synthesized pycfunction nodes for the
specializations
code_object CodeObjectNode shared by all specializations and the
fused function
fused_compound_types All fused (compound) types (e.g. floating[:])
"""
__signatures__ = None
resulting_fused_function = None
fused_func_assignment = None
defaults_tuple = None
decorators = None
child_attrs = StatListNode.child_attrs + [
'__signatures__', 'resulting_fused_function', 'fused_func_assignment']
def __init__(self, node, env):
super(FusedCFuncDefNode, self).__init__(node.pos)
self.nodes = []
self.node = node
is_def = isinstance(self.node, DefNode)
if is_def:
# self.node.decorators = []
self.copy_def(env)
else:
self.copy_cdef(env)
# Perform some sanity checks. If anything fails, it's a bug
for n in self.nodes:
assert not n.entry.type.is_fused
assert not n.local_scope.return_type.is_fused
if node.return_type.is_fused:
assert not n.return_type.is_fused
if not is_def and n.cfunc_declarator.optional_arg_count:
assert n.type.op_arg_struct
node.entry.fused_cfunction = self
# Copy the nodes as AnalyseDeclarationsTransform will prepend
# self.py_func to self.stats, as we only want specialized
# CFuncDefNodes in self.nodes
self.stats = self.nodes[:]
def copy_def(self, env):
"""
Create a copy of the original def or lambda function for specialized
versions.
"""
fused_compound_types = PyrexTypes.unique(
[arg.type for arg in self.node.args if arg.type.is_fused])
permutations = PyrexTypes.get_all_specialized_permutations(fused_compound_types)
self.fused_compound_types = fused_compound_types
if self.node.entry in env.pyfunc_entries:
env.pyfunc_entries.remove(self.node.entry)
for cname, fused_to_specific in permutations:
copied_node = copy.deepcopy(self.node)
self._specialize_function_args(copied_node.args, fused_to_specific)
copied_node.return_type = self.node.return_type.specialize(
fused_to_specific)
copied_node.analyse_declarations(env)
# copied_node.is_staticmethod = self.node.is_staticmethod
# copied_node.is_classmethod = self.node.is_classmethod
self.create_new_local_scope(copied_node, env, fused_to_specific)
self.specialize_copied_def(copied_node, cname, self.node.entry,
fused_to_specific, fused_compound_types)
PyrexTypes.specialize_entry(copied_node.entry, cname)
copied_node.entry.used = True
env.entries[copied_node.entry.name] = copied_node.entry
if not self.replace_fused_typechecks(copied_node):
break
self.orig_py_func = self.node
self.py_func = self.make_fused_cpdef(self.node, env, is_def=True)
def copy_cdef(self, env):
"""
Create a copy of the original c(p)def function for all specialized
versions.
"""
permutations = self.node.type.get_all_specialized_permutations()
# print 'Node %s has %d specializations:' % (self.node.entry.name,
# len(permutations))
# import pprint; pprint.pprint([d for cname, d in permutations])
if self.node.entry in env.cfunc_entries:
env.cfunc_entries.remove(self.node.entry)
# Prevent copying of the python function
self.orig_py_func = orig_py_func = self.node.py_func
self.node.py_func = None
if orig_py_func:
env.pyfunc_entries.remove(orig_py_func.entry)
fused_types = self.node.type.get_fused_types()
self.fused_compound_types = fused_types
for cname, fused_to_specific in permutations:
copied_node = copy.deepcopy(self.node)
# Make the types in our CFuncType specific
type = copied_node.type.specialize(fused_to_specific)
entry = copied_node.entry
copied_node.type = type
entry.type, type.entry = type, entry
entry.used = (entry.used or
self.node.entry.defined_in_pxd or
env.is_c_class_scope or
entry.is_cmethod)
if self.node.cfunc_declarator.optional_arg_count:
self.node.cfunc_declarator.declare_optional_arg_struct(
type, env, fused_cname=cname)
copied_node.return_type = type.return_type
self.create_new_local_scope(copied_node, env, fused_to_specific)
# Make the argument types in the CFuncDeclarator specific
self._specialize_function_args(copied_node.cfunc_declarator.args,
fused_to_specific)
type.specialize_entry(entry, cname)
env.cfunc_entries.append(entry)
# If a cpdef, declare all specialized cpdefs (this
# also calls analyse_declarations)
copied_node.declare_cpdef_wrapper(env)
if copied_node.py_func:
env.pyfunc_entries.remove(copied_node.py_func.entry)
self.specialize_copied_def(
copied_node.py_func, cname, self.node.entry.as_variable,
fused_to_specific, fused_types)
if not self.replace_fused_typechecks(copied_node):
break
if orig_py_func:
self.py_func = self.make_fused_cpdef(orig_py_func, env,
is_def=False)
else:
self.py_func = orig_py_func
def _specialize_function_args(self, args, fused_to_specific):
for arg in args:
if arg.type.is_fused:
arg.type = arg.type.specialize(fused_to_specific)
if arg.type.is_memoryviewslice:
MemoryView.validate_memslice_dtype(arg.pos, arg.type.dtype)
def create_new_local_scope(self, node, env, f2s):
"""
Create a new local scope for the copied node and append it to
self.nodes. A new local scope is needed because the arguments with the
        fused types are already in the local scope, and we need the specialized
entries created after analyse_declarations on each specialized version
of the (CFunc)DefNode.
f2s is a dict mapping each fused type to its specialized version
"""
node.create_local_scope(env)
node.local_scope.fused_to_specific = f2s
# This is copied from the original function, set it to false to
# stop recursion
node.has_fused_arguments = False
self.nodes.append(node)
def specialize_copied_def(self, node, cname, py_entry, f2s, fused_types):
"""Specialize the copy of a DefNode given the copied node,
the specialization cname and the original DefNode entry"""
type_strings = [
PyrexTypes.specialization_signature_string(fused_type, f2s)
for fused_type in fused_types
]
node.specialized_signature_string = '|'.join(type_strings)
node.entry.pymethdef_cname = PyrexTypes.get_fused_cname(
cname, node.entry.pymethdef_cname)
node.entry.doc = py_entry.doc
node.entry.doc_cname = py_entry.doc_cname
def replace_fused_typechecks(self, copied_node):
"""
Branch-prune fused type checks like
if fused_t is int:
...
        Returns whether an error was issued and whether we should stop
        in order to prevent a flood of errors.
"""
num_errors = Errors.num_errors
transform = ParseTreeTransforms.ReplaceFusedTypeChecks(
copied_node.local_scope)
transform(copied_node)
if Errors.num_errors > num_errors:
return False
return True
def _fused_instance_checks(self, normal_types, pyx_code, env):
"""
Genereate Cython code for instance checks, matching an object to
specialized types.
"""
if_ = 'if'
for specialized_type in normal_types:
# all_numeric = all_numeric and specialized_type.is_numeric
py_type_name = specialized_type.py_type_name()
specialized_type_name = specialized_type.specialization_string
pyx_code.context.update(locals())
pyx_code.put_chunk(
u"""
{{if_}} isinstance(arg, {{py_type_name}}):
dest_sig[{{dest_sig_idx}}] = '{{specialized_type_name}}'
""")
if_ = 'elif'
if not normal_types:
# we need an 'if' to match the following 'else'
pyx_code.putln("if 0: pass")
def _dtype_name(self, dtype):
if dtype.is_typedef:
return '___pyx_%s' % dtype
return str(dtype).replace(' ', '_')
def _dtype_type(self, dtype):
if dtype.is_typedef:
return self._dtype_name(dtype)
return str(dtype)
def _sizeof_dtype(self, dtype):
if dtype.is_pyobject:
return 'sizeof(void *)'
else:
return "sizeof(%s)" % self._dtype_type(dtype)
def _buffer_check_numpy_dtype_setup_cases(self, pyx_code):
"Setup some common cases to match dtypes against specializations"
if pyx_code.indenter("if dtype.kind in ('i', 'u'):"):
pyx_code.putln("pass")
pyx_code.named_insertion_point("dtype_int")
pyx_code.dedent()
if pyx_code.indenter("elif dtype.kind == 'f':"):
pyx_code.putln("pass")
pyx_code.named_insertion_point("dtype_float")
pyx_code.dedent()
if pyx_code.indenter("elif dtype.kind == 'c':"):
pyx_code.putln("pass")
pyx_code.named_insertion_point("dtype_complex")
pyx_code.dedent()
if pyx_code.indenter("elif dtype.kind == 'O':"):
pyx_code.putln("pass")
pyx_code.named_insertion_point("dtype_object")
pyx_code.dedent()
match = "dest_sig[{{dest_sig_idx}}] = '{{specialized_type_name}}'"
no_match = "dest_sig[{{dest_sig_idx}}] = None"
def _buffer_check_numpy_dtype(self, pyx_code, specialized_buffer_types):
"""
Match a numpy dtype object to the individual specializations.
"""
self._buffer_check_numpy_dtype_setup_cases(pyx_code)
for specialized_type in specialized_buffer_types:
dtype = specialized_type.dtype
pyx_code.context.update(
itemsize_match=self._sizeof_dtype(dtype) + " == itemsize",
signed_match="not (%s_is_signed ^ dtype_signed)" % self._dtype_name(dtype),
dtype=dtype,
specialized_type_name=specialized_type.specialization_string)
dtypes = [
(dtype.is_int, pyx_code.dtype_int),
(dtype.is_float, pyx_code.dtype_float),
(dtype.is_complex, pyx_code.dtype_complex)
]
for dtype_category, codewriter in dtypes:
if dtype_category:
cond = '{{itemsize_match}} and arg.ndim == %d' % (
specialized_type.ndim,)
if dtype.is_int:
cond += ' and {{signed_match}}'
if codewriter.indenter("if %s:" % cond):
# codewriter.putln("print 'buffer match found based on numpy dtype'")
codewriter.putln(self.match)
codewriter.putln("break")
codewriter.dedent()
def _buffer_parse_format_string_check(self, pyx_code, decl_code,
specialized_type, env):
"""
For each specialized type, try to coerce the object to a memoryview
slice of that type. This means obtaining a buffer and parsing the
format string.
TODO: separate buffer acquisition from format parsing
"""
dtype = specialized_type.dtype
if specialized_type.is_buffer:
axes = [('direct', 'strided')] * specialized_type.ndim
else:
axes = specialized_type.axes
memslice_type = PyrexTypes.MemoryViewSliceType(dtype, axes)
memslice_type.create_from_py_utility_code(env)
pyx_code.context.update(
coerce_from_py_func=memslice_type.from_py_function,
dtype=dtype)
decl_code.putln(
"{{memviewslice_cname}} {{coerce_from_py_func}}(object)")
pyx_code.context.update(
specialized_type_name=specialized_type.specialization_string,
sizeof_dtype=self._sizeof_dtype(dtype))
pyx_code.put_chunk(
u"""
# try {{dtype}}
if itemsize == -1 or itemsize == {{sizeof_dtype}}:
memslice = {{coerce_from_py_func}}(arg)
if memslice.memview:
__PYX_XDEC_MEMVIEW(&memslice, 1)
# print 'found a match for the buffer through format parsing'
%s
break
else:
__pyx_PyErr_Clear()
""" % self.match)
def _buffer_checks(self, buffer_types, pyx_code, decl_code, env):
"""
Generate Cython code to match objects to buffer specializations.
First try to get a numpy dtype object and match it against the individual
specializations. If that fails, try naively to coerce the object
to each specialization, which obtains the buffer each time and tries
to match the format string.
"""
from Cython.Compiler import ExprNodes
if buffer_types:
if pyx_code.indenter(u"else:"):
# The first thing to find a match in this loop breaks out of the loop
if pyx_code.indenter(u"while 1:"):
pyx_code.put_chunk(
u"""
if numpy is not None:
if isinstance(arg, numpy.ndarray):
dtype = arg.dtype
elif (__pyx_memoryview_check(arg) and
isinstance(arg.base, numpy.ndarray)):
dtype = arg.base.dtype
else:
dtype = None
itemsize = -1
if dtype is not None:
itemsize = dtype.itemsize
kind = ord(dtype.kind)
dtype_signed = kind == ord('i')
""")
pyx_code.indent(2)
pyx_code.named_insertion_point("numpy_dtype_checks")
self._buffer_check_numpy_dtype(pyx_code, buffer_types)
pyx_code.dedent(2)
for specialized_type in buffer_types:
self._buffer_parse_format_string_check(
pyx_code, decl_code, specialized_type, env)
pyx_code.putln(self.no_match)
pyx_code.putln("break")
pyx_code.dedent()
pyx_code.dedent()
else:
pyx_code.putln("else: %s" % self.no_match)
def _buffer_declarations(self, pyx_code, decl_code, all_buffer_types):
"""
If we have any buffer specializations, write out some variable
declarations and imports.
"""
decl_code.put_chunk(
u"""
ctypedef struct {{memviewslice_cname}}:
void *memview
void __PYX_XDEC_MEMVIEW({{memviewslice_cname}} *, int have_gil)
bint __pyx_memoryview_check(object)
""")
pyx_code.local_variable_declarations.put_chunk(
u"""
cdef {{memviewslice_cname}} memslice
cdef Py_ssize_t itemsize
cdef bint dtype_signed
cdef char kind
itemsize = -1
""")
pyx_code.imports.put_chunk(
u"""
try:
import numpy
except ImportError:
numpy = None
""")
seen_int_dtypes = set()
for buffer_type in all_buffer_types:
dtype = buffer_type.dtype
if dtype.is_typedef:
#decl_code.putln("ctypedef %s %s" % (dtype.resolve(),
# self._dtype_name(dtype)))
decl_code.putln('ctypedef %s %s "%s"' % (dtype.resolve(),
self._dtype_name(dtype),
dtype.declaration_code("")))
if buffer_type.dtype.is_int:
if str(dtype) not in seen_int_dtypes:
seen_int_dtypes.add(str(dtype))
pyx_code.context.update(dtype_name=self._dtype_name(dtype),
dtype_type=self._dtype_type(dtype))
pyx_code.local_variable_declarations.put_chunk(
u"""
cdef bint {{dtype_name}}_is_signed
{{dtype_name}}_is_signed = <{{dtype_type}}> -1 < 0
""")
def _split_fused_types(self, arg):
"""
Specialize fused types and split into normal types and buffer types.
"""
specialized_types = PyrexTypes.get_specialized_types(arg.type)
# Prefer long over int, etc
# specialized_types.sort()
seen_py_type_names = set()
normal_types, buffer_types = [], []
for specialized_type in specialized_types:
py_type_name = specialized_type.py_type_name()
if py_type_name:
if py_type_name in seen_py_type_names:
continue
seen_py_type_names.add(py_type_name)
normal_types.append(specialized_type)
elif specialized_type.is_buffer or specialized_type.is_memoryviewslice:
buffer_types.append(specialized_type)
return normal_types, buffer_types
def _unpack_argument(self, pyx_code):
pyx_code.put_chunk(
u"""
# PROCESSING ARGUMENT {{arg_tuple_idx}}
if {{arg_tuple_idx}} < len(args):
arg = args[{{arg_tuple_idx}}]
elif '{{arg.name}}' in kwargs:
arg = kwargs['{{arg.name}}']
else:
{{if arg.default:}}
arg = defaults[{{default_idx}}]
{{else}}
raise TypeError("Expected at least %d arguments" % len(args))
{{endif}}
""")
def make_fused_cpdef(self, orig_py_func, env, is_def):
"""
This creates the function that is indexable from Python and does
runtime dispatch based on the argument types. The function gets the
arg tuple and kwargs dict (or None) and the defaults tuple
as arguments from the Binding Fused Function's tp_call.
"""
from Cython.Compiler import TreeFragment, Code, MemoryView, UtilityCode
# { (arg_pos, FusedType) : specialized_type }
seen_fused_types = set()
context = {
'memviewslice_cname': MemoryView.memviewslice_cname,
'func_args': self.node.args,
'n_fused': len([arg for arg in self.node.args]),
'name': orig_py_func.entry.name,
}
pyx_code = Code.PyxCodeWriter(context=context)
decl_code = Code.PyxCodeWriter(context=context)
decl_code.put_chunk(
u"""
cdef extern from *:
void __pyx_PyErr_Clear "PyErr_Clear" ()
""")
decl_code.indent()
pyx_code.put_chunk(
u"""
def __pyx_fused_cpdef(signatures, args, kwargs, defaults):
dest_sig = [{{for _ in range(n_fused)}}None,{{endfor}}]
if kwargs is None:
kwargs = {}
cdef Py_ssize_t i
# instance check body
""")
pyx_code.indent() # indent following code to function body
pyx_code.named_insertion_point("imports")
pyx_code.named_insertion_point("local_variable_declarations")
fused_index = 0
default_idx = 0
all_buffer_types = set()
for i, arg in enumerate(self.node.args):
if arg.type.is_fused and arg.type not in seen_fused_types:
seen_fused_types.add(arg.type)
context.update(
arg_tuple_idx=i,
arg=arg,
dest_sig_idx=fused_index,
default_idx=default_idx,
)
normal_types, buffer_types = self._split_fused_types(arg)
self._unpack_argument(pyx_code)
self._fused_instance_checks(normal_types, pyx_code, env)
self._buffer_checks(buffer_types, pyx_code, decl_code, env)
fused_index += 1
all_buffer_types.update(buffer_types)
if arg.default:
default_idx += 1
if all_buffer_types:
self._buffer_declarations(pyx_code, decl_code, all_buffer_types)
env.use_utility_code(Code.UtilityCode.load_cached("Import", "ImportExport.c"))
pyx_code.put_chunk(
u"""
candidates = []
for sig in signatures:
match_found = False
for src_type, dst_type in zip(sig.strip('()').split('|'), dest_sig):
if dst_type is not None:
if src_type == dst_type:
match_found = True
else:
match_found = False
break
if match_found:
candidates.append(sig)
if not candidates:
raise TypeError("No matching signature found")
elif len(candidates) > 1:
raise TypeError("Function call with ambiguous argument types")
else:
return signatures[candidates[0]]
""")
fragment_code = pyx_code.getvalue()
# print decl_code.getvalue()
# print fragment_code
fragment = TreeFragment.TreeFragment(fragment_code, level='module')
ast = TreeFragment.SetPosTransform(self.node.pos)(fragment.root)
UtilityCode.declare_declarations_in_scope(decl_code.getvalue(),
env.global_scope())
ast.scope = env
ast.analyse_declarations(env)
py_func = ast.stats[-1] # the DefNode
self.fragment_scope = ast.scope
if isinstance(self.node, DefNode):
py_func.specialized_cpdefs = self.nodes[:]
else:
py_func.specialized_cpdefs = [n.py_func for n in self.nodes]
return py_func
def update_fused_defnode_entry(self, env):
copy_attributes = (
'name', 'pos', 'cname', 'func_cname', 'pyfunc_cname',
'pymethdef_cname', 'doc', 'doc_cname', 'is_member',
'scope'
)
entry = self.py_func.entry
for attr in copy_attributes:
setattr(entry, attr,
getattr(self.orig_py_func.entry, attr))
self.py_func.name = self.orig_py_func.name
self.py_func.doc = self.orig_py_func.doc
env.entries.pop('__pyx_fused_cpdef', None)
if isinstance(self.node, DefNode):
env.entries[entry.name] = entry
else:
env.entries[entry.name].as_variable = entry
env.pyfunc_entries.append(entry)
self.py_func.entry.fused_cfunction = self
for node in self.nodes:
if isinstance(self.node, DefNode):
node.fused_py_func = self.py_func
else:
node.py_func.fused_py_func = self.py_func
node.entry.as_variable = entry
self.synthesize_defnodes()
self.stats.append(self.__signatures__)
def analyse_expressions(self, env):
"""
Analyse the expressions. Take care to only evaluate default arguments
once and clone the result for all specializations
"""
for fused_compound_type in self.fused_compound_types:
for fused_type in fused_compound_type.get_fused_types():
for specialization_type in fused_type.types:
if specialization_type.is_complex:
specialization_type.create_declaration_utility_code(env)
if self.py_func:
self.__signatures__ = self.__signatures__.analyse_expressions(env)
self.py_func = self.py_func.analyse_expressions(env)
self.resulting_fused_function = self.resulting_fused_function.analyse_expressions(env)
self.fused_func_assignment = self.fused_func_assignment.analyse_expressions(env)
self.defaults = defaults = []
for arg in self.node.args:
if arg.default:
arg.default = arg.default.analyse_expressions(env)
defaults.append(ProxyNode(arg.default))
else:
defaults.append(None)
for i, stat in enumerate(self.stats):
stat = self.stats[i] = stat.analyse_expressions(env)
if isinstance(stat, FuncDefNode):
for arg, default in zip(stat.args, defaults):
if default is not None:
arg.default = CloneNode(default).coerce_to(arg.type, env)
if self.py_func:
args = [CloneNode(default) for default in defaults if default]
self.defaults_tuple = TupleNode(self.pos, args=args)
self.defaults_tuple = self.defaults_tuple.analyse_types(env, skip_children=True)
self.defaults_tuple = ProxyNode(self.defaults_tuple)
self.code_object = ProxyNode(self.specialized_pycfuncs[0].code_object)
fused_func = self.resulting_fused_function.arg
fused_func.defaults_tuple = CloneNode(self.defaults_tuple)
fused_func.code_object = CloneNode(self.code_object)
for i, pycfunc in enumerate(self.specialized_pycfuncs):
pycfunc.code_object = CloneNode(self.code_object)
pycfunc = self.specialized_pycfuncs[i] = pycfunc.analyse_types(env)
pycfunc.defaults_tuple = CloneNode(self.defaults_tuple)
return self
def synthesize_defnodes(self):
"""
Create the __signatures__ dict of PyCFunctionNode specializations.
"""
if isinstance(self.nodes[0], CFuncDefNode):
nodes = [node.py_func for node in self.nodes]
else:
nodes = self.nodes
signatures = [
StringEncoding.EncodedString(node.specialized_signature_string)
for node in nodes]
keys = [ExprNodes.StringNode(node.pos, value=sig)
for node, sig in zip(nodes, signatures)]
values = [ExprNodes.PyCFunctionNode.from_defnode(node, True)
for node in nodes]
self.__signatures__ = ExprNodes.DictNode.from_pairs(self.pos,
zip(keys, values))
self.specialized_pycfuncs = values
for pycfuncnode in values:
pycfuncnode.is_specialization = True
def generate_function_definitions(self, env, code):
if self.py_func:
self.py_func.pymethdef_required = True
self.fused_func_assignment.generate_function_definitions(env, code)
for stat in self.stats:
if isinstance(stat, FuncDefNode) and stat.entry.used:
code.mark_pos(stat.pos)
stat.generate_function_definitions(env, code)
def generate_execution_code(self, code):
        # Note: all def function specializations are wrapped in PyCFunction
# nodes in the self.__signatures__ dictnode.
for default in self.defaults:
if default is not None:
default.generate_evaluation_code(code)
if self.py_func:
self.defaults_tuple.generate_evaluation_code(code)
self.code_object.generate_evaluation_code(code)
for stat in self.stats:
code.mark_pos(stat.pos)
if isinstance(stat, ExprNodes.ExprNode):
stat.generate_evaluation_code(code)
else:
stat.generate_execution_code(code)
if self.__signatures__:
self.resulting_fused_function.generate_evaluation_code(code)
code.putln(
"((__pyx_FusedFunctionObject *) %s)->__signatures__ = %s;" %
(self.resulting_fused_function.result(),
self.__signatures__.result()))
code.put_giveref(self.__signatures__.result())
self.fused_func_assignment.generate_execution_code(code)
# Dispose of results
self.resulting_fused_function.generate_disposal_code(code)
self.defaults_tuple.generate_disposal_code(code)
self.code_object.generate_disposal_code(code)
for default in self.defaults:
if default is not None:
default.generate_disposal_code(code)
def annotate(self, code):
for stat in self.stats:
stat.annotate(code)
|
jpetto/olympia
|
refs/heads/master
|
src/olympia/users/tests/test_models.py
|
1
|
# -*- coding: utf-8 -*-
import datetime
import hashlib
from base64 import encodestring
from urlparse import urlparse
from django import forms
from django.conf import settings
from django.contrib.auth.hashers import (is_password_usable, check_password,
make_password, identify_hasher)
from django.core import mail
from django.utils import encoding, translation
from mock import patch
from nose.tools import eq_
from olympia import amo
from olympia.amo.tests import TestCase
from olympia.access.models import Group, GroupUser
from olympia.addons.models import Addon, AddonUser
from olympia.amo.signals import _connect, _disconnect
from olympia.bandwagon.models import Collection, CollectionWatcher
from olympia.reviews.models import Review
from olympia.translations.models import Translation
from olympia.users.models import (
BlacklistedEmailDomain, BlacklistedPassword,
BlacklistedName, get_hexdigest, UserEmailField, UserProfile)
from olympia.users.utils import find_users
class TestUserProfile(TestCase):
fixtures = ('base/addon_3615', 'base/user_2519', 'base/user_4043307',
'users/test_backends')
def test_anonymize(self):
u = UserProfile.objects.get(id='4043307')
eq_(u.email, 'jbalogh@mozilla.com')
u.anonymize()
x = UserProfile.objects.get(id='4043307')
eq_(x.email, None)
def test_email_confirmation_code(self):
u = UserProfile.objects.get(id='4043307')
u.confirmationcode = 'blah'
u.email_confirmation_code()
eq_(len(mail.outbox), 1)
eq_(mail.outbox[0].subject, 'Please confirm your email address')
assert mail.outbox[0].body.find('%s/confirm/%s' %
(u.id, u.confirmationcode)) > 0
@patch.object(settings, 'SEND_REAL_EMAIL', False)
def test_email_confirmation_code_even_with_fake_email(self):
u = UserProfile.objects.get(id='4043307')
u.confirmationcode = 'blah'
u.email_confirmation_code()
eq_(len(mail.outbox), 1)
eq_(mail.outbox[0].subject, 'Please confirm your email address')
def test_welcome_name(self):
u1 = UserProfile(username='sc')
u2 = UserProfile(username='sc', display_name="Sarah Connor")
u3 = UserProfile()
eq_(u1.welcome_name, 'sc')
eq_(u2.welcome_name, 'Sarah Connor')
eq_(u3.welcome_name, '')
def test_welcome_name_anonymous(self):
user = UserProfile(
username='anonymous-bb4f3cbd422e504080e32f2d9bbfcee0')
assert user.welcome_name == 'Anonymous user bb4f3c'
def test_welcome_name_anonymous_with_display(self):
user = UserProfile(display_name='John Connor')
user.anonymize_username()
assert user.welcome_name == 'John Connor'
def test_has_anonymous_username_no_names(self):
user = UserProfile(display_name=None)
user.anonymize_username()
assert user.has_anonymous_username()
def test_has_anonymous_username_username_set(self):
user = UserProfile(username='bob', display_name=None)
assert not user.has_anonymous_username()
def test_has_anonymous_username_display_name_set(self):
user = UserProfile(display_name='Bob Bobbertson')
user.anonymize_username()
assert user.has_anonymous_username()
def test_has_anonymous_username_both_names_set(self):
user = UserProfile(username='bob', display_name='Bob Bobbertson')
assert not user.has_anonymous_username()
def test_has_anonymous_display_name_no_names(self):
user = UserProfile(display_name=None)
user.anonymize_username()
assert user.has_anonymous_display_name()
def test_has_anonymous_display_name_username_set(self):
user = UserProfile(username='bob', display_name=None)
assert not user.has_anonymous_display_name()
def test_has_anonymous_display_name_display_name_set(self):
user = UserProfile(display_name='Bob Bobbertson')
user.anonymize_username()
assert not user.has_anonymous_display_name()
def test_has_anonymous_display_name_both_names_set(self):
user = UserProfile(username='bob', display_name='Bob Bobbertson')
assert not user.has_anonymous_display_name()
def test_add_admin_powers(self):
u = UserProfile.objects.get(username='jbalogh')
assert not u.is_staff
assert not u.is_superuser
GroupUser.objects.create(group=Group.objects.get(name='Admins'),
user=u)
assert u.is_staff
assert u.is_superuser
def test_dont_add_admin_powers(self):
Group.objects.create(name='API', rules='API.Users:*')
u = UserProfile.objects.get(username='jbalogh')
GroupUser.objects.create(group=Group.objects.get(name='API'),
user=u)
assert not u.is_staff
assert not u.is_superuser
def test_remove_admin_powers(self):
Group.objects.create(name='Admins', rules='*:*')
u = UserProfile.objects.get(username='jbalogh')
g = GroupUser.objects.create(
group=Group.objects.filter(name='Admins')[0], user=u)
g.delete()
assert not u.is_staff
assert not u.is_superuser
def test_picture_url(self):
"""
Test for a preview URL if image is set, or default image otherwise.
"""
u = UserProfile(id=1234, picture_type='image/png',
modified=datetime.date.today())
u.picture_url.index('/userpics/0/1/1234.png?modified=')
u = UserProfile(id=1234567890, picture_type='image/png',
modified=datetime.date.today())
u.picture_url.index('/userpics/1234/1234567/1234567890.png?modified=')
u = UserProfile(id=1234, picture_type=None)
assert u.picture_url.endswith('/anon_user.png')
def test_review_replies(self):
"""
Make sure that developer replies are not returned as if they were
original reviews.
"""
addon = Addon.objects.get(id=3615)
u = UserProfile.objects.get(pk=2519)
version = addon.get_version()
new_review = Review(version=version, user=u, rating=2, body='hello',
addon=addon)
new_review.save()
new_reply = Review(version=version, user=u, reply_to=new_review,
addon=addon, body='my reply')
new_reply.save()
review_list = [r.pk for r in u.reviews]
eq_(len(review_list), 1)
assert new_review.pk in review_list, (
'Original review must show up in review list.')
assert new_reply.pk not in review_list, (
'Developer reply must not show up in review list.')
def test_addons_listed(self):
"""Make sure we're returning distinct add-ons."""
AddonUser.objects.create(addon_id=3615, user_id=2519, listed=True)
u = UserProfile.objects.get(id=2519)
addons = u.addons_listed.values_list('id', flat=True)
eq_(sorted(addons), [3615])
def test_addons_not_listed(self):
"""Make sure user is not listed when another is."""
AddonUser.objects.create(addon_id=3615, user_id=2519, listed=False)
AddonUser.objects.create(addon_id=3615, user_id=4043307, listed=True)
u = UserProfile.objects.get(id=2519)
addons = u.addons_listed.values_list('id', flat=True)
assert 3615 not in addons
def test_my_addons(self):
"""Test helper method to get N addons."""
addon1 = Addon.objects.create(name='test-1', type=amo.ADDON_EXTENSION)
AddonUser.objects.create(addon_id=addon1.id, user_id=2519, listed=True)
addon2 = Addon.objects.create(name='test-2', type=amo.ADDON_EXTENSION)
AddonUser.objects.create(addon_id=addon2.id, user_id=2519, listed=True)
addons = UserProfile.objects.get(id=2519).my_addons()
eq_(sorted(a.name for a in addons), [addon1.name, addon2.name])
def test_my_addons_with_unlisted_addons(self):
"""Test helper method can return unlisted addons."""
addon1 = Addon.objects.create(name='test-1', type=amo.ADDON_EXTENSION)
AddonUser.objects.create(addon_id=addon1.id, user_id=2519, listed=True)
addon2 = Addon.objects.create(name='test-2', type=amo.ADDON_EXTENSION,
is_listed=False)
AddonUser.objects.create(addon_id=addon2.id, user_id=2519, listed=True)
addons = UserProfile.objects.get(id=2519).my_addons(with_unlisted=True)
eq_(sorted(a.name for a in addons), [addon1.name, addon2.name])
def test_mobile_collection(self):
u = UserProfile.objects.get(id='4043307')
assert not Collection.objects.filter(author=u)
c = u.mobile_collection()
eq_(c.type, amo.COLLECTION_MOBILE)
eq_(c.slug, 'mobile')
def test_favorites_collection(self):
u = UserProfile.objects.get(id='4043307')
assert not Collection.objects.filter(author=u)
c = u.favorites_collection()
eq_(c.type, amo.COLLECTION_FAVORITES)
eq_(c.slug, 'favorites')
def test_get_url_path(self):
eq_(UserProfile(username='yolo').get_url_path(),
'/en-US/firefox/user/yolo/')
eq_(UserProfile(username='yolo', id=1).get_url_path(),
'/en-US/firefox/user/yolo/')
eq_(UserProfile(id=1).get_url_path(),
'/en-US/firefox/user/1/')
eq_(UserProfile(username='<yolo>', id=1).get_url_path(),
'/en-US/firefox/user/1/')
@patch.object(settings, 'LANGUAGE_CODE', 'en-US')
def test_activate_locale(self):
eq_(translation.get_language(), 'en-us')
with UserProfile(username='yolo').activate_lang():
eq_(translation.get_language(), 'en-us')
with UserProfile(username='yolo', lang='fr').activate_lang():
eq_(translation.get_language(), 'fr')
def test_remove_locale(self):
u = UserProfile.objects.create()
u.bio = {'en-US': 'my bio', 'fr': 'ma bio'}
u.save()
u.remove_locale('fr')
qs = (Translation.objects.filter(localized_string__isnull=False)
.values_list('locale', flat=True))
eq_(sorted(qs.filter(id=u.bio_id)), ['en-US'])
def test_get_fallback(self):
"""Return the translation for the locale fallback."""
user = UserProfile.objects.create(
lang='fr', bio={'en-US': 'my bio', 'fr': 'ma bio'})
self.trans_eq(user.bio, 'my bio', 'en-US') # Uses current locale.
with self.activate(locale='de'):
user = UserProfile.objects.get(pk=user.pk) # Reload.
# Uses the default fallback.
self.trans_eq(user.bio, 'ma bio', 'fr')
def test_mobile_addons(self):
user = UserProfile.objects.get(id='4043307')
addon1 = Addon.objects.create(name='test-1', type=amo.ADDON_EXTENSION)
addon2 = Addon.objects.create(name='test-2', type=amo.ADDON_EXTENSION)
mobile_collection = user.mobile_collection()
mobile_collection.add_addon(addon1)
other_collection = Collection.objects.create(name='other')
other_collection.add_addon(addon2)
assert user.mobile_addons.count() == 1
assert user.mobile_addons[0] == addon1.pk
def test_favorite_addons(self):
user = UserProfile.objects.get(id='4043307')
addon1 = Addon.objects.create(name='test-1', type=amo.ADDON_EXTENSION)
addon2 = Addon.objects.create(name='test-2', type=amo.ADDON_EXTENSION)
favorites_collection = user.favorites_collection()
favorites_collection.add_addon(addon1)
other_collection = Collection.objects.create(name='other')
other_collection.add_addon(addon2)
assert user.favorite_addons.count() == 1
assert user.favorite_addons[0] == addon1.pk
def test_watching(self):
user = UserProfile.objects.get(id='4043307')
watched_collection1 = Collection.objects.create(name='watched-1')
watched_collection2 = Collection.objects.create(name='watched-2')
Collection.objects.create(name='other')
CollectionWatcher.objects.create(user=user,
collection=watched_collection1)
CollectionWatcher.objects.create(user=user,
collection=watched_collection2)
assert len(user.watching) == 2
assert tuple(user.watching) == (watched_collection1.pk,
watched_collection2.pk)
def test_fxa_migrated_not_migrated(self):
user = UserProfile(fxa_id=None)
assert user.fxa_migrated() is False
def test_fxa_migrated_not_migrated_empty_string(self):
user = UserProfile(fxa_id='')
assert user.fxa_migrated() is False
def test_fxa_migrated_migrated(self):
user = UserProfile(fxa_id='db27f8')
assert user.fxa_migrated() is True
class TestPasswords(TestCase):
utf = u'\u0627\u0644\u062a\u0637\u0628'
bytes_ = '\xb1\x98og\x88\x87\x08q'
def test_invalid_old_password(self):
u = UserProfile(password=self.utf)
assert u.check_password(self.utf) is False
assert u.has_usable_password() is True
def test_invalid_new_password(self):
u = UserProfile()
u.set_password(self.utf)
assert u.check_password('wrong') is False
assert u.has_usable_password() is True
def test_valid_old_password(self):
hsh = hashlib.md5(encoding.smart_str(self.utf)).hexdigest()
u = UserProfile(password=hsh)
assert u.check_password(self.utf) is True
# Make sure we updated the old password.
algo, salt, hsh = u.password.split('$')
eq_(algo, 'sha512')
eq_(hsh, get_hexdigest(algo, salt, self.utf))
assert u.has_usable_password() is True
def test_valid_new_password(self):
u = UserProfile()
u.set_password(self.utf)
assert u.check_password(self.utf) is True
assert u.has_usable_password() is True
def test_persona_sha512_md5(self):
md5 = hashlib.md5('password').hexdigest()
hsh = hashlib.sha512(self.bytes_ + md5).hexdigest()
u = UserProfile(password='sha512+MD5$%s$%s' %
(self.bytes_, hsh))
assert u.check_password('password') is True
assert u.has_usable_password() is True
def test_persona_sha512_base64(self):
hsh = hashlib.sha512(self.bytes_ + 'password').hexdigest()
u = UserProfile(password='sha512+base64$%s$%s' %
(encodestring(self.bytes_), hsh))
assert u.check_password('password') is True
assert u.has_usable_password() is True
def test_persona_sha512_base64_maybe_utf8(self):
hsh = hashlib.sha512(self.bytes_ + self.utf.encode('utf8')).hexdigest()
u = UserProfile(password='sha512+base64$%s$%s' %
(encodestring(self.bytes_), hsh))
assert u.check_password(self.utf) is True
assert u.has_usable_password() is True
def test_persona_sha512_base64_maybe_latin1(self):
passwd = u'fo\xf3'
hsh = hashlib.sha512(self.bytes_ + passwd.encode('latin1')).hexdigest()
u = UserProfile(password='sha512+base64$%s$%s' %
(encodestring(self.bytes_), hsh))
assert u.check_password(passwd) is True
assert u.has_usable_password() is True
def test_persona_sha512_base64_maybe_not_latin1(self):
passwd = u'fo\xf3'
hsh = hashlib.sha512(self.bytes_ + passwd.encode('latin1')).hexdigest()
u = UserProfile(password='sha512+base64$%s$%s' %
(encodestring(self.bytes_), hsh))
assert u.check_password(self.utf) is False
assert u.has_usable_password() is True
def test_persona_sha512_md5_base64(self):
md5 = hashlib.md5('password').hexdigest()
hsh = hashlib.sha512(self.bytes_ + md5).hexdigest()
u = UserProfile(password='sha512+MD5+base64$%s$%s' %
(encodestring(self.bytes_), hsh))
assert u.check_password('password') is True
assert u.has_usable_password() is True
def test_sha512(self):
encoded = make_password('lètmein', 'seasalt', 'sha512')
self.assertEqual(
encoded,
'sha512$seasalt$16bf4502ffdfce9551b90319d06674e6faa3e174144123d'
'392d94470ebf0aa77096b871f9e84f60ed2bac2f10f755368b068e52547e04'
'35fef8b4f6ca237d7d8')
self.assertTrue(is_password_usable(encoded))
self.assertTrue(check_password('lètmein', encoded))
self.assertFalse(check_password('lètmeinz', encoded))
self.assertEqual(identify_hasher(encoded).algorithm, "sha512")
# Blank passwords
blank_encoded = make_password('', 'seasalt', 'sha512')
self.assertTrue(blank_encoded.startswith('sha512$'))
self.assertTrue(is_password_usable(blank_encoded))
self.assertTrue(check_password('', blank_encoded))
self.assertFalse(check_password(' ', blank_encoded))
def test_empty_password(self):
profile = UserProfile(password=None)
assert profile.has_usable_password() is False
assert not check_password(None, profile.password)
assert not profile.check_password(None)
profile = UserProfile(password='')
assert profile.has_usable_password() is False
assert not check_password('', profile.password)
assert not profile.check_password('')
class TestBlacklistedName(TestCase):
fixtures = ['users/test_backends']
def test_blocked(self):
eq_(BlacklistedName.blocked('IE6Fan'), True)
eq_(BlacklistedName.blocked('IE6fantastic'), True)
eq_(BlacklistedName.blocked('IE6'), False)
eq_(BlacklistedName.blocked('testo'), False)
class TestBlacklistedEmailDomain(TestCase):
fixtures = ['users/test_backends']
def test_blocked(self):
eq_(BlacklistedEmailDomain.blocked('mailinator.com'), True)
assert not BlacklistedEmailDomain.blocked('mozilla.com')
class TestFlushURLs(TestCase):
fixtures = ['base/user_2519']
def setUp(self):
super(TestFlushURLs, self).setUp()
_connect()
def tearDown(self):
_disconnect()
super(TestFlushURLs, self).tearDown()
@patch('olympia.amo.tasks.flush_front_end_cache_urls.apply_async')
def test_flush(self, flush):
user = UserProfile.objects.get(pk=2519)
user.save()
assert user.picture_url in flush.call_args[1]['args'][0]
assert urlparse(user.picture_url).query.find('modified') > -1
class TestUserEmailField(TestCase):
fixtures = ['base/user_2519']
def test_success(self):
user = UserProfile.objects.get(pk=2519)
eq_(UserEmailField().clean(user.email), user)
def test_failure(self):
with self.assertRaises(forms.ValidationError):
UserEmailField().clean('xxx')
def test_empty_email(self):
UserProfile.objects.create(email='')
with self.assertRaises(forms.ValidationError) as e:
UserEmailField().clean('')
eq_(e.exception.messages[0], 'This field is required.')
class TestBlacklistedPassword(TestCase):
def test_blacklisted(self):
BlacklistedPassword.objects.create(password='password')
assert BlacklistedPassword.blocked('password')
assert not BlacklistedPassword.blocked('passw0rd')
class TestUserHistory(TestCase):
def test_user_history(self):
user = UserProfile.objects.create(email='foo@bar.com')
eq_(user.history.count(), 0)
user.update(email='foopy@barby.com')
eq_(user.history.count(), 1)
user.update(email='foopy@barby.com')
eq_(user.history.count(), 1)
def test_user_find(self):
user = UserProfile.objects.create(email='luke@jedi.com')
# Checks that you can have multiple copies of the same email and
# that we only get distinct results back.
user.update(email='dark@sith.com')
user.update(email='luke@jedi.com')
user.update(email='dark@sith.com')
eq_([user], list(find_users('luke@jedi.com')))
eq_([user], list(find_users('dark@sith.com')))
def test_user_find_multiple(self):
user_1 = UserProfile.objects.create(username='user_1',
email='luke@jedi.com')
user_1.update(email='dark@sith.com')
user_2 = UserProfile.objects.create(username='user_2',
email='luke@jedi.com')
eq_([user_1, user_2], list(find_users('luke@jedi.com')))
class TestUserManager(TestCase):
fixtures = ('users/test_backends', )
def test_create_user(self):
user = UserProfile.objects.create_user("test", "test@test.com", 'xxx')
assert user.pk is not None
def test_create_superuser(self):
user = UserProfile.objects.create_superuser(
"test",
"test@test.com",
'xxx'
)
assert user.pk is not None
        assert Group.objects.get(name="Admins") in user.groups.all()
assert user.is_staff
assert user.is_superuser
|
jrversteegh/softsailor
|
refs/heads/master
|
deps/numpy-1.6.1/numpy/doc/byteswapping.py
|
95
|
'''
=============================
Byteswapping and byte order
=============================
Introduction to byte ordering and ndarrays
==========================================
The ``ndarray`` is an object that provides a Python array interface to data
in memory.
It often happens that the memory that you want to view with an array is
not of the same byte ordering as the computer on which you are running
Python.
For example, I might be working on a computer with a little-endian CPU -
such as an Intel Pentium, but I have loaded some data from a file
written by a computer that is big-endian. Let's say I have loaded 4
bytes from a file written by a Sun (big-endian) computer. I know that
these 4 bytes represent two 16-bit integers. On a big-endian machine, a
two-byte integer is stored with the Most Significant Byte (MSB) first,
and then the Least Significant Byte (LSB). Thus the bytes are, in memory order:
#. MSB integer 1
#. LSB integer 1
#. MSB integer 2
#. LSB integer 2
Let's say the two integers were in fact 1 and 770. Because 770 = 256 *
3 + 2, the 4 bytes in memory would contain respectively: 0, 1, 3, 2.
The bytes I have loaded from the file would have these contents:
>>> big_end_str = chr(0) + chr(1) + chr(3) + chr(2)
>>> big_end_str
'\\x00\\x01\\x03\\x02'
We might want to use an ``ndarray`` to access these integers. In that
case, we can create an array around this memory, and tell numpy that
there are two integers, and that they are 16 bit and big-endian:
>>> import numpy as np
>>> big_end_arr = np.ndarray(shape=(2,),dtype='>i2', buffer=big_end_str)
>>> big_end_arr[0]
1
>>> big_end_arr[1]
770
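As a cross-check, the standard library ``struct`` module decodes the same
bytes the same way ('>2h' meaning two big-endian signed 2-byte integers):
>>> import struct
>>> struct.unpack('>2h', big_end_str)
(1, 770)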
Note the array ``dtype`` above of ``>i2``. The ``>`` means 'big-endian'
(``<`` is little-endian) and ``i2`` means 'signed 2-byte integer'. For
example, if our data represented a single unsigned 4-byte little-endian
integer, the dtype string would be ``<u4``.
In fact, why don't we try that?
>>> little_end_u4 = np.ndarray(shape=(1,),dtype='<u4', buffer=big_end_str)
>>> little_end_u4[0] == 1 * 256**1 + 3 * 256**2 + 2 * 256**3
True
Returning to our ``big_end_arr`` - in this case our underlying data is
big-endian (data endianness) and we've set the dtype to match (the dtype
is also big-endian). However, sometimes you need to flip these around.
Changing byte ordering
======================
As you can imagine from the introduction, there are two ways you can
affect the relationship between the byte ordering of the array and the
underlying memory it is looking at:
* Change the byte-ordering information in the array dtype so that it
  interprets the underlying data as being in a different byte order.
This is the role of ``arr.newbyteorder()``
* Change the byte-ordering of the underlying data, leaving the dtype
interpretation as it was. This is what ``arr.byteswap()`` does.
The common situations in which you need to change byte ordering are:
#. Your data and dtype endianness don't match, and you want to change
   the dtype so that it matches the data.
#. Your data and dtype endianness don't match, and you want to swap the
   data so that they match the dtype.
#. Your data and dtype endianness match, but you want the data swapped
   and the dtype to reflect this.
Data and dtype endianness don't match, change dtype to match data
-----------------------------------------------------------------
We make something where they don't match:
>>> wrong_end_dtype_arr = np.ndarray(shape=(2,),dtype='<i2', buffer=big_end_str)
>>> wrong_end_dtype_arr[0]
256
The obvious fix for this situation is to change the dtype so it gives
the correct endianness:
>>> fixed_end_dtype_arr = wrong_end_dtype_arr.newbyteorder()
>>> fixed_end_dtype_arr[0]
1
Note that the array has not changed in memory:
>>> fixed_end_dtype_arr.tostring() == big_end_str
True
Data and dtype endianness don't match, change data to match dtype
-----------------------------------------------------------------
You might want to do this if you need the data in memory to be a certain
ordering. For example you might be writing the memory out to a file
that needs a certain byte ordering.
>>> fixed_end_mem_arr = wrong_end_dtype_arr.byteswap()
>>> fixed_end_mem_arr[0]
1
Now the array *has* changed in memory:
>>> fixed_end_mem_arr.tostring() == big_end_str
False
Data and dtype endianness match, swap data and dtype
----------------------------------------------------
You may have a correctly specified array dtype, but you need the array
to have the opposite byte order in memory, and you want the dtype to
match so the array values make sense. In this case you just do both of
the previous operations:
>>> swapped_end_arr = big_end_arr.byteswap().newbyteorder()
>>> swapped_end_arr[0]
1
>>> swapped_end_arr.tostring() == big_end_str
False
'''
|
gasbasd/tgapp-stroller2
|
refs/heads/master
|
sample-ecommerce/sample_ecommerce/__init__.py
|
1
|
# -*- coding: utf-8 -*-
"""The sample-ecommerce package"""
|
gsi-upm/SmartSim
|
refs/heads/master
|
smartbody/data/behaviorsets/BehaviorSetCommon.py
|
1
|
def createRetargetInstance(srcSkelName, tgtSkelName):
endJoints = StringVec();
endJoints.append('l_ankle')
endJoints.append('l_forefoot')
endJoints.append('l_toe')
endJoints.append('l_wrist')
endJoints.append('r_ankle')
endJoints.append('r_forefoot')
endJoints.append('r_toe')
endJoints.append('r_wrist')
relativeJoints = StringVec();
relativeJoints.append('spine1')
relativeJoints.append('spine2')
relativeJoints.append('spine3')
relativeJoints.append('spine4')
relativeJoints.append('spine5')
relativeJoints.append('r_sternoclavicular')
relativeJoints.append('l_sternoclavicular')
relativeJoints.append('r_acromioclavicular')
relativeJoints.append('l_acromioclavicular')
# replace retarget each animation with just a simple retarget instance
retargetManager = scene.getRetargetManager()
retarget = retargetManager.getRetarget(srcSkelName,tgtSkelName)
if retarget == None:
retarget = retargetManager.createRetarget(srcSkelName,tgtSkelName)
retarget.initRetarget(endJoints,relativeJoints)
def retargetMotion(motionName, srcSkelName, tgtSkelName, outDir) :
testMotion = scene.getMotion(motionName)
if testMotion is None:
return
outMotionName = tgtSkelName + motionName
existMotion = scene.getMotion(outMotionName)
if existMotion != None : # do nothing if the retargeted motion is already there
return
tgtSkel = scene.getSkeleton(tgtSkelName)
if (tgtSkel == None) :
return
if not os.path.exists(outDir):
os.makedirs(outDir)
offsetJoints = VecMap();
endJoints = StringVec();
#endJoints.append('l_ankle')
endJoints.append('l_forefoot')
endJoints.append('l_toe')
endJoints.append('l_acromioclavicular')
#endJoints.append('r_ankle')
endJoints.append('r_forefoot')
endJoints.append('r_toe')
endJoints.append('r_acromioclavicular')
relativeJoints = StringVec();
relativeJoints.append('spine1')
relativeJoints.append('spine2')
relativeJoints.append('spine3')
relativeJoints.append('spine4')
relativeJoints.append('spine5')
endJoints.append('l_sternoclavicular')
endJoints.append('r_sternoclavicular')
effectorJoints = StringVec();
if tgtSkel.getJointByName('r_toe') != None:
effectorJoints.append('r_toe')
effectorJoints.append('l_toe')
else:
effectorJoints.append('r_ankle')
effectorJoints.append('l_ankle')
if tgtSkel.getJointByName('r_forefoot') != None:
effectorJoints.append('r_forefoot')
effectorJoints.append('l_forefoot')
#effectorJoints.append('l_toe')
effectorRoots = StringVec();
effectorRoots.append('r_hip')
effectorRoots.append('l_hip')
effectorRoots.append('r_hip')
effectorRoots.append('l_hip')
print 'Retarget motion = ' + motionName;
outMotion = testMotion.retarget(outMotionName,srcSkelName,tgtSkelName, endJoints, relativeJoints, offsetJoints);
cleanMotion = testMotion.constrain(outMotionName, srcSkelName, tgtSkelName, outMotionName, effectorJoints, effectorRoots);
saveCommand = 'animation ' + outMotionName + ' save ' + outDir + outMotionName + '.skm';
print 'Save command = ' + saveCommand;
scene.command(saveCommand)
|
imco/nmx
|
refs/heads/nmx
|
src/csvutils.py
|
2
|
def escapeQuotes(string):
return string.replace('"','""');
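# A minimal usage sketch: embedded double quotes are doubled, matching the
# CSV quoting convention.
if __name__ == '__main__':
    assert escapeQuotes('say "hi"') == 'say ""hi""'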
|
curiousguy13/shogun
|
refs/heads/develop
|
applications/easysvm/esvm/mldata_arff.py
|
29
|
#!/usr/bin/env python
"""Classes to encapsulate the idea of a dataset in machine learning,
including file access.
This file contains the ARFF class for people who have arff installed.
"""
#############################################################################################
# #
# This program is free software; you can redistribute it and/or modify #
# it under the terms of the GNU General Public License as published by #
# the Free Software Foundation; either version 3 of the License, or #
# (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with this program; if not, see http://www.gnu.org/licenses #
# or write to the Free Software Foundation, Inc., 51 Franklin Street, #
# Fifth Floor, Boston, MA 02110-1301 USA #
# #
#############################################################################################
try:
import arff
have_arff = True
except ImportError:
have_arff = False
import sys
from numpy import array, concatenate
import csv
from esvm.mldata import DatasetFileBase
class DatasetFileARFF(DatasetFileBase):
"""Attribute-Relation File Format file, uses module arff.
Labels are in the first column.
"""
def __init__(self,filename,extype,dataname='ARFFdata',comment=''):
"""Do the base class init, then add some arff specific metadata"""
if not have_arff:
print 'import arff failed, currently cannot support ARFF file format'
return
DatasetFileBase.__init__(self,filename,extype)
self.dataname = dataname
self.comment = comment
def readlines(self,idx=None):
"""Read from file and split data into examples and labels"""
fp = open(self.filename,'r')
(dataname,issparse,alist,data) = arff.arffread(fp)
fp.close()
self.dataname = dataname
#if (alist[0][0]!='label'):
# sys.stderr.write('First column of ARFF file needs to be the label\n')
# sys.exit(-1)
if idx is None:
idx = range(len(data))
labels = [data[ix][0] for ix in idx]
labels = array(labels)
if self.extype == 'vec':
examples = [data[ix][1:] for ix in idx]
examples = array(examples).T
print '%d features, %d examples' % examples.shape
elif self.extype == 'seq':
examples = [data[ix][1] for ix in idx]
print 'sequence length = %d, %d examples' % (len(examples[0]),len(examples))
elif self.extype == 'mseq':
examples = [data[ix][1:] for ix in idx]
printstr = 'sequence lengths = '
for seq in examples[0]:
printstr += '%d, ' % len(seq)
printstr += '%d examples' % len(examples)
print printstr
return (examples, labels)
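    # Illustrative row layout, as consumed above (values hypothetical): a
    # 'vec' row looks like [label, att0, att1, ...], a 'seq' row like
    # [label, sequence], and an 'mseq' row like [label, seq1, seq2, ...].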
def writelines(self,examples,labels,idx=None):
"""Merge the examples and labels and write to file"""
alist = [('label',1,[])]
if idx is not None:
examples = examples[idx]
labels = labels[idx]
if self.extype == 'vec':
data = list(concatenate((labels.reshape(len(labels),1),examples.T),axis=1))
for ix in xrange(examples.shape[0]):
attname = 'att%d' % ix
alist.append((attname,1,[]))
elif self.extype == 'seq':
data = zip(labels,examples)
alist.append(('sequence',0,[]))
elif self.extype == 'mseq':
data = []
for ix,curlab in enumerate(labels):
data.append([curlab]+list(examples[ix]))
alist.append(('upstream sequence',0,[]))
alist.append(('downstream sequence',0,[]))
fp = open(self.filename,'w')
arff.arffwrite(fp,alist,data,name=self.dataname,comment=self.comment)
fp.close()
|
mountaindust/Parasitoids
|
refs/heads/master
|
Bayes_Plot.py
|
1
|
#! /usr/bin/env python3
'''
This module is for plotting the posterior distributions from Bayes_Run.py
Author: Christopher Strickland
Email: wcstrick@live.unc.edu
'''
import sys, os
import warnings
from collections import OrderedDict
import numpy as np
import pymc as pm
import matplotlib.pyplot as plt
import matplotlib.cm as cm
plt.rcParams['image.cmap'] = 'viridis'
cmap = cm.get_cmap('Accent')
plt.ion()
if __name__ != "__main__":
database_name = 'mcmcdb.h5'
db = pm.database.hdf5.load(database_name)
else:
db = None
def plot_traces(db=db,path='./diagnostics',format='png'):
    '''Plot the traces of the unknown variables to check for convergence.
    Also run Geweke convergence diagnostics and plot the scores.'''
lw = 1 #line width
# Specify variables to include in each figure and subplot
# Each sublist is a figure. Each OrderedDict is a subplot with the
# key as the trace name and the val as the LaTeX string name.
var_names = []
var_names.append([OrderedDict([('f_a1', r'$f:a_1$'), ('f_a2', r'$f:a_2$')])])
var_names[0].append(OrderedDict([('f_b1', r'$f:b_1$'), ('f_b2', r'$f:b_2$'),
('g_aw', r'$g:a_w$'), ('g_bw', r'$g:b_w$')]))
var_names[0].append(OrderedDict([('sig_x', r'$\sigma_x$'), ('sig_y', r'$\sigma_y$'),
('sig_xl', r'local $\sigma_x$'),
('sig_yl', r'local $\sigma_y$')]))
var_names[0].append(OrderedDict([('corr', r'$\rho$'), ('corr_l', r'local $\rho$'),
('lam', r'$\lambda$')]))
var_names.append([OrderedDict([('xi', r'$\xi$'), ('em_obs_prob', r'emerg obs prob'),
('grid_obs_prob', r'grid obs prob')])])
sent_names = []
for name in db.trace_names[0]:
if name[:13] == 'sent_obs_prob':
id = name[-1]
sent_names.append((name, id))
var_names[1].append(OrderedDict(sent_names))
plt.figure()
plt.hold(True)
f_clrs = [0.3, 0.7]
g_clrs = [0.1, 0.9]
sig_clrs = [0.01, 0.99, 0.25, 0.75]
corr_lam_clrs = [0.01, 0.25, 0.5]
probs_clrs = [0.01, 0.5, 0.99]
clrs_list = [f_clrs, f_clrs+g_clrs, sig_clrs, corr_lam_clrs, probs_clrs]
plt.title("Traces of unknown model parameters")
for ii in range(len(var_names[0])):
plt.subplot(len(var_names[0]), 1, ii+1)
cnt = 0
for name, label in var_names[0][ii].items():
plt.plot(db.trace(name, chain=None)[:], label="trace of "+label,
c=cmap(clrs_list[ii][cnt]), lw=lw)
cnt += 1
leg = plt.legend(loc="upper left")
leg.get_frame().set_alpha(0.7)
with warnings.catch_warnings():
warnings.simplefilter("ignore")
plt.draw()
plt.pause(0.0001)
plt.figure()
plt.hold(True)
plt.subplot(211)
plt.title("Traces of unknown Bayesian model parameters")
# xi, em_obs_prob, grid_obs_prob
cnt = 0
for name, label in var_names[1][0].items():
plt.plot(db.trace(name, chain=None)[:], label="trace of "+label,
c=cmap(probs_clrs[cnt]), lw=lw)
cnt += 1
leg = plt.legend(loc="upper right")
leg.get_frame().set_alpha(0.7)
# sent_obs_probs
plt.subplot(212)
cnt = 0
for name, label in var_names[1][1].items():
plt.plot(db.trace(name, chain=None)[:], label="trace of prob field "+label,
c=cmap(.10+cnt*.16), lw=lw)
cnt += 1
leg = plt.legend(loc="upper right")
leg.get_frame().set_alpha(0.7)
plt.draw()
##### Convergence tests #####
# Geweke
f, axarr = plt.subplots(len(var_names[0]), sharex=True)
axarr[0].set_title('Geweke Plots')
axarr[0].hold(True)
for ii in range(len(var_names[0])):
cnt = 0
ymax = 0
ymin = 0
for name, label in var_names[0][ii].items():
scores = pm.geweke(db.trace(name, chain=None)[:])
x, y = np.transpose(scores)
axarr[ii].scatter(x.tolist(), y.tolist(), label=label,
c=cmap(clrs_list[ii][cnt]))
ymax = max(ymax, np.max(y))
ymin = min(ymin, np.min(y))
cnt += 1
# Legend
leg = axarr[ii].legend(loc="upper left",prop={'size':9})
leg.get_frame().set_alpha(0.7)
# Labels
axarr[ii].set_ylabel('Z-score')
# Plot lines at +/- 2 std from zero
axarr[ii].plot((np.min(x), np.max(x)), (2, 2), '--')
axarr[ii].plot((np.min(x), np.max(x)), (-2, -2), '--')
# Plot bounds
axarr[ii].set_ylim(min(-2.5, ymin), max(2.5, ymax))
axarr[ii].set_xlim(0, np.max(x))
axarr[-1].set_xlabel('First iteration')
plt.hold(False)
if not os.path.exists(path):
os.mkdir(path)
if not path.endswith('/'):
path += '/'
plt.savefig("{}.{}".format(path+'_Geweke',format),dpi=200)
plt.draw()
def plot_f_g(db=db, start=0, stop=None):
'''Plot the posterior distributions for the f and g model functions.
Arguments:
db: database object
        start: where to begin in the trace (with all chains taken together)
        stop: where to stop in the trace (None means read to the end)
    '''
plt.figure()
ax = plt.subplot(311) # f-a_1, f-a_2
# Get color cycler
color_cycle = ax._get_lines.prop_cycler
plt.title(r"Posterior distributions of $f$ and $g$ model functions")
plt.xlim(0,24)
plt.hold(True)
clrdict = next(color_cycle)
plt.hist(db.trace("f_a1",chain=None)[start:stop], histtype='stepfilled', bins=27,
alpha=0.85, label=r"posterior of position param $f_a1$",
color=clrdict['color'], normed=True)
clrdict = next(color_cycle)
plt.hist(db.trace("f_a2",chain=None)[start:stop], histtype='stepfilled', bins=27,
alpha=0.85, label=r"posterior of position param $f_a2$",
color=clrdict['color'], normed=True)
plt.hold(False)
leg = plt.legend(loc="upper center")
leg.get_frame().set_alpha(0.7)
ax = plt.subplot(312) #f-b_1, f-b_2
plt.xlim(1,15)
plt.hold(True)
clrdict = next(color_cycle)
plt.hist(db.trace("f_b1",chain=None)[start:stop], histtype='stepfilled', bins=25,
alpha=0.85, label=r"posterior of shape param $b_1$",
color=clrdict['color'], normed=True)
clrdict = next(color_cycle)
plt.hist(db.trace("f_b2",chain=None)[start:stop], histtype='stepfilled', bins=25,
alpha=0.85, label=r"posterior of shape param $b_2$",
color=clrdict['color'], normed=True)
plt.hold(False)
leg = plt.legend(loc="upper right")
leg.get_frame().set_alpha(0.7)
ax = plt.subplot(313) #g-a_w, g-b_w
plt.xlim(0,15)
# Get new color cycler
color_cycle = ax._get_lines.prop_cycler
plt.hold(True)
clrdict = next(color_cycle)
plt.hist(db.trace("g_aw",chain=None)[start:stop], histtype='stepfilled', bins=25,
alpha=0.85, label=r"posterior of position param $g_aw$",
color=clrdict['color'], normed=True)
unused = next(color_cycle)
clrdict = next(color_cycle)
plt.hist(db.trace("g_bw",chain=None)[start:stop], histtype='stepfilled', bins=25,
alpha=0.85, label=r"posterior of shape param $g_bw$",
color=clrdict['color'], normed=True)
plt.hold(False)
leg = plt.legend(loc="upper right")
leg.get_frame().set_alpha(0.7)
with warnings.catch_warnings():
warnings.simplefilter("ignore")
plt.draw()
plt.pause(0.0001)
def plot_sprd_vars(db=db,start=0,stop=None):
'''Plot posteriors of covariance variables for local/wind diffusion, and for
flight time.
Arguments:
db: database object
        start: where to begin in the trace (with all chains taken together)
        stop: where to stop in the trace (None means read to the end)
    '''
plt.figure()
ax = plt.subplot(411)
plt.title("Posterior distribs for diffusion covariance & flight time")
plt.hold(True)
plt.hist(db.trace("sig_x",chain=None)[start:stop], histtype='stepfilled',
bins=25, alpha=0.85, label=r"posterior of wind $\sigma_x$",
normed=True)
plt.hist(db.trace("sig_y",chain=None)[start:stop], histtype='stepfilled',
bins=25, alpha=0.85, label=r"posterior of wind $\sigma_y$",
normed=True)
plt.hold(False)
plt.xlim(0,300)
leg = plt.legend(loc="upper right")
leg.get_frame().set_alpha(0.7)
ax = plt.subplot(412)
plt.hold(True)
plt.hist(db.trace("sig_xl",chain=None)[start:stop], histtype='stepfilled',
bins=25, alpha=0.85, label=r"posterior of local $\sigma_x$",
normed=True)
plt.hist(db.trace("sig_yl",chain=None)[start:stop], histtype='stepfilled',
bins=25, alpha=0.85, label=r"posterior of local $\sigma_y$",
normed=True)
plt.hold(False)
plt.xlim(0,300)
leg = plt.legend(loc="upper right")
leg.get_frame().set_alpha(0.7)
ax = plt.subplot(413)
color_cycle = ax._get_lines.prop_cycler
# get some new colors
unused = next(color_cycle)
unused = next(color_cycle)
clrdict = next(color_cycle)
plt.hold(True)
plt.hist(db.trace("corr",chain=None)[start:stop], histtype='stepfilled',
bins=25, alpha=0.85, label=r"posterior of wind $\rho$",
color=clrdict['color'], normed=True)
clrdict = next(color_cycle)
plt.hist(db.trace("corr_l",chain=None)[start:stop], histtype='stepfilled',
bins=25, alpha=0.85, label=r"posterior of local $\rho$",
color=clrdict['color'], normed=True)
plt.hold(False)
plt.xlim(-1,1)
leg = plt.legend(loc="upper right")
leg.get_frame().set_alpha(0.7)
ax = plt.subplot(414)
color_cycle = ax._get_lines.prop_cycler
# get a new color
for ii in range(3):
unused = next(color_cycle)
clrdict = next(color_cycle)
# this is discrete data. need to bin it correctly
tr = db.trace("n_periods",chain=None)[start:stop]
plt.hold(True)
plt.hist(tr, bins=np.arange(tr.min(),tr.max()+2,1)-.5,
histtype='stepfilled', alpha=0.85,
label=r"posterior of avg flight time (min)",
color=clrdict['color'], normed=True)
plt.hold(False)
plt.xlim(0,80)
leg = plt.legend(loc="upper right")
leg.get_frame().set_alpha(0.7)
with warnings.catch_warnings():
warnings.simplefilter("ignore")
plt.draw()
plt.pause(0.0001)
def plot_sent_obs_probs(db=db,start=0,stop=None):
'''Plot posteriors for emergence observation probability in each
sentinel field.
Arguments:
db: database object
        start: where to begin in the trace (with all chains taken together)
        stop: where to stop in the trace (None means read to the end)
    '''
# Get sentinel field info
N_fields = 0
field_names = []
field_ids = []
for name in db.trace_names[0]:
if name[:13] == 'sent_obs_prob':
N_fields += 1
field_names.append(name)
field_ids.append(name[-1])
plt.figure()
for ii in range(N_fields):
ax = plt.subplot(N_fields,1,ii+1)
if ii == 0:
plt.title("Posterior distribs for sentinel field emerg obs probs")
plt.hist(db.trace(field_names[ii],chain=None)[start:stop],
histtype='stepfilled', bins=25, alpha=0.85,
label="field {}".format(field_ids[ii]),
normed=True)
leg = plt.legend(loc="upper right")
leg.get_frame().set_alpha(0.7)
with warnings.catch_warnings():
warnings.simplefilter("ignore")
plt.draw()
plt.pause(0.0001)
def plot_other(db=db,start=0,stop=None):
'''Plot posteriors for lambda, xi, grid_obs_prob, em_obs_prob and A_collected
Arguments:
db: database object
        start: where to begin in the trace (with all chains taken together)
        stop: where to stop in the trace (None means read to the end)
    '''
plt.figure()
ax = plt.subplot(411)
plt.title(r"Posteriors for $\lambda$, $\xi$, grid_obs_prob and em_obs_prob")
plt.hist(db.trace("lam",chain=None)[start:stop], histtype='stepfilled', bins=25,
alpha=0.85, label=r"posterior for $\lambda$", normed=True)
leg = plt.legend(loc="upper right")
leg.get_frame().set_alpha(0.7)
ax = plt.subplot(412)
plt.hist(db.trace("xi",chain=None)[start:stop], histtype='stepfilled', bins=25,
alpha=0.85, label=r"posterior for $\xi$", normed=True)
leg = plt.legend(loc="upper right")
leg.get_frame().set_alpha(0.7)
ax = plt.subplot(413)
plt.hold(True)
plt.hist(db.trace("grid_obs_prob",chain=None)[start:stop], histtype='stepfilled',
bins=25, alpha=0.85, label=r"posterior for grid_obs_prob",
normed=True)
plt.hist(db.trace("em_obs_prob",chain=None)[start:stop], histtype='stepfilled',
bins=25, alpha=0.85, label=r"posterior for em_obs_prob",
normed=True)
plt.hold(False)
leg = plt.legend(loc="upper right")
leg.get_frame().set_alpha(0.7)
ax = plt.subplot(414)
plt.hist(db.trace("A_collected",chain=None)[start:stop], histtype='stepfilled',
bins=25, alpha=0.85, label="posterior for A_collected", normed=True)
leg = plt.legend(loc="upper right")
leg.get_frame().set_alpha(0.7)
with warnings.catch_warnings():
warnings.simplefilter("ignore")
plt.draw()
plt.pause(0.0001)
if __name__ == "__main__":
nargs = len(sys.argv) - 1
if nargs == 0:
print("Please include the database name as an argument, optionally\n"+
"followed by the module to run (with or w/o start and stop point.")
elif nargs > 0:
database_name = sys.argv[1]
if database_name[-3:] != '.h5':
database_name += '.h5'
if os.path.isfile(database_name):
db = pm.database.hdf5.load(database_name)
else:
print("Invalid filename: {}".format(database_name))
nargs = 0
if nargs > 1:
'''Run the requested plot'''
        if nargs == 3:
            start = int(sys.argv[3])
            stop = None
elif nargs > 3:
start = int(sys.argv[3])
stop = int(sys.argv[4])
else:
start = 0
stop = None
if sys.argv[2] == 'plot_traces':
plot_traces(db)
elif sys.argv[2] == 'plot_f_g':
plot_f_g(db,start,stop)
elif sys.argv[2] == 'plot_sprd_vars':
plot_sprd_vars(db,start,stop)
elif sys.argv[2] == 'plot_sent_obs_probs':
plot_sent_obs_probs(db,start,stop)
elif sys.argv[2] == 'plot_other':
plot_other(db,start,stop)
else:
print('Method not found.')
input("Press Enter to finish...")
elif nargs == 1:
def get_args(strin):
args = strin[1:].strip().split()
if len(args) == 1:
args.append(None)
else:
args[1] = int(args[1])
args[0] = int(args[0])
return args
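        # Illustrative behaviour of get_args: "2 100 200" -> [100, 200],
        # while "2 100" -> [100, None] (stop defaults to None).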
while True:
'''Open an interactive menu'''
print("----------Plot MCMC Results----------")
print("(1) Plot traces")
print("(2) Plot f & g argument posteriors")
print("(3) Plot diffusion posteriors")
print("(4) Plot sentinel field posteriors")
print("(5) Plot others")
print("(6) Quit")
print("2-5 may be followed by a start number and a stop number,\n"+
"separted by a space.")
cmd = input(":")
try:
if cmd[0] == "1":
plot_traces(db)
elif cmd[0] == "2":
if cmd[1:].strip() == '':
plot_f_g(db)
else:
plot_f_g(db,*get_args(cmd))
elif cmd[0] == "3":
if cmd[1:].strip() == '':
plot_sprd_vars(db)
else:
plot_sprd_vars(db,*get_args(cmd))
elif cmd[0] == "4":
if cmd[1:].strip() == '':
plot_sent_obs_probs(db)
else:
plot_sent_obs_probs(db,*get_args(cmd))
elif cmd[0] == "5":
if cmd[1:].strip() == '':
plot_other(db)
else:
plot_other(db,*get_args(cmd))
elif cmd[0] == "6" or cmd[0] == "q" or cmd[0] == "Q":
break
else:
print("Command not found.")
except ValueError:
print("Could not parse start number {}.".format(cmd[1:].strip()))
|
ntuecon/server
|
refs/heads/master
|
pyenv/Lib/site-packages/libpasteurize/fixes/fix_memoryview.py
|
71
|
u"""
Fixer for memoryview(s) -> buffer(s).
Explicit because some memoryview methods are invalid on buffer objects.
"""
from lib2to3 import fixer_base
from lib2to3.fixer_util import Name
class FixMemoryview(fixer_base.BaseFix):
explicit = True # User must specify that they want this.
PATTERN = u"""
power< name='memoryview' trailer< '(' [any] ')' >
rest=any* >
"""
def transform(self, node, results):
name = results[u"name"]
name.replace(Name(u"buffer", prefix=name.prefix))
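# A minimal before/after sketch of the rewrite this fixer performs:
#   memoryview(data)  ->  buffer(data)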
|
barachka/odoo
|
refs/heads/master
|
addons/account/ir_sequence.py
|
336
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import api
from openerp.osv import fields, osv
class ir_sequence_fiscalyear(osv.osv):
_name = 'account.sequence.fiscalyear'
_rec_name = "sequence_main_id"
_columns = {
"sequence_id": fields.many2one("ir.sequence", 'Sequence', required=True,
ondelete='cascade'),
"sequence_main_id": fields.many2one("ir.sequence", 'Main Sequence',
required=True, ondelete='cascade'),
"fiscalyear_id": fields.many2one('account.fiscalyear', 'Fiscal Year',
required=True, ondelete='cascade')
}
_sql_constraints = [
('main_id', 'CHECK (sequence_main_id != sequence_id)',
'Main Sequence must be different from current !'),
]
class ir_sequence(osv.osv):
_inherit = 'ir.sequence'
_columns = {
'fiscal_ids': fields.one2many('account.sequence.fiscalyear',
'sequence_main_id', 'Sequences', copy=True)
}
@api.cr_uid_ids_context
def _next(self, cr, uid, seq_ids, context=None):
if context is None:
context = {}
for seq in self.browse(cr, uid, seq_ids, context):
for line in seq.fiscal_ids:
if line.fiscalyear_id.id == context.get('fiscalyear_id'):
return super(ir_sequence, self)._next(cr, uid, [line.sequence_id.id], context)
return super(ir_sequence, self)._next(cr, uid, seq_ids, context)
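    # Illustrative effect of the override above: with a context of
    # {'fiscalyear_id': <some fiscal year id>}, a sequence that carries a
    # matching fiscal_ids line draws its next number from that year's
    # sub-sequence instead of the main sequence.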
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
mushorg/conpot
|
refs/heads/master
|
conpot/protocols/snmp/snmp_server.py
|
1
|
# Copyright (C) 2013 Lukas Rist <glaslos@gmail.com>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
import logging
import os
from lxml import etree
import conpot.core as conpot_core
from conpot.core.protocol_wrapper import conpot_protocol
from conpot.protocols.snmp.command_responder import CommandResponder
logger = logging.getLogger()
@conpot_protocol
class SNMPServer(object):
def __init__(self, template, template_directory, args):
"""
        :param template: path to the protocol specific xml configuration file (string).
        :param template_directory: directory containing the template's snmp/mibs subdirectory with raw MIB files (string).
        :param args: parsed command line arguments; args.mibcache is the compiled MIB cache location.
"""
self.dom = etree.parse(template)
self.cmd_responder = None
self.compiled_mibs = args.mibcache
self.raw_mibs = os.path.join(template_directory, "snmp", "mibs")
def xml_general_config(self, dom):
snmp_config = dom.xpath("//snmp/config/*")
if snmp_config:
for entity in snmp_config:
# TARPIT: individual response delays
if entity.attrib["name"].lower() == "tarpit":
if entity.attrib["command"].lower() == "get":
self.cmd_responder.resp_app_get.tarpit = (
self.config_sanitize_tarpit(entity.text)
)
elif entity.attrib["command"].lower() == "set":
self.cmd_responder.resp_app_set.tarpit = (
self.config_sanitize_tarpit(entity.text)
)
elif entity.attrib["command"].lower() == "next":
self.cmd_responder.resp_app_next.tarpit = (
self.config_sanitize_tarpit(entity.text)
)
elif entity.attrib["command"].lower() == "bulk":
self.cmd_responder.resp_app_bulk.tarpit = (
self.config_sanitize_tarpit(entity.text)
)
# EVASION: response thresholds
if entity.attrib["name"].lower() == "evasion":
if entity.attrib["command"].lower() == "get":
self.cmd_responder.resp_app_get.threshold = (
self.config_sanitize_threshold(entity.text)
)
elif entity.attrib["command"].lower() == "set":
self.cmd_responder.resp_app_set.threshold = (
self.config_sanitize_threshold(entity.text)
)
elif entity.attrib["command"].lower() == "next":
self.cmd_responder.resp_app_next.threshold = (
self.config_sanitize_threshold(entity.text)
)
elif entity.attrib["command"].lower() == "bulk":
self.cmd_responder.resp_app_bulk.threshold = (
self.config_sanitize_threshold(entity.text)
)
def xml_mib_config(self):
mibs = self.dom.xpath("//snmp/mibs/*")
# parse mibs and oid tables
for mib in mibs:
mib_name = mib.attrib["name"]
for symbol in mib:
symbol_name = symbol.attrib["name"]
# retrieve instance from template
if "instance" in symbol.attrib:
# convert instance to (int-)tuple
symbol_instance = symbol.attrib["instance"].split(".")
symbol_instance = tuple(map(int, symbol_instance))
else:
# use default instance (0)
symbol_instance = (0,)
# retrieve value from databus
value = conpot_core.get_databus().get_value(
symbol.xpath("./value/text()")[0]
)
profile_map_name = symbol.xpath("./value/text()")[0]
# register this MIB instance to the command responder
self.cmd_responder.register(
mib_name, symbol_name, symbol_instance, value, profile_map_name
)
def config_sanitize_tarpit(self, value):
        # Checks that the tarpit value is either a single int/float, or two
        # ints/floats separated by a semicolon, and returns either the
        # (sanitized) value or zero.
if value is not None:
x, _, y = value.partition(";")
try:
_ = float(x)
except ValueError:
logger.error(
"SNMP invalid tarpit value: '%s'. Assuming no latency.", value
)
# first value is invalid, ignore the whole setting.
return "0;0"
try:
_ = float(y)
# both values are fine.
return value
except ValueError:
# second value is invalid, use the first one.
return x
else:
return "0;0"
def config_sanitize_threshold(self, value):
        # Checks that the DoS threshold is either a single int, or two ints
        # separated by a semicolon, and returns either the (sanitized) value
        # or zero.
if value is not None:
x, _, y = value.partition(";")
try:
_ = int(x)
except ValueError:
logger.error(
"SNMP invalid evasion threshold: '%s'. Assuming no DoS evasion.",
value,
)
# first value is invalid, ignore the whole setting.
return "0;0"
try:
_ = int(y)
# both values are fine.
return value
except ValueError:
# second value is invalid, use the first and ignore the second.
return str(x) + ";0"
else:
return "0;0"
def start(self, host, port):
self.cmd_responder = CommandResponder(
host, port, self.raw_mibs, self.compiled_mibs
)
self.xml_general_config(self.dom)
self.xml_mib_config()
logger.info("SNMP server started on: %s", (host, self.get_port()))
self.cmd_responder.serve_forever()
def stop(self):
if self.cmd_responder:
self.cmd_responder.stop()
def get_port(self):
if self.cmd_responder:
return self.cmd_responder.server_port
else:
return None
|
simone201/neak-gs3-jb
|
refs/heads/master
|
scripts/tracing/draw_functrace.py
|
14679
|
#!/usr/bin/python
"""
Copyright 2008 (c) Frederic Weisbecker <fweisbec@gmail.com>
Licensed under the terms of the GNU GPL License version 2
This script parses a trace provided by the function tracer in
kernel/trace/trace_functions.c
The resulting trace is processed into a tree to produce a more
human-readable view of the call stack, drawn as a textual, hierarchical
tree of calls. Only the function names and call times are provided.
Usage:
Be sure that you have CONFIG_FUNCTION_TRACER
# mount -t debugfs nodev /sys/kernel/debug
# echo function > /sys/kernel/debug/tracing/current_tracer
$ cat /sys/kernel/debug/tracing/trace_pipe > ~/raw_trace_func
Wait for a while, but not too long: the script is a bit slow.
Break the pipe (Ctrl + Z)
$ scripts/draw_functrace.py < raw_trace_func > draw_functrace
Then you have your drawn trace in draw_functrace
"""
import sys, re
class CallTree:
""" This class provides a tree representation of the functions
call stack. If a function has no parent in the kernel (interrupt,
syscall, kernel thread...) then it is attached to a virtual parent
called ROOT.
"""
ROOT = None
def __init__(self, func, time = None, parent = None):
self._func = func
self._time = time
if parent is None:
self._parent = CallTree.ROOT
else:
self._parent = parent
self._children = []
def calls(self, func, calltime):
""" If a function calls another one, call this method to insert it
into the tree at the appropriate place.
@return: A reference to the newly created child node.
"""
child = CallTree(func, calltime, self)
self._children.append(child)
return child
def getParent(self, func):
""" Retrieve the last parent of the current node that
has the name given by func. If this function is not
on a parent, then create it as new child of root
@return: A reference to the parent.
"""
tree = self
while tree != CallTree.ROOT and tree._func != func:
tree = tree._parent
if tree == CallTree.ROOT:
child = CallTree.ROOT.calls(func, None)
return child
return tree
def __repr__(self):
return self.__toString("", True)
def __toString(self, branch, lastChild):
if self._time is not None:
s = "%s----%s (%s)\n" % (branch, self._func, self._time)
else:
s = "%s----%s\n" % (branch, self._func)
i = 0
if lastChild:
branch = branch[:-1] + " "
while i < len(self._children):
if i != len(self._children) - 1:
s += "%s" % self._children[i].__toString(branch +\
" |", False)
else:
s += "%s" % self._children[i].__toString(branch +\
" |", True)
i += 1
return s
class BrokenLineException(Exception):
"""If the last line is not complete because of the pipe breakage,
we want to stop the processing and ignore this line.
"""
pass
class CommentLineException(Exception):
""" If the line is a comment (as in the beginning of the trace file),
just ignore it.
"""
pass
def parseLine(line):
line = line.strip()
if line.startswith("#"):
raise CommentLineException
m = re.match("[^]]+?\\] +([0-9.]+): (\\w+) <-(\\w+)", line)
if m is None:
raise BrokenLineException
return (m.group(1), m.group(2), m.group(3))
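# Illustrative sketch (not part of the original script): a typical function
# tracer line and how parseLine() splits it.
#   "  bash-4251  [001]  6306.005548: lock_timer_base <-try_to_del_timer_sync"
#   -> ("6306.005548", "lock_timer_base", "try_to_del_timer_sync")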
def main():
CallTree.ROOT = CallTree("Root (Nowhere)", None, None)
tree = CallTree.ROOT
for line in sys.stdin:
try:
calltime, callee, caller = parseLine(line)
except BrokenLineException:
break
except CommentLineException:
continue
tree = tree.getParent(caller)
tree = tree.calls(callee, calltime)
print CallTree.ROOT
if __name__ == "__main__":
main()
|
j127/python_koans
|
refs/heads/master
|
python2/runner/runner_tests/test_helper.py
|
244
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
from runner import helper
class TestHelper(unittest.TestCase):
def test_that_get_class_name_works_with_a_string_instance(self):
self.assertEqual("str", helper.cls_name(str()))
def test_that_get_class_name_works_with_a_4(self):
self.assertEquals("int", helper.cls_name(4))
def test_that_get_class_name_works_with_a_tuple(self):
self.assertEquals("tuple", helper.cls_name((3,"pie", [])))
|
grnet/synnefo
|
refs/heads/develop
|
snf-admin-app/synnefo_admin/admin/resources/ip_logs/filters.py
|
1
|
# Copyright (C) 2010-2014 GRNET S.A.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import logging
from synnefo.db.models import IPAddressHistory, VirtualMachine
import django_filters
from synnefo_admin.admin.queries_common import (process_queries, model_filter,
get_model_field)
@model_filter
def filter_user(queryset, queries):
q = process_queries("user", queries)
user_ids = get_model_field("user", q, 'uuid')
return queryset.filter(user_id__in=user_ids)
@model_filter
def filter_vm(queryset, queries):
q = process_queries("vm", queries)
ids = get_model_field("vm", q, 'id')
return queryset.filter(server_id__in=ids)
@model_filter
def filter_network(queryset, queries):
q = process_queries("network", queries)
ids = get_model_field("network", q, 'id')
return queryset.filter(network_id__in=ids)
@model_filter
def filter_ip(queryset, queries):
q = process_queries("ip", queries)
return queryset.filter(q)
class IPLogFilterSet(django_filters.FilterSet):
"""A collection of filters for ip log.
This filter collection is based on django-filter's FilterSet.
"""
user = django_filters.CharFilter(label='OF User', action=filter_user)
vm = django_filters.CharFilter(label='OF VM', action=filter_vm)
net = django_filters.CharFilter(label='OF Network', action=filter_network)
ip = django_filters.CharFilter(label='OF IP', action=filter_ip)
class Meta:
model = IPAddressHistory
fields = ('user', 'vm', 'net', 'ip')
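# Illustrative usage sketch (not part of the original module), assuming the
# hypothetical query string 'uuid:1234' is understood by process_queries():
#   f = IPLogFilterSet({'user': 'uuid:1234'},
#                      queryset=IPAddressHistory.objects.all())
#   rows = f.qs  # history entries whose user matches the query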
|
OptimusGitEtna/RestSymf
|
refs/heads/master
|
Python-3.4.2/Tools/scripts/lfcr.py
|
116
|
#! /usr/bin/env python3
"Replace LF with CRLF in argument files. Print names of changed files."
import sys, re, os
def main():
for filename in sys.argv[1:]:
if os.path.isdir(filename):
print(filename, "Directory!")
continue
with open(filename, "rb") as f:
data = f.read()
if b'\0' in data:
print(filename, "Binary!")
continue
newdata = re.sub(b"\r?\n", b"\r\n", data)
if newdata != data:
print(filename)
with open(filename, "wb") as f:
f.write(newdata)
if __name__ == '__main__':
main()
|
gaocegege/kubernetes
|
refs/heads/master
|
examples/cluster-dns/images/backend/server.py
|
504
|
#!/usr/bin/env python
# Copyright 2015 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
PORT_NUMBER = 8000
# This class handles any incoming request.
class HTTPHandler(BaseHTTPRequestHandler):
# Handler for the GET requests
def do_GET(self):
self.send_response(200)
self.send_header('Content-type','text/html')
self.end_headers()
self.wfile.write("Hello World!")
try:
# Create a web server and define the handler to manage the incoming request.
server = HTTPServer(('', PORT_NUMBER), HTTPHandler)
print 'Started httpserver on port ' , PORT_NUMBER
server.serve_forever()
except KeyboardInterrupt:
print '^C received, shutting down the web server'
server.socket.close()
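# Illustrative usage sketch (not part of the original example): once the
# server is running, a request such as
#   curl http://localhost:8000/
# returns "Hello World!".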
|
DANCEcollaborative/forum-xblock
|
refs/heads/master
|
XBlock Integration Files/xdjangobb/xblock/lib/python2.7/site-packages/django/core/files/utils.py
|
901
|
class FileProxyMixin(object):
"""
    A mixin class used to forward file methods to an underlying file
object. The internal file object has to be called "file"::
class FileProxy(FileProxyMixin):
def __init__(self, file):
self.file = file
"""
encoding = property(lambda self: self.file.encoding)
fileno = property(lambda self: self.file.fileno)
flush = property(lambda self: self.file.flush)
isatty = property(lambda self: self.file.isatty)
newlines = property(lambda self: self.file.newlines)
read = property(lambda self: self.file.read)
readinto = property(lambda self: self.file.readinto)
readline = property(lambda self: self.file.readline)
readlines = property(lambda self: self.file.readlines)
seek = property(lambda self: self.file.seek)
softspace = property(lambda self: self.file.softspace)
tell = property(lambda self: self.file.tell)
truncate = property(lambda self: self.file.truncate)
write = property(lambda self: self.file.write)
writelines = property(lambda self: self.file.writelines)
xreadlines = property(lambda self: self.file.xreadlines)
def __iter__(self):
return iter(self.file)
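# Illustrative usage sketch (not part of Django): a minimal proxy that
# forwards reads to the wrapped object, mirroring the docstring above.
#   class FileProxy(FileProxyMixin):
#       def __init__(self, file):
#           self.file = file
#   proxy = FileProxy(open('example.txt'))  # hypothetical file
#   data = proxy.read()  # forwarded to proxy.file.read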
|
vikas1885/test1
|
refs/heads/master
|
common/djangoapps/student/migrations/0044_linkedin_add_company_identifier.py
|
102
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'LinkedInAddToProfileConfiguration.company_identifier'
db.add_column('student_linkedinaddtoprofileconfiguration', 'company_identifier',
self.gf('django.db.models.fields.TextField')(default=''),
keep_default=False)
def backwards(self, orm):
# Deleting field 'LinkedInAddToProfileConfiguration.company_identifier'
db.delete_column('student_linkedinaddtoprofileconfiguration', 'company_identifier')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'student.anonymoususerid': {
'Meta': {'object_name': 'AnonymousUserId'},
'anonymous_user_id': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'}),
'course_id': ('xmodule_django.models.CourseKeyField', [], {'db_index': 'True', 'max_length': '255', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'student.courseaccessrole': {
'Meta': {'unique_together': "(('user', 'org', 'course_id', 'role'),)", 'object_name': 'CourseAccessRole'},
'course_id': ('xmodule_django.models.CourseKeyField', [], {'db_index': 'True', 'max_length': '255', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'org': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '64', 'blank': 'True'}),
'role': ('django.db.models.fields.CharField', [], {'max_length': '64', 'db_index': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'student.courseenrollment': {
'Meta': {'ordering': "('user', 'course_id')", 'unique_together': "(('user', 'course_id'),)", 'object_name': 'CourseEnrollment'},
'course_id': ('xmodule_django.models.CourseKeyField', [], {'max_length': '255', 'db_index': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'mode': ('django.db.models.fields.CharField', [], {'default': "'honor'", 'max_length': '100'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'student.courseenrollmentallowed': {
'Meta': {'unique_together': "(('email', 'course_id'),)", 'object_name': 'CourseEnrollmentAllowed'},
'auto_enroll': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'course_id': ('xmodule_django.models.CourseKeyField', [], {'max_length': '255', 'db_index': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'db_index': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'student.dashboardconfiguration': {
'Meta': {'object_name': 'DashboardConfiguration'},
'change_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'changed_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'on_delete': 'models.PROTECT'}),
'enabled': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'recent_enrollment_time_delta': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'student.linkedinaddtoprofileconfiguration': {
'Meta': {'object_name': 'LinkedInAddToProfileConfiguration'},
'change_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'changed_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'on_delete': 'models.PROTECT'}),
'company_identifier': ('django.db.models.fields.TextField', [], {}),
'dashboard_tracking_code': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'enabled': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'student.loginfailures': {
'Meta': {'object_name': 'LoginFailures'},
'failure_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'lockout_until': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'student.passwordhistory': {
'Meta': {'object_name': 'PasswordHistory'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'time_set': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'student.pendingemailchange': {
'Meta': {'object_name': 'PendingEmailChange'},
'activation_key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'new_email': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'blank': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True'})
},
'student.pendingnamechange': {
'Meta': {'object_name': 'PendingNameChange'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'new_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'rationale': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'blank': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True'})
},
'student.registration': {
'Meta': {'object_name': 'Registration', 'db_table': "'auth_registration'"},
'activation_key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'unique': 'True'})
},
'student.userprofile': {
'Meta': {'object_name': 'UserProfile', 'db_table': "'auth_userprofile'"},
'allow_certificate': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'city': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'country': ('django_countries.fields.CountryField', [], {'max_length': '2', 'null': 'True', 'blank': 'True'}),
'courseware': ('django.db.models.fields.CharField', [], {'default': "'course.xml'", 'max_length': '255', 'blank': 'True'}),
'gender': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '6', 'null': 'True', 'blank': 'True'}),
'goals': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'blank': 'True'}),
'level_of_education': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '6', 'null': 'True', 'blank': 'True'}),
'location': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'blank': 'True'}),
'mailing_address': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'meta': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'blank': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'profile'", 'unique': 'True', 'to': "orm['auth.User']"}),
'year_of_birth': ('django.db.models.fields.IntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'})
},
'student.usersignupsource': {
'Meta': {'object_name': 'UserSignupSource'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'site': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'student.userstanding': {
'Meta': {'object_name': 'UserStanding'},
'account_status': ('django.db.models.fields.CharField', [], {'max_length': '31', 'blank': 'True'}),
'changed_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'standing_last_changed_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'standing'", 'unique': 'True', 'to': "orm['auth.User']"})
},
'student.usertestgroup': {
'Meta': {'object_name': 'UserTestGroup'},
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '32', 'db_index': 'True'}),
'users': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.User']", 'db_index': 'True', 'symmetrical': 'False'})
}
}
complete_apps = ['student']
|
iulian787/spack
|
refs/heads/develop
|
var/spack/repos/builtin.mock/packages/requires-virtual/package.py
|
2
|
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
class RequiresVirtual(Package):
"""Package that requires a virtual dependency and is registered
as an external.
"""
homepage = "http://www.example.com"
url = "http://www.example.com/a-1.0.tar.gz"
version('2.0', '2.0_a_hash')
depends_on('stuff')
|
lamby/buildinfo.debian.net
|
refs/heads/master
|
bidb/api/utils.py
|
1
|
import re
import hashlib
from debian import deb822
from dateutil.parser import parse
from django.db import transaction, IntegrityError
from django.core.files.base import ContentFile
from django.core.files.storage import default_storage
from bidb.keys.models import Key
from bidb.packages.models import Source, Architecture, Binary
from bidb.buildinfo.models import Buildinfo, Origin
SUPPORTED_FORMATS = {'0.2', '1.0'}
re_binary = re.compile(
r'^(?P<name>[^_]+)_(?P<version>[^_]+)_(?P<architecture>[^\.]+)\.u?deb$',
)
re_installed_build_depends = re.compile(
r'^(?P<package>[^ ]+) \(= (?P<version>.+)\)'
)
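# Illustrative sketch (not part of the original module): strings matched by
# the patterns above.
#   re_binary: "hello_2.10-1_amd64.deb"
#       -> name="hello", version="2.10-1", architecture="amd64"
#   re_installed_build_depends: "gcc-6 (= 6.1.1-1)"
#       -> package="gcc-6", version="6.1.1-1"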
class InvalidSubmission(Exception):
pass
@transaction.atomic
def parse_submission(request):
raw_text = request.read()
try:
data = deb822.Deb822(raw_text)
except TypeError:
raise InvalidSubmission("Could not parse RFC-822 format data.")
raw_text_gpg_stripped = data.dump()
## Parse GPG info #########################################################
uid = None
data.raw_text = raw_text
gpg_info = data.get_gpg_info()
for x in ('VALIDSIG', 'NO_PUBKEY'):
try:
uid = gpg_info[x][0]
break
except (KeyError, IndexError):
pass
if uid is None:
raise InvalidSubmission("Could not determine GPG uid")
## Check whether .buildinfo already exists ################################
def create_submission(buildinfo):
submission = buildinfo.submissions.create(
key=Key.objects.get_or_create(uid=uid)[0],
)
default_storage.save(
submission.get_storage_name(),
ContentFile(raw_text),
)
return submission
## Parse new .buildinfo ###################################################
def get_or_create(model, field):
try:
return model.objects.get_or_create(name=data[field])[0]
except KeyError:
raise InvalidSubmission("Missing required field: {}".format(field))
if data.get('Format') not in SUPPORTED_FORMATS:
raise InvalidSubmission(
"Only {} 'Format:' versions are supported".format(
', '.join(sorted(SUPPORTED_FORMATS)),
)
)
sha1 = hashlib.sha1(raw_text_gpg_stripped.encode('utf-8')).hexdigest()
try:
with transaction.atomic():
buildinfo = Buildinfo.objects.create(
sha1=sha1,
source=get_or_create(Source, 'Source'),
architecture=get_or_create(Architecture, 'Architecture'),
version=data['version'],
build_path=data.get('Build-Path', ''),
build_date=parse(data.get('Build-Date', '')),
build_origin=get_or_create(Origin, 'Build-Origin'),
build_architecture=get_or_create(Architecture, 'Build-Architecture'),
environment=data.get('Environment', ''),
)
except IntegrityError:
# Already exists; just attach a new Submission instance
return create_submission(Buildinfo.objects.get(sha1=sha1)), False
default_storage.save(
buildinfo.get_storage_name(),
ContentFile(raw_text_gpg_stripped),
)
## Parse binaries #########################################################
try:
binary_names = set(data['Binary'].split(' '))
except KeyError:
raise InvalidSubmission("Missing 'Binary' field")
if not binary_names:
raise InvalidSubmission("Invalid 'Binary' field")
binaries = {}
for x in binary_names:
# Save instances for lookup later
binaries[x] = buildinfo.binaries.create(
binary=Binary.objects.get_or_create(name=x)[0],
)
## Parse checksums ########################################################
hashes = ('Md5', 'Sha1', 'Sha256')
checksums = {}
for x in hashes:
for y in data['Checksums-%s' % x].strip().splitlines():
checksum, size, filename = y.strip().split()
# Check size
try:
size = int(size)
if size < 0:
raise ValueError()
except ValueError:
raise InvalidSubmission(
"Invalid size for {}: {}".format(filename, size),
)
checksums.setdefault(filename, {
'size': size,
'binary': None,
})['checksum_{}'.format(x.lower())] = checksum
existing = checksums[filename]['size']
if size != existing:
raise InvalidSubmission("Mismatched file size in "
"Checksums-{}: {} != {}".format(x, existing, size))
## Create Checksum instances ##############################################
for k, v in sorted(checksums.items()):
# Match with Binary instances if possible
m = re_binary.match(k)
if m is not None:
v['binary'] = binaries.get(m.group('name'))
buildinfo.checksums.create(filename=k, **v)
## Validate Installed-Build-Depends #######################################
for x in data['Installed-Build-Depends'].strip().splitlines():
m = re_installed_build_depends.match(x.strip())
if m is None:
raise InvalidSubmission(
"Invalid entry in Installed-Build-Depends: {}".format(x),
)
return create_submission(buildinfo), True
|
noironetworks/group-based-policy
|
refs/heads/master
|
gbpservice/contrib/nfp/configurator/agents/nfp_service.py
|
1
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import oslo_messaging as messaging
import six
from gbpservice.contrib.nfp.configurator.agents import agent_base
from gbpservice.contrib.nfp.configurator.lib import (
nfp_service_constants as const)
from gbpservice.contrib.nfp.configurator.lib import utils as load_driver
from gbpservice.nfp.core import event as nfp_event
from gbpservice.nfp.core import log as nfp_logging
LOG = nfp_logging.getLogger(__name__)
class ConfigScriptRpcManager(agent_base.AgentBaseRPCManager):
""" Implements ConfigScriptRpcManager class which receives requests
from Configurator module.
Methods of this class are invoked by the configurator. Events are
created according to the requests received and enqueued to worker queues.
"""
RPC_API_VERSION = '1.0'
target = messaging.Target(version=RPC_API_VERSION)
def __init__(self, sc, conf):
"""Instantiates child and parent class objects.
:param sc: Service Controller object that is used to communicate
with process model core file.
:param conf: Configuration object that is used for configuration
parameter access.
"""
super(ConfigScriptRpcManager, self).__init__(sc, conf)
def run_nfp_service(self, context, resource_data):
""" Receives request to execute config script.
:param context: RPC context
        :param resource_data: Contains configuration script and request information
"""
msg = ("ConfigScriptRpcManager received Create Heat request.")
LOG.debug(msg)
arg_dict = {'context': context,
'resource_data': resource_data}
ev = self.sc.new_event(id=const.CREATE_NFP_SERVICE_EVENT,
data=arg_dict, key=None)
self.sc.post_event(ev)
class ConfigScriptEventHandler(agent_base.AgentBaseEventHandler):
""" Handler class which invokes nfp_service driver methods
    Worker processes dequeue events from the worker queues and invoke the
    appropriate handler class methods for ConfigScript requests.
"""
def __init__(self, sc, drivers, rpcmgr):
""" Initializes parent and child class objects.
:param sc: Service Controller object that is used to communicate
with process model.
:param drivers: Dictionary of driver name to object mapping
:param rpcmgr: ConfigScriptRpcManager class object
"""
super(ConfigScriptEventHandler, self).__init__(sc, drivers, rpcmgr)
self.sc = sc
self.drivers = drivers
self.rpcmgr = rpcmgr
def _get_driver(self):
""" Retrieves driver object given the service type.
"""
driver_id = const.SERVICE_TYPE
return self.drivers[driver_id]
def handle_event(self, ev):
""" Demultiplexes the nfp_service request to appropriate
driver methods.
:param ev: Event object sent from process model event handler
"""
try:
agent_info = ev.data['context']
notification_context = agent_info['context']
resource = agent_info['resource']
resource_data = ev.data['resource_data']
msg = ("Worker process with ID: %s starting to "
"handle task: %s of type ConfigScript. "
% (os.getpid(), ev.id))
LOG.debug(msg)
driver = self._get_driver()
self.method = getattr(driver, "run_%s" % resource)
result = self.method(notification_context, resource_data)
except Exception as err:
result = const.ERROR_RESULT
msg = ("Failed to handle event: %s. %s"
% (ev.id, str(err).capitalize()))
LOG.error(msg)
finally:
del agent_info['notification_data']
del agent_info['service_vendor']
service_type = agent_info.pop('resource_type')
if result in const.UNHANDLED_RESULT:
data = {'status_code': const.UNHANDLED_RESULT}
else:
data = {'status_code': const.FAILURE,
'error_msg': result}
msg = {'info': {'service_type': service_type,
'context': notification_context},
'notification': [{'resource': resource,
'data': data}]
}
self.notify._notification(msg)
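    # Illustrative sketch (not part of the original module): for an event
    # whose agent_info carries resource='heat' (hypothetical), the handler
    # above resolves self.method to driver.run_heat and invokes it with the
    # notification context and resource data.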
def events_init(sc, drivers, rpcmgr):
"""Registers events with core service controller.
All the events will come to handle_event method of class instance
registered in 'handler' field.
:param drivers: Driver instances registered with the service agent
:param rpcmgr: Instance to receive all the RPC messages from configurator
module.
Returns: None
"""
event = nfp_event.Event(
id=const.CREATE_NFP_SERVICE_EVENT,
handler=ConfigScriptEventHandler(sc, drivers, rpcmgr))
sc.register_events([event])
def load_drivers(conf):
"""Imports all the driver files corresponding to this agent.
Returns: Dictionary of driver objects with a specified service type and
vendor name
"""
ld = load_driver.ConfiguratorUtils(conf)
drivers = ld.load_drivers(const.SERVICE_TYPE)
for service_type, driver_name in six.iteritems(drivers):
driver_obj = driver_name(conf=conf)
drivers[service_type] = driver_obj
return drivers
def register_service_agent(cm, sc, conf, rpcmgr):
"""Registers ConfigScript service agent with configurator module.
:param cm: Instance of configurator module
:param sc: Instance of core service controller
:param conf: Instance of oslo configuration
:param rpcmgr: Instance containing RPC methods which are invoked by
configurator module on corresponding RPC message arrival
"""
service_type = const.SERVICE_TYPE
cm.register_service_agent(service_type, rpcmgr)
def init_agent(cm, sc, conf):
"""Initializes Config Script agent.
:param cm: Instance of configuration module
:param sc: Instance of core service controller
:param conf: Instance of oslo configuration
"""
try:
drivers = load_drivers(conf)
except Exception as err:
msg = ("Config Script failed to load drivers. %s"
% (str(err).capitalize()))
LOG.error(msg)
raise Exception(err)
else:
msg = ("Config Script loaded drivers successfully.")
LOG.debug(msg)
rpcmgr = ConfigScriptRpcManager(sc, conf)
try:
events_init(sc, drivers, rpcmgr)
except Exception as err:
msg = ("Config Script Events initialization unsuccessful. %s"
% (str(err).capitalize()))
LOG.error(msg)
raise Exception(err)
else:
msg = ("Config Script Events initialization successful.")
LOG.debug(msg)
try:
register_service_agent(cm, sc, conf, rpcmgr)
except Exception as err:
msg = ("Config Script service agent registration unsuccessful. %s"
% (str(err).capitalize()))
LOG.error(msg)
raise Exception(err)
else:
msg = ("Config Script service agent registration successful.")
LOG.debug(msg)
msg = ("ConfigScript as a Service Module Initialized.")
LOG.info(msg)
def init_agent_complete(cm, sc, conf):
""" Initializes periodic tasks
"""
msg = (" Config Script agent init complete")
LOG.info(msg)
|
mogoweb/webkit_for_android5.1
|
refs/heads/master
|
webkit/Source/WebKit/chromium/scripts/generate_devtools_grd.py
|
26
|
#!/usr/bin/env python
#
# Copyright (C) 2011 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Creates a grd file for packaging the inspector files."""
from __future__ import with_statement
import errno
import os
import shutil
import sys
from xml.dom import minidom
kDevToolsResourcePrefix = 'IDR_DEVTOOLS_'
kGrdTemplate = '''<?xml version="1.0" encoding="UTF-8"?>
<grit latest_public_release="0" current_release="1">
<outputs>
<output filename="grit/devtools_resources.h" type="rc_header">
<emit emit_type='prepend'></emit>
</output>
<output filename="grit/devtools_resources_map.cc" type="resource_file_map_source" />
<output filename="grit/devtools_resources_map.h" type="resource_map_header" />
<output filename="devtools_resources.pak" type="data_package" />
</outputs>
<release seq="1">
<includes></includes>
</release>
</grit>
'''
class ParsedArgs:
def __init__(self, source_files, image_dirs, output_filename):
self.source_files = source_files
self.image_dirs = image_dirs
self.output_filename = output_filename
def parse_args(argv):
images_position = argv.index('--images')
output_position = argv.index('--output')
source_files = argv[:images_position]
image_dirs = argv[images_position + 1:output_position]
return ParsedArgs(source_files, image_dirs, argv[output_position + 1])
def make_name_from_filename(filename):
return (filename.replace('/', '_')
.replace('\\', '_')
.replace('.', '_')).upper()
def add_file_to_grd(grd_doc, filename):
includes_node = grd_doc.getElementsByTagName('includes')[0]
includes_node.appendChild(grd_doc.createTextNode('\n '))
new_include_node = grd_doc.createElement('include')
new_include_node.setAttribute('name', make_name_from_filename(filename))
new_include_node.setAttribute('file', filename)
new_include_node.setAttribute('type', 'BINDATA')
includes_node.appendChild(new_include_node)
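# Illustrative sketch (not part of the original script): for "devtools.js"
# the include node appended above serializes roughly as
#   <include name="DEVTOOLS_JS" file="devtools.js" type="BINDATA"/>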
def main(argv):
parsed_args = parse_args(argv[1:])
doc = minidom.parseString(kGrdTemplate)
output_directory = os.path.dirname(parsed_args.output_filename)
try:
os.makedirs(os.path.join(output_directory, 'Images'))
except OSError, e:
if e.errno != errno.EEXIST:
raise e
for filename in parsed_args.source_files:
shutil.copy(filename, output_directory)
add_file_to_grd(doc, os.path.basename(filename))
for dirname in parsed_args.image_dirs:
for filename in os.listdir(dirname):
if not filename.endswith('.png') and not filename.endswith('.gif'):
continue
shutil.copy(os.path.join(dirname, filename),
os.path.join(output_directory, 'Images'))
add_file_to_grd(doc, os.path.join('Images', filename))
with open(parsed_args.output_filename, 'w') as output_file:
output_file.write(doc.toxml(encoding='UTF-8'))
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
ar7z1/ansible
|
refs/heads/devel
|
lib/ansible/utils/module_docs_fragments/onyx.py
|
58
|
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
class ModuleDocFragment(object):
# Standard files documentation fragment
DOCUMENTATION = """
options:
provider:
description:
- A dict object containing connection details.
suboptions:
host:
description:
- Specifies the DNS host name or address for connecting to the remote
device over the specified transport. The value of host is used as
the destination address for the transport.
required: true
port:
description:
- Specifies the port to use when building the connection to the remote device.
default: 22
username:
description:
- Configures the username to use to authenticate the connection to
the remote device. This value is used to authenticate
the SSH session. If the value is not specified in the task, the
value of environment variable C(ANSIBLE_NET_USERNAME) will be used instead.
password:
description:
- Specifies the password to use to authenticate the connection to
the remote device. This value is used to authenticate
the SSH session. If the value is not specified in the task, the
value of environment variable C(ANSIBLE_NET_PASSWORD) will be used instead.
timeout:
description:
- Specifies the timeout in seconds for communicating with the network device
for either connecting or sending commands. If the timeout is
exceeded before the operation is completed, the module will error.
default: 10
ssh_keyfile:
description:
- Specifies the SSH key to use to authenticate the connection to
the remote device. This value is the path to the
key used to authenticate the SSH session. If the value is not specified
in the task, the value of environment variable C(ANSIBLE_NET_SSH_KEYFILE)
will be used instead.
authorize:
description:
- Instructs the module to enter privileged mode on the remote device
before sending any commands. If not specified, the device will
attempt to execute all commands in non-privileged mode. If the value
is not specified in the task, the value of environment variable
C(ANSIBLE_NET_AUTHORIZE) will be used instead.
type: bool
default: 'no'
auth_pass:
description:
- Specifies the password to use if required to enter privileged mode
on the remote device. If I(authorize) is false, then this argument
does nothing. If the value is not specified in the task, the value of
environment variable C(ANSIBLE_NET_AUTH_PASS) will be used instead.
"""
|
nicolasdespres/ninja
|
refs/heads/master
|
misc/ci.py
|
10
|
#!/usr/bin/env python3
import os
ignores = [
'.git/',
'misc/afl-fuzz-tokens/',
'ninja_deps',
'src/depfile_parser.cc',
'src/lexer.cc',
]
error_count = 0
def error(path, msg):
global error_count
error_count += 1
print('\x1b[1;31m{}\x1b[0;31m{}\x1b[0m'.format(path, msg))
for root, dirs, filenames in os.walk('.'):
for filename in filenames:
path = os.path.join(root, filename)[2:]
if any([path.startswith(x) for x in ignores]):
continue
with open(path, 'rb') as file:
line_nr = 1
try:
for line in [x.decode() for x in file.readlines()]:
if len(line) == 0 or line[-1] != '\n':
error(path, ' missing newline at end of file.')
if len(line) > 1:
if line[-2] == '\r':
error(path, ' has Windows line endings.')
break
if line[-2] == ' ' or line[-2] == '\t':
error(path, ':{} has trailing whitespace.'.format(line_nr))
line_nr += 1
except UnicodeError:
pass # binary file
exit(error_count)
|
nhomar/odoo
|
refs/heads/8.0
|
addons/account_followup/account_followup.py
|
20
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import api
from openerp.osv import fields, osv
from lxml import etree
from openerp.tools.translate import _
class followup(osv.osv):
_name = 'account_followup.followup'
_description = 'Account Follow-up'
_rec_name = 'name'
_columns = {
'followup_line': fields.one2many('account_followup.followup.line', 'followup_id', 'Follow-up', copy=True),
'company_id': fields.many2one('res.company', 'Company', required=True),
'name': fields.related('company_id', 'name', string = "Name", readonly=True, type="char"),
}
_defaults = {
'company_id': lambda s, cr, uid, c: s.pool.get('res.company')._company_default_get(cr, uid, 'account_followup.followup', context=c),
}
_sql_constraints = [('company_uniq', 'unique(company_id)', 'Only one follow-up per company is allowed')]
class followup_line(osv.osv):
def _get_default_template(self, cr, uid, ids, context=None):
try:
return self.pool.get('ir.model.data').get_object_reference(cr, uid, 'account_followup', 'email_template_account_followup_default')[1]
except ValueError:
return False
_name = 'account_followup.followup.line'
_description = 'Follow-up Criteria'
_columns = {
'name': fields.char('Follow-Up Action', required=True),
'sequence': fields.integer('Sequence', help="Gives the sequence order when displaying a list of follow-up lines."),
'delay': fields.integer('Due Days', help="The number of days after the due date of the invoice to wait before sending the reminder. Could be negative if you want to send a polite alert beforehand.", required=True),
'followup_id': fields.many2one('account_followup.followup', 'Follow Ups', required=True, ondelete="cascade"),
'description': fields.text('Printed Message', translate=True),
'send_email':fields.boolean('Send an Email', help="When processing, it will send an email"),
'send_letter':fields.boolean('Send a Letter', help="When processing, it will print a letter"),
'manual_action':fields.boolean('Manual Action', help="When processing, it will set the manual action to be taken for that customer. "),
'manual_action_note':fields.text('Action To Do', placeholder="e.g. Give a phone call, check with others , ..."),
'manual_action_responsible_id':fields.many2one('res.users', 'Assign a Responsible', ondelete='set null'),
'email_template_id':fields.many2one('email.template', 'Email Template', ondelete='set null'),
}
_order = 'delay'
_sql_constraints = [('days_uniq', 'unique(followup_id, delay)', 'Days of the follow-up levels must be different')]
_defaults = {
'send_email': True,
'send_letter': True,
'manual_action':False,
'description': """
Dear %(partner_name)s,
Unless there has been an error on our part, it seems that the following amount remains unpaid. Please take the appropriate measures to settle this payment within the next 8 days.
If your payment has already been made since this mail was sent, please ignore this message. Do not hesitate to contact our accounting department.
Best Regards,
""",
'email_template_id': _get_default_template,
}
def _check_description(self, cr, uid, ids, context=None):
for line in self.browse(cr, uid, ids, context=context):
if line.description:
try:
line.description % {'partner_name': '', 'date':'', 'user_signature': '', 'company_name': ''}
                except Exception:
return False
return True
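    # Illustrative sketch (not part of the original module): a description
    # such as "Dear %(partner_name)s, ..." passes the check above, while a
    # stray "50% discount" raises during interpolation and is rejected
    # (write it as "50%% discount" instead).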
_constraints = [
(_check_description, 'Your description is invalid, use the right legend or %% if you want to use the percent character.', ['description']),
]
class account_move_line(osv.osv):
def _get_result(self, cr, uid, ids, name, arg, context=None):
res = {}
for aml in self.browse(cr, uid, ids, context=context):
res[aml.id] = aml.debit - aml.credit
return res
_inherit = 'account.move.line'
_columns = {
'followup_line_id': fields.many2one('account_followup.followup.line', 'Follow-up Level',
ondelete='restrict'), #restrict deletion of the followup line
'followup_date': fields.date('Latest Follow-up', select=True),
'result':fields.function(_get_result, type='float', method=True,
string="Balance") #'balance' field is not the same
}
class res_partner(osv.osv):
def fields_view_get(self, cr, uid, view_id=None, view_type=None, context=None, toolbar=False, submenu=False):
res = super(res_partner, self).fields_view_get(cr, uid, view_id=view_id, view_type=view_type, context=context,
toolbar=toolbar, submenu=submenu)
context = context or {}
if view_type == 'form' and context.get('Followupfirst'):
doc = etree.XML(res['arch'], parser=None, base_url=None)
first_node = doc.xpath("//page[@name='followup_tab']")
root = first_node[0].getparent()
root.insert(0, first_node[0])
res['arch'] = etree.tostring(doc, encoding="utf-8")
return res
def _get_latest(self, cr, uid, ids, names, arg, context=None, company_id=None):
res={}
        if company_id is None:
company = self.pool.get('res.users').browse(cr, uid, uid, context=context).company_id
else:
company = self.pool.get('res.company').browse(cr, uid, company_id, context=context)
for partner in self.browse(cr, uid, ids, context=context):
amls = partner.unreconciled_aml_ids
latest_date = False
latest_level = False
latest_days = False
latest_level_without_lit = False
latest_days_without_lit = False
for aml in amls:
if (aml.company_id == company) and (aml.followup_line_id != False) and (not latest_days or latest_days < aml.followup_line_id.delay):
latest_days = aml.followup_line_id.delay
latest_level = aml.followup_line_id.id
if (aml.company_id == company) and (not latest_date or latest_date < aml.followup_date):
latest_date = aml.followup_date
if (aml.company_id == company) and (aml.blocked == False) and (aml.followup_line_id != False and
(not latest_days_without_lit or latest_days_without_lit < aml.followup_line_id.delay)):
latest_days_without_lit = aml.followup_line_id.delay
latest_level_without_lit = aml.followup_line_id.id
res[partner.id] = {'latest_followup_date': latest_date,
'latest_followup_level_id': latest_level,
'latest_followup_level_id_without_lit': latest_level_without_lit}
return res
@api.cr_uid_ids_context
def do_partner_manual_action(self, cr, uid, partner_ids, context=None):
#partner_ids -> res.partner
for partner in self.browse(cr, uid, partner_ids, context=context):
#Check action: check if the action was not empty, if not add
action_text= ""
if partner.payment_next_action:
action_text = (partner.payment_next_action or '') + "\n" + (partner.latest_followup_level_id_without_lit.manual_action_note or '')
else:
action_text = partner.latest_followup_level_id_without_lit.manual_action_note or ''
#Check date: only change when it did not exist already
action_date = partner.payment_next_action_date or fields.date.context_today(self, cr, uid, context=context)
# Check responsible: if partner has not got a responsible already, take from follow-up
responsible_id = False
if partner.payment_responsible_id:
responsible_id = partner.payment_responsible_id.id
else:
p = partner.latest_followup_level_id_without_lit.manual_action_responsible_id
responsible_id = p and p.id or False
self.write(cr, uid, [partner.id], {'payment_next_action_date': action_date,
'payment_next_action': action_text,
'payment_responsible_id': responsible_id})
def do_partner_print(self, cr, uid, wizard_partner_ids, data, context=None):
#wizard_partner_ids are ids from special view, not from res.partner
if not wizard_partner_ids:
return {}
data['partner_ids'] = wizard_partner_ids
datas = {
'ids': wizard_partner_ids,
'model': 'account_followup.followup',
'form': data
}
return self.pool['report'].get_action(cr, uid, [], 'account_followup.report_followup', data=datas, context=context)
@api.cr_uid_ids_context
def do_partner_mail(self, cr, uid, partner_ids, context=None):
if context is None:
context = {}
ctx = context.copy()
ctx['followup'] = True
#partner_ids are res.partner ids
        # If no template is defined on the latest follow-up level, fall back to the default template if it can be found
mtp = self.pool.get('email.template')
unknown_mails = 0
for partner in self.browse(cr, uid, partner_ids, context=ctx):
if partner.email and partner.email.strip():
level = partner.latest_followup_level_id_without_lit
if level and level.send_email and level.email_template_id and level.email_template_id.id:
mtp.send_mail(cr, uid, level.email_template_id.id, partner.id, context=ctx)
else:
mail_template_id = self.pool.get('ir.model.data').get_object_reference(cr, uid,
'account_followup', 'email_template_account_followup_default')
mtp.send_mail(cr, uid, mail_template_id[1], partner.id, context=ctx)
else:
unknown_mails = unknown_mails + 1
action_text = _("Email not sent because of email address of partner not filled in")
if partner.payment_next_action_date:
payment_action_date = min(fields.date.context_today(self, cr, uid, context=ctx), partner.payment_next_action_date)
else:
payment_action_date = fields.date.context_today(self, cr, uid, context=ctx)
if partner.payment_next_action:
payment_next_action = partner.payment_next_action + " \n " + action_text
else:
payment_next_action = action_text
self.write(cr, uid, [partner.id], {'payment_next_action_date': payment_action_date,
'payment_next_action': payment_next_action}, context=ctx)
return unknown_mails
def get_followup_table_html(self, cr, uid, ids, context=None):
""" Build the html tables to be included in emails send to partners,
when reminding them their overdue invoices.
:param ids: [id] of the partner for whom we are building the tables
:rtype: string
"""
from report import account_followup_print
assert len(ids) == 1
if context is None:
context = {}
partner = self.browse(cr, uid, ids[0], context=context)
#copy the context to not change global context. Overwrite it because _() looks for the lang in local variable 'context'.
#Set the language to use = the partner language
context = dict(context, lang=partner.lang)
followup_table = ''
if partner.unreconciled_aml_ids:
company = self.pool.get('res.users').browse(cr, uid, uid, context=context).company_id
current_date = fields.date.context_today(self, cr, uid, context=context)
rml_parse = account_followup_print.report_rappel(cr, uid, "followup_rml_parser")
final_res = rml_parse._lines_get_with_partner(partner, company.id)
for currency_dict in final_res:
currency = currency_dict.get('line', [{'currency_id': company.currency_id}])[0]['currency_id']
followup_table += '''
<table border="2" width=100%%>
<tr>
<td>''' + _("Invoice Date") + '''</td>
<td>''' + _("Description") + '''</td>
<td>''' + _("Reference") + '''</td>
<td>''' + _("Due Date") + '''</td>
<td>''' + _("Amount") + " (%s)" % (currency.symbol) + '''</td>
<td>''' + _("Lit.") + '''</td>
</tr>
'''
total = 0
for aml in currency_dict['line']:
block = aml['blocked'] and 'X' or ' '
total += aml['balance']
strbegin = "<TD>"
strend = "</TD>"
date = aml['date_maturity'] or aml['date']
if date <= current_date and aml['balance'] > 0:
strbegin = "<TD><B>"
strend = "</B></TD>"
followup_table +="<TR>" + strbegin + str(aml['date']) + strend + strbegin + aml['name'] + strend + strbegin + (aml['ref'] or '') + strend + strbegin + str(date) + strend + strbegin + str(aml['balance']) + strend + strbegin + block + strend + "</TR>"
total = reduce(lambda x, y: x+y['balance'], currency_dict['line'], 0.00)
total = rml_parse.formatLang(total, dp='Account', currency_obj=currency)
followup_table += '''<tr> </tr>
</table>
<center>''' + _("Amount due") + ''' : %s </center>''' % (total)
return followup_table
def write(self, cr, uid, ids, vals, context=None):
if vals.get("payment_responsible_id", False):
for part in self.browse(cr, uid, ids, context=context):
                if part.payment_responsible_id != vals["payment_responsible_id"]:
#Find partner_id of user put as responsible
responsible_partner_id = self.pool.get("res.users").browse(cr, uid, vals['payment_responsible_id'], context=context).partner_id.id
self.pool.get("mail.thread").message_post(cr, uid, 0,
body = _("You became responsible to do the next action for the payment follow-up of") + " <b><a href='#id=" + str(part.id) + "&view_type=form&model=res.partner'> " + part.name + " </a></b>",
type = 'comment',
subtype = "mail.mt_comment", context = context,
model = 'res.partner', res_id = part.id,
partner_ids = [responsible_partner_id])
return super(res_partner, self).write(cr, uid, ids, vals, context=context)
def action_done(self, cr, uid, ids, context=None):
return self.write(cr, uid, ids, {'payment_next_action_date': False, 'payment_next_action':'', 'payment_responsible_id': False}, context=context)
def do_button_print(self, cr, uid, ids, context=None):
assert(len(ids) == 1)
company_id = self.pool.get('res.users').browse(cr, uid, uid, context=context).company_id.id
#search if the partner has accounting entries to print. If not, it may not be present in the
#psql view the report is based on, so we need to stop the user here.
if not self.pool.get('account.move.line').search(cr, uid, [
('partner_id', '=', ids[0]),
('account_id.type', '=', 'receivable'),
('reconcile_id', '=', False),
('state', '!=', 'draft'),
('company_id', '=', company_id),
('date_maturity', '<=', fields.date.context_today(self,cr,uid)),
], context=context):
raise osv.except_osv(_('Error!'),_("The partner does not have any accounting entries to print in the overdue report for the current company."))
self.message_post(cr, uid, [ids[0]], body=_('Printed overdue payments report'), context=context)
#build the id of this partner in the psql view. Could be replaced by a search with [('company_id', '=', company_id),('partner_id', '=', ids[0])]
wizard_partner_ids = [ids[0] * 10000 + company_id]
followup_ids = self.pool.get('account_followup.followup').search(cr, uid, [('company_id', '=', company_id)], context=context)
if not followup_ids:
raise osv.except_osv(_('Error!'),_("There is no followup plan defined for the current company."))
data = {
'date': fields.date.today(),
'followup_id': followup_ids[0],
}
#call the print overdue report on this partner
return self.do_partner_print(cr, uid, wizard_partner_ids, data, context=context)
def _get_amounts_and_date(self, cr, uid, ids, name, arg, context=None):
'''
Function that computes values for the followup functional fields. Note that 'payment_amount_due'
is similar to 'credit' field on res.partner except it filters on user's company.
'''
res = {}
company = self.pool.get('res.users').browse(cr, uid, uid, context=context).company_id
current_date = fields.date.context_today(self, cr, uid, context=context)
for partner in self.browse(cr, uid, ids, context=context):
worst_due_date = False
amount_due = amount_overdue = 0.0
for aml in partner.unreconciled_aml_ids:
if (aml.company_id == company):
date_maturity = aml.date_maturity or aml.date
if not worst_due_date or date_maturity < worst_due_date:
worst_due_date = date_maturity
amount_due += aml.result
if (date_maturity <= current_date):
amount_overdue += aml.result
res[partner.id] = {'payment_amount_due': amount_due,
'payment_amount_overdue': amount_overdue,
'payment_earliest_due_date': worst_due_date}
return res
def _get_followup_overdue_query(self, cr, uid, args, overdue_only=False, context=None):
'''
This function is used to build the query and arguments to use when making a search on functional fields
* payment_amount_due
* payment_amount_overdue
Basically, the query is exactly the same except that for overdue there is an extra clause in the WHERE.
:param args: arguments given to the search in the usual domain notation (list of tuples)
:param overdue_only: option to add the extra argument to filter on overdue accounting entries or not
:returns: a tuple with
* the query to execute as first element
* the arguments for the execution of this query
:rtype: (string, [])
'''
company_id = self.pool.get('res.users').browse(cr, uid, uid, context=context).company_id.id
having_where_clause = ' AND '.join(map(lambda x: '(SUM(bal2) %s %%s)' % (x[1]), args))
having_values = [x[2] for x in args]
query = self.pool.get('account.move.line')._query_get(cr, uid, context=context)
overdue_only_str = overdue_only and 'AND date_maturity <= NOW()' or ''
return ('''SELECT pid AS partner_id, SUM(bal2) FROM
(SELECT CASE WHEN bal IS NOT NULL THEN bal
ELSE 0.0 END AS bal2, p.id as pid FROM
(SELECT (debit-credit) AS bal, partner_id
FROM account_move_line l
WHERE account_id IN
(SELECT id FROM account_account
WHERE type=\'receivable\' AND active)
''' + overdue_only_str + '''
AND reconcile_id IS NULL
AND company_id = %s
AND ''' + query + ''') AS l
RIGHT JOIN res_partner p
ON p.id = partner_id ) AS pl
GROUP BY pid HAVING ''' + having_where_clause, [company_id] + having_values)
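    # A minimal usage sketch (domain values are hypothetical): searching for
    # partners owing more than 1000.0 builds the query once and feeds it to
    # the cursor:
    #   query, params = self._get_followup_overdue_query(
    #       cr, uid, [('payment_amount_due', '>', 1000.0)],
    #       overdue_only=False, context=context)
    #   cr.execute(query, params)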
def _payment_overdue_search(self, cr, uid, obj, name, args, context=None):
if not args:
return []
query, query_args = self._get_followup_overdue_query(cr, uid, args, overdue_only=True, context=context)
cr.execute(query, query_args)
res = cr.fetchall()
if not res:
return [('id','=','0')]
return [('id','in', [x[0] for x in res])]
def _payment_earliest_date_search(self, cr, uid, obj, name, args, context=None):
if not args:
return []
company_id = self.pool.get('res.users').browse(cr, uid, uid, context=context).company_id.id
having_where_clause = ' AND '.join(map(lambda x: '(MIN(l.date_maturity) %s %%s)' % (x[1]), args))
having_values = [x[2] for x in args]
query = self.pool.get('account.move.line')._query_get(cr, uid, context=context)
cr.execute('SELECT partner_id FROM account_move_line l '\
'WHERE account_id IN '\
'(SELECT id FROM account_account '\
'WHERE type=\'receivable\' AND active) '\
'AND l.company_id = %s '
'AND reconcile_id IS NULL '\
'AND '+query+' '\
'AND partner_id IS NOT NULL '\
'GROUP BY partner_id HAVING '+ having_where_clause,
[company_id] + having_values)
res = cr.fetchall()
if not res:
return [('id','=','0')]
return [('id','in', [x[0] for x in res])]
def _payment_due_search(self, cr, uid, obj, name, args, context=None):
if not args:
return []
query, query_args = self._get_followup_overdue_query(cr, uid, args, overdue_only=False, context=context)
cr.execute(query, query_args)
res = cr.fetchall()
if not res:
return [('id','=','0')]
return [('id','in', [x[0] for x in res])]
def _get_partners(self, cr, uid, ids, context=None):
        # This function searches for the partners linked to all the account.move.line records in 'ids' that have been changed
partners = set()
for aml in self.browse(cr, uid, ids, context=context):
if aml.partner_id:
partners.add(aml.partner_id.id)
return list(partners)
_inherit = "res.partner"
_columns = {
'payment_responsible_id':fields.many2one('res.users', ondelete='set null', string='Follow-up Responsible',
help="Optionally you can assign a user to this field, which will make him responsible for the action.",
track_visibility="onchange", copy=False),
'payment_note':fields.text('Customer Payment Promise', help="Payment Note", track_visibility="onchange", copy=False),
'payment_next_action':fields.text('Next Action', copy=False,
help="This is the next action to be taken. It will automatically be set when the partner gets a follow-up level that requires a manual action. ",
track_visibility="onchange"),
'payment_next_action_date': fields.date('Next Action Date', copy=False,
help="This is when the manual follow-up is needed. "
"The date will be set to the current date when the partner "
"gets a follow-up level that requires a manual action. "
"Can be practical to set manually e.g. to see if he keeps "
"his promises."),
'unreconciled_aml_ids':fields.one2many('account.move.line', 'partner_id', domain=['&', ('reconcile_id', '=', False), '&',
('account_id.active','=', True), '&', ('account_id.type', '=', 'receivable'), ('state', '!=', 'draft')]),
'latest_followup_date':fields.function(_get_latest, method=True, type='date', string="Latest Follow-up Date",
help="Latest date that the follow-up level of the partner was changed",
store=False, multi="latest"),
'latest_followup_level_id':fields.function(_get_latest, method=True,
type='many2one', relation='account_followup.followup.line', string="Latest Follow-up Level",
help="The maximum follow-up level",
store={
'res.partner': (lambda self, cr, uid, ids, c: ids,[],10),
'account.move.line': (_get_partners, ['followup_line_id'], 10),
},
multi="latest"),
'latest_followup_level_id_without_lit':fields.function(_get_latest, method=True,
type='many2one', relation='account_followup.followup.line', string="Latest Follow-up Level without litigation",
help="The maximum follow-up level without taking into account the account move lines with litigation",
store={
'res.partner': (lambda self, cr, uid, ids, c: ids,[],10),
'account.move.line': (_get_partners, ['followup_line_id'], 10),
},
multi="latest"),
'payment_amount_due':fields.function(_get_amounts_and_date,
type='float', string="Amount Due",
store = False, multi="followup",
fnct_search=_payment_due_search),
'payment_amount_overdue':fields.function(_get_amounts_and_date,
type='float', string="Amount Overdue",
store = False, multi="followup",
fnct_search = _payment_overdue_search),
'payment_earliest_due_date':fields.function(_get_amounts_and_date,
type='date',
string = "Worst Due Date",
multi="followup",
fnct_search=_payment_earliest_date_search),
}
class account_config_settings(osv.TransientModel):
_name = 'account.config.settings'
_inherit = 'account.config.settings'
def open_followup_level_form(self, cr, uid, ids, context=None):
res_ids = self.pool.get('account_followup.followup').search(cr, uid, [], context=context)
return {
'type': 'ir.actions.act_window',
'name': 'Payment Follow-ups',
'res_model': 'account_followup.followup',
'res_id': res_ids and res_ids[0] or False,
'view_mode': 'form,tree',
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
lowiki-org/localwiki-backend-server
|
refs/heads/master
|
localwiki/main/models.py
|
31
|
from __future__ import absolute_import
from .celery import app as celery_app
|
fengbaicanhe/intellij-community
|
refs/heads/master
|
python/testData/selectWord/literal/after2.py
|
83
|
x = r"<selection>hello world again</selection>"
|
shingonoide/odoo
|
refs/heads/deverp_8.0
|
addons/base_vat/__openerp__.py
|
262
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'VAT Number Validation',
'version': '1.0',
'category': 'Hidden/Dependency',
'description': """
VAT validation for Partner's VAT numbers.
=========================================
After installing this module, values entered in the VAT field of Partners will
be validated for all supported countries. The country is inferred from the
2-letter country code that prefixes the VAT number, e.g. ``BE0477472701``
will be validated using the Belgian rules.
There are two different levels of VAT number validation:
--------------------------------------------------------
* By default, a simple off-line check is performed using the known validation
rules for the country, usually a simple check digit. This is quick and
always available, but allows numbers that are perhaps not truly allocated,
or not valid anymore.
* When the "VAT VIES Check" option is enabled (in the configuration of the user's
Company), VAT numbers will be instead submitted to the online EU VIES
database, which will truly verify that the number is valid and currently
  allocated to an EU company. This is a little bit slower than the simple
off-line check, requires an Internet connection, and may not be available
all the time. If the service is not available or does not support the
requested country (e.g. for non-EU countries), a simple check will be performed
instead.
Supported countries currently include EU countries, and a few non-EU countries
such as Chile, Colombia, Mexico, Norway or Russia. For unsupported countries,
only the country code will be validated.
""",
'author': 'OpenERP SA',
'depends': ['account'],
'website': 'https://www.odoo.com/page/accounting',
'data': ['base_vat_view.xml'],
'installable': True,
'auto_install': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
paulsoh/moxie
|
refs/heads/master
|
moxie/social/apps/tornado_app/utils.py
|
76
|
import warnings
from functools import wraps
from social.utils import setting_name
from social.strategies.utils import get_strategy
from social.backends.utils import get_backend
DEFAULTS = {
'STORAGE': 'social.apps.tornado_app.models.TornadoStorage',
'STRATEGY': 'social.strategies.tornado_strategy.TornadoStrategy'
}
def get_helper(request_handler, name):
return request_handler.settings.get(setting_name(name),
DEFAULTS.get(name, None))
def load_strategy(request_handler):
strategy = get_helper(request_handler, 'STRATEGY')
storage = get_helper(request_handler, 'STORAGE')
return get_strategy(strategy, storage, request_handler)
def load_backend(request_handler, strategy, name, redirect_uri):
backends = get_helper(request_handler, 'AUTHENTICATION_BACKENDS')
Backend = get_backend(backends, name)
return Backend(strategy, redirect_uri)
def psa(redirect_uri=None):
def decorator(func):
@wraps(func)
def wrapper(self, backend, *args, **kwargs):
uri = redirect_uri
if uri and not uri.startswith('/'):
uri = self.reverse_url(uri, backend)
self.strategy = load_strategy(self)
self.backend = load_backend(self, self.strategy, backend, uri)
return func(self, backend, *args, **kwargs)
return wrapper
return decorator
def strategy(*args, **kwargs):
warnings.warn('@strategy decorator is deprecated, use @psa instead')
return psa(*args, **kwargs)
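# A minimal usage sketch (handler and route names are hypothetical): @psa
# resolves the strategy and backend before the handler body runs, exposing
# them as self.strategy and self.backend.
#
#   class CompleteHandler(tornado.web.RequestHandler):
#       @psa('complete')          # 'complete' is an assumed reverse_url name
#       def get(self, backend):
#           user = self.backend.complete(user=None)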
|
ichpuchtli/Geometry-Genocide
|
refs/heads/master
|
game.py
|
1
|
# Author: Sam Macpherson
__doc__ = 'This module contains a series of classes which contribute to the \
overall game play'
from math import sin, cos, pi, acos, asin, radians, ceil
from random import uniform, choice
from library import System,Global,Sprite,Vector, Text, Time, Draw
class Logo(Sprite):
""" Main logo object """
def __init__(self):
self.image = 'logo.png'
self.position = Vector(0,0)
self.load_image()
self.center = self.get_center()
self.transparency(0)
self.time = Time()
def reload(self):
# Fade in
if self.time < 3:
self.transparency(80*self.time.period()/1000.0)
class PlayBtn(Sprite):
""" Play button object """
def __init__(self):
self.position = Vector(0,0)
self.image = 'play.png'
self.load_image()
self.center = self.get_center()
self.transparency(0)
self.time = Time()
def reload(self):
# Fade in
if self.time < 3:
self.transparency(80*self.time.period()/1000.0)
class ExitBtn(Sprite):
""" Exit button object """
def __init__(self):
self.position = Vector(0,0)
self.image = 'exit.png'
self.load_image()
self.center = self.get_center()
self.transparency(0)
self.time = Time()
def reload(self):
# Fade in
if self.time < 3:
self.transparency(80*self.time.period()/1000.0)
class Euclidean(Draw):
""" Base class for all matrix objects """
def render(self):
""" Substitutes trace method for render method """
self.trace()
def pinata(self,radius):
""" Creates a set of circle instances associated with points in a matrix """
for i in range(len(self.matrix)):
Global.enemies += [Circle(self.position+self.matrix[i],radius,self.color,
self.color2)]
def follow(self):
""" Calculates the unit direction vector from object position to player
position a displacer is added to slightly disperse a horde of enemies"""
self.direction = -(self.position-(Global.SpaceShip_position+self.displacer))
def attack(self):
""" Similar to the follow method except the object is directed slightly
ahead of the spaceship"""
self.direction = -(self.position-(self.displacer+Global.SpaceShip_position+\
Global.SpaceShip_direction*100))
def accelerate(self):
""" Overrides base method in system, to account for deathstars, enemy
objects are attracted to the closest deathstart"""
        if Global.deathstars:
closest_deathstar = sorted(Global.deathstars,key=
lambda x: abs(self.position-x.position))[0]
self.direction = -(self.position-(closest_deathstar.position+self.displacer))
Draw.accelerate(self)
def load(self):
""" Prepares enemy object """
self.position = Vector()*50 + Vector.origin
self.position *= [choice([1,-1]),choice([1,-1])]
self.original = self.copy()
self.speed = 0.05
self.direction = Vector()
self.displacer = Vector()*32
self.time = Time()
def bounce(self):
""" flips direction of objects to stop going off screen """
if abs(self.position.x) >= Global.window[0]/2:
self.direction.x *= -1
self.position.x *= 0.99
elif abs(self.position.y) >= Global.window[1]/2:
self.direction.y *= -1
self.position.y *= 0.99
def reload(self):
""" base reload method """
self.bounce()
self.rotate(Global.milliseconds/10)
self.accelerate()
def fusion(self,radius):
""" draws a set of circles associated with points in a matrix """
for i in range(len(self.matrix)):
self.nice_circle(self.position+self.matrix[i],radius,self.color,self.color2)
def remove(self):
Global.enemies.remove(self)
def destruct(self):
""" base destruct method"""
Global.particles.append(Explosion(self.position.tupl(),15,self.color))
Global.enemies.remove(self)
Global.score += self.score
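# A minimal sketch (shape and radius are hypothetical) of how a matrix enemy
# composes the Euclidean helpers: trace() draws the outline, fusion() dresses
# the vertices with circles each frame, and pinata() spawns Circle enemies at
# those vertices on death.
#
#   class Hexagon(Euclidean):
#       def render(self):
#           self.trace()
#           self.fusion(6)
#       def destruct(self):
#           Euclidean.destruct(self)
#           self.pinata(6)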
class CrossHair(Sprite,System):
""" Cursor object """
def __init__(self):
self.image = 'cursor.png'
self.load()
self.position = Vector(0,0)
def reload(self):
self.position.x, self.position.y = self.get_mouse_position()
class Square(Euclidean):
'Square Enemy'
def __init__(self):
self.position = Vector(0,0)
self.play('square.wav')
self.matrix = [15,15], [-15,15], [-15,-15],[15,15], [15,-15],[-15,-15]\
,[-15,15],[15,-15]
self.color = 255,32,255
self.color2 = 255,32,255
self.load()
self.speed = 0.15
self.score = 450
def reload(self):
self.follow()
self.rotate(self.direction.angle())
self.accelerate()
def destruct(self):
Euclidean.destruct(self)
Global.enemies += [Square2(Vector(10,5) + self.position),
Square2(Vector(-10,-5) + self.position)]
class Square2(Euclidean):
""" Child square object """
def __init__(self,vector):
self.matrix = [10,10], [-10,10], [-10,-10],[10,10], [10,-10],[-10,-10],\
[-10,10],[10,-10]
self.color = 255,32,255
self.color2 = 255,32,255
self.load()
self.speed = 0.2
self.score = 150
self.position = vector
def reload(self):
self.bounce()
self.rotate(Global.milliseconds/5)
self.accelerate()
class Octagon(Euclidean):
""" Octagon Enemy"""
def __init__(self):
self.play('octagon.wav')
self.position = Vector(0,0)
self.matrix = [1.207,0.5], [0.5,1.207], [-0.5,1.207],[-1.207,0.5]\
,[-1.207,-0.5],[-0.5,-1.207],[0.5,-1.207],[1.207,-0.5]
self.scale(25)
self.color = 255,128,32
self.color2 = 255,128,64
self.load()
self.speed = 0.25
self.score = 1650
def render(self):
self.trace()
self.fusion(7)
def destruct(self):
Euclidean.destruct(self)
self.pinata(7)
def reload(self):
self.attack()
self.rotate(Global.milliseconds/10)
self.accelerate()
class Triangle2(Euclidean):
""" Triangle enemy """
def __init__(self):
self.position = Vector(0,0)
self.play('triangle2.wav')
self.matrix = [-0.5,-3**0.5/4], [0.5,-3**0.5/4], [0,3**0.5/4]
self.scale(30)
self.color = 174,203,0
self.color2 = 0,128,0
self.load()
self.speed = 0.2
self.score = 550
def reload(self):
self.bounce()
self.rotate(Global.milliseconds/18)
self.accelerate()
def render(self):
self.trace()
self.fusion(8)
def destruct(self):
Euclidean.destruct(self)
self.pinata(8)
class Rhombus(Euclidean):
"""Rhombus enemy"""
def __init__(self):
self.play('rhombus.wav')
self.position = Vector(0,0)
self.matrix = [-15,0], [0, 25], [15,0], [0, -25]
self.color = 0,200,255
self.color2 = 0,140,200
self.load()
self.speed = 0.15
self.score = 100
def reload(self):
self.follow()
self.accelerate()
class Circle(Euclidean):
""" Circle enemy"""
def __init__(self,position=None,radius=10,color=(32,64,255),
color2=(50,200,255)):
self.position = Vector(0,0)
if position:
self.position.x,self.position.y = position
else:
self.position.x,self.position.y = Vector.origin + Vector()*50
self.position *= [choice([1,-1]),choice([1,-1])]
self.radius = radius
self.speed = 0.35
self.direction = ~Vector()
self.displacer = ~Vector()*25
self.color = color
self.color2 = color2
self.score = 300
def reload(self):
self.follow()
self.accelerate()
def render(self):
self.reload()
self.nice_circle(self.position,self.radius,self.color,self.color2)
class Explosion(Draw):
""" Explosion object """
def __init__(self,pos,size,color,span=1,speed=1):
self.particles = []
self.color = color
self.speed = speed
self.span = span
self.position = Vector(pos[0],pos[1])
for i in range(size):
self.particles.append(Vector() + Vector())
self.time = Time()
def render(self):
if self.time > self.span:
self.destruct()
for particle in self.particles:
self.line(self.position+particle*self.speed*self.time.period(),self.position+particle\
*self.speed*self.time.period()*1.1,self.color)
def destruct(self):
Global.particles.remove(self)
class DeathStar(Sprite):
""" Deathstar enemy"""
def __init__(self):
self.play('deathstar.wav')
self.image = 'deathstar.png'
self.position = -(Global.SpaceShip_position+Vector(0,0))
self.lives = 20
if abs(self.position-Global.SpaceShip_position) < 100:
self.position += 150,150
self.circles = 5
self.load()
def hit(self):
self.lives -= 1
def reload(self):
if self.lives < 1:
self.play('deathstar2.wav')
self.destruct()
for i in range(self.circles):
Global.enemies += [Circle(self.position+Vector()*100)]
for terrorist in Global.enemies:
if abs(terrorist.position-self.position) < 50:
terrorist.destruct()
self.circles += 1
break
def destruct(self):
Global.deathstars.remove(self)
# Global.particles.append(Explosion(self.position.tupl(),100,(235,97,61)))
class Pinwheel(Euclidean):
""" Pinwheel enemy"""
def __init__(self):
self.play('pinwheel.wav')
self.matrix = [0,0], [0,1],[0.5,0.5],[-0.5,-0.5],[0,-1],[0,0],[1,0],\
[0.5,-0.5],[-0.5,0.5], [-1,0]
self.position = Vector(0,0)
self.scale(20)
self.load()
self.color = 200,64,255
self.color2 = 76,0,184
self.score = 50
def reload(self):
self.bounce()
self.rotate(-Global.milliseconds/10)
self.accelerate()
class Bullet(Euclidean):
""" Bullet object """
def __init__(self,position=(0,0),angle=0):
self.matrix = [-2,0], [0,1], [2,0], [0,-1]
self.scale(5)
self.color = 255,0,0
self.color2 = 255,200,200
self.position = Vector(position[0],position[1])
self.direction = Vector(cos(radians(angle)),sin(radians(angle)))
self.speed = 1
self.original = self.copy()
self.time = Time()
self.rotate(angle)
def destruct(self):
Global.bullets.remove(self)
return True
def accelerate(self):
        Draw.accelerate(self)
def reload(self):
if abs(self.position.x)-50 < Global.window[0]/2 and \
abs(self.position.y)-50 < Global.window[1]/2:
for planet in Global.deathstars:
if abs(planet.position-self.position) < 64:
planet.hit()
self.destruct()
return True
for terrorist in Global.enemies:
if abs(terrorist.position-self.position) < 38:
self.destruct()
terrorist.destruct()
return True
else:
self.accelerate()
else:
self.destruct()
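# A compact sketch of the for/else pattern used in Bullet.reload above: the
# else branch runs only when the loop finishes without break (an early return
# also skips it), so the bullet keeps accelerating only on frames where it
# collided with nothing.
#
#   for target in targets:                # hypothetical names
#       if collides(bullet, target):
#           explode(bullet, target)
#           break
#   else:
#       bullet.accelerate()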
class SpaceShip(Sprite):
""" SpaceShip object player controlled"""
def init(self):
self.image = 'player.png'
self.load()
self.position = Vector(0,0)
self.speed = 0.35
self.shooting = False
self.shot_delay = 0.15
self.direction = Vector(0,0)
self.time = Time()
def start_shooting(self):
self.shooting = True
def stop_shooting(self):
self.shooting = False
def reload(self):
if self.shooting:
if self.time > self.shot_delay:
self.time.reset()
angle = (self._cursor.position - self.position).angle()
Global.bullets += [Bullet(self.position.tupl(),angle)]
if Global.score > 150000:
Global.bullets += [Bullet(self.position.tupl(),angle-3)]
if Global.score > 25000:
Global.bullets += [Bullet(self.position.tupl(),angle+3)]
                if Global.score > 50000:
self.shot_delay = 0.09
elif Global.score > 10000:
self.shot_delay = 0.12
if (self.position.x <= -Global.window[0]/2 and self.direction.x < 0) or \
(self.position.x >= Global.window[0]/2 and self.direction.x > 0):
self.direction.x = 0
elif (self.position.y <= -Global.window[1]/2 and self.direction.y < 0) or \
(self.position.y >= Global.window[1]/2 and self.direction.y > 0):
self.direction.y = 0
for terrorist in Global.enemies + Global.deathstars:
if abs(terrorist.position-self.position) < 32:
self.destruct()
break
else:
self.accelerate()
Global.SpaceShip_position = self.position.tupl()
Global.SpaceShip_direction = ~self.direction
def destruct(self):
while len(Global.enemies) > 0:
Global.enemies[-1].remove()
while len(Global.deathstars) > 0:
Global.deathstars[-1].destruct()
Global.lives -= 1
if Global.lives < 1:
Global.particles.append(Explosion(self.position.tupl(),200,(255,255,200),5,0.2))
self.load_main_menu()
self.score = Text() << Global.score
self.score.log('scores.txt')
self.play('die.wav')
else:
Global.particles.append(Explosion(self.position.tupl(),200,(255,255,200)))
self.init()
self.play('die1.wav')
class Gameplay(SpaceShip):
"""Game play class"""
def start_game(self):
# initiate spaceship
SpaceShip.init(self)
# initiate timers
self.reset_clocks(True)
self.cluster_size = 8
def battlefield(self):
# if in game
if not self.main_menu:
            self.assault()
self.render()
self._cursor.render()
for obj in Global.bullets:
obj.render()
for obj in Global.deathstars:
obj.render()
for obj in Global.enemies:
obj.render()
for obj in Global.particles:
obj.render()
else:
for obj in Global.particles:
obj.render()
def add(self,geo=None,x=1):
"""Spawns any enemy object if geo is undefined spawn random enemy"""
for i in range(x):
if geo is None:
geo = choice((Pinwheel,Rhombus,Square,Rhombus))
self.cluster.append(geo)
geo = None
def add_cluster(self,enemy,num):
""" Spawn a cluster of enemies """
for i in range(num):
self.cluster.append(enemy)
def reset_clocks(self,init=False):
""" creates a list of Time instances """
        self.timer = [Time() for i in range(9)]
self.cluster = []
""" main game play algorithm """
def assult(self):
# delay the spawning of enemies
if self.cluster != []:
if self.timer[0] > 0.01:
self.timer[0].reset()
Global.enemies += [self.cluster.pop(0)()]
# Every 60s
if self.timer[1] > 60:
self.timer[1].reset()
Global.deathstars += [DeathStar()]
        # Before 2 minutes since game start
if self.timer[-1] < 120:
# Every 3s
if self.timer[2] > 3:
self.timer[2].reset()
self.add(None,2)
# add an enemy if number gets below 20
if len(Global.enemies) < 25:
self.add()
# Every 11s
if self.timer[3] > 11:
self.timer[3].reset()
self.add(None,2)
self.add(Square)
# Every 17s
if self.timer[4] > 17:
self.timer[4].reset()
self.add(None,2)
self.add_cluster(choice((Pinwheel,Rhombus)),self.cluster_size)
self.cluster_size += 1
# Every 23s
if self.timer[5] > 23:
self.timer[5].reset()
self.add(Triangle2)
self.add_cluster(Square,self.cluster_size)
self.cluster_size += 1
# Every 39s
if self.timer[6] > 39:
self.timer[6].reset()
self.add(None,3)
self.add(Octagon)
else: # if after 2 mins since game start
# Every 3sec
if self.timer[2] > 3:
self.timer[2].reset()
self.add(Square)
self.add(Pinwheel)
self.add(Triangle2)
self.add()
# Every 11 sec
if self.timer[3] > 11:
self.timer[3].reset()
self.add_cluster(choice((Triangle2,Square,Rhombus)),self.cluster_size)
self.add(None,2)
# Every 17 sec
if self.timer[4] > 17:
self.timer[4].reset()
self.add(choice((Octagon,Circle,Triangle2)),self.cluster_size)
self.add(None,3)
self.add(Octagon)
# Every 25 sec
if self.timer[5] > 25:
self.timer[5].reset()
Global.deathstars += [DeathStar()]
self.add(None,4)
# Every 32 sec
            if self.timer[7] > 32:
                self.timer[7].reset()
Global.deathstars += [DeathStar()]
self.add(None,5)
# Every 46s
if self.timer[6] > 46:
self.timer[6].reset()
                self.add_cluster(choice((Triangle2, Rhombus, Circle, Pinwheel)), self.cluster_size)
self.add(None,10)
|
andela-bojengwa/talk
|
refs/heads/master
|
venv/lib/python2.7/site-packages/rest_framework/utils/model_meta.py
|
5
|
"""
Helper function for returning the field information that is associated
with a model class. This includes returning all the forward and reverse
relationships and their associated metadata.
Usage: `get_field_info(model)` returns a `FieldInfo` instance.
"""
from collections import namedtuple
from django.core.exceptions import ImproperlyConfigured
from django.db import models
from django.utils import six
from rest_framework.compat import OrderedDict
import inspect
FieldInfo = namedtuple('FieldResult', [
'pk', # Model field instance
'fields', # Dict of field name -> model field instance
'forward_relations', # Dict of field name -> RelationInfo
'reverse_relations', # Dict of field name -> RelationInfo
'fields_and_pk', # Shortcut for 'pk' + 'fields'
'relations' # Shortcut for 'forward_relations' + 'reverse_relations'
])
RelationInfo = namedtuple('RelationInfo', [
'model_field',
'related',
'to_many',
'has_through_model'
])
def _resolve_model(obj):
"""
Resolve supplied `obj` to a Django model class.
`obj` must be a Django model class itself, or a string
representation of one. Useful in situations like GH #1225 where
Django may not have resolved a string-based reference to a model in
another model's foreign key definition.
String representations should have the format:
'appname.ModelName'
"""
if isinstance(obj, six.string_types) and len(obj.split('.')) == 2:
app_name, model_name = obj.split('.')
resolved_model = models.get_model(app_name, model_name)
if resolved_model is None:
msg = "Django did not return a model for {0}.{1}"
raise ImproperlyConfigured(msg.format(app_name, model_name))
return resolved_model
elif inspect.isclass(obj) and issubclass(obj, models.Model):
return obj
raise ValueError("{0} is not a Django model".format(obj))
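# A minimal usage sketch (app and model names are hypothetical): both accepted
# forms resolve to the same model class.
#
#   _resolve_model('myapp.Author')   # lazy 'appname.ModelName' reference
#   _resolve_model(Author)           # the model class itself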
def get_field_info(model):
"""
Given a model class, returns a `FieldInfo` instance, which is a
`namedtuple`, containing metadata about the various field types on the model
including information about their relationships.
"""
opts = model._meta.concrete_model._meta
pk = _get_pk(opts)
fields = _get_fields(opts)
forward_relations = _get_forward_relationships(opts)
reverse_relations = _get_reverse_relationships(opts)
fields_and_pk = _merge_fields_and_pk(pk, fields)
relationships = _merge_relationships(forward_relations, reverse_relations)
return FieldInfo(pk, fields, forward_relations, reverse_relations,
fields_and_pk, relationships)
def _get_pk(opts):
pk = opts.pk
while pk.rel and pk.rel.parent_link:
# If model is a child via multi-table inheritance, use parent's pk.
pk = pk.rel.to._meta.pk
return pk
def _get_fields(opts):
fields = OrderedDict()
for field in [field for field in opts.fields if field.serialize and not field.rel]:
fields[field.name] = field
return fields
def _get_forward_relationships(opts):
"""
Returns an `OrderedDict` of field names to `RelationInfo`.
"""
forward_relations = OrderedDict()
for field in [field for field in opts.fields if field.serialize and field.rel]:
forward_relations[field.name] = RelationInfo(
model_field=field,
related=_resolve_model(field.rel.to),
to_many=False,
has_through_model=False
)
# Deal with forward many-to-many relationships.
for field in [field for field in opts.many_to_many if field.serialize]:
forward_relations[field.name] = RelationInfo(
model_field=field,
related=_resolve_model(field.rel.to),
to_many=True,
has_through_model=(
not field.rel.through._meta.auto_created
)
)
return forward_relations
def _get_reverse_relationships(opts):
"""
Returns an `OrderedDict` of field names to `RelationInfo`.
"""
# Note that we have a hack here to handle internal API differences for
# this internal API across Django 1.7 -> Django 1.8.
# See: https://code.djangoproject.com/ticket/24208
reverse_relations = OrderedDict()
for relation in opts.get_all_related_objects():
accessor_name = relation.get_accessor_name()
related = getattr(relation, 'related_model', relation.model)
reverse_relations[accessor_name] = RelationInfo(
model_field=None,
related=related,
to_many=relation.field.rel.multiple,
has_through_model=False
)
# Deal with reverse many-to-many relationships.
for relation in opts.get_all_related_many_to_many_objects():
accessor_name = relation.get_accessor_name()
related = getattr(relation, 'related_model', relation.model)
reverse_relations[accessor_name] = RelationInfo(
model_field=None,
related=related,
to_many=True,
has_through_model=(
(getattr(relation.field.rel, 'through', None) is not None)
and not relation.field.rel.through._meta.auto_created
)
)
return reverse_relations
def _merge_fields_and_pk(pk, fields):
fields_and_pk = OrderedDict()
fields_and_pk['pk'] = pk
fields_and_pk[pk.name] = pk
fields_and_pk.update(fields)
return fields_and_pk
def _merge_relationships(forward_relations, reverse_relations):
return OrderedDict(
list(forward_relations.items()) +
list(reverse_relations.items())
)
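# A minimal, hedged usage sketch (the model name is hypothetical): FieldInfo
# gives serializers a single flat view of a model's fields and relations.
#
#   info = get_field_info(Author)
#   info.pk.name                  # primary key field name
#   list(info.fields)             # plain, non-relational fields
#   list(info.forward_relations)  # FK / M2M entries as RelationInfo tuples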
|
UIKit0/marsyas
|
refs/heads/master
|
scripts/large-evaluators/local-echonest.py
|
5
|
#!/usr/bin/env python
import sys
import os.path
import os
import time
import pipes
import json
import mar_collection
def echonest_upload(full_filename):
print "trying: ", full_filename
cmd = "echonest-analyzer %s" % pipes.quote(full_filename)
os.system(cmd)
time.sleep(0.5)
    json_data = open(full_filename + '.json')
data = json.load(json_data)
json_data.close()
bpm = data["track"]["tempo"]
print bpm
return bpm
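# A minimal usage sketch (the path is hypothetical): the analyzer writes
# '<file>.json' next to the audio file, and the tempo is read back from it.
#   bpm = echonest_upload('/data/audio/track01.wav')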
def main(mf_name):
coll = mar_collection.MarCollection(mf_name)
filenames = coll.get_filenames()
bpm_mf_name = os.path.basename(mf_name)
bpm_mf_name = bpm_mf_name.replace(".mf", "-echonest_bpm.mf")
bpm_coll = mar_collection.MarCollection(bpm_mf_name)
for filename in filenames:
bpm = echonest_upload(filename)
bpm_coll.set_item(filename, bpm)
bpm_coll.write()
if __name__ == '__main__':
    mf_name = sys.argv[1]
    main(mf_name)
|
titimoby/connected
|
refs/heads/master
|
jsserver/node_modules/ponte/node_modules/mosca/node_modules/leveldown/node_modules/prebuild/node_modules/node-gyp/gyp/pylib/gyp/common_test.py
|
2542
|
#!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Unit tests for the common.py file."""
import gyp.common
import unittest
import sys
class TestTopologicallySorted(unittest.TestCase):
def test_Valid(self):
"""Test that sorting works on a valid graph with one possible order."""
graph = {
'a': ['b', 'c'],
'b': [],
'c': ['d'],
'd': ['b'],
}
def GetEdge(node):
return tuple(graph[node])
self.assertEqual(
gyp.common.TopologicallySorted(graph.keys(), GetEdge),
['a', 'c', 'd', 'b'])
def test_Cycle(self):
"""Test that an exception is thrown on a cyclic graph."""
graph = {
'a': ['b'],
'b': ['c'],
'c': ['d'],
'd': ['a'],
}
def GetEdge(node):
return tuple(graph[node])
self.assertRaises(
gyp.common.CycleError, gyp.common.TopologicallySorted,
graph.keys(), GetEdge)
class TestGetFlavor(unittest.TestCase):
"""Test that gyp.common.GetFlavor works as intended"""
original_platform = ''
def setUp(self):
self.original_platform = sys.platform
def tearDown(self):
sys.platform = self.original_platform
def assertFlavor(self, expected, argument, param):
sys.platform = argument
self.assertEqual(expected, gyp.common.GetFlavor(param))
def test_platform_default(self):
self.assertFlavor('freebsd', 'freebsd9' , {})
self.assertFlavor('freebsd', 'freebsd10', {})
self.assertFlavor('openbsd', 'openbsd5' , {})
    self.assertFlavor('solaris', 'sunos5' , {})
    self.assertFlavor('solaris', 'sunos' , {})
    self.assertFlavor('linux' , 'linux2' , {})
    self.assertFlavor('linux' , 'linux3' , {})
def test_param(self):
self.assertFlavor('foobar', 'linux2' , {'flavor': 'foobar'})
if __name__ == '__main__':
unittest.main()
|
JohnGriffiths/dipy
|
refs/heads/master
|
scratch/very_scratch/simulation_comparison_dsi_gqi.py
|
22
|
import numpy as np
import dipy as dp
import dipy.io.pickles as pkl
import scipy as sp
fname='/home/ian/Data/SimData/results_SNR030_1fibre'
#fname='/home/eg01/Data_Backup/Data/Marta/DSI/SimData/results_SNR030_isotropic'
''' The file has one row for every voxel; every voxel is repeated 1000
times with the same noise level, and then we have 100 different
directions. 1000 * 100 is the number of all rows.
'''
marta_table_fname='/home/ian/Data/SimData/Dir_and_bvals_DSI_marta.txt'
sim_data=np.loadtxt(fname)
#bvalsf='/home/eg01/Data_Backup/Data/Marta/DSI/SimData/bvals101D_float.txt'
b_vals_dirs=np.loadtxt(marta_table_fname)
bvals=b_vals_dirs[:,0]*1000
gradients=b_vals_dirs[:,1:]
gq = dp.GeneralizedQSampling(sim_data,bvals,gradients)
tn = dp.Tensor(sim_data,bvals,gradients)
#'''
gqfile = '/home/ian/Data/SimData/gq_SNR030_1fibre.pkl'
pkl.save_pickle(gqfile,gq)
tnfile = '/home/ian/Data/SimData/tn_SNR030_1fibre.pkl'
pkl.save_pickle(tnfile,tn)
'''
print tn.evals.shape
print tn.evecs.shape
evals=tn.evals[0]
evecs=tn.evecs[0]
print evecs.shape
first_directions = tn.evecs[:,:,0]
first1000 = first_directions[:1000,:]
cross = np.dot(first1000.T,first1000)
np.linalg.eig(cross)
'''
|
Kazade/NeHe-Website
|
refs/heads/master
|
google_appengine/lib/django-1.4/django/contrib/sitemaps/tests/urls/http.py
|
109
|
from datetime import datetime
from django.conf.urls import patterns, url
from django.contrib.sitemaps import Sitemap, GenericSitemap, FlatPageSitemap, views
from django.contrib.auth.models import User
from django.views.decorators.cache import cache_page
class SimpleSitemap(Sitemap):
changefreq = "never"
priority = 0.5
location = '/location/'
lastmod = datetime.now()
def items(self):
return [object()]
simple_sitemaps = {
'simple': SimpleSitemap,
}
generic_sitemaps = {
'generic': GenericSitemap({'queryset': User.objects.all()}),
}
flatpage_sitemaps = {
'flatpages': FlatPageSitemap,
}
urlpatterns = patterns('django.contrib.sitemaps.views',
(r'^simple/index\.xml$', 'index', {'sitemaps': simple_sitemaps}),
(r'^simple/custom-index\.xml$', 'index',
{'sitemaps': simple_sitemaps, 'template_name': 'custom_sitemap_index.xml'}),
(r'^simple/sitemap-(?P<section>.+)\.xml$', 'sitemap',
{'sitemaps': simple_sitemaps}),
(r'^simple/sitemap\.xml$', 'sitemap', {'sitemaps': simple_sitemaps}),
(r'^simple/custom-sitemap\.xml$', 'sitemap',
{'sitemaps': simple_sitemaps, 'template_name': 'custom_sitemap.xml'}),
(r'^generic/sitemap\.xml$', 'sitemap', {'sitemaps': generic_sitemaps}),
(r'^flatpages/sitemap\.xml$', 'sitemap', {'sitemaps': flatpage_sitemaps}),
url(r'^cached/index\.xml$', cache_page(1)(views.index),
{'sitemaps': simple_sitemaps, 'sitemap_url_name': 'cached_sitemap'}),
url(r'^cached/sitemap-(?P<section>.+)\.xml', cache_page(1)(views.sitemap),
{'sitemaps': simple_sitemaps}, name='cached_sitemap')
)
|
PeterDaveHello/eden
|
refs/heads/master
|
modules/s3db/skeleton.py
|
15
|
# -*- coding: utf-8 -*-
"""
This is just a commented template to copy/paste from when implementing
new models. Be sure you replace this docstring by something more
appropriate, e.g. a short module description and a license statement.
The module prefix is the same as the filename (without the ".py"), in this
case "skeleton". Remember to always add an import statement for your module
to:
models/00_tables.py
like:
import eden.skeleton
(Yeah - not this one of course :P it's just an example)
"""
# mandatory __all__ statement:
#
# - all classes in the name list will be initialized with the
# module prefix as only parameter. Subclasses of S3Model
# support this automatically, and run the model() method
# if the module is enabled in deployment_settings, otherwise
# the default() method.
#
# - all other names in the name list will be added to response.s3
# if their names start with the module prefix plus underscore
#
__all__ = ("SkeletonDataModel",
"skeleton_example_represent"
)
# The following import statements are needed in almost every model
# (you may need more than this in your particular case). To
# import classes from s3, use from + relative path like below
#
from gluon import *
from gluon.storage import Storage
from ..s3 import *
from s3layouts import S3AddResourceLink
# =============================================================================
# Define a new class as subclass of S3Model
# => you can define multiple of these classes within the same module, each
# of them will be initialized only when one of the declared names gets
# requested from s3db
# => remember to list all model classes in __all__, otherwise they won't ever
# be loaded.
#
class S3SkeletonDataModel(S3Model):
# Declare all the names this model can auto-load, i.e. all tablenames
# and all response.s3 names which are defined here. If you omit the "names"
# variable, then this class will serve as a fallback model for this module
# in case a requested name cannot be found in one of the other model classes
#
names = ("skeleton_example",
"skeleton_example_id",
)
# Define a function model() which takes no parameters (except self):
def model(self):
# You will most likely need (at least) these:
db = current.db
T = current.T
# This one may be useful:
settings = current.deployment_settings
# Now define your table(s),
# -> always use self.define_table instead of db.define_table, this
# makes sure the table won't be re-defined if it's already in db
        # -> use s3_meta_fields to include the standard meta fields;
        #    this requires the s3 import at the top of the module
tablename = "skeleton_example"
self.define_table(tablename,
Field("name"),
*s3_meta_fields())
# Use self.configure to configure your model (or current.s3db.configure)
self.configure(tablename,
listadd=False)
# The following shortcuts for S3 model functions are available (make
# sure you do not overwrite them):
#
# self.define_table => db.define_table (repeat-safe variant)
# self.super_entity => super_entity
# self.super_key => super_key
# self.super_link => super_link
# self.add_components => s3db.add_components
# self.configure => s3db.configure
# self.table => s3db.table
#
# If you need to reference external tables, always use the table-method.
# This will automatically load the respective model unless it is already
# loaded at this point:
xy_table = self.table("xy_table")
        # Alternatively, you can also use one of these:
xy_table = self.xy_table
xy_table = self["xy_table"]
# The following two are equivalent:
xy_variable = self.xy_variable
# and:
xy_variable = response.s3.xy_variable
# However, if "xy_variable" is also a tablename, then the first
# variant would return that table instead. Thus, make sure your
# response.s3-global variables do not use tablenames as names
# You can define ReusableFields,
# -> make sure you prefix their names properly with the module prefix:
skeleton_example_id = S3ReusableField("skeleton_example_id", "reference %s" % tablename,
label = T("Skeleton Example"),
requires = IS_EMPTY_OR(IS_ONE_OF(db,
"skeleton_example.id")))
# Pass names back to global scope (s3.*)
return dict(
skeleton_example_id=skeleton_example_id,
)
# -------------------------------------------------------------------------
@staticmethod
def defaults():
"""
Return safe defaults for model globals, this will be called instead
of model() in case the model has been deactivated in
deployment_settings.
You don't need this function in case your model is mandatory anyway.
"""
return dict(
skeleton_example_id = S3ReusableField("skeleton_example_id",
"integer",
readable=False,
writable=False),
)
# ---------------------------------------------------------------------
# Static so that calling it doesn't require loading the models
@staticmethod
def skeleton_example_onvalidation(form):
""" Form validation """
db = current.db
# Note that we don't need to use s3db here since this is a method of the class,
# so the table must have loaded
table = db.skeleton_example
query = (table.id == form.vars.id)
record = db(query).select(table.name,
limitby=(0, 1)).first()
return
# =============================================================================
# Module-global functions will automatically be added to response.s3 if
# they use the module prefix and are listed in __all__
#
# Represents are good to put here as they can be put places without loading the
# models at that time
#
def skeleton_example_represent(id):
if not id:
# Don't do a DB lookup if we have no id
        # Instead return a consistent representation of a null value
return current.messages["NONE"]
# Your function may need to access tables. If a table isn't defined
# at the point when this function gets called, then this:
s3db = current.s3db
    table = s3db.skeleton_example
# will load the table. This is the same function as self.table described in
# the model class except that "self" is not available here, so you need to
# use the class instance as reference instead
db = current.db
query = (table.id == id)
record = db(query).select(table.name,
limitby=(0, 1)).first()
try:
        # try/except is faster than an if-check for the common case where it works
return record.name
except:
# Data inconsistency error!
return current.messages.UNKNOWN_OPT
# END =========================================================================
|
zeroc-ice/ice-demos
|
refs/heads/3.7
|
python/Ice/optional/server.py
|
1
|
#!/usr/bin/env python
#
# Copyright (c) ZeroC, Inc. All rights reserved.
#
import signal
import sys
import Ice
Ice.loadSlice("Contact.ice")
import Demo
class ContactDBI(Demo.ContactDB):
def __init__(self):
self._contacts = {}
def addContact(self, name, type, number, dialGroup, current=None):
contact = Demo.Contact()
contact.name = name
if type != Ice.Unset:
contact.type = type
if number != Ice.Unset:
contact.number = number
if dialGroup != Ice.Unset:
contact.dialGroup = dialGroup
self._contacts[name] = contact
def updateContact(self, name, type, number, dialGroup, current=None):
if name in self._contacts:
contact = self._contacts[name]
if type != Ice.Unset:
contact.type = type
if number != Ice.Unset:
contact.number = number
if dialGroup != Ice.Unset:
contact.dialGroup = dialGroup
def query(self, name, current=None):
if name in self._contacts:
return self._contacts[name]
return None
def queryNumber(self, name, current=None):
if name in self._contacts:
return self._contacts[name].number
return Ice.Unset
def queryDialgroup(self, name, current=None):
if name in self._contacts:
return self._contacts[name].dialGroup
return Ice.Unset
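    # A brief client-side sketch (the proxy variable is hypothetical) of how
    # the optional values above round-trip: an unset optional arrives as
    # Ice.Unset rather than None.
    #   number = contactdb.queryNumber("alice")
    #   if number is Ice.Unset:
    #       print("no number on file")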
def shutdown(self, current=None):
print("Shutting down...")
current.adapter.getCommunicator().shutdown()
#
# Ice.initialize returns an initialized Ice communicator,
# the communicator is destroyed once it goes out of scope.
#
with Ice.initialize(sys.argv, "config.server") as communicator:
#
# Install a signal handler to shutdown the communicator on Ctrl-C
#
signal.signal(signal.SIGINT, lambda signum, handler: communicator.shutdown())
#
# The communicator initialization removes all Ice-related arguments from argv
#
if len(sys.argv) > 1:
print(sys.argv[0] + ": too many arguments")
sys.exit(1)
adapter = communicator.createObjectAdapter("ContactDB")
adapter.add(ContactDBI(), Ice.stringToIdentity("contactdb"))
adapter.activate()
communicator.waitForShutdown()
|
amplify-education/rover
|
refs/heads/master
|
rover/fetch.py
|
2
|
#!/usr/bin/env python
#
# Copyright (c) 2009 Wireless Generation, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
#vi:sts=4 ts=4 sw=4 expandtab
import os
import sys
from optparse import OptionParser
from rover import Rover
import rover.config
from rover.version import version
def main():
parser = OptionParser("""usage: %prog [options] config[@revision]
Rover accepts one 'config' argument, which is a file name
or path to a given rover configuration file. A revision
can be added optionally after the '@', and that revision will be
forced for all modules in that config file.""")
parser.add_option('', '--version',
action="store_true",
dest='version',
help='Report the version number and exit')
parser.add_option('-p', '--preserve-dirs',
action="store_true",
dest="preserve_dirs",
help="(git only) preserve path to git repository; i.e., git@github.com:outer/space/plan9.git will be checked out to `outer/space/plan9/' instead of just `plan9/', which is default git behavior.")
parser.add_option('-v', '--verbose',
action='store_true',
dest='verbose',
help='Print lots of stuff')
parser.add_option('-q', '--quiet',
action='store_false',
dest='verbose',
default=False,
help='Only print commands being issued (default)')
parser.add_option('-t', '--test-mode',
action='store_true',
dest='test_mode',
default=False,
help="Build and display commands, but don't issue them")
parser.add_option('-m', '--mode',
action='store',
dest='checkout_mode',
default='preserve',
help="Must be one of {'paranoid', 'clean', 'preserve'}. Paranoid wipes out the entire source directory before doing a fresh checkout, clean performs an update but reverts all modified files to the repository version, and preserve performs an update while attempting to preserve local modifications. Preserve is the default.")
parser.add_option('-d','--checkout-dir',
action='store',
dest='checkout_dir',
default=None,
help='Root dir, relative to working dir, that Rover will check out to. Defaults to the name of the config.')
parser.add_option('-f','--manifest',
action='store',
dest='manifest_filename',
default=None,
help='File in which to store list of all directories & branches checked out')
parser.add_option('-x', '--exclude',
action='append',
dest='excludes',
default=[],
help='Files or directories to not check out. Specify full path, eg, `src/test.java`... May specify multiple paths.')
parser.add_option('-i', '--include',
action='append',
dest='includes',
default=[],
help='Files or directories to check out. Specify full path, eg, `src/test.java`. May specify multiple paths. If specified, only files or directories matched will be checked out.')
opts, args = parser.parse_args()
if opts.version:
        print 'rover version ' + version()[1:]
sys.exit(0)
if len(args) < 1:
parser.print_help()
sys.exit(-1)
elif len(args) > 1:
print "Multiple config names no longer supported as of version 0.3"
parser.print_help()
sys.exit(-1)
config_name = args[0]
config_filename = rover.config.find_config(config_name)
if not config_filename:
print "Could not find config file for '%s'" % config_name
sys.exit(-2)
try:
r = Rover(config_names=[config_name]
, checkout_mode=opts.checkout_mode
, checkout_dir=opts.checkout_dir)
r.set_verbose(opts.verbose)
r.set_test_mode(opts.test_mode)
r.set_manifest(opts.manifest_filename)
r.set_excludes(opts.excludes)
r.set_includes(opts.includes)
r.set_preserve_dirs(opts.preserve_dirs)
except Exception, e:
parser.print_help()
print
parser.set_usage('')
parser.error(e)
r.run()
if __name__ == '__main__':
main()
|
tboyce021/home-assistant
|
refs/heads/dev
|
homeassistant/components/evohome/water_heater.py
|
16
|
"""Support for WaterHeater devices of (EMEA/EU) Honeywell TCC systems."""
import logging
from typing import List
from homeassistant.components.water_heater import (
SUPPORT_AWAY_MODE,
SUPPORT_OPERATION_MODE,
WaterHeaterEntity,
)
from homeassistant.const import PRECISION_TENTHS, PRECISION_WHOLE, STATE_OFF, STATE_ON
from homeassistant.helpers.typing import ConfigType, HomeAssistantType
import homeassistant.util.dt as dt_util
from . import EvoChild
from .const import DOMAIN, EVO_FOLLOW, EVO_PERMOVER
_LOGGER = logging.getLogger(__name__)
STATE_AUTO = "auto"
HA_STATE_TO_EVO = {STATE_AUTO: "", STATE_ON: "On", STATE_OFF: "Off"}
EVO_STATE_TO_HA = {v: k for k, v in HA_STATE_TO_EVO.items() if k != ""}
STATE_ATTRS_DHW = ["dhwId", "activeFaults", "stateStatus", "temperatureStatus"]
async def async_setup_platform(
hass: HomeAssistantType, config: ConfigType, async_add_entities, discovery_info=None
) -> None:
"""Create a DHW controller."""
if discovery_info is None:
return
broker = hass.data[DOMAIN]["broker"]
_LOGGER.debug(
"Adding: DhwController (%s), id=%s",
broker.tcs.hotwater.zone_type,
broker.tcs.hotwater.zoneId,
)
new_entity = EvoDHW(broker, broker.tcs.hotwater)
async_add_entities([new_entity], update_before_add=True)
class EvoDHW(EvoChild, WaterHeaterEntity):
"""Base for a Honeywell TCC DHW controller (aka boiler)."""
def __init__(self, evo_broker, evo_device) -> None:
"""Initialize an evohome DHW controller."""
super().__init__(evo_broker, evo_device)
self._unique_id = evo_device.dhwId
self._name = "DHW controller"
self._icon = "mdi:thermometer-lines"
self._precision = PRECISION_TENTHS if evo_broker.client_v1 else PRECISION_WHOLE
self._supported_features = SUPPORT_AWAY_MODE | SUPPORT_OPERATION_MODE
@property
def state(self):
"""Return the current state."""
return EVO_STATE_TO_HA[self._evo_device.stateStatus["state"]]
@property
def current_operation(self) -> str:
"""Return the current operating mode (Auto, On, or Off)."""
if self._evo_device.stateStatus["mode"] == EVO_FOLLOW:
return STATE_AUTO
return EVO_STATE_TO_HA[self._evo_device.stateStatus["state"]]
@property
def operation_list(self) -> List[str]:
"""Return the list of available operations."""
return list(HA_STATE_TO_EVO)
@property
def is_away_mode_on(self):
"""Return True if away mode is on."""
is_off = EVO_STATE_TO_HA[self._evo_device.stateStatus["state"]] == STATE_OFF
is_permanent = self._evo_device.stateStatus["mode"] == EVO_PERMOVER
return is_off and is_permanent
async def async_set_operation_mode(self, operation_mode: str) -> None:
"""Set new operation mode for a DHW controller.
Except for Auto, the mode is only until the next SetPoint.
"""
if operation_mode == STATE_AUTO:
await self._evo_broker.call_client_api(self._evo_device.set_dhw_auto())
else:
await self._update_schedule()
until = dt_util.parse_datetime(self.setpoints.get("next_sp_from", ""))
until = dt_util.as_utc(until) if until else None
if operation_mode == STATE_ON:
await self._evo_broker.call_client_api(
self._evo_device.set_dhw_on(until=until)
)
else: # STATE_OFF
await self._evo_broker.call_client_api(
self._evo_device.set_dhw_off(until=until)
)
async def async_turn_away_mode_on(self):
"""Turn away mode on."""
await self._evo_broker.call_client_api(self._evo_device.set_dhw_off())
async def async_turn_away_mode_off(self):
"""Turn away mode off."""
await self._evo_broker.call_client_api(self._evo_device.set_dhw_auto())
async def async_update(self) -> None:
"""Get the latest state data for a DHW controller."""
await super().async_update()
for attr in STATE_ATTRS_DHW:
self._device_state_attrs[attr] = getattr(self._evo_device, attr)
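# A minimal sketch (not part of the original module) of the away-mode rule
# encoded in is_away_mode_on above: away mode is modelled as the DHW being
# forced Off with a permanent override. `_status` is a hypothetical stand-in
# for self._evo_device.stateStatus.
#
#   _status = {"state": "Off", "mode": EVO_PERMOVER}
#   away = (EVO_STATE_TO_HA[_status["state"]] == STATE_OFF
#           and _status["mode"] == EVO_PERMOVER)  # -> True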
|
arahuja/scikit-learn
|
refs/heads/master
|
sklearn/ensemble/tests/test_gradient_boosting_loss_functions.py
|
221
|
"""
Testing for the gradient boosting loss functions and initial estimators.
"""
import numpy as np
from numpy.testing import assert_array_equal
from numpy.testing import assert_almost_equal
from numpy.testing import assert_equal
from nose.tools import assert_raises
from sklearn.utils import check_random_state
from sklearn.ensemble.gradient_boosting import BinomialDeviance
from sklearn.ensemble.gradient_boosting import LogOddsEstimator
from sklearn.ensemble.gradient_boosting import LeastSquaresError
from sklearn.ensemble.gradient_boosting import RegressionLossFunction
from sklearn.ensemble.gradient_boosting import LOSS_FUNCTIONS
from sklearn.ensemble.gradient_boosting import _weighted_percentile
def test_binomial_deviance():
# Check binomial deviance loss.
# Check against alternative definitions in ESLII.
bd = BinomialDeviance(2)
# pred has the same BD for y in {0, 1}
assert_equal(bd(np.array([0.0]), np.array([0.0])),
bd(np.array([1.0]), np.array([0.0])))
assert_almost_equal(bd(np.array([1.0, 1.0, 1.0]),
np.array([100.0, 100.0, 100.0])),
0.0)
assert_almost_equal(bd(np.array([1.0, 0.0, 0.0]),
np.array([100.0, -100.0, -100.0])), 0)
# check if same results as alternative definition of deviance (from ESLII)
alt_dev = lambda y, pred: np.mean(np.logaddexp(0.0, -2.0 *
(2.0 * y - 1) * pred))
test_data = [(np.array([1.0, 1.0, 1.0]), np.array([100.0, 100.0, 100.0])),
(np.array([0.0, 0.0, 0.0]), np.array([100.0, 100.0, 100.0])),
(np.array([0.0, 0.0, 0.0]),
np.array([-100.0, -100.0, -100.0])),
(np.array([1.0, 1.0, 1.0]),
np.array([-100.0, -100.0, -100.0]))]
for datum in test_data:
assert_almost_equal(bd(*datum), alt_dev(*datum))
    # check the negative gradient against the alternative formula from ESLII
alt_ng = lambda y, pred: (2 * y - 1) / (1 + np.exp(2 * (2 * y - 1) * pred))
for datum in test_data:
assert_almost_equal(bd.negative_gradient(*datum), alt_ng(*datum))
def test_log_odds_estimator():
# Check log odds estimator.
est = LogOddsEstimator()
assert_raises(ValueError, est.fit, None, np.array([1]))
est.fit(None, np.array([1.0, 0.0]))
assert_equal(est.prior, 0.0)
assert_array_equal(est.predict(np.array([[1.0], [1.0]])),
np.array([[0.0], [0.0]]))
def test_sample_weight_smoke():
rng = check_random_state(13)
y = rng.rand(100)
pred = rng.rand(100)
# least squares
loss = LeastSquaresError(1)
loss_wo_sw = loss(y, pred)
loss_w_sw = loss(y, pred, np.ones(pred.shape[0], dtype=np.float32))
assert_almost_equal(loss_wo_sw, loss_w_sw)
def test_sample_weight_init_estimators():
# Smoke test for init estimators with sample weights.
rng = check_random_state(13)
X = rng.rand(100, 2)
sample_weight = np.ones(100)
reg_y = rng.rand(100)
clf_y = rng.randint(0, 2, size=100)
for Loss in LOSS_FUNCTIONS.values():
if Loss is None:
continue
if issubclass(Loss, RegressionLossFunction):
k = 1
y = reg_y
else:
k = 2
y = clf_y
if Loss.is_multi_class:
# skip multiclass
continue
loss = Loss(k)
init_est = loss.init_estimator()
init_est.fit(X, y)
out = init_est.predict(X)
assert_equal(out.shape, (y.shape[0], 1))
sw_init_est = loss.init_estimator()
sw_init_est.fit(X, y, sample_weight=sample_weight)
        sw_out = sw_init_est.predict(X)
assert_equal(sw_out.shape, (y.shape[0], 1))
# check if predictions match
assert_array_equal(out, sw_out)
def test_weighted_percentile():
    y = np.empty(102, dtype=np.float64)
    y[:50] = 0
    y[-51:] = 2
    y[-1] = 100000
    y[50] = 1
    sw = np.ones(102, dtype=np.float64)
sw[-1] = 0.0
score = _weighted_percentile(y, sw, 50)
assert score == 1
def test_weighted_percentile_equal():
    y = np.empty(102, dtype=np.float64)
    y.fill(0.0)
    sw = np.ones(102, dtype=np.float64)
sw[-1] = 0.0
score = _weighted_percentile(y, sw, 50)
assert score == 0
def test_weighted_percentile_zero_weight():
    y = np.empty(102, dtype=np.float64)
    y.fill(1.0)
    sw = np.ones(102, dtype=np.float64)
sw.fill(0.0)
score = _weighted_percentile(y, sw, 50)
assert score == 1.0
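# A reference sketch consistent with the three tests above (an assumption
# about _weighted_percentile's semantics, not a copy of its implementation):
# sort y, accumulate the sample weights, and return the first value whose
# cumulative weight reaches the requested fraction of the total weight.
def _weighted_percentile_ref(y, sample_weight, percentile=50):
    order = np.argsort(y)
    y_sorted = y[order]
    cum_weight = np.cumsum(sample_weight[order])
    threshold = cum_weight[-1] * percentile / 100.0
    # first index whose cumulative weight reaches the threshold
    return y_sorted[np.searchsorted(cum_weight, threshold)]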
def test_sample_weight_deviance():
# Test if deviance supports sample weights.
rng = check_random_state(13)
X = rng.rand(100, 2)
sample_weight = np.ones(100)
reg_y = rng.rand(100)
clf_y = rng.randint(0, 2, size=100)
mclf_y = rng.randint(0, 3, size=100)
for Loss in LOSS_FUNCTIONS.values():
if Loss is None:
continue
if issubclass(Loss, RegressionLossFunction):
k = 1
y = reg_y
p = reg_y
else:
k = 2
y = clf_y
p = clf_y
if Loss.is_multi_class:
k = 3
y = mclf_y
# one-hot encoding
p = np.zeros((y.shape[0], k), dtype=np.float64)
for i in range(k):
p[:, i] = y == i
loss = Loss(k)
deviance_w_w = loss(y, p, sample_weight)
deviance_wo_w = loss(y, p)
assert deviance_wo_w == deviance_w_w
|
pleaseproject/python-for-android
|
refs/heads/master
|
python-build/python-libs/xmpppy/doc/examples/bot.py
|
87
|
#!/usr/bin/python
# -*- coding: koi8-r -*-
# $Id: bot.py,v 1.2 2006/10/06 12:30:42 normanr Exp $
import sys
import xmpp
commands={}
i18n={'ru':{},'en':{}}
########################### user handlers start ##################################
i18n['en']['HELP']="This is an example jabber bot.\nAvailable commands: %s"
def helpHandler(user,command,args,mess):
lst=commands.keys()
lst.sort()
return "HELP",', '.join(lst)
i18n['en']['EMPTY']="%s"
i18n['en']['HOOK1']='Response 1: %s'
def hook1Handler(user,command,args,mess):
return "HOOK1",'You requested: %s'%args
i18n['en']['HOOK2']='Response 2: %s'
def hook2Handler(user,command,args,mess):
return "HOOK2","hook2 called with %s"%(`(user,command,args,mess)`)
i18n['en']['HOOK3']='Response 3: static string'
def hook3Handler(user,command,args,mess):
return "HOOK3"*int(args)
########################### user handlers stop ###################################
############################ bot logic start #####################################
i18n['en']["UNKNOWN COMMAND"]='Unknown command "%s". Try "help"'
i18n['en']["UNKNOWN USER"]="I do not know you. Register first."
def messageCB(conn,mess):
text=mess.getBody()
user=mess.getFrom()
    user.lang='en' # per-user language lookup is not implemented; default to English
if text.find(' ')+1: command,args=text.split(' ',1)
else: command,args=text,''
cmd=command.lower()
if commands.has_key(cmd): reply=commands[cmd](user,command,args,mess)
else: reply=("UNKNOWN COMMAND",cmd)
if type(reply)==type(()):
key,args=reply
if i18n[user.lang].has_key(key): pat=i18n[user.lang][key]
elif i18n['en'].has_key(key): pat=i18n['en'][key]
else: pat="%s"
if type(pat)==type(''): reply=pat%args
else: reply=pat(**args)
else:
try: reply=i18n[user.lang][reply]
except KeyError:
try: reply=i18n['en'][reply]
except KeyError: pass
if reply: conn.send(xmpp.Message(mess.getFrom(),reply))
for i in globals().keys():
if i[-7:]=='Handler' and i[:-7].lower()==i[:-7]: commands[i[:-7]]=globals()[i]
############################# bot logic stop #####################################
def StepOn(conn):
try:
conn.Process(1)
except KeyboardInterrupt: return 0
return 1
def GoOn(conn):
while StepOn(conn): pass
if len(sys.argv)<3:
print "Usage: bot.py username@server.net password"
else:
jid=xmpp.JID(sys.argv[1])
user,server,password=jid.getNode(),jid.getDomain(),sys.argv[2]
conn=xmpp.Client(server)#,debug=[])
conres=conn.connect()
if not conres:
print "Unable to connect to server %s!"%server
sys.exit(1)
    if conres!='tls':
        print "Warning: unable to establish secure connection - TLS failed!"
authres=conn.auth(user,password)
if not authres:
print "Unable to authorize on %s - check login/password."%server
sys.exit(1)
    if authres!='sasl':
        print "Warning: unable to perform SASL auth on %s. Old authentication method used!"%server
conn.RegisterHandler('message',messageCB)
conn.sendInitPresence()
print "Bot started."
GoOn(conn)
|
mxOBS/deb-pkg_trusty_chromium-browser
|
refs/heads/master
|
tools/perf/metrics/speedindex.py
|
9
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import collections
from metrics import Metric
from telemetry.image_processing import image_util
from telemetry.image_processing import rgba_color
from telemetry.value import scalar
class SpeedIndexMetric(Metric):
"""The speed index metric is one way of measuring page load speed.
It is meant to approximate user perception of page load speed, and it
is based on the amount of time that it takes to paint to the visual
portion of the screen. It includes paint events that occur after the
onload event, and it doesn't include time loading things off-screen.
This speed index metric is based on WebPageTest.org (WPT).
For more info see: http://goo.gl/e7AH5l
"""
def __init__(self):
super(SpeedIndexMetric, self).__init__()
self._impl = None
@classmethod
def CustomizeBrowserOptions(cls, options):
options.AppendExtraBrowserArgs('--disable-infobars')
def Start(self, _, tab):
"""Start recording events.
This method should be called in the WillNavigateToPage method of
a PageTest, so that all the events can be captured. If it's called
in DidNavigateToPage, that will be too late.
"""
self._impl = (VideoSpeedIndexImpl() if tab.video_capture_supported else
PaintRectSpeedIndexImpl())
self._impl.Start(tab)
def Stop(self, _, tab):
"""Stop timeline recording."""
assert self._impl, 'Must call Start() before Stop()'
assert self.IsFinished(tab), 'Must wait for IsFinished() before Stop()'
self._impl.Stop(tab)
# Optional argument chart_name is not in base class Metric.
# pylint: disable=W0221
def AddResults(self, tab, results, chart_name=None):
"""Calculate the speed index and add it to the results."""
index = self._impl.CalculateSpeedIndex(tab)
# Release the tab so that it can be disconnected.
self._impl = None
results.AddValue(scalar.ScalarValue(
results.current_page, '%s_speed_index' % chart_name, 'ms', index,
        description='Speed Index. This metric focuses on when the visible '
                    'parts of the page are displayed, i.e. the time at '
                    'which the first view is "almost" fully composed. If '
                    'the page under test consists only of static resources, '
                    'load time is the more accurate measure and the speed '
                    'index will be smaller than the load time. If, on the '
                    'other hand, the page is composed of many XHR requests '
                    'with a small main resource and JavaScript, the speed '
                    'index captures perceived performance better than load '
                    'time, since load time only measures when the static '
                    'resources finish loading. For more detail, see '
                    'http://goo.gl/Rw3d5d. There are currently two '
                    'implementations: the Android one uses video capture; '
                    'the Desktop one uses paint events and incurs extra '
                    'overhead to capture them.'))
def IsFinished(self, tab):
"""Decide whether the timeline recording should be stopped.
    The point at which timeline recording is stopped determines which paint
    events are used in the speed index calculation. In general, the recording
    should continue while data is still being received, because that suggests
    painting may continue.
    A page may repeatedly request resources in an infinite loop; a timeout
    should be placed in any measurement that uses this metric, e.g.:
      def IsDone():
        return self._speedindex.IsFinished(tab)
      util.WaitFor(IsDone, 60)
    Returns:
      True if the page has reached quiescence (roughly, a couple of seconds
      since the last resource was received), False otherwise.
"""
return tab.HasReachedQuiescence()
class SpeedIndexImpl(object):
def Start(self, tab):
raise NotImplementedError()
def Stop(self, tab):
raise NotImplementedError()
def GetTimeCompletenessList(self, tab):
"""Returns a list of time to visual completeness tuples.
In the WPT PHP implementation, this is also called 'visual progress'.
"""
raise NotImplementedError()
def CalculateSpeedIndex(self, tab):
"""Calculate the speed index.
The speed index number conceptually represents the number of milliseconds
that the page was "visually incomplete". If the page were 0% complete for
1000 ms, then the score would be 1000; if it were 0% complete for 100 ms
then 90% complete (ie 10% incomplete) for 900 ms, then the score would be
1.0*100 + 0.1*900 = 190.
Returns:
A single number, milliseconds of visual incompleteness.
"""
time_completeness_list = self.GetTimeCompletenessList(tab)
prev_completeness = 0.0
speed_index = 0.0
prev_time = time_completeness_list[0][0]
for time, completeness in time_completeness_list:
      # Add the incremental value for the interval just before this event.
elapsed_time = time - prev_time
incompleteness = (1.0 - prev_completeness)
speed_index += elapsed_time * incompleteness
# Update variables for next iteration.
prev_completeness = completeness
prev_time = time
return int(speed_index)
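# Illustrative check (not part of the original file) of the arithmetic in
# CalculateSpeedIndex, using the example from its docstring: 0% complete for
# 100 ms, then 90% complete for 900 ms => 1.0*100 + 0.1*900 = 190.
class _ExampleSpeedIndexImpl(SpeedIndexImpl):
  def GetTimeCompletenessList(self, tab):
    return [(0, 0.0), (100, 0.9), (1000, 1.0)]
assert _ExampleSpeedIndexImpl().CalculateSpeedIndex(None) == 190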
class VideoSpeedIndexImpl(SpeedIndexImpl):
def __init__(self, image_util_module=image_util):
# Allow image_util to be passed in so we can fake it out for testing.
super(VideoSpeedIndexImpl, self).__init__()
self._time_completeness_list = None
self._image_util_module = image_util_module
def Start(self, tab):
assert tab.video_capture_supported
# Blank out the current page so it doesn't count towards the new page's
# completeness.
tab.Highlight(rgba_color.WHITE)
# TODO(tonyg): Bitrate is arbitrary here. Experiment with screen capture
# overhead vs. speed index accuracy and set the bitrate appropriately.
tab.StartVideoCapture(min_bitrate_mbps=4)
def Stop(self, tab):
# Ignore white because Chrome may blank out the page during load and we want
# that to count as 0% complete. Relying on this fact, we also blank out the
# previous page to white. The tolerance of 8 experimentally does well with
# video capture at 4mbps. We should keep this as low as possible with
# supported video compression settings.
video_capture = tab.StopVideoCapture()
histograms = [(time, self._image_util_module.GetColorHistogram(
image, ignore_color=rgba_color.WHITE, tolerance=8))
for time, image in video_capture.GetVideoFrameIter()]
start_histogram = histograms[0][1]
final_histogram = histograms[-1][1]
total_distance = start_histogram.Distance(final_histogram)
def FrameProgress(histogram):
if total_distance == 0:
if histogram.Distance(final_histogram) == 0:
return 1.0
else:
return 0.0
return 1 - histogram.Distance(final_histogram) / total_distance
self._time_completeness_list = [(time, FrameProgress(hist))
for time, hist in histograms]
def GetTimeCompletenessList(self, tab):
assert self._time_completeness_list, 'Must call Stop() first.'
return self._time_completeness_list
class PaintRectSpeedIndexImpl(SpeedIndexImpl):
def __init__(self):
super(PaintRectSpeedIndexImpl, self).__init__()
def Start(self, tab):
tab.StartTimelineRecording()
def Stop(self, tab):
tab.StopTimelineRecording()
def GetTimeCompletenessList(self, tab):
events = tab.timeline_model.GetAllEvents()
viewport = self._GetViewportSize(tab)
paint_events = self._IncludedPaintEvents(events)
time_area_dict = self._TimeAreaDict(paint_events, viewport)
total_area = sum(time_area_dict.values())
assert total_area > 0.0, 'Total paint event area must be greater than 0.'
completeness = 0.0
time_completeness_list = []
# TODO(tonyg): This sets the start time to the start of the first paint
# event. That can't be correct. The start time should be navigationStart.
# Since the previous screen is not cleared at navigationStart, we should
# probably assume the completeness is 0 until the first paint and add the
# time of navigationStart as the start. We need to confirm what WPT does.
time_completeness_list.append(
(tab.timeline_model.GetAllEvents()[0].start, completeness))
for time, area in sorted(time_area_dict.items()):
completeness += float(area) / total_area
# Visual progress is rounded to the nearest percentage point as in WPT.
time_completeness_list.append((time, round(completeness, 2)))
return time_completeness_list
def _GetViewportSize(self, tab):
"""Returns dimensions of the viewport."""
return tab.EvaluateJavaScript('[ window.innerWidth, window.innerHeight ]')
def _IncludedPaintEvents(self, events):
"""Get all events that are counted in the calculation of the speed index.
    There's one category of paint event that's filtered out: paint events
    that occur before the first 'Layout' event that follows the first
    'ResourceReceiveResponse' event.
Previously in the WPT speed index, paint events that contain children paint
events were also filtered out.
"""
def FirstLayoutTime(events):
"""Get the start time of the first layout after a resource received."""
has_received_response = False
for event in events:
if event.name == 'ResourceReceiveResponse':
has_received_response = True
elif has_received_response and event.name == 'Layout':
return event.start
assert False, 'There were no layout events after resource receive events.'
first_layout_time = FirstLayoutTime(events)
paint_events = [e for e in events
if e.start >= first_layout_time and e.name == 'Paint']
return paint_events
def _TimeAreaDict(self, paint_events, viewport):
"""Make a dict from time to adjusted area value for events at that time.
The adjusted area value of each paint event is determined by how many paint
events cover the same rectangle, and whether it's a full-window paint event.
"Adjusted area" can also be thought of as "points" of visual completeness --
each rectangle has a certain number of points and these points are
distributed amongst the paint events that paint that rectangle.
Args:
paint_events: A list of paint events
viewport: A tuple (width, height) of the window.
Returns:
A dictionary of times of each paint event (in milliseconds) to the
adjusted area that the paint event is worth.
"""
width, height = viewport
fullscreen_area = width * height
def ClippedArea(rectangle):
"""Returns rectangle area clipped to viewport size."""
_, x0, y0, x1, y1 = rectangle
clipped_width = max(0, min(width, x1) - max(0, x0))
clipped_height = max(0, min(height, y1) - max(0, y0))
return clipped_width * clipped_height
grouped = self._GroupEventByRectangle(paint_events)
event_area_dict = collections.defaultdict(int)
for rectangle, events in grouped.items():
# The area points for each rectangle are divided up among the paint
# events in that rectangle.
area = ClippedArea(rectangle)
update_count = len(events)
adjusted_area = float(area) / update_count
      # Paint events covering the full viewport are counted at half weight.
if area == fullscreen_area:
adjusted_area /= 2
for event in events:
# The end time for an event is used for that event's time.
event_time = event.end
event_area_dict[event_time] += adjusted_area
return event_area_dict
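  # Numeric illustration of the weighting above: a 100x100 rectangle painted
  # by two events yields 10000/2 = 5000 area points per event; if that
  # rectangle covered the whole viewport, each event would get 2500 instead.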
def _GetRectangle(self, paint_event):
"""Get the specific rectangle on the screen for a paint event.
Each paint event belongs to a frame (as in html <frame> or <iframe>).
This, together with location and dimensions, comprises a rectangle.
In the WPT source, this 'rectangle' is also called a 'region'.
"""
def GetBox(quad):
"""Gets top-left and bottom-right coordinates from paint event.
In the timeline data from devtools, paint rectangle dimensions are
      represented by the x-y coordinates of the four corners, clockwise from
      the top-left.
See: function WebInspector.TimelinePresentationModel.quadFromRectData
in file src/out/Debug/obj/gen/devtools/TimelinePanel.js.
"""
x0, y0, _, _, x1, y1, _, _ = quad
return (x0, y0, x1, y1)
assert paint_event.name == 'Paint'
frame = paint_event.args['frameId']
return (frame,) + GetBox(paint_event.args['data']['clip'])
def _GroupEventByRectangle(self, paint_events):
"""Group all paint events according to the rectangle that they update."""
result = collections.defaultdict(list)
for event in paint_events:
assert event.name == 'Paint'
result[self._GetRectangle(event)].append(event)
return result
|
gregomni/swift
|
refs/heads/master
|
utils/pygments/swift.py
|
33
|
#!/usr/bin/env python
import re
from pygments.lexer import (
RegexLexer,
bygroups,
default,
include,
)
from pygments.token import (
Comment,
Generic,
Keyword,
Name,
Number,
Operator,
Punctuation,
String,
Text,
Whitespace,
)
__all__ = ['SwiftLexer', 'SILLexer', 'SwiftConsoleLexer']
class SwiftLexer(RegexLexer):
name = 'Swift'
aliases = ['swift']
filenames = ['*.swift']
flags = re.MULTILINE | re.DOTALL
_isa = r'([a-zA-Z_][a-zA-Z0-9_]*)(\s*)(:)(\s*)([A-Z0-9_][a-zA-Z0-9_]*)'
_isa_comma = r'([a-zA-Z_][a-zA-Z0-9_]*)(\s*)(:)(\s*)' + \
r'([A-Z0-9_][a-zA-Z0-9_]*)(,\s?)'
_name = u'([@a-zA-Z_\U00000100-\U00100000]' + \
u'[a-zA-Z0-9_\U00000100-\U00100000]*)'
tokens = {
'root': [
(r'^', Punctuation, 'root2'),
],
'root2': [
(r'\n', Text, '#pop'),
include('func-class-list'),
(r'\bimport\s+', Keyword.Namespace, 'import'),
(r'\b(class|struct|protocol|extension)\s',
Keyword.Declaration, 'class-decl'),
include('body'),
],
'func-class-list': [
(r'\b(func|init|deinit|class func|public func)\s',
Keyword.Declaration, 'func-decl'),
],
'comment': [
(r'//.*?\n', Comment.Single, '#pop'),
(r'/\*', Comment.Multiline, 'comment-multiline'),
],
'token-list': [
(r'\$([0-9]+)', Name.Variable), # Tokens
],
'body': [
include('comment'),
include('name'),
(r'\.{3}', Generic.Emph), # emphasize ellipses
(r'[\~\^\*!%&<>+=/?-]|\.{2}', Operator),
include('token-list'),
(r'[\[\]\(\)\{\}\|:;,.#]', Punctuation),
(r'[0-9]+\.[0-9]+', Number.Float),
(r'0x[0-9a-fA-F]+', Number.Hex),
(r'[0-9]+', Number.Integer),
(r'\s', Whitespace),
(r'\(', Punctuation, 'tuple'),
(r'(\b[A-Z][a-zA-Z0-9_]*\s?)(\()',
bygroups(Name.Constant, Punctuation), 'type-cast'),
(r'(\b[A-Z][a-zA-Z0-9_]*)(\.)([a-z][a-zA-Z0-9_]*)',
bygroups(Name.Constant, Punctuation, Name), 'arg-list'),
(r'"', String, 'string'),
(r'\'', String.Char, 'string'),
(r'(\bnew\b\s?)', Keyword.Reserved, 'class-name'),
(r'\b(true|false)\b', Keyword.Reserved),
(r'\b(if|else)\s', Keyword.Reserved),
(r'\b(return|break)\b', Keyword.Reserved),
(r'(\bset\b)(\s?)(\()', bygroups(
Keyword.Declaration, Whitespace, Punctuation), 'arg-list'),
(r'(set|get)(:)', bygroups(Keyword.Reserved, Punctuation)),
(r'\b(self|Self)\b', Name.Builtin.Pseudo),
(r'\bid\b', Name.Builtin),
(r'\b(var|let)\s', Keyword.Declaration, 'var-decl'),
(r'\bfor\s', Keyword.Reserved, 'for-loop'),
],
'body2': [
(r'}', Punctuation, '#pop'),
include('body'),
],
'isa': [
(_isa, bygroups(
Name,
Whitespace,
Punctuation,
Whitespace,
Name.Constant)),
],
'class-isa': [
(_isa, bygroups(Name.Class, Whitespace,
Punctuation, Whitespace, Name.Constant)),
],
'var-isa': [
(_isa, bygroups(Name.Variable, Whitespace,
Punctuation, Whitespace, Name.Constant)),
],
'var-isa-pop': [
(_isa, bygroups(Name.Variable, Whitespace,
Punctuation, Whitespace, Name.Constant), '#pop'),
],
'var-isa-comma': [
(_isa_comma, bygroups(Name.Variable, Whitespace,
Punctuation, Whitespace,
Name.Constant, Punctuation)),
],
'var-name': [
(r'[a-zA-Z_][a-zA-Z0-9_?]*', Name.Variable),
],
'tuple': [
(r'\(', Punctuation, 'in-tuple'),
],
'in-tuple': [
(r'\)', Punctuation, '#pop'),
include('class-name'),
include('name'),
include('isa'),
include('root2'),
],
'name': [
(_name, Name),
(r'`[^\n`]*`', Name),
(r'@_specialize', Name),
],
'comment-multiline': [
(r'[^*/]', Comment.Multiline),
(r'/\*', Comment.Multiline, '#push'),
(r'\*/', Comment.Multiline, '#pop'),
(r'[*/]', Comment.Multiline),
],
'import': [
(_name, Name.Namespace),
(r'(func|var|class)\s+', Keyword.Declaration),
(r'\.', Punctuation),
(r',\s*', Punctuation),
(r'\(', Punctuation, 'import'),
(r'\)', Punctuation, '#pop'),
(r'=', Operator),
(r' ', Text.Whitespace),
default('#pop'),
# ('\n', Punctuation, '#pop'),
],
'generic-type': [
(r'\s', Whitespace),
(r'>', Punctuation, '#pop'),
include('class-name'),
include('isa'),
include('root2'),
],
'class-name': [
(r'[_A-Z][a-zA-Z0-9_?]*', Name.Constant),
(r'(\[)([0-9]+)(\])',
bygroups(Operator, Number.Integer, Operator)),
(r'<', Punctuation, 'generic-type'),
(r'\.\(', Punctuation, 'arg-list'),
(r'\(', Punctuation, 'type-cast'),
(r'\)', Punctuation, '#pop'),
],
'label': [
(r'[a-zA-Z_][a-zA-Z0-9_]*:(?=\s*\n)', Name.Label),
],
'ws-pop': [
(r'\s?[\s\n]', Whitespace, '#pop'),
],
'var-decl': [
(r'(\[)([\w\s,]*)(\])(\s+)', bygroups(
Punctuation,
Name.Attribute,
Punctuation,
Whitespace)),
(r':\s*', Punctuation),
include('tuple'),
include('var-isa-comma'),
include('var-isa-pop'),
include('var-name'),
(r',\s+', Punctuation, 'var-decl'),
include('ws-pop'),
],
'for-loop': [
(r'\sin\s', Keyword.Reserved),
include('isa'),
include('name'),
include('ws-pop'),
include('root2'),
],
'func-decl': [
(r'(\[)([\w\s,]*)(\])(\s+)', bygroups(
Punctuation,
Name.Attribute,
Punctuation,
Whitespace)),
(r'\s?\bthrows\b', Keyword.Reserved),
(r'\s?\brethrows\b', Keyword.Reserved),
(r'\s?\breturn\b', Keyword.Reserved, 'root2'),
(r'<', Punctuation, 'generic-type'),
(r'\(\s?', Punctuation, 'arg-list'),
(r'\s?->\s?', Operator, 'return-type'),
(r'\s?(\w+|[\*\+\-\=]{1,2})(\s*)', bygroups(
Name.Function, Punctuation)),
(r'\s?' + _name + r'(\s*)', bygroups(
Name.Function, Punctuation)),
(r'\s?\{', Punctuation, '#pop'),
default('#pop'),
],
'return-type': [
include('tuple'),
include('class-name'),
(r'\bid\b', Name.Builtin),
(r'\s?\)', Punctuation, '#pop'),
(r'\s?\[', Punctuation),
(r'\s?\]\s*', Punctuation, '#pop'),
default('#pop'),
],
'name-list': [
(_name, Name.Namespace),
(r',\s*', Punctuation),
(r' ', Text.Whitespace),
(r'(\()(\d+\.\d+)(\))', bygroups(
Punctuation, Number.Float, Punctuation)),
default('#pop'),
],
'class-decl': [
(r'\{', Punctuation, '#pop'),
(r'(\[)([\w\s,]*)(\])(\s+)', bygroups(
Punctuation,
Name.Attribute,
Punctuation,
Whitespace)),
include('class-isa'),
(r'(\*?)([a-zA-Z_][a-zA-Z0-9_?]*)', bygroups(
Punctuation, Name.Class)),
(r'\.', Punctuation),
(r'<', Punctuation, 'generic-type'),
(r':', Punctuation, 'name-list'),
(r'\s', Whitespace),
(r'\s?(,)(\s*)([A-Z0-9_][a-zA-Z0-9_]*)', bygroups(
Punctuation, Whitespace, Name.Constant)),
(r'<', Punctuation, 'generic-type'),
(r'where', Keyword.Reserved),
default("#pop"),
],
'arg-list': [
(r',\s?', Punctuation),
(r'\)', Punctuation, '#pop'),
(r'\s?\bthrows\b', Keyword.Reserved),
(r'\s?\brethrows\b', Keyword.Reserved),
include('isa'),
(r'\s?->\s?', Operator, 'return-type'),
include('root2'),
],
'type-cast': [
(r'\)', Punctuation, '#pop'),
include('root2'),
],
'in-interpolated': [
(r'\)', String.Interpol, '#pop'),
include('root2'),
],
'string': [
(r'"', String, '#pop'),
(r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})',
String.Escape),
(r'\\\(', String.Interpol, 'in-interpolated'),
(r'[^\\"]+', String),
(r'\\', String),
],
}
class SILLexer(RegexLexer):
name = 'SIL'
aliases = ['sil']
filenames = ['*.sil']
flags = re.MULTILINE | re.DOTALL
_isa = SwiftLexer._isa
_isa_comma = SwiftLexer._isa_comma
_name = SwiftLexer._name
tokens = SwiftLexer.tokens.copy()
tokens['token-list'] = [
(r'[%]([a-zA-Z0-9]+)', Name.Variable), # Tokens
(r'\$[*]?([a-zA-Z0-9]+)', Name.Variable), # Tokens
(r'\$[*]?\(([a-zA-Z0-9, ]+\))', Name.Variable), # Tokens
]
class SwiftConsoleLexer(RegexLexer):
name = 'SwiftConsole'
aliases = ['swift-console']
filenames = ['*.swiftc']
flags = re.MULTILINE | re.DOTALL
_isa = SwiftLexer._isa
_isa_comma = SwiftLexer._isa_comma
_name = SwiftLexer._name
tokens = SwiftLexer.tokens.copy()
tokens['root'] = [
(r'Welcome to swift. Type \':help\' for assistance.', Generic.Prompt),
(r'(\(swift\) | )', Generic.Prompt, 'root2'),
(r'\(swift\)', Generic.Prompt),
(r' ', Generic.Prompt),
(r'//.*?\n', Generic.Output),
(r'<REPL Buffer>:[0-9]*:[0-9]*:.*?\n', Generic.Heading),
(r'~*?\^\s?~*?\n', Generic.Heading),
(r'.*?\n', Generic.Output),
]
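# Example usage (illustrative; assumes Pygments is installed):
#   from pygments import highlight
#   from pygments.formatters import HtmlFormatter
#   print(highlight('let greeting = "hello"', SwiftLexer(), HtmlFormatter()))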
|
JetBrains/intellij-community
|
refs/heads/master
|
python/testData/completion/superInitKwParams.py
|
83
|
class B:
def __init__(self, auno=True): pass
class C(B):
def __init__(self, **kwargs): pass
c = C(au<caret>)
|
ethan-nelson/osm-tasking-manager2
|
refs/heads/master
|
osmtm/models.py
|
1
|
from sqlalchemy import (
Table,
Column,
Float,
Integer,
BigInteger,
Unicode,
ForeignKey,
ForeignKeyConstraint,
PrimaryKeyConstraint,
Boolean,
DateTime,
CheckConstraint,
Index,
event,
and_
)
from sqlalchemy.sql.expression import (
func,
select,
)
from sqlalchemy.ext.hybrid import (
hybrid_property
)
from geoalchemy2 import (
Geometry,
shape,
)
from geoalchemy2.functions import (
ST_Area,
ST_Transform,
ST_Centroid,
GenericFunction
)
from geojson import (
Feature
)
from shapely.geometry import (
MultiPolygon
)
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import (
scoped_session,
sessionmaker,
relationship
)
from .utils import (
TileBuilder,
get_tiles_in_geom,
max,
parse_geojson,
)
from zope.sqlalchemy import ZopeTransactionExtension
import datetime
import re
from json import (
JSONEncoder,
dumps as _dumps,
loads as _loads,
)
import functools
from sqlalchemy_i18n import (
Translatable,
make_translatable,
translation_base,
)
from pyramid.threadlocal import get_current_registry
class ST_Multi(GenericFunction):
name = 'ST_Multi'
type = Geometry
class ST_Collect(GenericFunction):
name = 'ST_Collect'
type = Geometry
class ST_Union(GenericFunction):
name = 'ST_Union'
type = Geometry
class ST_Buffer(GenericFunction):
name = 'ST_Buffer'
type = Geometry
class ST_SetSRID(GenericFunction):
name = 'ST_SetSRID'
type = Geometry
DBSession = scoped_session(sessionmaker(extension=ZopeTransactionExtension()))
Base = declarative_base()
make_translatable()
users_licenses_table = Table(
'users_licenses', Base.metadata,
Column('user', BigInteger, ForeignKey('users.id')),
Column('license', Integer, ForeignKey('licenses.id')))
# user roles
ADMIN = 1
PROJECT_MANAGER = 2
VALIDATOR = 4
EXPERIENCED_MAPPER = 8
class User(Base):
__tablename__ = "users"
id = Column(BigInteger, primary_key=True, index=True)
username = Column(Unicode)
role_admin = ADMIN
role_project_manager = PROJECT_MANAGER
role_validator = VALIDATOR
role_experienced_mapper = EXPERIENCED_MAPPER
role = Column(Integer, default=0)
accepted_licenses = relationship("License", secondary=users_licenses_table)
private_projects = relationship("Project",
secondary="project_allowed_users")
unread_messages = relationship(
'Message',
primaryjoin=lambda: and_(
User.id == Message.to_user_id,
Message.read.isnot(True)
))
def __init__(self, id, username):
self.id = id
self.username = username
@hybrid_property
def is_admin(self):
return self.role & self.role_admin
@hybrid_property
def is_project_manager(self):
return self.role & self.role_project_manager
@hybrid_property
def is_validator(self):
return self.role & self.role_validator
@hybrid_property
def is_experienced_mapper(self):
return self.role & self.role_experienced_mapper
def as_dict(self):
return {
"id": self.id,
"username": self.username,
"is_admin": self.is_admin,
"is_project_manager": self.is_project_manager,
"is_validator": self.is_validator,
"is_experienced_mapper": self.is_experienced_mapper,
}
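# Sketch (illustrative only): roles are bit flags, so a single integer can
# combine several roles and the hybrid properties above test each bit.
#
#   user = User(1, u'alice')
#   user.role = ADMIN | VALIDATOR   # 1 | 4 == 5
#   bool(user.is_admin), bool(user.is_validator)   # (True, True)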
# task states
READY = 0
INVALIDATED = 1
DONE = 2
VALIDATED = 3
REMOVED = -1
class TaskState(Base):
__tablename__ = "task_state"
id = Column(Integer, primary_key=True)
task_id = Column(Integer)
project_id = Column(Integer)
state_ready = READY
state_done = DONE
state_validated = VALIDATED
state_invalidated = INVALIDATED
state_removed = REMOVED
state = Column(Integer)
user_id = Column(BigInteger, ForeignKey('users.id'))
user = relationship(User)
date = Column(DateTime, default=datetime.datetime.utcnow)
__table_args__ = (ForeignKeyConstraint([task_id, project_id],
['task.id', 'task.project_id']),
Index('task_state_task_project_index',
'task_id',
'project_id'),
Index('task_state_date', date.desc()),
{})
def __init__(self, user=None, state=None):
self.user = user
self.state = state if state is not None else TaskState.state_ready
class TaskLock(Base):
__tablename__ = "task_lock"
id = Column(Integer, primary_key=True)
task_id = Column(Integer)
project_id = Column(Integer)
lock = Column(Boolean)
user_id = Column(BigInteger, ForeignKey('users.id'))
user = relationship(User)
date = Column(DateTime, default=datetime.datetime.utcnow)
# duration of the lock once the task is unlocked
# relevant only for records with lock == true
duration = 0
__table_args__ = (ForeignKeyConstraint([task_id, project_id],
['task.id', 'task.project_id']),
Index('task_lock_task_project_index',
'task_id',
'project_id'),
Index('task_lock_date', date.desc()),
{})
def __init__(self, user=None, lock=None):
self.user = user
self.lock = lock
@event.listens_for(TaskLock, "after_insert")
def task_lock_after_insert(mapper, connection, target):
task_table = Task.__table__
date = target.date if target.lock is True else None
connection.execute(
task_table.update().
where(and_(task_table.c.id == target.task_id,
task_table.c.project_id == target.project_id)).
values(lock_date=date)
)
class TaskComment(Base):
__tablename__ = "task_comment"
id = Column(Integer, primary_key=True)
task_id = Column(Integer)
project_id = Column(Integer)
comment = Column(Unicode)
date = Column(DateTime, default=datetime.datetime.utcnow)
author_id = Column(BigInteger, ForeignKey('users.id'))
author = relationship(User)
__table_args__ = (ForeignKeyConstraint([task_id, project_id],
['task.id', 'task.project_id']),
Index('task_comment_task_project_index',
'task_id',
'project_id'),
Index('task_comment_date', date.desc()),
{})
def __init__(self, comment, author):
self.comment = comment
self.author = author
def task_id_factory(context):
project_id = context.compiled_parameters[0]['project_id']
sql = """
SELECT MAX(id)
FROM task
WHERE project_id='%d'""" % (project_id, )
result = context.connection.execute(sql).fetchone()[0]
if result > 0:
return result + 1
else:
return 1
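# Illustration: task ids are sequential per project. If a project's highest
# task id is 17, the next task gets 18; a project with no tasks starts at 1.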
class Task(Base):
__tablename__ = "task"
id = Column(Integer, default=task_id_factory)
x = Column(Integer)
y = Column(Integer)
zoom = Column(Integer)
project_id = Column(Integer, ForeignKey('project.id'), index=True)
geometry = Column(Geometry('MultiPolygon', srid=4326))
date = Column(DateTime, default=datetime.datetime.utcnow)
lock_date = Column(DateTime, default=None)
extra_properties = Column(Unicode)
assigned_to_id = Column(Integer, ForeignKey('users.id'))
assigned_to = relationship(User)
assigned_date = Column(DateTime)
difficulty_easy = 1
difficulty_medium = 2
difficulty_hard = 3
difficulty = Column(Integer)
parent_id = Column(Integer)
cur_lock = relationship(
TaskLock,
primaryjoin=lambda: and_(
Task.id == TaskLock.task_id,
Task.project_id == TaskLock.project_id,
TaskLock.date == select(
[func.max(TaskLock.date)]
)
.where(and_(TaskLock.task_id == Task.id,
TaskLock.project_id == Task.project_id))
.correlate(Task.__table__)
),
uselist=False
)
cur_state = relationship(
TaskState,
primaryjoin=lambda: and_(
Task.id == TaskState.task_id,
Task.project_id == TaskState.project_id,
TaskState.date == select(
[func.max(TaskState.date)]
)
.where(and_(TaskState.task_id == Task.id,
TaskState.project_id == Task.project_id))
.correlate(Task.__table__)
),
uselist=False
)
locks = relationship(
TaskLock,
order_by="desc(TaskLock.date)",
cascade="all, delete, delete-orphan",
backref="task")
states = relationship(
TaskState,
order_by="desc(TaskState.date)",
cascade="all, delete, delete-orphan",
backref="task")
comments = relationship(
TaskComment,
order_by="desc(TaskComment.date)",
cascade="all, delete, delete-orphan",
backref="task")
__table_args__ = (PrimaryKeyConstraint('project_id', 'id'),
Index('task_lock_date_', date.desc()),
{},)
def __init__(self, x, y, zoom, geometry=None, properties=None):
self.x = x
self.y = y
self.zoom = zoom
if properties is not None:
self.extra_properties = unicode(_dumps(properties))
if geometry is None:
geometry = self.to_polygon()
multipolygon = MultiPolygon([geometry])
geometry = ST_Transform(shape.from_shape(multipolygon, 3857), 4326)
self.geometry = geometry
self.states.append(TaskState())
self.locks.append(TaskLock())
def to_polygon(self):
# task size (in meters) at the required zoom level
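        # note: `max` is the numeric constant imported from .utils above
        # (it shadows the builtin max)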
step = max / (2 ** (self.zoom - 1))
tb = TileBuilder(step)
return tb.create_square(self.x, self.y)
def to_feature(self):
properties = {
'state': self.cur_state.state if self.cur_state else 0,
'locked': self.lock_date is not None
}
if self.difficulty:
properties['difficulty'] = self.difficulty
if self.x and self.y and self.zoom:
properties['x'] = self.x
properties['y'] = self.y
properties['zoom'] = self.zoom
return Feature(
geometry=shape.to_shape(self.geometry),
id=self.id,
properties=properties
)
def get_extra_instructions(self):
instructions = self.project.per_task_instructions
def replace_colon(matchobj):
return matchobj.group(0).replace(':', '_')
        instructions = re.sub(r'\{([^}]*)\}', replace_colon, instructions)
properties = {}
if self.x:
properties['x'] = str(self.x)
if self.y:
properties['y'] = str(self.y)
if self.zoom:
properties['z'] = str(self.zoom)
if self.extra_properties:
extra_properties = _loads(self.extra_properties)
for key in extra_properties:
properties.update({
key.replace(':', '_'): extra_properties[key]
})
return instructions.format(**properties)
@event.listens_for(Task, "after_update")
def after_update(mapper, connection, target):
project_table = Project.__table__
project = target.project
connection.execute(
project_table.update().
where(project_table.c.id == project.id).
values(last_update=datetime.datetime.utcnow(),
done=project.get_done(),
validated=project.get_validated())
)
@event.listens_for(DBSession, "before_flush")
def before_flush(session, flush_context, instances):
for obj in session.dirty:
if isinstance(obj, Task):
obj.project.last_update = datetime.datetime.utcnow()
class Area(Base):
__tablename__ = 'areas'
id = Column(Integer, primary_key=True)
geometry = Column(Geometry('MultiPolygon', srid=4326))
centroid = Column(Geometry('Point', srid=4326))
def __init__(self, geometry):
self.geometry = ST_SetSRID(ST_Multi(geometry), 4326)
@event.listens_for(Area, "after_insert")
def area_after_insert(mapper, connection, target):
area_table = Area.__table__
connection.execute(
area_table.update().
where(area_table.c.id == target.id).
values(centroid=ST_Centroid(target.geometry))
)
project_allowed_users = Table(
'project_allowed_users',
Base.metadata,
Column('project_id', Integer, ForeignKey('project.id')),
Column('user_id', BigInteger, ForeignKey('users.id'))
)
class PriorityArea(Base):
__tablename__ = 'priority_area'
id = Column(Integer, primary_key=True)
geometry = Column(Geometry('Polygon', srid=4326))
def __init__(self, geometry):
self.geometry = geometry
project_priority_areas = Table(
'project_priority_areas',
Base.metadata,
Column('project_id', Integer, ForeignKey('project.id')),
Column('priority_area_id', Integer, ForeignKey('priority_area.id'))
)
project_labels_table = Table(
'project_labels', Base.metadata,
Column('project', Integer, ForeignKey('project.id')),
Column('label', Integer, ForeignKey('label.id')))
# A project corresponds to a given mapping job to do on a given area
# Example 1: trace the major roads
# Example 2: trace the buildings
# Each has its own grid with its own task size.
class Project(Base, Translatable):
__tablename__ = 'project'
id = Column(Integer, primary_key=True)
status_archived = 0
status_published = 1
status_draft = 2
status = Column(Integer, default=status_draft)
locale = 'en'
area_id = Column(Integer, ForeignKey('areas.id'))
created = Column(DateTime, default=datetime.datetime.utcnow)
author_id = Column(BigInteger, ForeignKey('users.id'))
author = relationship(User)
last_update = Column(DateTime, default=datetime.datetime.utcnow)
area = relationship(Area)
tasks = relationship(Task, backref='project',
cascade="all, delete, delete-orphan")
license_id = Column(Integer, ForeignKey('licenses.id'))
zoom = Column(Integer) # is not None when project is auto-filled (grid)
imagery = Column(Unicode)
# priorities are:
# 0 - Urgent
# 1 - High
# 2 - Medium
# 3 - Low
priority = Column(Integer, default=2)
# percentage of done tasks
done = Column(Float, default=0)
# percentage of validated tasks
validated = Column(Float, default=0)
__table_args__ = (CheckConstraint(priority.in_(range(0, 4))), )
entities_to_map = Column(Unicode)
changeset_comment = Column(Unicode)
private = Column(Boolean, default=False)
allowed_users = relationship(User,
secondary=project_allowed_users)
josm_preset = Column(Unicode)
due_date = Column(DateTime)
priority_areas = relationship(PriorityArea,
secondary=project_priority_areas)
# whether the validation should require the validator role or not
requires_validator_role = Column(Boolean, default=False)
labels = relationship("Label", secondary=project_labels_table)
    # whether mapping should require the experienced mapper role or not
requires_experienced_mapper_role = Column(Boolean, default=False)
def __init__(self, name, user=None):
self.name = name
self.author = user
    # automagically fills the area with tasks for the given zoom
def auto_fill(self, zoom):
self.zoom = zoom
geom_3857 = DBSession.execute(ST_Transform(self.area.geometry, 3857)) \
.scalar()
geom_3857 = shape.to_shape(geom_3857)
tasks = []
for i in get_tiles_in_geom(geom_3857, zoom):
multi = MultiPolygon([i[2]])
geometry = ST_Transform(shape.from_shape(multi, 3857), 4326)
tasks.append(Task(i[0], i[1], zoom, geometry))
self.tasks = tasks
def import_from_geojson(self, input):
features = parse_geojson(input)
tasks = []
for feature in features:
if not isinstance(feature.geometry, MultiPolygon):
feature.geometry = MultiPolygon([feature.geometry])
properties = feature.properties
tasks.append(Task(
None,
None,
None,
'SRID=4326;%s' % feature.geometry.wkt,
properties
))
self.tasks = tasks
DBSession.add(self)
DBSession.flush()
bounds = DBSession.query(ST_Union(ST_Buffer(Task.geometry, 0.01))) \
.filter(Task.project_id == self.id).one()
self.area = Area(bounds[0])
return len(tasks)
def get_done(self):
total = DBSession.query(func.sum(ST_Area(Task.geometry))) \
.filter(
Task.project_id == self.id,
Task.cur_state.has(TaskState.state != TaskState.state_removed)
) \
.scalar()
done = DBSession.query(func.sum(ST_Area(Task.geometry))) \
.filter(
Task.project_id == self.id,
Task.cur_state.has(TaskState.state == TaskState.state_done)
) \
.scalar()
if not done:
done = 0
return round(done * 100 / total, 2) if total != 0 else 0
def get_validated(self):
total = DBSession.query(func.sum(ST_Area(Task.geometry))) \
.filter(
Task.project_id == self.id,
Task.cur_state.has(TaskState.state != TaskState.state_removed)
) \
.scalar()
validated = DBSession.query(func.sum(ST_Area(Task.geometry))) \
.filter(
Task.project_id == self.id,
Task.cur_state.has(
TaskState.state == TaskState.state_validated)
) \
.scalar()
if not validated:
validated = 0
return round(validated * 100 / total, 2) if total != 0 else 0
def to_bbox(self):
return shape.to_shape(self.area.geometry).bounds
# get the count of currently locked tasks
def get_locked(self):
query = DBSession.query(Task) \
.filter(and_(Task.lock_date.__ne__(None),
Task.project_id == self.id))
return query.count()
def to_feature(self):
properties = {}
properties['name'] = self.name
properties['description'] = self.description
properties['short_description'] = self.short_description
properties['instructions'] = self.instructions
properties['per_task_instructions'] = self.per_task_instructions
properties['status'] = self.status
properties['created'] = self.created.strftime('%FT%TZ')
if self.author:
properties['author'] = self.author.username
properties['last_update'] = self.last_update.strftime('%FT%TZ')
properties['license'] = self.license_id
properties['priority'] = self.priority
properties['done'] = self.done
properties['validated'] = self.validated
properties['changeset_comment'] = self.changeset_comment
return Feature(
geometry=shape.to_shape(self.area.geometry),
id=self.id,
properties=properties
)
# the time delta after which the task is unlocked (in seconds)
EXPIRATION_DELTA = datetime.timedelta(seconds=2 * 60 * 60)
@event.listens_for(Project, "after_insert")
def project_after_insert(mapper, connection, target):
project_table = Project.__table__
settings = get_current_registry().settings
# settings may be None
# This happens for example when initializing the database
default_comment_prefix = settings.get('default_comment_prefix') \
if settings is not None else None
comment_prefix = default_comment_prefix or '#hotosm-project'
connection.execute(
project_table.update().
where(project_table.c.id == target.id).
values(changeset_comment=u'%s-%d' % (comment_prefix, target.id))
)
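# e.g. with the default prefix, project 42 ends up with the changeset
# comment u'#hotosm-project-42'.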
class ProjectTranslation(translation_base(Project)):
__tablename__ = 'project_translation'
name = Column(Unicode(255), default=u'')
description = Column(Unicode, default=u'')
short_description = Column(Unicode, default=u'')
instructions = Column(Unicode, default=u'')
per_task_instructions = Column(Unicode, default=u'')
class License(Base):
__tablename__ = "licenses"
id = Column(Integer, primary_key=True)
name = Column(Unicode)
description = Column(Unicode)
plain_text = Column(Unicode)
projects = relationship("Project", backref='license')
users = relationship("License", secondary=users_licenses_table)
def __init__(self):
pass
class Message(Base):
__tablename__ = "message"
id = Column(Integer, primary_key=True)
message = Column(Unicode)
subject = Column(Unicode)
from_user_id = Column(BigInteger, ForeignKey('users.id'))
from_user = relationship(User, foreign_keys=[from_user_id])
to_user_id = Column(BigInteger, ForeignKey('users.id'))
to_user = relationship(User, foreign_keys=[to_user_id],
backref='messages')
date = Column(DateTime, default=datetime.datetime.utcnow)
read = Column(Boolean)
def __init__(self, subject, from_, to, message):
self.subject = subject
self.from_user = from_
self.to_user = to
self.message = message
class Label(Base, Translatable):
__tablename__ = 'label'
id = Column(Integer, primary_key=True)
name = Column(Unicode, nullable=False)
color = Column(Unicode)
projects = relationship("Project", secondary=project_labels_table)
locale = 'en'
def __init__(self):
pass
class LabelTranslation(translation_base(Label)):
__tablename__ = 'label_translation'
description = Column(Unicode, default=u'')
class ExtendedJSONEncoder(JSONEncoder):
def default(self, obj): # pragma: no cover
if isinstance(obj, (datetime.date, datetime.datetime)):
return obj.isoformat(' ')
return JSONEncoder.default(self, obj)
dumps = functools.partial(_dumps, cls=ExtendedJSONEncoder)
|
conda/conda-ui
|
refs/heads/master
|
setup.py
|
2
|
from __future__ import print_function
import sys
import subprocess
from os.path import join
from setuptools import setup, find_packages
def build():
retcode = subprocess.call(
["coffee", "--no-header", "-c", join("conda_ui", "static", "conda_ui")],
shell=(sys.platform == 'win32'))
if retcode != 0:
raise RuntimeError("compilation failed")
build()
setup(
name='conda-ui',
version='0.1.1',
author='Continuum Analytics',
author_email='conda@continuum.io',
description='Web user interface for Conda',
install_requires=['Flask', 'conda'],
include_package_data=True,
zip_safe=False,
packages=find_packages(),
entry_points={
'console_scripts': [
'conda-ui = conda_ui:main',
],
},
)
|
saknis/upelis
|
refs/heads/master
|
wiki - Copy.py
|
1
|
#!/usr/bin/env python
#
# Copyright 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A simple Google App Engine wiki application.
The main distinguishing feature is that editing is in a WYSIWYG editor
rather than a text editor with special syntax. This application uses
google.appengine.api.datastore to access the datastore. This is a
lower-level API on which google.appengine.ext.db depends.
"""
__author__ = 'Bret Taylor & Nerijus Terebas'
import cgi
import cgitb
cgitb.enable()
import datetime
import os
import re
import sys
import urllib
import urlparse
import base64
import codecs
import math
from pngcanvas import PNGCanvas
import random
import json
#import wsgiref.handlers
#from google.appengine.ext import webapp
#from google.appengine.ext.webapp.util import run_wsgi_app
import webapp2 as webapp
from webapp2_extras import routes
#import wsgiref.handlers
import traceback
from google.appengine.ext import db
from google.appengine.api import mail
from google.appengine.api import datastore
from google.appengine.api import datastore_types
from google.appengine.api import users
from google.appengine.ext.webapp import template
from google.appengine.api import images
import locale
import gettext
from google.appengine.api import urlfetch
from Picasa import Picasa
from postmarkup import render_bbcode
from UserAdd import UserAdd
from UserAdd import Vartotojai
from Start import Start
from Start import Codeimagereg
from Start import AppVer
from Start import DinCode
import facebookoauth
from facebookoauth import FBUser
import linkedinauth
from linkedinauth import LIUser
import vkauth
from vkauth import VKUser
from upelis_settings import *
_DEBUG = DEBUG
_mailsender=MAILSENDER
_mailrcptto=MAILRCPTTO
cmsname2=CMSNAME
cmspath2=CMSPATH
cmstrans2=CMSTRANS
site1a=SITE1A
site1b=SITE1B
site2a=SITE2A
site2b=SITE2B
sitedown=SITEDOWN
current_locale = CURLOCALE
kalbos=LANGUAGES
kalboseile=LANGUAGESNR
kalbossort = LANGUAGESSORT
locale_path = LOCALEPATH
fileext=FILEEXT
lang=LANG
_kalbhtml = LANGHTML
langdef=lang
lang1 = gettext.translation (cmstrans2, locale_path, [current_locale] , fallback=True)
_ = lang1.ugettext
# Set to true if we want to have our webapp print stack traces, etc
_titauth = TITAUTH
#_titauth = "Nerijus Terebas"
_version=VERSION
#if os.environ['HTTP_HOST']==site1a or os.environ['HTTP_HOST']==site1b or os.environ['HTTP_HOST']==site2a or os.environ['HTTP_HOST']==site2b:
# imgfont = "Ubuntu-B.ttf"
#else:
# imgfont = "VeraSeBd.ttf"
#imgfont = "Ubuntu-B.ttf"
imgopt = DYNABOPT
fbputimgurl2="/static/images/upelis116.jpg"
avatarmaxurl2="/static/images/avatarmax.png"
avatarminurl2="/static/images/avatarmin.png"
g16url="/static/images/g16.png"
fb16url="/static/images/fb16.png"
in16url="/static/images/in16.png"
vk16url="/static/images/vk16.png"
gplusurl="/static/images/plus.ico"
def siteauth():
if os.environ['HTTP_HOST']==site1a or os.environ['HTTP_HOST']==site1b or os.environ['HTTP_HOST']==site2a or os.environ['HTTP_HOST']==site2b:
return "Nerijus Terebas"
else:
return _titauth
def urlparam(rparameters):
parts = rparameters.split(".")
parts.reverse()
parts.append('')
parts.append('')
parts.append('')
[ext,lang,aps]=parts[:3]
if lang in kalbos:
kalb=kalbos[lang]
else:
kalb=kalbos[langdef]
lang=langdef
values = {
'ext': ext,
'lang': lang,
'kalb': kalb,
'aps': aps}
return values
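# Sketch of the suffix parsing above (illustrative; assumes 'lt' is a
# configured key in kalbos):
#   urlparam("html")         -> ext='html', lang=<default>, aps=''
#   urlparam("lt.html")      -> ext='html', lang='lt',      aps=''
#   urlparam("page.lt.html") -> ext='html', lang='lt',      aps='page'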
def userinfo(pic_key2,utype,lang, fileext):
if lang in kalbos:
kalb=kalbos[lang]
else:
kalb=kalbos[langdef]
lang1 = gettext.translation (cmstrans2, locale_path, [kalb] , fallback=True)
_ = lang1.ugettext
rcomm = False
rpica = False
rplus = True
buvoapp = False
userid = "0"
content = ""
content2 = ""
pseudonimas = "Anonymous"
lank = UserNone(email=None, federated_identity=None)
youtname=""
vartkey=""
thubnail=""
vartot = None
imagemaxurl = avatarmaxurl2
userpicapagetext=""
klaida=False
errtext = ""
user = users.get_current_user()
try:
if utype:
buvesapp = db.GqlQuery("SELECT * FROM Vartotojai WHERE userid = :1", pic_key2)
else:
buvesapp = db.GqlQuery("SELECT * FROM Vartotojai WHERE lankytojas = :1", pic_key2)
for app in buvesapp:
rcomm = app.commrodyti
rpica = app.picarodyti
rplus = app.plusrodyti
buvoapp = app.rodyti
userid = app.userid
content = render_bbcode(str(app.content))
content2 = str(app.content)
pseudonimas = str(app.pseudonimas)
lank=app.lankytojas
youtname=app.youtname
vartkey=app.key()
thubnail=getphoto(lank.email())
vartot = db.get(vartkey)
except:
klaida=True
errtext = cgi.escape(str(sys.exc_info()[0])) + ' ' + cgi.escape(str(sys.exc_info()[1])) + ' ' + cgi.escape(str(sys.exc_info()[2]))
usercommpageurl = ("%s/%s-usercommpage-%s.%s/%s/%s" % (urlhost2(), cmspath2,lang, fileext, pseudonimas, userid))
userpicapageurl = ("%s/%s-userpicapage-%s.%s/%s/%s" % (urlhost2(), cmspath2,lang, fileext, pseudonimas, userid))
useryoutpageurl = ("%s/%s-useryoutpage-%s.%s/%s/%s" % (urlhost2(), cmspath2,lang, fileext, pseudonimas, userid))
usermailformpageurl = ("%s/%s-usermailformpage-%s.%s/%s/%s" % (urlhost2(), cmspath2,lang, fileext, pseudonimas, userid))
if lank.email():
usermailformtext = (_("User mail form page link text %(usercppseudonimas)s %(usermailformpageurl)s") % {'usercppseudonimas': pseudonimas,'usermailformpageurl': usermailformpageurl})
else:
usermailformtext = "User not Found"
if klaida:
userpicapagetext = ("<div>Error: %s</div" % (errtext))
if rcomm:
userpicapagetext = userpicapagetext + (_("User comm page link text %(usercppseudonimas)s %(usercommpageurl)s") % {'usercppseudonimas': pseudonimas,'usercommpageurl': usercommpageurl})
if rpica:
userpicapagetext = userpicapagetext + (_("User pica page link text %(usercppseudonimas)s %(userpicapageurl)s") % {'usercppseudonimas': pseudonimas,'userpicapageurl': userpicapageurl})
if klaida:
userpicapagetext = userpicapagetext + "<br />" + errtext
plusurl=getplius(lank.email())
if rplus and plusurl:
usercpplustext = ("<a href=\"%s\"><img src=\"%s\" width=\"32\" height=\"32\" border=\"0\" alt=\"\"></img> <strong>Google Plus</strong></a><br /><br />\n\n" % (plusurl,gplusurl))
rpluscheck="checked=\"checked\" "
else:
usercpplustext = ""
rpluscheck=" "
if youtname and len(str(youtname))>0:
userpicapagetext = userpicapagetext + (_("User yout page link text %(usercppseudonimas)s %(useryoutpageurl)s") % {'usercppseudonimas': pseudonimas,'useryoutpageurl': useryoutpageurl})
if buvoapp:
imagemaxurl = ("/%s-userimage/%s/%s" % (cmspath2,pseudonimas, userid))
if thubnail and not buvoapp:
imagemaxurl = str(thubnail)
uphoto=imagemaxurl.split("/s144/", 1)
slasas="/s200/"
imagemaxurl = slasas.join(uphoto)
usercpurl = ("/%s-usercontrolpanel-%s.%s" % (cmspath2,lang,fileext))
userpageend = ("%s/%s/%s" % (fileext,pseudonimas,userid))
userpageurl = ("%s/%s-userpage-%s.%s/%s/%s" % (urlhost2(),cmspath2, lang, fileext, pseudonimas, userid))
if rcomm:
rcommcheck="checked=\"checked\" "
# rcommcheck="checked=\"yes\""
else:
rcommcheck=""
# rcommcheck="checked=\"no\""
if rpica:
rpicacheck="checked=\"checked\" "
# rpicacheck="checked=\"yes\""
else:
rpicacheck=""
# rpicacheck="checked=\"no\""
if buvoapp:
buvoappcheck=""
# buvoappcheck="checked=\"no\""
else:
buvoappcheck="checked=\"checked\" "
# buvoappcheck="checked=\"yes\""
values = {
'imagemaxurl': imagemaxurl,
'userpageend': userpageend,
'userpicapagetext': userpicapagetext,
'usercpplustext': usercpplustext,
'usermailformtext': usermailformtext,
'usermailformpageurl': usermailformpageurl,
'useryoutpageurl': useryoutpageurl,
'userpicapageurl': userpicapageurl,
        'usercommpageurl': usercommpageurl,
'usercpurl': usercpurl,
'pseudonimas': pseudonimas,
'userid': userid,
'content': content,
'content2': content2,
'youtname': youtname,
'vartot': vartot,
'rcomm': rcomm,
'rpica': rpica,
'lank': lank,
'rcommcheck': rcommcheck,
'rpluscheck': rpluscheck,
'rpicacheck': rpicacheck,
'buvoappcheck': buvoappcheck,
'userpageurl': userpageurl}
return values
def codekey2():
codeimg = Codeimagereg()
codeimg.ipadresas = os.environ['REMOTE_ADDR']
codeimg.date = datetime.datetime.now()
code = random.randrange(100000, 999999)
codeimg.code = "%s" % code
codeimg.put()
codekey=codeimg.key()
return codekey
def urlhost2():
if os.environ['HTTPS']=="off":
return str('http://'+os.environ['HTTP_HOST'])
else:
return str('https://'+os.environ['HTTP_HOST'])
def textloc():
q2_message = ""
if 'HTTP_X_APPENGINE_CITY' in os.environ:
q2_message = q2_message + ("%s: %s \n" % ('City', os.environ['HTTP_X_APPENGINE_CITY']))
if 'HTTP_X_APPENGINE_COUNTRY' in os.environ:
q2_message = q2_message + ("%s: %s \n" % ('Country', os.environ['HTTP_X_APPENGINE_COUNTRY']))
if 'HTTP_X_APPENGINE_CITYLATLONG' in os.environ:
q2_message = q2_message +("%s: http://maps.google.com/maps?q=%s \n" % ('CityLatLong', os.environ['HTTP_X_APPENGINE_CITYLATLONG']))
return q2_message
def textinfo():
q2_message = "\n\nRemote Addr: " + os.environ['REMOTE_ADDR'] + "\nUser Agent: " + os.environ['HTTP_USER_AGENT'] + "\nLog ID: " + os.environ['REQUEST_LOG_ID'] + "\n"
return q2_message
class BaseRequestHandler(webapp.RequestHandler):
"""Supplies a common template generation function.
When you call generate(), we augment the template variables supplied with
the current user in the 'user' variable and the current webapp request
in the 'request' variable.
"""
def generate(self, template_name, languag, template_values={}):
UserAdd().plus()
Start().first()
if not languag:
languag=langdef
if languag in kalbos:
kalb=kalbos[languag]
else:
kalb=kalbos[langdef]
lang1 = gettext.translation (cmstrans2, locale_path, [kalb] , fallback=True)
_ = lang1.ugettext
kalb2=kalb.replace("_", "-")
values = {
'request': self.request,
'user': users.GetCurrentUser(),
'fbuser': self.fb_current_user,
'liuser': self.li_current_user,
'vkuser': self.vk_current_user,
'isadmin': users.is_current_user_admin(),
'self_url': self.request.uri,
'login_url': users.CreateLoginURL(self.request.uri),
'logout_url': users.CreateLogoutURL(self.request.uri),
'fblogin_url': "/auth/login?continue=%s" % (urllib.quote(self.request.uri)),
'fblogout_url': "/auth/logout?continue=%s" % (urllib.quote(self.request.uri)),
'lilogin_url': "/liauth/login?continue=%s" % (urllib.quote(self.request.uri)),
'lilogout_url': "/liauth/logout?continue=%s" % (urllib.quote(self.request.uri)),
'vklogin_url': "/vkauth/login?continue=%s" % (urllib.quote(self.request.uri)),
'vklogout_url': "/vkauth/logout?continue=%s" % (urllib.quote(self.request.uri)),
'application_name': siteauth(),
'msgtext_logout': _("logout"),
'msgtext_login': _("login"),
'msgtext_header': _("header %(cmsname)s") % {'cmsname': cmsname2},
'gallery': _("Gallery"),
'kalba': kalb2,
'cmspath':cmspath2,
}
values.update(template_values)
directory = os.path.dirname(__file__)
path = os.path.join(directory, os.path.join('templates', template_name))
self.response.headers['X-Powered-By'] = cmsname2+'/'+_version
appon=False
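# Note: the DinCode "start" record is eval'd below, so only trusted,
# admin-set expressions (e.g. "True"/"False") should ever be stored there.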
try:
codedb = db.GqlQuery("SELECT * FROM DinCode WHERE codename = :1", "start")
for thiscode in codedb:
thiscode = thiscode.codetext
appon = eval(thiscode)
except:
appon=False
if appon:
self.response.out.write(template.render(path, values, debug=_DEBUG))
else:
disablecode = "<html><body>Disable, swith to on</body></html>"
try:
codedb = db.GqlQuery("SELECT * FROM DinCode WHERE codename = :1", "disable")
for thiscode in codedb:
disablecode = thiscode.codetext
except:
disablecode = "<html><body>Disable, swith to on</body></html>"
self.response.out.write(disablecode)
@property
def fb_current_user(self):
"""Returns the logged in Facebook user, or None if unconnected."""
if not hasattr(self, "_fb_current_user"):
self._fb_current_user = None
user_id = facebookoauth.parse_cookie(self.request.cookies.get("fb_user"))
if user_id:
self._fb_current_user = FBUser.get_by_key_name(user_id)
if not self._fb_current_user or not hasattr(self._fb_current_user, "login") or not self._fb_current_user.login:
self._fb_current_user=None
return self._fb_current_user
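# The li_current_user and vk_current_user properties below follow the same
# lazy, cookie-backed lookup pattern as fb_current_user.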
@property
def li_current_user(self):
"""Returns the logged in Linkedin user, or None if unconnected."""
if not hasattr(self, "_li_current_user"):
self._li_current_user = None
user_id = linkedinauth.parse_cookie(self.request.cookies.get("li_user"))
if user_id:
self._li_current_user = LIUser.get_by_key_name(user_id)
return self._li_current_user
@property
def vk_current_user(self):
"""Returns the logged in Linkedin user, or None if unconnected."""
if not hasattr(self, "_vk_current_user"):
self._vk_current_user = None
user_id = vkauth.parse_cookie(self.request.cookies.get("vk_user"))
if user_id:
self._vk_current_user = VKUser.get_by_key_name(user_id)
return self._vk_current_user
class WikiFav(BaseRequestHandler):
def get(self, page_name):
self.response.headers['Cache-Control'] = 'public, max-age=60'
# self.response.headers['Last-Modified'] = lastmod.strftime("%a, %d %b %Y %H:%M:%S GMT")
expires = datetime.datetime.now() + datetime.timedelta(minutes=1)
self.response.headers['Expires'] = expires.strftime("%a, %d %b %Y %H:%M:%S GMT")
from bindata import FavIcon
imagelogo=FavIcon()
fav=imagelogo.data1
if os.environ['HTTP_HOST']==site1a or os.environ['HTTP_HOST']==site1b:
fav = imagelogo.data2
self.response.headers['Content-Type'] = 'image/x-icon'
self.response.out.write(fav)
def post(self, page_name):
# POST is handled identically to GET for the favicon.
self.get(page_name)
class WikiRedirDown(BaseRequestHandler):
def get(self, rparameters):
param=urlparam(rparameters)
ext=param['ext']
lang=param['lang']
aps=param['aps']
kalb=param['kalb']
lang1 = gettext.translation (cmstrans2, locale_path, [kalb] , fallback=True)
_ = lang1.ugettext
user = users.get_current_user()
if user:
self.response.headers['X-Powered-By'] = cmsname2+'/'+_version
self.redirect(sitedown)
# exit(0)
else:
greeting = _("Sign in or register %(userloginurl)s") % {'userloginurl': users.create_login_url(self.request.uri)}
greeting=greeting+"<br />"+(_("diferent accounts"))
page = Page.loadnew("download")
page.content = "Download - "+_("Login header %(greeting)s") % {'greeting': greeting}
page_name2 = 'menu'+'-'+lang+'.'+fileext
page2 = Page.load(page_name2)
page3 = Page.loadnew("kalbos")
textaps=''
if len(aps)>0:
textaps=aps+'.'
text=''
for name, value in kalbossort:
text = text + (_kalbhtml % ('download', textaps+name, ext, name, name))
page3.content = text
self.generate('view.html', lang, {
'imgshar': False,
'noedit': '1',
'application_name': siteauth(),
'kalbos': page3,
'menu': page2,
'page': page,
})
class RedirN(BaseRequestHandler):
def get(self, page_name):
self.response.headers['X-Powered-By'] = cmsname2+'/'+_version
self.redirect('http://'+site2b+os.environ['PATH_INFO'])
class RedirN2(BaseRequestHandler):
def get(self, page_name):
self.redirect('http://www.google.com/')
class WikiRedirMain(BaseRequestHandler):
def get(self, page_name):
if not page_name:
page_name="MainPage"
self.response.headers['X-Powered-By'] = cmsname2+'/'+_version
entitiesRx = re.compile("[^0-9a-zA-Z]")
page_name = entitiesRx.sub("", page_name)
self.redirect('/'+cmspath2+'-'+page_name+'-'+lang+'.'+fileext)
# def post(self, page_name):
# if not page_name:
# page_name="MainPage"
# self.response.headers['X-Powered-By'] = cmsname2+'/'+_version
# self.redirect('/'+cmspath2+'-'+page_name+'-'+lang+'.'+fileext)
def post(self, rparameters):
param=urlparam(rparameters)
ext=param['ext']
lang=param['lang']
aps=param['aps']
kalb=param['kalb']
lang1 = gettext.translation (cmstrans2, locale_path, [kalb] , fallback=True)
_ = lang1.ugettext
page = Page.loadnew("env")
user = users.get_current_user()
greeting = ''
if user:
if users.is_current_user_admin():
items = os.environ.items()
items.sort()
for name, value in items:
aaa = "%s\t= %s <br/>" % (name, value)
greeting = greeting + aaa
for field in self.request.arguments():
aaa = "%s\t= %s <br/>" % (field, self.request.get(field))
greeting = greeting + aaa
else:
greeting = _("Welcome2 %(admin)s %(usernickname)s %(userlogouturl)s") % {'admin': '', 'usernickname': user.nickname(), 'userlogouturl': users.create_logout_url(self.request.uri)}
greeting = greeting + " " + _("and") + " " + (_("sign in on Administrator %(userloginurl)s") % {'userloginurl': users.create_login_url(self.request.uri)})
else:
greeting = _("Sign in on Administrator %(userloginurl)s") % {'userloginurl': users.create_login_url(self.request.uri)}
page.content = _("Enviroment header %(greeting)s") % {'greeting': greeting}
page_name2 = 'menu'+'-'+lang+'.'+fileext
page2 = Page.load(page_name2)
page3 = Page.loadnew("kalbos")
textaps=''
if len(aps)>0:
textaps=aps+'.'
text=''
for name, value in kalbossort:
text = text + (_kalbhtml % ('env', textaps+name, ext, name, name))
page3.content = text
self.generate('view.html', lang, {
'imgshar': False,
'noedit': '1',
'application_name': siteauth(),
'kalbos': page3,
'menu': page2,
'page': page,
})
class WikiRedir(BaseRequestHandler):
def get(self, page_name):
self.redirect('/')
def post(self, page_name):
self.redirect('/')
class WikiInstall(BaseRequestHandler):
def get(self):
for name, value in kalbossort:
lang=name
kalb=kalbos[lang]
lang1 = gettext.translation (cmstrans2, locale_path, [kalb] , fallback=True)
_ = lang1.ugettext
yra1 = False
yra2 = False
puslapis1 = _("page index html %(cmsname)s %(cmspath)s") % {'cmsname': cmsname2,'cmspath': cmspath2}
puslapis2 = _("page menu html %(cmspath)s") % {'cmspath': cmspath2}
query = datastore.Query('Page')
query['name ='] = "MainPage-"+lang+'.'+fileext
entities = query.Get(1)
if len(entities) < 1:
yra1 = False
else:
yra1 = True
query = datastore.Query('Page')
query['name ='] = "menu-"+lang+'.'+fileext
entities = query.Get(1)
if len(entities) < 1:
yra2 = False
else:
yra2 = True
if not yra1:
page = Page.loadnew("MainPage-"+lang+'.'+fileext)
page.content = puslapis1
page.save()
if not yra2:
page = Page.loadnew("menu-"+lang+'.'+fileext)
page.content = puslapis2
page.save()
self.redirect('/')
class WikiPage(BaseRequestHandler):
"""Our one and only request handler.
We first determine which page we are editing, using "MainPage" if no
page is specified in the URI. We then determine the mode we are in (view
or edit), choosing "view" by default.
POST requests to this handler handle edit operations, writing the new page
to the datastore.
"""
def get(self, page_name, rparameters):
param=urlparam(rparameters)
ext=param['ext']
lang=param['lang']
aps=param['aps']
kalb=param['kalb']
lang1 = gettext.translation (cmstrans2, locale_path, [kalb] , fallback=True)
_ = lang1.ugettext
formurl=urlhost2()
formurl=urlparse.urljoin(formurl, str(self.request.uri))
o = urlparse.urlparse(formurl)
urlpath_without_query_string = o.path
url_without_query_string = o.scheme+"://"+o.netloc+o.path
url_host = o.scheme+"://"+o.netloc
# Load the main page by default
if not page_name:
page_name = 'MainPage'
page_name_org = page_name
rparameters2 = rparameters
entitiesRx = re.compile("[^0-9a-zA-Z\x2D\x5F\x2E\x2C]")
rparameters2 = entitiesRx.sub("", rparameters2)
page_name = "%s-%s" % (page_name,rparameters2)
page = Page.load(page_name)
# page_name2 = 'menu'+'-'+lang+'.'+fileext
page_name2 = "menu-%s.%s" % (lang,fileext)
page2 = Page.load(page_name2)
page3 = Page.loadnew("kalbos")
textaps=''
if len(aps)>0:
textaps=aps+'.'
text=''
for name, value in kalbossort:
text = text + (_kalbhtml % (page_name_org, textaps+name, ext, name, name))
page3.content = text
# page does not exist
if not page.entity and not users.GetCurrentUser() and not users.is_current_user_admin():
self.error(404)
self.response.out.write('Not found')
return
# Default to edit for pages that do not yet exist
if not page.entity:
mode = 'edit'
else:
modes = ['view', 'edit', 'fbputwall']
mode = self.request.get('mode')
if not mode in modes:
mode = 'view'
# User must be logged in to edit
if mode == 'edit' and not users.GetCurrentUser() and not users.is_current_user_admin():
self.redirect(users.CreateLoginURL(self.request.uri))
return
if mode == 'fbputwall':
greeting = ''
fb_current_user=self.fb_current_user
if fb_current_user:
aaa = _("logged Facebook %(fbprofileurl)s %(fbpicurl)s %(fbname)s %(url)s") % {'fbprofileurl': fb_current_user.profile_url,'fbpicurl': "http://graph.facebook.com/"+fb_current_user.id+"/picture",'fbname': fb_current_user.name,'url': '/auth/logout?continue='+urllib.quote(self.request.uri)}
from rss import MyHTMLParser,HTMLParser
parser = HTMLParser()
parerr = False
try:
p = MyHTMLParser()
p.feed(parser.unescape(page.content))
pav=p.data[0]
p.close()
except:
pav=_("--- tag h1 not found in page ---")
parerr = True
if not parerr:
message = _("Message from:").encode("utf-8")+"\n"+urlhost2()
attachment = {}
attachment['name'] = pav.encode("utf-8")
attachment['caption'] = os.environ['HTTP_HOST']
attachment['link'] = urlhost2()+os.environ['PATH_INFO']
attachment['picture'] = urlhost2()+fbputimgurl2
attachment['description'] = ' '
import fb
obj = self
aaa=fb.putwall(obj,message,attachment)
else:
aaa="<h1>Error</h1>%s" % (pav)
else:
aaa = _("not logged Facebook %(url)s") % {'url': '/auth/login?continue='+urllib.quote(self.request.uri)}
greeting = greeting + aaa
page.content = "%s" % (greeting)
# Generate the appropriate template
self.generate('view.html', lang, {
'imgshar': False,
'kalbos': page3,
'menu': page2,
'page': page,
})
return
if mode == 'view':
page.content = "%s<p> </p><p><a href=\"%s?mode=fbputwall\"><img src=\"%s/dynab?button_text=%s%s\" border=\"0\" alt=\"%s\"></img></a></p>\n" % (page.content,url_without_query_string,url_host,urllib.quote(_("Put to Facebook Wall").encode("utf-8")),imgopt,_("Put to Facebook Wall"))
soccomtext = ""
soccomshowform = False
if mode == 'view' and (page.commenablego or page.commenablefb or page.commenableli or page.commenablevk):
# if hasattr(a, 'property'):
soccomtext = "<div><h3>"+_("Commenting is turned on with a social networks logins:")
if page.commenablego:
soccomtext = soccomtext + " Google"
user = users.get_current_user()
if user:
soccomshowform = True
if page.commenablefb:
soccomtext = soccomtext + " FaceBook"
fb_current_user=self.fb_current_user
if fb_current_user:
soccomshowform = True
if page.commenableli:
soccomtext = soccomtext + " LinkedIn"
li_current_user=self.li_current_user
if li_current_user:
soccomshowform = True
if page.commenablevk:
soccomtext = soccomtext + " VKontakte"
vk_current_user=self.vk_current_user
if vk_current_user:
soccomshowform = True
soccomtext = soccomtext + "</h3></div>"
page.content = "%s%s" % (page.content,soccomtext)
soccomtext2 = ""
if soccomshowform:
codekey=codekey2()
soccomtext2 = (_("page Comments form %(commsendurl)s %(commcodekey)s %(commbutsrc)s") % {'commsendurl': urlpath_without_query_string, 'commcodekey': codekey, 'commbutsrc': "src=\""+url_host+"/dynab?button_text="+urllib.quote(_("Submit Comment").encode("utf-8"))+imgopt+"\""})
if mode == 'view' and (page.commenablego or page.commenablefb or page.commenableli or page.commenablevk):
page.content = "%s%s<div><p><a href=\"%s?cmd=comments\"><img src=\"%s/dynab?button_text=%s%s\" border=\"0\" alt=\"%s\"></img></a></p></div>\n" % (page.content,soccomtext2,urlpath_without_query_string,url_host,urllib.quote(_("View Comments").encode("utf-8")),imgopt,_("View Comments"))
if self.request.get('cmd') == 'comments':
rcomm = True
userid = "0"
content = ""
pseudonimas = "Anonymous"
user = users.get_current_user()
if rcomm:
yra=False
wtext=""
try:
pg=self.request.get('pg')
entitiesRx = re.compile("[^0-9]")
pg=entitiesRx.sub("", pg)
if pg:
pg = int(pg)
else:
pg=0
try:
query = db.GqlQuery("SELECT * FROM Commentsrec3 WHERE commpage = :1 ORDER BY date DESC", page_name)
greetings = query.fetch(10,pg*10)
co=query.count()
except:
klaida=True
co=0
greetings = []
i=0
ii=0
bbb=""
while i<=co:
i=i+10
if ii == pg:
bbb=bbb+' '+str(ii)
else:
bbb=bbb+(" <a href=\"%s?cmd=comments&pg=%s\">%s</a>" % (urlpath_without_query_string,str(ii),str(ii)))
ii=ii+1
wtext=wtext+"<div><hr width=\"70%\"></hr></div>\n<div style=\"text-align: center;\">"+bbb+"</div>\n\n"
for greeting in greetings:
wijun = ""
wdel = ""
if greeting.rodyti or (users.GetCurrentUser() and users.get_current_user() == greeting.author) or users.is_current_user_admin():
if users.is_current_user_admin():
wdel = _("Comments delete %(commswiturl)s %(commkey)s") % {'commswiturl': '/commswit', 'commkey': greeting.key()}
if (users.GetCurrentUser() and users.get_current_user() == greeting.author) or users.is_current_user_admin():
if not greeting.rodyti:
wijun = _("Comments show %(commswiturl)s %(commkey)s") % {'commswiturl': '/commswit', 'commkey': greeting.key()}
else:
wijun = _("Comments hidden %(commswiturl)s %(commkey)s") % {'commswiturl': '/commswit', 'commkey': greeting.key()}
user3 = greeting.vartot
user3fb = greeting.vartotfb
user3li = greeting.vartotli
user3vk = greeting.vartotvk
pseudonimas3 = "Anonymous"
userid3 = '0'
try:
userid3 = user3.userid
pseudonimas3 = user3.pseudonimas
except:
klaida=True
wtext = wtext + "\n<div class=\"comm-container\">\n<div class=\"comm-name\">\n"
if user3:
imagemaxurl2 = ("/%s-userimagemin/%s/%s" % (cmspath2,pseudonimas3, userid3))
userpageurl = ("%s/%s-userpage-%s.%s/%s/%s" % (urlhost2(), cmspath2,lang, fileext, pseudonimas3, userid3))
wtext = wtext +("<a href=\"%s\"><img src=\"%s\" border=\"0\" alt=\"\"></img></a> <strong><img src=\"%s\" alt=\"\" border=\"0\"></img></strong> <strong>%s</strong><br />\n" % (userpageurl,imagemaxurl2,g16url,pseudonimas3))
if user3fb:
userid = user3fb.id
# pseudonimas3 = user3fb.nickname
pseudonimas3 = user3fb.name
imagemaxurl2 = ("http://graph.facebook.com/%s/picture" % (userid))
userpageurl = ("%s/fbinfo?id=%s" % (urlhost2(),userid))
wtext = wtext +("<a href=\"%s\"><img src=\"%s\" border=\"0\" alt=\"\"></img></a> <strong><img src=\"%s\" alt=\"\" border=\"0\"></img></strong> <strong>%s</strong><br />\n" % (userpageurl,imagemaxurl2,fb16url,pseudonimas3))
if user3li:
userid = user3li.id
ukey = user3li.key()
# pseudonimas3 = user3li.nickname
pseudonimas3 = user3li.name
imagemaxurl2 = ("%s/liphoto2/%s" % (urlhost2(),ukey))
userpageurl = user3li.profile_url
wtext = wtext +("<a href=\"%s\"><img src=\"%s\" border=\"0\" alt=\"\"></img></a> <strong><img src=\"%s\" alt=\"\" border=\"0\"></img></strong> <strong>%s</strong><br />\n" % (userpageurl,imagemaxurl2,in16url,pseudonimas3))
if user3vk:
userid = user3vk.id
ukey = user3vk.key()
# pseudonimas3 = user3li.nickname
pseudonimas3 = user3vk.name
imagemaxurl2 = ("%s/vkphoto/%s" % (urlhost2(),userid))
userpageurl = user3vk.profile_url
wtext = wtext +("<a href=\"%s\"><img src=\"%s\" border=\"0\" alt=\"\"></img></a> <strong><img src=\"%s\" alt=\"\" border=\"0\"></img></strong> <strong>%s</strong><br />\n" % (userpageurl,imagemaxurl2,vk16url,pseudonimas3))
wtext = wtext +("\n</div>\n<div class=\"font-small-gray\">%s</div>\n" % greeting.date.strftime("%a, %d %b %Y %H:%M:%S"))
if greeting.avatar:
if greeting.avatarmax:
wtext = wtext + ("<div class=\"font-small-gray\"><a href=\"/commimg?img_id=%s&size=yes\"><img src=\"/commimg?img_id=%s\" alt=\"\"></img></a></div>\n" % (greeting.key(),greeting.key()))
else:
wtext = wtext + ("<div class=\"font-small-gray\"><img src=\"/commimg?img_id=%s\" alt=\"\"></img></div>\n" % greeting.key())
wtext = wtext + ("\n<div class=\"comm-text\"><div>%s</div></div>\n" % greeting.content)
# wtext = wtext + "</div><div class=\"clear\"><!-- --></div>\n"
# edit links (disabled): wtext = wtext + "\n<div>"+wijun+" " +wdel+"</div>\n\n"
wtext = wtext + "<div> </div>\n</div>\n"
yra=True
except:
yra=False
errtext = ''.join(traceback.format_exception(*sys.exc_info())) #cgi.escape(str(sys.exc_info()[0]))
if yra:
commtext=("<div>%s</div>\n\t" % (wtext))
else:
commtext="<div>comments db error: %s</div>\n\t" % (errtext)
page.content = "%s%s" % (page.content,commtext)
# Generate the appropriate template
self.generate(mode + '.html', lang, {
'imgshar': False,
'kalbos': page3,
'menu': page2,
'page': page,
})
def post(self, page_name, rparameters):
param=urlparam(rparameters)
ext=param['ext']
lang=param['lang']
aps=param['aps']
kalb=param['kalb']
lang1 = gettext.translation (cmstrans2, locale_path, [kalb] , fallback=True)
_ = lang1.ugettext
# User must be logged in to edit
if not users.GetCurrentUser() and not self.request.get('cmd') == 'pagecomm':
# The GET version of this URI is just the view/edit mode, which is a
# reasonable thing to redirect to
self.redirect(users.CreateLoginURL(self.request.uri))
return
if not users.is_current_user_admin() and not self.request.get('cmd') == 'pagecomm':
self.redirect(users.CreateLoginURL(self.request.uri))
return
if not page_name:
self.redirect('/')
return
# Create or overwrite the page
page_name = page_name+'-'+rparameters
page = Page.load(page_name)
if self.request.get('cmd') == 'pagecomm' and ((page.commenablego and users.get_current_user()) or (page.commenablefb and self.fb_current_user) or (page.commenableli and self.li_current_user) or (page.commenablevk and self.vk_current_user)):
user = users.get_current_user()
if user:
userid = user.user_id()
else:
userid = "0"
fb_current_user=self.fb_current_user
li_current_user=self.li_current_user
vk_current_user=self.vk_current_user
connt=""
vartot = None
vartkey=""
try:
buvesapp = db.GqlQuery("SELECT * FROM Vartotojai WHERE userid = :1", userid)
for app in buvesapp:
vartkey=app.key()
vartot = db.get(vartkey)
except:
klaida=True
codeimg = None
try:
codeimg = db.get(self.request.get("scodeid"))
except:
codeimg = None
if codeimg and codeimg.code == self.request.get("scode"):
greeting = Commentsrec3()
greeting.commpage = page_name
greeting.vartot = vartot
greeting.vartotfb = self.fb_current_user
greeting.vartotli = self.li_current_user
greeting.vartotvk = self.vk_current_user
greeting.rodyti = True
greeting.userid = userid
greeting.ipadresas = os.environ['REMOTE_ADDR']
# greeting.laikas = datetime.datetime.now()
if users.get_current_user():
greeting.author = users.get_current_user()
connt = cgi.escape(self.request.get("content"))
connt = render_bbcode(connt)
connt = connt[0:400]
greeting.content = connt
# priesduom = self.request.get("img")
greeting.rname = "anonymous"
if self.request.get("img"):
avatarmax = images.resize(self.request.get("img"), width=600, height=400, output_encoding=images.PNG)
greeting.avatarmax = db.Blob(avatarmax)
avatar = images.resize(self.request.get("img"), width=96, height=96, output_encoding=images.PNG)
greeting.avatar = db.Blob(avatar)
greeting.put()
to_addr = _mailrcptto
user = users.get_current_user()
if user:
uname=user.nickname()
umail=users.get_current_user().email()
else:
uname=""
umail=""
message = mail.EmailMessage()
message.subject = os.environ['HTTP_HOST'] + " - comments"
message.sender = _mailsender
message.to = to_addr
q_message = ""
q_message = q_message + ("\n%s: %s \n%s \n%s \n" % ('Page', str(self.request.uri),str(textinfo()),str(textloc())))
message.body = (_("Comments mail message %(communame)s %(commumail)s %(commrealname)s %(commmessage)s") % {'communame': uname,'commumail': umail,'commrealname': greeting.rname,'commmessage': greeting.content}) + q_message
message.send()
# self.redirect('/'+cmspath2+'-usercommpage-'+lang+'.'+fileext+'/'+pseudonimas+'/'+userid )
if not self.request.get('cmd') == 'pagecomm':
page.content = self.request.get('content')
page.save()
self.redirect(page.view_url())
class WikiExec(BaseRequestHandler):
def get(self, rparameters):
param=urlparam(rparameters)
ext=param['ext']
lang=param['lang']
aps=param['aps']
kalb=param['kalb']
lang1 = gettext.translation (cmstrans2, locale_path, [kalb] , fallback=True)
_ = lang1.ugettext
# values222 = { "name" : "world" }
page = Page.loadnew("pasaulis")
page.content = base64.decodestring("PGgxPkhlbGxvPC9oMT48cD5OZXJpamF1cyBUZXJlYmFzIC0gQ01TICIlcyIgLSAlcyAtIGJhc2VkICJjY2N3aWtpIiAoQnJldCBUYXlsb3IpLCAiaW1hZ2Vfc2hhcmluZyIgKEZyZWQgV3VsZmYpPC9wPg==") % (cmsname2,_version)
page_name2 = 'menu'+'-'+lang+'.'+fileext
page3 = Page.loadnew("kalbos")
textaps=''
if len(aps)>0:
textaps=aps+'.'
text=''
for name, value in kalbossort:
text = text + (_kalbhtml % ('ver', textaps+name, ext, name, name))
page3.content = text
page2 = Page.load(page_name2)
self.generate('view.html', lang, {
'imgshar': False,
'noedit': '1',
'application_name': siteauth(),
'kalbos': page3,
'menu': page2,
'page': page,
})
class WikiLogin(BaseRequestHandler):
def get(self, rparameters):
param=urlparam(rparameters)
ext=param['ext']
lang=param['lang']
aps=param['aps']
kalb=param['kalb']
lang1 = gettext.translation (cmstrans2, locale_path, [kalb] , fallback=True)
_ = lang1.ugettext
user = users.get_current_user()
if user:
greeting = _("Welcome2 %(admin)s %(usernickname)s %(userlogouturl)s") % {'admin': '', 'usernickname': user.nickname(), 'userlogouturl': users.create_logout_url(self.request.uri)}
if users.is_current_user_admin():
greeting = _("Welcome2 %(admin)s %(usernickname)s %(userlogouturl)s") % {'admin': 'Administrator', 'usernickname': user.nickname(), 'userlogouturl': users.create_logout_url(self.request.uri)}
else:
greeting = _("Sign in or register %(userloginurl)s") % {'userloginurl': users.create_login_url(self.request.uri)}
greeting=greeting+"<br />"+(_("diferent accounts"))
page = Page.loadnew("login")
page.content = _("Login header %(greeting)s") % {'greeting': greeting}
page_name2 = 'menu'+'-'+lang+'.'+fileext
page2 = Page.load(page_name2)
page3 = Page.loadnew("kalbos")
textaps=''
if len(aps)>0:
textaps=aps+'.'
text=''
for name, value in kalbossort:
text = text + (_kalbhtml % ('login', textaps+name, ext, name, name))
page3.content = text
self.generate('view.html', lang, {
'imgshar': False,
'noedit': '1',
'application_name': siteauth(),
'kalbos': page3,
'menu': page2,
'page': page,
})
class WikiEnv(BaseRequestHandler):
def get(self, rparameters):
param=urlparam(rparameters)
ext=param['ext']
lang=param['lang']
aps=param['aps']
kalb=param['kalb']
lang1 = gettext.translation (cmstrans2, locale_path, [kalb] , fallback=True)
_ = lang1.ugettext
page = Page.loadnew("env")
user = users.get_current_user()
greeting = ''
if user:
if users.is_current_user_admin():
items = os.environ.items()
items.sort()
for name, value in items:
aaa = "%s\t= %s <br/>" % (name, value)
greeting = greeting + aaa
else:
greeting = _("Welcome2 %(admin)s %(usernickname)s %(userlogouturl)s") % {'admin': '', 'usernickname': user.nickname(), 'userlogouturl': users.create_logout_url(self.request.uri)}
greeting = greeting + " " + _("and") + " " + (_("sign in on Administrator %(userloginurl)s") % {'userloginurl': users.create_login_url(self.request.uri)})
else:
greeting = _("Sign in on Administrator %(userloginurl)s") % {'userloginurl': users.create_login_url(self.request.uri)}
page.content = _("Enviroment header %(greeting)s") % {'greeting': greeting}
page_name2 = 'menu'+'-'+lang+'.'+fileext
page2 = Page.load(page_name2)
page3 = Page.loadnew("kalbos")
textaps=''
if len(aps)>0:
textaps=aps+'.'
text=''
for name, value in kalbossort:
text = text + (_kalbhtml % ('env', textaps+name, ext, name, name))
page3.content = text
self.generate('view.html', lang, {
'imgshar': False,
'noedit': '1',
'application_name': siteauth(),
'kalbos': page3,
'menu': page2,
'page': page,
})
class WikiFB(BaseRequestHandler):
def get(self, rparameters):
param=urlparam(rparameters)
ext=param['ext']
lang=param['lang']
aps=param['aps']
kalb=param['kalb']
lang1 = gettext.translation (cmstrans2, locale_path, [kalb] , fallback=True)
_ = lang1.ugettext
page = Page.loadnew("fb")
user = users.get_current_user()
greeting = ''
if user:
if users.is_current_user_admin():
items = os.environ.items()
fb_current_user=self.fb_current_user
if fb_current_user:
aaa = _("logged Facebook %(fbprofileurl)s %(fbpicurl)s %(fbname)s %(url)s") % {'fbprofileurl': fb_current_user.profile_url,'fbpicurl': "http://graph.facebook.com/"+fb_current_user.id+"/picture",'fbname': fb_current_user.name,'url': '/auth/logout?continue='+urllib.quote(self.request.uri)}
else:
aaa = _("not logged Facebook %(url)s") % {'url': '/auth/login?continue='+urllib.quote(self.request.uri)}
greeting = greeting + aaa
else:
greeting = _("Welcome2 %(admin)s %(usernickname)s %(userlogouturl)s") % {'admin': '', 'usernickname': user.nickname(), 'userlogouturl': users.create_logout_url(self.request.uri)}
greeting = greeting + " " + _("and") + " " + (_("sign in on Administrator %(userloginurl)s") % {'userloginurl': users.create_login_url(self.request.uri)})
else:
greeting = _("Sign in on Administrator %(userloginurl)s") % {'userloginurl': users.create_login_url(self.request.uri)}
page.content = _("Facebook header %(greeting)s") % {'greeting': greeting}
page_name2 = 'menu'+'-'+lang+'.'+fileext
page2 = Page.load(page_name2)
page3 = Page.loadnew("kalbos")
textaps=''
if len(aps)>0:
textaps=aps+'.'
text=''
for name, value in kalbossort:
text = text + (_kalbhtml % ('fb', textaps+name, ext, name, name))
page3.content = text
self.generate('view.html', lang, {
'imgshar': False,
'noedit': '1',
'application_name': siteauth(),
'kalbos': page3,
'menu': page2,
'page': page,
})
class WikiLI(BaseRequestHandler):
def get(self, rparameters):
param=urlparam(rparameters)
ext=param['ext']
lang=param['lang']
aps=param['aps']
kalb=param['kalb']
lang1 = gettext.translation (cmstrans2, locale_path, [kalb] , fallback=True)
_ = lang1.ugettext
page = Page.loadnew("fb")
user = users.get_current_user()
greeting = ''
if user:
if users.is_current_user_admin():
items = os.environ.items()
li_current_user=self.li_current_user
if li_current_user:
aaa = _("logged LinkedIn %(liprofileurl)s %(lipicurl)s %(liname)s %(url)s") % {'liprofileurl': li_current_user.profile_url,'lipicurl': "/liphoto/"+li_current_user.id,'liname': li_current_user.name,'url': '/liauth/logout?continue='+urllib.quote(self.request.uri)}
else:
aaa = _("not logged LinkedIn %(url)s") % {'url': '/liauth/login?continue='+urllib.quote(self.request.uri)}
greeting = greeting + aaa
else:
greeting = _("Welcome2 %(admin)s %(usernickname)s %(userlogouturl)s") % {'admin': '', 'usernickname': user.nickname(), 'userlogouturl': users.create_logout_url(self.request.uri)}
greeting = greeting + " " + _("and") + " " + (_("sign in on Administrator %(userloginurl)s") % {'userloginurl': users.create_login_url(self.request.uri)})
else:
greeting = _("Sign in on Administrator %(userloginurl)s") % {'userloginurl': users.create_login_url(self.request.uri)}
page.content = _("LinkedIn header %(greeting)s") % {'greeting': greeting}
page_name2 = 'menu'+'-'+lang+'.'+fileext
page2 = Page.load(page_name2)
page3 = Page.loadnew("kalbos")
textaps=''
if len(aps)>0:
textaps=aps+'.'
text=''
for name, value in kalbossort:
text = text + (_kalbhtml % ('li', textaps+name, ext, name, name))
page3.content = text
self.generate('view.html', lang, {
'imgshar': False,
'noedit': '1',
'application_name': siteauth(),
'kalbos': page3,
'menu': page2,
'page': page,
})
class WikiVK(BaseRequestHandler):
def get(self, rparameters):
param=urlparam(rparameters)
ext=param['ext']
lang=param['lang']
aps=param['aps']
kalb=param['kalb']
lang1 = gettext.translation (cmstrans2, locale_path, [kalb] , fallback=True)
_ = lang1.ugettext
page = Page.loadnew("vk")
user = users.get_current_user()
greeting = ''
if user:
if users.is_current_user_admin():
items = os.environ.items()
vk_current_user=self.vk_current_user
if vk_current_user:
aaa = _("logged VKontakte %(vkprofileurl)s %(vkpicurl)s %(vkname)s %(url)s") % {'vkprofileurl': vk_current_user.profile_url,'vkpicurl': "/vkphoto/"+vk_current_user.id,'vkname': vk_current_user.name,'url': '/vkauth/logout?continue='+urllib.quote(self.request.uri)}
else:
aaa = _("not logged VKontakte %(url)s") % {'url': '/vkauth/login?continue='+urllib.quote(self.request.uri)}
greeting = greeting + aaa
else:
greeting = _("Welcome2 %(admin)s %(usernickname)s %(userlogouturl)s") % {'admin': '', 'usernickname': user.nickname(), 'userlogouturl': users.create_logout_url(self.request.uri)}
greeting = greeting + " " + _("and") + " " + (_("sign in on Administrator %(userloginurl)s") % {'userloginurl': users.create_login_url(self.request.uri)})
else:
greeting = _("Sign in on Administrator %(userloginurl)s") % {'userloginurl': users.create_login_url(self.request.uri)}
page.content = _("VKontakte header %(greeting)s") % {'greeting': greeting}
page_name2 = 'menu'+'-'+lang+'.'+fileext
page2 = Page.load(page_name2)
page3 = Page.loadnew("kalbos")
textaps=''
if len(aps)>0:
textaps=aps+'.'
text=''
for name, value in kalbossort:
text = text + (_kalbhtml % ('vk', textaps+name, ext, name, name))
page3.content = text
self.generate('view.html', lang, {
'imgshar': False,
'noedit': '1',
'application_name': siteauth(),
'kalbos': page3,
'menu': page2,
'page': page,
})
class WikiAdmin(BaseRequestHandler):
def get(self, rparameters):
param=urlparam(rparameters)
ext=param['ext']
lang=param['lang']
aps=param['aps']
kalb=param['kalb']
lang1 = gettext.translation (cmstrans2, locale_path, [kalb] , fallback=True)
_ = lang1.ugettext
page = Page.loadnew("admin")
user = users.get_current_user()
greeting = ''
if user:
if users.is_current_user_admin():
greeting = _("page admin content html %(cmspath)s") % {'cmspath': cmspath2}
else:
greeting = _("Welcome2 %(admin)s %(usernickname)s %(userlogouturl)s") % {'admin': '', 'usernickname': user.nickname(), 'userlogouturl': users.create_logout_url(self.request.uri)}
greeting = greeting + " " + _("and") + " " + (_("sign in on Administrator %(userloginurl)s") % {'userloginurl': users.create_login_url(self.request.uri)})
else:
greeting = _("Sign in on Administrator %(userloginurl)s") % {'userloginurl': users.create_login_url(self.request.uri)}
page.content = _("Admin header %(greeting)s") % {'greeting': greeting}
page_name2 = 'menu'+'-'+lang+'.'+fileext
page2 = Page.load(page_name2)
page3 = Page.loadnew("kalbos")
textaps=''
if len(aps)>0:
textaps=aps+'.'
text=''
for name, value in kalbossort:
text = text + (_kalbhtml % ('admin', textaps+name, ext, name, name))
page3.content = text
self.generate('view.html', lang, {
'imgshar': False,
'noedit': '1',
'application_name': siteauth(),
'kalbos': page3,
'menu': page2,
'page': page,
})
class WikiMod(BaseRequestHandler):
def get(self, modname, rparameters):
param=urlparam(rparameters)
ext=param['ext']
lang=param['lang']
aps=param['aps']
kalb=param['kalb']
lang1 = gettext.translation (cmstrans2, locale_path, [kalb] , fallback=True)
_ = lang1.ugettext
sys.path.append(os.getcwd()+os.path.sep+"componets")
import importlib
entitiesRx = re.compile("[^0-9a-zA-Z]")
modname = entitiesRx.sub("", modname)
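# modname is whitelisted to alphanumerics above, so the importlib call below
# cannot be steered to arbitrary module paths.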
modloaderr = False
modname2 = 'custommodule'
try:
moduleim = importlib.import_module("mod"+modname)
except:
modloaderr = True
if not modloaderr:
moduleim = importlib.import_module("mod"+modname)
modmed = getattr(moduleim, "modobj"+modname)
modresult = modmed().cont(self)
if 'cont' in modresult and len(modresult['cont'])>0:
modcont = modresult['cont']
else:
modcont = "<h1>Custom Compontet \"%s\"</h1>\n" % (modname)
if 'name' in modresult and len(modresult['name'])>0:
modname2 = modresult['name']
else:
modname2 = 'customcomponent'
if 'fpput' in modresult and modresult['fpput']==True:
modfpput = True
else:
modfpput = False
if 'title' in modresult and len(modresult['title'])>0:
modtitle = modresult['title']
else:
modtitle = 'Custom Component'
if 'descr' in modresult and len(modresult['descr'])>0:
moddescr = modresult['descr']
else:
moddescr = ' '
# user = users.get_current_user()
aaa = "%s" % (modcont)
modes = ['view', 'fbputwall']
mode = self.request.get('mode')
if not mode in modes:
mode = 'view'
if mode == 'fbputwall' and modfpput:
fb_current_user=self.fb_current_user
if fb_current_user:
pav = modtitle
aaa = _("logged Facebook %(fbprofileurl)s %(fbpicurl)s %(fbname)s %(url)s") % {'fbprofileurl': fb_current_user.profile_url,'fbpicurl': "http://graph.facebook.com/"+fb_current_user.id+"/picture",'fbname': fb_current_user.name,'url': '/auth/logout?continue='+urllib.quote(self.request.uri)}
parerr = False
if not parerr:
message = _("Message from:").encode("utf-8")+"\n"+urlhost2()
attachment = {}
attachment['name'] = pav.encode("utf-8")
attachment['caption'] = os.environ['HTTP_HOST']
attachment['link'] = urlhost2()+os.environ['PATH_INFO']
attachment['picture'] = urlhost2()+fbputimgurl2
attachment['description'] = moddescr.encode("utf-8")
import fb
obj = self
aaa=fb.putwall(obj,message,attachment)
else:
aaa="<h1>Error</h1>%s" % (pav)
else:
aaa = _("not logged Facebook %(url)s") % {'url': '/auth/login?continue='+urllib.quote(self.request.uri)}
if mode == 'view' and modfpput:
aaa = "%s<p> </p><p><a href=\"%s%s?mode=fbputwall\"><img src=\"%s/dynab?button_text=%s%s\" border=\"0\" alt=\"%s\"></img></a></p>" % (aaa,urlhost2(),os.environ['PATH_INFO'],urlhost2(),urllib.quote(_("Put to Facebook Wall").encode("utf-8")),imgopt,_("Put to Facebook Wall"))
else:
aaa = "<h1>Compontet \"%s\" load error</h1>\n" % (modname)
page = Page.loadnew(modname2)
page.content = "%s" % (aaa)
page_name2 = 'menu'+'-'+lang+'.'+fileext
page2 = Page.load(page_name2)
page3 = Page.loadnew("kalbos")
textaps=''
if len(aps)>0:
textaps=aps+'.'
text=''
for name, value in kalbossort:
text = text + (_kalbhtml % ('mod'+modname, textaps+name, ext, name, name))
page3.content = text
self.generate('view.html', lang, {
'imgshar': False,
'noedit': '1',
'application_name': siteauth(),
'kalbos': page3,
'menu': page2,
'page': page,
})
def post(self, modname, rparameters):
# POST is handled identically to GET for custom components.
self.get(modname, rparameters)
class ListDir(BaseRequestHandler):
def get(self, rparameters):
param=urlparam(rparameters)
ext=param['ext']
lang=param['lang']
aps=param['aps']
kalb=param['kalb']
lang1 = gettext.translation (cmstrans2, locale_path, [kalb] , fallback=True)
_ = lang1.ugettext
page = Page.loadnew("list")
user = users.get_current_user()
greeting = ''
if user:
if users.is_current_user_admin():
items = os.listdir(self.request.get("ls"))
items.sort()
for name in items:
aaa = "%s <br />\n" % (name)
greeting = greeting + aaa
else:
greeting = _("Welcome2 %(admin)s %(usernickname)s %(userlogouturl)s") % {'admin': '', 'usernickname': user.nickname(), 'userlogouturl': users.create_logout_url(self.request.uri)}
greeting = greeting + " " + _("and") + " " + (_("sign in on Administrator %(userloginurl)s") % {'userloginurl': users.create_login_url(self.request.uri)})
else:
greeting = _("Sign in on Administrator %(userloginurl)s") % {'userloginurl': users.create_login_url(self.request.uri)}
page.content = _("List header %(greeting)s") % {'greeting': greeting}
page_name2 = 'menu'+'-'+lang+'.'+fileext
page2 = Page.load(page_name2)
page3 = Page.loadnew("kalbos")
textaps=''
if len(aps)>0:
textaps=aps+'.'
text=''
for name, value in kalbossort:
text = text + (_kalbhtml % ('ls', textaps+name, ext, name, name))
page3.content = text
self.generate('view.html', lang, {
'imgshar': False,
'noedit': '1',
'application_name': siteauth(),
'kalbos': page3,
'menu': page2,
'page': page,
})
class Greeting(db.Model):
author = db.UserProperty()
content = db.StringProperty(multiline=True)
date = db.DateTimeProperty(auto_now_add=True)
class SingGuestbook(BaseRequestHandler):
def post(self, rparameters):
param=urlparam(rparameters)
ext=param['ext']
lang=param['lang']
aps=param['aps']
kalb=param['kalb']
lang1 = gettext.translation (cmstrans2, locale_path, [kalb] , fallback=True)
_ = lang1.ugettext
greeting = Greeting()
aaa=""
if users.get_current_user():
greeting = Greeting()
greeting.author = users.get_current_user()
greeting.content = self.request.get('content')
greeting.put()
# self.redirect('/')
aaa = _("Guestbook 2")
else:
aaa = _("Guestbook 1 %(userloginurl)s") % {'userloginurl': users.create_login_url(self.request.uri)}
page = Page.loadnew("guestbook")
page.content = aaa
page_name2 = 'menu'+'-'+lang+'.'+fileext
page2 = Page.load(page_name2)
self.generate('view.html', lang, {
'imgshar': False,
'noedit': '1',
'application_name': siteauth(),
'menu': page2,
'page': page,
})
class WikiGuest(BaseRequestHandler):
def get(self, rparameters):
param=urlparam(rparameters)
ext=param['ext']
lang=param['lang']
aps=param['aps']
kalb=param['kalb']
lang1 = gettext.translation (cmstrans2, locale_path, [kalb] , fallback=True)
_ = lang1.ugettext
aaa=""
pg=self.request.get('pg')
entitiesRx = re.compile("[^0-9]")
pg=entitiesRx.sub("", pg)
if pg:
pg = int(pg)
else:
pg=0
query = db.GqlQuery("SELECT * "
"FROM Greeting "
"ORDER BY date DESC")
greetings = query.fetch(10,pg*10)
# query = Greeting.all()
co=query.count()
i=0
ii=0
bbb=""
while i<=co:
i=i+10
if ii == pg:
bbb=bbb+' '+str(ii)
else:
bbb=bbb+' '+"<a href=\""+'/'+cmspath2+'-guestbook-'+lang+'.'+fileext+"?pg="+ str(ii) +"\">"+ str(ii) +"</a>"
ii=ii+1
aaa=aaa+"<center>"+bbb+"</center><br />\n"
for greeting in greetings:
if greeting.author:
ccc1=''
ccc1=_("Guestbook 3 %(greetingusernickname)s post") % {'greetingusernickname': greeting.author.nickname()}
aaa=aaa+ccc1
else:
aaa=aaa + _("Guestbook 4 anonymous post")
aaa=aaa+('<blockquote>%s</blockquote>' %
cgi.escape(greeting.content))
if users.get_current_user():
aaa=aaa+(_("Guestbook 5 %(guestsendurl)s") % {'guestsendurl': '/'+cmspath2+'-sing-'+lang+'.'+fileext})
else:
ccc2 = ''
ccc2 = _("Guestbook 6 %(guestuserloginurl)s") % {'guestuserloginurl': users.create_login_url(self.request.uri)}
aaa=aaa+ccc2
page = Page.loadnew("guestbook")
page.content = _("Guestbook header %(guestgreeting)s") % {'guestgreeting': aaa}
page_name2 = 'menu'+'-'+lang+'.'+fileext
page2 = Page.load(page_name2)
page3 = Page.loadnew("kalbos")
textaps=''
if len(aps)>0:
textaps=aps+'.'
text=''
for name, value in kalbossort:
text = text + (_kalbhtml % ('guestbook', textaps+name, ext, name, name))
page3.content = text
self.generate('view.html', lang, {
'imgshar': False,
'noedit': '1',
'application_name': siteauth(),
'kalbos': page3,
'menu': page2,
'page': page,
})
class MailForm(BaseRequestHandler):
def get(self, rparameters):
param=urlparam(rparameters)
ext=param['ext']
lang=param['lang']
aps=param['aps']
kalb=param['kalb']
lang1 = gettext.translation (cmstrans2, locale_path, [kalb] , fallback=True)
_ = lang1.ugettext
codekey=codekey2()
page2 = Page.load("pasl-"+lang+'.'+fileext)
page = Page.loadnew("mailform")
user = users.get_current_user()
greeting = ''
# if user:
greeting = _("Mail form %(mailsendurl)s %(mailcodekey)s") % {'mailsendurl': '/'+cmspath2+'-sendmail-'+lang+'.'+fileext,'mailcodekey': codekey}
# else:
# greeting = "<p><a href=\""+users.create_login_url(self.request.uri)+u"\">Please login</a> with Google account.</p>"
page.content = u""+page2.content+greeting+""
page_name2 = 'menu'+'-'+lang+'.'+fileext
page2 = Page.load(page_name2)
page3 = Page.loadnew("kalbos")
textaps=''
if len(aps)>0:
textaps=aps+'.'
text=''
for name, value in kalbossort:
text = text + (_kalbhtml % ('mailform', textaps+name, ext, name, name))
page3.content = text
self.generate('view.html', lang, {
'imgshar': False,
'noedit': '1',
'application_name': siteauth(),
'kalbos': page3,
'menu': page2,
'page': page,
})
class MailSend(BaseRequestHandler):
# @login_required
def post(self, rparameters):
param=urlparam(rparameters)
ext=param['ext']
lang=param['lang']
aps=param['aps']
kalb=param['kalb']
lang1 = gettext.translation (cmstrans2, locale_path, [kalb] , fallback=True)
_ = lang1.ugettext
codeimg = None
try:
codeimg = db.get(self.request.get("scodeid"))
except:
codeimg = None
if codeimg and codeimg.code == self.request.get("scode"):
codeimg.delete()
x_zmail = self.request.get("zemail")
x_subject = self.request.get("zsubject")
x_realname = self.request.get("zrealname")
x_message = self.request.get("zmessage")
to_addr = _mailrcptto
user = users.get_current_user()
if user:
uname=user.nickname()
umail=users.get_current_user().email()
else:
uname=""
umail=""
if not mail.is_email_valid(to_addr):
# Return an error message...
pass
message = mail.EmailMessage()
message.subject = os.environ['HTTP_HOST'] + " mailform - " +x_subject.encode("utf-8")
# message.subject = "www"
message.sender = _mailsender
message.to = to_addr
# q_uname = uname.encode("utf-8")
# q_umail = umail.encode("utf-8")
# q_zmail = x_zmail.encode("utf-8")
# q_realname = x_realname.encode("utf-8")
# q_message = x_message.encode("utf-8")
q_uname = ''
q_umail = ''
q_zmail = ''
q_realname = ''
q_message = ''
q_uname = uname
q_umail = umail
q_zmail = x_zmail
q_realname = x_realname
q_message = x_message + ("\n%s: %s \n%s \n%s \n" % ('Page', str(self.request.uri),str(textinfo()),str(textloc())))
message.body = (_("Mail message %(mailuname)s %(mailumail)s %(mailrealname)s %(mailzmail)s %(mailmessage)s") % {'mailuname': q_uname, 'mailumail': q_umail, 'mailrealname': q_realname, 'mailzmail': q_zmail, 'mailmessage': q_message})
message.send()
ptext=_("Mail send OK")
else:
ptext=_("Mail send Error")
page = Page.loadnew("sendmail")
page.content = ptext
page_name2 = 'menu'+'-'+lang+'.'+fileext
page2 = Page.load(page_name2)
self.generate('view.html', lang, {
'imgshar': False,
'noedit': '1',
'application_name': siteauth(),
'menu': page2,
'page': page,
})
class Page(object):
"""Our abstraction for a Wiki page.
We handle all datastore operations so that new pages are handled
seamlessly. To create OR edit a page, just create a Page instance and
call save().
"""
def __init__(self, name, entity=None):
self.name = name
self.entity = entity
if entity:
self.content = entity['content']
if entity.has_key('user'):
self.user = entity['user']
else:
self.user = None
self.created = entity['created']
self.modified = entity['modified']
self.sitemaprodyti=entity['sitemaprodyti']
self.rssrodyti=entity['rssrodyti']
self.sitemapfreq=entity['sitemapfreq']
self.sitemapprio=entity['sitemapprio']
if "commenablego" in entity:
self.commenablego=entity['commenablego']
else:
self.commenablego=False
if "commenablefb" in entity:
self.commenablefb=entity['commenablefb']
else:
self.commenablefb=False
if "commenableli" in entity:
self.commenableli=entity['commenableli']
else:
self.commenableli=False
if "commenablevk" in entity:
self.commenablevk=entity['commenablevk']
else:
self.commenablevk=False
else:
# New pages should start out with a simple title to get the user going
now = datetime.datetime.now()
if not name=="menu":
self.content = '<h1>' + cgi.escape(name) + '</h1>'
self.user = None
self.created = now
self.modified = now
self.rssrodyti=False
self.sitemaprodyti=False
self.sitemapfreq='weekly'
self.sitemapprio='0.5'
self.commenablego=False
self.commenablefb=False
self.commenableli=False
self.commenablevk=False
def entity(self):
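# Note: this method is shadowed by the self.entity attribute assigned in
# __init__, so it is effectively unreachable.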
return self.entity
def edit_url(self):
return '/'+cmspath2+'-' + self.name + '?mode=edit'
def view_url(self):
return '/'+cmspath2+'-' + self.name
def wikified_content(self):
"""Applies our wiki transforms to our content for HTML display.
We auto-link URLs, link WikiWords, and hide referers on links that
go outside of the Wiki.
"""
transforms = [
# AutoLink(),
# WikiWords(),
# HideReferers(),
]
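# All transforms are currently commented out, so wikified_content returns
# the stored content unchanged.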
content = self.content
for transform in transforms:
content = transform.run(content)
return content
def save(self):
"""Creates or edits this page in the datastore."""
now = datetime.datetime.now()
if self.entity:
entity = self.entity
else:
entity = datastore.Entity('Page')
entity['name'] = self.name
entity['created'] = now
entity['rssrodyti'] = self.rssrodyti
entity['sitemaprodyti'] = self.sitemaprodyti
entity['sitemapfreq'] = self.sitemapfreq
entity['sitemapprio'] = self.sitemapprio
entity['commenablego'] = self.commenablego
entity['commenablefb'] = self.commenablefb
entity['commenableli'] = self.commenableli
entity['commenablevk'] = self.commenablevk
entity['content'] = datastore_types.Text(self.content)
entity['modified'] = now
if users.GetCurrentUser():
entity['user'] = users.GetCurrentUser()
elif entity.has_key('user'):
del entity['user']
datastore.Put(entity)
@staticmethod
def loadnew(name):
return Page(name)
@staticmethod
def load(name):
"""Loads the page with the given name.
We always return a Page instance, even if the given name isn't yet in
the database. In that case, the Page object will be created when save()
is called.
"""
query = datastore.Query('Page')
query['name ='] = name
entities = query.Get(1)
if len(entities) < 1:
return Page(name)
else:
return Page(name, entities[0])
@staticmethod
def exists(name):
"""Returns true if the page with the given name exists in the datastore."""
return Page.load(name).entity
class Transform(object):
"""Abstraction for a regular expression transform.
Transform subclasses have two properties:
regexp: the regular expression defining what will be replaced
replace(MatchObject): returns a string replacement for a regexp match
We iterate over all matches for that regular expression, calling replace()
on the match to determine what text should replace the matched text.
The Transform class is more expressive than regular expression replacement
because the replace() method can execute arbitrary code to, e.g., look
up a WikiWord to see if the page exists before determining if the WikiWord
should be a link.
"""
def run(self, content):
"""Runs this transform over the given content.
We return a new string that is the result of this transform.
"""
parts = []
offset = 0
for match in self.regexp.finditer(content):
parts.append(content[offset:match.start(0)])
parts.append(self.replace(match))
offset = match.end(0)
parts.append(content[offset:])
return ''.join(parts)
class WikiWords(Transform):
"""Translates WikiWords to links.
We look up all words, and we only link those words that currently exist.
"""
def __init__(self):
self.regexp = re.compile(r'[A-Z][a-z]+([A-Z][a-z]+)+')
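# Matches CamelCase words made of two or more capitalized runs, e.g. "WikiWord".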
def replace(self, match):
wikiword = match.group(0)
if Page.exists(wikiword):
return '<a class="wikiword" href="/%s">%s</a>' % (wikiword, wikiword)
else:
return wikiword
class AutoLink(Transform):
"""A transform that auto-links URLs."""
def __init__(self):
self.regexp = re.compile(r'([^"])\b((http|https)://[^ \t\n\r<>\(\)&"]+' \
r'[^ \t\n\r<>\(\)&"\.])')
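# Matches bare http(s) URLs preceded by a non-quote character (so URLs already
# inside an attribute are skipped) and not ending in punctuation.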
def replace(self, match):
url = match.group(2)
return match.group(1) + '<a class="autourl" href="%s">%s</a>' % (url, url)
class HideReferers(Transform):
"""A transform that hides referers for external hyperlinks."""
def __init__(self):
self.regexp = re.compile(r'href="(http[^"]+)"')
def replace(self, match):
url = match.group(1)
scheme, host, path, parameters, query, fragment = urlparse.urlparse(url)
url = 'http://www.google.com/url?sa=D&q=' + urllib.quote(url)
return 'href="' + url + '"'
class VarId(BaseRequestHandler):
def get(self, rparameters):
param=urlparam(rparameters)
ext=param['ext']
lang=param['lang']
aps=param['aps']
kalb=param['kalb']
lang1 = gettext.translation (cmstrans2, locale_path, [kalb] , fallback=True)
_ = lang1.ugettext
query = db.GqlQuery("SELECT * "
"FROM Vartotojai "
"ORDER BY laikas DESC")
for greeting in query:
greeting.userid=greeting.lankytojas.user_id()
greeting.put()
page = Page.loadnew("suradimas")
page.content = u'<h1>User ID lookup</h1>'
page_name2 = 'menu'+'-'+lang+'.'+fileext
page2 = Page.load(page_name2)
page3 = Page.loadnew("kalbos")
textaps=''
if len(aps)>0:
textaps=aps+'.'
text=''
for name, value in kalbossort:
text = text + (_kalbhtml % ('searchid', textaps+name, ext, name, name))
page3.content = text
self.generate('view.html', lang, {
'imgshar': False,
'noedit': '1',
'application_name': siteauth(),
'kalbos': page3,
'menu': page2,
'page': page,
})
class VartSar(BaseRequestHandler):
def get(self, rparameters):
param=urlparam(rparameters)
ext=param['ext']
lang=param['lang']
aps=param['aps']
kalb=param['kalb']
lang1 = gettext.translation (cmstrans2, locale_path, [kalb] , fallback=True)
_ = lang1.ugettext
aaa=""
pg=self.request.get('pg')
entitiesRx = re.compile("[^0-9]")
pg=entitiesRx.sub("", pg)
if pg:
pg = int(pg)
else:
pg=0
query = db.GqlQuery("SELECT * "
"FROM Vartotojai "
"ORDER BY laikas DESC")
greetings = query.fetch(10,pg*10)
co=query.count()
i=0
ii=0
bbb=""
while i<=co:
i=i+10
if ii == pg:
bbb=bbb+' '+str(ii)
else:
bbb=bbb+' '+"<a href=\"/"+cmspath2+"-memberlist-"+lang+'.'+fileext+"?pg="+ str(ii) +"\">"+ str(ii) +"</a>"
ii=ii+1
aaa=aaa+"<center>"+bbb+"</center><br />\n"
for greeting in greetings:
buvoapp = greeting.rodyti
userid = greeting.userid
content = greeting.content
pseudonimas = greeting.pseudonimas
if buvoapp:
imagemaxurl = ("/"+cmspath2+"-userimagemin/%s/%s" % (pseudonimas, userid))
else:
imagemaxurl = avatarminurl2
if greeting.lankytojas:
thubnail=getphoto(greeting.lankytojas.email())
if not thubnail:
thubnail = imagemaxurl
userpageurl = ("%s/%s-userpage-%s.%s/%s/%s" % (urlhost2(), cmspath2,lang, fileext, pseudonimas, userid))
userpicaurl = ("%s/picaenable2?user=%s" % (urlhost2(), greeting.lankytojas.email()))
userplusurl = ("%s/avatar2?user=%s" % (urlhost2(), greeting.lankytojas.email()))
aaa=aaa+(("<a href=\"%s\"><img src=\"%s\"+ border=\"0\" alt=\"\"></img><img src=\"%s\"+ border=\"0\" alt=\"\"></img><br />\n\n<strong>%s</strong></a> <a href=\"%s\">Plus</a> <a href=\"%s\">Picasa</a><br />google user: <b>%s</b> email: %s") % (userpageurl,imagemaxurl,thubnail,pseudonimas,userplusurl,userpicaurl,greeting.lankytojas.nickname(),greeting.lankytojas.email()))
else:
aaa=aaa+''
iplink = ("<a href=\"%s/logs3?filter=%s\">%s</a>" % (urlhost2(), greeting.ipadresas,greeting.ipadresas))
aaa=aaa+(_("Memberlist entry msg %(memlisttime)s %(memlistipaddr)s %(memlistbrowser)s") % {'memlisttime': greeting.laikas, 'memlistipaddr': iplink, 'memlistbrowser': greeting.narsykle})
if not users.get_current_user() or not users.is_current_user_admin():
aaa = _("Sign in on Administrator %(userloginurl)s") % {'userloginurl': users.create_login_url(self.request.uri)}
page = Page.loadnew("memberlist")
page.content = _("Memberlist header %(memlistgreeting)s") % {'memlistgreeting': aaa}
page_name2 = 'menu'+'-'+lang+'.'+fileext
page2 = Page.load(page_name2)
page3 = Page.loadnew("kalbos")
textaps=''
if len(aps)>0:
textaps=aps+'.'
text=''
for name, value in kalbossort:
text = text + (_kalbhtml % ('memberlist', textaps+name, ext, name, name))
page3.content = text
self.generate('view.html', lang, {
'imgshar': False,
'noedit': '1',
'application_name': siteauth(),
'kalbos': page3,
'menu': page2,
'page': page,
})
class VartSarTrumpas(BaseRequestHandler):
def get(self, rparameters):
param=urlparam(rparameters)
ext=param['ext']
lang=param['lang']
aps=param['aps']
kalb=param['kalb']
lang1 = gettext.translation (cmstrans2, locale_path, [kalb] , fallback=True)
_ = lang1.ugettext
aaa=""
pg=self.request.get('pg')
entitiesRx = re.compile("[^0-9]")
pg=entitiesRx.sub("", pg)
if pg:
pg = int(pg)
else:
pg=0
query = db.GqlQuery("SELECT * "
"FROM Vartotojai "
"ORDER BY laikas DESC")
greetings = query.fetch(8,pg*8)
co=query.count()
i=0
ii=0
bbb=""
while i<=co:
i=i+8
if ii == pg:
bbb=bbb+' '+str(ii)
else:
bbb=bbb+' '+"<a href=\"/"+cmspath2+"-memberlistshort-"+lang+'.'+fileext+"?pg="+ str(ii) +"\">"+ str(ii) +"</a>"
ii=ii+1
# aaa=aaa+"<center>"+bbb+"</center><br />\n"
aaa=aaa+"<table cellspacing=\"0\" cellpadding=\"0\">\n"
z = 0
for greeting in greetings:
z = z + 1
if z==1:
aaa=aaa+"<tr>\n"
buvoapp = greeting.rodyti
userid = greeting.userid
content = greeting.content
pseudonimas = greeting.pseudonimas
if buvoapp:
imagemaxurl = ("/%s-userimagemin/%s/%s" % (cmspath2,pseudonimas, userid))
else:
imagemaxurl = avatarminurl2
if greeting.lankytojas:
thubnail=getphoto(greeting.lankytojas.email())
if thubnail and not buvoapp:
imagemaxurl = str(thubnail)
uphoto=imagemaxurl.split("/s144/", 1)
slasas="/s50/"
imagemaxurl = slasas.join(uphoto)
userpageurl = ("%s/%s-userpage-%s.%s/%s/%s" % (urlhost2(), cmspath2 ,lang, fileext, pseudonimas, userid))
aaa=aaa+(("<td width=\"50\"><a href=\"%s\" target=\"_top\"><img src=\"%s\" border=\"0\" alt=\"%s\"></img></a></td>\n") % (userpageurl,imagemaxurl,pseudonimas))
else:
aaa=aaa+''
if z==2:
z=0
aaa=aaa+"\n</tr>"
# aaa=aaa+(_("Memberlist entry msg %(memlisttime)s %(memlistipaddr)s %(memlistbrowser)s") % {'memlisttime': greeting.laikas, 'memlistipaddr': greeting.ipadresas, 'memlistbrowser': greeting.narsykle})
if z==1:
aaa=aaa+"<td width=\"50\"> </td></tr>"
# if z==0:
# aaa=aaa+"\n</tr>"
aaa=aaa+"\n</table>"
# if not users.get_current_user() or not users.is_current_user_admin():
# aaa = _("Sign in on Administrator %(userloginurl)s") % {'userloginurl': users.create_login_url(self.request.uri)}
page = Page.loadnew("memberlist")
page.content = aaa
page_name2 = 'menu'+'-'+lang+'.'+fileext
page2 = Page.load(page_name2)
page3 = Page.loadnew("kalbos")
textaps=''
if len(aps)>0:
textaps=aps+'.'
text=''
for name, value in kalbossort:
text = text + (_kalbhtml % ('memberlist', textaps+name, ext, name, name))
page3.content = text
self.generate('viewicon.html', lang, {
'imgshar': False,
'noedit': '1',
'application_name': siteauth(),
'kalbos': page3,
'menu': page2,
'page': page,
})
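# Facebook member list: same paged thumbnail grid, backed by FBUser records
# with avatars taken from the Facebook Graph picture endpoint.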
class FBUserListSort(BaseRequestHandler):
def get(self, rparameters):
param=urlparam(rparameters)
ext=param['ext']
lang=param['lang']
aps=param['aps']
kalb=param['kalb']
lang1 = gettext.translation (cmstrans2, locale_path, [kalb] , fallback=True)
_ = lang1.ugettext
aaa=""
pg=self.request.get('pg')
entitiesRx = re.compile("[^0-9]")
pg=entitiesRx.sub("", pg)
if pg:
pg = int(pg)
else:
pg=0
query = db.GqlQuery("SELECT * "
"FROM FBUser "
"ORDER BY updated DESC")
greetings = query.fetch(8,pg*8)
co=query.count()
i=0
ii=0
bbb=""
while i<=co:
i=i+8
if ii == pg:
bbb=bbb+' '+str(ii)
else:
bbb=bbb+' '+"<a href=\"/"+cmspath2+"-fbmemberlistshort-"+lang+'.'+fileext+"?pg="+ str(ii) +"\">"+ str(ii) +"</a>"
ii=ii+1
# aaa=aaa+"<center>"+bbb+"</center><br />\n"
aaa=aaa+"<table cellspacing=\"0\" cellpadding=\"0\">\n"
z = 0
for greeting in greetings:
z = z + 1
if z==1:
aaa=aaa+"<tr>\n"
userid = greeting.id
pseudonimas = greeting.nickname
imagemaxurl = ("http://graph.facebook.com/%s/picture" % (userid))
if greeting.id:
# userpageurl = ("http://www.facebook.com/profile.php?id=%s" % (userid))
userpageurl = ("%s/fbinfo?id=%s" % (urlhost2(),userid))
aaa=aaa+(("<td width=\"50\"><a href=\"%s\" target=\"_top\"><img src=\"%s\" border=\"0\" alt=\"%s\"></img></a></td>\n") % (userpageurl,imagemaxurl,pseudonimas))
else:
aaa=aaa+''
if z==2:
z=0
aaa=aaa+"\n</tr>"
# aaa=aaa+(_("Memberlist entry msg %(memlisttime)s %(memlistipaddr)s %(memlistbrowser)s") % {'memlisttime': greeting.laikas, 'memlistipaddr': greeting.ipadresas, 'memlistbrowser': greeting.narsykle})
if z==1:
aaa=aaa+"<td width=\"50\"> </td></tr>"
# if z==0:
# aaa=aaa+"\n</tr>"
aaa=aaa+"\n</table>"
# if not users.get_current_user() or not users.is_current_user_admin():
# aaa = _("Sign in on Administrator %(userloginurl)s") % {'userloginurl': users.create_login_url(self.request.uri)}
page = Page.loadnew("fbmemberlist")
page.content = aaa
page_name2 = 'menu'+'-'+lang+'.'+fileext
page2 = Page.load(page_name2)
page3 = Page.loadnew("kalbos")
textaps=''
if len(aps)>0:
textaps=aps+'.'
text=''
for name, value in kalbossort:
text = text + (_kalbhtml % ('fbmemberlist', textaps+name, ext, name, name))
page3.content = text
self.generate('viewicon.html', lang, {
'imgshar': False,
'noedit': '1',
'application_name': siteauth(),
'kalbos': page3,
'menu': page2,
'page': page,
})
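# LinkedIn member list: paged thumbnail grid backed by LIUser records; avatars
# are served through the liphoto2 handler when that module is deployed.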
class LIUserListSort(BaseRequestHandler):
def get(self, rparameters):
param=urlparam(rparameters)
ext=param['ext']
lang=param['lang']
aps=param['aps']
kalb=param['kalb']
lang1 = gettext.translation (cmstrans2, locale_path, [kalb] , fallback=True)
_ = lang1.ugettext
aaa=""
pg=self.request.get('pg')
entitiesRx = re.compile("[^0-9]")
pg=entitiesRx.sub("", pg)
if pg:
pg = int(pg)
else:
pg=0
query = db.GqlQuery("SELECT * "
"FROM LIUser "
"ORDER BY updated DESC")
greetings = query.fetch(8,pg*8)
co=query.count()
i=0
ii=0
bbb=""
while i<=co:
i=i+8
if ii == pg:
bbb=bbb+' '+str(ii)
else:
bbb=bbb+' '+"<a href=\"/"+cmspath2+"-fbmemberlistshort-"+lang+'.'+fileext+"?pg="+ str(ii) +"\">"+ str(ii) +"</a>"
ii=ii+1
# aaa=aaa+"<center>"+bbb+"</center><br />\n"
aaa=aaa+"<table cellspacing=\"0\" cellpadding=\"0\">\n"
z = 0
for greeting in greetings:
z = z + 1
if z==1:
aaa=aaa+"<tr>\n"
ukey = greeting.key()
userid = greeting.id
pseudonimas = greeting.nickname
profile_url = greeting.profile_url
liuname = greeting.name
directory = os.path.dirname(__file__)
pathimg = os.path.join(directory, 'liphoto2.py')
if os.path.exists(pathimg) and os.path.isfile(pathimg):
imagemaxurl = ("%s/liphoto2/%s" % (urlhost2(),ukey))
else:
imagemaxurl = ("%s%s" % (urlhost2(),avatarminurl2))
if greeting.id:
# userpageurl = ("http://www.facebook.com/profile.php?id=%s" % (userid))
userpageurl = profile_url
aaa=aaa+(("<td width=\"50\"><a href=\"%s\" target=\"_top\"><img src=\"%s\" border=\"0\" alt=\"%s\"></img></a></td>\n") % (userpageurl,imagemaxurl,pseudonimas))
else:
aaa=aaa+''
if z==2:
z=0
aaa=aaa+"\n</tr>"
# aaa=aaa+(_("Memberlist entry msg %(memlisttime)s %(memlistipaddr)s %(memlistbrowser)s") % {'memlisttime': greeting.laikas, 'memlistipaddr': greeting.ipadresas, 'memlistbrowser': greeting.narsykle})
if z==1:
aaa=aaa+"<td width=\"50\"> </td></tr>"
# if z==0:
# aaa=aaa+"\n</tr>"
aaa=aaa+"\n</table>"
# if not users.get_current_user() or not users.is_current_user_admin():
# aaa = _("Sign in on Administrator %(userloginurl)s") % {'userloginurl': users.create_login_url(self.request.uri)}
page = Page.loadnew("limemberlist")
page.content = aaa
page_name2 = 'menu'+'-'+lang+'.'+fileext
page2 = Page.load(page_name2)
page3 = Page.loadnew("kalbos")
textaps=''
if len(aps)>0:
textaps=aps+'.'
text=''
for name, value in kalbossort:
                        text = text + (_kalbhtml % ('limemberlist', textaps+name, ext, name, name))
page3.content = text
self.generate('viewicon.html', lang, {
'imgshar': False,
'noedit': '1',
'application_name': siteauth(),
'kalbos': page3,
'menu': page2,
'page': page,
})
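# VKontakte member list: paged thumbnail grid backed by VKUser records; avatars
# are served through the vkphoto handler when that module is deployed.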
class VKUserListSort(BaseRequestHandler):
def get(self, rparameters):
param=urlparam(rparameters)
ext=param['ext']
lang=param['lang']
aps=param['aps']
kalb=param['kalb']
lang1 = gettext.translation (cmstrans2, locale_path, [kalb] , fallback=True)
_ = lang1.ugettext
aaa=""
pg=self.request.get('pg')
entitiesRx = re.compile("[^0-9]")
pg=entitiesRx.sub("", pg)
if pg:
pg = int(pg)
else:
pg=0
query = db.GqlQuery("SELECT * "
"FROM VKUser "
"ORDER BY updated DESC")
greetings = query.fetch(8,pg*8)
co=query.count()
i=0
ii=0
bbb=""
while i<=co:
i=i+8
if ii == pg:
bbb=bbb+' '+str(ii)
else:
bbb=bbb+' '+"<a href=\"/"+cmspath2+"-vkmemberlistshort-"+lang+'.'+fileext+"?pg="+ str(ii) +"\">"+ str(ii) +"</a>"
ii=ii+1
# aaa=aaa+"<center>"+bbb+"</center><br />\n"
aaa=aaa+"<table cellspacing=\"0\" cellpadding=\"0\">\n"
z = 0
for greeting in greetings:
z = z + 1
if z==1:
aaa=aaa+"<tr>\n"
userid = greeting.id
pseudonimas = greeting.nickname
directory = os.path.dirname(__file__)
pathimg = os.path.join(directory, 'vkphoto.py')
if os.path.exists(pathimg) and os.path.isfile(pathimg):
imagemaxurl = ("/vkphoto/%s" % (userid))
else:
imagemaxurl = ("%s%s" % (urlhost2(),avatarminurl2))
if greeting.id:
# userpageurl = ("http://www.facebook.com/profile.php?id=%s" % (userid))
# userpageurl = ("%s/fbinfo?id=%s" % (urlhost2(),userid))
userpageurl = greeting.profile_url
aaa=aaa+(("<td width=\"50\"><a href=\"%s\" target=\"_top\"><img src=\"%s\" border=\"0\" alt=\"%s\"></img></a></td>\n") % (userpageurl,imagemaxurl,pseudonimas))
else:
aaa=aaa+''
if z==2:
z=0
aaa=aaa+"\n</tr>"
# aaa=aaa+(_("Memberlist entry msg %(memlisttime)s %(memlistipaddr)s %(memlistbrowser)s") % {'memlisttime': greeting.laikas, 'memlistipaddr': greeting.ipadresas, 'memlistbrowser': greeting.narsykle})
if z==1:
aaa=aaa+"<td width=\"50\"> </td></tr>"
# if z==0:
# aaa=aaa+"\n</tr>"
aaa=aaa+"\n</table>"
# if not users.get_current_user() or not users.is_current_user_admin():
# aaa = _("Sign in on Administrator %(userloginurl)s") % {'userloginurl': users.create_login_url(self.request.uri)}
page = Page.loadnew("vkmemberlist")
page.content = aaa
page_name2 = 'menu'+'-'+lang+'.'+fileext
page2 = Page.load(page_name2)
page3 = Page.loadnew("kalbos")
textaps=''
if len(aps)>0:
textaps=aps+'.'
text=''
for name, value in kalbossort:
text = text + (_kalbhtml % ('vkmemberlist', textaps+name, ext, name, name))
page3.content = text
self.generate('viewicon.html', lang, {
'imgshar': False,
'noedit': '1',
'application_name': siteauth(),
'kalbos': page3,
'menu': page2,
'page': page,
})
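# Maintenance mode: serves the "disable" snippet stored in DinCode, or a
# built-in fallback, for both GET and POST.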
class SiteDisable(webapp.RequestHandler):
def get(self,pagename):
disablecode = "<html><body>Disable, swith to on</body></html>"
try:
codedb = db.GqlQuery("SELECT * FROM DinCode WHERE codename = :1", "disable")
for thiscode in codedb:
disablecode = thiscode.codetext
except:
disablecode = "<html><body>Disable, swith to on</body></html>"
self.response.out.write(disablecode)
def post(self,pagename):
disablecode = "<html><body>Disable, swith to on</body></html>"
try:
codedb = db.GqlQuery("SELECT * FROM DinCode WHERE codename = :1", "disable")
for thiscode in codedb:
disablecode = thiscode.codetext
except:
disablecode = "<html><body>Disable, swith to on</body></html>"
self.response.out.write(disablecode)
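# Static response returned while the site is throttled for excessive traffic.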
class HttpError(webapp.RequestHandler):
def get(self,pagename):
disablecode = "<html><body>over quota - website flood botnet</body></html>"
self.response.out.write(disablecode)
def post(self,pagename):
disablecode = "<html><body>over quota - website flood botnet</body></html>"
self.response.out.write(disablecode)
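# Datastore models: per-user Picasa album visibility, spam-IP bookkeeping, and
# the three generations of comment records (global, per-user, per-page).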
class PicaAlbumOn(db.Model):
lankytojas = db.UserProperty(required=True)
laikas = db.DateTimeProperty(auto_now_add=True)
administratorius = db.BooleanProperty()
ipadresas = db.StringProperty()
userid = db.StringProperty()
rodyti = db.BooleanProperty()
albumname = db.StringProperty()
class SpamIP(db.Model):
ipadresas = db.StringProperty()
lastserver = db.StringProperty()
date = db.DateTimeProperty(auto_now_add=True)
check = db.BooleanProperty()
spamcount = db.StringProperty()
spam = db.BooleanProperty()
class Commentsrec(db.Model):
# laikas = db.DateTimeProperty(auto_now_add=True)
author = db.UserProperty()
content = db.StringProperty(multiline=True)
rname = db.StringProperty(multiline=False)
avatar = db.BlobProperty()
avatarmax = db.BlobProperty()
date = db.DateTimeProperty(auto_now_add=True)
ipadresas = db.StringProperty()
rodyti = db.BooleanProperty()
class Commentsrec2(db.Model):
# laikas = db.DateTimeProperty(auto_now_add=True)
vartot = db.ReferenceProperty(Vartotojai, collection_name='komentarai')
author = db.UserProperty()
userid = db.StringProperty()
content = db.StringProperty(multiline=True)
rname = db.StringProperty(multiline=False)
avatar = db.BlobProperty()
avatarmax = db.BlobProperty()
date = db.DateTimeProperty(auto_now_add=True)
ipadresas = db.StringProperty()
rodyti = db.BooleanProperty()
class Commentsrec3(db.Model):
# laikas = db.DateTimeProperty(auto_now_add=True)
commpage = db.StringProperty()
vartot = db.ReferenceProperty(Vartotojai, collection_name='komentarai-go')
vartotfb = db.ReferenceProperty(FBUser, collection_name='komentarai-fb')
vartotli = db.ReferenceProperty(LIUser, collection_name='komentarai-li')
vartotvk = db.ReferenceProperty(VKUser, collection_name='komentarai-vk')
author = db.UserProperty()
userid = db.StringProperty()
content = db.StringProperty(multiline=True)
rname = db.StringProperty(multiline=False)
avatar = db.BlobProperty()
avatarmax = db.BlobProperty()
date = db.DateTimeProperty(auto_now_add=True)
ipadresas = db.StringProperty()
rodyti = db.BooleanProperty()
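# Derive a Google+ profile URL from the user's public Picasa feed by pulling
# the numeric profile id out of the feed's 'alternate' link.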
def getplius(useris):
        try:
                f = urllib.urlopen("http://picasaweb.google.com/data/feed/api/user/%s?kind=album" % useris)
                data=f.read()
                # r = re.compile("(user'/><title.*>)([\s\S]*)(</title>)")
                # plusid = r.search(data).group(2)
                r = re.compile("(alternate.*)(google.com/[\d]*)(\'/)")
                # r = re.compile("(<link rel=\x27alternate\x27 type=\x27text/html\x27 href=\x27https://picasaweb.google.com/)(.*)(\x27/>)")
                plusid = "https://plus."+r.search(data).group(2)
                return plusid
        except:
                return False
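# User control panel: profile form (avatar, pseudonym, YouTube name,
# Picasa/Plus/comment toggles) for the signed-in user, or a login prompt.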
class UserControl(BaseRequestHandler):
def get(self,rparameters):
# self.response.out.write('<html><head><style>body { text-align: center; font: 11px arial, sans-serif; color: #565656; } .clear { clear:both; } .comm-container { margin-bottom:20px;} .comm-name { font-size:10pt; float:left; width:20%; padding:5px; overflow:hidden; } .comm-text { float:left; line-height:17px; width:70%; padding:5px; padding-top:0px; overflow:hidden; } .font-small-gray { font-size:10pt !important; }</style></head><body>')
param=urlparam(rparameters)
ext=param['ext']
lang=param['lang']
aps=param['aps']
kalb=param['kalb']
lang1 = gettext.translation (cmstrans2, locale_path, [kalb] , fallback=True)
_ = lang1.ugettext
wtext=""
codekey=codekey2()
buvoapp = False
rpica = False
rplus = True
rcomm = True
userid = "0"
content = ""
pseudonimas = "Anonymous"
thubnail = avatarmaxurl2
thubnail2 = avatarmaxurl2
eee=""
rpicacheck = ""
rcommcheck = ""
buvoappcheck=""
youtname=""
if users.get_current_user():
user = users.get_current_user()
userinfo2=userinfo(user ,False,lang,ext)
for key, val in userinfo2.items():
try:
exec(key + '=val')
except Exception, e:
err=''
usercppicaurl = ("/%s-userpicacontrol-%s.%s" % (cmspath2,lang,fileext))
eee=str(user.email())
eee = eee.strip()
try:
thubnail=getphoto(eee)
# wtext = wtext + eee+ " "+str(thubnail) +"<br />"
thubnail2 = str(thubnail)
uphoto=thubnail2.split("/s144/", 1)
slasas="/s200/"
thubnail2 = slasas.join(uphoto)
except:
klaida=True
if users.get_current_user():
wtext = wtext + _("user control panel header") + ("<br />\n<img src=\"%s\" border=\"0\" alt=\"\"></img><img src=\"%s\" border=\"0\" alt=\"\"></img><br />\n\n%s" % (imagemaxurl,thubnail2,usercpplustext))+(_("User control panel form %(usercpsendurl)s %(usercpcodekey)s %(usercpuserid)s %(usercpcontent)s %(usercppseudonimas)s %(rpluscheck)s %(rpicacheck)s %(buvoappcheck)s %(youtname)s %(rcommcheck)s") % {'usercpsendurl': '/'+cmspath2+'-usercpsubmit-'+lang+'.'+fileext, 'usercpcodekey': codekey, 'usercpuserid': userid, 'usercpcontent': content2,'usercppseudonimas': pseudonimas,'rpluscheck': rpluscheck,'rpicacheck': rpicacheck,'buvoappcheck': buvoappcheck,'youtname': youtname,'rcommcheck': rcommcheck})
if rpica:
wtext = wtext + (_("pica control link %(usercppicaurl)s") % {'usercppicaurl': usercppicaurl})
if userid != "0":
wtext = wtext + (_("vartotojo puslapis %(usercppseudonimas)s %(userpageurl)s") % {'usercppseudonimas': pseudonimas, 'userpageurl': userpageurl})
else:
wtext = wtext + _("user control panel header") + "<br />" + (_("Sign in or register %(userloginurl)s") % {'userloginurl': users.create_login_url(self.request.uri)})
page = Page.loadnew("usercontrolpanel")
page.content = wtext
page_name2 = 'menu'+'-'+lang+'.'+fileext
page2 = Page.load(page_name2)
page3 = Page.loadnew("kalbos")
textaps=''
if len(aps)>0:
textaps=aps+'.'
text=''
for name, value in kalbossort:
text = text + (_kalbhtml % ('usercontrolpanel', textaps+name, ext, name, name))
page3.content = text
self.generate('view.html', lang, {
'imgshar': False,
'noedit': '1',
'application_name': siteauth(),
'kalbos': page3,
'menu': page2,
'page': page,
})
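# Public profile page for one user, looked up by userid (pic_key).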
class UserShowPage(BaseRequestHandler):
def get(self,rparameters, pseudonim , pic_key):
# self.response.out.write('<html><head><style>body { text-align: center; font: 11px arial, sans-serif; color: #565656; } .clear { clear:both; } .comm-container { margin-bottom:20px;} .comm-name { font-size:10pt; float:left; width:20%; padding:5px; overflow:hidden; } .comm-text { float:left; line-height:17px; width:70%; padding:5px; padding-top:0px; overflow:hidden; } .font-small-gray { font-size:10pt !important; }</style></head><body>')
param=urlparam(rparameters)
ext=param['ext']
lang=param['lang']
aps=param['aps']
kalb=param['kalb']
lang1 = gettext.translation (cmstrans2, locale_path, [kalb] , fallback=True)
_ = lang1.ugettext
wtext=""
codekey=codekey2()
userinfo2=userinfo(pic_key, True,lang,ext)
for key, val in userinfo2.items():
try:
exec(key + '=val')
except Exception, e:
err=''
wtext = wtext + (_("user page header %(pseudonimas)s") % {'pseudonimas': pseudonimas}) + ("<br />\n<img src=\"%s\" border=\"0\" id=\"profile_pic\" alt=\"\"></img><br />\n\n%s" % (imagemaxurl,usercpplustext))+(_("User page %(usercpuserid)s %(usercpcontent)s %(usercppseudonimas)s %(usercpurl)s %(userpicapagetext)s %(usermailformtext)s") % { 'usercpuserid': userid, 'usercpcontent': content,'usercppseudonimas': pseudonimas, 'usercpurl': usercpurl, 'userpicapagetext': userpicapagetext,'usermailformtext': usermailformtext})
page = Page.loadnew("userpage")
page.content = wtext
page_name2 = 'menu'+'-'+lang+'.'+fileext
page2 = Page.load(page_name2)
page3 = Page.loadnew("kalbos")
textaps=''
if len(aps)>0:
textaps=aps+'.'
text=''
for name, value in kalbossort:
text = text + (_kalbhtml % ('userpage', textaps+name, userpageend, name, name))
page3.content = text
self.generate('view.html', lang, {
'imgshar': False,
'noedit': '1',
'application_name': siteauth(),
'kalbos': page3,
'menu': page2,
'page': page,
})
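# YouTube tab of a profile: lists the user's uploads via the gdata v2
# JSON-C feed when a YouTube name is set.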
class UserYoutPage(BaseRequestHandler):
def get(self,rparameters, pseudonim , pic_key):
# self.response.out.write('<html><head><style>body { text-align: center; font: 11px arial, sans-serif; color: #565656; } .clear { clear:both; } .comm-container { margin-bottom:20px;} .comm-name { font-size:10pt; float:left; width:20%; padding:5px; overflow:hidden; } .comm-text { float:left; line-height:17px; width:70%; padding:5px; padding-top:0px; overflow:hidden; } .font-small-gray { font-size:10pt !important; }</style></head><body>')
param=urlparam(rparameters)
ext=param['ext']
lang=param['lang']
aps=param['aps']
kalb=param['kalb']
lang1 = gettext.translation (cmstrans2, locale_path, [kalb] , fallback=True)
_ = lang1.ugettext
wtext=""
# codekey=codekey2()
userinfo2=userinfo(pic_key, True,lang,ext)
for key, val in userinfo2.items():
try:
exec(key + '=val')
except Exception, e:
err=''
usercppicatext=""
if youtname and len(str(youtname))>0:
yra=False
out=""
try:
if not yra:
f = urllib.urlopen("https://gdata.youtube.com/feeds/api/users/%s/uploads?v=2&alt=jsonc" % youtname)
data = json.loads(f.read())
out=out+"<table>"
for item in data['data']['items']:
out=out+"<tr><td>Video Title: </td><td>%s</td></tr>" % (item['title'])
out=out+"<tr><td>Video Category: </td><td>%s</td></tr>" % (item['category'])
out=out+"<tr><td>Video ID: </td><td>%s</td></tr>" % (item['id'])
if item.has_key('rating'):
out=out+"<tr><td>Video Rating: </td><td>%f</td></tr>" % (item['rating'])
out=out+"<tr><td>Embed URL: </td><td><a href=\"%s\">link to Youtube</a></td></tr>" % (item['player']['default'])
out=out+"<tr><td> </td><td> </td></tr>"
out=out+"</table>"
yra=True
except:
yra=False
if yra:
usercppicatext=("<div>%s</div>\n\t" % (out))
else:
usercppicatext="<div>Youtube not found or error</div>\n\t";
wtext = wtext + (_("user yout page header %(pseudonimas)s") % {'pseudonimas': pseudonimas}) + ("<br />\n<img src=\"%s\" border=\"0\" id=\"profile_pic\" alt=\"\"></img><br />\n\n%s" % (imagemaxurl,usercpplustext))+(_("User yout page %(usercpuserid)s %(usercpcontent)s %(usercppseudonimas)s %(usercpurl)s %(usercppicatext)s") % { 'usercpuserid': userid, 'usercpcontent': content,'usercppseudonimas': pseudonimas, 'usercpurl': usercpurl, 'usercppicatext': usercppicatext})
wtext = wtext + (_("vartotojo puslapis %(usercppseudonimas)s %(userpageurl)s") % {'usercppseudonimas': pseudonimas, 'userpageurl': userpageurl})
page = Page.loadnew("useryoutpage")
page.content = wtext
page_name2 = 'menu'+'-'+lang+'.'+fileext
page2 = Page.load(page_name2)
page3 = Page.loadnew("kalbos")
textaps=''
if len(aps)>0:
textaps=aps+'.'
text=''
for name, value in kalbossort:
text = text + (_kalbhtml % ('useryoutpage', textaps+name, userpageend, name, name))
page3.content = text
self.generate('view.html', lang, {
'imgshar': False,
'noedit': '1',
'application_name': siteauth(),
'kalbos': page3,
'menu': page2,
'page': page,
})
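# Per-user comment wall: paged Commentsrec2 entries with show/hide/delete
# controls for the comment author, the wall owner, and admins.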
class UserCommPage(BaseRequestHandler):
def get(self,rparameters, pseudonim , pic_key):
# self.response.out.write('<html><head><style>body { text-align: center; font: 11px arial, sans-serif; color: #565656; } .clear { clear:both; } .comm-container { margin-bottom:20px;} .comm-name { font-size:10pt; float:left; width:20%; padding:5px; overflow:hidden; } .comm-text { float:left; line-height:17px; width:70%; padding:5px; padding-top:0px; overflow:hidden; } .font-small-gray { font-size:10pt !important; }</style></head><body>')
param=urlparam(rparameters)
ext=param['ext']
lang=param['lang']
aps=param['aps']
kalb=param['kalb']
lang1 = gettext.translation (cmstrans2, locale_path, [kalb] , fallback=True)
_ = lang1.ugettext
wtext=""
userinfo2=userinfo(pic_key, True,lang,ext)
for key, val in userinfo2.items():
try:
exec(key + '=val')
except Exception, e:
err=''
if rcomm:
yra=False
wtext=""
try:
pg=self.request.get('pg')
entitiesRx = re.compile("[^0-9]")
pg=entitiesRx.sub("", pg)
if pg:
pg = int(pg)
else:
pg=0
try:
query = db.GqlQuery("SELECT * FROM Commentsrec2 WHERE vartot = :1 ORDER BY date DESC", vartot)
# query = db.GqlQuery("SELECT * FROM Commentsrec WHERE rodyti = :1, author = :2 ORDER BY date DESC", '1',users.GetCurrentUser())
greetings = query.fetch(10,pg*10)
co=query.count()
except:
klaida=True
co=0
greetings = []
i=0
ii=0
bbb=""
while i<=co:
i=i+10
if ii == pg:
bbb=bbb+' '+str(ii)
else:
bbb=bbb+' '+"<a href=\"/"+cmspath2+"-usercommpage-"+lang+'.'+fileext+'/'+pseudonimas+'/'+userid+"?pg="+ str(ii) +"\">"+ str(ii) +"</a>"
ii=ii+1
# page2 = Page.load("atsi-"+lang+'.'+fileext)
wtext=wtext+"<div><hr width=\"70%\"></hr></div>\n<div><div style=\"text-align: center;\">"+bbb+"</div>\n\n"
for greeting in greetings:
wijun = ""
wdel = ""
if greeting.rodyti or (users.GetCurrentUser() and users.get_current_user() == greeting.author) or (users.GetCurrentUser() and users.get_current_user() == vartot.lankytojas) or users.is_current_user_admin():
if users.is_current_user_admin():
wdel = _("Comments delete %(commswiturl)s %(commkey)s") % {'commswiturl': '/commswit', 'commkey': greeting.key()}
if (users.GetCurrentUser() and users.get_current_user() == vartot.lankytojas) or (users.GetCurrentUser() and users.get_current_user() == greeting.author) or users.is_current_user_admin():
if not greeting.rodyti:
wijun = _("Comments show %(commswiturl)s %(commkey)s") % {'commswiturl': '/commswit', 'commkey': greeting.key()}
else:
wijun = _("Comments hidden %(commswiturl)s %(commkey)s") % {'commswiturl': '/commswit', 'commkey': greeting.key()}
user3 = greeting.author
pseudonimas3 = "Anonymous"
userid3 = '0'
try:
buvesapp = db.GqlQuery("SELECT * FROM Vartotojai WHERE lankytojas = :1", user3)
for app in buvesapp:
userid3 = app.userid
pseudonimas3 = app.pseudonimas
except:
klaida=True
imagemaxurl2 = ("/%s-userimagemin/%s/%s" % (cmspath2,pseudonimas3, userid3))
userpageurl = ("%s/%s-userpage-%s.%s/%s/%s" % (urlhost2(), cmspath2,lang, fileext, pseudonimas3, userid3))
wtext = wtext + "\n<div class=\"comm-container\">"
wtext = wtext + "<div class=\"comm-name\">"+("<a href=\"%s\"><img src=\"%s\" alt=\"\" border=\"0\"></img></a>" % (userpageurl,imagemaxurl2))+(' <strong>%s</strong>' % pseudonimas3) +", "+('<div class="font-small-gray">%s</div>' % greeting.date.strftime("%a, %d %b %Y %H:%M:%S"))
if greeting.avatar:
if greeting.avatarmax:
wtext = wtext + ('<div class="font-small-gray"><a href="/commimg?img_id=%s&size=yes"><img src="/commimg?img_id=%s" alt=""></img></a></div>' % (greeting.key(),greeting.key()))
else:
wtext = wtext + ('<div class="font-small-gray"><img src="/commimg?img_id=%s" alt=""></img></div>' % greeting.key())
wtext = wtext + ("</div><div class=\"comm-text\"><div>%s</div></div>\n\n</div><div class=\"clear\"><!-- --></div>\n\n<div>%s %s</div>\n\n<div> </div>\n" % (greeting.content,wijun,wdel))
codekey=codekey2()
if users.GetCurrentUser():
wtext = wtext + "\n</div>\n<div> </div>\n"+(_("user Comments form %(commsendurl)s %(commcodekey)s") % {'commsendurl': '/'+cmspath2+'-usercommsubmit-'+lang+'.'+fileext+'/'+pseudonimas +'/'+userid, 'commcodekey': codekey})
else:
wtext = wtext + "\n</div>\n<div> </div>\n<div>" + (_("Sign in or register %(userloginurl)s") % {'userloginurl': users.create_login_url(self.request.uri)}) + "</div>"
yra=True
except:
yra=False
if yra:
usercppicatext=("<div>%s</div>\n\t" % (wtext))
else:
usercppicatext="<div>comments db error</div>\n\t";
wtext = (_("user comm page header %(pseudonimas)s") % {'pseudonimas': pseudonimas}) + ("<br />\n<img src=\"%s\" border=\"0\" id=\"profile_pic\" alt=\"\"></img><br />\n\n%s" % (imagemaxurl,usercpplustext))+(_("User comm page %(usercpuserid)s %(usercpcontent)s %(usercppseudonimas)s %(usercpurl)s %(usercppicatext)s") % { 'usercpuserid': userid, 'usercpcontent': content,'usercppseudonimas': pseudonimas, 'usercpurl': usercpurl, 'usercppicatext': usercppicatext})
wtext = wtext + (_("vartotojo puslapis %(usercppseudonimas)s %(userpageurl)s") % {'usercppseudonimas': pseudonimas, 'userpageurl': userpageurl})
page = Page.loadnew("usercommpage")
page.content = wtext
page_name2 = 'menu'+'-'+lang+'.'+fileext
page2 = Page.load(page_name2)
page3 = Page.loadnew("kalbos")
textaps=''
if len(aps)>0:
textaps=aps+'.'
text=''
for name, value in kalbossort:
text = text + (_kalbhtml % ('usercommpage', textaps+name, userpageend, name, name))
page3.content = text
self.generate('view.html', lang, {
'imgshar': False,
'noedit': '1',
'application_name': siteauth(),
'kalbos': page3,
'menu': page2,
'page': page,
})
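# Saves a wall comment: captcha check, BBCode rendering, 400-char cap,
# optional image resize, then a notification mail to the site address.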
class UserCommSubmit(webapp.RequestHandler):
def post(self, rparameters, pseudonim , pic_key):
param=urlparam(rparameters)
ext=param['ext']
lang=param['lang']
aps=param['aps']
kalb=param['kalb']
lang1 = gettext.translation (cmstrans2, locale_path, [kalb] , fallback=True)
_ = lang1.ugettext
userinfo2=userinfo(pic_key,True,lang,ext)
for key, val in userinfo2.items():
try:
exec(key + '=val')
except Exception, e:
err=''
connt=""
try:
codeimg = db.get(self.request.get("scodeid"))
except:
prn="Error"
if codeimg and codeimg.code == self.request.get("scode") and rcomm:
greeting = Commentsrec2()
greeting.vartot = vartot
greeting.rodyti = True
greeting.userid = userid
greeting.ipadresas = os.environ['REMOTE_ADDR']
# greeting.laikas = datetime.datetime.now()
if users.get_current_user():
greeting.author = users.get_current_user()
connt = cgi.escape(self.request.get("content"))
connt = render_bbcode(connt)
connt = connt[0:400]
greeting.content = connt
# priesduom = self.request.get("img")
greeting.rname = pseudonimas
if self.request.get("img"):
avatarmax = images.resize(self.request.get("img"), width=600, height=400, output_encoding=images.PNG)
greeting.avatarmax = db.Blob(avatarmax)
avatar = images.resize(self.request.get("img"), width=96, height=96, output_encoding=images.PNG)
greeting.avatar = db.Blob(avatar)
                        greeting.put()
to_addr = _mailrcptto
user = users.get_current_user()
if user:
uname=user.nickname()
umail=users.get_current_user().email()
else:
uname=""
umail=""
message = mail.EmailMessage()
message.subject = os.environ['HTTP_HOST'] + " - comments"
# message.subject = "www"
message.sender = _mailsender
message.to = to_addr
q_message = ("\n%s: %s \n%s \n%s \n" % ('Page', str(self.request.uri),str(textinfo()),str(textloc())))
message.body = (_("Comments mail message %(communame)s %(commumail)s %(commrealname)s %(commmessage)s") % {'communame': uname,'commumail': umail,'commrealname': greeting.rname,'commmessage': greeting.content}) + q_message
message.send()
self.redirect('/'+cmspath2+'-usercommpage-'+lang+'.'+fileext+'/'+pseudonimas+'/'+userid )
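# Picasa tab of a profile: lists the albums the owner has made visible, or
# the photos of a single album when ?album= is given.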
class UserPicaPage(BaseRequestHandler):
def get(self,rparameters, pseudonim , pic_key):
# self.response.out.write('<html><head><style>body { text-align: center; font: 11px arial, sans-serif; color: #565656; } .clear { clear:both; } .comm-container { margin-bottom:20px;} .comm-name { font-size:10pt; float:left; width:20%; padding:5px; overflow:hidden; } .comm-text { float:left; line-height:17px; width:70%; padding:5px; padding-top:0px; overflow:hidden; } .font-small-gray { font-size:10pt !important; }</style></head><body>')
param=urlparam(rparameters)
ext=param['ext']
lang=param['lang']
aps=param['aps']
kalb=param['kalb']
lang1 = gettext.translation (cmstrans2, locale_path, [kalb] , fallback=True)
_ = lang1.ugettext
wtext=""
codekey=codekey2()
userinfo2=userinfo(pic_key, True,lang,ext)
for key, val in userinfo2.items():
try:
exec(key + '=val')
except Exception, e:
err=''
if rpica:
albumbuvo = {}
buves_album = db.GqlQuery("SELECT * FROM PicaAlbumOn WHERE userid = :1", userid)
for albumb in buves_album:
albumname=albumb.albumname
albumbuvo[albumname]=albumb.rodyti
user2 = lank.email()
album = self.request.get("album")
yra=False
out=""
try:
if not self.request.get("album"):
f = urllib.urlopen("http://picasaweb.google.com/data/feed/api/user/%s?kind=album" % user2)
list = Picasa().albums(f.read())
# self.response.out.write("<xmp>")
# self.response.out.write(list)
# self.response.out.write("</xmp>")
out=out+"<table>"
for name in list.keys():
album = list[name]
if albumbuvo.has_key(name) and albumbuvo[name]: #albumname in albumbuvo:
out=out+("<tr><td><img src=\"%s\" border=\"0\" alt=\"%s\"></img></td><td><a href=\"%s?album=%s\">%s</a></td><td>%s</td></tr>" % (album.thumbnail,album.title,userpicapageurl,name,name,album.title))
# pass
out=out+"<tr><td colspan=\"3\"></td></tr></table>"
# pass
yra=True
else:
f = urllib.urlopen("http://picasaweb.google.com/data/feed/api/user/%s/album/%s?kind=photo" % (user2,album))
list = Picasa().photos(f.read())
out=out+"<table>"
for photo in list:
out=out+("<tr><td><img src=\"%s\" border=\"0\" alt=\"%s\"></img></td><td><a href=\"%s\">%s</a></td><td>%s</td></tr>" % (photo.thumbnail,photo.title,photo.webpage,photo.title, photo.getDatetime() ))
# pass
out=out+"<tr><td colspan=\"3\"></td></tr></table>"
# self.response.out.write("Please login");
yra=True
except:
yra=False
if yra:
usercppicatext=("<div>%s</div>\n\t" % (out))
else:
usercppicatext="<div>Picasa info not found or error</div>\n\t";
wtext = wtext + (_("user pica page header %(pseudonimas)s") % {'pseudonimas': pseudonimas}) + ("<br />\n<img src=\"%s\" border=\"0\" id=\"profile_pic\" alt=\"\"></img><br />\n\n%s" % (imagemaxurl,usercpplustext))+(_("User pica page %(usercpuserid)s %(usercpcontent)s %(usercppseudonimas)s %(usercpurl)s %(usercppicatext)s") % { 'usercpuserid': userid, 'usercpcontent': content,'usercppseudonimas': pseudonimas, 'usercpurl': usercpurl, 'usercppicatext': usercppicatext})
wtext = wtext + (_("vartotojo puslapis %(usercppseudonimas)s %(userpageurl)s") % {'usercppseudonimas': pseudonimas, 'userpageurl': userpageurl})
page = Page.loadnew("userpicapage")
page.content = wtext
page_name2 = 'menu'+'-'+lang+'.'+fileext
page2 = Page.load(page_name2)
page3 = Page.loadnew("kalbos")
textaps=''
if len(aps)>0:
textaps=aps+'.'
text=''
for name, value in kalbossort:
text = text + (_kalbhtml % ('userpicapage', textaps+name, userpageend, name, name))
page3.content = text
self.generate('view.html', lang, {
'imgshar': False,
'noedit': '1',
'application_name': siteauth(),
'kalbos': page3,
'menu': page2,
'page': page,
})
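# Picasa control panel: lets the signed-in user tick which of their albums
# are public; the selection is posted to UserPicaControlSend.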
class UserPicaControl(BaseRequestHandler):
def get(self,rparameters):
# self.response.out.write('<html><head><style>body { text-align: center; font: 11px arial, sans-serif; color: #565656; } .clear { clear:both; } .comm-container { margin-bottom:20px;} .comm-name { font-size:10pt; float:left; width:20%; padding:5px; overflow:hidden; } .comm-text { float:left; line-height:17px; width:70%; padding:5px; padding-top:0px; overflow:hidden; } .font-small-gray { font-size:10pt !important; }</style></head><body>')
param=urlparam(rparameters)
ext=param['ext']
lang=param['lang']
aps=param['aps']
kalb=param['kalb']
lang1 = gettext.translation (cmstrans2, locale_path, [kalb] , fallback=True)
_ = lang1.ugettext
wtext=""
codekey=codekey2()
user = users.get_current_user()
userinfo2=userinfo(user,False,lang,ext)
for key, val in userinfo2.items():
try:
exec(key + '=val')
except Exception, e:
err=''
usercppicaurl = ("/%s-userpicacontrol-%s.%s" % (cmspath2,lang,fileext))
usercppicasubmiturl = ("/%s-userpicasubmit-%s.%s" % (cmspath2,lang,fileext))
usercppicatext = ""
if rpica:
albumbuvo = {}
buves_album = db.GqlQuery("SELECT * FROM PicaAlbumOn WHERE lankytojas = :1", user)
for albumb in buves_album:
albumname=albumb.albumname
albumbuvo[albumname]=albumb.rodyti
user2 = lank.email()
album = self.request.get("album")
yra=False
out=""
namelist =""
errtext =""
buvoappcheck=""
try:
if not self.request.get("album"):
f = urllib.urlopen("http://picasaweb.google.com/data/feed/api/user/%s?kind=album" % user2)
list = Picasa().albums(f.read())
out=out+"<table><form method=\"POST\" action=\""+usercppicasubmiturl+"\">"
for name in list.keys():
                                        album = list[name]
                                        buvoappcheck=""
                                        if albumbuvo.has_key(name) and albumbuvo[name]: #albumname in albumbuvo:
                                                buvoappcheck="checked=\"yes\""
out=out+("<tr><td><img src=\"%s\" border=\"0\" alt=\"%s\"></img></td><td><a href=\"%s?album=%s\">%s</a></td><td>%s</td><td><input type=\"checkbox\" name=\"photoalbum\" value=\"%s\" %s></td></tr>" % (album.thumbnail,album.title,usercppicaurl,name,name,album.title,name,buvoappcheck))
namelist = namelist + "||" + str(base64.urlsafe_b64encode(str(name)))
out=out+"<tr><td colspan=\"4\"><input type=\"hidden\" name=\"namelist\" value=\""+str(namelist)+"\" ><input type=\"submit\"></td></tr></form></table>"
yra=True
else:
f = urllib.urlopen("http://picasaweb.google.com/data/feed/api/user/%s/album/%s?kind=photo" % (user2,album))
list = Picasa().photos(f.read())
out=out+"<table>"
for photo in list:
out=out+("<tr><td><img src=\"%s\" border=\"0\" alt=\"%s\"></img></td><td><a href=\"%s\">%s</a></td><td>%s</td></tr>" % (photo.thumbnail,photo.title,photo.webpage,photo.title, photo.getDatetime() ))
out=out+"<tr><td colspan=\"3\"></td></tr></table>"
yra=True
except:
errtext = cgi.escape(str(sys.exc_info()[0]))
yra=False
if yra:
usercppicatext=("<div>%s</div>\n\t" % (out))
else:
usercppicatext="<div>Picasa info not found or error " + errtext +"</div>\n\t";
wtext = wtext + _("user pica control panel header") + ("<br />\n<img src=\"%s\" border=\"0\" id=\"profile_pic\" alt=\"\"></img><br />\n\n%s" % (imagemaxurl,usercpplustext))+(_("User pica page %(usercpuserid)s %(usercpcontent)s %(usercppseudonimas)s %(usercpurl)s %(usercppicatext)s") % { 'usercpuserid': userid, 'usercpcontent': content,'usercppseudonimas': pseudonimas, 'usercpurl': usercpurl, 'usercppicatext': usercppicatext})
page = Page.loadnew("userpage")
page.content = wtext
page_name2 = 'menu'+'-'+lang+'.'+fileext
page2 = Page.load(page_name2)
page3 = Page.loadnew("kalbos")
textaps=''
if len(aps)>0:
textaps=aps+'.'
text=''
for name, value in kalbossort:
text = text + (_kalbhtml % ('userpage', textaps+name, userpageend, name, name))
page3.content = text
self.generate('view.html', lang, {
'imgshar': False,
'noedit': '1',
'application_name': siteauth(),
'kalbos': page3,
'menu': page2,
'page': page,
})
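# Persists the control-panel form into the user's Vartotojai record (content,
# pseudonym, avatars, visibility toggles) and mails the admin about the edit.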
class UserControlSend(webapp.RequestHandler):
def post(self, rparameters):
param=urlparam(rparameters)
ext=param['ext']
lang=param['lang']
aps=param['aps']
kalb=param['kalb']
lang1 = gettext.translation (cmstrans2, locale_path, [kalb] , fallback=True)
_ = lang1.ugettext
cont=""
                codeimg = None
                try:
                        codeimg = db.get(self.request.get("scodeid"))
                except:
                        prn="Error"
if codeimg and codeimg.code == self.request.get("scode"):
user = users.get_current_user()
klaida=False
try:
buves_vart = db.GqlQuery("SELECT * FROM Vartotojai WHERE lankytojas = :1", user)
for vart in buves_vart:
vart.ipadresas = os.environ['REMOTE_ADDR']
vart.narsykle = os.environ['HTTP_USER_AGENT']
# greeting.laikas = datetime.datetime.now()
if users.get_current_user():
vart.lankytojas = users.get_current_user()
cont = self.request.get("content")
cont = cgi.escape(cont)
vart.content = (cont)[0:2000]
# priesduom = self.request.get("img")
vart.pseudonimas = "Anonymous"
if self.request.get("img"):
avatarmin = images.resize(self.request.get("img"), width=50, height=50, output_encoding=images.PNG)
vart.avatarmin = db.Blob(avatarmin)
avatarmax = images.resize(self.request.get("img"), width=200, height=200, output_encoding=images.PNG)
vart.avatarmax = db.Blob(avatarmax)
vart.rodyti = True
if self.request.get("rname"):
entitiesRx = re.compile("[^0-9a-zA-Z]")
rnametext = cgi.escape(self.request.get("rname"))
rnametext = entitiesRx.sub("", rnametext)
vart.pseudonimas = rnametext[0:30]
if self.request.get("youtname"):
entitiesRx = re.compile("[^0-9a-zA-Z]")
ynametext = cgi.escape(self.request.get("youtname"))
ynametext = entitiesRx.sub("", ynametext)
vart.youtname = ynametext[0:50]
if self.request.get("globphoto"):
vart.rodyti = False
if self.request.get("picasaen"):
vart.picarodyti = True
else:
vart.picarodyti = False
if self.request.get("plusen"):
vart.plusrodyti = True
else:
vart.plusrodyti = False
if self.request.get("commen"):
vart.commrodyti = True
else:
vart.commrodyti = False
vart.put()
except:
errtext = cgi.escape(str(sys.exc_info()[0]))
klaida=True
to_addr = _mailrcptto
user = users.get_current_user()
if user:
uname=user.nickname()
umail=users.get_current_user().email()
else:
uname=""
umail=""
message = mail.EmailMessage()
message.subject = os.environ['HTTP_HOST'] + " - user page edit"
# message.subject = "www"
message.sender = _mailsender
message.to = to_addr
q_message = ("\n%s: %s \n%s \n%s \n" % ('Page', str(self.request.uri),str(textinfo()),str(textloc())))
message.body = (_("Comments mail message %(communame)s %(commumail)s %(commrealname)s %(commmessage)s") % {'communame': uname,'commumail': umail,'commrealname': vart.pseudonimas,'commmessage': vart.content}) + q_message
message.send()
if klaida:
self.response.out.write("""%s <br />\n""" % (errtext))
else:
self.redirect('/'+cmspath2+'-usercontrolpanel-'+lang+'.'+fileext)
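# Applies the album checkboxes: updates existing PicaAlbumOn rows and creates
# rows for albums seen for the first time, then notifies the admin by mail.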
class UserPicaControlSend(webapp.RequestHandler):
def post(self, rparameters):
param=urlparam(rparameters)
ext=param['ext']
lang=param['lang']
aps=param['aps']
kalb=param['kalb']
lang1 = gettext.translation (cmstrans2, locale_path, [kalb] , fallback=True)
_ = lang1.ugettext
user = users.get_current_user()
now = datetime.datetime.now()
buvo = False
if user:
# try:
if user:
photoal=[]
namelist2=[]
albumbuvo = {}
albumname = ""
# form = cgi.FieldStorage()
# item = form.getvalue("photoalbum")
# if form["namelist"].value:
# namelist=form["namelist"].value
# namelist2=namelist.split("||")
# if isinstance(item, list):
# for item in form.getlist("photoalbum"):
# photoal.append(item)
# else:
# photoa = form.getfirst("photoalbum", "")
# photoal.append(photoa)
namelist=self.request.POST['namelist']
namelist2=namelist.split("||")
photoal=self.request.POST.getall('photoalbum')
for albumn2 in namelist2:
if len(albumn2)>0:
albumname = base64.urlsafe_b64decode(str(albumn2))
albumbuvo[albumname]=False
# self.response.out.write("namelist:" +albumname+ "<br />\n")
for albumn2 in photoal:
albumbuvo[albumn2]=True
# self.response.out.write("photoal:" +albumn2+ "<br />\n")
albumbuvo2=albumbuvo
buves_album = db.GqlQuery("SELECT * FROM PicaAlbumOn WHERE lankytojas = :1", user)
for albumb in buves_album:
albumb.ipadresas = os.environ['REMOTE_ADDR']
albumb.narsykle = os.environ['HTTP_USER_AGENT']
albumb.laikas = datetime.datetime.now()
albumb.userid = user.user_id()
if users.is_current_user_admin():
albumb.administratorius = True
else:
albumb.administratorius = False
albumname=str(albumb.albumname)
if albumbuvo.has_key(albumname): #albumname in albumbuvo:
albumb.rodyti = albumbuvo[albumname]
# self.response.out.write("buvo:" +albumname+" "+str(albumbuvo[albumname])+ "<br />\n")
albumbuvo2.pop(albumname)
albumb.put()
for albumn in albumbuvo2.keys():
album = PicaAlbumOn(lankytojas=user)
album.albumname=albumn
album.ipadresas = os.environ['REMOTE_ADDR']
album.narsykle = os.environ['HTTP_USER_AGENT']
album.laikas = datetime.datetime.now()
album.userid = user.user_id()
if users.is_current_user_admin():
album.administratorius = True
else:
album.administratorius = False
album.rodyti = albumbuvo2[albumn]
# self.response.out.write("naujas:" +albumn+" "+str(albumbuvo2[albumn])+ "<br />\n")
album.put()
buvo = True
# except:
# klaida=True
to_addr = _mailrcptto
user = users.get_current_user()
if user:
uname=user.nickname()
umail=users.get_current_user().email()
else:
uname=""
umail=""
message = mail.EmailMessage()
message.subject = os.environ['HTTP_HOST'] + " - add albums"
# message.subject = "www"
message.sender = _mailsender
message.to = to_addr
q_message = ("\n%s: %s \n%s \n%s \n" % ('Page', str(self.request.uri),str(textinfo()),str(textloc())))
message.body = (_("Comments mail message %(communame)s %(commumail)s %(commrealname)s %(commmessage)s") % {'communame': uname,'commumail': umail,'commrealname': '','commmessage': ''}) + q_message
message.send()
self.redirect('/'+cmspath2+'-userpicacontrol-'+lang+'.'+fileext)
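# Contact form addressed to a user's registered e-mail; requires sign-in.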
class UserMailFormPage(BaseRequestHandler):
def get(self, rparameters, pseudonim , pic_key):
param=urlparam(rparameters)
ext=param['ext']
lang=param['lang']
aps=param['aps']
kalb=param['kalb']
lang1 = gettext.translation (cmstrans2, locale_path, [kalb] , fallback=True)
_ = lang1.ugettext
codekey=codekey2()
userinfo2=userinfo(pic_key,True,lang,ext)
for key, val in userinfo2.items():
try:
exec(key + '=val')
except Exception, e:
err=''
usersendmailurl = ("/%s-usersendmail-%s.%s/%s/%s" % (cmspath2,lang, fileext, pseudonimas, userid))
userpagetext = (_("vartotojo puslapis %(usercppseudonimas)s %(userpageurl)s") % {'usercppseudonimas': pseudonimas, 'userpageurl': userpageurl})
if hasattr(lank, 'email'):
plusurl=getplius(lank.email())
else:
plusurl=None
wtext = (_("user mailform header %(pseudonimas)s") % {'pseudonimas': pseudonimas}) + "<br />" + "<img src=\""+imagemaxurl+"\" border=\"0\" id=\"profile_pic\" alt=\"\"></img><br />\n\n"+usercpplustext+(_("User mailform %(usercpuserid)s %(usercpcontent)s %(usercppseudonimas)s %(usercpurl)s %(userpicapagetext)s") % { 'usercpuserid': userid, 'usercpcontent': content,'usercppseudonimas': pseudonimas, 'usercpurl': usercpurl, 'userpicapagetext': userpagetext})
page = Page.loadnew("usermailformpage")
user = users.get_current_user()
greeting = ''
if user and hasattr(lank, 'email'):
greeting = _("User Mail form %(mailsendurl)s %(mailcodekey)s") % {'mailsendurl': usersendmailurl,'mailcodekey': codekey}
elif not hasattr(lank, 'email'):
greeting = "\t\n<div> </div>\t\n<div>User not Found</div>"
else:
greeting = "\t\n<div> </div>\t\n<div>" + (_("Sign in or register %(userloginurl)s") % {'userloginurl': users.create_login_url(self.request.uri)}) + "</div>"
page.content = u""+ wtext +greeting+""
page_name2 = 'menu'+'-'+lang+'.'+fileext
page2 = Page.load(page_name2)
page3 = Page.loadnew("kalbos")
textaps=''
if len(aps)>0:
textaps=aps+'.'
text=''
for name, value in kalbossort:
text = text + (_kalbhtml % ('usermailformpage', textaps+name, userpageend, name, name))
page3.content = text
self.generate('view.html', lang, {
'imgshar': False,
'noedit': '1',
'application_name': siteauth(),
'kalbos': page3,
'menu': page2,
'page': page,
})
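# Sends the contact-form mail after validating the single-use captcha code;
# the sender is the signed-in visitor, the recipient the profile owner.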
class UserMailSend(BaseRequestHandler):
# @login_required
def post(self, rparameters, pseudonim , pic_key):
parts = rparameters.split(".")
param=urlparam(rparameters)
ext=param['ext']
lang=param['lang']
aps=param['aps']
kalb=param['kalb']
lang1 = gettext.translation (cmstrans2, locale_path, [kalb] , fallback=True)
_ = lang1.ugettext
                codeimg = None
                try:
                        codeimg = db.get(self.request.get("scodeid"))
                except:
                        prn="Error"
# codeimg = db.get(self.request.get("scodeid"))
if codeimg and codeimg.code == self.request.get("scode") and users.GetCurrentUser():
userinfo2=userinfo(pic_key,True,lang,ext)
for key, val in userinfo2.items():
try:
exec(key + '=val')
except Exception, e:
err=''
userpicapagetext=""
codeimg.delete()
x_zmail = lank.email()
x_subject = self.request.get("zsubject")
x_realname = self.request.get("zrealname")
x_message = self.request.get("zmessage")
to_addr = _mailrcptto
user = users.get_current_user()
if user:
uname=user.nickname()
umail=users.get_current_user().email()
else:
uname=""
umail=""
if not mail.is_email_valid(to_addr):
# Return an error message...
pass
message = mail.EmailMessage()
message.subject = x_subject.encode("utf-8")
# message.subject = "www"
message.sender = users.get_current_user().email()
if lank.email():
message.to = lank.email()
else:
message.to = to_addr
# q_uname = uname.encode("utf-8")
# q_umail = umail.encode("utf-8")
# q_zmail = x_zmail.encode("utf-8")
# q_realname = x_realname.encode("utf-8")
# q_message = x_message.encode("utf-8")
q_uname = ''
q_umail = ''
q_zmail = ''
q_realname = ''
q_message = ''
q_uname = uname
q_umail = umail
q_zmail = x_zmail
q_realname = x_realname
q_message = x_message + ("\n%s: %s \n%s \n%s \n" % ('Page', str(self.request.uri),str(textinfo()),str(textloc())))
message.body = (_("Mail message %(mailuname)s %(mailumail)s %(mailrealname)s %(mailzmail)s %(mailmessage)s") % {'mailuname': q_uname, 'mailumail': q_umail, 'mailrealname': q_realname, 'mailzmail': q_zmail, 'mailmessage': q_message})
message.body = message.body + ("\n\nMail page: %s" % (userpageurl))
message.send()
ptext=_("Mail send OK")
else:
ptext=_("Mail send Error")
page = Page.loadnew("sendmail")
page.content = ptext
page_name2 = 'menu'+'-'+lang+'.'+fileext
page2 = Page.load(page_name2)
self.generate('view.html', lang, {
'imgshar': False,
'noedit': '1',
'application_name': siteauth(),
'menu': page2,
'page': page,
})
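# Site-wide guestbook: paged Commentsrec entries preceded by the per-language
# intro page ("atsi"), followed by the submit form.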
class Comments(BaseRequestHandler):
def get(self,rparameters):
# self.response.out.write('<html><head><style>body { text-align: center; font: 11px arial, sans-serif; color: #565656; } .clear { clear:both; } .comm-container { margin-bottom:20px;} .comm-name { font-size:10pt; float:left; width:20%; padding:5px; overflow:hidden; } .comm-text { float:left; line-height:17px; width:70%; padding:5px; padding-top:0px; overflow:hidden; } .font-small-gray { font-size:10pt !important; }</style></head><body>')
param=urlparam(rparameters)
ext=param['ext']
lang=param['lang']
aps=param['aps']
kalb=param['kalb']
lang1 = gettext.translation (cmstrans2, locale_path, [kalb] , fallback=True)
_ = lang1.ugettext
wtext=""
pg=self.request.get('pg')
entitiesRx = re.compile("[^0-9]")
pg=entitiesRx.sub("", pg)
if pg:
pg = int(pg)
else:
pg=0
try:
query = db.GqlQuery("SELECT * FROM Commentsrec ORDER BY date DESC")
# query = db.GqlQuery("SELECT * FROM Commentsrec WHERE rodyti = :1, author = :2 ORDER BY date DESC", '1',users.GetCurrentUser())
greetings = query.fetch(10,pg*10)
co=query.count()
except:
klaida=True
co=0
greetings = []
i=0
ii=0
bbb=""
while i<=co:
i=i+10
if ii == pg:
bbb=bbb+' '+str(ii)
else:
bbb=bbb+' '+"<a href=\"/"+cmspath2+"-comments-"+lang+'.'+fileext+"?pg="+ str(ii) +"\">"+ str(ii) +"</a>"
ii=ii+1
page2 = Page.load("atsi-"+lang+'.'+fileext)
wtext=wtext+page2.content+"\n<div><div style=\"text-align: center;\">"+bbb+"</div>\n\n"
for greeting in greetings:
wijun = ""
wdel = ""
if greeting.rodyti or (users.GetCurrentUser() and users.get_current_user() == greeting.author) or users.is_current_user_admin():
if users.is_current_user_admin():
wdel = _("Comments delete %(commswiturl)s %(commkey)s") % {'commswiturl': '/commswit', 'commkey': greeting.key()}
if (users.GetCurrentUser() and users.get_current_user() == greeting.author) or users.is_current_user_admin():
if not greeting.rodyti:
wijun = _("Comments show %(commswiturl)s %(commkey)s") % {'commswiturl': '/commswit', 'commkey': greeting.key()}
else:
wijun = _("Comments hidden %(commswiturl)s %(commkey)s") % {'commswiturl': '/commswit', 'commkey': greeting.key()}
wtext = wtext + "\n<div class=\"comm-container\">"
wtext = wtext + "<div class=\"comm-name\">"+('<strong>%s</strong>' % greeting.rname) +", "+('<div class="font-small-gray">%s</div>' % greeting.date.strftime("%a, %d %b %Y %H:%M:%S"))
if greeting.avatar:
if greeting.avatarmax:
wtext = wtext + ('<div class="font-small-gray"><a href="/commimg?img_id=%s&size=yes"><img src="/commimg?img_id=%s" alt=""></img></a></div>' % (greeting.key(),greeting.key()))
else:
wtext = wtext + ('<div class="font-small-gray"><img src="/commimg?img_id=%s" alt=""></img></div>' % greeting.key())
wtext = wtext + ("</div><div class=\"comm-text\"><div>%s</div></div>\n\n</div><div class=\"clear\"><!-- --></div>\n\n<div>%s %s</div>\n\n<div> </div>\n" % (greeting.content,wijun,wdel))
codekey=codekey2()
wtext = wtext + "\n</div>\n"+(_("Comments form %(commsendurl)s %(commcodekey)s") % {'commsendurl': '/'+cmspath2+'-commsubmit-'+lang+'.'+fileext, 'commcodekey': codekey})
page = Page.loadnew("comments")
page.content = wtext
page_name2 = 'menu'+'-'+lang+'.'+fileext
page2 = Page.load(page_name2)
page3 = Page.loadnew("kalbos")
textaps=''
if len(aps)>0:
textaps=aps+'.'
text=''
for name, value in kalbossort:
text = text + (_kalbhtml % ('comments', textaps+name, ext, name, name))
page3.content = text
self.generate('view.html', lang, {
'imgshar': False,
'noedit': '1',
'application_name': siteauth(),
'kalbos': page3,
'menu': page2,
'page': page,
})
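# Fallback avatar (bundled error picture) used when a comment image id
# cannot be loaded from the datastore.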
class AvatarErr(object):
from bindata import PictureErr
avatar=PictureErr.thumbnail_data
avatarmax=PictureErr.data
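# Serves a comment's stored thumbnail, or the full-size image with ?size=.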
class CommentsImage(webapp.RequestHandler):
def get(self):
try:
greeting = db.get(self.request.get("img_id"))
atype = "png"
except:
greeting = AvatarErr()
atype = "jpeg"
if self.request.get("size"):
if hasattr(greeting, 'avatarmax'):
# if greeting.avatarmax:
self.response.headers['Content-Type'] = "image/%s" % atype
self.response.out.write(greeting.avatarmax)
else:
self.response.out.write("No image")
else:
if hasattr(greeting, 'avatar'):
# if greeting.avatar:
self.response.headers['Content-Type'] = "image/%s" % atype
self.response.out.write(greeting.avatar)
else:
self.response.out.write("No image")
class UserShowImageMin(webapp.RequestHandler):
def get(self, pseudonim , pic_key):
buvoapp = False
imagemaxurl = ""
lank = UserNone(email=None, federated_identity=None)
try:
buvesapp = db.GqlQuery("SELECT * FROM Vartotojai WHERE userid = :1", pic_key)
for app in buvesapp:
avatarmin=app.avatarmin
buvoapp = app.rodyti
lank=app.lankytojas
except:
klaida=True
thubnail=getphoto(lank.email())
if buvoapp:
self.response.headers['Content-Type'] = "image/png"
self.response.out.write(avatarmin)
elif thubnail:
imagemaxurl = str(thubnail)
uphoto=imagemaxurl.split("/s144/", 1)
slasas="/s50/"
imagemaxurl = slasas.join(uphoto)
self.response.set_status(302)
self.response.headers['Location'] = imagemaxurl
else:
self.response.set_status(302)
self.response.headers['Location'] = avatarminurl2
self.response.out.write("No image " +pic_key)
class UserShowImageMax(webapp.RequestHandler):
def get(self, pseudonim , pic_key):
buvoapp = False
avatarmax = ""
try:
buvesapp = db.GqlQuery("SELECT * FROM Vartotojai WHERE userid = :1", pic_key)
for app in buvesapp:
avatarmax=app.avatarmax
buvoapp = app.rodyti
except:
klaida=True
if buvoapp:
self.response.headers['Content-Type'] = "image/png"
self.response.out.write(avatarmax)
else:
self.response.set_status(302)
self.response.headers['Location'] = avatarmaxurl2
self.response.out.write("No image " +pic_key)
class SwitComments(webapp.RequestHandler):
def get(self):
userid = "0"
pseudonimas = "Anonymous"
lank = ""
vartkey=""
user = users.get_current_user()
usercomm = False
url='/comments'
try:
buvesapp = db.GqlQuery("SELECT * FROM Vartotojai WHERE lankytojas = :1", user)
for app in buvesapp:
userid = app.userid
pseudonimas = str(app.pseudonimas)
lank=app.lankytojas
vartkey=app.key()
vartot = db.get(vartkey)
comm = db.get(self.request.get("id"))
kname=comm.kind()
vartot_comm=comm.vartot
vartot_comm_key=vartot_comm.key()
vartot_comm_vartot=db.get(vartot_comm_key)
if (userid == vartot_comm_vartot.userid):
usercomm = True
except:
klaida=True
try:
if ((users.GetCurrentUser() and users.get_current_user() == comm.author) or (usercomm) or users.is_current_user_admin()) and ((kname == 'Commentsrec') or (kname == 'Commentsrec2')):
if self.request.get("show")=="del" and users.is_current_user_admin():
comm.delete()
if self.request.get("show")=="yes":
comm.rodyti=True
comm.put()
if self.request.get("show")=="no":
comm.rodyti=False
comm.put()
if kname == 'Commentsrec':
url='/comments'
if kname == 'Commentsrec2':
userid=comm.userid
rname=comm.rname
url='/'+cmspath2+'-usercommpage-'+langdef+'.'+fileext+'/'+rname+'/'+userid
except:
klaida=True
self.redirect(url)
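# Stores a guestbook comment: captcha check, plain-text enforcement (entries
# containing raw HTML are dropped), BBCode rendering, optional image resize,
# SpamIP bookkeeping, and an admin notification mail.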
class SubmitComments(webapp.RequestHandler):
def post(self, rparameters):
param=urlparam(rparameters)
ext=param['ext']
lang=param['lang']
aps=param['aps']
kalb=param['kalb']
lang1 = gettext.translation (cmstrans2, locale_path, [kalb] , fallback=True)
_ = lang1.ugettext
connt=""
try:
codeimg = db.get(self.request.get("scodeid"))
except:
prn="Error"
if codeimg and codeimg.code == self.request.get("scode"):
codeimg.delete()
greeting = Commentsrec()
greeting.rodyti = True
greeting.ipadresas = os.environ['REMOTE_ADDR']
# greeting.laikas = datetime.datetime.now()
if users.get_current_user():
greeting.author = users.get_current_user()
htmlerr=True
connt = self.request.get("content")
connt2 = cgi.escape(connt)
if connt==connt2:
htmlerr=False
connt = render_bbcode(connt)
connt = connt[0:400]
greeting.content = connt
# priesduom = self.request.get("img")
greeting.rname = "Anonymous"
if self.request.get("img"):
try:
avatar = images.resize(self.request.get("img"), width=96, height=96, output_encoding=images.PNG)
greeting.avatar = db.Blob(avatar)
avatarmax = images.resize(self.request.get("img"), width=600, height=400, output_encoding=images.PNG)
greeting.avatarmax = db.Blob(avatarmax)
except:
avatarerr="Error"
if self.request.get("rname"):
greeting.rname = cgi.escape(self.request.get("rname")[0:60])
if not htmlerr:
greeting.put()
buvoip = False
try:
ipaddr = os.environ['REMOTE_ADDR']
if True:
try:
buvesip = db.GqlQuery("SELECT * FROM SpamIP WHERE ipadresas = :1", ipaddr)
for app in buvesip:
buvoip = True
except:
klaida=True
if not buvoip:
app = SpamIP(ipadresas=ipaddr)
app.date = datetime.datetime.now()
app.lastserver = '0'
app.check = False
app.spamcount = '0'
app.spam = False
app.put()
except:
klaida=True
to_addr = _mailrcptto
user = users.get_current_user()
if user:
uname=user.nickname()
umail=users.get_current_user().email()
else:
uname=""
umail=""
message = mail.EmailMessage()
                message.subject = os.environ['HTTP_HOST'] + " - comments" + (" %s %s %s") % ((codeimg and codeimg.code) or "",self.request.get("scode"),htmlerr)
# message.subject = "www"
message.sender = _mailsender
message.to = to_addr
q_message = ("\n%s: %s \n%s \n%s \n" % ('Page', str(self.request.uri),str(textinfo()),str(textloc())))
message.body = (_("Comments mail message %(communame)s %(commumail)s %(commrealname)s %(commmessage)s") % {'communame': uname,'commumail': umail,'commrealname': greeting.rname,'commmessage': greeting.content}) + q_message
message.send()
self.redirect('/'+cmspath2+'-comments-'+lang+'.'+fileext)
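# Admin form listing every Page entity with its sitemap/RSS flags, change
# frequency, priority, and per-network comment toggles.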
class SiteMapControl(BaseRequestHandler):
def get(self,rparameters):
# self.response.out.write('<html><head><style>body { text-align: center; font: 11px arial, sans-serif; color: #565656; } .clear { clear:both; } .comm-container { margin-bottom:20px;} .comm-name { font-size:10pt; float:left; width:20%; padding:5px; overflow:hidden; } .comm-text { float:left; line-height:17px; width:70%; padding:5px; padding-top:0px; overflow:hidden; } .font-small-gray { font-size:10pt !important; }</style></head><body>')
param=urlparam(rparameters)
ext=param['ext']
lang=param['lang']
aps=param['aps']
kalb=param['kalb']
lang1 = gettext.translation (cmstrans2, locale_path, [kalb] , fallback=True)
_ = lang1.ugettext
wtext=""
buvoappcheck=""
user = users.get_current_user()
usercpsmurl = ("/%s-sitemapcp2-%s.%s" % (cmspath2,lang,fileext))
out=("<table><form method=\"POST\" action=\"%s\">\n" % (usercpsmurl))
freqvalues = {
'1': 'always',
'2': 'hourly',
'3': 'daily',
'4': 'weekly',
'5': 'monthly',
'6': 'yearly',
'7': 'never'
}
selecttext=""
namelist = ''
if users.is_current_user_admin():
sitemapbuvo = {}
query = datastore.Query('Page')
entities = query.Get(1000)
namelist = ''
for entity in entities:
sitemaprodyti=True
rssprodyti=True
sitemapfreqkey='weekly'
buvosmcheck=""
buvorsscheck=""
commenablegocheck=""
commenablefbcheck=""
commenablelicheck=""
commenablevkcheck=""
ename="---"
pagekey=entity.key()
sitemapprio='0.5'
if 'name' in entity.keys():
ename=entity['name']
if 'sitemapfreq' in entity.keys():
sitemapfreqkey=entity['sitemapfreq']
if 'sitemapprio' in entity.keys():
sitemapprio=entity['sitemapprio']
if 'sitemaprodyti' in entity.keys():
if entity['sitemaprodyti']:
sitemaprodyti=entity['sitemaprodyti']
buvosmcheck="checked=\"yes\""
if 'rssrodyti' in entity.keys():
if entity['rssrodyti']:
                                                rssprodyti=entity['rssrodyti']
buvorsscheck="checked=\"yes\""
if 'commenablego' in entity.keys():
if entity['commenablego']:
commenablegocheck="checked=\"checked\""
if 'commenablefb' in entity.keys():
if entity['commenablefb']:
commenablefbcheck="checked=\"checked\""
if 'commenableli' in entity.keys():
if entity['commenableli']:
commenablelicheck="checked=\"checked\""
if 'commenablevk' in entity.keys():
if entity['commenablevk']:
commenablevkcheck="checked=\"checked\""
selecttext=("<select name=\"freq_%s\">" % (ename))
for fname in sorted(freqvalues.iterkeys()):
freqvalue = freqvalues[fname]
selecttextyes=""
# if cmp(int(fname),int(sitemapfreqkey))==0:
if freqvalue==sitemapfreqkey:
selecttextyes="selected=\"selected\""
selecttext=selecttext+("<option %s>%s</option>" % (selecttextyes,freqvalue))
selecttext=selecttext+"</select>\n"
out=out+("<tr><td>%s</td><td><input type=\"checkbox\" name=\"sitemaprodyti\" value=\"%s\" %s></td><td>%s</td><td><input type=\"text\" size=\"4\" name=\"prio_%s\" value=\"%s\" > RSS <input type=\"checkbox\" name=\"rssrodyti\" value=\"%s\" %s></td></tr>\n" % (ename,ename,buvosmcheck,selecttext,ename,sitemapprio,ename,buvorsscheck))
out=out+("<tr><td> </td><td colspan=\"3\"> Google comments <input type=\"checkbox\" name=\"commenablego\" value=\"%s\" %s> Facebook comments <input type=\"checkbox\" name=\"commenablefb\" value=\"%s\" %s> LinkedIn comments <input type=\"checkbox\" name=\"commenableli\" value=\"%s\" %s> VKontakte comments <input type=\"checkbox\" name=\"commenablevk\" value=\"%s\" %s></td></td></tr>\n" % (ename,commenablegocheck,ename,commenablefbcheck,ename,commenablelicheck,ename,commenablevkcheck))
namelist = namelist + "||" + str(base64.urlsafe_b64encode(str(ename)))
out=out+"<tr><td colspan=\"4\"><input type=\"hidden\" name=\"namelist\" value=\""+str(namelist)+"\" ></td></tr>\n"
out=out+"<tr><td></td><td></td><td></td><td><input type=\"submit\"></td></tr>\n"
out=out+"</form></table>\n"
wtext = out
page = Page.loadnew("sitemapcp")
page.content = wtext
page_name2 = 'menu'+'-'+lang+'.'+fileext
page2 = Page.load(page_name2)
page3 = Page.loadnew("kalbos")
textaps=''
if len(aps)>0:
textaps=aps+'.'
text=''
for name, value in kalbossort:
text = text + (_kalbhtml % ('sitemapcp', textaps+name, ext, name, name))
page3.content = text
self.generate('view.html', lang, {
'imgshar': False,
'noedit': '1',
'application_name': siteauth(),
'kalbos': page3,
'menu': page2,
'page': page,
})
class SiteMapControl2(webapp.RequestHandler):
def post(self, rparameters):
param=urlparam(rparameters)
ext=param['ext']
lang=param['lang']
aps=param['aps']
kalb=param['kalb']
lang1 = gettext.translation (cmstrans2, locale_path, [kalb] , fallback=True)
_ = lang1.ugettext
user = users.get_current_user()
now = datetime.datetime.now()
buvo = False
if user:
# try:
if users.is_current_user_admin():
photoal=[]
namelist2=[]
albumbuvo = {}
rssbuvo = {}
commenablegobuvo = {}
commenablefbbuvo = {}
commenablelibuvo = {}
commenablevkbuvo = {}
albumname = ""
# form = cgi.FieldStorage()
# item = form.getvalue("sitemaprodyti")
# if form["namelist"].value:
# namelist=form["namelist"].value
# namelist2=namelist.split("||")
# if isinstance(item, list):
# for item in form.getlist("sitemaprodyti"):
# photoal.append(item)
# else:
# photoa = form.getfirst("sitemaprodyti", "")
# photoal.append(photoa)
namelist=self.request.POST['namelist']
namelist2=namelist.split("||")
photoal=self.request.POST.getall('sitemaprodyti')
rssal=self.request.POST.getall('rssrodyti')
commenablegoal=self.request.POST.getall('commenablego')
commenablefbal=self.request.POST.getall('commenablefb')
commenablelial=self.request.POST.getall('commenableli')
commenablevkal=self.request.POST.getall('commenablevk')
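# namelist is the "||"-joined, urlsafe-base64 page-name list emitted by
# SiteMapControl above; decoding it rebuilds the full page set so that
# checkboxes absent from the POST (i.e. unticked) can be reset to False.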
for albumn2 in namelist2:
if len(albumn2)>0:
albumname = base64.urlsafe_b64decode(str(albumn2))
albumbuvo[albumname]=False
rssbuvo[albumname]=False
commenablegobuvo[albumname]=False
commenablefbbuvo[albumname]=False
commenablelibuvo[albumname]=False
commenablevkbuvo[albumname]=False
# self.response.out.write("namelist:" +albumname+ "<br />\n")
for albumn2 in photoal:
albumbuvo[albumn2]=True
for albumn2 in rssal:
rssbuvo[albumn2]=True
for albumn2 in commenablegoal:
commenablegobuvo[albumn2]=True
for albumn2 in commenablefbal:
commenablefbbuvo[albumn2]=True
for albumn2 in commenablelial:
commenablelibuvo[albumn2]=True
for albumn2 in commenablevkal:
commenablevkbuvo[albumn2]=True
# self.response.out.write("photoal:" +albumn2+ "<br />\n")
albumbuvo2=albumbuvo
rssbuvo2=rssbuvo
query = datastore.Query('Page')
entities = query.Get(1000)
for albumb in entities:
albumname=str(albumb['name'])
if albumbuvo.has_key(albumname): #albumname in albumbuvo:
albumb['sitemaprodyti'] = albumbuvo[albumname]
albumb['rssrodyti'] = rssbuvo[albumname]
albumb['commenablego'] = commenablegobuvo[albumname]
albumb['commenablefb'] = commenablefbbuvo[albumname]
albumb['commenableli'] = commenablelibuvo[albumname]
albumb['commenablevk'] = commenablevkbuvo[albumname]
albumb['sitemapfreq'] = self.request.POST["freq_"+albumname]
albumb['sitemapprio'] = self.request.POST["prio_"+albumname]
# self.response.out.write("buvo:" +albumname+" "+str(albumbuvo[albumname])+ "<br />\n")
albumbuvo2.pop(albumname)
datastore.Put(albumb)
self.redirect('/'+cmspath2+'-sitemapcp-'+lang+'.'+fileext)
class CodeImage(webapp.RequestHandler):
def get(self):
# img = PNGCanvas(256, 256)
# pix = [0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,0,0,0,0,0,0,1,1,1,1,0,1,1,1,1,0,0,0,0,1,1,1,1,0,0,0,1,1,1,1,0,0,0,1,1,1,1,0,0,0,1,1,1,1,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,0,0,0,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,1,1,1,1,0,0,0,0,0,0,0,0,0,1,1,1,1,1,0,0,0,0,0,0,0,0,0,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,0,0,0,0,0,0,0,1,1,1,1,1,1,1,0,0,0,0,0,0,0,1,1,0,0,1,1,1,1,0,0,0,0,0,0,0,1,0,0,1,1,1,0,0,0,0,0,0,0,0,1,0,0,1,1,1,0,0,0,0,0,0,0,0,1,1,1,1,1,0,0,0,0,0,0,0,0,1,1,1,1,1,0,0,0,0,0,0,0,0,0,1,1,1,1,1,0,0,0,0,0,0,0,0,0,1,1,1,1,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,0,0,0,0,0,0,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,0,0,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,0,0,0,0,0,0,0,0,1,1,0,0,1,1,1,0,0,0,0,0,0,1,1,1,0,0,1,1,1,0,0,0,0,0,0,1,1,1,0,0,1,1,1,1,0,0,0,0,0,1,1,1,1,1,1,1,1,0,0,0,0,0,0,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,0,1,1,1,1,0,0,0,0,0,0,0,0,0,1,1,1,1,1,0,0,0,0,0,0,0,0,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,0,0,0,0,1,0,0,0,1,1,1,1,1,1,0,0,0,1,1,1,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,1,1,1,1,1,0,0,0,0,0,0,0,0,0,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,1,1,1,1,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,1,1,1,0,0,0,0,0,0,0,1,1,0,0,1,1,1,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,0,0,0,0,0,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,1,1,1,1,1,1,1,0,0,0,0,0,0,0,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,0,0,0,0,0,0,0,1,1,0,0,1,1,1,1,0,0,0,0,0,0,1,0,0,0,1,1,1,1,0,0,0,0,0,1,1,1,0,0,1,1,1,1,1,0,0,0,0,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,1,1,1,1,1,1,1,1,0,0,0,0,0,0,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,0,0,0,0,0,0,0,1,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,0,0,0,0,0,0,0,0,1,1,0,0,0,1,1,0,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,1,1,1,1,1,0,0,0,0,0,0,0,0,1,1,0,0,0,1,1,0,0,0,0,0,0,0,1,1,0,0,0,1,1,0,0,0,0,0,0,0,1,1,1,1,1,1,1,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,1,1,1,1,1,1,1,0,0,0,0,0,0,0,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,
# 0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,0,0,0,0,0,0,1,1,1,1,1,1,1,1,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,1,1,1,1,0,0,0,1,0,0,0,0,0,0,1,1,1,1,0,0,0,1,0,0,0,0,0,0,0,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]
krast=5
starp=2
# splot = 14;
# sauks = 14;
pix = [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,1,1,1,1,0,0,0,0,1,1,0,0,1,1,0,0,1,1,0,0,0,0,1,1,0,1,1,0,0,0,0,1,1,0,1,1,0,0,0,0,1,1,0,1,1,0,0,0,0,1,1,0,0,1,1,0,0,1,1,0,0,0,0,1,1,1,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,1,1,1,0,0,0,0,0,1,1,1,1,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,1,1,0,0,0,0,0,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,0,0,0,0,1,1,0,0,1,1,0,0,1,1,0,0,0,0,1,1,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,0,0,0,1,1,0,0,0,1,1,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,1,1,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,1,1,0,1,1,0,0,0,1,1,0,0,0,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,1,1,1,0,0,0,0,0,1,1,1,1,0,0,0,0,1,1,0,1,1,0,0,0,1,1,0,0,1,1,0,0,1,1,0,0,0,1,1,0,0,1,1,1,1,1,1,1,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,0,0,1,1,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,1,1,0,1,1,1,0,0,0,1,1,1,0,0,1,1,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,1,1,0,1,1,0,0,0,0,1,1,0,0,1,1,0,0,1,1,0,0,0,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,0,0,0,0,1,1,0,0,1,1,0,0,1,1,0,0,0,0,1,0,0,1,1,0,0,0,0,0,0,0,1,1,0,1,1,1,0,0,0,1,1,1,0,0,1,1,0,0,1,1,0,0,0,0,1,1,0,1,1,0,0,0,0,1,1,0,0,1,1,0,0,1,1,0,0,0,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,0,0,0,0,1,1,0,0,1,1,0,0,1,1,0,0,0,0,1,1,0,0,1,1,0,0,1,1,0,0,0,0,1,1,1,1,0,0,0,0,1,1,0,0,1,1,0,0,1,1,0,0,0,0,1,1,0,1,1,0,0,0,0,1,1,0,0,1,1,0,0,1,1,0,0,0,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,0,0,0,0,1,1,0,0,1,1,0,0,1,1,0,0,0,0,1,1,0,1,1,0,0,0,0,1,1,0,0,1,1,0,0,1,1,1,0,0,0,1,1,1,0,1,1,0,0,0,0,0,0,0,1,1,0,0,1,0,0,0,0,1,1,0,0,1,1,0,0,1,1,0,0,0,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]
krast=5
starp=2
# splot/sauks (glyph width/height): the 14x14 values were dead assignments,
# immediately overridden; the digit glyphs in pix are sampled as 9x15 cells.
splot = 9
sauks = 15
nn=6
istr=int(math.sqrt((splot/2)*(splot/2)+(sauks/2)*(sauks/2)))
splot2 = istr*2
sauks2 = istr*2
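# Geometry sketch (my reading of the code): each digit glyph is rotated by a
# random angle, so the output cell is widened to twice the glyph's
# half-diagonal (istr*2) to guarantee every rotated pixel still fits.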
plot = 2*krast + nn*splot + (nn-1)*starp
auks = 2*krast + sauks
plot2 = 2*krast + nn*splot2 + (nn-1)*starp
auks2 = 2*krast + sauks2
# img = PNGCanvas(plot, auks, [0, 0,0,0])
# img = PNGCanvas(plot2, auks2, [0, 0,0,0])
img = PNGCanvas(plot2, auks2, [0xff, 0xfa, 0xcd, 0xff])
# img = PNGCanvas(plot, auks, [0, 0,0,0])
ss=[0,2,4,6,8,1,3,5,7,9]
try:
codeimg = db.get(self.request.get("id"))
kodas=codeimg.code
except:
kodas="000000"
for i in range(0, 6):
sx2 = "%s" % kodas[i:i+1]
try:
sx = int(sx2)
except:
sx=0
# sx = random.randrange(0, 10)
alfa=((random.randrange(0, 90 , 5)-45)*math.pi)/180
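# alfa is a random rotation in 5-degree steps within [-45, +45) degrees,
# in radians. Each set pixel is rotated about the glyph centre by converting
# (xcor, ycor) to polar form (istrs, alfa1), adding alfa, and converting back.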
for y in range(1, sauks):
# alfa=math.pi/2
# alfa=math.pi/4
for x in range(1, splot):
nr = sx*(splot*sauks)+(y-1)*splot+x-1
xcor=x-splot/2 -1
ycor=y-sauks/2-1
istrs=math.sqrt(xcor*xcor+ycor*ycor)
alfa1=math.atan2(ycor,xcor)
xcornew=istrs*math.cos(alfa1+alfa)
ycornew=istrs*math.sin(alfa1+alfa)
xx=int(krast+i*(starp+splot2)+splot2/2+1+xcornew)
yy=int(krast+sauks2/2+1+ycornew)
# xx=krast+i*(starp+splot2)+xcor+splot2/2 +1;
# yy=krast+ycor+sauks2/2 +1;
if pix[nr]==1:
# img.point(xx, yy, [0xff, 0, 0, 0xff])
img.point(xx, yy, [0, 0, 0, 0xff])
# img.point(xx, yy, [0xff, 0xfa, 0xcd, 0xff])
self.response.headers['Content-Type'] = "image/png"
self.response.out.write(img.dump())
class UserNone(object):
__user_id = None
__federated_identity = None
__federated_provider = None
def __init__(self, email=None, _auth_domain=None,
_user_id=None, federated_identity=None, federated_provider=None,
_strict_mode=True):
if email is None:
email = ''
self.__email = email
self.__federated_identity = federated_identity
self.__federated_provider = federated_provider
self.__auth_domain = _auth_domain
self.__user_id = _user_id or None
def nickname(self):
return self.__email
def email(self):
return self.__email
def user_id(self):
return self.__user_id
def auth_domain(self):
return self.__auth_domain
def federated_identity(self):
return self.__federated_identity
def federated_provider(self):
return self.__federated_provider
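# UserNone mirrors the read-only accessors of google.appengine.api.users.User
# (nickname, email, user_id, ...) so anonymous visitors can flow through code
# paths that expect a User-like object; this reading is inferred from the
# matching method names above.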
def getphoto(useris):
yra=False
try:
f = urllib.urlopen("http://picasaweb.google.com/data/feed/api/user/%s?kind=album" % useris)
list = Picasa().albums(f.read())
for name in list.keys():
album = list[name]
if name.find('ProfilePhotos') == 0:
f = urllib.urlopen("http://picasaweb.google.com/data/feed/api/user/%s/album/%s?kind=photo" % (useris,name))
list = Picasa().photos(f.read())
for photo in list:
phototurim = photo.thumbnail #photo.webpage
yra=True
break
break
if name == "Profile_photosActive":
f = urllib.urlopen("http://picasaweb.google.com/data/feed/api/user/%s/album/%s?kind=photo" % (useris,"Profile_photosActive"))
list = Picasa().photos(f.read())
for photo in list:
phototurim = photo.thumbnail #photo.webpage
yra=True
break
break
except:
yra=False
if yra:
return phototurim
else:
return False
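# getphoto scrapes the public Picasa Web Albums Atom feeds: it lists the
# user's albums, looks for a profile-photo album, and returns the first
# photo's thumbnail URL, or False when nothing is found or any step fails.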
def mainold():
try:
imgcodes = db.GqlQuery("SELECT * FROM Codeimagereg WHERE date < :1", datetime.datetime.now() + datetime.timedelta(minutes=-15))
for imgcode in imgcodes:
imgcode.delete()
except:
klaida=True
redir = False
if os.environ['HTTP_HOST']=='www.upe.lt' or os.environ['HTTP_HOST']=='lt.upe.lt' or os.environ['HTTP_HOST']=='us.upe.lt' or os.environ['HTTP_HOST']=='upe.lt':
redir = True
redir2 = False
if os.environ['HTTP_HOST']=='google5353c7992b3833b7.nerij.us':
redir2 = True
buvoapp = False
try:
thisappver = os.environ['CURRENT_VERSION_ID']
thisappid = os.environ['APPLICATION_ID']
thisappsoftver = os.environ['SERVER_SOFTWARE']
thishostname = os.environ['DEFAULT_VERSION_HOSTNAME']
if True:
try:
buvesapp = db.GqlQuery("SELECT * FROM AppVer WHERE appver = :1", thisappver)
for app in buvesapp:
app.timelast = datetime.datetime.now()
app.put()
buvoapp = True
except:
klaida=True
# db.put(buves_vart)
if not buvoapp:
app = AppVer(appver=thisappver)
now = datetime.datetime.now()
app.timestart = now
app.timelast = now
app.enable = False
app.appsoftver = thisappsoftver
app.appid = thisappid
app.hostname = thishostname
app.put()
except:
klaida=True
try:
codedb = db.GqlQuery("SELECT * FROM DinCode WHERE codename = :1", "start")
for thiscode in codedb:
thiscode = thiscode.codetext
appon = eval(thiscode)
except:
appon=False
def handle_500(request, response, exception):
greeting = ''
items = os.environ.items()
items.sort()
for name, value in items:
aaa = "%s\t= %s\n" % (name, value)
greeting = greeting + aaa
lines = ''.join(traceback.format_exception(*sys.exc_info()))
message = mail.EmailMessage()
message.subject = os.environ['HTTP_HOST'] + " - Error500 - " + os.environ['REQUEST_ID_HASH']
message.sender = _mailsender
message.to = _mailrcptto
message.body = "%s\n\n%s" % (greeting,lines)
message.send()
response.write("<html><body><h1>Internal Server Error 500</h1>\n<xmp>")
# response.write("%s\n\n" % (greeting))
# response.write(cgi.escape(lines, quote=True))
response.write("</xmp></body></html>")
#applicationdisable = webapp.WSGIApplication([('/(.*)', SiteDisable),], debug=_DEBUG)
#applicationredir = webapp.WSGIApplication([('/(.*)', RedirN),], debug=_DEBUG)
#applicationredir2 = webapp.WSGIApplication([('/(.*)', RedirN2),], debug=_DEBUG)
url_map1 = [
routes.DomainRoute(r'<:(upe\.lt|lt\.upe\.lt|us\.upe\.lt|www\.upe\.lt)>', [
webapp.Route('/(.*)', handler=RedirN),
]),
('/install', WikiInstall),
('/'+cmspath2+'-env-(.*)', WikiEnv),
('/'+cmspath2+'-fb-(.*)', WikiFB),
('/'+cmspath2+'-li-(.*)', WikiLI),
('/'+cmspath2+'-vk-(.*)', WikiVK),
('/'+cmspath2+'-ver-(.*)', WikiExec),
('/'+cmspath2+'-login-(.*)', WikiLogin),
('/'+cmspath2+'-admin-(.*)', WikiAdmin),
('/'+cmspath2+'-mod(.*)-(.*)', WikiMod),
('/'+cmspath2+'-lietuvos(.*)-(.*)', WikiMod),
('/'+cmspath2+'-sitemapcp-(.*)', SiteMapControl),
('/'+cmspath2+'-sitemapcp2-(.*)', SiteMapControl2),
('/'+cmspath2+'-memberlistshort-(.*)', VartSarTrumpas),
('/'+cmspath2+'-fbmemberlistshort-(.*)', FBUserListSort),
('/'+cmspath2+'-limemberlistshort-(.*)', LIUserListSort),
('/'+cmspath2+'-vkmemberlistshort-(.*)', VKUserListSort),
('/'+cmspath2+'-memberlist-(.*)', VartSar),
('/'+cmspath2+'-usercontrolpanel-(.*)', UserControl),
('/'+cmspath2+'-usercpsubmit-(.*)', UserControlSend),
('/'+cmspath2+'-userpicacontrol-(.*)', UserPicaControl),
('/'+cmspath2+'-userpicasubmit-(.*)', UserPicaControlSend),
('/'+cmspath2+'-userpicapage-(.*)/([-\w]+)/([0-9_]+)', UserPicaPage),
('/'+cmspath2+'-useryoutpage-(.*)/([-\w]+)/([0-9_]+)', UserYoutPage),
('/'+cmspath2+'-usercommpage-(.*)/([-\w]+)/([0-9_]+)', UserCommPage),
('/'+cmspath2+'-usercommsubmit-(.*)/([-\w]+)/([0-9_]+)', UserCommSubmit),
('/'+cmspath2+'-usermailformpage-(.*)/([-\w]+)/([0-9_]+)', UserMailFormPage),
('/'+cmspath2+'-usersendmail-(.*)/([-\w]+)/([0-9_]+)', UserMailSend),
('/'+cmspath2+'-userpage-(.*)/([-\w]+)/([0-9_]+)', UserShowPage),
('/'+cmspath2+'-userimagemin/([-\w]+)/([0-9_]+)', UserShowImageMin),
('/'+cmspath2+'-userimage/([-\w]+)/([0-9_]+)', UserShowImageMax),
('/'+cmspath2+'-comments-(.*)', Comments),
('/'+cmspath2+'-atsiliepimai-(.*)', Comments),
('/'+cmspath2+'-commsubmit-(.*)', SubmitComments),
('/'+cmspath2+'-mailform-(.*)', MailForm),
('/'+cmspath2+'-siustilaiska-(.*)', MailForm),
('/'+cmspath2+'-sendmail-(.*)', MailSend),
# ('/'+cmspath2+'-searchid-(.*)', VarId),
('/commswit', SwitComments),
('/commimg', CommentsImage),
('/codeimg', CodeImage),
('/(.*)favicon.ico', WikiFav),
('/'+cmspath2+'-guestbook-(.*)', WikiGuest),
('/'+cmspath2+'-sveciai-(.*)', WikiGuest),
('/'+cmspath2+'-sing-(.*)', SingGuestbook),
('/'+cmspath2+'-ls-(.*)', ListDir),
('/'+cmspath2+'-download-(.*)', WikiRedirDown),
# ('/redir.php/(.*)', WikiRedir),
# ('/redir(.*)', WikiRedir),
]
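# Routing note: the DomainRoute entry redirects the legacy upe.lt hosts via
# RedirN; the remaining patterns match in order, and the catch-all
# WikiPage/WikiRedirMain routes are appended last (url_map below) so the
# more specific cmspath2 routes win.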
#url_map2 = [('/'+cmspath2+'-pic-(.*)', ImageSharingAlbumIndex),
# ('/'+cmspath2+'-picnew-(.*)', ImageSharingAlbumCreate),
# ('/'+cmspath2+'-picalbum-(.*)/([-\w]+)', ImageSharingAlbumView),
# ('/'+cmspath2+'-picupload-(.*)/([-\w]+)', ImageSharingUploadImage),
# ('/'+cmspath2+'-picshowimage-(.*)/([-\w]+)', ImageSharingShowImage),
# ('/'+cmspath2+'-pic(thumbnail|image)-(.*)/([-\w]+)', ImageSharingServeImage),
# ('/'+cmspath2+'-picsearch-(.*)', ImageSharingSearch)]
url_map = []
url_map.extend(url_map1)
#url_map.extend(url_map2)
url_map.extend([('/'+cmspath2+'-([-\w]+)-(.*)', WikiPage),('/(.*)', WikiRedirMain)])
app = webapp.WSGIApplication(url_map, debug=_DEBUG)
app.error_handlers[500] = handle_500
# wsgiref.handlers.CGIHandler().run(application)
# if redir:
# applicationredir.run()
# exit(0)
# if redir2:
# applicationredir2.run()
# exit(0)
# if appon:
# app.run()
# exit(0)
# else:
# applicationdisable.run()
# exit(0)
#if __name__ == '__main__':
# main()
#if __name__ == '__main__':
# try:
# main()
# except:
# applicationerror = webapp.WSGIApplication([('/(.*)', HttpError),], debug=_DEBUG)
# run_wsgi_app(applicationerror)
# exit(0)
|
s-hertel/ansible
|
refs/heads/devel
|
test/integration/targets/collections/collection_root_user/ansible_collections/testns/testcoll/plugins/vars/custom_vars.py
|
58
|
# Copyright 2019 RedHat, inc
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#############################################
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = '''
vars: custom_vars
version_added: "2.10"
short_description: load host and group vars
description: test loading host and group vars from a collection
options:
stage:
choices: ['all', 'inventory', 'task']
type: str
ini:
- key: stage
section: custom_vars
env:
- name: ANSIBLE_VARS_PLUGIN_STAGE
'''
from ansible.plugins.vars import BaseVarsPlugin
class VarsModule(BaseVarsPlugin):
def get_vars(self, loader, path, entities, cache=True):
super(VarsModule, self).get_vars(loader, path, entities)
return {'collection': 'collection_root_user'}
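# Test fixture sketch: whatever host/group entities are passed in, every one
# resolves the same variable, collection=collection_root_user, which the
# collections integration target presumably asserts against.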
|
StellarCN/py-stellar-base
|
refs/heads/master
|
stellar_sdk/xdr/string32.py
|
1
|
# This is an automatically generated file.
# DO NOT EDIT or your changes may be overwritten
import base64
from xdrlib import Packer, Unpacker
from .base import *
__all__ = ["String32"]
class String32:
"""
XDR Source Code
----------------------------------------------------------------
typedef string string32<32>;
----------------------------------------------------------------
"""
def __init__(self, string32: bytes) -> None:
self.string32 = string32
def pack(self, packer: Packer) -> None:
String(self.string32, 32).pack(packer)
@classmethod
def unpack(cls, unpacker: Unpacker) -> "String32":
string32 = String.unpack(unpacker)
return cls(string32)
def to_xdr_bytes(self) -> bytes:
packer = Packer()
self.pack(packer)
return packer.get_buffer()
@classmethod
def from_xdr_bytes(cls, xdr: bytes) -> "String32":
unpacker = Unpacker(xdr)
return cls.unpack(unpacker)
def to_xdr(self) -> str:
xdr_bytes = self.to_xdr_bytes()
return base64.b64encode(xdr_bytes).decode()
@classmethod
def from_xdr(cls, xdr: str) -> "String32":
xdr_bytes = base64.b64decode(xdr.encode())
return cls.from_xdr_bytes(xdr_bytes)
def __eq__(self, other: object):
if not isinstance(other, self.__class__):
return NotImplemented
return self.string32 == other.string32
def __str__(self):
return f"<String32 [string32={self.string32}]>"
|
sohovet/sohovet
|
refs/heads/master
|
sohovet_product_price/__init__.py
|
1
|
# -*- encoding: utf-8 -*-
##############################################################################
# #
# OpenERP, Open Source Management Solution. #
# #
# @author Juan Ignacio Alonso Barba <jialonso@grupovermon.com> #
# #
# This program is free software: you can redistribute it and/or modify #
# it under the terms of the GNU Affero General Public License as #
# published by the Free Software Foundation, either version 3 of the #
# License, or (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU Affero General Public License for more details. #
# #
# You should have received a copy of the GNU Affero General Public License #
# along with this program. If not, see <http://www.gnu.org/licenses/>. #
# #
##############################################################################
from . import models, wizard
|
firebitsbr/pwn_plug_sources
|
refs/heads/master
|
src/metagoofil/hachoir_core/language.py
|
95
|
from hachoir_core.iso639 import ISO639_2
class Language:
def __init__(self, code):
code = str(code)
if code not in ISO639_2:
raise ValueError("Invalid language code: %r" % code)
self.code = code
def __cmp__(self, other):
if other.__class__ != Language:
return 1
return cmp(self.code, other.code)
def __unicode__(self):
return ISO639_2[self.code]
def __str__(self):
return self.__unicode__()
def __repr__(self):
return "<Language '%s', code=%r>" % (unicode(self), self.code)
|
Austin503/pyglet
|
refs/heads/master
|
experimental/input/dinput.py
|
28
|
#!/usr/bin/python
# $Id:$
import ctypes
import pyglet
from pyglet import com
from pyglet.window.win32 import _kernel32
lib = ctypes.oledll.dinput8
LPVOID = ctypes.c_void_p
WORD = ctypes.c_uint16
DWORD = ctypes.c_uint32
LPDWORD = ctypes.POINTER(DWORD)
BOOL = ctypes.c_int
WCHAR = ctypes.c_wchar
UINT = ctypes.c_uint
HWND = ctypes.c_uint32
MAX_PATH = 260
DIENUM_STOP = 0
DIENUM_CONTINUE = 1
DIEDFL_ALLDEVICES = 0x00000000
DIEDFL_ATTACHEDONLY = 0x00000001
DIEDFL_FORCEFEEDBACK = 0x00000100
DIEDFL_INCLUDEALIASES = 0x00010000
DIEDFL_INCLUDEPHANTOMS = 0x00020000
DIEDFL_INCLUDEHIDDEN = 0x00040000
DI8DEVCLASS_ALL = 0
DI8DEVCLASS_DEVICE = 1
DI8DEVCLASS_POINTER = 2
DI8DEVCLASS_KEYBOARD = 3
DI8DEVCLASS_GAMECTRL = 4
DI8DEVTYPE_DEVICE = 0x11
DI8DEVTYPE_MOUSE = 0x12
DI8DEVTYPE_KEYBOARD = 0x13
DI8DEVTYPE_JOYSTICK = 0x14
DI8DEVTYPE_GAMEPAD = 0x15
DI8DEVTYPE_DRIVING = 0x16
DI8DEVTYPE_FLIGHT = 0x17
DI8DEVTYPE_1STPERSON = 0x18
DI8DEVTYPE_DEVICECTRL = 0x19
DI8DEVTYPE_SCREENPOINTER = 0x1A
DI8DEVTYPE_REMOTE = 0x1B
DI8DEVTYPE_SUPPLEMENTAL = 0x1C
DI8DEVTYPEMOUSE_UNKNOWN = 1
DI8DEVTYPEMOUSE_TRADITIONAL = 2
DI8DEVTYPEMOUSE_FINGERSTICK = 3
DI8DEVTYPEMOUSE_TOUCHPAD = 4
DI8DEVTYPEMOUSE_TRACKBALL = 5
DI8DEVTYPEMOUSE_ABSOLUTE = 6
DI8DEVTYPEKEYBOARD_UNKNOWN = 0
DI8DEVTYPEKEYBOARD_PCXT = 1
DI8DEVTYPEKEYBOARD_OLIVETTI = 2
DI8DEVTYPEKEYBOARD_PCAT = 3
DI8DEVTYPEKEYBOARD_PCENH = 4
DI8DEVTYPEKEYBOARD_NOKIA1050 = 5
DI8DEVTYPEKEYBOARD_NOKIA9140 = 6
DI8DEVTYPEKEYBOARD_NEC98 = 7
DI8DEVTYPEKEYBOARD_NEC98LAPTOP = 8
DI8DEVTYPEKEYBOARD_NEC98106 = 9
DI8DEVTYPEKEYBOARD_JAPAN106 = 10
DI8DEVTYPEKEYBOARD_JAPANAX = 11
DI8DEVTYPEKEYBOARD_J3100 = 12
DI8DEVTYPE_LIMITEDGAMESUBTYPE = 1
DI8DEVTYPEJOYSTICK_LIMITED = DI8DEVTYPE_LIMITEDGAMESUBTYPE
DI8DEVTYPEJOYSTICK_STANDARD = 2
DI8DEVTYPEGAMEPAD_LIMITED = DI8DEVTYPE_LIMITEDGAMESUBTYPE
DI8DEVTYPEGAMEPAD_STANDARD = 2
DI8DEVTYPEGAMEPAD_TILT = 3
DI8DEVTYPEDRIVING_LIMITED = DI8DEVTYPE_LIMITEDGAMESUBTYPE
DI8DEVTYPEDRIVING_COMBINEDPEDALS = 2
DI8DEVTYPEDRIVING_DUALPEDALS = 3
DI8DEVTYPEDRIVING_THREEPEDALS = 4
DI8DEVTYPEDRIVING_HANDHELD = 5
DI8DEVTYPEFLIGHT_LIMITED = DI8DEVTYPE_LIMITEDGAMESUBTYPE
DI8DEVTYPEFLIGHT_STICK = 2
DI8DEVTYPEFLIGHT_YOKE = 3
DI8DEVTYPEFLIGHT_RC = 4
DI8DEVTYPE1STPERSON_LIMITED = DI8DEVTYPE_LIMITEDGAMESUBTYPE
DI8DEVTYPE1STPERSON_UNKNOWN = 2
DI8DEVTYPE1STPERSON_SIXDOF = 3
DI8DEVTYPE1STPERSON_SHOOTER = 4
DI8DEVTYPESCREENPTR_UNKNOWN = 2
DI8DEVTYPESCREENPTR_LIGHTGUN = 3
DI8DEVTYPESCREENPTR_LIGHTPEN = 4
DI8DEVTYPESCREENPTR_TOUCH = 5
DI8DEVTYPEREMOTE_UNKNOWN = 2
DI8DEVTYPEDEVICECTRL_UNKNOWN = 2
DI8DEVTYPEDEVICECTRL_COMMSSELECTION = 3
DI8DEVTYPEDEVICECTRL_COMMSSELECTION_HARDWIRED = 4
DI8DEVTYPESUPPLEMENTAL_UNKNOWN = 2
DI8DEVTYPESUPPLEMENTAL_2NDHANDCONTROLLER = 3
DI8DEVTYPESUPPLEMENTAL_HEADTRACKER = 4
DI8DEVTYPESUPPLEMENTAL_HANDTRACKER = 5
DI8DEVTYPESUPPLEMENTAL_SHIFTSTICKGATE = 6
DI8DEVTYPESUPPLEMENTAL_SHIFTER = 7
DI8DEVTYPESUPPLEMENTAL_THROTTLE = 8
DI8DEVTYPESUPPLEMENTAL_SPLITTHROTTLE = 9
DI8DEVTYPESUPPLEMENTAL_COMBINEDPEDALS = 10
DI8DEVTYPESUPPLEMENTAL_DUALPEDALS = 11
DI8DEVTYPESUPPLEMENTAL_THREEPEDALS = 12
DI8DEVTYPESUPPLEMENTAL_RUDDERPEDALS = 13
DIDC_ATTACHED = 0x00000001
DIDC_POLLEDDEVICE = 0x00000002
DIDC_EMULATED = 0x00000004
DIDC_POLLEDDATAFORMAT = 0x00000008
DIDC_FORCEFEEDBACK = 0x00000100
DIDC_FFATTACK = 0x00000200
DIDC_FFFADE = 0x00000400
DIDC_SATURATION = 0x00000800
DIDC_POSNEGCOEFFICIENTS = 0x00001000
DIDC_POSNEGSATURATION = 0x00002000
DIDC_DEADBAND = 0x00004000
DIDC_STARTDELAY = 0x00008000
DIDC_ALIAS = 0x00010000
DIDC_PHANTOM = 0x00020000
DIDC_HIDDEN = 0x00040000
DIDFT_ALL = 0x00000000
DIDFT_RELAXIS = 0x00000001
DIDFT_ABSAXIS = 0x00000002
DIDFT_AXIS = 0x00000003
DIDFT_PSHBUTTON = 0x00000004
DIDFT_TGLBUTTON = 0x00000008
DIDFT_BUTTON = 0x0000000C
DIDFT_POV = 0x00000010
DIDFT_COLLECTION = 0x00000040
DIDFT_NODATA = 0x00000080
DIDFT_ANYINSTANCE = 0x00FFFF00
DIDFT_INSTANCEMASK = DIDFT_ANYINSTANCE
DIDFT_FFACTUATOR = 0x01000000
DIDFT_FFEFFECTTRIGGER = 0x02000000
DIDFT_OUTPUT = 0x10000000
DIDFT_VENDORDEFINED = 0x04000000
DIDFT_ALIAS = 0x08000000
DIDFT_OPTIONAL = 0x80000000
DIDFT_NOCOLLECTION = 0x00FFFF00
DIA_FORCEFEEDBACK = 0x00000001
DIA_APPMAPPED = 0x00000002
DIA_APPNOMAP = 0x00000004
DIA_NORANGE = 0x00000008
DIA_APPFIXED = 0x00000010
DIAH_UNMAPPED = 0x00000000
DIAH_USERCONFIG = 0x00000001
DIAH_APPREQUESTED = 0x00000002
DIAH_HWAPP = 0x00000004
DIAH_HWDEFAULT = 0x00000008
DIAH_DEFAULT = 0x00000020
DIAH_ERROR = 0x80000000
DIAFTS_NEWDEVICELOW = 0xFFFFFFFF
DIAFTS_NEWDEVICEHIGH = 0xFFFFFFFF
DIAFTS_UNUSEDDEVICELOW = 0x00000000
DIAFTS_UNUSEDDEVICEHIGH = 0x00000000
DIDBAM_DEFAULT = 0x00000000
DIDBAM_PRESERVE = 0x00000001
DIDBAM_INITIALIZE = 0x00000002
DIDBAM_HWDEFAULTS = 0x00000004
DIDSAM_DEFAULT = 0x00000000
DIDSAM_NOUSER = 0x00000001
DIDSAM_FORCESAVE = 0x00000002
DICD_DEFAULT = 0x00000000
DICD_EDIT = 0x00000001
DIDOI_FFACTUATOR = 0x00000001
DIDOI_FFEFFECTTRIGGER = 0x00000002
DIDOI_POLLED = 0x00008000
DIDOI_ASPECTPOSITION = 0x00000100
DIDOI_ASPECTVELOCITY = 0x00000200
DIDOI_ASPECTACCEL = 0x00000300
DIDOI_ASPECTFORCE = 0x00000400
DIDOI_ASPECTMASK = 0x00000F00
DIDOI_GUIDISUSAGE = 0x00010000
DIPH_DEVICE = 0
DIPH_BYOFFSET = 1
DIPH_BYID = 2
DIPH_BYUSAGE = 3
DISCL_EXCLUSIVE = 0x00000001
DISCL_NONEXCLUSIVE = 0x00000002
DISCL_FOREGROUND = 0x00000004
DISCL_BACKGROUND = 0x00000008
DISCL_NOWINKEY = 0x00000010
DIPROP_BUFFERSIZE = 1
class DIDEVICEINSTANCE(ctypes.Structure):
_fields_ = (
('dwSize', DWORD),
('guidInstance', com.GUID),
('guidProduct', com.GUID),
('dwDevType', DWORD),
('tszInstanceName', WCHAR * MAX_PATH),
('tszProductName', WCHAR * MAX_PATH),
('guidFFDriver', com.GUID),
('wUsagePage', WORD),
('wUsage', WORD)
)
LPDIDEVICEINSTANCE = ctypes.POINTER(DIDEVICEINSTANCE)
LPDIENUMDEVICESCALLBACK = ctypes.WINFUNCTYPE(BOOL, LPDIDEVICEINSTANCE, LPVOID)
class DIDEVICEOBJECTINSTANCE(ctypes.Structure):
_fields_ = (
('dwSize', DWORD),
('guidType', com.GUID),
('dwOfs', DWORD),
('dwType', DWORD),
('dwFlags', DWORD),
('tszName', WCHAR * MAX_PATH),
('dwFFMaxForce', DWORD),
('dwFFForceResolution', DWORD),
('wCollectionNumber', WORD),
('wDesignatorIndex', WORD),
('wUsagePage', WORD),
('wUsage', WORD),
('dwDimension', DWORD),
('wExponent', WORD),
('wReportId', WORD)
)
LPDIDEVICEOBJECTINSTANCE = ctypes.POINTER(DIDEVICEOBJECTINSTANCE)
LPDIENUMDEVICEOBJECTSCALLBACK = \
ctypes.WINFUNCTYPE(BOOL, LPDIDEVICEOBJECTINSTANCE, LPVOID)
class DIOBJECTDATAFORMAT(ctypes.Structure):
_fields_ = (
('pguid', ctypes.POINTER(com.GUID)),
('dwOfs', DWORD),
('dwType', DWORD),
('dwFlags', DWORD)
)
__slots__ = [n for n, t in _fields_]
LPDIOBJECTDATAFORMAT = ctypes.POINTER(DIOBJECTDATAFORMAT)
class DIDATAFORMAT(ctypes.Structure):
_fields_ = (
('dwSize', DWORD),
('dwObjSize', DWORD),
('dwFlags', DWORD),
('dwDataSize', DWORD),
('dwNumObjs', DWORD),
('rgodf', LPDIOBJECTDATAFORMAT)
)
__slots__ = [n for n, t in _fields_]
LPDIDATAFORMAT = ctypes.POINTER(DIDATAFORMAT)
class DIDEVICEOBJECTDATA(ctypes.Structure):
_fields_ = (
('dwOfs', DWORD),
('dwData', DWORD),
('dwTimeStamp', DWORD),
('dwSequence', DWORD),
('uAppData', ctypes.POINTER(UINT))
)
LPDIDEVICEOBJECTDATA = ctypes.POINTER(DIDEVICEOBJECTDATA)
class DIPROPHEADER(ctypes.Structure):
_fields_ = (
('dwSize', DWORD),
('dwHeaderSize', DWORD),
('dwObj', DWORD),
('dwHow', DWORD)
)
LPDIPROPHEADER = ctypes.POINTER(DIPROPHEADER)
class DIPROPDWORD(ctypes.Structure):
_fields_ = (
('diph', DIPROPHEADER),
('dwData', DWORD)
)
# All method names in the interfaces are filled in, but unused (so far)
# methods have no parameters; they'll crash when we try to use them, at
# which point we can go in and fill them in.
# IDirect* interfaces are all Unicode (e.g. IDirectInputDevice8W).
class IDirectInputDevice8(com.IUnknown):
_methods_ = [
('GetCapabilities',
com.STDMETHOD()),
('EnumObjects',
com.STDMETHOD(LPDIENUMDEVICEOBJECTSCALLBACK, LPVOID, DWORD)),
('GetProperty',
com.STDMETHOD()),
('SetProperty',
com.STDMETHOD(LPVOID, LPDIPROPHEADER)),
('Acquire',
com.STDMETHOD()),
('Unacquire',
com.STDMETHOD()),
('GetDeviceState',
com.STDMETHOD()),
('GetDeviceData',
com.STDMETHOD(DWORD, LPDIDEVICEOBJECTDATA, LPDWORD, DWORD)),
('SetDataFormat',
com.STDMETHOD(LPDIDATAFORMAT)),
('SetEventNotification',
com.STDMETHOD()),
('SetCooperativeLevel',
com.STDMETHOD(HWND, DWORD)),
('GetObjectInfo',
com.STDMETHOD()),
('GetDeviceInfo',
com.STDMETHOD()),
('RunControlPanel',
com.STDMETHOD()),
('Initialize',
com.STDMETHOD()),
('CreateEffect',
com.STDMETHOD()),
('EnumEffects',
com.STDMETHOD()),
('GetEffectInfo',
com.STDMETHOD()),
('GetForceFeedbackState',
com.STDMETHOD()),
('SendForceFeedbackCommand',
com.STDMETHOD()),
('EnumCreatedEffectObjects',
com.STDMETHOD()),
('Escape',
com.STDMETHOD()),
('Poll',
com.STDMETHOD()),
('SendDeviceData',
com.STDMETHOD()),
('EnumEffectsInFile',
com.STDMETHOD()),
('WriteEffectToFile',
com.STDMETHOD()),
('BuildActionMap',
com.STDMETHOD()),
('SetActionMap',
com.STDMETHOD()),
('GetImageInfo',
com.STDMETHOD()),
]
class IDirectInput8(com.IUnknown):
_methods_ = [
('CreateDevice',
com.STDMETHOD(ctypes.POINTER(com.GUID),
ctypes.POINTER(IDirectInputDevice8),
ctypes.c_void_p)),
('EnumDevices',
com.STDMETHOD(DWORD, LPDIENUMDEVICESCALLBACK, LPVOID, DWORD)),
('GetDeviceStatus',
com.STDMETHOD()),
('RunControlPanel',
com.STDMETHOD()),
('Initialize',
com.STDMETHOD()),
('FindDevice',
com.STDMETHOD()),
('EnumDevicesBySemantics',
com.STDMETHOD()),
('ConfigureDevices',
com.STDMETHOD()),
]
IID_IDirectInput8W = \
com.GUID(0xBF798031,0x483A,0x4DA2,0xAA,0x99,0x5D,0x64,0xED,0x36,0x97,0x00)
lib.DirectInput8Create.argtypes = \
(ctypes.c_void_p, DWORD, com.LPGUID, ctypes.c_void_p, ctypes.c_void_p)
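# DirectInput8Create(hinst, dwVersion, riidltf, ppvOut, punkOuter): the two
# trailing void pointers are the interface out-parameter and the (unused,
# None here) aggregation pointer, matching the call in _init_directinput below.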
class Element(object):
value = None
def __init__(self, object_instance):
self.name = object_instance.tszName
self._flags = object_instance.dwFlags
self._guid = object_instance.guidType
self._type = object_instance.dwType
def get_value(self):
return self.value
class Device(object):
def __init__(self, device, device_instance):
self.name = device_instance.tszInstanceName
#print self.name, hex(device_instance.dwDevType & 0xff), \
# hex(device_instance.dwDevType & 0xff00)
#print hex(device_instance.wUsagePage), hex(device_instance.wUsage)
self._device = device
self._init_elements()
self._set_format()
def _init_elements(self):
self.elements = []
self._device.EnumObjects(
LPDIENUMDEVICEOBJECTSCALLBACK(self._object_enum), None, DIDFT_ALL)
def _object_enum(self, object_instance, arg):
type = object_instance.contents.dwType
flags = object_instance.contents.dwFlags
if type & DIDFT_NODATA:
return DIENUM_CONTINUE
element = Element(object_instance.contents)
self.elements.append(element)
return DIENUM_CONTINUE
def _set_format(self):
if not self.elements:
return
object_formats = (DIOBJECTDATAFORMAT * len(self.elements))()
offset = 0
for object_format, element in zip(object_formats, self.elements):
object_format.dwOfs = offset
object_format.dwType = element._type
offset += 4
format = DIDATAFORMAT()
format.dwSize = ctypes.sizeof(format)
format.dwObjSize = ctypes.sizeof(DIOBJECTDATAFORMAT)
format.dwFlags = 0
format.dwDataSize = offset
format.dwNumObjs = len(object_formats)
format.rgodf = ctypes.cast(ctypes.pointer(object_formats),
LPDIOBJECTDATAFORMAT)
self._device.SetDataFormat(format)
prop = DIPROPDWORD()
prop.diph.dwSize = ctypes.sizeof(prop)
prop.diph.dwHeaderSize = ctypes.sizeof(prop.diph)
prop.diph.dwObj = 0
prop.diph.dwHow = DIPH_DEVICE
prop.dwData = 64 * ctypes.sizeof(DIDATAFORMAT)
self._device.SetProperty(DIPROP_BUFFERSIZE, ctypes.byref(prop.diph))
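# Buffering note: DIPROP_BUFFERSIZE is set before Acquire() so that
# GetDeviceData() can return buffered DIDEVICEOBJECTDATA events; the value
# here appears chosen to cover the 64-event array read in dispatch_events().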
def open(self, window=None):
if not self.elements:
return
if window is None:
# Pick any open window, or the shadow window if no windows
# have been created yet.
window = pyglet.gl._shadow_window
for window in pyglet.app.windows:
break
self._device.SetCooperativeLevel(window._hwnd,
DISCL_BACKGROUND | DISCL_NONEXCLUSIVE)
self._device.Acquire()
# XXX HACK
pyglet.clock.schedule(self.dispatch_events)
def close(self):
if not self.elements:
return
self._device.Unacquire()
# XXX HACK?
def dispatch_events(self, dt): # dt HACK
if not self.elements:
return
events = (DIDEVICEOBJECTDATA * 64)()
n_events = DWORD(len(events))
self._device.GetDeviceData(ctypes.sizeof(DIDEVICEOBJECTDATA),
ctypes.cast(ctypes.pointer(events),
LPDIDEVICEOBJECTDATA),
ctypes.byref(n_events),
0)
for event in events[:n_events.value]:
index = event.dwOfs // 4
self.elements[index].value = event.dwData
def _device_enum(device_instance, arg):
device = IDirectInputDevice8()
dinput.CreateDevice(device_instance.contents.guidInstance,
ctypes.byref(device),
None)
_devices.append(Device(device, device_instance.contents))
return DIENUM_CONTINUE
def get_devices():
global _devices
_devices = []
dinput.EnumDevices(DI8DEVCLASS_ALL, LPDIENUMDEVICESCALLBACK(_device_enum),
None, DIEDFL_ATTACHEDONLY)
return _devices
def _init_directinput():
global dinput
dinput = IDirectInput8()
module = _kernel32.GetModuleHandleW(None)
DIRECTINPUT_VERSION = 0x0800
lib.DirectInput8Create(module, DIRECTINPUT_VERSION,
IID_IDirectInput8W, ctypes.byref(dinput), None)
_init_directinput()
'''
#for device in get_devices():
device = get_devices()[0]
device.open(w)
print device.name
pyglet.app.run()
'''
|
bramalingam/openmicroscopy
|
refs/heads/develop
|
components/tools/OmeroPy/src/omero/plugins/admin.py
|
2
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
:author: Josh Moore, josh at glencoesoftware.com
OMERO Grid admin controller
This is a python wrapper around icegridregistry/icegridnode for master
and various other tools needed for administration.
Copyright 2008 Glencoe Software, Inc. All Rights Reserved.
Use is subject to license terms supplied in LICENSE.txt
"""
import re
import os
import sys
import stat
import platform
import portalocker
from path import path
import omero
import omero.config
from omero.cli import CLI
from omero.cli import BaseControl
from omero.cli import DirectoryType
from omero.cli import NonZeroReturnCode
from omero.cli import VERSION
from omero.plugins.prefs import with_config
from omero_ext.which import whichall
from omero_version import ice_compatibility
try:
import pywintypes
import win32service
import win32evtlogutil
import win32api
import win32security
has_win32 = True
except ImportError:
has_win32 = False
DEFAULT_WAIT = 300
HELP = """Administrative tools including starting/stopping OMERO.
Environment variables:
OMERO_MASTER
OMERO_NODE
Configuration properties:
omero.windows.user
omero.windows.pass
""" + "\n" + "="*50 + "\n"
class AdminControl(BaseControl):
def _complete(self, text, line, begidx, endidx):
"""
Returns a file after "deploy", "start", or "startasync"
and otherwise delegates to the BaseControl
"""
for s in (" deploy ", " start ", " startasync "):
l = len(s)
i = line.find(s)
if i >= 0:
f = line[i+l:]
return self._complete_file(f)
return BaseControl._complete(self, text, line, begidx, endidx)
def _configure(self, parser):
sub = parser.sub()
self.actions = {}
class Action(object):
def __init__(this, name, help, wait=False):
this.parser = sub.add_parser(name, help=help,
description=help)
this.parser.set_defaults(func=getattr(self, name))
self.actions[name] = this.parser
if wait:
this.parser.add_argument(
"--wait", type=float, default=DEFAULT_WAIT,
help="Seconds to wait for operation")
Action(
"start",
"""Start icegridnode daemon and waits for required components to \
come up, i.e. status == 0
If the first argument can be found as a file, it will be deployed as the
application descriptor rather than etc/grid/default.xml. All other arguments
will be used as targets to enable optional sections of the descriptor""",
wait=True)
Action("startasync", "The same as start but returns immediately",)
Action("restart", "stop && start", wait=True)
Action(
"restartasync", """The same as restart but returns as soon as \
starting has begun.""",
wait=True)
Action("status", """Status of server
Returns with 0 status if a node ping is successful and if some SessionManager
returns an OMERO-specific exception on a bad login. This can be used in shell
scripts, e.g.:
$ omero admin status && echo "server started"
""")
Action(
"stop",
"""Initiates node shutdown and waits for status to return a \
non-0 value""",
wait=True)
Action("stopasync", "The same as stop but returns immediately")
Action(
"deploy",
"""Deploy the given deployment descriptor. See etc/grid/*.xml
If the first argument is not a file path, etc/grid/default.xml will be
deployed by default. Same functionality as start, but requires that the node
already be running. This may automatically restart some server components.""")
Action(
"ice", "Drop user into icegridadmin console or execute arguments")
fixpyramids = Action(
"fixpyramids", "Remove empty pyramid pixels files").parser
# See cleanse options below
Action(
"diagnostics",
"Run a set of checks on the current, preferably active server")
Action(
"waitup",
"Used by start after calling startasync to wait on status==0",
wait=True)
Action(
"waitdown",
"Used by stop after calling stopasync to wait on status!=0",
wait=True)
reindex = Action(
"reindex",
"""Re-index the Lucene index
Command-line tool for re-indexing the database. This command must be run on the
machine where /OMERO/FullText is located.
Examples:
bin/omero admin reindex --full \
# All objects
bin/omero admin reindex --class ome.model.core.Image \
# Only images
JAVA_OPTS="-Dlog4j.configuration=stderr.xml" bin/omero admin reindex --full\
# Passing arguments to Java
LIMITATION: omero.db.pass values do not currently get passed to the Java
process. You will need to allow passwordless login to PostgreSQL. In
fact, only the following properties are passed:
omero.data.dir
omero.search.*
omero.db.* (excluding pass)
""").parser
reindex.add_argument(
"--jdwp", help="Activate remote debugging")
group = reindex.add_mutually_exclusive_group()
group.add_argument(
"--full", action="store_true",
help="Reindexes all non-excluded tables sequentially")
group.add_argument(
"--events", action="store_true",
help="Reindexes all non-excluded event logs chronologically")
group.add_argument(
"--class", nargs="+",
help="Reindexes the given classes sequentially")
ports = Action(
"ports",
"""Allows modifying the ports from a standard OMERO install
To have two OMEROs running on the same machine, several ports must be
modified from their default values.
Internally, this command uses the omero.install.change_ports module.
Changing the ports on a running server is usually not what you want and
will be prevented. Use --skipcheck to change the ports anyway.
Examples:
%(prog)s --prefix=1 # sets ports to: 14061, \
14063, 14064
%(prog)s --prefix=1 --revert # sets ports back to: \
4061, 4063, 4064
%(prog)s --registry=4444 --tcp=5555 --ssl=6666 # sets ports to: 4444 \
5555 6666
""").parser
ports.add_argument(
"--prefix",
help="Adds a prefix to each port ON TOP OF any other settings")
ports.add_argument(
"--registry", default="4061",
help="Registry port. (default: %(default)s)")
ports.add_argument(
"--tcp", default="4063",
help="The tcp port to be used by Glacier2 (default: %(default)s)")
ports.add_argument(
"--ssl", default="4064",
help="The ssl port to be used by Glacier2 (default: %(default)s)")
ports.add_argument(
"--revert", action="store_true",
help="Used to rollback from the given settings to the defaults")
ports.add_argument(
"--skipcheck", action="store_true",
help="Skips the check if the server is already running")
sessionlist = Action(
"sessionlist", "List currently running sessions").parser
sessionlist.add_login_arguments()
cleanse = Action("cleanse", """Remove binary data files from OMERO
Deleting an object from OMERO currently does not remove the binary data. Use
this command either manually or in a cron job periodically to remove Pixels
and other data.
This is done by checking that for all the files in the given directory, a
matching entry exists on the server. THE /OMERO DIRECTORY MUST MATCH THE
DATABASE YOU ARE RUNNING AGAINST.
This command must be run on the machine where, for example, /OMERO/ is
located.
Examples:
bin/omero admin cleanse --dry-run /OMERO # Lists files that will be \
deleted
bin/omero admin cleanse /OMERO # Actually delete them.
bin/omero admin cleanse /volumes/data/OMERO # Delete from a standard \
location.
""").parser
for x in (cleanse, fixpyramids):
x.add_argument(
"--dry-run", action="store_true",
help="Print out which files would be deleted")
x.add_argument(
"data_dir", type=DirectoryType(),
help="omero.data.dir directory value (e.g. /OMERO")
x.add_login_arguments()
Action("checkwindows", "Run simple check of the local installation "
"(Windows-only)")
Action("checkice", "Run simple check of the Ice installation")
Action("events", "Print event log (Windows-only)")
self.actions["ice"].add_argument(
"argument", nargs="*",
help="""Arguments joined together to make an Ice command. If not \
present, the user will enter a console""")
self.actions["status"].add_argument(
"node", nargs="?", default="master")
self.actions["status"].add_argument(
"--nodeonly", action="store_true",
help="If set, then only tests if the icegridnode is running")
for name in ("start", "startasync"):
self.actions[name].add_argument(
"-u", "--user",
help="Service Log On As user name. If none given, the value"
" of omero.windows.user will be used. (Windows-only)")
self.actions[name].add_argument(
"-w", "--password",
help="Service Log On As user password. If none given, the"
" value of omero.windows.pass will be used. (Windows-only)")
for k in ("start", "startasync", "deploy", "restart", "restartasync"):
self.actions[k].add_argument(
"file", nargs="?",
help="Application descriptor. If not provided, a default"
" will be used")
self.actions[k].add_argument(
"targets", nargs="*",
help="Targets within the application descriptor which "
" should be activated. Common values are: \"debug\", "
"\"trace\" ")
# DISABLED = """ see: http://www.zeroc.com/forums/bug-reports/\
# 4237-sporadic-freeze-errors-concurrent-icegridnode-access.html
# restart [filename] [targets] : Calls stop followed by start \
# args
# restartasync [filename] [targets] : Calls stop followed by \
# startasync args
# """
#
# Windows utility methods
#
if has_win32:
def _query_service(unused, svc_name):
hscm = win32service.OpenSCManager(
None, None, win32service.SC_MANAGER_ALL_ACCESS)
try:
try:
hs = win32service.OpenService(
hscm, svc_name, win32service.SERVICE_ALL_ACCESS)
except:
return "DOESNOTEXIST"
try:
q = win32service.QueryServiceStatus(hs)
type, state, ctrl, err, svcerr, svccp, svcwh = q
if state == win32service.SERVICE_STOPPED:
return "STOPPED"
else:
return "unknown"
finally:
win32service.CloseServiceHandle(hs)
finally:
win32service.CloseServiceHandle(hscm)
def events(self, svc_name):
def DumpRecord(record):
if str(record.SourceName) == svc_name:
self.ctx.out("Time: %s" % record.TimeWritten)
self.ctx.out("Rec: %s" % record.RecordNumber)
for si in record.StringInserts:
self.ctx.out(si)
self.ctx.out("="*20)
win32evtlogutil.FeedEventLogRecords(DumpRecord)
else:
def events(self, svc_name):
self.ctx.die(
666, "Could not import win32service and/or win32evtlogutil")
def _query_service(self, svc_name):
self.ctx.die(
666, "Could not import win32service and/or win32evtlogutil")
#
# End Windows Methods
#
def _node(self, omero_node=None):
"""
Overrides the regular node() logic to return the value of
OMERO_MASTER or "master"
"""
if omero_node is not None:
os.environ["OMERO_MASTER"] = omero_node
if "OMERO_MASTER" in os.environ:
return os.environ["OMERO_MASTER"]
else:
return "master"
def _cmd(self, *command_arguments):
"""
Used to generate an icegridadmin command line argument list
"""
command = ["icegridadmin", self._intcfg()]
command.extend(command_arguments)
return command
def _descript(self, args):
if args.file is not None:
# Relative to cwd
descript = path(args.file).abspath()
if not descript.exists():
self.ctx.dbg("No such file: %s -- Using as target" % descript)
args.targets.insert(0, args.file)
descript = None
else:
descript = None
if descript is None:
__d__ = "default.xml"
if self._isWindows():
__d__ = "windefault.xml"
descript = self.ctx.dir / "etc" / "grid" / __d__
self.ctx.err("No descriptor given. Using %s"
% os.path.sep.join(["etc", "grid", __d__]))
return descript
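# Note: a nonexistent "file" argument is reinterpreted as a deployment target
# (see the dbg message above), so targets can be passed without an explicit
# descriptor path.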
def checkwindows(self, args):
"""
Checks that the templates file as defined in etc\Windows.cfg
can be found.
"""
self.check_access(os.R_OK)
if not self._isWindows():
self.ctx.die(123, "Not Windows")
import Ice
key = "IceGrid.Node.Data"
properties = Ice.createProperties([self._icecfg()])
nodedata = properties.getProperty(key)
if not nodedata:
self.ctx.die(300,
"Bad configuration: No IceGrid.Node.Data property")
nodepath = path(nodedata)
pp = nodepath.parpath(self.ctx.dir)
if pp:
return
if nodepath == r"c:\omero_dist\var\master":
self.ctx.out("Found default value: %s" % nodepath)
self.ctx.out("Attempting to correct...")
from omero.install.win_set_path import win_set_path
count = win_set_path(dir=self.ctx.dir)
if count:
return
self.ctx.die(400, """
%s is not in this directory. Aborting...
Please see the installation instructions on modifying
the files for your installation (%s)
with bin\winconfig.bat
""" % (nodedata, self.ctx.dir))
##############################################
#
# Commands
#
@with_config
def startasync(self, args, config):
"""
First checks for a valid installation, then checks the grid,
then registers the action: "node HOST start"
"""
self.check_access(config=config)
self.checkice()
self.check_node(args)
if self._isWindows():
self.checkwindows(args)
if 0 == self.status(args, node_only=True):
self.ctx.die(876, "Server already running")
self._initDir()
# Do a check to see if we've started before.
self._regdata()
self.check([])
user = args.user
pasw = args.password
descript = self._descript(args)
if self._isWindows():
svc_name = "OMERO.%s" % args.node
output = self._query_service(svc_name)
# Now check if the server exists
if 0 <= output.find("DOESNOTEXIST"):
binpath = """icegridnode.exe "%s" --deploy "%s" --service\
%s""" % (self._icecfg(), descript, svc_name)
# By default: "NT Authority\Local System"
if not user:
try:
user = config.as_map()["omero.windows.user"]
except KeyError:
user = None
if user is not None and len(user) > 0:
if not "\\" in user:
computername = win32api.GetComputerName()
user = "\\".join([computername, user])
try:
# See #9967, code based on http://mail.python.org/\
# pipermail/python-win32/2010-October/010791.html
self.ctx.out("Granting SeServiceLogonRight to service"
" user \"%s\"" % user)
policy_handle = win32security.LsaOpenPolicy(
None, win32security.POLICY_ALL_ACCESS)
sid_obj, domain, tmp = \
win32security.LookupAccountName(None, user)
win32security.LsaAddAccountRights(
policy_handle, sid_obj, ('SeServiceLogonRight',))
win32security.LsaClose(policy_handle)
except pywintypes.error, details:
self.ctx.die(200, "Error during service user set up:"
" (%s) %s" % (details[0], details[2]))
if not pasw:
try:
pasw = config.as_map()["omero.windows.pass"]
except KeyError:
pasw = self._ask_for_password(
" for service user \"%s\"" % user)
else:
pasw = None
hscm = win32service.OpenSCManager(
None, None, win32service.SC_MANAGER_ALL_ACCESS)
try:
self.ctx.out("Installing %s Windows service." % svc_name)
hs = win32service.CreateService(
hscm, svc_name, svc_name,
win32service.SERVICE_ALL_ACCESS,
win32service.SERVICE_WIN32_OWN_PROCESS,
win32service.SERVICE_AUTO_START,
win32service.SERVICE_ERROR_NORMAL, binpath, None, 0,
None, user, pasw)
self.ctx.out("Successfully installed %s Windows service."
% svc_name)
win32service.CloseServiceHandle(hs)
finally:
win32service.CloseServiceHandle(hscm)
# Then check if the server is already running
if 0 <= output.find("RUNNING"):
self.ctx.die(201, "%s is already running. Use stop first"
% svc_name)
# Finally, try to start the service - delete if startup fails
hscm = win32service.OpenSCManager(
None, None, win32service.SC_MANAGER_ALL_ACCESS)
try:
try:
hs = win32service.OpenService(
hscm, svc_name, win32service.SC_MANAGER_ALL_ACCESS)
win32service.StartService(hs, None)
self.ctx.out("Starting %s Windows service." % svc_name)
except pywintypes.error, details:
self.ctx.out("%s service startup failed: (%s) %s"
% (svc_name, details[0], details[2]))
win32service.DeleteService(hs)
self.ctx.die(202, "%s service deleted." % svc_name)
finally:
win32service.CloseServiceHandle(hs)
win32service.CloseServiceHandle(hscm)
else:
command = [
"icegridnode", "--daemon", "--pidfile", str(self._pid()),
"--nochdir", self._icecfg(), "--deploy", str(descript)
] + args.targets
self.ctx.rv = self.ctx.call(command)
@with_config
def start(self, args, config):
self.startasync(args, config)
try:
self.waitup(args)
except NonZeroReturnCode, nzrc:
# stop() may itself throw,
# if it does not, then we rethrow
# the original
self.ctx.err('Calling "stop" on remaining components')
self.stop(args, config)
raise nzrc
@with_config
def deploy(self, args, config):
self.check_access()
self.checkice()
descript = self._descript(args)
# TODO : Doesn't properly handle whitespace
# Though users can workaround with something like:
# bin/omero admin deploy etc/grid/a\\\\ b.xml
command = ["icegridadmin", self._intcfg(), "-e",
" ".join(["application", "update", str(descript)] +
args.targets)]
self.ctx.call(command)
def status(self, args, node_only=False):
self.check_node(args)
command = self._cmd("-e", "node ping %s" % self._node())
self.ctx.rv = self.ctx.popen(command).wait() # popen
# node_only implies that "up" need not check for all
# of blitz to be accessible but just that if the node
# is running.
if not node_only:
node_only = getattr(args, "nodeonly", False)
if self.ctx.rv == 0 and not node_only:
try:
import Ice
ic = Ice.initialize([self._intcfg()])
try:
sm = self.session_manager(ic)
try:
sm.create("####### STATUS CHECK ########", None)
# Not adding "omero.client.uuid"
except omero.WrappedCreateSessionException:
# Only the server will throw one of these
self.ctx.dbg("Server reachable")
self.ctx.rv = 0
finally:
ic.destroy()
except Exception, exc:
self.ctx.rv = 1
self.ctx.dbg("Server not reachable: "+str(exc))
return self.ctx.rv
def wait_for_icedb(self, args, config):
"""
Since the stop and start are separately protected by
the lock on config.xml, we need to wait for some time
to hopefully let the icegridnode process release the
file locks.
"""
self.ctx.sleep(1) # put in sleep to try to prevent "db locked" (#7325)
@with_config
def restart(self, args, config):
if not self.stop(args, config):
self.ctx.die(54, "Failed to shutdown")
self.wait_for_icedb(args, config)
self.start(args, config)
@with_config
def restartasync(self, args, config):
if not self.stop(args, config):
self.ctx.die(54, "Failed to shutdown")
self.wait_for_icedb(args, config)
self.startasync(args, config)
def waitup(self, args):
"""
Polls status() up to 30 times, pausing wait/30 seconds between attempts
(10 seconds for the default wait of 300). If it never returns 0, ctx.die() is called.
"""
self.check_access(os.R_OK)
self.ctx.out("Waiting on startup. Use CTRL-C to exit")
count, loop_secs, time_msg = self.loops_and_wait(args)
while True:
count = count - 1
if count == 0:
self.ctx.die(43, "\nFailed to startup some components after"
" %s" % time_msg)
elif 0 == self.status(args, node_only=False):
break
else:
self.ctx.out(".", newline=False)
self.ctx.sleep(loop_secs)
def waitdown(self, args):
"""
Returns true if the server went down
"""
self.check_access(os.R_OK)
self.ctx.out("Waiting on shutdown. Use CTRL-C to exit")
count, loop_secs, time_msg = self.loops_and_wait(args)
while True:
count = count - 1
if count == 0:
self.ctx.die(44, "\nFailed to shutdown some components after"
" %s" % time_msg)
return False
elif 0 != self.status(args, node_only=True):
break
else:
self.ctx.out(".", newline=False)
self.ctx.sleep(loop_secs)
self.ctx.rv = 0
return True
def loops_and_wait(self, args):
"""
If present, get the wait time from the args argument
and calculate the number of loops and the wait time
needed. If not present in args, use a default value.
"""
if not hasattr(args, "wait"):
# This might happen if a new command starts using
# waitup/waitdown without setting wait=True for
# Action()
args.wait = DEFAULT_WAIT
total_secs = args.wait
loop_secs = total_secs / 30.0
return 30, loop_secs, "%s seconds" % total_secs
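# For example, with args.wait == 300 the call returns (30, 10.0,
# "300 seconds"), i.e. 30 status polls spaced 10 seconds apart.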
@with_config
def stopasync(self, args, config):
"""
Returns true if the server was already stopped
"""
self.check_node(args)
if 0 != self.status(args, node_only=True):
self.ctx.err("Server not running")
return True
elif self._isWindows():
svc_name = "OMERO.%s" % args.node
output = self._query_service(svc_name)
if 0 <= output.find("DOESNOTEXIST"):
self.ctx.die(203, "%s does not exist. Use 'start' first."
% svc_name)
hscm = win32service.OpenSCManager(
None, None, win32service.SC_MANAGER_ALL_ACCESS)
try:
hs = win32service.OpenService(
hscm, svc_name, win32service.SC_MANAGER_ALL_ACCESS)
win32service.ControlService(
hs, win32service.SERVICE_CONTROL_STOP)
win32service.DeleteService(hs)
self.ctx.out("%s service deleted." % svc_name)
finally:
win32service.CloseServiceHandle(hs)
win32service.CloseServiceHandle(hscm)
else:
command = self._cmd("-e", "node shutdown %s" % self._node())
try:
self.ctx.call(command)
except NonZeroReturnCode, nzrc:
self.ctx.rv = nzrc.rv
self.ctx.out("Was the server already stopped?")
@with_config
def stop(self, args, config):
if not self.stopasync(args, config):
return self.waitdown(args)
return True
def check(self, args):
# print "Check db. Have a way to load the db control"
pass
def ice(self, args):
self.check_access()
command = self._cmd()
if len(args.argument) > 0:
command.extend(["-e", " ".join(args.argument)])
return self.ctx.call(command)
else:
self.ctx.call(command)
@with_config
def fixpyramids(self, args, config):
self.check_access()
from omero.util.cleanse import fixpyramids
client = self.ctx.conn(args)
client.getSessionId()
fixpyramids(data_dir=args.data_dir, dry_run=args.dry_run,
query_service=client.sf.getQueryService(),
config_service=client.sf.getConfigService())
@with_config
def diagnostics(self, args, config):
self.check_access()
config = config.as_map()
omero_data_dir = '/OMERO'
try:
omero_data_dir = config['omero.data.dir']
except KeyError:
pass
from omero.util.temp_files import gettempdir
# gettempdir returns ~/omero/tmp/omero_%NAME/%PROCESS
# To find something more generally useful for calculating
# size, we go up two directories
omero_temp_dir = gettempdir()
omero_temp_dir = os.path.abspath(
os.path.join(omero_temp_dir, os.path.pardir, os.path.pardir))
self.ctx.out("""
%s
OMERO Diagnostics %s
%s
""" % ("="*80, VERSION, "="*80))
def sz_str(sz):
for x in ["KB", "MB", "GB"]:
sz /= 1000
if sz < 1000:
break
sz = "%.1f %s" % (sz, x)
return sz
def item(cat, msg):
cat = cat + ":"
cat = "%-12s" % cat
self.ctx.out(cat, False)
msg = "%-30s " % msg
self.ctx.out(msg, False)
def exists(p):
if p.isdir():
if not p.exists():
self.ctx.out("doesn't exist")
else:
self.ctx.out("exists")
else:
if not p.exists():
self.ctx.out("n/a")
else:
warn = 0
err = 0
for l in p.lines():
# ensure errors/warnings search is case-insensitive
lcl = l.lower()
found_err = lcl.find("error") >= 0
found_warn = lcl.find("warn") >= 0
if found_err:
err += 1
elif found_warn:
warn += 1
msg = ""
if warn or err:
msg = " errors=%-4s warnings=%-4s" % (err, warn)
self.ctx.out("%-12s %s" % (sz_str(p.size), msg))
def version(cmd):
"""
Returns a true response only
if a valid version was found.
"""
item("Commands", "%s" % " ".join(cmd))
try:
p = self.ctx.popen(cmd)
except OSError:
self.ctx.err("not found")
return False
p.wait()
io = p.communicate()
try:
v = io[0].split()
v.extend(io[1].split())
v = "".join(v)
m = re.match("^\D*(\d[.\d]+\d)\D?.*$", v)
v = "%-10s" % m.group(1)
self.ctx.out(v, False)
try:
where = whichall(cmd[0])
sz = len(where)
if sz == 0:
where = "unknown"
else:
where = where[0]
if sz > 1:
where += " -- %s others" % sz
except:
where = "unknown"
self.ctx.out("(%s)" % where)
return True
except Exception, e:
self.ctx.err("error:%s" % e)
return False
import logging
logging.basicConfig()
from omero.util.upgrade_check import UpgradeCheck
check = UpgradeCheck("diagnostics")
check.run()
if check.isUpgradeNeeded():
self.ctx.out("")
version(["java", "-version"])
version(["python", "-V"])
version(["icegridnode", "--version"])
iga = version(["icegridadmin", "--version"])
version(["psql", "--version"])
def get_ports(input):
router_lines = [line for line in input.split("\n")
if line.find("ROUTER") >= 0]
ssl_port = None
tcp_port = None
for line in router_lines:
if not ssl_port and line.find("ROUTERPORT") >= 0:
m = re.match(".*?(\d+).*?$", line)
if m:
ssl_port = m.group(1)
if not tcp_port and line.find("INSECUREROUTER") >= 0:
m = re.match("^.*?-p (\d+).*?$", line)
if m:
tcp_port = m.group(1)
return ssl_port, tcp_port
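# For example (illustrative reply text, not captured output): given describe
# output containing lines such as "ROUTERPORT = 4064" and an INSECUREROUTER
# endpoint with "-p 4063", get_ports would return ("4064", "4063").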
self.ctx.out("")
if not iga:
self.ctx.out(
"No icegridadmin available: Cannot check server list")
else:
item("Server", "icegridnode")
p = self.ctx.popen(self._cmd("-e", "server list")) # popen
rv = p.wait()
io = p.communicate()
if rv != 0:
self.ctx.out("not started")
self.ctx.dbg("""
Stdout:\n%s
Stderr:\n%s
""" % io)
else:
self.ctx.out("running")
servers = io[0].split()
servers.sort()
for s in servers:
item("Server", "%s" % s)
p2 = self.ctx.popen(
self._cmd("-e", "server state %s" % s)) # popen
p2.wait()
io2 = p2.communicate()
if io2[1]:
self.ctx.err(io2[1].strip())
elif io2[0]:
self.ctx.out(io2[0].strip())
else:
self.ctx.err("UNKNOWN!")
if self._isWindows():
# Print the OMERO server Windows service details
hscm = win32service.OpenSCManager(
None, None, win32service.SC_MANAGER_ALL_ACCESS)
services = win32service.EnumServicesStatus(hscm)
omesvcs = tuple((sname, fname) for sname, fname, status
in services if "OMERO" in fname)
for sname, fname in omesvcs:
item("Server", fname)
hsc = win32service.OpenService(
hscm, sname, win32service.SC_MANAGER_ALL_ACCESS)
logonuser = win32service.QueryServiceConfig(hsc)[7]
if win32service.QueryServiceStatus(hsc)[1] == \
win32service.SERVICE_RUNNING:
self.ctx.out("active (running as %s)" % logonuser)
else:
self.ctx.out("inactive")
win32service.CloseServiceHandle(hsc)
win32service.CloseServiceHandle(hscm)
# List SSL & TCP ports of deployed applications
self.ctx.out("")
p = self.ctx.popen(self._cmd("-e", "application list")) # popen
rv = p.wait()
io = p.communicate()
if rv != 0:
self.ctx.out("Cannot list deployed applications.")
self.ctx.dbg("""
Stdout:\n%s
Stderr:\n%s
""" % io)
else:
applications = io[0].split()
applications.sort()
for s in applications:
p2 = self.ctx.popen(
self._cmd("-e", "application describe %s" % s))
io2 = p2.communicate()
if io2[1]:
self.ctx.err(io2[1].strip())
elif io2[0]:
ssl_port, tcp_port = get_ports(io2[0])
item("%s" % s, "SSL port")
if not ssl_port:
self.ctx.err("Not found")
else:
self.ctx.out("%s" % ssl_port)
item("%s" % s, "TCP port")
if not tcp_port:
self.ctx.err("Not found")
else:
self.ctx.out("%s" % tcp_port)
else:
self.ctx.err("UNKNOWN!")
def log_dir(log, cat, cat2, knownfiles):
self.ctx.out("")
item(cat, "%s" % log.abspath())
exists(log)
self.ctx.out("")
if log.exists():
files = log.files()
files = set([x.basename() for x in files])
# Adding known names just in case
for x in knownfiles:
files.add(x)
files = list(files)
files.sort()
for x in files:
item(cat2, x)
exists(log / x)
item(cat2, "Total size")
sz = 0
for x in log.walkfiles():
sz += x.size
self.ctx.out("%-.2f MB" % (float(sz)/1000000.0))
log_dir(self.ctx.dir / "var" / "log", "Log dir", "Log files",
["Blitz-0.log", "Tables-0.log", "Processor-0.log",
"Indexer-0.log", "FileServer.log", "MonitorServer.log",
"DropBox.log", "TestDropBox.log", "OMEROweb.log"])
# Parsing well known issues
self.ctx.out("")
ready = re.compile(".*?ome.services.util.ServerVersionCheck\
.*OMERO.Version.*Ready..*?")
db_ready = re.compile(".*?Did.you.create.your.database[?].*?")
data_dir = re.compile(".*?Unable.to.initialize:.FullText.*?")
pg_password = re.compile(".*?org.postgresql.util.PSQLException:\
.FATAL:.password.*?authentication.failed.for.user.*?")
pg_user = re.compile(""".*?org.postgresql.util.PSQLException:\
.FATAL:.role.".*?".does.not.exist.*?""")
pg_conn = re.compile(""".*?org.postgresql.util.PSQLException:\
.Connection.refused.""")
issues = {
ready: "=> Server restarted <=",
db_ready: "Your database configuration is invalid",
data_dir: "Did you create your omero.data.dir? E.g. /OMERO",
pg_password: "Your postgres password seems to be invalid",
pg_user: "Your postgres user is invalid",
pg_conn: "Your postgres hostname and/or port is invalid"
}
try:
for file in ('Blitz-0.log',):
p = self.ctx.dir / "var" / "log" / file
import fileinput
for line in fileinput.input([str(p)]):
lno = fileinput.filelineno()
for k, v in issues.items():
if k.match(line):
item('Parsing %s' % file, "[line:%s] %s"
% (lno, v))
self.ctx.out("")
break
except:
self.ctx.err("Error while parsing logs")
self.ctx.out("")
def env_val(val):
item("Environment", "%s=%s"
% (val, os.environ.get(val, "(unset)")))
self.ctx.out("")
env_val("OMERO_HOME")
env_val("OMERO_NODE")
env_val("OMERO_MASTER")
env_val("OMERO_TEMPDIR")
env_val("PATH")
env_val("ICE_HOME")
env_val("LD_LIBRARY_PATH")
env_val("DYLD_LIBRARY_PATH")
self.ctx.out("")
for dir_name, dir_path, dir_size in (
("data", omero_data_dir, ""),
("temp", omero_temp_dir, True)):
dir_path_exists = os.path.exists(dir_path)
is_writable = os.access(dir_path, os.R_OK | os.W_OK)
if dir_size and dir_path_exists:
dir_size = self.getdirsize(omero_temp_dir)
dir_size = " (Size: %s)" % dir_size
self.ctx.out("OMERO %s dir: '%s'\tExists? %s\tIs writable? %s%s" %
(dir_name, dir_path, dir_path_exists, is_writable,
dir_size))
from omero.plugins.web import WebControl
try:
WebControl().status(args)
except:
self.ctx.out("OMERO.web not installed!")
def getdirsize(self, directory):
total = 0
for values in os.walk(directory):
for filename in values[2]:
total += os.path.getsize(os.path.join(values[0], filename))
return total
def session_manager(self, communicator):
import IceGrid
import Glacier2
iq = communicator.stringToProxy("IceGrid/Query")
iq = IceGrid.QueryPrx.checkedCast(iq)
sm = iq.findAllObjectsByType("::Glacier2::SessionManager")[0]
sm = Glacier2.SessionManagerPrx.checkedCast(sm)
return sm
def can_access(self, filepath, mask=os.R_OK | os.W_OK):
"""
Check that the given path belongs to
or is accessible by the current user
on non-Windows systems.
"""
if "Windows" == platform.system():
return
pathobj = path(filepath)
if not pathobj.exists():
self.ctx.die(8, "FATAL: OMERO directory does not exist: %s"
% pathobj)
owner = os.stat(filepath)[stat.ST_UID]
if owner == 0:
msg = ""
msg += "FATAL: OMERO directory which needs to be writeable"\
" belongs to root: %s\n" % filepath
msg += "Please use \"chown -R NEWUSER %s\" and run as then"\
" run %s as NEWUSER" % (filepath, sys.argv[0])
self.ctx.die(9, msg)
else:
if not os.access(filepath, mask):
self.ctx.die(10, "FATAL: Cannot access %s, a required"
" file/directory for OMERO" % filepath)
def check_access(self, mask=os.R_OK | os.W_OK, config=None):
"""Check that 'var' is accessible by the current user."""
var = self.ctx.dir / 'var'
if not os.path.exists(var):
self.ctx.out("Creating directory %s" % var)
os.makedirs(var, 0700)
else:
self.can_access(var, mask)
if config is not None:
omero_data_dir = '/OMERO'
config = config.as_map()
try:
omero_data_dir = config['omero.data.dir']
except KeyError:
pass
self.can_access(omero_data_dir)
for p in os.listdir(var):
subpath = os.path.join(var, p)
if os.path.isdir(subpath):
self.can_access(subpath, mask)
def check_node(self, args):
"""
If the args argparse.Namespace argument has no "node" attribute,
then assign one.
"""
if not hasattr(args, "node"):
args.node = self._node()
def checkice(self, args=None):
"""
Checks for Ice version 3.4
See ticket:2514, ticket:1260
"""
def _check(msg, vers):
compat = ice_compatibility.split(".")
vers = vers.split(".")
if compat[0:2] != vers[0:2]:
self.ctx.die(164, "%s is not compatible with %s: %s"
% (msg, ".".join(compat), ".".join(vers)))
import Ice
vers = Ice.stringVersion()
_check("IcePy version", vers)
popen = self.ctx.popen(["icegridnode", "--version"])
vers = popen.communicate()[1]
_check("icegridnode version", vers)
def open_config(self, unused):
"""
Callers are responsible for closing the
returned ConfigXml object.
"""
cfg_xml = self.ctx.dir / "etc" / "grid" / "config.xml"
cfg_tmp = self.ctx.dir / "etc" / "grid" / "config.xml.tmp"
grid_dir = self.ctx.dir / "etc" / "grid"
if not cfg_xml.exists() and self.can_access(grid_dir):
if cfg_tmp.exists() and self.can_access(cfg_tmp):
self.ctx.dbg("Removing old config.xml.tmp")
cfg_tmp.remove()
config = omero.config.ConfigXml(str(cfg_tmp))
try:
self.ctx.controls["config"].upgrade(None, config)
finally:
config.close()
self.ctx.err("Creating %s" % cfg_xml)
cfg_tmp.rename(str(cfg_xml))
try:
try:
config = omero.config.ConfigXml(str(cfg_xml))
except Exception, e:
self.ctx.die(577, str(e))
if config.save_on_close:
config.save()
else:
self.ctx.err("%s read-only" % cfg_xml)
except portalocker.LockException:
try:
config.close()
except:
pass
self.ctx.die(111, "Could not acquire lock on %s" % cfg_xml)
return config
@with_config
def reindex(self, args, config):
self.check_access(config=config)
import omero.java
server_dir = self.ctx.dir / "lib" / "server"
log4j = "-Dlog4j.configuration=log4j-cli.properties"
classpath = [file.abspath() for file in server_dir.files("*.jar")]
xargs = [log4j, "-Xmx1024M", "-cp", os.pathsep.join(classpath)]
cfg = config.as_map()
config.close() # Early close. See #9800
for x in ("name", "user", "host", "port"):
# NOT passing password on command-line
k = "omero.db.%s" % x
if k in cfg:
v = cfg[k]
xargs.append("-D%s=%s" % (k, v))
if "omero.data.dir" in cfg:
xargs.append("-Domero.data.dir=%s" % cfg["omero.data.dir"])
for k, v in cfg.items():
if k.startswith("omero.search"):
xargs.append("-D%s=%s" % (k, cfg[k]))
cmd = ["ome.services.fulltext.Main"]
if args.full:
cmd.append("full")
elif args.events:
cmd.append("events")
elif getattr(args, "class"):
cmd.append("reindex")
cmd.extend(getattr(args, "class"))
else:
self.ctx.die(502, "No valid action: %s" % args)
debug = False
if getattr(args, "jdwp"):
debug = True
self.ctx.dbg(
"Launching Java: %s, debug=%s, xargs=%s" % (cmd, debug, xargs))
p = omero.java.popen(
cmd, debug=debug, xargs=xargs, stdout=sys.stdout,
stderr=sys.stderr) # FIXME. Shouldn't use std{out,err}
self.ctx.rv = p.wait()
def ports(self, args):
self.check_access()
from omero.install.change_ports import change_ports
if not args.skipcheck:
if 0 == self.status(args, node_only=True):
self.ctx.die(
100, "Can't change ports while the server is running!")
# Resetting return value.
self.ctx.rv = 0
if args.prefix:
for x in ("registry", "tcp", "ssl"):
setattr(args, x, "%s%s" % (args.prefix, getattr(args, x)))
change_ports(
args.ssl, args.tcp, args.registry, args.revert, dir=self.ctx.dir)
def cleanse(self, args):
self.check_access()
from omero.util.cleanse import cleanse
client = self.ctx.conn(args)
cleanse(data_dir=args.data_dir, dry_run=args.dry_run,
query_service=client.sf.getQueryService(),
config_service=client.sf.getConfigService())
def sessionlist(self, args):
client = self.ctx.conn(args)
service = client.sf.getQueryService()
params = omero.sys.ParametersI()
query = "select s from Session s join fetch s.node n join fetch"\
" s.owner o where s.closed is null and n.id != 0"
results = service.findAllByQuery(query, params)
mapped = list()
for s in results:
rv = list()
mapped.append(rv)
if not s.isLoaded():
rv.append("")
rv.append("id=%s" % s.id.val)
rv.append("")
rv.append("")
rv.append("")
rv.append("insufficient privileges")
else:
rv.append(s.node.id)
rv.append(s.uuid)
rv.append(s.started)
rv.append(s.owner.omeName)
if s.userAgent is None:
rv.append("")
else:
rv.append(s.userAgent)
if client.getSessionId() == s.uuid.val:
rv.append("current session")
else:
rv.append("")
self.ctx.controls["hql"].display(
mapped, ("node", "session", "started", "owner", "agent", "notes"))
try:
register("admin", AdminControl, HELP)
except NameError:
if __name__ == "__main__":
cli = CLI()
cli.register("admin", AdminControl, HELP)
cli.invoke(sys.argv[1:])
|
armStrapTools/linux-sunxi-ap6210
|
refs/heads/sunxi-3.4
|
scripts/tracing/draw_functrace.py
|
14679
|
#!/usr/bin/python
"""
Copyright 2008 (c) Frederic Weisbecker <fweisbec@gmail.com>
Licensed under the terms of the GNU GPL License version 2
This script parses a trace provided by the function tracer in
kernel/trace/trace_functions.c
The resulting trace is processed into a tree to produce a more human
view of the call stack by drawing a textual but hierarchical tree of
calls. Only the functions' names and the call times are provided.
Usage:
Be sure that you have CONFIG_FUNCTION_TRACER
# mount -t debugfs nodev /sys/kernel/debug
# echo function > /sys/kernel/debug/tracing/current_tracer
$ cat /sys/kernel/debug/tracing/trace_pipe > ~/raw_trace_func
Wait some time, but not too much: the script is a bit slow.
Break the pipe (Ctrl + Z)
$ scripts/draw_functrace.py < raw_trace_func > draw_functrace
Then you have your drawn trace in draw_functrace
"""
import sys, re
class CallTree:
""" This class provides a tree representation of the functions
call stack. If a function has no parent in the kernel (interrupt,
syscall, kernel thread...) then it is attached to a virtual parent
called ROOT.
"""
ROOT = None
def __init__(self, func, time = None, parent = None):
self._func = func
self._time = time
if parent is None:
self._parent = CallTree.ROOT
else:
self._parent = parent
self._children = []
def calls(self, func, calltime):
""" If a function calls another one, call this method to insert it
into the tree at the appropriate place.
@return: A reference to the newly created child node.
"""
child = CallTree(func, calltime, self)
self._children.append(child)
return child
def getParent(self, func):
""" Retrieve the last parent of the current node that
has the name given by func. If this function is not
found among the parents, then create it as a new child of root.
@return: A reference to the parent.
"""
tree = self
while tree != CallTree.ROOT and tree._func != func:
tree = tree._parent
if tree == CallTree.ROOT:
child = CallTree.ROOT.calls(func, None)
return child
return tree
def __repr__(self):
return self.__toString("", True)
def __toString(self, branch, lastChild):
if self._time is not None:
s = "%s----%s (%s)\n" % (branch, self._func, self._time)
else:
s = "%s----%s\n" % (branch, self._func)
i = 0
if lastChild:
branch = branch[:-1] + " "
while i < len(self._children):
if i != len(self._children) - 1:
s += "%s" % self._children[i].__toString(branch +\
" |", False)
else:
s += "%s" % self._children[i].__toString(branch +\
" |", True)
i += 1
return s
class BrokenLineException(Exception):
"""If the last line is not complete because of the pipe breakage,
we want to stop the processing and ignore this line.
"""
pass
class CommentLineException(Exception):
""" If the line is a comment (as in the beginning of the trace file),
just ignore it.
"""
pass
def parseLine(line):
line = line.strip()
if line.startswith("#"):
raise CommentLineException
m = re.match("[^]]+?\\] +([0-9.]+): (\\w+) <-(\\w+)", line)
if m is None:
raise BrokenLineException
return (m.group(1), m.group(2), m.group(3))
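# A minimal sketch of what parseLine consumes and yields; the sample line is
# hypothetical, modeled on the function tracer's "callee <-caller" output:
#
# parseLine("bash-4251 [001] 1339.585771: kmem_cache_free <-putname")
# # -> ('1339.585771', 'kmem_cache_free', 'putname'), i.e. (time, callee, caller)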
def main():
CallTree.ROOT = CallTree("Root (Nowhere)", None, None)
tree = CallTree.ROOT
for line in sys.stdin:
try:
calltime, callee, caller = parseLine(line)
except BrokenLineException:
break
except CommentLineException:
continue
tree = tree.getParent(caller)
tree = tree.calls(callee, calltime)
print CallTree.ROOT
if __name__ == "__main__":
main()
|
BartKeulen/drl
|
refs/heads/develop
|
drl/algorithms/naf/__init__.py
|
1
|
from .naf import NAF
from .network import NAFNetwork
|
tundish/rson
|
refs/heads/master
|
rson/tests/test_unquoted.py
|
3
|
from unittest import TestCase
import os
import sys
from rson.tests.read_samples import data as samples
from rson.py23 import unicode
# Really basic test to start with -- add more later
def expected(s):
if s == 'true': return True
if s == 'false': return False
if s == 'null': return None
try:
return int(s.replace('_', ''), 0)
except:
pass
try:
return float(s)
except:
return unicode(s)
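# A few concrete mappings (int() is called with base 0, so hex prefixes are
# handled, and underscores are stripped first):
# expected('true') is True; expected('0x10') == 16
# expected('1_000') == 1000; expected('-3.7e5') == -370000.0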
class TestUnquoted(TestCase):
def setUp(self):
from rson.base import Tokenizer, UnquotedToken
self.t = Tokenizer.factory()
self.u = UnquotedToken().unquoted_parse_factory()
def test_simple(self):
tests = ''' 0 0.0 false true null 1.2 -3.7e5 Hey there how ya doin? '''.replace(' ', '\n')
tokens = list(reversed(self.t(tests, self)))
tokens.pop()
a = list(map(self.u, tokens, tokens))
b = list(map(expected, tests.split()))
self.assert_(a == b)
|
cxxgtxy/tensorflow
|
refs/heads/master
|
tensorflow/contrib/framework/python/ops/arg_scope.py
|
58
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Contains the arg_scope used for scoping layers arguments.
Allows one to define models much more compactly by eliminating boilerplate
code. This is accomplished through the use of argument scoping (arg_scope).
Example of how to use tf.contrib.framework.arg_scope:
```
from third_party.tensorflow.contrib.layers.python import layers
arg_scope = tf.contrib.framework.arg_scope
with arg_scope([layers.conv2d], padding='SAME',
initializer=layers.variance_scaling_initializer(),
regularizer=layers.l2_regularizer(0.05)):
net = layers.conv2d(inputs, 64, [11, 11], 4, padding='VALID', scope='conv1')
net = layers.conv2d(net, 256, [5, 5], scope='conv2')
```
The first call to conv2d will behave as follows:
layers.conv2d(inputs, 64, [11, 11], 4, padding='VALID',
initializer=layers.variance_scaling_initializer(),
regularizer=layers.l2_regularizer(0.05), scope='conv1')
The second call to conv2d will also use the arg_scope's default for padding:
layers.conv2d(inputs, 256, [5, 5], padding='SAME',
initializer=layers.variance_scaling_initializer(),
regularizer=layers.l2_regularizer(0.05), scope='conv2')
Example of how to reuse an arg_scope:
```
with arg_scope([layers.conv2d], padding='SAME',
initializer=layers.variance_scaling_initializer(),
regularizer=layers.l2_regularizer(0.05)) as sc:
net = layers.conv2d(net, 256, [5, 5], scope='conv1')
....
with arg_scope(sc):
net = layers.conv2d(net, 256, [5, 5], scope='conv2')
```
Example of how to use tf.contrib.framework.add_arg_scope to enable your function to be called within an arg_scope later:
@tf.contrib.framework.add_arg_scope
def conv2d(*args, **kwargs):
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.util import tf_contextlib
from tensorflow.python.util import tf_decorator
__all__ = ['arg_scope',
'add_arg_scope',
'has_arg_scope',
'arg_scoped_arguments']
_ARGSTACK = [{}]
_DECORATED_OPS = {}
def _get_arg_stack():
if _ARGSTACK:
return _ARGSTACK
else:
_ARGSTACK.append({})
return _ARGSTACK
def _current_arg_scope():
stack = _get_arg_stack()
return stack[-1]
def _key_op(op):
return getattr(op, '_key_op', str(op))
def _name_op(op):
return (op.__module__, op.__name__)
def _kwarg_names(func):
kwargs_length = len(func.__defaults__) if func.__defaults__ else 0
return func.__code__.co_varnames[-kwargs_length:func.__code__.co_argcount]
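# A quick illustration with a hypothetical function: only the trailing
# parameters that carry defaults are returned, which is exactly the set of
# arguments an arg_scope is allowed to override.
#
# def _demo(a, b, padding='SAME', rate=1): pass
# # _kwarg_names(_demo) == ('padding', 'rate')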
def _add_op(op):
key_op = _key_op(op)
if key_op not in _DECORATED_OPS:
_DECORATED_OPS[key_op] = _kwarg_names(op)
@tf_contextlib.contextmanager
def arg_scope(list_ops_or_scope, **kwargs):
"""Stores the default arguments for the given set of list_ops.
For usage, please see examples at top of the file.
Args:
list_ops_or_scope: List or tuple of operations to set argument scope for or
a dictionary containing the current scope. When list_ops_or_scope is a
dict, kwargs must be empty. When list_ops_or_scope is a list or tuple,
then every op in it needs to be decorated with @add_arg_scope to work.
**kwargs: keyword=value that will define the defaults for each op in
list_ops. All the ops need to accept the given set of arguments.
Yields:
the current_scope, which is a dictionary of {op: {arg: value}}
Raises:
TypeError: if list_ops is not a list or a tuple.
ValueError: if any op in list_ops has not been decorated with @add_arg_scope.
"""
if isinstance(list_ops_or_scope, dict):
# Assumes that list_ops_or_scope is a scope that is being reused.
if kwargs:
raise ValueError('When attempting to re-use a scope by supplying a '
'dictionary, kwargs must be empty.')
current_scope = list_ops_or_scope.copy()
try:
_get_arg_stack().append(current_scope)
yield current_scope
finally:
_get_arg_stack().pop()
else:
# Assumes that list_ops_or_scope is a list/tuple of ops with kwargs.
if not isinstance(list_ops_or_scope, (list, tuple)):
raise TypeError('list_ops_or_scope must either be a list/tuple or reused '
'scope (i.e. dict)')
try:
current_scope = _current_arg_scope().copy()
for op in list_ops_or_scope:
key_op = _key_op(op)
if not has_arg_scope(op):
raise ValueError('%s is not decorated with @add_arg_scope',
_name_op(op))
if key_op in current_scope:
current_kwargs = current_scope[key_op].copy()
current_kwargs.update(kwargs)
current_scope[key_op] = current_kwargs
else:
current_scope[key_op] = kwargs.copy()
_get_arg_stack().append(current_scope)
yield current_scope
finally:
_get_arg_stack().pop()
def add_arg_scope(func):
"""Decorates a function with args so it can be used within an arg_scope.
Args:
func: function to decorate.
Returns:
The decorated function func_with_args().
"""
def func_with_args(*args, **kwargs):
current_scope = _current_arg_scope()
current_args = kwargs
key_func = _key_op(func)
if key_func in current_scope:
current_args = current_scope[key_func].copy()
current_args.update(kwargs)
return func(*args, **current_args)
_add_op(func)
setattr(func_with_args, '_key_op', _key_op(func))
return tf_decorator.make_decorator(func, func_with_args)
def has_arg_scope(func):
"""Checks whether a func has been decorated with @add_arg_scope or not.
Args:
func: function to check.
Returns:
a boolean.
"""
return _key_op(func) in _DECORATED_OPS
def arg_scoped_arguments(func):
"""Returns the list kwargs that arg_scope can set for a func.
Args:
func: function which has been decorated with @add_arg_scope.
Returns:
a list of kwargs names.
"""
assert has_arg_scope(func)
return _DECORATED_OPS[_key_op(func)]
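# A minimal end-to-end sketch with a hypothetical op, tying the decorator and
# the context manager together (scope defaults apply unless overridden):
#
# @add_arg_scope
# def pad_op(x, padding='VALID'):
#     return (x, padding)
#
# with arg_scope([pad_op], padding='SAME'):
#     assert pad_op(1) == (1, 'SAME')                    # scope default applied
#     assert pad_op(1, padding='VALID') == (1, 'VALID')  # explicit kwarg wins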
|
tiagocoutinho/bliss
|
refs/heads/master
|
bliss/controllers/motors/icepap/__init__.py
|
1
|
# -*- coding: utf-8 -*-
#
# This file is part of the bliss project
#
# Copyright (c) 2017 Beamline Control Unit, ESRF
# Distributed under the GNU LGPLv3. See LICENSE for more info.
import re
import time
import gevent
import functools
from bliss.common.greenlet_utils import protect_from_kill
from bliss.controllers.motor import Controller
from bliss.common.axis import AxisState,Axis
from bliss.common.utils import object_method
from bliss.comm.tcp import Command
import struct
import numpy
import sys
class Icepap(Controller):
"""
IcePAP stepper controller without the Deep Technology of Communication (DTC).
If you prefer to have DTC, use the IcePAP controller class instead.
Use this controller class at your own risk, because you won't
have any support...
"""
STATUS_DISCODE = {
0 : ('POWERENA', 'power enabled'),
1 : ('NOTACTIVE', 'axis configured as not active'),
2 : ('ALARM', 'alarm condition'),
3 : ('REMRACKDIS', 'remote rack disable input signal'),
4 : ('LOCRACKDIS', 'local rack disable switch'),
5 : ('REMAXISDIS', 'remote axis disable input signal'),
6 : ('LOCAXISDIS', 'local axis disable switch'),
7 : ('SOFTDIS', 'software disable'),
}
STATUS_MODCODE = {
0 : ('OPER', 'operation mode'),
1 : ('PROG', 'programming mode'),
2 : ('TEST', 'test mode'),
3 : ('FAIL', 'fail mode'),
}
STATUS_STOPCODE = {
0 : ('SCEOM', 'end of movement'),
1 : ('SCSTOP', 'last motion was stopped'),
2 : ('SCABORT', 'last motion was aborted'),
3 : ('SCLIMPOS', 'positive limitswitch reached'),
4 : ('SCLINNEG', 'negative limitswitch reached'),
5 : ('SCSETTLINGTO', 'settling timeout'),
6 : ('SCAXISDIS', 'axis disabled (no alarm)'),
7 : ('SCBIT7', 'n/a'),
8 : ('SCINTFAIL', 'internal failure'),
9 : ('SCMOTFAIL', 'motor failure'),
10 : ('SCPOWEROVL', 'power overload'),
11 : ('SCHEATOVL', 'driver overheating'),
12 : ('SCCLERROR', 'closed loop error'),
13 : ('SCCENCERROR', 'control encoder error'),
14 : ('SCBIT14', 'n/a'),
15 : ('SCEXTALARM', 'external alarm'),
}
def __init__(self,*args,**kwargs):
Controller.__init__(self,*args,**kwargs)
self._cnx = None
self._last_axis_power_time = dict()
def initialize(self):
hostname = self.config.get("host")
self._cnx = Command(hostname,5000,eol='\n')
self._icestate = AxisState()
self._icestate.create_state("POWEROFF", "motor power is off")
for codes in (self.STATUS_DISCODE,self.STATUS_MODCODE,self.STATUS_STOPCODE):
for state,desc in codes.values():
self._icestate.create_state(state,desc)
def finalize(self):
if self._cnx is not None:
self._cnx.close()
def initialize_axis(self,axis):
axis.address = axis.config.get("address",lambda x: x)
if hasattr(axis,'_init_software'):
axis._init_software()
def initialize_hardware_axis(self,axis):
if axis.config.get('autopower', converter=bool, default=True):
try:
self.set_on(axis)
except:
sys.excepthook(*sys.exc_info())
if hasattr(axis,'_init_hardware'):
axis._init_hardware()
#Axis power management
def set_on(self,axis):
"""
Put the axis power on
"""
self._power(axis,True)
def set_off(self,axis):
"""
Put the axis power off
"""
self._power(axis,False)
def _power(self,axis,power):
_ackcommand(self._cnx,"POWER %s %s" %
("ON" if power else "OFF",axis.address))
self._last_axis_power_time[axis] = time.time()
def read_position(self,axis,cache=True):
pos_cmd = "FPOS" if cache else "POS"
return int(_command(self._cnx,"?%s %s" % (pos_cmd,axis.address)))
def set_position(self,axis,new_pos):
if isinstance(axis,SlaveAxis):
pre_cmd = "%d:DISPROT LINKED;" % axis.address
else:
pre_cmd = None
_ackcommand(self._cnx,"POS %s %d" % (axis.address,int(round(new_pos))),
pre_cmd = pre_cmd)
return self.read_position(axis,cache=False)
def read_velocity(self,axis):
return float(_command(self._cnx,"?VELOCITY %s" % axis.address))
def set_velocity(self,axis,new_velocity):
_ackcommand(self._cnx,"VELOCITY %s %f" %
(axis.address,new_velocity))
return self.read_velocity(axis)
def read_acceleration(self,axis):
acctime = float(_command(self._cnx,"?ACCTIME %s" % axis.address))
velocity = self.read_velocity(axis)
return velocity/float(acctime)
def set_acceleration(self,axis,new_acc):
velocity = self.read_velocity(axis)
new_acctime = velocity/new_acc
_ackcommand(self._cnx,"ACCTIME %s %f" % (axis.address,new_acctime))
return self.read_acceleration(axis)
def state(self,axis):
last_power_time = self._last_axis_power_time.get(axis,0)
if time.time() - last_power_time < 1.:
status_cmd = "?STATUS"
else:
self._last_axis_power_time.pop(axis,None)
status_cmd = "?FSTATUS"
status = int(_command(self._cnx,"%s %s" %
(status_cmd,axis.address)),16)
status ^= 1<<23 #neg POWERON FLAG
state = self._icestate.new()
for mask,value in (((1<<9),"READY"),
((1<<10|1<<11),"MOVING"),
((1<<18),"LIMPOS"),
((1<<19),"LIMNEG"),
((1<<20),"HOME"),
((1<<23),"POWEROFF")):
if status & mask:
state.set(value)
state_mode = (status >> 2) & 0x3
if state_mode:
state.set(self.STATUS_MODCODE.get(state_mode)[0])
stop_code = (status >> 14) & 0xf
if stop_code:
state.set(self.STATUS_STOPCODE.get(stop_code)[0])
disable_condition = (status >> 4) & 0x7
if disable_condition:
state.set(self.STATUS_DISCODE.get(disable_condition)[0])
if state.READY:
#if motor is ready then no need to investigate deeper
return state
if not state.MOVING:
# it seems it is not safe to call warning and/or alarm commands
# while homing motor, so let's not ask if motor is moving
if status & (1<<13):
try:
warning = _command(self._cnx,"%d:?WARNING" % axis.address)
except TypeError:
pass
else:
warn_str = "warning condition: \n" + warning
# set the warning on the AxisState object, not the raw status bitmask
state.create_state("WARNING",warn_str)
state.set("WARNING")
try:
alarm = _command(self._cnx,"%d:?ALARM" % axis.address)
except (RuntimeError,TypeError):
pass
else:
if alarm != "NO":
alarm_dsc = "alarm condition: " + str(alarm)
state.create_state("ALARMDESC",alarm_dsc)
state.set("ALARMDESC")
return state
def get_info(self,axis):
pre_cmd = '%s:' % axis.address
r = "MOTOR : %s\n" % axis.name
r += "SYSTEM : %s (ID: %s) (VER: %s)\n" % (self._cnx._host,
_command(self._cnx,"0:?ID"),
_command(self._cnx,"?VER"))
r += "DRIVER : %s\n" % axis.address
r += "POWER : %s\n" % _command(self._cnx,pre_cmd + "?POWER")
r += "CLOOP : %s\n" % _command(self._cnx,pre_cmd + "?PCLOOP")
r += "WARNING : %s\n" % _command(self._cnx,pre_cmd + "?WARNING")
r += "ALARM : %s\n" % _command(self._cnx,pre_cmd + "?ALARM")
return r
def raw_write(self,message,data = None):
return _command(self._cnx,message,data)
def raw_write_read(self,message,data = None):
return _ackcommand(self._cnx,message,data)
def prepare_move(self,motion):
pass
def start_one(self,motion):
if isinstance(motion.axis,SlaveAxis):
pre_cmd = "%d:DISPROT LINKED;" % motion.axis.address
else:
pre_cmd = None
_ackcommand(self._cnx,"MOVE %s %d" % (motion.axis.address,
motion.target_pos),
pre_cmd = pre_cmd)
def start_all(self,*motions):
if len(motions) > 1:
cmd = "MOVE GROUP "
cmd += ' '.join(["%s %d" % (m.axis.address,m.target_pos) for m in motions])
_ackcommand(self._cnx,cmd)
elif motions:
self.start_one(motions[0])
def stop(self,axis):
_command(self._cnx,"STOP %s" % axis.address)
def stop_all(self,*motions):
for motion in motions:
self.stop(motion.axis)
def home_search(self,axis,switch):
cmd = "HOME " + ("+1" if switch > 0 else "-1")
_ackcommand(self._cnx,"%s:%s" % (axis.address,cmd))
# IcePAP status is not immediately MOVING after home search command is sent
gevent.sleep(0.2)
def home_state(self,axis):
s = self.state(axis)
if s != 'READY' and s != 'POWEROFF':
s.set('MOVING')
return s
def limit_search(self,axis,limit):
cmd = "SRCH LIM" + ("+" if limit>0 else "-")
_ackcommand(self._cnx,"%s:%s" % (axis.address,cmd))
# TODO: MG18Nov14: remove this sleep (state is not immediately MOVING)
gevent.sleep(0.1)
def initialize_encoder(self,encoder):
# Get axis config from bliss config
# address form is XY : X=rack {0..?} Y=driver {1..8}
encoder.address = encoder.config.get("address", int)
# Get optional encoder input to read
enctype = encoder.config.get("type",str,"ENCIN").upper()
# Minimal check on the encoder input
if enctype not in ['ENCIN', 'ABSENC', 'INPOS', 'MOTOR', 'AXIS', 'SYNC']:
raise ValueError('Invalid encoder type')
encoder.enctype = enctype
def read_encoder(self,encoder):
value = _command(self._cnx,"?ENC %s %d" % (encoder.enctype,encoder.address))
return int(value)
def set_encoder(self,encoder,steps):
_ackcommand(self._cnx,"ENC %s %d %d" %
(encoder.enctype,encoder.address,steps))
def set_event_positions(self,axis_or_encoder,positions):
int_position = numpy.array(positions,dtype=numpy.int32)
# positions have to be ordered
int_position.sort()
address = axis_or_encoder.address
if not len(int_position):
_ackcommand(self._cnx,"%s:ECAMDAT CLEAR" % address)
return
if isinstance(axis_or_encoder,Axis):
source = 'AXIS'
else: # encoder
source = 'MEASURE'
#load trigger positions
_ackcommand(self._cnx,"%s:*ECAMDAT %s DWORD" % (address,source),
int_position)
# send the trigger on the multiplexer
_ackcommand(self._cnx,"%s:SYNCAUX eCAM" % address)
def get_event_positions(self,axis_or_encoder):
"""
For this controller this method should be used
for debugging purposes only...
"""
address = axis_or_encoder.address
#Get the number of positions
reply = _command(self._cnx,"%d:?ECAMDAT" % address)
reply_exp = re.compile("(\w+) +([+-]?\d+) +([+-]?\d+) +(\d+)")
m = reply_exp.match(reply)
if m is None:
raise RuntimeError("Reply Didn't expected: %s" % reply)
source = m.group(1)
nb = int(m.group(4))
if isinstance(axis_or_encoder,Axis):
nb = nb if source == 'AXIS' else 0
else: # encoder
nb = nb if source == "MEASURE" else 0
positions = numpy.zeros((nb,),dtype = numpy.int32)
if nb > 0:
reply_exp = re.compile(".+: +([+-]?\d+)")
reply = _command(self._cnx,"%d:?ECAMDAT %d" % (address,nb))
for i,line in enumerate(reply.split('\n')):
m = reply_exp.match(line)
if m:
pos = int(m.group(1))
positions[i] = pos
return positions
def get_linked_axis(self):
reply = _command(self._cnx,"?LINKED")
linked = dict()
for line in reply.strip().split('\n'):
values = line.split()
linked[values[0]] = [int(x) for x in values[1:]]
return linked
@object_method(types_info=("bool","bool"))
def activate_closed_loop(self,axis,active):
_command(self._cnx,"#%s:PCLOOP %s" % (axis.address,"ON" if active else "OFF"))
return active
@object_method(types_info=("None","bool"))
def is_closed_loop_activate(self,axis):
return True if _command(self._cnx,"%s:?PCLOOP" % axis.address) == 'ON' else False
@object_method(types_info=("None","None"))
def reset_closed_loop(self,axis):
measure_position = int(_command(self._cnx,"%s:?POS MEASURE" % axis.address))
self.set_position(axis,measure_position)
if axis.config.get('autopower', converter=bool, default=True):
self.set_on(axis)
axis.sync_hard()
@object_method(types_info=("None","int"))
def temperature(self,axis):
return int(_command(self._cnx,"%s:?MEAS T" % axis.address))
@object_method(types_info=(("float","bool"),"None"))
def set_tracking_positions(self,axis,positions,cyclic = False):
"""
Send positions to the controller, which will be tracked.
positions -- expressed in user units
cyclic -- whether the positions are cyclic, default False
@see activate_tracking method
"""
address = axis.address
if not len(positions):
_ackcommand(self._cnx,"%s:LISTDAT CLEAR" % address)
return
dial_positions = axis.user2dial(numpy.array(positions, dtype=numpy.float))
step_positions = numpy.array(dial_positions * axis.steps_per_unit,
dtype=numpy.int32)
_ackcommand(self._cnx,"%d:*LISTDAT %s DWORD" %
(address, "CYCLIC" if cyclic else "NOCYCLIC"),
step_positions)
@object_method(types_info=("None",("float","bool")))
def get_tracking_positions(self,axis):
"""
Get the tracking positions.
This method should only be used for debugging.
Returns a tuple of (positions, cyclic flag).
"""
address = axis.address
#Get the number of positions
reply = _command(self._cnx,"%d:?LISTDAT" % address)
reply_exp = re.compile("(\d+) *(\w+)?")
m = reply_exp.match(reply)
if m is None:
raise RuntimeError("Reply didn't expected: %s" % reply)
nb = int(m.group(1))
positions = numpy.zeros((nb,),dtype = numpy.int32)
cyclic = True if m.group(2) == "CYCLIC" else False
if nb > 0:
reply_exp = re.compile(".+: +([+-]?\d+)")
reply = _command(self._cnx,"%d:?LISTDAT %d" % (address,nb))
for i,line in enumerate(reply.split('\n')):
m = reply_exp.match(line)
if m:
pos = int(m.group(1))
positions[i] = pos
dial_positions = positions / axis.steps_per_unit
positions = axis.dial2user(dial_positions)
return positions,cyclic
@object_method(types_info=(("bool","str"),"None"))
def activate_tracking(self,axis,activate,mode = None):
"""
Activate/Deactivate the tracking position depending on
activate flag
mode -- default "INPOS" if None.
mode can be :
- SYNC -> Internal SYNC signal
- ENCIN -> ENCIN signal
- INPOS -> INPOS signal
- ABSENC -> ABSENC signal
"""
address = axis.address
if not activate:
_ackcommand(self._cnx,"STOP %d" % address)
axis.sync_hard()
else:
if mode is None: mode = "INPOS"
possibles_modes = ["SYNC","ENCIN","INPOS","ABSENC"]
if mode not in possibles_modes:
raise ValueError("mode %s is not managed, can only choose %s" %
(mode,possibles_modes))
if mode == "INPOS":
_ackcommand(self._cnx, "%d:POS INPOS 0" % address)
_ackcommand(self._cnx,"%d:LTRACK %s" % (address,mode))
@object_method(types_info=("float", "None"))
def blink(self, axis, second=3.):
"""
Blink axis driver
"""
_command(self._cnx,"%d:BLINK %f" % (axis.address, second))
def reset(self):
_command(self._cnx,"RESET")
def mdspreset(self):
"""
Reset the MASTER DSP
"""
_command(self._cnx,"_dsprst")
def reboot(self):
_command(self._cnx,"REBOOT")
self._cnx.close()
_check_reply = re.compile("^[#?]|^[0-9]+:\?")
@protect_from_kill
def _command(cnx,cmd,data = None,pre_cmd = None):
if data is not None:
uint16_view = data.view(dtype=numpy.uint16)
data_checksum = uint16_view.sum()
header = struct.pack("<III",
0xa5aa555a, # Header key
len(uint16_view),int(data_checksum) & 0xffffffff)
data_test = data.newbyteorder('<')
if len(data_test) and data_test[0] != data[0]: # not good endianness
data = data.byteswap()
full_cmd = "%s\n%s%s" % (cmd,header,data.tostring())
transaction = cnx._write(full_cmd)
else:
full_cmd = "%s%s\n" % (pre_cmd or '',cmd)
transaction = cnx._write(full_cmd)
with cnx.Transaction(cnx,transaction) :
if _check_reply.match(cmd):
msg = cnx._readline(transaction=transaction,
clear_transaction=False)
cmd = cmd.strip('#').split(' ')[0]
msg = msg.replace(cmd + ' ','')
if msg.startswith('$'):
msg = cnx._readline(transaction=transaction,
clear_transaction=False,eol='$\n')
elif msg.startswith('ERROR'):
raise RuntimeError(msg.replace('ERROR ',''))
return msg.strip(' ')
def _ackcommand(cnx,cmd,data = None,pre_cmd = None):
if not cmd.startswith('#') and not cmd.startswith('?'):
cmd = '#' + cmd
return _command(cnx,cmd,data,pre_cmd)
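# A short usage sketch (hypothetical connection object and axis address):
# queries starting with "?" read back a reply, while _ackcommand prefixes
# plain action commands with "#" so the controller acknowledges them.
#
# pos = int(_command(cnx, "?POS 21"))  # query; returns the reply text
# _ackcommand(cnx, "POWER ON 21")      # sent on the wire as "#POWER ON 21"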
from .shutter import Shutter
from .switch import Switch
from .linked import LinkedAxis, SlaveAxis
|
fmoralesc/vim-pad
|
refs/heads/devel
|
pythonx/pad/modelines.py
|
3
|
html_style = ("<!-- ", " -->")
vim_style = ('" ', '')
hash_style = ("# ", '')
comment_style_map = {
"markdown": html_style,
"pandoc": html_style,
"textile": html_style,
"vo_base": html_style,
"quicktask": hash_style
}
def format_modeline(filetype):
try:
style = comment_style_map[filetype]
except KeyError:
style = vim_style
return style[0] + "vim: set ft=" + filetype + ":" + style[1]
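# For instance (values follow from the tables above):
# format_modeline("markdown") == '<!-- vim: set ft=markdown: -->'
# format_modeline("python") == '" vim: set ft=python:'  # falls back to vim_style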
|
agoravoting/agora-election
|
refs/heads/master
|
agora_election/crypto.py
|
1
|
# -*- coding: utf-8 -*-
#
# Copyright (c) Django Software Foundation and individual contributors.
# Copyright (c) Eduardo Robles Elvira <edulix AT agoravoting DOT com>
#
# All rights reserved.
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#1. Redistributions of source code must retain the above copyright notice,
#this list of conditions and the following disclaimer.
#2. Redistributions in binary form must reproduce the above copyright
#notice, this list of conditions and the following disclaimer in the
#documentation and/or other materials provided with the distribution.
#3. Neither the name of Django nor the names of its contributors may be used
#to endorse or promote products derived from this software without
#specific prior written permission.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import hmac
import hashlib
import time
# Use the system PRNG if possible
import random
try:
random = random.SystemRandom()
using_sysrandom = True
except NotImplementedError:
import warnings
warnings.warn('A secure pseudo-random number generator is not available '
'on your system. Falling back to Mersenne Twister.')
using_sysrandom = False
from flask import current_app
def force_bytes(s, encoding='utf-8', errors='strict'):
'''
Force input to be encoded as bytes.
If it's a string, encode it; if it's bytes already, return it.
'''
if isinstance(s, bytes):
return s
else:
return s.encode(encoding, errors)
def force_str(s, encoding='utf-8', errors='strict'):
'''
Force input to be decoded as a string.
If it's bytes, decode it; if it's a string already, return it.
'''
if isinstance(s, bytes):
return s.decode(encoding, errors)
else:
return s
def salted_hmac(key_salt, value, secret=None):
"""
Returns the HMAC-SHA1 of 'value', using a key generated from key_salt and a
secret (which defaults to the application's SECRET_KEY setting).
A different key_salt should be passed in for every application of HMAC.
"""
if secret is None:
secret = current_app.config.get("SECRET_KEY", "")
# We need to generate a derived key from our base key. We can do this by
# passing the key_salt and our base key through a pseudo-random function;
# SHA1 works nicely.
key = hashlib.sha1((key_salt + secret).encode('utf-8')).digest()
# If len(key_salt + secret) > sha_constructor().block_size, the above
# line is redundant and could be replaced by key = key_salt + secret, since
# the hmac module does the same thing for keys longer than the block size.
# However, we need to ensure that we *always* do this.
return hmac.new(key, msg=force_bytes(value), digestmod=hashlib.sha1)
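# A short usage sketch (names are illustrative): pass a distinct key_salt per
# purpose so a MAC minted for one feature cannot be replayed for another.
#
# digest = salted_hmac("login.token", "user=42", secret="s3cret").hexdigest()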
def get_random_string(length=12,
allowed_chars='abcdefghijklmnopqrstuvwxyz'
'ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789'):
"""
Returns a securely generated random string.
The default length of 12 with the a-z, A-Z, 0-9 character set returns
a 71-bit value. log_2((26+26+10)^12) =~ 71 bits
"""
secret = current_app.config.get("SECRET_KEY", "")
if not using_sysrandom:
# This is ugly, and a hack, but it makes things better than
# the alternative of predictability. This re-seeds the PRNG
# using a value that is hard for an attacker to predict, every
# time a random string is required. This may change the
# properties of the chosen random sequence slightly, but this
# is better than absolute predictability.
random.seed(
hashlib.sha256(
("%s%s%s" % (
random.getstate(),
time.time(),
secret)).encode('utf-8')
).digest())
return ''.join([random.choice(allowed_chars) for i in range(length)])
def constant_time_compare(val1, val2):
"""
Returns True if the two strings are equal, False otherwise.
The time taken is independent of the number of characters that match.
"""
val1 = force_bytes(val1)
val2 = force_bytes(val2)
if len(val1) != len(val2):
return False
result = 0
for x, y in zip(val1, val2):
result |= x ^ y
return result == 0
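# For example (variables are illustrative), comparing a submitted token
# against the stored one without leaking where the first mismatch occurs:
#
# if not constant_time_compare(request_token, stored_token):
#     raise ValueError("bad token")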
def hash_str(s):
"""
sha-512
"""
import hashlib
s_utf8 = s.encode('utf-8')
m = hashlib.sha512()
m.update(s_utf8)
return m.hexdigest()
def hash_token(token):
"""
sha-512
"""
import hashlib
token_utf8 = token.encode('utf-8')
m = hashlib.sha512()
m.update(token_utf8)
return m.hexdigest()
|
tempredirect/zxing
|
refs/heads/master
|
cpp/scons/scons-local-2.0.0.final.0/SCons/Tool/packaging/ipk.py
|
34
|
"""SCons.Tool.Packaging.ipk
"""
#
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Tool/packaging/ipk.py 5023 2010/06/14 22:05:46 scons"
import SCons.Builder
import SCons.Node.FS
import os
from SCons.Tool.packaging import stripinstallbuilder, putintopackageroot
def package(env, target, source, PACKAGEROOT, NAME, VERSION, DESCRIPTION,
SUMMARY, X_IPK_PRIORITY, X_IPK_SECTION, SOURCE_URL,
X_IPK_MAINTAINER, X_IPK_DEPENDS, **kw):
""" this function prepares the packageroot directory for packaging with the
ipkg builder.
"""
SCons.Tool.Tool('ipkg').generate(env)
# setup the Ipkg builder
bld = env['BUILDERS']['Ipkg']
target, source = stripinstallbuilder(target, source, env)
target, source = putintopackageroot(target, source, env, PACKAGEROOT)
# This should be overridable from the construction environment,
# which it is by using ARCHITECTURE=.
# Guessing based on what os.uname() returns at least allows it
# to work for both i386 and x86_64 Linux systems.
archmap = {
'i686' : 'i386',
'i586' : 'i386',
'i486' : 'i386',
}
buildarchitecture = os.uname()[4]
buildarchitecture = archmap.get(buildarchitecture, buildarchitecture)
if 'ARCHITECTURE' in kw:
buildarchitecture = kw['ARCHITECTURE']
# setup the kw to contain the mandatory arguments to this function.
# do this before calling any builder or setup function
loc=locals()
del loc['kw']
kw.update(loc)
del kw['source'], kw['target'], kw['env']
# generate the specfile
specfile = gen_ipk_dir(PACKAGEROOT, source, env, kw)
# override the default target.
if str(target[0])=="%s-%s"%(NAME, VERSION):
target=[ "%s_%s_%s.ipk"%(NAME, VERSION, buildarchitecture) ]
# now apply the Ipkg builder
return bld(env, target, specfile, **kw)
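# For example, with NAME='foo' and VERSION='1.0' on an i686 build host the
# archmap above yields 'i386', so the default target "foo-1.0" is rewritten
# to "foo_1.0_i386.ipk".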
def gen_ipk_dir(proot, source, env, kw):
# make sure the packageroot is a Dir object.
if SCons.Util.is_String(proot): proot=env.Dir(proot)
# create the specfile builder
s_bld=SCons.Builder.Builder(
action = build_specfiles,
)
# create the specfile targets
spec_target=[]
control=proot.Dir('CONTROL')
spec_target.append(control.File('control'))
spec_target.append(control.File('conffiles'))
spec_target.append(control.File('postrm'))
spec_target.append(control.File('prerm'))
spec_target.append(control.File('postinst'))
spec_target.append(control.File('preinst'))
# apply the builder to the specfile targets
s_bld(env, spec_target, source, **kw)
# the packageroot directory does now contain the specfiles.
return proot
def build_specfiles(source, target, env):
""" filter the targets for the needed files and use the variables in env
to create the specfile.
"""
#
# First we handle the CONTROL/control file, which is the main file for ipk.
#
# For this we need to open multiple files in random order, so we store into
# a dict so they can be easily accessed.
#
#
opened_files={}
def open_file(needle, haystack):
try:
return opened_files[needle]
except KeyError:
file=filter(lambda x: x.get_path().rfind(needle)!=-1, haystack)[0]
opened_files[needle]=open(file.abspath, 'w')
return opened_files[needle]
control_file=open_file('control', target)
if 'X_IPK_DESCRIPTION' not in env:
env['X_IPK_DESCRIPTION']="%s\n %s"%(env['SUMMARY'],
env['DESCRIPTION'].replace('\n', '\n '))
content = """
Package: $NAME
Version: $VERSION
Priority: $X_IPK_PRIORITY
Section: $X_IPK_SECTION
Source: $SOURCE_URL
Architecture: $ARCHITECTURE
Maintainer: $X_IPK_MAINTAINER
Depends: $X_IPK_DEPENDS
Description: $X_IPK_DESCRIPTION
"""
control_file.write(env.subst(content))
#
# now handle the various other files, whose purpose is to set post- and
# pre-scripts and to mark files as config files.
#
# We do so by filtering the source files for files which are marked with
# the "config" tag and afterwards we do the same for x_ipk_postrm,
# x_ipk_prerm, x_ipk_postinst and x_ipk_preinst tags.
#
# The first one writes the name of the file into the file
# CONTROL/conffiles, the latter ones add the content of the x_ipk_* variable
# into the same-named file.
#
for f in [x for x in source if 'PACKAGING_CONFIG' in dir(x)]:
config=open_file('conffiles', target)
config.write(f.PACKAGING_INSTALL_LOCATION)
config.write('\n')
for str in 'POSTRM PRERM POSTINST PREINST'.split():
name="PACKAGING_X_IPK_%s"%str
for f in [x for x in source if name in dir(x)]:
file=open_file(name, target)
file.write(env[str])
#
# close all opened files
for f in opened_files.values():
f.close()
# call a user specified function
if 'CHANGE_SPECFILE' in env:
content += env['CHANGE_SPECFILE'](target)
return 0
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
Big-B702/python-for-android
|
refs/heads/master
|
python3-alpha/python3-src/Lib/lib2to3/tests/data/infinite_recursion.py
|
341
|
# This file is used to verify that 2to3 falls back to a slower, iterative pattern matching
# scheme in the event that the faster recursive system fails due to infinite recursion.
from ctypes import *
STRING = c_char_p
OSUnknownByteOrder = 0
UIT_PROMPT = 1
P_PGID = 2
P_PID = 1
UIT_ERROR = 5
UIT_INFO = 4
UIT_NONE = 0
P_ALL = 0
UIT_VERIFY = 2
OSBigEndian = 2
UIT_BOOLEAN = 3
OSLittleEndian = 1
__darwin_nl_item = c_int
__darwin_wctrans_t = c_int
__darwin_wctype_t = c_ulong
__int8_t = c_byte
__uint8_t = c_ubyte
__int16_t = c_short
__uint16_t = c_ushort
__int32_t = c_int
__uint32_t = c_uint
__int64_t = c_longlong
__uint64_t = c_ulonglong
__darwin_intptr_t = c_long
__darwin_natural_t = c_uint
__darwin_ct_rune_t = c_int
class __mbstate_t(Union):
pass
__mbstate_t._pack_ = 4
__mbstate_t._fields_ = [
('__mbstate8', c_char * 128),
('_mbstateL', c_longlong),
]
assert sizeof(__mbstate_t) == 128, sizeof(__mbstate_t)
assert alignment(__mbstate_t) == 4, alignment(__mbstate_t)
__darwin_mbstate_t = __mbstate_t
__darwin_ptrdiff_t = c_int
__darwin_size_t = c_ulong
__darwin_va_list = STRING
__darwin_wchar_t = c_int
__darwin_rune_t = __darwin_wchar_t
__darwin_wint_t = c_int
__darwin_clock_t = c_ulong
__darwin_socklen_t = __uint32_t
__darwin_ssize_t = c_long
__darwin_time_t = c_long
sig_atomic_t = c_int
class sigcontext(Structure):
pass
sigcontext._fields_ = [
('sc_onstack', c_int),
('sc_mask', c_int),
('sc_eax', c_uint),
('sc_ebx', c_uint),
('sc_ecx', c_uint),
('sc_edx', c_uint),
('sc_edi', c_uint),
('sc_esi', c_uint),
('sc_ebp', c_uint),
('sc_esp', c_uint),
('sc_ss', c_uint),
('sc_eflags', c_uint),
('sc_eip', c_uint),
('sc_cs', c_uint),
('sc_ds', c_uint),
('sc_es', c_uint),
('sc_fs', c_uint),
('sc_gs', c_uint),
]
assert sizeof(sigcontext) == 72, sizeof(sigcontext)
assert alignment(sigcontext) == 4, alignment(sigcontext)
u_int8_t = c_ubyte
u_int16_t = c_ushort
u_int32_t = c_uint
u_int64_t = c_ulonglong
int32_t = c_int
register_t = int32_t
user_addr_t = u_int64_t
user_size_t = u_int64_t
int64_t = c_longlong
user_ssize_t = int64_t
user_long_t = int64_t
user_ulong_t = u_int64_t
user_time_t = int64_t
syscall_arg_t = u_int64_t
# values for unnamed enumeration
class aes_key_st(Structure):
pass
aes_key_st._fields_ = [
('rd_key', c_ulong * 60),
('rounds', c_int),
]
assert sizeof(aes_key_st) == 244, sizeof(aes_key_st)
assert alignment(aes_key_st) == 4, alignment(aes_key_st)
AES_KEY = aes_key_st
class asn1_ctx_st(Structure):
pass
asn1_ctx_st._fields_ = [
('p', POINTER(c_ubyte)),
('eos', c_int),
('error', c_int),
('inf', c_int),
('tag', c_int),
('xclass', c_int),
('slen', c_long),
('max', POINTER(c_ubyte)),
('q', POINTER(c_ubyte)),
('pp', POINTER(POINTER(c_ubyte))),
('line', c_int),
]
assert sizeof(asn1_ctx_st) == 44, sizeof(asn1_ctx_st)
assert alignment(asn1_ctx_st) == 4, alignment(asn1_ctx_st)
ASN1_CTX = asn1_ctx_st
class asn1_object_st(Structure):
pass
asn1_object_st._fields_ = [
('sn', STRING),
('ln', STRING),
('nid', c_int),
('length', c_int),
('data', POINTER(c_ubyte)),
('flags', c_int),
]
assert sizeof(asn1_object_st) == 24, sizeof(asn1_object_st)
assert alignment(asn1_object_st) == 4, alignment(asn1_object_st)
ASN1_OBJECT = asn1_object_st
class asn1_string_st(Structure):
pass
asn1_string_st._fields_ = [
('length', c_int),
('type', c_int),
('data', POINTER(c_ubyte)),
('flags', c_long),
]
assert sizeof(asn1_string_st) == 16, sizeof(asn1_string_st)
assert alignment(asn1_string_st) == 4, alignment(asn1_string_st)
ASN1_STRING = asn1_string_st
class ASN1_ENCODING_st(Structure):
pass
ASN1_ENCODING_st._fields_ = [
('enc', POINTER(c_ubyte)),
('len', c_long),
('modified', c_int),
]
assert sizeof(ASN1_ENCODING_st) == 12, sizeof(ASN1_ENCODING_st)
assert alignment(ASN1_ENCODING_st) == 4, alignment(ASN1_ENCODING_st)
ASN1_ENCODING = ASN1_ENCODING_st
class asn1_string_table_st(Structure):
pass
asn1_string_table_st._fields_ = [
('nid', c_int),
('minsize', c_long),
('maxsize', c_long),
('mask', c_ulong),
('flags', c_ulong),
]
assert sizeof(asn1_string_table_st) == 20, sizeof(asn1_string_table_st)
assert alignment(asn1_string_table_st) == 4, alignment(asn1_string_table_st)
ASN1_STRING_TABLE = asn1_string_table_st
class ASN1_TEMPLATE_st(Structure):
pass
ASN1_TEMPLATE_st._fields_ = [
]
ASN1_TEMPLATE = ASN1_TEMPLATE_st
class ASN1_ITEM_st(Structure):
pass
ASN1_ITEM = ASN1_ITEM_st
ASN1_ITEM_st._fields_ = [
]
class ASN1_TLC_st(Structure):
pass
ASN1_TLC = ASN1_TLC_st
ASN1_TLC_st._fields_ = [
]
class ASN1_VALUE_st(Structure):
pass
ASN1_VALUE_st._fields_ = [
]
ASN1_VALUE = ASN1_VALUE_st
ASN1_ITEM_EXP = ASN1_ITEM
class asn1_type_st(Structure):
pass
class N12asn1_type_st4DOLLAR_11E(Union):
pass
ASN1_BOOLEAN = c_int
ASN1_INTEGER = asn1_string_st
ASN1_ENUMERATED = asn1_string_st
ASN1_BIT_STRING = asn1_string_st
ASN1_OCTET_STRING = asn1_string_st
ASN1_PRINTABLESTRING = asn1_string_st
ASN1_T61STRING = asn1_string_st
ASN1_IA5STRING = asn1_string_st
ASN1_GENERALSTRING = asn1_string_st
ASN1_BMPSTRING = asn1_string_st
ASN1_UNIVERSALSTRING = asn1_string_st
ASN1_UTCTIME = asn1_string_st
ASN1_GENERALIZEDTIME = asn1_string_st
ASN1_VISIBLESTRING = asn1_string_st
ASN1_UTF8STRING = asn1_string_st
N12asn1_type_st4DOLLAR_11E._fields_ = [
('ptr', STRING),
('boolean', ASN1_BOOLEAN),
('asn1_string', POINTER(ASN1_STRING)),
('object', POINTER(ASN1_OBJECT)),
('integer', POINTER(ASN1_INTEGER)),
('enumerated', POINTER(ASN1_ENUMERATED)),
('bit_string', POINTER(ASN1_BIT_STRING)),
('octet_string', POINTER(ASN1_OCTET_STRING)),
('printablestring', POINTER(ASN1_PRINTABLESTRING)),
('t61string', POINTER(ASN1_T61STRING)),
('ia5string', POINTER(ASN1_IA5STRING)),
('generalstring', POINTER(ASN1_GENERALSTRING)),
('bmpstring', POINTER(ASN1_BMPSTRING)),
('universalstring', POINTER(ASN1_UNIVERSALSTRING)),
('utctime', POINTER(ASN1_UTCTIME)),
('generalizedtime', POINTER(ASN1_GENERALIZEDTIME)),
('visiblestring', POINTER(ASN1_VISIBLESTRING)),
('utf8string', POINTER(ASN1_UTF8STRING)),
('set', POINTER(ASN1_STRING)),
('sequence', POINTER(ASN1_STRING)),
]
assert sizeof(N12asn1_type_st4DOLLAR_11E) == 4, sizeof(N12asn1_type_st4DOLLAR_11E)
assert alignment(N12asn1_type_st4DOLLAR_11E) == 4, alignment(N12asn1_type_st4DOLLAR_11E)
asn1_type_st._fields_ = [
('type', c_int),
('value', N12asn1_type_st4DOLLAR_11E),
]
assert sizeof(asn1_type_st) == 8, sizeof(asn1_type_st)
assert alignment(asn1_type_st) == 4, alignment(asn1_type_st)
ASN1_TYPE = asn1_type_st
class asn1_method_st(Structure):
pass
asn1_method_st._fields_ = [
('i2d', CFUNCTYPE(c_int)),
('d2i', CFUNCTYPE(STRING)),
('create', CFUNCTYPE(STRING)),
('destroy', CFUNCTYPE(None)),
]
assert sizeof(asn1_method_st) == 16, sizeof(asn1_method_st)
assert alignment(asn1_method_st) == 4, alignment(asn1_method_st)
ASN1_METHOD = asn1_method_st
class asn1_header_st(Structure):
pass
asn1_header_st._fields_ = [
('header', POINTER(ASN1_OCTET_STRING)),
('data', STRING),
('meth', POINTER(ASN1_METHOD)),
]
assert sizeof(asn1_header_st) == 12, sizeof(asn1_header_st)
assert alignment(asn1_header_st) == 4, alignment(asn1_header_st)
ASN1_HEADER = asn1_header_st
class BIT_STRING_BITNAME_st(Structure):
pass
BIT_STRING_BITNAME_st._fields_ = [
('bitnum', c_int),
('lname', STRING),
('sname', STRING),
]
assert sizeof(BIT_STRING_BITNAME_st) == 12, sizeof(BIT_STRING_BITNAME_st)
assert alignment(BIT_STRING_BITNAME_st) == 4, alignment(BIT_STRING_BITNAME_st)
BIT_STRING_BITNAME = BIT_STRING_BITNAME_st
class bio_st(Structure):
pass
BIO = bio_st
bio_info_cb = CFUNCTYPE(None, POINTER(bio_st), c_int, STRING, c_int, c_long, c_long)
class bio_method_st(Structure):
pass
bio_method_st._fields_ = [
('type', c_int),
('name', STRING),
('bwrite', CFUNCTYPE(c_int, POINTER(BIO), STRING, c_int)),
('bread', CFUNCTYPE(c_int, POINTER(BIO), STRING, c_int)),
('bputs', CFUNCTYPE(c_int, POINTER(BIO), STRING)),
('bgets', CFUNCTYPE(c_int, POINTER(BIO), STRING, c_int)),
('ctrl', CFUNCTYPE(c_long, POINTER(BIO), c_int, c_long, c_void_p)),
('create', CFUNCTYPE(c_int, POINTER(BIO))),
('destroy', CFUNCTYPE(c_int, POINTER(BIO))),
('callback_ctrl', CFUNCTYPE(c_long, POINTER(BIO), c_int, POINTER(bio_info_cb))),
]
assert sizeof(bio_method_st) == 40, sizeof(bio_method_st)
assert alignment(bio_method_st) == 4, alignment(bio_method_st)
BIO_METHOD = bio_method_st
class crypto_ex_data_st(Structure):
pass
class stack_st(Structure):
pass
STACK = stack_st
crypto_ex_data_st._fields_ = [
('sk', POINTER(STACK)),
('dummy', c_int),
]
assert sizeof(crypto_ex_data_st) == 8, sizeof(crypto_ex_data_st)
assert alignment(crypto_ex_data_st) == 4, alignment(crypto_ex_data_st)
CRYPTO_EX_DATA = crypto_ex_data_st
bio_st._fields_ = [
('method', POINTER(BIO_METHOD)),
('callback', CFUNCTYPE(c_long, POINTER(bio_st), c_int, STRING, c_int, c_long, c_long)),
('cb_arg', STRING),
('init', c_int),
('shutdown', c_int),
('flags', c_int),
('retry_reason', c_int),
('num', c_int),
('ptr', c_void_p),
('next_bio', POINTER(bio_st)),
('prev_bio', POINTER(bio_st)),
('references', c_int),
('num_read', c_ulong),
('num_write', c_ulong),
('ex_data', CRYPTO_EX_DATA),
]
assert sizeof(bio_st) == 64, sizeof(bio_st)
assert alignment(bio_st) == 4, alignment(bio_st)
class bio_f_buffer_ctx_struct(Structure):
pass
bio_f_buffer_ctx_struct._fields_ = [
('ibuf_size', c_int),
('obuf_size', c_int),
('ibuf', STRING),
('ibuf_len', c_int),
('ibuf_off', c_int),
('obuf', STRING),
('obuf_len', c_int),
('obuf_off', c_int),
]
assert sizeof(bio_f_buffer_ctx_struct) == 32, sizeof(bio_f_buffer_ctx_struct)
assert alignment(bio_f_buffer_ctx_struct) == 4, alignment(bio_f_buffer_ctx_struct)
BIO_F_BUFFER_CTX = bio_f_buffer_ctx_struct
class hostent(Structure):
pass
hostent._fields_ = [
]
class bf_key_st(Structure):
pass
bf_key_st._fields_ = [
('P', c_uint * 18),
('S', c_uint * 1024),
]
assert sizeof(bf_key_st) == 4168, sizeof(bf_key_st)
assert alignment(bf_key_st) == 4, alignment(bf_key_st)
BF_KEY = bf_key_st
class bignum_st(Structure):
pass
bignum_st._fields_ = [
('d', POINTER(c_ulong)),
('top', c_int),
('dmax', c_int),
('neg', c_int),
('flags', c_int),
]
assert sizeof(bignum_st) == 20, sizeof(bignum_st)
assert alignment(bignum_st) == 4, alignment(bignum_st)
BIGNUM = bignum_st
class bignum_ctx(Structure):
pass
bignum_ctx._fields_ = [
]
BN_CTX = bignum_ctx
class bn_blinding_st(Structure):
pass
bn_blinding_st._fields_ = [
('init', c_int),
('A', POINTER(BIGNUM)),
('Ai', POINTER(BIGNUM)),
('mod', POINTER(BIGNUM)),
('thread_id', c_ulong),
]
assert sizeof(bn_blinding_st) == 20, sizeof(bn_blinding_st)
assert alignment(bn_blinding_st) == 4, alignment(bn_blinding_st)
BN_BLINDING = bn_blinding_st
class bn_mont_ctx_st(Structure):
pass
bn_mont_ctx_st._fields_ = [
('ri', c_int),
('RR', BIGNUM),
('N', BIGNUM),
('Ni', BIGNUM),
('n0', c_ulong),
('flags', c_int),
]
assert sizeof(bn_mont_ctx_st) == 72, sizeof(bn_mont_ctx_st)
assert alignment(bn_mont_ctx_st) == 4, alignment(bn_mont_ctx_st)
BN_MONT_CTX = bn_mont_ctx_st
class bn_recp_ctx_st(Structure):
pass
bn_recp_ctx_st._fields_ = [
('N', BIGNUM),
('Nr', BIGNUM),
('num_bits', c_int),
('shift', c_int),
('flags', c_int),
]
assert sizeof(bn_recp_ctx_st) == 52, sizeof(bn_recp_ctx_st)
assert alignment(bn_recp_ctx_st) == 4, alignment(bn_recp_ctx_st)
BN_RECP_CTX = bn_recp_ctx_st
class buf_mem_st(Structure):
pass
buf_mem_st._fields_ = [
('length', c_int),
('data', STRING),
('max', c_int),
]
assert sizeof(buf_mem_st) == 12, sizeof(buf_mem_st)
assert alignment(buf_mem_st) == 4, alignment(buf_mem_st)
BUF_MEM = buf_mem_st
class cast_key_st(Structure):
pass
cast_key_st._fields_ = [
('data', c_ulong * 32),
('short_key', c_int),
]
assert sizeof(cast_key_st) == 132, sizeof(cast_key_st)
assert alignment(cast_key_st) == 4, alignment(cast_key_st)
CAST_KEY = cast_key_st
class comp_method_st(Structure):
pass
comp_method_st._fields_ = [
('type', c_int),
('name', STRING),
('init', CFUNCTYPE(c_int)),
('finish', CFUNCTYPE(None)),
('compress', CFUNCTYPE(c_int)),
('expand', CFUNCTYPE(c_int)),
('ctrl', CFUNCTYPE(c_long)),
('callback_ctrl', CFUNCTYPE(c_long)),
]
assert sizeof(comp_method_st) == 32, sizeof(comp_method_st)
assert alignment(comp_method_st) == 4, alignment(comp_method_st)
COMP_METHOD = comp_method_st
class comp_ctx_st(Structure):
pass
comp_ctx_st._fields_ = [
('meth', POINTER(COMP_METHOD)),
('compress_in', c_ulong),
('compress_out', c_ulong),
('expand_in', c_ulong),
('expand_out', c_ulong),
('ex_data', CRYPTO_EX_DATA),
]
assert sizeof(comp_ctx_st) == 28, sizeof(comp_ctx_st)
assert alignment(comp_ctx_st) == 4, alignment(comp_ctx_st)
COMP_CTX = comp_ctx_st
class CRYPTO_dynlock_value(Structure):
pass
CRYPTO_dynlock_value._fields_ = [
]
class CRYPTO_dynlock(Structure):
pass
CRYPTO_dynlock._fields_ = [
('references', c_int),
('data', POINTER(CRYPTO_dynlock_value)),
]
assert sizeof(CRYPTO_dynlock) == 8, sizeof(CRYPTO_dynlock)
assert alignment(CRYPTO_dynlock) == 4, alignment(CRYPTO_dynlock)
BIO_dummy = bio_st
CRYPTO_EX_new = CFUNCTYPE(c_int, c_void_p, c_void_p, POINTER(CRYPTO_EX_DATA), c_int, c_long, c_void_p)
CRYPTO_EX_free = CFUNCTYPE(None, c_void_p, c_void_p, POINTER(CRYPTO_EX_DATA), c_int, c_long, c_void_p)
CRYPTO_EX_dup = CFUNCTYPE(c_int, POINTER(CRYPTO_EX_DATA), POINTER(CRYPTO_EX_DATA), c_void_p, c_int, c_long, c_void_p)
class crypto_ex_data_func_st(Structure):
pass
crypto_ex_data_func_st._fields_ = [
('argl', c_long),
('argp', c_void_p),
('new_func', POINTER(CRYPTO_EX_new)),
('free_func', POINTER(CRYPTO_EX_free)),
('dup_func', POINTER(CRYPTO_EX_dup)),
]
assert sizeof(crypto_ex_data_func_st) == 20, sizeof(crypto_ex_data_func_st)
assert alignment(crypto_ex_data_func_st) == 4, alignment(crypto_ex_data_func_st)
CRYPTO_EX_DATA_FUNCS = crypto_ex_data_func_st
class st_CRYPTO_EX_DATA_IMPL(Structure):
pass
CRYPTO_EX_DATA_IMPL = st_CRYPTO_EX_DATA_IMPL
st_CRYPTO_EX_DATA_IMPL._fields_ = [
]
CRYPTO_MEM_LEAK_CB = CFUNCTYPE(c_void_p, c_ulong, STRING, c_int, c_int, c_void_p)
DES_cblock = c_ubyte * 8
const_DES_cblock = c_ubyte * 8
class DES_ks(Structure):
pass
class N6DES_ks3DOLLAR_9E(Union):
pass
N6DES_ks3DOLLAR_9E._fields_ = [
('cblock', DES_cblock),
('deslong', c_ulong * 2),
]
assert sizeof(N6DES_ks3DOLLAR_9E) == 8, sizeof(N6DES_ks3DOLLAR_9E)
assert alignment(N6DES_ks3DOLLAR_9E) == 4, alignment(N6DES_ks3DOLLAR_9E)
DES_ks._fields_ = [
('ks', N6DES_ks3DOLLAR_9E * 16),
]
assert sizeof(DES_ks) == 128, sizeof(DES_ks)
assert alignment(DES_ks) == 4, alignment(DES_ks)
DES_key_schedule = DES_ks
_ossl_old_des_cblock = c_ubyte * 8
class _ossl_old_des_ks_struct(Structure):
pass
class N23_ossl_old_des_ks_struct4DOLLAR_10E(Union):
pass
N23_ossl_old_des_ks_struct4DOLLAR_10E._fields_ = [
('_', _ossl_old_des_cblock),
('pad', c_ulong * 2),
]
assert sizeof(N23_ossl_old_des_ks_struct4DOLLAR_10E) == 8, sizeof(N23_ossl_old_des_ks_struct4DOLLAR_10E)
assert alignment(N23_ossl_old_des_ks_struct4DOLLAR_10E) == 4, alignment(N23_ossl_old_des_ks_struct4DOLLAR_10E)
_ossl_old_des_ks_struct._fields_ = [
('ks', N23_ossl_old_des_ks_struct4DOLLAR_10E),
]
assert sizeof(_ossl_old_des_ks_struct) == 8, sizeof(_ossl_old_des_ks_struct)
assert alignment(_ossl_old_des_ks_struct) == 4, alignment(_ossl_old_des_ks_struct)
_ossl_old_des_key_schedule = _ossl_old_des_ks_struct * 16
class dh_st(Structure):
pass
DH = dh_st
class dh_method(Structure):
pass
dh_method._fields_ = [
('name', STRING),
('generate_key', CFUNCTYPE(c_int, POINTER(DH))),
('compute_key', CFUNCTYPE(c_int, POINTER(c_ubyte), POINTER(BIGNUM), POINTER(DH))),
('bn_mod_exp', CFUNCTYPE(c_int, POINTER(DH), POINTER(BIGNUM), POINTER(BIGNUM), POINTER(BIGNUM), POINTER(BIGNUM), POINTER(BN_CTX), POINTER(BN_MONT_CTX))),
('init', CFUNCTYPE(c_int, POINTER(DH))),
('finish', CFUNCTYPE(c_int, POINTER(DH))),
('flags', c_int),
('app_data', STRING),
]
assert sizeof(dh_method) == 32, sizeof(dh_method)
assert alignment(dh_method) == 4, alignment(dh_method)
DH_METHOD = dh_method
class engine_st(Structure):
pass
ENGINE = engine_st
dh_st._fields_ = [
('pad', c_int),
('version', c_int),
('p', POINTER(BIGNUM)),
('g', POINTER(BIGNUM)),
('length', c_long),
('pub_key', POINTER(BIGNUM)),
('priv_key', POINTER(BIGNUM)),
('flags', c_int),
('method_mont_p', STRING),
('q', POINTER(BIGNUM)),
('j', POINTER(BIGNUM)),
('seed', POINTER(c_ubyte)),
('seedlen', c_int),
('counter', POINTER(BIGNUM)),
('references', c_int),
('ex_data', CRYPTO_EX_DATA),
('meth', POINTER(DH_METHOD)),
('engine', POINTER(ENGINE)),
]
assert sizeof(dh_st) == 76, sizeof(dh_st)
assert alignment(dh_st) == 4, alignment(dh_st)
class dsa_st(Structure):
pass
DSA = dsa_st
class DSA_SIG_st(Structure):
pass
DSA_SIG_st._fields_ = [
('r', POINTER(BIGNUM)),
('s', POINTER(BIGNUM)),
]
assert sizeof(DSA_SIG_st) == 8, sizeof(DSA_SIG_st)
assert alignment(DSA_SIG_st) == 4, alignment(DSA_SIG_st)
DSA_SIG = DSA_SIG_st
class dsa_method(Structure):
pass
dsa_method._fields_ = [
('name', STRING),
('dsa_do_sign', CFUNCTYPE(POINTER(DSA_SIG), POINTER(c_ubyte), c_int, POINTER(DSA))),
('dsa_sign_setup', CFUNCTYPE(c_int, POINTER(DSA), POINTER(BN_CTX), POINTER(POINTER(BIGNUM)), POINTER(POINTER(BIGNUM)))),
('dsa_do_verify', CFUNCTYPE(c_int, POINTER(c_ubyte), c_int, POINTER(DSA_SIG), POINTER(DSA))),
('dsa_mod_exp', CFUNCTYPE(c_int, POINTER(DSA), POINTER(BIGNUM), POINTER(BIGNUM), POINTER(BIGNUM), POINTER(BIGNUM), POINTER(BIGNUM), POINTER(BIGNUM), POINTER(BN_CTX), POINTER(BN_MONT_CTX))),
('bn_mod_exp', CFUNCTYPE(c_int, POINTER(DSA), POINTER(BIGNUM), POINTER(BIGNUM), POINTER(BIGNUM), POINTER(BIGNUM), POINTER(BN_CTX), POINTER(BN_MONT_CTX))),
('init', CFUNCTYPE(c_int, POINTER(DSA))),
('finish', CFUNCTYPE(c_int, POINTER(DSA))),
('flags', c_int),
('app_data', STRING),
]
assert sizeof(dsa_method) == 40, sizeof(dsa_method)
assert alignment(dsa_method) == 4, alignment(dsa_method)
DSA_METHOD = dsa_method
dsa_st._fields_ = [
('pad', c_int),
('version', c_long),
('write_params', c_int),
('p', POINTER(BIGNUM)),
('q', POINTER(BIGNUM)),
('g', POINTER(BIGNUM)),
('pub_key', POINTER(BIGNUM)),
('priv_key', POINTER(BIGNUM)),
('kinv', POINTER(BIGNUM)),
('r', POINTER(BIGNUM)),
('flags', c_int),
('method_mont_p', STRING),
('references', c_int),
('ex_data', CRYPTO_EX_DATA),
('meth', POINTER(DSA_METHOD)),
('engine', POINTER(ENGINE)),
]
assert sizeof(dsa_st) == 68, sizeof(dsa_st)
assert alignment(dsa_st) == 4, alignment(dsa_st)
class evp_pkey_st(Structure):
pass
class N11evp_pkey_st4DOLLAR_12E(Union):
pass
class rsa_st(Structure):
pass
N11evp_pkey_st4DOLLAR_12E._fields_ = [
('ptr', STRING),
('rsa', POINTER(rsa_st)),
('dsa', POINTER(dsa_st)),
('dh', POINTER(dh_st)),
]
assert sizeof(N11evp_pkey_st4DOLLAR_12E) == 4, sizeof(N11evp_pkey_st4DOLLAR_12E)
assert alignment(N11evp_pkey_st4DOLLAR_12E) == 4, alignment(N11evp_pkey_st4DOLLAR_12E)
evp_pkey_st._fields_ = [
('type', c_int),
('save_type', c_int),
('references', c_int),
('pkey', N11evp_pkey_st4DOLLAR_12E),
('save_parameters', c_int),
('attributes', POINTER(STACK)),
]
assert sizeof(evp_pkey_st) == 24, sizeof(evp_pkey_st)
assert alignment(evp_pkey_st) == 4, alignment(evp_pkey_st)
class env_md_st(Structure):
pass
class env_md_ctx_st(Structure):
pass
EVP_MD_CTX = env_md_ctx_st
env_md_st._fields_ = [
('type', c_int),
('pkey_type', c_int),
('md_size', c_int),
('flags', c_ulong),
('init', CFUNCTYPE(c_int, POINTER(EVP_MD_CTX))),
('update', CFUNCTYPE(c_int, POINTER(EVP_MD_CTX), c_void_p, c_ulong)),
('final', CFUNCTYPE(c_int, POINTER(EVP_MD_CTX), POINTER(c_ubyte))),
('copy', CFUNCTYPE(c_int, POINTER(EVP_MD_CTX), POINTER(EVP_MD_CTX))),
('cleanup', CFUNCTYPE(c_int, POINTER(EVP_MD_CTX))),
('sign', CFUNCTYPE(c_int)),
('verify', CFUNCTYPE(c_int)),
('required_pkey_type', c_int * 5),
('block_size', c_int),
('ctx_size', c_int),
]
assert sizeof(env_md_st) == 72, sizeof(env_md_st)
assert alignment(env_md_st) == 4, alignment(env_md_st)
EVP_MD = env_md_st
env_md_ctx_st._fields_ = [
('digest', POINTER(EVP_MD)),
('engine', POINTER(ENGINE)),
('flags', c_ulong),
('md_data', c_void_p),
]
assert sizeof(env_md_ctx_st) == 16, sizeof(env_md_ctx_st)
assert alignment(env_md_ctx_st) == 4, alignment(env_md_ctx_st)
class evp_cipher_st(Structure):
pass
class evp_cipher_ctx_st(Structure):
pass
EVP_CIPHER_CTX = evp_cipher_ctx_st
evp_cipher_st._fields_ = [
('nid', c_int),
('block_size', c_int),
('key_len', c_int),
('iv_len', c_int),
('flags', c_ulong),
('init', CFUNCTYPE(c_int, POINTER(EVP_CIPHER_CTX), POINTER(c_ubyte), POINTER(c_ubyte), c_int)),
('do_cipher', CFUNCTYPE(c_int, POINTER(EVP_CIPHER_CTX), POINTER(c_ubyte), POINTER(c_ubyte), c_uint)),
('cleanup', CFUNCTYPE(c_int, POINTER(EVP_CIPHER_CTX))),
('ctx_size', c_int),
('set_asn1_parameters', CFUNCTYPE(c_int, POINTER(EVP_CIPHER_CTX), POINTER(ASN1_TYPE))),
('get_asn1_parameters', CFUNCTYPE(c_int, POINTER(EVP_CIPHER_CTX), POINTER(ASN1_TYPE))),
('ctrl', CFUNCTYPE(c_int, POINTER(EVP_CIPHER_CTX), c_int, c_int, c_void_p)),
('app_data', c_void_p),
]
assert sizeof(evp_cipher_st) == 52, sizeof(evp_cipher_st)
assert alignment(evp_cipher_st) == 4, alignment(evp_cipher_st)
class evp_cipher_info_st(Structure):
pass
EVP_CIPHER = evp_cipher_st
evp_cipher_info_st._fields_ = [
('cipher', POINTER(EVP_CIPHER)),
('iv', c_ubyte * 16),
]
assert sizeof(evp_cipher_info_st) == 20, sizeof(evp_cipher_info_st)
assert alignment(evp_cipher_info_st) == 4, alignment(evp_cipher_info_st)
EVP_CIPHER_INFO = evp_cipher_info_st
evp_cipher_ctx_st._fields_ = [
('cipher', POINTER(EVP_CIPHER)),
('engine', POINTER(ENGINE)),
('encrypt', c_int),
('buf_len', c_int),
('oiv', c_ubyte * 16),
('iv', c_ubyte * 16),
('buf', c_ubyte * 32),
('num', c_int),
('app_data', c_void_p),
('key_len', c_int),
('flags', c_ulong),
('cipher_data', c_void_p),
('final_used', c_int),
('block_mask', c_int),
('final', c_ubyte * 32),
]
assert sizeof(evp_cipher_ctx_st) == 140, sizeof(evp_cipher_ctx_st)
assert alignment(evp_cipher_ctx_st) == 4, alignment(evp_cipher_ctx_st)
class evp_Encode_Ctx_st(Structure):
pass
evp_Encode_Ctx_st._fields_ = [
('num', c_int),
('length', c_int),
('enc_data', c_ubyte * 80),
('line_num', c_int),
('expect_nl', c_int),
]
assert sizeof(evp_Encode_Ctx_st) == 96, sizeof(evp_Encode_Ctx_st)
assert alignment(evp_Encode_Ctx_st) == 4, alignment(evp_Encode_Ctx_st)
EVP_ENCODE_CTX = evp_Encode_Ctx_st
EVP_PBE_KEYGEN = CFUNCTYPE(c_int, POINTER(EVP_CIPHER_CTX), STRING, c_int, POINTER(ASN1_TYPE), POINTER(EVP_CIPHER), POINTER(EVP_MD), c_int)
class lhash_node_st(Structure):
pass
lhash_node_st._fields_ = [
('data', c_void_p),
('next', POINTER(lhash_node_st)),
('hash', c_ulong),
]
assert sizeof(lhash_node_st) == 12, sizeof(lhash_node_st)
assert alignment(lhash_node_st) == 4, alignment(lhash_node_st)
LHASH_NODE = lhash_node_st
LHASH_COMP_FN_TYPE = CFUNCTYPE(c_int, c_void_p, c_void_p)
LHASH_HASH_FN_TYPE = CFUNCTYPE(c_ulong, c_void_p)
LHASH_DOALL_FN_TYPE = CFUNCTYPE(None, c_void_p)
LHASH_DOALL_ARG_FN_TYPE = CFUNCTYPE(None, c_void_p, c_void_p)
class lhash_st(Structure):
pass
lhash_st._fields_ = [
('b', POINTER(POINTER(LHASH_NODE))),
('comp', LHASH_COMP_FN_TYPE),
('hash', LHASH_HASH_FN_TYPE),
('num_nodes', c_uint),
('num_alloc_nodes', c_uint),
('p', c_uint),
('pmax', c_uint),
('up_load', c_ulong),
('down_load', c_ulong),
('num_items', c_ulong),
('num_expands', c_ulong),
('num_expand_reallocs', c_ulong),
('num_contracts', c_ulong),
('num_contract_reallocs', c_ulong),
('num_hash_calls', c_ulong),
('num_comp_calls', c_ulong),
('num_insert', c_ulong),
('num_replace', c_ulong),
('num_delete', c_ulong),
('num_no_delete', c_ulong),
('num_retrieve', c_ulong),
('num_retrieve_miss', c_ulong),
('num_hash_comps', c_ulong),
('error', c_int),
]
assert sizeof(lhash_st) == 96, sizeof(lhash_st)
assert alignment(lhash_st) == 4, alignment(lhash_st)
LHASH = lhash_st
class MD2state_st(Structure):
pass
MD2state_st._fields_ = [
('num', c_int),
('data', c_ubyte * 16),
('cksm', c_uint * 16),
('state', c_uint * 16),
]
assert sizeof(MD2state_st) == 148, sizeof(MD2state_st)
assert alignment(MD2state_st) == 4, alignment(MD2state_st)
MD2_CTX = MD2state_st
class MD4state_st(Structure):
pass
MD4state_st._fields_ = [
('A', c_uint),
('B', c_uint),
('C', c_uint),
('D', c_uint),
('Nl', c_uint),
('Nh', c_uint),
('data', c_uint * 16),
('num', c_int),
]
assert sizeof(MD4state_st) == 92, sizeof(MD4state_st)
assert alignment(MD4state_st) == 4, alignment(MD4state_st)
MD4_CTX = MD4state_st
class MD5state_st(Structure):
pass
MD5state_st._fields_ = [
('A', c_uint),
('B', c_uint),
('C', c_uint),
('D', c_uint),
('Nl', c_uint),
('Nh', c_uint),
('data', c_uint * 16),
('num', c_int),
]
assert sizeof(MD5state_st) == 92, sizeof(MD5state_st)
assert alignment(MD5state_st) == 4, alignment(MD5state_st)
MD5_CTX = MD5state_st
class mdc2_ctx_st(Structure):
pass
mdc2_ctx_st._fields_ = [
('num', c_int),
('data', c_ubyte * 8),
('h', DES_cblock),
('hh', DES_cblock),
('pad_type', c_int),
]
assert sizeof(mdc2_ctx_st) == 32, sizeof(mdc2_ctx_st)
assert alignment(mdc2_ctx_st) == 4, alignment(mdc2_ctx_st)
MDC2_CTX = mdc2_ctx_st
class obj_name_st(Structure):
pass
obj_name_st._fields_ = [
('type', c_int),
('alias', c_int),
('name', STRING),
('data', STRING),
]
assert sizeof(obj_name_st) == 16, sizeof(obj_name_st)
assert alignment(obj_name_st) == 4, alignment(obj_name_st)
OBJ_NAME = obj_name_st
ASN1_TIME = asn1_string_st
ASN1_NULL = c_int
EVP_PKEY = evp_pkey_st
class x509_st(Structure):
pass
X509 = x509_st
class X509_algor_st(Structure):
pass
X509_ALGOR = X509_algor_st
class X509_crl_st(Structure):
pass
X509_CRL = X509_crl_st
class X509_name_st(Structure):
pass
X509_NAME = X509_name_st
class x509_store_st(Structure):
pass
X509_STORE = x509_store_st
class x509_store_ctx_st(Structure):
pass
X509_STORE_CTX = x509_store_ctx_st
engine_st._fields_ = [
]
class PEM_Encode_Seal_st(Structure):
pass
PEM_Encode_Seal_st._fields_ = [
('encode', EVP_ENCODE_CTX),
('md', EVP_MD_CTX),
('cipher', EVP_CIPHER_CTX),
]
assert sizeof(PEM_Encode_Seal_st) == 252, sizeof(PEM_Encode_Seal_st)
assert alignment(PEM_Encode_Seal_st) == 4, alignment(PEM_Encode_Seal_st)
PEM_ENCODE_SEAL_CTX = PEM_Encode_Seal_st
class pem_recip_st(Structure):
pass
pem_recip_st._fields_ = [
('name', STRING),
('dn', POINTER(X509_NAME)),
('cipher', c_int),
('key_enc', c_int),
]
assert sizeof(pem_recip_st) == 16, sizeof(pem_recip_st)
assert alignment(pem_recip_st) == 4, alignment(pem_recip_st)
PEM_USER = pem_recip_st
class pem_ctx_st(Structure):
pass
class N10pem_ctx_st4DOLLAR_16E(Structure):
pass
N10pem_ctx_st4DOLLAR_16E._fields_ = [
('version', c_int),
('mode', c_int),
]
assert sizeof(N10pem_ctx_st4DOLLAR_16E) == 8, sizeof(N10pem_ctx_st4DOLLAR_16E)
assert alignment(N10pem_ctx_st4DOLLAR_16E) == 4, alignment(N10pem_ctx_st4DOLLAR_16E)
class N10pem_ctx_st4DOLLAR_17E(Structure):
pass
N10pem_ctx_st4DOLLAR_17E._fields_ = [
('cipher', c_int),
]
assert sizeof(N10pem_ctx_st4DOLLAR_17E) == 4, sizeof(N10pem_ctx_st4DOLLAR_17E)
assert alignment(N10pem_ctx_st4DOLLAR_17E) == 4, alignment(N10pem_ctx_st4DOLLAR_17E)
pem_ctx_st._fields_ = [
('type', c_int),
('proc_type', N10pem_ctx_st4DOLLAR_16E),
('domain', STRING),
('DEK_info', N10pem_ctx_st4DOLLAR_17E),
('originator', POINTER(PEM_USER)),
('num_recipient', c_int),
('recipient', POINTER(POINTER(PEM_USER))),
('x509_chain', POINTER(STACK)),
('md', POINTER(EVP_MD)),
('md_enc', c_int),
('md_len', c_int),
('md_data', STRING),
('dec', POINTER(EVP_CIPHER)),
('key_len', c_int),
('key', POINTER(c_ubyte)),
('data_enc', c_int),
('data_len', c_int),
('data', POINTER(c_ubyte)),
]
assert sizeof(pem_ctx_st) == 76, sizeof(pem_ctx_st)
assert alignment(pem_ctx_st) == 4, alignment(pem_ctx_st)
PEM_CTX = pem_ctx_st
pem_password_cb = CFUNCTYPE(c_int, STRING, c_int, c_int, c_void_p)
class pkcs7_issuer_and_serial_st(Structure):
pass
pkcs7_issuer_and_serial_st._fields_ = [
('issuer', POINTER(X509_NAME)),
('serial', POINTER(ASN1_INTEGER)),
]
assert sizeof(pkcs7_issuer_and_serial_st) == 8, sizeof(pkcs7_issuer_and_serial_st)
assert alignment(pkcs7_issuer_and_serial_st) == 4, alignment(pkcs7_issuer_and_serial_st)
PKCS7_ISSUER_AND_SERIAL = pkcs7_issuer_and_serial_st
class pkcs7_signer_info_st(Structure):
pass
pkcs7_signer_info_st._fields_ = [
('version', POINTER(ASN1_INTEGER)),
('issuer_and_serial', POINTER(PKCS7_ISSUER_AND_SERIAL)),
('digest_alg', POINTER(X509_ALGOR)),
('auth_attr', POINTER(STACK)),
('digest_enc_alg', POINTER(X509_ALGOR)),
('enc_digest', POINTER(ASN1_OCTET_STRING)),
('unauth_attr', POINTER(STACK)),
('pkey', POINTER(EVP_PKEY)),
]
assert sizeof(pkcs7_signer_info_st) == 32, sizeof(pkcs7_signer_info_st)
assert alignment(pkcs7_signer_info_st) == 4, alignment(pkcs7_signer_info_st)
PKCS7_SIGNER_INFO = pkcs7_signer_info_st
class pkcs7_recip_info_st(Structure):
pass
pkcs7_recip_info_st._fields_ = [
('version', POINTER(ASN1_INTEGER)),
('issuer_and_serial', POINTER(PKCS7_ISSUER_AND_SERIAL)),
('key_enc_algor', POINTER(X509_ALGOR)),
('enc_key', POINTER(ASN1_OCTET_STRING)),
('cert', POINTER(X509)),
]
assert sizeof(pkcs7_recip_info_st) == 20, sizeof(pkcs7_recip_info_st)
assert alignment(pkcs7_recip_info_st) == 4, alignment(pkcs7_recip_info_st)
PKCS7_RECIP_INFO = pkcs7_recip_info_st
class pkcs7_signed_st(Structure):
pass
class pkcs7_st(Structure):
pass
pkcs7_signed_st._fields_ = [
('version', POINTER(ASN1_INTEGER)),
('md_algs', POINTER(STACK)),
('cert', POINTER(STACK)),
('crl', POINTER(STACK)),
('signer_info', POINTER(STACK)),
('contents', POINTER(pkcs7_st)),
]
assert sizeof(pkcs7_signed_st) == 24, sizeof(pkcs7_signed_st)
assert alignment(pkcs7_signed_st) == 4, alignment(pkcs7_signed_st)
PKCS7_SIGNED = pkcs7_signed_st
class pkcs7_enc_content_st(Structure):
pass
pkcs7_enc_content_st._fields_ = [
('content_type', POINTER(ASN1_OBJECT)),
('algorithm', POINTER(X509_ALGOR)),
('enc_data', POINTER(ASN1_OCTET_STRING)),
('cipher', POINTER(EVP_CIPHER)),
]
assert sizeof(pkcs7_enc_content_st) == 16, sizeof(pkcs7_enc_content_st)
assert alignment(pkcs7_enc_content_st) == 4, alignment(pkcs7_enc_content_st)
PKCS7_ENC_CONTENT = pkcs7_enc_content_st
class pkcs7_enveloped_st(Structure):
pass
pkcs7_enveloped_st._fields_ = [
('version', POINTER(ASN1_INTEGER)),
('recipientinfo', POINTER(STACK)),
('enc_data', POINTER(PKCS7_ENC_CONTENT)),
]
assert sizeof(pkcs7_enveloped_st) == 12, sizeof(pkcs7_enveloped_st)
assert alignment(pkcs7_enveloped_st) == 4, alignment(pkcs7_enveloped_st)
PKCS7_ENVELOPE = pkcs7_enveloped_st
class pkcs7_signedandenveloped_st(Structure):
pass
pkcs7_signedandenveloped_st._fields_ = [
('version', POINTER(ASN1_INTEGER)),
('md_algs', POINTER(STACK)),
('cert', POINTER(STACK)),
('crl', POINTER(STACK)),
('signer_info', POINTER(STACK)),
('enc_data', POINTER(PKCS7_ENC_CONTENT)),
('recipientinfo', POINTER(STACK)),
]
assert sizeof(pkcs7_signedandenveloped_st) == 28, sizeof(pkcs7_signedandenveloped_st)
assert alignment(pkcs7_signedandenveloped_st) == 4, alignment(pkcs7_signedandenveloped_st)
PKCS7_SIGN_ENVELOPE = pkcs7_signedandenveloped_st
class pkcs7_digest_st(Structure):
pass
pkcs7_digest_st._fields_ = [
('version', POINTER(ASN1_INTEGER)),
('md', POINTER(X509_ALGOR)),
('contents', POINTER(pkcs7_st)),
('digest', POINTER(ASN1_OCTET_STRING)),
]
assert sizeof(pkcs7_digest_st) == 16, sizeof(pkcs7_digest_st)
assert alignment(pkcs7_digest_st) == 4, alignment(pkcs7_digest_st)
PKCS7_DIGEST = pkcs7_digest_st
class pkcs7_encrypted_st(Structure):
pass
pkcs7_encrypted_st._fields_ = [
('version', POINTER(ASN1_INTEGER)),
('enc_data', POINTER(PKCS7_ENC_CONTENT)),
]
assert sizeof(pkcs7_encrypted_st) == 8, sizeof(pkcs7_encrypted_st)
assert alignment(pkcs7_encrypted_st) == 4, alignment(pkcs7_encrypted_st)
PKCS7_ENCRYPT = pkcs7_encrypted_st
class N8pkcs7_st4DOLLAR_15E(Union):
pass
N8pkcs7_st4DOLLAR_15E._fields_ = [
('ptr', STRING),
('data', POINTER(ASN1_OCTET_STRING)),
('sign', POINTER(PKCS7_SIGNED)),
('enveloped', POINTER(PKCS7_ENVELOPE)),
('signed_and_enveloped', POINTER(PKCS7_SIGN_ENVELOPE)),
('digest', POINTER(PKCS7_DIGEST)),
('encrypted', POINTER(PKCS7_ENCRYPT)),
('other', POINTER(ASN1_TYPE)),
]
assert sizeof(N8pkcs7_st4DOLLAR_15E) == 4, sizeof(N8pkcs7_st4DOLLAR_15E)
assert alignment(N8pkcs7_st4DOLLAR_15E) == 4, alignment(N8pkcs7_st4DOLLAR_15E)
pkcs7_st._fields_ = [
('asn1', POINTER(c_ubyte)),
('length', c_long),
('state', c_int),
('detached', c_int),
('type', POINTER(ASN1_OBJECT)),
('d', N8pkcs7_st4DOLLAR_15E),
]
assert sizeof(pkcs7_st) == 24, sizeof(pkcs7_st)
assert alignment(pkcs7_st) == 4, alignment(pkcs7_st)
PKCS7 = pkcs7_st
class rc2_key_st(Structure):
pass
rc2_key_st._fields_ = [
('data', c_uint * 64),
]
assert sizeof(rc2_key_st) == 256, sizeof(rc2_key_st)
assert alignment(rc2_key_st) == 4, alignment(rc2_key_st)
RC2_KEY = rc2_key_st
class rc4_key_st(Structure):
pass
rc4_key_st._fields_ = [
('x', c_ubyte),
('y', c_ubyte),
('data', c_ubyte * 256),
]
assert sizeof(rc4_key_st) == 258, sizeof(rc4_key_st)
assert alignment(rc4_key_st) == 1, alignment(rc4_key_st)
RC4_KEY = rc4_key_st
class rc5_key_st(Structure):
pass
rc5_key_st._fields_ = [
('rounds', c_int),
('data', c_ulong * 34),
]
assert sizeof(rc5_key_st) == 140, sizeof(rc5_key_st)
assert alignment(rc5_key_st) == 4, alignment(rc5_key_st)
RC5_32_KEY = rc5_key_st
class RIPEMD160state_st(Structure):
pass
RIPEMD160state_st._fields_ = [
('A', c_uint),
('B', c_uint),
('C', c_uint),
('D', c_uint),
('E', c_uint),
('Nl', c_uint),
('Nh', c_uint),
('data', c_uint * 16),
('num', c_int),
]
assert sizeof(RIPEMD160state_st) == 96, sizeof(RIPEMD160state_st)
assert alignment(RIPEMD160state_st) == 4, alignment(RIPEMD160state_st)
RIPEMD160_CTX = RIPEMD160state_st
RSA = rsa_st
class rsa_meth_st(Structure):
pass
rsa_meth_st._fields_ = [
('name', STRING),
('rsa_pub_enc', CFUNCTYPE(c_int, c_int, POINTER(c_ubyte), POINTER(c_ubyte), POINTER(RSA), c_int)),
('rsa_pub_dec', CFUNCTYPE(c_int, c_int, POINTER(c_ubyte), POINTER(c_ubyte), POINTER(RSA), c_int)),
('rsa_priv_enc', CFUNCTYPE(c_int, c_int, POINTER(c_ubyte), POINTER(c_ubyte), POINTER(RSA), c_int)),
('rsa_priv_dec', CFUNCTYPE(c_int, c_int, POINTER(c_ubyte), POINTER(c_ubyte), POINTER(RSA), c_int)),
('rsa_mod_exp', CFUNCTYPE(c_int, POINTER(BIGNUM), POINTER(BIGNUM), POINTER(RSA))),
('bn_mod_exp', CFUNCTYPE(c_int, POINTER(BIGNUM), POINTER(BIGNUM), POINTER(BIGNUM), POINTER(BIGNUM), POINTER(BN_CTX), POINTER(BN_MONT_CTX))),
('init', CFUNCTYPE(c_int, POINTER(RSA))),
('finish', CFUNCTYPE(c_int, POINTER(RSA))),
('flags', c_int),
('app_data', STRING),
('rsa_sign', CFUNCTYPE(c_int, c_int, POINTER(c_ubyte), c_uint, POINTER(c_ubyte), POINTER(c_uint), POINTER(RSA))),
('rsa_verify', CFUNCTYPE(c_int, c_int, POINTER(c_ubyte), c_uint, POINTER(c_ubyte), c_uint, POINTER(RSA))),
]
assert sizeof(rsa_meth_st) == 52, sizeof(rsa_meth_st)
assert alignment(rsa_meth_st) == 4, alignment(rsa_meth_st)
RSA_METHOD = rsa_meth_st
rsa_st._fields_ = [
('pad', c_int),
('version', c_long),
('meth', POINTER(RSA_METHOD)),
('engine', POINTER(ENGINE)),
('n', POINTER(BIGNUM)),
('e', POINTER(BIGNUM)),
('d', POINTER(BIGNUM)),
('p', POINTER(BIGNUM)),
('q', POINTER(BIGNUM)),
('dmp1', POINTER(BIGNUM)),
('dmq1', POINTER(BIGNUM)),
('iqmp', POINTER(BIGNUM)),
('ex_data', CRYPTO_EX_DATA),
('references', c_int),
('flags', c_int),
('_method_mod_n', POINTER(BN_MONT_CTX)),
('_method_mod_p', POINTER(BN_MONT_CTX)),
('_method_mod_q', POINTER(BN_MONT_CTX)),
('bignum_data', STRING),
('blinding', POINTER(BN_BLINDING)),
]
assert sizeof(rsa_st) == 84, sizeof(rsa_st)
assert alignment(rsa_st) == 4, alignment(rsa_st)
openssl_fptr = CFUNCTYPE(None)
class SHAstate_st(Structure):
pass
SHAstate_st._fields_ = [
('h0', c_uint),
('h1', c_uint),
('h2', c_uint),
('h3', c_uint),
('h4', c_uint),
('Nl', c_uint),
('Nh', c_uint),
('data', c_uint * 16),
('num', c_int),
]
assert sizeof(SHAstate_st) == 96, sizeof(SHAstate_st)
assert alignment(SHAstate_st) == 4, alignment(SHAstate_st)
SHA_CTX = SHAstate_st
class ssl_st(Structure):
pass
ssl_crock_st = POINTER(ssl_st)
class ssl_cipher_st(Structure):
pass
ssl_cipher_st._fields_ = [
('valid', c_int),
('name', STRING),
('id', c_ulong),
('algorithms', c_ulong),
('algo_strength', c_ulong),
('algorithm2', c_ulong),
('strength_bits', c_int),
('alg_bits', c_int),
('mask', c_ulong),
('mask_strength', c_ulong),
]
assert sizeof(ssl_cipher_st) == 40, sizeof(ssl_cipher_st)
assert alignment(ssl_cipher_st) == 4, alignment(ssl_cipher_st)
SSL_CIPHER = ssl_cipher_st
SSL = ssl_st
class ssl_ctx_st(Structure):
pass
SSL_CTX = ssl_ctx_st
class ssl_method_st(Structure):
pass
class ssl3_enc_method(Structure):
pass
ssl_method_st._fields_ = [
('version', c_int),
('ssl_new', CFUNCTYPE(c_int, POINTER(SSL))),
('ssl_clear', CFUNCTYPE(None, POINTER(SSL))),
('ssl_free', CFUNCTYPE(None, POINTER(SSL))),
('ssl_accept', CFUNCTYPE(c_int, POINTER(SSL))),
('ssl_connect', CFUNCTYPE(c_int, POINTER(SSL))),
('ssl_read', CFUNCTYPE(c_int, POINTER(SSL), c_void_p, c_int)),
('ssl_peek', CFUNCTYPE(c_int, POINTER(SSL), c_void_p, c_int)),
('ssl_write', CFUNCTYPE(c_int, POINTER(SSL), c_void_p, c_int)),
('ssl_shutdown', CFUNCTYPE(c_int, POINTER(SSL))),
('ssl_renegotiate', CFUNCTYPE(c_int, POINTER(SSL))),
('ssl_renegotiate_check', CFUNCTYPE(c_int, POINTER(SSL))),
('ssl_ctrl', CFUNCTYPE(c_long, POINTER(SSL), c_int, c_long, c_void_p)),
('ssl_ctx_ctrl', CFUNCTYPE(c_long, POINTER(SSL_CTX), c_int, c_long, c_void_p)),
('get_cipher_by_char', CFUNCTYPE(POINTER(SSL_CIPHER), POINTER(c_ubyte))),
('put_cipher_by_char', CFUNCTYPE(c_int, POINTER(SSL_CIPHER), POINTER(c_ubyte))),
('ssl_pending', CFUNCTYPE(c_int, POINTER(SSL))),
('num_ciphers', CFUNCTYPE(c_int)),
('get_cipher', CFUNCTYPE(POINTER(SSL_CIPHER), c_uint)),
('get_ssl_method', CFUNCTYPE(POINTER(ssl_method_st), c_int)),
('get_timeout', CFUNCTYPE(c_long)),
('ssl3_enc', POINTER(ssl3_enc_method)),
('ssl_version', CFUNCTYPE(c_int)),
('ssl_callback_ctrl', CFUNCTYPE(c_long, POINTER(SSL), c_int, CFUNCTYPE(None))),
('ssl_ctx_callback_ctrl', CFUNCTYPE(c_long, POINTER(SSL_CTX), c_int, CFUNCTYPE(None))),
]
assert sizeof(ssl_method_st) == 100, sizeof(ssl_method_st)
assert alignment(ssl_method_st) == 4, alignment(ssl_method_st)
ssl3_enc_method._fields_ = [
]
SSL_METHOD = ssl_method_st
class ssl_session_st(Structure):
pass
class sess_cert_st(Structure):
pass
ssl_session_st._fields_ = [
('ssl_version', c_int),
('key_arg_length', c_uint),
('key_arg', c_ubyte * 8),
('master_key_length', c_int),
('master_key', c_ubyte * 48),
('session_id_length', c_uint),
('session_id', c_ubyte * 32),
('sid_ctx_length', c_uint),
('sid_ctx', c_ubyte * 32),
('not_resumable', c_int),
('sess_cert', POINTER(sess_cert_st)),
('peer', POINTER(X509)),
('verify_result', c_long),
('references', c_int),
('timeout', c_long),
('time', c_long),
('compress_meth', c_int),
('cipher', POINTER(SSL_CIPHER)),
('cipher_id', c_ulong),
('ciphers', POINTER(STACK)),
('ex_data', CRYPTO_EX_DATA),
('prev', POINTER(ssl_session_st)),
('next', POINTER(ssl_session_st)),
]
assert sizeof(ssl_session_st) == 200, sizeof(ssl_session_st)
assert alignment(ssl_session_st) == 4, alignment(ssl_session_st)
sess_cert_st._fields_ = [
]
SSL_SESSION = ssl_session_st
GEN_SESSION_CB = CFUNCTYPE(c_int, POINTER(SSL), POINTER(c_ubyte), POINTER(c_uint))
class ssl_comp_st(Structure):
pass
ssl_comp_st._fields_ = [
('id', c_int),
('name', STRING),
('method', POINTER(COMP_METHOD)),
]
assert sizeof(ssl_comp_st) == 12, sizeof(ssl_comp_st)
assert alignment(ssl_comp_st) == 4, alignment(ssl_comp_st)
SSL_COMP = ssl_comp_st
class N10ssl_ctx_st4DOLLAR_18E(Structure):
pass
N10ssl_ctx_st4DOLLAR_18E._fields_ = [
('sess_connect', c_int),
('sess_connect_renegotiate', c_int),
('sess_connect_good', c_int),
('sess_accept', c_int),
('sess_accept_renegotiate', c_int),
('sess_accept_good', c_int),
('sess_miss', c_int),
('sess_timeout', c_int),
('sess_cache_full', c_int),
('sess_hit', c_int),
('sess_cb_hit', c_int),
]
assert sizeof(N10ssl_ctx_st4DOLLAR_18E) == 44, sizeof(N10ssl_ctx_st4DOLLAR_18E)
assert alignment(N10ssl_ctx_st4DOLLAR_18E) == 4, alignment(N10ssl_ctx_st4DOLLAR_18E)
class cert_st(Structure):
pass
ssl_ctx_st._fields_ = [
('method', POINTER(SSL_METHOD)),
('cipher_list', POINTER(STACK)),
('cipher_list_by_id', POINTER(STACK)),
('cert_store', POINTER(x509_store_st)),
('sessions', POINTER(lhash_st)),
('session_cache_size', c_ulong),
('session_cache_head', POINTER(ssl_session_st)),
('session_cache_tail', POINTER(ssl_session_st)),
('session_cache_mode', c_int),
('session_timeout', c_long),
('new_session_cb', CFUNCTYPE(c_int, POINTER(ssl_st), POINTER(SSL_SESSION))),
('remove_session_cb', CFUNCTYPE(None, POINTER(ssl_ctx_st), POINTER(SSL_SESSION))),
('get_session_cb', CFUNCTYPE(POINTER(SSL_SESSION), POINTER(ssl_st), POINTER(c_ubyte), c_int, POINTER(c_int))),
('stats', N10ssl_ctx_st4DOLLAR_18E),
('references', c_int),
('app_verify_callback', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX), c_void_p)),
('app_verify_arg', c_void_p),
('default_passwd_callback', POINTER(pem_password_cb)),
('default_passwd_callback_userdata', c_void_p),
('client_cert_cb', CFUNCTYPE(c_int, POINTER(SSL), POINTER(POINTER(X509)), POINTER(POINTER(EVP_PKEY)))),
('ex_data', CRYPTO_EX_DATA),
('rsa_md5', POINTER(EVP_MD)),
('md5', POINTER(EVP_MD)),
('sha1', POINTER(EVP_MD)),
('extra_certs', POINTER(STACK)),
('comp_methods', POINTER(STACK)),
('info_callback', CFUNCTYPE(None, POINTER(SSL), c_int, c_int)),
('client_CA', POINTER(STACK)),
('options', c_ulong),
('mode', c_ulong),
('max_cert_list', c_long),
('cert', POINTER(cert_st)),
('read_ahead', c_int),
('msg_callback', CFUNCTYPE(None, c_int, c_int, c_int, c_void_p, c_ulong, POINTER(SSL), c_void_p)),
('msg_callback_arg', c_void_p),
('verify_mode', c_int),
('verify_depth', c_int),
('sid_ctx_length', c_uint),
('sid_ctx', c_ubyte * 32),
('default_verify_callback', CFUNCTYPE(c_int, c_int, POINTER(X509_STORE_CTX))),
('generate_session_id', GEN_SESSION_CB),
('purpose', c_int),
('trust', c_int),
('quiet_shutdown', c_int),
]
assert sizeof(ssl_ctx_st) == 248, sizeof(ssl_ctx_st)
assert alignment(ssl_ctx_st) == 4, alignment(ssl_ctx_st)
cert_st._fields_ = [
]
class ssl2_state_st(Structure):
pass
class ssl3_state_st(Structure):
pass
ssl_st._fields_ = [
('version', c_int),
('type', c_int),
('method', POINTER(SSL_METHOD)),
('rbio', POINTER(BIO)),
('wbio', POINTER(BIO)),
('bbio', POINTER(BIO)),
('rwstate', c_int),
('in_handshake', c_int),
('handshake_func', CFUNCTYPE(c_int)),
('server', c_int),
('new_session', c_int),
('quiet_shutdown', c_int),
('shutdown', c_int),
('state', c_int),
('rstate', c_int),
('init_buf', POINTER(BUF_MEM)),
('init_msg', c_void_p),
('init_num', c_int),
('init_off', c_int),
('packet', POINTER(c_ubyte)),
('packet_length', c_uint),
('s2', POINTER(ssl2_state_st)),
('s3', POINTER(ssl3_state_st)),
('read_ahead', c_int),
('msg_callback', CFUNCTYPE(None, c_int, c_int, c_int, c_void_p, c_ulong, POINTER(SSL), c_void_p)),
('msg_callback_arg', c_void_p),
('hit', c_int),
('purpose', c_int),
('trust', c_int),
('cipher_list', POINTER(STACK)),
('cipher_list_by_id', POINTER(STACK)),
('enc_read_ctx', POINTER(EVP_CIPHER_CTX)),
('read_hash', POINTER(EVP_MD)),
('expand', POINTER(COMP_CTX)),
('enc_write_ctx', POINTER(EVP_CIPHER_CTX)),
('write_hash', POINTER(EVP_MD)),
('compress', POINTER(COMP_CTX)),
('cert', POINTER(cert_st)),
('sid_ctx_length', c_uint),
('sid_ctx', c_ubyte * 32),
('session', POINTER(SSL_SESSION)),
('generate_session_id', GEN_SESSION_CB),
('verify_mode', c_int),
('verify_depth', c_int),
('verify_callback', CFUNCTYPE(c_int, c_int, POINTER(X509_STORE_CTX))),
('info_callback', CFUNCTYPE(None, POINTER(SSL), c_int, c_int)),
('error', c_int),
('error_code', c_int),
('ctx', POINTER(SSL_CTX)),
('debug', c_int),
('verify_result', c_long),
('ex_data', CRYPTO_EX_DATA),
('client_CA', POINTER(STACK)),
('references', c_int),
('options', c_ulong),
('mode', c_ulong),
('max_cert_list', c_long),
('first_packet', c_int),
('client_version', c_int),
]
assert sizeof(ssl_st) == 268, sizeof(ssl_st)
assert alignment(ssl_st) == 4, alignment(ssl_st)
class N13ssl2_state_st4DOLLAR_19E(Structure):
pass
N13ssl2_state_st4DOLLAR_19E._fields_ = [
('conn_id_length', c_uint),
('cert_type', c_uint),
('cert_length', c_uint),
('csl', c_uint),
('clear', c_uint),
('enc', c_uint),
('ccl', c_ubyte * 32),
('cipher_spec_length', c_uint),
('session_id_length', c_uint),
('clen', c_uint),
('rlen', c_uint),
]
assert sizeof(N13ssl2_state_st4DOLLAR_19E) == 72, sizeof(N13ssl2_state_st4DOLLAR_19E)
assert alignment(N13ssl2_state_st4DOLLAR_19E) == 4, alignment(N13ssl2_state_st4DOLLAR_19E)
ssl2_state_st._fields_ = [
('three_byte_header', c_int),
('clear_text', c_int),
('escape', c_int),
('ssl2_rollback', c_int),
('wnum', c_uint),
('wpend_tot', c_int),
('wpend_buf', POINTER(c_ubyte)),
('wpend_off', c_int),
('wpend_len', c_int),
('wpend_ret', c_int),
('rbuf_left', c_int),
('rbuf_offs', c_int),
('rbuf', POINTER(c_ubyte)),
('wbuf', POINTER(c_ubyte)),
('write_ptr', POINTER(c_ubyte)),
('padding', c_uint),
('rlength', c_uint),
('ract_data_length', c_int),
('wlength', c_uint),
('wact_data_length', c_int),
('ract_data', POINTER(c_ubyte)),
('wact_data', POINTER(c_ubyte)),
('mac_data', POINTER(c_ubyte)),
('read_key', POINTER(c_ubyte)),
('write_key', POINTER(c_ubyte)),
('challenge_length', c_uint),
('challenge', c_ubyte * 32),
('conn_id_length', c_uint),
('conn_id', c_ubyte * 16),
('key_material_length', c_uint),
('key_material', c_ubyte * 48),
('read_sequence', c_ulong),
('write_sequence', c_ulong),
('tmp', N13ssl2_state_st4DOLLAR_19E),
]
assert sizeof(ssl2_state_st) == 288, sizeof(ssl2_state_st)
assert alignment(ssl2_state_st) == 4, alignment(ssl2_state_st)
SSL2_STATE = ssl2_state_st
class ssl3_record_st(Structure):
pass
ssl3_record_st._fields_ = [
('type', c_int),
('length', c_uint),
('off', c_uint),
('data', POINTER(c_ubyte)),
('input', POINTER(c_ubyte)),
('comp', POINTER(c_ubyte)),
]
assert sizeof(ssl3_record_st) == 24, sizeof(ssl3_record_st)
assert alignment(ssl3_record_st) == 4, alignment(ssl3_record_st)
SSL3_RECORD = ssl3_record_st
class ssl3_buffer_st(Structure):
pass
size_t = __darwin_size_t
ssl3_buffer_st._fields_ = [
('buf', POINTER(c_ubyte)),
('len', size_t),
('offset', c_int),
('left', c_int),
]
assert sizeof(ssl3_buffer_st) == 16, sizeof(ssl3_buffer_st)
assert alignment(ssl3_buffer_st) == 4, alignment(ssl3_buffer_st)
SSL3_BUFFER = ssl3_buffer_st
class N13ssl3_state_st4DOLLAR_20E(Structure):
pass
N13ssl3_state_st4DOLLAR_20E._fields_ = [
('cert_verify_md', c_ubyte * 72),
('finish_md', c_ubyte * 72),
('finish_md_len', c_int),
('peer_finish_md', c_ubyte * 72),
('peer_finish_md_len', c_int),
('message_size', c_ulong),
('message_type', c_int),
('new_cipher', POINTER(SSL_CIPHER)),
('dh', POINTER(DH)),
('next_state', c_int),
('reuse_message', c_int),
('cert_req', c_int),
('ctype_num', c_int),
('ctype', c_char * 7),
('ca_names', POINTER(STACK)),
('use_rsa_tmp', c_int),
('key_block_length', c_int),
('key_block', POINTER(c_ubyte)),
('new_sym_enc', POINTER(EVP_CIPHER)),
('new_hash', POINTER(EVP_MD)),
('new_compression', POINTER(SSL_COMP)),
('cert_request', c_int),
]
assert sizeof(N13ssl3_state_st4DOLLAR_20E) == 296, sizeof(N13ssl3_state_st4DOLLAR_20E)
assert alignment(N13ssl3_state_st4DOLLAR_20E) == 4, alignment(N13ssl3_state_st4DOLLAR_20E)
ssl3_state_st._fields_ = [
('flags', c_long),
('delay_buf_pop_ret', c_int),
('read_sequence', c_ubyte * 8),
('read_mac_secret', c_ubyte * 36),
('write_sequence', c_ubyte * 8),
('write_mac_secret', c_ubyte * 36),
('server_random', c_ubyte * 32),
('client_random', c_ubyte * 32),
('need_empty_fragments', c_int),
('empty_fragment_done', c_int),
('rbuf', SSL3_BUFFER),
('wbuf', SSL3_BUFFER),
('rrec', SSL3_RECORD),
('wrec', SSL3_RECORD),
('alert_fragment', c_ubyte * 2),
('alert_fragment_len', c_uint),
('handshake_fragment', c_ubyte * 4),
('handshake_fragment_len', c_uint),
('wnum', c_uint),
('wpend_tot', c_int),
('wpend_type', c_int),
('wpend_ret', c_int),
('wpend_buf', POINTER(c_ubyte)),
('finish_dgst1', EVP_MD_CTX),
('finish_dgst2', EVP_MD_CTX),
('change_cipher_spec', c_int),
('warn_alert', c_int),
('fatal_alert', c_int),
('alert_dispatch', c_int),
('send_alert', c_ubyte * 2),
('renegotiate', c_int),
('total_renegotiations', c_int),
('num_renegotiations', c_int),
('in_read_app_data', c_int),
('tmp', N13ssl3_state_st4DOLLAR_20E),
]
assert sizeof(ssl3_state_st) == 648, sizeof(ssl3_state_st)
assert alignment(ssl3_state_st) == 4, alignment(ssl3_state_st)
SSL3_STATE = ssl3_state_st
stack_st._fields_ = [
('num', c_int),
('data', POINTER(STRING)),
('sorted', c_int),
('num_alloc', c_int),
('comp', CFUNCTYPE(c_int, POINTER(STRING), POINTER(STRING))),
]
assert sizeof(stack_st) == 20, sizeof(stack_st)
assert alignment(stack_st) == 4, alignment(stack_st)
class ui_st(Structure):
pass
ui_st._fields_ = [
]
UI = ui_st
class ui_method_st(Structure):
pass
ui_method_st._fields_ = [
]
UI_METHOD = ui_method_st
class ui_string_st(Structure):
pass
ui_string_st._fields_ = [
]
UI_STRING = ui_string_st
# values for enumeration 'UI_string_types'
UI_string_types = c_int # enum
class X509_objects_st(Structure):
pass
X509_objects_st._fields_ = [
('nid', c_int),
('a2i', CFUNCTYPE(c_int)),
('i2a', CFUNCTYPE(c_int)),
]
assert sizeof(X509_objects_st) == 12, sizeof(X509_objects_st)
assert alignment(X509_objects_st) == 4, alignment(X509_objects_st)
X509_OBJECTS = X509_objects_st
X509_algor_st._fields_ = [
('algorithm', POINTER(ASN1_OBJECT)),
('parameter', POINTER(ASN1_TYPE)),
]
assert sizeof(X509_algor_st) == 8, sizeof(X509_algor_st)
assert alignment(X509_algor_st) == 4, alignment(X509_algor_st)
class X509_val_st(Structure):
pass
X509_val_st._fields_ = [
('notBefore', POINTER(ASN1_TIME)),
('notAfter', POINTER(ASN1_TIME)),
]
assert sizeof(X509_val_st) == 8, sizeof(X509_val_st)
assert alignment(X509_val_st) == 4, alignment(X509_val_st)
X509_VAL = X509_val_st
class X509_pubkey_st(Structure):
pass
X509_pubkey_st._fields_ = [
('algor', POINTER(X509_ALGOR)),
('public_key', POINTER(ASN1_BIT_STRING)),
('pkey', POINTER(EVP_PKEY)),
]
assert sizeof(X509_pubkey_st) == 12, sizeof(X509_pubkey_st)
assert alignment(X509_pubkey_st) == 4, alignment(X509_pubkey_st)
X509_PUBKEY = X509_pubkey_st
class X509_sig_st(Structure):
pass
X509_sig_st._fields_ = [
('algor', POINTER(X509_ALGOR)),
('digest', POINTER(ASN1_OCTET_STRING)),
]
assert sizeof(X509_sig_st) == 8, sizeof(X509_sig_st)
assert alignment(X509_sig_st) == 4, alignment(X509_sig_st)
X509_SIG = X509_sig_st
class X509_name_entry_st(Structure):
pass
X509_name_entry_st._fields_ = [
('object', POINTER(ASN1_OBJECT)),
('value', POINTER(ASN1_STRING)),
('set', c_int),
('size', c_int),
]
assert sizeof(X509_name_entry_st) == 16, sizeof(X509_name_entry_st)
assert alignment(X509_name_entry_st) == 4, alignment(X509_name_entry_st)
X509_NAME_ENTRY = X509_name_entry_st
X509_name_st._fields_ = [
('entries', POINTER(STACK)),
('modified', c_int),
('bytes', POINTER(BUF_MEM)),
('hash', c_ulong),
]
assert sizeof(X509_name_st) == 16, sizeof(X509_name_st)
assert alignment(X509_name_st) == 4, alignment(X509_name_st)
class X509_extension_st(Structure):
pass
X509_extension_st._fields_ = [
('object', POINTER(ASN1_OBJECT)),
('critical', ASN1_BOOLEAN),
('value', POINTER(ASN1_OCTET_STRING)),
]
assert sizeof(X509_extension_st) == 12, sizeof(X509_extension_st)
assert alignment(X509_extension_st) == 4, alignment(X509_extension_st)
X509_EXTENSION = X509_extension_st
class x509_attributes_st(Structure):
pass
class N18x509_attributes_st4DOLLAR_13E(Union):
pass
N18x509_attributes_st4DOLLAR_13E._fields_ = [
('ptr', STRING),
('set', POINTER(STACK)),
('single', POINTER(ASN1_TYPE)),
]
assert sizeof(N18x509_attributes_st4DOLLAR_13E) == 4, sizeof(N18x509_attributes_st4DOLLAR_13E)
assert alignment(N18x509_attributes_st4DOLLAR_13E) == 4, alignment(N18x509_attributes_st4DOLLAR_13E)
x509_attributes_st._fields_ = [
('object', POINTER(ASN1_OBJECT)),
('single', c_int),
('value', N18x509_attributes_st4DOLLAR_13E),
]
assert sizeof(x509_attributes_st) == 12, sizeof(x509_attributes_st)
assert alignment(x509_attributes_st) == 4, alignment(x509_attributes_st)
X509_ATTRIBUTE = x509_attributes_st
class X509_req_info_st(Structure):
pass
X509_req_info_st._fields_ = [
('enc', ASN1_ENCODING),
('version', POINTER(ASN1_INTEGER)),
('subject', POINTER(X509_NAME)),
('pubkey', POINTER(X509_PUBKEY)),
('attributes', POINTER(STACK)),
]
assert sizeof(X509_req_info_st) == 28, sizeof(X509_req_info_st)
assert alignment(X509_req_info_st) == 4, alignment(X509_req_info_st)
X509_REQ_INFO = X509_req_info_st
class X509_req_st(Structure):
pass
X509_req_st._fields_ = [
('req_info', POINTER(X509_REQ_INFO)),
('sig_alg', POINTER(X509_ALGOR)),
('signature', POINTER(ASN1_BIT_STRING)),
('references', c_int),
]
assert sizeof(X509_req_st) == 16, sizeof(X509_req_st)
assert alignment(X509_req_st) == 4, alignment(X509_req_st)
X509_REQ = X509_req_st
class x509_cinf_st(Structure):
pass
x509_cinf_st._fields_ = [
('version', POINTER(ASN1_INTEGER)),
('serialNumber', POINTER(ASN1_INTEGER)),
('signature', POINTER(X509_ALGOR)),
('issuer', POINTER(X509_NAME)),
('validity', POINTER(X509_VAL)),
('subject', POINTER(X509_NAME)),
('key', POINTER(X509_PUBKEY)),
('issuerUID', POINTER(ASN1_BIT_STRING)),
('subjectUID', POINTER(ASN1_BIT_STRING)),
('extensions', POINTER(STACK)),
]
assert sizeof(x509_cinf_st) == 40, sizeof(x509_cinf_st)
assert alignment(x509_cinf_st) == 4, alignment(x509_cinf_st)
X509_CINF = x509_cinf_st
class x509_cert_aux_st(Structure):
pass
x509_cert_aux_st._fields_ = [
('trust', POINTER(STACK)),
('reject', POINTER(STACK)),
('alias', POINTER(ASN1_UTF8STRING)),
('keyid', POINTER(ASN1_OCTET_STRING)),
('other', POINTER(STACK)),
]
assert sizeof(x509_cert_aux_st) == 20, sizeof(x509_cert_aux_st)
assert alignment(x509_cert_aux_st) == 4, alignment(x509_cert_aux_st)
X509_CERT_AUX = x509_cert_aux_st
class AUTHORITY_KEYID_st(Structure):
pass
x509_st._fields_ = [
('cert_info', POINTER(X509_CINF)),
('sig_alg', POINTER(X509_ALGOR)),
('signature', POINTER(ASN1_BIT_STRING)),
('valid', c_int),
('references', c_int),
('name', STRING),
('ex_data', CRYPTO_EX_DATA),
('ex_pathlen', c_long),
('ex_flags', c_ulong),
('ex_kusage', c_ulong),
('ex_xkusage', c_ulong),
('ex_nscert', c_ulong),
('skid', POINTER(ASN1_OCTET_STRING)),
('akid', POINTER(AUTHORITY_KEYID_st)),
('sha1_hash', c_ubyte * 20),
('aux', POINTER(X509_CERT_AUX)),
]
assert sizeof(x509_st) == 84, sizeof(x509_st)
assert alignment(x509_st) == 4, alignment(x509_st)
AUTHORITY_KEYID_st._fields_ = [
]
class x509_trust_st(Structure):
pass
x509_trust_st._fields_ = [
('trust', c_int),
('flags', c_int),
('check_trust', CFUNCTYPE(c_int, POINTER(x509_trust_st), POINTER(X509), c_int)),
('name', STRING),
('arg1', c_int),
('arg2', c_void_p),
]
assert sizeof(x509_trust_st) == 24, sizeof(x509_trust_st)
assert alignment(x509_trust_st) == 4, alignment(x509_trust_st)
X509_TRUST = x509_trust_st
class X509_revoked_st(Structure):
pass
X509_revoked_st._fields_ = [
('serialNumber', POINTER(ASN1_INTEGER)),
('revocationDate', POINTER(ASN1_TIME)),
('extensions', POINTER(STACK)),
('sequence', c_int),
]
assert sizeof(X509_revoked_st) == 16, sizeof(X509_revoked_st)
assert alignment(X509_revoked_st) == 4, alignment(X509_revoked_st)
X509_REVOKED = X509_revoked_st
class X509_crl_info_st(Structure):
pass
X509_crl_info_st._fields_ = [
('version', POINTER(ASN1_INTEGER)),
('sig_alg', POINTER(X509_ALGOR)),
('issuer', POINTER(X509_NAME)),
('lastUpdate', POINTER(ASN1_TIME)),
('nextUpdate', POINTER(ASN1_TIME)),
('revoked', POINTER(STACK)),
('extensions', POINTER(STACK)),
('enc', ASN1_ENCODING),
]
assert sizeof(X509_crl_info_st) == 40, sizeof(X509_crl_info_st)
assert alignment(X509_crl_info_st) == 4, alignment(X509_crl_info_st)
X509_CRL_INFO = X509_crl_info_st
X509_crl_st._fields_ = [
('crl', POINTER(X509_CRL_INFO)),
('sig_alg', POINTER(X509_ALGOR)),
('signature', POINTER(ASN1_BIT_STRING)),
('references', c_int),
]
assert sizeof(X509_crl_st) == 16, sizeof(X509_crl_st)
assert alignment(X509_crl_st) == 4, alignment(X509_crl_st)
class private_key_st(Structure):
pass
private_key_st._fields_ = [
('version', c_int),
('enc_algor', POINTER(X509_ALGOR)),
('enc_pkey', POINTER(ASN1_OCTET_STRING)),
('dec_pkey', POINTER(EVP_PKEY)),
('key_length', c_int),
('key_data', STRING),
('key_free', c_int),
('cipher', EVP_CIPHER_INFO),
('references', c_int),
]
assert sizeof(private_key_st) == 52, sizeof(private_key_st)
assert alignment(private_key_st) == 4, alignment(private_key_st)
X509_PKEY = private_key_st
class X509_info_st(Structure):
pass
X509_info_st._fields_ = [
('x509', POINTER(X509)),
('crl', POINTER(X509_CRL)),
('x_pkey', POINTER(X509_PKEY)),
('enc_cipher', EVP_CIPHER_INFO),
('enc_len', c_int),
('enc_data', STRING),
('references', c_int),
]
assert sizeof(X509_info_st) == 44, sizeof(X509_info_st)
assert alignment(X509_info_st) == 4, alignment(X509_info_st)
X509_INFO = X509_info_st
class Netscape_spkac_st(Structure):
pass
Netscape_spkac_st._fields_ = [
('pubkey', POINTER(X509_PUBKEY)),
('challenge', POINTER(ASN1_IA5STRING)),
]
assert sizeof(Netscape_spkac_st) == 8, sizeof(Netscape_spkac_st)
assert alignment(Netscape_spkac_st) == 4, alignment(Netscape_spkac_st)
NETSCAPE_SPKAC = Netscape_spkac_st
class Netscape_spki_st(Structure):
pass
Netscape_spki_st._fields_ = [
('spkac', POINTER(NETSCAPE_SPKAC)),
('sig_algor', POINTER(X509_ALGOR)),
('signature', POINTER(ASN1_BIT_STRING)),
]
assert sizeof(Netscape_spki_st) == 12, sizeof(Netscape_spki_st)
assert alignment(Netscape_spki_st) == 4, alignment(Netscape_spki_st)
NETSCAPE_SPKI = Netscape_spki_st
class Netscape_certificate_sequence(Structure):
pass
Netscape_certificate_sequence._fields_ = [
('type', POINTER(ASN1_OBJECT)),
('certs', POINTER(STACK)),
]
assert sizeof(Netscape_certificate_sequence) == 8, sizeof(Netscape_certificate_sequence)
assert alignment(Netscape_certificate_sequence) == 4, alignment(Netscape_certificate_sequence)
NETSCAPE_CERT_SEQUENCE = Netscape_certificate_sequence
class PBEPARAM_st(Structure):
pass
PBEPARAM_st._fields_ = [
('salt', POINTER(ASN1_OCTET_STRING)),
('iter', POINTER(ASN1_INTEGER)),
]
assert sizeof(PBEPARAM_st) == 8, sizeof(PBEPARAM_st)
assert alignment(PBEPARAM_st) == 4, alignment(PBEPARAM_st)
PBEPARAM = PBEPARAM_st
class PBE2PARAM_st(Structure):
pass
PBE2PARAM_st._fields_ = [
('keyfunc', POINTER(X509_ALGOR)),
('encryption', POINTER(X509_ALGOR)),
]
assert sizeof(PBE2PARAM_st) == 8, sizeof(PBE2PARAM_st)
assert alignment(PBE2PARAM_st) == 4, alignment(PBE2PARAM_st)
PBE2PARAM = PBE2PARAM_st
class PBKDF2PARAM_st(Structure):
pass
PBKDF2PARAM_st._fields_ = [
('salt', POINTER(ASN1_TYPE)),
('iter', POINTER(ASN1_INTEGER)),
('keylength', POINTER(ASN1_INTEGER)),
('prf', POINTER(X509_ALGOR)),
]
assert sizeof(PBKDF2PARAM_st) == 16, sizeof(PBKDF2PARAM_st)
assert alignment(PBKDF2PARAM_st) == 4, alignment(PBKDF2PARAM_st)
PBKDF2PARAM = PBKDF2PARAM_st
class pkcs8_priv_key_info_st(Structure):
pass
pkcs8_priv_key_info_st._fields_ = [
('broken', c_int),
('version', POINTER(ASN1_INTEGER)),
('pkeyalg', POINTER(X509_ALGOR)),
('pkey', POINTER(ASN1_TYPE)),
('attributes', POINTER(STACK)),
]
assert sizeof(pkcs8_priv_key_info_st) == 20, sizeof(pkcs8_priv_key_info_st)
assert alignment(pkcs8_priv_key_info_st) == 4, alignment(pkcs8_priv_key_info_st)
PKCS8_PRIV_KEY_INFO = pkcs8_priv_key_info_st
class x509_hash_dir_st(Structure):
pass
x509_hash_dir_st._fields_ = [
('num_dirs', c_int),
('dirs', POINTER(STRING)),
('dirs_type', POINTER(c_int)),
('num_dirs_alloced', c_int),
]
assert sizeof(x509_hash_dir_st) == 16, sizeof(x509_hash_dir_st)
assert alignment(x509_hash_dir_st) == 4, alignment(x509_hash_dir_st)
X509_HASH_DIR_CTX = x509_hash_dir_st
class x509_file_st(Structure):
pass
x509_file_st._fields_ = [
('num_paths', c_int),
('num_alloced', c_int),
('paths', POINTER(STRING)),
('path_type', POINTER(c_int)),
]
assert sizeof(x509_file_st) == 16, sizeof(x509_file_st)
assert alignment(x509_file_st) == 4, alignment(x509_file_st)
X509_CERT_FILE_CTX = x509_file_st
class x509_object_st(Structure):
pass
class N14x509_object_st4DOLLAR_14E(Union):
pass
N14x509_object_st4DOLLAR_14E._fields_ = [
('ptr', STRING),
('x509', POINTER(X509)),
('crl', POINTER(X509_CRL)),
('pkey', POINTER(EVP_PKEY)),
]
assert sizeof(N14x509_object_st4DOLLAR_14E) == 4, sizeof(N14x509_object_st4DOLLAR_14E)
assert alignment(N14x509_object_st4DOLLAR_14E) == 4, alignment(N14x509_object_st4DOLLAR_14E)
x509_object_st._fields_ = [
('type', c_int),
('data', N14x509_object_st4DOLLAR_14E),
]
assert sizeof(x509_object_st) == 8, sizeof(x509_object_st)
assert alignment(x509_object_st) == 4, alignment(x509_object_st)
X509_OBJECT = x509_object_st
class x509_lookup_st(Structure):
pass
X509_LOOKUP = x509_lookup_st
class x509_lookup_method_st(Structure):
pass
x509_lookup_method_st._fields_ = [
('name', STRING),
('new_item', CFUNCTYPE(c_int, POINTER(X509_LOOKUP))),
('free', CFUNCTYPE(None, POINTER(X509_LOOKUP))),
('init', CFUNCTYPE(c_int, POINTER(X509_LOOKUP))),
('shutdown', CFUNCTYPE(c_int, POINTER(X509_LOOKUP))),
('ctrl', CFUNCTYPE(c_int, POINTER(X509_LOOKUP), c_int, STRING, c_long, POINTER(STRING))),
('get_by_subject', CFUNCTYPE(c_int, POINTER(X509_LOOKUP), c_int, POINTER(X509_NAME), POINTER(X509_OBJECT))),
('get_by_issuer_serial', CFUNCTYPE(c_int, POINTER(X509_LOOKUP), c_int, POINTER(X509_NAME), POINTER(ASN1_INTEGER), POINTER(X509_OBJECT))),
('get_by_fingerprint', CFUNCTYPE(c_int, POINTER(X509_LOOKUP), c_int, POINTER(c_ubyte), c_int, POINTER(X509_OBJECT))),
('get_by_alias', CFUNCTYPE(c_int, POINTER(X509_LOOKUP), c_int, STRING, c_int, POINTER(X509_OBJECT))),
]
assert sizeof(x509_lookup_method_st) == 40, sizeof(x509_lookup_method_st)
assert alignment(x509_lookup_method_st) == 4, alignment(x509_lookup_method_st)
X509_LOOKUP_METHOD = x509_lookup_method_st
x509_store_st._fields_ = [
('cache', c_int),
('objs', POINTER(STACK)),
('get_cert_methods', POINTER(STACK)),
('flags', c_ulong),
('purpose', c_int),
('trust', c_int),
('verify', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX))),
('verify_cb', CFUNCTYPE(c_int, c_int, POINTER(X509_STORE_CTX))),
('get_issuer', CFUNCTYPE(c_int, POINTER(POINTER(X509)), POINTER(X509_STORE_CTX), POINTER(X509))),
('check_issued', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX), POINTER(X509), POINTER(X509))),
('check_revocation', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX))),
('get_crl', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX), POINTER(POINTER(X509_CRL)), POINTER(X509))),
('check_crl', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX), POINTER(X509_CRL))),
('cert_crl', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX), POINTER(X509_CRL), POINTER(X509))),
('cleanup', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX))),
('ex_data', CRYPTO_EX_DATA),
('references', c_int),
('depth', c_int),
]
assert sizeof(x509_store_st) == 76, sizeof(x509_store_st)
assert alignment(x509_store_st) == 4, alignment(x509_store_st)
x509_lookup_st._fields_ = [
('init', c_int),
('skip', c_int),
('method', POINTER(X509_LOOKUP_METHOD)),
('method_data', STRING),
('store_ctx', POINTER(X509_STORE)),
]
assert sizeof(x509_lookup_st) == 20, sizeof(x509_lookup_st)
assert alignment(x509_lookup_st) == 4, alignment(x509_lookup_st)
time_t = __darwin_time_t
x509_store_ctx_st._fields_ = [
('ctx', POINTER(X509_STORE)),
('current_method', c_int),
('cert', POINTER(X509)),
('untrusted', POINTER(STACK)),
('purpose', c_int),
('trust', c_int),
('check_time', time_t),
('flags', c_ulong),
('other_ctx', c_void_p),
('verify', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX))),
('verify_cb', CFUNCTYPE(c_int, c_int, POINTER(X509_STORE_CTX))),
('get_issuer', CFUNCTYPE(c_int, POINTER(POINTER(X509)), POINTER(X509_STORE_CTX), POINTER(X509))),
('check_issued', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX), POINTER(X509), POINTER(X509))),
('check_revocation', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX))),
('get_crl', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX), POINTER(POINTER(X509_CRL)), POINTER(X509))),
('check_crl', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX), POINTER(X509_CRL))),
('cert_crl', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX), POINTER(X509_CRL), POINTER(X509))),
('cleanup', CFUNCTYPE(c_int, POINTER(X509_STORE_CTX))),
('depth', c_int),
('valid', c_int),
('last_untrusted', c_int),
('chain', POINTER(STACK)),
('error_depth', c_int),
('error', c_int),
('current_cert', POINTER(X509)),
('current_issuer', POINTER(X509)),
('current_crl', POINTER(X509_CRL)),
('ex_data', CRYPTO_EX_DATA),
]
assert sizeof(x509_store_ctx_st) == 116, sizeof(x509_store_ctx_st)
assert alignment(x509_store_ctx_st) == 4, alignment(x509_store_ctx_st)
va_list = __darwin_va_list
__darwin_off_t = __int64_t
fpos_t = __darwin_off_t
class __sbuf(Structure):
pass
__sbuf._fields_ = [
('_base', POINTER(c_ubyte)),
('_size', c_int),
]
assert sizeof(__sbuf) == 8, sizeof(__sbuf)
assert alignment(__sbuf) == 4, alignment(__sbuf)
class __sFILEX(Structure):
pass
__sFILEX._fields_ = [
]
class __sFILE(Structure):
pass
__sFILE._pack_ = 4
__sFILE._fields_ = [
('_p', POINTER(c_ubyte)),
('_r', c_int),
('_w', c_int),
('_flags', c_short),
('_file', c_short),
('_bf', __sbuf),
('_lbfsize', c_int),
('_cookie', c_void_p),
('_close', CFUNCTYPE(c_int, c_void_p)),
('_read', CFUNCTYPE(c_int, c_void_p, STRING, c_int)),
('_seek', CFUNCTYPE(fpos_t, c_void_p, c_longlong, c_int)),
('_write', CFUNCTYPE(c_int, c_void_p, STRING, c_int)),
('_ub', __sbuf),
('_extra', POINTER(__sFILEX)),
('_ur', c_int),
('_ubuf', c_ubyte * 3),
('_nbuf', c_ubyte * 1),
('_lb', __sbuf),
('_blksize', c_int),
('_offset', fpos_t),
]
assert sizeof(__sFILE) == 88, sizeof(__sFILE)
assert alignment(__sFILE) == 4, alignment(__sFILE)
FILE = __sFILE
ct_rune_t = __darwin_ct_rune_t
rune_t = __darwin_rune_t
class div_t(Structure):
pass
div_t._fields_ = [
('quot', c_int),
('rem', c_int),
]
assert sizeof(div_t) == 8, sizeof(div_t)
assert alignment(div_t) == 4, alignment(div_t)
class ldiv_t(Structure):
pass
ldiv_t._fields_ = [
('quot', c_long),
('rem', c_long),
]
assert sizeof(ldiv_t) == 8, sizeof(ldiv_t)
assert alignment(ldiv_t) == 4, alignment(ldiv_t)
class lldiv_t(Structure):
pass
lldiv_t._pack_ = 4
lldiv_t._fields_ = [
('quot', c_longlong),
('rem', c_longlong),
]
assert sizeof(lldiv_t) == 16, sizeof(lldiv_t)
assert alignment(lldiv_t) == 4, alignment(lldiv_t)
__darwin_dev_t = __int32_t
dev_t = __darwin_dev_t
__darwin_mode_t = __uint16_t
mode_t = __darwin_mode_t
class mcontext(Structure):
pass
mcontext._fields_ = [
]
class mcontext64(Structure):
pass
mcontext64._fields_ = [
]
class __darwin_pthread_handler_rec(Structure):
pass
__darwin_pthread_handler_rec._fields_ = [
('__routine', CFUNCTYPE(None, c_void_p)),
('__arg', c_void_p),
('__next', POINTER(__darwin_pthread_handler_rec)),
]
assert sizeof(__darwin_pthread_handler_rec) == 12, sizeof(__darwin_pthread_handler_rec)
assert alignment(__darwin_pthread_handler_rec) == 4, alignment(__darwin_pthread_handler_rec)
class _opaque_pthread_attr_t(Structure):
pass
_opaque_pthread_attr_t._fields_ = [
('__sig', c_long),
('__opaque', c_char * 36),
]
assert sizeof(_opaque_pthread_attr_t) == 40, sizeof(_opaque_pthread_attr_t)
assert alignment(_opaque_pthread_attr_t) == 4, alignment(_opaque_pthread_attr_t)
class _opaque_pthread_cond_t(Structure):
pass
_opaque_pthread_cond_t._fields_ = [
('__sig', c_long),
('__opaque', c_char * 24),
]
assert sizeof(_opaque_pthread_cond_t) == 28, sizeof(_opaque_pthread_cond_t)
assert alignment(_opaque_pthread_cond_t) == 4, alignment(_opaque_pthread_cond_t)
class _opaque_pthread_condattr_t(Structure):
pass
_opaque_pthread_condattr_t._fields_ = [
('__sig', c_long),
('__opaque', c_char * 4),
]
assert sizeof(_opaque_pthread_condattr_t) == 8, sizeof(_opaque_pthread_condattr_t)
assert alignment(_opaque_pthread_condattr_t) == 4, alignment(_opaque_pthread_condattr_t)
class _opaque_pthread_mutex_t(Structure):
pass
_opaque_pthread_mutex_t._fields_ = [
('__sig', c_long),
('__opaque', c_char * 40),
]
assert sizeof(_opaque_pthread_mutex_t) == 44, sizeof(_opaque_pthread_mutex_t)
assert alignment(_opaque_pthread_mutex_t) == 4, alignment(_opaque_pthread_mutex_t)
class _opaque_pthread_mutexattr_t(Structure):
pass
_opaque_pthread_mutexattr_t._fields_ = [
('__sig', c_long),
('__opaque', c_char * 8),
]
assert sizeof(_opaque_pthread_mutexattr_t) == 12, sizeof(_opaque_pthread_mutexattr_t)
assert alignment(_opaque_pthread_mutexattr_t) == 4, alignment(_opaque_pthread_mutexattr_t)
class _opaque_pthread_once_t(Structure):
pass
_opaque_pthread_once_t._fields_ = [
('__sig', c_long),
('__opaque', c_char * 4),
]
assert sizeof(_opaque_pthread_once_t) == 8, sizeof(_opaque_pthread_once_t)
assert alignment(_opaque_pthread_once_t) == 4, alignment(_opaque_pthread_once_t)
class _opaque_pthread_rwlock_t(Structure):
pass
_opaque_pthread_rwlock_t._fields_ = [
('__sig', c_long),
('__opaque', c_char * 124),
]
assert sizeof(_opaque_pthread_rwlock_t) == 128, sizeof(_opaque_pthread_rwlock_t)
assert alignment(_opaque_pthread_rwlock_t) == 4, alignment(_opaque_pthread_rwlock_t)
class _opaque_pthread_rwlockattr_t(Structure):
pass
_opaque_pthread_rwlockattr_t._fields_ = [
('__sig', c_long),
('__opaque', c_char * 12),
]
assert sizeof(_opaque_pthread_rwlockattr_t) == 16, sizeof(_opaque_pthread_rwlockattr_t)
assert alignment(_opaque_pthread_rwlockattr_t) == 4, alignment(_opaque_pthread_rwlockattr_t)
class _opaque_pthread_t(Structure):
pass
_opaque_pthread_t._fields_ = [
('__sig', c_long),
('__cleanup_stack', POINTER(__darwin_pthread_handler_rec)),
('__opaque', c_char * 596),
]
assert sizeof(_opaque_pthread_t) == 604, sizeof(_opaque_pthread_t)
assert alignment(_opaque_pthread_t) == 4, alignment(_opaque_pthread_t)
__darwin_blkcnt_t = __int64_t
__darwin_blksize_t = __int32_t
__darwin_fsblkcnt_t = c_uint
__darwin_fsfilcnt_t = c_uint
__darwin_gid_t = __uint32_t
__darwin_id_t = __uint32_t
__darwin_ino_t = __uint32_t
__darwin_mach_port_name_t = __darwin_natural_t
__darwin_mach_port_t = __darwin_mach_port_name_t
__darwin_mcontext_t = POINTER(mcontext)
__darwin_mcontext64_t = POINTER(mcontext64)
__darwin_pid_t = __int32_t
__darwin_pthread_attr_t = _opaque_pthread_attr_t
__darwin_pthread_cond_t = _opaque_pthread_cond_t
__darwin_pthread_condattr_t = _opaque_pthread_condattr_t
__darwin_pthread_key_t = c_ulong
__darwin_pthread_mutex_t = _opaque_pthread_mutex_t
__darwin_pthread_mutexattr_t = _opaque_pthread_mutexattr_t
__darwin_pthread_once_t = _opaque_pthread_once_t
__darwin_pthread_rwlock_t = _opaque_pthread_rwlock_t
__darwin_pthread_rwlockattr_t = _opaque_pthread_rwlockattr_t
__darwin_pthread_t = POINTER(_opaque_pthread_t)
__darwin_sigset_t = __uint32_t
__darwin_suseconds_t = __int32_t
__darwin_uid_t = __uint32_t
__darwin_useconds_t = __uint32_t
__darwin_uuid_t = c_ubyte * 16
class sigaltstack(Structure):
pass
sigaltstack._fields_ = [
('ss_sp', c_void_p),
('ss_size', __darwin_size_t),
('ss_flags', c_int),
]
assert sizeof(sigaltstack) == 12, sizeof(sigaltstack)
assert alignment(sigaltstack) == 4, alignment(sigaltstack)
__darwin_stack_t = sigaltstack
class ucontext(Structure):
pass
ucontext._fields_ = [
('uc_onstack', c_int),
('uc_sigmask', __darwin_sigset_t),
('uc_stack', __darwin_stack_t),
('uc_link', POINTER(ucontext)),
('uc_mcsize', __darwin_size_t),
('uc_mcontext', __darwin_mcontext_t),
]
assert sizeof(ucontext) == 32, sizeof(ucontext)
assert alignment(ucontext) == 4, alignment(ucontext)
__darwin_ucontext_t = ucontext
class ucontext64(Structure):
pass
ucontext64._fields_ = [
('uc_onstack', c_int),
('uc_sigmask', __darwin_sigset_t),
('uc_stack', __darwin_stack_t),
('uc_link', POINTER(ucontext64)),
('uc_mcsize', __darwin_size_t),
('uc_mcontext64', __darwin_mcontext64_t),
]
assert sizeof(ucontext64) == 32, sizeof(ucontext64)
assert alignment(ucontext64) == 4, alignment(ucontext64)
__darwin_ucontext64_t = ucontext64
class timeval(Structure):
pass
timeval._fields_ = [
('tv_sec', __darwin_time_t),
('tv_usec', __darwin_suseconds_t),
]
assert sizeof(timeval) == 8, sizeof(timeval)
assert alignment(timeval) == 4, alignment(timeval)
rlim_t = __int64_t
class rusage(Structure):
pass
rusage._fields_ = [
('ru_utime', timeval),
('ru_stime', timeval),
('ru_maxrss', c_long),
('ru_ixrss', c_long),
('ru_idrss', c_long),
('ru_isrss', c_long),
('ru_minflt', c_long),
('ru_majflt', c_long),
('ru_nswap', c_long),
('ru_inblock', c_long),
('ru_oublock', c_long),
('ru_msgsnd', c_long),
('ru_msgrcv', c_long),
('ru_nsignals', c_long),
('ru_nvcsw', c_long),
('ru_nivcsw', c_long),
]
assert sizeof(rusage) == 72, sizeof(rusage)
assert alignment(rusage) == 4, alignment(rusage)
class rlimit(Structure):
pass
rlimit._pack_ = 4
rlimit._fields_ = [
('rlim_cur', rlim_t),
('rlim_max', rlim_t),
]
assert sizeof(rlimit) == 16, sizeof(rlimit)
assert alignment(rlimit) == 4, alignment(rlimit)
mcontext_t = __darwin_mcontext_t
mcontext64_t = __darwin_mcontext64_t
pthread_attr_t = __darwin_pthread_attr_t
sigset_t = __darwin_sigset_t
ucontext_t = __darwin_ucontext_t
ucontext64_t = __darwin_ucontext64_t
uid_t = __darwin_uid_t
class sigval(Union):
pass
sigval._fields_ = [
('sival_int', c_int),
('sival_ptr', c_void_p),
]
assert sizeof(sigval) == 4, sizeof(sigval)
assert alignment(sigval) == 4, alignment(sigval)
class sigevent(Structure):
pass
sigevent._fields_ = [
('sigev_notify', c_int),
('sigev_signo', c_int),
('sigev_value', sigval),
('sigev_notify_function', CFUNCTYPE(None, sigval)),
('sigev_notify_attributes', POINTER(pthread_attr_t)),
]
assert sizeof(sigevent) == 20, sizeof(sigevent)
assert alignment(sigevent) == 4, alignment(sigevent)
class __siginfo(Structure):
pass
pid_t = __darwin_pid_t
__siginfo._fields_ = [
('si_signo', c_int),
('si_errno', c_int),
('si_code', c_int),
('si_pid', pid_t),
('si_uid', uid_t),
('si_status', c_int),
('si_addr', c_void_p),
('si_value', sigval),
('si_band', c_long),
('pad', c_ulong * 7),
]
assert sizeof(__siginfo) == 64, sizeof(__siginfo)
assert alignment(__siginfo) == 4, alignment(__siginfo)
siginfo_t = __siginfo
class __sigaction_u(Union):
pass
__sigaction_u._fields_ = [
('__sa_handler', CFUNCTYPE(None, c_int)),
('__sa_sigaction', CFUNCTYPE(None, c_int, POINTER(__siginfo), c_void_p)),
]
assert sizeof(__sigaction_u) == 4, sizeof(__sigaction_u)
assert alignment(__sigaction_u) == 4, alignment(__sigaction_u)
class __sigaction(Structure):
pass
__sigaction._fields_ = [
('__sigaction_u', __sigaction_u),
('sa_tramp', CFUNCTYPE(None, c_void_p, c_int, c_int, POINTER(siginfo_t), c_void_p)),
('sa_mask', sigset_t),
('sa_flags', c_int),
]
assert sizeof(__sigaction) == 16, sizeof(__sigaction)
assert alignment(__sigaction) == 4, alignment(__sigaction)
class sigaction(Structure):
pass
sigaction._fields_ = [
('__sigaction_u', __sigaction_u),
('sa_mask', sigset_t),
('sa_flags', c_int),
]
assert sizeof(sigaction) == 12, sizeof(sigaction)
assert alignment(sigaction) == 4, alignment(sigaction)
sig_t = CFUNCTYPE(None, c_int)
stack_t = __darwin_stack_t
class sigvec(Structure):
pass
sigvec._fields_ = [
('sv_handler', CFUNCTYPE(None, c_int)),
('sv_mask', c_int),
('sv_flags', c_int),
]
assert sizeof(sigvec) == 12, sizeof(sigvec)
assert alignment(sigvec) == 4, alignment(sigvec)
class sigstack(Structure):
pass
sigstack._fields_ = [
('ss_sp', STRING),
('ss_onstack', c_int),
]
assert sizeof(sigstack) == 8, sizeof(sigstack)
assert alignment(sigstack) == 4, alignment(sigstack)
u_char = c_ubyte
u_short = c_ushort
u_int = c_uint
u_long = c_ulong
ushort = c_ushort
uint = c_uint
u_quad_t = u_int64_t
quad_t = int64_t
qaddr_t = POINTER(quad_t)
caddr_t = STRING
daddr_t = int32_t
fixpt_t = u_int32_t
blkcnt_t = __darwin_blkcnt_t
blksize_t = __darwin_blksize_t
gid_t = __darwin_gid_t
in_addr_t = __uint32_t
in_port_t = __uint16_t
ino_t = __darwin_ino_t
key_t = __int32_t
nlink_t = __uint16_t
off_t = __darwin_off_t
segsz_t = int32_t
swblk_t = int32_t
clock_t = __darwin_clock_t
ssize_t = __darwin_ssize_t
useconds_t = __darwin_useconds_t
suseconds_t = __darwin_suseconds_t
fd_mask = __int32_t
class fd_set(Structure):
pass
fd_set._fields_ = [
('fds_bits', __int32_t * 32),
]
assert sizeof(fd_set) == 128, sizeof(fd_set)
assert alignment(fd_set) == 4, alignment(fd_set)
pthread_cond_t = __darwin_pthread_cond_t
pthread_condattr_t = __darwin_pthread_condattr_t
pthread_mutex_t = __darwin_pthread_mutex_t
pthread_mutexattr_t = __darwin_pthread_mutexattr_t
pthread_once_t = __darwin_pthread_once_t
pthread_rwlock_t = __darwin_pthread_rwlock_t
pthread_rwlockattr_t = __darwin_pthread_rwlockattr_t
pthread_t = __darwin_pthread_t
pthread_key_t = __darwin_pthread_key_t
fsblkcnt_t = __darwin_fsblkcnt_t
fsfilcnt_t = __darwin_fsfilcnt_t
# values for enumeration 'idtype_t'
idtype_t = c_int # enum
id_t = __darwin_id_t
class wait(Union):
pass
class N4wait3DOLLAR_3E(Structure):
pass
N4wait3DOLLAR_3E._fields_ = [
('w_Termsig', c_uint, 7),
('w_Coredump', c_uint, 1),
('w_Retcode', c_uint, 8),
('w_Filler', c_uint, 16),
]
assert sizeof(N4wait3DOLLAR_3E) == 4, sizeof(N4wait3DOLLAR_3E)
assert alignment(N4wait3DOLLAR_3E) == 4, alignment(N4wait3DOLLAR_3E)
class N4wait3DOLLAR_4E(Structure):
pass
N4wait3DOLLAR_4E._fields_ = [
('w_Stopval', c_uint, 8),
('w_Stopsig', c_uint, 8),
('w_Filler', c_uint, 16),
]
assert sizeof(N4wait3DOLLAR_4E) == 4, sizeof(N4wait3DOLLAR_4E)
assert alignment(N4wait3DOLLAR_4E) == 4, alignment(N4wait3DOLLAR_4E)
wait._fields_ = [
('w_status', c_int),
('w_T', N4wait3DOLLAR_3E),
('w_S', N4wait3DOLLAR_4E),
]
assert sizeof(wait) == 4, sizeof(wait)
assert alignment(wait) == 4, alignment(wait)
class timespec(Structure):
pass
timespec._fields_ = [
('tv_sec', time_t),
('tv_nsec', c_long),
]
assert sizeof(timespec) == 8, sizeof(timespec)
assert alignment(timespec) == 4, alignment(timespec)
class tm(Structure):
pass
tm._fields_ = [
('tm_sec', c_int),
('tm_min', c_int),
('tm_hour', c_int),
('tm_mday', c_int),
('tm_mon', c_int),
('tm_year', c_int),
('tm_wday', c_int),
('tm_yday', c_int),
('tm_isdst', c_int),
('tm_gmtoff', c_long),
('tm_zone', STRING),
]
assert sizeof(tm) == 44, sizeof(tm)
assert alignment(tm) == 4, alignment(tm)
__gnuc_va_list = STRING
ptrdiff_t = c_int
int8_t = c_byte
int16_t = c_short
uint8_t = c_ubyte
uint16_t = c_ushort
uint32_t = c_uint
uint64_t = c_ulonglong
int_least8_t = int8_t
int_least16_t = int16_t
int_least32_t = int32_t
int_least64_t = int64_t
uint_least8_t = uint8_t
uint_least16_t = uint16_t
uint_least32_t = uint32_t
uint_least64_t = uint64_t
int_fast8_t = int8_t
int_fast16_t = int16_t
int_fast32_t = int32_t
int_fast64_t = int64_t
uint_fast8_t = uint8_t
uint_fast16_t = uint16_t
uint_fast32_t = uint32_t
uint_fast64_t = uint64_t
intptr_t = c_long
uintptr_t = c_ulong
intmax_t = c_longlong
uintmax_t = c_ulonglong
__all__ = ['ENGINE', 'pkcs7_enc_content_st', '__int16_t',
'X509_REVOKED', 'SSL_CTX', 'UIT_BOOLEAN',
'__darwin_time_t', 'ucontext64_t', 'int_fast32_t',
'pem_ctx_st', 'uint8_t', 'fpos_t', 'X509', 'COMP_CTX',
'tm', 'N10pem_ctx_st4DOLLAR_17E', 'swblk_t',
'ASN1_TEMPLATE', '__darwin_pthread_t', 'fixpt_t',
'BIO_METHOD', 'ASN1_PRINTABLESTRING', 'EVP_ENCODE_CTX',
'dh_method', 'bio_f_buffer_ctx_struct', 'in_port_t',
'X509_SIG', '__darwin_ssize_t', '__darwin_sigset_t',
'wait', 'uint_fast16_t', 'N12asn1_type_st4DOLLAR_11E',
'uint_least8_t', 'pthread_rwlock_t', 'ASN1_IA5STRING',
'fsfilcnt_t', 'ucontext', '__uint64_t', 'timespec',
'x509_cinf_st', 'COMP_METHOD', 'MD5_CTX', 'buf_mem_st',
'ASN1_ENCODING_st', 'PBEPARAM', 'X509_NAME_ENTRY',
'__darwin_va_list', 'ucontext_t', 'lhash_st',
'N4wait3DOLLAR_4E', '__darwin_uuid_t',
'_ossl_old_des_ks_struct', 'id_t', 'ASN1_BIT_STRING',
'va_list', '__darwin_wchar_t', 'pthread_key_t',
'pkcs7_signer_info_st', 'ASN1_METHOD', 'DSA_SIG', 'DSA',
'UIT_NONE', 'pthread_t', '__darwin_useconds_t',
'uint_fast8_t', 'UI_STRING', 'DES_cblock',
'__darwin_mcontext64_t', 'rlim_t', 'PEM_Encode_Seal_st',
'SHAstate_st', 'u_quad_t', 'openssl_fptr',
'_opaque_pthread_rwlockattr_t',
'N18x509_attributes_st4DOLLAR_13E',
'__darwin_pthread_rwlock_t', 'daddr_t', 'ui_string_st',
'x509_file_st', 'X509_req_info_st', 'int_least64_t',
'evp_Encode_Ctx_st', 'X509_OBJECTS', 'CRYPTO_EX_DATA',
'__int8_t', 'AUTHORITY_KEYID_st', '_opaque_pthread_attr_t',
'sigstack', 'EVP_CIPHER_CTX', 'X509_extension_st', 'pid_t',
'RSA_METHOD', 'PEM_USER', 'pem_recip_st', 'env_md_ctx_st',
'rc5_key_st', 'ui_st', 'X509_PUBKEY', 'u_int8_t',
'ASN1_ITEM_st', 'pkcs7_recip_info_st', 'ssl2_state_st',
'off_t', 'N10ssl_ctx_st4DOLLAR_18E', 'crypto_ex_data_st',
'ui_method_st', '__darwin_pthread_rwlockattr_t',
'CRYPTO_EX_dup', '__darwin_ino_t', '__sFILE',
'OSUnknownByteOrder', 'BN_MONT_CTX', 'ASN1_NULL', 'time_t',
'CRYPTO_EX_new', 'asn1_type_st', 'CRYPTO_EX_DATA_FUNCS',
'user_time_t', 'BIGNUM', 'pthread_rwlockattr_t',
'ASN1_VALUE_st', 'DH_METHOD', '__darwin_off_t',
'_opaque_pthread_t', 'bn_blinding_st', 'RSA', 'ssize_t',
'mcontext64_t', 'user_long_t', 'fsblkcnt_t', 'cert_st',
'__darwin_pthread_condattr_t', 'X509_PKEY',
'__darwin_id_t', '__darwin_nl_item', 'SSL2_STATE', 'FILE',
'pthread_mutexattr_t', 'size_t',
'_ossl_old_des_key_schedule', 'pkcs7_issuer_and_serial_st',
'sigval', 'CRYPTO_MEM_LEAK_CB', 'X509_NAME', 'blkcnt_t',
'uint_least16_t', '__darwin_dev_t', 'evp_cipher_info_st',
'BN_BLINDING', 'ssl3_state_st', 'uint_least64_t',
'user_addr_t', 'DES_key_schedule', 'RIPEMD160_CTX',
'u_char', 'X509_algor_st', 'uid_t', 'sess_cert_st',
'u_int64_t', 'u_int16_t', 'sigset_t', '__darwin_ptrdiff_t',
'ASN1_CTX', 'STACK', '__int32_t', 'UI_METHOD',
'NETSCAPE_SPKI', 'UIT_PROMPT', 'st_CRYPTO_EX_DATA_IMPL',
'cast_key_st', 'X509_HASH_DIR_CTX', 'sigevent',
'user_ssize_t', 'clock_t', 'aes_key_st',
'__darwin_socklen_t', '__darwin_intptr_t', 'int_fast64_t',
'asn1_string_table_st', 'uint_fast32_t',
'ASN1_VISIBLESTRING', 'DSA_SIG_st', 'obj_name_st',
'X509_LOOKUP_METHOD', 'u_int32_t', 'EVP_CIPHER_INFO',
'__gnuc_va_list', 'AES_KEY', 'PKCS7_ISSUER_AND_SERIAL',
'BN_CTX', '__darwin_blkcnt_t', 'key_t', 'SHA_CTX',
'pkcs7_signed_st', 'SSL', 'N10pem_ctx_st4DOLLAR_16E',
'pthread_attr_t', 'EVP_MD', 'uint', 'ASN1_BOOLEAN',
'ino_t', '__darwin_clock_t', 'ASN1_OCTET_STRING',
'asn1_ctx_st', 'BIO_F_BUFFER_CTX', 'bn_mont_ctx_st',
'X509_REQ_INFO', 'PEM_CTX', 'sigvec',
'__darwin_pthread_mutexattr_t', 'x509_attributes_st',
'stack_t', '__darwin_mode_t', '__mbstate_t',
'asn1_object_st', 'ASN1_ENCODING', '__uint8_t',
'LHASH_NODE', 'PKCS7_SIGNER_INFO', 'asn1_method_st',
'stack_st', 'bio_info_cb', 'div_t', 'UIT_VERIFY',
'PBEPARAM_st', 'N4wait3DOLLAR_3E', 'quad_t', '__siginfo',
'__darwin_mbstate_t', 'rsa_st', 'ASN1_UNIVERSALSTRING',
'uint64_t', 'ssl_comp_st', 'X509_OBJECT', 'pthread_cond_t',
'DH', '__darwin_wctype_t', 'PKCS7_ENVELOPE', 'ASN1_TLC_st',
'sig_atomic_t', 'BIO', 'nlink_t', 'BUF_MEM', 'SSL3_RECORD',
'bio_method_st', 'timeval', 'UI_string_types', 'BIO_dummy',
'ssl_ctx_st', 'NETSCAPE_CERT_SEQUENCE',
'BIT_STRING_BITNAME_st', '__darwin_pthread_attr_t',
'int8_t', '__darwin_wint_t', 'OBJ_NAME',
'PKCS8_PRIV_KEY_INFO', 'PBE2PARAM_st',
'LHASH_DOALL_FN_TYPE', 'x509_st', 'X509_VAL', 'dev_t',
'ASN1_TEMPLATE_st', 'MD5state_st', '__uint16_t',
'LHASH_DOALL_ARG_FN_TYPE', 'mdc2_ctx_st', 'SSL3_STATE',
'ssl3_buffer_st', 'ASN1_ITEM_EXP',
'_opaque_pthread_condattr_t', 'mode_t', 'ASN1_VALUE',
'qaddr_t', '__darwin_gid_t', 'EVP_PKEY', 'CRYPTO_EX_free',
'_ossl_old_des_cblock', 'X509_INFO', 'asn1_string_st',
'intptr_t', 'UIT_INFO', 'int_fast8_t', 'sigaltstack',
'env_md_st', 'LHASH', '__darwin_ucontext_t',
'PKCS7_SIGN_ENVELOPE', '__darwin_mcontext_t', 'ct_rune_t',
'MD2_CTX', 'pthread_once_t', 'SSL3_BUFFER', 'fd_mask',
'ASN1_TYPE', 'PKCS7_SIGNED', 'ssl3_record_st', 'BF_KEY',
'MD4state_st', 'MD4_CTX', 'int16_t', 'SSL_CIPHER',
'rune_t', 'X509_TRUST', 'siginfo_t', 'X509_STORE',
'__sbuf', 'X509_STORE_CTX', '__darwin_blksize_t', 'ldiv_t',
'ASN1_TIME', 'SSL_METHOD', 'X509_LOOKUP',
'Netscape_spki_st', 'P_PID', 'sigaction', 'sig_t',
'hostent', 'x509_cert_aux_st', '_opaque_pthread_cond_t',
'segsz_t', 'ushort', '__darwin_ct_rune_t', 'fd_set',
'BN_RECP_CTX', 'x509_lookup_st', 'uint16_t', 'pkcs7_st',
'asn1_header_st', '__darwin_pthread_key_t',
'x509_trust_st', '__darwin_pthread_handler_rec', 'int32_t',
'X509_CRL_INFO', 'N11evp_pkey_st4DOLLAR_12E', 'MDC2_CTX',
'N23_ossl_old_des_ks_struct4DOLLAR_10E', 'ASN1_HEADER',
'X509_crl_info_st', 'LHASH_HASH_FN_TYPE',
'_opaque_pthread_mutexattr_t', 'ssl_st',
'N8pkcs7_st4DOLLAR_15E', 'evp_pkey_st',
'pkcs7_signedandenveloped_st', '__darwin_mach_port_t',
'EVP_PBE_KEYGEN', '_opaque_pthread_mutex_t',
'ASN1_UTCTIME', 'mcontext', 'crypto_ex_data_func_st',
'u_long', 'PBKDF2PARAM_st', 'rc4_key_st', 'DSA_METHOD',
'EVP_CIPHER', 'BIT_STRING_BITNAME', 'PKCS7_RECIP_INFO',
'ssl3_enc_method', 'X509_CERT_AUX', 'uintmax_t',
'int_fast16_t', 'RC5_32_KEY', 'ucontext64', 'ASN1_INTEGER',
'u_short', 'N14x509_object_st4DOLLAR_14E', 'mcontext64',
'X509_sig_st', 'ASN1_GENERALSTRING', 'PKCS7', '__sFILEX',
'X509_name_entry_st', 'ssl_session_st', 'caddr_t',
'bignum_st', 'X509_CINF', '__darwin_pthread_cond_t',
'ASN1_TLC', 'PKCS7_ENCRYPT', 'NETSCAPE_SPKAC',
'Netscape_spkac_st', 'idtype_t', 'UIT_ERROR',
'uint_fast64_t', 'in_addr_t', 'pthread_mutex_t',
'__int64_t', 'ASN1_BMPSTRING', 'uint32_t',
'PEM_ENCODE_SEAL_CTX', 'suseconds_t', 'ASN1_OBJECT',
'X509_val_st', 'private_key_st', 'CRYPTO_dynlock',
'X509_objects_st', 'CRYPTO_EX_DATA_IMPL',
'pthread_condattr_t', 'PKCS7_DIGEST', 'uint_least32_t',
'ASN1_STRING', '__uint32_t', 'P_PGID', 'rsa_meth_st',
'X509_crl_st', 'RC2_KEY', '__darwin_fsfilcnt_t',
'X509_revoked_st', 'PBE2PARAM', 'blksize_t',
'Netscape_certificate_sequence', 'ssl_cipher_st',
'bignum_ctx', 'register_t', 'ASN1_UTF8STRING',
'pkcs7_encrypted_st', 'RC4_KEY', '__darwin_ucontext64_t',
'N13ssl2_state_st4DOLLAR_19E', 'bn_recp_ctx_st',
'CAST_KEY', 'X509_ATTRIBUTE', '__darwin_suseconds_t',
'__sigaction', 'user_ulong_t', 'syscall_arg_t',
'evp_cipher_ctx_st', 'X509_ALGOR', 'mcontext_t',
'const_DES_cblock', '__darwin_fsblkcnt_t', 'dsa_st',
'int_least8_t', 'MD2state_st', 'X509_EXTENSION',
'GEN_SESSION_CB', 'int_least16_t', '__darwin_wctrans_t',
'PBKDF2PARAM', 'x509_lookup_method_st', 'pem_password_cb',
'X509_info_st', 'x509_store_st', '__darwin_natural_t',
'X509_pubkey_st', 'pkcs7_digest_st', '__darwin_size_t',
'ASN1_STRING_TABLE', 'OSLittleEndian', 'RIPEMD160state_st',
'pkcs7_enveloped_st', 'UI', 'ptrdiff_t', 'X509_REQ',
'CRYPTO_dynlock_value', 'X509_req_st', 'x509_store_ctx_st',
'N13ssl3_state_st4DOLLAR_20E', 'lhash_node_st',
'__darwin_pthread_mutex_t', 'LHASH_COMP_FN_TYPE',
'__darwin_rune_t', 'rlimit', '__darwin_pthread_once_t',
'OSBigEndian', 'uintptr_t', '__darwin_uid_t', 'u_int',
'ASN1_T61STRING', 'gid_t', 'ssl_method_st', 'ASN1_ITEM',
'ASN1_ENUMERATED', '_opaque_pthread_rwlock_t',
'pkcs8_priv_key_info_st', 'intmax_t', 'sigcontext',
'X509_CRL', 'rc2_key_st', 'engine_st', 'x509_object_st',
'_opaque_pthread_once_t', 'DES_ks', 'SSL_COMP',
'dsa_method', 'int64_t', 'bio_st', 'bf_key_st',
'ASN1_GENERALIZEDTIME', 'PKCS7_ENC_CONTENT',
'__darwin_pid_t', 'lldiv_t', 'comp_method_st',
'EVP_MD_CTX', 'evp_cipher_st', 'X509_name_st',
'x509_hash_dir_st', '__darwin_mach_port_name_t',
'useconds_t', 'user_size_t', 'SSL_SESSION', 'rusage',
'ssl_crock_st', 'int_least32_t', '__sigaction_u', 'dh_st',
'P_ALL', '__darwin_stack_t', 'N6DES_ks3DOLLAR_9E',
'comp_ctx_st', 'X509_CERT_FILE_CTX']
|
kevinlondon/sentry
|
refs/heads/master
|
src/sentry/tsdb/dummy.py
|
24
|
"""
sentry.tsdb.dummy
~~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2014 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
from sentry.tsdb.base import BaseTSDB
class DummyTSDB(BaseTSDB):
"""
A no-op time-series storage.
"""
def incr(self, model, key, timestamp=None, count=1):
pass
def get_range(self, model, keys, start, end, rollup=None):
return dict((k, []) for k in keys)
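# Usage sketch (not part of the original module): because every write is a
# no-op, DummyTSDB can stand in wherever a BaseTSDB backend is required,
# e.g. in tests or when time-series storage is disabled.
#
#   tsdb = DummyTSDB()
#   tsdb.incr(None, 'events')                      # silently discarded
#   tsdb.get_range(None, ['events'], 0, 100)       # {'events': []}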
|
goddardl/gaffer
|
refs/heads/master
|
python/GafferUI/NumericPlugValueWidget.py
|
2
|
##########################################################################
#
# Copyright (c) 2011-2012, John Haddon. All rights reserved.
# Copyright (c) 2011-2013, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
from __future__ import with_statement
import Gaffer
import GafferUI
## \todo Maths expressions to modify the existing value
## \todo Enter names of other plugs to create a connection
## \todo Color change for connected plugs and output plugs
## \todo Reject drag and drop of anything that's not a number
class NumericPlugValueWidget( GafferUI.PlugValueWidget ) :
def __init__( self, plug, **kw ) :
self.__numericWidget = GafferUI.NumericWidget( 0 )
GafferUI.PlugValueWidget.__init__( self, self.__numericWidget, plug, **kw )
self._addPopupMenu( self.__numericWidget )
# we use these to decide which actions to merge into a single undo
self.__lastChangedReason = None
self.__mergeGroupId = 0
self.__keyPressConnection = self.__numericWidget.keyPressSignal().connect( Gaffer.WeakMethod( self.__keyPress ) )
self.__valueChangedConnection = self.__numericWidget.valueChangedSignal().connect( Gaffer.WeakMethod( self.__valueChanged ) )
self._updateFromPlug()
self.__updateWidth()
def setPlug( self, plug ) :
GafferUI.PlugValueWidget.setPlug( self, plug )
self.__updateWidth()
def numericWidget( self ) :
return self.__numericWidget
def setHighlighted( self, highlighted ) :
GafferUI.PlugValueWidget.setHighlighted( self, highlighted )
self.numericWidget().setHighlighted( highlighted )
def getToolTip( self ) :
result = GafferUI.PlugValueWidget.getToolTip( self )
if self.getPlug() is not None :
result += "<ul>"
result += "<li>Cursor up/down to increment/decrement</li>"
result += "<ul>"
return result
def _updateFromPlug( self ) :
plug = self.getPlug()
if plug is not None :
with self.getContext() :
try :
value = plug.getValue()
except :
value = None
if value is not None :
with Gaffer.BlockedConnection( self.__valueChangedConnection ) :
self.__numericWidget.setValue( value )
self.__numericWidget.setErrored( value is None )
self.__numericWidget.setEditable( self._editable() )
def __keyPress( self, widget, event ) :
assert( widget is self.__numericWidget )
if not self.__numericWidget.getEditable() :
return False
# escape abandons everything
if event.key=="Escape" :
self._updateFromPlug()
return True
return False
def __valueChanged( self, widget, reason ) :
if self._editable() :
if not widget.changesShouldBeMerged( self.__lastChangedReason, reason ) :
self.__mergeGroupId += 1
self.__lastChangedReason = reason
self.__setPlugValue( mergeGroup = "NumericPlugValueWidget%d%d" % ( id( self ), self.__mergeGroupId ) )
return False
def __setPlugValue( self, mergeGroup="" ) :
with Gaffer.UndoContext( self.getPlug().ancestor( Gaffer.ScriptNode ), mergeGroup=mergeGroup ) :
with Gaffer.BlockedConnection( self._plugConnections() ) :
try :
self.getPlug().setValue( self.__numericWidget.getValue() )
except :
pass
# now any changes that were made in the numeric widget have been transferred
# into the global undo queue, we remove the text editing changes from the
# widget's private text editing undo queue. it will then ignore undo shortcuts,
# allowing them to fall through to the global undo shortcut.
self.__numericWidget.clearUndo()
# we always need to update the ui from the plug after trying to set it,
# because the plug might clamp the value to something else. furthermore
# it might not even emit plugSetSignal if it happens to clamp to the same
# value as it had before. we block calls to _updateFromPlug() while setting
# the value to avoid having to do the work twice if plugSetSignal is emitted.
self._updateFromPlug()
def __updateWidth( self ) :
charWidth = None
if isinstance( self.getPlug(), Gaffer.IntPlug ) and self.getPlug().hasMaxValue() :
charWidth = len( str( self.getPlug().maxValue() ) )
self.__numericWidget.setFixedCharacterWidth( charWidth )
GafferUI.PlugValueWidget.registerType( Gaffer.FloatPlug, NumericPlugValueWidget )
GafferUI.PlugValueWidget.registerType( Gaffer.IntPlug, NumericPlugValueWidget )
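# Registration sketch (not in the original file) : registerType() maps a plug
# class to the widget used to edit it, so a hypothetical numeric plug type
# could be wired up the same way :
#
#   GafferUI.PlugValueWidget.registerType( Gaffer.MyNumericPlug, NumericPlugValueWidget )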
|
jimyx17/jimh
|
refs/heads/master
|
lib/mutagen/asf.py
|
2
|
# Copyright 2006-2007 Lukas Lalinsky
# Copyright 2005-2006 Joe Wreschnig
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# $Id: asf.py 4224 2007-12-03 09:01:49Z luks $
"""Read and write ASF (Window Media Audio) files."""
__all__ = ["ASF", "Open"]
import struct
from lib.mutagen import FileType, Metadata
from lib.mutagen._util import insert_bytes, delete_bytes, DictMixin
class error(IOError): pass
class ASFError(error): pass
class ASFHeaderError(error): pass
class ASFInfo(object):
"""ASF stream information."""
def __init__(self):
self.length = 0.0
self.sample_rate = 0
self.bitrate = 0
self.channels = 0
def pprint(self):
s = "Windows Media Audio %d bps, %s Hz, %d channels, %.2f seconds" % (
self.bitrate, self.sample_rate, self.channels, self.length)
return s
class ASFTags(list, DictMixin, Metadata):
"""Dictionary containing ASF attributes."""
def pprint(self):
return "\n".join(["%s=%s" % (k, v) for k, v in self])
def __getitem__(self, key):
"""A list of values for the key.
This is a copy, so comment['title'].append('a title') will not
work.
"""
values = [value for (k, value) in self if k == key]
if not values: raise KeyError, key
else: return values
def __delitem__(self, key):
"""Delete all values associated with the key."""
to_delete = filter(lambda x: x[0] == key, self)
if not to_delete: raise KeyError, key
else: map(self.remove, to_delete)
def __contains__(self, key):
"""Return true if the key has any values."""
for k, value in self:
if k == key: return True
else: return False
def __setitem__(self, key, values):
"""Set a key's value or values.
Setting a value overwrites all old ones. The value may be a
list of Unicode or UTF-8 strings, or a single Unicode or UTF-8
string.
"""
if not isinstance(values, list):
values = [values]
try: del(self[key])
except KeyError: pass
for value in values:
if key in _standard_attribute_names:
value = unicode(value)
elif not isinstance(value, ASFBaseAttribute):
if isinstance(value, basestring):
value = ASFUnicodeAttribute(value)
elif isinstance(value, bool):
value = ASFBoolAttribute(value)
elif isinstance(value, int):
value = ASFDWordAttribute(value)
elif isinstance(value, long):
value = ASFQWordAttribute(value)
self.append((key, value))
def keys(self):
"""Return all keys in the comment."""
return self and set(zip(*self)[0])
def as_dict(self):
"""Return a copy of the comment data in a real dict."""
d = {}
for key, value in self:
d.setdefault(key, []).append(value)
return d
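# Usage sketch (illustrative, not part of mutagen itself): ASFTags is a list
# of (name, attribute) pairs with a dict-like face on top:
#
#   tags = ASFTags()
#   tags["Title"] = u"A Title"           # single value, coerced to unicode
#   tags["Author"] = [u"A", u"B"]        # several values for one key
#   tags["Title"]                        # -> [u'A Title'] (a copy)
#   del tags["Author"]                   # drops every value for the key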
class ASFBaseAttribute(object):
"""Generic attribute."""
TYPE = None
def __init__(self, value=None, data=None, language=None,
stream=None, **kwargs):
self.language = language
self.stream = stream
if data:
self.value = self.parse(data, **kwargs)
else:
self.value = value
def data_size(self):
raise NotImplementedError
def __repr__(self):
name = "%s(%r" % (type(self).__name__, self.value)
if self.language:
name += ", language=%d" % self.language
if self.stream:
name += ", stream=%d" % self.stream
name += ")"
return name
def render(self, name):
name = name.encode("utf-16-le") + "\x00\x00"
data = self._render()
return (struct.pack("<H", len(name)) + name +
struct.pack("<HH", self.TYPE, len(data)) + data)
def render_m(self, name):
name = name.encode("utf-16-le") + "\x00\x00"
if self.TYPE == 2:
data = self._render(dword=False)
else:
data = self._render()
return (struct.pack("<HHHHI", 0, self.stream or 0, len(name),
self.TYPE, len(data)) + name + data)
def render_ml(self, name):
name = name.encode("utf-16-le") + "\x00\x00"
if self.TYPE == 2:
data = self._render(dword=False)
else:
data = self._render()
return (struct.pack("<HHHHI", self.language or 0, self.stream or 0,
len(name), self.TYPE, len(data)) + name + data)
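# Note (sketch): the three render variants above match the three objects an
# attribute may be stored in -- render() for the Extended Content Description
# object, render_m() for the (stream-scoped) Metadata object and render_ml()
# for the (stream- and language-scoped) Metadata Library object. TYPE == 2 is
# the bool type, which the latter two serialize as a 16-bit word instead of a
# 32-bit dword.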
class ASFUnicodeAttribute(ASFBaseAttribute):
"""Unicode string attribute."""
TYPE = 0x0000
def parse(self, data):
return data.decode("utf-16-le").strip("\x00")
def _render(self):
return self.value.encode("utf-16-le") + "\x00\x00"
def data_size(self):
return len(self.value) * 2 + 2
def __str__(self):
return self.value
def __cmp__(self, other):
return cmp(unicode(self), other)
class ASFByteArrayAttribute(ASFBaseAttribute):
"""Byte array attribute."""
TYPE = 0x0001
def parse(self, data):
return data
def _render(self):
return self.value
def data_size(self):
return len(self.value)
def __str__(self):
return "[binary data (%s bytes)]" % len(self.value)
def __cmp__(self, other):
return cmp(str(self), other)
class ASFBoolAttribute(ASFBaseAttribute):
"""Bool attribute."""
TYPE = 0x0002
def parse(self, data, dword=True):
if dword:
return struct.unpack("<I", data)[0] == 1
else:
return struct.unpack("<H", data)[0] == 1
def _render(self, dword=True):
if dword:
return struct.pack("<I", int(self.value))
else:
return struct.pack("<H", int(self.value))
def data_size(self):
return 4
def __bool__(self):
return self.value
def __str__(self):
return str(self.value)
def __cmp__(self, other):
return cmp(bool(self), other)
class ASFDWordAttribute(ASFBaseAttribute):
"""DWORD attribute."""
TYPE = 0x0003
def parse(self, data):
return struct.unpack("<L", data)[0]
def _render(self):
return struct.pack("<L", self.value)
def data_size(self):
return 4
def __int__(self):
return self.value
def __str__(self):
return str(self.value)
def __cmp__(self, other):
return cmp(int(self), other)
class ASFQWordAttribute(ASFBaseAttribute):
"""QWORD attribute."""
TYPE = 0x0004
def parse(self, data):
return struct.unpack("<Q", data)[0]
def _render(self):
return struct.pack("<Q", self.value)
def data_size(self):
return 8
def __int__(self):
return self.value
def __str__(self):
return str(self.value)
def __cmp__(self, other):
return cmp(int(self), other)
class ASFWordAttribute(ASFBaseAttribute):
"""WORD attribute."""
TYPE = 0x0005
def parse(self, data):
return struct.unpack("<H", data)[0]
def _render(self):
return struct.pack("<H", self.value)
def data_size(self):
return 2
def __int__(self):
return self.value
def __str__(self):
return str(self.value)
def __cmp__(self, other):
return cmp(int(self), other)
class ASFGUIDAttribute(ASFBaseAttribute):
"""GUID attribute."""
TYPE = 0x0006
def parse(self, data):
return data
def _render(self):
return self.value
def data_size(self):
return len(self.value)
def __str__(self):
return self.value
def __cmp__(self, other):
return cmp(str(self), other)
UNICODE = ASFUnicodeAttribute.TYPE
BYTEARRAY = ASFByteArrayAttribute.TYPE
BOOL = ASFBoolAttribute.TYPE
DWORD = ASFDWordAttribute.TYPE
QWORD = ASFQWordAttribute.TYPE
WORD = ASFWordAttribute.TYPE
GUID = ASFGUIDAttribute.TYPE
def ASFValue(value, kind, **kwargs):
for t, c in _attribute_types.items():
if kind == t:
return c(value=value, **kwargs)
raise ValueError("Unknown value type")
_attribute_types = {
ASFUnicodeAttribute.TYPE: ASFUnicodeAttribute,
ASFByteArrayAttribute.TYPE: ASFByteArrayAttribute,
ASFBoolAttribute.TYPE: ASFBoolAttribute,
ASFDWordAttribute.TYPE: ASFDWordAttribute,
ASFQWordAttribute.TYPE: ASFQWordAttribute,
ASFWordAttribute.TYPE: ASFWordAttribute,
ASFGUIDAttribute.TYPE: ASFGUIDAttribute,
}
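# Sketch: ASFValue() is the convenience constructor for typed attributes; it
# dispatches on the type constants above and raises ValueError otherwise:
#
#   ASFValue(u"WMA", UNICODE)    # -> ASFUnicodeAttribute(u'WMA')
#   ASFValue(42, DWORD)          # -> ASFDWordAttribute(42)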
_standard_attribute_names = [
"Title",
"Author",
"Copyright",
"Description",
"Rating"
]
class BaseObject(object):
"""Base ASF object."""
GUID = None
def parse(self, asf, data, fileobj, size):
self.data = data
def render(self, asf):
data = self.GUID + struct.pack("<Q", len(self.data) + 24) + self.data
size = len(data)
return data
class UnknownObject(BaseObject):
"""Unknown ASF object."""
def __init__(self, guid):
self.GUID = guid
class HeaderObject(object):
"""ASF header."""
GUID = "\x30\x26\xB2\x75\x8E\x66\xCF\x11\xA6\xD9\x00\xAA\x00\x62\xCE\x6C"
class ContentDescriptionObject(BaseObject):
"""Content description."""
GUID = "\x33\x26\xB2\x75\x8E\x66\xCF\x11\xA6\xD9\x00\xAA\x00\x62\xCE\x6C"
def parse(self, asf, data, fileobj, size):
super(ContentDescriptionObject, self).parse(asf, data, fileobj, size)
asf.content_description_obj = self
lengths = struct.unpack("<HHHHH", data[:10])
texts = []
pos = 10
for length in lengths:
end = pos + length
if length > 0:
texts.append(data[pos:end].decode("utf-16-le").strip("\x00"))
else:
texts.append(None)
pos = end
title, author, copyright, desc, rating = texts
for key, value in dict(
Title=title,
Author=author,
Copyright=copyright,
Description=desc,
Rating=rating).items():
if value is not None:
asf.tags[key] = value
def render(self, asf):
def render_text(name):
value = asf.tags.get(name, [])
if value:
return value[0].encode("utf-16-le") + "\x00\x00"
else:
return ""
texts = map(render_text, _standard_attribute_names)
data = struct.pack("<HHHHH", *map(len, texts)) + "".join(texts)
return self.GUID + struct.pack("<Q", 24 + len(data)) + data
class ExtendedContentDescriptionObject(BaseObject):
"""Extended content description."""
GUID = "\x40\xA4\xD0\xD2\x07\xE3\xD2\x11\x97\xF0\x00\xA0\xC9\x5E\xA8\x50"
def parse(self, asf, data, fileobj, size):
super(ExtendedContentDescriptionObject, self).parse(asf, data, fileobj, size)
asf.extended_content_description_obj = self
num_attributes, = struct.unpack("<H", data[0:2])
pos = 2
for i in range(num_attributes):
name_length, = struct.unpack("<H", data[pos:pos+2])
pos += 2
name = data[pos:pos+name_length].decode("utf-16-le").strip("\x00")
pos += name_length
value_type, value_length = struct.unpack("<HH", data[pos:pos+4])
pos += 4
value = data[pos:pos+value_length]
pos += value_length
attr = _attribute_types[value_type](data=value)
asf.tags.append((name, attr))
def render(self, asf):
attrs = asf.to_extended_content_description.items()
data = "".join([attr.render(name) for (name, attr) in attrs])
data = struct.pack("<QH", 26 + len(data), len(attrs)) + data
return self.GUID + data
class FilePropertiesObject(BaseObject):
"""File properties."""
GUID = "\xA1\xDC\xAB\x8C\x47\xA9\xCF\x11\x8E\xE4\x00\xC0\x0C\x20\x53\x65"
def parse(self, asf, data, fileobj, size):
super(FilePropertiesObject, self).parse(asf, data, fileobj, size)
length, _, preroll = struct.unpack("<QQQ", data[40:64])
asf.info.length = length / 10000000.0 - preroll / 1000.0
class StreamPropertiesObject(BaseObject):
"""Stream properties."""
GUID = "\x91\x07\xDC\xB7\xB7\xA9\xCF\x11\x8E\xE6\x00\xC0\x0C\x20\x53\x65"
def parse(self, asf, data, fileobj, size):
super(StreamPropertiesObject, self).parse(asf, data, fileobj, size)
channels, sample_rate, bitrate = struct.unpack("<HII", data[56:66])
asf.info.channels = channels
asf.info.sample_rate = sample_rate
asf.info.bitrate = bitrate * 8
class HeaderExtensionObject(BaseObject):
"""Header extension."""
GUID = "\xb5\x03\xbf_.\xa9\xcf\x11\x8e\xe3\x00\xc0\x0c Se"
def parse(self, asf, data, fileobj, size):
super(HeaderExtensionObject, self).parse(asf, data, fileobj, size)
asf.header_extension_obj = self
datasize, = struct.unpack("<I", data[18:22])
datapos = 0
self.objects = []
while datapos < datasize:
guid, size = struct.unpack("<16sQ", data[22+datapos:22+datapos+24])
if guid in _object_types:
obj = _object_types[guid]()
else:
obj = UnknownObject(guid)
obj.parse(asf, data[22+datapos+24:22+datapos+size], fileobj, size)
self.objects.append(obj)
datapos += size
def render(self, asf):
data = "".join([obj.render(asf) for obj in self.objects])
return (self.GUID + struct.pack("<Q", 24 + 16 + 6 + len(data)) +
"\x11\xD2\xD3\xAB\xBA\xA9\xcf\x11" +
"\x8E\xE6\x00\xC0\x0C\x20\x53\x65" +
"\x06\x00" + struct.pack("<I", len(data)) + data)
class MetadataObject(BaseObject):
"""Metadata description."""
GUID = "\xea\xcb\xf8\xc5\xaf[wH\x84g\xaa\x8cD\xfaL\xca"
def parse(self, asf, data, fileobj, size):
super(MetadataObject, self).parse(asf, data, fileobj, size)
asf.metadata_obj = self
num_attributes, = struct.unpack("<H", data[0:2])
pos = 2
for i in range(num_attributes):
(reserved, stream, name_length, value_type,
value_length) = struct.unpack("<HHHHI", data[pos:pos+12])
pos += 12
name = data[pos:pos+name_length].decode("utf-16-le").strip("\x00")
pos += name_length
value = data[pos:pos+value_length]
pos += value_length
args = {'data': value, 'stream': stream}
if value_type == 2:
args['dword'] = False
attr = _attribute_types[value_type](**args)
asf.tags.append((name, attr))
def render(self, asf):
attrs = asf.to_metadata.items()
data = "".join([attr.render_m(name) for (name, attr) in attrs])
return (self.GUID + struct.pack("<QH", 26 + len(data), len(attrs)) +
data)
class MetadataLibraryObject(BaseObject):
"""Metadata library description."""
GUID = "\x94\x1c#D\x98\x94\xd1I\xa1A\x1d\x13NEpT"
def parse(self, asf, data, fileobj, size):
super(MetadataLibraryObject, self).parse(asf, data, fileobj, size)
asf.metadata_library_obj = self
num_attributes, = struct.unpack("<H", data[0:2])
pos = 2
for i in range(num_attributes):
(language, stream, name_length, value_type,
value_length) = struct.unpack("<HHHHI", data[pos:pos+12])
pos += 12
name = data[pos:pos+name_length].decode("utf-16-le").strip("\x00")
pos += name_length
value = data[pos:pos+value_length]
pos += value_length
args = {'data': value, 'language': language, 'stream': stream}
if value_type == 2:
args['dword'] = False
attr = _attribute_types[value_type](**args)
asf.tags.append((name, attr))
def render(self, asf):
attrs = asf.to_metadata_library
data = "".join([attr.render_ml(name) for (name, attr) in attrs])
return (self.GUID + struct.pack("<QH", 26 + len(data), len(attrs)) +
data)
_object_types = {
ExtendedContentDescriptionObject.GUID: ExtendedContentDescriptionObject,
ContentDescriptionObject.GUID: ContentDescriptionObject,
FilePropertiesObject.GUID: FilePropertiesObject,
StreamPropertiesObject.GUID: StreamPropertiesObject,
HeaderExtensionObject.GUID: HeaderExtensionObject,
MetadataLibraryObject.GUID: MetadataLibraryObject,
MetadataObject.GUID: MetadataObject,
}
class ASF(FileType):
"""An ASF file, probably containing WMA or WMV."""
_mimes = ["audio/x-ms-wma", "audio/x-ms-wmv", "video/x-ms-asf",
"audio/x-wma", "video/x-wmv"]
def load(self, filename):
self.filename = filename
fileobj = file(filename, "rb")
try:
self.size = 0
self.size1 = 0
self.size2 = 0
self.offset1 = 0
self.offset2 = 0
self.num_objects = 0
self.info = ASFInfo()
self.tags = ASFTags()
self.__read_file(fileobj)
finally:
fileobj.close()
def save(self):
# Move attributes to the right objects
self.to_extended_content_description = {}
self.to_metadata = {}
self.to_metadata_library = []
for name, value in self.tags:
if name in _standard_attribute_names:
continue
large_value = value.data_size() > 0xFFFF
if (value.language is None and value.stream is None and
name not in self.to_extended_content_description and
not large_value):
self.to_extended_content_description[name] = value
elif (value.language is None and value.stream is not None and
name not in self.to_metadata and not large_value):
self.to_metadata[name] = value
else:
self.to_metadata_library.append((name, value))
# Add missing objects
if not self.content_description_obj:
self.content_description_obj = \
ContentDescriptionObject()
self.objects.append(self.content_description_obj)
if not self.extended_content_description_obj:
self.extended_content_description_obj = \
ExtendedContentDescriptionObject()
self.objects.append(self.extended_content_description_obj)
if not self.header_extension_obj:
self.header_extension_obj = \
HeaderExtensionObject()
self.objects.append(self.header_extension_obj)
if not self.metadata_obj:
self.metadata_obj = \
MetadataObject()
self.header_extension_obj.objects.append(self.metadata_obj)
if not self.metadata_library_obj:
self.metadata_library_obj = \
MetadataLibraryObject()
self.header_extension_obj.objects.append(self.metadata_library_obj)
# Render the header
data = "".join([obj.render(self) for obj in self.objects])
data = (HeaderObject.GUID +
struct.pack("<QL", len(data) + 30, len(self.objects)) +
"\x01\x02" + data)
fileobj = file(self.filename, "rb+")
try:
size = len(data)
if size > self.size:
insert_bytes(fileobj, size - self.size, self.size)
if size < self.size:
delete_bytes(fileobj, self.size - size, 0)
fileobj.seek(0)
fileobj.write(data)
finally:
fileobj.close()
def __read_file(self, fileobj):
header = fileobj.read(30)
if len(header) != 30 or header[:16] != HeaderObject.GUID:
raise ASFHeaderError, "Not an ASF file."
self.extended_content_description_obj = None
self.content_description_obj = None
self.header_extension_obj = None
self.metadata_obj = None
self.metadata_library_obj = None
self.size, self.num_objects = struct.unpack("<QL", header[16:28])
self.objects = []
for i in range(self.num_objects):
self.__read_object(fileobj)
def __read_object(self, fileobj):
guid, size = struct.unpack("<16sQ", fileobj.read(24))
if guid in _object_types:
obj = _object_types[guid]()
else:
obj = UnknownObject(guid)
data = fileobj.read(size - 24)
obj.parse(self, data, fileobj, size)
self.objects.append(obj)
def score(filename, fileobj, header):
return header.startswith(HeaderObject.GUID) * 2
score = staticmethod(score)
Open = ASF
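# Minimal usage sketch ("example.wma" is a placeholder path, not shipped with
# this module):
#
#   audio = ASF("example.wma")
#   print audio.info.pprint()
#   audio.tags["Title"] = u"New Title"
#   audio.save()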
|
OSSESAC/odoopubarquiluz
|
refs/heads/7.0
|
addons/l10n_in_hr_payroll/__openerp__.py
|
51
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Indian Payroll',
'category': 'Localization',
'init_xml': [],
'author': 'OpenERP SA',
'website':'http://www.openerp.com',
'depends': ['hr_payroll'],
'version': '1.0',
'description': """
Indian Payroll Salary Rules.
============================
- Configuration of hr_payroll for the India localization.
- All main contribution rules for Indian payslips.
* New payslip report
* Employee Contracts
* Allows configuring Basic / Gross / Net Salary
* Employee PaySlip
* Allowance / Deduction
* Integrated with Holiday Management
* Medical Allowance, Travel Allowance, Child Allowance, ...
- Payroll Advice and Report
- Yearly Salary by Head and Yearly Salary by Employee Report
""",
'active': False,
'update_xml': [
'l10n_in_hr_payroll_view.xml',
'data/l10n_in_hr_payroll_data.xml',
'data/hr.salary.rule.csv',
'security/ir.model.access.csv',
'l10n_in_hr_payroll_report.xml',
'l10n_in_hr_payroll_sequence.xml',
'wizard/hr_salary_employee_bymonth_view.xml',
'wizard/hr_yearly_salary_detail_view.xml',
'report/payment_advice_report_view.xml',
'report/payslip_report_view.xml',
],
'test': [
'test/payment_advice.yml',
'test/payment_advice_batch.yml'
],
'demo_xml': ['l10n_in_hr_payroll_demo.xml'],
'installable': True
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
OuterDeepSpace/OuterDeepSpace
|
refs/heads/master
|
libs/server/ige/IssueMngr.py
|
1
|
#
# Copyright 2001 - 2006 Ludek Smid [http://www.ospace.net/]
#
# This file is part of IGE - Outer Space.
#
# IGE - Outer Space is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# IGE - Outer Space is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with IGE - Outer Space; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
import log
import smtplib
class IssueMngr:
def __init__(self):
pass
def rpc_reportIssue(self, faultID, text):
# log it
log.message("ISSUE:\n", text)
# send it
try:
smtp = smtplib.SMTP("localhost")
smtp.sendmail(
"game_alpha@ospace.net",
"qark@ospace.net",
"Subject: IGE - Outer Space Issue %s\n\n%s" % (
faultID, text
)
)
smtp.quit()
except:
log.warning("Cannot send issue by e-mail")
return 1
def shutdown(self):
pass
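# Usage sketch (assumption): the manager is exposed through the game's RPC
# layer, so a client-side fault handler would call something like
#
#   issueMngr.rpc_reportIssue("FAULT-1234", traceback_text)
#
# which logs the report and then best-effort e-mails it via local SMTP.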
|