| repo_name (stringlengths 5–100) | ref (stringlengths 12–67) | path (stringlengths 4–244) | copies (stringlengths 1–8) | content (stringlengths 0–1.05M ⌀) |
|---|---|---|---|---|
| philanthropy-u/edx-platform | refs/heads/master | cms/djangoapps/contentstore/views/tests/test_organizations.py | 13 |
"""Tests covering the Organizations listing on the Studio home."""
import json
from django.urls import reverse
from django.test import TestCase
from mock import patch
from student.tests.factories import UserFactory
from util.organizations_helpers import add_organization
@patch.dict('django.conf.settings.FEATURES', {'ORGANIZATIONS_APP': True})
class TestOrganizationListing(TestCase):
"""Verify Organization listing behavior."""
@patch.dict('django.conf.settings.FEATURES', {'ORGANIZATIONS_APP': True})
def setUp(self):
super(TestOrganizationListing, self).setUp()
self.staff = UserFactory(is_staff=True)
self.client.login(username=self.staff.username, password='test')
self.org_names_listing_url = reverse('organizations')
self.org_short_names = ["alphaX", "betaX", "orgX"]
for index, short_name in enumerate(self.org_short_names):
add_organization(organization_data={
'name': 'Test Organization %s' % index,
'short_name': short_name,
'description': 'Testing Organization %s Description' % index,
})
def test_organization_list(self):
"""Verify that the organization names list api returns list of organization short names."""
response = self.client.get(self.org_names_listing_url, HTTP_ACCEPT='application/json')
self.assertEqual(response.status_code, 200)
org_names = json.loads(response.content)
self.assertEqual(org_names, self.org_short_names)
|
| gdementen/numba | refs/heads/master | numba/tests/test_datamodel.py | 6 |
from __future__ import print_function, absolute_import
from llvmlite import ir, binding as ll
from numba import types
from numba import unittest_support as unittest
from numba import datamodel
from numba.datamodel.testing import test_factory
class TestBool(test_factory()):
fe_type = types.boolean
class TestPyObject(test_factory()):
fe_type = types.pyobject
class TestInt8(test_factory()):
fe_type = types.int8
class TestInt16(test_factory()):
fe_type = types.int16
class TestInt32(test_factory()):
fe_type = types.int32
class TestInt64(test_factory()):
fe_type = types.int64
class TestUInt8(test_factory()):
fe_type = types.uint8
class TestUInt16(test_factory()):
fe_type = types.uint16
class TestUInt32(test_factory()):
fe_type = types.uint32
class TestUInt64(test_factory()):
fe_type = types.uint64
class TestFloat(test_factory()):
fe_type = types.float32
class TestDouble(test_factory()):
fe_type = types.float64
class TestComplex(test_factory()):
fe_type = types.complex64
class TestDoubleComplex(test_factory()):
fe_type = types.complex128
class TestPointerOfInt32(test_factory()):
fe_type = types.CPointer(types.int32)
class TestUniTupleOf2xInt32(test_factory()):
fe_type = types.UniTuple(types.int32, 2)
class TestUniTupleEmpty(test_factory()):
fe_type = types.UniTuple(types.int32, 0)
class TestTupleInt32Float32(test_factory()):
fe_type = types.Tuple([types.int32, types.float32])
class TestTupleEmpty(test_factory()):
fe_type = types.Tuple([])
class Test1DArrayOfInt32(test_factory()):
fe_type = types.Array(types.int32, 1, 'C')
class Test2DArrayOfComplex128(test_factory()):
fe_type = types.Array(types.complex128, 2, 'C')
class Test0DArrayOfInt32(test_factory()):
fe_type = types.Array(types.int32, 0, 'C')
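# Note (semantics assumed from usage, not stated in this file): test_factory()
# returns a fresh TestCase subclass carrying the data model checks, and each
# subclass's fe_type attribute selects the front-end Numba type to exercise.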
class TestArgInfo(unittest.TestCase):
def _test_as_arguments(self, fe_args):
"""
Test round-tripping types *fe_args* through the default data model's
argument conversion and unpacking logic.
"""
dmm = datamodel.default_manager
fi = datamodel.ArgPacker(dmm, fe_args)
module = ir.Module()
fnty = ir.FunctionType(ir.VoidType(), [])
function = ir.Function(module, fnty, name="test_arguments")
builder = ir.IRBuilder()
builder.position_at_end(function.append_basic_block())
args = [ir.Constant(dmm.lookup(t).get_value_type(), None)
for t in fe_args]
# Roundtrip
values = fi.as_arguments(builder, args)
asargs = fi.from_arguments(builder, values)
self.assertEqual(len(asargs), len(fe_args))
valtys = tuple([v.type for v in values])
self.assertEqual(valtys, fi.argument_types)
expect_types = [a.type for a in args]
got_types = [a.type for a in asargs]
self.assertEqual(expect_types, got_types)
# Assign names (check this doesn't raise)
fi.assign_names(values, ["arg%i" for i in range(len(fe_args))])
builder.ret_void()
ll.parse_assembly(str(module))
def test_int32_array_complex(self):
fe_args = [types.int32,
types.Array(types.int32, 1, 'C'),
types.complex64]
self._test_as_arguments(fe_args)
def test_two_arrays(self):
fe_args = [types.Array(types.int32, 1, 'C')] * 2
self._test_as_arguments(fe_args)
def test_two_0d_arrays(self):
fe_args = [types.Array(types.int32, 0, 'C')] * 2
self._test_as_arguments(fe_args)
def test_tuples(self):
fe_args = [types.UniTuple(types.int32, 2),
types.UniTuple(types.int32, 3)]
self._test_as_arguments(fe_args)
# Tuple of struct-likes
arrty = types.Array(types.int32, 1, 'C')
fe_args = [types.UniTuple(arrty, 2),
types.UniTuple(arrty, 3)]
self._test_as_arguments(fe_args)
# Nested tuple
fe_args = [types.UniTuple(types.UniTuple(types.int32, 2), 3)]
self._test_as_arguments(fe_args)
def test_empty_tuples(self):
# Empty tuple
fe_args = [types.UniTuple(types.int16, 0),
types.Tuple(()),
types.int32]
self._test_as_arguments(fe_args)
def test_nested_empty_tuples(self):
fe_args = [types.int32,
types.UniTuple(types.Tuple(()), 2),
types.int64]
self._test_as_arguments(fe_args)
if __name__ == '__main__':
unittest.main()
|
| joshka/SoundCloud2.Bundle | refs/heads/master | Contents/Libraries/Shared/requests/packages/chardet/constants.py | 3007 |
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 2001
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
# Shy Shalom - original C code
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
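# Note (semantics assumed from how chardet uses these constants): eDetecting /
# eFoundIt / eNotMe are prober result codes, eStart / eError / eItsMe are
# coding state machine states, and SHORTCUT_THRESHOLD is the confidence above
# which detection can stop early.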
_debug = 0
eDetecting = 0
eFoundIt = 1
eNotMe = 2
eStart = 0
eError = 1
eItsMe = 2
SHORTCUT_THRESHOLD = 0.95
|
| tesidroni/mp | refs/heads/master | Lib/site-packages/numpy/oldnumeric/alter_code1.py | 101 |
"""
This module converts code written for Numeric to run with numpy
Makes the following changes:
* Changes import statements (warns of use of from Numeric import *)
* Changes import statements (using numerix) ...
* Makes search and replace changes to:
- .typecode()
- .iscontiguous()
- .byteswapped()
- .itemsize()
- .toscalar()
* Converts .flat to .ravel() except for .flat = xxx or .flat[xxx]
* Replaces xxx.spacesaver() with True
* Converts xx.savespace(?) to pass + ## xx.savespace(?)
* Converts uses of 'b' to 'B' in the typecode-position of
functions:
eye, tri (in position 4)
ones, zeros, identity, empty, array, asarray, arange,
fromstring, indices, array_constructor (in position 2)
and methods:
astype --- only argument
-- converts uses of '1', 's', 'w', and 'u' to
-- 'b', 'h', 'H', and 'I'
* Converts uses of type(...) is <type>
to isinstance(..., <type>)
"""
__all__ = ['convertfile', 'convertall', 'converttree', 'convertsrc']
import sys
import os
import re
import glob
_func4 = ['eye', 'tri']
_meth1 = ['astype']
_func2 = ['ones', 'zeros', 'identity', 'fromstring', 'indices',
'empty', 'array', 'asarray', 'arange', 'array_constructor']
_chars = {'1':'b','s':'h','w':'H','u':'I'}
func_re = {}
meth_re = {}
for name in _func2:
_astr = r"""(%s\s*[(][^,]*?[,][^'"]*?['"])b(['"][^)]*?[)])"""%name
func_re[name] = re.compile(_astr, re.DOTALL)
for name in _func4:
_astr = r"""(%s\s*[(][^,]*?[,][^,]*?[,][^,]*?[,][^'"]*?['"])b(['"][^)]*?[)])"""%name
func_re[name] = re.compile(_astr, re.DOTALL)
for name in _meth1:
_astr = r"""(.%s\s*[(][^'"]*?['"])b(['"][^)]*?[)])"""%name
func_re[name] = re.compile(_astr, re.DOTALL)
for char in _chars.keys():
_astr = r"""(.astype\s*[(][^'"]*?['"])%s(['"][^)]*?[)])"""%char
meth_re[char] = re.compile(_astr, re.DOTALL)
def fixtypechars(fstr):
for name in _func2 + _func4 + _meth1:
fstr = func_re[name].sub('\\1B\\2',fstr)
for char in _chars.keys():
fstr = meth_re[char].sub('\\1%s\\2'%_chars[char], fstr)
return fstr
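# Hedged example of the rewrite above (hypothetical input):
#     fixtypechars("a = ones((2, 2), 'b')")
#     # -> "a = ones((2, 2), 'B')"   (only the typecode position changes)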
flatindex_re = re.compile(r'([.]flat(\s*?[[=]))')
def changeimports(fstr, name, newname):
importstr = 'import %s' % name
importasstr = 'import %s as ' % name
fromstr = 'from %s import ' % name
fromall=0
fstr = re.sub(r'(import\s+[^,\n\r]+,\s*)(%s)' % name,
"\\1%s as %s" % (newname, name), fstr)
fstr = fstr.replace(importasstr, 'import %s as ' % newname)
fstr = fstr.replace(importstr, 'import %s as %s' % (newname,name))
ind = 0
Nlen = len(fromstr)
Nlen2 = len("from %s import " % newname)
while 1:
found = fstr.find(fromstr,ind)
if (found < 0):
break
ind = found + Nlen
if fstr[ind] == '*':
continue
fstr = "%sfrom %s import %s" % (fstr[:found], newname, fstr[ind:])
ind += Nlen2 - Nlen
return fstr, fromall
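# Hedged examples of the import rewriting above (hypothetical inputs):
#     changeimports("import Numeric", 'Numeric', 'numpy.oldnumeric')
#     # -> ("import numpy.oldnumeric as Numeric", 0)
#     changeimports("from Numeric import ones", 'Numeric', 'numpy.oldnumeric')
#     # -> ("from numpy.oldnumeric import ones", 0)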
istest_re = {}
_types = ['float', 'int', 'complex', 'ArrayType', 'FloatType',
'IntType', 'ComplexType']
for name in _types:
_astr = r'type\s*[(]([^)]*)[)]\s+(?:is|==)\s+(.*?%s)'%name
istest_re[name] = re.compile(_astr)
def fixistesting(astr):
for name in _types:
astr = istest_re[name].sub('isinstance(\\1, \\2)', astr)
return astr
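# Hedged example (hypothetical input):
#     fixistesting("if type(a) is float:")
#     # -> "if isinstance(a, float):"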
def replaceattr(astr):
astr = astr.replace(".typecode()",".dtype.char")
astr = astr.replace(".iscontiguous()",".flags.contiguous")
astr = astr.replace(".byteswapped()",".byteswap()")
astr = astr.replace(".toscalar()", ".item()")
astr = astr.replace(".itemsize()",".itemsize")
# preserve uses of flat that should be o.k.
tmpstr = flatindex_re.sub(r"@@@@\2",astr)
# replace other uses of flat
tmpstr = tmpstr.replace(".flat",".ravel()")
# put back .flat where it was valid
astr = tmpstr.replace("@@@@", ".flat")
return astr
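# Hedged examples (hypothetical inputs):
#     replaceattr("x.typecode()")    # -> "x.dtype.char"
#     replaceattr("y = a.flat")      # -> "y = a.ravel()"
#     replaceattr("a.flat[0] = 1")   # unchanged: indexed/assigned .flat is kept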
svspc2 = re.compile(r'([^,(\s]+[.]spacesaver[(][)])')
svspc3 = re.compile(r'(\S+[.]savespace[(].*[)])')
#shpe = re.compile(r'(\S+\s*)[.]shape\s*=[^=]\s*(.+)')
def replaceother(astr):
astr = svspc2.sub('True',astr)
astr = svspc3.sub(r'pass ## \1', astr)
#astr = shpe.sub('\\1=\\1.reshape(\\2)', astr)
return astr
import datetime
def fromstr(filestr):
savestr = filestr[:]
filestr = fixtypechars(filestr)
filestr = fixistesting(filestr)
filestr, fromall1 = changeimports(filestr, 'Numeric', 'numpy.oldnumeric')
filestr, fromall1 = changeimports(filestr, 'multiarray','numpy.oldnumeric')
filestr, fromall1 = changeimports(filestr, 'umath', 'numpy.oldnumeric')
filestr, fromall1 = changeimports(filestr, 'Precision', 'numpy.oldnumeric.precision')
filestr, fromall1 = changeimports(filestr, 'UserArray', 'numpy.oldnumeric.user_array')
filestr, fromall1 = changeimports(filestr, 'ArrayPrinter', 'numpy.oldnumeric.array_printer')
filestr, fromall2 = changeimports(filestr, 'numerix', 'numpy.oldnumeric')
filestr, fromall3 = changeimports(filestr, 'scipy_base', 'numpy.oldnumeric')
filestr, fromall3 = changeimports(filestr, 'Matrix', 'numpy.oldnumeric.matrix')
filestr, fromall3 = changeimports(filestr, 'MLab', 'numpy.oldnumeric.mlab')
filestr, fromall3 = changeimports(filestr, 'LinearAlgebra', 'numpy.oldnumeric.linear_algebra')
filestr, fromall3 = changeimports(filestr, 'RNG', 'numpy.oldnumeric.rng')
filestr, fromall3 = changeimports(filestr, 'RNG.Statistics', 'numpy.oldnumeric.rng_stats')
filestr, fromall3 = changeimports(filestr, 'RandomArray', 'numpy.oldnumeric.random_array')
filestr, fromall3 = changeimports(filestr, 'FFT', 'numpy.oldnumeric.fft')
filestr, fromall3 = changeimports(filestr, 'MA', 'numpy.oldnumeric.ma')
fromall = fromall1 or fromall2 or fromall3
filestr = replaceattr(filestr)
filestr = replaceother(filestr)
if savestr != filestr:
today = datetime.date.today().strftime('%b %d, %Y')
name = os.path.split(sys.argv[0])[-1]
filestr = '## Automatically adapted for '\
'numpy.oldnumeric %s by %s\n\n%s' % (today, name, filestr)
return filestr, 1
return filestr, 0
def makenewfile(name, filestr):
fid = file(name, 'w')
fid.write(filestr)
fid.close()
def convertfile(filename, orig=1):
"""Convert the filename given from using Numeric to using NumPy
Copies the file to filename.orig and then over-writes the file
with the updated code
"""
fid = open(filename)
filestr = fid.read()
fid.close()
filestr, changed = fromstr(filestr)
if changed:
if orig:
base, ext = os.path.splitext(filename)
os.rename(filename, base+".orig")
else:
os.remove(filename)
makenewfile(filename, filestr)
def fromargs(args):
filename = args[1]
converttree(filename)
def convertall(direc=os.path.curdir, orig=1):
"""Convert all .py files to use numpy.oldnumeric (from Numeric) in the directory given
For each changed file, a backup of <usesnumeric>.py is made as
<usesnumeric>.py.orig. A new file named <usesnumeric>.py
is then written with the updated code.
"""
files = glob.glob(os.path.join(direc,'*.py'))
for afile in files:
if afile[-8:] == 'setup.py': continue # skip these
convertfile(afile, orig)
header_re = re.compile(r'(Numeric/arrayobject.h)')
def convertsrc(direc=os.path.curdir, ext=None, orig=1):
"""Replace Numeric/arrayobject.h with numpy/oldnumeric.h in all files in the
directory with extension give by list ext (if ext is None, then all files are
replaced)."""
if ext is None:
files = glob.glob(os.path.join(direc,'*'))
else:
files = []
for aext in ext:
files.extend(glob.glob(os.path.join(direc,"*.%s" % aext)))
for afile in files:
fid = open(afile)
fstr = fid.read()
fid.close()
fstr, n = header_re.subn(r'numpy/oldnumeric.h',fstr)
if n > 0:
if orig:
base, ext = os.path.splitext(afile)
os.rename(afile, base+".orig")
else:
os.remove(afile)
makenewfile(afile, fstr)
def _func(arg, dirname, fnames):
convertall(dirname, orig=0)
convertsrc(dirname, ext=['h','c'], orig=0)
def converttree(direc=os.path.curdir):
"""Convert all .py files and source code files in the tree given
"""
os.path.walk(direc, _func, None)
if __name__ == '__main__':
fromargs(sys.argv)
|
| chuan9/chromium-crosswalk | refs/heads/master | tools/cygprofile/check_orderfile.py | 30 |
#!/usr/bin/python
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Check that symbols are ordered into a binary as they appear in the orderfile.
"""
import logging
import optparse
import sys
import cyglog_to_orderfile
import cygprofile_utils
import patch_orderfile
import symbol_extractor
_MAX_WARNINGS_TO_PRINT = 200
def _IsSameMethod(name1, name2):
"""Returns true if name1 or name2 are split method forms of the other."""
return patch_orderfile.RemoveSuffixes(name1) == \
patch_orderfile.RemoveSuffixes(name2)
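# Hedged illustration (the suffix shown is hypothetical; the exact suffixes
# stripped are defined by patch_orderfile.RemoveSuffixes):
#     _IsSameMethod('foo', 'foo.part.1')
#     # -> True, if '.part.1' is one of the recognized split-method suffixes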
def _CountMisorderedSymbols(symbols, symbol_infos):
"""Count the number of misordered symbols, and log them.
Args:
symbols: ordered sequence of symbols from the orderfile
symbol_infos: ordered list of SymbolInfo from the binary
Returns:
(misordered_pairs_count, matched_symbols_count, unmatched_symbols_count)
"""
name_to_symbol_info = symbol_extractor.CreateNameToSymbolInfo(symbol_infos)
matched_symbol_infos = []
missing_count = 0
misordered_count = 0
# Find the SymbolInfo matching the orderfile symbols in the binary.
for symbol in symbols:
if symbol in name_to_symbol_info:
matched_symbol_infos.append(name_to_symbol_info[symbol])
else:
missing_count += 1
if missing_count < _MAX_WARNINGS_TO_PRINT:
logging.warning('Symbol "%s" is in the orderfile, not in the binary' %
symbol)
logging.info('%d matched symbols, %d un-matched (Only the first %d unmatched'
' symbols are shown)' % (
len(matched_symbol_infos), missing_count,
_MAX_WARNINGS_TO_PRINT))
# In the order of the orderfile, find all the symbols that are at an offset
# smaller than their immediate predecessor, and record the pair.
previous_symbol_info = symbol_extractor.SymbolInfo(
name='', offset=-1, size=0, section='')
for symbol_info in matched_symbol_infos:
if symbol_info.offset < previous_symbol_info.offset and not (
_IsSameMethod(symbol_info.name, previous_symbol_info.name)):
logging.warning('Misordered pair: %s - %s' % (
str(previous_symbol_info), str(symbol_info)))
misordered_count += 1
previous_symbol_info = symbol_info
return (misordered_count, len(matched_symbol_infos), missing_count)
def main():
parser = optparse.OptionParser(usage=
'usage: %prog [options] <binary> <orderfile>')
parser.add_option('--target-arch', action='store', dest='arch',
choices=['arm', 'arm64', 'x86', 'x86_64', 'x64', 'mips'],
help='The target architecture for the binary.')
parser.add_option('--threshold', action='store', dest='threshold', default=0,
help='The maximum allowed number of out-of-order symbols.')
options, argv = parser.parse_args(sys.argv)
if not options.arch:
options.arch = cygprofile_utils.DetectArchitecture()
if len(argv) != 3:
parser.print_help()
return 1
(binary_filename, orderfile_filename) = argv[1:]
symbol_extractor.SetArchitecture(options.arch)
obj_dir = cygprofile_utils.GetObjDir(binary_filename)
symbol_to_sections_map = \
cyglog_to_orderfile.GetSymbolToSectionsMapFromObjectFiles(obj_dir)
section_to_symbols_map = cygprofile_utils.InvertMapping(
symbol_to_sections_map)
symbols = patch_orderfile.GetSymbolsFromOrderfile(orderfile_filename,
section_to_symbols_map)
symbol_infos = symbol_extractor.SymbolInfosFromBinary(binary_filename)
# Missing symbols are not an error since some of them can be eliminated
# through inlining.
(misordered_pairs_count, matched_symbols, _) = _CountMisorderedSymbols(
symbols, symbol_infos)
return (misordered_pairs_count > options.threshold) or (matched_symbols == 0)
if __name__ == '__main__':
logging.basicConfig(level=logging.INFO)
sys.exit(main())
|
| poljeff/odoo | refs/heads/8.0 | addons/base_report_designer/plugin/openerp_report_designer/bin/script/compile_all.py | 384 |
#########################################################################
#
# Copyright (c) 2003-2004 Danny Brewer d29583@groovegarden.com
# Copyright (C) 2004-2010 OpenERP SA (<http://openerp.com>).
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# See: http://www.gnu.org/licenses/lgpl.html
#
#############################################################################
import compileall
compileall.compile_dir('package')
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
| gnuhub/intellij-community | refs/heads/master | python/lib/Lib/string.py | 92 |
"""A collection of string operations (most are no longer used).
Warning: most of the code you see here isn't normally used nowadays.
Beginning with Python 1.6, many of these functions are implemented as
methods on the standard string object. They used to be implemented by
a built-in module called strop, but strop is now obsolete itself.
Public module variables:
whitespace -- a string containing all characters considered whitespace
lowercase -- a string containing all characters considered lowercase letters
uppercase -- a string containing all characters considered uppercase letters
letters -- a string containing all characters considered letters
digits -- a string containing all characters considered decimal digits
hexdigits -- a string containing all characters considered hexadecimal digits
octdigits -- a string containing all characters considered octal digits
punctuation -- a string containing all characters considered punctuation
printable -- a string containing all characters considered printable
"""
# Some strings for ctype-style character classification
whitespace = ' \t\n\r\v\f'
lowercase = 'abcdefghijklmnopqrstuvwxyz'
uppercase = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
letters = lowercase + uppercase
ascii_lowercase = lowercase
ascii_uppercase = uppercase
ascii_letters = ascii_lowercase + ascii_uppercase
digits = '0123456789'
hexdigits = digits + 'abcdef' + 'ABCDEF'
octdigits = '01234567'
punctuation = """!"#$%&'()*+,-./:;<=>?@[\]^_`{|}~"""
printable = digits + letters + punctuation + whitespace
# Case conversion helpers
# Use str to convert Unicode literal in case of -U
l = map(chr, xrange(256))
_idmap = str('').join(l)
del l
# Functions which aren't available as string methods.
# Capitalize the words in a string, e.g. " aBc dEf " -> "Abc Def".
def capwords(s, sep=None):
"""capwords(s, [sep]) -> string
Split the argument into words using split, capitalize each
word using capitalize, and join the capitalized words using
join. Note that this replaces runs of whitespace characters by
a single space.
"""
return (sep or ' ').join([x.capitalize() for x in s.split(sep)])
# Construct a translation string
_idmapL = None
def maketrans(fromstr, tostr):
"""maketrans(frm, to) -> string
Return a translation table (a string of 256 bytes long)
suitable for use in string.translate. The strings frm and to
must be of the same length.
"""
if len(fromstr) != len(tostr):
raise ValueError, "maketrans arguments must have same length"
global _idmapL
if not _idmapL:
_idmapL = map(None, _idmap)
L = _idmapL[:]
fromstr = map(ord, fromstr)
for i in range(len(fromstr)):
L[fromstr[i]] = tostr[i]
return ''.join(L)
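# Hedged usage sketch (translate() is the wrapper defined further down in
# this module):
#     table = maketrans('abc', 'xyz')
#     translate('aabbcc', table)    # -> 'xxyyzz'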
####################################################################
import re as _re
class _multimap:
"""Helper class for combining multiple mappings.
Used by .{safe_,}substitute() to combine the mapping and keyword
arguments.
"""
def __init__(self, primary, secondary):
self._primary = primary
self._secondary = secondary
def __getitem__(self, key):
try:
return self._primary[key]
except KeyError:
return self._secondary[key]
class _TemplateMetaclass(type):
pattern = r"""
%(delim)s(?:
(?P<escaped>%(delim)s) | # Escape sequence of two delimiters
(?P<named>%(id)s) | # delimiter and a Python identifier
{(?P<braced>%(id)s)} | # delimiter and a braced identifier
(?P<invalid>) # Other ill-formed delimiter exprs
)
"""
def __init__(cls, name, bases, dct):
super(_TemplateMetaclass, cls).__init__(name, bases, dct)
if 'pattern' in dct:
pattern = cls.pattern
else:
pattern = _TemplateMetaclass.pattern % {
'delim' : _re.escape(cls.delimiter),
'id' : cls.idpattern,
}
cls.pattern = _re.compile(pattern, _re.IGNORECASE | _re.VERBOSE)
class Template:
"""A string class for supporting $-substitutions."""
__metaclass__ = _TemplateMetaclass
delimiter = '$'
idpattern = r'[_a-z][_a-z0-9]*'
def __init__(self, template):
self.template = template
# Search for $$, $identifier, ${identifier}, and any bare $'s
def _invalid(self, mo):
i = mo.start('invalid')
lines = self.template[:i].splitlines(True)
if not lines:
colno = 1
lineno = 1
else:
colno = i - len(''.join(lines[:-1]))
lineno = len(lines)
raise ValueError('Invalid placeholder in string: line %d, col %d' %
(lineno, colno))
def substitute(self, *args, **kws):
if len(args) > 1:
raise TypeError('Too many positional arguments')
if not args:
mapping = kws
elif kws:
mapping = _multimap(kws, args[0])
else:
mapping = args[0]
# Helper function for .sub()
def convert(mo):
# Check the most common path first.
named = mo.group('named') or mo.group('braced')
if named is not None:
val = mapping[named]
# We use this idiom instead of str() because the latter will
# fail if val is a Unicode containing non-ASCII characters.
return '%s' % (val,)
if mo.group('escaped') is not None:
return self.delimiter
if mo.group('invalid') is not None:
self._invalid(mo)
raise ValueError('Unrecognized named group in pattern',
self.pattern)
return self.pattern.sub(convert, self.template)
def safe_substitute(self, *args, **kws):
if len(args) > 1:
raise TypeError('Too many positional arguments')
if not args:
mapping = kws
elif kws:
mapping = _multimap(kws, args[0])
else:
mapping = args[0]
# Helper function for .sub()
def convert(mo):
named = mo.group('named')
if named is not None:
try:
# We use this idiom instead of str() because the latter
# will fail if val is a Unicode containing non-ASCII
return '%s' % (mapping[named],)
except KeyError:
return self.delimiter + named
braced = mo.group('braced')
if braced is not None:
try:
return '%s' % (mapping[braced],)
except KeyError:
return self.delimiter + '{' + braced + '}'
if mo.group('escaped') is not None:
return self.delimiter
if mo.group('invalid') is not None:
return self.delimiter
raise ValueError('Unrecognized named group in pattern',
self.pattern)
return self.pattern.sub(convert, self.template)
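# Hedged examples of the documented Template behavior:
#     Template('$who likes $what').substitute(who='tim', what='kung pao')
#     # -> 'tim likes kung pao'
#     Template('$who likes $what').safe_substitute(who='tim')
#     # -> 'tim likes $what'   (missing names are left in place)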
####################################################################
# NOTE: Everything below here is deprecated. Use string methods instead.
# This stuff will go away in Python 3.0.
# Backward compatible names for exceptions
index_error = ValueError
atoi_error = ValueError
atof_error = ValueError
atol_error = ValueError
# convert UPPER CASE letters to lower case
def lower(s):
"""lower(s) -> string
Return a copy of the string s converted to lowercase.
"""
return s.lower()
# Convert lower case letters to UPPER CASE
def upper(s):
"""upper(s) -> string
Return a copy of the string s converted to uppercase.
"""
return s.upper()
# Swap lower case letters and UPPER CASE
def swapcase(s):
"""swapcase(s) -> string
Return a copy of the string s with upper case characters
converted to lowercase and vice versa.
"""
return s.swapcase()
# Strip leading and trailing tabs and spaces
def strip(s, chars=None):
"""strip(s [,chars]) -> string
Return a copy of the string s with leading and trailing
whitespace removed.
If chars is given and not None, remove characters in chars instead.
If chars is unicode, S will be converted to unicode before stripping.
"""
return s.strip(chars)
# Strip leading tabs and spaces
def lstrip(s, chars=None):
"""lstrip(s [,chars]) -> string
Return a copy of the string s with leading whitespace removed.
If chars is given and not None, remove characters in chars instead.
"""
return s.lstrip(chars)
# Strip trailing tabs and spaces
def rstrip(s, chars=None):
"""rstrip(s [,chars]) -> string
Return a copy of the string s with trailing whitespace removed.
If chars is given and not None, remove characters in chars instead.
"""
return s.rstrip(chars)
# Split a string into a list of space/tab-separated words
def split(s, sep=None, maxsplit=-1):
"""split(s [,sep [,maxsplit]]) -> list of strings
Return a list of the words in the string s, using sep as the
delimiter string. If maxsplit is given, splits at no more than
maxsplit places (resulting in at most maxsplit+1 words). If sep
is not specified or is None, any whitespace string is a separator.
(split and splitfields are synonymous)
"""
return s.split(sep, maxsplit)
splitfields = split
# Split a string into a list of space/tab-separated words
def rsplit(s, sep=None, maxsplit=-1):
"""rsplit(s [,sep [,maxsplit]]) -> list of strings
Return a list of the words in the string s, using sep as the
delimiter string, starting at the end of the string and working
to the front. If maxsplit is given, at most maxsplit splits are
done. If sep is not specified or is None, any whitespace string
is a separator.
"""
return s.rsplit(sep, maxsplit)
# Join fields with optional separator
def join(words, sep = ' '):
"""join(list [,sep]) -> string
Return a string composed of the words in list, with
intervening occurrences of sep. The default separator is a
single space.
(joinfields and join are synonymous)
"""
return sep.join(words)
joinfields = join
# Find substring, raise exception if not found
def index(s, *args):
"""index(s, sub [,start [,end]]) -> int
Like find but raises ValueError when the substring is not found.
"""
return s.index(*args)
# Find last substring, raise exception if not found
def rindex(s, *args):
"""rindex(s, sub [,start [,end]]) -> int
Like rfind but raises ValueError when the substring is not found.
"""
return s.rindex(*args)
# Count non-overlapping occurrences of substring
def count(s, *args):
"""count(s, sub[, start[,end]]) -> int
Return the number of occurrences of substring sub in string
s[start:end]. Optional arguments start and end are
interpreted as in slice notation.
"""
return s.count(*args)
# Find substring, return -1 if not found
def find(s, *args):
"""find(s, sub [,start [,end]]) -> in
Return the lowest index in s where substring sub is found,
such that sub is contained within s[start,end]. Optional
arguments start and end are interpreted as in slice notation.
Return -1 on failure.
"""
return s.find(*args)
# Find last substring, return -1 if not found
def rfind(s, *args):
"""rfind(s, sub [,start [,end]]) -> int
Return the highest index in s where substring sub is found,
such that sub is contained within s[start,end]. Optional
arguments start and end are interpreted as in slice notation.
Return -1 on failure.
"""
return s.rfind(*args)
# for a bit of speed
_float = float
_int = int
_long = long
# Convert string to float
def atof(s):
"""atof(s) -> float
Return the floating point number represented by the string s.
"""
return _float(s)
# Convert string to integer
def atoi(s , base=10):
"""atoi(s [,base]) -> int
Return the integer represented by the string s in the given
base, which defaults to 10. The string s must consist of one
or more digits, possibly preceded by a sign. If base is 0, it
is chosen from the leading characters of s, 0 for octal, 0x or
0X for hexadecimal. If base is 16, a preceding 0x or 0X is
accepted.
"""
return _int(s, base)
# Convert string to long integer
def atol(s, base=10):
"""atol(s [,base]) -> long
Return the long integer represented by the string s in the
given base, which defaults to 10. The string s must consist
of one or more digits, possibly preceded by a sign. If base
is 0, it is chosen from the leading characters of s, 0 for
octal, 0x or 0X for hexadecimal. If base is 16, a preceding
0x or 0X is accepted. A trailing L or l is not accepted,
unless base is 0.
"""
return _long(s, base)
# Left-justify a string
def ljust(s, width, *args):
"""ljust(s, width[, fillchar]) -> string
Return a left-justified version of s, in a field of the
specified width, padded with spaces as needed. The string is
never truncated. If specified the fillchar is used instead of spaces.
"""
return s.ljust(width, *args)
# Right-justify a string
def rjust(s, width, *args):
"""rjust(s, width[, fillchar]) -> string
Return a right-justified version of s, in a field of the
specified width, padded with spaces as needed. The string is
never truncated. If specified the fillchar is used instead of spaces.
"""
return s.rjust(width, *args)
# Center a string
def center(s, width, *args):
"""center(s, width[, fillchar]) -> string
Return a centered version of s, in a field of the specified
width, padded with spaces as needed. The string is never
truncated. If specified the fillchar is used instead of spaces.
"""
return s.center(width, *args)
# Zero-fill a number, e.g., (12, 3) --> '012' and (-3, 3) --> '-03'
# Decadent feature: the argument may be a string or a number
# (Use of this is deprecated; it should be a string as with ljust c.s.)
def zfill(x, width):
"""zfill(x, width) -> string
Pad a numeric string x with zeros on the left, to fill a field
of the specified width. The string x is never truncated.
"""
if not isinstance(x, basestring):
x = repr(x)
return x.zfill(width)
# Expand tabs in a string.
# Doesn't take non-printing chars into account, but does understand \n.
def expandtabs(s, tabsize=8):
"""expandtabs(s [,tabsize]) -> string
Return a copy of the string s with all tab characters replaced
by the appropriate number of spaces, depending on the current
column, and the tabsize (default 8).
"""
return s.expandtabs(tabsize)
# Character translation through look-up table.
def translate(s, table, deletions=""):
"""translate(s,table [,deletions]) -> string
Return a copy of the string s, where all characters occurring
in the optional argument deletions are removed, and the
remaining characters have been mapped through the given
translation table, which must be a string of length 256. The
deletions argument is not allowed for Unicode strings.
"""
if deletions:
return s.translate(table, deletions)
else:
# Add s[:0] so that if s is Unicode and table is an 8-bit string,
# table is converted to Unicode. This means that table *cannot*
# be a dictionary -- for that feature, use u.translate() directly.
return s.translate(table + s[:0])
# Capitalize a string, e.g. "aBc dEf" -> "Abc def".
def capitalize(s):
"""capitalize(s) -> string
Return a copy of the string s with only its first character
capitalized.
"""
return s.capitalize()
# Substring replacement (global)
def replace(s, old, new, maxsplit=-1):
"""replace (str, old, new[, maxsplit]) -> string
Return a copy of string str with all occurrences of substring
old replaced by new. If the optional argument maxsplit is
given, only the first maxsplit occurrences are replaced.
"""
return s.replace(old, new, maxsplit)
# Try importing optional built-in module "strop" -- if it exists,
# it redefines some string operations that are 100-1000 times faster.
# It also defines values for whitespace, lowercase and uppercase
# that match <ctype.h>'s definitions.
try:
from strop import maketrans, lowercase, uppercase, whitespace
letters = lowercase + uppercase
except ImportError:
pass # Use the original versions
|
| jeffery9/mixprint_addons | refs/heads/master | share/wizard/share_wizard.py | 5 |
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import logging
import random
import time
from urllib import quote_plus
import uuid
from openerp import SUPERUSER_ID
import simplejson
from openerp import tools
from openerp.osv import fields, osv
from openerp.osv import expression
from openerp.tools.translate import _
from openerp.tools.safe_eval import safe_eval
import openerp
_logger = logging.getLogger(__name__)
FULL_ACCESS = ('perm_read', 'perm_write', 'perm_create', 'perm_unlink')
READ_WRITE_ACCESS = ('perm_read', 'perm_write')
READ_ONLY_ACCESS = ('perm_read',)
UID_ROOT = 1
# Pseudo-domain to represent an empty filter, constructed using
# osv.expression's DUMMY_LEAF
DOMAIN_ALL = [(1, '=', 1)]
# A good selection of easy to read password characters (e.g. no '0' vs 'O', etc.)
RANDOM_PASS_CHARACTERS = 'aaaabcdeeeefghjkmnpqrstuvwxyzAAAABCDEEEEFGHJKLMNPQRSTUVWXYZ23456789'
def generate_random_pass():
return ''.join(random.sample(RANDOM_PASS_CHARACTERS,10))
class share_wizard(osv.TransientModel):
_name = 'share.wizard'
_description = 'Share Wizard'
def _assert(self, condition, error_message, context=None):
"""Raise a user error with the given message if condition is not met.
The error_message should have been translated with _().
"""
if not condition:
raise osv.except_osv(_('Sharing access cannot be created.'), error_message)
def has_group(self, cr, uid, module, group_xml_id, context=None):
"""Returns True if current user is a member of the group identified by the module, group_xml_id pair."""
# if the group was deleted or does not exist, we say NO (better safe than sorry)
try:
model, group_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, module, group_xml_id)
except ValueError:
return False
return group_id in self.pool.get('res.users').read(cr, uid, uid, ['groups_id'], context=context)['groups_id']
def has_share(self, cr, uid, unused_param, context=None):
return self.has_group(cr, uid, module='share', group_xml_id='group_share_user', context=context)
def _user_type_selection(self, cr, uid, context=None):
"""Selection values may be easily overridden/extended via inheritance"""
return [('embedded', _('Direct link or embed code')), ('emails',_('Emails')), ]
"""Override of create() to auto-compute the action name"""
def create(self, cr, uid, values, context=None):
if 'action_id' in values and not 'name' in values:
action = self.pool.get('ir.actions.actions').browse(cr, uid, values['action_id'], context=context)
values['name'] = action.name
return super(share_wizard,self).create(cr, uid, values, context=context)
def share_url_template(self, cr, uid, _ids, context=None):
# NOTE: take _ids in parameter to allow usage through browse_record objects
base_url = self.pool.get('ir.config_parameter').get_param(cr, uid, 'web.base.url', default='', context=context)
if base_url:
base_url += '/login?db=%(dbname)s&login=%(login)s&key=%(password)s'
extra = context and context.get('share_url_template_extra_arguments')
if extra:
base_url += '&' + '&'.join('%s=%%(%s)s' % (x,x) for x in extra)
hash_ = context and context.get('share_url_template_hash_arguments')
if hash_:
base_url += '#' + '&'.join('%s=%%(%s)s' % (x,x) for x in hash_)
return base_url
def _share_root_url(self, cr, uid, ids, _fieldname, _args, context=None):
result = dict.fromkeys(ids, '')
data = dict(dbname=cr.dbname, login='', password='')
for this in self.browse(cr, uid, ids, context=context):
result[this.id] = this.share_url_template() % data
return result
def _generate_embedded_code(self, wizard, options=None):
cr = wizard._cr
uid = wizard._uid
context = wizard._context
if options is None:
options = {}
js_options = {}
title = options['title'] if 'title' in options else wizard.embed_option_title
search = (options['search'] if 'search' in options else wizard.embed_option_search) if wizard.access_mode != 'readonly' else False
if not title:
js_options['display_title'] = False
if search:
js_options['search_view'] = True
js_options_str = (', ' + simplejson.dumps(js_options)) if js_options else ''
base_url = self.pool.get('ir.config_parameter').get_param(cr, uid, 'web.base.url', default=None, context=context)
user = wizard.result_line_ids[0]
return """
<script type="text/javascript" src="%(base_url)s/web/webclient/js"></script>
<script type="text/javascript">
new openerp.init(%(init)s).web.embed(%(server)s, %(dbname)s, %(login)s, %(password)s,%(action)d%(options)s);
</script> """ % {
'init': simplejson.dumps(openerp.conf.server_wide_modules),
'base_url': base_url or '',
'server': simplejson.dumps(base_url),
'dbname': simplejson.dumps(cr.dbname),
'login': simplejson.dumps(user.login),
'password': simplejson.dumps(user.password),
'action': user.user_id.action_id.id,
'options': js_options_str,
}
def _embed_code(self, cr, uid, ids, _fn, _args, context=None):
result = dict.fromkeys(ids, '')
for this in self.browse(cr, uid, ids, context=context):
result[this.id] = self._generate_embedded_code(this)
return result
def _embed_url(self, cr, uid, ids, _fn, _args, context=None):
if context is None:
context = {}
result = dict.fromkeys(ids, '')
for this in self.browse(cr, uid, ids, context=context):
if this.result_line_ids:
ctx = dict(context, share_url_template_hash_arguments=['action_id'])
user = this.result_line_ids[0]
data = dict(dbname=cr.dbname, login=user.login, password=user.password, action_id=this.action_id.id)
result[this.id] = this.share_url_template(context=ctx) % data
return result
_columns = {
'action_id': fields.many2one('ir.actions.act_window', 'Action to share', required=True,
help="The action that opens the screen containing the data you wish to share."),
'view_type': fields.char('Current View Type', size=32, required=True),
'domain': fields.char('Domain', size=256, help="Optional domain for further data filtering"),
'user_type': fields.selection(lambda s, *a, **k: s._user_type_selection(*a, **k),'Sharing method', required=True,
help="Select the type of user(s) you would like to share data with."),
'new_users': fields.text("Emails"),
'email_1': fields.char('New user email', size=64),
'email_2': fields.char('New user email', size=64),
'email_3': fields.char('New user email', size=64),
'invite': fields.boolean('Invite users to OpenSocial record'),
'access_mode': fields.selection([('readonly','Can view'),('readwrite','Can edit')],'Access Mode', required=True,
help="Access rights to be granted on the shared documents."),
'result_line_ids': fields.one2many('share.wizard.result.line', 'share_wizard_id', 'Summary', readonly=True),
'share_root_url': fields.function(_share_root_url, string='Share Access URL', type='char', size=512, readonly=True,
help='Main access page for users that are granted shared access'),
'name': fields.char('Share Title', size=64, required=True, help="Title for the share (displayed to users as menu and shortcut name)"),
'record_name': fields.char('Record name', size=128, help="Name of the shared record, if sharing a precise record"),
'message': fields.text("Personal Message", help="An optional personal message, to be included in the email notification."),
'embed_code': fields.function(_embed_code, type='text', string='Code',
help="Embed this code in your documents to provide a link to the "\
"shared document."),
'embed_option_title': fields.boolean('Display title'),
'embed_option_search': fields.boolean('Display search view'),
'embed_url': fields.function(_embed_url, string='Share URL', type='char', size=512, readonly=True),
}
_defaults = {
'view_type': 'page',
'user_type' : 'embedded',
'invite': False,
'domain': lambda self, cr, uid, context, *a: context.get('domain', '[]'),
'action_id': lambda self, cr, uid, context, *a: context.get('action_id'),
'access_mode': 'readwrite',
'embed_option_title': True,
'embed_option_search': True,
}
def has_email(self, cr, uid, context=None):
return bool(self.pool.get('res.users').browse(cr, uid, uid, context=context).email)
def go_step_1(self, cr, uid, ids, context=None):
wizard_data = self.browse(cr,uid,ids,context)[0]
if wizard_data.user_type == 'emails' and not self.has_email(cr, uid, context=context):
raise osv.except_osv(_('No email address configured'),
_('You must configure your email address in the user preferences before using the Share button.'))
model, res_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'share', 'action_share_wizard_step1')
action = self.pool.get(model).read(cr, uid, res_id, context=context)
action['res_id'] = ids[0]
action.pop('context', '')
return action
def _create_share_group(self, cr, uid, wizard_data, context=None):
group_obj = self.pool.get('res.groups')
share_group_name = '%s: %s (%d-%s)' %('Shared', wizard_data.name, uid, time.time())
# create share group without putting admin in it
return group_obj.create(cr, UID_ROOT, {'name': share_group_name, 'share': True}, {'noadmin': True})
def _create_new_share_users(self, cr, uid, wizard_data, group_id, context=None):
"""Create one new res.users record for each email address provided in
wizard_data.new_users, ignoring already existing users.
Populates wizard_data.result_line_ids with one new line for
each user (existing or not). New users will also have a value
for the password field, so they can receive it by email.
Returns the ids of the created users, and the ids of the
ignored, existing ones."""
if context is None:
context = {}
user_obj = self.pool.get('res.users')
current_user = user_obj.browse(cr, UID_ROOT, uid, context=context)
# modify context to disable shortcuts when creating share users
context['noshortcut'] = True
created_ids = []
existing_ids = []
if wizard_data.user_type == 'emails':
# get new user list from email data
new_users = (wizard_data.new_users or '').split('\n')
new_users += [wizard_data.email_1 or '', wizard_data.email_2 or '', wizard_data.email_3 or '']
for new_user in new_users:
# Ignore blank lines
new_user = new_user.strip()
if not new_user: continue
# Ignore the user if it already exists.
if not wizard_data.invite:
existing = user_obj.search(cr, UID_ROOT, [('login', '=', new_user)])
else:
existing = user_obj.search(cr, UID_ROOT, [('email', '=', new_user)])
existing_ids.extend(existing)
if existing:
new_line = { 'user_id': existing[0],
'newly_created': False}
wizard_data.write({'result_line_ids': [(0,0,new_line)]})
continue
new_pass = generate_random_pass()
user_id = user_obj.create(cr, UID_ROOT, {
'login': new_user,
'password': new_pass,
'name': new_user,
'email': new_user,
'groups_id': [(6,0,[group_id])],
'share': True,
'company_id': current_user.company_id.id,
'company_ids': [(6, 0, [current_user.company_id.id])],
}, context)
new_line = { 'user_id': user_id,
'password': new_pass,
'newly_created': True}
wizard_data.write({'result_line_ids': [(0,0,new_line)]})
created_ids.append(user_id)
elif wizard_data.user_type == 'embedded':
new_login = 'embedded-%s' % (uuid.uuid4().hex,)
new_pass = generate_random_pass()
user_id = user_obj.create(cr, UID_ROOT, {
'login': new_login,
'password': new_pass,
'name': new_login,
'groups_id': [(6,0,[group_id])],
'share': True,
'company_id': current_user.company_id.id,
'company_ids': [(6, 0, [current_user.company_id.id])],
}, context)
new_line = { 'user_id': user_id,
'password': new_pass,
'newly_created': True}
wizard_data.write({'result_line_ids': [(0,0,new_line)]})
created_ids.append(user_id)
return created_ids, existing_ids
def _create_shortcut(self, cr, uid, values, context=None):
if context is None:
context = {}
new_context = context.copy()
for key in context:
if key.startswith('default_'):
del new_context[key]
dataobj = self.pool.get('ir.model.data')
menu_id = dataobj._get_id(cr, uid, 'base', 'menu_administration_shortcut')
shortcut_menu_id = int(dataobj.read(cr, uid, menu_id, ['res_id'], new_context)['res_id'])
action_id = self.pool.get('ir.actions.act_window').create(cr, UID_ROOT, values, new_context)
menu_data = {'name': values['name'],
'sequence': 10,
'action': 'ir.actions.act_window,'+str(action_id),
'parent_id': shortcut_menu_id,
'icon': 'STOCK_JUSTIFY_FILL'}
menu_obj = self.pool.get('ir.ui.menu')
menu_id = menu_obj.create(cr, UID_ROOT, menu_data)
sc_data = {'name': values['name'], 'sequence': UID_ROOT,'res_id': menu_id }
self.pool.get('ir.ui.view_sc').create(cr, uid, sc_data, new_context)
# update menu cache
user_groups = set(self.pool.get('res.users').read(cr, UID_ROOT, uid, ['groups_id'])['groups_id'])
key = (cr.dbname, shortcut_menu_id, tuple(user_groups))
menu_obj._cache[key] = True
return action_id
def _cleanup_action_context(self, context_str, user_id):
"""Returns a dict representing the context_str evaluated (safe_eval) as
a dict where items that are not useful for shared actions
have been removed. If the evaluation of context_str as a
dict fails, context_str is returned unaltered.
:param user_id: the integer uid to be passed as 'uid' in the
evaluation context
"""
result = False
if context_str:
try:
context = safe_eval(context_str, tools.UnquoteEvalContext(), nocopy=True)
result = dict(context)
for key in context:
# Remove all context keys that seem to toggle default
# filters based on the current user, as it makes no sense
# for shared users, who would not see any data by default.
if key and key.startswith('search_default_') and 'user_id' in key:
result.pop(key)
except Exception:
# Note: must catch all exceptions, as UnquoteEvalContext may cause many
# different exceptions, as it shadows builtins.
_logger.debug("Failed to cleanup action context as it does not parse server-side", exc_info=True)
result = context_str
return result
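# Hedged illustration (hypothetical context string, assuming safe_eval
# parses it):
#     self._cleanup_action_context("{'search_default_user_id': 1, 'group_by': 'state'}", 42)
#     # -> {'group_by': 'state'}   (the user-specific default filter key is dropped)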
def _shared_action_def(self, cr, uid, wizard_data, context=None):
copied_action = wizard_data.action_id
if wizard_data.access_mode == 'readonly':
view_mode = wizard_data.view_type
view_id = copied_action.view_id.id if copied_action.view_id.type == wizard_data.view_type else False
else:
view_mode = copied_action.view_mode
view_id = copied_action.view_id.id
action_def = {
'name': wizard_data.name,
'domain': copied_action.domain,
'context': self._cleanup_action_context(wizard_data.action_id.context, uid),
'res_model': copied_action.res_model,
'view_mode': view_mode,
'view_type': copied_action.view_type,
'search_view_id': copied_action.search_view_id.id if wizard_data.access_mode != 'readonly' else False,
'view_id': view_id,
'auto_search': True,
}
if copied_action.view_ids:
action_def['view_ids'] = [(0,0,{'sequence': x.sequence,
'view_mode': x.view_mode,
'view_id': x.view_id.id })
for x in copied_action.view_ids
if (wizard_data.access_mode != 'readonly' or x.view_mode == wizard_data.view_type)
]
return action_def
def _setup_action_and_shortcut(self, cr, uid, wizard_data, user_ids, make_home, context=None):
"""Create a shortcut to reach the shared data, as well as the corresponding action, for
each user in ``user_ids``, and assign it as their home action if ``make_home`` is True.
Meant to be overridden for special cases.
"""
values = self._shared_action_def(cr, uid, wizard_data, context=context)
user_obj = self.pool.get('res.users')
for user_id in user_ids:
action_id = self._create_shortcut(cr, user_id, values)
if make_home:
# We do this only for new share users, as existing ones already have their initial home
# action. Resetting to the default menu does not work well as the menu is rather empty
# and does not contain the shortcuts in most cases.
user_obj.write(cr, UID_ROOT, [user_id], {'action_id': action_id})
def _get_recursive_relations(self, cr, uid, model, ttypes, relation_fields=None, suffix=None, context=None):
"""Returns list of tuples representing recursive relationships of type ``ttypes`` starting from
model with ID ``model_id``.
:param model: browsable model to start loading relationships from
:param ttypes: list of relationship types to follow (e.g: ['one2many','many2many'])
:param relation_fields: list of previously followed relationship tuples - to avoid duplicates
during recursion
:param suffix: optional suffix to append to the field path to reach the main object
"""
if relation_fields is None:
relation_fields = []
local_rel_fields = []
models = [x[1].model for x in relation_fields]
model_obj = self.pool.get('ir.model')
model_osv = self.pool.get(model.model)
for colinfo in model_osv._all_columns.itervalues():
coldef = colinfo.column
coltype = coldef._type
relation_field = None
if coltype in ttypes and colinfo.column._obj not in models:
relation_model_id = model_obj.search(cr, UID_ROOT, [('model','=',coldef._obj)])[0]
relation_model_browse = model_obj.browse(cr, UID_ROOT, relation_model_id, context=context)
relation_osv = self.pool.get(coldef._obj)
if coltype == 'one2many':
# don't record reverse path if it's not a real m2o (that happens, but rarely)
dest_model_ci = relation_osv._all_columns
reverse_rel = coldef._fields_id
if reverse_rel in dest_model_ci and dest_model_ci[reverse_rel].column._type == 'many2one':
relation_field = ('%s.%s'%(reverse_rel, suffix)) if suffix else reverse_rel
local_rel_fields.append((relation_field, relation_model_browse))
for parent in relation_osv._inherits:
if parent not in models:
parent_model = self.pool.get(parent)
parent_colinfos = parent_model._all_columns
parent_model_browse = model_obj.browse(cr, UID_ROOT,
model_obj.search(cr, UID_ROOT, [('model','=',parent)]))[0]
if relation_field and coldef._fields_id in parent_colinfos:
# inverse relationship is available in the parent
local_rel_fields.append((relation_field, parent_model_browse))
else:
# TODO: can we setup a proper rule to restrict inherited models
# in case the parent does not contain the reverse m2o?
local_rel_fields.append((None, parent_model_browse))
if relation_model_id != model.id and coltype in ['one2many', 'many2many']:
local_rel_fields += self._get_recursive_relations(cr, uid, relation_model_browse,
[coltype], relation_fields + local_rel_fields, suffix=relation_field, context=context)
return local_rel_fields
def _get_relationship_classes(self, cr, uid, model, context=None):
"""Computes the *relationship classes* reachable from the given
model. The 4 relationship classes are:
- [obj0]: the given model itself (and its parents via _inherits, if any)
- [obj1]: obj0 and all other models recursively accessible from
obj0 via one2many relationships
- [obj2]: obj0 and all other models recursively accessible from
obj0 via one2many and many2many relationships
- [obj3]: all models recursively accessible from obj1 via many2one
relationships
Each class is returned as a list of pairs [(field,model_browse)], where
``model`` is the browse_record of a reachable ir.model, and ``field`` is
the dot-notation reverse relationship path coming from that model to obj0,
or None if there is no reverse path.
:return: ([obj0], [obj1], [obj2], [obj3])
"""
# obj0 class and its parents
obj0 = [(None, model)]
model_obj = self.pool.get(model.model)
ir_model_obj = self.pool.get('ir.model')
for parent in model_obj._inherits:
parent_model_browse = ir_model_obj.browse(cr, UID_ROOT,
ir_model_obj.search(cr, UID_ROOT, [('model','=',parent)]))[0]
obj0 += [(None, parent_model_browse)]
obj1 = self._get_recursive_relations(cr, uid, model, ['one2many'], relation_fields=obj0, context=context)
obj2 = self._get_recursive_relations(cr, uid, model, ['one2many', 'many2many'], relation_fields=obj0, context=context)
obj3 = self._get_recursive_relations(cr, uid, model, ['many2one'], relation_fields=obj0, context=context)
for dummy, model in obj1:
obj3 += self._get_recursive_relations(cr, uid, model, ['many2one'], relation_fields=obj0, context=context)
return obj0, obj1, obj2, obj3
def _get_access_map_for_groups_and_models(self, cr, uid, group_ids, model_ids, context=None):
model_access_obj = self.pool.get('ir.model.access')
user_right_ids = model_access_obj.search(cr, uid,
[('group_id', 'in', group_ids), ('model_id', 'in', model_ids)],
context=context)
user_access_matrix = {}
if user_right_ids:
for access_right in model_access_obj.browse(cr, uid, user_right_ids, context=context):
access_line = user_access_matrix.setdefault(access_right.model_id.model, set())
for perm in FULL_ACCESS:
if getattr(access_right, perm, 0):
access_line.add(perm)
return user_access_matrix
def _add_access_rights_for_share_group(self, cr, uid, group_id, mode, fields_relations, context=None):
"""Adds access rights to group_id on object models referenced in ``fields_relations``,
intersecting with access rights of current user to avoid granting too much rights
"""
model_access_obj = self.pool.get('ir.model.access')
user_obj = self.pool.get('res.users')
target_model_ids = [x[1].id for x in fields_relations]
perms_to_add = (mode == 'readonly') and READ_ONLY_ACCESS or READ_WRITE_ACCESS
current_user = user_obj.browse(cr, uid, uid, context=context)
current_user_access_map = self._get_access_map_for_groups_and_models(cr, uid,
[x.id for x in current_user.groups_id], target_model_ids, context=context)
group_access_map = self._get_access_map_for_groups_and_models(cr, uid,
[group_id], target_model_ids, context=context)
_logger.debug("Current user access matrix: %r", current_user_access_map)
_logger.debug("New group current access matrix: %r", group_access_map)
# Create required rights if allowed by current user rights and not
# already granted
for dummy, model in fields_relations:
# mail.message is transversal: it should not directly receive the access rights
if model.model in ['mail.message']: continue
values = {
'name': _('Copied access for sharing'),
'group_id': group_id,
'model_id': model.id,
}
current_user_access_line = current_user_access_map.get(model.model,set())
existing_group_access_line = group_access_map.get(model.model,set())
need_creation = False
for perm in perms_to_add:
if perm in current_user_access_line \
and perm not in existing_group_access_line:
values.update({perm:True})
group_access_map.setdefault(model.model, set()).add(perm)
need_creation = True
if need_creation:
model_access_obj.create(cr, UID_ROOT, values)
_logger.debug("Creating access right for model %s with values: %r", model.model, values)
def _link_or_copy_current_user_rules(self, cr, current_user, group_id, fields_relations, context=None):
rule_obj = self.pool.get('ir.rule')
rules_done = set()
for group in current_user.groups_id:
for dummy, model in fields_relations:
for rule in group.rule_groups:
if rule.id in rules_done:
continue
rules_done.add(rule.id)
if rule.model_id.id == model.id:
if 'user.' in rule.domain_force:
                            # The pattern above means there is likely a condition
                            # specific to the current user, so we must copy the rule
                            # using the evaluated version of the domain.
                            # And it's better to copy one time too many than too few
rule_obj.copy(cr, UID_ROOT, rule.id, default={
'name': '%s %s' %(rule.name, _('(Copy for sharing)')),
'groups': [(6,0,[group_id])],
'domain_force': rule.domain, # evaluated version!
})
_logger.debug("Copying rule %s (%s) on model %s with domain: %s", rule.name, rule.id, model.model, rule.domain_force)
else:
# otherwise we can simply link the rule to keep it dynamic
rule_obj.write(cr, SUPERUSER_ID, [rule.id], {
'groups': [(4,group_id)]
})
_logger.debug("Linking rule %s (%s) on model %s with domain: %s", rule.name, rule.id, model.model, rule.domain_force)
def _check_personal_rule_or_duplicate(self, cr, group_id, rule, context=None):
"""Verifies that the given rule only belongs to the given group_id, otherwise
duplicate it for the current group, and unlink the previous one.
The duplicated rule has the original domain copied verbatim, without
any evaluation.
Returns the final rule to use (browse_record), either the original one if it
only belongs to this group, or the copy."""
if len(rule.groups) == 1:
return rule
# duplicate it first:
rule_obj = self.pool.get('ir.rule')
new_id = rule_obj.copy(cr, UID_ROOT, rule.id,
default={
'name': '%s %s' %(rule.name, _('(Duplicated for modified sharing permissions)')),
'groups': [(6,0,[group_id])],
'domain_force': rule.domain_force, # non evaluated!
})
_logger.debug("Duplicating rule %s (%s) (domain: %s) for modified access ", rule.name, rule.id, rule.domain_force)
# then disconnect from group_id:
rule.write({'groups':[(3,group_id)]}) # disconnects, does not delete!
return rule_obj.browse(cr, UID_ROOT, new_id, context=context)
def _create_or_combine_sharing_rule(self, cr, current_user, wizard_data, group_id, model_id, domain, restrict=False, rule_name=None, context=None):
"""Add a new ir.rule entry for model_id and domain on the target group_id.
        If ``restrict`` is True, instead of adding a rule, the domain is
        AND-combined with all existing rules in the group, to implement
        an additional restriction (as of 6.1, multiple rules in the same group
        are OR'ed by default, so a restriction must alter all existing rules).
        This is necessary because the personal rules of the user who is sharing
        are first copied to the new share group. Afterwards the filters used for
        sharing are applied as an additional layer of rules, which are likely to
        apply to the same model. The default rule algorithm would OR them (as of
        6.1), which would result in a combined set of permissions that could be
        larger than those of the user who is sharing! Hence we must forcefully
        AND the rules at this stage.
One possibly undesirable effect can appear when sharing with a
pre-existing group, in which case altering pre-existing rules would not
be desired. This is addressed in the portal module.
"""
if rule_name is None:
rule_name = _('Sharing filter created by user %s (%s) for group %s') % \
(current_user.name, current_user.login, group_id)
rule_obj = self.pool.get('ir.rule')
rule_ids = rule_obj.search(cr, UID_ROOT, [('groups', 'in', group_id), ('model_id', '=', model_id)])
if rule_ids:
for rule in rule_obj.browse(cr, UID_ROOT, rule_ids, context=context):
if rule.domain_force == domain:
# don't create it twice!
if restrict:
continue
else:
_logger.debug("Ignoring sharing rule on model %s with domain: %s the same rule exists already", model_id, domain)
return
if restrict:
# restricting existing rules is done by adding the clause
# with an AND, but we can't alter the rule if it belongs to
# other groups, so we duplicate if needed
rule = self._check_personal_rule_or_duplicate(cr, group_id, rule, context=context)
eval_ctx = rule_obj._eval_context_for_combinations()
org_domain = expression.normalize(eval(rule.domain_force, eval_ctx))
new_clause = expression.normalize(eval(domain, eval_ctx))
combined_domain = expression.AND([new_clause, org_domain])
                    rule.write({'domain_force': combined_domain, 'name': rule.name + ' ' + _('(Modified)')})
_logger.debug("Combining sharing rule %s on model %s with domain: %s", rule.id, model_id, domain)
if not rule_ids or not restrict:
# Adding the new rule in the group is ok for normal cases, because rules
# in the same group and for the same model will be combined with OR
# (as of v6.1), so the desired effect is achieved.
rule_obj.create(cr, UID_ROOT, {
'name': rule_name,
'model_id': model_id,
'domain_force': domain,
'groups': [(4,group_id)]
})
_logger.debug("Created sharing rule on model %s with domain: %s", model_id, domain)
def _create_indirect_sharing_rules(self, cr, current_user, wizard_data, group_id, fields_relations, context=None):
rule_name = _('Indirect sharing filter created by user %s (%s) for group %s') % \
(current_user.name, current_user.login, group_id)
try:
domain = safe_eval(wizard_data.domain)
if domain:
for rel_field, model in fields_relations:
                    # mail.message is transversal: it should not directly receive the access rights
if model.model in ['mail.message']: continue
related_domain = []
if not rel_field: continue
for element in domain:
if expression.is_leaf(element):
left, operator, right = element
left = '%s.%s'%(rel_field, left)
element = left, operator, right
related_domain.append(element)
self._create_or_combine_sharing_rule(cr, current_user, wizard_data,
group_id, model_id=model.id, domain=str(related_domain),
rule_name=rule_name, restrict=True, context=context)
except Exception:
_logger.exception('Failed to create share access')
raise osv.except_osv(_('Sharing access cannot be created.'),
_('Sorry, the current screen and filter you are trying to share are not supported at the moment.\nYou may want to try a simpler filter.'))
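    # Illustrative leaf rewrite performed above (field names assumed): a
    # sharing domain [('id', 'in', [1, 2])] on obj0, reached from a related
    # model through rel_field 'project_id', becomes
    # [('project_id.id', 'in', [1, 2])] on that related model.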
def _check_preconditions(self, cr, uid, wizard_data, context=None):
self._assert(wizard_data.action_id and wizard_data.access_mode,
_('Action and Access Mode are required to create a shared access.'),
context=context)
self._assert(self.has_share(cr, uid, context=context),
_('You must be a member of the Share/User group to use the share wizard.'),
context=context)
if wizard_data.user_type == 'emails':
self._assert((wizard_data.new_users or wizard_data.email_1 or wizard_data.email_2 or wizard_data.email_3),
_('Please indicate the emails of the persons to share with, one per line.'),
context=context)
def _create_share_users_group(self, cr, uid, wizard_data, context=None):
"""Creates the appropriate share group and share users, and populates
result_line_ids of wizard_data with one line for each user.
:return: a tuple composed of the new group id (to which the shared access should be granted),
the ids of the new share users that have been created and the ids of the existing share users
"""
group_id = self._create_share_group(cr, uid, wizard_data, context=context)
# First create any missing user, based on the email addresses provided
new_ids, existing_ids = self._create_new_share_users(cr, uid, wizard_data, group_id, context=context)
        # Finally, set up the new action and shortcut for the users.
if existing_ids:
# existing users still need to join the new group
self.pool.get('res.users').write(cr, UID_ROOT, existing_ids, {
'groups_id': [(4,group_id)],
})
# existing user don't need their home action replaced, only a new shortcut
self._setup_action_and_shortcut(cr, uid, wizard_data, existing_ids, make_home=False, context=context)
if new_ids:
# new users need a new shortcut AND a home action
self._setup_action_and_shortcut(cr, uid, wizard_data, new_ids, make_home=True, context=context)
return group_id, new_ids, existing_ids
def go_step_2(self, cr, uid, ids, context=None):
wizard_data = self.browse(cr, uid, ids[0], context=context)
self._check_preconditions(cr, uid, wizard_data, context=context)
# Create shared group and users
group_id, new_ids, existing_ids = self._create_share_users_group(cr, uid, wizard_data, context=context)
current_user = self.pool.get('res.users').browse(cr, uid, uid, context=context)
model_obj = self.pool.get('ir.model')
model_id = model_obj.search(cr, uid, [('model','=', wizard_data.action_id.res_model)])[0]
model = model_obj.browse(cr, uid, model_id, context=context)
# ACCESS RIGHTS
# We have several classes of objects that should receive different access rights:
# Let:
# - [obj0] be the target model itself (and its parents via _inherits, if any)
# - [obj1] be the target model and all other models recursively accessible from
# obj0 via one2many relationships
# - [obj2] be the target model and all other models recursively accessible from
# obj0 via one2many and many2many relationships
# - [obj3] be all models recursively accessible from obj1 via many2one relationships
# (currently not used)
obj0, obj1, obj2, obj3 = self._get_relationship_classes(cr, uid, model, context=context)
mode = wizard_data.access_mode
# Add access to [obj0] and [obj1] according to chosen mode
self._add_access_rights_for_share_group(cr, uid, group_id, mode, obj0, context=context)
self._add_access_rights_for_share_group(cr, uid, group_id, mode, obj1, context=context)
# Add read-only access (always) to [obj2]
self._add_access_rights_for_share_group(cr, uid, group_id, 'readonly', obj2, context=context)
# IR.RULES
# A. On [obj0], [obj1], [obj2]: add all rules from all groups of
# the user that is sharing
# Warning: rules must be copied instead of linked if they contain a reference
# to uid or if the rule is shared with other groups (and it must be replaced correctly)
# B. On [obj0]: 1 rule with domain of shared action
# C. For each model in [obj1]: 1 rule in the form:
# many2one_rel.domain_of_obj0
# where many2one_rel is the many2one used in the definition of the
# one2many, and domain_of_obj0 is the sharing domain
        # For example if [obj0] is project.project with a domain of
        # [('id', 'in', [1,2])]
        # then we will have project.task in [obj1] and we need to create this
        # ir.rule on project.task:
        # [('project_id.id', 'in', [1,2])]
# A.
all_relations = obj0 + obj1 + obj2
self._link_or_copy_current_user_rules(cr, current_user, group_id, all_relations, context=context)
# B.
main_domain = wizard_data.domain if wizard_data.domain != '[]' else str(DOMAIN_ALL)
self._create_or_combine_sharing_rule(cr, current_user, wizard_data,
group_id, model_id=model.id, domain=main_domain,
restrict=True, context=context)
# C.
self._create_indirect_sharing_rules(cr, current_user, wizard_data, group_id, obj1, context=context)
# refresh wizard_data
wizard_data = self.browse(cr, uid, ids[0], context=context)
# EMAILS AND NOTIFICATIONS
# A. Not invite: as before
# -> send emails to destination users
# B. Invite (OpenSocial)
# -> subscribe all users (existing and new) to the record
# -> send a notification with a summary to the current record
        # -> send a notification to all users; users who allow email
        #    notifications in their preferences will receive it
        #    (new users receive all notifications by email by default)
# A.
if not wizard_data.invite:
self.send_emails(cr, uid, wizard_data, context=context)
# B.
else:
# Invite (OpenSocial): automatically subscribe users to the record
res_id = 0
for cond in safe_eval(main_domain):
if cond[0] == 'id':
res_id = cond[2]
            # Record id not found: cannot subscribe users to the record
if res_id <= 0:
raise osv.except_osv(_('Record id not found'), _('The share engine has not been able to fetch a record_id for your invitation.'))
self.pool.get(model.model).message_subscribe(cr, uid, [res_id], new_ids + existing_ids, context=context)
# self.send_invite_email(cr, uid, wizard_data, context=context)
# self.send_invite_note(cr, uid, model.model, res_id, wizard_data, context=context)
# CLOSE
# A. Not invite: as before
# B. Invite: skip summary screen, get back to the record
# A.
if not wizard_data.invite:
dummy, step2_form_view_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'share', 'share_step2_form')
return {
'name': _('Shared access created!'),
'view_type': 'form',
'view_mode': 'form',
'res_model': 'share.wizard',
'view_id': False,
'res_id': ids[0],
'views': [(step2_form_view_id, 'form'), (False, 'tree'), (False, 'calendar'), (False, 'graph')],
'type': 'ir.actions.act_window',
'target': 'new'
}
# B.
else:
return {
'view_type': 'form',
'view_mode': 'form',
'res_model': model.model,
'view_id': False,
'res_id': res_id,
'views': [(False, 'form'), (False, 'tree'), (False, 'calendar'), (False, 'graph')],
'type': 'ir.actions.act_window',
}
def send_invite_note(self, cr, uid, model_name, res_id, wizard_data, context=None):
subject = _('Invitation')
body = 'has been <b>shared</b> with'
        tmp_idx = 0
        for result_line in wizard_data.result_line_ids:
            body += ' @%s' % (result_line.user_id.login)
            if tmp_idx < len(wizard_data.result_line_ids)-2:
                body += ','
            elif tmp_idx == len(wizard_data.result_line_ids)-2:
                body += ' and'
            tmp_idx += 1  # keep the separator logic in step with the current line
        body += '.'
        return self.pool.get(model_name).message_post(cr, uid, [res_id], body=body, subject=subject, context=context)
def send_invite_email(self, cr, uid, wizard_data, context=None):
# TDE Note: not updated because will disappear
message_obj = self.pool.get('mail.message')
notification_obj = self.pool.get('mail.notification')
user = self.pool.get('res.users').browse(cr, UID_ROOT, uid)
if not user.email:
raise osv.except_osv(_('Email required'), _('The current user must have an email address configured in User Preferences to be able to send outgoing emails.'))
# TODO: also send an HTML version of this mail
for result_line in wizard_data.result_line_ids:
email_to = result_line.user_id.email
if not email_to:
continue
subject = _('Invitation to collaborate about %s') % (wizard_data.record_name)
body = _("Hello,\n\n")
body += _("I have shared %s (%s) with you!\n\n") % (wizard_data.record_name, wizard_data.name)
if wizard_data.message:
body += "%s\n\n" % (wizard_data.message)
if result_line.newly_created:
body += _("The documents are not attached, you can view them online directly on my OpenERP server at:\n %s\n\n") % (result_line.share_url)
body += _("These are your credentials to access this protected area:\n")
body += "%s: %s" % (_("Username"), result_line.user_id.login) + "\n"
body += "%s: %s" % (_("Password"), result_line.password) + "\n"
body += "%s: %s" % (_("Database"), cr.dbname) + "\n"
body += _("The documents have been automatically added to your subscriptions.\n\n")
body += '%s\n\n' % ((user.signature or ''))
body += "--\n"
body += _("OpenERP is a powerful and user-friendly suite of Business Applications (CRM, Sales, HR, etc.)\n"
"It is open source and can be found on http://www.openerp.com.")
msg_id = message_obj.schedule_with_attach(cr, uid, user.email, [email_to], subject, body, model='', context=context)
notification_obj.create(cr, uid, {'user_id': result_line.user_id.id, 'message_id': msg_id}, context=context)
def send_emails(self, cr, uid, wizard_data, context=None):
_logger.info('Sending share notifications by email...')
mail_mail = self.pool.get('mail.mail')
user = self.pool.get('res.users').browse(cr, UID_ROOT, uid)
if not user.email:
raise osv.except_osv(_('Email required'), _('The current user must have an email address configured in User Preferences to be able to send outgoing emails.'))
# TODO: also send an HTML version of this mail
mail_ids = []
for result_line in wizard_data.result_line_ids:
email_to = result_line.user_id.email
if not email_to:
continue
subject = wizard_data.name
body = _("Hello,\n\n")
body += _("I've shared %s with you!\n\n") % wizard_data.name
body += _("The documents are not attached, you can view them online directly on my OpenERP server at:\n %s\n\n") % (result_line.share_url)
if wizard_data.message:
body += '%s\n\n' % (wizard_data.message)
if result_line.newly_created:
body += _("These are your credentials to access this protected area:\n")
body += "%s: %s\n" % (_("Username"), result_line.user_id.login)
body += "%s: %s\n" % (_("Password"), result_line.password)
body += "%s: %s\n" % (_("Database"), cr.dbname)
else:
body += _("The documents have been automatically added to your current OpenERP documents.\n")
body += _("You may use your current login (%s) and password to view them.\n") % result_line.user_id.login
body += "\n\n%s\n\n" % ( (user.signature or '') )
body += "--\n"
body += _("OpenERP is a powerful and user-friendly suite of Business Applications (CRM, Sales, HR, etc.)\n"
"It is open source and can be found on http://www.openerp.com.")
mail_ids.append(mail_mail.create(cr, uid, {
'email_from': user.email,
'email_to': email_to,
'subject': subject,
'body_html': '<pre>%s</pre>' % body}, context=context))
# force direct delivery, as users expect instant notification
mail_mail.send(cr, uid, mail_ids, context=context)
_logger.info('%d share notification(s) sent.', len(mail_ids))
def onchange_embed_options(self, cr, uid, ids, opt_title, opt_search, context=None):
wizard = self.browse(cr, uid, ids[0], context)
options = dict(title=opt_title, search=opt_search)
return {'value': {'embed_code': self._generate_embedded_code(wizard, options)}}
share_wizard()
class share_result_line(osv.osv_memory):
_name = 'share.wizard.result.line'
_rec_name = 'user_id'
def _share_url(self, cr, uid, ids, _fieldname, _args, context=None):
result = dict.fromkeys(ids, '')
for this in self.browse(cr, uid, ids, context=context):
data = dict(dbname=cr.dbname, login=this.login, password=this.password)
if this.share_wizard_id and this.share_wizard_id.action_id:
data['action_id'] = this.share_wizard_id.action_id.id
ctx = dict(context, share_url_template_hash_arguments=['action_id'])
result[this.id] = this.share_wizard_id.share_url_template(context=ctx) % data
return result
_columns = {
'user_id': fields.many2one('res.users', required=True, readonly=True),
'login': fields.related('user_id', 'login', string='Login', type='char', size=64, required=True, readonly=True),
'password': fields.char('Password', size=64, readonly=True),
'share_url': fields.function(_share_url, string='Share URL', type='char', size=512),
'share_wizard_id': fields.many2one('share.wizard', 'Share Wizard', required=True),
'newly_created': fields.boolean('Newly created', readonly=True),
}
_defaults = {
'newly_created': True,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
carvalhomb/tsmells
|
refs/heads/master
|
guess/src/Lib/encodings/cp856.py
|
2
|
""" Python Character Mapping Codec generated from 'CP856.TXT' with gencodec.py.
Written by Marc-Andre Lemburg (mal@lemburg.com).
(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
(c) Copyright 2000 Guido van Rossum.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
def encode(self,input,errors='strict'):
return codecs.charmap_encode(input,errors,encoding_map)
def decode(self,input,errors='strict'):
return codecs.charmap_decode(input,errors,decoding_map)
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
return (Codec().encode,Codec().decode,StreamReader,StreamWriter)
### Decoding Map
decoding_map = codecs.make_identity_dict(range(256))
decoding_map.update({
0x0080: 0x05d0, # HEBREW LETTER ALEF
0x0081: 0x05d1, # HEBREW LETTER BET
0x0082: 0x05d2, # HEBREW LETTER GIMEL
0x0083: 0x05d3, # HEBREW LETTER DALET
0x0084: 0x05d4, # HEBREW LETTER HE
0x0085: 0x05d5, # HEBREW LETTER VAV
0x0086: 0x05d6, # HEBREW LETTER ZAYIN
0x0087: 0x05d7, # HEBREW LETTER HET
0x0088: 0x05d8, # HEBREW LETTER TET
0x0089: 0x05d9, # HEBREW LETTER YOD
0x008a: 0x05da, # HEBREW LETTER FINAL KAF
0x008b: 0x05db, # HEBREW LETTER KAF
0x008c: 0x05dc, # HEBREW LETTER LAMED
0x008d: 0x05dd, # HEBREW LETTER FINAL MEM
0x008e: 0x05de, # HEBREW LETTER MEM
0x008f: 0x05df, # HEBREW LETTER FINAL NUN
0x0090: 0x05e0, # HEBREW LETTER NUN
0x0091: 0x05e1, # HEBREW LETTER SAMEKH
0x0092: 0x05e2, # HEBREW LETTER AYIN
0x0093: 0x05e3, # HEBREW LETTER FINAL PE
0x0094: 0x05e4, # HEBREW LETTER PE
0x0095: 0x05e5, # HEBREW LETTER FINAL TSADI
0x0096: 0x05e6, # HEBREW LETTER TSADI
0x0097: 0x05e7, # HEBREW LETTER QOF
0x0098: 0x05e8, # HEBREW LETTER RESH
0x0099: 0x05e9, # HEBREW LETTER SHIN
0x009a: 0x05ea, # HEBREW LETTER TAV
0x009b: None, # UNDEFINED
0x009c: 0x00a3, # POUND SIGN
0x009d: None, # UNDEFINED
0x009e: 0x00d7, # MULTIPLICATION SIGN
0x009f: None, # UNDEFINED
0x00a0: None, # UNDEFINED
0x00a1: None, # UNDEFINED
0x00a2: None, # UNDEFINED
0x00a3: None, # UNDEFINED
0x00a4: None, # UNDEFINED
0x00a5: None, # UNDEFINED
0x00a6: None, # UNDEFINED
0x00a7: None, # UNDEFINED
0x00a8: None, # UNDEFINED
0x00a9: 0x00ae, # REGISTERED SIGN
0x00aa: 0x00ac, # NOT SIGN
0x00ab: 0x00bd, # VULGAR FRACTION ONE HALF
0x00ac: 0x00bc, # VULGAR FRACTION ONE QUARTER
0x00ad: None, # UNDEFINED
0x00ae: 0x00ab, # LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
0x00af: 0x00bb, # RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
0x00b0: 0x2591, # LIGHT SHADE
0x00b1: 0x2592, # MEDIUM SHADE
0x00b2: 0x2593, # DARK SHADE
0x00b3: 0x2502, # BOX DRAWINGS LIGHT VERTICAL
0x00b4: 0x2524, # BOX DRAWINGS LIGHT VERTICAL AND LEFT
0x00b5: None, # UNDEFINED
0x00b6: None, # UNDEFINED
0x00b7: None, # UNDEFINED
0x00b8: 0x00a9, # COPYRIGHT SIGN
0x00b9: 0x2563, # BOX DRAWINGS DOUBLE VERTICAL AND LEFT
0x00ba: 0x2551, # BOX DRAWINGS DOUBLE VERTICAL
0x00bb: 0x2557, # BOX DRAWINGS DOUBLE DOWN AND LEFT
0x00bc: 0x255d, # BOX DRAWINGS DOUBLE UP AND LEFT
0x00bd: 0x00a2, # CENT SIGN
0x00be: 0x00a5, # YEN SIGN
0x00bf: 0x2510, # BOX DRAWINGS LIGHT DOWN AND LEFT
0x00c0: 0x2514, # BOX DRAWINGS LIGHT UP AND RIGHT
0x00c1: 0x2534, # BOX DRAWINGS LIGHT UP AND HORIZONTAL
0x00c2: 0x252c, # BOX DRAWINGS LIGHT DOWN AND HORIZONTAL
0x00c3: 0x251c, # BOX DRAWINGS LIGHT VERTICAL AND RIGHT
0x00c4: 0x2500, # BOX DRAWINGS LIGHT HORIZONTAL
0x00c5: 0x253c, # BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL
0x00c6: None, # UNDEFINED
0x00c7: None, # UNDEFINED
0x00c8: 0x255a, # BOX DRAWINGS DOUBLE UP AND RIGHT
0x00c9: 0x2554, # BOX DRAWINGS DOUBLE DOWN AND RIGHT
0x00ca: 0x2569, # BOX DRAWINGS DOUBLE UP AND HORIZONTAL
0x00cb: 0x2566, # BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL
0x00cc: 0x2560, # BOX DRAWINGS DOUBLE VERTICAL AND RIGHT
0x00cd: 0x2550, # BOX DRAWINGS DOUBLE HORIZONTAL
0x00ce: 0x256c, # BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL
0x00cf: 0x00a4, # CURRENCY SIGN
0x00d0: None, # UNDEFINED
0x00d1: None, # UNDEFINED
0x00d2: None, # UNDEFINED
    0x00d3: None,       # UNDEFINED
0x00d4: None, # UNDEFINED
0x00d5: None, # UNDEFINED
    0x00d6: None,       # UNDEFINED
0x00d7: None, # UNDEFINED
0x00d8: None, # UNDEFINED
0x00d9: 0x2518, # BOX DRAWINGS LIGHT UP AND LEFT
0x00da: 0x250c, # BOX DRAWINGS LIGHT DOWN AND RIGHT
0x00db: 0x2588, # FULL BLOCK
0x00dc: 0x2584, # LOWER HALF BLOCK
0x00dd: 0x00a6, # BROKEN BAR
0x00de: None, # UNDEFINED
0x00df: 0x2580, # UPPER HALF BLOCK
0x00e0: None, # UNDEFINED
0x00e1: None, # UNDEFINED
0x00e2: None, # UNDEFINED
0x00e3: None, # UNDEFINED
0x00e4: None, # UNDEFINED
0x00e5: None, # UNDEFINED
0x00e6: 0x00b5, # MICRO SIGN
0x00e7: None, # UNDEFINED
0x00e8: None, # UNDEFINED
0x00e9: None, # UNDEFINED
0x00ea: None, # UNDEFINED
0x00eb: None, # UNDEFINED
0x00ec: None, # UNDEFINED
0x00ed: None, # UNDEFINED
0x00ee: 0x00af, # MACRON
0x00ef: 0x00b4, # ACUTE ACCENT
0x00f0: 0x00ad, # SOFT HYPHEN
0x00f1: 0x00b1, # PLUS-MINUS SIGN
0x00f2: 0x2017, # DOUBLE LOW LINE
0x00f3: 0x00be, # VULGAR FRACTION THREE QUARTERS
0x00f4: 0x00b6, # PILCROW SIGN
0x00f5: 0x00a7, # SECTION SIGN
0x00f6: 0x00f7, # DIVISION SIGN
0x00f7: 0x00b8, # CEDILLA
0x00f8: 0x00b0, # DEGREE SIGN
0x00f9: 0x00a8, # DIAERESIS
0x00fa: 0x00b7, # MIDDLE DOT
0x00fb: 0x00b9, # SUPERSCRIPT ONE
0x00fc: 0x00b3, # SUPERSCRIPT THREE
0x00fd: 0x00b2, # SUPERSCRIPT TWO
0x00fe: 0x25a0, # BLACK SQUARE
0x00ff: 0x00a0, # NO-BREAK SPACE
})
### Encoding Map
encoding_map = {}
for k,v in decoding_map.items():
encoding_map[v] = k
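# Round-trip sketch (assumes this module is importable on the codecs search
# path under the name 'cp856'):
#   u'\u05d0'.encode('cp856')  # -> '\x80' (HEBREW LETTER ALEF)
#   '\x80'.decode('cp856')     # -> u'\u05d0'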
|
SamiHiltunen/invenio-deposit
|
refs/heads/master
|
invenio_deposit/__init__.py
|
22
|
# -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2013, 2014 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Deposit module."""
|
rbarlow/pulp
|
refs/heads/master
|
nodes/extensions/admin/pulp_node/extensions/admin/__init__.py
|
187
|
# Copyright (c) 2013 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public
# License as published by the Free Software Foundation; either version
# 2 of the License (GPLv2) or (at your option) any later version.
# There is NO WARRANTY for this software, express or implied,
# including the implied warranties of MERCHANTABILITY,
# NON-INFRINGEMENT, or FITNESS FOR A PARTICULAR PURPOSE. You should
# have received a copy of GPLv2 along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
|
Work4Labs/lettuce
|
refs/heads/master
|
tests/integration/lib/Django-1.3/django/utils/dateformat.py
|
234
|
"""
PHP date() style date formatting
See http://www.php.net/date for format strings
Usage:
>>> import datetime
>>> d = datetime.datetime.now()
>>> df = DateFormat(d)
>>> print df.format('jS F Y H:i')
7th October 2003 11:39
>>>
"""
import re
import time
import calendar
from django.utils.dates import MONTHS, MONTHS_3, MONTHS_ALT, MONTHS_AP, WEEKDAYS, WEEKDAYS_ABBR
from django.utils.tzinfo import LocalTimezone
from django.utils.translation import ugettext as _
from django.utils.encoding import force_unicode
re_formatchars = re.compile(r'(?<!\\)([aAbBcdDEfFgGhHiIjlLmMnNOPrsStTUuwWyYzZ])')
re_escaped = re.compile(r'\\(.)')
class Formatter(object):
def format(self, formatstr):
pieces = []
for i, piece in enumerate(re_formatchars.split(force_unicode(formatstr))):
if i % 2:
pieces.append(force_unicode(getattr(self, piece)()))
elif piece:
pieces.append(re_escaped.sub(r'\1', piece))
return u''.join(pieces)
class TimeFormat(Formatter):
def __init__(self, t):
self.data = t
def a(self):
"'a.m.' or 'p.m.'"
if self.data.hour > 11:
return _('p.m.')
return _('a.m.')
def A(self):
"'AM' or 'PM'"
if self.data.hour > 11:
return _('PM')
return _('AM')
def B(self):
"Swatch Internet time"
raise NotImplementedError
def f(self):
"""
Time, in 12-hour hours and minutes, with minutes left off if they're
zero.
Examples: '1', '1:30', '2:05', '2'
Proprietary extension.
"""
if self.data.minute == 0:
return self.g()
return u'%s:%s' % (self.g(), self.i())
def g(self):
"Hour, 12-hour format without leading zeros; i.e. '1' to '12'"
if self.data.hour == 0:
return 12
if self.data.hour > 12:
return self.data.hour - 12
return self.data.hour
def G(self):
"Hour, 24-hour format without leading zeros; i.e. '0' to '23'"
return self.data.hour
def h(self):
"Hour, 12-hour format; i.e. '01' to '12'"
return u'%02d' % self.g()
def H(self):
"Hour, 24-hour format; i.e. '00' to '23'"
return u'%02d' % self.G()
def i(self):
"Minutes; i.e. '00' to '59'"
return u'%02d' % self.data.minute
def P(self):
"""
Time, in 12-hour hours, minutes and 'a.m.'/'p.m.', with minutes left off
if they're zero and the strings 'midnight' and 'noon' if appropriate.
Examples: '1 a.m.', '1:30 p.m.', 'midnight', 'noon', '12:30 p.m.'
Proprietary extension.
"""
if self.data.minute == 0 and self.data.hour == 0:
return _('midnight')
if self.data.minute == 0 and self.data.hour == 12:
return _('noon')
return u'%s %s' % (self.f(), self.a())
def s(self):
"Seconds; i.e. '00' to '59'"
return u'%02d' % self.data.second
def u(self):
"Microseconds"
return self.data.microsecond
class DateFormat(TimeFormat):
year_days = [None, 0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334]
def __init__(self, dt):
# Accepts either a datetime or date object.
self.data = dt
self.timezone = getattr(dt, 'tzinfo', None)
if hasattr(self.data, 'hour') and not self.timezone:
self.timezone = LocalTimezone(dt)
def b(self):
"Month, textual, 3 letters, lowercase; e.g. 'jan'"
return MONTHS_3[self.data.month]
def c(self):
"""
ISO 8601 Format
Example : '2008-01-02T10:30:00.000123'
"""
return self.data.isoformat()
def d(self):
"Day of the month, 2 digits with leading zeros; i.e. '01' to '31'"
return u'%02d' % self.data.day
def D(self):
"Day of the week, textual, 3 letters; e.g. 'Fri'"
return WEEKDAYS_ABBR[self.data.weekday()]
def E(self):
"Alternative month names as required by some locales. Proprietary extension."
return MONTHS_ALT[self.data.month]
def F(self):
"Month, textual, long; e.g. 'January'"
return MONTHS[self.data.month]
def I(self):
"'1' if Daylight Savings Time, '0' otherwise."
if self.timezone and self.timezone.dst(self.data):
return u'1'
else:
return u'0'
def j(self):
"Day of the month without leading zeros; i.e. '1' to '31'"
return self.data.day
def l(self):
"Day of the week, textual, long; e.g. 'Friday'"
return WEEKDAYS[self.data.weekday()]
def L(self):
"Boolean for whether it is a leap year; i.e. True or False"
return calendar.isleap(self.data.year)
def m(self):
"Month; i.e. '01' to '12'"
return u'%02d' % self.data.month
def M(self):
"Month, textual, 3 letters; e.g. 'Jan'"
return MONTHS_3[self.data.month].title()
def n(self):
"Month without leading zeros; i.e. '1' to '12'"
return self.data.month
def N(self):
"Month abbreviation in Associated Press style. Proprietary extension."
return MONTHS_AP[self.data.month]
def O(self):
"Difference to Greenwich time in hours; e.g. '+0200'"
seconds = self.Z()
return u"%+03d%02d" % (seconds // 3600, (seconds // 60) % 60)
def r(self):
"RFC 2822 formatted date; e.g. 'Thu, 21 Dec 2000 16:01:07 +0200'"
return self.format('D, j M Y H:i:s O')
def S(self):
"English ordinal suffix for the day of the month, 2 characters; i.e. 'st', 'nd', 'rd' or 'th'"
if self.data.day in (11, 12, 13): # Special case
return u'th'
last = self.data.day % 10
if last == 1:
return u'st'
if last == 2:
return u'nd'
if last == 3:
return u'rd'
return u'th'
def t(self):
"Number of days in the given month; i.e. '28' to '31'"
return u'%02d' % calendar.monthrange(self.data.year, self.data.month)[1]
def T(self):
"Time zone of this machine; e.g. 'EST' or 'MDT'"
name = self.timezone and self.timezone.tzname(self.data) or None
if name is None:
name = self.format('O')
return unicode(name)
def U(self):
"Seconds since the Unix epoch (January 1 1970 00:00:00 GMT)"
if getattr(self.data, 'tzinfo', None):
return int(calendar.timegm(self.data.utctimetuple()))
else:
return int(time.mktime(self.data.timetuple()))
def w(self):
"Day of the week, numeric, i.e. '0' (Sunday) to '6' (Saturday)"
return (self.data.weekday() + 1) % 7
def W(self):
"ISO-8601 week number of year, weeks starting on Monday"
# Algorithm from http://www.personal.ecu.edu/mccartyr/ISOwdALG.txt
week_number = None
jan1_weekday = self.data.replace(month=1, day=1).weekday() + 1
weekday = self.data.weekday() + 1
day_of_year = self.z()
if day_of_year <= (8 - jan1_weekday) and jan1_weekday > 4:
if jan1_weekday == 5 or (jan1_weekday == 6 and calendar.isleap(self.data.year-1)):
week_number = 53
else:
week_number = 52
else:
if calendar.isleap(self.data.year):
i = 366
else:
i = 365
if (i - day_of_year) < (4 - weekday):
week_number = 1
else:
j = day_of_year + (7 - weekday) + (jan1_weekday - 1)
week_number = j // 7
if jan1_weekday > 4:
week_number -= 1
return week_number
def y(self):
"Year, 2 digits; e.g. '99'"
return unicode(self.data.year)[2:]
def Y(self):
"Year, 4 digits; e.g. '1999'"
return self.data.year
def z(self):
"Day of the year; i.e. '0' to '365'"
doy = self.year_days[self.data.month] + self.data.day
if self.L() and self.data.month > 2:
doy += 1
return doy
def Z(self):
"""
Time zone offset in seconds (i.e. '-43200' to '43200'). The offset for
timezones west of UTC is always negative, and for those east of UTC is
always positive.
"""
if not self.timezone:
return 0
offset = self.timezone.utcoffset(self.data)
# Only days can be negative, so negative offsets have days=-1 and
# seconds positive. Positive offsets have days=0
return offset.days * 86400 + offset.seconds
def format(value, format_string):
"Convenience function"
df = DateFormat(value)
return df.format(format_string)
def time_format(value, format_string):
"Convenience function"
tf = TimeFormat(value)
return tf.format(format_string)
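# Usage sketch (mirrors the module docstring; date value assumed):
#   format(datetime.datetime(2003, 10, 7, 11, 39), 'jS F Y H:i')
#   # -> u'7th October 2003 11:39'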
|
chriskmanx/qmole
|
refs/heads/master
|
QMOLEDEV/node/tools/scons/scons-local-1.2.0/SCons/Tool/cc.py
|
12
|
"""SCons.Tool.cc
Tool-specific initialization for generic Posix C compilers.
There normally shouldn't be any need to import this module directly.
It will usually be imported through the generic SCons.Tool.Tool()
selection method.
"""
#
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Tool/cc.py 3842 2008/12/20 22:59:52 scons"
import SCons.Tool
import SCons.Defaults
import SCons.Util
CSuffixes = ['.c', '.m']
if not SCons.Util.case_sensitive_suffixes('.c', '.C'):
CSuffixes.append('.C')
def add_common_cc_variables(env):
"""
Add underlying common "C compiler" variables that
are used by multiple tools (specifically, c++).
"""
if not env.has_key('_CCCOMCOM'):
env['_CCCOMCOM'] = '$CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS'
# It's a hack to test for darwin here, but the alternative
# of creating an applecc.py to contain this seems overkill.
# Maybe someday the Apple platform will require more setup and
# this logic will be moved.
env['FRAMEWORKS'] = SCons.Util.CLVar('')
env['FRAMEWORKPATH'] = SCons.Util.CLVar('')
if env['PLATFORM'] == 'darwin':
env['_CCCOMCOM'] = env['_CCCOMCOM'] + ' $_FRAMEWORKPATH'
if not env.has_key('CCFLAGS'):
env['CCFLAGS'] = SCons.Util.CLVar('')
if not env.has_key('SHCCFLAGS'):
env['SHCCFLAGS'] = SCons.Util.CLVar('$CCFLAGS')
def generate(env):
"""
Add Builders and construction variables for C compilers to an Environment.
"""
static_obj, shared_obj = SCons.Tool.createObjBuilders(env)
for suffix in CSuffixes:
static_obj.add_action(suffix, SCons.Defaults.CAction)
shared_obj.add_action(suffix, SCons.Defaults.ShCAction)
static_obj.add_emitter(suffix, SCons.Defaults.StaticObjectEmitter)
shared_obj.add_emitter(suffix, SCons.Defaults.SharedObjectEmitter)
add_common_cc_variables(env)
env['CC'] = 'cc'
env['CFLAGS'] = SCons.Util.CLVar('')
env['CCCOM'] = '$CC -o $TARGET -c $CFLAGS $CCFLAGS $_CCCOMCOM $SOURCES'
env['SHCC'] = '$CC'
env['SHCFLAGS'] = SCons.Util.CLVar('$CFLAGS')
env['SHCCCOM'] = '$SHCC -o $TARGET -c $SHCFLAGS $SHCCFLAGS $_CCCOMCOM $SOURCES'
env['CPPDEFPREFIX'] = '-D'
env['CPPDEFSUFFIX'] = ''
env['INCPREFIX'] = '-I'
env['INCSUFFIX'] = ''
env['SHOBJSUFFIX'] = '.os'
env['STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME'] = 0
env['CFILESUFFIX'] = '.c'
def exists(env):
return env.Detect('cc')
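# Usage sketch (construction environment assumed): selecting this tool runs
# generate(env), after which C sources compile via $CCCOM:
#   env = Environment(tools=['cc'])
#   env.Object('foo.c')  # roughly: cc -o foo.o -c $CFLAGS $CCFLAGS ... foo.c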
|
xxxVxxx/troposphere
|
refs/heads/master
|
troposphere/ecs.py
|
20
|
from . import AWSObject, AWSProperty, Ref
from .validators import boolean, network_port, positive_integer
class Cluster(AWSObject):
resource_type = "AWS::ECS::Cluster"
props = {}
class LoadBalancer(AWSProperty):
props = {
'ContainerName': (basestring, False),
'ContainerPort': (network_port, False),
'LoadBalancerName': ([basestring, Ref], False),
}
class Service(AWSObject):
resource_type = "AWS::ECS::Service"
props = {
'Cluster': (basestring, False),
'DesiredCount': (positive_integer, False),
'LoadBalancers': ([LoadBalancer], False),
'Role': (basestring, False),
'TaskDefinition': (basestring, False),
}
class Environment(AWSProperty):
props = {
'Name': (basestring, True),
'Value': (basestring, True),
}
class MountPoint(AWSProperty):
props = {
'ContainerPath': (basestring, True),
'SourceVolume': (basestring, True),
'ReadOnly': (boolean, False),
}
class PortMapping(AWSProperty):
props = {
'ContainerPort': (network_port, True),
'HostPort': (network_port, False),
}
class VolumesFrom(AWSProperty):
props = {
'SourceContainer': (basestring, True),
'ReadOnly': (boolean, False),
}
class ContainerDefinition(AWSProperty):
props = {
'Command': ([basestring], False),
'Cpu': (positive_integer, False),
'EntryPoint': ([basestring], False),
'Environment': ([Environment], False),
'Essential': (boolean, False),
'Image': (basestring, True),
'Links': ([basestring], False),
'Memory': (positive_integer, True),
'MountPoints': ([MountPoint], False),
'Name': (basestring, True),
'PortMappings': ([PortMapping], False),
'VolumesFrom': ([VolumesFrom], False),
}
class Host(AWSProperty):
props = {
'SourcePath': (basestring, False),
}
class Volume(AWSProperty):
props = {
'Name': (basestring, True),
'Host': (Host, False),
}
class TaskDefinition(AWSObject):
resource_type = "AWS::ECS::TaskDefinition"
props = {
'ContainerDefinitions': ([ContainerDefinition], True),
'Volumes': ([Volume], True),
}
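# Minimal usage sketch (resource title and container values assumed):
#   task = TaskDefinition(
#       'SampleTask',
#       ContainerDefinitions=[ContainerDefinition(
#           Name='web', Image='nginx', Memory=256, Essential=True)],
#       Volumes=[],
#   )
#   print(task.to_json())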
|
nemesisdesign/django
|
refs/heads/master
|
tests/generic_relations/models.py
|
90
|
"""
Generic relations
Generic relations let an object have a foreign key to any object through a
content-type/object-id field. A ``GenericForeignKey`` field can point to any
object, be it animal, vegetable, or mineral.
The canonical example is tags (although this example implementation is *far*
from complete).
"""
from __future__ import unicode_literals
from django.contrib.contenttypes.fields import (
GenericForeignKey, GenericRelation,
)
from django.contrib.contenttypes.models import ContentType
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class TaggedItem(models.Model):
"""A tag on an item."""
tag = models.SlugField()
content_type = models.ForeignKey(ContentType, models.CASCADE)
object_id = models.PositiveIntegerField()
content_object = GenericForeignKey()
class Meta:
ordering = ["tag", "content_type__model"]
def __str__(self):
return self.tag
class ValuableTaggedItem(TaggedItem):
value = models.PositiveIntegerField()
class AbstractComparison(models.Model):
comparative = models.CharField(max_length=50)
content_type1 = models.ForeignKey(ContentType, models.CASCADE, related_name="comparative1_set")
object_id1 = models.PositiveIntegerField()
first_obj = GenericForeignKey(ct_field="content_type1", fk_field="object_id1")
@python_2_unicode_compatible
class Comparison(AbstractComparison):
"""
A model that tests having multiple GenericForeignKeys. One is defined
through an inherited abstract model and one defined directly on this class.
"""
content_type2 = models.ForeignKey(ContentType, models.CASCADE, related_name="comparative2_set")
object_id2 = models.PositiveIntegerField()
other_obj = GenericForeignKey(ct_field="content_type2", fk_field="object_id2")
def __str__(self):
return "%s is %s than %s" % (self.first_obj, self.comparative, self.other_obj)
@python_2_unicode_compatible
class Animal(models.Model):
common_name = models.CharField(max_length=150)
latin_name = models.CharField(max_length=150)
tags = GenericRelation(TaggedItem, related_query_name='animal')
comparisons = GenericRelation(Comparison,
object_id_field="object_id1",
content_type_field="content_type1")
def __str__(self):
return self.common_name
@python_2_unicode_compatible
class Vegetable(models.Model):
name = models.CharField(max_length=150)
is_yucky = models.BooleanField(default=True)
tags = GenericRelation(TaggedItem)
def __str__(self):
return self.name
class Carrot(Vegetable):
pass
@python_2_unicode_compatible
class Mineral(models.Model):
name = models.CharField(max_length=150)
hardness = models.PositiveSmallIntegerField()
# note the lack of an explicit GenericRelation here...
def __str__(self):
return self.name
class GeckoManager(models.Manager):
def get_queryset(self):
return super(GeckoManager, self).get_queryset().filter(has_tail=True)
class Gecko(models.Model):
has_tail = models.BooleanField(default=False)
objects = GeckoManager()
# To test fix for #11263
class Rock(Mineral):
tags = GenericRelation(TaggedItem)
class ValuableRock(Mineral):
tags = GenericRelation(ValuableTaggedItem)
class ManualPK(models.Model):
id = models.IntegerField(primary_key=True)
tags = GenericRelation(TaggedItem, related_query_name='manualpk')
class ForProxyModelModel(models.Model):
content_type = models.ForeignKey(ContentType, models.CASCADE)
object_id = models.PositiveIntegerField()
obj = GenericForeignKey(for_concrete_model=False)
title = models.CharField(max_length=255, null=True)
class ForConcreteModelModel(models.Model):
content_type = models.ForeignKey(ContentType, models.CASCADE)
object_id = models.PositiveIntegerField()
obj = GenericForeignKey()
class ConcreteRelatedModel(models.Model):
bases = GenericRelation(ForProxyModelModel, for_concrete_model=False)
class ProxyRelatedModel(ConcreteRelatedModel):
class Meta:
proxy = True
# To test fix for #7551
class AllowsNullGFK(models.Model):
content_type = models.ForeignKey(ContentType, models.SET_NULL, null=True)
object_id = models.PositiveIntegerField(null=True)
content_object = GenericForeignKey()
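# Usage sketch (objects assumed; follows the tagging example from the module
# docstring):
#   lion = Animal.objects.create(common_name="Lion", latin_name="Panthera leo")
#   lion.tags.create(tag="yellow")
#   TaggedItem.objects.filter(animal__common_name="Lion")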
|
willdecker/suds
|
refs/heads/master
|
suds/xsd/doctor.py
|
205
|
# This program is free software; you can redistribute it and/or modify
# it under the terms of the (LGPL) GNU Lesser General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library Lesser General Public License for more details at
# ( http://www.gnu.org/licenses/lgpl.html ).
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
# written by: Jeff Ortel ( jortel@redhat.com )
"""
The I{doctor} module provides classes for fixing broken (sick)
schema(s).
"""
from logging import getLogger
from suds.sax import splitPrefix, Namespace
from suds.sax.element import Element
from suds.plugin import DocumentPlugin, DocumentContext
log = getLogger(__name__)
class Doctor:
"""
Schema Doctor.
"""
def examine(self, root):
"""
Examine and repair the schema (if necessary).
@param root: A schema root element.
@type root: L{Element}
"""
pass
class Practice(Doctor):
"""
A collection of doctors.
@ivar doctors: A list of doctors.
@type doctors: list
"""
def __init__(self):
self.doctors = []
def add(self, doctor):
"""
Add a doctor to the practice
@param doctor: A doctor to add.
@type doctor: L{Doctor}
"""
self.doctors.append(doctor)
def examine(self, root):
for d in self.doctors:
d.examine(root)
return root
class TnsFilter:
"""
Target Namespace filter.
@ivar tns: A list of target namespaces.
@type tns: [str,...]
"""
def __init__(self, *tns):
"""
@param tns: A list of target namespaces.
@type tns: [str,...]
"""
self.tns = []
self.add(*tns)
def add(self, *tns):
"""
        Add I{targetNamespaces} to the filter.
@param tns: A list of target namespaces.
@type tns: [str,...]
"""
self.tns += tns
def match(self, root, ns):
"""
Match by I{targetNamespace} excluding those that
are equal to the specified namespace to prevent
adding an import to itself.
@param root: A schema root.
@type root: L{Element}
"""
tns = root.get('targetNamespace')
if len(self.tns):
matched = ( tns in self.tns )
else:
matched = 1
itself = ( ns == tns )
return ( matched and not itself )
class Import:
"""
An <xs:import/> to be applied.
@cvar xsdns: The XSD namespace.
@type xsdns: (p,u)
@ivar ns: An import namespace.
@type ns: str
@ivar location: An optional I{schemaLocation}.
@type location: str
@ivar filter: A filter used to restrict application to
a particular schema.
@type filter: L{TnsFilter}
"""
xsdns = Namespace.xsdns
def __init__(self, ns, location=None):
"""
@param ns: An import namespace.
@type ns: str
@param location: An optional I{schemaLocation}.
@type location: str
"""
self.ns = ns
self.location = location
self.filter = TnsFilter()
def setfilter(self, filter):
"""
Set the filter.
@param filter: A filter to set.
@type filter: L{TnsFilter}
"""
self.filter = filter
def apply(self, root):
"""
Apply the import (rule) to the specified schema.
If the schema does not already contain an import for the
I{namespace} specified here, it is added.
@param root: A schema root.
@type root: L{Element}
"""
if not self.filter.match(root, self.ns):
return
if self.exists(root):
return
node = Element('import', ns=self.xsdns)
node.set('namespace', self.ns)
if self.location is not None:
node.set('schemaLocation', self.location)
log.debug('inserting: %s', node)
root.insert(node)
def add(self, root):
"""
Add an <xs:import/> to the specified schema root.
@param root: A schema root.
@type root: L{Element}
"""
node = Element('import', ns=self.xsdns)
node.set('namespace', self.ns)
if self.location is not None:
node.set('schemaLocation', self.location)
log.debug('%s inserted', node)
root.insert(node)
def exists(self, root):
"""
Check to see if the <xs:import/> already exists
        in the specified schema root by matching I{namespace}.
@param root: A schema root.
@type root: L{Element}
"""
for node in root.children:
if node.name != 'import':
continue
ns = node.get('namespace')
if self.ns == ns:
return 1
return 0
class ImportDoctor(Doctor, DocumentPlugin):
"""
Doctor used to fix missing imports.
@ivar imports: A list of imports to apply.
@type imports: [L{Import},...]
"""
def __init__(self, *imports):
"""
"""
self.imports = []
self.add(*imports)
def add(self, *imports):
"""
        Add a namespace to be checked.
@param imports: A list of L{Import} objects.
@type imports: [L{Import},..]
"""
self.imports += imports
def examine(self, node):
for imp in self.imports:
imp.apply(node)
def parsed(self, context):
node = context.document
# xsd root
if node.name == 'schema' and Namespace.xsd(node.namespace()):
self.examine(node)
return
# look deeper
context = DocumentContext()
for child in node:
context.document = child
self.parsed(context)
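# Typical usage sketch (namespaces assumed; client wiring shown for context):
#   imp = Import('http://schemas.xmlsoap.org/soap/encoding/')
#   imp.filter.add('http://tempuri.org/')
#   doctor = ImportDoctor(imp)
#   # then hand the doctor to a suds Client (e.g. doctor=doctor or plugins=[doctor])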
|
codycuellar/bill_tracker
|
refs/heads/master
|
billtracker/notify.py
|
1
|
#! /usr/bin/env python
import core
import logging
import config as cfg
import argparse
logger = logging.getLogger()
def startup():
parser = argparse.ArgumentParser()
parser.add_argument('-d', '--debug', action='store_true', default=False,
help='Enable debug mode.')
args = parser.parse_args()
if args.debug:
cfg.configure_logging(logger, logname='notify',
log_level=logging.DEBUG,
file_level=logging.DEBUG,
console_level=logging.DEBUG)
else:
cfg.configure_logging(logger, logname='notify',
log_level=logging.INFO,
file_level=logging.INFO,
console_level=logging.DEBUG)
main(DEBUG=args.debug)
def main(DEBUG=False):
core.load_bills()
core.check_due_dates()
core.all_bills_to_json()
core.send_email(DEBUG=DEBUG)
if __name__ == '__main__':
startup()
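# Invocation sketch:
#   ./notify.py           # normal run (INFO logging)
#   ./notify.py --debug   # DEBUG logging; core.send_email runs in debug mode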
|
guettli/django
|
refs/heads/master
|
django/contrib/gis/gdal/libgdal.py
|
64
|
from __future__ import unicode_literals
import logging
import os
import re
from ctypes import CDLL, CFUNCTYPE, c_char_p, c_int
from ctypes.util import find_library
from django.contrib.gis.gdal.error import GDALException
from django.core.exceptions import ImproperlyConfigured
logger = logging.getLogger('django.contrib.gis')
# Custom library path set?
try:
from django.conf import settings
lib_path = settings.GDAL_LIBRARY_PATH
except (AttributeError, EnvironmentError,
ImportError, ImproperlyConfigured):
lib_path = None
if lib_path:
lib_names = None
elif os.name == 'nt':
# Windows NT shared libraries
lib_names = ['gdal111', 'gdal110', 'gdal19', 'gdal18', 'gdal17']
elif os.name == 'posix':
# *NIX library names.
lib_names = ['gdal', 'GDAL', 'gdal1.11.0', 'gdal1.10.0', 'gdal1.9.0', 'gdal1.8.0', 'gdal1.7.0']
else:
raise GDALException('Unsupported OS "%s"' % os.name)
# Using the ctypes `find_library` utility to find the
# path to the GDAL library from the list of library names.
if lib_names:
for lib_name in lib_names:
lib_path = find_library(lib_name)
if lib_path is not None:
break
if lib_path is None:
raise GDALException(
'Could not find the GDAL library (tried "%s"). Try setting '
'GDAL_LIBRARY_PATH in your settings.' % '", "'.join(lib_names)
)
# This loads the GDAL/OGR C library
lgdal = CDLL(lib_path)
# On Windows, the GDAL binaries have some OSR routines exported with
# STDCALL, while others are not. Thus, the library will also need to
# be loaded up as WinDLL for said OSR functions that require the
# different calling convention.
if os.name == 'nt':
from ctypes import WinDLL
lwingdal = WinDLL(lib_path)
def std_call(func):
"""
Returns the correct STDCALL function for certain OSR routines on Win32
platforms.
"""
if os.name == 'nt':
return lwingdal[func]
else:
return lgdal[func]
# #### Version-information functions. ####
# Returns GDAL library version information with the given key.
_version_info = std_call('GDALVersionInfo')
_version_info.argtypes = [c_char_p]
_version_info.restype = c_char_p
def gdal_version():
"Returns only the GDAL version number information."
return _version_info(b'RELEASE_NAME')
def gdal_full_version():
"Returns the full GDAL version information."
    return _version_info(b'')
version_regex = re.compile(r'^(?P<major>\d+)\.(?P<minor>\d+)(\.(?P<subminor>\d+))?')
def gdal_version_info():
ver = gdal_version().decode()
m = version_regex.match(ver)
if not m:
raise GDALException('Could not parse GDAL version string "%s"' % ver)
return {key: m.group(key) for key in ('major', 'minor', 'subminor')}
_verinfo = gdal_version_info()
GDAL_MAJOR_VERSION = int(_verinfo['major'])
GDAL_MINOR_VERSION = int(_verinfo['minor'])
GDAL_SUBMINOR_VERSION = _verinfo['subminor'] and int(_verinfo['subminor'])
GDAL_VERSION = (GDAL_MAJOR_VERSION, GDAL_MINOR_VERSION, GDAL_SUBMINOR_VERSION)
del _verinfo
# Set library error handling so that errors are logged
CPLErrorHandler = CFUNCTYPE(None, c_int, c_int, c_char_p)
def err_handler(error_class, error_number, message):
logger.error('GDAL_ERROR %d: %s', error_number, message)
err_handler = CPLErrorHandler(err_handler)
def function(name, args, restype):
func = std_call(name)
func.argtypes = args
func.restype = restype
return func
set_error_handler = function('CPLSetErrorHandler', [CPLErrorHandler], CPLErrorHandler)
set_error_handler(err_handler)
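# Example of binding one more GDAL entry point with the helper above (the
# C symbol exists in the GDAL API; this particular binding is illustrative):
#   get_driver_count = function('GDALGetDriverCount', [], c_int)
#   get_driver_count()  # -> number of registered GDAL drivers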
|
Jimmyhua94/MyoFlie
|
refs/heads/master
|
lib/cfclient/utils/zmq_led_driver.py
|
5
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# || ____ _ __
# +------+ / __ )(_) /_______________ _____ ___
# | 0xBC | / __ / / __/ ___/ ___/ __ `/_ / / _ \
# +------+ / /_/ / / /_/ /__/ / / /_/ / / /_/ __/
# || || /_____/_/\__/\___/_/ \__,_/ /___/\___/
#
# Copyright (C) 2015 Bitcraze AB
#
# Crazyflie Nano Quadcopter Client
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
# USA.
"""
Give access to the LED driver memory via ZMQ.
"""
import cflib
import cflib.crazyflie
from cflib.crazyflie.mem import MemoryElement
import logging
from threading import Thread, Lock
ZMQ_PULL_PORT = 1024 + 190
logger = logging.getLogger(__name__)
enabled = False
try:
import zmq
enabled = True
except Exception as e:
logger.warning("Not enabling ZMQ LED driver access,"
"import failed ({})".format(e))
class _PullReader(Thread):
"""Blocking thread for reading from ZMQ socket"""
def __init__(self, receiver, callback, *args):
"""Initialize"""
super(_PullReader, self).__init__(*args)
self._receiver = receiver
self._cb = callback
self.daemon = True
self.lock = Lock()
def run(self):
while True:
# self.lock.acquire()
self._cb(self._receiver.recv_json())
class ZMQLEDDriver:
"""Used for reading data from input devices using the PyGame API."""
def __init__(self, crazyflie):
if enabled:
self._cf = crazyflie
context = zmq.Context()
self._receiver = context.socket(zmq.PULL)
self._bind_addr = "tcp://*:{}".format(ZMQ_PULL_PORT)
# If the port is already bound an exception will be thrown
# and caught in the initialization of the readers and handled.
self._receiver.bind(self._bind_addr)
logger.info("Biding ZMQ for LED driver"
"at {}".format(self._bind_addr))
self._receiver_thread = _PullReader(self._receiver,
self._cmd_callback)
def start(self):
if enabled:
self._receiver_thread.start()
def _cmd_callback(self, data):
"""Called when new data arrives via ZMQ"""
if len(self._cf.mem.get_mems(MemoryElement.TYPE_DRIVER_LED)) > 0:
logger.info("Updating memory")
memory = self._cf.mem.get_mems(MemoryElement.TYPE_DRIVER_LED)[0]
for i_led in range(len(data["rgbleds"])):
memory.leds[i_led].set(data["rgbleds"][i_led][0],
data["rgbleds"][i_led][1],
data["rgbleds"][i_led][2])
memory.write_data(self._write_cb)
def _write_cb(self, mem, addr):
return
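# Client-side sketch for feeding this driver (payload shape inferred from
# _cmd_callback; host and LED count assumed):
#   import zmq
#   sock = zmq.Context().socket(zmq.PUSH)
#   sock.connect("tcp://127.0.0.1:1214")  # ZMQ_PULL_PORT = 1024 + 190
#   sock.send_json({"rgbleds": [[255, 0, 0]] * 12})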
|
JeffAbrahamson/gtd
|
refs/heads/master
|
find_similar.py
|
1
|
#!/usr/bin/env python
"""Generate a plot of my recent time usage at my computer.
If a first and numeric argument is present, it is the number of days of
history to show. The default is ten.
"""
from __future__ import print_function
import argparse
from lib_gtd import gtd_load
from operator import itemgetter
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.feature_extraction.text import HashingVectorizer
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.metrics.pairwise import cosine_similarity
def find_similar(input_filename, target):
"""Find some phrases similar to target.
"""
dataframe = gtd_load(input_filename, 'tasks')
labels = dataframe.label.unique()
print('Got {n} labels.'.format(n=len(labels)))
max_score = .5
# vectorizer = CountVectorizer(analyzer='word')
# vectorizer = CountVectorizer(analyzer='word', ngram_range=(1,2))
# vectorizer = HashingVectorizer(analyzer='word', ngram_range=(1,2))
# For TfIdf, scores are bigger.
max_score = .8
vectorizer = TfidfVectorizer(analyzer='word')
# vectorizer = TfidfVectorizer(analyzer='word', ngram_range=(1,2))
print('Learning model (TF-IDF)...')
ft_matrix = vectorizer.fit_transform(labels)
print('Got model: {r}x{c}.'.format(r=ft_matrix.shape[0], c=ft_matrix.shape[1]))
cosine_distance = 1 - cosine_similarity(ft_matrix)
target_index = list(labels).index(target)
print('Found target at index {i}'.format(i=target_index))
if len(labels) != cosine_distance.shape[0]:
print('Warning: {num_labels} labels, {num_dist} distances'.format(
num_labels=len(labels), num_dist=cosine_distance.shape[0]))
print('Searching for similarities (among {n})...'.format(n=len(labels)))
similar = []
for pattern_index in range(len(labels)):
pattern = labels[pattern_index]
if pattern != target:
score = cosine_distance[target_index, pattern_index]
if score < max_score:
similar.append((pattern_index, score))
similar.sort(key=itemgetter(1))
print(len(similar))
print(target)
for candidate in similar[:10]:
print(' {score:.2} {phrase}'.format(
score=candidate[1], phrase=labels[candidate[0]]))
print(target)
for candidate in similar[90:100]:
print(' {score:.2} {phrase}'.format(
score=candidate[1], phrase=labels[candidate[0]]))
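# A toy sketch of the vectorize-then-rank step above (assumption: the
# phrases are illustrative, not real task labels). TfidfVectorizer maps
# each phrase to a sparse term-weight vector, cosine_similarity compares
# the rows, and 1 - similarity is the distance ranked above.
#
#   phrases = ['read mail', 'write mail', 'walk the dog']
#   matrix = TfidfVectorizer(analyzer='word').fit_transform(phrases)
#   distances = 1 - cosine_similarity(matrix)
#   # distances[0, 1] < distances[0, 2]: the mail phrases share a term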
def main():
"""Do what we do."""
parser = argparse.ArgumentParser()
named_args = parser.add_argument_group('arguments')
named_args.add_argument('--input-filename', type=str,
default='/tmp/gtd-data',
help='Path and filename prefix to pickled data file')
named_args.add_argument('--target', type=str,
help='Target phrase for finding similar phrases')
# parser.add_argument('--verbose', dest='verbose', action='store_true')
args = parser.parse_args()
find_similar(args.input_filename, args.target)
if __name__ == '__main__':
main()
|
jmcarp/osf.io
|
refs/heads/develop
|
scripts/dataverse/connect_external_accounts.py
|
44
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import logging
from modularodm import Q
from website.app import init_app
from scripts import utils as script_utils
from framework.transactions.context import TokuTransaction
from website.addons.dataverse.model import AddonDataverseNodeSettings
logger = logging.getLogger(__name__)
def do_migration():
for node_addon in AddonDataverseNodeSettings.find(Q('foreign_user_settings', 'ne', None)):
user_addon = node_addon.foreign_user_settings
if not user_addon.external_accounts:
logger.warning('User {0} has no dataverse external account'.format(user_addon.owner._id))
continue
account = user_addon.external_accounts[0]
node_addon.set_auth(account, user_addon.owner)
logger.info('Added external account {0} to node {1}'.format(
account._id, node_addon.owner._id,
))
def main(dry=True):
init_app(set_backends=True, routes=False) # Sets the storage backends on all models
with TokuTransaction():
do_migration()
if dry:
raise Exception('Abort Transaction - Dry Run')
if __name__ == '__main__':
dry = 'dry' in sys.argv
if not dry:
script_utils.add_file_logger(logger, __file__)
main(dry=dry)
|
evansd/django
|
refs/heads/master
|
tests/i18n/test_management.py
|
48
|
import os
from django.core.management.commands.makemessages import TranslatableFile
from django.test import SimpleTestCase
class TranslatableFileTests(SimpleTestCase):
def test_repr(self):
dirpath = 'dir'
file_name = 'example'
trans_file = TranslatableFile(dirpath=dirpath, file_name=file_name, locale_dir=None)
self.assertEqual(repr(trans_file), '<TranslatableFile: %s>' % os.path.join(dirpath, file_name))
|
thinkopensolutions/l10n-brazil
|
refs/heads/10.0
|
sped_nfe/wizards/wizard_nfe_motivo_cancelamento.py
|
2
|
from __future__ import division, print_function, unicode_literals
from odoo import api, fields, models, _
from odoo.exceptions import Warning as UserError
class NfeCancelamentoWizard(models.TransientModel):
_name = b'nfe.cancelamento.wizard'
motivo_cancelamento = fields.Char(
string='Justificativa para Cancelamento de NF-e',
required=True,
size=255,
)
@api.multi
def action_motivo_cancelamento(self):
"""
:return:
"""
self.ensure_one()
if len(self.motivo_cancelamento) < 10:
raise UserError("A justificativa deve ter mais de 10 caracteres.")
nfe = self.env['sped.documento'].browse(self.env.context['active_id'])
nfe.justificativa = self.motivo_cancelamento
nfe.cancela_nfe()
return {'type': 'ir.actions.act_window_close'}
|
FHannes/intellij-community
|
refs/heads/master
|
python/lib/Lib/SocketServer.py
|
70
|
"""Generic socket server classes.
This module tries to capture the various aspects of defining a server:
For socket-based servers:
- address family:
- AF_INET{,6}: IP (Internet Protocol) sockets (default)
- AF_UNIX: Unix domain sockets
- others, e.g. AF_DECNET are conceivable (see <socket.h>)
- socket type:
- SOCK_STREAM (reliable stream, e.g. TCP)
- SOCK_DGRAM (datagrams, e.g. UDP)
For request-based servers (including socket-based):
- client address verification before further looking at the request
(This is actually a hook for any processing that needs to look
at the request before anything else, e.g. logging)
- how to handle multiple requests:
- synchronous (one request is handled at a time)
- forking (each request is handled by a new process)
- threading (each request is handled by a new thread)
The classes in this module favor the server type that is simplest to
write: a synchronous TCP/IP server. This is bad class design, but it
saves some typing. (There's also the issue that a deep class hierarchy
slows down method lookups.)
There are five classes in an inheritance diagram, four of which represent
synchronous servers of four types:
+------------+
| BaseServer |
+------------+
|
v
+-----------+ +------------------+
| TCPServer |------->| UnixStreamServer |
+-----------+ +------------------+
|
v
+-----------+ +--------------------+
| UDPServer |------->| UnixDatagramServer |
+-----------+ +--------------------+
Note that UnixDatagramServer derives from UDPServer, not from
UnixStreamServer -- the only difference between an IP and a Unix
stream server is the address family, which is simply repeated in both
unix server classes.
Forking and threading versions of each type of server can be created
using the ForkingMixIn and ThreadingMixIn mix-in classes. For
instance, a threading UDP server class is created as follows:
class ThreadingUDPServer(ThreadingMixIn, UDPServer): pass
The Mix-in class must come first, since it overrides a method defined
in UDPServer! Setting the various member variables also changes
the behavior of the underlying server mechanism.
To implement a service, you must derive a class from
BaseRequestHandler and redefine its handle() method. You can then run
various versions of the service by combining one of the server classes
with your request handler class.
The request handler class must be different for datagram or stream
services. This can be hidden by using the request handler
subclasses StreamRequestHandler or DatagramRequestHandler.
Of course, you still have to use your head!
For instance, it makes no sense to use a forking server if the service
contains state in memory that can be modified by requests (since the
modifications in the child process would never reach the initial state
kept in the parent process and passed to each child). In this case,
you can use a threading server, but you will probably have to use
locks to prevent two requests that come in nearly simultaneously from
applying conflicting changes to the server state.
On the other hand, if you are building e.g. an HTTP server, where all
data is stored externally (e.g. in the file system), a synchronous
class will essentially render the service "deaf" while one request is
being handled -- which may be for a very long time if a client is slow
to read all the data it has requested. Here a threading or forking
server is appropriate.
In some cases, it may be appropriate to process part of a request
synchronously, but to finish processing in a forked child depending on
the request data. This can be implemented by using a synchronous
server and doing an explicit fork in the request handler class
handle() method.
Another approach to handling multiple simultaneous requests in an
environment that supports neither threads nor fork (or where these are
too expensive or inappropriate for the service) is to maintain an
explicit table of partially finished requests and to use select() to
decide which request to work on next (or whether to handle a new
incoming request). This is particularly important for stream services
where each client can potentially be connected for a long time (if
threads or subprocesses cannot be used).
Future work:
- Standard classes for Sun RPC (which uses either UDP or TCP)
- Standard mix-in classes to implement various authentication
and encryption schemes
- Standard framework for select-based multiplexing
XXX Open problems:
- What to do with out-of-band data?
BaseServer:
- split generic "request" functionality out into BaseServer class.
Copyright (C) 2000 Luke Kenneth Casson Leighton <lkcl@samba.org>
example: read entries from a SQL database (requires overriding
get_request() to return a table entry from the database).
entry is processed by a RequestHandlerClass.
"""
# Author of the BaseServer patch: Luke Kenneth Casson Leighton
# XXX Warning!
# There is a test suite for this module, but it cannot be run by the
# standard regression test.
# To run it manually, run Lib/test/test_socketserver.py.
__version__ = "0.4"
import socket
import sys
import os
__all__ = ["TCPServer","UDPServer","ForkingUDPServer","ForkingTCPServer",
"ThreadingUDPServer","ThreadingTCPServer","BaseRequestHandler",
"StreamRequestHandler","DatagramRequestHandler",
"ThreadingMixIn", "ForkingMixIn"]
if hasattr(socket, "AF_UNIX"):
__all__.extend(["UnixStreamServer","UnixDatagramServer",
"ThreadingUnixStreamServer",
"ThreadingUnixDatagramServer"])
class BaseServer:
"""Base class for server classes.
Methods for the caller:
- __init__(server_address, RequestHandlerClass)
- serve_forever()
- handle_request() # if you do not use serve_forever()
- fileno() -> int # for select()
Methods that may be overridden:
- server_bind()
- server_activate()
- get_request() -> request, client_address
- verify_request(request, client_address)
- server_close()
- process_request(request, client_address)
- close_request(request)
- handle_error()
Methods for derived classes:
- finish_request(request, client_address)
Class variables that may be overridden by derived classes or
instances:
- address_family
- socket_type
- allow_reuse_address
Instance variables:
- RequestHandlerClass
- socket
"""
def __init__(self, server_address, RequestHandlerClass):
"""Constructor. May be extended, do not override."""
self.server_address = server_address
self.RequestHandlerClass = RequestHandlerClass
def server_activate(self):
"""Called by constructor to activate the server.
May be overridden.
"""
pass
def serve_forever(self):
"""Handle one request at a time until doomsday."""
while 1:
self.handle_request()
# The distinction between handling, getting, processing and
# finishing a request is fairly arbitrary. Remember:
#
# - handle_request() is the top-level call. It calls
# get_request(), verify_request() and process_request()
# - get_request() is different for stream or datagram sockets
# - process_request() is the place that may fork a new process
# or create a new thread to finish the request
# - finish_request() instantiates the request handler class;
# this constructor will handle the request all by itself
def handle_request(self):
"""Handle one request, possibly blocking."""
try:
request, client_address = self.get_request()
except socket.error:
return
if self.verify_request(request, client_address):
try:
self.process_request(request, client_address)
except:
self.handle_error(request, client_address)
self.close_request(request)
def verify_request(self, request, client_address):
"""Verify the request. May be overridden.
Return True if we should proceed with this request.
"""
return True
def process_request(self, request, client_address):
"""Call finish_request.
Overridden by ForkingMixIn and ThreadingMixIn.
"""
self.finish_request(request, client_address)
self.close_request(request)
def server_close(self):
"""Called to clean-up the server.
May be overridden.
"""
pass
def finish_request(self, request, client_address):
"""Finish one request by instantiating RequestHandlerClass."""
self.RequestHandlerClass(request, client_address, self)
def close_request(self, request):
"""Called to clean up an individual request."""
pass
def handle_error(self, request, client_address):
"""Handle an error gracefully. May be overridden.
The default is to print a traceback and continue.
"""
print '-'*40
print 'Exception happened during processing of request from',
print client_address
import traceback
traceback.print_exc() # XXX But this goes to stderr!
print '-'*40
class TCPServer(BaseServer):
"""Base class for various socket-based server classes.
Defaults to synchronous IP stream (i.e., TCP).
Methods for the caller:
- __init__(server_address, RequestHandlerClass)
- serve_forever()
- handle_request() # if you don't use serve_forever()
- fileno() -> int # for select()
Methods that may be overridden:
- server_bind()
- server_activate()
- get_request() -> request, client_address
- verify_request(request, client_address)
- process_request(request, client_address)
- close_request(request)
- handle_error()
Methods for derived classes:
- finish_request(request, client_address)
Class variables that may be overridden by derived classes or
instances:
- address_family
- socket_type
- request_queue_size (only for stream sockets)
- allow_reuse_address
Instance variables:
- server_address
- RequestHandlerClass
- socket
"""
address_family = socket.AF_INET
socket_type = socket.SOCK_STREAM
request_queue_size = 5
allow_reuse_address = False
def __init__(self, server_address, RequestHandlerClass):
"""Constructor. May be extended, do not override."""
BaseServer.__init__(self, server_address, RequestHandlerClass)
self.socket = socket.socket(self.address_family,
self.socket_type)
self.server_bind()
self.server_activate()
def server_bind(self):
"""Called by constructor to bind the socket.
May be overridden.
"""
if self.allow_reuse_address:
self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.socket.bind(self.server_address)
self.server_address = self.socket.getsockname()
def server_activate(self):
"""Called by constructor to activate the server.
May be overridden.
"""
self.socket.listen(self.request_queue_size)
# Adding a second call to getsockname() because of this issue
# http://wiki.python.org/jython/NewSocketModule#Deferredsocketcreationonjython
self.server_address = self.socket.getsockname()
def server_close(self):
"""Called to clean-up the server.
May be overridden.
"""
self.socket.close()
def fileno(self):
"""Return socket file number.
Interface required by select().
"""
return self.socket.fileno()
def get_request(self):
"""Get the request and client address from the socket.
May be overridden.
"""
return self.socket.accept()
def close_request(self, request):
"""Called to clean up an individual request."""
request.close()
class UDPServer(TCPServer):
"""UDP server class."""
allow_reuse_address = False
socket_type = socket.SOCK_DGRAM
max_packet_size = 8192
def get_request(self):
data, client_addr = self.socket.recvfrom(self.max_packet_size)
return (data, self.socket), client_addr
def server_activate(self):
# No need to call listen() for UDP.
pass
def close_request(self, request):
# No need to close anything.
pass
class ForkingMixIn:
"""Mix-in class to handle each request in a new process."""
active_children = None
max_children = 40
def collect_children(self):
"""Internal routine to wait for died children."""
while self.active_children:
if len(self.active_children) < self.max_children:
options = os.WNOHANG
else:
# If the maximum number of children are already
# running, block while waiting for a child to exit
options = 0
try:
pid, status = os.waitpid(0, options)
except os.error:
pid = None
if not pid: break
self.active_children.remove(pid)
def process_request(self, request, client_address):
"""Fork a new subprocess to process the request."""
self.collect_children()
pid = os.fork()
if pid:
# Parent process
if self.active_children is None:
self.active_children = []
self.active_children.append(pid)
self.close_request(request)
return
else:
# Child process.
# This must never return, hence os._exit()!
try:
self.finish_request(request, client_address)
os._exit(0)
except:
try:
self.handle_error(request, client_address)
finally:
os._exit(1)
class ThreadingMixIn:
"""Mix-in class to handle each request in a new thread."""
# Decides how threads will act upon termination of the
# main process
daemon_threads = False
def process_request_thread(self, request, client_address):
"""Same as in BaseServer but as a thread.
In addition, exception handling is done here.
"""
try:
self.finish_request(request, client_address)
self.close_request(request)
except:
self.handle_error(request, client_address)
self.close_request(request)
def process_request(self, request, client_address):
"""Start a new thread to process the request."""
import threading
t = threading.Thread(target = self.process_request_thread,
args = (request, client_address))
if self.daemon_threads:
            t.setDaemon(1)
t.start()
class ForkingUDPServer(ForkingMixIn, UDPServer): pass
class ForkingTCPServer(ForkingMixIn, TCPServer): pass
class ThreadingUDPServer(ThreadingMixIn, UDPServer): pass
class ThreadingTCPServer(ThreadingMixIn, TCPServer): pass
if hasattr(socket, 'AF_UNIX'):
class UnixStreamServer(TCPServer):
address_family = socket.AF_UNIX
class UnixDatagramServer(UDPServer):
address_family = socket.AF_UNIX
class ThreadingUnixStreamServer(ThreadingMixIn, UnixStreamServer): pass
class ThreadingUnixDatagramServer(ThreadingMixIn, UnixDatagramServer): pass
class BaseRequestHandler:
"""Base class for request handler classes.
This class is instantiated for each request to be handled. The
constructor sets the instance variables request, client_address
and server, and then calls the handle() method. To implement a
specific service, all you need to do is to derive a class which
defines a handle() method.
The handle() method can find the request as self.request, the
client address as self.client_address, and the server (in case it
needs access to per-server information) as self.server. Since a
separate instance is created for each request, the handle() method
    can define arbitrary other instance variables.
"""
def __init__(self, request, client_address, server):
self.request = request
self.client_address = client_address
self.server = server
self.setup()
try:
self.handle()
finally:
self.finish()
def setup(self):
pass
def handle(self):
pass
def finish(self):
pass
# The following two classes make it possible to use the same service
# class for stream or datagram servers.
# Each class sets up these instance variables:
# - rfile: a file object from which the request is read
# - wfile: a file object to which the reply is written
# When the handle() method returns, wfile is flushed properly
class StreamRequestHandler(BaseRequestHandler):
"""Define self.rfile and self.wfile for stream sockets."""
# Default buffer sizes for rfile, wfile.
# We default rfile to buffered because otherwise it could be
# really slow for large data (a getc() call per byte); we make
# wfile unbuffered because (a) often after a write() we want to
# read and we need to flush the line; (b) big writes to unbuffered
# files are typically optimized by stdio even when big reads
# aren't.
rbufsize = -1
wbufsize = 0
def setup(self):
self.connection = self.request
self.rfile = self.connection.makefile('rb', self.rbufsize)
self.wfile = self.connection.makefile('wb', self.wbufsize)
def finish(self):
if not self.wfile.closed:
self.wfile.flush()
self.wfile.close()
self.rfile.close()
class DatagramRequestHandler(BaseRequestHandler):
# XXX Regrettably, I cannot get this working on Linux;
# s.recvfrom() doesn't return a meaningful client address.
"""Define self.rfile and self.wfile for datagram sockets."""
def setup(self):
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
self.packet, self.socket = self.request
self.rfile = StringIO(self.packet)
self.wfile = StringIO()
def finish(self):
self.socket.sendto(self.wfile.getvalue(), self.client_address)
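# A corresponding datagram sketch (assumption: example port). The packet
# arrives pre-read in self.rfile, and whatever handle() writes to
# self.wfile is sent back to the client address by finish() above:
#
#   class UDPEchoHandler(DatagramRequestHandler):
#       def handle(self):
#           self.wfile.write(self.rfile.read())
#
#   server = UDPServer(('localhost', 8001), UDPEchoHandler)
#   server.serve_forever()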
|
Zhongqilong/kbengine
|
refs/heads/master
|
kbe/res/scripts/common/Lib/distutils/tests/test_util.py
|
94
|
"""Tests for distutils.util."""
import os
import sys
import unittest
from copy import copy
from test.support import run_unittest
from distutils.errors import DistutilsPlatformError, DistutilsByteCompileError
from distutils.util import (get_platform, convert_path, change_root,
check_environ, split_quoted, strtobool,
rfc822_escape, byte_compile,
grok_environment_error)
from distutils import util # used to patch _environ_checked
from distutils.sysconfig import get_config_vars
from distutils import sysconfig
from distutils.tests import support
import _osx_support
class UtilTestCase(support.EnvironGuard, unittest.TestCase):
def setUp(self):
super(UtilTestCase, self).setUp()
# saving the environment
self.name = os.name
self.platform = sys.platform
self.version = sys.version
self.sep = os.sep
self.join = os.path.join
self.isabs = os.path.isabs
self.splitdrive = os.path.splitdrive
self._config_vars = copy(sysconfig._config_vars)
# patching os.uname
if hasattr(os, 'uname'):
self.uname = os.uname
self._uname = os.uname()
else:
self.uname = None
self._uname = None
os.uname = self._get_uname
def tearDown(self):
# getting back the environment
os.name = self.name
sys.platform = self.platform
sys.version = self.version
os.sep = self.sep
os.path.join = self.join
os.path.isabs = self.isabs
os.path.splitdrive = self.splitdrive
if self.uname is not None:
os.uname = self.uname
else:
del os.uname
sysconfig._config_vars = copy(self._config_vars)
super(UtilTestCase, self).tearDown()
def _set_uname(self, uname):
self._uname = uname
def _get_uname(self):
return self._uname
def test_get_platform(self):
# windows XP, 32bits
os.name = 'nt'
sys.version = ('2.4.4 (#71, Oct 18 2006, 08:34:43) '
'[MSC v.1310 32 bit (Intel)]')
sys.platform = 'win32'
self.assertEqual(get_platform(), 'win32')
# windows XP, amd64
os.name = 'nt'
sys.version = ('2.4.4 (#71, Oct 18 2006, 08:34:43) '
'[MSC v.1310 32 bit (Amd64)]')
sys.platform = 'win32'
self.assertEqual(get_platform(), 'win-amd64')
# windows XP, itanium
os.name = 'nt'
sys.version = ('2.4.4 (#71, Oct 18 2006, 08:34:43) '
'[MSC v.1310 32 bit (Itanium)]')
sys.platform = 'win32'
self.assertEqual(get_platform(), 'win-ia64')
# macbook
os.name = 'posix'
sys.version = ('2.5 (r25:51918, Sep 19 2006, 08:49:13) '
'\n[GCC 4.0.1 (Apple Computer, Inc. build 5341)]')
sys.platform = 'darwin'
self._set_uname(('Darwin', 'macziade', '8.11.1',
('Darwin Kernel Version 8.11.1: '
'Wed Oct 10 18:23:28 PDT 2007; '
'root:xnu-792.25.20~1/RELEASE_I386'), 'i386'))
_osx_support._remove_original_values(get_config_vars())
get_config_vars()['MACOSX_DEPLOYMENT_TARGET'] = '10.3'
get_config_vars()['CFLAGS'] = ('-fno-strict-aliasing -DNDEBUG -g '
'-fwrapv -O3 -Wall -Wstrict-prototypes')
cursize = sys.maxsize
sys.maxsize = (2 ** 31)-1
try:
self.assertEqual(get_platform(), 'macosx-10.3-i386')
finally:
sys.maxsize = cursize
# macbook with fat binaries (fat, universal or fat64)
_osx_support._remove_original_values(get_config_vars())
get_config_vars()['MACOSX_DEPLOYMENT_TARGET'] = '10.4'
get_config_vars()['CFLAGS'] = ('-arch ppc -arch i386 -isysroot '
'/Developer/SDKs/MacOSX10.4u.sdk '
'-fno-strict-aliasing -fno-common '
'-dynamic -DNDEBUG -g -O3')
self.assertEqual(get_platform(), 'macosx-10.4-fat')
_osx_support._remove_original_values(get_config_vars())
os.environ['MACOSX_DEPLOYMENT_TARGET'] = '10.1'
self.assertEqual(get_platform(), 'macosx-10.4-fat')
_osx_support._remove_original_values(get_config_vars())
get_config_vars()['CFLAGS'] = ('-arch x86_64 -arch i386 -isysroot '
'/Developer/SDKs/MacOSX10.4u.sdk '
'-fno-strict-aliasing -fno-common '
'-dynamic -DNDEBUG -g -O3')
self.assertEqual(get_platform(), 'macosx-10.4-intel')
_osx_support._remove_original_values(get_config_vars())
get_config_vars()['CFLAGS'] = ('-arch x86_64 -arch ppc -arch i386 -isysroot '
'/Developer/SDKs/MacOSX10.4u.sdk '
'-fno-strict-aliasing -fno-common '
'-dynamic -DNDEBUG -g -O3')
self.assertEqual(get_platform(), 'macosx-10.4-fat3')
_osx_support._remove_original_values(get_config_vars())
get_config_vars()['CFLAGS'] = ('-arch ppc64 -arch x86_64 -arch ppc -arch i386 -isysroot '
'/Developer/SDKs/MacOSX10.4u.sdk '
'-fno-strict-aliasing -fno-common '
'-dynamic -DNDEBUG -g -O3')
self.assertEqual(get_platform(), 'macosx-10.4-universal')
_osx_support._remove_original_values(get_config_vars())
get_config_vars()['CFLAGS'] = ('-arch x86_64 -arch ppc64 -isysroot '
'/Developer/SDKs/MacOSX10.4u.sdk '
'-fno-strict-aliasing -fno-common '
'-dynamic -DNDEBUG -g -O3')
self.assertEqual(get_platform(), 'macosx-10.4-fat64')
for arch in ('ppc', 'i386', 'x86_64', 'ppc64'):
_osx_support._remove_original_values(get_config_vars())
get_config_vars()['CFLAGS'] = ('-arch %s -isysroot '
'/Developer/SDKs/MacOSX10.4u.sdk '
'-fno-strict-aliasing -fno-common '
'-dynamic -DNDEBUG -g -O3'%(arch,))
self.assertEqual(get_platform(), 'macosx-10.4-%s'%(arch,))
# linux debian sarge
os.name = 'posix'
sys.version = ('2.3.5 (#1, Jul 4 2007, 17:28:59) '
'\n[GCC 4.1.2 20061115 (prerelease) (Debian 4.1.1-21)]')
sys.platform = 'linux2'
self._set_uname(('Linux', 'aglae', '2.6.21.1dedibox-r7',
'#1 Mon Apr 30 17:25:38 CEST 2007', 'i686'))
self.assertEqual(get_platform(), 'linux-i686')
# XXX more platforms to tests here
def test_convert_path(self):
# linux/mac
os.sep = '/'
def _join(path):
return '/'.join(path)
os.path.join = _join
self.assertEqual(convert_path('/home/to/my/stuff'),
'/home/to/my/stuff')
# win
os.sep = '\\'
def _join(*path):
return '\\'.join(path)
os.path.join = _join
self.assertRaises(ValueError, convert_path, '/home/to/my/stuff')
self.assertRaises(ValueError, convert_path, 'home/to/my/stuff/')
self.assertEqual(convert_path('home/to/my/stuff'),
'home\\to\\my\\stuff')
self.assertEqual(convert_path('.'),
os.curdir)
def test_change_root(self):
# linux/mac
os.name = 'posix'
def _isabs(path):
return path[0] == '/'
os.path.isabs = _isabs
def _join(*path):
return '/'.join(path)
os.path.join = _join
self.assertEqual(change_root('/root', '/old/its/here'),
'/root/old/its/here')
self.assertEqual(change_root('/root', 'its/here'),
'/root/its/here')
# windows
os.name = 'nt'
def _isabs(path):
return path.startswith('c:\\')
os.path.isabs = _isabs
def _splitdrive(path):
if path.startswith('c:'):
return ('', path.replace('c:', ''))
return ('', path)
os.path.splitdrive = _splitdrive
def _join(*path):
return '\\'.join(path)
os.path.join = _join
self.assertEqual(change_root('c:\\root', 'c:\\old\\its\\here'),
'c:\\root\\old\\its\\here')
self.assertEqual(change_root('c:\\root', 'its\\here'),
'c:\\root\\its\\here')
# BugsBunny os (it's a great os)
os.name = 'BugsBunny'
self.assertRaises(DistutilsPlatformError,
change_root, 'c:\\root', 'its\\here')
# XXX platforms to be covered: mac
def test_check_environ(self):
util._environ_checked = 0
if 'HOME' in os.environ:
del os.environ['HOME']
# posix without HOME
if os.name == 'posix': # this test won't run on windows
check_environ()
import pwd
self.assertEqual(os.environ['HOME'], pwd.getpwuid(os.getuid())[5])
else:
check_environ()
self.assertEqual(os.environ['PLAT'], get_platform())
self.assertEqual(util._environ_checked, 1)
def test_split_quoted(self):
self.assertEqual(split_quoted('""one"" "two" \'three\' \\four'),
['one', 'two', 'three', 'four'])
def test_strtobool(self):
yes = ('y', 'Y', 'yes', 'True', 't', 'true', 'True', 'On', 'on', '1')
no = ('n', 'no', 'f', 'false', 'off', '0', 'Off', 'No', 'N')
for y in yes:
self.assertTrue(strtobool(y))
for n in no:
self.assertFalse(strtobool(n))
def test_rfc822_escape(self):
header = 'I am a\npoor\nlonesome\nheader\n'
res = rfc822_escape(header)
wanted = ('I am a%(8s)spoor%(8s)slonesome%(8s)s'
'header%(8s)s') % {'8s': '\n'+8*' '}
self.assertEqual(res, wanted)
def test_dont_write_bytecode(self):
# makes sure byte_compile raise a DistutilsError
# if sys.dont_write_bytecode is True
old_dont_write_bytecode = sys.dont_write_bytecode
sys.dont_write_bytecode = True
try:
self.assertRaises(DistutilsByteCompileError, byte_compile, [])
finally:
sys.dont_write_bytecode = old_dont_write_bytecode
def test_grok_environment_error(self):
# test obsolete function to ensure backward compat (#4931)
exc = IOError("Unable to find batch file")
msg = grok_environment_error(exc)
self.assertEqual(msg, "error: Unable to find batch file")
def test_suite():
return unittest.makeSuite(UtilTestCase)
if __name__ == "__main__":
run_unittest(test_suite())
|
edwardzhou1980/bite-project
|
refs/heads/master
|
deps/gdata-python-client/samples/apps/provisioning_oauth2_example.py
|
23
|
#!/usr/bin/python
#
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Sample for the Provisioning API and the Email Settings API with OAuth 2.0."""
__author__ = 'Shraddha Gupta <shraddhag@google.com>'
from optparse import OptionParser
import gdata.apps
import gdata.apps.emailsettings.client
import gdata.apps.groups.client
import gdata.client
import gdata.gauth
API_VERSION = '2.0'
BASE_URL = '/a/feeds/group/%s' % API_VERSION
SCOPE = ('https://apps-apis.google.com/a/feeds/groups/ '
'https://apps-apis.google.com/a/feeds/emailsettings/2.0/')
HOST = 'apps-apis.google.com'
class OAuth2ClientSample(object):
"""OAuth2ClientSample object demos the use of OAuth2Token for retrieving
Members of a Group and updating Email Settings for them."""
def __init__(self, domain, client_id, client_secret):
"""
Args:
domain: string Domain name (e.g. domain.com)
client_id: string Client_id of domain admin account.
client_secret: string Client_secret of domain admin account.
"""
try:
self.token = gdata.gauth.OAuth2Token(client_id=client_id,
client_secret=client_secret,
scope=SCOPE,
user_agent='oauth2-provisioningv2')
self.uri = self.token.generate_authorize_url()
print 'Please visit this URL to authorize the application:'
print self.uri
# Get the verification code from the standard input.
code = raw_input('What is the verification code? ').strip()
self.token.get_access_token(code)
except gdata.gauth.OAuth2AccessTokenError, e:
print 'Invalid Access token, Check your credentials %s' % e
exit(0)
self.domain = domain
self.baseuri = '%s/%s' % (BASE_URL, domain)
self.client = gdata.apps.groups.client.GroupsProvisioningClient(
domain=self.domain, auth_token=self.token)
# Authorize the client.
# This will add the Authorization header to all future requests.
self.token.authorize(self.client)
self.email_client = gdata.apps.emailsettings.client.EmailSettingsClient(
domain=self.domain, auth_token=self.token)
self.token.authorize(self.email_client)
def create_filter(self, feed):
"""Creates a mail filter that marks as read all messages not containing
Domain name as one of their words for each member of the group.
Args:
      feed: GroupMemberFeed of members whose email settings need to be updated
"""
for entry in feed.entry:
user_name, domain = entry.member_id.split('@', 1)
if entry.member_type == 'User' and domain == self.domain:
print 'creating filter for %s' % entry.member_id
self.email_client.CreateFilter(user_name,
does_not_have_the_word=self.domain,
mark_as_read=True)
      elif entry.member_type == 'User':
        print 'User belongs to other Domain %s' % entry.member_id
      else:
        print 'Member is a group %s' % entry.member_id
def run(self, group):
feed = self.client.RetrieveAllMembers(group)
self.create_filter(feed)
def main():
"""Demos the Provisioning API and the Email Settings API with OAuth 2.0."""
usage = 'usage: %prog [options]'
parser = OptionParser(usage=usage)
parser.add_option('--DOMAIN',
help='Google Apps Domain, e.g. "domain.com".')
parser.add_option('--CLIENT_ID',
help='Registered CLIENT_ID of Domain.')
parser.add_option('--CLIENT_SECRET',
help='Registered CLIENT_SECRET of Domain.')
parser.add_option('--GROUP',
help='Group identifier')
(options, args) = parser.parse_args()
if None in (options.DOMAIN, options.CLIENT_ID, options.CLIENT_SECRET,
options.GROUP):
parser.print_help()
return
sample = OAuth2ClientSample(options.DOMAIN,
options.CLIENT_ID, options.CLIENT_SECRET)
sample.run(options.GROUP)
if __name__ == '__main__':
main()
|
blackye/luscan-devel
|
refs/heads/master
|
golismero/libs/bind_sql_inject/__init__.py
|
4
|
#!/usr/bin/env python
#-*- coding:utf-8 -*-
__author__ = 'BlackYe.'
|
akretion/project-service
|
refs/heads/8.0
|
service_desk_issue/__openerp__.py
|
8
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2012-2013 Daniel Reis
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Service Desk for Issues',
'summary': 'Use Project Issues for Service Desks and service teams',
'version': '8.0.1.1.0',
"category": "Project Management",
'description': """\
This module extends the ``service_desk`` module to also work with Issues.
Please refer to that module's description.
""",
'author': "Daniel Reis,Odoo Community Association (OCA)",
'website': '',
'license': 'AGPL-3',
'depends': [
'project_issue',
'service_desk',
],
'data': [
'service_desk_view.xml',
],
'installable': True,
'auto_install': True,
}
|
NL66278/OCB
|
refs/heads/8.0
|
addons/gamification/__openerp__.py
|
299
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2013 OpenERP SA (<http://openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Gamification',
'version': '1.0',
'author': 'OpenERP SA',
'category': 'Human Resources',
'website' : 'https://www.odoo.com/page/gamification',
'depends': ['mail', 'email_template', 'web_kanban_gauge'],
'description': """
Gamification process
====================
The Gamification module provides ways to evaluate and motivate the users of OpenERP.
The users can be evaluated using goals and numerical objectives to reach.
**Goals** are assigned through **challenges** to evaluate and compare members of a team with each other and through time.
For non-numerical achievements, **badges** can be granted to users. From a simple "thank you" to an exceptional achievement, a badge is an easy way to express gratitude to a user for their good work.
Both goals and badges are flexible and can be adapted to a large range of modules and actions. When installed, this module creates easy goals to help new users to discover OpenERP and configure their user profile.
""",
'data': [
'wizard/update_goal.xml',
'wizard/grant_badge.xml',
'views/badge.xml',
'views/challenge.xml',
'views/goal.xml',
'data/cron.xml',
'security/gamification_security.xml',
'security/ir.model.access.csv',
'data/goal_base.xml',
'data/badge.xml',
'views/gamification.xml',
],
'application': True,
'auto_install': False,
'qweb': ['static/src/xml/gamification.xml'],
}
|
bloyl/mne-python
|
refs/heads/placeholder
|
examples/preprocessing/define_target_events.py
|
29
|
"""
============================================================
Define target events based on time lag, plot evoked response
============================================================
This script shows how to define higher order events based on
time lag between reference and target events. For
illustration, we will put the presented face stimuli into two
classes: 1) followed by an early button press (within 590
milliseconds) and 2) followed by a late button press (later
than 590 milliseconds). Finally, we will
visualize the evoked responses to both 'quickly-processed'
and 'slowly-processed' face stimuli.
"""
# Authors: Denis Engemann <denis.engemann@gmail.com>
#
# License: BSD (3-clause)
import mne
from mne import io
from mne.event import define_target_events
from mne.datasets import sample
import matplotlib.pyplot as plt
print(__doc__)
data_path = sample.data_path()
###############################################################################
# Set parameters
raw_fname = data_path + '/MEG/sample/sample_audvis_filt-0-40_raw.fif'
event_fname = data_path + '/MEG/sample/sample_audvis_filt-0-40_raw-eve.fif'
# Setup for reading the raw data
raw = io.read_raw_fif(raw_fname)
events = mne.read_events(event_fname)
# Set up pick list: EEG + STI 014 - bad channels (modify to your needs)
include = [] # or stim channels ['STI 014']
raw.info['bads'] += ['EEG 053'] # bads
# pick MEG channels
picks = mne.pick_types(raw.info, meg='mag', eeg=False, stim=False, eog=True,
include=include, exclude='bads')
###############################################################################
# Find stimulus event followed by quick button presses
reference_id = 5 # presentation of a smiley face
target_id = 32 # button press
sfreq = raw.info['sfreq'] # sampling rate
tmin = 0.1 # trials leading to very early responses will be rejected
tmax = 0.59 # ignore face stimuli followed by button press later than 590 ms
new_id = 42 # the new event id for a hit. If None, reference_id is used.
fill_na = 99 # the fill value for misses
events_, lag = define_target_events(events, reference_id, target_id,
sfreq, tmin, tmax, new_id, fill_na)
print(events_) # The 99 indicates missing or too late button presses
# besides the events also the lag between target and reference is returned
# this could e.g. be used as parametric regressor in subsequent analyses.
print(lag[lag != fill_na]) # lag in milliseconds
# #############################################################################
# Construct epochs
tmin_ = -0.2
tmax_ = 0.4
event_id = dict(early=new_id, late=fill_na)
epochs = mne.Epochs(raw, events_, event_id, tmin_,
tmax_, picks=picks, baseline=(None, 0),
reject=dict(mag=4e-12))
# average epochs and get an Evoked dataset.
early, late = [epochs[k].average() for k in event_id]
###############################################################################
# View evoked response
times = 1e3 * epochs.times # time in milliseconds
title = 'Evoked response followed by %s button press'
fig, axes = plt.subplots(2, 1)
early.plot(axes=axes[0], time_unit='s')
axes[0].set(title=title % 'early', ylabel='Evoked field (fT)')
late.plot(axes=axes[1], time_unit='s')
axes[1].set(title=title % 'late', ylabel='Evoked field (fT)')
plt.show()
|
gelahcem/ATF
|
refs/heads/master
|
Test/FunctionalTests/CommonTestScripts/FsmUtil.py
|
10
|
#Copyright (c) 2014 Sony Computer Entertainment America LLC. See License.txt.
import Test
def AddNewStateAndVerify(editingContext, xPos, yPos, label, size=64):
newState = editingContext.InsertState(xPos, yPos, label, size)
Test.Equal(label, newState.Name, "Verify label")
Test.Equal(xPos, newState.Position.X, "Verify x position")
Test.Equal(yPos, newState.Position.Y, "Verify y position")
return newState
def AddNewCommentAndVerify(editingContext, xPos, yPos, text):
newComment = editingContext.InsertComment(xPos, yPos, text)
Test.Equal(text, newComment.Text, "Verify text")
#Difficult to verify the exact position because it is now the
#center (previously was top left corner). Needs a calculation
#of the center of the comment based on the text length
#Test.Equal(xPos, newComment.Location.X, "Verify x position")
#Test.Equal(yPos, newComment.Location.Y, "Verify y position")
return newComment
def AddNewTransitionAndVerify(editingContext, state1, state2):
newTransition = editingContext.InsertTransition(state1, state2)
Test.Equal(state1.Name, newTransition.FromState.Name, "Verify from state name")
Test.Equal(state2.Name, newTransition.ToState.Name, "Verify to state name")
return newTransition
|
jamesliu/mxnet
|
refs/heads/master
|
example/image-classification/fine-tune.py
|
5
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import os
import argparse
import logging
logging.basicConfig(level=logging.DEBUG)
from common import find_mxnet
from common import data, fit, modelzoo
import mxnet as mx
import numpy as np
def get_fine_tune_model(symbol, arg_params, num_classes, layer_name, dtype='float32'):
"""
symbol: the pre-trained network symbol
arg_params: the argument parameters of the pre-trained model
num_classes: the number of classes for the fine-tune datasets
layer_name: the layer name before the last fully-connected layer
"""
all_layers = symbol.get_internals()
net = all_layers[layer_name+'_output']
net = mx.symbol.FullyConnected(data=net, num_hidden=num_classes, name='fc')
if dtype == 'float16':
net = mx.sym.Cast(data=net, dtype=np.float32)
net = mx.symbol.SoftmaxOutput(data=net, name='softmax')
    new_args = {k: arg_params[k] for k in arg_params if 'fc' not in k}
return (net, new_args)
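# A hedged usage sketch (assumptions: a local 'model/resnet-50' checkpoint
# at epoch 0 and a 10-class target dataset). load_checkpoint returns the
# symbol plus the arg/aux parameter dicts; get_fine_tune_model then swaps
# in a fresh fully-connected head sized for the new classes:
#
#   sym, arg_params, aux_params = mx.model.load_checkpoint('model/resnet-50', 0)
#   net, new_args = get_fine_tune_model(sym, arg_params, num_classes=10,
#                                       layer_name='flatten0')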
if __name__ == "__main__":
# parse args
parser = argparse.ArgumentParser(description="fine-tune a dataset",
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
train = fit.add_fit_args(parser)
data.add_data_args(parser)
aug = data.add_data_aug_args(parser)
parser.add_argument('--pretrained-model', type=str,
help='the pre-trained model. can be prefix of local model files prefix \
or a model name from common/modelzoo')
    parser.add_argument('--layer-before-fullc', type=str, default='flatten0',
                        help='the name of the layer before the last fullc layer')
    # use less augmentations for fine-tune
data.set_data_aug_level(parser, 1)
# use a small learning rate and less regularizations
parser.set_defaults(image_shape='3,224,224',
num_epochs=30,
lr=.01,
lr_step_epochs='20',
wd=0,
mom=0)
args = parser.parse_args()
# load pretrained model and params
dir_path = os.path.dirname(os.path.realpath(__file__))
(prefix, epoch) = modelzoo.download_model(
args.pretrained_model, os.path.join(dir_path, 'model'))
if prefix is None:
(prefix, epoch) = (args.pretrained_model, args.load_epoch)
sym, arg_params, aux_params = mx.model.load_checkpoint(prefix, epoch)
if args.dtype != 'float32':
        # load symbol of trained network, so we can cast it to support other dtypes
        # fine-tuning a network in a datatype which was not used for training originally
        # requires access to the code used to generate the symbol used to train that model.
        # we then need to modify the symbol to add a layer at the beginning
        # to cast data to that dtype. We also need to cast output of layers before softmax
        # to float32 so that softmax can still be in float32.
        # if the network chosen from the symbols/ folder doesn't have a cast for the new
        # datatype, it will still train in fp32
if args.network not in ['inception-v3',\
'inception-v4', 'resnet-v1', 'resnet', 'resnext', 'vgg']:
raise ValueError('Given network does not have support for dtypes other than float32.\
Please add a cast layer at the beginning to train in that mode.')
from importlib import import_module
net = import_module('symbols.'+args.network)
sym = net.get_symbol(**vars(args))
# remove the last fullc layer and add a new softmax layer
(new_sym, new_args) = get_fine_tune_model(sym, arg_params, args.num_classes,
args.layer_before_fullc, args.dtype)
# train
fit.fit(args = args,
network = new_sym,
data_loader = data.get_rec_iter,
arg_params = new_args,
aux_params = aux_params)
|
blrm/robottelo
|
refs/heads/master
|
tests/foreman/cli/test_capsule.py
|
3
|
# -*- encoding: utf-8 -*-
"""Test class for the capsule CLI."""
import random
import re
from fauxfactory import gen_alphanumeric, gen_string
from robottelo.cli.base import CLIReturnCodeError
from robottelo.cli.factory import CLIFactoryError, make_proxy
from robottelo.cli.proxy import Proxy, default_url_on_new_port
from robottelo.datafactory import valid_data_list
from robottelo.decorators import run_only_on, stubbed, tier1, tier2
from robottelo.test import CLITestCase
class CapsuleTestCase(CLITestCase):
"""Proxy cli tests"""
@run_only_on('sat')
@tier1
def test_negative_create_with_url(self):
"""Proxy creation with random URL
@Feature: Smart Proxy
@Assert: Proxy is not created
"""
# Create a random proxy
with self.assertRaises(CLIFactoryError):
make_proxy({
u'url': u'http://{0}:{1}'.format(
gen_string('alpha', 6),
gen_string('numeric', 4)),
})
@run_only_on('sat')
@tier1
def test_positive_create_with_name(self):
"""Proxy creation with the home proxy
@Feature: Smart Proxy
@Assert: Proxy is created
"""
for name in valid_data_list():
with self.subTest(name):
proxy = make_proxy({u'name': name})
self.assertEquals(proxy['name'], name)
@run_only_on('sat')
@tier1
def test_positive_delete_by_id(self):
"""Proxy deletion with the home proxy
@Feature: Smart Proxy
@Assert: Proxy is deleted
"""
for name in valid_data_list():
with self.subTest(name):
proxy = make_proxy({u'name': name})
Proxy.delete({u'id': proxy['id']})
with self.assertRaises(CLIReturnCodeError):
Proxy.info({u'id': proxy['id']})
@run_only_on('sat')
@tier1
def test_positive_update_name(self):
"""Proxy name update with the home proxy
@Feature: Smart Proxy
@Assert: Proxy has the name updated
"""
proxy = make_proxy({u'name': gen_alphanumeric()})
newport = random.randint(9091, 49090)
for new_name in valid_data_list():
with self.subTest(new_name):
with default_url_on_new_port(9090, newport) as url:
Proxy.update({
u'id': proxy['id'],
u'name': new_name,
u'url': url,
})
proxy = Proxy.info({u'id': proxy['id']})
self.assertEqual(proxy['name'], new_name)
@run_only_on('sat')
@tier2
def test_positive_refresh_features_by_id(self):
"""Refresh smart proxy features, search for proxy by id
@Feature: Smart Proxy
@Assert: Proxy features are refreshed
"""
proxy = make_proxy()
# parse the port number so we can reopen the SSH tunnel
port_regexp = re.search(u':([0-9]+)', proxy['url'])
if port_regexp:
port = port_regexp.group(1)
with default_url_on_new_port(9090, port):
Proxy.refresh_features({u'id': proxy['id']})
else:
raise ValueError('Unable to parse port number from proxy URL')
@run_only_on('sat')
@tier2
def test_positive_refresh_features_by_name(self):
"""Refresh smart proxy features, search for proxy by name
@Feature: Smart Proxy
@Assert: Proxy features are refreshed
"""
proxy = make_proxy()
# parse the port number so we can reopen the SSH tunnel
port_regexp = re.search(u':([0-9]+)', proxy['url'])
if port_regexp:
port = port_regexp.group(1)
with default_url_on_new_port(9090, port):
Proxy.refresh_features({u'name': proxy['name']})
else:
raise ValueError('Unable to parse port number from proxy URL')
class CapsuleIntegrationTestCase(CLITestCase):
"""Tests for capsule functionality."""
@stubbed()
def test_positive_provision(self):
"""User can provision through a capsule
@Feature: Capsules
@Setup: Some valid, functional compute resource (perhaps one variation
of this case for each supported compute resource type). Also,
functioning capsule with proxy is required.
@Steps:
1. Attempt to route provisioning content through capsule that is using
a proxy
2. Attempt to provision instance
@Assert: Instance can be provisioned, with content coming through
proxy-enabled capsule.
@Status: Manual
"""
@stubbed()
def test_positive_register(self):
"""User can register system through proxy-enabled capsule
@Feature: Capsules
@Steps:
        1. attempt to register a system through a proxy-enabled capsule
@Assert: system is successfully registered
@Status: Manual
"""
@stubbed()
def test_positive_unregister(self):
"""User can unregister system through proxy-enabled capsule
@Feature: Capsules
@Steps:
1. attempt to unregister a system through a proxy-enabled capsule
@Assert: system is successfully unregistered
@Status: Manual
"""
@stubbed()
def test_positive_subscribe(self):
"""User can subscribe system to content through proxy-enabled
capsule
@Feature: Capsules
@Setup: Content source types configured/synced for [RH, Custom, Puppet,
Docker] etc.
@Steps:
1. attempt to subscribe a system to a content type variation, via a
proxy-enabled capsule
@Assert: system is successfully subscribed to each content type
@Status: Manual
"""
@stubbed()
def test_positive_consume_content(self):
"""User can consume content on system, from a content source,
through proxy-enabled capsule
@Feature: Capsules
@Setup: Content source types configured/synced for [RH, Custom, Puppet,
Docker] etc.
@Steps:
1. attempt to subscribe a system to a content type variation, via a
proxy-enabled capsule
2. Attempt to install content (RPMs, puppet modules) via proxy-enabled
capsule
@Assert: system successfully consume content
@Status: Manual
"""
@stubbed()
def test_positive_unsubscribe(self):
"""User can unsubscribe system from content through
proxy-enabled capsule
@Feature: Capsules
@Setup: Content source types configured/synced for [RH, Custom, Puppet]
etc.
@Steps:
1. attempt to subscribe a system to a content type variation, via a
proxy-enabled capsule
2. attempt to unsubscribe a system from said content type(s) via a
proxy-enabled capsule
@Assert: system is successfully unsubscribed from each content type
@Status: Manual
"""
@stubbed()
def test_positive_reregister_with_capsule_cert(self):
"""system can register via capsule using cert provided by
the capsule itself.
@Feature: Capsules
@Setup: functional capsule and certs rpm installed on target client.
@Steps:
1. Attempt to register from parent satellite; unregister and remove
cert rpm
2. Attempt to reregister using same credentials and certs from a
functional capsule.
        @Assert: Registration works, and the certs RPM is installed
        from the capsule.
@Status: Manual
"""
@stubbed()
def test_positive_ssl_capsule(self):
"""Assure SSL functionality for capsules
@Feature: Capsules
@Setup: A capsule installed with SSL enabled.
@Steps:
1. Execute basic steps from above (register, subscribe, consume,
unsubscribe, unregister) while connected to a capsule that is
SSL-enabled
@Assert: No failures executing said test scenarios against SSL,
baseline functionality identical to non-SSL
@Status: Manual
"""
@stubbed()
def test_positive_enable_bmc(self):
"""Enable BMC feature on smart-proxy
@Feature: Capsules
@Setup: A capsule installed with SSL enabled.
@Steps:
1. Enable BMC feature on proxy by running installer with:
``katello-installer --foreman-proxy-bmc 'true'``
2. Please make sure to check default values to other BMC options.
Should be like below:
``--foreman-proxy-bmc-default-provider BMC default provider.
(default: "ipmitool")``
``--foreman-proxy-bmc-listen-on BMC proxy to listen on https, http,
or both (default: "https")``
3. Check if BMC plugin is enabled with:
``#cat /etc/foreman-proxy/settings.d/bmc.yml | grep enabled``
4. Restart foreman-proxy service
@Assert: Katello installer should show the options to enable BMC
@Status: Manual
"""
|
ronbeltran/webapp2-bedrock
|
refs/heads/master
|
bedrock/app/utils/jinja.py
|
1
|
import os
import webapp2
import jinja2
import config
from app.utils.compressor import WEBASSETS_ENV
JINJA_ENV = jinja2.Environment(
autoescape=lambda x: True,
extensions=['jinja2.ext.autoescape',
'webassets.ext.jinja2.AssetsExtension'],
loader=jinja2.FileSystemLoader(
os.path.join(config.PROJECT_ROOT, 'templates')),
)
JINJA_ENV.globals.update({'uri_for': webapp2.uri_for})
JINJA_ENV.assets_environment = WEBASSETS_ENV
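# A minimal usage sketch (assumption: a hypothetical templates/index.html).
# Handlers look templates up on the shared environment and render them with
# a context dict; uri_for is available inside templates via the globals
# registered above:
#
#   template = JINJA_ENV.get_template('index.html')
#   html = template.render({'title': 'Home'})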
|
mozman/ezdxf
|
refs/heads/master
|
examples/render/show_all_std_line_types.py
|
1
|
# Copyright (c) 2019 Manfred Moitzi
# License: MIT License
import ezdxf
from ezdxf.math import Vec3
from ezdxf.tools.standards import linetypes
doc = ezdxf.new('R2007', setup=True)
msp = doc.modelspace()
# How to change the global linetype scaling:
doc.header['$LTSCALE'] = .5
p1 = Vec3(0, 0)
p2 = Vec3(9, 0)
delta = Vec3(0, -1)
text_offset = Vec3(0, .1)
for lt in linetypes():
name = lt[0]
msp.add_line(p1, p2, dxfattribs={'linetype': name, 'lineweight': 25})
msp.add_text(name, dxfattribs={'style': 'OpenSansCondensed-Light', 'height': 0.25}).set_pos(p1+text_offset)
p1 += delta
p2 += delta
doc.set_modelspace_vport(25, center=(5, -10))
doc.saveas('all_std_line_types.dxf')
|
tkurnosova/selenium
|
refs/heads/master
|
py/test/selenium/webdriver/common/executing_async_javascript_tests.py
|
63
|
# Licensed to the Software Freedom Conservancy (SFC) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import pytest
import unittest
from selenium.webdriver.common.by import By
from selenium.common.exceptions import WebDriverException
from selenium.common.exceptions import TimeoutException
from selenium.webdriver.remote.webelement import WebElement
@pytest.mark.ignore_phantomjs
class ExecutingAsyncJavaScriptTests(unittest.TestCase):
def testShouldNotTimeoutIfCallbackInvokedImmediately(self):
self._loadPage("ajaxy_page")
result = self.driver.execute_async_script("arguments[arguments.length - 1](123);")
self.assertTrue(type(result) == int)
self.assertEqual(123, result)
def testShouldBeAbleToReturnJavascriptPrimitivesFromAsyncScripts_NeitherNoneNorUndefined(self):
self._loadPage("ajaxy_page")
self.assertEqual(123, self.driver.execute_async_script(
"arguments[arguments.length - 1](123);"))
self.assertEqual("abc", self.driver.execute_async_script("arguments[arguments.length - 1]('abc');"))
self.assertFalse(bool(self.driver.execute_async_script("arguments[arguments.length - 1](false);")))
self.assertTrue(bool(self.driver.execute_async_script("arguments[arguments.length - 1](true);")))
#@Ignore(value = SELENESE, reason = "SeleniumRC cannot return null values.")
def testShouldBeAbleToReturnJavascriptPrimitivesFromAsyncScripts_NullAndUndefined(self):
self._loadPage("ajaxy_page")
self.assertTrue(self.driver.execute_async_script("arguments[arguments.length - 1](null)") is None)
self.assertTrue(self.driver.execute_async_script("arguments[arguments.length - 1]()") is None)
#@Ignore(value = SELENESE, reason = "Selenium cannot return arrays")
def testShouldBeAbleToReturnAnArrayLiteralFromAnAsyncScript(self):
self._loadPage("ajaxy_page")
result = self.driver.execute_async_script("arguments[arguments.length - 1]([]);")
self.assertTrue("Expected not to be null!", result is not None)
self.assertTrue(type(result) == list)
self.assertTrue(len(result) == 0)
#@Ignore(value = SELENESE, reason = "Selenium cannot return arrays")
def testShouldBeAbleToReturnAnArrayObjectFromAnAsyncScript(self):
self._loadPage("ajaxy_page")
result = self.driver.execute_async_script("arguments[arguments.length - 1](new Array());")
self.assertTrue("Expected not to be null!", result is not None)
self.assertTrue(type(result) == list)
self.assertTrue(len(result) == 0)
#@Ignore(value = ANDROID, SELENESE,
# reason = "Android does not properly handle arrays; Selenium cannot return arrays")
def testShouldBeAbleToReturnArraysOfPrimitivesFromAsyncScripts(self):
self._loadPage("ajaxy_page")
result = self.driver.execute_async_script(
"arguments[arguments.length - 1]([null, 123, 'abc', true, false]);")
self.assertTrue(result is not None)
self.assertTrue(type(result) == list)
self.assertFalse(bool(result.pop()))
self.assertTrue(bool(result.pop()))
self.assertEqual("abc", result.pop())
self.assertEqual(123, result.pop())
self.assertTrue(result.pop() is None)
self.assertTrue(len(result) == 0)
#@Ignore(value = SELENESE, reason = "Selenium cannot return elements from scripts")
def testShouldBeAbleToReturnWebElementsFromAsyncScripts(self):
self._loadPage("ajaxy_page")
result = self.driver.execute_async_script("arguments[arguments.length - 1](document.body);")
self.assertTrue(type(result) == WebElement)
self.assertEqual("body", result.tag_name.lower())
#@Ignore(value = ANDROID, SELENESE,
# reason = "Android does not properly handle arrays; Selenium cannot return elements")
def testShouldBeAbleToReturnArraysOfWebElementsFromAsyncScripts(self):
self._loadPage("ajaxy_page")
result = self.driver.execute_async_script(
"arguments[arguments.length - 1]([document.body, document.body]);")
self.assertTrue(result is not None)
self.assertTrue(type(result) == list)
        list_ = result
self.assertEqual(2, len(list_))
self.assertTrue(type(list_[0]) == WebElement)
self.assertTrue(type(list_[1]) == WebElement)
self.assertEqual("body", list_[0].tag_name)
#self.assertEqual(list_[0], list_[1])
def testShouldTimeoutIfScriptDoesNotInvokeCallback(self):
self._loadPage("ajaxy_page")
try:
#Script is expected to be async and explicitly callback, so this should timeout.
self.driver.execute_async_script("return 1 + 2;")
self.fail("Should have thrown a TimeOutException!")
        except TimeoutException as e:
pass
def testShouldTimeoutIfScriptDoesNotInvokeCallbackWithAZeroTimeout(self):
self._loadPage("ajaxy_page")
try:
self.driver.execute_async_script("window.setTimeout(function() {}, 0);")
fail("Should have thrown a TimeOutException!")
except TimeoutException as e:
pass
def testShouldNotTimeoutIfScriptCallsbackInsideAZeroTimeout(self):
self._loadPage("ajaxy_page")
self.driver.execute_async_script(
"""var callback = arguments[arguments.length - 1];
window.setTimeout(function() { callback(123); }, 0)""")
def testShouldTimeoutIfScriptDoesNotInvokeCallbackWithLongTimeout(self):
self.driver.set_script_timeout(0.5)
self._loadPage("ajaxy_page")
try:
self.driver.execute_async_script(
"""var callback = arguments[arguments.length - 1];
window.setTimeout(callback, 1500);""")
self.fail("Should have thrown a TimeOutException!")
except TimeoutException as e:
pass
def testShouldDetectPageLoadsWhileWaitingOnAnAsyncScriptAndReturnAnError(self):
self._loadPage("ajaxy_page")
self.driver.set_script_timeout(0.1)
try:
self.driver.execute_async_script("window.location = '" + self._pageURL("dynamic") + "';")
self.fail('Should have throw a WebDriverException')
except WebDriverException as expected:
pass
def testShouldCatchErrorsWhenExecutingInitialScript(self):
self._loadPage("ajaxy_page")
try:
self.driver.execute_async_script("throw Error('you should catch this!');")
self.fail("Should have thrown a WebDriverException")
except WebDriverException as expected:
pass
#@Ignore(value = ANDROID, CHROME,
# reason = "Android: Emulator is too slow and latency causes test to fall out of sync with app;"
# + "Chrome: Click is not working")
def testShouldBeAbleToExecuteAsynchronousScripts(self):
self._loadPage("ajaxy_page")
typer = self.driver.find_element(by=By.NAME, value="typer")
typer.send_keys("bob")
self.assertEqual("bob", typer.get_attribute("value"))
self.driver.find_element(by=By.ID, value="red").click()
self.driver.find_element(by=By.NAME, value="submit").click()
self.assertEqual(1, len(self.driver.find_elements(by=By.TAG_NAME, value='div')),
"There should only be 1 DIV at this point, which is used for the butter message")
self.driver.set_script_timeout(10)
text = self.driver.execute_async_script(
"""var callback = arguments[arguments.length - 1];
window.registerListener(arguments[arguments.length - 1]);""")
self.assertEqual("bob", text)
self.assertEqual("", typer.get_attribute("value"))
self.assertEqual(2, len(self.driver.find_elements(by=By.TAG_NAME, value='div')),
"There should be 1 DIV (for the butter message) + 1 DIV (for the new label)")
def testShouldBeAbleToPassMultipleArgumentsToAsyncScripts(self):
self._loadPage("ajaxy_page")
result = self.driver.execute_async_script("""
arguments[arguments.length - 1](arguments[0] + arguments[1]);""", 1, 2)
self.assertEqual(3, result)
#TODO DavidBurns Disabled till Java WebServer is used
#def testShouldBeAbleToMakeXMLHttpRequestsAndWaitForTheResponse(self):
# script = """
# var url = arguments[0];
# var callback = arguments[arguments.length - 1];
# // Adapted from http://www.quirksmode.org/js/xmlhttp.html
# var XMLHttpFactories = [
# function () return new XMLHttpRequest(),
# function () return new ActiveXObject('Msxml2.XMLHTTP'),
# function () return new ActiveXObject('Msxml3.XMLHTTP'),
# function () return new ActiveXObject('Microsoft.XMLHTTP')
# ];
# var xhr = false;
# while (!xhr && XMLHttpFactories.length)
# try{
# xhr = XMLHttpFactories.shift().call();
# }catch (e)
#
# if (!xhr) throw Error('unable to create XHR object');
# xhr.open('GET', url, true);
# xhr.onreadystatechange = function()
# if (xhr.readyState == 4) callback(xhr.responseText);
#
# xhr.send('');""" # empty string to stop firefox 3 from choking
#
# self._loadPage("ajaxy_page")
# self.driver.set_script_timeout(3)
# response = self.driver.execute_async_script(script, pages.sleepingPage + "?time=2")
# htm = "<html><head><title>Done</title></head><body>Slept for 2s</body></html>"
# self.assertTrue(response.strip() == htm)
def _pageURL(self, name):
return self.webserver.where_is(name + '.html')
def _loadSimplePage(self):
self._loadPage("simpleTest")
def _loadPage(self, name):
self.driver.get(self._pageURL(name))
|
zsiciarz/django
|
refs/heads/master
|
django/db/backends/base/client.py
|
98
|
class BaseDatabaseClient:
"""Encapsulate backend-specific methods for opening a client shell."""
# This should be a string representing the name of the executable
# (e.g., "psql"). Subclasses must override this.
executable_name = None
def __init__(self, connection):
# connection is an instance of BaseDatabaseWrapper.
self.connection = connection
def runshell(self):
raise NotImplementedError('subclasses of BaseDatabaseClient must provide a runshell() method')
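# --- Illustrative sketch, not part of the original module -----------------
# A hypothetical concrete subclass, assuming a psql-style command line
# client; real backends build their full argument list (host, port, user)
# from self.connection.settings_dict rather than just the database name.
import subprocess
class ExamplePostgresClient(BaseDatabaseClient):
    executable_name = 'psql'
    def runshell(self):
        # Minimal invocation: connect to the configured database by name.
        settings_dict = self.connection.settings_dict
        subprocess.check_call([self.executable_name, settings_dict['NAME']])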
|
oliverlee/sympy
|
refs/heads/master
|
sympy/diffgeom/tests/test_diffgeom.py
|
79
|
from sympy.diffgeom.rn import R2, R2_p, R2_r, R3_r, R3_c, R3_s
from sympy.diffgeom import (Commutator, Differential, TensorProduct,
WedgeProduct, BaseCovarDerivativeOp, CovarDerivativeOp, LieDerivative,
covariant_order, contravariant_order, twoform_to_matrix, metric_to_Christoffel_1st,
metric_to_Christoffel_2nd, metric_to_Riemann_components,
metric_to_Ricci_components, intcurve_diffequ, intcurve_series)
from sympy.core import Symbol, symbols
from sympy.simplify import trigsimp, simplify
from sympy.functions import sqrt, atan2, sin
from sympy.matrices import Matrix
from sympy.utilities.pytest import raises
TP = TensorProduct
def test_R2():
x0, y0, r0, theta0 = symbols('x0, y0, r0, theta0', real=True)
point_r = R2_r.point([x0, y0])
point_p = R2_p.point([r0, theta0])
# r**2 = x**2 + y**2
assert (R2.r**2 - R2.x**2 - R2.y**2).rcall(point_r) == 0
assert trigsimp( (R2.r**2 - R2.x**2 - R2.y**2).rcall(point_p) ) == 0
assert trigsimp(R2.e_r(R2.x**2 + R2.y**2).rcall(point_p).doit()) == 2*r0
# polar->rect->polar == Id
a, b = symbols('a b', positive=True)
m = Matrix([[a], [b]])
#TODO assert m == R2_r.coord_tuple_transform_to(R2_p, R2_p.coord_tuple_transform_to(R2_r, [a, b])).applyfunc(simplify)
assert m == R2_p.coord_tuple_transform_to(
R2_r, R2_r.coord_tuple_transform_to(R2_p, m)).applyfunc(simplify)
def test_R3():
a, b, c = symbols('a b c', positive=True)
m = Matrix([[a], [b], [c]])
assert m == R3_c.coord_tuple_transform_to(
R3_r, R3_r.coord_tuple_transform_to(R3_c, m)).applyfunc(simplify)
#TODO assert m == R3_r.coord_tuple_transform_to(R3_c, R3_c.coord_tuple_transform_to(R3_r, m)).applyfunc(simplify)
assert m == R3_s.coord_tuple_transform_to(
R3_r, R3_r.coord_tuple_transform_to(R3_s, m)).applyfunc(simplify)
#TODO assert m == R3_r.coord_tuple_transform_to(R3_s, R3_s.coord_tuple_transform_to(R3_r, m)).applyfunc(simplify)
assert m == R3_s.coord_tuple_transform_to(
R3_c, R3_c.coord_tuple_transform_to(R3_s, m)).applyfunc(simplify)
#TODO assert m == R3_c.coord_tuple_transform_to(R3_s, R3_s.coord_tuple_transform_to(R3_c, m)).applyfunc(simplify)
def test_point():
x, y = symbols('x, y')
p = R2_r.point([x, y])
#TODO assert p.free_symbols() == set([x, y])
assert p.coords(R2_r) == p.coords() == Matrix([x, y])
assert p.coords(R2_p) == Matrix([sqrt(x**2 + y**2), atan2(y, x)])
def test_commutator():
assert Commutator(R2.e_x, R2.e_y) == 0
assert Commutator(R2.x*R2.e_x, R2.x*R2.e_x) == 0
assert Commutator(R2.x*R2.e_x, R2.x*R2.e_y) == R2.x*R2.e_y
c = Commutator(R2.e_x, R2.e_r)
assert c(R2.x) == R2.y*(R2.x**2 + R2.y**2)**(-1)*sin(R2.theta)
def test_differential():
xdy = R2.x*R2.dy
dxdy = Differential(xdy)
assert xdy.rcall(None) == xdy
assert dxdy(R2.e_x, R2.e_y) == 1
assert dxdy(R2.e_x, R2.x*R2.e_y) == R2.x
assert Differential(dxdy) == 0
def test_products():
assert TensorProduct(
R2.dx, R2.dy)(R2.e_x, R2.e_y) == R2.dx(R2.e_x)*R2.dy(R2.e_y) == 1
assert WedgeProduct(R2.dx, R2.dy)(R2.e_x, R2.e_y) == 1
assert TensorProduct(R2.dx, R2.dy)(None, R2.e_y) == R2.dx
assert TensorProduct(R2.dx, R2.dy)(R2.e_x, None) == R2.dy
assert TensorProduct(R2.dx, R2.dy)(R2.e_x) == R2.dy
assert TensorProduct(R2.x, R2.dx) == R2.x*R2.dx
def test_lie_derivative():
assert LieDerivative(R2.e_x, R2.y) == R2.e_x(R2.y) == 0
assert LieDerivative(R2.e_x, R2.x) == R2.e_x(R2.x) == 1
assert LieDerivative(R2.e_x, R2.e_x) == Commutator(R2.e_x, R2.e_x) == 0
assert LieDerivative(R2.e_x, R2.e_r) == Commutator(R2.e_x, R2.e_r)
assert LieDerivative(R2.e_x + R2.e_y, R2.x) == 1
assert LieDerivative(
R2.e_x, TensorProduct(R2.dx, R2.dy))(R2.e_x, R2.e_y) == 0
def test_covar_deriv():
ch = metric_to_Christoffel_2nd(TP(R2.dx, R2.dx) + TP(R2.dy, R2.dy))
cvd = BaseCovarDerivativeOp(R2_r, 0, ch)
assert cvd(R2.x) == 1
assert cvd(R2.x*R2.e_x) == R2.e_x
cvd = CovarDerivativeOp(R2.x*R2.e_x, ch)
assert cvd(R2.x) == R2.x
assert cvd(R2.x*R2.e_x) == R2.x*R2.e_x
def test_intcurve_diffequ():
t = symbols('t')
start_point = R2_r.point([1, 0])
vector_field = -R2.y*R2.e_x + R2.x*R2.e_y
equations, init_cond = intcurve_diffequ(vector_field, t, start_point)
assert str(equations) == '[f_1(t) + Derivative(f_0(t), t), -f_0(t) + Derivative(f_1(t), t)]'
assert str(init_cond) == '[f_0(0) - 1, f_1(0)]'
equations, init_cond = intcurve_diffequ(vector_field, t, start_point, R2_p)
assert str(
equations) == '[Derivative(f_0(t), t), Derivative(f_1(t), t) - 1]'
assert str(init_cond) == '[f_0(0) - 1, f_1(0)]'
def test_helpers_and_coordinate_dependent():
one_form = R2.dr + R2.dx
two_form = Differential(R2.x*R2.dr + R2.r*R2.dx)
three_form = Differential(
R2.y*two_form) + Differential(R2.x*Differential(R2.r*R2.dr))
metric = TensorProduct(R2.dx, R2.dx) + TensorProduct(R2.dy, R2.dy)
metric_ambig = TensorProduct(R2.dx, R2.dx) + TensorProduct(R2.dr, R2.dr)
misform_a = TensorProduct(R2.dr, R2.dr) + R2.dr
misform_b = R2.dr**4
misform_c = R2.dx*R2.dy
twoform_not_sym = TensorProduct(R2.dx, R2.dx) + TensorProduct(R2.dx, R2.dy)
twoform_not_TP = WedgeProduct(R2.dx, R2.dy)
assert covariant_order(one_form) == 1
assert covariant_order(two_form) == 2
assert covariant_order(three_form) == 3
assert covariant_order(two_form + metric) == 2
assert covariant_order(two_form + metric_ambig) == 2
assert covariant_order(two_form + twoform_not_sym) == 2
assert covariant_order(two_form + twoform_not_TP) == 2
raises(ValueError, lambda: covariant_order(misform_a))
raises(ValueError, lambda: covariant_order(misform_b))
raises(ValueError, lambda: covariant_order(misform_c))
assert twoform_to_matrix(metric) == Matrix([[1, 0], [0, 1]])
assert twoform_to_matrix(twoform_not_sym) == Matrix([[1, 0], [1, 0]])
assert twoform_to_matrix(twoform_not_TP) == Matrix([[0, -1], [1, 0]])
raises(ValueError, lambda: twoform_to_matrix(one_form))
raises(ValueError, lambda: twoform_to_matrix(three_form))
raises(ValueError, lambda: twoform_to_matrix(metric_ambig))
raises(ValueError, lambda: metric_to_Christoffel_1st(twoform_not_sym))
raises(ValueError, lambda: metric_to_Christoffel_2nd(twoform_not_sym))
raises(ValueError, lambda: metric_to_Riemann_components(twoform_not_sym))
raises(ValueError, lambda: metric_to_Ricci_components(twoform_not_sym))
def test_correct_arguments():
raises(ValueError, lambda: R2.e_x(R2.e_x))
raises(ValueError, lambda: R2.e_x(R2.dx))
raises(ValueError, lambda: Commutator(R2.e_x, R2.x))
raises(ValueError, lambda: Commutator(R2.dx, R2.e_x))
raises(ValueError, lambda: Differential(Differential(R2.e_x)))
raises(ValueError, lambda: R2.dx(R2.x))
raises(ValueError, lambda: TensorProduct(R2.e_x, R2.dx))
raises(ValueError, lambda: LieDerivative(R2.dx, R2.dx))
raises(ValueError, lambda: LieDerivative(R2.x, R2.dx))
raises(ValueError, lambda: CovarDerivativeOp(R2.dx, []))
raises(ValueError, lambda: CovarDerivativeOp(R2.x, []))
a = Symbol('a')
raises(ValueError, lambda: intcurve_series(R2.dx, a, R2_r.point([1, 2])))
raises(ValueError, lambda: intcurve_series(R2.x, a, R2_r.point([1, 2])))
raises(ValueError, lambda: intcurve_diffequ(R2.dx, a, R2_r.point([1, 2])))
raises(ValueError, lambda: intcurve_diffequ(R2.x, a, R2_r.point([1, 2])))
raises(ValueError, lambda: contravariant_order(R2.e_x + R2.dx))
raises(ValueError, lambda: covariant_order(R2.e_x + R2.dx))
raises(ValueError, lambda: contravariant_order(R2.e_x*R2.e_y))
raises(ValueError, lambda: covariant_order(R2.dx*R2.dy))
def test_simplify():
x, y = R2_r.coord_functions()
dx, dy = R2_r.base_oneforms()
ex, ey = R2_r.base_vectors()
assert simplify(x) == x
assert simplify(x*y) == x*y
assert simplify(dx*dy) == dx*dy
assert simplify(ex*ey) == ex*ey
assert ((1-x)*dx)/(1-x)**2 == dx/(1-x)
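def test_wedge_antisymmetry_sketch():
    # Hypothetical extra check, not part of the original suite: the wedge
    # product is antisymmetric, so swapping the two vector-field arguments
    # flips the sign relative to the normalization asserted above.
    assert WedgeProduct(R2.dx, R2.dy)(R2.e_y, R2.e_x) == -1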
|
ycaihua/kbengine
|
refs/heads/master
|
kbe/tools/server/install/installer.py
|
22
|
#!/usr/bin/python
# -*- coding:utf-8 -*-
import urllib, socket
import tarfile, zipfile, tempfile
import os, sys, re, platform, getopt, getpass, random, time, subprocess, shutil, string
from xml.etree import ElementTree as ET
from subprocess import check_call
if platform.system() == 'Windows':
pass
else:
import pwd
if sys.hexversion >= 0x03000000:
import urllib.request
import http.client
import configparser
from urllib.parse import urlparse
if platform.system() == 'Windows':
import winreg
else:
import ConfigParser
import urlparse
import httplib
if platform.system() == 'Windows':
import _winreg as winreg
# Sources and binary releases
source_url = "https://github.com/kbengine/kbengine/releases/latest"
bin_zip_url = "https://sourceforge.net/projects/kbengine/files/bin/latest.zip/download"
bin_tgz_url = "https://sourceforge.net/projects/kbengine/files/bin/latest.tar.gz/download"
bin_mysql_url = "https://sourceforge.net/projects/kbengine/files/bin/deps/mysql-win32.msi/download"
# MySQL installation directory
mysql_root = ""
mysql_home = ""
# MySQL version information
mysql_verinfo = ""
# MySQL port
mysql_ip = ""
mysql_port = ""
# MySQL root password
mysql_root_password = ""
# MySQL KBE account name and password
mysql_kbe_name = ""
mysql_kbe_password = ""
# MySQL db name
mysql_kbe_db_name = ""
# MySQL service name
mysql_sercive_name = ""
# The installation location is determined from the root directory
KBE_ROOT = ''
KBE_RES_PATH = ''
KBE_BIN_PATH = ''
KBE_UID = ''
kbe_res_path = ""
# The system user name and password, used temporarily during installation
os_user_name = ""
os_user_passwd = ""
# Tool environment variable name
INSTALLER_ENV_NAME = 'KBT'
_zip_kbengine_path = ""
_zip_kbengine_dirname = ""
_install_path = ""
def hello():
# echoSystemEnvironment()
# echoKBEEnvironment()
OUT_MSG("###########################################################################")
OUT_MSG("# installer: #")
OUT_MSG("# KBEngine installation tools. #")
OUT_MSG("# Install, Uninstall, Check the version, #")
OUT_MSG("# Environment settings, etc. #")
OUT_MSG("# homepage: #")
OUT_MSG("# http://kbengine.org #")
OUT_MSG("# sources: #")
OUT_MSG("# https://github.com/kbengine/kbengine/*/kbe/tools/server/install #")
OUT_MSG("###########################################################################")
OUT_MSG("")
def help():
OUT_MSG("")
OUT_MSG("Usage:")
OUT_MSG("\tpython installer.py [command]")
OUT_MSG("")
OUT_MSG("install:")
OUT_MSG("\tInstall KBEngine.\n")
OUT_MSG("\tinstaller.py install: Install development environment (dependent, environment variables, etc.), From the KBE_ROOT search.")
OUT_MSG("\tinstaller.py install=localsrc : Install from local-disk(Source code), From the KBE_ROOT search.")
OUT_MSG("\tinstaller.py install=remotesrc : Install from github(Source code).")
OUT_MSG("\tinstaller.py install=remotebin : Install from sourceforge(Binary releases).")
OUT_MSG("\tinstaller.py install={xxx.zip, xxx.tar.gz} : Install .zip/.tar.gz file.")
OUT_MSG("")
OUT_MSG("uninstall:")
OUT_MSG("\tUninstall KBEngine.")
OUT_MSG("")
OUT_MSG("update:")
OUT_MSG("\tUpdate to the latest version(KBEngine).")
OUT_MSG("")
OUT_MSG("version:")
OUT_MSG("\tGet the KBEngine current version.")
OUT_MSG("")
OUT_MSG("evn:")
OUT_MSG("\tThe output of the KBEngine environment.")
OUT_MSG("")
OUT_MSG("resetevn:")
OUT_MSG("\tReset the KBEngine environment.")
OUT_MSG("")
OUT_MSG("help:")
OUT_MSG("\tList all of the command descriptions.")
OUT_MSG("--------------------------------------------------")
def OUT_MSG(msg):
try:
print(msg)
except UnicodeEncodeError:
if sys.hexversion >= 0x03000000:
print(msg.encode('utf8').decode(sys.stdout.encoding))
else:
print(msg.encode('utf8'))
def INFO_MSG(msg):
print(msg)
def ERROR_MSG(msg):
print('ERROR: ' + msg)
def WARNING_MSG(msg):
    print('WARNING: ' + msg)
def getInput(s):
if sys.hexversion >= 0x03000000:
return input(s)
return raw_input(s)
def echoKBEEnvironment():
global KBE_ROOT
global KBE_RES_PATH
global KBE_BIN_PATH
global KBE_UID
KBE_ROOT = getEnvironment('user', 'KBE_ROOT')
KBE_RES_PATH = getEnvironment('user', 'KBE_RES_PATH')
KBE_BIN_PATH = getEnvironment('user', 'KBE_BIN_PATH')
_checkKBEEnvironment(False)
OUT_MSG("KBE_ROOT=" + KBE_ROOT)
OUT_MSG("KBE_RES_PATH=" + KBE_RES_PATH)
OUT_MSG("KBE_BIN_PATH=" + KBE_BIN_PATH)
OUT_MSG("kbe_core_res_path=%s" % kbe_res_path)
def findKBEngine(dir):
if len(_zip_kbengine_dirname) > 0:
return dir + "/" + _zip_kbengine_dirname + "/"
if dir[-1] != '/' and dir[-1] != '\\':
dir += '/'
paths = []
for x in os.listdir(dir):
if "kbengine" in x:
if os.path.isfile(dir + x + "/kbe/res/server/kbengine_defs.xml"):
paths.append(dir + x + "/")
return paths
def find_file_by_pattern(pattern = '.*', base = ".", circle = True):
if base == ".":
base = os.getcwd()
final_file_list = []
cur_list = os.listdir(base)
for item in cur_list:
full_path = os.path.join(base, item)
if os.path.isfile(full_path):
if full_path.endswith(pattern):
final_file_list.append(full_path)
else:
if (True == circle):
final_file_list += find_file_by_pattern(pattern, full_path, circle)
return final_file_list
def resetKBEEnvironment():
global KBE_ROOT
global KBE_RES_PATH
global KBE_BIN_PATH
global KBE_UID
KBE_ROOT = getEnvironment('user', 'KBE_ROOT')
KBE_RES_PATH = getEnvironment('user', 'KBE_RES_PATH')
KBE_BIN_PATH = getEnvironment('user', 'KBE_BIN_PATH')
KBE_UID = getEnvironment('user', 'UID')
# if root environment configuration is not found, it checks if it is currently in the KBEngine directory,
# if in the KBEngine directory, then we automatically set the environment
x_KBE_ROOT = KBE_ROOT
x_KBE_RES_PATH = KBE_RES_PATH
x_KBE_BIN_PATH = KBE_BIN_PATH
x_KBE_UID = KBE_UID
if len(KBE_ROOT) == 0:
curr = os.getcwd()
curr = curr.replace("\\", "/")
if "kbe/tools/server/install" in curr:
curr = curr.replace("kbe/tools/server/install", "").replace("//", "/")
x_KBE_ROOT = curr
if x_KBE_ROOT[-1] != "/":
x_KBE_ROOT += "/"
else:
ret = findKBEngine(os.getcwd())
if len(ret) > 0:
x_KBE_ROOT = ret[0]
x_KBE_ROOT = x_KBE_ROOT.replace("\\", "/").replace("//", "/")
if platform.system() == 'Windows':
x_KBE_RES_PATH = "%KBE_ROOT%/kbe/res/;%KBE_ROOT%/assets/;%KBE_ROOT%/assets/scripts/;%KBE_ROOT%/assets/res/"
else:
x_KBE_RES_PATH = "$KBE_ROOT/kbe/res/:$KBE_ROOT/assets/:$KBE_ROOT/assets/scripts/:$KBE_ROOT/assets/res/"
        # 32bit and 64bit builds currently share the same server bin directory.
        x_KBE_BIN_PATH = "%KBE_ROOT%/kbe/bin/server/"
if platform.system() != 'Windows':
x_KBE_BIN_PATH = x_KBE_BIN_PATH.replace("%KBE_ROOT%", "$KBE_ROOT")
x_KBE_BIN_PATH = x_KBE_BIN_PATH.replace("\\", "/").replace("//", "/")
if len(KBE_UID) == 0:
x_KBE_UID = str(random.randint(1, 65535))
while True:
INFO_MSG("\nKBE_ROOT current: %s" % (KBE_ROOT))
KBE_ROOT = getInput('reset KBE_ROOT(No input is [%s]):' % (x_KBE_ROOT)).strip()
if len(KBE_ROOT) == 0:
if len(x_KBE_ROOT) == 0:
INFO_MSG('KBE_ROOT: no change!')
else:
KBE_ROOT = x_KBE_ROOT
INFO_MSG("\nKBE_RES_PATH current: %s" % (x_KBE_RES_PATH))
KBE_RES_PATH = getInput('reset KBE_RES_PATH(No input is [%s]):' % (x_KBE_RES_PATH)).strip()
if len(KBE_RES_PATH) == 0:
if len(x_KBE_RES_PATH) == 0:
INFO_MSG('KBE_RES_PATH: no change!')
else:
KBE_RES_PATH = x_KBE_RES_PATH
INFO_MSG("\nKBE_BIN_PATH current: %s" % (x_KBE_BIN_PATH))
KBE_BIN_PATH = getInput('reset KBE_BIN_PATH(No input is [%s]):' % (x_KBE_BIN_PATH)).strip()
if len(KBE_BIN_PATH) == 0:
if len(x_KBE_BIN_PATH) == 0:
INFO_MSG('KBE_BIN_PATH: no change!')
else:
KBE_BIN_PATH = x_KBE_BIN_PATH
INFO_MSG("\nKBE_UID current: %s" % (x_KBE_UID))
username = ""
if platform.system() == 'Windows':
KBE_UID = getInput('reset KBE_UID(No input is [%s]):' % (x_KBE_UID)).strip()
else:
# Linux needs to modify the system user ID
tmp = os_user_name
if len(tmp) == 0:
tmp = getpass.getuser()
username = getInput('os system-username(%s):' % tmp).strip()
if len(username) == 0:
username = tmp
KBE_UID = getInput('usermod -u [No input is %s] %s, Enter new uid:' % (KBE_UID, username)).strip()
if len(KBE_UID) == 0:
if len(x_KBE_UID) == 0:
INFO_MSG('KBE_UID: no change!')
else:
KBE_UID = x_KBE_UID
if len(KBE_ROOT) > 0:
setEnvironment('user', 'KBE_ROOT', KBE_ROOT)
if len(KBE_RES_PATH) > 0:
setEnvironment('user', 'KBE_RES_PATH', KBE_RES_PATH)
if len(KBE_BIN_PATH) > 0:
setEnvironment('user', 'KBE_BIN_PATH', KBE_BIN_PATH)
if len(KBE_UID) > 0:
if platform.system() == 'Windows':
setEnvironment('user', 'UID', KBE_UID)
else:
setEnvironment('user', 'UID', (KBE_UID, username))
if _checkKBEEnvironment(True):
break
INFO_MSG("\n---------------------------------------------")
    if getInput('Some problems were detected. If you are sure they can be ignored, skip: [yes|no]') == "yes":
return
echoKBEEnvironment()
def get_linux_ugid(username):
fileobj1 = open('/etc/passwd')
fileobj2 = open('/etc/group')
uid = None
gid = None
for line in fileobj1:
if line.startswith(username + ':'):
tmp = line.split(':')
uid = tmp[2]
for line in fileobj2:
if line.startswith(username + ':'):
tmp = line.split(':')
gid = tmp[2]
return (uid, gid)
def _checkKBEEnvironment(is_get_error):
global KBE_ROOT
global KBE_RES_PATH
global KBE_BIN_PATH
global kbe_res_path
KBE_ROOT = getEnvironment('user', 'KBE_ROOT')
KBE_RES_PATH = getEnvironment('user', 'KBE_RES_PATH')
KBE_BIN_PATH = getEnvironment('user', 'KBE_BIN_PATH')
kbe_path = KBE_ROOT + "/kbe"
kbe_path = kbe_path.replace("\\", "/").replace("//", "/")
if not os.path.isdir(kbe_path):
if is_get_error:
ERROR_MSG("KBE_ROOT: is error! The directory or file not found:\n%s" % (kbe_path))
return False
paths = []
checkKBERes = [
"server/kbengine_defs.xml",
"scripts",
]
checkKBEUserRes = [
"server/kbengine.xml",
"scripts/entities.xml",
]
KBE_RES_PATH = KBE_RES_PATH.replace("%KBE_ROOT%", KBE_ROOT)
KBE_RES_PATH = KBE_RES_PATH.replace("$KBE_ROOT", KBE_ROOT)
if ";" in KBE_RES_PATH:
paths = KBE_RES_PATH.split(";")
else:
paths = KBE_RES_PATH.split(":")
paths1 = list(paths)
paths = []
for p in paths1:
paths.append(os.path.expanduser(p))
for path in paths:
if not os.path.isdir(path):
if is_get_error:
ERROR_MSG("KBE_RES_PATH: is error! The directory or file not found:\n%s" % (path))
return False
KBE_BIN_PATH = os.path.expanduser(KBE_BIN_PATH.replace("%KBE_ROOT%", KBE_ROOT).replace("$KBE_ROOT", KBE_ROOT))
if not os.path.isdir(KBE_BIN_PATH):
if is_get_error:
WARING_MSG("KBE_BIN_PATH: is error! The directory or file not found:\n%s" % (KBE_BIN_PATH))
kbe_res_path = ""
for res in checkKBERes:
found = False
tmp = ""
for path in paths:
if path[-1] != '/' and path[-1] != '\\':
path += '/'
path1 = path + res
tmp += path1 + "\n"
if os.path.isdir(path1) or os.path.isfile(path1):
kbe_res_path = path
found = True
break
if not found:
if is_get_error:
ERROR_MSG("KBE_RES_PATH: is error! The directory or file not found:\n%s" % (tmp))
return False
for res in checkKBEUserRes:
found = False
tmp = ""
for path in paths:
if path[-1] != '/' and path[-1] != '\\':
path += '/'
path = path + res
tmp += path + "\n"
if os.path.isdir(path) or os.path.isfile(path):
found = True
break
if not found:
if is_get_error:
ERROR_MSG("KBE_RES_PATH: is error! The directory or file not found:\n%s" % (tmp))
return False
return True
def echoSystemEnvironment():
OUT_MSG("platform=" + platform.platform())
OUT_MSG("python_version=" + sys.version)
OUT_MSG("python_path=" + sys.executable)
OUT_MSG("currpath=" + os.getcwd())
def findLocalKBEVersion():
global KBE_ROOT
KBE_ROOT = getEnvironment('user', 'KBE_ROOT')
fpath = "../../../../HISTORY.md"
if len(KBE_ROOT) > 0:
fpath = KBE_ROOT + "/HISTORY.md"
try:
f = open(fpath)
for line in f.readlines():
if "#v" in line:
f.close()
return line.replace("#", "")
f.close()
except:
pass
return "unknown"
def echoKBEVersion():
INFO_MSG("version=" + findLocalKBEVersion())
if getInput("View the latest version of GitHub? [yes|no]") != "yes":
return
OUT_MSG("")
INFO_MSG("Check out the latest version...")
urls = get_sources_infos()
src_master_zip_url = urls[0]
src_zip_url = urls[1]
src_tgz_url = urls[2]
release_title = urls[3]
descrs = urls[4]
INFO_MSG("-------------------------")
INFO_MSG(release_title)
INFO_MSG(descrs)
def removeLinuxEnvironment(scope, name):
assert scope in ('user', 'system')
files = []
if os.geteuid() == 0:
        if len(os_user_name) > 0:
            files.append("%s/.bashrc" % (pwd.getpwnam(os_user_name).pw_dir))
            files.append("%s/.bash_profile" % (pwd.getpwnam(os_user_name).pw_dir))
    else:
        files.extend(["~/.bashrc", "~/.bash_profile"])
for file in files:
bodys = []
f = open(os.path.expanduser(file))
#INFO_MSG("find %s: %s" % (file, name))
for x in f.readlines():
if name in x:
INFO_MSG("remove %s: %s" % (file, x))
continue
bodys.append(x)
f.close()
f = open(os.path.expanduser(file), "w")
f.writelines(bodys)
f.close()
if os.geteuid() != 0:
syscommand('bash -c \'source %s\'' % file, False)
def setEnvironment(scope, name, value):
assert scope in ('user', 'system')
#INFO_MSG('set environment: name=%s, value=%s' % (name, value))
if platform.system() == 'Windows':
root, subkey = getWindowsEnvironmentKey(scope)
# Note: for 'system' scope, you must run this as Administrator
key = winreg.OpenKey(root, subkey, 0, winreg.KEY_ALL_ACCESS)
winreg.SetValueEx(key, name, 0, winreg.REG_EXPAND_SZ, value)
winreg.CloseKey(key)
else:
if name.lower() == 'uid':
uid, username = value
if uid != str(os.geteuid()):
                ret, cret = syscommand('bash -c \'usermod -d %s -u %s %s\'' % (pwd.getpwnam(username).pw_dir, uid, username), True)
INFO_MSG(ret)
INFO_MSG(cret)
return
userhome = "~"
if len(os_user_name) > 0:
userhome = pwd.getpwnam(os_user_name).pw_dir
f = open('%s/.bashrc' % userhome, 'a')
f.write("export %s=%s\n\n" % (name, value))
f.close()
if os.geteuid() > 0:
syscommand('bash -c \'source %s/.bashrc\'' % userhome, False)
def getWindowsEnvironmentKey(scope):
assert scope in ('user', 'system')
root = winreg.HKEY_CURRENT_USER
subkey = 'Environment'
if scope != 'user':
root = winreg.HKEY_LOCAL_MACHINE
subkey = r'SYSTEM\CurrentControlSet\Control\Session Manager\Environment'
return (root, subkey)
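# Usage sketch (hypothetical values): setEnvironment/getEnvironment pair up
# through the registry on Windows and through ~/.bashrc on Linux, e.g.:
#     setEnvironment('user', 'KBE_ROOT', '/opt/kbengine/')
#     root = getEnvironment('user', 'KBE_ROOT')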
def removeEnvironment(scope, name):
assert scope in ('user', 'system')
if platform.system() == 'Windows':
root, subkey = getWindowsEnvironmentKey(scope)
key = winreg.OpenKey(root, subkey, 0, winreg.KEY_ALL_ACCESS)
try:
winreg.DeleteValue(key, name)
except WindowsError:
pass
else:
removeLinuxEnvironment(scope, name)
def removeKBEEnvironment():
INFO_MSG("Remove the KBEngine-environment variables.")
global KBE_ROOT
global KBE_RES_PATH
global KBE_BIN_PATH
global KBE_UID
    global INSTALLER_ENV_NAME
    removeEnvironment("user", "KBE_ROOT")
    removeEnvironment("user", "KBE_RES_PATH")
    removeEnvironment("user", "KBE_BIN_PATH")
    # The variable is stored as 'UID' (see setEnvironment/getEnvironment).
    removeEnvironment("user", "UID")
    removeEnvironment("user", INSTALLER_ENV_NAME)
KBE_ROOT = ""
KBE_RES_PATH = ""
KBE_BIN_PATH = ""
KBE_UID = ""
INSTALLER_EVN_NAME = ""
def getEnvironment(scope, name):
assert scope in ('user', 'system')
value = ''
if platform.system() == 'Windows':
root, subkey = getWindowsEnvironmentKey(scope)
key = winreg.OpenKey(root, subkey, 0, winreg.KEY_READ)
try:
value, _ = winreg.QueryValueEx(key, name)
except WindowsError:
value = ''
else:
if name.lower() == 'uid':
return str(os.geteuid())
if len(os_user_name) > 0:
ret, cret = syscommand('su -l %s -c \'echo ${%s}\'' % (os_user_name, name), True)
if len(ret) > 0:
value = ret[0].strip()
else:
value = os.environ.get(name, "")
return value
def getMysqlConfig():
global mysql_root
cfg = "my.ini"
if platform.system() != 'Windows':
cfg = "my.cnf"
cnf = mysql_root + cfg
while True:
if not os.path.isfile(cnf):
if not os.path.isfile('/etc/' + cfg):
if not os.path.isfile(mysql_root + "my-default.ini"):
ERROR_MSG("not found mysqlconfig[%s]." % cnf)
if platform.system() == 'Windows':
cnf = getInput("Enter the mysqlconfig path(such as [c:/mysql/my.ini or my-default.ini]):")
else:
cnf = getInput("Enter the mysqlconfig path(such as [/etc/my.cnf]):")
else:
cnf = mysql_root + "my-default.ini"
else:
cnf = '/etc/' + cfg
else:
break
config = configparser.ConfigParser()
config.read(cnf)
return config, cnf
def installMysql():
if platform.system() != 'Windows':
INFO_MSG("You are Linux, please install MySQL manually!")
return False
file = 'mysql-win32.msi'
try:
os.remove(file)
except:
pass
file = download(bin_mysql_url, file)[0]
INFO_MSG("wait for install:" + file)
syscommand(file, False)
    while True:
        getInput("The MySQL service installation is complete? [yes|no]")
        if findMysqlService():
            break
        ERROR_MSG("- not found MySQL service.")
        syscommand(file, False)
os.remove(file)
return True
def restartMysql():
    global mysql_service_name
    INFO_MSG('Try to stop %s...' % mysql_service_name)
    if platform.system() == 'Windows':
        syscommand('net stop ' + mysql_service_name, False)
    else:
        syscommand('bash -c \'/etc/init.d/%s stop\'' % mysql_service_name, False)
    if findMysqlService():
        WARNING_MSG('Unable to stop MySQL, you need administrator privileges.')
    INFO_MSG('Try to start %s...' % mysql_service_name)
    if platform.system() == 'Windows':
        syscommand('net start ' + mysql_service_name, False)
    else:
        syscommand('bash -c \'/etc/init.d/%s start\'' % mysql_service_name, False)
    if not findMysqlService():
        WARNING_MSG('Unable to start MySQL, you need administrator privileges.')
    else:
        INFO_MSG('MySQL is ok')
def findMysqlService():
    global mysql_service_name
    ret = []
    cret = []
    if platform.system() == 'Windows':
        ret, cret = syscommand('net start', True)
    else:
        ret, cret = syscommand('bash -c \'service --status-all | grep \"mysql\"\'', True)
    for s in ret:
        if "mysql" in s.strip().lower():
            if platform.system() != 'Windows':
                if "run" not in s.strip().lower():
                    continue
            if len(mysql_service_name) == 0:
                if "mysqld" in s:
                    mysql_service_name = "mysqld"
                else:
                    mysql_service_name = "mysql"
            INFO_MSG("found mysql service[%s]" % mysql_service_name)
            return True
    return False
def syscommand(cmdstr, isGetRet):
filename = os.getcwd() + "/" + str(random.randint(0, 9999999)) + ".log"
if isGetRet:
cmdstr = "(" + cmdstr + ")"
cmdstr += " > \"" + filename + "\""
cret = subprocess.Popen(cmdstr, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT).stdout.readlines()
#if len(ret) > 0:
# os.remove(filename)
# return ret
ret = []
if not isGetRet:
return (ret, [],)
i = 0
while True:
if os.path.isfile(filename):
break
INFO_MSG("wating(%d) ..." % i)
i += 1
INFO_MSG(cmdstr)
time.sleep(0.1)
f = open(filename)
while f:
line = f.readline()
if not line:
break
ret.append(line)
f.close()
os.remove(filename)
return (ret, cret,)
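# Usage sketch (hypothetical command): with isGetRet=True the command's
# stdout is routed through a temporary log file and returned line by line:
#     lines, raw = syscommand('mysql --version', True)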
def modifyKBEConfig():
_checkKBEEnvironment(False)
global mysql_ip
global mysql_port
global mysql_kbe_name
global mysql_kbe_password
global mysql_kbe_db_name
global kbe_res_path
kbengine_defs = kbe_res_path + "server/kbengine_defs.xml"
INFO_MSG("Modified: %s" % kbengine_defs)
if not os.path.isfile(kbengine_defs):
ERROR_MSG("not found [%s], KBEngine is not installed?" % kbengine_defs)
ERROR_MSG("Please use the \'python installer.py --install=remotesrc\' or \'python installer.py --install=bin\'")
return False
if len(mysql_ip) == 0:
mysql_ip = "localhost"
if len(mysql_port) == 0:
mysql_port = "0"
if len(mysql_kbe_name) == 0:
mysql_kbe_name = "kbe"
if len(mysql_kbe_password) == 0:
mysql_kbe_password = "kbe"
if len(mysql_kbe_db_name) == 0:
mysql_kbe_db_name = "kbe"
state = 0
f = None
try:
f = open(kbengine_defs, encoding='UTF-8')
except:
f = open(kbengine_defs)
newxml = []
for x in f.readlines():
if "</dbmgr>" in x:
state = -1
if state == 0:
if "<dbmgr>" in x:
state += 1
if state == 1:
if "<host>" in x and "localhost" in x:
x = x.replace("localhost", mysql_ip)
if "<port>" in x and "0" in x:
x = x.replace("0", mysql_port)
if "<username>" in x and "kbe" in x:
x = x.replace("kbe", mysql_kbe_name)
if "<password>" in x and "kbe" in x:
x = x.replace("kbe", mysql_kbe_password)
if "<databaseName>" in x and "kbe" in x:
x = x.replace("kbe", mysql_kbe_db_name)
newxml.append(x)
f.close()
try:
f = open(kbengine_defs, "w", encoding='UTF-8')
except:
f = open(kbengine_defs, "w")
f.writelines(newxml)
f.close()
return True
def createDatabase():
global mysql_home
mysql_home = getEnvironment('user', 'MYSQL_HOME')
mysql_home = mysql_home.replace("\\", "/")
global mysql_root
global mysql_verinfo
global mysql_ip
global mysql_port
global mysql_root_password
global mysql_kbe_name
global mysql_kbe_password
global mysql_kbe_db_name
lower_case_table_names = ''
def getRootOpt(rootpasswd):
if len(rootpasswd) == 0:
return "-uroot "
return "-uroot -p" + rootpasswd
rootusePortArgs = ""
mysql_ip = "localhost"
mysql_port = "3306"
while True:
if len(mysql_home) > 0:
if mysql_home[-1] != '\\' and mysql_home[-1] != '/':
mysql_home += "/"
ret, cret = syscommand("\"" + mysql_home + "mysql\" --help", True)
if len(ret) == 0:
if platform.system() == 'Windows':
#binpath = find_file_by_pattern("MySQL Command Line Client.lnk", "C:\\ProgramData\\Microsoft\\Windows\\Start Menu\\Programs\\", True)
#if len(binpath) > 0:
# binpath = binpath[0]
mysql_home = getInput("\Enter mysql.exe path(such as: C:\\MySQL Server 5.1\\bin\\):")
else:
tmp = ""
ret = subprocess.Popen("whereis mysql", shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT).stdout.readlines()[0].split()
if len(ret) > 0:
tmp = ret[1].replace("mysql", "")
if len(tmp) == 0:
mysql_home = getInput("\Enter mysql(The executable file) path(such as: /usr/bin/):")
continue
else:
setEnvironment('user', 'MYSQL_HOME', mysql_home)
        if len(mysql_root_password) == 0:
            mysql_root_password = getInput("- Enter mysql-root password (just press Enter if there is none):")
sql = "\"select VERSION();show variables like \'port\';show variables like \'lower_case_table_names\';select @@basedir as basePath from dual\""
cmd = "\"" + mysql_home + ("mysql\" %s%s -hlocalhost -e" % (getRootOpt(mysql_root_password), rootusePortArgs)) + sql
ret, cret = syscommand(cmd, True)
if len(ret) == 0:
mysql_root_password = ""
ERROR_MSG("The password or port error! \n\n\terrorinfos - %s\n\n\tcommand - %s\n\n" % (cret, cmd))
if str(cret).find("2003") > 0 and str(cret).find("10061") > 0:
mysql_port = getInput("Please enter the MySQL port number:")
rootusePortArgs = " -P" + mysql_port
continue
else:
mysql_verinfo = ret[1].strip()
INFO_MSG("MySQL_Version:" + mysql_verinfo)
mysql_port = ret[3].replace('port', '').strip()
INFO_MSG("MySQL_Port:" + mysql_port)
lower_case_table_names = ret[5].replace('lower_case_table_names', '').strip()
mysql_root = ret[7].strip()
if lower_case_table_names != '0':
                ERROR_MSG('mysql lower_case_table_names is not 0')
config, cnf = getMysqlConfig()
INFO_MSG('Attempt to modify the [%s]...' % cnf)
config.set('mysqld', 'lower_case_table_names', '0')
config.write(open(cnf, "w"))
                restartMysql()
continue
sql = "\"delete from user where user=\'\';FLUSH PRIVILEGES\""
cmd = "\"" + mysql_home + ("mysql\" %s%s -hlocalhost -e" % (getRootOpt(mysql_root_password), rootusePortArgs)) + sql + " mysql"
syscommand(cmd, False)
if len(mysql_kbe_name) == 0:
OUT_MSG('')
INFO_MSG("create kbe mysql-account:")
mysql_kbe_name = getInput("- username(Do not enter the default is \'kbe\')): ")
if len(mysql_kbe_name) == 0:
mysql_kbe_name = "kbe"
mysql_kbe_password = getInput("- password(Do not enter the default is \'kbe\')): ")
if len(mysql_kbe_password) == 0:
mysql_kbe_password = "kbe"
INFO_MSG('Create kbe-account: name=%s, password=%s successfully!' % (mysql_kbe_name, mysql_kbe_password))
if len(mysql_kbe_db_name) == 0:
OUT_MSG('')
mysql_kbe_db_name = getInput("Create game database(Do not enter the default is \'kbe\'):")
if len(mysql_kbe_db_name) == 0:
mysql_kbe_db_name = "kbe"
sql = "\"grant all privileges on *.* to %s@\'%%\' identified by \'%s\';grant select,insert,update,delete,create,drop on *.* to %s@\'%%\' identified by \'%s\';FLUSH PRIVILEGES\"" % (mysql_kbe_name, mysql_kbe_password, mysql_kbe_name, mysql_kbe_password)
cmd = "\"" + mysql_home + ("mysql\" %s%s -hlocalhost -e" % (getRootOpt(mysql_root_password), rootusePortArgs)) + sql + " mysql"
syscommand(cmd, False)
sql = "\"delete from user where user=\'\';FLUSH PRIVILEGES\""
cmd = "\"" + mysql_home + ("mysql\" %s%s -hlocalhost -e" % (getRootOpt(mysql_root_password), rootusePortArgs)) + sql + " mysql"
syscommand(cmd, False)
        # If the database already exists, report an error
has_db_sql = "\"SELECT * FROM information_schema.SCHEMATA where SCHEMA_NAME=\'%s\'\"" % (mysql_kbe_db_name)
cmd = "\"" + mysql_home + ("mysql\" -u%s -p%s -hlocalhost -P%s -e" % (mysql_kbe_name, mysql_kbe_password, mysql_port)) + has_db_sql
ret, cret = syscommand(cmd, True)
if len(ret) > 0:
ERROR_MSG("database[%s] has exist!" % (mysql_kbe_db_name))
mysql_kbe_db_name = "";
mysql_kbe_db_name = "";
mysql_kbe_name = ''
continue
        # Create the game database
sql = "\"create database %s\"" % (mysql_kbe_db_name)
cmd = "\"" + mysql_home + ("mysql\" -u%s -p%s -hlocalhost -P%s -e" % (mysql_kbe_name, mysql_kbe_password, mysql_port)) + sql
syscommand(cmd, False)
        # Check again whether the database was created; a non-empty result means success, otherwise ask again
cmd = "\"" + mysql_home + ("mysql\" -u%s -p%s -hlocalhost -P%s -e" % (mysql_kbe_name, mysql_kbe_password, mysql_port)) + has_db_sql
ret, cret = syscommand(cmd, True)
if len(ret) == 0:
ERROR_MSG("database is error! %s" % (cret))
mysql_kbe_db_name = "";
mysql_kbe_db_name = "";
mysql_kbe_name = ''
continue
else:
INFO_MSG("create database(%s) is successfully!" % mysql_kbe_db_name)
break
return True
def checkMysql():
global mysql_ip
global mysql_port
global mysql_kbe_name
global mysql_kbe_password
global mysql_kbe_db_name
ret = getInput("- MySQL is installed on the remote machine?[yes/no]")
if ret == 'yes':
while True:
if len(mysql_ip) == 0:
mysql_ip = getInput("- Enter mysql ip-address:")
continue
if len(mysql_port) == 0:
mysql_port = getInput("- Enter mysql ip-port:")
continue
if len(mysql_kbe_name) == 0:
mysql_kbe_name = getInput("- Enter mysql-account:")
continue
if len(mysql_kbe_password) == 0:
mysql_kbe_password = getInput("- Enter mysql-password:")
continue
if len(mysql_kbe_db_name) == 0:
mysql_kbe_db_name = getInput("- Enter mysql-databaseName:")
continue
break
return True
itry = 0
if platform.system() == 'Windows':
syscommand('net start mysql', False)
else:
syscommand('bash -c \'/etc/init.d/mysql start\'', False)
syscommand('bash -c \'/etc/init.d/mysqld start\'', False)
found = findMysqlService()
INFO_MSG("MySQL is installed on the local.")
INFO_MSG("- check mysql service...")
    restartMysql()
manual_installation = False
while True:
if not found:
found = findMysqlService()
if not found:
INFO_MSG("")
ERROR_MSG("- not found MySQL service.")
if itry == 1:
return False
ret = getInput("- Allow automatic installation of MySQL? [yes/no]")
if ret != 'yes':
if not manual_installation:
if getInput("The MySQL service installation is complete? [yes|no]") != "no":
manual_installation = True
continue
return False
else:
if not installMysql():
ERROR_MSG("install mysql is failed!")
return False
else:
itry += 1
else:
break
createDatabase()
return found
def checkGit():
pass
def checkKBEEnvironment():
if not _checkKBEEnvironment(True):
resetKBEEnvironment()
return True
def checkDeps():
    setEnvironment('user', INSTALLER_ENV_NAME, os.getcwd())
deps = {
"kbe_environment": checkKBEEnvironment,
"mysql" : checkMysql,
# "git" : checkGit
}
OUT_MSG("")
INFO_MSG("Check the dependences:")
for dep in deps:
INFO_MSG("- %s: checking..." % dep)
ret = deps[dep]()
if ret:
INFO_MSG("- %s: yes" % dep)
else:
ERROR_MSG("- %s: no" % dep)
return False
return True
def get_sources_infos():
try:
response = urllib.request.urlopen(source_url)
except:
response = urllib.urlopen(source_url)
html = response.read().decode("utf8")
ziplist = re.compile("""=\"[a-zA-Z0-9//\/\.?]+.zip""").findall(html)
tgzlist = re.compile("""=\"[a-zA-Z0-9//\/\.?]+.gz""").findall(html)
src_master_zip_url = ziplist[0].replace("=\"", "https://github.com")
src_zip_url = ziplist[1].replace("=\"", "https://github.com")
src_tgz_url = tgzlist[0].replace("=\"", "https://github.com")
# title
tag_start = """<h1 class="release-title">"""
tag_end = """</h1>"""
release_title = html
release_title = release_title[release_title.find(tag_start) + len(tag_start):]
release_title = release_title[:release_title.find(tag_end)]
release_title = re.compile("""\<a(?:\\s+.+?)*?\\s+href=\"(.*?\"\>)(.*?)\<\/a\>""").findall(release_title)
release_title = release_title[0][1]
# descriptions
tag_start = """<div class="markdown-body">"""
tag_end = """</div>"""
descrs = html
descrs = descrs[descrs.find(tag_start) + len(tag_start):]
descrs = descrs[:descrs.find(tag_end)]
descrs = descrs.replace("\n", "")
descrs = descrs.replace("<p>", "\t- ")
descrs = descrs.replace("</p>", "\n")
descrs = descrs.replace("<ul class=\"task-list\">", "")
descrs = descrs.replace("<li>", "\t- ")
descrs = descrs.replace("</li>", "\n")
descrs = descrs.replace("</ul>", "")
    descrs = descrs.strip()
# downloads
#print("\ndownloads:")
#print("found:" + src_zip_url)
#print("found:" + src_tgz_url)
return (src_master_zip_url, src_zip_url, src_tgz_url, release_title, descrs)
def download_hookreport(count, block_size, total_size):
s = ""
if total_size <= 0:
s = '\rdownloading : %.2fMB' % (count * block_size / 1024 / 1024.0)
else:
s = '\rdownloading : %d/%d (%02d%%)' % (count * block_size, total_size, 100.0 * count * block_size / total_size)
sys.stdout.write(s)
sys.stdout.flush()
def download(currurl, fname = None):
OUT_MSG("")
INFO_MSG("Downloading from " + currurl)
try:
return urllib.urlretrieve(currurl, filename = fname, reporthook = download_hookreport)
except:
pass
return urllib.request.urlretrieve(currurl, filename = fname, reporthook = download_hookreport)
def getSystemUser():
global os_user_name
global os_user_passwd
if len(os_user_name) > 0:
return
if platform.system() == 'Windows':
return
os_user_name = getInput("Please enter the KBE system account name(No input is kbe):")
if len(os_user_name) == 0:
os_user_name = "kbe"
hasuser = ""
try:
hasuser = pwd.getpwnam(os_user_name)
except:
pass
if len(hasuser) == 0:
if getInput("not found system-user[%s], create new user?: [yes|no]" % (os_user_name)) == "yes":
os_user_passwd = getInput("Please enter the KBE system account passwd(No input is kbe):")
if len(os_user_passwd) == 0:
os_user_passwd = "kbe"
syscommand('bash -c \'useradd %s -p%s\'' % (os_user_name, os_user_passwd), False)
syscommand('bash -c \'echo \'%s:%s\' | chpasswd\'' % (os_user_name, os_user_passwd), False)
def getInstallPath():
global KBE_ROOT
global _install_path
_install_path = ""
global _zip_kbengine_dirname
KBE_ROOT = getEnvironment('user', 'KBE_ROOT')
if _checkKBEEnvironment(False):
INFO_MSG("Already installed KBEngine, KBE_ROOT=[%s].\n" % (KBE_ROOT))
if getInput("Want to install to [%s]?[yes|no]" % (KBE_ROOT)) == "yes":
_install_path = ""
return
while True:
if os.path.isdir(_install_path):
break
if len(os_user_name) == 0:
_install_path = getInput("Please enter the installation path:")
else:
_install_path = getInput("Please enter the installation path(No inout is %s):" % (pwd.getpwnam(os_user_name).pw_dir))
if len(_install_path) == 0:
_install_path = pwd.getpwnam(os_user_name).pw_dir
_install_path = _install_path + "/" + _zip_kbengine_dirname + "/"
if os.path.isdir(_install_path):
if getInput("Coverage of this directory? [yes|no]") == "yes":
break
ERROR_MSG("%s has exist!" % _install_path)
_install_path = ""
continue
break
if not os.path.isdir(_install_path):
try:
os.mkdir(_install_path)
except:
ERROR_MSG("path[%s] is error!" % _install_path)
_install_path = ""
getInstallPath()
def copyFilesTo(root_src_dir, root_dst_dir):
count = 0
total_count = sum([len(files) for root, dirs, files in os.walk(root_src_dir)])
for src_dir, dirs, files in os.walk(root_src_dir):
dst_dir = src_dir.replace(root_src_dir, root_dst_dir)
if not os.path.exists(dst_dir):
os.mkdir(dst_dir)
for file_ in files:
src_file = os.path.join(src_dir, file_)
dst_file = os.path.join(dst_dir, file_)
if os.path.exists(dst_file):
os.remove(dst_file)
shutil.move(src_file, dst_dir)
count += 1
s = "\rmoved: %d/%d (%d%%)" % (count, total_count, (count / float(total_count)) * 100)
sys.stdout.write(s)
sys.stdout.flush()
INFO_MSG("")
def copy_new_to_kbengine_dir(checksources = True):
global _install_path
global KBE_ROOT
global KBE_RES_PATH
global KBE_BIN_PATH
global KBE_UID
global kbe_res_path
global _zip_kbengine_path
global _zip_kbengine_dirname
currkbedir = _zip_kbengine_path + "/" + _zip_kbengine_dirname
if len(_install_path) > 0:
KBE_ROOT = _install_path
if platform.system() == 'Windows':
KBE_RES_PATH = "%KBE_ROOT%kbe/res/;%KBE_ROOT%assets/;%KBE_ROOT%/assets/scripts/;%KBE_ROOT%assets/res/"
            # 32bit and 64bit builds share the same server bin directory.
            KBE_BIN_PATH = "%KBE_ROOT%kbe/bin/server/"
else:
KBE_RES_PATH = "$KBE_ROOT/kbe/res/:$KBE_ROOT/assets/:$KBE_ROOT/assets/scripts/:$KBE_ROOT/assets/res/"
            KBE_BIN_PATH = "$KBE_ROOT/kbe/bin/server/"
setEnvironment('user', 'KBE_ROOT', KBE_ROOT)
setEnvironment('user', 'KBE_RES_PATH', KBE_RES_PATH)
setEnvironment('user', 'KBE_BIN_PATH', KBE_BIN_PATH)
INFO_MSG("KBE_ROOT = %s" % KBE_ROOT)
INFO_MSG("KBE_RES_PATH = %s" % KBE_RES_PATH)
INFO_MSG("KBE_BIN_PATH = %s" % KBE_BIN_PATH)
INFO_MSG("\n\nInstalling KBEngine...")
INFO_MSG("moving %s to %s..." % (currkbedir, _install_path))
copyFilesTo(currkbedir, _install_path)
if platform.system() != 'Windows':
syscommand('bash -c \'chmod -R 755 %s\'' % (KBE_ROOT), True)
return
KBE_ROOT = getEnvironment('user', 'KBE_ROOT')
KBE_RES_PATH = getEnvironment('user', 'KBE_RES_PATH')
KBE_BIN_PATH = getEnvironment('user', 'KBE_BIN_PATH')
currkbedir = currkbedir.replace("\\", "/").replace("//", "/")
KBE_ROOT = KBE_ROOT.replace("\\", "/").replace("//", "/")
if currkbedir == KBE_ROOT:
WARNING_MSG("currkbedir[%s] == KBE_ROOT[%s]" % (currkbedir, KBE_ROOT))
return
INFO_MSG("\n\nInstalling KBEngine[%s]..." % KBE_ROOT)
kbe_res_path1 = kbe_res_path.replace("%KBE_ROOT%", KBE_ROOT).replace("$KBE_ROOT", KBE_ROOT)
if len(kbe_res_path1) == 0:
kbe_res_path1 = KBE_ROOT + "/kbe/res"
if os.path.isdir(KBE_ROOT):
if os.path.isdir(kbe_res_path1):
            if getInput('Found existing directory(%s), replace it? [yes|no]' % kbe_res_path1) == "yes":
# shutil.rmtree(kbe_res_path1)
pass
kbe_tools_path = KBE_ROOT + "/kbe/tools"
kbe_tools_path = kbe_tools_path.replace("\\", "/").replace("//", "/")
if os.path.isdir(kbe_tools_path):
        if getInput('Found existing directory(%s), replace it? [yes|no]' % kbe_tools_path) == "yes":
# shutil.rmtree(kbe_tools_path)
pass
if checksources:
srcpath = KBE_ROOT + "/kbe/src"
srcpath = srcpath.replace("\\", "/").replace("//", "/")
if os.path.isdir(srcpath):
            if getInput('Found existing directory(%s), replace it? [yes|no]' % (srcpath)) == "yes":
# shutil.rmtree(srcpath)
pass
copyFilesTo(currkbedir + "/kbe/src", srcpath)
else:
binpath = KBE_ROOT + "/kbe/bin"
binpath = binpath.replace("\\", "/").replace("//", "/")
if os.path.isdir(binpath):
            if getInput('Found existing directory(%s), replace it? [yes|no]' % (binpath)) == "yes":
# shutil.rmtree(binpath)
pass
copyFilesTo(currkbedir + "/kbe/bin", binpath)
copyFilesTo(currkbedir + "/kbe/tools", kbe_tools_path)
copyFilesTo(currkbedir + "/kbe/res", kbe_res_path1)
if platform.system() != 'Windows':
syscommand('bash -c \'chmod -R 755 %s\'' % (KBE_ROOT), True)
def download_sources(release = True):
global _zip_kbengine_dirname
_zip_kbengine_dirname = ""
global _zip_kbengine_path
_zip_kbengine_path = ""
OUT_MSG("")
INFO_MSG("Getting the latest source code...")
urls = get_sources_infos()
src_master_zip_url = urls[0]
src_zip_url = urls[1]
src_tgz_url = urls[2]
release_title = urls[3]
descrs = urls[4]
currurl = src_zip_url
# If release is False, download the GIT version of master
if not release:
currurl = src_master_zip_url
INFO_MSG("")
INFO_MSG(release_title)
INFO_MSG(descrs)
file = download(currurl)[0]
_zip_kbengine_path = tempfile.mkdtemp("_kbengine")
namelist = extract_file(file, _zip_kbengine_path)
os.remove(file)
for n in namelist[0].replace("\\", "/").split("/"):
if "kbengine" in n:
_zip_kbengine_dirname = n
break
def download_binary():
global _zip_kbengine_dirname
_zip_kbengine_dirname = ""
global _zip_kbengine_path
_zip_kbengine_path = ""
OUT_MSG("")
INFO_MSG("Getting the latest KBEngine...")
file = download(bin_zip_url)[0]
_zip_kbengine_path = tempfile.mkdtemp("_kbengine")
namelist = extract_file(file, _zip_kbengine_path)
os.remove(file)
for n in namelist[0].replace("\\", "/").split("/"):
if "kbengine" in n:
_zip_kbengine_dirname = n
break
def getRealUrl(url):
parsedurl = urlparse(url)
httpConn = http.client.HTTPConnection(parsedurl[1])
httpConn.request('GET', parsedurl[2])
response = httpConn.getresponse()
if response.status != 200:
return getRealUrl(response.getheader('Location'))
return url
def getCompressedFileRootDir(src_file):
f = None
if ".tar" in src_file:
f = tarfile.open(src_file)
namelist = f.getnames()
else:
f = zipfile.ZipFile(src_file, 'r')
namelist = f.namelist()
f.close()
return namelist[0]
def extract_file(src_file, extractPath = "./"):
OUT_MSG("")
f = None
total_count = 0
if ".tar" in src_file:
f = tarfile.open(src_file)
namelist = f.getnames()
INFO_MSG("untgz(%s)..." % (src_file))
else:
f = zipfile.ZipFile(src_file, 'r')
namelist = f.namelist()
INFO_MSG("unzip(%s)..." % (src_file))
total_count = len(namelist)
count = 0
for file in namelist:
f.extract(file, extractPath)
count += 1
s = "\rextract: %d/%d (%d%%)" % (count, total_count, (count / float(total_count)) * 100)
sys.stdout.write(s)
sys.stdout.flush()
f.close()
OUT_MSG("")
INFO_MSG("unzip(%s) is completed(%d)!\n\n" % (src_file, len(namelist)))
return namelist
def normalinstall():
getSystemUser()
if checkDeps() and modifyKBEConfig():
INFO_MSG("KBEngine has been successfully installed!")
else:
ERROR_MSG("KBEngine installation failed!")
def installclean():
global _zip_kbengine_path
if len(_zip_kbengine_path) > 0:
INFO_MSG("Cleanup temporary files...")
shutil.rmtree(_zip_kbengine_path)
def sourceinstall():
download_sources()
getSystemUser()
getInstallPath()
copy_new_to_kbengine_dir(True)
installclean()
normalinstall()
def binaryinstall():
download_binary()
getSystemUser()
getInstallPath()
copy_new_to_kbengine_dir(False)
installclean()
normalinstall()
def localfileinstall(file):
global _zip_kbengine_dirname
_zip_kbengine_dirname = ""
global _zip_kbengine_path
_zip_kbengine_path = ""
getSystemUser()
_zip_kbengine_dirname = getCompressedFileRootDir(file)
getInstallPath()
_zip_kbengine_path = tempfile.mkdtemp("_kbengine")
namelist = extract_file(file, _zip_kbengine_path)
for n in namelist[0].replace("\\", "/").split("/"):
if "kbengine" in n:
_zip_kbengine_dirname = n
break
copy_new_to_kbengine_dir(False)
installclean()
normalinstall()
def uninstall():
global KBE_ROOT
INFO_MSG("Uninstall KBEngine ...")
if len(KBE_ROOT) > 0:
        if getInput('Warning: Folder[%s] will be deleted: [deleteKBEngine|no]?' % (KBE_ROOT)) == "deleteKBEngine":
shutil.rmtree(KBE_ROOT)
removeKBEEnvironment()
INFO_MSG("Uninstall KBEngine completed!")
def update():
INFO_MSG("https://github.com/kbengine/kbengine/releases/latest")
sourceinstall()
def processCommand():
const_args = {
'update' : update,
'uninstall' : uninstall,
'version' : echoKBEVersion,
'evn' : echoKBEEnvironment,
'resetevn' : resetKBEEnvironment,
'help' : help
}
if len(sys.argv[1:]) == 0:
hello()
help()
return
argv = sys.argv[1:][0]
func = const_args.get(argv)
if func:
func()
return
else:
if 'install' in argv:
if platform.system() != 'Windows':
if os.geteuid() != 0:
assert False and "You must use the root to installation!"
if argv == 'install':
normalinstall()
return
else:
if argv.startswith("install="):
argv = argv.replace("install=", "")
if argv == "remotesrc":
sourceinstall()
elif argv == "remotebin":
binaryinstall()
elif os.path.isfile(argv):
localfileinstall(argv)
else:
                        assert False, "unknown install option: %s" % argv
return
help()
if __name__ == "__main__":
processCommand()
|
thecodinghub/news-for-good
|
refs/heads/master
|
news/Lib/site-packages/pip/_vendor/html5lib/filters/lint.py
|
328
|
from __future__ import absolute_import, division, unicode_literals
from pip._vendor.six import text_type
from . import base
from ..constants import namespaces, voidElements
from ..constants import spaceCharacters
spaceCharacters = "".join(spaceCharacters)
class Filter(base.Filter):
def __init__(self, source, require_matching_tags=True):
super(Filter, self).__init__(source)
self.require_matching_tags = require_matching_tags
def __iter__(self):
open_elements = []
for token in base.Filter.__iter__(self):
type = token["type"]
if type in ("StartTag", "EmptyTag"):
namespace = token["namespace"]
name = token["name"]
assert namespace is None or isinstance(namespace, text_type)
assert namespace != ""
assert isinstance(name, text_type)
assert name != ""
assert isinstance(token["data"], dict)
if (not namespace or namespace == namespaces["html"]) and name in voidElements:
assert type == "EmptyTag"
else:
assert type == "StartTag"
if type == "StartTag" and self.require_matching_tags:
open_elements.append((namespace, name))
for (namespace, name), value in token["data"].items():
assert namespace is None or isinstance(namespace, text_type)
assert namespace != ""
assert isinstance(name, text_type)
assert name != ""
assert isinstance(value, text_type)
elif type == "EndTag":
namespace = token["namespace"]
name = token["name"]
assert namespace is None or isinstance(namespace, text_type)
assert namespace != ""
assert isinstance(name, text_type)
assert name != ""
if (not namespace or namespace == namespaces["html"]) and name in voidElements:
assert False, "Void element reported as EndTag token: %(tag)s" % {"tag": name}
elif self.require_matching_tags:
start = open_elements.pop()
assert start == (namespace, name)
elif type == "Comment":
data = token["data"]
assert isinstance(data, text_type)
elif type in ("Characters", "SpaceCharacters"):
data = token["data"]
assert isinstance(data, text_type)
assert data != ""
if type == "SpaceCharacters":
assert data.strip(spaceCharacters) == ""
elif type == "Doctype":
name = token["name"]
assert name is None or isinstance(name, text_type)
assert token["publicId"] is None or isinstance(name, text_type)
assert token["systemId"] is None or isinstance(name, text_type)
elif type == "Entity":
assert isinstance(token["name"], text_type)
elif type == "SerializerError":
assert isinstance(token["data"], text_type)
else:
assert False, "Unknown token type: %(type)s" % {"type": type}
yield token
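if __name__ == "__main__": # pragma: no cover
    # Usage sketch (assumes the vendored html5lib package is importable):
    # this Filter validates every token and re-yields it unchanged, so it
    # can sit between a tree walker and a serializer.
    from pip._vendor import html5lib
    from pip._vendor.html5lib.serializer import HTMLSerializer
    dom = html5lib.parse("<p>hello</p>")
    walker = html5lib.getTreeWalker("etree")
    print(HTMLSerializer().render(Filter(walker(dom))))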
|
mindbender-studio/core
|
refs/heads/master
|
avalon/tools/views.py
|
2
|
from ..vendor.Qt import QtWidgets, QtCore
class DeselectableTreeView(QtWidgets.QTreeView):
"""A tree view that deselects on clicking on an empty area in the view"""
def mousePressEvent(self, event):
index = self.indexAt(event.pos())
if not index.isValid():
# clear the selection
self.clearSelection()
# clear the current index
self.setCurrentIndex(QtCore.QModelIndex())
QtWidgets.QTreeView.mousePressEvent(self, event)
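# Hedged usage sketch (editorial addition; comment-only because of the
# package-relative Qt import above): clicking an empty area below the last
# item clears both the selection and the current index. `some_model` stands
# in for any QAbstractItemModel.
#
#   view = DeselectableTreeView()
#   view.setModel(some_model)
#   view.show()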
|
atsiaras/transit_simulator
|
refs/heads/master
|
setup.py
|
1
|
from setuptools import setup
import codecs
import os
import glob
name = 'transit_simulator'
description = 'Graphic interface for transit visualisation'
url = 'https://github.com/atsiaras/transit_simulator'
install_requires = ['matplotlib', 'numpy', 'pylightcurve']
os.chdir(os.path.abspath(os.path.dirname(__file__)))
subdirs_to_include = []
for x in os.walk(name):
if os.path.isdir(x[0]):
if x[0] != name:
subdirs_to_include.append(x[0])
files_to_include = []
for x in glob.glob(os.path.join(name, '*')):
if x[-2:] != 'py':
files_to_include.append(os.path.join(name, os.path.split(x)[1]))
files_to_include.append('README.md')
files_to_include.append('LICENSE')
w = open('MANIFEST.in', 'w')
for i in subdirs_to_include:
w.write('include ' + os.path.join(i, '*') + ' \n')
for i in files_to_include:
w.write('include ' + i + ' \n')
w.close()
with codecs.open('README.md', encoding='utf-8') as f:
long_description = f.read()
version = ' '
for i in open(os.path.join(name, '__init__.py')):
if len(i.split('__version__')) > 1:
version = i.split()[-1][1:-1]
setup(
name=name,
version=version,
description=description,
long_description=long_description,
url=url,
author='Angelos Tsiaras',
author_email='aggelostsiaras@gmail.com',
license='MIT',
classifiers=['Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Science/Research',
'Topic :: Scientific/Engineering :: Astronomy',
'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
                 'Operating System :: MacOS :: MacOS X',
                 'Programming Language :: Python :: 3.6',
],
packages=[name],
install_requires=install_requires,
include_package_data=True,
zip_safe=False,
)
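# Editorial note: with the MANIFEST.in generated above, the package and its
# non-.py data files build and install in the usual way, e.g.:
#   python setup.py sdist
#   pip install .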
|
drammock/expyfun
|
refs/heads/fix-install-docs
|
expyfun/_sound_controllers/_rtmixer.py
|
2
|
"""python-rtmixer interface for sound output."""
# Authors: Eric Larson <larsoner@uw.edu>
#
# License: BSD (3-clause)
import atexit
import sys
import numpy as np
from rtmixer import Mixer, RingBuffer
import sounddevice
from .._utils import logger, get_config
_PRIORITY = 100
_DEFAULT_NAME = None
# only initialize each mixer once and reuse it until Python closes
_MIXER_REGISTRY = {}
def _get_mixer(fs, n_channels, api, name, api_options):
"""Select the API and device."""
# API
if api is None:
api = get_config('SOUND_CARD_API', None)
if api is None:
# Eventually we should maybe allow 'Windows WDM-KS',
# 'Windows DirectSound', or 'MME'
api = dict(
darwin='Core Audio',
win32='Windows WASAPI',
linux='ALSA',
linux2='ALSA',
)[sys.platform]
key = (fs, n_channels, api, name)
if key not in _MIXER_REGISTRY:
_MIXER_REGISTRY[key] = _init_mixer(fs, n_channels, api, name,
api_options)
return _MIXER_REGISTRY[key]
def _init_mixer(fs, n_channels, api, name, api_options=None):
devices = sounddevice.query_devices()
if len(devices) == 0:
raise OSError('No sound devices found!')
apis = sounddevice.query_hostapis()
for ai, this_api in enumerate(apis):
if this_api['name'] == api:
api = this_api
break
else:
raise RuntimeError('Could not find host API %s' % (api,))
del this_api
# Name
if name is None:
name = get_config('SOUND_CARD_NAME', None)
if name is None:
global _DEFAULT_NAME
if _DEFAULT_NAME is None:
di = api['default_output_device']
_DEFAULT_NAME = devices[di]['name']
logger.exp('Selected default sound device: %r' % (_DEFAULT_NAME,))
name = _DEFAULT_NAME
possible = list()
for di, device in enumerate(devices):
if device['hostapi'] == ai:
possible.append(device['name'])
if name in device['name']:
break
else:
raise RuntimeError('Could not find device on API %r with name '
'containing %r, found:\n%s'
% (api['name'], name, '\n'.join(possible)))
param_str = ('sound card %r (devices[%d]) via %r'
% (device['name'], di, api['name']))
extra_settings = None
if api_options is not None:
if api['name'] == 'Windows WASAPI':
# exclusive mode is needed for zero jitter on Windows in testing
extra_settings = sounddevice.WasapiSettings(**api_options)
else:
raise ValueError(
'api_options only supported for "Windows WASAPI" backend, '
'using %s backend got api_options=%s'
% (api['name'], api_options))
param_str += ' with options %s' % (api_options,)
param_str += ', %d channels' % (n_channels,)
if fs is not None:
param_str += ' @ %d Hz' % (fs,)
try:
mixer = Mixer(
samplerate=fs, latency='low', channels=n_channels,
dither_off=True, device=di,
extra_settings=extra_settings)
except Exception as exp:
raise RuntimeError('Could not set up %s:\n%s' % (param_str, exp))
assert mixer.channels == n_channels
if fs is None:
param_str += ' @ %d Hz' % (mixer.samplerate,)
else:
assert mixer.samplerate == fs
mixer.start()
assert mixer.active
logger.info('Expyfun: using %s, %0.1f ms nominal latency'
% (param_str, 1000 * device['default_low_output_latency']))
atexit.register(lambda: (mixer.abort(), mixer.close()))
return mixer
class SoundPlayer(object):
"""SoundPlayer based on rtmixer."""
def __init__(self, data, fs=None, loop=False, api=None, name=None,
fixed_delay=None, api_options=None):
data = np.atleast_2d(data).T
data = np.asarray(data, np.float32, 'C')
self._data = data
self.loop = bool(loop)
self._n_samples, n_channels = self._data.shape
assert n_channels >= 1
self._n_channels = n_channels
self._mixer = None # in case the next line crashes, __del__ works
self._mixer = _get_mixer(fs, self._n_channels, api, name, api_options)
if loop:
self._ring = RingBuffer(self._data.itemsize * self._n_channels,
self._data.size)
self._ring.write(self._data)
self._fs = float(self._mixer.samplerate)
self._ec_duration = self._n_samples / self._fs
self._action = None
self._fixed_delay = fixed_delay
if fixed_delay is not None:
logger.info('Expyfun: Using fixed audio delay %0.1f ms'
% (1000 * fixed_delay,))
else:
logger.info('Expyfun: Variable audio delay')
@property
def fs(self):
return self._fs
@property
def playing(self):
return self._action is not None and self._mixer is not None
@property
def _start_time(self):
if self._fixed_delay is not None:
return self._mixer.time + self._fixed_delay
else:
return 0.
def play(self):
"""Play."""
if not self.playing and self._mixer is not None:
if self.loop:
self._action = self._mixer.play_ringbuffer(
self._ring, start=self._start_time)
else:
self._action = self._mixer.play_buffer(
self._data, self._data.shape[1], start=self._start_time)
def stop(self, wait=True, extra_delay=0.):
"""Stop."""
if self.playing:
action, self._action = self._action, None
# Impose the same delay here that we imposed on the stim start
cancel_action = self._mixer.cancel(
action, time=self._start_time + extra_delay)
if wait:
self._mixer.wait(cancel_action)
else:
return cancel_action
def delete(self):
"""Delete."""
if getattr(self, '_mixer', None) is not None:
self.stop(wait=False)
mixer, self._mixer = self._mixer, None
stats = mixer.fetch_and_reset_stats().stats
logger.exp('%d underflows %d blocks'
% (stats.output_underflows, stats.blocks))
def __del__(self): # noqa
self.delete()
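# Hedged usage sketch (editorial addition; comment-only because of the
# package-relative imports above): play one second of quiet noise through
# the default output device, then clean up.
#
#   import numpy as np, time
#   data = 0.01 * np.random.randn(44100).astype(np.float32)
#   player = SoundPlayer(data, fs=44100)
#   player.play()
#   time.sleep(1.0)
#   player.stop()
#   player.delete()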
|
zeyuanxy/leet-code
|
refs/heads/master
|
vol7/coin-change/coin-change.py
|
2
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Author: Zeyuan Shang
# @Date: 2015-12-29 18:41:13
# @Last Modified by: Zeyuan Shang
# @Last Modified time: 2015-12-29 18:41:24
class Solution(object):
def coinChange(self, coins, amount):
"""
:type coins: List[int]
:type amount: int
:rtype: int
"""
        # Bottom-up DP over amounts: dp[v] holds the fewest coins that sum to
        # v, with amount + 2 acting as an unreachable sentinel.
        dp = [amount + 2] * (amount + 1)
        dp[0] = 0
        for coin in coins:
            for value in xrange(coin, amount + 1):
                dp[value] = min(dp[value], dp[value - coin] + 1)
if dp[amount] == amount + 2:
return -1
else:
return dp[amount]
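# Hedged usage sketch (editorial addition): the fewest coins summing to 11
# from {1, 2, 5} is 3 (5 + 5 + 1); an unreachable amount returns -1.
if __name__ == '__main__':
    print Solution().coinChange([1, 2, 5], 11)  # 3
    print Solution().coinChange([2], 3)         # -1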
|
khaleeque-ansari/Online-Coding-
|
refs/heads/master
|
Hackerrank/Graph Theory/Snakes & Ladders.py
|
1
|
# Enter your code here. Read input from STDIN. Print output to STDOUT
#T = input()
T = 1
from collections import namedtuple
from pprint import pprint as pp
inf = float('inf')
Edge = namedtuple('Edge', 'start, end, cost')
class Graph():
def __init__(self, edges):
self.edges = edges2 = [Edge(*edge) for edge in edges]
self.vertices = set(sum(([e.start, e.end] for e in edges2), []))
def dijkstra(self, source, dest):
assert source in self.vertices
dist = {vertex: inf for vertex in self.vertices}
previous = {vertex: None for vertex in self.vertices}
dist[source] = 0
q = self.vertices.copy()
neighbours = {vertex: set() for vertex in self.vertices}
for start, end, cost in self.edges:
neighbours[start].add((end, cost))
#pp(neighbours)
while q:
u = min(q, key=lambda vertex: dist[vertex])
q.remove(u)
if dist[u] == inf or u == dest:
break
for v, cost in neighbours[u]:
alt = dist[u] + cost
if alt < dist[v]: # Relax (u,v,a)
dist[v] = alt
previous[v] = u
#pp(previous)
s, u = [], dest
while previous[u]:
s.insert(0, u)
u = previous[u]
s.insert(0, u)
return s
for i in xrange(T):
#S,L = map(int,raw_input().split(','))
L,S = 1,4
#SArr = raw_input().split()
s = '22,54'
LArr = s.split()
#LArr = ['32,62', '42,68', '12,98']
s = '79,17 67,7 89,25 69,23'
SArr = s.split()
#SArr = ['95,13', '97,25', '93,37', '79,27', '75,19', '49,47', '67,17']
    l = []
    # one cost-1 edge per forward step, plus a cost-0 jump per ladder/snake
    for square in xrange(1, 100):
        l.append((square, square + 1, 1))
    for ll in LArr:
        a, b = map(int, ll.split(','))
        l.append((a, b, 0))
    for sl in SArr:
        a, b = map(int, sl.split(','))
        l.append((a, b, 0))
l = sorted(l)
#print l
graph = Graph(l)
pp(graph.dijkstra(1, 100))
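# Editorial note: board squares are vertices, consecutive squares are joined
# by cost-1 edges, and each ladder or snake contributes a cost-0 jump, so the
# Dijkstra shortest path from 1 to 100 prefers ladders and avoids snakes'
# extra steps. Note this counts single-square moves, not dice rolls.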
|
0be1/ansible
|
refs/heads/devel
|
v2/ansible/plugins/shell/__init__.py
|
7690
|
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
|
jteehan/cfme_tests
|
refs/heads/master
|
sprout/sprout/irc_bot.py
|
15
|
# -*- coding: utf-8 -*-
from sprout import settings
import json
import pika
REQUIRED = {'PIKA_USER', 'PIKA_PASS', 'PIKA_HOST', 'PIKA_CHANNEL', 'PIKA_ROUTING_KEY'}
def send_message(message):
    if not all(hasattr(settings, item) for item in REQUIRED):
return
url = "amqp://" + settings.PIKA_USER + ":" + settings.PIKA_PASS + "@" + settings.PIKA_HOST
params = pika.URLParameters(url)
params.socket_timeout = 5
try:
connection = pika.BlockingConnection(params) # Connect to CloudAMQP
channel = connection.channel()
message = json.dumps({'channel': settings.PIKA_CHANNEL, 'body': message})
channel.basic_publish(exchange='',
routing_key=settings.PIKA_ROUTING_KEY,
body=message)
connection.close()
    except Exception:
# Don't bother if we cannot connect
pass
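# Hedged usage sketch (editorial addition): publish a one-line notification;
# this silently no-ops unless all PIKA_* settings are configured.
#
#   from sprout.irc_bot import send_message
#   send_message('Sprout: provisioning finished')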
|
concurrence/concurrence
|
refs/heads/master
|
scripts/httpperf.py
|
2
|
#!/usr/bin/env stackless
from concurrence import dispatch, quit, Tasklet, Channel
from concurrence.http.client import HTTPConnection
from concurrence.statistic import gamma_filter
from concurrence.containers.deque import Deque
from optparse import OptionParser
import urlparse
import logging
import time
import sys
def parse_options():
parser = OptionParser(usage="%prog [options]", version="%prog 1.0", prog="httpperf")
parser.add_option("--url", type="string", default=None, dest="url", metavar="URL", help="the url to fetch")
parser.add_option("--sessions", type="int", default=1, dest="sessions", metavar="SESSIONS", help="")
parser.add_option("--requests", type="int", default=-1, dest="requests", metavar="REQUESTS", help="")
parser.add_option("--count", type="int", default=-1, dest="count", metavar="COUNT", help="")
parser.add_option("--delay", type="float", default=1, dest="delay", metavar="DELAY", help="")
parser.add_option("--pipeline", type="int", default=1, dest="pipeline", metavar="PIPELINE", help="")
parser.add_option("--dump", action="store_true", dest="dump", metavar="DUMP", help="")
(options, _) = parser.parse_args()
return options
class HttpPerf(object):
def __init__(self, options):
self.status = {}
self.request = 0
self.lastRequest = None
self.lastTime = None
self.options = options
self.dispenser = Channel()
def session_response_reader(self, cnn, pipeline_tokens):
#TODO use tasklet.loop, must be extended such that you can stop the loop by returning something (or StopIteration?)
while True:
response = cnn.receive()
#read status
self.count('status', response.status)
connection_header = response.get_header('Connection')
if connection_header == 'close' and self.options.requests != 1:
                print >> sys.stderr, "WARNING: server closed connection (no keep-alive); please use --requests=1"
#this will read the complete response
if self.options.dump:
print response.status
for k, v in response.headers:
print "%s: %s" % (k, v)
for chunk in response:
sys.stdout.write(chunk)
sys.stdout.flush()
print
else:
list(response)
#print 'resp'
pipeline_tokens.append(True)
def session(self, host, port, path):
cnn = None
pipeline_tokens = Deque()
for _ in range(self.options.pipeline): # can append take iterator?, or list?
pipeline_tokens.append(True)
try:
cnn = HTTPConnection()
cnn.connect((host, port))
Tasklet.new(self.session_response_reader)(cnn, pipeline_tokens)
requests = 0 #no requests in this session
while True:
if self.options.requests != -1 and requests >= self.options.requests:
break #we are done with this session
if self.dispenser.receive() is None:
return False #we are done globally
pipeline_tokens.popleft(True)
#do the request
cnn.send(cnn.get(path))
#print response
requests += 1
self.count('request')
finally:
#if response_reader_task is not None:
# response_reader_task.kill()
if cnn is not None:
cnn.close()
return True
def sessions(self):
u = urlparse.urlparse(self.options.url)
if ':' in u.netloc:
host, port = u.netloc.split(':')
port = int(port)
else:
host, port = u.netloc, 80
path = urlparse.urlunsplit(['', '', u.path, u.query, u.fragment])
if path == '':
path = '/'
try:
while True:
if not self.session(host, port, path):
return
except TaskletExit:
raise
except:
logging.exception("exception in http session")
def count(self, attr, key = None, inc = 1):
a = getattr(self, attr)
if key is None:
v = a + inc
setattr(self, attr, v)
return v
else:
            if key not in a:
a[key] = inc
else:
a[key] = a[key] + inc
return a[key]
def show(self):
now = time.time()
if self.lastTime is not None:
reqSec = (self.request - self.lastRequest) / (now - self.lastTime)
reqSec = gamma_filter(self.lastReqSec, reqSec, 0.60)
else:
reqSec = 0.0
print >> sys.stderr, self.status, self.request, reqSec
self.lastTime = time.time()
self.lastRequest = self.request
self.lastReqSec = reqSec
def dispense(self):
if self.options.count == -1:
#run forever
while True:
self.dispenser.send(True)
if self.options.delay > 0.0:
Tasklet.sleep(self.options.delay)
else:
#a fixed number of total requests
for i in range(self.options.count):
self.dispenser.send(True)
if self.options.delay > 0.0:
Tasklet.sleep(self.options.delay)
for i in range(self.options.sessions):
self.dispenser.send(None)
def run(self):
#show stats every second:
Tasklet.interval(1.0, self.show, immediate = True)()
#dispenses tokens for doing a request to sessions:
Tasklet.new(self.dispense)()
#start up sessions, and wait till they are finished
Tasklet.join_all([Tasklet.new(self.sessions)() for _ in range(self.options.sessions)])
quit()
def main():
options = parse_options()
if not options.url:
        assert False, "please provide a url!"
perf = HttpPerf(options)
perf.run()
if __name__ == '__main__':
dispatch(main)
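# Example invocation (editorial addition; flags as defined in parse_options,
# host and counts illustrative):
#   stackless httpperf.py --url=http://localhost:8080/ --sessions=10 --count=1000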
|
wakatime/wakatime
|
refs/heads/master
|
tests/test_languages.py
|
1
|
# -*- coding: utf-8 -*-
from wakatime.main import execute
from wakatime.packages import requests
import os
import time
from wakatime.compat import u
from wakatime.constants import SUCCESS
from wakatime.stats import guess_lexer
from . import utils
from .utils import ANY, CustomResponse
class LanguagesTestCase(utils.TestCase):
patch_these = [
'wakatime.packages.requests.adapters.HTTPAdapter.send',
'wakatime.offlinequeue.Queue.push',
['wakatime.offlinequeue.Queue.pop', None],
['wakatime.offlinequeue.Queue.connect', None],
'wakatime.session_cache.SessionCache.save',
'wakatime.session_cache.SessionCache.delete',
['wakatime.session_cache.SessionCache.get', requests.session],
['wakatime.session_cache.SessionCache.connect', None],
]
def shared(self, expected_language='', entity='', entity_type='file', extra_args=[]):
self.patched['wakatime.packages.requests.adapters.HTTPAdapter.send'].return_value = CustomResponse()
config = 'tests/samples/configs/good_config.cfg'
if entity_type == 'file':
entity = os.path.join('tests/samples/codefiles', entity)
now = u(int(time.time()))
args = ['--entity', entity, '--config', config, '--time', now] + extra_args
retval = execute(args)
self.assertEquals(retval, SUCCESS)
self.assertNothingPrinted()
heartbeat = {
'language': expected_language,
'lines': ANY,
'entity': os.path.realpath(entity) if entity_type == 'file' else entity,
'project': ANY,
'branch': ANY,
'dependencies': ANY,
'time': float(now),
'type': entity_type,
'is_write': False,
'user_agent': ANY,
}
self.assertHeartbeatSent(heartbeat)
self.assertHeartbeatNotSavedOffline()
self.assertOfflineHeartbeatsSynced()
self.assertSessionCacheSaved()
def test_c_language_detected_for_header_with_c_files_in_folder(self):
self.shared(
expected_language='C',
entity='c_only/see.h',
)
def test_cpp_language_detected_for_header_with_c_and_cpp_files_in_folder(self):
self.shared(
expected_language='C++',
entity='c_and_cpp/cpp.h',
)
def test_cpp_language_detected_for_header_with_c_and_cxx_files_in_folder(self):
self.shared(
expected_language='C++',
entity='c_and_cxx/cpp.h',
)
def test_c_not_detected_for_non_header_with_c_files_in_folder(self):
self.shared(
expected_language='Python',
entity='c_and_python/see.py',
)
def test_objectivec_language_detected_when_header_files_in_folder(self):
self.shared(
expected_language='Objective-C',
entity='c_and_cpp/empty.m',
)
def test_objectivec_language_detected_when_m_files_in_folder(self):
self.shared(
expected_language='Objective-C',
entity='c_and_cpp/objective-c.h',
)
def test_objectivecpp_language_detected_when_header_files_in_folder(self):
self.shared(
expected_language='Objective-C++',
entity='c_and_cpp/empty.mm',
)
def test_objectivecpp_language_detected_when_m_files_in_folder(self):
self.shared(
expected_language='Objective-C++',
entity='c_and_cpp/objective-cpp.h',
)
def test_guess_lexer(self):
source_file = 'tests/samples/codefiles/python.py'
local_file = None
lexer = guess_lexer(source_file, local_file)
language = u(lexer.name) if lexer else None
self.assertEquals(language, 'Python')
def test_guess_lexer_from_vim_modeline(self):
self.shared(
expected_language='Python',
entity='python_without_extension',
)
def test_guess_lexer_when_entity_not_exist_but_local_file_exists(self):
source_file = 'tests/samples/codefiles/does_not_exist.py'
local_file = 'tests/samples/codefiles/python.py'
self.assertFalse(os.path.exists(source_file))
lexer = guess_lexer(source_file, local_file)
language = u(lexer.name) if lexer else None
self.assertEquals(language, 'Python')
def test_language_arg_takes_priority_over_detected_language(self):
self.shared(
expected_language='Java',
entity='python.py',
extra_args=['--language', 'JAVA'],
)
def test_language_arg_is_used_when_not_guessed(self):
with utils.mock.patch('wakatime.stats.guess_lexer') as mock_guess_lexer:
mock_guess_lexer.return_value = None
self.shared(
expected_language='Java',
entity='python.py',
extra_args=['--language', 'JAVA']
)
def test_language_defaults_to_none_for_entity_type_app(self):
self.shared(
expected_language=None,
entity='not-a-file',
entity_type='domain',
extra_args=['--entity-type', 'domain'],
)
def test_language_arg_used_for_entity_type_app(self):
self.shared(
expected_language='Java',
entity='not-a-file',
entity_type='app',
extra_args=['--entity-type', 'app', '--language', 'JAVA'],
)
def test_language_arg_used_for_entity_type_domain(self):
self.shared(
expected_language='Java',
entity='not-a-file',
entity_type='domain',
extra_args=['--entity-type', 'domain', '--language', 'JAVA'],
)
def test_vim_language_arg_is_used_when_not_guessed(self):
with utils.mock.patch('wakatime.stats.guess_lexer') as mock_guess_lexer:
mock_guess_lexer.return_value = None
self.shared(
expected_language='Java',
entity='python.py',
extra_args=['--language', 'java', '--plugin', 'NeoVim/703 vim-wakatime/4.0.9']
)
def test_alternate_language_not_used_when_invalid(self):
with utils.mock.patch('wakatime.stats.guess_lexer') as mock_guess_lexer:
mock_guess_lexer.return_value = None
self.shared(
expected_language=None,
entity='python.py',
extra_args=['--language', 'foo', '--plugin', 'NeoVim/703 vim-wakatime/4.0.9']
)
def test_error_reading_alternate_language_json_map_file(self):
with utils.mock.patch('wakatime.stats.guess_lexer') as mock_guess_lexer:
mock_guess_lexer.return_value = None
with utils.mock.patch('wakatime.stats.open') as mock_open:
mock_open.side_effect = IOError('')
self.shared(
expected_language=None,
entity='python.py',
extra_args=['--language', 'foo', '--plugin', 'NeoVim/703 vim-wakatime/4.0.9']
)
def test_typescript_detected_over_typoscript(self):
self.shared(
expected_language='TypeScript',
entity='empty.ts',
extra_args=['--language', 'foo', '--plugin', 'NeoVim/703 vim-wakatime/4.0.9']
)
def test_perl_detected_over_prolog(self):
self.shared(
expected_language='Perl',
entity='perl.pl',
)
def test_fsharp_detected_over_forth(self):
self.shared(
expected_language='F#',
entity='fsharp.fs',
)
def test_matlab_detected(self):
self.shared(
expected_language='Matlab',
entity='matlab/matlab.m',
)
def test_matlab_detected_over_objectivec_when_mat_file_in_folder(self):
self.shared(
expected_language='Matlab',
entity='matlab/with_mat_files/empty.m',
)
def test_objectivec_detected_over_matlab_with_matching_header(self):
self.shared(
expected_language='Objective-C',
entity='matlab/with_mat_files/objective-c.m',
)
def test_objectivec_detected_over_matlab_with_non_maching_headers_present(self):
self.shared(
expected_language='Objective-C',
entity='matlab/with_headers/empty.m',
)
def test_matlab_detected_over_objectivec_when_header_in_folder(self):
self.shared(
expected_language='Matlab',
entity='matlab/with_headers/matlab.m',
)
def test_heartbeat_skipped_when_matlab_same_accuracy(self):
self.patched['wakatime.packages.requests.adapters.HTTPAdapter.send'].return_value = CustomResponse()
entity = 'matlab/without_headers/empty.m'
config = 'tests/samples/configs/good_config.cfg'
entity = os.path.join('tests/samples/codefiles', entity)
now = u(int(time.time()))
args = ['--file', entity, '--config', config, '--time', now]
retval = execute(args)
self.assertEquals(retval, SUCCESS)
self.assertNothingPrinted()
self.assertHeartbeatNotSent()
self.assertHeartbeatNotSavedOffline()
self.assertOfflineHeartbeatsSynced()
self.assertSessionCacheUntouched()
def test_mjs_javascript_module_extension_detected(self):
self.shared(
expected_language='JavaScript',
entity='javascript_module.mjs',
)
def test_go_mod_detected(self):
self.shared(
expected_language='Go',
entity='go.mod',
)
def test_coldfusion_detected(self):
self.shared(
expected_language='ColdFusion',
entity='coldfusion.cfm',
)
def test_gas_detected_as_assembly(self):
self.shared(
expected_language='Assembly',
entity='gas.s',
)
|
meteorcloudy/tensorflow
|
refs/heads/master
|
tensorflow/python/kernel_tests/clip_ops_test.py
|
13
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorflow.ops.clip_ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import clip_ops
from tensorflow.python.ops import gradient_checker
from tensorflow.python.platform import test
class ClipTest(test.TestCase):
def DISABLED_testClipByValueGradient(self):
inputs = constant_op.constant([1.0, 2.0, 3.0, 4.0], dtype=dtypes.float32)
outputs_1 = clip_ops.clip_by_value(inputs, 0.5, 3.5)
min_val = constant_op.constant([0.5, 0.5, 0.5, 0.5], dtype=dtypes.float32)
max_val = constant_op.constant([3.5, 3.5, 3.5, 3.5], dtype=dtypes.float32)
outputs_2 = clip_ops.clip_by_value(inputs, min_val, max_val)
with self.test_session():
error_1 = gradient_checker.compute_gradient_error(inputs, [4], outputs_1,
[4])
self.assertLess(error_1, 1e-4)
error_2 = gradient_checker.compute_gradient_error(inputs, [4], outputs_2,
[4])
self.assertLess(error_2, 1e-4)
# ClipByValue test
def testClipByValue(self):
with self.test_session(use_gpu=True):
x = constant_op.constant([-5.0, 2.0, 3.0, 4.0, 5.0, 6.0], shape=[2, 3])
np_ans = [[-4.4, 2.0, 3.0], [4.0, 4.4, 4.4]]
clip_value = 4.4
ans = clip_ops.clip_by_value(x, -clip_value, clip_value)
tf_ans = ans.eval()
self.assertAllClose(np_ans, tf_ans)
# [Tensor, Scalar, Scalar]
def DISABLED_testClipByValue0Type(self):
for dtype in [
dtypes.float16, dtypes.float32, dtypes.float64, dtypes.int8,
dtypes.int16, dtypes.int32, dtypes.int64, dtypes.uint8, dtypes.uint16
]:
with self.test_session(use_gpu=True):
x = constant_op.constant([1, 2, 3, 4, 5, 6], shape=[2, 3], dtype=dtype)
np_ans = [[2, 2, 3], [4, 4, 4]]
clip_value_min = 2
clip_value_max = 4
ans = clip_ops.clip_by_value(x, clip_value_min, clip_value_max)
tf_ans = ans.eval()
self.assertAllClose(np_ans, tf_ans)
# [Tensor, Tensor, Scalar]
def DISABLED_testClipByValue1Type(self):
for dtype in [
dtypes.float16, dtypes.float32, dtypes.float64, dtypes.int8,
dtypes.int16, dtypes.int32, dtypes.int64, dtypes.uint8, dtypes.uint16
]:
with self.test_session(use_gpu=True):
x = constant_op.constant([1, 2, 3, 4, 5, 6], shape=[2, 3], dtype=dtype)
np_ans = [[2, 2, 3], [4, 4, 4]]
clip_value_min = constant_op.constant(
[2, 2, 2, 3, 3, 3], shape=[2, 3], dtype=dtype)
clip_value_max = 4
ans = clip_ops.clip_by_value(x, clip_value_min, clip_value_max)
tf_ans = ans.eval()
self.assertAllClose(np_ans, tf_ans)
# [Tensor, Scalar, Tensor]
def DISABLED_testClipByValue2Type(self):
for dtype in [
dtypes.float16, dtypes.float32, dtypes.float64, dtypes.int8,
dtypes.int16, dtypes.int32, dtypes.int64, dtypes.uint8, dtypes.uint16
]:
with self.test_session(use_gpu=True):
x = constant_op.constant([1, 2, 3, 4, 5, 6], shape=[2, 3], dtype=dtype)
np_ans = [[4, 4, 4], [4, 5, 6]]
clip_value_min = 4
clip_value_max = constant_op.constant(
[6, 6, 6, 6, 6, 6], shape=[2, 3], dtype=dtype)
ans = clip_ops.clip_by_value(x, clip_value_min, clip_value_max)
tf_ans = ans.eval()
self.assertAllClose(np_ans, tf_ans)
# [Tensor, Tensor, Tensor]
def DISABLED_testClipByValue3Type(self):
for dtype in [
dtypes.float16, dtypes.float32, dtypes.float64, dtypes.int8,
dtypes.int16, dtypes.int32, dtypes.int64, dtypes.uint8, dtypes.uint16
]:
with self.test_session(use_gpu=True):
x = constant_op.constant([1, 2, 3, 4, 5, 6], shape=[2, 3], dtype=dtype)
np_ans = [[2, 2, 3], [5, 5, 6]]
clip_value_min = constant_op.constant(
[2, 2, 2, 5, 5, 5], shape=[2, 3], dtype=dtype)
clip_value_max = constant_op.constant(
[5, 5, 5, 7, 7, 7], shape=[2, 3], dtype=dtype)
ans = clip_ops.clip_by_value(x, clip_value_min, clip_value_max)
tf_ans = ans.eval()
self.assertAllClose(np_ans, tf_ans)
def testClipByValueBadShape(self):
with self.test_session(use_gpu=True):
x = constant_op.constant([-5.0, 2.0, 3.0, 4.0, 5.0, 6.0], shape=[2, 3, 1])
# Use a nonsensical shape.
clip = constant_op.constant([1.0, 2.0])
with self.assertRaises(ValueError):
_ = clip_ops.clip_by_value(x, -clip, clip)
with self.assertRaises(ValueError):
_ = clip_ops.clip_by_value(x, 1.0, clip)
def testClipByValueNonFinite(self):
# TODO(b/78016351): Enable test on GPU once the bug is fixed.
with self.test_session():
x = constant_op.constant([float('NaN'), float('Inf'), -float('Inf')])
np_ans = [float('NaN'), 4.0, -4.0]
clip_value = 4.0
ans = clip_ops.clip_by_value(x, -clip_value, clip_value)
tf_ans = ans.eval()
self.assertAllClose(np_ans, tf_ans)
# ClipByNorm tests
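  # Editorial note: clip_by_norm rescales x by clip_norm / l2norm(x) when its
  # norm exceeds clip_norm, so x = [-3, 4] (norm 5) with clip_norm = 4 becomes
  # [-2.4, 3.2], as the tests below verify.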
def testClipByNormClipped(self):
# Norm clipping when clip_norm < 5
with self.test_session(use_gpu=True):
x = constant_op.constant([-3.0, 0.0, 0.0, 4.0, 0.0, 0.0], shape=[2, 3])
# Norm of x = sqrt(3^2 + 4^2) = 5
np_ans = [[-2.4, 0.0, 0.0], [3.2, 0.0, 0.0]]
clip_norm = 4.0
ans = clip_ops.clip_by_norm(x, clip_norm)
tf_ans = ans.eval()
      clip_tensor = constant_op.constant(4.0)
      ans = clip_ops.clip_by_norm(x, clip_tensor)
      tf_ans_tensor = ans.eval()
self.assertAllClose(np_ans, tf_ans)
self.assertAllClose(np_ans, tf_ans_tensor)
def testClipByNormBadShape(self):
with self.test_session(use_gpu=True):
x = constant_op.constant([-3.0, 0.0, 0.0, 4.0, 0.0, 0.0], shape=[2, 3, 1])
# Use a nonsensical shape.
clip = constant_op.constant([1.0, 2.0])
with self.assertRaises(ValueError):
_ = clip_ops.clip_by_norm(x, clip)
def testClipByNormNotClipped(self):
# No norm clipping when clip_norm >= 5
with self.test_session(use_gpu=True):
x = constant_op.constant([-3.0, 0.0, 0.0, 4.0, 0.0, 0.0], shape=[2, 3])
# Norm of x = sqrt(3^2 + 4^2) = 5
np_ans = [[-3.0, 0.0, 0.0], [4.0, 0.0, 0.0]]
clip_norm = 6.0
ans = clip_ops.clip_by_norm(x, clip_norm)
tf_ans = ans.eval()
self.assertAllClose(np_ans, tf_ans)
def testClipByNormZero(self):
# No norm clipping when norm = 0
with self.test_session(use_gpu=True):
x = constant_op.constant([0.0, 0.0, 0.0, 0.0, 0.0, 0.0], shape=[2, 3])
# Norm = 0, no changes
np_ans = [[0.0, 0.0, 0.0], [0.0, 0.0, 0.0]]
clip_norm = 6.0
ans = clip_ops.clip_by_norm(x, clip_norm)
tf_ans = ans.eval()
self.assertAllClose(np_ans, tf_ans)
def testClipByNormClippedWithDim0(self):
# Norm clipping when clip_norm < 5
with self.test_session(use_gpu=True):
x = constant_op.constant([-3.0, 0.0, 0.0, 4.0, 0.0, 3.0], shape=[2, 3])
# Norm of x[:, 0] = sqrt(3^2 + 4^2) = 5, x[:, 2] = 3
np_ans = [[-2.4, 0.0, 0.0], [3.2, 0.0, 3.0]]
clip_norm = 4.0
ans = clip_ops.clip_by_norm(x, clip_norm, [0])
tf_ans = ans.eval()
self.assertAllClose(np_ans, tf_ans)
def testClipByNormClippedWithDim1(self):
# Norm clipping when clip_norm < 5
with self.test_session(use_gpu=True):
x = constant_op.constant([-3.0, 0.0, 0.0, 4.0, 0.0, 3.0], shape=[2, 3])
# Norm of x[0, :] = 3, x[1, :] = sqrt(3^2 + 4^2) = 5
np_ans = [[-3.0, 0.0, 0.0], [3.2, 0.0, 2.4]]
clip_norm = 4.0
ans = clip_ops.clip_by_norm(x, clip_norm, [1])
tf_ans = ans.eval()
self.assertAllClose(np_ans, tf_ans)
def testClipByNormNotClippedWithAxes(self):
# No norm clipping when clip_norm >= 5
with self.test_session(use_gpu=True):
x = constant_op.constant([-3.0, 0.0, 0.0, 4.0, 0.0, 3.0], shape=[2, 3])
# Norm of x[0, :] = 3, x[1, :] = sqrt(3^2 + 4^2) = 5
np_ans = [[-3.0, 0.0, 0.0], [4.0, 0.0, 3.0]]
clip_norm = 6.0
ans = clip_ops.clip_by_norm(x, clip_norm, [1])
tf_ans = ans.eval()
self.assertAllClose(np_ans, tf_ans)
# ClipByGlobalNorm tests
def testClipByGlobalNormClipped(self):
# Norm clipping when clip_norm < 5
with self.test_session(use_gpu=True):
x0 = constant_op.constant([-2.0, 0.0, 0.0, 4.0, 0.0, 0.0], shape=[2, 3])
x1 = constant_op.constant([1.0, -2.0])
# Global norm of x0 and x1 = sqrt(1 + 4^2 + 2^2 + 2^2) = 5
clip_norm = 4.0
# Answers are the original tensors scaled by 4.0/5.0
np_ans_0 = [[-1.6, 0.0, 0.0], [3.2, 0.0, 0.0]]
np_ans_1 = [0.8, -1.6]
ans, norm = clip_ops.clip_by_global_norm((x0, x1), clip_norm)
tf_ans_1 = ans[0].eval()
tf_ans_2 = ans[1].eval()
tf_norm = norm.eval()
self.assertAllClose(tf_norm, 5.0)
self.assertAllClose(np_ans_0, tf_ans_1)
self.assertAllClose(np_ans_1, tf_ans_2)
def testClipByGlobalNormClippedTensor(self):
# Norm clipping when clip_norm < 5
with self.test_session(use_gpu=True):
x0 = constant_op.constant([-2.0, 0.0, 0.0, 4.0, 0.0, 0.0], shape=[2, 3])
x1 = constant_op.constant([1.0, -2.0])
# Global norm of x0 and x1 = sqrt(1 + 4^2 + 2^2 + 2^2) = 5
clip_norm = constant_op.constant(4.0)
# Answers are the original tensors scaled by 4.0/5.0
np_ans_0 = [[-1.6, 0.0, 0.0], [3.2, 0.0, 0.0]]
np_ans_1 = [0.8, -1.6]
ans, norm = clip_ops.clip_by_global_norm((x0, x1), clip_norm)
tf_ans_1 = ans[0].eval()
tf_ans_2 = ans[1].eval()
tf_norm = norm.eval()
self.assertAllClose(tf_norm, 5.0)
self.assertAllClose(np_ans_0, tf_ans_1)
self.assertAllClose(np_ans_1, tf_ans_2)
def testClipByGlobalNormSupportsNone(self):
# Norm clipping when clip_norm < 5
with self.test_session(use_gpu=True):
x0 = constant_op.constant([-2.0, 0.0, 0.0, 4.0, 0.0, 0.0], shape=[2, 3])
x1 = constant_op.constant([1.0, -2.0])
# Global norm of x0 and x1 = sqrt(1 + 4^2 + 2^2 + 2^2) = 5
clip_norm = 4.0
# Answers are the original tensors scaled by 4.0/5.0
np_ans_0 = [[-1.6, 0.0, 0.0], [3.2, 0.0, 0.0]]
np_ans_1 = [0.8, -1.6]
ans, norm = clip_ops.clip_by_global_norm((x0, None, x1, None), clip_norm)
self.assertTrue(ans[1] is None)
self.assertTrue(ans[3] is None)
tf_ans_1 = ans[0].eval()
tf_ans_2 = ans[2].eval()
tf_norm = norm.eval()
self.assertAllClose(tf_norm, 5.0)
self.assertAllClose(np_ans_0, tf_ans_1)
self.assertAllClose(np_ans_1, tf_ans_2)
def testClipByGlobalNormWithIndexedSlicesClipped(self):
# Norm clipping when clip_norm < 5
with self.test_session(use_gpu=True):
x0 = constant_op.constant([-2.0, 0.0, 0.0, 4.0, 0.0, 0.0], shape=[2, 3])
x1 = ops.IndexedSlices(
constant_op.constant([1.0, -2.0]), constant_op.constant([3, 4]))
# Global norm of x0 and x1 = sqrt(1 + 4^2 + 2^2 + 2^2) = 5
clip_norm = 4.0
# Answers are the original tensors scaled by 4.0/5.0
np_ans_0 = [[-1.6, 0.0, 0.0], [3.2, 0.0, 0.0]]
np_ans_1 = [0.8, -1.6]
ans, norm = clip_ops.clip_by_global_norm([x0, x1], clip_norm)
tf_ans_1 = ans[0].eval()
tf_ans_2 = ans[1].values.eval()
tf_norm = norm.eval()
self.assertAllClose(tf_norm, 5.0)
self.assertAllClose(np_ans_0, tf_ans_1)
self.assertAllClose(np_ans_1, tf_ans_2)
def testClipByGlobalNormPreservesDenseShape(self):
dense_shape = (1,)
slices = ops.IndexedSlices(
constant_op.constant([1.0]),
constant_op.constant([0]),
dense_shape=dense_shape)
ans, _ = clip_ops.clip_by_global_norm([slices], 1.0)
modified_slices = ans[0]
self.assertEqual(dense_shape, slices.dense_shape)
self.assertEqual(dense_shape, modified_slices.dense_shape)
def testClipByGlobalNormNotClipped(self):
# No norm clipping when clip_norm >= 5
with self.test_session(use_gpu=True):
x0 = constant_op.constant([-2.0, 0.0, 0.0, 4.0, 0.0, 0.0], shape=[2, 3])
x1 = constant_op.constant([1.0, -2.0])
# Global norm of x0 and x1 = sqrt(1 + 4^2 + 2^2 + 2^2) = 5
np_ans_0 = [[-2.0, 0.0, 0.0], [4.0, 0.0, 0.0]]
np_ans_1 = [1.0, -2.0]
clip_norm = 6.0
ans, norm = clip_ops.clip_by_global_norm([x0, x1], clip_norm)
tf_ans_1 = ans[0].eval()
tf_ans_2 = ans[1].eval()
tf_norm = norm.eval()
self.assertAllClose(tf_norm, 5.0)
self.assertAllClose(np_ans_0, tf_ans_1)
self.assertAllClose(np_ans_1, tf_ans_2)
def testClipByGlobalNormZero(self):
# No norm clipping when norm = 0
with self.test_session(use_gpu=True):
x0 = constant_op.constant([0.0, 0.0, 0.0, 0.0, 0.0, 0.0], shape=[2, 3])
x1 = constant_op.constant([0.0, 0.0])
# Norm = 0, no changes
np_ans_0 = [[0.0, 0.0, 0.0], [0.0, 0.0, 0.0]]
np_ans_1 = [0.0, 0.0]
clip_norm = 6.0
ans, norm = clip_ops.clip_by_global_norm([x0, x1], clip_norm)
tf_ans_1 = ans[0].eval()
tf_ans_2 = ans[1].eval()
tf_norm = norm.eval()
self.assertAllClose(tf_norm, 0.0)
self.assertAllClose(np_ans_0, tf_ans_1)
self.assertAllClose(np_ans_1, tf_ans_2)
def testClipByAverageNormClipped(self):
# Norm clipping when average clip_norm < 0.83333333
with self.test_session(use_gpu=True):
x = constant_op.constant([-3.0, 0.0, 0.0, 4.0, 0.0, 0.0], shape=[2, 3])
# Average norm of x = sqrt(3^2 + 4^2) / 6 = 0.83333333
np_ans = [[-2.88, 0.0, 0.0], [3.84, 0.0, 0.0]]
clip_norm = 0.8
ans = clip_ops.clip_by_average_norm(x, clip_norm)
tf_ans = ans.eval()
self.assertAllClose(np_ans, tf_ans)
def testClipByAverageNormClippedTensor(self):
# Norm clipping when average clip_norm < 0.83333333
with self.test_session(use_gpu=True):
x = constant_op.constant([-3.0, 0.0, 0.0, 4.0, 0.0, 0.0], shape=[2, 3])
# Average norm of x = sqrt(3^2 + 4^2) / 6 = 0.83333333
np_ans = [[-2.88, 0.0, 0.0], [3.84, 0.0, 0.0]]
clip_norm = constant_op.constant(0.8)
ans = clip_ops.clip_by_average_norm(x, clip_norm)
tf_ans = ans.eval()
self.assertAllClose(np_ans, tf_ans)
def testClipByAverageNormNotClipped(self):
# No norm clipping when average clip_norm >= 0.83333333
with self.test_session(use_gpu=True):
x = constant_op.constant([-3.0, 0.0, 0.0, 4.0, 0.0, 0.0], shape=[2, 3])
# Average norm of x = sqrt(3^2 + 4^2) / 6 = 0.83333333
np_ans = [[-3.0, 0.0, 0.0], [4.0, 0.0, 0.0]]
clip_norm = 0.9
ans = clip_ops.clip_by_average_norm(x, clip_norm)
tf_ans = ans.eval()
self.assertAllClose(np_ans, tf_ans)
def testClipByAverageNormZero(self):
# No norm clipping when average clip_norm = 0
with self.test_session(use_gpu=True):
x = constant_op.constant([0.0, 0.0, 0.0, 0.0, 0.0, 0.0], shape=[2, 3])
# Average norm = 0, no changes
np_ans = [[0.0, 0.0, 0.0], [0.0, 0.0, 0.0]]
clip_norm = 0.9
ans = clip_ops.clip_by_average_norm(x, clip_norm)
tf_ans = ans.eval()
self.assertAllClose(np_ans, tf_ans)
def testClipByValueEmptyTensor(self):
# Test case for GitHub issue 19337
zero = array_ops.placeholder(dtype=dtypes.float32, shape=None)
x = clip_ops.clip_by_value(zero, zero, zero)
y = clip_ops.clip_by_value(zero, 1.0, 1.0)
z = clip_ops.clip_by_value(zero, zero, 1.0)
w = clip_ops.clip_by_value(zero, 1.0, zero)
with self.test_session(use_gpu=True) as sess:
sess.run([x, y, z, w], feed_dict={zero: np.zeros((7, 0))})
if __name__ == '__main__':
test.main()
|
projectcalico/calico-neutron
|
refs/heads/calico-readme
|
neutron/plugins/cisco/extensions/n1kv.py
|
37
|
# Copyright 2013 Cisco Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron.api import extensions
from neutron.api.v2 import attributes
PROFILE_ID = 'n1kv:profile_id'
MULTICAST_IP = 'n1kv:multicast_ip'
SEGMENT_ADD = 'n1kv:segment_add'
SEGMENT_DEL = 'n1kv:segment_del'
MEMBER_SEGMENTS = 'n1kv:member_segments'
EXTENDED_ATTRIBUTES_2_0 = {
'networks': {
PROFILE_ID: {'allow_post': True, 'allow_put': False,
'validate': {'type:regex': attributes.UUID_PATTERN},
'default': attributes.ATTR_NOT_SPECIFIED,
'is_visible': True},
MULTICAST_IP: {'allow_post': True, 'allow_put': True,
'default': attributes.ATTR_NOT_SPECIFIED,
'is_visible': True},
SEGMENT_ADD: {'allow_post': True, 'allow_put': True,
'default': attributes.ATTR_NOT_SPECIFIED,
'is_visible': True},
SEGMENT_DEL: {'allow_post': True, 'allow_put': True,
'default': attributes.ATTR_NOT_SPECIFIED,
'is_visible': True},
MEMBER_SEGMENTS: {'allow_post': True, 'allow_put': True,
'default': attributes.ATTR_NOT_SPECIFIED,
'is_visible': True},
},
'ports': {
PROFILE_ID: {'allow_post': True, 'allow_put': False,
'validate': {'type:regex': attributes.UUID_PATTERN},
'default': attributes.ATTR_NOT_SPECIFIED,
'is_visible': True}
}
}
class N1kv(extensions.ExtensionDescriptor):
"""Extension class supporting N1kv profiles.
This class is used by neutron's extension framework to make
metadata about the n1kv profile extension available to
clients. No new resources are defined by this extension. Instead,
the existing network resource's request and response messages are
extended with attributes in the n1kv profile namespace.
To create a network based on n1kv profile using the CLI with admin rights:
(shell) net-create --tenant_id <tenant-id> <net-name> \
--n1kv:profile_id <id>
(shell) port-create --tenant_id <tenant-id> <net-name> \
--n1kv:profile_id <id>
With admin rights, network dictionaries returned from CLI commands
will also include n1kv profile attributes.
"""
@classmethod
def get_name(cls):
return "n1kv"
@classmethod
def get_alias(cls):
return "n1kv"
@classmethod
def get_description(cls):
return "Expose network profile"
@classmethod
def get_namespace(cls):
return "http://docs.openstack.org/ext/n1kv/api/v2.0"
@classmethod
def get_updated(cls):
return "2012-11-15T10:00:00-00:00"
def get_extended_resources(self, version):
if version == "2.0":
return EXTENDED_ATTRIBUTES_2_0
else:
return {}
|
kustodian/ansible
|
refs/heads/devel
|
lib/ansible/modules/network/vyos/vyos_system.py
|
21
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'network'}
DOCUMENTATION = """
---
module: "vyos_system"
version_added: "2.3"
author: "Nathaniel Case (@Qalthos)"
short_description: Run `set system` commands on VyOS devices
description:
- Runs one or more commands on remote devices running VyOS.
This module can also be introspected to validate key parameters before
returning successfully.
extends_documentation_fragment: vyos
notes:
- Tested against VyOS 1.1.8 (helium).
- This module works with connection C(network_cli). See L(the VyOS OS Platform Options,../network/user_guide/platform_vyos.html).
options:
host_name:
description:
- Configure the device hostname parameter. This option takes an ASCII string value.
domain_name:
description:
- The new domain name to apply to the device.
name_servers:
description:
- A list of name servers to use with the device. Mutually exclusive with
I(domain_search)
aliases: ['name_server']
domain_search:
description:
- A list of domain names to search. Mutually exclusive with
I(name_server)
state:
description:
- Whether to apply (C(present)) or remove (C(absent)) the settings.
default: present
choices: ['present', 'absent']
"""
RETURN = """
commands:
description: The list of configuration mode commands to send to the device
returned: always
type: list
sample:
- set system hostname vyos01
- set system domain-name foo.example.com
"""
EXAMPLES = """
- name: configure hostname and domain-name
vyos_system:
host_name: vyos01
domain_name: test.example.com
- name: remove all configuration
vyos_system:
state: absent
- name: configure name servers
vyos_system:
    name_servers:
      - 8.8.8.8
      - 8.8.4.4
- name: configure domain search suffixes
vyos_system:
domain_search:
- sub1.example.com
- sub2.example.com
"""
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.vyos.vyos import get_config, load_config
from ansible.module_utils.network.vyos.vyos import vyos_argument_spec
def spec_key_to_device_key(key):
device_key = key.replace('_', '-')
    # domain-search is longer than just its key
if device_key == 'domain-search':
device_key += ' domain'
return device_key
def config_to_dict(module):
data = get_config(module)
config = {'domain_search': [], 'name_server': []}
for line in data.split('\n'):
if line.startswith('set system host-name'):
config['host_name'] = line[22:-1]
elif line.startswith('set system domain-name'):
config['domain_name'] = line[24:-1]
elif line.startswith('set system domain-search domain'):
config['domain_search'].append(line[33:-1])
elif line.startswith('set system name-server'):
config['name_server'].append(line[24:-1])
return config
def spec_to_commands(want, have):
commands = []
state = want.pop('state')
# state='absent' by itself has special meaning
if state == 'absent' and all(v is None for v in want.values()):
# Clear everything
for key in have:
commands.append('delete system %s' % spec_key_to_device_key(key))
for key in want:
if want[key] is None:
continue
current = have.get(key)
proposed = want[key]
device_key = spec_key_to_device_key(key)
# These keys are lists which may need to be reconciled with the device
if key in ['domain_search', 'name_server']:
if not proposed:
# Empty list was passed, delete all values
commands.append("delete system %s" % device_key)
for config in proposed:
if state == 'absent' and config in current:
commands.append("delete system %s '%s'" % (device_key, config))
elif state == 'present' and config not in current:
commands.append("set system %s '%s'" % (device_key, config))
else:
if state == 'absent' and current and proposed:
commands.append('delete system %s' % device_key)
elif state == 'present' and proposed and proposed != current:
commands.append("set system %s '%s'" % (device_key, proposed))
return commands
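# Hedged example (editorial addition): with have = {'host_name': 'vyos01'}
# and want = {'host_name': 'vyos02', 'state': 'present'} (all other keys
# None), spec_to_commands returns ["set system host-name 'vyos02'"].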
def map_param_to_obj(module):
return {
'host_name': module.params['host_name'],
'domain_name': module.params['domain_name'],
'domain_search': module.params['domain_search'],
'name_server': module.params['name_server'],
'state': module.params['state']
}
def main():
argument_spec = dict(
host_name=dict(type='str'),
domain_name=dict(type='str'),
domain_search=dict(type='list'),
name_server=dict(type='list', aliases=['name_servers']),
state=dict(type='str', default='present', choices=['present', 'absent']),
)
argument_spec.update(vyos_argument_spec)
module = AnsibleModule(
argument_spec=argument_spec,
supports_check_mode=True,
mutually_exclusive=[('domain_name', 'domain_search')],
)
warnings = list()
result = {'changed': False, 'warnings': warnings}
want = map_param_to_obj(module)
have = config_to_dict(module)
commands = spec_to_commands(want, have)
result['commands'] = commands
if commands:
commit = not module.check_mode
load_config(module, commands, commit=commit)
result['changed'] = True
module.exit_json(**result)
if __name__ == '__main__':
main()
|
tanglu-org/merge-o-matic
|
refs/heads/master
|
deb/controlfile.py
|
1
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# deb/controlfile.py - parse debian control files
#
# Copyright © 2008 Canonical Ltd.
# Author: Scott James Remnant <scott@ubuntu.com>.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of version 3 of the GNU General Public License as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import with_statement
class ControlFile(object):
"""Debian control file.
This can be used directly by calling the parse() function or
overridden to add functionality.
Class Properties:
FieldNames Alternate canonical capitalisation of field names
Properties:
paras List of paragraphs as dictionaries
para Final (or single) paragraph
signed True if the paragraph was PGP signed
"""
FieldNames = []
def __init__(self, filename=None, fileobj=None, *args, **kwds):
self.paras = []
self.para = None
self.signed = False
if fileobj is not None:
self.parse(fileobj, *args, **kwds)
elif filename is not None:
self.open(filename, *args, **kwds)
def capitaliseField(self, field):
"""Capitalise a field name correctly.
Fields are stored in the dictionary canonically capitalised,
words split by dashes and the first letter of each in upper
case.
        This can be overridden by adding the canonical capitalisation
of a field name to the FieldNames list.
"""
for canon in self.FieldNames:
if canon.lower() == field.lower():
return canon
return "-".join([ w.title() for w in field.split("-") ])
def open(self, file, *args, **kwds):
"""Open and parse a control-file format file."""
with open(file) as f:
try:
self.parse(f, *args, **kwds)
except Exception, e:
e.path = file
raise e
def parse(self, file, multi_para=False, signed=False):
"""Parse a control-file format file.
File is any object that acts as an iterator and returns lines,
file-like objects being most common.
Some control files may contain multiple paragraphs separated
by blank lines, if this is the case set multi_para to True.
Some single-paragraph control files may be PGP signed, if this
is the case set signed to True. If the file was actually
signed, the signed member of the object will be set to True.
"""
self.para = {}
is_signed = False
last_field = None
para_border = True
for line in file:
line = line.rstrip()
if line.startswith("#"):
continue
# Multiple blank lines are permitted at paragraph borders
if not len(line) and para_border:
continue
para_border = False
if line[:1].isspace():
if last_field is None:
raise IOError
self.para[last_field] += "\n" + line.lstrip()
elif ":" in line:
(field, value) = line.split(":", 1)
if len(field.rstrip().split(None)) > 1:
raise IOError
last_field = self.capitaliseField(field)
self.para[last_field] = value.lstrip()
elif line.startswith("-----BEGIN PGP") and signed:
if is_signed:
raise IOError
for line in file:
if not len(line) or line.startswith("\n"): break
is_signed = True
elif not len(line):
para_border = True
if multi_para:
self.paras.append(self.para)
self.para = {}
last_field = None
elif is_signed:
try:
pgpsig = file.next()
if not len(pgpsig):
raise IOError
except StopIteration:
raise IOError
if not pgpsig.startswith("-----BEGIN PGP"):
raise IOError
self.signed = True
break
else:
raise IOError
else:
raise IOError
if is_signed and not self.signed:
raise IOError
if last_field:
self.paras.append(self.para)
elif len(self.paras):
self.para = self.paras[-1]
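# Hedged usage sketch (editorial addition): parse a multi-paragraph control
# file such as debian/control and print one field from each paragraph; the
# path is illustrative.
if __name__ == "__main__":
    control = ControlFile("debian/control", multi_para=True)
    for para in control.paras:
        print para.get("Package", para.get("Source"))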
|
underbluewaters/marinemap
|
refs/heads/master
|
lingcod/straightline_spacing/tests.py
|
1940
|
"""
This file demonstrates two different styles of tests (one doctest and one
unittest). These will both pass when you run "manage.py test".
Replace these with more appropriate tests for your application.
"""
from django.test import TestCase
class SimpleTest(TestCase):
def test_basic_addition(self):
"""
Tests that 1 + 1 always equals 2.
"""
self.failUnlessEqual(1 + 1, 2)
__test__ = {"doctest": """
Another way to test that 1 + 1 is equal to 2.
>>> 1 + 1 == 2
True
"""}
|
jnnk/pyethereum
|
refs/heads/master
|
tests/remoteblocksdata.py
|
4
|
data_poc5v23_1 = "f987d1f8b5f8b1a0b13262b2076a5c570c69324b09fe7b47dae0cb153871b763565176d18574a8c5a01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d4934794a39c2067eb45bc878818946d0f05a836b3da44faa0bb690805d24882bc7ccae6fc0f80ac146274d5b81c6a6e9c882cd9b0a649c9c7808340c0db208609184e72a000830ecab9808453bb008980a0a78d6e5a96a366e5646bd500401dcc3f30001689106413285e55b67d3034685dc0c0f8b5f8b1a0271ff55c8c82c3ef27c14ddbc59bd4808b3753a01cfa905f8bf6c3985e6f4d28a01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347943a587078e64080ada2f91adeb51bdd7f8f136311a0ed226b95fe7824304f41c99d6f10351d80c30eebdca125b5b9f29f0d58883ce9808340d10f1f8609184e72a000830ece6d808453bafe1480a0c6f12caadf53dc380e4cb4b8c7556afe0e241137e759c3027fadd50f800d3562c0c0f8b5f8b1a0f3fdc5d9f6ec118a148ae22e3e2d5c51d7082478b9aa6ba32aa730eb21790693a01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347943a587078e64080ada2f91adeb51bdd7f8f136311a065a23bb92099e5211408f0dd1992ec0b0e1bcb848bc8d33e45cb6ae8703e404b808340c0df1e8609184e72a000830ed222808453bafe0680a07d00a55749b71edd9e37828f3c531215bd890727fe9dbe50739d12ac4aba7eb4c0c0f8b5f8b1a032c98ca5bab8455f184d1f7a454532cee8017d707c5e29c7e4e594910d01c68fa01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347943a587078e64080ada2f91adeb51bdd7f8f136311a020fb6c5836b2b21769d35987e8b77c6fab76bba7d4a4a8cb34fd85ed4283322b808340d1131d8609184e72a000830ed5d8808453bafdd880a0022da9b780bf9e646d9a455345f1e98dcb48d310410616a144fba63b467b28a7c0c0f8b5f8b1a02c9073587481eb1bb86c6480f1b27ccdfac298bda143749c97dafb3de68a9efda01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347943a587078e64080ada2f91adeb51bdd7f8f136311a0f9f1890772b3460c8d8d0a278a4062ebcd4b9be1559e8c198cf48a59a92130fd808340c0e31c8609184e72a000830ed98f808453bafdd380a00bd3dbe4173945fb24771fd4222c12ae4b144247b49368fd04f50a71851e700ac0c0f8b5f8b1a002f39b0cc434ad0ec0f50b7e5c6a6898ab1e6eee78da95d96f1c4b38e012b7c6a01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347943a587078e64080ada2f91adeb51bdd7f8f136311a0b395beed419b1ff2eb80b609a22bcb1f881d7a804659200cfd8d6643612ea734808340b0b71b8609184e72a000830edd47808453bafdca80a07dd22132a6d8f5f1f3430cb3c7ef22cbf376aab77b729382a0581e757a4bb0f8c0c0f8b5f8b1a0c147b14e9f82772ff74144f3b0f63825333d16b522cf79fc2533f5cddc77cbf6a01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347943a587078e64080ada2f91adeb51bdd7f8f136311a01b44f646176094bd530694a36049e75da22334652d376f453c392567f9fe45df808340a08f1a8609184e72a000830ee100808453bafdb080a024568af94c4c5a8a2e04f1c451011130d864e21375c2ea61a47c11a3cc896d01c0c0f8b5f8b1a0734a8cd6f4a03188b5aa22dfa96914c6b75665d83b06bc3799fa95d457aede08a01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347943a587078e64080ada2f91adeb51bdd7f8f136311a024d268b988f999b98b95ce6c1bd714f81c81ee59b8facfe25490d21fcd251507808340906b198609184e72a000830ee4ba808453bafda080a0bef396e5c21eaf99cd90a5449dfcd75fb2c1c415ab9e7737149954d993ac2f29c0c0f8b5f8b1a01147fdca47f37bdaaf7ad4ae482ce53e95a85d9639ad8ce033a8f0726c85d39ea01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347943a587078e64080ada2f91adeb51bdd7f8f136311a001d8e3eadf4cbed5270ba84e4f912dfa4929b2a45b45dbd7010bf9f0cd04ca5d808340804b188609184e72a000830ee875808453bafd9480a0800f12131d82039470801e6c7db46846db07060434c2faa1585acf277b8a05e5c0c0f8b5f8b1a01c2b8d178c13ea905196029c557d337c26d8c4a83eb7e0bd536245edfe823195a01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347943a587078e64080ada2f91adeb51bdd7f8f136311a072a0eab0938590d74490aca0575cedb6a76378287701a90763b91
917b9b763c5808340702f178609184e72a000830eec31808453bafd9380a0d94a77d6021987d67812aae83187417f78ba3340278f979539aff80189395952c0c0f8b5f8b1a0a74ebbd94a509d3b8b7dbce7c6c64673f75b993004b114068fd9b8bcea598166a01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347943a587078e64080ada2f91adeb51bdd7f8f136311a009cdb9389d0831207fce00f24c0218d8cadacb5da9b3bdbeb51d27f254c46fc8808340804f168609184e72a000830eefed808453bafd5f80a090ab44de68adf239538ed5a1128969a5d47f8b80a5951b2c0a51c1515d5dda66c0c0f92a30f8d3a04f7bca143918a4e5bb2ae744e6cf33aa15d38f67073c2bd15304d063807c818fa01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347943a587078e64080ada2f91adeb51bdd7f8f136311a04096ceca9a0eeac6e8250a52bd63e80ad5b7c3f8b67e4fcd916814f7cd27c0eaa07f9c465a89990fea7e6ebaca40be4bcf8b63e95f393111186a926b8ada123c5983409073158609184e72a000830ef36782df1a8453bafd3480a0625640a7073a46818e2a41bfa81f9b71ddab07e9b8cd53d2c8a5dcf98820b775f92957f91584f9155d078609184e72a000830186a094000000000000000000000000000000000000000080b914f5737e70f9460402290a3e487dae01f610a1a8218fda6010576000601157600060125760006013577f72656700000000000000000000000000000000000000000000000000000000006000547f75736572730000000000000000000000000000000000000000000000000000006020546020600060406000600060105660645c03f150611465516100906000396000f2006000356000547f6b696c6c000000000000000000000000000000000000000000000000000000006000530e0f61003c59601056330e0f61003c5933ff6020356020546040356040547f72656700000000000000000000000000000000000000000000000000000000006000530e0f610388596015560f61008459600060005460206000f260406020530b61009859600060005460206000f27f67657400000000000000000000000000000000000000000000000000000000006060547f616374696f6e7300000000000000000000000000000000000000000000000000608054602060a060406060600060105660645c03f15060a0530f610142597f76616c6964617465000000000000000000000000000000000000000000000000606054336080546020606060406060600060a05360645c03f1506060536101425960206060f2604053560f61015559600060005460206000f2602053560f61016859600060005460206000f27f67657400000000000000000000000000000000000000000000000000000000006060547f75736572747970657300000000000000000000000000000000000000000000006080546020606060406060600060105660645c03f1506060536101cf5960206060f27f76616c69646174656e616d65000000000000000000000000000000000000000060805460205360a054602060806040608060003060645c03f1506080536102165960206080f27f63726561746500000000000000000000000000000000000000000000000000006080547f757365726461746100000000000000000000000000000000000000000000000060a0546020606060406080600060605360645c03f15060605361027d5960206060f27f637265617465686f6c64696e6773000000000000000000000000000000000000608054602060806020608060003060645c03f1507f706f7374636f6e7374000000000000000000000000000000000000000000000060a05460105660c05460403560e054602060a0606060a0600060805360645c03f1507f736574686f6c64696e677300000000000000000000000000000000000000000060a05460805360c054602060c0604060a0600060605360645c03f15060405360205357602053604053576060536001602053035760115661035b5960205360125761036f586020536002601356015760135660016020530157602053601357600160115601601157600160005460206000f27f64657265670000000000000000000000000000000000000000000000000000006000530e0f6105e25960406020530b6103c659600060005460206000f2602053566040546040536103de59600060005460206000f27f67657400000000000000000000000000000000000000000000000000000000006040547f616374696f6e73000000000000000000000000000000000000000000000000006060546020608060406040600060105660645c03f1506080530f610488597f76616c69646174650000000000000000000000000000000000000000000000006040
54336060546020604060406040600060805360645c03f1506040536104885960206040f260016020530156604054600260205301566060546060536104cb596040536104b859600060125760006013576104c7586000600260405301576040536013576104f9586040536104e5596000600160605301576060536012576104f95860405360016060530157606053600260405301576001602053036040547f676574686f6c64696e67730000000000000000000000000000000000000000006060546020606060206060600060405360645c03f1507f6b696c6c000000000000000000000000000000000000000000000000000000006080546020608060206080600060405360645c03f1507f6b696c6c000000000000000000000000000000000000000000000000000000006080546020608060206080600060605360645c03f15060006020535657600060205357600060016020530157600060026020530157600060016020530357600160115603601157600160005460206000f27f67657475736572646174610000000000000000000000000000000000000000006000530e0f6106425960406020530b61062059600060005460206000f2602053566000546000536106335960206000f26001602053035660005460206000f27f67657475736572646174616164647200000000000000000000000000000000006000530e0f6106a25960406020530b61068059600060005460206000f2602053566000546000536106935960206000f26001600053035660005460206000f27f6765746e69636b000000000000000000000000000000000000000000000000006000530e0f6106ec5960406020530b6106e059600060005460206000f26020535660005460206000f27f6765746e69636b616464720000000000000000000000000000000000000000006000530e0f6107365960406020530b61072a59600060005460206000f26020535660005460206000f27f6973696e67726f757000000000000000000000000000000000000000000000006000530e0f6107df59600060406020350b0f610774595060406040350b61078259600060005460206000f26001602035035660405460405361079d59600060005460206000f27f68617375736572000000000000000000000000000000000000000000000000006000546020356020546020602060406000600060405360645c03f15060206020f27f6973696e67726f757061646472000000000000000000000000000000000000006000530e0f61088959600060406020350b0f61081d595060406040350b61082b59600060005460206000f2600160203556035660405460405361084759600060005460206000f27f68617375736572000000000000000000000000000000000000000000000000006000546020356020546020602060406000600060405360645c03f15060206020f27f697367726f7570000000000000000000000000000000000000000000000000006000530e0f61092f59600060406020350b0f6108c7595060406040350b6108d559600060005460206000f2602035566040546040536108ed59600060005460206000f27f67657474797065000000000000000000000000000000000000000000000000006000546020356020546020602060406000600060405360645c03f15060206020f27f6c6f636b000000000000000000000000000000000000000000000000000000006000530e0f610a24596015560f61096b59600060005460206000f27f67657400000000000000000000000000000000000000000000000000000000006060547f616374696f6e7300000000000000000000000000000000000000000000000000608054602060a060406060600060105660645c03f15060a0530f610a15597f76616c6964617465000000000000000000000000000000000000000000000000606054336080546020606060406060600060a05360645c03f150606053610a155960206060f26001601557600160005460206000f27f756e6c6f636b00000000000000000000000000000000000000000000000000006000530e0f610b1859601556610a5f59600060005460206000f27f67657400000000000000000000000000000000000000000000000000000000006060547f616374696f6e7300000000000000000000000000000000000000000000000000608054602060a060406060600060105660645c03f15060a0530f610b09597f76616c6964617465000000000000000000000000000000000000000000000000606054336080546020606060406060600060a05360645c03f150606053610b095960206060f26000601557600160005460206000f27f69736c6f636b65640000000000000000000000000000000000000000000000006000530e0f610b4d5960155660005460206000f27f76616c696461746
56e616d6500000000000000000000000000000000000000006000530e0f610d9159600060605460036060530a0f610c3c596020536060531360805460016000602f6080530b0f610ba75950603a6080530a610c2159600060406080530b0f610bbf5950605b6080530a610c2159600060606080530b0f610bd75950607b6080530a610c2159600060bf6080530b0f610bef595060d76080530a610c2159600060d76080530b0f610c07595060f76080530a610c215950600060f76080530b0f610c2159506101006080530a610c2f59600060005460206000f2600160605301606054610b7c5860146060530a0f610d53596020536060531360805460016000602f6080530b0f610c675950603a6080530a610ceb59600060406080530b0f610c7f5950605b6080530a610ceb59600060606080530b0f610c975950607b6080530a610ceb59600060bf6080530b0f610caf595060d76080530a610ceb59600060d76080530b0f610cc7595060f76080530a610ceb59600060f76080530b0f610ce059506101006080530a610ceb595060006080530e610cf959600060005460206000f2608053610d465960016060530160605460146060530a0f610d3d59602053606053136080546080530f610d3059600060005460206000f2600160605301606054610d0958600160605303606054600160605301606054610c3c5860206060530a0f610d8759602053606053136080546080530f610d7a59600060005460206000f2600160605301606054610d5358600160005460206000f27f637265617465686f6c64696e67730000000000000000000000000000000000006000530e0f610dd85961068251610de360203960005460005360206000f060005460206000f2600060005460206000f2007f43617264626f61726420426f78000000000000000000000000000000000000006000546001600053577f706572736f6e616c000000000000000000000000000000000000000000000000600160005303577f6d69736300000000000000000000000000000000000000000000000000000000600260005303577f332d776865656c65642053686f7070696e6720436172740000000000000000006020546001602053577f706572736f6e616c000000000000000000000000000000000000000000000000600160205303577f6d697363000000000000000000000000000000000000000000000000000000006002602053035760026011576000536012576020536013576020536002600053015760005360016020530157602060a060a060a0600060805360645c03f150610542516101406020396000546000536020f2007f706f7374636f6e737400000000000000000000000000000000000000000000006000350e0f610052596010560f61003c59600060005460206000f2602035601057604035600957600160005460206000f260105661006359600060005460206000f27f6765746f776e65726164647265737300000000000000000000000000000000006000350e0f6100985960095660005460206000f26000356000546020356020547f6765746974656d000000000000000000000000000000000000000000000000006000530e0f6100ee5960406020530b6100e259600060005460206000f26020355660005460206000f27f6765746974656d66756c6c0000000000000000000000000000000000000000006000530e0f6101565960406020530b61012c59600060005460206000f26020355660005460016020350356602054600260203503566040546003602035035660605460806000f26040356040547f6164646974656d000000000000000000000000000000000000000000000000006000530e0f6102c65960406020530b61019a59600060005460206000f27f67657400000000000000000000000000000000000000000000000000000000006060547f616374696f6e7300000000000000000000000000000000000000000000000000608054602060a060406060600060105660645c03f15060a0530f610244597f76616c6964617465000000000000000000000000000000000000000000000000606054336080546020606060406060600060a05360645c03f1506060536102445960206060f2602053560f61026359604053602053560160205357600260005460206000f260405360205357606035600160205303576080356002602053035760a03560036020530357601156610299596020536012576102ad586020536002601356015760135660016020530157602053601357600160115601601157600160005460206000f27f72656d6f76656974656d000000000000000000000000000000000000000000006000530e0f61049e5960406020530b61030459600060005460206000f27f67657400000000000000000000000000000000000000000000000000000000
006060547f616374696f6e7300000000000000000000000000000000000000000000000000608054602060a060406060600060105660645c03f15060a0530f6103ae597f76616c6964617465000000000000000000000000000000000000000000000000606054336080546020606060406060600060a05360645c03f1506060536103ae5960206060f2602053566040546040536103c659600060005460206000f26040536040350a0f6103e7596040356040530360205357600260005460206000f2600160205301566040546002602053015660605460605361042a596040536104175960006012576000601357610426586000600260405301576040536013576104585860405361044459600060016060530157606053601257610458586040536001606053015760605360026040530157600060205357600060016020530157600060026020530157600060016020530357600060026020530357600060036020530357600160115603601157600160005460206000f27f6b696c6c000000000000000000000000000000000000000000000000000000006000350e0f610538597f67657400000000000000000000000000000000000000000000000000000000006000547f75736572730000000000000000000000000000000000000000000000000000006020546020600060406000600060105660645c03f150600053330e61053659600060005460206000f233ff600060005460206000f21ca0ffb72def2ddde38fac14808c61a51fe24a09cde00b9295cb34b666f82ed2b5f1a0617ac83371f26464828de7f380bdcdcdc3fe9409327f0ad0bc58be811c169783a07ec8c8c3b895aeaa57be761f1f2dbfdbe55b79f81e118cc972414aad9768896f827185f913cdf913a6088609184e72a000830186a094000000000000000000000000000000000000000080b9133e737e70f9460402290a3e487dae01f610a1a8218fda6010576000601157600060125760006013577f72656700000000000000000000000000000000000000000000000000000000006000547f75736572747970657300000000000000000000000000000000000000000000006020546020600060406000600060105660645c03f150610d625161011a60203960005460005360206000f06000546000537f75736572646174610000000000000000000000000000000000000000000000005760016011577f75736572646174610000000000000000000000000000000000000000000000006012577f75736572646174610000000000000000000000000000000000000000000000006013576104c251610e7c6000396000f200737e70f9460402290a3e487dae01f610a1a8218fda601057610d365161002c6020396000546000536020f2007f736574646f7567000000000000000000000000000000000000000000000000006000350e0f61004c596010560f61003c59600060005460206000f2602035601057600160005460206000f260105661005d59600060005460206000f27f67656e65726174650000000000000000000000000000000000000000000000006000350e0f6100a459610bf35161014360203960005460005360206000f060005460206000f27f67657400000000000000000000000000000000000000000000000000000000006000547f75736572747970657300000000000000000000000000000000000000000000006020546020604060406000600060105660645c03f1506000604053330e0f61013159507f6b696c6c000000000000000000000000000000000000000000000000000000006000350e0f6101385933ff600060005460206000f2007f757365726461746100000000000000000000000000000000000000000000000060015742600357610bb75161003c6020396000546000536020f2007f736574646f7567000000000000000000000000000000000000000000000000006000350e0f61004c596010560f61003c59600060005460206000f2602035601057600160005460206000f260105661005d59600060005460206000f26000356000546020356020547f73657475736572000000000000000000000000000000000000000000000000006000530e0f6100ca596101206020530b6100a859600060005460206000f26002560f6100ba59600060005460206000f2602035600257600160005460206000f27f61646475736572000000000000000000000000000000000000000000000000006000530e0f6101ff597f67657400000000000000000000000000000000000000000000000000000000006040547f616374696f6e73000000000000000000000000000000000000000000000000006060546020608060406000600060105660645c03f1506080530f61019e597f76616c6964617465000000000000000000000000000000000000000000000000604054336
060546020604060406040600060805360645c03f15060405361019e5960206040f26101206020530b6101b359600060005460206000f260403560205357610111566101cd59602053610112576101e35860205360026101135601576101135660016020530157602053610113576001610111560161011157600160005460206000f27f72656d6f766575736572000000000000000000000000000000000000000000006000530e0f6103a9596101206020530b61023e59600060005460206000f26020535661025059600060005460206000f27f67657400000000000000000000000000000000000000000000000000000000006040547f616374696f6e73000000000000000000000000000000000000000000000000006060546020608060406000600060105660645c03f1506080530f610307597f76616c6964617465000000000000000000000000000000000000000000000000604054336060546020604060406040600060805360645c03f15060016040536102fe595060205356330e6103075960206040f2600160205301566040546002602053015660605460605361034d5960405361033959600061011257600061011357610349586000600260405301576040536101135761037c58604053610368596000600160605301576060536101125761037c5860605360026040530157604053600160605301576000602053576000600160205301576000600260205301576001610111560361011157600160005460206000f27f68617375736572000000000000000000000000000000000000000000000000006000530e0f6103f4596101206020530b6103e859600060005460206000f26020535660005460206000f27f67657474797065000000000000000000000000000000000000000000000000006000350e0f6104295960015660005460206000f27f63617061636974790000000000000000000000000000000000000000000000006000350e0f61045e5960025660005460206000f27f73657463617061636974790000000000000000000000000000000000000000006000350e0f61049259600060005460206000f27f63757272656e7473697a650000000000000000000000000000000000000000006000350e0f6104c8596101115660005460206000f27f636c6561720000000000000000000000000000000000000000000000000000006000350e0f6106e1596101115661050459600160005460206000f27f67657400000000000000000000000000000000000000000000000000000000006040547f616374696f6e73000000000000000000000000000000000000000000000000006060546020608060406040600060105660645c03f1506080530f6105ae597f76616c6964617465000000000000000000000000000000000000000000000000604054336060546020604060406040600060805360645c03f1506040536105ae5960206040f27f67657400000000000000000000000000000000000000000000000000000000006000547f75736572730000000000000000000000000000000000000000000000000000006020546020600060406000600060105660645c03f15060025660a0547f6765746e69636b0000000000000000000000000000000000000000000000000060405460a053606054602060a060406040600060005360645c03f150610112566020546020530f6106c5597f72656d6f7665757365720000000000000000000000000000000000000000000060605460a053608054602060406040606060006020535660645c03f1506020536040546002602053015660205460006040535760006001604053015760006002604053015761065358600061011157600061011257600061011357600160005460206000f27f7365746e616d65000000000000000000000000000000000000000000000000006000350e0f61071559600060005460206000f27f676574746f6b656e7300000000000000000000000000000000000000000000006000530e0f61074a5960045660005460206000f27f616464746f6b656e7300000000000000000000000000000000000000000000006000530e0f610832597f67657400000000000000000000000000000000000000000000000000000000006040547f616374696f6e73000000000000000000000000000000000000000000000000006060546020608060406000600060105660645c03f1506080530f61081e597f76616c6964617465000000000000000000000000000000000000000000000000604054336060546020604060406040600060805360645c03f15060405361081e5960206040f260203560045601600457600160005460206000f27f72656d6f7665746f6b656e7300000000000000000000000000000000000000006000530e0f610930596020356004560a0f61087259600060005460
206000f27f67657400000000000000000000000000000000000000000000000000000000006040547f616374696f6e73000000000000000000000000000000000000000000000000006060546020608060406000600060105660645c03f1506080530f61091c597f76616c6964617465000000000000000000000000000000000000000000000000604054336060546020604060406040600060805360645c03f15060405361091c5960206040f260203560045603600457600160005460206000f27f736574686f6c64696e67730000000000000000000000000000000000000000006000530e0f6109d8597f67657400000000000000000000000000000000000000000000000000000000006000547f75736572730000000000000000000000000000000000000000000000000000006020546020602060406000600060105660645c03f150602053330e6109c859600060005460206000f2602035600557600160005460206000f27f676574686f6c64696e67730000000000000000000000000000000000000000006000530e0f610a0d5960055660005460206000f27f736574686f6d65000000000000000000000000000000000000000000000000006000530e0f610af1597f67657400000000000000000000000000000000000000000000000000000000006040547f616374696f6e73000000000000000000000000000000000000000000000000006060546020608060406000600060105660645c03f1506080530f610ae1597f76616c6964617465000000000000000000000000000000000000000000000000604054336060546020604060406040600060805360645c03f150604053610ae15960206040f2602035600657600160005460206000f27f676574686f6d65000000000000000000000000000000000000000000000000006000530e0f610b265960065660005460206000f27f6b696c6c000000000000000000000000000000000000000000000000000000006000530e0f610bb7597f67657400000000000000000000000000000000000000000000000000000000006000547f616374696f6e73000000000000000000000000000000000000000000000000006020546020600060406000600060105660645c03f150600053330e610bb5590033ff6000356000546020356020547f63726561746500000000000000000000000000000000000000000000000000006000530e0f6100d65960406020530b61004a59600060005460206000f2602035566000547f67656e65726174650000000000000000000000000000000000000000000000006020546020604060206020600060005360645c03f1506040536100945960206040f27f736574646f7567000000000000000000000000000000000000000000000000006000546010566020546020606060406000600060405360645c03f15060206040f27f68617374797065000000000000000000000000000000000000000000000000006000530e0f6101205960406020530b61011459600060005460206000f26020355660005460206000f27f6b696c6c000000000000000000000000000000000000000000000000000000006000530e0f61015659601056330e0f6101565933ff6040356040547f72656700000000000000000000000000000000000000000000000000000000006000530e0f6102e55960406020530b61019a59600060005460206000f27f67657400000000000000000000000000000000000000000000000000000000006060547f616374696f6e7300000000000000000000000000000000000000000000000000608054602060a060406060600060105660645c03f15060a0530f610244597f76616c6964617465000000000000000000000000000000000000000000000000606054336080546020606060406060600060a05360645c03f1506060536102445960206060f2602053560f61025759600060005460206000f27f736574646f756700000000000000000000000000000000000000000000000000606054601056608054602060a060406060600060405360645c03f15060a0536102a059602060a0f2604053602053576011566102b8596020536012576102cc586020536002601356015760135660016020530157602053601357600160115601601157600160005460206000f27f64657265670000000000000000000000000000000000000000000000000000006000530e0f6104b85960406020530b61032359600060005460206000f27f67657400000000000000000000000000000000000000000000000000000000006060547f616374696f6e7300000000000000000000000000000000000000000000000000608054602060a060406060600060105660645c03f15060a0530f6103cd597f76616c6964617465000000000000000000000000000000000000000000000000606
054336080546020606060406060600060a05360645c03f1506060536103cd5960206060f2602053566040546040536103e559600060005460206000f27f6b696c6c000000000000000000000000000000000000000000000000000000006060546020606060206060600060405360645c03f150600160205301566040546002602053015660605460605361045f5960405361044c596000601257600060135761045b5860006002604053015760405360135761048d586040536104795960006001606053015760605360125761048d586040536001606053015760605360026040530157600060205357600060016020530157600060026020530157600160115603601157600160005460206000f2600060005460206000f21ca027da8baf46f1cd5a0ecd68952fad8719c9bf4ced7c485914c20bb361df097584a04c0122707d1fbdfe0089b123fa971f6226a4ba75c6d056fc0bc5649b6d6c0d75a010a6b025f7a3469c2410c1776bb149fc87f59ab56e100e31caf388c657495fbe82df1ac0f8b5f8b1a043e72c29aa5860c216da88a9200ee79fb1d538daaf0c0151328591b593b311aba01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347943a587078e64080ada2f91adeb51bdd7f8f136311a0be6291068d47488ed7f776e577f1e1b2c35ad470b1448f2e06585e7a2eb20da08083408053148609184e72a000830ef725808453bafd2c80a0c59a60330f48d905fc48af746083c6c5b6a9dadb2350e29f592fd324124d6505c0c0f91155f8d3a09cd5763a78981b7d5e23cd0ba421d398f3f7d0ca93ef1cad3c4dce1f734d9bd4a01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347943a587078e64080ada2f91adeb51bdd7f8f136311a0751ecd77008fb856dd265ac8b3db2760867d0afa7d9056855bf3a78a33d6cb89a0bb14a37ae0a653a853794839c56b2c96bdfb11a08b7c06b0bce6d61e0a79372083407037138609184e72a000830efabf827be28453bafd2480a0b08806644e233f96e19be0029ef1a2b2564bc431ed0d6866c5ebc930f140f5d9f9107cf91079f91052068609184e72a000830186a094000000000000000000000000000000000000000080b90fea63100000006011576310003e80601257603260135761100a60145763200000006021576320000c806022576103e860235761200a60245763300000006031576330000320603257620f424060335761300a60345763400000006041576340000010604257633b9aca006043576140016044577f436c6f7665720000000000000000000000000000000000000000000000000000611000577f4275747465726375700000000000000000000000000000000000000000000000611001577f53756e666c6f7765720000000000000000000000000000000000000000000000611002577f426c756562656c6c000000000000000000000000000000000000000000000000611003577f4c6f747573000000000000000000000000000000000000000000000000000000611004577f506f707079000000000000000000000000000000000000000000000000000000611005577f526f736500000000000000000000000000000000000000000000000000000000611006577f4c6176656e646172000000000000000000000000000000000000000000000000611007577f54756c6970000000000000000000000000000000000000000000000000000000611008577f526f73656d617279000000000000000000000000000000000000000000000000611009577f4d61676e6f6c6961000000000000000000000000000000000000000000000000612000577f4a756e6970657200000000000000000000000000000000000000000000000000612001577f4c696c6163000000000000000000000000000000000000000000000000000000612002577f446170686e650000000000000000000000000000000000000000000000000000612003577f4865617468657200000000000000000000000000000000000000000000000000612004577f4163616369610000000000000000000000000000000000000000000000000000612005577f486f6c6c79000000000000000000000000000000000000000000000000000000612006577f43616d656c6c6961000000000000000000000000000000000000000000000000612007577f4a61736d696e6500000000000000000000000000000000000000000000000000612008577f486f7274656e7369610000000000000000000000000000000000000000000000612009577f57696c6c6f770000000000000000000000000000000000000000000000000000613000577f50696e6500000000000000000000000000000000000000000000000000000000613001577f436564617200000000000000000000000000
0000000000000000000000000000613002577f4269726368000000000000000000000000000000000000000000000000000000613003577f45626f6e79000000000000000000000000000000000000000000000000000000613004577f4f616b0000000000000000000000000000000000000000000000000000000000613005577f526564776f6f6400000000000000000000000000000000000000000000000000613006577f4d616e67726f7665000000000000000000000000000000000000000000000000613007577f4173680000000000000000000000000000000000000000000000000000000000613008577f4379707265737300000000000000000000000000000000000000000000000000613009577f4368617465617520646520444f5547000000000000000000000000000000000061400057737e70f9460402290a3e487dae01f610a1a8218fda6010577f72656700000000000000000000000000000000000000000000000000000000006000547f7265616c657374617465000000000000000000000000000000000000000000006020546020600060406000600060105660645c03f150610a7c5161056e6000396000f2006000356000546020356020546040356040547f67657474797065000000000000000000000000000000000000000000000000006000530e0f6101705960203560005460006011566000530a61005759506012566000530a0f61008b597f61706172746d656e74000000000000000000000000000000000000000000000060005460135660205460406000f260006021566000530a6100a059506022566000530a0f6100d4597f686f75736500000000000000000000000000000000000000000000000000000060005460235660205460406000f260006031566000530a6100e959506032566000530a0f61011d597f6d616e73696f6e0000000000000000000000000000000000000000000000000060005460335660205460406000f260006041566000530a61013259506042566000530a0f610166597f636173746c65000000000000000000000000000000000000000000000000000060005460435660205460406000f2600060005460206000f27f67657464656661756c74707269636500000000000000000000000000000000006000530e0f6101a55960335660005460206000f27f73657464656661756c74707269636500000000000000000000000000000000006000530e0f61036b597f67657400000000000000000000000000000000000000000000000000000000006040547f616374696f6e73000000000000000000000000000000000000000000000000006060546020608060406000600060105660645c03f1506080530f610279597f76616c6964617465000000000000000000000000000000000000000000000000604054336060546020604060406040600060805360645c03f1506040536102795960206040f27f61706172746d656e7400000000000000000000000000000000000000000000006020350e0f6102b359604035601357600160005460206000f27f686f7573650000000000000000000000000000000000000000000000000000006020350e0f6102ed59604035602357600160005460206000f27f6d616e73696f6e000000000000000000000000000000000000000000000000006020350e0f61032759604035603357600160005460206000f27f636173746c6500000000000000000000000000000000000000000000000000006020350e0f61036159604035604357600160005460206000f2600060005460206000f27f6765746f776e65720000000000000000000000000000000000000000000000006000530e0f6103a1596020535660005460206000f27f7365746f776e65720000000000000000000000000000000000000000000000006000530e0f61049959602053560f6103de59600060005460206000f27f67657400000000000000000000000000000000000000000000000000000000006040547f616374696f6e73000000000000000000000000000000000000000000000000006060546020608060406000600060105660645c03f1506080530f610488597f76616c6964617465000000000000000000000000000000000000000000000000604054336060546020604060406040600060805360645c03f1506040536104885960206040f260403560205357600160005460206000f27f67657470726963650000000000000000000000000000000000000000000000006000530e0f6104d2596001602053015660005460206000f27f73657470726963650000000000000000000000000000000000000000000000006000530e0f6105ba597f67657400000000000000000000000000000000000000000000000000000000006040547f616374696f6e7300000000000000000000000000000
0000000000000000000006060546020608060406000600060105660645c03f1506080530f6105a6597f76616c6964617465000000000000000000000000000000000000000000000000604054336060546020604060406040600060805360645c03f1506040536105a65960206040f260403560016020530157600160005460206000f27f7472616e736665726f776e6572736869700000000000000000000000000000006000530e0f6106ba59602053566105f659600060005460206000f27f67657400000000000000000000000000000000000000000000000000000000006040547f616374696f6e73000000000000000000000000000000000000000000000000006060546020608060406000600060105660645c03f1506080530f6106a0597f76616c6964617465000000000000000000000000000000000000000000000000604054336060546020604060406040600060805360645c03f1506040536106a05960206040f260403560205357600060016020530157600160005460206000f27f72656c656173656f776e657273686970000000000000000000000000000000006000530e0f6107b959602053566106f659600060005460206000f27f67657400000000000000000000000000000000000000000000000000000000006040547f616374696f6e73000000000000000000000000000000000000000000000000006060546020608060406000600060105660645c03f1506080530f6107a0597f76616c6964617465000000000000000000000000000000000000000000000000604054336060546020604060406040600060805360645c03f1506040536107a05960206040f2600060205357600060016020530157600160005460206000f27f6e616d65636173746c65000000000000000000000000000000000000000000006000530e0f6108125932602053560e6107f759600060005460206000f26041566020530360005461040060005301600054604035600053577f63726561746500000000000000000000000000000000000000000000000000006000530e0f610a72596020535661084e59600060005460206000f27f67657400000000000000000000000000000000000000000000000000000000006040547f616374696f6e73000000000000000000000000000000000000000000000000006060546020608060406000600060105660645c03f1506080530f6108f8597f76616c6964617465000000000000000000000000000000000000000000000000604054336060546020604060406040600060805360645c03f1506040536108f85960206040f27f61706172746d656e7400000000000000000000000000000000000000000000006020350e0f61093b5960145660005461384060125601601257600a601456016014577f686f7573650000000000000000000000000000000000000000000000000000006020350e0f61097e59602456600054610b4060225601602257600a602456016024577f6d616e73696f6e000000000000000000000000000000000000000000000000006020350e0f6109c1596034566000546102d060325601603257600a603456016034577f636173746c6500000000000000000000000000000000000000000000000000006020350e0f610a0359604456600054601060425601604257600a6044560160445760403560005357606035600160005301576080356002600053015760a0356003600053015760c0356004600053015760e035600560005301576101003560066000530157610120356007600053015761014035600860005301576101603560096000530157600160005460206000f2600060005460206000f21ba08bca8f3eee850e0741f5dbf95bb5af710330da78a6c5a5be1a405f0e371f0ab6a029a5a1eeb5e3b2554844c7a39d7829a6b1934a97f49e811517c0d14106cd018ca0f2fba8a1e2d66aa04b7ec9efe689b8188e77b03e238d4759f1f3e7b504b7eecc827be2c0f90ba0f8d3a065eab40cc4a5092c1fbd8d9b072db24dfaede926cc4c3b8163b41f8da88b8868a01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347943a587078e64080ada2f91adeb51bdd7f8f136311a0edfd8c2c63ec04a6da48a6e57e9dffd29f0716b1ad1683bbeceefd7ecf63f1cda0eccd724363bf7a540c1f2b6780605565db9cab6689372eb5b5defb0ca1bc70558340601f128609184e72a000830efe6c823fd98453bafd0f80a00188fdfbea0ceb86e026735ecef6e8283118ce967575bd8273edd5404e951dadf90ac7f90ac4f90a9d058609184e72a000830186a094000000000000000000000000000000000000000080b90a35737e70f9460402290a3e487dae01f610a1a8218fda6010576000601157600060125760006013577f72656700000000000000000000000000000000000000
000000000000000000006000547f706f6c6c747970657300000000000000000000000000000000000000000000006020546020600060406000600060105660645c03f1506101ec5161011a60203960005460005360206000f06000546000537f6175746f706173730000000000000000000000000000000000000000000000005760016011577f6175746f706173730000000000000000000000000000000000000000000000006012577f6175746f7061737300000000000000000000000000000000000000000000000060135761072f516103066000396000f200737e70f9460402290a3e487dae01f610a1a8218fda6010576101c05161002c6020396000546000536020f2007f736574646f7567000000000000000000000000000000000000000000000000006000350e0f61004c596010560f61003c59600060005460206000f2602035601057600160005460206000f260105661005d59600060005460206000f27f69736175746f70617373000000000000000000000000000000000000000000006000350e0f61009159600160005460206000f27f696e6974000000000000000000000000000000000000000000000000000000006000350e0f6100c559600160005460206000f27f67657400000000000000000000000000000000000000000000000000000000006000350e0f6100f8593060005460206000f27f646f6175746f70617373000000000000000000000000000000000000000000006000350e0f61012c59600160005460206000f27f67657400000000000000000000000000000000000000000000000000000000006000547f706f6c6c747970657300000000000000000000000000000000000000000000006020546020604060406000600060105660645c03f1506000604053330e0f6101b959507f6b696c6c000000000000000000000000000000000000000000000000000000006000350e0f6101c05933ff6000356000546020356020547f6b696c6c000000000000000000000000000000000000000000000000000000006000530e0f61004259601056330e0f6100425933ff6040356040547f72656700000000000000000000000000000000000000000000000000000000006000530e0f6102505960406020530b61008659600060005460206000f27f67657400000000000000000000000000000000000000000000000000000000006060547f616374696f6e7300000000000000000000000000000000000000000000000000608054602060a060406060600060105660645c03f15060a0530f610130597f76616c6964617465000000000000000000000000000000000000000000000000606054336080546020606060406060600060a05360645c03f1506060536101305960206060f2602053560f61014359600060005460206000f27f736574646f756700000000000000000000000000000000000000000000000000606054601056608054602060a060406060600060405360645c03f15060a05361018c59602060a0f2604053602053577f69736175746f7061737300000000000000000000000000000000000000000000606054602060a060206060600060405360645c03f15060a053610212597f67657474696d656c696d69740000000000000000000000000000000000000000606054602060a060206060600060405360645c03f15060a0536001602053035760115661022359602053601257610237586020536002601356015760135660016020530157602053601357600160115601601157600160005460206000f27f64657265670000000000000000000000000000000000000000000000000000006000530e0f61042c5960406020530b61028e59600060005460206000f27f67657400000000000000000000000000000000000000000000000000000000006060547f616374696f6e73000000000000000000000000000000000000000000000000006080546020604060406060600060105660645c03f1506040530f610338597f76616c6964617465000000000000000000000000000000000000000000000000606054336080546020606060406060600060405360645c03f1506060536103385960206060f26020535660405460405361035059600060005460206000f27f6b696c6c000000000000000000000000000000000000000000000000000000006060546020606060206060600060405360645c03f15060016020530156604054600260205301566060546060536103ca596040536103b759600060125760006013576103c6586000600260405301576040536013576103f8586040536103e4596000600160605301576060536012576103f8586040536001606053015760605360026040530157600060205357600060016020530357600060016020530157600060026020530157600160115603601157600160005460206
000f27f686173706f6c6c000000000000000000000000000000000000000000000000006000530e0f6104765960406020530b61046a59600060005460206000f26020355660005460206000f27f63726561746500000000000000000000000000000000000000000000000000006000530e0f6105c35960406020530b6104b459600060005460206000f2602035566000547f67657400000000000000000000000000000000000000000000000000000000006020546020604060206020600060005360645c03f1506000536040530e6105be597f736574646f7567000000000000000000000000000000000000000000000000006000546010566020546020606060406000600060405360645c03f1506060536105465960206060f27f73657474696d656c696d69740000000000000000000000000000000000000000600054600160203503566020546020606060406000600060405360645c03f1507f696e6974000000000000000000000000000000000000000000000000000000006000546020600060206000600060405360645c03f15060206040f27f67657474696d656c696d697400000000000000000000000000000000000000006000530e0f6106235960406020530b61060159600060005460206000f2602053566000546000536106145960206000f26001602053035660205460206020f26040356040547f73657474696d656c696d697400000000000000000000000000000000000000006000530e0f6107255960406020530b61066759600060005460206000f27f67657400000000000000000000000000000000000000000000000000000000006060547f616374696f6e7300000000000000000000000000000000000000000000000000608054602060a060406060600060105660645c03f15060a0530f610711597f76616c6964617465000000000000000000000000000000000000000000000000606054336080546020606060406060600060a05360645c03f1506060536107115960206060f260403560016020350357600160005460206000f2600060005460206000f21ca03ff24c45547b7d7ec2613a1fbcf40960dd57f455e09581b8622f77a2d06a29fca00bd38b28fafc431161e56b4506bd791e65e0e28f1420761fa7bcd29f035860f0a08f46670f1f18e87a37e7982e7a7bd349f83b89a2f11ce0d657570277fc557657823fd9c0f907b7f8d3a04afcf6900e3285f43a97950bdd2411796fb9c08a2c3947781863ac7570f67f93a01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347943a587078e64080ada2f91adeb51bdd7f8f136311a0001079829e5c0a5e895ff93e9e650e3fd928c75182a3cdebcb90b1288cd8c071a07f865c3a7c24d53ec9e19e1eb0ebe28cc689335148ab747f3bab9773c32b1dd58340500b118609184e72a000830f0220822b508453bafd0b80a01ce05d654c4d4b1c00d6fbd1b666f76bb7de0fa2b49724a61a5219a31e867e0cf906def906dbf906b4048609184e72a000830186a094000000000000000000000000000000000000000080b9064c737e70f9460402290a3e487dae01f610a1a8218fda6010576000601157601960125762010020601357600260145760076015576000601657600060175760006018577f72656700000000000000000000000000000000000000000000000000000000006000547f6d61726b6574706c6163650000000000000000000000000000000000000000006020546020600060406000600060105660645c03f1506105a1516100ab6000396000f20060003560005460203560205460007f6b696c6c000000000000000000000000000000000000000000000000000000006000530e0f61003e5950601056330e0f6100455933ff60007f706f7374000000000000000000000000000000000000000000000000000000006000530e0f610078595060406020530b0f6101d2597f67657400000000000000000000000000000000000000000000000000000000006060547f616374696f6e7300000000000000000000000000000000000000000000000000608054602060a060406060600060105660645c03f15060a0530f610127597f76616c6964617465000000000000000000000000000000000000000000000000606054336080546020606060406060600060a05360645c03f1506060536101275960206060f260006011560b610145596013566060546015566013560160135761015e586012565660605460016012560360125760016011560360115760165661017a59606053601757606053601857600160165761019a58606053600160185601576018566060535760605360185760016016560160165732600260605301574260036060530157602035600460605301576040356005606053015760603560066060530157600160005460206000f260
007f72656d6f766500000000000000000000000000000000000000000000000000006000530e0f6102085950620100206020530a0f0f6103b3597f67657400000000000000000000000000000000000000000000000000000000006060547f616374696f6e7300000000000000000000000000000000000000000000000000608054602060a060406060600060105660645c03f15060a0530f6102b7597f76616c6964617465000000000000000000000000000000000000000000000000606054336080546020606060406060600060a05360645c03f1506060536102b75960206060f26020535660405460016020530156606054600060006040530e0f6102dc595060006060530e61033b5960605361030f596040536102fc596000601757600060185761030b586000600160405301576040536018576103375860405361032659600060605357606053601757610337586040536060535760605360016040530157610345586000601757600060185760016016560360165760016012560160405460205360405357604053601257600160115601601157600060205357600060016020530157600060026020530157600060036020530157600060046020530157600060056020530157600060066020530157600160005460206000f260007f676574706f7374657200000000000000000000000000000000000000000000006000530e0f6103e95950620100206020530a0f0f6103fd596002602053015660005460206000f260007f67657474696d657374616d7000000000000000000000000000000000000000006000530e0f6104335950620100206020530a0f0f610447596003602053015660005460206000f260007f6765746974656d6e616d650000000000000000000000000000000000000000006000530e0f61047d5950620100206020530a0f0f610491596004602053015660005460206000f260007f676574616d6f756e7400000000000000000000000000000000000000000000006000530e0f6104c75950620100206020530a0f0f6104db596005602053015660005460206000f260007f67657470726963650000000000000000000000000000000000000000000000006000530e0f6105115950620100206020530a0f0f610525596006602053015660005460206000f260007f676574656e7472790000000000000000000000000000000000000000000000006000530e0f61055b5950620100206020530a0f0f61059759600260205301566000546003602053015660205460046020530156604054600560205301566060546006602053015660805460a06000f2600060005460206000f21ba0cdebf68bd942608dc9cdcf143f4ff5f0839b0c189edf47b4870cc547018b30fda01c05170fe369f8bda642468f223d806f2dd937dd8341dfaaadc6a68c2a8d6b2da0f25206d4cd2af9e414adc9896bceda395e7393567be2453e951516ef5f824436822b50c0f90e71f8d3a0b8b701d38a5fd23af61d7ee5ed581017ecb536089b028beec5e49d56c8736dcea01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347943a587078e64080ada2f91adeb51bdd7f8f136311a0fcf18ebc6aed8e603b3af550d161d7bceb9b4fb4c6e4ebe4b952fc5f25bce5b2a09b78a9c8318aa6470ee43560f99741a87a869c78e120f27b58c046726aade81683406023108609184e72a000830f05c88256c48453bafcc680a096a950e6c44ad1901ef2a4ce040da12a6462a80c4936e79567893bf3d544617af90d98f907aef90787028609184e72a000830186a094000000000000000000000000000000000000000080b9071f737e70f9460402290a3e487dae01f610a1a8218fda60105760046011576020601257678ac7230489e80000601357670de0b6b3a76400006015577f72656700000000000000000000000000000000000000000000000000000000006000547f62616e6b000000000000000000000000000000000000000000000000000000006020546020600060406000600060105660645c03f15061067c516100a36000396000f2006000356000546020356020547f62616c616e6365000000000000000000000000000000000000000000000000006000530e0f61004059303160005460206000f27f7365746d6178656e646f776d656e7400000000000000000000000000000000006000530e0f6100b35967016345785d8a00006020350a0f61008659600060005460206000f268056bc75e2d631000006020350b0f6100a359600060005460206000f2602035601357600160005460206000f27f6d6178656e646f776d656e7400000000000000000000000000000000000000006000530e0f6100e85960135660005460206000f27f6465706f736974000000000000000000000000000000000000000000000000006000530e0f610183596
7016345785d8a0000340a0f61012c59600060005460206000f26012566000547f4465706f736974000000000000000000000000000000000000000000000000006000535742600160005301573260026000530157346003600053015760115660125601601257600160005460206000f27f656e646f770000000000000000000000000000000000000000000000000000006000530e0f6102f1597f67657400000000000000000000000000000000000000000000000000000000006060547f616374696f6e7300000000000000000000000000000000000000000000000000608054602060a060406060600060105660645c03f15060a0530f610257597f76616c6964617465000000000000000000000000000000000000000000000000606054336080546020606060406060600060a05360645c03f1506060536102575960206060f26013566040350b0f61026d59600060005460206000f260403530310a0f61028259600060005460206000f2600060006000600060403560203560645c03f1506012566000547f456e646f776d656e740000000000000000000000000000000000000000000000600053574260016000530157602035600260005301576040356003600053015760115660125601601257600160005460206000f27f676574746f6b656e7072696365000000000000000000000000000000000000006000530e0f6103265960155660005460206000f27f736574746f6b656e7072696365000000000000000000000000000000000000006000530e0f610400597f67657400000000000000000000000000000000000000000000000000000000006040547f616374696f6e73000000000000000000000000000000000000000000000000006060546020608060406000600060105660645c03f1506080530f6103fa597f76616c6964617465000000000000000000000000000000000000000000000000604054336060546020604060406040600060805360645c03f1506040536103fa5960206040f26020356015577f73656c6c746f6b656e73000000000000000000000000000000000000000000006000530e0f610539597f67657400000000000000000000000000000000000000000000000000000000006040547f616374696f6e73000000000000000000000000000000000000000000000000006060546020608060406000600060105660645c03f1506080530f6104d4597f76616c6964617465000000000000000000000000000000000000000000000000604054336060546020604060406040600060805360645c03f1506040536104d45960206040f2602035601653016016576012566000547f536f7665726569676e7320736f6c640000000000000000000000000000000000600053574260016000530157604035600260005301576020356003600053015760115660125601601257600160005460206000f27f6177617264746f6b656e730000000000000000000000000000000000000000006000530e0f610672597f67657400000000000000000000000000000000000000000000000000000000006040547f616374696f6e73000000000000000000000000000000000000000000000000006060546020608060406000600060105660645c03f1506080530f61060d597f76616c6964617465000000000000000000000000000000000000000000000000604054336060546020604060406040600060805360645c03f15060405361060d5960206040f2602035601756016017576012566000547f536f7665726569676e7320617761726465640000000000000000000000000000600053574260016000530157604035600260005301576020356003600053015760115660125601601257600160005460206000f2600060005460206000f21ba0c76e0be6db4ad408c9dc3fc00a2d1350c4f0a8a312b4cb907a74669bd09d7018a046a4d862181443df5a4154f588d7b26c5709bc7e63a5520dea7ac2351eb2d481a0d2b9c430d75304113420eb0030e980fb44df4b9ba7c5c1fde233ddb166df7853822dddf905e4f905bd038609184e72a000830186a094000000000000000000000000000000000000000080b90555737e70f9460402290a3e487dae01f610a1a8218fda6010577f43617264626f61726420426f78000000000000000000000000000000000000006000547f6d69736300000000000000000000000000000000000000000000000000000000600053576001600160005303577f332d776865656c65642053686f7070696e6720436172740000000000000000006020547f6d6973630000000000000000000000000000000000000000000000000000000060205357600160016020530357600260115760005360125760205360135760205360026000530157600053600160205301577f7265670000000000000000000000000000000000
0000000000000000000000006000547f6974656d730000000000000000000000000000000000000000000000000000006020546020600060406000600060105660645c03f150610405516101506020396000546000536020f2006000356000546020356020547f6765746974656d000000000000000000000000000000000000000000000000006000530e0f6100565960406020530b61004a59600060005460206000f26020355660005460206000f27f6765746974656d66756c6c0000000000000000000000000000000000000000006000530e0f6100b45960406020530b61009459600060005460206000f260203556600054600160203503566020546002602035035660405460606000f26040356040547f72656769737465726974656d00000000000000000000000000000000000000006000530e0f61020e5960406020530b6100f859600060005460206000f2602053560f61010b59600060005460206000f27f67657400000000000000000000000000000000000000000000000000000000006060547f616374696f6e7300000000000000000000000000000000000000000000000000608054602060a060406060600060105660645c03f15060a0530f6101b5597f76616c6964617465000000000000000000000000000000000000000000000000606054336080546020606060406060600060a05360645c03f1506060536101b55960206060f26040536020535760603560016020530357608035600260205303576011566101e1596020536012576101f5586020536002601356015760135660016020530157602053601357600160115601601157600160005460206000f27f72656d6f76656974656d000000000000000000000000000000000000000000006000530e0f6103bc5960406020530b61024c59600060005460206000f26020535660405460405361026459600060005460206000f27f67657400000000000000000000000000000000000000000000000000000000006060547f616374696f6e7300000000000000000000000000000000000000000000000000608054602060a060406060600060105660645c03f15060a0530f61030e597f76616c6964617465000000000000000000000000000000000000000000000000606054336080546020606060406060600060a05360645c03f15060605361030e5960206060f260016020530156604054600260205301566060546060536103515960405361033e596000601257600060135761034d5860006002604053015760405360135761037f5860405361036b5960006001606053015760605360125761037f586040536001606053015760605360026040530157600060205357600060016020530157600060026020530157600060016020530357600060026020530357600160115603601157600160005460206000f27f6b696c6c000000000000000000000000000000000000000000000000000000006000350e0f6103fb59601056330e6103f959600060005460206000f233ff600060005460206000f21ca07e14d3715a2221a6e2dd94088f998cacc6efe4636ff811f7addd4c236d056652a00c4fb93415deb24f477f058623dcc04bd795737210597df676fe37b48b2dcdd0a020f8f9f94b8b22593cfebbccbace12ee704efd9f5b57fbb7c25fbe6f6058248a8256c4c0f91307f8d3a0aeb3667f5c17149bea0a902796b5d7f83d31b0e7dafc8f0d66f409beafc48acba01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347943a587078e64080ada2f91adeb51bdd7f8f136311a010da6cee97165174fdc4aedf39f6b736205ba6cf5c46e85bbfb0130f9301fdd3a0d83b649aeb2b1d12b10edd8b494351e63023168b934f61dffea12127a390f7f88340703f0f8609184e72a000830f096c82668a8453bafc6180a0e1fa3ded62c7f655a9614d8c05ccf224fafc507c0a1ed026f98ff15292a682acf9122ef9122bf91204018609184e72a000830186a094000000000000000000000000000000000000000080b9119c737e70f9460402290a3e487dae01f610a1a8218fda6010576000601157600060125760006013577f72656700000000000000000000000000000000000000000000000000000000006000547f616374696f6e74797065730000000000000000000000000000000000000000006020546020600060406000600060105660645c03f1506108d6516108c660203960005460005360206000f06000546000537f616464616374696f6e0000000000000000000000000000000000000000000000577f6175746f7061737300000000000000000000000000000000000000000000000060017f616464616374696f6e0000000000000000000000000000000000000000000000035760016011577f616464616374696f6e000000000000000000000000000000000
00000000000006012577f616464616374696f6e0000000000000000000000000000000000000000000000601357610766516101606000396000f2006000356000546020356020547f63726561746500000000000000000000000000000000000000000000000000006000530e0f6101245960406020530b61004a59600060005460206000f2602035566000547f67657400000000000000000000000000000000000000000000000000000000006020546040356040546020604060406020600060005360645c03f1506040356100de597f736574646f7567000000000000000000000000000000000000000000000000006000546010566020546020606060406000600060405360645c03f1506060536100de5960206060f27f736574706f6c6c00000000000000000000000000000000000000000000000000600054600160203503566020546020606060406000600060405360645c03f15060206040f27f6b696c6c000000000000000000000000000000000000000000000000000000006000530e0f61015a59601056330e0f61015a5933ff7f6765746163746e706f6c6c0000000000000000000000000000000000000000006000530e0f6101ba5960406020530b61019859600060005460206000f2602053566000546000536101ab5960206000f26001602053035660205460206020f26040356040547f7365746163746e706f6c6c0000000000000000000000000000000000000000006000530e0f6102bc5960406020530b6101fe59600060005460206000f27f67657400000000000000000000000000000000000000000000000000000000006060547f616374696f6e7300000000000000000000000000000000000000000000000000608054602060a060406060600060105660645c03f15060a0530f6102a8597f76616c6964617465000000000000000000000000000000000000000000000000606054336080546020606060406060600060a05360645c03f1506060536102a85960206060f260403560016020350357600160005460206000f27f686173616374696f6e00000000000000000000000000000000000000000000006000530e0f6103065960406020530b6102fa59600060005460206000f26020355660005460206000f27f72656700000000000000000000000000000000000000000000000000000000006000530e0f6105805960406020530b61034459600060005460206000f27f67657400000000000000000000000000000000000000000000000000000000006060547f616374696f6e7300000000000000000000000000000000000000000000000000608054602060a060406060600060105660645c03f15060a0530f6103ee597f76616c6964617465000000000000000000000000000000000000000000000000606054336080546020606060406060600060a05360645c03f1506060536103ee5960206060f2602053560f61040159600060005460206000f27f736574646f756700000000000000000000000000000000000000000000000000606054601056608054602060a060406060600060405360645c03f15060a05361044a59602060a0f27f676574706f6c6c00000000000000000000000000000000000000000000000000606054602060a060406060600060405360645c03f1507f67657400000000000000000000000000000000000000000000000000000000006060547f706f6c6c74797065730000000000000000000000000000000000000000000000608054602060c060406060600060105660645c03f15060c0536104e859602060c0f27f686173706f6c6c0000000000000000000000000000000000000000000000000060605460a053608054602060c060406060600060c05360645c03f15060c05361053159602060c0f26040536020535760a0536001602053035760115661055359602053601257610567586020536002601356015760135660016020530157602053601357600160115601601157600160005460206000f27f64657265670000000000000000000000000000000000000000000000000000006000530e0f61075c5960406020530b6105be59600060005460206000f27f67657400000000000000000000000000000000000000000000000000000000006060547f616374696f6e7300000000000000000000000000000000000000000000000000608054602060a060406060600060105660645c03f15060a0530f610668597f76616c6964617465000000000000000000000000000000000000000000000000606054336080546020606060406060600060a05360645c03f1506060536106685960206060f26020535660405460405361068059600060005460206000f27f6b696c6c000000000000000000000000000000000000000000000000000000006060546020606060206060600060405360645c03f1
5060016020530156604054600260205301566060546060536106fa596040536106e759600060125760006013576106f6586000600260405301576040536013576107285860405361071459600060016060530157606053601257610728586040536001606053015760605360026040530157600060205357600060016020530357600060016020530157600060026020530157600160115603601157600160005460206000f2600060005460206000f27f6175746f70617373000000000000000000000000000000000000000000000000600957737e70f9460402290a3e487dae01f610a1a8218fda601057610886516100506020396000546000536020f2007f736574646f7567000000000000000000000000000000000000000000000000006000350e0f61004c596010560f61003c59600060005460206000f2602035601057600160005460206000f260105661005d59600060005460206000f27f676574706f6c6c000000000000000000000000000000000000000000000000006000350e0f6100925960095660005460206000f27f67657400000000000000000000000000000000000000000000000000000000006000547f616374696f6e74797065730000000000000000000000000000000000000000006020546020600060406000600060105660645c03f1507f67657400000000000000000000000000000000000000000000000000000000006000350e0f61015b59600053330e61012a59600060005460206000f2602035610152596104e95161039d60203960005460005360206000f060005460206000f261015b583060005460206000f27f736574706f6c6c000000000000000000000000000000000000000000000000006000350e0f6101a859336000530e61019859600060005460206000f2602035600957600160005460206000f27f6175746f657865637574650000000000000000000000000000000000000000006000350e0f610359597f67657400000000000000000000000000000000000000000000000000000000006000547f616374696f6e73000000000000000000000000000000000000000000000000006020546020600060406000600060105660645c03f1506000600053330e0f6102485960406040350b0f610248595060203561025659600060005460206000f27f67657400000000000000000000000000000000000000000000000000000000006000547f616374696f6e74797065730000000000000000000000000000000000000000006020546020604060406000600060105660645c03f1507f686173616374696f6e00000000000000000000000000000000000000000000006000546040356020546020600060406000600060405360645c03f1506000530f61030059600060005460206000f27f726567000000000000000000000000000000000000000000000000000000000060605460403560805460203560a0546020600060606060600060405360645c03f15060005361034f5960206000f2600160005460206000f260007f6b696c6c000000000000000000000000000000000000000000000000000000006000350e0f61038b5950600053330e0f6103925933ff600060005460206000f2006104d5516100146020396000546000536020f2007f736574646f7567000000000000000000000000000000000000000000000000006000350e0f61004c596010560f61003c59600060005460206000f2602035601057600160005460206000f260105661005d59600060005460206000f27f736574706f6c6c000000000000000000000000000000000000000000000000006000350e0f610105597f67657400000000000000000000000000000000000000000000000000000000006000547f616374696f6e74797065730000000000000000000000000000000000000000006020546020600060406000600060105660645c03f150336000530e6100f559600060005460206000f2602035600957600160005460206000f27f676574706f6c6c000000000000000000000000000000000000000000000000006000350e0f61013a5960095660005460206000f27f696e6974000000000000000000000000000000000000000000000000000000006000350e0f6102a8597f67657400000000000000000000000000000000000000000000000000000000006000547f616374696f6e73000000000000000000000000000000000000000000000000006020546020600060406000600060105660645c03f1506000600053330e0f6101da5960406040350b0f6101da59506020356101e859600060005460206000f27f67657400000000000000000000000000000000000000000000000000000000006000547f616374696f6e74797065730000000000000000000000000000000000000000006020546020604060406000600060105660645c0
3f1507f686173616374696f6e00000000000000000000000000000000000000000000006000546040356020546020600060406000600060405360645c03f1506000530f61029259600060005460206000f2602035601157604035601257600160005460206000f27f65786563757465000000000000000000000000000000000000000000000000006000350e0f610437597f67657400000000000000000000000000000000000000000000000000000000006000547f616374696f6e73000000000000000000000000000000000000000000000000006020546020602060406000600060105660645c03f150336020530e61034059600060005460006000f27f67657400000000000000000000000000000000000000000000000000000000006000547f616374696f6e74797065730000000000000000000000000000000000000000006020546020600060406000600060105660645c03f1507f686173616374696f6e00000000000000000000000000000000000000000000006020546012566040546020602060406020600060005360645c03f1506020530f6103ea59600060005460206000f27f72656700000000000000000000000000000000000000000000000000000000006040546012566060546011566080546020600060606040600060005360645c03f150600160005460206000f27f67657400000000000000000000000000000000000000000000000000000000006000547f616374696f6e73000000000000000000000000000000000000000000000000006020546020600060406000600060105660645c03f1506000600053330e0f6104c459507f6b696c6c000000000000000000000000000000000000000000000000000000006000350e0f6104cb5933ff600060005460206000f21ba04fccc9e1deb9f56676055d7fa9287a83d7303a68d2e2f97ff949b4a5d4ce7a57a06e9bf79aa823317b607d1a63674dd3539e2ef58a3db9328c8933c180f604156ea0add0be6a25a4ed5c464f67ad3362ef36b6a4d418a6bd1619ba55b6ee54d69eb182668ac0f8b5f8b1a023451c8b169d36601a02a882bb352c2632485321ee2cc93178f631b7f9fec0d6a01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347943a587078e64080ada2f91adeb51bdd7f8f136311a0d2646114c3c323be279eefd344014ab308ab028907c039ceb70019de3826481b80834060270e8609184e72a000830f0d30808453bafc3980a034157a30847fbd0ad8ba8155213ec9ae589d0b062837a5ad00fe8d549d41b387c0c0f8b5f8b1a037fa309f9b6d70d80024fc997089ea66166c1c8d49d3658f5734dd73fa6f1532a01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347943a587078e64080ada2f91adeb51bdd7f8f136311a0b9e0c292ba78c8aabfb3417aff51d78b1f5d30003362725feec45875a257f96080834050130d8609184e72a000830f10f5808453bafc2480a0a9231ddaa5bdaa0b43c6dce6b603cc460570f23073bb5f45d24a62da5e6139b8c0c0f8b5f8b1a00616c552679a920c3a14a0707e3eaec6513cfff9dc70585a1a89d462a35425cca01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347943a587078e64080ada2f91adeb51bdd7f8f136311a077bdd159076f042a51a94d51e971f80313dc741b6cbd837a32d669ac480f987280834040030c8609184e72a000830f14bb808453bafc0080a032ec045859a9a10eea3a20904f86caa320ae024c4a1c90dd1dc6f14e6c476b0ec0c0f90589f8d3a0a47f6b81c5033b45a31211b6b86ad8888a0043c0f779272a9b71af9b5e94b856a01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347943a587078e64080ada2f91adeb51bdd7f8f136311a0d83b4024511364e2a80a63fc4a437af3cc3e7d58ac67a5e01c1a4f6b4be7fa9aa04463aae8394a7efde941377c958e578e6cab65d312c646bdb9feb443448abe17834050170b8609184e72a000830f187a8218b18453bafbc580a070f720929867c786801a9bff7ccc922e5b51f2aaaa9471541e37d36727583f34f904b0f904adf90486808609184e72a00082271094000000000000000000000000000000000000000080b9041f7f50524f444f5547000000000000000000000000000000000000000000000000006001576000601157600060125760006013576103de516100416000396000f20060003560005460203560205460406020530b61002059600060005460206000f27f67657400000000000000000000000000000000000000000000000000000000006000530e0f610056596020535660005460206000f26040356040547f72656700000000000000000000000000000000000000000000000000000000006000530e0f61021d597f616374
696f6e7300000000000000000000000000000000000000000000000000560f610114597f76616c696461746500000000000000000000000000000000000000000000000060605433608054602060606040606060007f616374696f6e73000000000000000000000000000000000000000000000000005660645c03f150606053610114596020606053f260405361011f5933604054602053560f6101d8597f64756d700000000000000000000000000000000000000000000000000000000060805460405360a054602060806040608060006020535660645c03f1507f616374696f6e73000000000000000000000000000000000000000000000000006020530e6101c7597f6b696c6c00000000000000000000000000000000000000000000000000000000608054602060806020608060006020535660645c03f15060405360205357600260005460206000f2604053602053576011566101f059602053601257610204586020536002601356015760135660016020530157602053601357600160115601601157600260005460206000f27f64657265670000000000000000000000000000000000000000000000000000006000530e0f6103d4597f616374696f6e7300000000000000000000000000000000000000000000000000560f6102d4597f76616c696461746500000000000000000000000000000000000000000000000060605433608054602060606040606060007f616374696f6e73000000000000000000000000000000000000000000000000005660645c03f1506060536102d45960206060f27f646f7567000000000000000000000000000000000000000000000000000000006020530e0f6103005933ff7f6b696c6c00000000000000000000000000000000000000000000000000000000608054602060806020608060006020535660645c03f150600160205301566040546002602053015660605460605361037b596040536103685960006012576000601357610377586000600260405301576040536013576103a958604053610395596000600160605301576060536012576103a9586040536001606053015760605360026040530157600060205357600060016020530157600060026020530157600160115603601157600160005460206000f2600060005460206000f21ba0a20c5cf8ba7c22c51d3bdd11e5b8796aabc045e35e12da269941e63e213db0a1a0081967c69324f0ee0ced0d0ac490a3ae8a420e3eab894c9c6f6509eecac405baa0643aabd3c1786ed63848db5352be132df6e80b369085f09b36d277f85ee804de8218b1c0f8b5f8b1a01e6536312207df93e64e85ec428eb57a4f5aae7e175a538d0e5725092fdddeb4a01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347943a587078e64080ada2f91adeb51bdd7f8f136311a01b331d95b5b493fb90fbf3166006f674e1707dc5ab84afe382e2fc1d440f497980834040070a8609184e72a000830f1c42808453bafbc480a08616702c69753268ea591ea65c0eda3e2c5b47790865d758cc04388142e9f51dc0c0f8b5f8b1a00adb263ba4d9cece76e5734f28622150a78e5e6c68862752946637b0644de8a0a01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347943a587078e64080ada2f91adeb51bdd7f8f136311a09354a9316bc091dd18188ae03a267105825d625ea97b648e48c3a84ddf041a998083402ffc098609184e72a000830f200b808453bafbb680a03478f52ab8cb6e238e1cf8825441c140081d5dfeb34357c374cb2120ec9f2fddc0c0f8b5f8b1a04c5a9484bcc11a9ee084a2aacbc9f24666381fca1e503ba9f49ab34835514840a01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347943a587078e64080ada2f91adeb51bdd7f8f136311a09be7af5e34dd46dd2f9f83e7d13c9469a8126b109c3f8e15628b50a7797bc506808340400c088609184e72a000830f23d4808453bafb6580a0d1ee6a6c26410a85c100326c6bd3045478adeb379c6738be27252d8401d7dc17c0c0f8b5f8b1a0cc041de5652f281ce493792533e985586a8023a1f5b558908cff5407c3c0db0fa01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347943a587078e64080ada2f91adeb51bdd7f8f136311a071c88d664dc93c16d3a46b3a17392252518101f4d7726daffc3aadb2fd1f70978083403000078609184e72a000830f279e808453bafb5980a0ef5e4c5a3fe18e7cb34cdc04f87789dfaf1adf74f1f613a65dcb4cf551070092c0c0f8b5f8b1a01128ec8377956538f1ab30cec6bc4b94d04d712368c20776774d19752e260547a01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347943a587078e64080ada2f91adeb51bdd7f8f136
311a0b16eeb1e12cd46dd438050b510d64be1fa0845c8a3df7f04bb40535ea33027598083401ff9068609184e72a000830f2b69808453bafb5480a0f82c1411c721e8b7124a842c994166a46828d3adf1797f0e8753a847ed1f80c6c0c0f8b5f8b1a01b6a23aef349afd2164029113b24fbdfd95d25979763f1c3fbb7ac3d7b577c3fa01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347943a587078e64080ada2f91adeb51bdd7f8f136311a0882c3669d09b8d294d75a019608a3d3de0f1afd9c76911bd8bf7ac163bb168f48083400ff6058609184e72a000830f2f35808453bafb4580a0bfd37885381d08cc4e0ea2e50cc2f87a8bb1e7456e23c479099f8c978ae48331c0c0f8b5f8b1a0a60d0075ddd6f89fa70b50ce0644fcd20195dc0be05f9491fcd33d458e10d19ea01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347943a587078e64080ada2f91adeb51bdd7f8f136311a07b9f84a0b29dbd3602c55d042c099f7f066b414fd8edc11ee6d2211d4c9a650f80833ffff7048609184e72a000830f3302808453bafb2480a0ccba311e3a57531a3b070aeeee9f82e40af63586d38b8721a56b32c95cdf0e9ec0c0f8b5f8b1a006f3851e7ed2c0a9ef51fd444d572b61db124bceab309064b572301b9e06775ca01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347943a587078e64080ada2f91adeb51bdd7f8f136311a0968da9f18bc1ad3d7298c74e68eb45fbe86f58bfe44a4a5559d84e6fc61084a380833feffc038609184e72a000830f36d0808453bafb0a80a0c946a49b1672094993d57f6a67d8a39887f41b3aa9f9fded7054f9084e51aaf8c0c0f8b5f8b1a0591b0bc597f0368a69d5ba618945e86b83e1ccc304b64e13b73c2d7f1ab4f7fda01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347943a587078e64080ada2f91adeb51bdd7f8f136311a09357a2a48f6c0db64a0591d24c561466f74eba84d4c842b1aeee30c9717fc5b280833fe004028609184e72a000830f3a9f808453bafafa80a0fc6593b023ae424f8093d0a6198374c1476fcc85ec1a8bf2a31cfb416e6530dac0c0f8b5f8b1a0a7722d611450de26f55026b6544e34d9431b0a67a829e1794ac36fa4f079208fa01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347943a587078e64080ada2f91adeb51bdd7f8f136311a0d04cdf95d2e20ef5e15d777299a7b26d73cae730fc13f9361f5e20ce188331c980833ff000018609184e72a000830f3e6f808453bafa2580a055fbbc1a8e1dee55345cdda2b31bebbcec911e369fe30251441954ed650be4f3c0c0"
|
huongttlan/statsmodels
|
refs/heads/master
|
tools/cythonize.py
|
28
|
#!/usr/bin/env python
""" cythonize
Cythonize pyx files into C files as needed.
Usage: cythonize [root_dir]
Default [root_dir] is 'statsmodels'.
Checks pyx files to see if they have been changed relative to their
corresponding C files. If they have, then runs cython on these files to
recreate the C files.
The script thinks that the pyx files have changed relative to the C files
by comparing hashes stored in a database file.
Simple script to invoke Cython (and Tempita) on all .pyx (.pyx.in)
files; while waiting for a proper build system. Uses file hashes to
figure out if rebuild is needed.
For now, this script should be run by developers when changing Cython files
only, and the resulting C files checked in, so that end-users (and Python-only
developers) do not get the Cython/Tempita dependencies.
Originally written by Dag Sverre Seljebotn, and copied here from:
https://raw.github.com/dagss/private-scipy-refactor/cythonize/cythonize.py
Note: this script does not check any of the dependent C libraries; it only
operates on the Cython .pyx files.
"""
from __future__ import division, print_function, absolute_import
import os
import re
import sys
import hashlib
import subprocess
HASH_FILE = 'cythonize.dat'
DEFAULT_ROOT = 'statsmodels'
# WindowsError is not defined on unix systems
try:
WindowsError
except NameError:
WindowsError = None
#
# Rules
#
def process_pyx(fromfile, tofile):
try:
from Cython.Compiler.Version import version as cython_version
from distutils.version import LooseVersion
if LooseVersion(cython_version) < LooseVersion('0.19'):
raise Exception('Building Statsmodels requires Cython >= 0.19')
except ImportError:
pass
flags = ['--fast-fail']
if tofile.endswith('.cxx'):
flags += ['--cplus']
try:
try:
r = subprocess.call(['cython'] + flags + ["-o", tofile, fromfile])
if r != 0:
raise Exception('Cython failed')
except OSError:
# There are ways of installing Cython that don't result in a cython
# executable on the path, see gh-2397.
r = subprocess.call([sys.executable, '-c',
'import sys; from Cython.Compiler.Main import '
'setuptools_main as main; sys.exit(main())'] + flags +
["-o", tofile, fromfile])
if r != 0:
raise Exception('Cython failed')
except OSError:
raise OSError('Cython needs to be installed')
def process_tempita_pyx(fromfile, tofile):
try:
try:
from Cython import Tempita as tempita
except ImportError:
import tempita
except ImportError:
raise Exception('Building Statsmodels requires Tempita: '
'pip install --user Tempita')
with open(fromfile, "r") as f:
tmpl = f.read()
pyxcontent = tempita.sub(tmpl)
assert fromfile.endswith('.pyx.in')
pyxfile = fromfile[:-len('.pyx.in')] + '.pyx'
with open(pyxfile, "w") as f:
f.write(pyxcontent)
process_pyx(pyxfile, tofile)
rules = {
# fromext : function
'.pyx' : process_pyx,
'.pyx.in' : process_tempita_pyx
}
#
# Hash db
#
def load_hashes(filename):
# Return { filename : (sha1 of input, sha1 of output) }
if os.path.isfile(filename):
hashes = {}
with open(filename, 'r') as f:
for line in f:
filename, inhash, outhash = line.split()
hashes[filename] = (inhash, outhash)
else:
hashes = {}
return hashes
def save_hashes(hash_db, filename):
with open(filename, 'w') as f:
for key, value in sorted(hash_db.items()):
f.write("%s %s %s\n" % (key, value[0], value[1]))
def sha1_of_file(filename):
h = hashlib.sha1()
with open(filename, "rb") as f:
h.update(f.read())
return h.hexdigest()
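# Hedged sketch (not part of the original script): round-trips the hash
# database through save_hashes/load_hashes. The file name and hash
# strings are illustrative assumptions; the function is never called.
def _example_hash_roundtrip(tmpfile='cythonize_example.dat'):
    db = {'statsmodels/foo.pyx': ('in-sha1', 'out-sha1')}
    save_hashes(db, tmpfile)
    assert load_hashes(tmpfile) == db
    os.remove(tmpfile)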
#
# Main program
#
def normpath(path):
path = path.replace(os.sep, '/')
if path.startswith('./'):
path = path[2:]
return path
def get_hash(frompath, topath):
from_hash = sha1_of_file(frompath)
to_hash = sha1_of_file(topath) if os.path.exists(topath) else None
return (from_hash, to_hash)
def process(path, fromfile, tofile, processor_function, hash_db):
fullfrompath = os.path.join(path, fromfile)
fulltopath = os.path.join(path, tofile)
current_hash = get_hash(fullfrompath, fulltopath)
if current_hash == hash_db.get(normpath(fullfrompath), None):
print('%s has not changed' % fullfrompath)
return
orig_cwd = os.getcwd()
try:
os.chdir(path)
print('Processing %s' % fullfrompath)
processor_function(fromfile, tofile)
finally:
os.chdir(orig_cwd)
# changed target file, recompute hash
current_hash = get_hash(fullfrompath, fulltopath)
# store hash in db
hash_db[normpath(fullfrompath)] = current_hash
def find_process_files(root_dir):
hash_db = load_hashes(HASH_FILE)
for cur_dir, dirs, files in os.walk(root_dir):
for filename in files:
in_file = os.path.join(cur_dir, filename + ".in")
if filename.endswith('.pyx') and os.path.isfile(in_file):
continue
for fromext, function in rules.items():
if filename.endswith(fromext):
toext = ".c"
with open(os.path.join(cur_dir, filename), 'rb') as f:
data = f.read()
m = re.search(br"^\s*#\s*distutils:\s*language\s*=\s*c\+\+\s*$", data, re.I|re.M)
if m:
toext = ".cxx"
fromfile = filename
tofile = filename[:-len(fromext)] + toext
process(cur_dir, fromfile, tofile, function, hash_db)
save_hashes(hash_db, HASH_FILE)
def main():
try:
root_dir = sys.argv[1]
except IndexError:
root_dir = DEFAULT_ROOT
find_process_files(root_dir)
if __name__ == '__main__':
main()
|
dohoangkhiem/uwsgi
|
refs/heads/master
|
examples/staticfilesnmp.py
|
17
|
import uwsgi
from os import path
uwsgi.snmp_set_counter64(1, 0) # Number of requests
uwsgi.snmp_set_counter64(2, 0) # Number of bytes
def application(environ, start_response):
size = path.getsize('logo_uWSGI.png')
start_response('200 OK', [('Content-Type', 'image/png'), ('Content-Length', str(size))])
    fd = open('logo_uWSGI.png', 'rb')  # binary mode: the PNG is raw bytes
uwsgi.snmp_incr_counter64(1)
uwsgi.snmp_incr_counter64(2, size)
return environ['wsgi.file_wrapper'](fd, 4096)
|
LinkHS/incubator-mxnet
|
refs/heads/master
|
example/neural-style/end_to_end/model_vgg19.py
|
43
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import mxnet as mx
import os, sys
from collections import namedtuple
ConvExecutor = namedtuple('ConvExecutor', ['executor', 'data', 'data_grad', 'style', 'content', 'arg_dict'])
def get_vgg_symbol(prefix, content_only=False):
# declare symbol
data = mx.sym.Variable("%s_data" % prefix)
conv1_1 = mx.symbol.Convolution(name='%s_conv1_1' % prefix, data=data , num_filter=64, pad=(1,1), kernel=(3,3), stride=(1,1), workspace=1024)
relu1_1 = mx.symbol.Activation(data=conv1_1 , act_type='relu')
conv1_2 = mx.symbol.Convolution(name='%s_conv1_2' % prefix, data=relu1_1 , num_filter=64, pad=(1,1), kernel=(3,3), stride=(1,1), workspace=1024)
relu1_2 = mx.symbol.Activation(data=conv1_2 , act_type='relu')
pool1 = mx.symbol.Pooling(data=relu1_2 , pad=(0,0), kernel=(2,2), stride=(2,2), pool_type='avg')
conv2_1 = mx.symbol.Convolution(name='%s_conv2_1' % prefix, data=pool1 , num_filter=128, pad=(1,1), kernel=(3,3), stride=(1,1), workspace=1024)
relu2_1 = mx.symbol.Activation(data=conv2_1 , act_type='relu')
conv2_2 = mx.symbol.Convolution(name='%s_conv2_2' % prefix, data=relu2_1 , num_filter=128, pad=(1,1), kernel=(3,3), stride=(1,1), workspace=1024)
relu2_2 = mx.symbol.Activation(data=conv2_2 , act_type='relu')
pool2 = mx.symbol.Pooling(data=relu2_2 , pad=(0,0), kernel=(2,2), stride=(2,2), pool_type='avg')
conv3_1 = mx.symbol.Convolution(name='%s_conv3_1' % prefix, data=pool2 , num_filter=256, pad=(1,1), kernel=(3,3), stride=(1,1), workspace=1024)
relu3_1 = mx.symbol.Activation(data=conv3_1 , act_type='relu')
conv3_2 = mx.symbol.Convolution(name='%s_conv3_2' % prefix, data=relu3_1 , num_filter=256, pad=(1,1), kernel=(3,3), stride=(1,1), workspace=1024)
relu3_2 = mx.symbol.Activation(data=conv3_2 , act_type='relu')
conv3_3 = mx.symbol.Convolution(name='%s_conv3_3' % prefix, data=relu3_2 , num_filter=256, pad=(1,1), kernel=(3,3), stride=(1,1), workspace=1024)
relu3_3 = mx.symbol.Activation(data=conv3_3 , act_type='relu')
conv3_4 = mx.symbol.Convolution(name='%s_conv3_4' % prefix, data=relu3_3 , num_filter=256, pad=(1,1), kernel=(3,3), stride=(1,1), workspace=1024)
relu3_4 = mx.symbol.Activation(data=conv3_4 , act_type='relu')
pool3 = mx.symbol.Pooling(data=relu3_4 , pad=(0,0), kernel=(2,2), stride=(2,2), pool_type='avg')
conv4_1 = mx.symbol.Convolution(name='%s_conv4_1' % prefix, data=pool3 , num_filter=512, pad=(1,1), kernel=(3,3), stride=(1,1), workspace=1024)
relu4_1 = mx.symbol.Activation(data=conv4_1 , act_type='relu')
conv4_2 = mx.symbol.Convolution(name='%s_conv4_2' % prefix, data=relu4_1 , num_filter=512, pad=(1,1), kernel=(3,3), stride=(1,1), workspace=1024)
relu4_2 = mx.symbol.Activation(data=conv4_2 , act_type='relu')
conv4_3 = mx.symbol.Convolution(name='%s_conv4_3' % prefix, data=relu4_2 , num_filter=512, pad=(1,1), kernel=(3,3), stride=(1,1), workspace=1024)
relu4_3 = mx.symbol.Activation(data=conv4_3 , act_type='relu')
conv4_4 = mx.symbol.Convolution(name='%s_conv4_4' % prefix, data=relu4_3 , num_filter=512, pad=(1,1), kernel=(3,3), stride=(1,1), workspace=1024)
relu4_4 = mx.symbol.Activation(data=conv4_4 , act_type='relu')
pool4 = mx.symbol.Pooling(data=relu4_4 , pad=(0,0), kernel=(2,2), stride=(2,2), pool_type='avg')
conv5_1 = mx.symbol.Convolution(name='%s_conv5_1' % prefix, data=pool4 , num_filter=512, pad=(1,1), kernel=(3,3), stride=(1,1), workspace=1024)
relu5_1 = mx.symbol.Activation(data=conv5_1 , act_type='relu')
if content_only:
return relu4_2
# style and content layers
style = mx.sym.Group([relu1_1, relu2_1, relu3_1, relu4_1, relu5_1])
content = mx.sym.Group([relu4_2])
return style, content
def get_executor_with_style(style, content, input_size, ctx):
out = mx.sym.Group([style, content])
# make executor
arg_shapes, output_shapes, aux_shapes = out.infer_shape(data=(1, 3, input_size[0], input_size[1]))
arg_names = out.list_arguments()
arg_dict = dict(zip(arg_names, [mx.nd.zeros(shape, ctx=ctx) for shape in arg_shapes]))
grad_dict = {"data": arg_dict["data"].copyto(ctx)}
# init with pretrained weight
pretrained = mx.nd.load("./model/vgg19.params")
for name in arg_names:
if name == "data":
continue
key = "arg:" + name
if key in pretrained:
pretrained[key].copyto(arg_dict[name])
else:
print("Skip argument %s" % name)
executor = out.bind(ctx=ctx, args=arg_dict, args_grad=grad_dict, grad_req="write")
return ConvExecutor(executor=executor,
data=arg_dict["data"],
data_grad=grad_dict["data"],
style=executor.outputs[:-1],
content=executor.outputs[-1],
arg_dict=arg_dict)
def get_executor_content(content, input_size, ctx):
arg_shapes, output_shapes, aux_shapes = content.infer_shape(data=(1, 3, input_size[0], input_size[1]))
    arg_names = content.list_arguments()
arg_dict = dict(zip(arg_names, [mx.nd.zeros(shape, ctx=ctx) for shape in arg_shapes]))
pretrained = mx.nd.load("./model/vgg19.params")
for name in arg_names:
if name == "data":
continue
key = "arg:" + name
if key in pretrained:
pretrained[key].copyto(arg_dict[name])
else:
print("Skip argument %s" % name)
    executor = content.bind(ctx=ctx, args=arg_dict, args_grad=None, grad_req="null")
return ConvExecutor(executor=executor,
data=arg_dict["data"],
data_grad=None,
style=None,
content=executor.outputs[0],
arg_dict=arg_dict)
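# Hedged usage sketch (not part of the original module): build the
# style/content symbols and bind an executor on CPU. Assumes the
# pretrained weights exist at ./model/vgg19.params and a 224x224 input.
def _example_build_executor():
    style, content = get_vgg_symbol('style')
    return get_executor_with_style(style, content, (224, 224), mx.cpu())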
|
Amsoft-Systems/testlink-1.9.5
|
refs/heads/master
|
third_party/fckeditor/editor/filemanager/connectors/py/fckcommands.py
|
48
|
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2010 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
Connector for Python (CGI and WSGI).
"""
import os
try: # Windows needs stdio set for binary mode for file upload to work.
import msvcrt
msvcrt.setmode (0, os.O_BINARY) # stdin = 0
msvcrt.setmode (1, os.O_BINARY) # stdout = 1
except ImportError:
pass
from fckutil import *
from fckoutput import *
import config as Config
class GetFoldersCommandMixin (object):
def getFolders(self, resourceType, currentFolder):
"""
        Purpose: command to receive a list of folders
"""
# Map the virtual path to our local server
serverPath = mapServerFolder(self.userFilesFolder,currentFolder)
s = """<Folders>""" # Open the folders node
for someObject in os.listdir(serverPath):
someObjectPath = mapServerFolder(serverPath, someObject)
if os.path.isdir(someObjectPath):
s += """<Folder name="%s" />""" % (
convertToXmlAttribute(someObject)
)
s += """</Folders>""" # Close the folders node
return s
class GetFoldersAndFilesCommandMixin (object):
def getFoldersAndFiles(self, resourceType, currentFolder):
"""
        Purpose: command to receive a list of folders and files
"""
# Map the virtual path to our local server
serverPath = mapServerFolder(self.userFilesFolder,currentFolder)
# Open the folders / files node
folders = """<Folders>"""
files = """<Files>"""
for someObject in os.listdir(serverPath):
someObjectPath = mapServerFolder(serverPath, someObject)
if os.path.isdir(someObjectPath):
folders += """<Folder name="%s" />""" % (
convertToXmlAttribute(someObject)
)
elif os.path.isfile(someObjectPath):
size = os.path.getsize(someObjectPath)
if size > 0:
size = round(size/1024)
if size < 1:
size = 1
files += """<File name="%s" size="%d" />""" % (
convertToXmlAttribute(someObject),
size
)
# Close the folders / files node
folders += """</Folders>"""
files += """</Files>"""
return folders + files
class CreateFolderCommandMixin (object):
def createFolder(self, resourceType, currentFolder):
"""
Purpose: command to create a new folder
"""
        errorNo = 0; errorMsg = ''
if self.request.has_key("NewFolderName"):
newFolder = self.request.get("NewFolderName", None)
newFolder = sanitizeFolderName (newFolder)
try:
newFolderPath = mapServerFolder(self.userFilesFolder, combinePaths(currentFolder, newFolder))
self.createServerFolder(newFolderPath)
except Exception, e:
                errorMsg = str(e).decode('iso-8859-1').encode('utf-8') # beware of encodings!
if hasattr(e,'errno'):
if e.errno==17: #file already exists
errorNo=0
elif e.errno==13: # permission denied
errorNo = 103
elif e.errno==36 or e.errno==2 or e.errno==22: # filename too long / no such file / invalid name
errorNo = 102
else:
errorNo = 110
else:
errorNo = 102
return self.sendErrorNode ( errorNo, errorMsg )
def createServerFolder(self, folderPath):
"Purpose: physically creates a folder on the server"
        # No need to check if the parent exists, just create the whole hierarchy
try:
permissions = Config.ChmodOnFolderCreate
if not permissions:
os.makedirs(folderPath)
except AttributeError: #ChmodOnFolderCreate undefined
permissions = 0755
if permissions:
oldumask = os.umask(0)
            os.makedirs(folderPath, mode=permissions)  # use the configured permissions
os.umask( oldumask )
class UploadFileCommandMixin (object):
def uploadFile(self, resourceType, currentFolder):
"""
Purpose: command to upload files to server (same as FileUpload)
"""
errorNo = 0
if self.request.has_key("NewFile"):
# newFile has all the contents we need
newFile = self.request.get("NewFile", "")
# Get the file name
newFileName = newFile.filename
newFileName = sanitizeFileName( newFileName )
newFileNameOnly = removeExtension(newFileName)
newFileExtension = getExtension(newFileName).lower()
allowedExtensions = Config.AllowedExtensions[resourceType]
deniedExtensions = Config.DeniedExtensions[resourceType]
if (allowedExtensions):
# Check for allowed
isAllowed = False
if (newFileExtension in allowedExtensions):
isAllowed = True
elif (deniedExtensions):
# Check for denied
isAllowed = True
if (newFileExtension in deniedExtensions):
isAllowed = False
else:
# No extension limitations
isAllowed = True
if (isAllowed):
# Upload to operating system
# Map the virtual path to the local server path
currentFolderPath = mapServerFolder(self.userFilesFolder, currentFolder)
i = 0
while (True):
newFilePath = os.path.join (currentFolderPath,newFileName)
if os.path.exists(newFilePath):
i += 1
newFileName = "%s(%d).%s" % (
newFileNameOnly, i, newFileExtension
)
errorNo= 201 # file renamed
else:
# Read file contents and write to the desired path (similar to php's move_uploaded_file)
fout = file(newFilePath, 'wb')
while (True):
chunk = newFile.file.read(100000)
if not chunk: break
fout.write (chunk)
fout.close()
if os.path.exists ( newFilePath ):
doChmod = False
try:
doChmod = Config.ChmodOnUpload
permissions = Config.ChmodOnUpload
except AttributeError: #ChmodOnUpload undefined
doChmod = True
permissions = 0755
if ( doChmod ):
oldumask = os.umask(0)
os.chmod( newFilePath, permissions )
os.umask( oldumask )
newFileUrl = combinePaths(self.webUserFilesFolder, currentFolder) + newFileName
return self.sendUploadResults( errorNo , newFileUrl, newFileName )
else:
return self.sendUploadResults( errorNo = 202, customMsg = "" )
else:
return self.sendUploadResults( errorNo = 202, customMsg = "No File" )
|
markflyhigh/incubator-beam
|
refs/heads/master
|
sdks/python/apache_beam/options/pipeline_options_validator_test.py
|
1
|
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Unit tests for the pipeline options validator module."""
from __future__ import absolute_import
import logging
import unittest
from builtins import object
from hamcrest import assert_that
from hamcrest import contains_string
from hamcrest import only_contains
from hamcrest.core.base_matcher import BaseMatcher
from apache_beam.internal import pickler
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options_validator import PipelineOptionsValidator
# Mock runners to use for validations.
class MockRunners(object):
class DataflowRunner(object):
pass
class TestDataflowRunner(object):
pass
class OtherRunner(object):
pass
# Matcher that always passes for testing on_success_matcher option
class AlwaysPassMatcher(BaseMatcher):
def _matches(self, item):
return True
class SetupTest(unittest.TestCase):
def check_errors_for_arguments(self, errors, args):
"""Checks that there is exactly one error for each given argument."""
missing = []
remaining = list(errors)
for arg in args:
found = False
for error in remaining:
if arg in error:
remaining.remove(error)
found = True
break
if not found:
missing.append('Missing error for: ' + arg)
# Return missing and remaining (not matched) errors.
return missing + remaining
def test_local_runner(self):
runner = MockRunners.OtherRunner()
options = PipelineOptions([])
validator = PipelineOptionsValidator(options, runner)
errors = validator.validate()
self.assertEqual(len(errors), 0)
def test_missing_required_options(self):
options = PipelineOptions([''])
runner = MockRunners.DataflowRunner()
validator = PipelineOptionsValidator(options, runner)
errors = validator.validate()
self.assertEqual(
self.check_errors_for_arguments(
errors,
['project', 'staging_location', 'temp_location']),
[])
def test_gcs_path(self):
def get_validator(temp_location, staging_location):
options = ['--project=example:example', '--job_name=job']
if temp_location is not None:
options.append('--temp_location=' + temp_location)
if staging_location is not None:
options.append('--staging_location=' + staging_location)
pipeline_options = PipelineOptions(options)
runner = MockRunners.DataflowRunner()
validator = PipelineOptionsValidator(pipeline_options, runner)
return validator
test_cases = [
{'temp_location': None,
'staging_location': 'gs://foo/bar',
'errors': ['temp_location']},
{'temp_location': None,
'staging_location': None,
'errors': ['staging_location', 'temp_location']},
{'temp_location': 'gs://foo/bar',
'staging_location': None,
'errors': []},
{'temp_location': 'gs://foo/bar',
'staging_location': 'gs://ABC/bar',
'errors': ['staging_location']},
{'temp_location': 'gcs:/foo/bar',
'staging_location': 'gs://foo/bar',
'errors': ['temp_location']},
{'temp_location': 'gs:/foo/bar',
'staging_location': 'gs://foo/bar',
'errors': ['temp_location']},
{'temp_location': 'gs://ABC/bar',
'staging_location': 'gs://foo/bar',
'errors': ['temp_location']},
{'temp_location': 'gs://ABC/bar',
'staging_location': 'gs://foo/bar',
'errors': ['temp_location']},
{'temp_location': 'gs://foo',
'staging_location': 'gs://foo/bar',
'errors': ['temp_location']},
{'temp_location': 'gs://foo/',
'staging_location': 'gs://foo/bar',
'errors': []},
{'temp_location': 'gs://foo/bar',
'staging_location': 'gs://foo/bar',
'errors': []},
]
for case in test_cases:
errors = get_validator(case['temp_location'],
case['staging_location']).validate()
self.assertEqual(
self.check_errors_for_arguments(errors, case['errors']), [])
def test_project(self):
def get_validator(project):
options = ['--job_name=job', '--staging_location=gs://foo/bar',
'--temp_location=gs://foo/bar']
if project is not None:
options.append('--project=' + project)
pipeline_options = PipelineOptions(options)
runner = MockRunners.DataflowRunner()
validator = PipelineOptionsValidator(pipeline_options, runner)
return validator
test_cases = [
{'project': None, 'errors': ['project']},
{'project': '12345', 'errors': ['project']},
{'project': 'FOO', 'errors': ['project']},
{'project': 'foo:BAR', 'errors': ['project']},
{'project': 'fo', 'errors': ['project']},
{'project': 'foo', 'errors': []},
{'project': 'foo:bar', 'errors': []},
]
for case in test_cases:
errors = get_validator(case['project']).validate()
self.assertEqual(
self.check_errors_for_arguments(errors, case['errors']), [])
def test_job_name(self):
def get_validator(job_name):
options = ['--project=example:example', '--staging_location=gs://foo/bar',
'--temp_location=gs://foo/bar']
if job_name is not None:
options.append('--job_name=' + job_name)
pipeline_options = PipelineOptions(options)
runner = MockRunners.DataflowRunner()
validator = PipelineOptionsValidator(pipeline_options, runner)
return validator
test_cases = [
{'job_name': None, 'errors': []},
{'job_name': '12345', 'errors': ['job_name']},
{'job_name': 'FOO', 'errors': ['job_name']},
{'job_name': 'foo:bar', 'errors': ['job_name']},
{'job_name': 'fo', 'errors': []},
{'job_name': 'foo', 'errors': []},
]
for case in test_cases:
errors = get_validator(case['job_name']).validate()
self.assertEqual(
self.check_errors_for_arguments(errors, case['errors']), [])
def test_num_workers(self):
def get_validator(num_workers):
options = ['--project=example:example', '--job_name=job',
'--staging_location=gs://foo/bar',
'--temp_location=gs://foo/bar']
if num_workers is not None:
options.append('--num_workers=' + num_workers)
pipeline_options = PipelineOptions(options)
runner = MockRunners.DataflowRunner()
validator = PipelineOptionsValidator(pipeline_options, runner)
return validator
test_cases = [
{'num_workers': None, 'errors': []},
{'num_workers': '1', 'errors': []},
{'num_workers': '0', 'errors': ['num_workers']},
{'num_workers': '-1', 'errors': ['num_workers']},
]
for case in test_cases:
errors = get_validator(case['num_workers']).validate()
self.assertEqual(
self.check_errors_for_arguments(errors, case['errors']), [])
def test_is_service_runner(self):
test_cases = [
{
'runner': MockRunners.OtherRunner(),
'options': [],
'expected': False,
},
{
'runner': MockRunners.OtherRunner(),
'options': ['--dataflow_endpoint=https://dataflow.googleapis.com'],
'expected': False,
},
{
'runner': MockRunners.OtherRunner(),
'options': ['--dataflow_endpoint=https://dataflow.googleapis.com/'],
'expected': False,
},
{
'runner': MockRunners.DataflowRunner(),
'options': ['--dataflow_endpoint=https://another.service.com'],
'expected': False,
},
{
'runner': MockRunners.DataflowRunner(),
'options': ['--dataflow_endpoint=https://another.service.com/'],
'expected': False,
},
{
'runner': MockRunners.DataflowRunner(),
'options': ['--dataflow_endpoint=https://dataflow.googleapis.com'],
'expected': True,
},
{
'runner': MockRunners.DataflowRunner(),
'options': ['--dataflow_endpoint=https://dataflow.googleapis.com/'],
'expected': True,
},
{
'runner': MockRunners.DataflowRunner(),
'options': [],
'expected': True,
},
]
for case in test_cases:
validator = PipelineOptionsValidator(
PipelineOptions(case['options']), case['runner'])
self.assertEqual(validator.is_service_runner(), case['expected'])
def test_dataflow_job_file_and_template_location_mutually_exclusive(self):
runner = MockRunners.OtherRunner()
options = PipelineOptions([
'--template_location', 'abc',
'--dataflow_job_file', 'def'
])
validator = PipelineOptionsValidator(options, runner)
errors = validator.validate()
self.assertTrue(errors)
def test_validate_template_location(self):
runner = MockRunners.OtherRunner()
options = PipelineOptions([
'--template_location', 'abc',
])
validator = PipelineOptionsValidator(options, runner)
errors = validator.validate()
self.assertFalse(errors)
def test_validate_dataflow_job_file(self):
runner = MockRunners.OtherRunner()
options = PipelineOptions([
'--dataflow_job_file', 'abc'
])
validator = PipelineOptionsValidator(options, runner)
errors = validator.validate()
self.assertFalse(errors)
def test_test_matcher(self):
def get_validator(matcher):
options = ['--project=example:example',
'--job_name=job',
'--staging_location=gs://foo/bar',
'--temp_location=gs://foo/bar',]
if matcher:
options.append('%s=%s' % ('--on_success_matcher', matcher.decode()))
pipeline_options = PipelineOptions(options)
runner = MockRunners.TestDataflowRunner()
return PipelineOptionsValidator(pipeline_options, runner)
test_case = [
{'on_success_matcher': None,
'errors': []},
{'on_success_matcher': pickler.dumps(AlwaysPassMatcher()),
'errors': []},
{'on_success_matcher': b'abc',
'errors': ['on_success_matcher']},
{'on_success_matcher': pickler.dumps(object),
'errors': ['on_success_matcher']},
]
for case in test_case:
errors = get_validator(case['on_success_matcher']).validate()
self.assertEqual(
self.check_errors_for_arguments(errors, case['errors']), [])
def test_transform_name_mapping_without_update(self):
options = ['--project=example:example',
'--staging_location=gs://foo/bar',
'--temp_location=gs://foo/bar',
'--transform_name_mapping={\"fromPardo\":\"toPardo\"}']
pipeline_options = PipelineOptions(options)
runner = MockRunners.DataflowRunner()
validator = PipelineOptionsValidator(pipeline_options, runner)
errors = validator.validate()
assert_that(errors, only_contains(
contains_string('Transform name mapping option is only useful when '
'--update and --streaming is specified')))
def test_transform_name_mapping_invalid_format(self):
options = ['--project=example:example',
'--staging_location=gs://foo/bar',
'--temp_location=gs://foo/bar',
'--update',
'--job_name=test',
'--streaming',
'--transform_name_mapping={\"fromPardo\":123}']
pipeline_options = PipelineOptions(options)
runner = MockRunners.DataflowRunner()
validator = PipelineOptionsValidator(pipeline_options, runner)
errors = validator.validate()
assert_that(errors, only_contains(
contains_string('Invalid transform name mapping format.')))
if __name__ == '__main__':
logging.getLogger().setLevel(logging.INFO)
unittest.main()
|
wreckJ/intellij-community
|
refs/heads/master
|
python/testData/mover/theSameLevelMultiple_afterDown.py
|
83
|
try:
a = 1
except ImportError as A:
print xrange
import <caret>tmp2; import tmp1
|
saurabh6790/pow-app
|
refs/heads/master
|
setup/doctype/company/charts/import_from_openerp.py
|
30
|
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
"""
Import chart of accounts from OpenERP sources
"""
from __future__ import unicode_literals
import os, json
from xml.etree import ElementTree as ET
from webnotes.utils.datautils import read_csv_content
from webnotes.utils import cstr
path = "/Users/rmehta/Downloads/openerp/openerp/addons"
chart_roots = []
accounts = {}
charts = {}
types = {}
def go():
find_charts()
make_maps()
make_trees()
make_charts()
def make_charts():
"""write chart files in app/setup/doctype/company/charts"""
for chart_id in charts:
src = charts[chart_id]
if not src.get("name") or not src.get("account_root_id"):
continue
if not src["account_root_id"] in accounts:
continue
filename = src["id"][5:] + "_" + chart_id
print "building " + filename
chart = {}
chart["name"] = src["name"]
chart["root"] = accounts[src["account_root_id"]]
with open(os.path.join("app", "setup", "doctype", "company",
"charts", filename + ".json"), "w") as chartfile:
chartfile.write(json.dumps(chart, indent=1, sort_keys=True))
def make_trees():
"""build tree hierarchy"""
print "making trees..."
for id in accounts.keys():
account = accounts[id]
if account.get("parent_id") and accounts[account["parent_id"]]:
accounts[account["parent_id"]]["children"].append(account)
del account["parent_id"]
# remove empty children
for id in accounts.keys():
if "children" in accounts[id] and not accounts[id].get("children"):
del accounts[id]["children"]
def make_maps():
"""make maps for `charts` and `accounts`"""
print "making maps..."
for root in chart_roots:
for node in root[0].findall("record"):
if node.get("model")=="account.account.template":
data = {}
for field in node.findall("field"):
if field.get("name")=="name":
data["name"] = field.text
if field.get("name")=="parent_id":
data["parent_id"] = field.get("ref")
if field.get("name")=="user_type":
value = field.get("ref")
if types.get(value, {}).get("root_type"):
data["root_type"] = types[value]["root_type"]
else:
if "asset" in value: data["root_type"] = "Asset"
if "liability" in value: data["root_type"] = "Liability"
if "income" in value: data["root_type"] = "Income"
if "expense" in value: data["root_type"] = "Expense"
data["children"] = []
accounts[node.get("id")] = data
if node.get("model")=="account.chart.template":
data = {}
for field in node.findall("field"):
if field.get("name")=="name":
data["name"] = field.text
if field.get("name")=="account_root_id":
data["account_root_id"] = field.get("ref")
data["id"] = root.get("folder")
charts.setdefault(node.get("id"), {}).update(data)
if node.get("model")=="account.account.type":
data = {}
for field in node.findall("field"):
if field.get("name")=="report_type":
data["root_type"] = field.text.title()
types[node.get("id")] = data
def find_charts():
print "finding charts..."
for basepath, folders, files in os.walk(path):
basename = os.path.basename(basepath)
if basename.startswith("l10n"):
for fname in files:
fname = cstr(fname)
filepath = os.path.join(basepath, fname)
if fname.endswith(".xml"):
tree = ET.parse(filepath)
root = tree.getroot()
for node in root[0].findall("record"):
if node.get("model") in ["account.account.template",
"account.chart.template", "account.account.type"]:
chart_roots.append(root)
root.set("folder", basename)
break
if fname.endswith(".csv"):
with open(filepath, "r") as csvfile:
try:
content = read_csv_content(csvfile.read())
except Exception, e:
continue
if content[0][0]=="id":
for row in content[1:]:
data = dict(zip(content[0], row))
account = {
"name": data.get("name"),
"parent_id": data.get("parent_id:id"),
"children": []
}
accounts[data.get("id")] = account
if not account.get("parent_id"):
chart_id = data.get("chart_id:id")
charts.setdefault(chart_id, {}).update({
"account_root_id": data.get("id")})
if __name__=="__main__":
go()
|
leeyingmu/melee
|
refs/heads/master
|
melee/core/tasklet.py
|
1
|
# -*- coding: utf-8 -*-
"""
Common background process execution framework.
You can define a .py file and run it with the ``tasklet`` framework,
which manages each process's life cycle automatically.
Example Code:
"""
import os, sys, pkgutil, time, signal
from subprocess import Popen
from .env import config, logger, logging
class Tasklet(object):
"""
One tasklet is one subprocess which runs the specific module
"""
def __init__(self, name, module, args):
"""
:param name: the tasklet's name in the config
"""
self.logger = logging.getLogger('%s.tasklet.%s' % (config.servicename, name))
self.name = name
self.package, self.module_name = module.rsplit('.', 1)
self.filename = '%s.py' % self.module_name
package_loader = pkgutil.find_loader(self.package)
self.filepath = package_loader.filename
self.cmds = [sys.executable, os.path.join(self.filepath, self.filename)]
if args:
self.cmds.extend(args.split())
self.process = None
self.startms = None
def __str__(self):
return 'tasklet %s %s %s' % (self.process.pid if self.process else ' ', self.name, self.cmds)
def start(self):
self.process = Popen(self.cmds, cwd=os.getcwd())
self.startms = int(time.time()*1000)
self.logger.info('%s started' % self.name)
def terminate(self):
if self.process:
self.process.terminate()
            self.logger.warn('terminated %s', str(self))
def kill(self):
if self.process:
self.process.kill()
            self.logger.warn('killed %s', str(self))
class TaskletManager(object):
"""
The main process used to run all tasklets by using a ``Tasklet``
"""
def __init__(self, tasklets):
self.logger = logging.getLogger('%s.taskletmanager' % config.servicename)
self.tasklets = tasklets
# register system signals
for s in ['SIGINT', 'SIGTERM', 'SIGQUIT']:
signal.signal(getattr(signal, s), getattr(self, 'handle_%s' % s))
def handle_SIGINT(self, sig, frame):
self.stopall()
self.logger.info('stop all for SIGINT')
def handle_SIGTERM(self, sig, frame):
self.stopall()
self.logger.info('stop all for SIGTERM')
def handle_SIGQUIT(self, sig, frame):
self.stopall()
self.logger.info('stop all for SIGQUIT')
def stopall(self):
if self.tasklets:
for t in self.tasklets:
t.kill()
sys.exit()
def startall(self):
if not self.tasklets:
            self.logger.info('no tasklets defined, exiting')
sys.exit()
for t in self.tasklets:
t.start()
self.logger.info('all tasklets started')
while True:
# wait for signals
signal.pause()
@classmethod
def get(cls, tasklets_configs=None):
"""
:param tasklets_configs: the tasklets configures
[
{'name': '', 'module': 'melee.tasklet.demotask', 'args': '1 2 3', 'number': 2}
]
"""
if not tasklets_configs:
return None
tasklets = []
for c in tasklets_configs:
number = int(c.get('number') or 1)
for i in xrange(number):
tasklets.append(Tasklet(c.get('name'), c.get('module'), c.get('args')))
return TaskletManager(tasklets)
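# Hedged usage sketch (not part of the original module): the expected
# configuration shape for TaskletManager.get; the module path and args
# below are illustrative assumptions.
def _example_start_tasklets():
    manager = TaskletManager.get([
        {'name': 'demo', 'module': 'melee.tasklet.demotask',
         'args': '1 2 3', 'number': 2},
    ])
    if manager:
        manager.startall()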
|
samfoo/servo
|
refs/heads/master
|
tests/wpt/harness/wptrunner/wptmanifest/node.py
|
190
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
class NodeVisitor(object):
def visit(self, node):
# This is ugly as hell, but we don't have multimethods and
# they aren't trivial to fake without access to the class
# object from the class body
func = getattr(self, "visit_%s" % (node.__class__.__name__))
return func(node)
class Node(object):
def __init__(self, data=None):
self.data = data
self.parent = None
self.children = []
def append(self, other):
other.parent = self
self.children.append(other)
def remove(self):
self.parent.children.remove(self)
def __repr__(self):
return "<%s %s>" % (self.__class__.__name__, self.data)
def __str__(self):
rv = [repr(self)]
for item in self.children:
rv.extend(" %s" % line for line in str(item).split("\n"))
return "\n".join(rv)
def __eq__(self, other):
if not (self.__class__ == other.__class__ and
self.data == other.data and
len(self.children) == len(other.children)):
return False
for child, other_child in zip(self.children, other.children):
if not child == other_child:
return False
return True
def copy(self):
new = self.__class__(self.data)
for item in self.children:
new.append(item.copy())
return new
class DataNode(Node):
def append(self, other):
# Append that retains the invariant that child data nodes
# come after child nodes of other types
other.parent = self
if isinstance(other, DataNode):
self.children.append(other)
else:
index = len(self.children)
while index > 0 and isinstance(self.children[index - 1], DataNode):
index -= 1
for i in xrange(index):
assert other.data != self.children[i].data
self.children.insert(index, other)
class KeyValueNode(Node):
def append(self, other):
# Append that retains the invariant that conditional nodes
# come before unconditional nodes
other.parent = self
if isinstance(other, ValueNode):
if self.children:
assert not isinstance(self.children[-1], ValueNode)
self.children.append(other)
else:
if self.children and isinstance(self.children[-1], ValueNode):
self.children.insert(len(self.children) - 1, other)
else:
self.children.append(other)
class ListNode(Node):
def append(self, other):
other.parent = self
self.children.append(other)
class ValueNode(Node):
def append(self, other):
raise TypeError
class AtomNode(ValueNode):
pass
class ConditionalNode(Node):
pass
class UnaryExpressionNode(Node):
def __init__(self, operator, operand):
Node.__init__(self)
self.append(operator)
self.append(operand)
def append(self, other):
Node.append(self, other)
assert len(self.children) <= 2
def copy(self):
new = self.__class__(self.children[0].copy(),
self.children[1].copy())
return new
class BinaryExpressionNode(Node):
def __init__(self, operator, operand_0, operand_1):
Node.__init__(self)
self.append(operator)
self.append(operand_0)
self.append(operand_1)
def append(self, other):
Node.append(self, other)
assert len(self.children) <= 3
def copy(self):
new = self.__class__(self.children[0].copy(),
self.children[1].copy(),
self.children[2].copy())
return new
class UnaryOperatorNode(Node):
def append(self, other):
raise TypeError
class BinaryOperatorNode(Node):
def append(self, other):
raise TypeError
class IndexNode(Node):
pass
class VariableNode(Node):
pass
class StringNode(Node):
pass
class NumberNode(ValueNode):
pass
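# Hedged sketch (not part of the original module): build a tiny manifest
# tree and exercise copy()/equality; the node names are illustrative.
def _example_tree():
    root = DataNode('root')
    kv = KeyValueNode('key')
    kv.append(ValueNode('value'))
    root.append(kv)
    assert root == root.copy()
    return root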
|
ChrisTruncer/Egress-Assess
|
refs/heads/master
|
datatypes/socials.py
|
3
|
'''
This module generates social security numbers
'''
from common import helpers
class Datatype:
def __init__(self, cli_object):
self.cli = "ssn"
self.description = "Social Security Numbers"
self.filetype = "text"
self.datasize = int(cli_object.data_size)
def create_ssn(self):
ssn = helpers.randomNumbers(9)
ssn = ssn[0:3] + "-" + ssn[3:5] + "-" + ssn[5:9]
return ssn
def generate_data(self):
print "[*] Generating data..."
ssns = ''
# This is approx 1 meg of socials
for single_ssn in range(0, 81500 * self.datasize):
ssns += self.create_ssn() + ', '
return ssns
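# Hedged usage sketch (not part of the original module): a stand-in for
# the CLI object with the one attribute Datatype reads; names assumed.
class _FakeCli(object):
    data_size = 1

def _example_generate():
    return Datatype(_FakeCli()).generate_data()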
|
midnightradio/comment-miner
|
refs/heads/master
|
strscan.py
|
1
|
import re
class StringScanner(object):
def __init__(self, string=None):
if string is not None and type(string) is not type(''): raise TypeError('Scanner works only on string')
else:
self.__string = string
self.__pos = 0
def __iadd__(self, string):
if string is not None and type(string) is not type(''): raise TypeError('Scanner works only on string')
self.__string += string
return self
    def concat(self, string):
        self += string
@property
def string(self):
return self.__string
@string.setter
def string(self, string):
if string is not None and type(string) is not type(''): raise TypeError('Scanner works only on string')
self.__string = string
self.__pos = 0
@property
def pos(self):
return self.__pos
@pos.setter
def pos(self, pos):
self.__pos = pos
def eos(self):
return self.pos == len(self.string)
@property
def pre_match(self):
return self.string[:self.pos-1]
@property
def post_match(self):
return self.string[self.pos:]
# __strscan_do_scan(self, re, 1,1,1)
def scan(self, pattern, flags=0):
if type(pattern) == type(''):
pattern = re.compile(pattern, flags)
match = pattern.match(self.string, self.pos)
if match is not None:
self.pos = match.end()
return match.group(0)
return None
# __strscan_do_scan(self, re, 1,1,0)
def scan_until(self, pattern, flags=0):
if type(pattern) == type(''):
pattern = re.compile(pattern, flags)
match = pattern.search(self.string, self.pos)
if match is not None:
old = self.pos
self.pos = match.end()
return self.string[old:self.pos]
return None
# __strscan_do_scan(self, re, 1, 0, 1)
def skip(self, pattern, flags=0):
if type(pattern) == type(''):
pattern = re.compile(pattern, flags)
match = pattern.match(self.string, self.pos)
if match is not None:
old = self.pos
self.pos = match.end()
return self.pos - old
return None
# __strscan_do_scan(self, re, 1, 0, 0)
def skip_until(self, pattern, flags=0):
if type(pattern) == type(''):
pattern = re.compile(pattern, flags)
match = pattern.search(self.string, self.pos)
if match is not None:
old = self.pos
self.pos = match.end()
return self.pos - old
return None
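# Hedged usage sketch (not part of the original module): tokenizing a
# short string with scan/skip; the input text is illustrative.
def _example_scan():
    s = StringScanner('foo bar')
    first = s.scan(r'\w+')   # -> 'foo'
    s.skip(r'\s+')           # consume the whitespace
    second = s.scan(r'\w+')  # -> 'bar'
    return first, second, s.eos()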
|
pleaseproject/python-for-android
|
refs/heads/master
|
python-modules/twisted/twisted/__init__.py
|
56
|
# -*- test-case-name: twisted -*-
# Copyright (c) 2001-2010 Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Twisted: The Framework Of Your Internet.
"""
# Ensure the user is running the version of python we require.
import sys
if not hasattr(sys, "version_info") or sys.version_info < (2, 4):
raise RuntimeError("Twisted requires Python 2.4 or later.")
del sys
# Ensure compat gets imported
from twisted.python import compat
del compat
# setup version
from twisted._version import version
__version__ = version.short()
|
15Dkatz/pants
|
refs/heads/master
|
tests/python/pants_test/engine/test_graph.py
|
11
|
# coding=utf-8
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import functools
import unittest
from pants.engine.nodes import Return
_WAITING = 'TODO: Waiting'
@unittest.skip('Skipped to expedite landing #3821; see: #4027.')
class GraphTest(unittest.TestCase):
def setUp(self):
super(GraphTest, self).setUp()
self.pg = 'TODO: These tests need to be ported to native tests.'
@classmethod
def _mk_chain(cls, graph, sequence, states=[_WAITING, Return]):
"""Create a chain of dependencies (e.g. 'A'->'B'->'C'->'D') in the graph from a sequence."""
for state in states:
dest = None
for src in reversed(sequence):
if state is _WAITING:
graph.add_dependencies(src, [dest] if dest else [])
else:
graph.complete_node(src, state([dest]))
dest = src
return sequence
def test_disallow_completed_state_change(self):
self.pg.complete_node('A', Return('done!'))
with self.assertRaises('TODO: CompletedNodeException: These tests should be ported to native tests.'):
self.pg.add_dependencies('A', ['B'])
def test_disallow_completing_with_incomplete_deps(self):
self.pg.add_dependencies('A', ['B'])
self.pg.add_dependencies('B', ['C'])
with self.assertRaises('TODO: IncompleteDependencyException: These tests should be ported to native tests.'):
self.pg.complete_node('A', Return('done!'))
def test_dependency_edges(self):
self.pg.add_dependencies('A', ['B', 'C'])
self.assertEquals({'B', 'C'}, set(self.pg.dependencies_of('A')))
self.assertEquals({'A'}, set(self.pg.dependents_of('B')))
self.assertEquals({'A'}, set(self.pg.dependents_of('C')))
def test_cycle_simple(self):
self.pg.add_dependencies('A', ['B'])
self.pg.add_dependencies('B', ['A'])
# NB: Order matters: the second insertion is the one tracked as a cycle.
self.assertEquals({'B'}, set(self.pg.dependencies_of('A')))
self.assertEquals(set(), set(self.pg.dependencies_of('B')))
self.assertEquals(set(), set(self.pg.cyclic_dependencies_of('A')))
self.assertEquals({'A'}, set(self.pg.cyclic_dependencies_of('B')))
def test_cycle_indirect(self):
self.pg.add_dependencies('A', ['B'])
self.pg.add_dependencies('B', ['C'])
self.pg.add_dependencies('C', ['A'])
self.assertEquals({'B'}, set(self.pg.dependencies_of('A')))
self.assertEquals({'C'}, set(self.pg.dependencies_of('B')))
self.assertEquals(set(), set(self.pg.dependencies_of('C')))
self.assertEquals(set(), set(self.pg.cyclic_dependencies_of('A')))
self.assertEquals(set(), set(self.pg.cyclic_dependencies_of('B')))
self.assertEquals({'A'}, set(self.pg.cyclic_dependencies_of('C')))
def test_cycle_long(self):
# Creating a long chain is allowed.
nodes = list(range(0, 100))
self._mk_chain(self.pg, nodes, states=(_WAITING,))
walked_nodes = [node for node, _ in self.pg.walk([nodes[0]])]
self.assertEquals(nodes, walked_nodes)
# Closing the chain is not.
begin, end = nodes[0], nodes[-1]
self.pg.add_dependencies(end, [begin])
self.assertEquals(set(), set(self.pg.dependencies_of(end)))
self.assertEquals({begin}, set(self.pg.cyclic_dependencies_of(end)))
def test_walk(self):
nodes = list('ABCDEF')
self._mk_chain(self.pg, nodes)
walked_nodes = list((node for node, _ in self.pg.walk(nodes[0])))
self.assertEquals(nodes, walked_nodes)
def test_invalidate_all(self):
chain_list = list('ABCDEFGHIJKLMNOPQRSTUVWXYZ')
invalidators = (
self.pg.invalidate,
functools.partial(self.pg.invalidate, lambda node, _: node == 'Z')
)
for invalidator in invalidators:
self._mk_chain(self.pg, chain_list)
self.assertTrue(self.pg.completed_nodes())
self.assertTrue(self.pg.dependents())
self.assertTrue(self.pg.dependencies())
self.assertTrue(self.pg.cyclic_dependencies())
invalidator()
self.assertFalse(self.pg._nodes)
def test_invalidate_partial(self):
comparison_pg = 'TODO: These tests need to be ported to native tests.'
chain_a = list('ABCDEF')
chain_b = list('GHIJKL')
# Add two dependency chains to the primary graph.
self._mk_chain(self.pg, chain_a)
self._mk_chain(self.pg, chain_b)
# Add only the dependency chain we won't invalidate to the comparison graph.
self._mk_chain(comparison_pg, chain_b)
# Invalidate one of the chains in the primary graph from the right-most node.
self.pg.invalidate(lambda node, _: node == chain_a[-1])
# Ensure the final structure of the primary graph matches the comparison graph.
pg_structure = {n: e.structure() for n, e in self.pg._nodes.items()}
comparison_structure = {n: e.structure() for n, e in comparison_pg._nodes.items()}
self.assertEquals(pg_structure, comparison_structure)
def test_invalidate_count(self):
self._mk_chain(self.pg, list('ABCDEFGHIJKLMNOPQRSTUVWXYZ'))
invalidated_count = self.pg.invalidate(lambda node, _: node == 'I')
self.assertEquals(invalidated_count, 9)
def test_invalidate_partial_identity_check(self):
# Create a graph with a chain from A..Z.
chain = self._mk_chain(self.pg, list('ABCDEFGHIJKLMNOPQRSTUVWXYZ'))
self.assertTrue(list(self.pg.completed_nodes()))
    # Track the pre-invalidation nodes (from A..Q).
index_of_q = chain.index('Q')
before_nodes = [node for node, _ in self.pg.completed_nodes() if node in chain[:index_of_q + 1]]
self.assertTrue(before_nodes)
# Invalidate all nodes under Q.
self.pg.invalidate(lambda node, _: node == chain[index_of_q])
self.assertTrue(list(self.pg.completed_nodes()))
    # Check that the root node and all fs nodes were removed via identity checks.
for node, entry in self.pg._nodes.items():
      self.assertFalse(node in before_nodes, 'node:\n{}\nwas not properly removed'.format(node))
for associated in (entry.dependencies, entry.dependents, entry.cyclic_dependencies):
for associated_entry in associated:
self.assertFalse(
associated_entry.node in before_nodes,
'node:\n{}\nis still associated with:\n{}\nin {}'.format(node, associated_entry.node, entry)
)
|
OptiPop/external_chromium_org
|
refs/heads/opti-5.1
|
third_party/cython/src/Cython/Tests/__init__.py
|
1472
|
# empty file
|
ESSS/numpy
|
refs/heads/master
|
tools/win32build/misc/x86analysis.py
|
29
|
#! /usr/bin/env python
# Last Change: Sat Mar 28 02:00 AM 2009 J
# Try to identify instruction set used in binary (x86 only). This works by
# checking the assembly for instructions specific to sse, etc... Obviously,
# this won't always work (for example, if some instructions are used
# only after proper detection of the running CPU, this will give a false alarm).
from __future__ import division, print_function
import sys
import re
import os
import subprocess
import popen2
import optparse
I486_SET = ["cmpxchg", "xadd", "bswap", "invd", "wbinvd", "invlpg"]
I586_SET = ["rdmsr", "wrmsr", "rdtsc", "cmpxch8B", "rsm"]
PPRO_SET = ["cmovcc", "fcmovcc", "fcomi", "fcomip", "fucomi", "fucomip", "rdpmc", "ud2"]
MMX_SET = ["emms", "movd", "movq", "packsswb", "packssdw", "packuswb", "paddb",
"paddw", "paddd", "paddsb", "paddsw", "paddusb", "paddusw", "pand",
"pandn", "pcmpeqb", "pcmpeqw", "pcmpeqd", "pcmpgtb", "pcmpgtw",
"pcmpgtd", "pmaddwd", "pmulhw", "pmullw", "por", "psllw", "pslld",
"psllq", "psraw", "psrad", "psrlw", "psrld", "psrlq", "psubb", "psubw",
"psubd", "psubsb", "psubsw", "psubusb", "psubusw", "punpckhbw",
"punpckhwd", "punpckhdq", "punpcklbw", "punpcklwd", "punpckldq",
"pxor"]
SSE_SET = ["addps", "addss", "andnps", "andps", "cmpps", "cmpss", "comiss",
"cvtpi2ps", "cvtps2pi", "cvtsi2ss", "cvtss2si", "cvttps2pi",
"cvttss2si", "divps", "divss", "fxrstor", "fxsave", "ldmxcsr", "maxps",
"maxss", "minps", "minss", "movaps", "movhlps", "movhps", "movlhps",
"movlps", "movmskps", "movss", "movups", "mulps", "mulss", "orps",
"pavgb", "pavgw", "psadbw", "rcpps", "rcpss", "rsqrtps", "rsqrtss",
"shufps", "sqrtps", "sqrtss", "stmxcsr", "subps", "subss", "ucomiss",
"unpckhps", "unpcklps", "xorps", "pextrw", "pinsrw", "pmaxsw",
"pmaxub", "pminsw", "pminub", "pmovmskb", "pmulhuw", "pshufw",
"maskmovq", "movntps", "movntq", "prefetch", "sfence"]
SSE2_SET = ["addpd", "addsd", "andnpd", "andpd", "clflush", "cmppd", "cmpsd",
"comisd", "cvtdq2pd", "cvtdq2ps", "cvtpd2pi", "cvtpd2pq", "cvtpd2ps",
"cvtpi2pd", "cvtps2dq", "cvtps2pd", "cvtsd2si", "cvtsd2ss", "cvtsi2sd",
"cvtss2sd", "cvttpd2pi", "cvttpd2dq", "cvttps2dq", "cvttsd2si",
"divpd", "divsd", "lfence", "maskmovdqu", "maxpd", "maxsd", "mfence",
"minpd", "minsd", "movapd", "movd", "movdq2q", "movdqa", "movdqu",
"movhpd", "movlpd", "movmskpd", "movntdq", "movnti", "movntpd", "movq",
"movq2dq", "movsd", "movupd", "mulpd", "mulsd", "orpd", "packsswb",
"packssdw", "packuswb", "paddb", "paddw", "paddd", "paddq", "paddq",
"paddsb", "paddsw", "paddusb", "paddusw", "pand", "pandn", "pause",
"pavgb", "pavgw", "pcmpeqb", "pcmpeqw", "pcmpeqd", "pcmpgtb",
"pcmpgtw", "pcmpgtd", "pextrw", "pinsrw", "pmaddwd", "pmaxsw",
"pmaxub", "pminsw", "pminub", "pmovmskb", "pmulhw", "pmulhuw",
"pmullw", "pmuludq", "pmuludq", "por", "psadbw", "pshufd", "pshufhw",
"pshuflw", "pslldq", "psllw", "pslld", "psllq", "psraw", "psrad",
"psrldq", "psrlw", "psrld", "psrlq", "psubb", "psubw", "psubd",
"psubq", "psubq", "psubsb", "psubsw", "psubusb", "psubusw", "psubsb",
"punpckhbw", "punpckhwd", "punpckhdq", "punpckhqdq", "punpcklbw",
"punpcklwd", "punpckldq", "punpcklqdq", "pxor", "shufpd", "sqrtpd",
"sqrtsd", "subpd", "subsd", "ucomisd", "unpckhpd", "unpcklpd", "xorpd"]
SSE3_SET = [ "addsubpd", "addsubps", "haddpd", "haddps", "hsubpd", "hsubps",
"lddqu", "movddup", "movshdup", "movsldup", "fisttp"]
def get_vendor_string():
    """Return the vendor string reading cpuinfo."""
    try:
        with open('/proc/cpuinfo') as fid:
            a = fid.readlines()
        b = re.compile('^vendor_id.*')
        c = [i for i in a if b.match(i)]
    except IOError:
        raise ValueError("Could not read cpuinfo")
    intel = re.compile("GenuineIntel")
    amd = re.compile("AuthenticAMD")
    cyr = re.compile("CyrixInstead")
    tra = re.compile("GenuineTMx86")
    if intel.search(c[0]):
        return "intel"
    elif amd.search(c[0]):
        return "amd"
    elif cyr.search(c[0]):
        return "cyrix"
    elif tra.search(c[0]):
        return "tra"
    else:
        raise ValueError("Unknown vendor")
def disassemble(filename):
    """From a filename, returns a list of all asm instructions."""
    cmd = "i586-mingw32msvc-objdump -d %s" % filename
    p = subprocess.Popen(cmd.split(), stdout=subprocess.PIPE)
    def floupi(line):
        # objdump lines are tab separated: address, raw bytes, instruction.
        fields = line.split('\t')
        if len(fields) > 2:
            instruction = fields[2]
        else:
            instruction = fields[0]
        # The mnemonic is the first space separated token.
        return instruction.split(' ')[0]
    inst = [floupi(line) for line in p.stdout]
    p.stdout.close()
    p.wait()
    return inst
def has_set(seq, asm_set):
a = dict([(i, 0) for i in asm_set])
for i in asm_set:
a[i] = seq.count(i)
return a
def has_sse(seq):
return has_set(seq, SSE_SET)
def has_sse2(seq):
return has_set(seq, SSE2_SET)
def has_sse3(seq):
return has_set(seq, SSE3_SET)
def has_mmx(seq):
return has_set(seq, MMX_SET)
def has_ppro(seq):
return has_set(seq, PPRO_SET)
def cntset(seq):
cnt = 0
for i in seq.values():
cnt += i
return cnt
def main():
    args = sys.argv[1:]
    if not args:
        print("Usage: x86analysis.py BINARY")
        sys.exit(1)
    filename = args[0]
    analyse(filename)
def analyse(filename):
print(get_vendor_string())
print("Getting instructions...")
inst = disassemble(filename)
print("Counting instructions...")
sse = has_sse(inst)
sse2 = has_sse2(inst)
sse3 = has_sse3(inst)
print("SSE3 inst %d" % cntset(sse3))
print("SSE2 inst %d" % cntset(sse2))
print("SSE inst %d" % cntset(sse))
print("Analysed %d instructions" % len(inst))
if __name__ == '__main__':
main()
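# Usage sketch (illustrative, not part of the original script; assumes an
# x86 binary ./foo.exe and the i586-mingw32msvc-objdump tool on the PATH):
#
#   $ python x86analysis.py foo.exe
#
# or, programmatically:
#
#   inst = disassemble('foo.exe')
#   print("SSE inst %d" % cntset(has_sse(inst)))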
|
YYWen0o0/python-frame-django
|
refs/heads/master
|
django/core/management/commands/runfcgi.py
|
120
|
import argparse
import warnings
from django.core.management.base import BaseCommand
from django.utils.deprecation import RemovedInDjango19Warning
class Command(BaseCommand):
help = "Runs this project as a FastCGI application. Requires flup."
def add_arguments(self, parser):
parser.add_argument('args', nargs=argparse.REMAINDER,
help='Various KEY=val options.')
def handle(self, *args, **options):
warnings.warn(
"FastCGI support has been deprecated and will be removed in Django 1.9.",
RemovedInDjango19Warning)
from django.conf import settings
from django.utils import translation
# Activate the current language, because it won't get activated later.
try:
translation.activate(settings.LANGUAGE_CODE)
except AttributeError:
pass
from django.core.servers.fastcgi import runfastcgi
runfastcgi(args)
def usage(self, subcommand):
from django.core.servers.fastcgi import FASTCGI_HELP
return FASTCGI_HELP
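# Typical invocation (options are KEY=val pairs forwarded to flup's
# runfastcgi; see FASTCGI_HELP for the authoritative list):
#
#   ./manage.py runfcgi host=127.0.0.1 port=8080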
|
kansanmuisti/datavaalit
|
refs/heads/master
|
web/stats/migrations/0001_initial.py
|
1
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Statistic'
db.create_table('stats_statistic', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('name', self.gf('django.db.models.fields.CharField')(max_length=50)),
('source', self.gf('django.db.models.fields.CharField')(max_length=50)),
('source_url', self.gf('django.db.models.fields.URLField')(max_length=200, null=True, blank=True)),
('election', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['political.Election'], null=True)),
('fetch_date', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
))
db.send_create_signal('stats', ['Statistic'])
# Adding unique constraint on 'Statistic', fields ['name', 'source_url']
db.create_unique('stats_statistic', ['name', 'source_url'])
# Adding model 'VotingPercentage'
db.create_table('stats_votingpercentage', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('statistic', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['stats.Statistic'])),
('value', self.gf('django.db.models.fields.DecimalField')(max_digits=20, decimal_places=4)),
('municipality', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['geo.Municipality'])),
('election', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['political.Election'])),
))
db.send_create_signal('stats', ['VotingPercentage'])
# Adding model 'VotingDistrictStatistic'
db.create_table('stats_votingdistrictstatistic', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('statistic', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['stats.Statistic'])),
('value', self.gf('django.db.models.fields.DecimalField')(max_digits=20, decimal_places=4)),
('election', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['political.Election'])),
('district', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['political.VotingDistrict'])),
))
db.send_create_signal('stats', ['VotingDistrictStatistic'])
# Adding model 'PersonElectionStatistic'
db.create_table('stats_personelectionstatistic', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('statistic', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['stats.Statistic'])),
('value', self.gf('django.db.models.fields.DecimalField')(max_digits=20, decimal_places=4)),
('election', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['political.Election'])),
('person', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['political.Person'])),
('district', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['political.VotingDistrict'], null=True)),
('municipality', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['geo.Municipality'])),
))
db.send_create_signal('stats', ['PersonElectionStatistic'])
def backwards(self, orm):
# Removing unique constraint on 'Statistic', fields ['name', 'source_url']
db.delete_unique('stats_statistic', ['name', 'source_url'])
# Deleting model 'Statistic'
db.delete_table('stats_statistic')
# Deleting model 'VotingPercentage'
db.delete_table('stats_votingpercentage')
# Deleting model 'VotingDistrictStatistic'
db.delete_table('stats_votingdistrictstatistic')
# Deleting model 'PersonElectionStatistic'
db.delete_table('stats_personelectionstatistic')
models = {
'geo.municipality': {
'Meta': {'object_name': 'Municipality'},
'id': ('django.db.models.fields.IntegerField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'political.election': {
'Meta': {'object_name': 'Election'},
'date': ('django.db.models.fields.DateField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'round': ('django.db.models.fields.PositiveSmallIntegerField', [], {}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '4'}),
'year': ('django.db.models.fields.PositiveIntegerField', [], {})
},
'political.party': {
'Meta': {'object_name': 'Party'},
'abbrev': ('django.db.models.fields.CharField', [], {'max_length': '8'}),
'code': ('django.db.models.fields.CharField', [], {'max_length': '8'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '80'})
},
'political.person': {
'Meta': {'object_name': 'Person'},
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
'gender': ('django.db.models.fields.CharField', [], {'max_length': '1', 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
'municipality': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['geo.Municipality']"}),
'party': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['political.Party']", 'null': 'True'})
},
'political.votingdistrict': {
'Meta': {'unique_together': "(('municipality', 'origin_id'),)", 'object_name': 'VotingDistrict'},
'borders': ('django.contrib.gis.db.models.fields.MultiPolygonField', [], {'null': 'True'}),
'elections': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['political.Election']", 'symmetrical': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'municipality': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['geo.Municipality']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'origin_id': ('django.db.models.fields.CharField', [], {'max_length': '10'})
},
'stats.personelectionstatistic': {
'Meta': {'object_name': 'PersonElectionStatistic'},
'district': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['political.VotingDistrict']", 'null': 'True'}),
'election': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['political.Election']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'municipality': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['geo.Municipality']"}),
'person': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['political.Person']"}),
'statistic': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['stats.Statistic']"}),
'value': ('django.db.models.fields.DecimalField', [], {'max_digits': '20', 'decimal_places': '4'})
},
'stats.statistic': {
'Meta': {'unique_together': "(('name', 'source_url'),)", 'object_name': 'Statistic'},
'election': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['political.Election']", 'null': 'True'}),
'fetch_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'source': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'source_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'})
},
'stats.votingdistrictstatistic': {
'Meta': {'object_name': 'VotingDistrictStatistic'},
'district': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['political.VotingDistrict']"}),
'election': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['political.Election']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'statistic': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['stats.Statistic']"}),
'value': ('django.db.models.fields.DecimalField', [], {'max_digits': '20', 'decimal_places': '4'})
},
'stats.votingpercentage': {
'Meta': {'object_name': 'VotingPercentage'},
'election': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['political.Election']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'municipality': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['geo.Municipality']"}),
'statistic': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['stats.Statistic']"}),
'value': ('django.db.models.fields.DecimalField', [], {'max_digits': '20', 'decimal_places': '4'})
}
}
complete_apps = ['stats']
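# Applied with South's migrate command (assumes South is installed and
# 'south' is listed in INSTALLED_APPS):
#
#   ./manage.py migrate stats 0001_initial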
|
OpenUpgrade/OpenUpgrade
|
refs/heads/8.0
|
addons/l10n_ca/__openerp__.py
|
260
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Canada - Accounting',
'version': '1.2',
'author': 'Savoir-faire Linux',
'website': 'http://www.savoirfairelinux.com',
'category': 'Localization/Account Charts',
'description': """
This is the module to manage the English and French - Canadian accounting chart in OpenERP.
===========================================================================================
Canadian accounting charts and localizations.
Fiscal positions
----------------
When considering taxes to be applied, it is the province where the delivery occurs that matters.
Therefore we decided to implement the most common case in the fiscal positions: delivery is the
responsibility of the supplier and done at the customer location.
Some examples:
1) You have a customer from another province and you deliver to his location.
On the customer, set the fiscal position to his province.
2) You have a customer from another province. However this customer comes to your location
with their truck to pick up products. On the customer, do not set any fiscal position.
3) An international supplier doesn't charge you any tax. Taxes are charged at customs
by the customs broker. On the supplier, set the fiscal position to International.
4) An international supplier charges you your provincial tax. They are registered with your
provincial government and remit taxes themselves. On the supplier, do not set any fiscal
position.
""",
'depends': [
'base',
'account',
'base_iban',
'base_vat',
'account_chart',
'account_anglo_saxon'
],
'data': [
'account_chart_en.xml',
'account_tax_code_en.xml',
'account_chart_template_en.xml',
'account_tax_en.xml',
'fiscal_templates_en.xml',
'account_chart_fr.xml',
'account_tax_code_fr.xml',
'account_chart_template_fr.xml',
'account_tax_fr.xml',
'fiscal_templates_fr.xml',
'l10n_ca_wizard.xml'
],
'demo': [],
'installable': True,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
accepton/accepton-python
|
refs/heads/master
|
accepton/promo_code.py
|
1
|
from datetime import datetime
from .base import Base
__all__ = ["PromoCode"]
class PromoCode(Base):
    def __init__(self, attrs=None):
        super(PromoCode, self).__init__(attrs if attrs is not None else {})
self.initialize_attr("created_at", datetime)
self.initialize_attr("name", str)
self.initialize_attr("promo_type", str)
self.initialize_attr("value", float)
self.original_name = self.name
def as_params(self):
return {"name": self.name,
"promo_type": self.promo_type,
"value": self.value}
|
mirrax/OpenBazaar
|
refs/heads/develop
|
dht/util.py
|
13
|
"""
Collection of utility functions not bound to a particular class.
"""
import random
import time
from dht import constants
class BadGUIDError(Exception):
"""Exception raised on detecting a bad guid."""
pass
def now():
"""
Return whole seconds passed since Epoch.
"""
return int(time.time())
def partition(sequence, predicate):
"""
Partition the sequence into two lists, according to the
function provided.
Args:
sequence: A sequence of elements.
predicate: A function of one argument, returning bool.
Returns:
A tuple of lists, where the first list contains all
elements that pass the test, and the second list all
other elements.
"""
list1, list2 = [], []
for elem in sequence:
if predicate(elem):
list1.append(elem)
else:
list2.append(elem)
return list1, list2
def distance(guid1, guid2):
"""
Calculate the XOR result between two guids, which represents
the distance between these guids in the Kademlia protocol.
Args:
guid1, guid2: The first and second guid, respectively,
as strings or unicodes, in hexadecimal.
Returns:
XOR of the integers corresponding to the guids.
Raises:
BadGUIDError: Some guid was of improper length.
"""
if len(guid1) != constants.HEX_NODE_ID_LEN:
raise BadGUIDError('guid of improper length: {0}'.format(guid1))
if len(guid2) != constants.HEX_NODE_ID_LEN:
raise BadGUIDError('guid of improper length: {0}'.format(guid2))
return int(guid1, base=16) ^ int(guid2, base=16)
def num_to_guid(num):
"""
Converts an integer to a DHT guid.
It is the caller's responsibility to ensure the resulting
guid falls in the ID space.
Args:
num: The integer to convert.
Returns:
A string in hexadecimal, corresponding to the number given.
"""
    guid = '{0:x}'.format(num)
# Pad to proper length.
return guid.rjust(constants.HEX_NODE_ID_LEN, '0')
def guid_to_num(guid):
"""
Convert a DHT guid to an integer.
Args:
guid: The guid to convert, as a string or unicode, in
hexadecimal.
Returns:
An integer corresponding to the DHT guid given.
"""
return int(guid.rstrip('L'), base=16)
def random_guid_in_range(range_min, range_max):
"""
Get a random guid from a half-open range of the ID space.
Args:
range_min, range_max: The lower and upper limit
of the target (half-open) range, as integers.
Returns:
A random guid that falls inside the range given.
"""
random_int = random.randrange(range_min, range_max)
return num_to_guid(random_int)
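# Worked example (illustrative; assumes constants.HEX_NODE_ID_LEN == 40,
# i.e. 160-bit SHA-1 style node IDs):
#
#   a = '0' * 39 + 'a'               # integer value 10
#   b = '0' * 39 + 'c'               # integer value 12
#   distance(a, b)                   # -> 10 ^ 12 == 6
#   num_to_guid(6)                   # -> 39 zeros followed by '6'
#   guid_to_num(num_to_guid(6))      # -> 6, i.e. the conversions round-trip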
|
formiano/enigma2
|
refs/heads/master
|
lib/python/Components/Converter/StaticMultiList.py
|
132
|
from enigma import eListboxPythonMultiContent
from Components.Converter.StringList import StringList
class StaticMultiList(StringList):
"""Turns a python list in 'multi list format' into a list which can be used in a listbox."""
def changed(self, what):
if not self.content:
self.content = eListboxPythonMultiContent()
if self.source:
# setup the required item height, as given by the source.
self.content.setItemHeight(self.source.item_height)
# also setup fonts (also given by source)
index = 0
for f in self.source.fonts:
self.content.setFont(index, f)
index += 1
if self.source:
self.content.setList(self.source.list)
print "downstream_elements:", self.downstream_elements
self.downstream_elements.changed(what)
|
ee08b397/panda3d
|
refs/heads/master
|
direct/src/leveleditor/LevelLoaderBase.py
|
11
|
import imp
class LevelLoaderBase:
"""
Base calss for LevelLoader
which you will use to load level editor data in your game.
Refer LevelLoader.py for example.
"""
def __init__(self):
self.defaultPath = None # this should be set in your LevelLoader.py
self.initLoader()
def initLoader(self):
# You should implement this in subclass
        raise NotImplementedError('initLoader() must be implemented in your LevelLoader.py')
def cleanUp(self):
# When you don't need to load any more data, you can call clean up
del base.objectPalette
del base.protoPalette
del base.objectHandler
del base.objectMgr
def loadFromFile(self, fileName, filePath=None):
if filePath is None:
filePath = self.defaultPath
if fileName.endswith('.py'):
fileName = fileName[:-3]
file, pathname, description = imp.find_module(fileName, [filePath])
try:
module = imp.load_module(fileName, file, pathname, description)
return True
        except Exception:
            print 'failed to load %s' % fileName
return None
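# Minimal subclass sketch (hypothetical names; see LevelLoader.py for the
# real implementation):
#
#   class MyLevelLoader(LevelLoaderBase):
#       def initLoader(self):
#           self.defaultPath = '/path/to/levels'
#
#   loader = MyLevelLoader()
#   loader.loadFromFile('mylevel.py')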
|
teslaji/homebase
|
refs/heads/master
|
venv/HomeBase/lib/python3.5/site-packages/django/contrib/gis/maps/google/overlays.py
|
117
|
from __future__ import unicode_literals
from functools import total_ordering
from django.contrib.gis.geos import (
LinearRing, LineString, Point, Polygon, fromstr,
)
from django.utils import six
from django.utils.encoding import python_2_unicode_compatible
from django.utils.html import html_safe
@html_safe
@python_2_unicode_compatible
class GEvent(object):
"""
A Python wrapper for the Google GEvent object.
Events can be attached to any object derived from GOverlayBase with the
add_event() call.
For more information please see the Google Maps API Reference:
https://developers.google.com/maps/documentation/javascript/reference#event
Example:
from django.shortcuts import render
from django.contrib.gis.maps.google import GoogleMap, GEvent, GPolyline
def sample_request(request):
polyline = GPolyline('LINESTRING(101 26, 112 26, 102 31)')
event = GEvent('click',
'function() { location.href = "http://www.google.com"}')
polyline.add_event(event)
return render(request, 'mytemplate.html', {
'google': GoogleMap(polylines=[polyline]),
})
"""
def __init__(self, event, action):
"""
Initializes a GEvent object.
Parameters:
event:
string for the event, such as 'click'. The event must be a valid
event for the object in the Google Maps API.
There is no validation of the event type within Django.
action:
string containing a Javascript function, such as
'function() { location.href = "newurl";}'
            The string must be a valid Javascript function. Again there is no
            validation of the function within Django.
"""
self.event = event
self.action = action
def __str__(self):
"Returns the parameter part of a GEvent."
return '"%s", %s' % (self.event, self.action)
@html_safe
@python_2_unicode_compatible
class GOverlayBase(object):
def __init__(self):
self.events = []
def latlng_from_coords(self, coords):
"Generates a JavaScript array of GLatLng objects for the given coordinates."
return '[%s]' % ','.join('new GLatLng(%s,%s)' % (y, x) for x, y in coords)
def add_event(self, event):
"Attaches a GEvent to the overlay object."
self.events.append(event)
def __str__(self):
"The string representation is the JavaScript API call."
return '%s(%s)' % (self.__class__.__name__, self.js_params)
class GPolygon(GOverlayBase):
"""
A Python wrapper for the Google GPolygon object. For more information
please see the Google Maps API Reference:
https://developers.google.com/maps/documentation/javascript/reference#Polygon
"""
def __init__(self, poly,
stroke_color='#0000ff', stroke_weight=2, stroke_opacity=1,
fill_color='#0000ff', fill_opacity=0.4):
"""
The GPolygon object initializes on a GEOS Polygon or a parameter that
may be instantiated into GEOS Polygon. Please note that this will not
depict a Polygon's internal rings.
Keyword Options:
stroke_color:
The color of the polygon outline. Defaults to '#0000ff' (blue).
stroke_weight:
The width of the polygon outline, in pixels. Defaults to 2.
stroke_opacity:
The opacity of the polygon outline, between 0 and 1. Defaults to 1.
fill_color:
The color of the polygon fill. Defaults to '#0000ff' (blue).
fill_opacity:
The opacity of the polygon fill. Defaults to 0.4.
"""
if isinstance(poly, six.string_types):
poly = fromstr(poly)
if isinstance(poly, (tuple, list)):
poly = Polygon(poly)
if not isinstance(poly, Polygon):
raise TypeError('GPolygon may only initialize on GEOS Polygons.')
# Getting the envelope of the input polygon (used for automatically
# determining the zoom level).
self.envelope = poly.envelope
# Translating the coordinates into a JavaScript array of
# Google `GLatLng` objects.
self.points = self.latlng_from_coords(poly.shell.coords)
# Stroke settings.
self.stroke_color, self.stroke_opacity, self.stroke_weight = stroke_color, stroke_opacity, stroke_weight
# Fill settings.
self.fill_color, self.fill_opacity = fill_color, fill_opacity
super(GPolygon, self).__init__()
@property
def js_params(self):
return '%s, "%s", %s, %s, "%s", %s' % (self.points, self.stroke_color, self.stroke_weight, self.stroke_opacity,
self.fill_color, self.fill_opacity)
class GPolyline(GOverlayBase):
"""
A Python wrapper for the Google GPolyline object. For more information
please see the Google Maps API Reference:
https://developers.google.com/maps/documentation/javascript/reference#Polyline
"""
def __init__(self, geom, color='#0000ff', weight=2, opacity=1):
"""
        The GPolyline object may be initialized on GEOS LineString, LinearRing,
        and Polygon objects (internal rings not supported) or a parameter that
        may be instantiated into one of the above geometries.
Keyword Options:
color:
The color to use for the polyline. Defaults to '#0000ff' (blue).
weight:
The width of the polyline, in pixels. Defaults to 2.
opacity:
The opacity of the polyline, between 0 and 1. Defaults to 1.
"""
# If a GEOS geometry isn't passed in, try to construct one.
if isinstance(geom, six.string_types):
geom = fromstr(geom)
if isinstance(geom, (tuple, list)):
geom = Polygon(geom)
# Generating the lat/lng coordinate pairs.
if isinstance(geom, (LineString, LinearRing)):
self.latlngs = self.latlng_from_coords(geom.coords)
elif isinstance(geom, Polygon):
self.latlngs = self.latlng_from_coords(geom.shell.coords)
else:
raise TypeError('GPolyline may only initialize on GEOS LineString, LinearRing, and/or Polygon geometries.')
# Getting the envelope for automatic zoom determination.
self.envelope = geom.envelope
self.color, self.weight, self.opacity = color, weight, opacity
super(GPolyline, self).__init__()
@property
def js_params(self):
return '%s, "%s", %s, %s' % (self.latlngs, self.color, self.weight, self.opacity)
@total_ordering
class GIcon(object):
"""
Creates a GIcon object to pass into a Gmarker object.
The keyword arguments map to instance attributes of the same name. These,
in turn, correspond to a subset of the attributes of the official GIcon
javascript object:
https://developers.google.com/maps/documentation/javascript/reference#Icon
Because a Google map often uses several different icons, a name field has
been added to the required arguments.
Required Arguments:
varname:
            A string which will become the basis for the js variable name of
            the marker. For this reason, your code should assign a unique
            name for each GIcon you instantiate; otherwise there will be
            namespace collisions in your javascript.
Keyword Options:
image:
            The url of the image to be used as the icon on the map. Defaults
            to 'G_DEFAULT_ICON'.
iconsize:
a tuple representing the pixel size of the foreground (not the
shadow) image of the icon, in the format: (width, height) ex.:
GIcon('fast_food',
image="/media/icon/star.png",
iconsize=(15,10))
            Would indicate your custom icon was 15px wide and 10px high.
shadow:
the url of the image of the icon's shadow
shadowsize:
a tuple representing the pixel size of the shadow image, format is
the same as ``iconsize``
iconanchor:
            a tuple representing the pixel coordinate, relative to the top
            left corner of the icon image, at which this icon is anchored to
            the map, in (x, y) format. x increases to the right and y
            increases downwards in the Google Maps coordinate system.
infowindowanchor:
The pixel coordinate relative to the top left corner of the icon
image at which the info window is anchored to this icon.
"""
def __init__(self, varname, image=None, iconsize=None,
shadow=None, shadowsize=None, iconanchor=None,
infowindowanchor=None):
self.varname = varname
self.image = image
self.iconsize = iconsize
self.shadow = shadow
self.shadowsize = shadowsize
self.iconanchor = iconanchor
self.infowindowanchor = infowindowanchor
def __eq__(self, other):
return self.varname == other.varname
def __lt__(self, other):
return self.varname < other.varname
def __hash__(self):
# XOR with hash of GIcon type so that hash('varname') won't
# equal hash(GIcon('varname')).
return hash(self.__class__) ^ hash(self.varname)
class GMarker(GOverlayBase):
"""
A Python wrapper for the Google GMarker object. For more information
please see the Google Maps API Reference:
https://developers.google.com/maps/documentation/javascript/reference#Marker
Example:
from django.shortcuts import render
from django.contrib.gis.maps.google.overlays import GMarker, GEvent
def sample_request(request):
marker = GMarker('POINT(101 26)')
event = GEvent('click',
'function() { location.href = "http://www.google.com"}')
marker.add_event(event)
return render(request, 'mytemplate.html', {
'google': GoogleMap(markers=[marker]),
})
"""
def __init__(self, geom, title=None, draggable=False, icon=None):
"""
The GMarker object may initialize on GEOS Points or a parameter
that may be instantiated into a GEOS point. Keyword options map to
GMarkerOptions -- so far only the title option is supported.
Keyword Options:
title:
Title option for GMarker, will be displayed as a tooltip.
draggable:
Draggable option for GMarker, disabled by default.
"""
# If a GEOS geometry isn't passed in, try to construct one.
if isinstance(geom, six.string_types):
geom = fromstr(geom)
if isinstance(geom, (tuple, list)):
geom = Point(geom)
if isinstance(geom, Point):
self.latlng = self.latlng_from_coords(geom.coords)
else:
raise TypeError('GMarker may only initialize on GEOS Point geometry.')
# Getting the envelope for automatic zoom determination.
self.envelope = geom.envelope
# TODO: Add support for more GMarkerOptions
self.title = title
self.draggable = draggable
self.icon = icon
super(GMarker, self).__init__()
def latlng_from_coords(self, coords):
return 'new GLatLng(%s,%s)' % (coords[1], coords[0])
def options(self):
result = []
if self.title:
result.append('title: "%s"' % self.title)
if self.icon:
result.append('icon: %s' % self.icon.varname)
if self.draggable:
result.append('draggable: true')
return '{%s}' % ','.join(result)
@property
def js_params(self):
return '%s, %s' % (self.latlng, self.options())
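# Combined sketch (assembled from the docstring examples above; the icon
# image path is hypothetical):
#
#   icon = GIcon('flag', image='/media/icon/flag.png', iconsize=(15, 10))
#   marker = GMarker('POINT(101 26)', title='HQ', icon=icon)
#   marker.options()  # -> '{title: "HQ",icon: flag}'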
|
gmr/tinman
|
refs/heads/master
|
tinman/config.py
|
3
|
"""
Configuration Constants
"""
APPLICATION = 'Application'
DAEMON = 'Daemon'
HTTP_SERVER = 'HTTPServer'
LOGGING = 'Logging'
ROUTES = 'Routes'
ADAPTER = 'adapter'
AUTOMATIC = 'automatic'
BASE = 'base'
BASE_VARIABLE = '{{base}}'
CERT_REQS = 'cert_reqs'
DEBUG = 'debug'
DEFAULT_LOCALE = 'default_locale'
DB = 'db'
DIRECTORY = 'directory'
DURATION = 'duration'
FILE = 'file'
HOST = 'host'
LOG_FUNCTION = 'log_function'
NAME = 'name'
NEWRELIC = 'newrelic_ini'
NO_KEEP_ALIVE = 'no_keep_alive'
NONE = 'none'
OPTIONAL = 'optional'
PROCESSES = 'processes'
PATHS = 'paths'
PORT = 'port'
PORTS = 'ports'
RABBITMQ = 'rabbitmq'
REDIS = 'redis'
REQUIRED = 'required'
SSL_OPTIONS = 'ssl_options'
STATIC = 'static'
TEMPLATES = 'templates'
TRANSFORMS = 'transforms'
TRANSLATIONS = 'translations'
UI_MODULES = 'ui_modules'
VERSION = 'version'
XHEADERS = 'xheaders'
|
bsmrstu-warriors/Moytri--The-Drone-Aider
|
refs/heads/master
|
Lib/encodings/utf_8_sig.py
|
412
|
""" Python 'utf-8-sig' Codec
This works similarly to UTF-8 with the following changes:
* On encoding/writing a UTF-8 encoded BOM will be prepended/written as the
first three bytes.
* On decoding/reading if the first three bytes are a UTF-8 encoded BOM, these
bytes will be skipped.
"""
import codecs
### Codec APIs
def encode(input, errors='strict'):
return (codecs.BOM_UTF8 + codecs.utf_8_encode(input, errors)[0], len(input))
def decode(input, errors='strict'):
prefix = 0
if input[:3] == codecs.BOM_UTF8:
input = input[3:]
prefix = 3
(output, consumed) = codecs.utf_8_decode(input, errors, True)
return (output, consumed+prefix)
class IncrementalEncoder(codecs.IncrementalEncoder):
def __init__(self, errors='strict'):
codecs.IncrementalEncoder.__init__(self, errors)
self.first = 1
def encode(self, input, final=False):
if self.first:
self.first = 0
return codecs.BOM_UTF8 + codecs.utf_8_encode(input, self.errors)[0]
else:
return codecs.utf_8_encode(input, self.errors)[0]
def reset(self):
codecs.IncrementalEncoder.reset(self)
self.first = 1
def getstate(self):
return self.first
def setstate(self, state):
self.first = state
class IncrementalDecoder(codecs.BufferedIncrementalDecoder):
def __init__(self, errors='strict'):
codecs.BufferedIncrementalDecoder.__init__(self, errors)
self.first = True
def _buffer_decode(self, input, errors, final):
if self.first:
if len(input) < 3:
if codecs.BOM_UTF8.startswith(input):
# not enough data to decide if this really is a BOM
# => try again on the next call
return (u"", 0)
else:
self.first = None
else:
self.first = None
if input[:3] == codecs.BOM_UTF8:
(output, consumed) = codecs.utf_8_decode(input[3:], errors, final)
return (output, consumed+3)
return codecs.utf_8_decode(input, errors, final)
def reset(self):
codecs.BufferedIncrementalDecoder.reset(self)
self.first = True
class StreamWriter(codecs.StreamWriter):
def reset(self):
codecs.StreamWriter.reset(self)
try:
del self.encode
except AttributeError:
pass
def encode(self, input, errors='strict'):
self.encode = codecs.utf_8_encode
return encode(input, errors)
class StreamReader(codecs.StreamReader):
def reset(self):
codecs.StreamReader.reset(self)
try:
del self.decode
except AttributeError:
pass
def decode(self, input, errors='strict'):
if len(input) < 3:
if codecs.BOM_UTF8.startswith(input):
# not enough data to decide if this is a BOM
# => try again on the next call
return (u"", 0)
elif input[:3] == codecs.BOM_UTF8:
self.decode = codecs.utf_8_decode
(output, consumed) = codecs.utf_8_decode(input[3:],errors)
return (output, consumed+3)
# (else) no BOM present
self.decode = codecs.utf_8_decode
return codecs.utf_8_decode(input, errors)
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='utf-8-sig',
encode=encode,
decode=decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
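# Round-trip sketch (Python 2 semantics, matching this module):
#
#   data, length = encode(u'abc')   # data == '\xef\xbb\xbfabc', BOM prepended
#   text, consumed = decode(data)   # (u'abc', 6): 3 BOM bytes + 3 data bytes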
|
PearsonIOKI/compose-forum
|
refs/heads/master
|
askbot/deps/django_authopenid/ldap_auth.py
|
14
|
import logging
from django.conf import settings as django_settings
from django.contrib.auth.models import User
from django.forms import EmailField, ValidationError
from askbot.conf import settings as askbot_settings
from askbot.deps.django_authopenid.models import UserAssociation
from askbot.models.signals import user_registered
from askbot.utils.loading import load_module
LOG = logging.getLogger(__name__)
def split_name(full_name, name_format):
"""splits full name into first and last,
according to the order given in the name_format parameter"""
bits = full_name.strip().split()
if len(bits) == 1:
        bits.append('')
elif len(bits) == 0:
bits = ['', '']
if name_format == 'first,last':
return bits[0], bits[1]
elif name_format == 'last,first':
return bits[1], bits[0]
else:
raise ValueError('Unexpected value of name_format')
def ldap_authenticate_default(username, password):
"""
Authenticate using ldap.
LDAP parameter setup is described in
askbot/doc/source/optional-modules.rst
See section about LDAP.
returns a dict with keys:
* first_name
* last_name
* ldap_username
* email (optional only if there is valid email)
* success - boolean, True if authentication succeeded
python-ldap must be installed
http://pypi.python.org/pypi/python-ldap/2.4.6
NOTE: if you are planning to implement a custom
LDAP authenticate function (python path to which can
be provided via setting `ASKBOT_LDAP_AUTHENTICATE`
setting in the settings.py file) - implement
the function just like this - accepting user name
and password and returning dict with the same values.
The returned dictionary can contain additional values
that you might find useful.
"""
import ldap
user_information = None
    user_info = {}  # the return value
try:
ldap_session = ldap.initialize(askbot_settings.LDAP_URL)
#set protocol version
if askbot_settings.LDAP_PROTOCOL_VERSION == '2':
ldap_session.protocol_version = ldap.VERSION2
elif askbot_settings.LDAP_PROTOCOL_VERSION == '3':
ldap_session.protocol_version = ldap.VERSION3
else:
raise NotImplementedError('unsupported version of ldap protocol')
ldap.set_option(ldap.OPT_REFERRALS, 0)
#set extra ldap options, if given
if hasattr(django_settings, 'LDAP_EXTRA_OPTIONS'):
options = django_settings.LDAP_EXTRA_OPTIONS
for key, value in options:
if key.startswith('OPT_'):
ldap_key = getattr(ldap, key)
ldap.set_option(ldap_key, value)
else:
raise ValueError('Invalid LDAP option %s' % key)
#add optional "master" LDAP authentication, if required
master_username = getattr(django_settings, 'LDAP_LOGIN_DN', None)
master_password = getattr(django_settings, 'LDAP_PASSWORD', None)
login_name_field = askbot_settings.LDAP_LOGIN_NAME_FIELD
base_dn = askbot_settings.LDAP_BASE_DN
login_template = login_name_field + '=%s,' + base_dn
encoding = askbot_settings.LDAP_ENCODING
if master_username and master_password:
ldap_session.simple_bind_s(
master_username.encode(encoding),
master_password.encode(encoding)
)
user_filter = askbot_settings.LDAP_USER_FILTER_TEMPLATE % (
askbot_settings.LDAP_LOGIN_NAME_FIELD,
username
)
email_field = askbot_settings.LDAP_EMAIL_FIELD
get_attrs = [
email_field.encode(encoding),
login_name_field.encode(encoding)
#str(askbot_settings.LDAP_USERID_FIELD)
#todo: here we have a chance to get more data from LDAP
#maybe a point for some plugin
]
common_name_field = askbot_settings.LDAP_COMMON_NAME_FIELD.strip()
given_name_field = askbot_settings.LDAP_GIVEN_NAME_FIELD.strip()
surname_field = askbot_settings.LDAP_SURNAME_FIELD.strip()
if given_name_field and surname_field:
get_attrs.append(given_name_field.encode(encoding))
get_attrs.append(surname_field.encode(encoding))
elif common_name_field:
get_attrs.append(common_name_field.encode(encoding))
# search ldap directory for user
user_search_result = ldap_session.search_s(
askbot_settings.LDAP_BASE_DN.encode(encoding),
ldap.SCOPE_SUBTREE,
user_filter.encode(encoding),
get_attrs
)
if user_search_result: # User found in LDAP Directory
user_dn = user_search_result[0][0]
user_information = user_search_result[0][1]
ldap_session.simple_bind_s(user_dn, password.encode(encoding)) #raises INVALID_CREDENTIALS
ldap_session.unbind_s()
if given_name_field and surname_field:
last_name = user_information.get(surname_field, [''])[0]
first_name = user_information.get(given_name_field, [''])[0]
            elif common_name_field:
common_name_format = askbot_settings.LDAP_COMMON_NAME_FIELD_FORMAT
common_name = user_information.get(common_name_field, [''])[0]
first_name, last_name = split_name(common_name, common_name_format)
user_info = {
'first_name': first_name,
'last_name': last_name,
'ldap_username': user_information[login_name_field][0],
'success': True
}
try:
email = user_information.get(email_field, [''])[0]
user_info['email'] = EmailField().clean(email)
except ValidationError:
pass
else:
user_info['success'] = False
except ldap.INVALID_CREDENTIALS, e:
user_info['success'] = False
except ldap.LDAPError, e:
LOG.error("LDAPError Exception")
LOG.exception(e)
user_info['success'] = False
except Exception, e:
LOG.error("Unexpected Exception Occurred")
LOG.exception(e)
user_info['success'] = False
return user_info
def ldap_create_user_default(user_info):
"""takes the result returned by the :func:`ldap_authenticate`
and returns a :class:`UserAssociation` object
"""
# create new user in local db
user = User()
user.username = user_info.get('django_username', user_info['ldap_username'])
user.set_unusable_password()
user.first_name = user_info['first_name']
user.last_name = user_info['last_name']
user.email = user_info['email']
user.is_staff = False
user.is_superuser = False
user.is_active = True
user.save()
user_registered.send(None, user = user)
LOG.info('Created New User : [{0}]'.format(user_info['ldap_username']))
assoc = UserAssociation()
assoc.user = user
assoc.openid_url = user_info['ldap_username'] + '@ldap'
assoc.provider_name = 'ldap'
assoc.save()
return assoc
LDAP_AUTH_FUNC_PATH = getattr(django_settings, 'LDAP_AUTHENTICATE_FUNCTION', None)
if LDAP_AUTH_FUNC_PATH:
ldap_authenticate = load_module(LDAP_AUTH_FUNC_PATH)
else:
ldap_authenticate = ldap_authenticate_default
LDAP_CREATE_FUNC_PATH = getattr(django_settings, 'LDAP_CREATE_USER_FUNCTION', None)
if LDAP_CREATE_FUNC_PATH:
ldap_create_user = load_module(LDAP_CREATE_FUNC_PATH)
else:
ldap_create_user = ldap_create_user_default
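# split_name behaviour, for reference:
#
#   split_name('John Doe', 'first,last')   # -> ('John', 'Doe')
#   split_name('Doe John', 'last,first')   # -> ('John', 'Doe')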
|
Airphrame/ardupilot
|
refs/heads/master
|
Tools/LogAnalyzer/tests/TestEmpty.py
|
346
|
from LogAnalyzer import Test,TestResult
import DataflashLog
class TestEmpty(Test):
'''test for empty or near-empty logs'''
def __init__(self):
Test.__init__(self)
self.name = "Empty"
def run(self, logdata, verbose):
self.result = TestResult()
self.result.status = TestResult.StatusType.GOOD
# all the logic for this test is in the helper function, as it can also be called up front as an early exit
emptyErr = DataflashLog.DataflashLogHelper.isLogEmpty(logdata)
if emptyErr:
self.result.status = TestResult.StatusType.FAIL
self.result.statusMessage = "Empty log? " + emptyErr
|
rex-xxx/mt6572_x201
|
refs/heads/master
|
docs/source.android.com/scripts/micro-httpd.py
|
1
|
#!/usr/bin/env python
# Copyright (C) 2010 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import SimpleHTTPServer
import SocketServer
import os
outdir = os.path.join(os.path.dirname(__file__), '..', 'out')
os.chdir(outdir)
PORT = int(os.environ.get('HTTP_PORT', 8080))
Handler = SimpleHTTPServer.SimpleHTTPRequestHandler
# allow_reuse_address must be set before the server binds its socket, so set
# it on the class instead of on an already-constructed instance.
SocketServer.TCPServer.allow_reuse_address = True
httpd = SocketServer.TCPServer(('0.0.0.0', PORT), Handler)
print 'Serving on port %d' % PORT
httpd.serve_forever()
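# Quick check (assumes the generated site exists in ../out relative to this
# script):
#
#   $ HTTP_PORT=8000 python micro-httpd.py
#   $ curl http://localhost:8000/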
|
drmrd/ansible
|
refs/heads/devel
|
lib/ansible/modules/net_tools/ipify_facts.py
|
94
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# (c) 2015, René Moser <mail@renemoser.net>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: ipify_facts
short_description: Retrieve the public IP of your internet gateway.
description:
  - Useful if you are behind NAT and need to know the public IP of your internet gateway.
version_added: '2.0'
author: "René Moser (@resmo)"
options:
api_url:
description:
- URL of the ipify.org API service.
      - C(?format=json) will be appended by default.
required: false
default: 'https://api.ipify.org'
timeout:
description:
- HTTP connection timeout in seconds.
required: false
default: 10
version_added: "2.3"
validate_certs:
description:
- When set to C(NO), SSL certificates will not be validated.
required: false
default: "yes"
version_added: "2.4"
notes:
- "Visit https://www.ipify.org to get more information."
'''
EXAMPLES = '''
# Gather IP facts from ipify.org
- name: get my public IP
ipify_facts:
# Gather IP facts from your own ipify service endpoint with a custom timeout
- name: get my public IP
ipify_facts:
api_url: http://api.example.com/ipify
timeout: 20
'''
RETURN = '''
---
ipify_public_ip:
description: Public IP of the internet gateway.
returned: success
type: string
sample: 1.2.3.4
'''
import json
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.urls import fetch_url
from ansible.module_utils._text import to_text
class IpifyFacts(object):
def __init__(self):
self.api_url = module.params.get('api_url')
self.timeout = module.params.get('timeout')
def run(self):
result = {
'ipify_public_ip': None
}
(response, info) = fetch_url(module=module, url=self.api_url + "?format=json", force=True, timeout=self.timeout)
if not response:
module.fail_json(msg="No valid or no response from url %s within %s seconds (timeout)" % (self.api_url, self.timeout))
data = json.loads(to_text(response.read()))
result['ipify_public_ip'] = data.get('ip')
return result
def main():
global module
module = AnsibleModule(
argument_spec=dict(
api_url=dict(default='https://api.ipify.org/'),
timeout=dict(type='int', default=10),
validate_certs=dict(type='bool', default=True),
),
supports_check_mode=True,
)
ipify_facts = IpifyFacts().run()
ipify_facts_result = dict(changed=False, ansible_facts=ipify_facts)
module.exit_json(**ipify_facts_result)
if __name__ == '__main__':
main()
|
geekaia/edx-platform
|
refs/heads/master
|
common/lib/xmodule/xmodule/html_module.py
|
37
|
import copy
from fs.errors import ResourceNotFoundError
import logging
import os
import sys
from lxml import etree
from path import path
from pkg_resources import resource_string
from xblock.fields import Scope, String, Boolean, List
from xmodule.editing_module import EditingDescriptor
from xmodule.html_checker import check_html
from xmodule.stringify import stringify_children
from xmodule.x_module import XModule
from xmodule.xml_module import XmlDescriptor, name_to_pathname
import textwrap
from xmodule.contentstore.content import StaticContent
from xblock.core import XBlock
log = logging.getLogger("edx.courseware")
# Make '_' a no-op so we can scrape strings
_ = lambda text: text
class HtmlFields(object):
display_name = String(
display_name=_("Display Name"),
help=_("This name appears in the horizontal navigation at the top of the page."),
scope=Scope.settings,
# it'd be nice to have a useful default but it screws up other things; so,
# use display_name_with_default for those
default=_("Text")
)
data = String(help=_("Html contents to display for this module"), default=u"", scope=Scope.content)
source_code = String(help=_("Source code for LaTeX documents. This feature is not well-supported."), scope=Scope.settings)
use_latex_compiler = Boolean(
help=_("Enable LaTeX templates?"),
default=False,
scope=Scope.settings
)
editor = String(
help=_("Select Visual to enter content and have the editor automatically create the HTML. Select Raw to edit HTML directly. If you change this setting, you must save the component and then re-open it for editing."),
display_name=_("Editor"),
default="visual",
values=[
{"display_name": _("Visual"), "value": "visual"},
{"display_name": _("Raw"), "value": "raw"}
],
scope=Scope.settings
)
class HtmlModule(HtmlFields, XModule):
js = {
'coffee': [
resource_string(__name__, 'js/src/javascript_loader.coffee'),
resource_string(__name__, 'js/src/html/display.coffee'),
],
'js': [
resource_string(__name__, 'js/src/collapsible.js'),
resource_string(__name__, 'js/src/html/imageModal.js'),
resource_string(__name__, 'js/common_static/js/vendor/draggabilly.pkgd.js'),
]
}
js_module_name = "HTMLModule"
css = {'scss': [resource_string(__name__, 'css/html/display.scss')]}
def get_html(self):
if self.system.anonymous_student_id:
return self.data.replace("%%USER_ID%%", self.system.anonymous_student_id)
return self.data
class HtmlDescriptor(HtmlFields, XmlDescriptor, EditingDescriptor):
"""
Module for putting raw html in a course
"""
mako_template = "widgets/html-edit.html"
module_class = HtmlModule
filename_extension = "xml"
template_dir_name = "html"
js = {'coffee': [resource_string(__name__, 'js/src/html/edit.coffee')]}
js_module_name = "HTMLEditingDescriptor"
css = {'scss': [resource_string(__name__, 'css/editor/edit.scss'), resource_string(__name__, 'css/html/edit.scss')]}
# VS[compat] TODO (cpennington): Delete this method once all fall 2012 course
# are being edited in the cms
@classmethod
def backcompat_paths(cls, path):
if path.endswith('.html.xml'):
path = path[:-9] + '.html' # backcompat--look for html instead of xml
if path.endswith('.html.html'):
path = path[:-5] # some people like to include .html in filenames..
candidates = []
while os.sep in path:
candidates.append(path)
_, _, path = path.partition(os.sep)
# also look for .html versions instead of .xml
nc = []
for candidate in candidates:
if candidate.endswith('.xml'):
nc.append(candidate[:-4] + '.html')
return candidates + nc
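    # Sketch of the fallback above, for reference:
    #   backcompat_paths('html/intro.html.xml') -> ['html/intro.html']
    #   (the '.html.xml' suffix collapses to '.html'; each candidate that
    #   still contains a path separator is kept, and any '.xml' candidate
    #   would also gain an '.html' variant)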
@classmethod
def filter_templates(cls, template, course):
"""
Filter template that contains 'latex' from templates.
Show them only if use_latex_compiler is set to True in
course settings.
"""
        return ('latex' not in template['template_id'] or course.use_latex_compiler)
def get_context(self):
"""
an override to add in specific rendering context, in this case we need to
add in a base path to our c4x content addressing scheme
"""
_context = EditingDescriptor.get_context(self)
# Add some specific HTML rendering context when editing HTML modules where we pass
# the root /c4x/ url for assets. This allows client-side substitutions to occur.
_context.update({
'base_asset_url': StaticContent.get_base_url_path_for_course_assets(self.location.course_key),
'enable_latex_compiler': self.use_latex_compiler,
'editor': self.editor
})
return _context
# NOTE: html descriptors are special. We do not want to parse and
# export them ourselves, because that can break things (e.g. lxml
# adds body tags when it exports, but they should just be html
# snippets that will be included in the middle of pages.
@classmethod
def load_definition(cls, xml_object, system, location):
'''Load a descriptor from the specified xml_object:
If there is a filename attribute, load it as a string, and
log a warning if it is not parseable by etree.HTMLParser.
If there is not a filename attribute, the definition is the body
of the xml_object, without the root tag (do not want <html> in the
middle of a page)
'''
filename = xml_object.get('filename')
if filename is None:
definition_xml = copy.deepcopy(xml_object)
cls.clean_metadata_from_xml(definition_xml)
return {'data': stringify_children(definition_xml)}, []
else:
# html is special. cls.filename_extension is 'xml', but
# if 'filename' is in the definition, that means to load
# from .html
# 'filename' in html pointers is a relative path
# (not same as 'html/blah.html' when the pointer is in a directory itself)
pointer_path = "{category}/{url_path}".format(
category='html',
url_path=name_to_pathname(location.name)
)
base = path(pointer_path).dirname()
# log.debug("base = {0}, base.dirname={1}, filename={2}".format(base, base.dirname(), filename))
filepath = "{base}/{name}.html".format(base=base, name=filename)
# log.debug("looking for html file for {0} at {1}".format(location, filepath))
# VS[compat]
# TODO (cpennington): If the file doesn't exist at the right path,
# give the class a chance to fix it up. The file will be written out
# again in the correct format. This should go away once the CMS is
# online and has imported all current (fall 2012) courses from xml
if not system.resources_fs.exists(filepath):
candidates = cls.backcompat_paths(filepath)
# log.debug("candidates = {0}".format(candidates))
for candidate in candidates:
if system.resources_fs.exists(candidate):
filepath = candidate
break
try:
with system.resources_fs.open(filepath) as file:
html = file.read().decode('utf-8')
# Log a warning if we can't parse the file, but don't error
if not check_html(html) and len(html) > 0:
msg = "Couldn't parse html in {0}, content = {1}".format(filepath, html)
log.warning(msg)
system.error_tracker("Warning: " + msg)
definition = {'data': html}
# TODO (ichuang): remove this after migration
# for Fall 2012 LMS migration: keep filename (and unmangled filename)
definition['filename'] = [filepath, filename]
return definition, []
except (ResourceNotFoundError) as err:
msg = 'Unable to load file contents at path {0}: {1} '.format(
filepath, err)
# add more info and re-raise
raise Exception(msg), None, sys.exc_info()[2]
# TODO (vshnayder): make export put things in the right places.
def definition_to_xml(self, resource_fs):
''' Write <html filename="" [meta-attrs="..."]> to filename.xml, and the html
string to filename.html.
'''
# Write html to file, return an empty tag
pathname = name_to_pathname(self.url_name)
filepath = u'{category}/{pathname}.html'.format(
category=self.category,
pathname=pathname
)
resource_fs.makedir(os.path.dirname(filepath), recursive=True, allow_recreate=True)
with resource_fs.open(filepath, 'w') as filestream:
html_data = self.data.encode('utf-8')
filestream.write(html_data)
# write out the relative name
relname = path(pathname).basename()
elt = etree.Element('html')
elt.set("filename", relname)
return elt
@property
def non_editable_metadata_fields(self):
non_editable_fields = super(HtmlDescriptor, self).non_editable_metadata_fields
non_editable_fields.append(HtmlDescriptor.use_latex_compiler)
return non_editable_fields
class AboutFields(object):
display_name = String(
help=_("Display name for this module"),
scope=Scope.settings,
default="overview",
)
data = String(
help=_("Html contents to display for this module"),
default=u"",
scope=Scope.content
)
@XBlock.tag("detached")
class AboutModule(AboutFields, HtmlModule):
"""
Overriding defaults but otherwise treated as HtmlModule.
"""
pass
@XBlock.tag("detached")
class AboutDescriptor(AboutFields, HtmlDescriptor):
"""
These pieces of course content are treated as HtmlModules but we need to overload where the templates are located
in order to be able to create new ones
"""
template_dir_name = "about"
module_class = AboutModule
class StaticTabFields(object):
"""
The overrides for Static Tabs
"""
display_name = String(
display_name=_("Display Name"),
help=_("This name appears in the horizontal navigation at the top of the page."),
scope=Scope.settings,
default="Empty",
)
data = String(
default=textwrap.dedent(u"""\
<p>Add the content you want students to see on this page.</p>
"""),
scope=Scope.content,
help=_("HTML for the additional pages")
)
@XBlock.tag("detached")
class StaticTabModule(StaticTabFields, HtmlModule):
"""
Supports the field overrides
"""
pass
@XBlock.tag("detached")
class StaticTabDescriptor(StaticTabFields, HtmlDescriptor):
"""
These pieces of course content are treated as HtmlModules but we need to overload where the templates are located
in order to be able to create new ones
"""
template_dir_name = None
module_class = StaticTabModule
class CourseInfoFields(object):
"""
Field overrides
"""
items = List(
help=_("List of course update items"),
default=[],
scope=Scope.content
)
data = String(
help=_("Html contents to display for this module"),
default=u"<ol></ol>",
scope=Scope.content
)
@XBlock.tag("detached")
class CourseInfoModule(CourseInfoFields, HtmlModule):
"""
Just to support xblock field overrides
"""
# statuses
STATUS_VISIBLE = 'visible'
STATUS_DELETED = 'deleted'
@XBlock.tag("detached")
class CourseInfoDescriptor(CourseInfoFields, HtmlDescriptor):
"""
These pieces of course content are treated as HtmlModules but we need to overload where the templates are located
in order to be able to create new ones
"""
template_dir_name = None
module_class = CourseInfoModule
|
apophys/freeipa
|
refs/heads/master
|
ipaclient/remote_plugins/2_164/idviews.py
|
10
|
#
# Copyright (C) 2016 FreeIPA Contributors see COPYING for license
#
# pylint: disable=unused-import
import six
from . import Command, Method, Object
from ipalib import api, parameters, output
from ipalib.parameters import DefaultFrom
from ipalib.plugable import Registry
from ipalib.text import _
from ipapython.dn import DN
from ipapython.dnsutil import DNSName
if six.PY3:
unicode = str
__doc__ = _("""
ID Views
Manage ID Views
IPA allows certain properties of users and groups to be overridden per host.
This functionality is primarily used to allow migration from older systems or
other Identity Management solutions.
""")
register = Registry()
@register()
class idoverridegroup(Object):
takes_params = (
parameters.Str(
'ipaanchoruuid',
primary_key=True,
label=_(u'Anchor to override'),
),
parameters.Str(
'description',
required=False,
label=_(u'Description'),
),
parameters.Str(
'cn',
required=False,
label=_(u'Group name'),
),
parameters.Int(
'gidnumber',
required=False,
label=_(u'GID'),
doc=_(u'Group ID Number'),
),
)
@register()
class idoverrideuser(Object):
takes_params = (
parameters.Str(
'ipaanchoruuid',
primary_key=True,
label=_(u'Anchor to override'),
),
parameters.Str(
'description',
required=False,
label=_(u'Description'),
),
parameters.Str(
'uid',
required=False,
label=_(u'User login'),
),
parameters.Int(
'uidnumber',
required=False,
label=_(u'UID'),
doc=_(u'User ID Number'),
),
parameters.Str(
'gecos',
required=False,
label=_(u'GECOS'),
),
parameters.Int(
'gidnumber',
required=False,
label=_(u'GID'),
doc=_(u'Group ID Number'),
),
parameters.Str(
'homedirectory',
required=False,
label=_(u'Home directory'),
),
parameters.Str(
'loginshell',
required=False,
label=_(u'Login shell'),
),
parameters.Str(
'ipaoriginaluid',
required=False,
exclude=('cli', 'webui'),
),
parameters.Str(
'ipasshpubkey',
required=False,
multivalue=True,
label=_(u'SSH public key'),
),
)
@register()
class idview(Object):
takes_params = (
parameters.Str(
'cn',
primary_key=True,
label=_(u'ID View Name'),
),
parameters.Str(
'description',
required=False,
label=_(u'Description'),
),
)
@register()
class idoverridegroup_add(Method):
__doc__ = _("Add a new Group ID override.")
takes_args = (
parameters.Str(
'idviewcn',
cli_name='idview',
label=_(u'ID View Name'),
),
parameters.Str(
'ipaanchoruuid',
cli_name='anchor',
label=_(u'Anchor to override'),
),
)
takes_options = (
parameters.Str(
'description',
required=False,
cli_name='desc',
label=_(u'Description'),
),
parameters.Str(
'cn',
required=False,
cli_name='group_name',
label=_(u'Group name'),
no_convert=True,
),
parameters.Int(
'gidnumber',
required=False,
cli_name='gid',
label=_(u'GID'),
doc=_(u'Group ID Number'),
),
parameters.Str(
'setattr',
required=False,
multivalue=True,
doc=_(u'Set an attribute to a name/value pair. Format is attr=value.\nFor multi-valued attributes, the command replaces the values already present.'),
exclude=('webui',),
),
parameters.Str(
'addattr',
required=False,
multivalue=True,
doc=_(u'Add an attribute/value pair. Format is attr=value. The attribute\nmust be part of the schema.'),
exclude=('webui',),
),
parameters.Flag(
'fallback_to_ldap',
required=False,
label=_(u'Fallback to AD DC LDAP'),
doc=_(u'Allow falling back to AD DC LDAP when resolving AD trusted objects. For two-way trusts only.'),
default=False,
autofill=True,
),
parameters.Flag(
'all',
doc=_(u'Retrieve and print all attributes from the server. Affects command output.'),
exclude=('webui',),
default=False,
autofill=True,
),
parameters.Flag(
'raw',
doc=_(u'Print entries as stored on the server. Only affects output format.'),
exclude=('webui',),
default=False,
autofill=True,
),
)
has_output = (
output.Output(
'summary',
(unicode, type(None)),
doc=_(u'User-friendly description of action performed'),
),
output.Entry(
'result',
),
output.PrimaryKey(
'value',
doc=_(u"The primary_key value of the entry, e.g. 'jdoe' for a user"),
),
)
@register()
class idoverridegroup_del(Method):
__doc__ = _("Delete an Group ID override.")
takes_args = (
parameters.Str(
'idviewcn',
cli_name='idview',
label=_(u'ID View Name'),
),
parameters.Str(
'ipaanchoruuid',
multivalue=True,
cli_name='anchor',
label=_(u'Anchor to override'),
),
)
takes_options = (
parameters.Flag(
'continue',
doc=_(u"Continuous mode: Don't stop on errors."),
default=False,
autofill=True,
),
parameters.Flag(
'fallback_to_ldap',
required=False,
label=_(u'Fallback to AD DC LDAP'),
doc=_(u'Allow falling back to AD DC LDAP when resolving AD trusted objects. For two-way trusts only.'),
default=False,
autofill=True,
),
)
has_output = (
output.Output(
'summary',
(unicode, type(None)),
doc=_(u'User-friendly description of action performed'),
),
output.Output(
'result',
dict,
doc=_(u'List of deletions that failed'),
),
output.ListOfPrimaryKeys(
'value',
),
)
@register()
class idoverridegroup_find(Method):
__doc__ = _("Search for an Group ID override.")
takes_args = (
parameters.Str(
'idviewcn',
cli_name='idview',
label=_(u'ID View Name'),
),
parameters.Str(
'criteria',
required=False,
doc=_(u'A string searched in all relevant object attributes'),
),
)
takes_options = (
parameters.Str(
'ipaanchoruuid',
required=False,
cli_name='anchor',
label=_(u'Anchor to override'),
),
parameters.Str(
'description',
required=False,
cli_name='desc',
label=_(u'Description'),
),
parameters.Str(
'cn',
required=False,
cli_name='group_name',
label=_(u'Group name'),
no_convert=True,
),
parameters.Int(
'gidnumber',
required=False,
cli_name='gid',
label=_(u'GID'),
doc=_(u'Group ID Number'),
),
parameters.Int(
'timelimit',
required=False,
label=_(u'Time Limit'),
doc=_(u'Time limit of search in seconds (0 is unlimited)'),
),
parameters.Int(
'sizelimit',
required=False,
label=_(u'Size Limit'),
doc=_(u'Maximum number of entries returned (0 is unlimited)'),
),
parameters.Flag(
'fallback_to_ldap',
required=False,
label=_(u'Fallback to AD DC LDAP'),
doc=_(u'Allow falling back to AD DC LDAP when resolving AD trusted objects. For two-way trusts only.'),
default=False,
autofill=True,
),
parameters.Flag(
'all',
doc=_(u'Retrieve and print all attributes from the server. Affects command output.'),
exclude=('webui',),
default=False,
autofill=True,
),
parameters.Flag(
'raw',
doc=_(u'Print entries as stored on the server. Only affects output format.'),
exclude=('webui',),
default=False,
autofill=True,
),
parameters.Flag(
'pkey_only',
required=False,
label=_(u'Primary key only'),
doc=_(u'Results should contain primary key attribute only ("anchor")'),
default=False,
autofill=True,
),
)
has_output = (
output.Output(
'summary',
(unicode, type(None)),
doc=_(u'User-friendly description of action performed'),
),
output.ListOfEntries(
'result',
),
output.Output(
'count',
int,
doc=_(u'Number of entries returned'),
),
output.Output(
'truncated',
bool,
doc=_(u'True if not all results were returned'),
),
)
@register()
class idoverridegroup_mod(Method):
__doc__ = _("Modify an Group ID override.")
takes_args = (
parameters.Str(
'idviewcn',
cli_name='idview',
label=_(u'ID View Name'),
),
parameters.Str(
'ipaanchoruuid',
cli_name='anchor',
label=_(u'Anchor to override'),
),
)
takes_options = (
parameters.Str(
'description',
required=False,
cli_name='desc',
label=_(u'Description'),
),
parameters.Str(
'cn',
required=False,
cli_name='group_name',
label=_(u'Group name'),
no_convert=True,
),
parameters.Int(
'gidnumber',
required=False,
cli_name='gid',
label=_(u'GID'),
doc=_(u'Group ID Number'),
),
parameters.Str(
'setattr',
required=False,
multivalue=True,
doc=_(u'Set an attribute to a name/value pair. Format is attr=value.\nFor multi-valued attributes, the command replaces the values already present.'),
exclude=('webui',),
),
parameters.Str(
'addattr',
required=False,
multivalue=True,
doc=_(u'Add an attribute/value pair. Format is attr=value. The attribute\nmust be part of the schema.'),
exclude=('webui',),
),
parameters.Str(
'delattr',
required=False,
multivalue=True,
doc=_(u'Delete an attribute/value pair. The option will be evaluated\nlast, after all sets and adds.'),
exclude=('webui',),
),
parameters.Flag(
'rights',
label=_(u'Rights'),
doc=_(u'Display the access rights of this entry (requires --all). See ipa man page for details.'),
default=False,
autofill=True,
),
parameters.Flag(
'fallback_to_ldap',
required=False,
label=_(u'Fallback to AD DC LDAP'),
doc=_(u'Allow falling back to AD DC LDAP when resolving AD trusted objects. For two-way trusts only.'),
default=False,
autofill=True,
),
parameters.Flag(
'all',
doc=_(u'Retrieve and print all attributes from the server. Affects command output.'),
exclude=('webui',),
default=False,
autofill=True,
),
parameters.Flag(
'raw',
doc=_(u'Print entries as stored on the server. Only affects output format.'),
exclude=('webui',),
default=False,
autofill=True,
),
parameters.Str(
'rename',
required=False,
label=_(u'Rename'),
doc=_(u'Rename the Group ID override object'),
),
)
has_output = (
output.Output(
'summary',
(unicode, type(None)),
doc=_(u'User-friendly description of action performed'),
),
output.Entry(
'result',
),
output.PrimaryKey(
'value',
doc=_(u"The primary_key value of the entry, e.g. 'jdoe' for a user"),
),
)
@register()
class idoverridegroup_show(Method):
__doc__ = _("Display information about an Group ID override.")
takes_args = (
parameters.Str(
'idviewcn',
cli_name='idview',
label=_(u'ID View Name'),
),
parameters.Str(
'ipaanchoruuid',
cli_name='anchor',
label=_(u'Anchor to override'),
),
)
takes_options = (
parameters.Flag(
'rights',
label=_(u'Rights'),
doc=_(u'Display the access rights of this entry (requires --all). See ipa man page for details.'),
default=False,
autofill=True,
),
parameters.Flag(
'fallback_to_ldap',
required=False,
label=_(u'Fallback to AD DC LDAP'),
doc=_(u'Allow falling back to AD DC LDAP when resolving AD trusted objects. For two-way trusts only.'),
default=False,
autofill=True,
),
parameters.Flag(
'all',
doc=_(u'Retrieve and print all attributes from the server. Affects command output.'),
exclude=('webui',),
default=False,
autofill=True,
),
parameters.Flag(
'raw',
doc=_(u'Print entries as stored on the server. Only affects output format.'),
exclude=('webui',),
default=False,
autofill=True,
),
)
has_output = (
output.Output(
'summary',
(unicode, type(None)),
doc=_(u'User-friendly description of action performed'),
),
output.Entry(
'result',
),
output.PrimaryKey(
'value',
doc=_(u"The primary_key value of the entry, e.g. 'jdoe' for a user"),
),
)
@register()
class idoverrideuser_add(Method):
__doc__ = _("Add a new User ID override.")
takes_args = (
parameters.Str(
'idviewcn',
cli_name='idview',
label=_(u'ID View Name'),
),
parameters.Str(
'ipaanchoruuid',
cli_name='anchor',
label=_(u'Anchor to override'),
),
)
takes_options = (
parameters.Str(
'description',
required=False,
cli_name='desc',
label=_(u'Description'),
),
parameters.Str(
'uid',
required=False,
cli_name='login',
label=_(u'User login'),
no_convert=True,
),
parameters.Int(
'uidnumber',
required=False,
cli_name='uid',
label=_(u'UID'),
doc=_(u'User ID Number'),
),
parameters.Str(
'gecos',
required=False,
label=_(u'GECOS'),
),
parameters.Int(
'gidnumber',
required=False,
label=_(u'GID'),
doc=_(u'Group ID Number'),
),
parameters.Str(
'homedirectory',
required=False,
cli_name='homedir',
label=_(u'Home directory'),
),
parameters.Str(
'loginshell',
required=False,
cli_name='shell',
label=_(u'Login shell'),
),
parameters.Str(
'ipaoriginaluid',
required=False,
exclude=('cli', 'webui'),
),
parameters.Str(
'ipasshpubkey',
required=False,
multivalue=True,
cli_name='sshpubkey',
label=_(u'SSH public key'),
no_convert=True,
),
parameters.Str(
'setattr',
required=False,
multivalue=True,
doc=_(u'Set an attribute to a name/value pair. Format is attr=value.\nFor multi-valued attributes, the command replaces the values already present.'),
exclude=('webui',),
),
parameters.Str(
'addattr',
required=False,
multivalue=True,
doc=_(u'Add an attribute/value pair. Format is attr=value. The attribute\nmust be part of the schema.'),
exclude=('webui',),
),
parameters.Flag(
'fallback_to_ldap',
required=False,
label=_(u'Fallback to AD DC LDAP'),
doc=_(u'Allow falling back to AD DC LDAP when resolving AD trusted objects. For two-way trusts only.'),
default=False,
autofill=True,
),
parameters.Flag(
'all',
doc=_(u'Retrieve and print all attributes from the server. Affects command output.'),
exclude=('webui',),
default=False,
autofill=True,
),
parameters.Flag(
'raw',
doc=_(u'Print entries as stored on the server. Only affects output format.'),
exclude=('webui',),
default=False,
autofill=True,
),
)
has_output = (
output.Output(
'summary',
(unicode, type(None)),
doc=_(u'User-friendly description of action performed'),
),
output.Entry(
'result',
),
output.PrimaryKey(
'value',
doc=_(u"The primary_key value of the entry, e.g. 'jdoe' for a user"),
),
)
@register()
class idoverrideuser_del(Method):
__doc__ = _("Delete an User ID override.")
takes_args = (
parameters.Str(
'idviewcn',
cli_name='idview',
label=_(u'ID View Name'),
),
parameters.Str(
'ipaanchoruuid',
multivalue=True,
cli_name='anchor',
label=_(u'Anchor to override'),
),
)
takes_options = (
parameters.Flag(
'continue',
doc=_(u"Continuous mode: Don't stop on errors."),
default=False,
autofill=True,
),
parameters.Flag(
'fallback_to_ldap',
required=False,
label=_(u'Fallback to AD DC LDAP'),
doc=_(u'Allow falling back to AD DC LDAP when resolving AD trusted objects. For two-way trusts only.'),
default=False,
autofill=True,
),
)
has_output = (
output.Output(
'summary',
(unicode, type(None)),
doc=_(u'User-friendly description of action performed'),
),
output.Output(
'result',
dict,
doc=_(u'List of deletions that failed'),
),
output.ListOfPrimaryKeys(
'value',
),
)
@register()
class idoverrideuser_find(Method):
__doc__ = _("Search for an User ID override.")
takes_args = (
parameters.Str(
'idviewcn',
cli_name='idview',
label=_(u'ID View Name'),
),
parameters.Str(
'criteria',
required=False,
doc=_(u'A string searched in all relevant object attributes'),
),
)
takes_options = (
parameters.Str(
'ipaanchoruuid',
required=False,
cli_name='anchor',
label=_(u'Anchor to override'),
),
parameters.Str(
'description',
required=False,
cli_name='desc',
label=_(u'Description'),
),
parameters.Str(
'uid',
required=False,
cli_name='login',
label=_(u'User login'),
no_convert=True,
),
parameters.Int(
'uidnumber',
required=False,
cli_name='uid',
label=_(u'UID'),
doc=_(u'User ID Number'),
),
parameters.Str(
'gecos',
required=False,
label=_(u'GECOS'),
),
parameters.Int(
'gidnumber',
required=False,
label=_(u'GID'),
doc=_(u'Group ID Number'),
),
parameters.Str(
'homedirectory',
required=False,
cli_name='homedir',
label=_(u'Home directory'),
),
parameters.Str(
'loginshell',
required=False,
cli_name='shell',
label=_(u'Login shell'),
),
parameters.Str(
'ipaoriginaluid',
required=False,
exclude=('cli', 'webui'),
),
parameters.Int(
'timelimit',
required=False,
label=_(u'Time Limit'),
doc=_(u'Time limit of search in seconds (0 is unlimited)'),
),
parameters.Int(
'sizelimit',
required=False,
label=_(u'Size Limit'),
doc=_(u'Maximum number of entries returned (0 is unlimited)'),
),
parameters.Flag(
'fallback_to_ldap',
required=False,
label=_(u'Fallback to AD DC LDAP'),
doc=_(u'Allow falling back to AD DC LDAP when resolving AD trusted objects. For two-way trusts only.'),
default=False,
autofill=True,
),
parameters.Flag(
'all',
doc=_(u'Retrieve and print all attributes from the server. Affects command output.'),
exclude=('webui',),
default=False,
autofill=True,
),
parameters.Flag(
'raw',
doc=_(u'Print entries as stored on the server. Only affects output format.'),
exclude=('webui',),
default=False,
autofill=True,
),
parameters.Flag(
'pkey_only',
required=False,
label=_(u'Primary key only'),
doc=_(u'Results should contain primary key attribute only ("anchor")'),
default=False,
autofill=True,
),
)
has_output = (
output.Output(
'summary',
(unicode, type(None)),
doc=_(u'User-friendly description of action performed'),
),
output.ListOfEntries(
'result',
),
output.Output(
'count',
int,
doc=_(u'Number of entries returned'),
),
output.Output(
'truncated',
bool,
doc=_(u'True if not all results were returned'),
),
)
@register()
class idoverrideuser_mod(Method):
__doc__ = _("Modify an User ID override.")
takes_args = (
parameters.Str(
'idviewcn',
cli_name='idview',
label=_(u'ID View Name'),
),
parameters.Str(
'ipaanchoruuid',
cli_name='anchor',
label=_(u'Anchor to override'),
),
)
takes_options = (
parameters.Str(
'description',
required=False,
cli_name='desc',
label=_(u'Description'),
),
parameters.Str(
'uid',
required=False,
cli_name='login',
label=_(u'User login'),
no_convert=True,
),
parameters.Int(
'uidnumber',
required=False,
cli_name='uid',
label=_(u'UID'),
doc=_(u'User ID Number'),
),
parameters.Str(
'gecos',
required=False,
label=_(u'GECOS'),
),
parameters.Int(
'gidnumber',
required=False,
label=_(u'GID'),
doc=_(u'Group ID Number'),
),
parameters.Str(
'homedirectory',
required=False,
cli_name='homedir',
label=_(u'Home directory'),
),
parameters.Str(
'loginshell',
required=False,
cli_name='shell',
label=_(u'Login shell'),
),
parameters.Str(
'ipaoriginaluid',
required=False,
exclude=('cli', 'webui'),
),
parameters.Str(
'ipasshpubkey',
required=False,
multivalue=True,
cli_name='sshpubkey',
label=_(u'SSH public key'),
no_convert=True,
),
parameters.Str(
'setattr',
required=False,
multivalue=True,
doc=_(u'Set an attribute to a name/value pair. Format is attr=value.\nFor multi-valued attributes, the command replaces the values already present.'),
exclude=('webui',),
),
parameters.Str(
'addattr',
required=False,
multivalue=True,
doc=_(u'Add an attribute/value pair. Format is attr=value. The attribute\nmust be part of the schema.'),
exclude=('webui',),
),
parameters.Str(
'delattr',
required=False,
multivalue=True,
doc=_(u'Delete an attribute/value pair. The option will be evaluated\nlast, after all sets and adds.'),
exclude=('webui',),
),
parameters.Flag(
'rights',
label=_(u'Rights'),
doc=_(u'Display the access rights of this entry (requires --all). See ipa man page for details.'),
default=False,
autofill=True,
),
parameters.Flag(
'fallback_to_ldap',
required=False,
label=_(u'Fallback to AD DC LDAP'),
doc=_(u'Allow falling back to AD DC LDAP when resolving AD trusted objects. For two-way trusts only.'),
default=False,
autofill=True,
),
parameters.Flag(
'all',
doc=_(u'Retrieve and print all attributes from the server. Affects command output.'),
exclude=('webui',),
default=False,
autofill=True,
),
parameters.Flag(
'raw',
doc=_(u'Print entries as stored on the server. Only affects output format.'),
exclude=('webui',),
default=False,
autofill=True,
),
parameters.Str(
'rename',
required=False,
label=_(u'Rename'),
doc=_(u'Rename the User ID override object'),
),
)
has_output = (
output.Output(
'summary',
(unicode, type(None)),
doc=_(u'User-friendly description of action performed'),
),
output.Entry(
'result',
),
output.PrimaryKey(
'value',
doc=_(u"The primary_key value of the entry, e.g. 'jdoe' for a user"),
),
)
@register()
class idoverrideuser_show(Method):
__doc__ = _("Display information about an User ID override.")
takes_args = (
parameters.Str(
'idviewcn',
cli_name='idview',
label=_(u'ID View Name'),
),
parameters.Str(
'ipaanchoruuid',
cli_name='anchor',
label=_(u'Anchor to override'),
),
)
takes_options = (
parameters.Flag(
'rights',
label=_(u'Rights'),
doc=_(u'Display the access rights of this entry (requires --all). See ipa man page for details.'),
default=False,
autofill=True,
),
parameters.Flag(
'fallback_to_ldap',
required=False,
label=_(u'Fallback to AD DC LDAP'),
doc=_(u'Allow falling back to AD DC LDAP when resolving AD trusted objects. For two-way trusts only.'),
default=False,
autofill=True,
),
parameters.Flag(
'all',
doc=_(u'Retrieve and print all attributes from the server. Affects command output.'),
exclude=('webui',),
default=False,
autofill=True,
),
parameters.Flag(
'raw',
doc=_(u'Print entries as stored on the server. Only affects output format.'),
exclude=('webui',),
default=False,
autofill=True,
),
)
has_output = (
output.Output(
'summary',
(unicode, type(None)),
doc=_(u'User-friendly description of action performed'),
),
output.Entry(
'result',
),
output.PrimaryKey(
'value',
doc=_(u"The primary_key value of the entry, e.g. 'jdoe' for a user"),
),
)
@register()
class idview_add(Method):
__doc__ = _("Add a new ID View.")
takes_args = (
parameters.Str(
'cn',
cli_name='name',
label=_(u'ID View Name'),
),
)
takes_options = (
parameters.Str(
'description',
required=False,
cli_name='desc',
label=_(u'Description'),
),
parameters.Str(
'setattr',
required=False,
multivalue=True,
doc=_(u'Set an attribute to a name/value pair. Format is attr=value.\nFor multi-valued attributes, the command replaces the values already present.'),
exclude=('webui',),
),
parameters.Str(
'addattr',
required=False,
multivalue=True,
doc=_(u'Add an attribute/value pair. Format is attr=value. The attribute\nmust be part of the schema.'),
exclude=('webui',),
),
parameters.Flag(
'all',
doc=_(u'Retrieve and print all attributes from the server. Affects command output.'),
exclude=('webui',),
default=False,
autofill=True,
),
parameters.Flag(
'raw',
doc=_(u'Print entries as stored on the server. Only affects output format.'),
exclude=('webui',),
default=False,
autofill=True,
),
)
has_output = (
output.Output(
'summary',
(unicode, type(None)),
doc=_(u'User-friendly description of action performed'),
),
output.Entry(
'result',
),
output.PrimaryKey(
'value',
doc=_(u"The primary_key value of the entry, e.g. 'jdoe' for a user"),
),
)
@register()
class idview_apply(Method):
__doc__ = _("Applies ID View to specified hosts or current members of specified hostgroups. If any other ID View is applied to the host, it is overridden.")
takes_args = (
parameters.Str(
'cn',
cli_name='name',
label=_(u'ID View Name'),
),
)
takes_options = (
parameters.Str(
'host',
required=False,
multivalue=True,
cli_name='hosts',
label=_(u'hosts'),
doc=_(u'Hosts to apply the ID View to'),
),
parameters.Str(
'hostgroup',
required=False,
multivalue=True,
cli_name='hostgroups',
label=_(u'hostgroups'),
doc=_(u'Hostgroups whose hosts the ID View should be applied to. Please note that the view is not applied automatically to any host added to the hostgroup after running the idview-apply command.'),
),
)
has_output = (
output.Output(
'summary',
(unicode, type(None)),
doc=_(u'User-friendly description of action performed'),
),
output.Output(
'succeeded',
dict,
doc=_(u'Hosts that this ID View was applied to.'),
),
output.Output(
'failed',
dict,
doc=_(u'Hosts or hostgroups that this ID View could not be applied to.'),
),
output.Output(
'completed',
int,
doc=_(u'Number of hosts the ID View was applied to:'),
),
)
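# Equivalent CLI invocation sketch for idview-apply (the view and host
# names are hypothetical):
#
#   ipa idview-apply example_view --hosts=host1.example.com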
@register()
class idview_del(Method):
__doc__ = _("Delete an ID View.")
takes_args = (
parameters.Str(
'cn',
multivalue=True,
cli_name='name',
label=_(u'ID View Name'),
),
)
takes_options = (
parameters.Flag(
'continue',
doc=_(u"Continuous mode: Don't stop on errors."),
default=False,
autofill=True,
),
)
has_output = (
output.Output(
'summary',
(unicode, type(None)),
doc=_(u'User-friendly description of action performed'),
),
output.Output(
'result',
dict,
doc=_(u'List of deletions that failed'),
),
output.ListOfPrimaryKeys(
'value',
),
)
@register()
class idview_find(Method):
__doc__ = _("Search for an ID View.")
takes_args = (
parameters.Str(
'criteria',
required=False,
doc=_(u'A string searched in all relevant object attributes'),
),
)
takes_options = (
parameters.Str(
'cn',
required=False,
cli_name='name',
label=_(u'ID View Name'),
),
parameters.Str(
'description',
required=False,
cli_name='desc',
label=_(u'Description'),
),
parameters.Int(
'timelimit',
required=False,
label=_(u'Time Limit'),
doc=_(u'Time limit of search in seconds (0 is unlimited)'),
),
parameters.Int(
'sizelimit',
required=False,
label=_(u'Size Limit'),
doc=_(u'Maximum number of entries returned (0 is unlimited)'),
),
parameters.Flag(
'all',
doc=_(u'Retrieve and print all attributes from the server. Affects command output.'),
exclude=('webui',),
default=False,
autofill=True,
),
parameters.Flag(
'raw',
doc=_(u'Print entries as stored on the server. Only affects output format.'),
exclude=('webui',),
default=False,
autofill=True,
),
parameters.Flag(
'pkey_only',
required=False,
label=_(u'Primary key only'),
doc=_(u'Results should contain primary key attribute only ("name")'),
default=False,
autofill=True,
),
)
has_output = (
output.Output(
'summary',
(unicode, type(None)),
doc=_(u'User-friendly description of action performed'),
),
output.ListOfEntries(
'result',
),
output.Output(
'count',
int,
doc=_(u'Number of entries returned'),
),
output.Output(
'truncated',
bool,
doc=_(u'True if not all results were returned'),
),
)
@register()
class idview_mod(Method):
__doc__ = _("Modify an ID View.")
takes_args = (
parameters.Str(
'cn',
cli_name='name',
label=_(u'ID View Name'),
),
)
takes_options = (
parameters.Str(
'description',
required=False,
cli_name='desc',
label=_(u'Description'),
),
parameters.Str(
'setattr',
required=False,
multivalue=True,
doc=_(u'Set an attribute to a name/value pair. Format is attr=value.\nFor multi-valued attributes, the command replaces the values already present.'),
exclude=('webui',),
),
parameters.Str(
'addattr',
required=False,
multivalue=True,
doc=_(u'Add an attribute/value pair. Format is attr=value. The attribute\nmust be part of the schema.'),
exclude=('webui',),
),
parameters.Str(
'delattr',
required=False,
multivalue=True,
doc=_(u'Delete an attribute/value pair. The option will be evaluated\nlast, after all sets and adds.'),
exclude=('webui',),
),
parameters.Flag(
'rights',
label=_(u'Rights'),
doc=_(u'Display the access rights of this entry (requires --all). See ipa man page for details.'),
default=False,
autofill=True,
),
parameters.Flag(
'all',
doc=_(u'Retrieve and print all attributes from the server. Affects command output.'),
exclude=('webui',),
default=False,
autofill=True,
),
parameters.Flag(
'raw',
doc=_(u'Print entries as stored on the server. Only affects output format.'),
exclude=('webui',),
default=False,
autofill=True,
),
parameters.Str(
'rename',
required=False,
label=_(u'Rename'),
doc=_(u'Rename the ID View object'),
),
)
has_output = (
output.Output(
'summary',
(unicode, type(None)),
doc=_(u'User-friendly description of action performed'),
),
output.Entry(
'result',
),
output.PrimaryKey(
'value',
doc=_(u"The primary_key value of the entry, e.g. 'jdoe' for a user"),
),
)
@register()
class idview_show(Method):
__doc__ = _("Display information about an ID View.")
takes_args = (
parameters.Str(
'cn',
cli_name='name',
label=_(u'ID View Name'),
),
)
takes_options = (
parameters.Flag(
'rights',
label=_(u'Rights'),
doc=_(u'Display the access rights of this entry (requires --all). See ipa man page for details.'),
default=False,
autofill=True,
),
parameters.Flag(
'show_hosts',
required=False,
doc=_(u'Enumerate all the hosts the view applies to.'),
default=False,
autofill=True,
),
parameters.Flag(
'all',
doc=_(u'Retrieve and print all attributes from the server. Affects command output.'),
exclude=('webui',),
default=False,
autofill=True,
),
parameters.Flag(
'raw',
doc=_(u'Print entries as stored on the server. Only affects output format.'),
exclude=('webui',),
default=False,
autofill=True,
),
)
has_output = (
output.Output(
'summary',
(unicode, type(None)),
doc=_(u'User-friendly description of action performed'),
),
output.Entry(
'result',
),
output.PrimaryKey(
'value',
doc=_(u"The primary_key value of the entry, e.g. 'jdoe' for a user"),
),
)
@register()
class idview_unapply(Method):
__doc__ = _("Clears ID View from specified hosts or current members of specified hostgroups.")
takes_options = (
parameters.Str(
'host',
required=False,
multivalue=True,
cli_name='hosts',
label=_(u'hosts'),
doc=_(u'Hosts to clear (any) ID View from.'),
),
parameters.Str(
'hostgroup',
required=False,
multivalue=True,
cli_name='hostgroups',
label=_(u'hostgroups'),
doc=_(u'Hostgroups whose hosts should have ID Views cleared. Note that the view is not cleared automatically from any host added to the hostgroup after running the idview-unapply command.'),
),
)
has_output = (
output.Output(
'summary',
(unicode, type(None)),
doc=_(u'User-friendly description of action performed'),
),
output.Output(
'succeeded',
dict,
doc=_(u'Hosts that the ID View was cleared from.'),
),
output.Output(
'failed',
dict,
doc=_(u'Hosts or hostgroups that the ID View could not be cleared from.'),
),
output.Output(
'completed',
int,
doc=_(u'Number of hosts the ID View was unset from:'),
),
)
|
wildjan/Flask
|
refs/heads/master
|
Work/Trivia - Module 5/env/Lib/site-packages/pip/_vendor/html5lib/treewalkers/__init__.py
|
1229
|
"""A collection of modules for iterating through different kinds of
tree, generating tokens identical to those produced by the tokenizer
module.
To create a tree walker for a new type of tree, you need to
implement a tree walker object (called TreeWalker by convention) that
implements a 'serialize' method taking a tree as sole argument and
returning an iterator generating tokens.
"""
from __future__ import absolute_import, division, unicode_literals
import sys
from ..utils import default_etree
treeWalkerCache = {}
def getTreeWalker(treeType, implementation=None, **kwargs):
"""Get a TreeWalker class for various types of tree with built-in support
treeType - the name of the tree type required (case-insensitive). Supported
values are:
"dom" - The xml.dom.minidom DOM implementation
"pulldom" - The xml.dom.pulldom event stream
"etree" - A generic walker for tree implementations exposing an
elementtree-like interface (known to work with
ElementTree, cElementTree and lxml.etree).
"lxml" - Optimized walker for lxml.etree
"genshi" - a Genshi stream
implementation - (Currently applies to the "etree" tree type only). A module
implementing the tree type e.g. xml.etree.ElementTree or
cElementTree."""
treeType = treeType.lower()
if treeType not in treeWalkerCache:
if treeType in ("dom", "pulldom"):
name = "%s.%s" % (__name__, treeType)
__import__(name)
mod = sys.modules[name]
treeWalkerCache[treeType] = mod.TreeWalker
elif treeType == "genshi":
from . import genshistream
treeWalkerCache[treeType] = genshistream.TreeWalker
elif treeType == "lxml":
from . import lxmletree
treeWalkerCache[treeType] = lxmletree.TreeWalker
elif treeType == "etree":
from . import etree
if implementation is None:
implementation = default_etree
# XXX: NEVER cache here, caching is done in the etree submodule
return etree.getETreeModule(implementation, **kwargs).TreeWalker
return treeWalkerCache.get(treeType)
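# Usage sketch (assumes the standard-library ElementTree module, one of the
# elementtree-like implementations mentioned in the docstring above):
#
#   import xml.etree.ElementTree as ElementTree
#   TreeWalkerClass = getTreeWalker("etree", ElementTree)
#   tree = ElementTree.fromstring("<p>Hello</p>")
#   for token in TreeWalkerClass(tree):
#       print(token)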
|
sanyaade-mobiledev/clusto
|
refs/heads/master
|
src/clusto/drivers/devices/common/portmixin.py
|
3
|
"""
PortMixin is a basic mixin to be used with devices that have ports
"""
import re
import clusto
from clusto.exceptions import ConnectionException
class PortMixin:
"""Provide port capabilities to devices
The ports are defined in the Driver's _portmeta dictionary:
_portmeta = { '<porttype>' : {'numports': <num> }}
Several ports types can be defined in this dictionary. Currently
'numports' is the only porttype attribute used. This data does not get
stored as Entity attributes in the clusto db. They live only in the class
definition.
Port data gets stored in the DB as ports are connected to other ports. The
keynames are of the form '_port-<porttype>'. Each port has a specific
number associated with it (usually the same number as on the physical
device itself) and can have several port attributes. There are no
restrictions on attributes but some common ones might be: osname,
cabletype, status, etc.
"""
# _portmeta = { 'porttype' : {'numports': 10 }}
_portmeta = { 'pwr-nema-5' : { 'numports':1, },
'nic-eth' : { 'numports':2, },
}
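# Usage sketch (the 'server' and 'switch' instances are hypothetical
# drivers that both mix in PortMixin and declare the 'nic-eth' port type
# shown above):
#
#   server.connect_ports('nic-eth', 1, switch, 4)
#   server.get_connected('nic-eth', 1)    # -> switch
#   server.disconnect_port('nic-eth', 1)  # clears both sides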
def _port_key(self, porttype):
return 'port-' + porttype
def _ensure_portnum(self, porttype, num):
if not isinstance(num, int):
raise TypeError("Port number '%s' needs to be an integer." % str(num))
elif num < 1:
raise TypeError("Port number '%s' needs to be greater than 0." % str(num))
if porttype not in self._portmeta \
or num > self._portmeta[porttype]['numports']:
raise ConnectionException("No port %s:%s exists on %s." % (porttype, str(num), self.name))
return num
def connect_ports(self, porttype, srcportnum, dstdev, dstportnum):
"""connect a local port to a port on another device
"""
for dev, num in [(self, srcportnum), (dstdev, dstportnum)]:
if not hasattr(dev, 'port_exists'):
msg = "%s has no ports."
raise ConnectionException(msg % (dev.name))
num = dev._ensure_portnum(porttype, num)
if not dev.port_exists(porttype, num):
msg = "port %s:%d doesn't exist on %s"
raise ConnectionException(msg % (porttype, num, dev.name))
if not dev.port_free(porttype, num):
msg = "port %s:%d on %s is already in use"
raise ConnectionException(msg % (porttype, num, dev.name))
try:
clusto.begin_transaction()
self.set_port_attr(porttype, srcportnum, 'connection', dstdev)
self.set_port_attr(porttype, srcportnum, 'otherportnum', dstportnum)
dstdev.set_port_attr(porttype, dstportnum, 'connection', self)
dstdev.set_port_attr(porttype, dstportnum, 'otherportnum', srcportnum)
clusto.commit()
except Exception, x:
clusto.rollback_transaction()
raise x
def disconnect_port(self, porttype, portnum):
"""disconnect both sides of a port"""
portnum = self._ensure_portnum(porttype, portnum)
if not self.port_free(porttype, portnum):
dev = self.get_connected(porttype, portnum)
otherportnum = self.get_port_attr(porttype, portnum, 'otherportnum')
clusto.begin_transaction()
try:
dev.del_port_attr(porttype, otherportnum, 'connection')
dev.del_port_attr(porttype, otherportnum, 'otherportnum')
self.del_port_attr(porttype, portnum, 'connection')
self.del_port_attr(porttype, portnum, 'otherportnum')
clusto.commit()
except Exception, x:
clusto.rollback_transaction()
raise x
def get_connected(self, porttype, portnum):
"""return the device that the given porttype/portnum is connected to"""
portnum = self._ensure_portnum(porttype, portnum)
if not self.port_exists(porttype, portnum):
msg = "port %s:%d doesn't exist on %s"
raise ConnectionException(msg % (porttype, portnum, self.name))
return self.get_port_attr(porttype, portnum, 'connection')
def ports_connectable(self, porttype, srcportnum, dstdev, dstportnum):
"""test if the ports you're trying to connect are compatible.
"""
return (self.port_exists(porttype, srcportnum)
and dstdev.port_exists(porttype, dstportnum))
def port_exists(self, porttype, portnum):
"""return true if the given port exists on this device"""
if porttype in self._portmeta:
try:
portnum = self._ensure_portnum(porttype, portnum)
return True
except ConnectionException:
return False
else:
return False
def port_free(self, porttype, portnum):
"""return true if the given porttype and portnum are not in use"""
portnum = self._ensure_portnum(porttype, portnum)
if (not self.port_exists(porttype, portnum) or
self.has_attr(key=self._port_key(porttype), number=portnum,
subkey='connection')):
return False
else:
return True
def add_port_attr(self, porttype, portnum, key, value):
"""add an attribute on the given port"""
portnum = self._ensure_portnum(porttype, portnum)
self.add_attr(key=self._port_key(porttype),
number=portnum,
subkey=key,
value=value)
def set_port_attr(self, porttype, portnum, key, value):
"""set an attribute on the given port"""
portnum = self._ensure_portnum(porttype, portnum)
self.set_attr(key=self._port_key(porttype),
number=portnum,
subkey=key,
value=value)
def del_port_attr(self, porttype, portnum, key, value=()):
"""delete an attribute on the given port"""
portnum = self._ensure_portnum(porttype, portnum)
if value == ():  # () is the "value not given" sentinel
self.del_attrs(key=self._port_key(porttype),
number=portnum,
subkey=key)
else:
self.del_attrs(key=self._port_key(porttype),
number=portnum,
subkey=key,
value=value)
def get_port_attr(self, porttype, portnum, key):
"""get an attribute on the given port"""
portnum = self._ensure_portnum(porttype, portnum)
attr = self.attrs(key=self._port_key(porttype),
number=portnum,
subkey=key)
if len(attr) > 1:
raise ConnectionException("Somehow more than one attribute named "
"%s is associated with port %s:%d on %s"
% (key, porttype, portnum, self.name))
elif len(attr) == 1:
return attr[0].value
else:
return None
@property
def port_info(self):
"""return a list of tuples containing port information for this device
format:
port_info[<porttype>][<portnum>][<portattr>]
"""
portinfo = {}
for ptype in self.port_types:
portinfo[ptype]={}
for n in range(1, self._portmeta[ptype]['numports'] + 1):
portinfo[ptype][n] = {'connection': self.get_port_attr(ptype, n, 'connection'),
'otherportnum': self.get_port_attr(ptype, n, 'otherportnum')}
return portinfo
@property
def port_info_tuples(self):
"""return port information as a list of tuples that are suitble for use
as *args to connect_ports
format:
[ ('porttype', portnum, <connected device>, <port connected to>), ... ]
"""
t = []
d = self.port_info
for porttype, numdict in d.iteritems():
for num, stats in numdict.iteritems():
t.append((porttype, num,
stats['connection'], stats['otherportnum']))
return t
@property
def free_ports(self):
return [(pinfo[0], pinfo[1]) for pinfo in self.port_info_tuples if pinfo[3] is None]
@property
def connected_ports(self):
"""Return a list of connected ports"""
pdict = {}
for ptype in self.port_types:
portlist = [a.number for a in self.attrs(self._port_key(ptype),
subkey='connection')]
portlist.sort()
pdict[ptype] = portlist
return pdict
@property
def port_types(self):
return self._portmeta.keys()
|
alistairlow/tensorflow
|
refs/heads/master
|
tensorflow/contrib/rnn/python/kernel_tests/gru_ops_test.py
|
46
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for Block GRU module."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.contrib.rnn.python.kernel_tests import benchmarking
from tensorflow.contrib.rnn.python.ops import gru_ops
from tensorflow.python.client import session
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gradient_checker
from tensorflow.python.ops import gradients_impl
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import rnn
from tensorflow.python.ops import rnn_cell
from tensorflow.python.ops import variable_scope as vs
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
from tensorflow.python.training import gradient_descent
class GRUBlockCellTest(test.TestCase):
def testNoneDimsWithDynamicRNN(self):
with self.test_session(use_gpu=True, graph=ops.Graph()) as sess:
batch_size = 4
cell_size = 5
input_size = 6
num_steps = 7
cell = gru_ops.GRUBlockCell(cell_size)
x = array_ops.placeholder(dtypes.float32, shape=(None, None, input_size))
_, output = rnn.dynamic_rnn(
cell, x, time_major=True, dtype=dtypes.float32)
sess.run(variables.global_variables_initializer())
feed = {}
feed[x] = np.random.randn(num_steps, batch_size, input_size)
sess.run(output, feed)
def testBlockGRUToGRUCellSingleStep(self):
with self.test_session(use_gpu=True, graph=ops.Graph()) as sess:
batch_size = 4
cell_size = 5
input_size = 6
seed = 1994
initializer = init_ops.random_uniform_initializer(-0.01, 0.01, seed=seed)
# Inputs
x = array_ops.zeros([batch_size, input_size])
h = array_ops.zeros([batch_size, cell_size])
# Values for the inputs.
x_value = np.random.rand(batch_size, input_size)
h_value = np.random.rand(batch_size, cell_size)
# Output from the basic GRU cell implementation.
with vs.variable_scope("basic", initializer=initializer):
output = rnn_cell.GRUCell(cell_size)(x, h)
sess.run([variables.global_variables_initializer()])
basic_res = sess.run([output], {x: x_value, h: h_value})
# Output from the block GRU cell implementation.
with vs.variable_scope("block", initializer=initializer):
output = gru_ops.GRUBlockCell(cell_size)(x, h)
sess.run([variables.global_variables_initializer()])
block_res = sess.run([output], {x: x_value, h: h_value})
self.assertEqual(len(block_res), len(basic_res))
for block, basic in zip(block_res, basic_res):
self.assertAllClose(block, basic)
def testBlockGRUToGRUCellMultiStep(self):
with self.test_session(use_gpu=True, graph=ops.Graph()) as sess:
batch_size = 2
cell_size = 3
input_size = 3
time_steps = 4
# Random initializers.
seed = 1994
initializer = init_ops.random_uniform_initializer(-0.01, 0.01, seed=seed)
np.random.seed(seed)
# Inputs
concat_x = array_ops.placeholder(
dtypes.float32, shape=(time_steps, batch_size, input_size))
h = array_ops.zeros([batch_size, cell_size])
# Values for the inputs.
x_values = np.random.rand(time_steps, batch_size, input_size)
h_value = np.random.rand(batch_size, cell_size)
# Output from the block GRU cell implementation.
with vs.variable_scope("block", initializer=initializer):
cell = gru_ops.GRUBlockCell(cell_size)
outputs_dynamic, state_dynamic = rnn.dynamic_rnn(
cell,
inputs=concat_x,
initial_state=h,
time_major=True,
dtype=dtypes.float32)
feeds = {concat_x: x_values, h: h_value}
sess.run([variables.global_variables_initializer()])
block_res = sess.run([outputs_dynamic, state_dynamic], feeds)
# Output from the basic GRU cell implementation.
with vs.variable_scope("basic", initializer=initializer):
cell = rnn_cell.GRUCell(cell_size)
outputs_dynamic, state_dynamic = rnn.dynamic_rnn(
cell,
inputs=concat_x,
initial_state=h,
time_major=True,
dtype=dtypes.float32)
feeds = {concat_x: x_values, h: h_value}
sess.run([variables.global_variables_initializer()])
basic_res = sess.run([outputs_dynamic, state_dynamic], feeds)
# Check the lengths of the outputs_dynamic, and states.
self.assertEqual(len(block_res), len(basic_res))
self.assertEqual(len(block_res[0]), len(basic_res[0]))
self.assertEqual(len(block_res[1]), len(basic_res[1]))
# Check the outputs_dynamic values.
for block_output, basic_output in zip(block_res[0], basic_res[0]):
self.assertAllClose(block_output, basic_output)
# Check the state_dynamic value.
self.assertAllClose(block_res[1], basic_res[1])
def testDerivativeOfBlockGRUToGRUCellSingleStep(self):
with self.test_session(use_gpu=True, graph=ops.Graph()) as sess:
batch_size = 2
cell_size = 3
input_size = 4
seed = 1994
initializer = init_ops.random_uniform_initializer(-0.01, 0.01, seed=seed)
np.random.seed(seed)
# Inputs
x = array_ops.zeros([batch_size, input_size])
h = array_ops.zeros([batch_size, cell_size])
# Values for the inputs.
x_value = np.random.rand(batch_size, input_size)
h_value = np.random.rand(batch_size, cell_size)
# Gradients from the block GRU cell implementation.
with vs.variable_scope("block", initializer=initializer):
output = gru_ops.GRUBlockCell(cell_size)(x, h)
sess.run([variables.global_variables_initializer()])
all_variables = variables.global_variables()[0:4]
[w_ru, b_ru, w_c, b_c] = all_variables
d_new_h_wrt_x = gradients_impl.gradients([output], x)
d_new_h_wrt_h = gradients_impl.gradients([output], h)
d_new_h_wrt_w_ru = gradients_impl.gradients([output], w_ru)
d_new_h_wrt_w_c = gradients_impl.gradients([output], w_c)
d_new_h_wrt_b_ru = gradients_impl.gradients([output], b_ru)
d_new_h_wrt_b_c = gradients_impl.gradients([output], b_c)
d_block_res = sess.run([
d_new_h_wrt_x, d_new_h_wrt_h, d_new_h_wrt_w_ru, d_new_h_wrt_w_c,
d_new_h_wrt_b_ru, d_new_h_wrt_b_c
], {x: x_value,
h: h_value})
# Gradients from the basic GRU cell implementation.
with vs.variable_scope("basic", initializer=initializer):
output = rnn_cell.GRUCell(cell_size)(x, h)
sess.run([variables.global_variables_initializer()])
all_variables = variables.global_variables()[4:8]
[w_ru, b_ru, w_c, b_c] = all_variables
d_new_h_wrt_x = gradients_impl.gradients([output], x)
d_new_h_wrt_h = gradients_impl.gradients([output], h)
d_new_h_wrt_w_ru = gradients_impl.gradients([output], w_ru)
d_new_h_wrt_w_c = gradients_impl.gradients([output], w_c)
d_new_h_wrt_b_ru = gradients_impl.gradients([output], b_ru)
d_new_h_wrt_b_c = gradients_impl.gradients([output], b_c)
d_basic_res = sess.run([
d_new_h_wrt_x, d_new_h_wrt_h, d_new_h_wrt_w_ru, d_new_h_wrt_w_c,
d_new_h_wrt_b_ru, d_new_h_wrt_b_c
], {x: x_value,
h: h_value})
# Check lengths of derivative results.
self.assertEqual(len(d_block_res), len(d_basic_res))
# Check the value of every derivative result.
for block, basic in zip(d_block_res, d_basic_res):
self.assertAllClose(block, basic)
def testDerivativeOfBlockGRUToGRUCellMultiSteps(self):
batch_size = 2
cell_size = 3
input_size = 4
time_steps = 2
with self.test_session(use_gpu=True, graph=ops.Graph()) as sess:
# Random initializers.
seed = 1994
initializer = init_ops.random_uniform_initializer(-0.01, 0.01, seed=seed)
np.random.seed(seed)
# Inputs
concat_x = array_ops.placeholder(
dtypes.float32, shape=(time_steps, batch_size, input_size))
h = array_ops.zeros([batch_size, cell_size])
# Values for the inputs.
x_values = np.random.rand(time_steps, batch_size, input_size)
h_value = np.random.rand(batch_size, cell_size)
feeds = {concat_x: x_values, h: h_value}
# Gradients from the block GRU cell implementation.
with vs.variable_scope("block", initializer=initializer):
cell = gru_ops.GRUBlockCell(cell_size)
outputs_dynamic, _ = rnn.dynamic_rnn(
cell,
inputs=concat_x,
initial_state=h,
time_major=True,
dtype=dtypes.float32)
grad_output_wrt_x = gradients_impl.gradients([outputs_dynamic[0]],
concat_x)
grad_output_wrt_h = gradients_impl.gradients([outputs_dynamic[0]], h)
sess.run([variables.global_variables_initializer()])
block_grad_res_x, block_grad_res_h = sess.run(
[grad_output_wrt_x, grad_output_wrt_h], feeds)
# Gradients from the basic GRU cell implementation.
with vs.variable_scope("basic", initializer=initializer):
cell = rnn_cell.GRUCell(cell_size)
outputs_dynamic, _ = rnn.dynamic_rnn(
cell,
inputs=concat_x,
initial_state=h,
time_major=True,
dtype=dtypes.float32)
grad_output_wrt_x = gradients_impl.gradients([outputs_dynamic[0]],
concat_x)
grad_output_wrt_h = gradients_impl.gradients([outputs_dynamic[0]], h)
sess.run([variables.global_variables_initializer()])
basic_grad_res_x, basic_grad_res_h = sess.run(
[grad_output_wrt_x, grad_output_wrt_h], feeds)
# Check the lengths of the derivatives of the outputs wrt x.
self.assertEqual(len(block_grad_res_x), len(basic_grad_res_x))
# Check the derivative values of the outputs wrt x.
for block, basic in zip(block_grad_res_x, basic_grad_res_x):
self.assertAllClose(block, basic)
# Check the lengths of the derivatives of the outputs wrt h.
self.assertEqual(len(block_grad_res_h), len(basic_grad_res_h))
# Check the derivative values of the outputs wrt h.
for block, basic in zip(block_grad_res_h, basic_grad_res_h):
self.assertAllClose(block, basic)
def testGradient(self):
with self.test_session(use_gpu=True, graph=ops.Graph()) as sess:
batch_size = 1
cell_size = 3
input_size = 2
# Inputs
x = array_ops.zeros([batch_size, input_size])
h = array_ops.zeros([batch_size, cell_size])
output = gru_ops.GRUBlockCell(cell_size)(x, h)
sess.run([variables.global_variables_initializer()])
all_variables = variables.global_variables()
[w_ru, b_ru, w_c, b_c] = all_variables[:4]
error_x = gradient_checker.compute_gradient_error(
x, (batch_size, input_size), output[0], (batch_size, cell_size))
error_h = gradient_checker.compute_gradient_error(h,
(batch_size, cell_size),
output[0],
(batch_size, cell_size))
error_w_ru = gradient_checker.compute_gradient_error(
w_ru, (input_size + cell_size, 2 * cell_size), output[0],
(batch_size, cell_size))
error_w_c = gradient_checker.compute_gradient_error(
w_c, (input_size + cell_size, cell_size), output[0],
(batch_size, cell_size))
error_b_ru = gradient_checker.compute_gradient_error(
b_ru, (2 * cell_size,), output[0], (batch_size, cell_size))
error_b_c = gradient_checker.compute_gradient_error(
b_c, (cell_size,), output[0], (batch_size, cell_size))
eps = 1e-4
self.assertLess(error_x, eps)
self.assertLess(error_h, eps)
self.assertLess(error_w_ru, eps)
self.assertLess(error_w_c, eps)
self.assertLess(error_b_ru, eps)
self.assertLess(error_b_c, eps)
#### Benchmarking GRUBlockCell vs GRUCell.
def training_gru_block_vs_gru_cell(batch_size,
cell_size,
input_size,
time_steps,
use_gpu=False,
iters=30):
"""Benchmark training speed between GRUBlockCell vs GRUCell."""
ops.reset_default_graph()
with session.Session(graph=ops.Graph()) as sess:
# Specify the device to be used.
with benchmarking.device(use_gpu):
# Random initializers.
seed = 1994
initializer = init_ops.random_uniform_initializer(-1, 1, seed=seed)
np.random.seed(seed)
# Inputs
concat_x = vs.get_variable("concat_x",
[time_steps, batch_size, input_size])
h = vs.get_variable("h", [batch_size, cell_size])
y = vs.get_variable("y", [time_steps, batch_size, cell_size])
# Output from the basic GRU cell implementation.
with vs.variable_scope("basic", initializer=initializer):
cell = rnn_cell.GRUCell(cell_size)
outputs_dynamic, _ = rnn.dynamic_rnn(
cell,
inputs=concat_x,
initial_state=h,
time_major=True,
dtype=dtypes.float32)
sess.run([variables.global_variables_initializer()])
cost = math_ops.reduce_mean(math_ops.square(outputs_dynamic - y))
learning_rate = 0.01
optimizer = gradient_descent.GradientDescentOptimizer(
learning_rate).minimize(cost)
# time for a training step.
basic_time_training = benchmarking.seconds_per_run(
optimizer, sess, iters)
# Output from the block GRU cell implementation.
with vs.variable_scope("block", initializer=initializer):
cell = gru_ops.GRUBlockCell(cell_size)
outputs_dynamic, _ = rnn.dynamic_rnn(
cell,
inputs=concat_x,
initial_state=h,
time_major=True,
dtype=dtypes.float32)
sess.run([variables.global_variables_initializer()])
cost = math_ops.reduce_mean(math_ops.square(outputs_dynamic - y))
learning_rate = 0.01
optimizer = gradient_descent.GradientDescentOptimizer(
learning_rate).minimize(cost)
# time for a training step.
block_time_training = benchmarking.seconds_per_run(
optimizer, sess, iters)
performance_training = (
basic_time_training - block_time_training) * 100 / basic_time_training
print(",".join([
str(batch_size), str(cell_size), str(input_size), str(time_steps), str(
use_gpu), str(basic_time_training), str(block_time_training), str(
performance_training)
]))
return basic_time_training, block_time_training
def inference_gru_block_vs_gru_cell(batch_size,
cell_size,
input_size,
time_steps,
use_gpu=False,
iters=30):
"""Benchmark inference speed between GRUBlockCell vs GRUCell."""
ops.reset_default_graph()
with session.Session(graph=ops.Graph()) as sess:
with benchmarking.device(use_gpu):
# Random initializers.
seed = 1994
initializer = init_ops.random_uniform_initializer(-1, 1, seed=seed)
np.random.seed(seed)
# Inputs
concat_x = vs.get_variable("concat_x",
[time_steps, batch_size, input_size])
h = vs.get_variable("h", [batch_size, cell_size])
# Output from the basic GRU cell implementation.
with vs.variable_scope("basic", initializer=initializer):
cell = rnn_cell.GRUCell(cell_size)
outputs_dynamic, _ = rnn.dynamic_rnn(
cell,
inputs=concat_x,
initial_state=h,
time_major=True,
dtype=dtypes.float32)
sess.run([variables.global_variables_initializer()])
basic_time_inference = benchmarking.seconds_per_run(
outputs_dynamic, sess, iters)
# Output from the block GRU cell implementation.
with vs.variable_scope("block", initializer=initializer):
cell = gru_ops.GRUBlockCell(cell_size)
outputs_dynamic, _ = rnn.dynamic_rnn(
cell,
inputs=concat_x,
initial_state=h,
time_major=True,
dtype=dtypes.float32)
sess.run([variables.global_variables_initializer()])
block_time_inference = benchmarking.seconds_per_run(
outputs_dynamic, sess, iters)
performance_inference = (basic_time_inference - block_time_inference
) * 100 / basic_time_inference
print(",".join([
str(batch_size), str(cell_size), str(input_size), str(time_steps), str(
use_gpu), str(basic_time_inference), str(block_time_inference), str(
performance_inference)
]))
return basic_time_inference, block_time_inference
def single_bprop_step_gru_block_vs_gru_cell(batch_size,
cell_size,
input_size,
use_gpu=False,
iters=30):
"""Benchmark single bprop step speed between GRUBlockCell vs GRUCell."""
ops.reset_default_graph()
with session.Session(graph=ops.Graph()) as sess:
with benchmarking.device(use_gpu):
initializer = init_ops.random_uniform_initializer(-1, 1, seed=1989)
# Inputs
x = vs.get_variable("x", [batch_size, input_size])
h = vs.get_variable("h", [batch_size, cell_size])
# Output from the basic GRU cell implementation.
with vs.variable_scope("basic", initializer=initializer):
output = rnn_cell.GRUCell(cell_size)(array_ops.identity(x),
array_ops.identity(h))
sess.run([variables.global_variables_initializer()])
grad_output_wrt_input = gradients_impl.gradients([output], h)
basic_time_bprop = benchmarking.seconds_per_run(grad_output_wrt_input,
sess, iters)
# Output from the block GRU cell implementation.
with vs.variable_scope("block", initializer=initializer):
output = gru_ops.GRUBlockCell(cell_size)(array_ops.identity(x),
array_ops.identity(h))
sess.run([variables.global_variables_initializer()])
grad_output_wrt_input = gradients_impl.gradients([output], h)
block_time_bprop = benchmarking.seconds_per_run(grad_output_wrt_input,
sess, iters)
performance_inference = (
basic_time_bprop - block_time_bprop) * 100 / basic_time_bprop
print(",".join([
str(batch_size), str(cell_size), str(input_size), str(use_gpu), str(
basic_time_bprop), str(block_time_bprop), str(performance_inference)
]))
return basic_time_bprop, block_time_bprop
class BenchmarkGRUBlock(test.Benchmark):
def benchmarkTrainingBlockGRUVsGRUCell(self):
print("Comparison GRUBlockCell vs GRUCell")
print("--------------------------------------------------------------")
print("Training speed GRUBlockCell vs GRUCell")
print("batch_size, cell_size, input_size, time_steps, GPU, "
"basic_time_training, block_time_training, performance_training[%]")
iters = 10
for config in benchmarking.dict_product({
"use_gpu": [True, False],
"batch_size": [1, 32, 128],
"cell_size": [128, 512],
"input_size": [128, 512],
"time_steps": [50]
}):
basic_time, block_time = training_gru_block_vs_gru_cell(
config["batch_size"], config["cell_size"], config["input_size"],
config["time_steps"], config["use_gpu"], iters)
self.report_benchmark(
name="GRUCell_training_time_BS%i_CS%i_IS%i_TS%i_gpu_%s" %
(config["batch_size"], config["cell_size"], config["input_size"],
config["time_steps"], config["use_gpu"]),
iters=iters,
wall_time=basic_time)
self.report_benchmark(
name="GRUBlockCell_training_time_BS%i_CS%i_IS%i_TS%i_gpu_%s" %
(config["batch_size"], config["cell_size"], config["input_size"],
config["time_steps"], config["use_gpu"]),
iters=iters,
wall_time=block_time)
def benchmarkInferenceBlockGRUVsGRUCell(self):
print("--------------------------------------------------------------")
print("Inference speed GRUBlockCell vs GRUCell")
print(
"batch_size, cell_size, input_size, time_steps, GPU, "
"basic_time_inference, block_time_inference, performance_inference[%]")
iters = 10
for config in benchmarking.dict_product({
"use_gpu": [True, False],
"batch_size": [1, 32, 128],
"cell_size": [128, 512],
"input_size": [128, 512],
"time_steps": [50]
}):
basic_time, block_time = inference_gru_block_vs_gru_cell(
config["batch_size"], config["cell_size"], config["input_size"],
config["time_steps"], config["use_gpu"], iters)
self.report_benchmark(
name="GRUCell_inference_time_BS%i_CS%i_IS%i_TS%i_gpu_%s" %
(config["batch_size"], config["cell_size"], config["input_size"],
config["time_steps"], config["use_gpu"]),
iters=iters,
wall_time=basic_time)
self.report_benchmark(
name="GRUBlockCell_inference_time_BS%i_CS%i_IS%i_TS%i_gpu_%s" %
(config["batch_size"], config["cell_size"], config["input_size"],
config["time_steps"], config["use_gpu"]),
iters=iters,
wall_time=block_time)
def benchmarkSingleBpropStepBlockGRUVsGRUCell(self):
print("--------------------------------------------------------------")
print("Single bprop step speed GRUBlockCell vs GRUCell")
print("batch_size, cell_size, input_size, GPU, basic_time, "
"block_time, performance_inference[%]")
iters = 10
for config in benchmarking.dict_product({
"use_gpu": [True, False],
"batch_size": [1, 32, 128],
"cell_size": [128, 512],
"input_size": [128, 512]
}):
basic_time, block_time = single_bprop_step_gru_block_vs_gru_cell(
config["batch_size"], config["cell_size"], config["input_size"],
config["use_gpu"], iters)
self.report_benchmark(
name="GRUCell_Bprop_single_step_time_BS%i_CS%i_IS%i_gpu_%s" %
(config["batch_size"], config["cell_size"], config["input_size"],
config["use_gpu"]),
iters=iters,
wall_time=basic_time)
self.report_benchmark(
name="GRUBlockCell_Bprop_single_step_time_BS%i_CS%i_IS%i_gpu_%s" %
(config["batch_size"], config["cell_size"], config["input_size"],
config["use_gpu"]),
iters=iters,
wall_time=block_time)
print("--------------------------------------------------------------")
if __name__ == "__main__":
test.main()
|
MatthewWilkes/django
|
refs/heads/master
|
tests/gis_tests/utils.py
|
327
|
from unittest import skip
from django.conf import settings
from django.db import DEFAULT_DB_ALIAS
def no_backend(test_func, backend):
"Use this decorator to disable test on specified backend."
if settings.DATABASES[DEFAULT_DB_ALIAS]['ENGINE'].rsplit('.')[-1] == backend:
@skip("This test is skipped on '%s' backend" % backend)
def inner():
pass
return inner
else:
return test_func
# Decorators to disable entire test functions for specific
# spatial backends.
def no_oracle(func):
return no_backend(func, 'oracle')
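# Usage sketch (the test method below is hypothetical):
#
#   @no_oracle
#   def test_feature_unsupported_on_oracle(self):
#       ...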
# Shortcut booleans to omit only portions of tests.
_default_db = settings.DATABASES[DEFAULT_DB_ALIAS]['ENGINE'].rsplit('.')[-1]
oracle = _default_db == 'oracle'
postgis = _default_db == 'postgis'
mysql = _default_db == 'mysql'
spatialite = _default_db == 'spatialite'
# MySQL spatial indices can't handle NULL geometries.
gisfield_may_be_null = not mysql
if oracle and 'gis' in settings.DATABASES[DEFAULT_DB_ALIAS]['ENGINE']:
from django.contrib.gis.db.backends.oracle.models import OracleSpatialRefSys as SpatialRefSys
elif postgis:
from django.contrib.gis.db.backends.postgis.models import PostGISSpatialRefSys as SpatialRefSys
elif spatialite:
from django.contrib.gis.db.backends.spatialite.models import SpatialiteSpatialRefSys as SpatialRefSys
else:
SpatialRefSys = None
|
epequeno/ThinkPy-Solutions
|
refs/heads/master
|
ch03/3.03.py
|
1
|
# Exercise 3.3 Python provides a built-in function called len that returns
# the length of a string, so the value of len('allen') is 5.
# Write a function named right_justify that takes a string named s as a
# parameter and prints the string with enough leading spaces so that the last
# letter of the string is in column 70 of the display.
# >>> right_justify('allen')
# allen
word = raw_input('Word?\n')
def right_justify(s):
print " " * (70 - len(s)) + s
right_justify(word)
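# Worked example: len('allen') == 5, so right_justify('allen') prints
# 70 - 5 = 65 leading spaces followed by 'allen', leaving the final 'n'
# in column 70.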
|
greyhwndz/rethinkdb
|
refs/heads/next
|
external/v8_3.30.33.16/build/gyp/test/mac/gyptest-objc-gc.py
|
90
|
#!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that GC objc settings are handled correctly.
"""
import TestGyp
import sys
if sys.platform == 'darwin':
# set |match| to ignore build stderr output.
test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'],
match = lambda a, b: True)
CHDIR = 'objc-gc'
test.run_gyp('test.gyp', chdir=CHDIR)
build_error_code = {
'xcode': [1, 65], # 1 for xcode 3, 65 for xcode 4 (see `man sysexits`)
'make': 2,
'ninja': 1,
}[test.format]
test.build('test.gyp', 'gc_exe_fails', chdir=CHDIR, status=build_error_code)
test.build(
'test.gyp', 'gc_off_exe_req_lib', chdir=CHDIR, status=build_error_code)
test.build('test.gyp', 'gc_req_exe', chdir=CHDIR)
test.run_built_executable('gc_req_exe', chdir=CHDIR, stdout="gc on: 1\n")
test.build('test.gyp', 'gc_exe_req_lib', chdir=CHDIR)
test.run_built_executable('gc_exe_req_lib', chdir=CHDIR, stdout="gc on: 1\n")
test.build('test.gyp', 'gc_exe', chdir=CHDIR)
test.run_built_executable('gc_exe', chdir=CHDIR, stdout="gc on: 1\n")
test.build('test.gyp', 'gc_off_exe', chdir=CHDIR)
test.run_built_executable('gc_off_exe', chdir=CHDIR, stdout="gc on: 0\n")
test.pass_test()
|
vinhqdang/project_euler
|
refs/heads/master
|
problem_29_fool.py
|
1
|
res = list()
for i in range(2, 101):
    for j in range(2, 101):
        a = i ** j
        if a not in res:
            res.append(a)
print len(res)
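# An equivalent but faster sketch (assuming Python 2.7+ for the set
# comprehension): a set makes the membership test O(1) instead of a
# linear scan of the list.
#
#   print len({i ** j for i in range(2, 101) for j in range(2, 101)})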
|
smilusingjavascript/blink
|
refs/heads/master
|
LayoutTests/http/tests/websocket/deflate-frame-invalid-parameter_wsh.py
|
4
|
# Copyright 2012, Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import urllib
from mod_pywebsocket import handshake
from mod_pywebsocket.handshake.hybi import compute_accept
def web_socket_do_extra_handshake(request):
resources = request.ws_resource.split('?', 1)
parameters = None
if len(resources) == 2:
parameters = urllib.unquote(resources[1])
message = 'HTTP/1.1 101 Switching Protocols\r\n'
message += 'Upgrade: websocket\r\n'
message += 'Connection: Upgrade\r\n'
message += 'Sec-WebSocket-Accept: %s\r\n' % compute_accept(request.headers_in['Sec-WebSocket-Key'])[0]
message += 'Sec-WebSocket-Extensions: x-webkit-deflate-frame'
if parameters:
message += '; %s\r\n' % parameters
else:
message += '\r\n'
message += '\r\n'
request.connection.write(message)
# Prevents pywebsocket from sending its own handshake message.
raise handshake.AbortedByUserException('Abort the connection')
def web_socket_transfer_data(request):
pass
|
yigitguler/django
|
refs/heads/master
|
tests/syndication_tests/tests.py
|
25
|
from __future__ import unicode_literals
import datetime
from xml.dom import minidom
try:
import pytz
except ImportError:
pytz = None
from django.contrib.syndication import views
from django.core.exceptions import ImproperlyConfigured
from django.test import TestCase, override_settings
from django.test.utils import requires_tz_support
from django.utils.feedgenerator import rfc2822_date, rfc3339_date
from django.utils import timezone
from .models import Entry
TZ = timezone.get_default_timezone()
class FeedTestCase(TestCase):
fixtures = ['feeddata.json']
def setUp(self):
# Django cannot deal with very old dates when pytz isn't installed.
if pytz is None:
old_entry = Entry.objects.get(pk=1)
old_entry.updated = datetime.datetime(1980, 1, 1, 12, 30)
old_entry.published = datetime.datetime(1986, 9, 25, 20, 15, 00)
old_entry.save()
def assertChildNodes(self, elem, expected):
actual = set(n.nodeName for n in elem.childNodes)
expected = set(expected)
self.assertEqual(actual, expected)
def assertChildNodeContent(self, elem, expected):
for k, v in expected.items():
self.assertEqual(
elem.getElementsByTagName(k)[0].firstChild.wholeText, v)
def assertCategories(self, elem, expected):
self.assertEqual(set(i.firstChild.wholeText for i in elem.childNodes if i.nodeName == 'category'), set(expected))
######################################
# Feed view
######################################
@override_settings(ROOT_URLCONF='syndication_tests.urls')
class SyndicationFeedTest(FeedTestCase):
"""
Tests for the high-level syndication feed framework.
"""
def test_rss2_feed(self):
"""
Test the structure and content of feeds generated by Rss201rev2Feed.
"""
response = self.client.get('/syndication/rss2/')
doc = minidom.parseString(response.content)
# Making sure there's only 1 `rss` element and that the correct
# RSS version was specified.
feed_elem = doc.getElementsByTagName('rss')
self.assertEqual(len(feed_elem), 1)
feed = feed_elem[0]
self.assertEqual(feed.getAttribute('version'), '2.0')
# Making sure there's only one `channel` element w/in the
# `rss` element.
chan_elem = feed.getElementsByTagName('channel')
self.assertEqual(len(chan_elem), 1)
chan = chan_elem[0]
# Find the last build date
d = Entry.objects.latest('published').published
last_build_date = rfc2822_date(timezone.make_aware(d, TZ))
self.assertChildNodes(chan, ['title', 'link', 'description', 'language', 'lastBuildDate', 'item', 'atom:link', 'ttl', 'copyright', 'category'])
self.assertChildNodeContent(chan, {
'title': 'My blog',
'description': 'A more thorough description of my blog.',
'link': 'http://example.com/blog/',
'language': 'en',
'lastBuildDate': last_build_date,
#'atom:link': '',
'ttl': '600',
'copyright': 'Copyright (c) 2007, Sally Smith',
})
self.assertCategories(chan, ['python', 'django'])
# Ensure the content of the channel is correct
self.assertChildNodeContent(chan, {
'title': 'My blog',
'link': 'http://example.com/blog/',
})
# Check feed_url is passed
self.assertEqual(
chan.getElementsByTagName('atom:link')[0].getAttribute('href'),
'http://example.com/syndication/rss2/'
)
# Find the pubdate of the first feed item
d = Entry.objects.get(pk=1).published
pub_date = rfc2822_date(timezone.make_aware(d, TZ))
items = chan.getElementsByTagName('item')
self.assertEqual(len(items), Entry.objects.count())
self.assertChildNodeContent(items[0], {
'title': 'My first entry',
'description': 'Overridden description: My first entry',
'link': 'http://example.com/blog/1/',
'guid': 'http://example.com/blog/1/',
'pubDate': pub_date,
'author': 'test@example.com (Sally Smith)',
})
self.assertCategories(items[0], ['python', 'testing'])
for item in items:
self.assertChildNodes(item, ['title', 'link', 'description', 'guid', 'category', 'pubDate', 'author'])
# Assert that <guid> does not have any 'isPermaLink' attribute
self.assertIsNone(item.getElementsByTagName(
'guid')[0].attributes.get('isPermaLink'))
def test_rss2_feed_guid_permalink_false(self):
"""
Test if the 'isPermaLink' attribute of <guid> element of an item
in the RSS feed is 'false'.
"""
response = self.client.get(
'/syndication/rss2/guid_ispermalink_false/')
doc = minidom.parseString(response.content)
chan = doc.getElementsByTagName(
'rss')[0].getElementsByTagName('channel')[0]
items = chan.getElementsByTagName('item')
for item in items:
self.assertEqual(
item.getElementsByTagName('guid')[0].attributes.get(
'isPermaLink').value, "false")
def test_rss2_feed_guid_permalink_true(self):
"""
Test if the 'isPermaLink' attribute of <guid> element of an item
in the RSS feed is 'true'.
"""
response = self.client.get(
'/syndication/rss2/guid_ispermalink_true/')
doc = minidom.parseString(response.content)
chan = doc.getElementsByTagName(
'rss')[0].getElementsByTagName('channel')[0]
items = chan.getElementsByTagName('item')
for item in items:
self.assertEqual(
item.getElementsByTagName('guid')[0].attributes.get(
'isPermaLink').value, "true")
def test_rss091_feed(self):
"""
Test the structure and content of feeds generated by RssUserland091Feed.
"""
response = self.client.get('/syndication/rss091/')
doc = minidom.parseString(response.content)
# Making sure there's only 1 `rss` element and that the correct
# RSS version was specified.
feed_elem = doc.getElementsByTagName('rss')
self.assertEqual(len(feed_elem), 1)
feed = feed_elem[0]
self.assertEqual(feed.getAttribute('version'), '0.91')
# Making sure there's only one `channel` element w/in the
# `rss` element.
chan_elem = feed.getElementsByTagName('channel')
self.assertEqual(len(chan_elem), 1)
chan = chan_elem[0]
self.assertChildNodes(chan, ['title', 'link', 'description', 'language', 'lastBuildDate', 'item', 'atom:link', 'ttl', 'copyright', 'category'])
# Ensure the content of the channel is correct
self.assertChildNodeContent(chan, {
'title': 'My blog',
'link': 'http://example.com/blog/',
})
self.assertCategories(chan, ['python', 'django'])
# Check feed_url is passed
self.assertEqual(
chan.getElementsByTagName('atom:link')[0].getAttribute('href'),
'http://example.com/syndication/rss091/'
)
items = chan.getElementsByTagName('item')
self.assertEqual(len(items), Entry.objects.count())
self.assertChildNodeContent(items[0], {
'title': 'My first entry',
'description': 'Overridden description: My first entry',
'link': 'http://example.com/blog/1/',
})
for item in items:
self.assertChildNodes(item, ['title', 'link', 'description'])
self.assertCategories(item, [])
def test_atom_feed(self):
"""
Test the structure and content of feeds generated by Atom1Feed.
"""
response = self.client.get('/syndication/atom/')
feed = minidom.parseString(response.content).firstChild
self.assertEqual(feed.nodeName, 'feed')
self.assertEqual(feed.getAttribute('xmlns'), 'http://www.w3.org/2005/Atom')
self.assertChildNodes(feed, ['title', 'subtitle', 'link', 'id', 'updated', 'entry', 'rights', 'category', 'author'])
for link in feed.getElementsByTagName('link'):
if link.getAttribute('rel') == 'self':
self.assertEqual(link.getAttribute('href'), 'http://example.com/syndication/atom/')
entries = feed.getElementsByTagName('entry')
self.assertEqual(len(entries), Entry.objects.count())
for entry in entries:
self.assertChildNodes(entry, [
'title',
'link',
'id',
'summary',
'category',
'updated',
'published',
'rights',
'author',
])
summary = entry.getElementsByTagName('summary')[0]
self.assertEqual(summary.getAttribute('type'), 'html')
def test_atom_feed_published_and_updated_elements(self):
"""
        Test that the published and updated elements are distinct,
        as RFC 4287 requires.
"""
response = self.client.get('/syndication/atom/')
feed = minidom.parseString(response.content).firstChild
entries = feed.getElementsByTagName('entry')
published = entries[0].getElementsByTagName('published')[0].firstChild.wholeText
updated = entries[0].getElementsByTagName('updated')[0].firstChild.wholeText
self.assertNotEqual(published, updated)
def test_latest_post_date(self):
"""
Test that both the published and updated dates are
considered when determining the latest post date.
"""
# this feed has a `published` element with the latest date
response = self.client.get('/syndication/atom/')
feed = minidom.parseString(response.content).firstChild
updated = feed.getElementsByTagName('updated')[0].firstChild.wholeText
d = Entry.objects.latest('published').published
latest_published = rfc3339_date(timezone.make_aware(d, TZ))
self.assertEqual(updated, latest_published)
# this feed has an `updated` element with the latest date
response = self.client.get('/syndication/latest/')
feed = minidom.parseString(response.content).firstChild
updated = feed.getElementsByTagName('updated')[0].firstChild.wholeText
d = Entry.objects.exclude(pk=5).latest('updated').updated
latest_updated = rfc3339_date(timezone.make_aware(d, TZ))
self.assertEqual(updated, latest_updated)
def test_custom_feed_generator(self):
response = self.client.get('/syndication/custom/')
feed = minidom.parseString(response.content).firstChild
self.assertEqual(feed.nodeName, 'feed')
self.assertEqual(feed.getAttribute('django'), 'rocks')
self.assertChildNodes(feed, ['title', 'subtitle', 'link', 'id', 'updated', 'entry', 'spam', 'rights', 'category', 'author'])
entries = feed.getElementsByTagName('entry')
self.assertEqual(len(entries), Entry.objects.count())
for entry in entries:
self.assertEqual(entry.getAttribute('bacon'), 'yum')
self.assertChildNodes(entry, [
'title',
'link',
'id',
'summary',
'ministry',
'rights',
'author',
'updated',
'published',
'category',
])
summary = entry.getElementsByTagName('summary')[0]
self.assertEqual(summary.getAttribute('type'), 'html')
def test_title_escaping(self):
"""
Tests that titles are escaped correctly in RSS feeds.
"""
response = self.client.get('/syndication/rss2/')
doc = minidom.parseString(response.content)
for item in doc.getElementsByTagName('item'):
link = item.getElementsByTagName('link')[0]
if link.firstChild.wholeText == 'http://example.com/blog/4/':
title = item.getElementsByTagName('title')[0]
self.assertEqual(title.firstChild.wholeText, 'A & B < C > D')
def test_naive_datetime_conversion(self):
"""
Test that datetimes are correctly converted to the local time zone.
"""
# Naive date times passed in get converted to the local time zone, so
# check the received zone offset against the local offset.
response = self.client.get('/syndication/naive-dates/')
doc = minidom.parseString(response.content)
updated = doc.getElementsByTagName('updated')[0].firstChild.wholeText
d = Entry.objects.latest('published').published
latest = rfc3339_date(timezone.make_aware(d, TZ))
self.assertEqual(updated, latest)
def test_aware_datetime_conversion(self):
"""
Test that datetimes with timezones don't get trodden on.
"""
response = self.client.get('/syndication/aware-dates/')
doc = minidom.parseString(response.content)
published = doc.getElementsByTagName('published')[0].firstChild.wholeText
self.assertEqual(published[-6:], '+00:42')
@requires_tz_support
def test_feed_last_modified_time_naive_date(self):
"""
Tests the Last-Modified header with naive publication dates.
"""
response = self.client.get('/syndication/naive-dates/')
self.assertEqual(response['Last-Modified'], 'Tue, 26 Mar 2013 01:00:00 GMT')
def test_feed_last_modified_time(self):
"""
Tests the Last-Modified header with aware publication dates.
"""
response = self.client.get('/syndication/aware-dates/')
self.assertEqual(response['Last-Modified'], 'Mon, 25 Mar 2013 19:18:00 GMT')
# No last-modified when feed has no item_pubdate
response = self.client.get('/syndication/no_pubdate/')
self.assertFalse(response.has_header('Last-Modified'))
def test_feed_url(self):
"""
Test that the feed_url can be overridden.
"""
response = self.client.get('/syndication/feedurl/')
doc = minidom.parseString(response.content)
for link in doc.getElementsByTagName('link'):
if link.getAttribute('rel') == 'self':
self.assertEqual(link.getAttribute('href'), 'http://example.com/customfeedurl/')
def test_secure_urls(self):
"""
        Test that URLs are prefixed with https:// when the feed is requested over HTTPS.
"""
response = self.client.get('/syndication/rss2/', **{
'wsgi.url_scheme': 'https',
})
doc = minidom.parseString(response.content)
chan = doc.getElementsByTagName('channel')[0]
self.assertEqual(
chan.getElementsByTagName('link')[0].firstChild.wholeText[0:5],
'https'
)
atom_link = chan.getElementsByTagName('atom:link')[0]
self.assertEqual(atom_link.getAttribute('href')[0:5], 'https')
for link in doc.getElementsByTagName('link'):
if link.getAttribute('rel') == 'self':
self.assertEqual(link.getAttribute('href')[0:5], 'https')
def test_item_link_error(self):
"""
Test that an ImproperlyConfigured is raised if no link could be found
for the item(s).
"""
self.assertRaises(ImproperlyConfigured,
self.client.get,
'/syndication/articles/')
def test_template_feed(self):
"""
Test that the item title and description can be overridden with
templates.
"""
response = self.client.get('/syndication/template/')
doc = minidom.parseString(response.content)
feed = doc.getElementsByTagName('rss')[0]
chan = feed.getElementsByTagName('channel')[0]
items = chan.getElementsByTagName('item')
self.assertChildNodeContent(items[0], {
'title': 'Title in your templates: My first entry\n',
'description': 'Description in your templates: My first entry\n',
'link': 'http://example.com/blog/1/',
})
def test_template_context_feed(self):
"""
Test that custom context data can be passed to templates for title
and description.
"""
response = self.client.get('/syndication/template_context/')
doc = minidom.parseString(response.content)
feed = doc.getElementsByTagName('rss')[0]
chan = feed.getElementsByTagName('channel')[0]
items = chan.getElementsByTagName('item')
self.assertChildNodeContent(items[0], {
'title': 'My first entry (foo is bar)\n',
'description': 'My first entry (foo is bar)\n',
})
def test_add_domain(self):
"""
        Test that add_domain() prefixes the domain onto the correct URLs.
"""
self.assertEqual(
views.add_domain('example.com', '/foo/?arg=value'),
'http://example.com/foo/?arg=value'
)
self.assertEqual(
views.add_domain('example.com', '/foo/?arg=value', True),
'https://example.com/foo/?arg=value'
)
self.assertEqual(
views.add_domain('example.com', 'http://djangoproject.com/doc/'),
'http://djangoproject.com/doc/'
)
self.assertEqual(
views.add_domain('example.com', 'https://djangoproject.com/doc/'),
'https://djangoproject.com/doc/'
)
self.assertEqual(
views.add_domain('example.com', 'mailto:uhoh@djangoproject.com'),
'mailto:uhoh@djangoproject.com'
)
self.assertEqual(
views.add_domain('example.com', '//example.com/foo/?arg=value'),
'http://example.com/foo/?arg=value'
)
|
edcast-inc/edx-platform-edcast
|
refs/heads/master
|
lms/djangoapps/psychometrics/models.py
|
150
|
#
# db model for psychometrics data
#
# this data is collected in real time
#
from django.db import models
from courseware.models import StudentModule
class PsychometricData(models.Model):
"""
This data is a table linking student, module, and module performance,
including number of attempts, grade, max grade, and time of checks.
Links to instances of StudentModule, but only those for capa problems.
Note that StudentModule.module_state_key is a :class:`Location` instance.
    checktimes is extracted from tracking logs, or added by the capa module via the psychometrics callback.
"""
studentmodule = models.ForeignKey(StudentModule, db_index=True, unique=True) # contains student, module_state_key, course_id
done = models.BooleanField(default=False)
attempts = models.IntegerField(default=0) # extracted from studentmodule.state
checktimes = models.TextField(null=True, blank=True) # internally stored as list of datetime objects
# keep in mind
# grade = studentmodule.grade
# max_grade = studentmodule.max_grade
# student = studentmodule.student
# course_id = studentmodule.course_id
# location = studentmodule.module_state_key
def __unicode__(self):
sm = self.studentmodule
return "[PsychometricData] %s url=%s, grade=%s, max=%s, attempts=%s, ct=%s" % (sm.student,
sm.module_state_key,
sm.grade,
sm.max_grade,
self.attempts,
self.checktimes)
|
cyrusd/retrying
|
refs/heads/master
|
retrying.py
|
1
|
## Copyright 2013-2014 Ray Holder
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
import random
import six
import sys
import time
import traceback
# sys.maxint / 2, since Python 3.2 doesn't have a sys.maxint...
MAX_WAIT = 1073741823
def retry(*dargs, **dkw):
"""
Decorator function that instantiates the Retrying object
@param *dargs: positional arguments passed to Retrying object
@param **dkw: keyword arguments passed to the Retrying object
"""
# support both @retry and @retry() as valid syntax
if len(dargs) == 1 and callable(dargs[0]):
def wrap_simple(f):
@six.wraps(f)
def wrapped_f(*args, **kw):
return Retrying().call(f, *args, **kw)
return wrapped_f
return wrap_simple(dargs[0])
else:
def wrap(f):
@six.wraps(f)
def wrapped_f(*args, **kw):
return Retrying(*dargs, **dkw).call(f, *args, **kw)
return wrapped_f
return wrap
class Retrying(object):
def __init__(self,
stop=None, wait=None,
stop_max_attempt_number=None,
stop_max_delay=None,
wait_fixed=None,
wait_random_min=None, wait_random_max=None,
wait_incrementing_start=None, wait_incrementing_increment=None,
wait_exponential_multiplier=None, wait_exponential_max=None,
retry_on_exception=None,
retry_on_result=None,
wrap_exception=False,
stop_func=None,
wait_func=None,
wait_jitter_max=None,
before_attempts=None,
after_attempts=None):
self._stop_max_attempt_number = 5 if stop_max_attempt_number is None else stop_max_attempt_number
self._stop_max_delay = 100 if stop_max_delay is None else stop_max_delay
self._wait_fixed = 1000 if wait_fixed is None else wait_fixed
self._wait_random_min = 0 if wait_random_min is None else wait_random_min
self._wait_random_max = 1000 if wait_random_max is None else wait_random_max
self._wait_incrementing_start = 0 if wait_incrementing_start is None else wait_incrementing_start
self._wait_incrementing_increment = 100 if wait_incrementing_increment is None else wait_incrementing_increment
self._wait_exponential_multiplier = 1 if wait_exponential_multiplier is None else wait_exponential_multiplier
self._wait_exponential_max = MAX_WAIT if wait_exponential_max is None else wait_exponential_max
self._wait_jitter_max = 0 if wait_jitter_max is None else wait_jitter_max
self._before_attempts = before_attempts
self._after_attempts = after_attempts
# TODO add chaining of stop behaviors
# stop behavior
stop_funcs = []
if stop_max_attempt_number is not None:
stop_funcs.append(self.stop_after_attempt)
if stop_max_delay is not None:
stop_funcs.append(self.stop_after_delay)
if stop_func is not None:
self.stop = stop_func
elif stop is None:
self.stop = lambda attempts, delay: any(f(attempts, delay) for f in stop_funcs)
else:
self.stop = getattr(self, stop)
# TODO add chaining of wait behaviors
# wait behavior
wait_funcs = [lambda *args, **kwargs: 0]
if wait_fixed is not None:
wait_funcs.append(self.fixed_sleep)
if wait_random_min is not None or wait_random_max is not None:
wait_funcs.append(self.random_sleep)
if wait_incrementing_start is not None or wait_incrementing_increment is not None:
wait_funcs.append(self.incrementing_sleep)
if wait_exponential_multiplier is not None or wait_exponential_max is not None:
wait_funcs.append(self.exponential_sleep)
if wait_func is not None:
self.wait = wait_func
elif wait is None:
self.wait = lambda attempts, delay: max(f(attempts, delay) for f in wait_funcs)
else:
self.wait = getattr(self, wait)
# retry on exception filter
if retry_on_exception is None:
self._retry_on_exception = self.always_reject
else:
self._retry_on_exception = retry_on_exception
# TODO simplify retrying by Exception types
# retry on result filter
if retry_on_result is None:
self._retry_on_result = self.never_reject
else:
self._retry_on_result = retry_on_result
self._wrap_exception = wrap_exception
def stop_after_attempt(self, previous_attempt_number, delay_since_first_attempt_ms):
"""Stop after the previous attempt >= stop_max_attempt_number."""
return previous_attempt_number >= self._stop_max_attempt_number
def stop_after_delay(self, previous_attempt_number, delay_since_first_attempt_ms):
"""Stop after the time from the first attempt >= stop_max_delay."""
return delay_since_first_attempt_ms >= self._stop_max_delay
def no_sleep(self, previous_attempt_number, delay_since_first_attempt_ms):
"""Don't sleep at all before retrying."""
return 0
def fixed_sleep(self, previous_attempt_number, delay_since_first_attempt_ms):
"""Sleep a fixed amount of time between each retry."""
return self._wait_fixed
def random_sleep(self, previous_attempt_number, delay_since_first_attempt_ms):
"""Sleep a random amount of time between wait_random_min and wait_random_max"""
return random.randint(self._wait_random_min, self._wait_random_max)
def incrementing_sleep(self, previous_attempt_number, delay_since_first_attempt_ms):
"""
Sleep an incremental amount of time after each attempt, starting at
wait_incrementing_start and incrementing by wait_incrementing_increment
"""
result = self._wait_incrementing_start + (self._wait_incrementing_increment * (previous_attempt_number - 1))
if result < 0:
result = 0
return result
def exponential_sleep(self, previous_attempt_number, delay_since_first_attempt_ms):
exp = 2 ** previous_attempt_number
result = self._wait_exponential_multiplier * exp
if result > self._wait_exponential_max:
result = self._wait_exponential_max
if result < 0:
result = 0
return result
def never_reject(self, result):
return False
def always_reject(self, result):
return True
def should_reject(self, attempt):
reject = False
if attempt.has_exception:
reject |= self._retry_on_exception(attempt.value[1])
else:
reject |= self._retry_on_result(attempt.value)
return reject
def call(self, fn, *args, **kwargs):
start_time = int(round(time.time() * 1000))
attempt_number = 1
while True:
if self._before_attempts:
self._before_attempts(attempt_number)
try:
attempt = Attempt(fn(*args, **kwargs), attempt_number, False)
except:
tb = sys.exc_info()
attempt = Attempt(tb, attempt_number, True)
if not self.should_reject(attempt):
return attempt.get(self._wrap_exception)
if self._after_attempts:
self._after_attempts(attempt_number)
delay_since_first_attempt_ms = int(round(time.time() * 1000)) - start_time
if self.stop(attempt_number, delay_since_first_attempt_ms):
if not self._wrap_exception and attempt.has_exception:
# get() on an attempt with an exception should cause it to be raised, but raise just in case
raise attempt.get()
else:
raise RetryError(attempt)
else:
sleep = self.wait(attempt_number, delay_since_first_attempt_ms)
if self._wait_jitter_max:
jitter = random.random() * self._wait_jitter_max
sleep = sleep + max(0, jitter)
time.sleep(sleep / 1000.0)
attempt_number += 1
class Attempt(object):
"""
An Attempt encapsulates a call to a target function that may end as a
normal return value from the function or an Exception depending on what
occurred during the execution.
"""
def __init__(self, value, attempt_number, has_exception):
self.value = value
self.attempt_number = attempt_number
self.has_exception = has_exception
def get(self, wrap_exception=False):
"""
Return the return value of this Attempt instance or raise an Exception.
        If wrap_exception is true, this Attempt is wrapped inside a
        RetryError before being raised.
"""
if self.has_exception:
if wrap_exception:
raise RetryError(self)
else:
six.reraise(self.value[0], self.value[1], self.value[2])
else:
return self.value
def __repr__(self):
if self.has_exception:
return "Attempts: {0}, Error:\n{1}".format(self.attempt_number, "".join(traceback.format_tb(self.value[2])))
else:
return "Attempts: {0}, Value: {1}".format(self.attempt_number, self.value)
class RetryError(Exception):
"""
A RetryError encapsulates the last Attempt instance right before giving up.
"""
def __init__(self, last_attempt):
self.last_attempt = last_attempt
def __str__(self):
return "RetryError[{0}]".format(self.last_attempt)
|
orneryhippo/saturdays
|
refs/heads/master
|
rabbit/receive.py
|
1
|
#!/usr/bin/env python
import pika
mq="ec2-52-11-222-159.us-west-2.compute.amazonaws.com"
connection = pika.BlockingConnection(pika.ConnectionParameters(
host=mq))
channel = connection.channel()
channel.queue_declare(queue='hello')
def callback(ch, method, properties, body):
print(" [x] Received %r" % body)
channel.basic_consume(callback,
queue='hello',
no_ack=True)
print(' [*] Waiting for messages. To exit press CTRL+C')
channel.start_consuming()
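# A matching publisher for the queue above would look like this (a
# sketch reusing the same BlockingConnection setup):
#
#   channel.basic_publish(exchange='',
#                         routing_key='hello',
#                         body='Hello World!')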
|
noironetworks/heat
|
refs/heads/master
|
heat/tests/test_exception.py
|
1
|
#
# Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import fixtures
import mock
import six
from heat.common import exception
from heat.common.i18n import _
from heat.tests import common
class TestException(exception.HeatException):
msg_fmt = _("Testing message %(text)s")
class TestHeatException(common.HeatTestCase):
def test_fatal_exception_error(self):
self.useFixture(fixtures.MonkeyPatch(
'heat.common.exception._FATAL_EXCEPTION_FORMAT_ERRORS',
True))
self.assertRaises(KeyError, TestException)
def test_format_string_error_message(self):
message = "This format %(message)s should work"
err = exception.Error(message)
self.assertEqual(message, six.text_type(err))
class TestStackValidationFailed(common.HeatTestCase):
scenarios = [
('test_error_as_exception', dict(
kwargs=dict(
error=exception.StackValidationFailed(
error='Error',
path=['some', 'path'],
message='Some message')),
expected='Error: some.path: Some message',
called_error='Error',
called_path=['some', 'path'],
called_msg='Some message'
)),
('test_full_exception', dict(
kwargs=dict(
error='Error',
path=['some', 'path'],
message='Some message'),
expected='Error: some.path: Some message',
called_error='Error',
called_path=['some', 'path'],
called_msg='Some message'
)),
('test_no_error_exception', dict(
kwargs=dict(
path=['some', 'path'],
message='Chain letter'),
expected='some.path: Chain letter',
called_error='',
called_path=['some', 'path'],
called_msg='Chain letter'
)),
('test_no_path_exception', dict(
kwargs=dict(
error='Error',
message='Just no.'),
expected='Error: Just no.',
called_error='Error',
called_path=[],
called_msg='Just no.'
)),
('test_no_msg_exception', dict(
kwargs=dict(
error='Error',
path=['we', 'lost', 'our', 'message']),
expected='Error: we.lost.our.message: ',
called_error='Error',
called_path=['we', 'lost', 'our', 'message'],
called_msg=''
)),
('test_old_format_exception', dict(
kwargs=dict(
message='Wow. I think I am old error message format.'
),
expected='Wow. I think I am old error message format.',
called_error='',
called_path=[],
called_msg='Wow. I think I am old error message format.'
)),
('test_int_path_item_exception', dict(
kwargs=dict(
path=['null', 0]
),
expected='null[0]: ',
called_error='',
called_path=['null', 0],
called_msg=''
)),
('test_digit_path_item_exception', dict(
kwargs=dict(
path=['null', '0']
),
expected='null[0]: ',
called_error='',
called_path=['null', '0'],
called_msg=''
)),
('test_string_path_exception', dict(
kwargs=dict(
path='null[0].not_null'
),
expected='null[0].not_null: ',
called_error='',
called_path=['null[0].not_null'],
called_msg=''
))
]
def test_exception(self):
try:
raise exception.StackValidationFailed(**self.kwargs)
except exception.StackValidationFailed as ex:
self.assertIn(self.expected, six.text_type(ex))
self.assertIn(self.called_error, ex.error)
self.assertEqual(self.called_path, ex.path)
self.assertEqual(self.called_msg, ex.error_message)
class TestResourceFailure(common.HeatTestCase):
def test_status_reason_resource(self):
reason = ('Resource CREATE failed: ValueError: resources.oops: '
'Test Resource failed oops')
exc = exception.ResourceFailure(reason, None, action='CREATE')
self.assertEqual('ValueError', exc.error)
self.assertEqual(['resources', 'oops'], exc.path)
self.assertEqual('Test Resource failed oops', exc.error_message)
def test_status_reason_general(self):
reason = ('something strange happened')
exc = exception.ResourceFailure(reason, None, action='CREATE')
self.assertEqual('', exc.error)
self.assertEqual([], exc.path)
self.assertEqual('something strange happened', exc.error_message)
def test_status_reason_general_res(self):
res = mock.Mock()
res.name = 'fred'
res.stack.t.get_section_name.return_value = 'Resources'
reason = ('something strange happened')
exc = exception.ResourceFailure(reason, res, action='CREATE')
self.assertEqual('', exc.error)
self.assertEqual(['Resources', 'fred'], exc.path)
self.assertEqual('something strange happened', exc.error_message)
def test_std_exception(self):
base_exc = ValueError('sorry mom')
exc = exception.ResourceFailure(base_exc, None, action='UPDATE')
self.assertEqual('ValueError', exc.error)
self.assertEqual([], exc.path)
self.assertEqual('sorry mom', exc.error_message)
def test_std_exception_with_resource(self):
base_exc = ValueError('sorry mom')
res = mock.Mock()
res.name = 'fred'
res.stack.t.get_section_name.return_value = 'Resources'
exc = exception.ResourceFailure(base_exc, res, action='UPDATE')
self.assertEqual('ValueError', exc.error)
self.assertEqual(['Resources', 'fred'], exc.path)
self.assertEqual('sorry mom', exc.error_message)
def test_heat_exception(self):
base_exc = ValueError('sorry mom')
heat_exc = exception.ResourceFailure(base_exc, None, action='UPDATE')
exc = exception.ResourceFailure(heat_exc, None, action='UPDATE')
self.assertEqual('ValueError', exc.error)
self.assertEqual([], exc.path)
self.assertEqual('sorry mom', exc.error_message)
def test_nested_exceptions(self):
res = mock.Mock()
res.name = 'frodo'
res.stack.t.get_section_name.return_value = 'Resources'
reason = ('Resource UPDATE failed: ValueError: resources.oops: '
'Test Resource failed oops')
base_exc = exception.ResourceFailure(reason, res, action='UPDATE')
exc = exception.ResourceFailure(base_exc, res, action='UPDATE')
self.assertEqual(['Resources', 'frodo', 'resources', 'oops'], exc.path)
self.assertEqual('ValueError', exc.error)
self.assertEqual('Test Resource failed oops', exc.error_message)
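    # Taken together, these tests describe ResourceFailure's status-reason
    # parsing contract: a reason of the form
    #   "Resource <ACTION> failed: <ErrorType>: <dotted.path>: <message>"
    # yields error=<ErrorType>, path (the dotted path split into a list,
    # prefixed with the resource's section and name when a resource is
    # supplied), and error_message=<message>; any other reason string
    # falls back to error='' and the whole string as the message, with
    # path reduced to just the resource prefix, or [] when no resource
    # is given.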
|
TangHao1987/intellij-community
|
refs/heads/master
|
python/lib/Lib/site-packages/django/views/decorators/vary.py
|
307
|
try:
from functools import wraps
except ImportError:
from django.utils.functional import wraps # Python 2.4 fallback.
from django.utils.cache import patch_vary_headers
from django.utils.decorators import available_attrs
def vary_on_headers(*headers):
"""
A view decorator that adds the specified headers to the Vary header of the
response. Usage:
@vary_on_headers('Cookie', 'Accept-language')
def index(request):
...
Note that the header names are not case-sensitive.
"""
def decorator(func):
def inner_func(*args, **kwargs):
response = func(*args, **kwargs)
patch_vary_headers(response, headers)
return response
return wraps(func, assigned=available_attrs(func))(inner_func)
return decorator
def vary_on_cookie(func):
"""
A view decorator that adds "Cookie" to the Vary header of a response. This
    indicates that a page's contents depend on cookies. Usage:
@vary_on_cookie
def index(request):
...
"""
def inner_func(*args, **kwargs):
response = func(*args, **kwargs)
patch_vary_headers(response, ('Cookie',))
return response
return wraps(func, assigned=available_attrs(func))(inner_func)
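# Stacking both decorators works; patch_vary_headers() merges new header
# names into any existing Vary value, so a view like this sketch ends up
# with "Vary: Cookie, Accept-language" on its responses:
#
#   @vary_on_cookie
#   @vary_on_headers('Accept-language')
#   def index(request):
#       ...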
|