repo_name
stringlengths 5
100
| ref
stringlengths 12
67
| path
stringlengths 4
244
| copies
stringlengths 1
8
| content
stringlengths 0
1.05M
⌀ |
|---|---|---|---|---|
bohdan-shramko/learning-python
|
refs/heads/master
|
source/sublime-packages/Packages/PyV8/win64-p3/PyV8.py
|
22
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys, os, re
import logging
import collections

# True when running on Python 3.x; selects compatible stdlib names below.
is_py3k = sys.version_info[0] > 2

if is_py3k:
    import _thread as thread
    from io import StringIO

    str = str          # no-op alias left over from the 2to3 conversion
    raw_input = input
else:
    # Python 2 names (the 2to3 conversion had wrongly left ``import _thread``
    # and two identical ``from io import StringIO`` fallbacks here).
    import thread

    try:
        # Prefer the fast C implementation when it is available.
        from cStringIO import StringIO
    except ImportError:
        from StringIO import StringIO

try:
    import json
except ImportError:
    import simplejson as json

import _PyV8
# Package metadata and the public API exported via ``from PyV8 import *``.
__author__ = 'Flier Lu <flier.lu@gmail.com>'
__version__ = '1.0'

__all__ = ["ReadOnly", "DontEnum", "DontDelete", "Internal",
           "JSError", "JSObject", "JSArray", "JSFunction",
           "JSClass", "JSEngine", "JSContext",
           "JSObjectSpace", "JSAllocationAction",
           "JSStackTrace", "JSStackFrame", "profiler",
           "JSExtension", "JSLocker", "JSUnlocker", "AST"]
class JSAttribute(object):
    """Decorator factory that tags a callable with a ``__<name>__`` flag.

    The flag is later inspected when the callable is exposed to
    JavaScript to decide its property attributes.
    """

    def __init__(self, name):
        self.name = name

    def __call__(self, func):
        marker = "__%s__" % self.name
        setattr(func, marker, True)

        return func
# Canonical decorators mirroring V8's property attribute flags.
ReadOnly = JSAttribute(name='readonly')
DontEnum = JSAttribute(name='dontenum')
DontDelete = JSAttribute(name='dontdel')
Internal = JSAttribute(name='internal')
class JSError(Exception):
    """Python-side wrapper for a JavaScript exception.

    Attribute access is transparently delegated to the wrapped ``_PyV8``
    error object, so properties such as ``name``, ``message`` or
    ``stackTrace`` come straight from V8.
    """

    def __init__(self, impl):
        Exception.__init__(self)

        self._impl = impl

    def __str__(self):
        return str(self._impl)

    def __unicode__(self, *args, **kwargs):
        return str(self._impl)

    def __getattribute__(self, attr):
        # Prefer the wrapped V8 error's attribute; fall back to normal
        # Python lookup only when V8 has no such property.
        impl = super(JSError, self).__getattribute__("_impl")

        try:
            return getattr(impl, attr)
        except AttributeError:
            return super(JSError, self).__getattribute__(attr)

    # Frame formats emitted by V8 stack traces, tried in this order:
    #   "  at [new ]func (file:row:col)", "  at [new ]func (file)", "  at file:row:col"
    RE_FRAME = re.compile(r"\s+at\s(?:new\s)?(?P<func>.+)\s\((?P<file>[^:]+):?(?P<row>\d+)?:?(?P<col>\d+)?\)")
    RE_FUNC = re.compile(r"\s+at\s(?:new\s)?(?P<func>.+)\s\((?P<file>[^\)]+)\)")
    RE_FILE = re.compile(r"\s+at\s(?P<file>[^:]+):?(?P<row>\d+)?:?(?P<col>\d+)?")

    @staticmethod
    def parse_stack(value):
        """Parse a V8 ``stack`` string into (func, file, row, col) tuples."""
        stack = []

        def int_or_nul(value):
            # Row/column groups are optional; missing ones become None.
            return int(value) if value else None

        # The first line is the error message itself; frames follow.
        for line in value.split('\n')[1:]:
            m = JSError.RE_FRAME.match(line)

            if m:
                stack.append((m.group('func'), m.group('file'), int_or_nul(m.group('row')), int_or_nul(m.group('col'))))
                continue

            m = JSError.RE_FUNC.match(line)

            if m:
                stack.append((m.group('func'), m.group('file'), None, None))
                continue

            m = JSError.RE_FILE.match(line)

            if m:
                stack.append((None, m.group('file'), int_or_nul(m.group('row')), int_or_nul(m.group('col'))))
                continue

            # Only fully blank lines are allowed to fall through unmatched.
            assert line

        return stack

    @property
    def frames(self):
        """Parsed stack frames of the wrapped JavaScript error."""
        return self.parse_stack(self.stackTrace)
# Tell the extension which Python class wraps JS errors, then re-export
# the native wrapper types under their public names.
_PyV8._JSError._jsclass = JSError

JSObject = _PyV8.JSObject
JSArray = _PyV8.JSArray
JSFunction = _PyV8.JSFunction
# contribute by e.generalov

# Any character outside printable ASCII must be \uXXXX-escaped for JS source.
JS_ESCAPABLE = re.compile(r'([^\x00-\x7f])')
# High bytes indicating non-ASCII content (kept for API compatibility).
HAS_UTF8 = re.compile(r'[\x80-\xff]')

def _js_escape_unicode_re_callack(match):
    """Return the ``\\uXXXX`` escape (or surrogate pair) for one character."""
    n = ord(match.group(0))

    if n < 0x10000:
        return '\\u%04x' % (n,)
    else:
        # Supplementary-plane character: encode as a UTF-16 surrogate pair.
        n -= 0x10000
        s1 = 0xd800 | ((n >> 10) & 0x3ff)
        s2 = 0xdc00 | (n & 0x3ff)
        return '\\u%04x\\u%04x' % (s1, s2)

def js_escape_unicode(text):
    """Return an ASCII-only representation of a JavaScript string"""
    # The 2to3-converted original called ``text.decode('UTF-8')`` on a
    # Python 3 ``str``, which raises AttributeError; byte strings are the
    # only input that needs decoding (assumed UTF-8 encoded).
    if isinstance(text, bytes):
        text = text.decode('UTF-8')

    return str(JS_ESCAPABLE.sub(_js_escape_unicode_re_callack, text))
class JSExtension(_PyV8.JSExtension):
    """JS extension whose name and source are escaped to ASCII first."""

    def __init__(self, name, source, callback=None, dependencies=[], register=True):
        # NOTE(review): the mutable default ``dependencies=[]`` is kept for
        # interface compatibility; it is passed through, never mutated here.
        _PyV8.JSExtension.__init__(self, js_escape_unicode(name), js_escape_unicode(source), callback, dependencies, register)
def func_apply(self, thisArg, argArray=[]):
    """Emulate JavaScript's ``Function.prototype.apply``.

    ``thisArg`` may be a wrapped JS object (used directly) or any
    JSON-serializable Python value, which is first materialized as a
    JavaScript object in the current context.
    """
    if isinstance(thisArg, JSObject):
        return self.invoke(thisArg, argArray)

    # Round-trip plain Python values through JSON to build a JS ``this``.
    this = JSContext.current.eval("(%s)" % json.dumps(thisArg))

    return self.invoke(this, argArray)

# Expose the helper as ``apply`` on every wrapped JS function.
JSFunction.apply = func_apply
class JSLocker(_PyV8.JSLocker):
    """Context manager around V8's global lock.

    The lock must be acquired while *outside* any entered ``JSContext``
    and released after leaving it; both orderings are enforced at runtime.
    """

    def __enter__(self):
        self.enter()

        if JSContext.entered:
            # Undo the acquisition before reporting the ordering violation.
            self.leave()
            raise RuntimeError("Lock should be acquired before enter the context")

        return self

    def __exit__(self, exc_type, exc_value, traceback):
        if JSContext.entered:
            # Release once, then report the ordering violation (the raise
            # skips the second ``leave`` below).
            self.leave()
            raise RuntimeError("Lock should be released after leave the context")

        self.leave()

    # Truthiness reflects whether the lock is currently held.
    if is_py3k:
        def __bool__(self):
            return self.entered()
    else:
        def __nonzero__(self):
            return self.entered()
class JSUnlocker(_PyV8.JSUnlocker):
    """Context manager that temporarily releases the V8 global lock."""

    def __enter__(self):
        self.enter()

        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self.leave()

    def __bool__(self):
        # Truthiness reflects whether the unlocker is currently active.
        return self.entered()

    if not is_py3k:
        # Python 2 spells the truthiness hook ``__nonzero__``.
        __nonzero__ = __bool__
class JSClass(object):
    """Mixin giving Python objects the standard JavaScript ``Object`` API.

    Provides ``toString``/``valueOf``/``hasOwnProperty`` etc., plus the
    SpiderMonkey-style ``__defineGetter__``/``__defineSetter__`` and
    ``watch``/``unwatch`` extensions.
    """

    # name -> (getter, setter) pairs installed via __defineGetter__/Setter__.
    __properties__ = {}
    # name -> handler installed via watch().
    __watchpoints__ = {}

    def __getattr__(self, name):
        if name == 'constructor':
            return JSClassConstructor(self.__class__)

        if name == 'prototype':
            return JSClassPrototype(self.__class__)

        prop = self.__dict__.setdefault('__properties__', {}).get(name, None)

        # ``callable`` replaces ``isinstance(..., collections.Callable)``:
        # the ``collections.Callable`` alias was removed in Python 3.10.
        if prop and callable(prop[0]):
            return prop[0]()

        raise AttributeError(name)

    def __setattr__(self, name, value):
        prop = self.__dict__.setdefault('__properties__', {}).get(name, None)

        if prop and callable(prop[1]):
            return prop[1](value)

        return object.__setattr__(self, name, value)

    def toString(self):
        "Returns a string representation of an object."
        return "[object %s]" % self.__class__.__name__

    def toLocaleString(self):
        "Returns a value as a string value appropriate to the host environment's current locale."
        return self.toString()

    def valueOf(self):
        "Returns the primitive value of the specified object."
        return self

    def hasOwnProperty(self, name):
        "Returns a Boolean value indicating whether an object has a property with the specified name."
        return hasattr(self, name)

    def isPrototypeOf(self, obj):
        "Returns a Boolean value indicating whether an object exists in the prototype chain of another object."
        raise NotImplementedError()

    def __defineGetter__(self, name, getter):
        "Binds an object's property to a function to be called when that property is looked up."
        self.__properties__[name] = (getter, self.__lookupSetter__(name))

    def __lookupGetter__(self, name):
        "Return the function bound as a getter to the specified property."
        return self.__properties__.get(name, (None, None))[0]

    def __defineSetter__(self, name, setter):
        "Binds an object's property to a function to be called when an attempt is made to set that property."
        self.__properties__[name] = (self.__lookupGetter__(name), setter)

    def __lookupSetter__(self, name):
        "Return the function bound as a setter to the specified property."
        return self.__properties__.get(name, (None, None))[1]

    def watch(self, prop, handler):
        "Watches for a property to be assigned a value and runs a function when that occurs."
        self.__watchpoints__[prop] = handler

    def unwatch(self, prop):
        "Removes a watchpoint set with the watch method."
        del self.__watchpoints__[prop]
class JSClassConstructor(JSClass):
    """Stand-in for the JavaScript ``constructor`` property of a JSClass."""

    def __init__(self, cls):
        self.cls = cls

    @property
    def name(self):
        """Name of the wrapped Python class."""
        return self.cls.__name__

    def toString(self):
        template = "function %s() {\n [native code]\n}"

        return template % self.name

    def __call__(self, *args, **kwds):
        # Calling the constructor instantiates the wrapped class.
        return self.cls(*args, **kwds)
class JSClassPrototype(JSClass):
    """Stand-in for the JavaScript ``prototype`` property of a JSClass."""

    def __init__(self, cls):
        self.cls = cls

    @property
    def constructor(self):
        return JSClassConstructor(self.cls)

    @property
    def name(self):
        # The prototype reports the name of the class it belongs to.
        return self.cls.__name__
class JSDebugProtocol(object):
    """
    Support the V8 debugger JSON based protocol.

    <http://code.google.com/p/v8/wiki/DebuggerProtocol>
    """
    class Packet(object):
        """One protocol message; accepts raw JSON text or a parsed mapping."""

        REQUEST = 'request'
        RESPONSE = 'response'
        EVENT = 'event'

        def __init__(self, payload):
            # The original ``type(payload) in [str, str]`` was a 2to3
            # artifact of ``[str, unicode]``; an isinstance check covers both.
            self.data = json.loads(payload) if isinstance(payload, str) else payload

        @property
        def seq(self):
            """Sequence number of the packet."""
            return self.data['seq']

        @property
        def type(self):
            """Packet kind: 'request', 'response' or 'event'."""
            return self.data['type']

    class Request(Packet):
        """Command request sent to the debugger."""

        @property
        def cmd(self):
            return self.data['command']

        @property
        def args(self):
            return self.data['args']

    class Response(Packet):
        """Debugger reply to a previously issued request."""

        @property
        def request_seq(self):
            return self.data['request_seq']

        @property
        def cmd(self):
            return self.data['command']

        @property
        def body(self):
            return self.data['body']

        @property
        def running(self):
            return self.data['running']

        @property
        def success(self):
            return self.data['success']

        @property
        def message(self):
            return self.data['message']

    class Event(Packet):
        """Asynchronous event pushed by the debugger."""

        @property
        def event(self):
            return self.data['event']

        @property
        def body(self):
            return self.data['body']

    def __init__(self):
        self.seq = 0

    def nextSeq(self):
        """Return the next request sequence number (monotonically increasing)."""
        seq = self.seq
        self.seq += 1

        return seq

    def parsePacket(self, payload):
        """Decode *payload* into an Event or Response packet by its type."""
        obj = json.loads(payload)

        return JSDebugProtocol.Event(obj) if obj['type'] == 'event' else JSDebugProtocol.Response(obj)
class JSDebugEvent(_PyV8.JSDebugEvent):
    """Wrappers around V8 debug events, stack frames and execution state."""

    class FrameData(object):
        """Iterable view over a frame's named values (arguments or locals).

        ``count``, ``name`` and ``value`` are accessors supplied by the
        owning Frame wrapper.
        """

        def __init__(self, frame, count, name, value):
            self.frame = frame
            self.count = count
            self.name = name
            self.value = value

        def __len__(self):
            return self.count(self.frame)

        def __iter__(self):
            for i in range(self.count(self.frame)):
                yield (self.name(self.frame, i), self.value(self.frame, i))

    class Frame(object):
        """Wrapper around a single V8 stack frame."""

        def __init__(self, frame):
            self.frame = frame

        @property
        def index(self):
            return int(self.frame.index())

        @property
        def function(self):
            return self.frame.func()

        @property
        def receiver(self):
            return self.frame.receiver()

        @property
        def isConstructCall(self):
            return bool(self.frame.isConstructCall())

        @property
        def isDebuggerFrame(self):
            return bool(self.frame.isDebuggerFrame())

        @property
        def argumentCount(self):
            return int(self.frame.argumentCount())

        def argumentName(self, idx):
            return str(self.frame.argumentName(idx))

        def argumentValue(self, idx):
            return self.frame.argumentValue(idx)

        @property
        def arguments(self):
            # NOTE(review): ``self.argumentCount`` is a property, so its
            # *value* (an int) is passed where FrameData expects a callable
            # ``count`` accessor — confirm against upstream PyV8.
            return JSDebugEvent.FrameData(self, self.argumentCount, self.argumentName, self.argumentValue)

        def localCount(self, idx):
            # NOTE(review): ``idx`` is unused; the parameter appears to exist
            # only for signature parity with the other accessors.
            return int(self.frame.localCount())

        def localName(self, idx):
            return str(self.frame.localName(idx))

        def localValue(self, idx):
            return self.frame.localValue(idx)

        @property
        def locals(self):
            return JSDebugEvent.FrameData(self, self.localCount, self.localName, self.localValue)

        @property
        def sourcePosition(self):
            return self.frame.sourcePosition()

        @property
        def sourceLine(self):
            return int(self.frame.sourceLine())

        @property
        def sourceColumn(self):
            return int(self.frame.sourceColumn())

        @property
        def sourceLineText(self):
            return str(self.frame.sourceLineText())

        def evaluate(self, source, disable_break = True):
            return self.frame.evaluate(source, disable_break)

        @property
        def invocationText(self):
            return str(self.frame.invocationText())

        @property
        def sourceAndPositionText(self):
            return str(self.frame.sourceAndPositionText())

        @property
        def localsText(self):
            return str(self.frame.localsText())

        def __str__(self):
            return str(self.frame.toText())

    class Frames(object):
        """Iterable over all frames of an execution state."""

        def __init__(self, state):
            self.state = state

        def __len__(self):
            return self.state.frameCount

        def __iter__(self):
            for i in range(self.state.frameCount):
                yield self.state.frame(i)

    class State(object):
        """Wrapper around the debugger execution state."""

        def __init__(self, state):
            self.state = state

        @property
        def frameCount(self):
            return int(self.state.frameCount())

        def frame(self, idx = None):
            return JSDebugEvent.Frame(self.state.frame(idx))

        @property
        def selectedFrame(self):
            return int(self.state.selectedFrame())

        @property
        def frames(self):
            return JSDebugEvent.Frames(self)

        def __repr__(self):
            # Concatenate the textual form of every frame.
            s = StringIO()

            try:
                for frame in self.frames:
                    s.write(str(frame))

                return s.getvalue()
            finally:
                s.close()

    class DebugEvent(object):
        """Base class for all wrapped debug events."""
        pass

    class StateEvent(DebugEvent):
        # Lazily-built State wrapper for the event's execution state.
        __state = None

        @property
        def state(self):
            if not self.__state:
                self.__state = JSDebugEvent.State(self.event.executionState())

            return self.__state

    class BreakEvent(StateEvent):
        type = _PyV8.JSDebugEvent.Break

        def __init__(self, event):
            self.event = event

    class ExceptionEvent(StateEvent):
        type = _PyV8.JSDebugEvent.Exception

        def __init__(self, event):
            self.event = event

    class NewFunctionEvent(DebugEvent):
        type = _PyV8.JSDebugEvent.NewFunction

        def __init__(self, event):
            self.event = event

    class Script(object):
        """Wrapper exposing a compiled script's metadata."""

        def __init__(self, script):
            self.script = script

        @property
        def source(self):
            return self.script.source()

        @property
        def id(self):
            return self.script.id()

        @property
        def name(self):
            return self.script.name()

        @property
        def lineOffset(self):
            return self.script.lineOffset()

        @property
        def lineCount(self):
            return self.script.lineCount()

        @property
        def columnOffset(self):
            return self.script.columnOffset()

        @property
        def type(self):
            return self.script.type()

        def __repr__(self):
            return "<%s script %s @ %d:%d> : '%s'" % (self.type, self.name,
                                                      self.lineOffset, self.columnOffset,
                                                      self.source)

    class CompileEvent(StateEvent):
        def __init__(self, event):
            self.event = event

        @property
        def script(self):
            # Cache the Script wrapper on first access.
            if not hasattr(self, "_script"):
                setattr(self, "_script", JSDebugEvent.Script(self.event.script()))

            return self._script

        def __str__(self):
            return str(self.script)

    class BeforeCompileEvent(CompileEvent):
        type = _PyV8.JSDebugEvent.BeforeCompile

        def __init__(self, event):
            JSDebugEvent.CompileEvent.__init__(self, event)

        def __repr__(self):
            return "before compile script: %s\n%s" % (repr(self.script), repr(self.state))

    class AfterCompileEvent(CompileEvent):
        type = _PyV8.JSDebugEvent.AfterCompile

        def __init__(self, event):
            JSDebugEvent.CompileEvent.__init__(self, event)

        def __repr__(self):
            return "after compile script: %s\n%s" % (repr(self.script), repr(self.state))

    # User-assignable callbacks; JSDebugger.onDebugEvent dispatches into these.
    onMessage = None
    onBreak = None
    onException = None
    onNewFunction = None
    onBeforeCompile = None
    onAfterCompile = None
class JSDebugger(JSDebugProtocol, JSDebugEvent):
    """High-level debugger facade combining the JSON protocol and events.

    Use as a context manager: entering enables debugging, exiting
    disables it.
    """

    def __init__(self):
        JSDebugProtocol.__init__(self)
        JSDebugEvent.__init__(self)

    def __enter__(self):
        self.enabled = True

        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self.enabled = False

    @property
    def context(self):
        """Global debugger context (created lazily on first access)."""
        if not hasattr(self, '_context'):
            self._context = JSContext(ctxt=_PyV8.debug().context)

        return self._context

    def isEnabled(self):
        return _PyV8.debug().enabled

    def setEnabled(self, enable):
        dbg = _PyV8.debug()

        # Install or remove our callbacks before flipping the native flag.
        if enable:
            dbg.onDebugEvent = self.onDebugEvent
            dbg.onDebugMessage = self.onDebugMessage
            dbg.onDispatchDebugMessages = self.onDispatchDebugMessages
        else:
            dbg.onDebugEvent = None
            dbg.onDebugMessage = None
            dbg.onDispatchDebugMessages = None

        dbg.enabled = enable

    enabled = property(isEnabled, setEnabled)

    def onDebugMessage(self, msg, data):
        if self.onMessage:
            self.onMessage(json.loads(msg))

    def onDebugEvent(self, type, state, evt):
        # Dispatch the raw native event to the matching user callback,
        # wrapped in the corresponding JSDebugEvent.*Event class.
        if type == JSDebugEvent.Break:
            if self.onBreak: self.onBreak(JSDebugEvent.BreakEvent(evt))
        elif type == JSDebugEvent.Exception:
            if self.onException: self.onException(JSDebugEvent.ExceptionEvent(evt))
        elif type == JSDebugEvent.NewFunction:
            if self.onNewFunction: self.onNewFunction(JSDebugEvent.NewFunctionEvent(evt))
        elif type == JSDebugEvent.BeforeCompile:
            if self.onBeforeCompile: self.onBeforeCompile(JSDebugEvent.BeforeCompileEvent(evt))
        elif type == JSDebugEvent.AfterCompile:
            if self.onAfterCompile: self.onAfterCompile(JSDebugEvent.AfterCompileEvent(evt))

    def onDispatchDebugMessages(self):
        return True

    def debugBreak(self):
        _PyV8.debug().debugBreak()

    def debugBreakForCommand(self):
        _PyV8.debug().debugBreakForCommand()

    def cancelDebugBreak(self):
        _PyV8.debug().cancelDebugBreak()

    def processDebugMessages(self):
        _PyV8.debug().processDebugMessages()

    def sendCommand(self, cmd, *args, **kwds):
        """Serialize and send one protocol request; returns the JSON text."""
        request = json.dumps({
            'seq': self.nextSeq(),
            'type': 'request',
            'command': cmd,
            'arguments': kwds
        })

        _PyV8.debug().sendCommand(request)

        return request

    def debugContinue(self, action='next', steps=1):
        # ``steps`` was previously accepted but silently dropped; the V8
        # protocol carries it as ``stepcount``.
        return self.sendCommand('continue', stepaction=action, stepcount=steps)

    def stepNext(self, steps=1):
        """Step to the next statement in the current function."""
        return self.debugContinue(action='next', steps=steps)

    def stepIn(self, steps=1):
        """Step into new functions invoked or the next statement in the current function."""
        return self.debugContinue(action='in', steps=steps)

    def stepOut(self, steps=1):
        """Step out of the current function."""
        return self.debugContinue(action='out', steps=steps)

    def stepMin(self, steps=1):
        """Perform a minimum step in the current function."""
        # Was ``action='out'`` — a copy-paste of stepOut; the protocol's
        # minimum-step action is 'min'.
        return self.debugContinue(action='min', steps=steps)
class JSProfiler(_PyV8.JSProfiler):
    """Access to the V8 profiler log."""

    @property
    def logs(self):
        """Yield profiler log lines, polling ``getLogLines`` until exhausted."""
        pos = 0

        while True:
            size, buf = self.getLogLines(pos)

            # A zero-sized chunk marks the end of the log.
            if size == 0:
                break

            for line in buf.split('\n'):
                yield line

            pos += size

# Module-level singleton profiler instance.
profiler = JSProfiler()

JSObjectSpace = _PyV8.JSObjectSpace
JSAllocationAction = _PyV8.JSAllocationAction
class JSEngine(_PyV8.JSEngine):
    """Context-manager wrapper around the V8 engine."""

    def __init__(self):
        _PyV8.JSEngine.__init__(self)

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        # NOTE(review): ``del self`` only drops the local reference; actual
        # engine teardown relies on the native destructor at GC time.
        del self

# Re-export the native script/stack-trace wrapper types.
JSScript = _PyV8.JSScript

JSStackTrace = _PyV8.JSStackTrace
JSStackTrace.Options = _PyV8.JSStackTraceOptions
JSStackFrame = _PyV8.JSStackFrame
class JSIsolate(_PyV8.JSIsolate):
    """Context-manager wrapper around a V8 isolate."""

    def __enter__(self):
        self.enter()

        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self.leave()

        # NOTE(review): ``del self`` only drops the local reference.
        del self
class JSContext(_PyV8.JSContext):
    """Context-manager wrapper around a V8 context.

    When the V8 global lock is active the constructor acquires a
    ``JSLocker`` and releases it again when the context is exited.
    """

    def __init__(self, obj=None, extensions=None, ctxt=None):
        if JSLocker.active:
            self.lock = JSLocker()
            self.lock.enter()

        if ctxt:
            _PyV8.JSContext.__init__(self, ctxt)
        else:
            _PyV8.JSContext.__init__(self, obj, extensions or [])

    def __enter__(self):
        self.enter()

        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self.leave()

        # Release the locker acquired in __init__, if any.  The original
        # ``hasattr(JSLocker, 'lock')`` tested the *class* instead of this
        # instance, so the lock was never released.
        if getattr(self, 'lock', None):
            self.lock.leave()
            self.lock = None

        # NOTE(review): ``del self`` only drops the local reference.
        del self
# contribute by marc boeker <http://code.google.com/u/marc.boeker/>
def convert(obj):
    """Recursively convert wrapped JS arrays/objects to Python lists/dicts.

    Any other value is returned unchanged.  Exact ``type`` checks appear
    intentional so subclasses keep their wrapper identity.
    """
    if type(obj) == _PyV8.JSArray:
        return [convert(v) for v in obj]

    if type(obj) == _PyV8.JSObject:
        # Enumerate the JS object's own property names (py2 exposed them
        # via __members__, py3 via __dir__).
        return dict([[str(k), convert(obj.__getattr__(str(k)))] for k in (obj.__dir__() if is_py3k else obj.__members__)])

    return obj
class AST:
    """Namespace mapping V8 AST node types to their native wrappers."""

    Scope = _PyV8.AstScope
    VarMode = _PyV8.AstVariableMode
    Var = _PyV8.AstVariable
    Label = _PyV8.AstLabel
    NodeType = _PyV8.AstNodeType
    Node = _PyV8.AstNode
    Statement = _PyV8.AstStatement
    Expression = _PyV8.AstExpression
    Breakable = _PyV8.AstBreakableStatement
    Block = _PyV8.AstBlock
    Declaration = _PyV8.AstDeclaration
    VariableDeclaration = _PyV8.AstVariableDeclaration
    Module = _PyV8.AstModule
    ModuleDeclaration = _PyV8.AstModuleDeclaration
    ModuleLiteral = _PyV8.AstModuleLiteral
    ModuleVariable = _PyV8.AstModuleVariable
    ModulePath = _PyV8.AstModulePath
    Iteration = _PyV8.AstIterationStatement
    DoWhile = _PyV8.AstDoWhileStatement
    While = _PyV8.AstWhileStatement
    For = _PyV8.AstForStatement
    ForIn = _PyV8.AstForInStatement
    ExpressionStatement = _PyV8.AstExpressionStatement
    Continue = _PyV8.AstContinueStatement
    Break = _PyV8.AstBreakStatement
    Return = _PyV8.AstReturnStatement
    With = _PyV8.AstWithStatement
    Case = _PyV8.AstCaseClause
    Switch = _PyV8.AstSwitchStatement
    Try = _PyV8.AstTryStatement
    TryCatch = _PyV8.AstTryCatchStatement
    TryFinally = _PyV8.AstTryFinallyStatement
    Debugger = _PyV8.AstDebuggerStatement
    Empty = _PyV8.AstEmptyStatement
    Literal = _PyV8.AstLiteral
    MaterializedLiteral = _PyV8.AstMaterializedLiteral
    PropertyKind = _PyV8.AstPropertyKind
    ObjectProperty = _PyV8.AstObjectProperty
    Object = _PyV8.AstObjectLiteral
    RegExp = _PyV8.AstRegExpLiteral
    Array = _PyV8.AstArrayLiteral
    VarProxy = _PyV8.AstVariableProxy
    Property = _PyV8.AstProperty
    Call = _PyV8.AstCall
    CallNew = _PyV8.AstCallNew
    CallRuntime = _PyV8.AstCallRuntime
    Op = _PyV8.AstOperation
    UnaryOp = _PyV8.AstUnaryOperation
    BinOp = _PyV8.AstBinaryOperation
    CountOp = _PyV8.AstCountOperation
    CompOp = _PyV8.AstCompareOperation
    Conditional = _PyV8.AstConditional
    Assignment = _PyV8.AstAssignment
    Throw = _PyV8.AstThrow
    Function = _PyV8.AstFunctionLiteral
    SharedFunction = _PyV8.AstSharedFunctionInfoLiteral
    This = _PyV8.AstThisFunction
from datetime import *
import unittest
import traceback

# Helpers that normalize strings across Python 2/3 for the test suite below.
if is_py3k:
    def toNativeString(s):
        # Python 3 native strings are already unicode.
        return s

    def toUnicodeString(s):
        return s
else:
    def toNativeString(s, encoding='utf-8'):
        # NOTE(review): 2to3 rewrote ``unicode`` checks to ``str`` here, so
        # on Python 2 this no longer distinguishes byte strings from unicode
        # as originally intended — confirm against the upstream source.
        return s.encode(encoding) if isinstance(s, str) else s

    def toUnicodeString(s, encoding='utf-8'):
        return s if isinstance(s, str) else str(s, encoding)
class TestContext(unittest.TestCase):
    """Tests for JSContext nesting and security-token isolation."""

    def testMultiNamespace(self):
        self.assertTrue(not bool(JSContext.inContext))
        self.assertTrue(not bool(JSContext.entered))

        class Global(object):
            name = "global"

        g = Global()

        with JSContext(g) as ctxt:
            self.assertTrue(bool(JSContext.inContext))
            self.assertEqual(g.name, str(JSContext.entered.locals.name))
            self.assertEqual(g.name, str(JSContext.current.locals.name))

            class Local(object):
                name = "local"

            l = Local()

            with JSContext(l):
                self.assertTrue(bool(JSContext.inContext))
                self.assertEqual(l.name, str(JSContext.entered.locals.name))
                self.assertEqual(l.name, str(JSContext.current.locals.name))

            # After leaving the inner context the outer globals are current again.
            self.assertTrue(bool(JSContext.inContext))
            self.assertEqual(g.name, str(JSContext.entered.locals.name))
            self.assertEqual(g.name, str(JSContext.current.locals.name))

        self.assertTrue(not bool(JSContext.entered))
        self.assertTrue(not bool(JSContext.inContext))

    # Methods prefixed ``_test`` are intentionally disabled (unittest does
    # not collect them).
    def _testMultiContext(self):
        # Create an environment
        with JSContext() as ctxt0:
            ctxt0.securityToken = "password"

            global0 = ctxt0.locals
            global0.custom = 1234

            self.assertEqual(1234, int(global0.custom))

        # Create an independent environment
        with JSContext() as ctxt1:
            ctxt1.securityToken = ctxt0.securityToken

            global1 = ctxt1.locals
            global1.custom = 1234

            with ctxt0:
                self.assertEqual(1234, int(global0.custom))

            self.assertEqual(1234, int(global1.custom))

        # Now create a new context with the old global
        with JSContext(global1) as ctxt2:
            ctxt2.securityToken = ctxt1.securityToken

            with ctxt1:
                self.assertEqual(1234, int(global1.custom))

    def _testSecurityChecks(self):
        with JSContext() as env1:
            env1.securityToken = "foo"

            # Create a function in env1.
            env1.eval("spy=function(){return spy;}")

            spy = env1.locals.spy

            self.assertTrue(isinstance(spy, _PyV8.JSFunction))

            # Create another function accessing global objects.
            env1.eval("spy2=function(){return 123;}")

            spy2 = env1.locals.spy2

            self.assertTrue(isinstance(spy2, _PyV8.JSFunction))

            # Switch to env2 in the same domain and invoke spy on env2.
            env2 = JSContext()
            env2.securityToken = "foo"

            with env2:
                result = spy.apply(env2.locals)

                self.assertTrue(isinstance(result, _PyV8.JSFunction))

            env2.securityToken = "bar"

            # Call cross_domain_call, it should throw an exception
            with env2:
                self.assertRaises(JSError, spy2.apply, env2.locals)

    def _testCrossDomainDelete(self):
        with JSContext() as env1:
            env2 = JSContext()

            # Set to the same domain.
            env1.securityToken = "foo"
            env2.securityToken = "foo"

            env1.locals.prop = 3

            env2.locals.env1 = env1.locals

            # Change env2 to a different domain and delete env1.prop.
            #env2.securityToken = "bar"

            self.assertEqual(3, int(env1.eval("prop")))

            with env2:
                self.assertEqual(3, int(env2.eval("this.env1.prop")))
                self.assertEqual("false", str(env2.eval("delete env1.prop")))

            # Check that env1.prop still exists.
            self.assertEqual(3, int(env1.locals.prop))
class TestWrapper(unittest.TestCase):
def testObject(self):
with JSContext() as ctxt:
o = ctxt.eval("new Object()")
self.assertTrue(hash(o) > 0)
o1 = o.clone()
self.assertEqual(hash(o1), hash(o))
self.assertTrue(o != o1)
self.assertRaises(UnboundLocalError, o.clone)
def testAutoConverter(self):
with JSContext() as ctxt:
ctxt.eval("""
var_i = 1;
var_f = 1.0;
var_s = "test";
var_b = true;
var_s_obj = new String("test");
var_b_obj = new Boolean(true);
var_f_obj = new Number(1.5);
""")
vars = ctxt.locals
var_i = vars.var_i
self.assertTrue(var_i)
self.assertEqual(1, int(var_i))
var_f = vars.var_f
self.assertTrue(var_f)
self.assertEqual(1.0, float(vars.var_f))
var_s = vars.var_s
self.assertTrue(var_s)
self.assertEqual("test", str(vars.var_s))
var_b = vars.var_b
self.assertTrue(var_b)
self.assertTrue(bool(var_b))
self.assertEqual("test", vars.var_s_obj)
self.assertTrue(vars.var_b_obj)
self.assertEqual(1.5, vars.var_f_obj)
attrs = dir(ctxt.locals)
self.assertTrue(attrs)
self.assertTrue("var_i" in attrs)
self.assertTrue("var_f" in attrs)
self.assertTrue("var_s" in attrs)
self.assertTrue("var_b" in attrs)
self.assertTrue("var_s_obj" in attrs)
self.assertTrue("var_b_obj" in attrs)
self.assertTrue("var_f_obj" in attrs)
def testExactConverter(self):
class MyInteger(int, JSClass):
pass
class MyString(str, JSClass):
pass
class MyUnicode(str, JSClass):
pass
class MyDateTime(time, JSClass):
pass
class Global(JSClass):
var_bool = True
var_int = 1
var_float = 1.0
var_str = 'str'
var_unicode = 'unicode'
var_datetime = datetime.now()
var_date = date.today()
var_time = time()
var_myint = MyInteger()
var_mystr = MyString('mystr')
var_myunicode = MyUnicode('myunicode')
var_mytime = MyDateTime()
with JSContext(Global()) as ctxt:
typename = ctxt.eval("(function (name) { return this[name].constructor.name; })")
typeof = ctxt.eval("(function (name) { return typeof(this[name]); })")
self.assertEqual('Boolean', typename('var_bool'))
self.assertEqual('Number', typename('var_int'))
self.assertEqual('Number', typename('var_float'))
self.assertEqual('String', typename('var_str'))
self.assertEqual('String', typename('var_unicode'))
self.assertEqual('Date', typename('var_datetime'))
self.assertEqual('Date', typename('var_date'))
self.assertEqual('Date', typename('var_time'))
self.assertEqual('MyInteger', typename('var_myint'))
self.assertEqual('MyString', typename('var_mystr'))
self.assertEqual('MyUnicode', typename('var_myunicode'))
self.assertEqual('MyDateTime', typename('var_mytime'))
self.assertEqual('object', typeof('var_myint'))
self.assertEqual('object', typeof('var_mystr'))
self.assertEqual('object', typeof('var_myunicode'))
self.assertEqual('object', typeof('var_mytime'))
def testJavascriptWrapper(self):
with JSContext() as ctxt:
self.assertEqual(type(None), type(ctxt.eval("null")))
self.assertEqual(type(None), type(ctxt.eval("undefined")))
self.assertEqual(bool, type(ctxt.eval("true")))
self.assertEqual(str, type(ctxt.eval("'test'")))
self.assertEqual(int, type(ctxt.eval("123")))
self.assertEqual(float, type(ctxt.eval("3.14")))
self.assertEqual(datetime, type(ctxt.eval("new Date()")))
self.assertEqual(JSArray, type(ctxt.eval("[1, 2, 3]")))
self.assertEqual(JSFunction, type(ctxt.eval("(function() {})")))
self.assertEqual(JSObject, type(ctxt.eval("new Object()")))
def testPythonWrapper(self):
with JSContext() as ctxt:
typeof = ctxt.eval("(function type(value) { return typeof value; })")
protoof = ctxt.eval("(function protoof(value) { return Object.prototype.toString.apply(value); })")
self.assertEqual('[object Null]', protoof(None))
self.assertEqual('boolean', typeof(True))
self.assertEqual('number', typeof(123))
self.assertEqual('number', typeof(3.14))
self.assertEqual('string', typeof('test'))
self.assertEqual('string', typeof('test'))
self.assertEqual('[object Date]', protoof(datetime.now()))
self.assertEqual('[object Date]', protoof(date.today()))
self.assertEqual('[object Date]', protoof(time()))
def test():
pass
self.assertEqual('[object Function]', protoof(abs))
self.assertEqual('[object Function]', protoof(test))
self.assertEqual('[object Function]', protoof(self.testPythonWrapper))
self.assertEqual('[object Function]', protoof(int))
def testFunction(self):
with JSContext() as ctxt:
func = ctxt.eval("""
(function ()
{
function a()
{
return "abc";
}
return a();
})
""")
self.assertEqual("abc", str(func()))
self.assertTrue(func != None)
self.assertFalse(func == None)
func = ctxt.eval("(function test() {})")
self.assertEqual("test", func.name)
self.assertEqual("", func.resname)
self.assertEqual(0, func.linenum)
self.assertEqual(14, func.colnum)
self.assertEqual(0, func.lineoff)
self.assertEqual(0, func.coloff)
#TODO fix me, why the setter doesn't work?
# func.name = "hello"
# it seems __setattr__ was called instead of CJavascriptFunction::SetName
func.setName("hello")
self.assertEqual("hello", func.name)
def testCall(self):
class Hello(object):
def __call__(self, name):
return "hello " + name
class Global(JSClass):
hello = Hello()
with JSContext(Global()) as ctxt:
self.assertEqual("hello flier", ctxt.eval("hello('flier')"))
def testJSFunction(self):
with JSContext() as ctxt:
hello = ctxt.eval("(function (name) { return 'hello ' + name; })")
self.assertTrue(isinstance(hello, _PyV8.JSFunction))
self.assertEqual("hello flier", hello('flier'))
self.assertEqual("hello flier", hello.invoke(['flier']))
obj = ctxt.eval("({ 'name': 'flier', 'hello': function (name) { return 'hello ' + name + ' from ' + this.name; }})")
hello = obj.hello
self.assertTrue(isinstance(hello, JSFunction))
self.assertEqual("hello flier from flier", hello('flier'))
tester = ctxt.eval("({ 'name': 'tester' })")
self.assertEqual("hello flier from tester", hello.invoke(tester, ['flier']))
self.assertEqual("hello flier from json", hello.apply({ 'name': 'json' }, ['flier']))
def testConstructor(self):
with JSContext() as ctx:
ctx.eval("""
var Test = function() {
this.trySomething();
};
Test.prototype.trySomething = function() {
this.name = 'flier';
};
var Test2 = function(first_name, last_name) {
this.name = first_name + ' ' + last_name;
};
""")
self.assertTrue(isinstance(ctx.locals.Test, _PyV8.JSFunction))
test = JSObject.create(ctx.locals.Test)
self.assertTrue(isinstance(ctx.locals.Test, _PyV8.JSObject))
self.assertEqual("flier", test.name);
test2 = JSObject.create(ctx.locals.Test2, ('Flier', 'Lu'))
self.assertEqual("Flier Lu", test2.name);
test3 = JSObject.create(ctx.locals.Test2, ('Flier', 'Lu'), { 'email': 'flier.lu@gmail.com' })
self.assertEqual("flier.lu@gmail.com", test3.email);
def testJSError(self):
with JSContext() as ctxt:
try:
ctxt.eval('throw "test"')
self.fail()
except:
self.assertTrue(JSError, sys.exc_info()[0])
def testErrorInfo(self):
with JSContext() as ctxt:
with JSEngine() as engine:
try:
engine.compile("""
function hello()
{
throw Error("hello world");
}
hello();""", "test", 10, 10).run()
self.fail()
except JSError as e:
self.assertTrue(str(e).startswith('JSError: Error: hello world ( test @ 14 : 34 ) ->'))
self.assertEqual("Error", e.name)
self.assertEqual("hello world", e.message)
self.assertEqual("test", e.scriptName)
self.assertEqual(14, e.lineNum)
self.assertEqual(102, e.startPos)
self.assertEqual(103, e.endPos)
self.assertEqual(34, e.startCol)
self.assertEqual(35, e.endCol)
self.assertEqual('throw Error("hello world");', e.sourceLine.strip())
self.assertEqual('Error: hello world\n' +
' at Error (<anonymous>)\n' +
' at hello (test:14:35)\n' +
' at test:17:25', e.stackTrace)
def testParseStack(self):
self.assertEqual([
('Error', 'unknown source', None, None),
('test', 'native', None, None),
('<anonymous>', 'test0', 3, 5),
('f', 'test1', 2, 19),
('g', 'test2', 1, 15),
(None, 'test3', 1, None),
(None, 'test3', 1, 1),
], JSError.parse_stack("""Error: err
at Error (unknown source)
at test (native)
at new <anonymous> (test0:3:5)
at f (test1:2:19)
at g (test2:1:15)
at test3:1
at test3:1:1"""))
def testStackTrace(self):
class Global(JSClass):
def GetCurrentStackTrace(self, limit):
return JSStackTrace.GetCurrentStackTrace(4, JSStackTrace.Options.Detailed)
with JSContext(Global()) as ctxt:
st = ctxt.eval("""
function a()
{
return GetCurrentStackTrace(10);
}
function b()
{
return eval("a()");
}
function c()
{
return new b();
}
c();""", "test")
self.assertEqual(4, len(st))
self.assertEqual("\tat a (test:4:28)\n\tat (eval)\n\tat b (test:8:28)\n\tat c (test:12:28)\n", str(st))
self.assertEqual("test.a (4:28)\n. (1:1) eval\ntest.b (8:28) constructor\ntest.c (12:28)",
"\n".join(["%s.%s (%d:%d)%s%s" % (
f.scriptName, f.funcName, f.lineNum, f.column,
' eval' if f.isEval else '',
' constructor' if f.isConstructor else '') for f in st]))
def testPythonException(self):
class Global(JSClass):
def raiseException(self):
raise RuntimeError("Hello")
with JSContext(Global()) as ctxt:
r = ctxt.eval("""
msg ="";
try
{
this.raiseException()
}
catch(e)
{
msg += "catch " + e + ";";
}
finally
{
msg += "finally";
}""")
self.assertEqual("catch Error: Hello;finally", str(ctxt.locals.msg))
    def testExceptionMapping(self):
        """Standard Python exceptions map onto the closest JS error types
        (IndexError->RangeError, AttributeError->ReferenceError, ...);
        unknown Python exception types propagate to the caller unchanged."""
        class TestException(Exception):
            pass

        class Global(JSClass):
            def raiseIndexError(self):
                return [1, 2, 3][5]

            def raiseAttributeError(self):
                None.hello()

            def raiseSyntaxError(self):
                eval("???")

            def raiseTypeError(self):
                int(sys)

            def raiseNotImplementedError(self):
                raise NotImplementedError("Not support")

            def raiseExceptions(self):
                raise TestException()

        with JSContext(Global()) as ctxt:
            ctxt.eval("try { this.raiseIndexError(); } catch (e) { msg = e; }")

            # NOTE(review): the exact message texts are CPython-2 specific
            self.assertEqual("RangeError: list index out of range", str(ctxt.locals.msg))

            ctxt.eval("try { this.raiseAttributeError(); } catch (e) { msg = e; }")

            self.assertEqual("ReferenceError: 'NoneType' object has no attribute 'hello'", str(ctxt.locals.msg))

            ctxt.eval("try { this.raiseSyntaxError(); } catch (e) { msg = e; }")

            self.assertEqual("SyntaxError: invalid syntax", str(ctxt.locals.msg))

            ctxt.eval("try { this.raiseTypeError(); } catch (e) { msg = e; }")

            self.assertEqual("TypeError: int() argument must be a string or a number, not 'module'", str(ctxt.locals.msg))

            ctxt.eval("try { this.raiseNotImplementedError(); } catch (e) { msg = e; }")

            self.assertEqual("Error: Not support", str(ctxt.locals.msg))

            # exception types without a JS equivalent propagate as-is
            self.assertRaises(TestException, ctxt.eval, "this.raiseExceptions();")
    def testArray(self):
        """JSArray supports len/in/index/slice access, slice assignment and
        deletion, and Python sequences convert to real JS arrays."""
        with JSContext() as ctxt:
            array = ctxt.eval("""
                var array = new Array();

                for (i=0; i<10; i++)
                {
                    array[i] = 10-i;
                }

                array;
                """)

            self.assertTrue(isinstance(array, _PyV8.JSArray))
            self.assertEqual(10, len(array))

            self.assertTrue(5 in array)
            self.assertFalse(15 in array)

            self.assertEqual(10, len(array))

            for i in range(10):
                self.assertEqual(10-i, array[i])

            array[5] = 0

            self.assertEqual(0, array[5])

            # deleting an element leaves a hole, it does not shrink the array
            del array[5]

            self.assertEqual(None, array[5])

            # array        [10, 9, 8, 7, 6, None, 4, 3, 2, 1]
            # array[4:7]                4^^^^^^^^^7
            # array[-3:-1]                        -3^^^^^^-1
            # array[0:0]   []

            self.assertEqual([6, None, 4], array[4:7])
            self.assertEqual([3, 2], array[-3:-1])
            self.assertEqual([], array[0:0])

            # slice assignment may grow or shrink the array
            array[1:3] = [9, 9, 9]

            self.assertEqual([10, 9, 9, 9, 7, 6, None, 4, 3, 2, 1], list(array))

            array[5:8] = [8, 8]

            self.assertEqual([10, 9, 9, 9, 7, 8, 8, 3, 2, 1], list(array))

            del array[1:4]

            self.assertEqual([10, 7, 8, 8, 3, 2, 1], list(array))

            ctxt.locals.array1 = JSArray(5)
            ctxt.locals.array2 = JSArray([1, 2, 3, 4, 5])

            for i in range(len(ctxt.locals.array2)):
                ctxt.locals.array1[i] = ctxt.locals.array2[i] * 10

            ctxt.eval("""
                var sum = 0;

                for (i=0; i<array1.length; i++)
                    sum += array1[i]

                for (i=0; i<array2.length; i++)
                    sum += array2[i]
                """)

            self.assertEqual(165, ctxt.locals.sum)

            ctxt.locals.array3 = [1, 2, 3, 4, 5]
            self.assertTrue(ctxt.eval('array3[1] === 2'))
            self.assertTrue(ctxt.eval('array3[9] === undefined'))

            # holes at the start / middle / end stringify as empty slots
            args = [
                ["a = Array(7); for(i=0; i<a.length; i++) a[i] = i; a[3] = undefined; a[a.length-1]; a", "0,1,2,,4,5,6", [0, 1, 2, None, 4, 5, 6]],
                ["a = Array(7); for(i=0; i<a.length - 1; i++) a[i] = i; a[a.length-1]; a", "0,1,2,3,4,5,", [0, 1, 2, 3, 4, 5, None]],
                ["a = Array(7); for(i=1; i<a.length; i++) a[i] = i; a[a.length-1]; a", ",1,2,3,4,5,6", [None, 1, 2, 3, 4, 5, 6]]
            ]

            for arg in args:
                array = ctxt.eval(arg[0])

                self.assertEqual(arg[1], str(array))
                self.assertEqual(arg[2], [array[i] for i in range(len(array))])

            self.assertEqual(3, ctxt.eval("(function (arr) { return arr.length; })")(JSArray([1, 2, 3])))
            self.assertEqual(2, ctxt.eval("(function (arr, idx) { return arr[idx]; })")(JSArray([1, 2, 3]), 1))
            self.assertEqual('[object Array]', ctxt.eval("(function (arr) { return Object.prototype.toString.call(arr); })")(JSArray([1, 2, 3])))
            self.assertEqual('[object Array]', ctxt.eval("(function (arr) { return Object.prototype.toString.call(arr); })")(JSArray((1, 2, 3))))
            self.assertEqual('[object Array]', ctxt.eval("(function (arr) { return Object.prototype.toString.call(arr); })")(JSArray(list(range(3)))))

            [x for x in JSArray([1,2,3])]    # JSArray must also be iterable
def testMultiDimArray(self):
with JSContext() as ctxt:
ret = ctxt.eval("""
({
'test': function(){
return [
[ 1, 'abla' ],
[ 2, 'ajkss' ],
]
}
})
""").test()
self.assertEqual([[1, 'abla'], [2, 'ajkss']], convert(ret))
def testLazyConstructor(self):
class Globals(JSClass):
def __init__(self):
self.array=JSArray([1,2,3])
with JSContext(Globals()) as ctxt:
self.assertEqual(2, ctxt.eval("""array[1]"""))
    def testForEach(self):
        """JS ``for..in`` enumerates wrapped Python objects (attributes and
        properties), sequences and dicts (keys), and generators."""
        class NamedClass(object):
            foo = 1

            def __init__(self):
                self.bar = 2

            @property
            def foobar(self):
                return self.foo + self.bar

        def gen(x):
            for i in range(x):
                yield i

        with JSContext() as ctxt:
            func = ctxt.eval("""(function (k) {
                var result = [];
                for (var prop in k) {
                    result.push(prop);
                }
                return result;
            })""")

            self.assertTrue(set(["bar", "foo", "foobar"]).issubset(set(func(NamedClass()))))
            # sequence indices come back as strings, as JS property names
            self.assertEqual(["0", "1", "2"], list(func([1, 2, 3])))
            self.assertEqual(["0", "1", "2"], list(func((1, 2, 3))))
            self.assertEqual(["1", "2", "3"], list(func({1:1, 2:2, 3:3})))

            self.assertEqual(["0", "1", "2"], list(func(gen(3))))
    def testDict(self):
        """JS objects are readable by attribute and convert (recursively,
        via the module's convert helper) to plain Python dicts/lists."""
        with JSContext() as ctxt:
            obj = ctxt.eval("var r = { 'a' : 1, 'b' : 2 }; r")

            self.assertEqual(1, obj.a)
            self.assertEqual(2, obj.b)

            self.assertEqual({ 'a' : 1, 'b' : 2 }, dict(obj))

            self.assertEqual({ 'a': 1,
                               'b': [1, 2, 3],
                               'c': { 'str' : 'goofy',
                                      'float' : 1.234,
                                      'obj' : { 'name': 'john doe' }},
                               'd': True,
                               'e': None },
                             convert(ctxt.eval("""var x =
                             { a: 1,
                               b: [1, 2, 3],
                               c: { str: 'goofy',
                                    float: 1.234,
                                    obj: { name: 'john doe' }},
                               d: true,
                               e: null }; x""")))
    def testDate(self):
        """JS Date converts to a Python datetime (UTC) and a Python datetime
        passed into JS behaves like a Date."""
        with JSContext() as ctxt:
            now1 = ctxt.eval("new Date();")

            self.assertTrue(now1)

            now2 = datetime.utcnow()

            delta = now2 - now1 if now2 > now1 else now1 - now2

            # both clocks should agree to within a second
            self.assertTrue(delta < timedelta(seconds=1))

            func = ctxt.eval("(function (d) { return d.toString(); })")

            now = datetime.now()

            self.assertTrue(str(func(now)).startswith(now.strftime("%a %b %d %Y %H:%M:%S")))
    def testUnicode(self):
        """Non-ASCII strings round-trip between Python and JS; JS ``length``
        counts characters, not bytes."""
        with JSContext() as ctxt:
            self.assertEqual("人", toUnicodeString(ctxt.eval("\"人\"")))
            self.assertEqual("é", toUnicodeString(ctxt.eval("\"é\"")))

            func = ctxt.eval("(function (msg) { return msg.length; })")

            self.assertEqual(2, func("测试"))
def testClassicStyleObject(self):
class FileSystemWarpper:
@property
def cwd(self):
return os.getcwd()
class Global:
@property
def fs(self):
return FileSystemWarpper()
with JSContext(Global()) as ctxt:
self.assertEqual(os.getcwd(), ctxt.eval("fs.cwd"))
    def testRefCount(self):
        """Evaluating the same script twice must not leak references to None."""
        count = sys.getrefcount(None)

        class Global(JSClass):
            pass

        with JSContext(Global()) as ctxt:
            ctxt.eval("""
                var none = null;
            """)

            # the first run pins exactly one extra reference ...
            self.assertEqual(count+1, sys.getrefcount(None))

            ctxt.eval("""
                var none = null;
            """)

            # ... and a second run must not add another
            self.assertEqual(count+1, sys.getrefcount(None))
    def testProperty(self):
        """Python property getters/setters on the global object are driven
        by plain JS reads and assignments."""
        class Global(JSClass):
            def __init__(self, name):
                self._name = name

            def getname(self):
                return self._name

            def setname(self, name):
                self._name = name

            def delname(self):
                self._name = 'deleted'

            name = property(getname, setname, delname)

        g = Global('world')

        with JSContext(g) as ctxt:
            self.assertEqual('world', ctxt.eval("name"))
            self.assertEqual('flier', ctxt.eval("this.name = 'flier';"))
            self.assertEqual('flier', ctxt.eval("name"))

            self.assertTrue(ctxt.eval("delete name"))
            ###
            # FIXME replace the global object with Python object
            #
            #self.assertEqual('deleted', ctxt.eval("name"))

            #ctxt.eval("__defineGetter__('name', function() { return 'fixed'; });")

            #self.assertEqual('fixed', ctxt.eval("name"))
    def testGetterAndSetter(self):
        """JS __defineGetter__/__defineSetter__ defined on the wrapped global
        read and write the underlying Python attribute."""
        class Global(JSClass):
            def __init__(self, testval):
                self.testval = testval

        with JSContext(Global("Test Value A")) as ctxt:
            self.assertEqual("Test Value A", ctxt.locals.testval)

            ctxt.eval("""
                this.__defineGetter__("test", function() {
                    return this.testval;
                });

                this.__defineSetter__("test", function(val) {
                    this.testval = val;
                });
            """)

            self.assertEqual("Test Value A", ctxt.locals.test)

            ctxt.eval("test = 'Test Value B';")

            self.assertEqual("Test Value B", ctxt.locals.test)
    def testDestructor(self):
        """A Python object passed into JS is kept alive by V8 wrappers and is
        only finalized after both V8 and CPython garbage collection run."""
        import gc

        owner = self
        owner.deleted = False

        class Hello(object):
            def say(self):
                pass

            def __del__(self):
                owner.deleted = True

        def test():
            with JSContext() as ctxt:
                fn = ctxt.eval("(function (obj) { obj.say(); })")

                obj = Hello()

                self.assertEqual(2, sys.getrefcount(obj))

                fn(obj)

                # the V8 wrapper still holds references after the call
                self.assertEqual(4, sys.getrefcount(obj))

                del obj

        test()

        self.assertFalse(owner.deleted)

        # V8 must release the wrapper before CPython can finalize the object
        JSEngine.collect()
        gc.collect()

        self.assertTrue(owner.deleted)
def testNullInString(self):
with JSContext() as ctxt:
fn = ctxt.eval("(function (s) { return s; })")
self.assertEqual("hello \0 world", fn("hello \0 world"))
def testLivingObjectCache(self):
class Global(JSClass):
i = 1
b = True
o = object()
with JSContext(Global()) as ctxt:
self.assertTrue(ctxt.eval("i == i"))
self.assertTrue(ctxt.eval("b == b"))
self.assertTrue(ctxt.eval("o == o"))
def testNamedSetter(self):
class Obj(JSClass):
@property
def p(self):
return self._p
@p.setter
def p(self, value):
self._p = value
class Global(JSClass):
def __init__(self):
self.obj = Obj()
self.d = {}
self.p = None
with JSContext(Global()) as ctxt:
ctxt.eval("""
x = obj;
x.y = 10;
x.p = 10;
d.y = 10;
""")
self.assertEqual(10, ctxt.eval("obj.y"))
self.assertEqual(10, ctxt.eval("obj.p"))
self.assertEqual(10, ctxt.locals.d['y'])
def testWatch(self):
class Obj(JSClass):
def __init__(self):
self.p = 1
class Global(JSClass):
def __init__(self):
self.o = Obj()
with JSContext(Global()) as ctxt:
ctxt.eval("""
o.watch("p", function (id, oldval, newval) {
return oldval + newval;
});
""")
self.assertEqual(1, ctxt.eval("o.p"))
ctxt.eval("o.p = 2;")
self.assertEqual(3, ctxt.eval("o.p"))
ctxt.eval("delete o.p;")
self.assertEqual(None, ctxt.eval("o.p"))
ctxt.eval("o.p = 2;")
self.assertEqual(2, ctxt.eval("o.p"))
ctxt.eval("o.unwatch('p');")
ctxt.eval("o.p = 1;")
self.assertEqual(1, ctxt.eval("o.p"))
def testReferenceError(self):
class Global(JSClass):
def __init__(self):
self.s = self
with JSContext(Global()) as ctxt:
self.assertRaises(ReferenceError, ctxt.eval, 'x')
self.assertTrue(ctxt.eval("typeof(x) === 'undefined'"))
self.assertTrue(ctxt.eval("typeof(String) === 'function'"))
self.assertTrue(ctxt.eval("typeof(s.String) === 'undefined'"))
self.assertTrue(ctxt.eval("typeof(s.z) === 'undefined'"))
    def testRaiseExceptionInGetter(self):
        """An exception raised inside a Python __getattr__ propagates out of
        the JS property access; missing attributes yield undefined/None."""
        class Document(JSClass):
            def __getattr__(self, name):
                if name == 'y':
                    raise TypeError()

                return JSClass.__getattr__(self, name)

        class Global(JSClass):
            def __init__(self):
                self.document = Document()

        with JSContext(Global()) as ctxt:
            self.assertEqual(None, ctxt.eval('document.x'))
            self.assertRaises(TypeError, ctxt.eval, 'document.y')
class TestMultithread(unittest.TestCase):
    """Tests for JSLocker/JSUnlocker nesting and running V8 from threads."""

    def testLocker(self):
        # lockers nest; an unlocker temporarily releases the lock
        self.assertFalse(JSLocker.active)
        self.assertFalse(JSLocker.locked)

        with JSLocker() as outter_locker:
            self.assertTrue(JSLocker.active)
            self.assertTrue(JSLocker.locked)

            self.assertTrue(outter_locker)

            with JSLocker() as inner_locker:
                self.assertTrue(JSLocker.locked)

                self.assertTrue(outter_locker)
                self.assertTrue(inner_locker)

                with JSUnlocker() as unlocker:
                    self.assertFalse(JSLocker.locked)

                    self.assertTrue(outter_locker)
                    self.assertTrue(inner_locker)

                self.assertTrue(JSLocker.locked)

        # 'active' stays set once a locker has been used; 'locked' clears
        self.assertTrue(JSLocker.active)
        self.assertFalse(JSLocker.locked)

        locker = JSLocker()

        # entering/leaving a locker while a context is entered is an error
        with JSContext():
            self.assertRaises(RuntimeError, locker.__enter__)
            self.assertRaises(RuntimeError, locker.__exit__, None, None, None)

        del locker

    def testMultiPythonThread(self):
        """JS running on a worker thread can call back into Python helpers."""
        import time, threading

        class Global:
            count = 0
            started = threading.Event()
            finished = threading.Semaphore(0)

            def sleep(self, ms):
                time.sleep(ms / 1000.0)

                self.count += 1

        g = Global()

        def run():
            with JSContext(g) as ctxt:
                ctxt.eval("""
                    started.wait();

                    for (i=0; i<10; i++)
                    {
                        sleep(100);
                    }

                    finished.release();
                """)

        threading.Thread(target=run).start()

        now = time.time()

        # the worker blocks on started.wait() until we set the event
        self.assertEqual(0, g.count)

        g.started.set()
        g.finished.acquire()

        self.assertEqual(10, g.count)

        # ten 100ms sleeps means at least one second elapsed
        self.assertTrue((time.time() - now) >= 1)

    def testMultiJavascriptThread(self):
        """Two JS threads interleave by releasing the lock inside add()."""
        import time, threading

        class Global:
            result = []

            def add(self, value):
                # JSUnlocker lets the other thread run while we sleep
                with JSUnlocker():
                    time.sleep(0.1)

                    self.result.append(value)

        g = Global()

        def run():
            with JSContext(g) as ctxt:
                ctxt.eval("""
                    for (i=0; i<10; i++)
                        add(i);
                """)

        threads = [threading.Thread(target=run), threading.Thread(target=run)]

        with JSLocker():
            for t in threads: t.start()

        for t in threads: t.join()

        self.assertEqual(20, len(g.result))

    def _testPreemptionJavascriptThreads(self):
        """Disabled: relies on V8's preemption scheduler to switch threads."""
        import time, threading

        class Global:
            result = []

            def add(self, value):
                # we use preemption scheduler to switch between threads
                # so, just comment the JSUnlocker
                #
                # with JSUnlocker() as unlocker:
                time.sleep(0.1)

                self.result.append(value)

        g = Global()

        def run():
            with JSContext(g) as ctxt:
                ctxt.eval("""
                    for (i=0; i<10; i++)
                        add(i);
                """)

        threads = [threading.Thread(target=run), threading.Thread(target=run)]

        with JSLocker() as locker:
            JSLocker.startPreemption(100)

            for t in threads: t.start()

        for t in threads: t.join()

        self.assertEqual(20, len(g.result))
class TestEngine(unittest.TestCase):
    """Tests for JSEngine compile/precompile, extensions, globals and
    Python object wrapping."""

    def testClassProperties(self):
        with JSContext() as ctxt:
            # NOTE(review): version prefix is specific to the bundled V8 3.x
            self.assertTrue(str(JSEngine.version).startswith("3."))
            self.assertFalse(JSEngine.dead)

    def testCompile(self):
        with JSContext() as ctxt:
            with JSEngine() as engine:
                s = engine.compile("1+2")

                self.assertTrue(isinstance(s, _PyV8.JSScript))

                self.assertEqual("1+2", s.source)
                self.assertEqual(3, int(s.run()))

                self.assertRaises(SyntaxError, engine.compile, "1+")

    def testPrecompile(self):
        with JSContext() as ctxt:
            with JSEngine() as engine:
                # NOTE(review): the 28-byte size is V8-version specific
                data = engine.precompile("1+2")

                self.assertTrue(data)
                self.assertEqual(28, len(data))

                s = engine.compile("1+2", precompiled=data)

                self.assertTrue(isinstance(s, _PyV8.JSScript))

                self.assertEqual("1+2", s.source)
                self.assertEqual(3, int(s.run()))

                self.assertRaises(SyntaxError, engine.precompile, "1+")

    def testUnicodeSource(self):
        """Scripts and identifiers may be non-ASCII end to end."""
        class Global(JSClass):
            var = '测试'

            def __getattr__(self, name):
                # Python 2 delivers attribute names as UTF-8 encoded bytes
                if (name if is_py3k else name.decode('utf-8')) == '变量':
                    return self.var

                return JSClass.__getattr__(self, name)

        g = Global()

        with JSContext(g) as ctxt:
            with JSEngine() as engine:
                src = """
                function 函数() { return 变量.length; }

                函数();

                var func = function () {};
                """

                # NOTE(review): the precompiled size and the function line/offset
                # assertions depend on the exact source layout — confirm
                data = engine.precompile(src)

                self.assertTrue(data)
                self.assertEqual(68, len(data))

                s = engine.compile(src, precompiled=data)

                self.assertTrue(isinstance(s, _PyV8.JSScript))

                self.assertEqual(toNativeString(src), s.source)
                self.assertEqual(2, s.run())

                func_name = toNativeString('函数')

                self.assertTrue(hasattr(ctxt.locals, func_name))

                func = getattr(ctxt.locals, func_name)

                self.assertTrue(isinstance(func, _PyV8.JSFunction))

                self.assertEqual(func_name, func.name)
                self.assertEqual("", func.resname)
                self.assertEqual(1, func.linenum)
                self.assertEqual(0, func.lineoff)
                self.assertEqual(0, func.coloff)

                var_name = toNativeString('变量')

                setattr(ctxt.locals, var_name, '测试长字符串')

                self.assertEqual(6, func())

                self.assertEqual("func", ctxt.locals.func.inferredname)

    def testExtension(self):
        """JS extensions are visible only in contexts that request them,
        unless autoEnable is set."""
        extSrc = """function hello(name) { return "hello " + name + " from javascript"; }"""
        extJs = JSExtension("hello/javascript", extSrc)

        self.assertTrue(extJs)
        self.assertEqual("hello/javascript", extJs.name)
        self.assertEqual(extSrc, extJs.source)
        self.assertFalse(extJs.autoEnable)
        self.assertTrue(extJs.registered)

        # keep the extension object alive for the process lifetime
        TestEngine.extJs = extJs

        with JSContext(extensions=['hello/javascript']) as ctxt:
            self.assertEqual("hello flier from javascript", ctxt.eval("hello('flier')"))

        # test the auto enable property

        with JSContext() as ctxt:
            self.assertRaises(ReferenceError, ctxt.eval, "hello('flier')")

        extJs.autoEnable = True
        self.assertTrue(extJs.autoEnable)

        with JSContext() as ctxt:
            self.assertEqual("hello flier from javascript", ctxt.eval("hello('flier')"))

        extJs.autoEnable = False
        self.assertFalse(extJs.autoEnable)

        with JSContext() as ctxt:
            self.assertRaises(ReferenceError, ctxt.eval, "hello('flier')")

        extUnicodeSrc = """function helloW(name) { return "hello " + name + " from javascript"; }"""
        extUnicodeJs = JSExtension("helloW/javascript", extUnicodeSrc)

        self.assertTrue(extUnicodeJs)
        self.assertEqual("helloW/javascript", extUnicodeJs.name)
        self.assertEqual(toNativeString(extUnicodeSrc), extUnicodeJs.source)
        self.assertFalse(extUnicodeJs.autoEnable)
        self.assertTrue(extUnicodeJs.registered)

        TestEngine.extUnicodeJs = extUnicodeJs

        with JSContext(extensions=['helloW/javascript']) as ctxt:
            self.assertEqual("hello flier from javascript", ctxt.eval("helloW('flier')"))

            ret = ctxt.eval("helloW('世界')")

            self.assertEqual("hello 世界 from javascript", ret if is_py3k else ret.decode('UTF-8'))

    def testNativeExtension(self):
        """A native extension binds a JS declaration to a Python callback."""
        extSrc = "native function hello();"
        extPy = JSExtension("hello/python", extSrc, lambda func: lambda name: "hello " + name + " from python", register=False)

        self.assertTrue(extPy)
        self.assertEqual("hello/python", extPy.name)
        self.assertEqual(extSrc, extPy.source)
        self.assertFalse(extPy.autoEnable)
        self.assertFalse(extPy.registered)

        extPy.register()

        self.assertTrue(extPy.registered)

        TestEngine.extPy = extPy

        with JSContext(extensions=['hello/python']) as ctxt:
            self.assertEqual("hello flier from python", ctxt.eval("hello('flier')"))

    def _testSerialize(self):
        """Disabled: snapshot serialize/deserialize round trip."""
        data = None

        self.assertFalse(JSContext.entered)

        with JSContext() as ctxt:
            self.assertTrue(JSContext.entered)

            #ctxt.eval("function hello(name) { return 'hello ' + name; }")

            data = JSEngine.serialize()

        self.assertTrue(data)
        self.assertTrue(len(data) > 0)

        self.assertFalse(JSContext.entered)

        #JSEngine.deserialize()

        self.assertTrue(JSContext.entered)

        self.assertEqual('hello flier', JSContext.current.eval("hello('flier');"))

    def testEval(self):
        with JSContext() as ctxt:
            self.assertEqual(3, int(ctxt.eval("1+2")))

    def testGlobal(self):
        class Global(JSClass):
            version = "1.0"

        with JSContext(Global()) as ctxt:
            vars = ctxt.locals

            # getter
            self.assertEqual(Global.version, str(vars.version))
            self.assertEqual(Global.version, str(ctxt.eval("version")))

            self.assertRaises(ReferenceError, ctxt.eval, "nonexists")

            # setter
            self.assertEqual(2.0, float(ctxt.eval("version = 2.0")))
            self.assertEqual(2.0, float(vars.version))

    def testThis(self):
        class Global(JSClass):
            version = 1.0

        with JSContext(Global()) as ctxt:
            self.assertEqual("[object Global]", str(ctxt.eval("this")))
            self.assertEqual(1.0, float(ctxt.eval("this.version")))

    def testObjectBuildInMethods(self):
        class Global(JSClass):
            version = 1.0

        with JSContext(Global()) as ctxt:
            self.assertEqual("[object Global]", str(ctxt.eval("this.toString()")))
            self.assertEqual("[object Global]", str(ctxt.eval("this.toLocaleString()")))
            self.assertEqual(Global.version, float(ctxt.eval("this.valueOf()").version))

            self.assertTrue(bool(ctxt.eval("this.hasOwnProperty(\"version\")")))
            self.assertFalse(ctxt.eval("this.hasOwnProperty(\"nonexistent\")"))

    def testPythonWrapper(self):
        """JS mutations of wrapped Python lists/dicts are seen by Python."""
        class Global(JSClass):
            s = [1, 2, 3]
            d = {'a': {'b': 'c'}, 'd': ['e', 'f']}

        g = Global()

        with JSContext(g) as ctxt:
            ctxt.eval("""
                s[2] = s[1] + 2;
                s[0] = s[1];
                delete s[1];
            """)
            self.assertEqual([2, 4], g.s)

            self.assertEqual('c', ctxt.eval("d.a.b"))
            self.assertEqual(['e', 'f'], ctxt.eval("d.d"))

            ctxt.eval("""
                d.a.q = 4
                delete d.d
            """)
            self.assertEqual(4, g.d['a']['q'])
            self.assertEqual(None, ctxt.eval("d.d"))

    def _testMemoryAllocationCallback(self):
        """Disabled: V8 memory allocation callback bookkeeping."""
        alloc = {}

        def callback(space, action, size):
            alloc[(space, action)] = alloc.setdefault((space, action), 0) + size

        JSEngine.setMemoryAllocationCallback(callback)

        with JSContext() as ctxt:
            self.assertFalse((JSObjectSpace.Code, JSAllocationAction.alloc) in alloc)

            ctxt.eval("var o = new Array(1000);")

            self.assertTrue((JSObjectSpace.Code, JSAllocationAction.alloc) in alloc)

        JSEngine.setMemoryAllocationCallback(None)
class TestDebug(unittest.TestCase):
    """Tests for the JSDebugger event-dispatch hooks."""

    def setUp(self):
        self.engine = JSEngine()

    def tearDown(self):
        del self.engine

    events = []    # class-level accumulator of repr()'d debug events

    def processDebugEvent(self, event):
        try:
            logging.debug("receive debug event: %s", repr(event))

            self.events.append(repr(event))
        except:
            logging.error("fail to process debug event")
            logging.debug(traceback.extract_stack())

    def testEventDispatch(self):
        debugger = JSDebugger()

        self.assertTrue(not debugger.enabled)

        debugger.onBreak = lambda evt: self.processDebugEvent(evt)
        debugger.onException = lambda evt: self.processDebugEvent(evt)
        debugger.onNewFunction = lambda evt: self.processDebugEvent(evt)
        debugger.onBeforeCompile = lambda evt: self.processDebugEvent(evt)
        debugger.onAfterCompile = lambda evt: self.processDebugEvent(evt)

        with JSContext() as ctxt:
            debugger.enabled = True

            self.assertEqual(3, int(ctxt.eval("function test() { text = \"1+2\"; return eval(text) } test()")))

            debugger.enabled = False

            self.assertRaises(JSError, JSContext.eval, ctxt, "throw 1")

        self.assertTrue(not debugger.enabled)

        # two compiles (script + eval) each fire before/after events
        self.assertEqual(4, len(self.events))
class TestProfile(unittest.TestCase):
    """Tests for the V8 profiler (currently disabled via the leading
    underscore on each method)."""

    def _testStart(self):
        self.assertFalse(profiler.started)

        profiler.start()

        self.assertTrue(profiler.started)

        profiler.stop()

        self.assertFalse(profiler.started)

    def _testResume(self):
        self.assertTrue(profiler.paused)

        self.assertEqual(profiler.Modules.cpu, profiler.modules)

        profiler.resume()
        profiler.resume(profiler.Modules.heap)

        # TODO enable profiler with resume
        #self.assertFalse(profiler.paused)
class TestAST(unittest.TestCase):
    """Tests for walking V8's parser AST via JSEngine().compile(...).visit().

    NOTE(review): all numeric ``pos`` assertions are character offsets into
    the script literals, so they depend on the literals' exact whitespace.
    """

    class Checker(object):
        """Base visitor: enters a JSContext and records which callbacks fired."""

        def __init__(self, testcase):
            self.testcase = testcase
            self.called = []

        def __enter__(self):
            self.ctxt = JSContext()
            self.ctxt.enter()

            return self

        def __exit__(self, exc_type, exc_value, traceback):
            self.ctxt.leave()

        def __getattr__(self, name):
            # delegate assert* helpers to the wrapped TestCase
            return getattr(self.testcase, name)

        def test(self, script):
            JSEngine().compile(script).visit(self)

            return self.called

        def onProgram(self, prog):
            self.ast = prog.toAST()
            self.json = json.loads(prog.toJSON())

            for decl in prog.scope.declarations:
                decl.visit(self)

            for stmt in prog.body:
                stmt.visit(self)

        def onBlock(self, block):
            for stmt in block.statements:
                stmt.visit(self)

        def onExpressionStatement(self, stmt):
            stmt.expression.visit(self)

            #print type(stmt.expression), stmt.expression

    def testBlock(self):
        class BlockChecker(TestAST.Checker):
            def onBlock(self, stmt):
                self.called.append('block')

                self.assertEqual(AST.NodeType.Block, stmt.type)

                self.assertTrue(stmt.initializerBlock)
                self.assertFalse(stmt.anonymous)

                target = stmt.breakTarget
                self.assertTrue(target)
                self.assertFalse(target.bound)
                self.assertTrue(target.unused)
                self.assertFalse(target.linked)

                self.assertEqual(2, len(stmt.statements))

                self.assertEqual(['%InitializeVarGlobal("i", 0);', '%InitializeVarGlobal("j", 0);'], [str(s) for s in stmt.statements])

        with BlockChecker(self) as checker:
            self.assertEqual(['block'], checker.test("var i, j;"))
            # the expected AST dump must stay flush-left: it is compared verbatim
            self.assertEqual("""FUNC
. NAME ""
. INFERRED NAME ""
. DECLS
. . VAR "i"
. . VAR "j"
. BLOCK INIT
. . CALL RUNTIME InitializeVarGlobal
. . . LITERAL "i"
. . . LITERAL 0
. . CALL RUNTIME InitializeVarGlobal
. . . LITERAL "j"
. . . LITERAL 0
""", checker.ast)

            self.assertEqual(['FunctionLiteral', {'name': ''},
                ['Declaration', {'mode': 'VAR'},
                    ['Variable', {'name': 'i'}]
                ], ['Declaration', {'mode':'VAR'},
                    ['Variable', {'name': 'j'}]
                ], ['Block',
                    ['ExpressionStatement', ['CallRuntime', {'name': 'InitializeVarGlobal'},
                        ['Literal', {'handle':'i'}],
                        ['Literal', {'handle': 0}]]],
                    ['ExpressionStatement', ['CallRuntime', {'name': 'InitializeVarGlobal'},
                        ['Literal', {'handle': 'j'}],
                        ['Literal', {'handle': 0}]]]
                ]
            ], checker.json)

    def testIfStatement(self):
        class IfStatementChecker(TestAST.Checker):
            def onIfStatement(self, stmt):
                self.called.append('if')

                self.assertTrue(stmt)
                self.assertEqual(AST.NodeType.IfStatement, stmt.type)

                # pos 7 == offset of "if" in the one-line script; pos is writable
                self.assertEqual(7, stmt.pos)
                stmt.pos = 100
                self.assertEqual(100, stmt.pos)

                self.assertTrue(stmt.hasThenStatement)
                self.assertTrue(stmt.hasElseStatement)

                self.assertEqual("((value % 2) == 0)", str(stmt.condition))
                self.assertEqual("{ s = \"even\"; }", str(stmt.thenStatement))
                self.assertEqual("{ s = \"odd\"; }", str(stmt.elseStatement))

                self.assertFalse(stmt.condition.isPropertyName)

        with IfStatementChecker(self) as checker:
            self.assertEqual(['if'], checker.test("var s; if (value % 2 == 0) { s = 'even'; } else { s = 'odd'; }"))

    def testForStatement(self):
        class ForStatementChecker(TestAST.Checker):
            def onForStatement(self, stmt):
                self.called.append('for')

                self.assertEqual("{ j += i; }", str(stmt.body))

                self.assertEqual("i = 0;", str(stmt.init))
                self.assertEqual("(i < 10)", str(stmt.condition))
                self.assertEqual("(i++);", str(stmt.nextStmt))

                target = stmt.continueTarget

                self.assertTrue(target)
                self.assertFalse(target.bound)
                self.assertTrue(target.unused)
                self.assertFalse(target.linked)
                self.assertFalse(stmt.fastLoop)

            def onForInStatement(self, stmt):
                self.called.append('forIn')

                self.assertEqual("{ out += name; }", str(stmt.body))

                self.assertEqual("name", str(stmt.each))
                self.assertEqual("names", str(stmt.enumerable))

            def onWhileStatement(self, stmt):
                self.called.append('while')

                self.assertEqual("{ i += 1; }", str(stmt.body))
                self.assertEqual("(i < 10)", str(stmt.condition))

            def onDoWhileStatement(self, stmt):
                self.called.append('doWhile')

                self.assertEqual("{ i += 1; }", str(stmt.body))
                self.assertEqual("(i < 10)", str(stmt.condition))
                # 281 == offset of the "(" after the final "while "
                self.assertEqual(281, stmt.conditionPos)

        with ForStatementChecker(self) as checker:
            self.assertEqual(['for', 'forIn', 'while', 'doWhile'], checker.test("""
                var i, j;

                for (i=0; i<10; i++) { j+=i; }

                var names = new Array();
                var out = '';

                for (name in names) { out += name; }

                while (i<10) { i += 1; }

                do { i += 1; } while (i<10);
            """))

    def testCallStatements(self):
        class CallStatementChecker(TestAST.Checker):
            def onVariableDeclaration(self, decl):
                self.called.append('var')

                var = decl.proxy

                if var.name == 's':
                    self.assertEqual(AST.VarMode.var, decl.mode)

                    self.assertTrue(var.isValidLeftHandSide)
                    self.assertFalse(var.isArguments)
                    self.assertFalse(var.isThis)

            def onFunctionDeclaration(self, decl):
                self.called.append('func')

                var = decl.proxy

                if var.name == 'hello':
                    self.assertEqual(AST.VarMode.var, decl.mode)
                    self.assertTrue(decl.function)
                    self.assertEqual('(function hello(name) { s = ("Hello " + name); })', str(decl.function))
                elif var.name == 'dog':
                    self.assertEqual(AST.VarMode.var, decl.mode)
                    self.assertTrue(decl.function)
                    self.assertEqual('(function dog(name) { (this).name = name; })', str(decl.function))

            def onCall(self, expr):
                self.called.append('call')

                self.assertEqual("hello", str(expr.expression))
                self.assertEqual(['"flier"'], [str(arg) for arg in expr.args])
                self.assertEqual(159, expr.pos)

            def onCallNew(self, expr):
                self.called.append('callNew')

                self.assertEqual("dog", str(expr.expression))
                self.assertEqual(['"cat"'], [str(arg) for arg in expr.args])
                self.assertEqual(191, expr.pos)

            def onCallRuntime(self, expr):
                self.called.append('callRuntime')

                self.assertEqual("InitializeVarGlobal", expr.name)
                self.assertEqual(['"s"', '0'], [str(arg) for arg in expr.args])
                self.assertFalse(expr.isJsRuntime)

        with CallStatementChecker(self) as checker:
            self.assertEqual(['var', 'func', 'func', 'callRuntime', 'call', 'callNew'], checker.test("""
                var s;
                function hello(name) { s = "Hello " + name; }
                function dog(name) { this.name = name; }
                hello("flier");
                new dog("cat");
            """))

    def testTryStatements(self):
        class TryStatementsChecker(TestAST.Checker):
            def onThrow(self, expr):
                self.called.append('try')

                self.assertEqual('"abc"', str(expr.exception))
                self.assertEqual(66, expr.pos)

            def onTryCatchStatement(self, stmt):
                self.called.append('catch')

                self.assertEqual("{ throw \"abc\"; }", str(stmt.tryBlock))
                #FIXME self.assertEqual([], stmt.targets)

                stmt.tryBlock.visit(self)

                self.assertEqual("err", str(stmt.variable.name))
                self.assertEqual("{ s = err; }", str(stmt.catchBlock))

            def onTryFinallyStatement(self, stmt):
                self.called.append('finally')

                self.assertEqual("{ throw \"abc\"; }", str(stmt.tryBlock))
                #FIXME self.assertEqual([], stmt.targets)

                self.assertEqual("{ s += \".\"; }", str(stmt.finallyBlock))

        with TryStatementsChecker(self) as checker:
            self.assertEqual(['catch', 'try', 'finally'], checker.test("""
                var s;
                try {
                    throw "abc";
                }
                catch (err) {
                    s = err;
                };

                try {
                    throw "abc";
                }
                finally {
                    s += ".";
                }
            """))

    def testLiterals(self):
        class LiteralChecker(TestAST.Checker):
            def onCallRuntime(self, expr):
                # visit the initializer of the var declaration
                expr.args[1].visit(self)

            def onLiteral(self, litr):
                self.called.append('literal')

                self.assertFalse(litr.isPropertyName)
                self.assertFalse(litr.isNull)
                self.assertFalse(litr.isTrue)

            def onRegExpLiteral(self, litr):
                self.called.append('regex')

                self.assertEqual("test", litr.pattern)
                self.assertEqual("g", litr.flags)

            def onObjectLiteral(self, litr):
                self.called.append('object')

                self.assertEqual('constant:"name"="flier",constant:"sex"=true',
                                 ",".join(["%s:%s=%s" % (prop.kind, prop.key, prop.value) for prop in litr.properties]))

            def onArrayLiteral(self, litr):
                self.called.append('array')

                self.assertEqual('"hello","world",42',
                                 ",".join([str(value) for value in litr.values]))

        with LiteralChecker(self) as checker:
            self.assertEqual(['literal', 'regex', 'literal', 'literal'], checker.test("""
                false;

                /test/g;

                var o = { name: 'flier', sex: true };

                var a = ['hello', 'world', 42];
            """))

    def testOperations(self):
        class OperationChecker(TestAST.Checker):
            def onUnaryOperation(self, expr):
                self.called.append('unaryOp')

                self.assertEqual(AST.Op.BIT_NOT, expr.op)
                self.assertEqual("i", expr.expression.name)

                #print "unary", expr

            def onIncrementOperation(self, expr):
                self.fail()

            def onBinaryOperation(self, expr):
                self.called.append('binOp')

                self.assertEqual(AST.Op.ADD, expr.op)
                self.assertEqual("i", str(expr.left))
                self.assertEqual("j", str(expr.right))
                self.assertEqual(36, expr.pos)

                #print "bin", expr

            def onAssignment(self, expr):
                self.called.append('assign')

                self.assertEqual(AST.Op.ASSIGN_ADD, expr.op)
                self.assertEqual(AST.Op.ADD, expr.binop)

                self.assertEqual("i", str(expr.target))
                self.assertEqual("1", str(expr.value))
                self.assertEqual(53, expr.pos)

                self.assertEqual("(i + 1)", str(expr.binOperation))

                self.assertTrue(expr.compound)

            def onCountOperation(self, expr):
                self.called.append('countOp')

                self.assertFalse(expr.prefix)
                self.assertTrue(expr.postfix)

                self.assertEqual(AST.Op.INC, expr.op)
                self.assertEqual(AST.Op.ADD, expr.binop)
                self.assertEqual(71, expr.pos)
                self.assertEqual("i", expr.expression.name)

                #print "count", expr

            def onCompareOperation(self, expr):
                self.called.append('compOp')

                if len(self.called) == 4:
                    self.assertEqual(AST.Op.EQ, expr.op)
                    self.assertEqual(88, expr.pos) # i==j
                else:
                    self.assertEqual(AST.Op.EQ_STRICT, expr.op)
                    self.assertEqual(106, expr.pos) # i===j

                self.assertEqual("i", str(expr.left))
                self.assertEqual("j", str(expr.right))

                #print "comp", expr

            def onConditional(self, expr):
                self.called.append('conditional')

                self.assertEqual("(i > j)", str(expr.condition))
                self.assertEqual("i", str(expr.thenExpr))
                self.assertEqual("j", str(expr.elseExpr))

                self.assertEqual(144, expr.thenExprPos)
                self.assertEqual(146, expr.elseExprPos)

        with OperationChecker(self) as checker:
            self.assertEqual(['binOp', 'assign', 'countOp', 'compOp', 'compOp', 'unaryOp', 'conditional'], checker.test("""
            var i, j;
            i+j;
            i+=1;
            i++;
            i==j;
            i===j;
            ~i;
            i>j?i:j;
            """))

    def testSwitchStatement(self):
        class SwitchStatementChecker(TestAST.Checker):
            def onSwitchStatement(self, stmt):
                self.called.append('switch')

                self.assertEqual('expr', stmt.tag.name)
                self.assertEqual(2, len(stmt.cases))

                case = stmt.cases[0]

                self.assertFalse(case.isDefault)
                self.assertTrue(case.label.isString)
                self.assertEqual(0, case.bodyTarget.pos)
                # NOTE(review): case.position values depend on the script's
                # exact whitespace — confirm against the original layout
                self.assertEqual(57, case.position)
                self.assertEqual(1, len(case.statements))

                case = stmt.cases[1]

                self.assertTrue(case.isDefault)
                self.assertEqual(None, case.label)
                self.assertEqual(0, case.bodyTarget.pos)
                self.assertEqual(109, case.position)
                self.assertEqual(1, len(case.statements))

        with SwitchStatementChecker(self) as checker:
            self.assertEqual(['switch'], checker.test("""
                switch (expr) {
                    case 'flier':
                        break;

                    default:
                        break;
                }
            """))
if __name__ == '__main__':
    # -v raises our own log level (unittest.main also consumes it for
    # verbose test output).
    level = logging.DEBUG if "-v" in sys.argv else logging.WARN

    if "-p" in sys.argv:
        # -p pauses startup so a debugger can attach to this process;
        # remove it so unittest.main doesn't choke on the unknown flag.
        sys.argv.remove("-p")

        print("Press any key to continue or attach process #%d..." % os.getpid())

        # raw_input() reads a raw line on both major versions (the module
        # header aliases raw_input = input on Python 3); the builtin input()
        # would *evaluate* the typed text on Python 2 and raise on an
        # empty line.
        raw_input()

    logging.basicConfig(level=level, format='%(asctime)s %(levelname)s %(message)s')
    logging.info("testing PyV8 module %s with V8 v%s", __version__, JSEngine.version)

    unittest.main()
|
umitproject/openmonitor-aggregator
|
refs/heads/master
|
autoload/middleware.py
|
132
|
from django.utils.importlib import import_module
from django.conf import settings
# load all models.py to ensure signal handling installation or index loading
# of some apps
for app in settings.INSTALLED_APPS:
    try:
        import_module('%s.models' % (app))
    except ImportError:
        # apps without a models module are simply skipped
        pass
class AutoloadMiddleware(object):
    """Intentionally empty middleware.

    The module-level imports performed when this file loads do all the
    work; this class exists only so the module can be listed in Django's
    middleware settings.
    """
    pass
|
pierricgimmig/orbitprofiler
|
refs/heads/headless
|
contrib/conan/recipes/llvm-common/llvmmodulepackage.py
|
1
|
from llvmpackage import LLVMPackage
from conans import tools
import os
class LLVMModulePackage(LLVMPackage):
    """Base conan recipe for packages that repackage a single LLVM module.

    Subclasses must set ``llvm_component`` (the base LLVM package to pull
    binaries/headers from) and either ``llvm_module`` or ``header_only``.
    """

    version = LLVMPackage.version

    @property
    def _header_only(self):
        # Subclasses opt in by defining a truthy `header_only` attribute.
        return getattr(self, 'header_only', False)

    def build_requirements(self):
        """Validate the subclass contract and require the base component."""
        if not hasattr(self, 'llvm_component'):
            raise RuntimeError('LLVM Module package {} has no associated base LLVM component set'.format(self.name))
        if not self._header_only and not hasattr(self, 'llvm_module'):
            # Fixed error message: was "has not associtated {} module set".
            raise RuntimeError('LLVM Module package {} has no associated {} module set'.format(self.name, self.llvm_component))
        self.build_requires(self._llvm_dependency_package(self.llvm_component))

    def configure(self):
        super().configure()
        # Propagate our build options down to the base LLVM component so
        # both packages are built consistently.
        self.output.info("Requiring llvm base component dependency '{}' as shared library: {}".format(self.llvm_component, self._build_shared))
        self.options[self.llvm_component].shared = self._build_shared
        self.options[self.llvm_component].sources_repo = self.options.sources_repo
        self.options[self.llvm_component].no_rtti = self.options.no_rtti

    def copy_from_component(self, pattern, src='', dst='', keep_path=True):
        """Copy files matching `pattern` from the base component's package."""
        root = self.deps_cpp_info[self.llvm_component].rootpath
        self.copy(pattern, src=os.path.join(root, src), dst=dst, keep_path=keep_path)

    def package(self):
        """Copy this module's libraries (or headers, if header-only)."""
        if not self._header_only:
            component_lib_dir = os.path.join(self.deps_cpp_info[self.llvm_component].rootpath, 'lib')
            self.output.info('Packaging {} library files, imported from {} package lib dir (\"{}\")'.format(self.name, self.llvm_component, component_lib_dir))
            if hasattr(self, 'library_name'):
                self.output.info('Copying library file by given name "{}"'.format(self.library_name))
                lib_globs = ['*{}.*'.format(self.library_name)]
            else:
                # Library file names may spell the component in lower or
                # upper case, so try both.
                lib_globs = [
                    '*{}{}.*'.format(self.llvm_component.lower(), self.llvm_module),
                    '*{}{}.*'.format(self.llvm_component.upper(), self.llvm_module)
                ]
            for lib_glob in lib_globs:
                self.copy(lib_glob,
                          src=component_lib_dir,
                          dst='lib',
                          keep_path=False)
        if self._header_only:
            if not hasattr(self, 'include_dirs'):
                self.include_dirs = [os.path.join(self.llvm_component, self.llvm_module)]
            for include_dir in self.include_dirs:
                self.output.info('Packaging headers from \"{}\"'.format(include_dir))
                self.copy('*',
                          src=os.path.join(self.deps_cpp_info[self.llvm_component].rootpath, 'include', include_dir),
                          dst=os.path.join('include', include_dir))

    def package_id(self):
        """Make header-only packages independent of all settings but the OS."""
        if self._header_only:
            # Renamed the local (was `os`) so it no longer shadows the
            # `os` module import.
            settings_os = self.info.settings.os
            self.info.settings.clear()
            self.info.settings.os = settings_os
            self.info.options.clear()
            self.info.requires.clear()

    def package_info(self):
        if not self._header_only:
            self.cpp_info.libs = tools.collect_libs(self)
|
elaske/tsp-data
|
refs/heads/develop
|
html5lib/tests/support.py
|
450
|
from __future__ import absolute_import, division, unicode_literals
import os
import sys
import codecs
import glob
import xml.sax.handler
# Directory containing this module; the test fixtures live in testdata/.
base_path = os.path.split(__file__)[0]
test_dir = os.path.join(base_path, 'testdata')
# Put the in-tree html5lib (two directory levels up) ahead of any
# installed copy on sys.path before importing from it.
sys.path.insert(0, os.path.abspath(os.path.join(base_path,
                                                os.path.pardir,
                                                os.path.pardir)))

from html5lib import treebuilders

# Only needed for the path setup above.
del base_path
# Build a dict of available trees
treeTypes = {"DOM": treebuilders.getTreeBuilder("dom")}

# Try whatever etree implementations are available from a list that are
# "supposed" to work
try:
    import xml.etree.ElementTree as ElementTree
    treeTypes['ElementTree'] = treebuilders.getTreeBuilder("etree", ElementTree, fullTree=True)
except ImportError:
    try:
        # Fall back to the standalone pre-2.5 package name.
        import elementtree.ElementTree as ElementTree
        treeTypes['ElementTree'] = treebuilders.getTreeBuilder("etree", ElementTree, fullTree=True)
    except ImportError:
        pass

try:
    # C-accelerated etree (stdlib alias; deprecated in modern Pythons).
    import xml.etree.cElementTree as cElementTree
    treeTypes['cElementTree'] = treebuilders.getTreeBuilder("etree", cElementTree, fullTree=True)
except ImportError:
    try:
        # Standalone cElementTree distribution for old interpreters.
        import cElementTree
        treeTypes['cElementTree'] = treebuilders.getTreeBuilder("etree", cElementTree, fullTree=True)
    except ImportError:
        pass

try:
    import lxml.etree as lxml  # flake8: noqa
except ImportError:
    pass
else:
    treeTypes['lxml'] = treebuilders.getTreeBuilder("lxml")
def get_data_files(subdirectory, files='*.dat'):
    """Return the paths of all test-data files matching `files` in `subdirectory`."""
    pattern = os.path.join(test_dir, subdirectory, files)
    return glob.glob(pattern)
class DefaultDict(dict):
    """Dict that returns a fixed fallback value for any missing key.

    Unlike ``collections.defaultdict``, a missing-key lookup does not
    insert anything into the mapping.
    """

    def __init__(self, default, *args, **kwargs):
        dict.__init__(self, *args, **kwargs)
        self.default = default

    def __getitem__(self, key):
        return dict.get(self, key, self.default)
class TestData(object):
    """Iterator over the test sections of an html5lib ``.dat`` fixture file.

    The file is a sequence of ``#heading`` lines each followed by that
    section's payload; every occurrence of `newTestHeading` starts a new
    test dict.  With ``encoding=None`` the file is read in binary mode and
    all headings/values are ``bytes`` instead of ``str``.
    """

    def __init__(self, filename, newTestHeading="data", encoding="utf8"):
        # encoding=None selects binary mode; otherwise decode via codecs.
        if encoding is None:
            self.f = open(filename, mode="rb")
        else:
            self.f = codecs.open(filename, encoding=encoding)
        self.encoding = encoding
        self.newTestHeading = newTestHeading

    def __del__(self):
        self.f.close()

    def __iter__(self):
        """Yield one DefaultDict per test, keyed by section heading."""
        data = DefaultDict(None)
        key = None
        for line in self.f:
            heading = self.isSectionHeading(line)
            if heading:
                # A repeat of the "new test" heading closes the previous
                # test and starts a fresh one.
                if data and heading == self.newTestHeading:
                    # Remove trailing newline
                    data[key] = data[key][:-1]
                    yield self.normaliseOutput(data)
                    data = DefaultDict(None)
                key = heading
                # Seed the section with an empty str/bytes matching the
                # file mode so += below concatenates consistently.
                data[key] = "" if self.encoding else b""
            elif key is not None:
                data[key] += line
        # Emit the final test, which has no following heading to close it.
        if data:
            yield self.normaliseOutput(data)

    def isSectionHeading(self, line):
        """If the current heading is a test section heading return the heading,
        otherwise return False"""
        # print(line)
        if line.startswith("#" if self.encoding else b"#"):
            return line[1:].strip()
        else:
            return False

    def normaliseOutput(self, data):
        # Remove trailing newlines
        for key, value in data.items():
            if value.endswith("\n" if self.encoding else b"\n"):
                data[key] = value[:-1]
        return data
def convert(stripChars):
    """Build a converter that trims tree-output lines by `stripChars` chars.

    Lines of serialized-tree output begin with "|"; those lose their first
    `stripChars` characters, all other lines pass through unchanged.
    """
    def convertData(data):
        converted = [
            line[stripChars:] if line.startswith("|") else line
            for line in data.split("\n")
        ]
        return "\n".join(converted)
    return convertData


# Expected-output fixtures use a two-character "| " prefix.
convertExpected = convert(2)
def errorMessage(input, expected, actual):
    """Format a test-failure message showing input, expected and actual.

    On Python 2 the message is coerced to ascii bytes (backslash-escaping
    anything else) so it can be embedded in a failure report regardless of
    the strings' contents.
    """
    # Fixed typo in the emitted message: "Recieved" -> "Received".
    msg = ("Input:\n%s\nExpected:\n%s\nReceived\n%s\n" %
           (repr(input), repr(expected), repr(actual)))
    if sys.version_info.major == 2:
        msg = msg.encode("ascii", "backslashreplace")
    return msg
class TracingSaxHandler(xml.sax.handler.ContentHandler):
    """SAX content handler that records every callback into ``visited``.

    Document start/end events are stored as plain strings; all other
    events are stored as tuples of the event name and its arguments.
    Prefix-mapping callbacks are deliberately not traced because SAX does
    not guarantee their ordering.
    """

    def __init__(self):
        xml.sax.handler.ContentHandler.__init__(self)
        self.visited = []

    def startDocument(self):
        self.visited.append('startDocument')

    def endDocument(self):
        self.visited.append('endDocument')

    def startPrefixMapping(self, prefix, uri):
        pass  # ordering not guaranteed, so intentionally untraced

    def endPrefixMapping(self, prefix):
        pass  # ordering not guaranteed, so intentionally untraced

    def startElement(self, name, attrs):
        self.visited.append(('startElement', name, attrs))

    def endElement(self, name):
        self.visited.append(('endElement', name))

    def startElementNS(self, name, qname, attrs):
        self.visited.append(('startElementNS', name, qname, dict(attrs)))

    def endElementNS(self, name, qname):
        self.visited.append(('endElementNS', name, qname))

    def characters(self, content):
        self.visited.append(('characters', content))

    def ignorableWhitespace(self, whitespace):
        self.visited.append(('ignorableWhitespace', whitespace))

    def processingInstruction(self, target, data):
        self.visited.append(('processingInstruction', target, data))

    def skippedEntity(self, name):
        self.visited.append(('skippedEntity', name))
|
lmazuel/ansible
|
refs/heads/devel
|
test/integration/targets/module_utils/module_utils/spam8/ham/__init__.py
|
298
|
eggs = 'spam8:eggs'
|
SymbiFlow/nextpnr
|
refs/heads/master
|
ice40/tmfuzz.py
|
2
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# ../nextpnr-ice40 --hx8k --tmfuzz > tmfuzz_hx8k.txt
# ../nextpnr-ice40 --lp8k --tmfuzz > tmfuzz_lp8k.txt
# ../nextpnr-ice40 --up5k --tmfuzz > tmfuzz_up5k.txt
import numpy as np
import matplotlib.pyplot as plt
from collections import defaultdict
# Device family whose fuzz report (tmfuzz_<device>.txt) is analysed.
device = "hx8k"
# device = "lp8k"
# device = "up5k"

# Source/destination wire types selected for the distance-delay model.
sel_src_type = "LUTFF_OUT"
sel_dst_type = "LUTFF_IN_LUT"
#%% Read fuzz data

# Histogram of (src_type, dst_type) pair occurrences.
src_dst_pairs = defaultdict(lambda: 0)

# Rows: (delay, estdelay, dx, dy, model0, model1, model2); the last three
# columns are filled in by the model sections further below.
delay_data = list()
all_delay_data = list()

# 41x41 accumulators indexed by relative position (dx, dy) in [-20, 20].
delay_map_sum = np.zeros((41, 41))
delay_map_sum2 = np.zeros((41, 41))
delay_map_count = np.zeros((41, 41))

same_tile_delays = list()
neighbour_tile_delays = list()

# Samples with dst_type == sel_dst_type, grouped by source net type.
type_delta_data = dict()

with open("tmfuzz_%s.txt" % device, "r") as f:
    for line in f:
        line = line.split()
        # "dst" records update the current destination; every record then
        # supplies source fields.  NOTE(review): this assumes "dst" lines
        # also carry at least 7 columns -- confirm against the tmfuzz
        # report format emitted by nextpnr.
        if line[0] == "dst":
            dst_xy = (int(line[1]), int(line[2]))
            dst_type = line[3]
            dst_wire = line[4]
        src_xy = (int(line[1]), int(line[2]))
        src_type = line[3]
        src_wire = line[4]
        delay = int(line[5])
        estdelay = int(line[6])
        all_delay_data.append((delay, estdelay))
        src_dst_pairs[src_type, dst_type] += 1
        dx = dst_xy[0] - src_xy[0]
        dy = dst_xy[1] - src_xy[1]
        if src_type == sel_src_type and dst_type == sel_dst_type:
            # Same-tile and direct-neighbour pairs are kept separate from
            # the distance model's training data.
            if dx == 0 and dy == 0:
                same_tile_delays.append(delay)
            elif abs(dx) <= 1 and abs(dy) <= 1:
                neighbour_tile_delays.append(delay)
            else:
                delay_data.append((delay, estdelay, dx, dy, 0, 0, 0))
                relx = 20 + dst_xy[0] - src_xy[0]
                rely = 20 + dst_xy[1] - src_xy[1]
                if (0 <= relx <= 40) and (0 <= rely <= 40):
                    delay_map_sum[relx, rely] += delay
                    delay_map_sum2[relx, rely] += delay*delay
                    delay_map_count[relx, rely] += 1
        if dst_type == sel_dst_type:
            if src_type not in type_delta_data:
                type_delta_data[src_type] = list()
            type_delta_data[src_type].append((dx, dy, delay))
delay_data = np.array(delay_data)
all_delay_data = np.array(all_delay_data)

# Largest actual/estimated delay; used to scale the scatter plots below.
max_delay = np.max(delay_data[:, 0:2])

# Bug fix: this previously averaged neighbour_tile_delays, so the
# "same tile" line below reported the neighbour-tile mean alongside the
# same-tile std/N.
mean_same_tile_delays = np.mean(same_tile_delays)
mean_neighbour_tile_delays = np.mean(neighbour_tile_delays)

print("Avg same tile delay: %.2f (%.2f std, N=%d)" % \
        (mean_same_tile_delays, np.std(same_tile_delays), len(same_tile_delays)))
print("Avg neighbour tile delay: %.2f (%.2f std, N=%d)" % \
        (mean_neighbour_tile_delays, np.std(neighbour_tile_delays), len(neighbour_tile_delays)))
#%% Apply simple low-weight bluring to fill gaps
# NOTE(review): range(0) runs zero iterations, so the blur pass is
# currently disabled; raise the count to smooth empty map cells from
# their neighbours.
for i in range(0):
    neigh_sum = np.zeros((41, 41))
    neigh_sum2 = np.zeros((41, 41))
    neigh_count = np.zeros((41, 41))
    for x in range(41):
        for y in range(41):
            # Accumulate the 8-neighbourhood of each cell.
            for p in range(-1, 2):
                for q in range(-1, 2):
                    if p == 0 and q == 0:
                        continue
                    if 0 <= (x+p) <= 40:
                        if 0 <= (y+q) <= 40:
                            neigh_sum[x, y] += delay_map_sum[x+p, y+q]
                            neigh_sum2[x, y] += delay_map_sum2[x+p, y+q]
                            neigh_count[x, y] += delay_map_count[x+p, y+q]
    # Fold neighbours in with a small (0.1) weight.
    delay_map_sum += 0.1 * neigh_sum
    delay_map_sum2 += 0.1 * neigh_sum2
    delay_map_count += 0.1 * neigh_count

# Per-cell mean delay and standard deviation (NaN where count is zero).
delay_map = delay_map_sum / delay_map_count
delay_map_std = np.sqrt(delay_map_count*delay_map_sum2 - delay_map_sum**2) / delay_map_count

#%% Print src-dst-pair summary
print("Src-Dst-Type pair summary:")
for cnt, src, dst in sorted([(v, k[0], k[1]) for k, v in src_dst_pairs.items()]):
    # "*" marks the pair selected for modelling.
    print("%20s %20s %5d%s" % (src, dst, cnt, " *" if src == sel_src_type and dst == sel_dst_type else ""))
print()

#%% Plot estimate vs actual delay
plt.figure(figsize=(8, 3))
plt.title("Estimate vs Actual Delay")
plt.plot(all_delay_data[:, 0], all_delay_data[:, 1], ".")
plt.plot(delay_data[:, 0], delay_data[:, 1], ".")
plt.plot([0, max_delay], [0, max_delay], "k")  # identity reference line
plt.ylabel("Estimated Delay")
plt.xlabel("Actual Delay")
plt.grid()
plt.show()

#%% Plot delay heatmap and std dev heatmap
plt.figure(figsize=(9, 3))
plt.subplot(121)
plt.title("Actual Delay Map")
plt.imshow(delay_map)
plt.colorbar()
plt.subplot(122)
plt.title("Standard Deviation")
plt.imshow(delay_map_std)
plt.colorbar()
plt.show()
#%% Generate Model #0
def nonlinearPreprocessor0(dx, dy):
    """Feature vector for the simplest linear model: [1, |dx| + |dy|]."""
    adx, ady = abs(dx), abs(dy)
    return np.array([1.0, adx + ady])
# Assemble the least-squares system over all populated map cells.
A = np.zeros((41*41, len(nonlinearPreprocessor0(0, 0))))
b = np.zeros(41*41)
index = 0
for x in range(41):
    for y in range(41):
        if delay_map_count[x, y] > 0:
            A[index, :] = nonlinearPreprocessor0(x-20, y-20)
            b[index] = delay_map[x, y]
            index += 1

model0_params, _, _, _ = np.linalg.lstsq(A, b)
print("Model #0 parameters:", model0_params)

# Evaluate the fitted model over the whole 41x41 grid.
model0_map = np.zeros((41, 41))
for x in range(41):
    for y in range(41):
        v = np.dot(model0_params, nonlinearPreprocessor0(x-20, y-20))
        model0_map[x, y] = v

plt.figure(figsize=(9, 3))
plt.subplot(121)
plt.title("Model #0 Delay Map")
plt.imshow(model0_map)
plt.colorbar()
plt.subplot(122)
plt.title("Model #0 Error Map")
plt.imshow(model0_map - delay_map)
plt.colorbar()
plt.show()

# Store the per-sample model prediction in column 4.
for i in range(delay_data.shape[0]):
    dx = delay_data[i, 2]
    dy = delay_data[i, 3]
    delay_data[i, 4] = np.dot(model0_params, nonlinearPreprocessor0(dx, dy))

plt.figure(figsize=(8, 3))
plt.title("Model #0 vs Actual Delay")
plt.plot(delay_data[:, 0], delay_data[:, 4], ".")
plt.plot(delay_map.flat, model0_map.flat, ".")
plt.plot([0, max_delay], [0, max_delay], "k")
plt.ylabel("Model #0 Delay")
plt.xlabel("Actual Delay")
plt.grid()
plt.show()

print("In-sample RMS error: %f" % np.sqrt(np.nanmean((delay_map - model0_map)**2)))
print("Out-of-sample RMS error: %f" % np.sqrt(np.nanmean((delay_data[:, 0] - delay_data[:, 4])**2)))
print()
#%% Generate Model #1
def nonlinearPreprocessor1(dx, dy):
    """Feature vector [1, 1-norm, 2-norm, 3-norm] of (|dx|, |dy|)."""
    adx, ady = abs(dx), abs(dy)
    feats = [1.0, adx + ady]                    # 1-norm
    feats.append((adx**2 + ady**2) ** (1/2))    # 2-norm
    feats.append((adx**3 + ady**3) ** (1/3))    # 3-norm
    return np.array(feats)
# Same least-squares setup as model #0, with the richer feature set.
A = np.zeros((41*41, len(nonlinearPreprocessor1(0, 0))))
b = np.zeros(41*41)
index = 0
for x in range(41):
    for y in range(41):
        if delay_map_count[x, y] > 0:
            A[index, :] = nonlinearPreprocessor1(x-20, y-20)
            b[index] = delay_map[x, y]
            index += 1

model1_params, _, _, _ = np.linalg.lstsq(A, b)
print("Model #1 parameters:", model1_params)

# Evaluate the fitted model over the whole 41x41 grid.
model1_map = np.zeros((41, 41))
for x in range(41):
    for y in range(41):
        v = np.dot(model1_params, nonlinearPreprocessor1(x-20, y-20))
        model1_map[x, y] = v

plt.figure(figsize=(9, 3))
plt.subplot(121)
plt.title("Model #1 Delay Map")
plt.imshow(model1_map)
plt.colorbar()
plt.subplot(122)
plt.title("Model #1 Error Map")
plt.imshow(model1_map - delay_map)
plt.colorbar()
plt.show()

# Store the per-sample model prediction in column 5.
for i in range(delay_data.shape[0]):
    dx = delay_data[i, 2]
    dy = delay_data[i, 3]
    delay_data[i, 5] = np.dot(model1_params, nonlinearPreprocessor1(dx, dy))

plt.figure(figsize=(8, 3))
plt.title("Model #1 vs Actual Delay")
plt.plot(delay_data[:, 0], delay_data[:, 5], ".")
plt.plot(delay_map.flat, model1_map.flat, ".")
plt.plot([0, max_delay], [0, max_delay], "k")
plt.ylabel("Model #1 Delay")
plt.xlabel("Actual Delay")
plt.grid()
plt.show()

print("In-sample RMS error: %f" % np.sqrt(np.nanmean((delay_map - model1_map)**2)))
print("Out-of-sample RMS error: %f" % np.sqrt(np.nanmean((delay_data[:, 0] - delay_data[:, 5])**2)))
print()
#%% Generate Model #2
def nonlinearPreprocessor2(v):
    """Feature vector [1, v, sqrt(v)] for the scalar correction model."""
    features = (1, v, np.sqrt(v))
    return np.array(features)
A = np.zeros((41*41, len(nonlinearPreprocessor2(0))))
b = np.zeros(41*41)
index = 0
for x in range(41):
    for y in range(41):
        if delay_map_count[x, y] > 0:
            # Model #2 refines model #1's prediction rather than (dx, dy).
            A[index, :] = nonlinearPreprocessor2(model1_map[x, y])
            b[index] = delay_map[x, y]
            index += 1

model2_params, _, _, _ = np.linalg.lstsq(A, b)
print("Model #2 parameters:", model2_params)

model2_map = np.zeros((41, 41))
for x in range(41):
    for y in range(41):
        # Chain model #1 (distance features) into model #2 (correction).
        v = np.dot(model1_params, nonlinearPreprocessor1(x-20, y-20))
        v = np.dot(model2_params, nonlinearPreprocessor2(v))
        model2_map[x, y] = v

plt.figure(figsize=(9, 3))
plt.subplot(121)
plt.title("Model #2 Delay Map")
plt.imshow(model2_map)
plt.colorbar()
plt.subplot(122)
plt.title("Model #2 Error Map")
plt.imshow(model2_map - delay_map)
plt.colorbar()
plt.show()

# Store the per-sample model prediction (from column 5's model #1
# output) in column 6.
for i in range(delay_data.shape[0]):
    dx = delay_data[i, 2]
    dy = delay_data[i, 3]
    delay_data[i, 6] = np.dot(model2_params, nonlinearPreprocessor2(delay_data[i, 5]))

plt.figure(figsize=(8, 3))
plt.title("Model #2 vs Actual Delay")
plt.plot(delay_data[:, 0], delay_data[:, 6], ".")
plt.plot(delay_map.flat, model2_map.flat, ".")
plt.plot([0, max_delay], [0, max_delay], "k")
plt.ylabel("Model #2 Delay")
plt.xlabel("Actual Delay")
plt.grid()
plt.show()

print("In-sample RMS error: %f" % np.sqrt(np.nanmean((delay_map - model2_map)**2)))
print("Out-of-sample RMS error: %f" % np.sqrt(np.nanmean((delay_data[:, 0] - delay_data[:, 6])**2)))
print()
#%% Generate deltas for different source net types
type_deltas = dict()
print("Delay deltas for different src types:")
for src_type in sorted(type_delta_data.keys()):
    deltas = list()
    for dx, dy, delay in type_delta_data[src_type]:
        dx = abs(dx)
        dy = abs(dy)
        if dx > 1 or dy > 1:
            # Distant pairs: compare against the linear model-0 estimate.
            est = model0_params[0] + model0_params[1] * (dx + dy)
        else:
            # Close pairs: compare against the neighbour-tile average.
            est = mean_neighbour_tile_delays
        deltas.append(delay - est)
    print("%15s: %8.2f (std %6.2f)" % (\
            src_type, np.mean(deltas), np.std(deltas)))
    type_deltas[src_type] = np.mean(deltas)

#%% Print C defs of model parameters
# Model parameters are scaled by 128 for fixed-point use in the C code;
# span deltas are averaged over horizontal/vertical variants.
print("--snip--")
print("%d, %d, %d," % (mean_neighbour_tile_delays, 128 * model0_params[0], 128 * model0_params[1]))
print("%d, %d, %d, %d," % (128 * model1_params[0], 128 * model1_params[1], 128 * model1_params[2], 128 * model1_params[3]))
print("%d, %d, %d," % (128 * model2_params[0], 128 * model2_params[1], 128 * model2_params[2]))
print("%d, %d, %d, %d" % (type_deltas["LOCAL"], type_deltas["LUTFF_IN"], \
        (type_deltas["SP4_H"] + type_deltas["SP4_V"]) / 2,
        (type_deltas["SP12_H"] + type_deltas["SP12_V"]) / 2))
print("--snap--")
|
ryfeus/lambda-packs
|
refs/heads/master
|
Tensorflow_Pandas_Numpy/source3.6/tensorflow/contrib/linalg/python/ops/linear_operator_udvh_update.py
|
31
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Perturb a `LinearOperator` with a rank `K` update."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.framework.python.framework import tensor_util as contrib_tensor_util
from tensorflow.contrib.linalg.python.ops import linear_operator
from tensorflow.contrib.linalg.python.ops import linear_operator_diag
from tensorflow.contrib.linalg.python.ops import linear_operator_identity
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import linalg_ops
from tensorflow.python.ops import math_ops
__all__ = ["LinearOperatorUDVHUpdate",]
class LinearOperatorUDVHUpdate(linear_operator.LinearOperator):
"""Perturb a `LinearOperator` with a rank `K` update.
This operator acts like a [batch] matrix `A` with shape
`[B1,...,Bb, M, N]` for some `b >= 0`. The first `b` indices index a
batch member. For every batch index `(i1,...,ib)`, `A[i1,...,ib, : :]` is
an `M x N` matrix.
`LinearOperatorUDVHUpdate` represents `A = L + U D V^H`, where
```
L, is a LinearOperator representing [batch] M x N matrices
U, is a [batch] M x K matrix. Typically K << M.
D, is a [batch] K x K matrix.
V, is a [batch] N x K matrix. Typically K << N.
V^H is the Hermitian transpose (adjoint) of V.
```
If `M = N`, determinants and solves are done using the matrix determinant
lemma and Woodbury identities, and thus require L and D to be non-singular.
Solves and determinants will be attempted unless the "is_non_singular"
property of L and D is False.
In the event that L and D are positive-definite, and U = V, solves and
determinants can be done using a Cholesky factorization.
```python
# Create a 3 x 3 diagonal linear operator.
diag_operator = LinearOperatorDiag(
diag_update=[1., 2., 3.], is_non_singular=True, is_self_adjoint=True,
is_positive_definite=True)
# Perturb with a rank 2 perturbation
operator = LinearOperatorUDVHUpdate(
operator=diag_operator,
u=[[1., 2.], [-1., 3.], [0., 0.]],
diag_update=[11., 12.],
v=[[1., 2.], [-1., 3.], [10., 10.]])
operator.shape
==> [3, 3]
operator.log_abs_determinant()
==> scalar Tensor
x = ... Shape [3, 4] Tensor
operator.matmul(x)
==> Shape [3, 4] Tensor
```
### Shape compatibility
This operator acts on [batch] matrix with compatible shape.
`x` is a batch matrix with compatible shape for `matmul` and `solve` if
```
operator.shape = [B1,...,Bb] + [M, N], with b >= 0
x.shape = [B1,...,Bb] + [N, R], with R >= 0.
```
### Performance
Suppose `operator` is a `LinearOperatorUDVHUpdate` of shape `[M, N]`,
made from a rank `K` update of `base_operator` which performs `.matmul(x)` on
`x` having `x.shape = [N, R]` with `O(L_matmul*N*R)` complexity (and similarly
for `solve`, `determinant`. Then, if `x.shape = [N, R]`,
* `operator.matmul(x)` is `O(L_matmul*N*R + K*N*R)`
and if `M = N`,
* `operator.solve(x)` is `O(L_matmul*N*R + N*K*R + K^2*R + K^3)`
* `operator.determinant()` is `O(L_determinant + L_solve*N*K + K^2*N + K^3)`
If instead `operator` and `x` have shape `[B1,...,Bb, M, N]` and
`[B1,...,Bb, N, R]`, every operation increases in complexity by `B1*...*Bb`.
#### Matrix property hints
This `LinearOperator` is initialized with boolean flags of the form `is_X`,
for `X = non_singular`, `self_adjoint`, `positive_definite`,
`diag_update_positive` and `square`. These have the following meaning:
* If `is_X == True`, callers should expect the operator to have the
property `X`. This is a promise that should be fulfilled, but is *not* a
runtime assert. For example, finite floating point precision may result
in these promises being violated.
* If `is_X == False`, callers should expect the operator to not have `X`.
* If `is_X == None` (the default), callers should have no expectation either
way.
"""
def __init__(self,
base_operator,
u,
diag_update=None,
v=None,
is_diag_update_positive=None,
is_non_singular=None,
is_self_adjoint=None,
is_positive_definite=None,
is_square=None,
name="LinearOperatorUDVHUpdate"):
"""Initialize a `LinearOperatorUDVHUpdate`.
This creates a `LinearOperator` of the form `A = L + U D V^H`, with
`L` a `LinearOperator`, `U, V` both [batch] matrices, and `D` a [batch]
diagonal matrix.
If `L` is non-singular, solves and determinants are available.
Solves/determinants both involve a solve/determinant of a `K x K` system.
In the event that L and D are self-adjoint positive-definite, and U = V,
this can be done using a Cholesky factorization. The user should set the
`is_X` matrix property hints, which will trigger the appropriate code path.
Args:
base_operator: Shape `[B1,...,Bb, M, N]` real `float32` or `float64`
`LinearOperator`. This is `L` above.
u: Shape `[B1,...,Bb, M, K]` `Tensor` of same `dtype` as `base_operator`.
This is `U` above.
diag_update: Optional shape `[B1,...,Bb, K]` `Tensor` with same `dtype`
as `base_operator`. This is the diagonal of `D` above.
Defaults to `D` being the identity operator.
v: Optional `Tensor` of same `dtype` as `u` and shape `[B1,...,Bb, N, K]`
Defaults to `v = u`, in which case the perturbation is symmetric.
If `M != N`, then `v` must be set since the perturbation is not square.
is_diag_update_positive: Python `bool`.
If `True`, expect `diag_update > 0`.
is_non_singular: Expect that this operator is non-singular.
Default is `None`, unless `is_positive_definite` is auto-set to be
`True` (see below).
is_self_adjoint: Expect that this operator is equal to its hermitian
transpose. Default is `None`, unless `base_operator` is self-adjoint
and `v = None` (meaning `u=v`), in which case this defaults to `True`.
is_positive_definite: Expect that this operator is positive definite.
Default is `None`, unless `base_operator` is positive-definite
`v = None` (meaning `u=v`), and `is_diag_update_positive`, in which case
this defaults to `True`.
Note that we say an operator is positive definite when the quadratic
form `x^H A x` has positive real part for all nonzero `x`.
is_square: Expect that this operator acts like square [batch] matrices.
name: A name for this `LinearOperator`.
Raises:
ValueError: If `is_X` flags are set in an inconsistent way.
"""
# TODO(langmore) support complex types.
# Complex types are not allowed due to tf.cholesky() requiring float.
# If complex dtypes are allowed, we update the following
# 1. is_diag_update_positive should still imply that `diag > 0`, but we need
# to remind the user that this implies diag is real. This is needed
# because if diag has non-zero imaginary part, it will not be
# self-adjoint positive definite.
dtype = base_operator.dtype
allowed_dtypes = [dtypes.float32, dtypes.float64]
if dtype not in allowed_dtypes:
raise TypeError(
"Argument matrix must have dtype in %s. Found: %s"
% (allowed_dtypes, dtype))
if diag_update is None:
if is_diag_update_positive is False:
raise ValueError(
"Default diagonal is the identity, which is positive. However, "
"user set 'is_diag_update_positive' to False.")
is_diag_update_positive = True
# In this case, we can use a Cholesky decomposition to help us solve/det.
self._use_cholesky = (
base_operator.is_positive_definite and base_operator.is_self_adjoint
and is_diag_update_positive
and v is None)
# Possibly auto-set some characteristic flags from None to True.
# If the Flags were set (by the user) incorrectly to False, then raise.
if base_operator.is_self_adjoint and v is None and not dtype.is_complex:
if is_self_adjoint is False:
raise ValueError(
"A = L + UDU^H, with L self-adjoint and D real diagonal. Since"
" UDU^H is self-adjoint, this must be a self-adjoint operator.")
is_self_adjoint = True
# The condition for using a cholesky is sufficient for SPD, and
# we no weaker choice of these hints leads to SPD. Therefore,
# the following line reads "if hints indicate SPD..."
if self._use_cholesky:
if (
is_positive_definite is False
or is_self_adjoint is False
or is_non_singular is False):
raise ValueError(
"Arguments imply this is self-adjoint positive-definite operator.")
is_positive_definite = True
is_self_adjoint = True
values = base_operator.graph_parents + [u, diag_update, v]
with ops.name_scope(name, values=values):
# Create U and V.
self._u = ops.convert_to_tensor(u, name="u")
if v is None:
self._v = self._u
else:
self._v = ops.convert_to_tensor(v, name="v")
if diag_update is None:
self._diag_update = None
else:
self._diag_update = ops.convert_to_tensor(
diag_update, name="diag_update")
# Create base_operator L.
self._base_operator = base_operator
graph_parents = base_operator.graph_parents + [
self.u, self._diag_update, self.v]
graph_parents = [p for p in graph_parents if p is not None]
super(LinearOperatorUDVHUpdate, self).__init__(
dtype=self._base_operator.dtype,
graph_parents=graph_parents,
is_non_singular=is_non_singular,
is_self_adjoint=is_self_adjoint,
is_positive_definite=is_positive_definite,
is_square=is_square,
name=name)
# Create the diagonal operator D.
self._set_diag_operators(diag_update, is_diag_update_positive)
self._is_diag_update_positive = is_diag_update_positive
contrib_tensor_util.assert_same_float_dtype(
(base_operator, self.u, self.v, self._diag_update))
self._check_shapes()
# Pre-compute the so-called "capacitance" matrix
# C := D^{-1} + V^H L^{-1} U
self._capacitance = self._make_capacitance()
if self._use_cholesky:
self._chol_capacitance = linalg_ops.cholesky(self._capacitance)
def _check_shapes(self):
"""Static check that shapes are compatible."""
# Broadcast shape also checks that u and v are compatible.
uv_shape = array_ops.broadcast_static_shape(
self.u.get_shape(), self.v.get_shape())
batch_shape = array_ops.broadcast_static_shape(
self.base_operator.batch_shape, uv_shape[:-2])
self.base_operator.domain_dimension.assert_is_compatible_with(
uv_shape[-2])
if self._diag_update is not None:
uv_shape[-1].assert_is_compatible_with(self._diag_update.get_shape()[-1])
array_ops.broadcast_static_shape(
batch_shape, self._diag_update.get_shape()[:-1])
def _set_diag_operators(self, diag_update, is_diag_update_positive):
"""Set attributes self._diag_update and self._diag_operator."""
if diag_update is not None:
self._diag_operator = linear_operator_diag.LinearOperatorDiag(
self._diag_update, is_positive_definite=is_diag_update_positive)
self._diag_inv_operator = linear_operator_diag.LinearOperatorDiag(
1. / self._diag_update, is_positive_definite=is_diag_update_positive)
else:
if self.u.get_shape()[-1].value is not None:
r = self.u.get_shape()[-1].value
else:
r = array_ops.shape(self.u)[-1]
self._diag_operator = linear_operator_identity.LinearOperatorIdentity(
num_rows=r, dtype=self.dtype)
self._diag_inv_operator = self._diag_operator
@property
def u(self):
"""If this operator is `A = L + U D V^H`, this is the `U`."""
return self._u
@property
def v(self):
"""If this operator is `A = L + U D V^H`, this is the `V`."""
return self._v
@property
def is_diag_update_positive(self):
"""If this operator is `A = L + U D V^H`, this hints `D > 0` elementwise."""
return self._is_diag_update_positive
@property
def diag_update(self):
"""If this operator is `A = L + U D V^H`, this is the diagonal of `D`."""
return self._diag_update
@property
def diag_operator(self):
"""If this operator is `A = L + U D V^H`, this is `D`."""
return self._diag_operator
@property
def base_operator(self):
"""If this operator is `A = L + U D V^H`, this is the `L`."""
return self._base_operator
def _shape(self):
batch_shape = array_ops.broadcast_static_shape(
self.base_operator.batch_shape,
self.u.get_shape()[:-2])
return batch_shape.concatenate(self.base_operator.shape[-2:])
def _shape_tensor(self):
batch_shape = array_ops.broadcast_dynamic_shape(
self.base_operator.batch_shape_tensor(),
array_ops.shape(self.u)[:-2])
return array_ops.concat(
[batch_shape, self.base_operator.shape_tensor()[-2:]], axis=0)
def _matmul(self, x, adjoint=False, adjoint_arg=False):
u = self.u
v = self.v
l = self.base_operator
d = self.diag_operator
leading_term = l.matmul(x, adjoint=adjoint, adjoint_arg=adjoint_arg)
if adjoint:
uh_x = math_ops.matmul(u, x, adjoint_a=True, adjoint_b=adjoint_arg)
d_uh_x = d.matmul(uh_x, adjoint=adjoint)
v_d_uh_x = math_ops.matmul(v, d_uh_x)
return leading_term + v_d_uh_x
else:
vh_x = math_ops.matmul(v, x, adjoint_a=True, adjoint_b=adjoint_arg)
d_vh_x = d.matmul(vh_x, adjoint=adjoint)
u_d_vh_x = math_ops.matmul(u, d_vh_x)
return leading_term + u_d_vh_x
def _determinant(self):
if self.is_positive_definite:
return math_ops.exp(self.log_abs_determinant())
# The matrix determinant lemma gives
# https://en.wikipedia.org/wiki/Matrix_determinant_lemma
# det(L + UDV^H) = det(D^{-1} + V^H L^{-1} U) det(D) det(L)
# = det(C) det(D) det(L)
# where C is sometimes known as the capacitance matrix,
# C := D^{-1} + V^H L^{-1} U
det_c = linalg_ops.matrix_determinant(self._capacitance)
det_d = self.diag_operator.determinant()
det_l = self.base_operator.determinant()
return det_c * det_d * det_l
def _log_abs_determinant(self):
# Recall
# det(L + UDV^H) = det(D^{-1} + V^H L^{-1} U) det(D) det(L)
# = det(C) det(D) det(L)
log_abs_det_d = self.diag_operator.log_abs_determinant()
log_abs_det_l = self.base_operator.log_abs_determinant()
if self._use_cholesky:
chol_cap_diag = array_ops.matrix_diag_part(self._chol_capacitance)
log_abs_det_c = 2 * math_ops.reduce_sum(
math_ops.log(chol_cap_diag), reduction_indices=[-1])
else:
det_c = linalg_ops.matrix_determinant(self._capacitance)
log_abs_det_c = math_ops.log(math_ops.abs(det_c))
return log_abs_det_c + log_abs_det_d + log_abs_det_l
def _solve(self, rhs, adjoint=False, adjoint_arg=False):
    # Solve (L + U D V^H) x = rhs via the Woodbury identity, using only
    # solves against L and the (small) capacitance matrix C.
    if self.base_operator.is_non_singular is False:
        raise ValueError(
            "Solve not implemented unless this is a perturbation of a "
            "non-singular LinearOperator.")
    # The Woodbury formula gives:
    # https://en.wikipedia.org/wiki/Woodbury_matrix_identity
    #   (L + UDV^H)^{-1}
    #   = L^{-1} - L^{-1} U (D^{-1} + V^H L^{-1} U)^{-1} V^H L^{-1}
    #   = L^{-1} - L^{-1} U C^{-1} V^H L^{-1}
    # where C is the capacitance matrix, C := D^{-1} + V^H L^{-1} U
    # Note also that, with ^{-H} being the inverse of the adjoint,
    #   (L + UDV^H)^{-H}
    #   = L^{-H} - L^{-H} V C^{-H} U^H L^{-H}
    l = self.base_operator
    if adjoint:
        # Adjoint solve swaps the roles of u and v (see formula above).
        v = self.u
        u = self.v
    else:
        v = self.v
        u = self.u
    # L^{-1} rhs
    linv_rhs = l.solve(rhs, adjoint=adjoint, adjoint_arg=adjoint_arg)
    # V^H L^{-1} rhs
    vh_linv_rhs = math_ops.matmul(v, linv_rhs, adjoint_a=True)
    # C^{-1} V^H L^{-1} rhs
    if self._use_cholesky:
        # NOTE(review): cholesky_solve has no adjoint arg — presumably C is
        # self-adjoint whenever _use_cholesky holds, so adjoint is a no-op
        # here; confirm against the operator's __init__ invariants.
        capinv_vh_linv_rhs = linalg_ops.cholesky_solve(
            self._chol_capacitance, vh_linv_rhs)
    else:
        capinv_vh_linv_rhs = linalg_ops.matrix_solve(
            self._capacitance, vh_linv_rhs, adjoint=adjoint)
    # U C^{-1} V^H L^{-1} rhs
    u_capinv_vh_linv_rhs = math_ops.matmul(u, capinv_vh_linv_rhs)
    # L^{-1} U C^{-1} V^H L^{-1} rhs
    linv_u_capinv_vh_linv_rhs = l.solve(u_capinv_vh_linv_rhs, adjoint=adjoint)
    # L^{-1} - L^{-1} U C^{-1} V^H L^{-1}
    return linv_rhs - linv_u_capinv_vh_linv_rhs
def _make_capacitance(self):
    # C := D^{-1} + V^H L^{-1} U
    # which is sometimes known as the "capacitance" matrix.
    # L^{-1} U
    linv_u = self.base_operator.solve(self.u)
    # V^H L^{-1} U
    vh_linv_u = math_ops.matmul(self.v, linv_u, adjoint_a=True)
    # D^{-1} + V^H L^{-1} U
    capacitance = self._diag_inv_operator.add_to_tensor(vh_linv_u)
    return capacitance
|
mqtlam/dcgan-tfslim
|
refs/heads/master
|
image_ops.py
|
1
|
from math import sqrt, ceil
from PIL import Image
import numpy as np
def center_crop(image, crop_size=None):
    """Crop a centered square out of a PIL image.

    Args:
        image: PIL image
        crop_size: optional side length of the square crop; when omitted,
            the largest square that fits in the center of the image is used

    Returns:
        cropped PIL image
    """
    width, height = image.size
    # Default to the biggest centered square.
    if crop_size is None:
        crop_size = min(width, height)
    # Top-left corner of the crop box, rounded to whole pixels.
    left = int(round((width - crop_size) / 2.))
    top = int(round((height - crop_size) / 2.))
    return image.crop((left, top, left + crop_size, top + crop_size))
def get_image(image_path, image_size, is_crop=True):
    """Load image from file and crop/resize as necessary.

    Args:
        image_path: path to image
        image_size: width/height to resize image
        is_crop: center crop if True [True]

    Returns:
        numpy float32 array of shape (image_size, image_size, 3) with
        values normalized to [-1, 1]
    """
    # load image and force 3-channel RGB
    img = Image.open(image_path)
    if img.mode != 'RGB':
        img = img.convert('RGB')
    # center crop to the largest centered square so the resize below
    # does not distort the aspect ratio
    if is_crop:
        img_center_crop = center_crop(img)
    else:
        img_center_crop = img
    # resize
    # NOTE(review): Image.ANTIALIAS was removed in Pillow 10; newer Pillow
    # versions need Image.LANCZOS instead.
    img_resized = img_center_crop.resize((image_size, image_size), Image.ANTIALIAS)
    # convert to numpy and normalize [0, 255] -> [-1, 1]
    img_array = np.asarray(img_resized).astype(np.float32)/127.5 - 1.
    # np.asarray has already materialized the pixel data, so closing the
    # file handle here is safe
    img.close()
    return img_array
def save_image(image, image_path):
    """Save an image.

    Postconditions:
        saves to image file

    Args:
        image: one image, values in [-1, 1]
        image_path: path to save image
    """
    # Map [-1, 1] back into [0, 1].
    rescaled = (image + 1.) / 2.
    # Scale to 8-bit and write out via PIL.
    Image.fromarray(np.uint8(rescaled * 255)).save(image_path)
def save_images(images, image_path):
    """Save a batch of images as a single tiled grid image.

    Images are laid out row-major in a near-square grid of
    ceil(sqrt(N)) columns; unused grid cells remain black.

    Postconditions:
        saves to image file

    Args:
        images: list or array of images, shape (N, h, w, 3), values in [-1, 1]
        image_path: path to save image
    """
    # Accept a plain Python list (as documented): the arithmetic and
    # .shape access below require an ndarray.
    images = np.asarray(images)
    # transform back from [-1,1] to [0,1]
    images = (images + 1.) / 2.
    # determine tiled image shape
    num_cols = int(ceil(sqrt(len(images))))
    num_rows = int(ceil(1. * len(images) / num_cols))
    # create new tiled image
    h, w = images.shape[1], images.shape[2]
    img = np.zeros((h * num_rows, w * num_cols, 3))
    for i, image in enumerate(images):
        c = i % num_cols
        r = i // num_cols
        img[r*h:r*h+h, c*w:c*w+w, :] = image
    # save image
    im = Image.fromarray(np.uint8(img * 255))
    im.save(image_path)
|
codecollision/DropboxToFlickr
|
refs/heads/master
|
django/utils/dates.py
|
488
|
"Commonly-used date structures"
from django.utils.translation import ugettext_lazy as _, pgettext_lazy
# Full weekday names, keyed by datetime.weekday() convention (0 = Monday).
WEEKDAYS = {
    0:_('Monday'), 1:_('Tuesday'), 2:_('Wednesday'), 3:_('Thursday'), 4:_('Friday'),
    5:_('Saturday'), 6:_('Sunday')
}
# Three-letter weekday abbreviations, same keys as WEEKDAYS.
WEEKDAYS_ABBR = {
    0:_('Mon'), 1:_('Tue'), 2:_('Wed'), 3:_('Thu'), 4:_('Fri'),
    5:_('Sat'), 6:_('Sun')
}
# Reverse lookup: lowercase English weekday name -> weekday() index.
WEEKDAYS_REV = {
    'monday':0, 'tuesday':1, 'wednesday':2, 'thursday':3, 'friday':4,
    'saturday':5, 'sunday':6
}
# Full month names, keyed by month number (1 = January).
MONTHS = {
    1:_('January'), 2:_('February'), 3:_('March'), 4:_('April'), 5:_('May'), 6:_('June'),
    7:_('July'), 8:_('August'), 9:_('September'), 10:_('October'), 11:_('November'),
    12:_('December')
}
# Lowercase three-letter month abbreviations, keyed by month number.
MONTHS_3 = {
    1:_('jan'), 2:_('feb'), 3:_('mar'), 4:_('apr'), 5:_('may'), 6:_('jun'),
    7:_('jul'), 8:_('aug'), 9:_('sep'), 10:_('oct'), 11:_('nov'), 12:_('dec')
}
# Reverse lookup: lowercase three-letter abbreviation -> month number.
MONTHS_3_REV = {
    'jan':1, 'feb':2, 'mar':3, 'apr':4, 'may':5, 'jun':6, 'jul':7, 'aug':8,
    'sep':9, 'oct':10, 'nov':11, 'dec':12
}
MONTHS_AP = { # month names in Associated Press style
    1: pgettext_lazy('abbrev. month', 'Jan.'),
    2: pgettext_lazy('abbrev. month', 'Feb.'),
    3: pgettext_lazy('abbrev. month', 'March'),
    4: pgettext_lazy('abbrev. month', 'April'),
    5: pgettext_lazy('abbrev. month', 'May'),
    6: pgettext_lazy('abbrev. month', 'June'),
    7: pgettext_lazy('abbrev. month', 'July'),
    8: pgettext_lazy('abbrev. month', 'Aug.'),
    9: pgettext_lazy('abbrev. month', 'Sept.'),
    10: pgettext_lazy('abbrev. month', 'Oct.'),
    11: pgettext_lazy('abbrev. month', 'Nov.'),
    12: pgettext_lazy('abbrev. month', 'Dec.')
}
MONTHS_ALT = { # required for long date representation by some locales
    1: pgettext_lazy('alt. month', 'January'),
    2: pgettext_lazy('alt. month', 'February'),
    3: pgettext_lazy('alt. month', 'March'),
    4: pgettext_lazy('alt. month', 'April'),
    5: pgettext_lazy('alt. month', 'May'),
    6: pgettext_lazy('alt. month', 'June'),
    7: pgettext_lazy('alt. month', 'July'),
    8: pgettext_lazy('alt. month', 'August'),
    9: pgettext_lazy('alt. month', 'September'),
    10: pgettext_lazy('alt. month', 'October'),
    11: pgettext_lazy('alt. month', 'November'),
    12: pgettext_lazy('alt. month', 'December')
}
|
ktnyt/chainer
|
refs/heads/master
|
chainer/distributions/gumbel.py
|
2
|
import numpy
import chainer
from chainer.backends import cuda
from chainer import distribution
from chainer.functions.math import exponential
from chainer.functions.math import lgamma
EULER = 0.57721566490153286060651209008240243104215933593992
class Gumbel(distribution.Distribution):
    """Gumbel Distribution.
    The probability density function of the distribution is expressed as
    .. math::
        f(x) = \\frac{1}{\\eta} \
            \\exp\\left\\{ - \\frac{x - \\mu}{\\eta} \\right\\} \
            \\exp\\left[-\\exp\\left\\{-\\frac{x - \\mu}{\\eta} \
            \\right\\}\\right],
    Args:
        loc(:class:`~chainer.Variable` or :ref:`ndarray`): Parameter of
            distribution :math:`\\mu`.
        scale(:class:`~chainer.Variable` or :ref:`ndarray`): Parameter of
            distribution :math:`\\eta`.
    """

    def __init__(self, loc, scale):
        super(Gumbel, self).__init__()
        # Wrap the parameters as chainer Variables so they take part in
        # the autograd graph.
        self.__loc = chainer.as_variable(loc)
        self.__scale = chainer.as_variable(scale)

    @property
    def loc(self):
        # Location parameter mu (read-only).
        return self.__loc

    @property
    def scale(self):
        # Scale parameter eta (read-only).
        return self.__scale

    @property
    def batch_shape(self):
        return self.loc.shape

    @property
    def entropy(self):
        # Differential entropy: log(eta) + gamma + 1, with gamma the
        # Euler-Mascheroni constant.
        return exponential.log(self.scale) + (EULER + 1)

    @property
    def event_shape(self):
        # Scalar (univariate) distribution.
        return ()

    @property
    def _is_gpu(self):
        return isinstance(self.loc.data, cuda.ndarray)

    def log_prob(self, x):
        # log f(x) = -log(eta) - y - exp(-y), with y = (x - mu) / eta.
        y = (x - self.loc) / self.scale
        return - exponential.log(self.scale) - y - exponential.exp(-y)

    @property
    def mean(self):
        # E[X] = mu + gamma * eta.
        return self.loc + EULER * self.scale

    def sample_n(self, n):
        # Draw (n,) + batch_shape standard Gumbel noise on the same device
        # as loc, then shift/scale: x = eta * eps + mu.
        xp = cuda.get_array_module(self.loc)
        if xp is cuda.cupy:
            eps = xp.random.gumbel(
                size=(n,)+self.batch_shape, dtype=self.loc.dtype)
        else:
            # NumPy's gumbel() has no dtype argument; cast afterwards.
            eps = xp.random.gumbel(
                size=(n,)+self.batch_shape).astype(self.loc.dtype)
        noise = self.scale * eps + self.loc
        return noise

    @property
    def support(self):
        return 'real'

    @property
    def variance(self):
        # Var[X] = pi^2 * eta^2 / 6.
        return numpy.pi ** 2 * self.scale ** 2 / 6
@distribution.register_kl(Gumbel, Gumbel)
def _kl_gumbel_gumbel(dist1, dist2):
    """KL(dist1 || dist2) between two Gumbel distributions.

    Closed form (with mu_i = loc, eta_i = scale):
        log(eta2/eta1) + gamma * (eta1/eta2 - 1)
        + exp((mu2 - mu1)/eta2) * Gamma(eta1/eta2 + 1)
        - 1 + (mu1 - mu2)/eta2
    The exp * Gamma product is computed as exp(... + lgamma(...)) for
    numerical stability.
    """
    scale_1d2 = dist1.scale / dist2.scale
    return exponential.log(dist2.scale) - exponential.log(dist1.scale) \
        + EULER * (scale_1d2 - 1.) \
        + exponential.exp((dist2.loc - dist1.loc) / dist2.scale
                          + lgamma.lgamma(scale_1d2 + 1.)) \
        - 1 + (dist1.loc - dist2.loc) / dist2.scale
|
sudheesh001/oh-mainline
|
refs/heads/master
|
mysite/profile/migrations/0022_asheesh_add_person_tag_link.py
|
17
|
# This file is part of OpenHatch.
# Copyright (C) 2009 OpenHatch, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from south.db import db
from django.db import models
from mysite.profile.models import *
class Migration:
    """South migration 0022: add the Link_Person_Tag person<->tag join table.

    Also refreshes the auto-generated datetime defaults on several
    time_record_was_created columns (a South artifact: the default is
    frozen at the moment the migration was generated).
    """

    def forwards(self, orm):
        # Adding model 'Link_Person_Tag'
        db.create_table('profile_link_person_tag', (
            ('id', models.AutoField(primary_key=True)),
            ('tag', models.ForeignKey(orm.Tag)),
            # NOTE(review): the column is named 'project' but references
            # Person — looks copy/pasted from Link_Project_Tag; kept as-is
            # since the frozen model below uses the same name.
            ('project', models.ForeignKey(orm.Person)),
            ('time_record_was_created', models.DateTimeField(default=datetime.datetime(2009, 6, 29, 0, 21, 25, 66622))),
            ('source', models.CharField(max_length=200)),
        ))
        db.send_create_signal('profile', ['Link_Person_Tag'])
        # Changing field 'Person.time_record_was_created'
        db.alter_column('profile_person', 'time_record_was_created', models.DateTimeField(default=datetime.datetime(2009, 6, 29, 0, 21, 24, 174516)))
        # Changing field 'Link_ProjectExp_Tag.time_record_was_created'
        db.alter_column('profile_link_projectexp_tag', 'time_record_was_created', models.DateTimeField(default=datetime.datetime(2009, 6, 29, 0, 21, 24, 576063)))
        # Changing field 'Link_Project_Tag.time_record_was_created'
        db.alter_column('profile_link_project_tag', 'time_record_was_created', models.DateTimeField(default=datetime.datetime(2009, 6, 29, 0, 21, 24, 667481)))

    def backwards(self, orm):
        # Deleting model 'Link_Person_Tag'
        db.delete_table('profile_link_person_tag')
        # Changing field 'Person.time_record_was_created'
        db.alter_column('profile_person', 'time_record_was_created', models.DateTimeField(default=datetime.datetime(2009, 6, 28, 14, 36, 28, 64541)))
        # Changing field 'Link_ProjectExp_Tag.time_record_was_created'
        db.alter_column('profile_link_projectexp_tag', 'time_record_was_created', models.DateTimeField(default=datetime.datetime(2009, 6, 28, 14, 36, 27, 969508)))
        # Changing field 'Link_Project_Tag.time_record_was_created'
        db.alter_column('profile_link_project_tag', 'time_record_was_created', models.DateTimeField(default=datetime.datetime(2009, 6, 28, 14, 36, 28, 410014)))

    # Frozen ORM snapshot South uses to materialize the `orm` argument above.
    models = {
        'profile.person': {
            'id': ('models.AutoField', [], {'primary_key': 'True'}),
            'interested_in_working_on': ('models.CharField', [], {'default': "''", 'max_length': '1024'}),
            'last_polled': ('models.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'last_touched': ('models.DateTimeField', [], {'null': 'True'}),
            'name': ('models.CharField', [], {'max_length': '200'}),
            'password_hash_md5': ('models.CharField', [], {'max_length': '200'}),
            'poll_on_next_web_view': ('models.BooleanField', [], {'default': 'True'}),
            'time_record_was_created': ('models.DateTimeField', [], {'default': 'datetime.datetime(2009, 6, 29, 0, 21, 25, 501145)'}),
            'username': ('models.CharField', [], {'max_length': '200'})
        },
        'profile.link_person_tag': {
            'id': ('models.AutoField', [], {'primary_key': 'True'}),
            'project': ('models.ForeignKey', ["orm['profile.Person']"], {}),
            'source': ('models.CharField', [], {'max_length': '200'}),
            'tag': ('models.ForeignKey', ["orm['profile.Tag']"], {}),
            'time_record_was_created': ('models.DateTimeField', [], {'default': 'datetime.datetime(2009, 6, 29, 0, 21, 25, 346041)'})
        },
        'profile.tag': {
            'id': ('models.AutoField', [], {'primary_key': 'True'}),
            'tag_type': ('models.ForeignKey', ["orm['profile.TagType']"], {}),
            'text': ('models.CharField', [], {'max_length': '50'})
        },
        'profile.link_projectexp_tag': {
            'Meta': {'unique_together': "[('tag','project_exp','source'),]"},
            'favorite': ('models.BooleanField', [], {'default': 'False'}),
            'id': ('models.AutoField', [], {'primary_key': 'True'}),
            'project_exp': ('models.ForeignKey', ["orm['profile.ProjectExp']"], {}),
            'source': ('models.CharField', [], {'max_length': '200'}),
            'tag': ('models.ForeignKey', ["orm['profile.Tag']"], {}),
            'time_record_was_created': ('models.DateTimeField', [], {'default': 'datetime.datetime(2009, 6, 29, 0, 21, 25, 125823)'})
        },
        'profile.sourceforgeperson': {
            'id': ('models.AutoField', [], {'primary_key': 'True'}),
            'username': ('models.CharField', [], {'max_length': '200'})
        },
        'profile.link_project_tag': {
            'id': ('models.AutoField', [], {'primary_key': 'True'}),
            'project': ('models.ForeignKey', ["orm['search.Project']"], {}),
            'source': ('models.CharField', [], {'max_length': '200'}),
            'tag': ('models.ForeignKey', ["orm['profile.Tag']"], {}),
            'time_record_was_created': ('models.DateTimeField', [], {'default': 'datetime.datetime(2009, 6, 29, 0, 21, 25, 843354)'})
        },
        'profile.sourceforgeproject': {
            'id': ('models.AutoField', [], {'primary_key': 'True'}),
            'unixname': ('models.CharField', [], {'max_length': '200'})
        },
        'search.project': {
            '_stub': True,
            'id': ('models.AutoField', [], {'primary_key': 'True'})
        },
        'profile.link_sf_proj_dude_fm': {
            'Meta': {'unique_together': "[('person','project'),]"},
            'date_collected': ('models.DateTimeField', [], {}),
            'id': ('models.AutoField', [], {'primary_key': 'True'}),
            'is_admin': ('models.BooleanField', [], {'default': 'False'}),
            'person': ('models.ForeignKey', ["orm['profile.SourceForgePerson']"], {}),
            'position': ('models.CharField', [], {'max_length': '200'}),
            'project': ('models.ForeignKey', ["orm['profile.SourceForgeProject']"], {})
        },
        'profile.tagtype': {
            'id': ('models.AutoField', [], {'primary_key': 'True'}),
            'name': ('models.CharField', [], {'max_length': '100'}),
            'prefix': ('models.CharField', [], {'max_length': '20'})
        },
        'profile.projectexp': {
            'description': ('models.TextField', [], {}),
            'favorite': ('models.BooleanField', [], {'default': '0'}),
            'id': ('models.AutoField', [], {'primary_key': 'True'}),
            'last_touched': ('models.DateTimeField', [], {'null': 'True'}),
            'man_months': ('models.PositiveIntegerField', [], {'null': 'True'}),
            'person': ('models.ForeignKey', ["orm['profile.Person']"], {}),
            'person_role': ('models.CharField', [], {'max_length': '200'}),
            'primary_language': ('models.CharField', [], {'max_length': '200', 'null': 'True'}),
            'project': ('models.ForeignKey', ["orm['search.Project']"], {}),
            'source': ('models.CharField', [], {'max_length': '100', 'null': 'True'}),
            'time_record_was_created': ('models.DateTimeField', [], {'null': 'True'}),
            'url': ('models.URLField', [], {'max_length': '200', 'null': 'True'})
        }
    }
    complete_apps = ['profile']
|
dursk/django
|
refs/heads/master
|
tests/null_fk_ordering/models.py
|
210
|
"""
Regression tests for proper working of ForeignKey(null=True). Tests these bugs:
* #7512: including a nullable foreign key reference in Meta ordering has
  unexpected results
"""
from __future__ import unicode_literals
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
# The first two models represent a very simple null FK ordering case.
class Author(models.Model):
    # FK target for Article; Article's Meta ordering traverses author__name,
    # which is the simple nullable-FK ordering case under test.
    name = models.CharField(max_length=150)
@python_2_unicode_compatible
class Article(models.Model):
    # Article with an optional author (FK nulled on author deletion).
    title = models.CharField(max_length=150)
    author = models.ForeignKey(Author, models.SET_NULL, null=True)

    def __str__(self):
        return 'Article titled: %s' % (self.title, )

    class Meta:
        # Ordering through a nullable FK — the regression under test (#7512).
        ordering = ['author__name', ]
# These following 4 models represent a far more complex ordering case.
class SystemInfo(models.Model):
    # Root of the Comment -> Post -> Forum -> SystemInfo chain used by the
    # complex ordering case below.
    system_name = models.CharField(max_length=32)
class Forum(models.Model):
    # Non-nullable link to SystemInfo (cascade delete).
    system_info = models.ForeignKey(SystemInfo, models.CASCADE)
    forum_name = models.CharField(max_length=32)
@python_2_unicode_compatible
class Post(models.Model):
    # Post with a nullable forum link (SET_NULL on forum deletion).
    forum = models.ForeignKey(Forum, models.SET_NULL, null=True)
    title = models.CharField(max_length=32)

    def __str__(self):
        return self.title
@python_2_unicode_compatible
class Comment(models.Model):
    # Comment with a nullable post link.
    post = models.ForeignKey(Post, models.SET_NULL, null=True)
    comment_text = models.CharField(max_length=250)

    class Meta:
        # Deep ordering through nullable FKs (post and forum can be NULL)
        # — the "far more complex" null-FK ordering case.
        ordering = ['post__forum__system_info__system_name', 'comment_text']

    def __str__(self):
        return self.comment_text
|
jollaman999/msm-mako-lollipop-mr1
|
refs/heads/f2fs
|
Documentation/target/tcm_mod_builder.py
|
4981
|
#!/usr/bin/python
# The TCM v4 multi-protocol fabric module generation script for drivers/target/$NEW_MOD
#
# Copyright (c) 2010 Rising Tide Systems
# Copyright (c) 2010 Linux-iSCSI.org
#
# Author: nab@kernel.org
#
import os, sys
import subprocess as sub
import string
import re
import optparse
tcm_dir = ""
fabric_ops = []
fabric_mod_dir = ""
fabric_mod_port = ""
fabric_mod_init_port = ""
def tcm_mod_err(msg):
    # Print the error message and abort the generator with a non-zero
    # exit code (Python 2 script).
    print msg
    sys.exit(1)
def tcm_mod_create_module_subdir(fabric_mod_dir_var):
if os.path.isdir(fabric_mod_dir_var) == True:
return 1
print "Creating fabric_mod_dir: " + fabric_mod_dir_var
ret = os.mkdir(fabric_mod_dir_var)
if ret:
tcm_mod_err("Unable to mkdir " + fabric_mod_dir_var)
return
def tcm_mod_build_FC_include(fabric_mod_dir_var, fabric_mod_name):
    # Generate <fabric_mod_name>_base.h with FC (Fibre Channel) structs:
    # nacl (initiator Nport), tpg, and lport. Also records "lport"/"nport"
    # in the fabric_mod_port/fabric_mod_init_port globals consumed by the
    # later code-generation stages.
    global fabric_mod_port
    global fabric_mod_init_port
    buf = ""
    f = fabric_mod_dir_var + "/" + fabric_mod_name + "_base.h"
    print "Writing file: " + f
    p = open(f, 'w');
    # NOTE(review): in Python 2 open() raises IOError on failure rather
    # than returning a falsy value, so this branch can never fire.
    if not p:
        tcm_mod_err("Unable to open file: " + f)
    buf = "#define " + fabric_mod_name.upper() + "_VERSION \"v0.1\"\n"
    buf += "#define " + fabric_mod_name.upper() + "_NAMELEN 32\n"
    buf += "\n"
    buf += "struct " + fabric_mod_name + "_nacl {\n"
    buf += " /* Binary World Wide unique Port Name for FC Initiator Nport */\n"
    buf += " u64 nport_wwpn;\n"
    buf += " /* ASCII formatted WWPN for FC Initiator Nport */\n"
    buf += " char nport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
    buf += " /* Returned by " + fabric_mod_name + "_make_nodeacl() */\n"
    buf += " struct se_node_acl se_node_acl;\n"
    buf += "};\n"
    buf += "\n"
    buf += "struct " + fabric_mod_name + "_tpg {\n"
    buf += " /* FC lport target portal group tag for TCM */\n"
    buf += " u16 lport_tpgt;\n"
    buf += " /* Pointer back to " + fabric_mod_name + "_lport */\n"
    buf += " struct " + fabric_mod_name + "_lport *lport;\n"
    buf += " /* Returned by " + fabric_mod_name + "_make_tpg() */\n"
    buf += " struct se_portal_group se_tpg;\n"
    buf += "};\n"
    buf += "\n"
    buf += "struct " + fabric_mod_name + "_lport {\n"
    buf += " /* SCSI protocol the lport is providing */\n"
    buf += " u8 lport_proto_id;\n"
    buf += " /* Binary World Wide unique Port Name for FC Target Lport */\n"
    buf += " u64 lport_wwpn;\n"
    buf += " /* ASCII formatted WWPN for FC Target Lport */\n"
    buf += " char lport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
    buf += " /* Returned by " + fabric_mod_name + "_make_lport() */\n"
    buf += " struct se_wwn lport_wwn;\n"
    buf += "};\n"
    ret = p.write(buf)
    # NOTE(review): file.write() returns None in Python 2, so this error
    # path is dead; a failed write raises IOError instead.
    if ret:
        tcm_mod_err("Unable to write f: " + f)
    p.close()
    fabric_mod_port = "lport"
    fabric_mod_init_port = "nport"
    return
def tcm_mod_build_SAS_include(fabric_mod_dir_var, fabric_mod_name):
    # Generate <fabric_mod_name>_base.h with SAS structs: nacl (initiator
    # port), tpg, and tport. Also records "tport"/"iport" in the
    # fabric_mod_port/fabric_mod_init_port globals used by later stages.
    global fabric_mod_port
    global fabric_mod_init_port
    buf = ""
    f = fabric_mod_dir_var + "/" + fabric_mod_name + "_base.h"
    print "Writing file: " + f
    p = open(f, 'w');
    # NOTE(review): open() raises IOError on failure in Python 2; this
    # branch can never fire.
    if not p:
        tcm_mod_err("Unable to open file: " + f)
    buf = "#define " + fabric_mod_name.upper() + "_VERSION \"v0.1\"\n"
    buf += "#define " + fabric_mod_name.upper() + "_NAMELEN 32\n"
    buf += "\n"
    buf += "struct " + fabric_mod_name + "_nacl {\n"
    buf += " /* Binary World Wide unique Port Name for SAS Initiator port */\n"
    buf += " u64 iport_wwpn;\n"
    buf += " /* ASCII formatted WWPN for Sas Initiator port */\n"
    buf += " char iport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
    buf += " /* Returned by " + fabric_mod_name + "_make_nodeacl() */\n"
    buf += " struct se_node_acl se_node_acl;\n"
    buf += "};\n\n"
    buf += "struct " + fabric_mod_name + "_tpg {\n"
    buf += " /* SAS port target portal group tag for TCM */\n"
    buf += " u16 tport_tpgt;\n"
    buf += " /* Pointer back to " + fabric_mod_name + "_tport */\n"
    buf += " struct " + fabric_mod_name + "_tport *tport;\n"
    buf += " /* Returned by " + fabric_mod_name + "_make_tpg() */\n"
    buf += " struct se_portal_group se_tpg;\n"
    buf += "};\n\n"
    buf += "struct " + fabric_mod_name + "_tport {\n"
    buf += " /* SCSI protocol the tport is providing */\n"
    buf += " u8 tport_proto_id;\n"
    buf += " /* Binary World Wide unique Port Name for SAS Target port */\n"
    buf += " u64 tport_wwpn;\n"
    buf += " /* ASCII formatted WWPN for SAS Target port */\n"
    buf += " char tport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
    buf += " /* Returned by " + fabric_mod_name + "_make_tport() */\n"
    buf += " struct se_wwn tport_wwn;\n"
    buf += "};\n"
    ret = p.write(buf)
    # NOTE(review): file.write() returns None in Python 2 — dead error path.
    if ret:
        tcm_mod_err("Unable to write f: " + f)
    p.close()
    fabric_mod_port = "tport"
    fabric_mod_init_port = "iport"
    return
def tcm_mod_build_iSCSI_include(fabric_mod_dir_var, fabric_mod_name):
    # Generate <fabric_mod_name>_base.h with iSCSI structs: nacl (by
    # InitiatorName, no WWPN), tpg, and tport. Also records
    # "tport"/"iport" in the fabric_mod_port/fabric_mod_init_port globals.
    global fabric_mod_port
    global fabric_mod_init_port
    buf = ""
    f = fabric_mod_dir_var + "/" + fabric_mod_name + "_base.h"
    print "Writing file: " + f
    p = open(f, 'w');
    # NOTE(review): open() raises IOError on failure in Python 2; this
    # branch can never fire.
    if not p:
        tcm_mod_err("Unable to open file: " + f)
    buf = "#define " + fabric_mod_name.upper() + "_VERSION \"v0.1\"\n"
    buf += "#define " + fabric_mod_name.upper() + "_NAMELEN 32\n"
    buf += "\n"
    buf += "struct " + fabric_mod_name + "_nacl {\n"
    buf += " /* ASCII formatted InitiatorName */\n"
    buf += " char iport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
    buf += " /* Returned by " + fabric_mod_name + "_make_nodeacl() */\n"
    buf += " struct se_node_acl se_node_acl;\n"
    buf += "};\n\n"
    buf += "struct " + fabric_mod_name + "_tpg {\n"
    buf += " /* iSCSI target portal group tag for TCM */\n"
    buf += " u16 tport_tpgt;\n"
    buf += " /* Pointer back to " + fabric_mod_name + "_tport */\n"
    buf += " struct " + fabric_mod_name + "_tport *tport;\n"
    buf += " /* Returned by " + fabric_mod_name + "_make_tpg() */\n"
    buf += " struct se_portal_group se_tpg;\n"
    buf += "};\n\n"
    buf += "struct " + fabric_mod_name + "_tport {\n"
    buf += " /* SCSI protocol the tport is providing */\n"
    buf += " u8 tport_proto_id;\n"
    buf += " /* ASCII formatted TargetName for IQN */\n"
    buf += " char tport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
    buf += " /* Returned by " + fabric_mod_name + "_make_tport() */\n"
    buf += " struct se_wwn tport_wwn;\n"
    buf += "};\n"
    ret = p.write(buf)
    # NOTE(review): file.write() returns None in Python 2 — dead error path.
    if ret:
        tcm_mod_err("Unable to write f: " + f)
    p.close()
    fabric_mod_port = "tport"
    fabric_mod_init_port = "iport"
    return
def tcm_mod_build_base_includes(proto_ident, fabric_mod_dir_val, fabric_mod_name):
    # Dispatch to the protocol-specific *_base.h generator. Exits the
    # script for any proto_ident other than FC/SAS/iSCSI.
    if proto_ident == "FC":
        tcm_mod_build_FC_include(fabric_mod_dir_val, fabric_mod_name)
    elif proto_ident == "SAS":
        tcm_mod_build_SAS_include(fabric_mod_dir_val, fabric_mod_name)
    elif proto_ident == "iSCSI":
        tcm_mod_build_iSCSI_include(fabric_mod_dir_val, fabric_mod_name)
    else:
        print "Unsupported proto_ident: " + proto_ident
        sys.exit(1)
    return
def tcm_mod_build_configfs(proto_ident, fabric_mod_dir_var, fabric_mod_name):
buf = ""
f = fabric_mod_dir_var + "/" + fabric_mod_name + "_configfs.c"
print "Writing file: " + f
p = open(f, 'w');
if not p:
tcm_mod_err("Unable to open file: " + f)
buf = "#include <linux/module.h>\n"
buf += "#include <linux/moduleparam.h>\n"
buf += "#include <linux/version.h>\n"
buf += "#include <generated/utsrelease.h>\n"
buf += "#include <linux/utsname.h>\n"
buf += "#include <linux/init.h>\n"
buf += "#include <linux/slab.h>\n"
buf += "#include <linux/kthread.h>\n"
buf += "#include <linux/types.h>\n"
buf += "#include <linux/string.h>\n"
buf += "#include <linux/configfs.h>\n"
buf += "#include <linux/ctype.h>\n"
buf += "#include <asm/unaligned.h>\n\n"
buf += "#include <target/target_core_base.h>\n"
buf += "#include <target/target_core_fabric.h>\n"
buf += "#include <target/target_core_fabric_configfs.h>\n"
buf += "#include <target/target_core_configfs.h>\n"
buf += "#include <target/configfs_macros.h>\n\n"
buf += "#include \"" + fabric_mod_name + "_base.h\"\n"
buf += "#include \"" + fabric_mod_name + "_fabric.h\"\n\n"
buf += "/* Local pointer to allocated TCM configfs fabric module */\n"
buf += "struct target_fabric_configfs *" + fabric_mod_name + "_fabric_configfs;\n\n"
buf += "static struct se_node_acl *" + fabric_mod_name + "_make_nodeacl(\n"
buf += " struct se_portal_group *se_tpg,\n"
buf += " struct config_group *group,\n"
buf += " const char *name)\n"
buf += "{\n"
buf += " struct se_node_acl *se_nacl, *se_nacl_new;\n"
buf += " struct " + fabric_mod_name + "_nacl *nacl;\n"
if proto_ident == "FC" or proto_ident == "SAS":
buf += " u64 wwpn = 0;\n"
buf += " u32 nexus_depth;\n\n"
buf += " /* " + fabric_mod_name + "_parse_wwn(name, &wwpn, 1) < 0)\n"
buf += " return ERR_PTR(-EINVAL); */\n"
buf += " se_nacl_new = " + fabric_mod_name + "_alloc_fabric_acl(se_tpg);\n"
buf += " if (!se_nacl_new)\n"
buf += " return ERR_PTR(-ENOMEM);\n"
buf += "//#warning FIXME: Hardcoded nexus depth in " + fabric_mod_name + "_make_nodeacl()\n"
buf += " nexus_depth = 1;\n"
buf += " /*\n"
buf += " * se_nacl_new may be released by core_tpg_add_initiator_node_acl()\n"
buf += " * when converting a NodeACL from demo mode -> explict\n"
buf += " */\n"
buf += " se_nacl = core_tpg_add_initiator_node_acl(se_tpg, se_nacl_new,\n"
buf += " name, nexus_depth);\n"
buf += " if (IS_ERR(se_nacl)) {\n"
buf += " " + fabric_mod_name + "_release_fabric_acl(se_tpg, se_nacl_new);\n"
buf += " return se_nacl;\n"
buf += " }\n"
buf += " /*\n"
buf += " * Locate our struct " + fabric_mod_name + "_nacl and set the FC Nport WWPN\n"
buf += " */\n"
buf += " nacl = container_of(se_nacl, struct " + fabric_mod_name + "_nacl, se_node_acl);\n"
if proto_ident == "FC" or proto_ident == "SAS":
buf += " nacl->" + fabric_mod_init_port + "_wwpn = wwpn;\n"
buf += " /* " + fabric_mod_name + "_format_wwn(&nacl->" + fabric_mod_init_port + "_name[0], " + fabric_mod_name.upper() + "_NAMELEN, wwpn); */\n\n"
buf += " return se_nacl;\n"
buf += "}\n\n"
buf += "static void " + fabric_mod_name + "_drop_nodeacl(struct se_node_acl *se_acl)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_nacl *nacl = container_of(se_acl,\n"
buf += " struct " + fabric_mod_name + "_nacl, se_node_acl);\n"
buf += " core_tpg_del_initiator_node_acl(se_acl->se_tpg, se_acl, 1);\n"
buf += " kfree(nacl);\n"
buf += "}\n\n"
buf += "static struct se_portal_group *" + fabric_mod_name + "_make_tpg(\n"
buf += " struct se_wwn *wwn,\n"
buf += " struct config_group *group,\n"
buf += " const char *name)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + "*" + fabric_mod_port + " = container_of(wwn,\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + ", " + fabric_mod_port + "_wwn);\n\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg;\n"
buf += " unsigned long tpgt;\n"
buf += " int ret;\n\n"
buf += " if (strstr(name, \"tpgt_\") != name)\n"
buf += " return ERR_PTR(-EINVAL);\n"
buf += " if (strict_strtoul(name + 5, 10, &tpgt) || tpgt > UINT_MAX)\n"
buf += " return ERR_PTR(-EINVAL);\n\n"
buf += " tpg = kzalloc(sizeof(struct " + fabric_mod_name + "_tpg), GFP_KERNEL);\n"
buf += " if (!tpg) {\n"
buf += " printk(KERN_ERR \"Unable to allocate struct " + fabric_mod_name + "_tpg\");\n"
buf += " return ERR_PTR(-ENOMEM);\n"
buf += " }\n"
buf += " tpg->" + fabric_mod_port + " = " + fabric_mod_port + ";\n"
buf += " tpg->" + fabric_mod_port + "_tpgt = tpgt;\n\n"
buf += " ret = core_tpg_register(&" + fabric_mod_name + "_fabric_configfs->tf_ops, wwn,\n"
buf += " &tpg->se_tpg, (void *)tpg,\n"
buf += " TRANSPORT_TPG_TYPE_NORMAL);\n"
buf += " if (ret < 0) {\n"
buf += " kfree(tpg);\n"
buf += " return NULL;\n"
buf += " }\n"
buf += " return &tpg->se_tpg;\n"
buf += "}\n\n"
buf += "static void " + fabric_mod_name + "_drop_tpg(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n\n"
buf += " core_tpg_deregister(se_tpg);\n"
buf += " kfree(tpg);\n"
buf += "}\n\n"
buf += "static struct se_wwn *" + fabric_mod_name + "_make_" + fabric_mod_port + "(\n"
buf += " struct target_fabric_configfs *tf,\n"
buf += " struct config_group *group,\n"
buf += " const char *name)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + ";\n"
if proto_ident == "FC" or proto_ident == "SAS":
buf += " u64 wwpn = 0;\n\n"
buf += " /* if (" + fabric_mod_name + "_parse_wwn(name, &wwpn, 1) < 0)\n"
buf += " return ERR_PTR(-EINVAL); */\n\n"
buf += " " + fabric_mod_port + " = kzalloc(sizeof(struct " + fabric_mod_name + "_" + fabric_mod_port + "), GFP_KERNEL);\n"
buf += " if (!" + fabric_mod_port + ") {\n"
buf += " printk(KERN_ERR \"Unable to allocate struct " + fabric_mod_name + "_" + fabric_mod_port + "\");\n"
buf += " return ERR_PTR(-ENOMEM);\n"
buf += " }\n"
if proto_ident == "FC" or proto_ident == "SAS":
buf += " " + fabric_mod_port + "->" + fabric_mod_port + "_wwpn = wwpn;\n"
buf += " /* " + fabric_mod_name + "_format_wwn(&" + fabric_mod_port + "->" + fabric_mod_port + "_name[0], " + fabric_mod_name.upper() + "_NAMELEN, wwpn); */\n\n"
buf += " return &" + fabric_mod_port + "->" + fabric_mod_port + "_wwn;\n"
buf += "}\n\n"
buf += "static void " + fabric_mod_name + "_drop_" + fabric_mod_port + "(struct se_wwn *wwn)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = container_of(wwn,\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + ", " + fabric_mod_port + "_wwn);\n"
buf += " kfree(" + fabric_mod_port + ");\n"
buf += "}\n\n"
buf += "static ssize_t " + fabric_mod_name + "_wwn_show_attr_version(\n"
buf += " struct target_fabric_configfs *tf,\n"
buf += " char *page)\n"
buf += "{\n"
buf += " return sprintf(page, \"" + fabric_mod_name.upper() + " fabric module %s on %s/%s\"\n"
buf += " \"on \"UTS_RELEASE\"\\n\", " + fabric_mod_name.upper() + "_VERSION, utsname()->sysname,\n"
buf += " utsname()->machine);\n"
buf += "}\n\n"
buf += "TF_WWN_ATTR_RO(" + fabric_mod_name + ", version);\n\n"
buf += "static struct configfs_attribute *" + fabric_mod_name + "_wwn_attrs[] = {\n"
buf += " &" + fabric_mod_name + "_wwn_version.attr,\n"
buf += " NULL,\n"
buf += "};\n\n"
buf += "static struct target_core_fabric_ops " + fabric_mod_name + "_ops = {\n"
buf += " .get_fabric_name = " + fabric_mod_name + "_get_fabric_name,\n"
buf += " .get_fabric_proto_ident = " + fabric_mod_name + "_get_fabric_proto_ident,\n"
buf += " .tpg_get_wwn = " + fabric_mod_name + "_get_fabric_wwn,\n"
buf += " .tpg_get_tag = " + fabric_mod_name + "_get_tag,\n"
buf += " .tpg_get_default_depth = " + fabric_mod_name + "_get_default_depth,\n"
buf += " .tpg_get_pr_transport_id = " + fabric_mod_name + "_get_pr_transport_id,\n"
buf += " .tpg_get_pr_transport_id_len = " + fabric_mod_name + "_get_pr_transport_id_len,\n"
buf += " .tpg_parse_pr_out_transport_id = " + fabric_mod_name + "_parse_pr_out_transport_id,\n"
buf += " .tpg_check_demo_mode = " + fabric_mod_name + "_check_false,\n"
buf += " .tpg_check_demo_mode_cache = " + fabric_mod_name + "_check_true,\n"
buf += " .tpg_check_demo_mode_write_protect = " + fabric_mod_name + "_check_true,\n"
buf += " .tpg_check_prod_mode_write_protect = " + fabric_mod_name + "_check_false,\n"
buf += " .tpg_alloc_fabric_acl = " + fabric_mod_name + "_alloc_fabric_acl,\n"
buf += " .tpg_release_fabric_acl = " + fabric_mod_name + "_release_fabric_acl,\n"
buf += " .tpg_get_inst_index = " + fabric_mod_name + "_tpg_get_inst_index,\n"
buf += " .release_cmd = " + fabric_mod_name + "_release_cmd,\n"
buf += " .shutdown_session = " + fabric_mod_name + "_shutdown_session,\n"
buf += " .close_session = " + fabric_mod_name + "_close_session,\n"
buf += " .stop_session = " + fabric_mod_name + "_stop_session,\n"
buf += " .fall_back_to_erl0 = " + fabric_mod_name + "_reset_nexus,\n"
buf += " .sess_logged_in = " + fabric_mod_name + "_sess_logged_in,\n"
buf += " .sess_get_index = " + fabric_mod_name + "_sess_get_index,\n"
buf += " .sess_get_initiator_sid = NULL,\n"
buf += " .write_pending = " + fabric_mod_name + "_write_pending,\n"
buf += " .write_pending_status = " + fabric_mod_name + "_write_pending_status,\n"
buf += " .set_default_node_attributes = " + fabric_mod_name + "_set_default_node_attrs,\n"
buf += " .get_task_tag = " + fabric_mod_name + "_get_task_tag,\n"
buf += " .get_cmd_state = " + fabric_mod_name + "_get_cmd_state,\n"
buf += " .queue_data_in = " + fabric_mod_name + "_queue_data_in,\n"
buf += " .queue_status = " + fabric_mod_name + "_queue_status,\n"
buf += " .queue_tm_rsp = " + fabric_mod_name + "_queue_tm_rsp,\n"
buf += " .get_fabric_sense_len = " + fabric_mod_name + "_get_fabric_sense_len,\n"
buf += " .set_fabric_sense_len = " + fabric_mod_name + "_set_fabric_sense_len,\n"
buf += " .is_state_remove = " + fabric_mod_name + "_is_state_remove,\n"
buf += " /*\n"
buf += " * Setup function pointers for generic logic in target_core_fabric_configfs.c\n"
buf += " */\n"
buf += " .fabric_make_wwn = " + fabric_mod_name + "_make_" + fabric_mod_port + ",\n"
buf += " .fabric_drop_wwn = " + fabric_mod_name + "_drop_" + fabric_mod_port + ",\n"
buf += " .fabric_make_tpg = " + fabric_mod_name + "_make_tpg,\n"
buf += " .fabric_drop_tpg = " + fabric_mod_name + "_drop_tpg,\n"
buf += " .fabric_post_link = NULL,\n"
buf += " .fabric_pre_unlink = NULL,\n"
buf += " .fabric_make_np = NULL,\n"
buf += " .fabric_drop_np = NULL,\n"
buf += " .fabric_make_nodeacl = " + fabric_mod_name + "_make_nodeacl,\n"
buf += " .fabric_drop_nodeacl = " + fabric_mod_name + "_drop_nodeacl,\n"
buf += "};\n\n"
buf += "static int " + fabric_mod_name + "_register_configfs(void)\n"
buf += "{\n"
buf += " struct target_fabric_configfs *fabric;\n"
buf += " int ret;\n\n"
buf += " printk(KERN_INFO \"" + fabric_mod_name.upper() + " fabric module %s on %s/%s\"\n"
buf += " \" on \"UTS_RELEASE\"\\n\"," + fabric_mod_name.upper() + "_VERSION, utsname()->sysname,\n"
buf += " utsname()->machine);\n"
buf += " /*\n"
buf += " * Register the top level struct config_item_type with TCM core\n"
buf += " */\n"
buf += " fabric = target_fabric_configfs_init(THIS_MODULE, \"" + fabric_mod_name[4:] + "\");\n"
buf += " if (IS_ERR(fabric)) {\n"
buf += " printk(KERN_ERR \"target_fabric_configfs_init() failed\\n\");\n"
buf += " return PTR_ERR(fabric);\n"
buf += " }\n"
buf += " /*\n"
buf += " * Setup fabric->tf_ops from our local " + fabric_mod_name + "_ops\n"
buf += " */\n"
buf += " fabric->tf_ops = " + fabric_mod_name + "_ops;\n"
buf += " /*\n"
buf += " * Setup default attribute lists for various fabric->tf_cit_tmpl\n"
buf += " */\n"
buf += " TF_CIT_TMPL(fabric)->tfc_wwn_cit.ct_attrs = " + fabric_mod_name + "_wwn_attrs;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_base_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_attrib_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_param_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_np_base_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_base_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_attrib_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_auth_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_param_cit.ct_attrs = NULL;\n"
buf += " /*\n"
buf += " * Register the fabric for use within TCM\n"
buf += " */\n"
buf += " ret = target_fabric_configfs_register(fabric);\n"
buf += " if (ret < 0) {\n"
buf += " printk(KERN_ERR \"target_fabric_configfs_register() failed\"\n"
buf += " \" for " + fabric_mod_name.upper() + "\\n\");\n"
buf += " return ret;\n"
buf += " }\n"
buf += " /*\n"
buf += " * Setup our local pointer to *fabric\n"
buf += " */\n"
buf += " " + fabric_mod_name + "_fabric_configfs = fabric;\n"
buf += " printk(KERN_INFO \"" + fabric_mod_name.upper() + "[0] - Set fabric -> " + fabric_mod_name + "_fabric_configfs\\n\");\n"
buf += " return 0;\n"
buf += "};\n\n"
buf += "static void __exit " + fabric_mod_name + "_deregister_configfs(void)\n"
buf += "{\n"
buf += " if (!" + fabric_mod_name + "_fabric_configfs)\n"
buf += " return;\n\n"
buf += " target_fabric_configfs_deregister(" + fabric_mod_name + "_fabric_configfs);\n"
buf += " " + fabric_mod_name + "_fabric_configfs = NULL;\n"
buf += " printk(KERN_INFO \"" + fabric_mod_name.upper() + "[0] - Cleared " + fabric_mod_name + "_fabric_configfs\\n\");\n"
buf += "};\n\n"
buf += "static int __init " + fabric_mod_name + "_init(void)\n"
buf += "{\n"
buf += " int ret;\n\n"
buf += " ret = " + fabric_mod_name + "_register_configfs();\n"
buf += " if (ret < 0)\n"
buf += " return ret;\n\n"
buf += " return 0;\n"
buf += "};\n\n"
buf += "static void __exit " + fabric_mod_name + "_exit(void)\n"
buf += "{\n"
buf += " " + fabric_mod_name + "_deregister_configfs();\n"
buf += "};\n\n"
buf += "MODULE_DESCRIPTION(\"" + fabric_mod_name.upper() + " series fabric driver\");\n"
buf += "MODULE_LICENSE(\"GPL\");\n"
buf += "module_init(" + fabric_mod_name + "_init);\n"
buf += "module_exit(" + fabric_mod_name + "_exit);\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
return
def tcm_mod_scan_fabric_ops(tcm_dir):
    # Scan include/target/target_core_fabric.h and append every line that
    # contains a function-pointer declaration (matching "(\*") to the
    # module-level fabric_ops list, which tcm_mod_dump_fabric_ops() later
    # consumes to emit stubs.
    #
    # The parse below is stateful and order-sensitive (note the separate
    # process_fo == 0 branches and the readline-before-test pattern), so
    # the statements are left exactly as written.
    # NOTE(review): process_fo flips to 1 on the first line that is not
    # the struct header, so collection effectively begins near the top of
    # the header file -- presumably the struct opens the file; confirm
    # against target_core_fabric.h before restructuring.
    fabric_ops_api = tcm_dir + "include/target/target_core_fabric.h"
    print "Using tcm_mod_scan_fabric_ops: " + fabric_ops_api
    process_fo = 0;
    p = open(fabric_ops_api, 'r')
    line = p.readline()
    while line:
        # Skip the struct declaration line itself while still scanning.
        if process_fo == 0 and re.search('struct target_core_fabric_ops {', line):
            line = p.readline()
            continue
        if process_fo == 0:
            process_fo = 1;
            line = p.readline()
            # Search for function pointer
            if not re.search('\(\*', line):
                continue
            fabric_ops.append(line.rstrip())
            continue
        line = p.readline()
        # Search for function pointer
        if not re.search('\(\*', line):
            continue
        fabric_ops.append(line.rstrip())
    p.close()
    return
def tcm_mod_dump_fabric_ops(proto_ident, fabric_mod_dir_var, fabric_mod_name):
    # Generate <fabric_mod_name>_fabric.c (stub definitions, accumulated
    # in buf) and <fabric_mod_name>_fabric.h (matching prototypes,
    # accumulated in bufi) for every function pointer collected into the
    # module-level fabric_ops list by tcm_mod_scan_fabric_ops().
    # proto_ident selects the transport ("FC", "SAS" or "iSCSI") used in
    # the protocol-dependent stub bodies.  The emitted C text is left
    # byte-for-byte as originally written.
    buf = ""
    bufi = ""
    f = fabric_mod_dir_var + "/" + fabric_mod_name + "_fabric.c"
    print "Writing file: " + f
    p = open(f, 'w')
    if not p:
        tcm_mod_err("Unable to open file: " + f)
    fi = fabric_mod_dir_var + "/" + fabric_mod_name + "_fabric.h"
    print "Writing file: " + fi
    pi = open(fi, 'w')
    if not pi:
        tcm_mod_err("Unable to open file: " + fi)
    # Fixed preamble: kernel/SCSI/target headers plus the generated
    # module-local headers.
    buf = "#include <linux/slab.h>\n"
    buf += "#include <linux/kthread.h>\n"
    buf += "#include <linux/types.h>\n"
    buf += "#include <linux/list.h>\n"
    buf += "#include <linux/types.h>\n"
    buf += "#include <linux/string.h>\n"
    buf += "#include <linux/ctype.h>\n"
    buf += "#include <asm/unaligned.h>\n"
    buf += "#include <scsi/scsi.h>\n"
    buf += "#include <scsi/scsi_host.h>\n"
    buf += "#include <scsi/scsi_device.h>\n"
    buf += "#include <scsi/scsi_cmnd.h>\n"
    buf += "#include <scsi/libfc.h>\n\n"
    buf += "#include <target/target_core_base.h>\n"
    buf += "#include <target/target_core_fabric.h>\n"
    buf += "#include <target/target_core_configfs.h>\n\n"
    buf += "#include \"" + fabric_mod_name + "_base.h\"\n"
    buf += "#include \"" + fabric_mod_name + "_fabric.h\"\n\n"
    # Trivial true/false predicate helpers are always emitted.
    buf += "int " + fabric_mod_name + "_check_true(struct se_portal_group *se_tpg)\n"
    buf += "{\n"
    buf += " return 1;\n"
    buf += "}\n\n"
    bufi += "int " + fabric_mod_name + "_check_true(struct se_portal_group *);\n"
    buf += "int " + fabric_mod_name + "_check_false(struct se_portal_group *se_tpg)\n"
    buf += "{\n"
    buf += " return 0;\n"
    buf += "}\n\n"
    bufi += "int " + fabric_mod_name + "_check_false(struct se_portal_group *);\n"
    # Walk each scanned function-pointer line and emit the matching stub.
    total_fabric_ops = len(fabric_ops)
    i = 0
    while i < total_fabric_ops:
        fo = fabric_ops[i]
        i += 1
        # print "fabric_ops: " + fo
        if re.search('get_fabric_name', fo):
            buf += "char *" + fabric_mod_name + "_get_fabric_name(void)\n"
            buf += "{\n"
            buf += " return \"" + fabric_mod_name[4:] + "\";\n"
            buf += "}\n\n"
            bufi += "char *" + fabric_mod_name + "_get_fabric_name(void);\n"
            continue
        if re.search('get_fabric_proto_ident', fo):
            buf += "u8 " + fabric_mod_name + "_get_fabric_proto_ident(struct se_portal_group *se_tpg)\n"
            buf += "{\n"
            buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
            buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
            buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
            buf += " u8 proto_id;\n\n"
            buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
            if proto_ident == "FC":
                buf += " case SCSI_PROTOCOL_FCP:\n"
                buf += " default:\n"
                buf += " proto_id = fc_get_fabric_proto_ident(se_tpg);\n"
                buf += " break;\n"
            elif proto_ident == "SAS":
                buf += " case SCSI_PROTOCOL_SAS:\n"
                buf += " default:\n"
                buf += " proto_id = sas_get_fabric_proto_ident(se_tpg);\n"
                buf += " break;\n"
            elif proto_ident == "iSCSI":
                buf += " case SCSI_PROTOCOL_ISCSI:\n"
                buf += " default:\n"
                buf += " proto_id = iscsi_get_fabric_proto_ident(se_tpg);\n"
                buf += " break;\n"
            buf += " }\n\n"
            buf += " return proto_id;\n"
            buf += "}\n\n"
            bufi += "u8 " + fabric_mod_name + "_get_fabric_proto_ident(struct se_portal_group *);\n"
        if re.search('get_wwn', fo):
            buf += "char *" + fabric_mod_name + "_get_fabric_wwn(struct se_portal_group *se_tpg)\n"
            buf += "{\n"
            buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
            buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
            buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n\n"
            buf += " return &" + fabric_mod_port + "->" + fabric_mod_port + "_name[0];\n"
            buf += "}\n\n"
            bufi += "char *" + fabric_mod_name + "_get_fabric_wwn(struct se_portal_group *);\n"
        if re.search('get_tag', fo):
            buf += "u16 " + fabric_mod_name + "_get_tag(struct se_portal_group *se_tpg)\n"
            buf += "{\n"
            buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
            buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
            buf += " return tpg->" + fabric_mod_port + "_tpgt;\n"
            buf += "}\n\n"
            bufi += "u16 " + fabric_mod_name + "_get_tag(struct se_portal_group *);\n"
        if re.search('get_default_depth', fo):
            buf += "u32 " + fabric_mod_name + "_get_default_depth(struct se_portal_group *se_tpg)\n"
            buf += "{\n"
            buf += " return 1;\n"
            buf += "}\n\n"
            bufi += "u32 " + fabric_mod_name + "_get_default_depth(struct se_portal_group *);\n"
        if re.search('get_pr_transport_id\)\(', fo):
            buf += "u32 " + fabric_mod_name + "_get_pr_transport_id(\n"
            buf += " struct se_portal_group *se_tpg,\n"
            buf += " struct se_node_acl *se_nacl,\n"
            buf += " struct t10_pr_registration *pr_reg,\n"
            buf += " int *format_code,\n"
            buf += " unsigned char *buf)\n"
            buf += "{\n"
            buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
            buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
            buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
            buf += " int ret = 0;\n\n"
            buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
            if proto_ident == "FC":
                buf += " case SCSI_PROTOCOL_FCP:\n"
                buf += " default:\n"
                buf += " ret = fc_get_pr_transport_id(se_tpg, se_nacl, pr_reg,\n"
                buf += " format_code, buf);\n"
                buf += " break;\n"
            elif proto_ident == "SAS":
                buf += " case SCSI_PROTOCOL_SAS:\n"
                buf += " default:\n"
                buf += " ret = sas_get_pr_transport_id(se_tpg, se_nacl, pr_reg,\n"
                buf += " format_code, buf);\n"
                buf += " break;\n"
            elif proto_ident == "iSCSI":
                buf += " case SCSI_PROTOCOL_ISCSI:\n"
                buf += " default:\n"
                buf += " ret = iscsi_get_pr_transport_id(se_tpg, se_nacl, pr_reg,\n"
                buf += " format_code, buf);\n"
                buf += " break;\n"
            buf += " }\n\n"
            buf += " return ret;\n"
            buf += "}\n\n"
            bufi += "u32 " + fabric_mod_name + "_get_pr_transport_id(struct se_portal_group *,\n"
            bufi += " struct se_node_acl *, struct t10_pr_registration *,\n"
            bufi += " int *, unsigned char *);\n"
        if re.search('get_pr_transport_id_len\)\(', fo):
            buf += "u32 " + fabric_mod_name + "_get_pr_transport_id_len(\n"
            buf += " struct se_portal_group *se_tpg,\n"
            buf += " struct se_node_acl *se_nacl,\n"
            buf += " struct t10_pr_registration *pr_reg,\n"
            buf += " int *format_code)\n"
            buf += "{\n"
            buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
            buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
            buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
            buf += " int ret = 0;\n\n"
            buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
            if proto_ident == "FC":
                buf += " case SCSI_PROTOCOL_FCP:\n"
                buf += " default:\n"
                buf += " ret = fc_get_pr_transport_id_len(se_tpg, se_nacl, pr_reg,\n"
                buf += " format_code);\n"
                buf += " break;\n"
            elif proto_ident == "SAS":
                buf += " case SCSI_PROTOCOL_SAS:\n"
                buf += " default:\n"
                buf += " ret = sas_get_pr_transport_id_len(se_tpg, se_nacl, pr_reg,\n"
                buf += " format_code);\n"
                buf += " break;\n"
            elif proto_ident == "iSCSI":
                buf += " case SCSI_PROTOCOL_ISCSI:\n"
                buf += " default:\n"
                buf += " ret = iscsi_get_pr_transport_id_len(se_tpg, se_nacl, pr_reg,\n"
                buf += " format_code);\n"
                buf += " break;\n"
            buf += " }\n\n"
            buf += " return ret;\n"
            buf += "}\n\n"
            bufi += "u32 " + fabric_mod_name + "_get_pr_transport_id_len(struct se_portal_group *,\n"
            bufi += " struct se_node_acl *, struct t10_pr_registration *,\n"
            bufi += " int *);\n"
        if re.search('parse_pr_out_transport_id\)\(', fo):
            buf += "char *" + fabric_mod_name + "_parse_pr_out_transport_id(\n"
            buf += " struct se_portal_group *se_tpg,\n"
            buf += " const char *buf,\n"
            buf += " u32 *out_tid_len,\n"
            buf += " char **port_nexus_ptr)\n"
            buf += "{\n"
            buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
            buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
            buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
            buf += " char *tid = NULL;\n\n"
            buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
            if proto_ident == "FC":
                buf += " case SCSI_PROTOCOL_FCP:\n"
                buf += " default:\n"
                buf += " tid = fc_parse_pr_out_transport_id(se_tpg, buf, out_tid_len,\n"
                buf += " port_nexus_ptr);\n"
            elif proto_ident == "SAS":
                buf += " case SCSI_PROTOCOL_SAS:\n"
                buf += " default:\n"
                buf += " tid = sas_parse_pr_out_transport_id(se_tpg, buf, out_tid_len,\n"
                buf += " port_nexus_ptr);\n"
            elif proto_ident == "iSCSI":
                buf += " case SCSI_PROTOCOL_ISCSI:\n"
                buf += " default:\n"
                buf += " tid = iscsi_parse_pr_out_transport_id(se_tpg, buf, out_tid_len,\n"
                buf += " port_nexus_ptr);\n"
            buf += " }\n\n"
            buf += " return tid;\n"
            buf += "}\n\n"
            bufi += "char *" + fabric_mod_name + "_parse_pr_out_transport_id(struct se_portal_group *,\n"
            bufi += " const char *, u32 *, char **);\n"
        if re.search('alloc_fabric_acl\)\(', fo):
            buf += "struct se_node_acl *" + fabric_mod_name + "_alloc_fabric_acl(struct se_portal_group *se_tpg)\n"
            buf += "{\n"
            buf += " struct " + fabric_mod_name + "_nacl *nacl;\n\n"
            buf += " nacl = kzalloc(sizeof(struct " + fabric_mod_name + "_nacl), GFP_KERNEL);\n"
            buf += " if (!nacl) {\n"
            buf += " printk(KERN_ERR \"Unable to allocate struct " + fabric_mod_name + "_nacl\\n\");\n"
            buf += " return NULL;\n"
            buf += " }\n\n"
            buf += " return &nacl->se_node_acl;\n"
            buf += "}\n\n"
            bufi += "struct se_node_acl *" + fabric_mod_name + "_alloc_fabric_acl(struct se_portal_group *);\n"
        if re.search('release_fabric_acl\)\(', fo):
            buf += "void " + fabric_mod_name + "_release_fabric_acl(\n"
            buf += " struct se_portal_group *se_tpg,\n"
            buf += " struct se_node_acl *se_nacl)\n"
            buf += "{\n"
            buf += " struct " + fabric_mod_name + "_nacl *nacl = container_of(se_nacl,\n"
            buf += " struct " + fabric_mod_name + "_nacl, se_node_acl);\n"
            buf += " kfree(nacl);\n"
            buf += "}\n\n"
            bufi += "void " + fabric_mod_name + "_release_fabric_acl(struct se_portal_group *,\n"
            bufi += " struct se_node_acl *);\n"
        if re.search('tpg_get_inst_index\)\(', fo):
            buf += "u32 " + fabric_mod_name + "_tpg_get_inst_index(struct se_portal_group *se_tpg)\n"
            buf += "{\n"
            buf += " return 1;\n"
            buf += "}\n\n"
            bufi += "u32 " + fabric_mod_name + "_tpg_get_inst_index(struct se_portal_group *);\n"
        if re.search('\*release_cmd\)\(', fo):
            buf += "void " + fabric_mod_name + "_release_cmd(struct se_cmd *se_cmd)\n"
            buf += "{\n"
            buf += " return;\n"
            buf += "}\n\n"
            bufi += "void " + fabric_mod_name + "_release_cmd(struct se_cmd *);\n"
        if re.search('shutdown_session\)\(', fo):
            buf += "int " + fabric_mod_name + "_shutdown_session(struct se_session *se_sess)\n"
            buf += "{\n"
            buf += " return 0;\n"
            buf += "}\n\n"
            bufi += "int " + fabric_mod_name + "_shutdown_session(struct se_session *);\n"
        if re.search('close_session\)\(', fo):
            buf += "void " + fabric_mod_name + "_close_session(struct se_session *se_sess)\n"
            buf += "{\n"
            buf += " return;\n"
            buf += "}\n\n"
            bufi += "void " + fabric_mod_name + "_close_session(struct se_session *);\n"
        if re.search('stop_session\)\(', fo):
            buf += "void " + fabric_mod_name + "_stop_session(struct se_session *se_sess, int sess_sleep , int conn_sleep)\n"
            buf += "{\n"
            buf += " return;\n"
            buf += "}\n\n"
            bufi += "void " + fabric_mod_name + "_stop_session(struct se_session *, int, int);\n"
        if re.search('fall_back_to_erl0\)\(', fo):
            buf += "void " + fabric_mod_name + "_reset_nexus(struct se_session *se_sess)\n"
            buf += "{\n"
            buf += " return;\n"
            buf += "}\n\n"
            bufi += "void " + fabric_mod_name + "_reset_nexus(struct se_session *);\n"
        if re.search('sess_logged_in\)\(', fo):
            buf += "int " + fabric_mod_name + "_sess_logged_in(struct se_session *se_sess)\n"
            buf += "{\n"
            buf += " return 0;\n"
            buf += "}\n\n"
            bufi += "int " + fabric_mod_name + "_sess_logged_in(struct se_session *);\n"
        if re.search('sess_get_index\)\(', fo):
            buf += "u32 " + fabric_mod_name + "_sess_get_index(struct se_session *se_sess)\n"
            buf += "{\n"
            buf += " return 0;\n"
            buf += "}\n\n"
            bufi += "u32 " + fabric_mod_name + "_sess_get_index(struct se_session *);\n"
        if re.search('write_pending\)\(', fo):
            buf += "int " + fabric_mod_name + "_write_pending(struct se_cmd *se_cmd)\n"
            buf += "{\n"
            buf += " return 0;\n"
            buf += "}\n\n"
            bufi += "int " + fabric_mod_name + "_write_pending(struct se_cmd *);\n"
        if re.search('write_pending_status\)\(', fo):
            buf += "int " + fabric_mod_name + "_write_pending_status(struct se_cmd *se_cmd)\n"
            buf += "{\n"
            buf += " return 0;\n"
            buf += "}\n\n"
            bufi += "int " + fabric_mod_name + "_write_pending_status(struct se_cmd *);\n"
        if re.search('set_default_node_attributes\)\(', fo):
            buf += "void " + fabric_mod_name + "_set_default_node_attrs(struct se_node_acl *nacl)\n"
            buf += "{\n"
            buf += " return;\n"
            buf += "}\n\n"
            bufi += "void " + fabric_mod_name + "_set_default_node_attrs(struct se_node_acl *);\n"
        if re.search('get_task_tag\)\(', fo):
            buf += "u32 " + fabric_mod_name + "_get_task_tag(struct se_cmd *se_cmd)\n"
            buf += "{\n"
            buf += " return 0;\n"
            buf += "}\n\n"
            bufi += "u32 " + fabric_mod_name + "_get_task_tag(struct se_cmd *);\n"
        if re.search('get_cmd_state\)\(', fo):
            buf += "int " + fabric_mod_name + "_get_cmd_state(struct se_cmd *se_cmd)\n"
            buf += "{\n"
            buf += " return 0;\n"
            buf += "}\n\n"
            bufi += "int " + fabric_mod_name + "_get_cmd_state(struct se_cmd *);\n"
        if re.search('queue_data_in\)\(', fo):
            buf += "int " + fabric_mod_name + "_queue_data_in(struct se_cmd *se_cmd)\n"
            buf += "{\n"
            buf += " return 0;\n"
            buf += "}\n\n"
            bufi += "int " + fabric_mod_name + "_queue_data_in(struct se_cmd *);\n"
        if re.search('queue_status\)\(', fo):
            buf += "int " + fabric_mod_name + "_queue_status(struct se_cmd *se_cmd)\n"
            buf += "{\n"
            buf += " return 0;\n"
            buf += "}\n\n"
            bufi += "int " + fabric_mod_name + "_queue_status(struct se_cmd *);\n"
        if re.search('queue_tm_rsp\)\(', fo):
            buf += "int " + fabric_mod_name + "_queue_tm_rsp(struct se_cmd *se_cmd)\n"
            buf += "{\n"
            buf += " return 0;\n"
            buf += "}\n\n"
            bufi += "int " + fabric_mod_name + "_queue_tm_rsp(struct se_cmd *);\n"
        if re.search('get_fabric_sense_len\)\(', fo):
            buf += "u16 " + fabric_mod_name + "_get_fabric_sense_len(void)\n"
            buf += "{\n"
            buf += " return 0;\n"
            buf += "}\n\n"
            bufi += "u16 " + fabric_mod_name + "_get_fabric_sense_len(void);\n"
        if re.search('set_fabric_sense_len\)\(', fo):
            buf += "u16 " + fabric_mod_name + "_set_fabric_sense_len(struct se_cmd *se_cmd, u32 sense_length)\n"
            buf += "{\n"
            buf += " return 0;\n"
            buf += "}\n\n"
            bufi += "u16 " + fabric_mod_name + "_set_fabric_sense_len(struct se_cmd *, u32);\n"
        if re.search('is_state_remove\)\(', fo):
            buf += "int " + fabric_mod_name + "_is_state_remove(struct se_cmd *se_cmd)\n"
            buf += "{\n"
            buf += " return 0;\n"
            buf += "}\n\n"
            bufi += "int " + fabric_mod_name + "_is_state_remove(struct se_cmd *);\n"
    # NOTE(review): file.write() returns None on Python 2, so these error
    # checks never fire there; on Python 3 a successful write returns a
    # positive character count and would be misreported here -- confirm
    # intent before porting.
    ret = p.write(buf)
    if ret:
        tcm_mod_err("Unable to write f: " + f)
    p.close()
    ret = pi.write(bufi)
    if ret:
        tcm_mod_err("Unable to write fi: " + fi)
    pi.close()
    return
def tcm_mod_build_kbuild(fabric_mod_dir_var, fabric_mod_name):
    # Write the new fabric module's Makefile (kbuild fragment) into
    # fabric_mod_dir_var, listing the generated object files and the
    # obj-$(CONFIG_...) hook.
    #
    # Bug fix: the original checked "ret = p.write(buf); if ret:" as an
    # error condition.  file.write() returns None on Python 2 (check was
    # dead) and the character count on Python 3 (success was reported as
    # failure).  The bogus check is removed; open() failures raise and
    # tcm_mod_err() still guards the (historical) falsy-handle case.
    buf = ""
    f = fabric_mod_dir_var + "/Makefile"
    print("Writing file: " + f)
    p = open(f, 'w')
    if not p:
        tcm_mod_err("Unable to open file: " + f)
    buf += fabric_mod_name + "-objs := " + fabric_mod_name + "_fabric.o \\\n"
    buf += "   " + fabric_mod_name + "_configfs.o\n"
    buf += "obj-$(CONFIG_" + fabric_mod_name.upper() + ") += " + fabric_mod_name + ".o\n"
    p.write(buf)
    p.close()
    return
def tcm_mod_build_kconfig(fabric_mod_dir_var, fabric_mod_name):
    # Write the new fabric module's Kconfig entry (a tristate depending
    # on TARGET_CORE && CONFIGFS_FS) into fabric_mod_dir_var.
    #
    # Bug fix: as in tcm_mod_build_kbuild(), the original treated a
    # truthy return from p.write() as an error; write() returns None on
    # Python 2 (dead check) and a count on Python 3 (false failure).
    # The check is removed.
    buf = ""
    f = fabric_mod_dir_var + "/Kconfig"
    print("Writing file: " + f)
    p = open(f, 'w')
    if not p:
        tcm_mod_err("Unable to open file: " + f)
    buf = "config " + fabric_mod_name.upper() + "\n"
    buf += "	tristate \"" + fabric_mod_name.upper() + " fabric module\"\n"
    buf += "	depends on TARGET_CORE && CONFIGFS_FS\n"
    buf += "	default n\n"
    buf += "	---help---\n"
    buf += "	Say Y here to enable the " + fabric_mod_name.upper() + " fabric module\n"
    p.write(buf)
    p.close()
    return
def tcm_mod_add_kbuild(tcm_dir, fabric_mod_name):
    # Append an obj-$(CONFIG_<NAME>) subdirectory entry for the new
    # fabric module to the top-level drivers/target Makefile.
    makefile_path = tcm_dir + "/drivers/target/Makefile"
    entry = ("obj-$(CONFIG_" + fabric_mod_name.upper() + ") += "
             + fabric_mod_name.lower() + "/\n")
    handle = open(makefile_path, 'a')
    handle.write(entry)
    handle.close()
    return
def tcm_mod_add_kconfig(tcm_dir, fabric_mod_name):
    # Append a source line for the new fabric module's Kconfig to the
    # top-level drivers/target Kconfig.
    kconfig_path = tcm_dir + "/drivers/target/Kconfig"
    entry = ("source \"drivers/target/" + fabric_mod_name.lower()
             + "/Kconfig\"\n")
    handle = open(kconfig_path, 'a')
    handle.write(entry)
    handle.close()
    return
def main(modname, proto_ident):
    # Generate a complete TCM fabric module skeleton named modname for
    # the given transport (proto_ident must be "FC", "SAS" or "iSCSI"),
    # then optionally wire it into drivers/target/{Makefile,Kconfig}.
    #
    # Fixes: the interactive prompts were missing a space before "to"
    # ("...tcm_footo drivers/target/..."), and the answer variable
    # shadowed the builtin input().
    # proto_ident = "FC"
    # proto_ident = "SAS"
    # proto_ident = "iSCSI"
    tcm_dir = os.getcwd();
    tcm_dir += "/../../"
    print("tcm_dir: " + tcm_dir)
    fabric_mod_name = modname
    fabric_mod_dir = tcm_dir + "drivers/target/" + fabric_mod_name
    print("Set fabric_mod_name: " + fabric_mod_name)
    print("Set fabric_mod_dir: " + fabric_mod_dir)
    print("Using proto_ident: " + proto_ident)
    if proto_ident not in ("FC", "SAS", "iSCSI"):
        print("Unsupported proto_ident: " + proto_ident)
        sys.exit(1)
    ret = tcm_mod_create_module_subdir(fabric_mod_dir)
    if ret:
        print("tcm_mod_create_module_subdir() failed because module already exists!")
        sys.exit(1)
    tcm_mod_build_base_includes(proto_ident, fabric_mod_dir, fabric_mod_name)
    tcm_mod_scan_fabric_ops(tcm_dir)
    tcm_mod_dump_fabric_ops(proto_ident, fabric_mod_dir, fabric_mod_name)
    tcm_mod_build_configfs(proto_ident, fabric_mod_dir, fabric_mod_name)
    tcm_mod_build_kbuild(fabric_mod_dir, fabric_mod_name)
    tcm_mod_build_kconfig(fabric_mod_dir, fabric_mod_name)
    answer = raw_input("Would you like to add " + fabric_mod_name + " to drivers/target/Makefile..? [yes,no]: ")
    if answer == "yes" or answer == "y":
        tcm_mod_add_kbuild(tcm_dir, fabric_mod_name)
    answer = raw_input("Would you like to add " + fabric_mod_name + " to drivers/target/Kconfig..? [yes,no]: ")
    if answer == "yes" or answer == "y":
        tcm_mod_add_kconfig(tcm_dir, fabric_mod_name)
    return
# ---- Command-line interface ----
# Parse -m/--modulename and -p/--protoident; both are mandatory and the
# script bails out with usage help if either is absent.
parser = optparse.OptionParser()
parser.add_option('-m', '--modulename', help='Module name', dest='modname',
        action='store', nargs=1, type='string')
parser.add_option('-p', '--protoident', help='Protocol Ident', dest='protoident',
        action='store', nargs=1, type='string')
(opts, args) = parser.parse_args()
# Reject a missing/empty value for either required option.
mandatories = ['modname', 'protoident']
for m in mandatories:
    if not opts.__dict__[m]:
        print "mandatory option is missing\n"
        parser.print_help()
        exit(-1)
if __name__ == "__main__":
    main(str(opts.modname), opts.protoident)
|
ptisserand/portage
|
refs/heads/master
|
pym/portage/package/ebuild/_ipc/QueryCommand.py
|
12
|
# Copyright 2010-2013 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
from __future__ import unicode_literals
import io
import portage
from portage import os
from portage.dep import Atom, _repo_name_re
from portage.eapi import eapi_has_repo_deps
from portage.elog import messages as elog_messages
from portage.exception import InvalidAtom
from portage.package.ebuild._ipc.IpcCommand import IpcCommand
from portage.util import normalize_path
from portage.versions import best
class QueryCommand(IpcCommand):
    """
    IPC command handler that answers ebuild helper queries
    (has_version / best_version and repository metadata lookups) from
    within the ebuild environment.  __call__ returns a
    (stdout, stderr, returncode) tuple rather than printing directly.
    """
    __slots__ = ('phase', 'settings',)
    # Optional class-level dbapi override; get_db() falls back to the
    # global portage.db when this is unset.
    _db = None
    @classmethod
    def get_db(cls):
        # Prefer an explicitly-installed _db (e.g. for testing);
        # otherwise the process-global portage.db.
        if cls._db is not None:
            return cls._db
        return portage.db
    def __init__(self, settings, phase):
        IpcCommand.__init__(self)
        self.settings = settings
        self.phase = phase
    def __call__(self, argv):
        """
        @return: tuple of (stdout, stderr, returncode)
        """
        # Python 3:
        # cmd, root, *args = argv
        cmd = argv[0]
        root = argv[1]
        args = argv[2:]
        warnings = []
        warnings_str = ''
        db = self.get_db()
        eapi = self.settings.get('EAPI')
        # Normalize ROOT to a trailing-separator form so it matches the
        # keys of the trees dict; unknown roots are a hard error (rc 3).
        root = normalize_path(root).rstrip(os.path.sep) + os.path.sep
        if root not in db:
            return ('', '%s: Invalid ROOT: %s\n' % (cmd, root), 3)
        portdb = db[root]["porttree"].dbapi
        vardb = db[root]["vartree"].dbapi
        if cmd in ('best_version', 'has_version'):
            allow_repo = eapi_has_repo_deps(eapi)
            # First parse without EAPI restrictions: a failure here is a
            # genuinely invalid atom (rc 2).
            try:
                atom = Atom(args[0], allow_repo=allow_repo)
            except InvalidAtom:
                return ('', '%s: Invalid atom: %s\n' % (cmd, args[0]), 2)
            # Re-parse with the EAPI so that EAPI-specific violations
            # surface as QA warnings without failing the query.
            try:
                atom = Atom(args[0], allow_repo=allow_repo, eapi=eapi)
            except InvalidAtom as e:
                warnings.append("QA Notice: %s: %s" % (cmd, e))
            # Evaluate USE conditionals against the built USE if
            # available, else the current USE.
            use = self.settings.get('PORTAGE_BUILT_USE')
            if use is None:
                use = self.settings['PORTAGE_USE']
            use = frozenset(use.split())
            atom = atom.evaluate_conditionals(use)
            if warnings:
                warnings_str = self._elog('eqawarn', warnings)
            if cmd == 'has_version':
                # rc 0 when at least one installed package matches.
                if vardb.match(atom):
                    returncode = 0
                else:
                    returncode = 1
                return ('', warnings_str, returncode)
            elif cmd == 'best_version':
                m = best(vardb.match(atom))
                return ('%s\n' % m, warnings_str, 0)
        elif cmd in ('master_repositories', 'repository_path', 'available_eclasses', 'eclass_path', 'license_path'):
            # Validate the repository name syntactically before lookup.
            repo = _repo_name_re.match(args[0])
            if repo is None:
                return ('', '%s: Invalid repository: %s\n' % (cmd, args[0]), 2)
            try:
                repo = portdb.repositories[args[0]]
            except KeyError:
                return ('', warnings_str, 1)
            if cmd == 'master_repositories':
                return ('%s\n' % ' '.join(x.name for x in repo.masters), warnings_str, 0)
            elif cmd == 'repository_path':
                return ('%s\n' % repo.location, warnings_str, 0)
            elif cmd == 'available_eclasses':
                return ('%s\n' % ' '.join(sorted(repo.eclass_db.eclasses)), warnings_str, 0)
            elif cmd == 'eclass_path':
                try:
                    eclass = repo.eclass_db.eclasses[args[1]]
                except KeyError:
                    return ('', warnings_str, 1)
                return ('%s\n' % eclass.location, warnings_str, 0)
            elif cmd == 'license_path':
                # Search the repo itself first, then its masters
                # (reversed() of masters + [repo]).
                paths = reversed([os.path.join(x.location, 'licenses', args[1]) for x in list(repo.masters) + [repo]])
                for path in paths:
                    if os.path.exists(path):
                        return ('%s\n' % path, warnings_str, 0)
                return ('', warnings_str, 1)
        else:
            return ('', 'Invalid command: %s\n' % cmd, 3)
    def _elog(self, elog_funcname, lines):
        """
        This returns a string, to be returned via ipc and displayed at the
        appropriate place in the build output. We wouldn't want to open the
        log here since it is already opened by AbstractEbuildProcess and we
        don't want to corrupt it, especially if it is being written with
        compression.
        """
        out = io.StringIO()
        phase = self.phase
        elog_func = getattr(elog_messages, elog_funcname)
        # Temporarily force havecolor off/on according to NOCOLOR so the
        # captured output matches the build log's color settings.
        global_havecolor = portage.output.havecolor
        try:
            portage.output.havecolor = \
                self.settings.get('NOCOLOR', 'false').lower() in ('no', 'false')
            for line in lines:
                elog_func(line, phase=phase, key=self.settings.mycpv, out=out)
        finally:
            portage.output.havecolor = global_havecolor
        msg = out.getvalue()
        return msg
|
synergeticsedx/deployment-wipro
|
refs/heads/oxa/master.fic
|
common/test/acceptance/tests/lms/test_lms_edxnotes.py
|
8
|
"""
Test LMS Notes
"""
from unittest import skip
import random
from uuid import uuid4
from datetime import datetime
from nose.plugins.attrib import attr
from common.test.acceptance.tests.helpers import UniqueCourseTest, EventsTestMixin
from common.test.acceptance.fixtures.course import CourseFixture, XBlockFixtureDesc
from common.test.acceptance.pages.lms.auto_auth import AutoAuthPage
from common.test.acceptance.pages.lms.course_nav import CourseNavPage
from common.test.acceptance.pages.lms.courseware import CoursewarePage
from common.test.acceptance.pages.lms.edxnotes import EdxNotesUnitPage, EdxNotesPage, EdxNotesPageNoContent
from common.test.acceptance.fixtures.edxnotes import EdxNotesFixture, Note, Range
from flaky import flaky
class EdxNotesTestMixin(UniqueCourseTest):
    """
    Creates a course with initial data and contains useful helper methods.
    """
    def setUp(self):
        """
        Initialize pages and install a course fixture.

        The course enables the "edxnotes" advanced setting and contains
        two sections / three subsections / four units holding six
        annotatable HTML components, each tagged with self.selector so
        tests can target the annotatable span.
        """
        super(EdxNotesTestMixin, self).setUp()
        self.courseware_page = CoursewarePage(self.browser, self.course_id)
        self.course_nav = CourseNavPage(self.browser)
        self.note_unit_page = EdxNotesUnitPage(self.browser, self.course_id)
        self.notes_page = EdxNotesPage(self.browser, self.course_id)
        # Random short username so parallel runs do not collide.
        self.username = str(uuid4().hex)[:5]
        self.email = "{}@email.com".format(self.username)
        # CSS class used to mark the annotatable span in each component.
        self.selector = "annotate-id"
        self.edxnotes_fixture = EdxNotesFixture()
        self.course_fixture = CourseFixture(
            self.course_info["org"], self.course_info["number"],
            self.course_info["run"], self.course_info["display_name"]
        )
        self.course_fixture.add_advanced_settings({
            u"edxnotes": {u"value": True}
        })
        self.course_fixture.add_children(
            XBlockFixtureDesc("chapter", "Test Section 1").add_children(
                XBlockFixtureDesc("sequential", "Test Subsection 1").add_children(
                    XBlockFixtureDesc("vertical", "Test Unit 1").add_children(
                        XBlockFixtureDesc(
                            "html",
                            "Test HTML 1",
                            data="""
                                <p><span class="{}">Annotate this!</span></p>
                                <p>Annotate this</p>
                            """.format(self.selector)
                        ),
                        XBlockFixtureDesc(
                            "html",
                            "Test HTML 2",
                            data="""<p><span class="{}">Annotate this!</span></p>""".format(self.selector)
                        ),
                    ),
                    XBlockFixtureDesc("vertical", "Test Unit 2").add_children(
                        XBlockFixtureDesc(
                            "html",
                            "Test HTML 3",
                            data="""<p><span class="{}">Annotate this!</span></p>""".format(self.selector)
                        ),
                    ),
                ),
                XBlockFixtureDesc("sequential", "Test Subsection 2").add_children(
                    XBlockFixtureDesc("vertical", "Test Unit 3").add_children(
                        XBlockFixtureDesc(
                            "html",
                            "Test HTML 4",
                            data="""
                                <p><span class="{}">Annotate this!</span></p>
                            """.format(self.selector)
                        ),
                    ),
                ),
            ),
            XBlockFixtureDesc("chapter", "Test Section 2").add_children(
                XBlockFixtureDesc("sequential", "Test Subsection 3").add_children(
                    XBlockFixtureDesc("vertical", "Test Unit 4").add_children(
                        XBlockFixtureDesc(
                            "html",
                            "Test HTML 5",
                            data="""
                                <p><span class="{}">Annotate this!</span></p>
                            """.format(self.selector)
                        ),
                        XBlockFixtureDesc(
                            "html",
                            "Test HTML 6",
                            data="""<p><span class="{}">Annotate this!</span></p>""".format(self.selector)
                        ),
                    ),
                ),
            )).install()
        self.addCleanup(self.edxnotes_fixture.cleanup)
        # Log in as the freshly-created user, enrolled in the course.
        AutoAuthPage(self.browser, username=self.username, email=self.email, course_id=self.course_id).visit()
    def _add_notes(self):
        # Create one note per HTML component via the notes fixture, with
        # a distinct (startOffset, endOffset) range per component.
        # NOTE(review): accesses the fixture's private _course_key --
        # presumably no public accessor existed; verify against
        # CourseFixture before changing.
        xblocks = self.course_fixture.get_nested_xblocks(category="html")
        notes_list = []
        for index, xblock in enumerate(xblocks):
            notes_list.append(
                Note(
                    user=self.username,
                    usage_id=xblock.locator,
                    course_id=self.course_fixture._course_key,
                    ranges=[Range(startOffset=index, endOffset=index + 5)]
                )
            )
        self.edxnotes_fixture.create_notes(notes_list)
        self.edxnotes_fixture.install()
@attr(shard=4)
class EdxNotesDefaultInteractionsTest(EdxNotesTestMixin):
    """
    Tests for creation, editing, deleting annotations inside annotatable components in LMS.
    """
    def create_notes(self, components, offset=0):
        """Create a note on every annotatable span of every component, numbering the text from ``offset``."""
        self.assertGreater(len(components), 0)
        index = offset
        for component in components:
            for note in component.create_note(".{}".format(self.selector)):
                note.text = "TEST TEXT {}".format(index)
                index += 1
    def edit_notes(self, components, offset=0):
        """Edit every existing note in the given components, renumbering the text from ``offset``."""
        self.assertGreater(len(components), 0)
        index = offset
        for component in components:
            self.assertGreater(len(component.notes), 0)
            for note in component.edit_note():
                note.text = "TEST TEXT {}".format(index)
                index += 1
    def edit_tags_in_notes(self, components, tags):
        """Set ``tags[i]`` on the note of the i-th component; fails if counts don't match."""
        self.assertGreater(len(components), 0)
        index = 0
        for component in components:
            self.assertGreater(len(component.notes), 0)
            for note in component.edit_note():
                note.tags = tags[index]
                index += 1
        self.assertEqual(index, len(tags), "Number of supplied tags did not match components")
    def remove_notes(self, components):
        """Delete the notes of every given component (each must have at least one)."""
        self.assertGreater(len(components), 0)
        for component in components:
            self.assertGreater(len(component.notes), 0)
            component.remove_note()
    def assert_notes_are_removed(self, components):
        """Assert that no component still shows any note."""
        for component in components:
            self.assertEqual(0, len(component.notes))
    def assert_text_in_notes(self, notes):
        """Assert the notes' texts are exactly "TEST TEXT 0..N-1" in order."""
        actual = [note.text for note in notes]
        expected = ["TEST TEXT {}".format(i) for i in xrange(len(notes))]
        self.assertEqual(expected, actual)
    def assert_tags_in_notes(self, notes, expected_tags):
        """Assert each note's tag list matches the corresponding entry of ``expected_tags``."""
        actual = [note.tags for note in notes]
        expected = [expected_tags[i] for i in xrange(len(notes))]
        self.assertEqual(expected, actual)
    def test_can_create_notes(self):
        """
        Scenario: User can create notes.
        Given I have a course with 3 annotatable components
        And I open the unit with 2 annotatable components
        When I add 2 notes for the first component and 1 note for the second
        Then I see that notes were correctly created
        When I change sequential position to "2"
        And I add note for the annotatable component on the page
        Then I see that note was correctly created
        When I refresh the page
        Then I see that note was correctly stored
        When I change sequential position to "1"
        Then I see that notes were correctly stored on the page
        """
        self.note_unit_page.visit()
        components = self.note_unit_page.components
        self.create_notes(components)
        self.assert_text_in_notes(self.note_unit_page.notes)
        self.courseware_page.go_to_sequential_position(2)
        components = self.note_unit_page.components
        self.create_notes(components)
        # NOTE(review): the value returned by refresh() is not used afterwards.
        components = self.note_unit_page.refresh()
        self.assert_text_in_notes(self.note_unit_page.notes)
        self.courseware_page.go_to_sequential_position(1)
        components = self.note_unit_page.components
        self.assert_text_in_notes(self.note_unit_page.notes)
    def test_can_edit_notes(self):
        """
        Scenario: User can edit notes.
        Given I have a course with 3 components with notes
        And I open the unit with 2 annotatable components
        When I change text in the notes
        Then I see that notes were correctly changed
        When I change sequential position to "2"
        And I change the note on the page
        Then I see that note was correctly changed
        When I refresh the page
        Then I see that edited note was correctly stored
        When I change sequential position to "1"
        Then I see that edited notes were correctly stored on the page
        """
        self._add_notes()
        self.note_unit_page.visit()
        components = self.note_unit_page.components
        self.edit_notes(components)
        self.assert_text_in_notes(self.note_unit_page.notes)
        self.courseware_page.go_to_sequential_position(2)
        components = self.note_unit_page.components
        self.edit_notes(components)
        self.assert_text_in_notes(self.note_unit_page.notes)
        components = self.note_unit_page.refresh()
        self.assert_text_in_notes(self.note_unit_page.notes)
        self.courseware_page.go_to_sequential_position(1)
        components = self.note_unit_page.components
        self.assert_text_in_notes(self.note_unit_page.notes)
    def test_can_delete_notes(self):
        """
        Scenario: User can delete notes.
        Given I have a course with 3 components with notes
        And I open the unit with 2 annotatable components
        When I remove all notes on the page
        Then I do not see any notes on the page
        When I change sequential position to "2"
        And I remove all notes on the page
        Then I do not see any notes on the page
        When I refresh the page
        Then I do not see any notes on the page
        When I change sequential position to "1"
        Then I do not see any notes on the page
        """
        self._add_notes()
        self.note_unit_page.visit()
        components = self.note_unit_page.components
        self.remove_notes(components)
        self.assert_notes_are_removed(components)
        self.courseware_page.go_to_sequential_position(2)
        components = self.note_unit_page.components
        self.remove_notes(components)
        self.assert_notes_are_removed(components)
        components = self.note_unit_page.refresh()
        self.assert_notes_are_removed(components)
        self.courseware_page.go_to_sequential_position(1)
        components = self.note_unit_page.components
        self.assert_notes_are_removed(components)
    def test_can_create_note_with_tags(self):
        """
        Scenario: a user of notes can define one with tags
        Given I have a course with 3 annotatable components
        And I open the unit with 2 annotatable components
        When I add a note with tags for the first component
        And I refresh the page
        Then I see that note was correctly stored with its tags
        """
        self.note_unit_page.visit()
        components = self.note_unit_page.components
        for note in components[0].create_note(".{}".format(self.selector)):
            note.tags = ["fruit", "tasty"]
        self.note_unit_page.refresh()
        self.assertEqual(["fruit", "tasty"], self.note_unit_page.notes[0].tags)
    def test_can_change_tags(self):
        """
        Scenario: a user of notes can edit tags on notes
        Given I have a course with 3 components with notes
        When I open the unit with 2 annotatable components
        And I edit tags on the notes for the 2 annotatable components
        Then I see that the tags were correctly changed
        And I again edit tags on the notes for the 2 annotatable components
        And I refresh the page
        Then I see that the tags were correctly changed
        """
        self._add_notes()
        self.note_unit_page.visit()
        components = self.note_unit_page.components
        self.edit_tags_in_notes(components, [["hard"], ["apple", "pear"]])
        self.assert_tags_in_notes(self.note_unit_page.notes, [["hard"], ["apple", "pear"]])
        # An empty list clears the tags on the first component's note.
        self.edit_tags_in_notes(components, [[], ["avocado"]])
        self.assert_tags_in_notes(self.note_unit_page.notes, [[], ["avocado"]])
        self.note_unit_page.refresh()
        self.assert_tags_in_notes(self.note_unit_page.notes, [[], ["avocado"]])
    def test_sr_labels(self):
        """
        Scenario: screen reader labels exist for text and tags fields
        Given I have a course with 3 components with notes
        When I open the unit with 2 annotatable components
        And I open the editor for each note
        Then the text and tags fields both have screen reader labels
        """
        self._add_notes()
        self.note_unit_page.visit()
        # First note is in the first annotatable component, will have field indexes 0 and 1.
        for note in self.note_unit_page.components[0].edit_note():
            self.assertTrue(note.has_sr_label(0, 0, "Note"))
            self.assertTrue(note.has_sr_label(1, 1, "Tags (space-separated)"))
        # Second note is in the second annotatable component, will have field indexes 2 and 3.
        for note in self.note_unit_page.components[1].edit_note():
            self.assertTrue(note.has_sr_label(0, 2, "Note"))
            self.assertTrue(note.has_sr_label(1, 3, "Tags (space-separated)"))
@attr(shard=4)
class EdxNotesPageTest(EventsTestMixin, EdxNotesTestMixin):
"""
Tests for Notes page.
"""
    def _add_notes(self, notes_list):
        """Store the given Note objects in the fixture and push them to the edxnotes service."""
        self.edxnotes_fixture.create_notes(notes_list)
        self.edxnotes_fixture.install()
    def _add_default_notes(self, tags=None, extra_notes=0):
        """
        Creates 5 test notes by default & number of extra_notes will be created if specified.
        If tags are not specified, will populate the notes with some test tag data.
        If tags are specified, they will be used for each of the 3 notes that have tags.
        """
        xblocks = self.course_fixture.get_nested_xblocks(category="html")
        # pylint: disable=attribute-defined-outside-init
        # Kept on the instance so tests can look up usage_ids of specific notes later.
        # Notes deliberately have distinct `updated` years (2011-2015) so the
        # "Recent Activity" ordering is unambiguous.
        self.raw_note_list = [
            Note(
                usage_id=xblocks[4].locator,
                user=self.username,
                course_id=self.course_fixture._course_key,
                text="First note",
                quote="Annotate this",
                updated=datetime(2011, 1, 1, 1, 1, 1, 1).isoformat(),
            ),
            # Note with no text, only a quote and tags.
            Note(
                usage_id=xblocks[2].locator,
                user=self.username,
                course_id=self.course_fixture._course_key,
                text="",
                quote=u"Annotate this",
                updated=datetime(2012, 1, 1, 1, 1, 1, 1).isoformat(),
                tags=["Review", "cool"] if tags is None else tags
            ),
            Note(
                usage_id=xblocks[0].locator,
                user=self.username,
                course_id=self.course_fixture._course_key,
                text="Third note",
                quote="Annotate this",
                updated=datetime(2013, 1, 1, 1, 1, 1, 1).isoformat(),
                ranges=[Range(startOffset=0, endOffset=18)],
                tags=["Cool", "TODO"] if tags is None else tags
            ),
            # Note with no quote (not attached to highlighted text).
            Note(
                usage_id=xblocks[3].locator,
                user=self.username,
                course_id=self.course_fixture._course_key,
                text="Fourth note",
                quote="",
                updated=datetime(2014, 1, 1, 1, 1, 1, 1).isoformat(),
                tags=["review"] if tags is None else tags
            ),
            Note(
                usage_id=xblocks[1].locator,
                user=self.username,
                course_id=self.course_fixture._course_key,
                text="Fifth note",
                quote="Annotate this",
                updated=datetime(2015, 1, 1, 1, 1, 1, 1).isoformat()
            ),
        ]
        if extra_notes > 0:
            # Extra notes are clones of the "Fourth note" attached to a random component.
            for __ in range(extra_notes):
                self.raw_note_list.append(
                    Note(
                        usage_id=xblocks[random.choice([0, 1, 2, 3, 4, 5])].locator,
                        user=self.username,
                        course_id=self.course_fixture._course_key,  # pylint: disable=protected-access
                        text="Fourth note",
                        quote="",
                        updated=datetime(2014, 1, 1, 1, 1, 1, 1).isoformat(),
                        tags=["review"] if tags is None else tags
                    )
                )
        self._add_notes(self.raw_note_list)
def assertNoteContent(self, item, text=None, quote=None, unit_name=None, time_updated=None, tags=None):
""" Verifies the expected properties of the note. """
self.assertEqual(text, item.text)
if item.quote is not None:
self.assertIn(quote, item.quote)
else:
self.assertIsNone(quote)
self.assertEqual(unit_name, item.unit_name)
self.assertEqual(time_updated, item.time_updated)
self.assertEqual(tags, item.tags)
def assertChapterContent(self, item, title=None, subtitles=None):
"""
Verifies the expected title and subsection titles (subtitles) for the given chapter.
"""
self.assertEqual(item.title, title)
self.assertEqual(item.subtitles, subtitles)
def assertGroupContent(self, item, title=None, notes=None):
"""
Verifies the expected title and child notes for the given group.
"""
self.assertEqual(item.title, title)
self.assertEqual(item.notes, notes)
def assert_viewed_event(self, view=None):
"""
Verifies that the correct view event was captured for the Notes page.
"""
# There will always be an initial event for "Recent Activity" because that is the default view.
# If view is something besides "Recent Activity", expect 2 events, with the second one being
# the view name passed in.
if view == 'Recent Activity':
view = None
actual_events = self.wait_for_events(
event_filter={'event_type': 'edx.course.student_notes.notes_page_viewed'},
number_of_matches=1 if view is None else 2
)
expected_events = [{'event': {'view': 'Recent Activity'}}]
if view:
expected_events.append({'event': {'view': view}})
self.assert_events_match(expected_events, actual_events)
def assert_unit_link_event(self, usage_id, view):
"""
Verifies that the correct used_unit_link event was captured for the Notes page.
"""
actual_events = self.wait_for_events(
event_filter={'event_type': 'edx.course.student_notes.used_unit_link'},
number_of_matches=1
)
expected_events = [
{'event': {'component_usage_id': usage_id, 'view': view}}
]
self.assert_events_match(expected_events, actual_events)
def assert_search_event(self, search_string, number_of_results):
"""
Verifies that the correct searched event was captured for the Notes page.
"""
actual_events = self.wait_for_events(
event_filter={'event_type': 'edx.course.student_notes.searched'},
number_of_matches=1
)
expected_events = [
{'event': {'search_string': search_string, 'number_of_results': number_of_results}}
]
self.assert_events_match(expected_events, actual_events)
    def _verify_pagination_info(
            self,
            notes_count_on_current_page,
            header_text,
            previous_button_enabled,
            next_button_enabled,
            current_page_number,
            total_pages
    ):
        """
        Verify pagination info: the visible note count, the "Showing X-Y out of Z"
        header, and — when there is more than one page — the footer's button
        states and page numbers. With a single page, only checks that the footer
        is hidden.
        """
        self.assertEqual(self.notes_page.count(), notes_count_on_current_page)
        self.assertEqual(self.notes_page.get_pagination_header_text(), header_text)
        if total_pages > 1:
            self.assertEqual(self.notes_page.footer_visible, True)
            self.assertEqual(self.notes_page.is_previous_page_button_enabled(), previous_button_enabled)
            self.assertEqual(self.notes_page.is_next_page_button_enabled(), next_button_enabled)
            self.assertEqual(self.notes_page.get_current_page_number(), current_page_number)
            # NOTE(review): get_total_pages is not called here (unlike the methods
            # above) — presumably a property on the page object; confirm.
            self.assertEqual(self.notes_page.get_total_pages, total_pages)
        else:
            self.assertEqual(self.notes_page.footer_visible, False)
    def search_and_verify(self):
        """
        Add, search and verify notes.

        Creates 27 notes (5 defaults + 22 extras), searches for "note" and
        checks the Search Results tab appears with two pages of results.
        """
        self._add_default_notes(extra_notes=22)
        self.notes_page.visit()
        # Run the search
        self.notes_page.search("note")
        # No error message appears
        self.assertFalse(self.notes_page.is_error_visible)
        self.assertIn(u"Search Results", self.notes_page.tabs)
        self.assertEqual(self.notes_page.get_total_pages, 2)
def test_no_content(self):
"""
Scenario: User can see `No content` message.
Given I have a course without notes
When I open Notes page
Then I see only "You do not have any notes within the course." message
"""
notes_page_empty = EdxNotesPageNoContent(self.browser, self.course_id)
notes_page_empty.visit()
self.assertIn(
"You have not made any notes in this course yet. Other students in this course are using notes to:",
notes_page_empty.no_content_text)
    def test_notes_works_correctly_with_xss(self):
        """
        Scenario: Note text & tags should be HTML and JS escaped
        Given I am enrolled in a course with notes enabled
        When I visit the Notes page, with a Notes text and tag containing HTML characters like < and >
        Then the text and tags appear as expected due to having been properly escaped
        """
        xblocks = self.course_fixture.get_nested_xblocks(category="html")
        self._add_notes([
            Note(
                usage_id=xblocks[0].locator,
                user=self.username,
                course_id=self.course_fixture._course_key,  # pylint: disable=protected-access
                text='<script>alert("XSS")</script>',
                quote="quote",
                updated=datetime(2014, 1, 1, 1, 1, 1, 1).isoformat(),
                tags=['<script>alert("XSS")</script>']
            ),
            Note(
                usage_id=xblocks[1].locator,
                user=self.username,
                course_id=self.course_fixture._course_key,  # pylint: disable=protected-access
                text='<b>bold</b>',
                quote="quote",
                updated=datetime(2014, 2, 1, 1, 1, 1, 1).isoformat(),
                tags=['<i>bold</i>']
            )
        ])
        self.notes_page.visit()
        notes = self.notes_page.notes
        self.assertEqual(len(notes), 2)
        # Notes are listed most-recent-first, so the Feb 2014 note comes before
        # the Jan 2014 one.
        self.assertNoteContent(
            notes[0],
            quote=u"quote",
            text='<b>bold</b>',
            unit_name="Test Unit 1",
            time_updated="Feb 01, 2014 at 01:01 UTC",
            tags=['<i>bold</i>']
        )
        self.assertNoteContent(
            notes[1],
            quote=u"quote",
            text='<script>alert("XSS")</script>',
            unit_name="Test Unit 1",
            time_updated="Jan 01, 2014 at 01:01 UTC",
            tags=['<script>alert("XSS")</script>']
        )
    def test_recent_activity_view(self):
        """
        Scenario: User can view all notes by recent activity.
        Given I have a course with 5 notes
        When I open Notes page
        Then I see 5 notes sorted by the updated date
        And I see correct content in the notes
        And an event has fired indicating that the Recent Activity view was selected
        """
        self._add_default_notes()
        self.notes_page.visit()
        notes = self.notes_page.notes
        self.assertEqual(len(notes), 5)
        # Expected order is newest-to-oldest by the `updated` dates set in
        # _add_default_notes (2015 down to 2011).
        self.assertNoteContent(
            notes[0],
            quote=u"Annotate this",
            text=u"Fifth note",
            unit_name="Test Unit 1",
            time_updated="Jan 01, 2015 at 01:01 UTC"
        )
        self.assertNoteContent(
            notes[1],
            text=u"Fourth note",
            unit_name="Test Unit 3",
            time_updated="Jan 01, 2014 at 01:01 UTC",
            tags=["review"]
        )
        self.assertNoteContent(
            notes[2],
            quote="Annotate this",
            text=u"Third note",
            unit_name="Test Unit 1",
            time_updated="Jan 01, 2013 at 01:01 UTC",
            tags=["Cool", "TODO"]
        )
        self.assertNoteContent(
            notes[3],
            quote=u"Annotate this",
            unit_name="Test Unit 2",
            time_updated="Jan 01, 2012 at 01:01 UTC",
            tags=["Review", "cool"]
        )
        self.assertNoteContent(
            notes[4],
            quote=u"Annotate this",
            text=u"First note",
            unit_name="Test Unit 4",
            time_updated="Jan 01, 2011 at 01:01 UTC"
        )
        self.assert_viewed_event()
    def test_course_structure_view(self):
        """
        Scenario: User can view all notes by location in Course.
        Given I have a course with 5 notes
        When I open Notes page
        And I switch to "Location in Course" view
        Then I see 2 groups, 3 sections and 5 notes
        And I see correct content in the notes and groups
        And an event has fired indicating that the Location in Course view was selected
        """
        self._add_default_notes()
        self.notes_page.visit().switch_to_tab("structure")
        notes = self.notes_page.notes
        groups = self.notes_page.chapter_groups
        sections = self.notes_page.subsection_groups
        self.assertEqual(len(notes), 5)
        self.assertEqual(len(groups), 2)
        self.assertEqual(len(sections), 3)
        # Chapter group 1: "Test Section 1" with two subsections.
        self.assertChapterContent(
            groups[0],
            title=u"Test Section 1",
            subtitles=[u"Test Subsection 1", u"Test Subsection 2"]
        )
        # The third entry is None because that note has empty text.
        self.assertGroupContent(
            sections[0],
            title=u"Test Subsection 1",
            notes=[u"Fifth note", u"Third note", None]
        )
        self.assertNoteContent(
            notes[0],
            quote=u"Annotate this",
            text=u"Fifth note",
            unit_name="Test Unit 1",
            time_updated="Jan 01, 2015 at 01:01 UTC"
        )
        self.assertNoteContent(
            notes[1],
            quote=u"Annotate this",
            text=u"Third note",
            unit_name="Test Unit 1",
            time_updated="Jan 01, 2013 at 01:01 UTC",
            tags=["Cool", "TODO"]
        )
        self.assertNoteContent(
            notes[2],
            quote=u"Annotate this",
            unit_name="Test Unit 2",
            time_updated="Jan 01, 2012 at 01:01 UTC",
            tags=["Review", "cool"]
        )
        self.assertGroupContent(
            sections[1],
            title=u"Test Subsection 2",
            notes=[u"Fourth note"]
        )
        self.assertNoteContent(
            notes[3],
            text=u"Fourth note",
            unit_name="Test Unit 3",
            time_updated="Jan 01, 2014 at 01:01 UTC",
            tags=["review"]
        )
        # Chapter group 2: "Test Section 2" with a single subsection.
        self.assertChapterContent(
            groups[1],
            title=u"Test Section 2",
            subtitles=[u"Test Subsection 3"],
        )
        self.assertGroupContent(
            sections[2],
            title=u"Test Subsection 3",
            notes=[u"First note"]
        )
        self.assertNoteContent(
            notes[4],
            quote=u"Annotate this",
            text=u"First note",
            unit_name="Test Unit 4",
            time_updated="Jan 01, 2011 at 01:01 UTC"
        )
        self.assert_viewed_event('Location in Course')
    def test_tags_view(self):
        """
        Scenario: User can view all notes by associated tags.
        Given I have a course with 5 notes and I am viewing the Notes page
        When I switch to the "Tags" view
        Then I see 4 tag groups
        And I see correct content in the notes and groups
        And an event has fired indicating that the Tags view was selected
        """
        self._add_default_notes()
        self.notes_page.visit().switch_to_tab("tags")
        notes = self.notes_page.notes
        groups = self.notes_page.tag_groups
        # 7 note entries, not 5: notes with multiple tags appear once per tag group.
        self.assertEqual(len(notes), 7)
        self.assertEqual(len(groups), 4)
        # Tag group "cool"
        self.assertGroupContent(
            groups[0],
            title=u"cool (2)",
            notes=[u"Third note", None]
        )
        self.assertNoteContent(
            notes[0],
            quote=u"Annotate this",
            text=u"Third note",
            unit_name="Test Unit 1",
            time_updated="Jan 01, 2013 at 01:01 UTC",
            tags=["Cool", "TODO"]
        )
        self.assertNoteContent(
            notes[1],
            quote=u"Annotate this",
            unit_name="Test Unit 2",
            time_updated="Jan 01, 2012 at 01:01 UTC",
            tags=["Review", "cool"]
        )
        # Tag group "review"
        self.assertGroupContent(
            groups[1],
            title=u"review (2)",
            notes=[u"Fourth note", None]
        )
        self.assertNoteContent(
            notes[2],
            text=u"Fourth note",
            unit_name="Test Unit 3",
            time_updated="Jan 01, 2014 at 01:01 UTC",
            tags=["review"]
        )
        self.assertNoteContent(
            notes[3],
            quote=u"Annotate this",
            unit_name="Test Unit 2",
            time_updated="Jan 01, 2012 at 01:01 UTC",
            tags=["Review", "cool"]
        )
        # Tag group "todo"
        self.assertGroupContent(
            groups[2],
            title=u"todo (1)",
            notes=["Third note"]
        )
        self.assertNoteContent(
            notes[4],
            quote=u"Annotate this",
            text=u"Third note",
            unit_name="Test Unit 1",
            time_updated="Jan 01, 2013 at 01:01 UTC",
            tags=["Cool", "TODO"]
        )
        # Notes with no tags
        self.assertGroupContent(
            groups[3],
            title=u"[no tags] (2)",
            notes=["Fifth note", "First note"]
        )
        self.assertNoteContent(
            notes[5],
            quote=u"Annotate this",
            text=u"Fifth note",
            unit_name="Test Unit 1",
            time_updated="Jan 01, 2015 at 01:01 UTC"
        )
        self.assertNoteContent(
            notes[6],
            quote=u"Annotate this",
            text=u"First note",
            unit_name="Test Unit 4",
            time_updated="Jan 01, 2011 at 01:01 UTC"
        )
        self.assert_viewed_event('Tags')
    def test_easy_access_from_notes_page(self):
        """
        Scenario: Ensure that the link to the Unit works correctly.
        Given I have a course with 5 notes
        When I open Notes page
        And I click on the first unit link
        Then I see correct text on the unit page and a unit link event was fired
        When go back to the Notes page
        And I switch to "Location in Course" view
        And I click on the second unit link
        Then I see correct text on the unit page and a unit link event was fired
        When go back to the Notes page
        And I switch to "Tags" view
        And I click on the first unit link
        Then I see correct text on the unit page and a unit link event was fired
        When go back to the Notes page
        And I run the search with "Fifth" query
        And I click on the first unit link
        Then I see correct text on the unit page and a unit link event was fired
        """
        def assert_page(note, usage_id, view):
            """ Verify that clicking on the unit link works properly. """
            quote = note.quote
            note.go_to_unit()
            self.courseware_page.wait_for_page()
            self.assertIn(quote, self.courseware_page.xblock_component_html_content())
            self.assert_unit_link_event(usage_id, view)
            # Clear captured events so the next assert_unit_link_event only
            # matches the upcoming click.
            self.reset_event_tracking()
        self._add_default_notes()
        self.notes_page.visit()
        # raw_note_list[4] is "Fifth note" (updated 2015) — first in Recent Activity.
        note = self.notes_page.notes[0]
        assert_page(note, self.raw_note_list[4]['usage_id'], "Recent Activity")
        self.notes_page.visit()
        self.notes_page.switch_to_tab("structure")
        note = self.notes_page.notes[1]
        assert_page(note, self.raw_note_list[2]['usage_id'], "Location in Course")
        self.notes_page.visit()
        self.notes_page.switch_to_tab("tags")
        note = self.notes_page.notes[0]
        assert_page(note, self.raw_note_list[2]['usage_id'], "Tags")
        self.notes_page.visit()
        self.notes_page.search("Fifth")
        self.notes_page.wait_for_ajax()
        note = self.notes_page.notes[0]
        assert_page(note, self.raw_note_list[4]['usage_id'], "Search Results")
    def test_search_behaves_correctly(self):
        """
        Scenario: Searching behaves correctly.
        Given I have a course with 5 notes
        When I open Notes page
        When I run the search with " " query
        Then I see the following error message "Please enter a term in the search field."
        And I do not see "Search Results" tab
        When I run the search with "note" query
        Then I see that error message disappears
        And I see that "Search Results" tab appears with 4 notes found
        And an event has fired indicating that the Search Results view was selected
        And an event has fired recording the search that was performed
        """
        self._add_default_notes()
        self.notes_page.visit()
        # Run the search with whitespaces only
        self.notes_page.search(" ")
        # Displays error message
        self.assertTrue(self.notes_page.is_error_visible)
        self.assertEqual(self.notes_page.error_text, u"Please enter a term in the search field.")
        # Search results tab does not appear
        self.assertNotIn(u"Search Results", self.notes_page.tabs)
        # Run the search with correct query
        self.notes_page.search("note")
        # Error message disappears
        self.assertFalse(self.notes_page.is_error_visible)
        self.assertIn(u"Search Results", self.notes_page.tabs)
        notes = self.notes_page.notes
        # Only 4 of the 5 default notes contain "note" in their text
        # (the 2012 note has empty text), listed newest first.
        self.assertEqual(len(notes), 4)
        self.assertNoteContent(
            notes[0],
            quote=u"Annotate this",
            text=u"Fifth note",
            unit_name="Test Unit 1",
            time_updated="Jan 01, 2015 at 01:01 UTC"
        )
        self.assertNoteContent(
            notes[1],
            text=u"Fourth note",
            unit_name="Test Unit 3",
            time_updated="Jan 01, 2014 at 01:01 UTC",
            tags=["review"]
        )
        self.assertNoteContent(
            notes[2],
            quote="Annotate this",
            text=u"Third note",
            unit_name="Test Unit 1",
            time_updated="Jan 01, 2013 at 01:01 UTC",
            tags=["Cool", "TODO"]
        )
        self.assertNoteContent(
            notes[3],
            quote=u"Annotate this",
            text=u"First note",
            unit_name="Test Unit 4",
            time_updated="Jan 01, 2011 at 01:01 UTC"
        )
        self.assert_viewed_event('Search Results')
        self.assert_search_event('note', 4)
    @skip("scroll to tag functionality is disabled")
    def test_scroll_to_tag_recent_activity(self):
        """
        Scenario: Can scroll to a tag group from the Recent Activity view (default view)
        Given I have a course with 5 notes and I open the Notes page
        When I click on a tag associated with a note
        Then the Tags view tab gets focus and I scroll to the section of notes associated with that tag
        """
        # All tagged notes share this 7-tag list; groups sort alphabetically,
        # so "pear" is group index 3.
        self._add_default_notes(["apple", "banana", "kiwi", "pear", "pumpkin", "squash", "zucchini"])
        self.notes_page.visit()
        self._scroll_to_tag_and_verify("pear", 3)
    @skip("scroll to tag functionality is disabled")
    def test_scroll_to_tag_course_structure(self):
        """
        Scenario: Can scroll to a tag group from the Course Structure view
        Given I have a course with 5 notes and I open the Notes page and select the Course Structure view
        When I click on a tag associated with a note
        Then the Tags view tab gets focus and I scroll to the section of notes associated with that tag
        """
        # "squash" is the 6th tag alphabetically, hence group index 5.
        self._add_default_notes(["apple", "banana", "kiwi", "pear", "pumpkin", "squash", "zucchini"])
        self.notes_page.visit().switch_to_tab("structure")
        self._scroll_to_tag_and_verify("squash", 5)
    @skip("scroll to tag functionality is disabled")
    def test_scroll_to_tag_search(self):
        """
        Scenario: Can scroll to a tag group from the Search Results view
        Given I have a course with 5 notes and I open the Notes page and perform a search
        Then the Search view tab opens and gets focus
        And when I click on a tag associated with a note
        Then the Tags view tab gets focus and I scroll to the section of notes associated with that tag
        """
        # "pumpkin" is the 5th tag alphabetically, hence group index 4.
        self._add_default_notes(["apple", "banana", "kiwi", "pear", "pumpkin", "squash", "zucchini"])
        self.notes_page.visit().search("note")
        self._scroll_to_tag_and_verify("pumpkin", 4)
    @skip("scroll to tag functionality is disabled")
    def test_scroll_to_tag_from_tag_view(self):
        """
        Scenario: Can scroll to a tag group from the Tags view
        Given I have a course with 5 notes and I open the Notes page and select the Tag view
        When I click on a tag associated with a note
        Then I scroll to the section of notes associated with that tag
        """
        # "kiwi" is the 3rd tag alphabetically, hence group index 2.
        self._add_default_notes(["apple", "banana", "kiwi", "pear", "pumpkin", "squash", "zucchini"])
        self.notes_page.visit().switch_to_tab("tags")
        self._scroll_to_tag_and_verify("kiwi", 2)
def _scroll_to_tag_and_verify(self, tag_name, group_index):
""" Helper method for all scroll to tag tests """
self.notes_page.notes[1].go_to_tag(tag_name)
# Because all the notes (with tags) have the same tags, they will end up ordered alphabetically.
pear_group = self.notes_page.tag_groups[group_index]
self.assertEqual(tag_name + " (3)", pear_group.title)
self.assertTrue(pear_group.scrolled_to_top(group_index))
    def test_tabs_behaves_correctly(self):
        """
        Scenario: Tabs behaves correctly.
        Given I have a course with 5 notes
        When I open Notes page
        Then I see only "Recent Activity", "Location in Course", and "Tags" tabs
        When I run the search with "note" query
        And I see that "Search Results" tab appears with 4 notes found
        Then I switch to "Recent Activity" tab
        And I see all 5 notes
        Then I switch to "Location in Course" tab
        And I see all 2 groups and 5 notes
        When I switch back to "Search Results" tab
        Then I can still see 4 notes found
        When I close "Search Results" tab
        Then I see that "Recent Activity" tab becomes active
        And "Search Results" tab disappears
        And I see all 5 notes
        """
        self._add_default_notes()
        self.notes_page.visit()
        # We're on Recent Activity tab.
        self.assertEqual(len(self.notes_page.tabs), 3)
        self.assertEqual([u"Recent Activity", u"Location in Course", u"Tags"], self.notes_page.tabs)
        self.notes_page.search("note")
        # We're on Search Results tab
        self.assertEqual(len(self.notes_page.tabs), 4)
        self.assertIn(u"Search Results", self.notes_page.tabs)
        self.assertEqual(len(self.notes_page.notes), 4)
        # We can switch on Recent Activity tab and back.
        self.notes_page.switch_to_tab("recent")
        self.assertEqual(len(self.notes_page.notes), 5)
        self.notes_page.switch_to_tab("structure")
        self.assertEqual(len(self.notes_page.chapter_groups), 2)
        self.assertEqual(len(self.notes_page.notes), 5)
        self.notes_page.switch_to_tab("search")
        self.assertEqual(len(self.notes_page.notes), 4)
        # Can close search results page
        self.notes_page.close_tab()
        self.assertEqual(len(self.notes_page.tabs), 3)
        self.assertNotIn(u"Search Results", self.notes_page.tabs)
        self.assertEqual(len(self.notes_page.notes), 5)
    def test_open_note_when_accessed_from_notes_page(self):
        """
        Scenario: Ensure that the link to the Unit opens a note only once.
        Given I have a course with 2 sequentials that contain respectively one note and two notes
        When I open Notes page
        And I click on the first unit link
        Then I see the note opened on the unit page
        When I switch to the second sequential
        I do not see any note opened
        When I switch back to first sequential
        I do not see any note opened
        """
        xblocks = self.course_fixture.get_nested_xblocks(category="html")
        self._add_notes([
            Note(
                usage_id=xblocks[1].locator,
                user=self.username,
                course_id=self.course_fixture._course_key,
                text="Third note",
                quote="Annotate this",
                updated=datetime(2012, 1, 1, 1, 1, 1, 1).isoformat(),
                ranges=[Range(startOffset=0, endOffset=14)],
            ),
            Note(
                usage_id=xblocks[2].locator,
                user=self.username,
                course_id=self.course_fixture._course_key,
                text="Second note",
                quote="Annotate this",
                updated=datetime(2013, 1, 1, 1, 1, 1, 1).isoformat(),
                ranges=[Range(startOffset=0, endOffset=14)],
            ),
            Note(
                usage_id=xblocks[0].locator,
                user=self.username,
                course_id=self.course_fixture._course_key,
                text="First note",
                quote="Annotate this",
                updated=datetime(2014, 1, 1, 1, 1, 1, 1).isoformat(),
                ranges=[Range(startOffset=0, endOffset=14)],
            ),
        ])
        self.notes_page.visit()
        # Follow the unit link of the most recent note ("First note", 2014).
        item = self.notes_page.notes[0]
        item.go_to_unit()
        self.courseware_page.wait_for_page()
        # Only the note that was clicked opens; its sibling stays closed.
        note = self.note_unit_page.notes[0]
        self.assertTrue(note.is_visible)
        note = self.note_unit_page.notes[1]
        self.assertFalse(note.is_visible)
        self.courseware_page.go_to_sequential_position(2)
        note = self.note_unit_page.notes[0]
        self.assertFalse(note.is_visible)
        # Returning to the first sequential must not re-open the note.
        self.courseware_page.go_to_sequential_position(1)
        note = self.note_unit_page.notes[0]
        self.assertFalse(note.is_visible)
def test_page_size_limit(self):
"""
Scenario: Verify that we can't get notes more than default page size.
Given that I am a registered user
And I have a course with 11 notes
When I open Notes page
Then I can see notes list contains 10 items
And I should see paging header and footer with correct data
And I should see disabled previous button
And I should also see enabled next button
"""
self._add_default_notes(extra_notes=21)
self.notes_page.visit()
self._verify_pagination_info(
notes_count_on_current_page=25,
header_text='Showing 1-25 out of 26 total',
previous_button_enabled=False,
next_button_enabled=True,
current_page_number=1,
total_pages=2
)
    def test_pagination_with_single_page(self):
        """
        Scenario: Notes list pagination works as expected for single page
        Given that I am a registered user
        And I have a course with 5 notes
        When I open Notes page
        Then I can see notes list contains 5 items
        And I should see paging header and footer with correct data
        And I should see disabled previous and next buttons
        """
        self._add_default_notes()
        self.notes_page.visit()
        # With a single page, _verify_pagination_info only checks the header
        # and that the footer is hidden.
        self._verify_pagination_info(
            notes_count_on_current_page=5,
            header_text='Showing 1-5 out of 5 total',
            previous_button_enabled=False,
            next_button_enabled=False,
            current_page_number=1,
            total_pages=1
        )
def test_next_and_previous_page_button(self):
"""
Scenario: Next & Previous buttons are working as expected for notes list pagination
Given that I am a registered user
And I have a course with 26 notes
When I open Notes page
Then I can see notes list contains 25 items
And I should see paging header and footer with correct data
And I should see disabled previous button
And I should see enabled next button
When I click on next page button in footer
Then I should be navigated to second page
And I should see a list with 1 item
And I should see paging header and footer with correct info
And I should see enabled previous button
And I should also see disabled next button
When I click on previous page button in footer
Then I should be navigated to first page
And I should see a list with 25 items
And I should see paging header and footer with correct info
And I should see disabled previous button
And I should also see enabled next button
"""
self._add_default_notes(extra_notes=21)
self.notes_page.visit()
self._verify_pagination_info(
notes_count_on_current_page=25,
header_text='Showing 1-25 out of 26 total',
previous_button_enabled=False,
next_button_enabled=True,
current_page_number=1,
total_pages=2
)
self.notes_page.press_next_page_button()
self._verify_pagination_info(
notes_count_on_current_page=1,
header_text='Showing 26-26 out of 26 total',
previous_button_enabled=True,
next_button_enabled=False,
current_page_number=2,
total_pages=2
)
self.notes_page.press_previous_page_button()
self._verify_pagination_info(
notes_count_on_current_page=25,
header_text='Showing 1-25 out of 26 total',
previous_button_enabled=False,
next_button_enabled=True,
current_page_number=1,
total_pages=2
)
def test_pagination_with_valid_and_invalid_page_number(self):
"""
Scenario: Notes list pagination works as expected for valid & invalid page number
Given that I am a registered user
And I have a course with 26 notes
When I open Notes page
Then I can see notes list contains 25 items
And I should see paging header and footer with correct data
And I should see total page value is 2
When I enter 2 in the page number input
Then I should be navigated to page 2
When I enter 3 in the page number input
Then I should not be navigated away from page 2
"""
self._add_default_notes(extra_notes=21)
self.notes_page.visit()
self.assertEqual(self.notes_page.get_total_pages, 2)
# test pagination with valid page number
self.notes_page.go_to_page(2)
self._verify_pagination_info(
notes_count_on_current_page=1,
header_text='Showing 26-26 out of 26 total',
previous_button_enabled=True,
next_button_enabled=False,
current_page_number=2,
total_pages=2
)
# test pagination with invalid page number
self.notes_page.go_to_page(3)
self._verify_pagination_info(
notes_count_on_current_page=1,
header_text='Showing 26-26 out of 26 total',
previous_button_enabled=True,
next_button_enabled=False,
current_page_number=2,
total_pages=2
)
def test_search_behaves_correctly_with_pagination(self):
"""
Scenario: Searching behaves correctly with pagination.
Given that I am a registered user
And I have a course with 27 notes
When I open Notes page
Then I can see notes list with 25 items
And I should see paging header and footer with correct data
And previous button is disabled
And next button is enabled
When I run the search with "note" query
Then I see no error message
And I see that "Search Results" tab appears with 26 notes found
And an event has fired indicating that the Search Results view was selected
And an event has fired recording the search that was performed
"""
self.search_and_verify()
self._verify_pagination_info(
notes_count_on_current_page=25,
header_text='Showing 1-25 out of 26 total',
previous_button_enabled=False,
next_button_enabled=True,
current_page_number=1,
total_pages=2
)
self.assert_viewed_event('Search Results')
self.assert_search_event('note', 26)
def test_search_with_next_and_prev_page_button(self):
"""
Scenario: Next & Previous buttons are working as expected for search
Given that I am a registered user
And I have a course with 27 notes
When I open Notes page
Then I can see notes list with 25 items
And I should see paging header and footer with correct data
And previous button is disabled
And next button is enabled
When I run the search with "note" query
Then I see that "Search Results" tab appears with 26 notes found
And an event has fired indicating that the Search Results view was selected
And an event has fired recording the search that was performed
When I click on next page button in footer
Then I should be navigated to second page
And I should see a list with 1 item
And I should see paging header and footer with correct info
And I should see enabled previous button
And I should also see disabled next button
When I click on previous page button in footer
Then I should be navigated to first page
And I should see a list with 25 items
And I should see paging header and footer with correct info
And I should see disabled previous button
And I should also see enabled next button
"""
self.search_and_verify()
self._verify_pagination_info(
notes_count_on_current_page=25,
header_text='Showing 1-25 out of 26 total',
previous_button_enabled=False,
next_button_enabled=True,
current_page_number=1,
total_pages=2
)
self.assert_viewed_event('Search Results')
self.assert_search_event('note', 26)
self.notes_page.press_next_page_button()
self._verify_pagination_info(
notes_count_on_current_page=1,
header_text='Showing 26-26 out of 26 total',
previous_button_enabled=True,
next_button_enabled=False,
current_page_number=2,
total_pages=2
)
self.notes_page.press_previous_page_button()
self._verify_pagination_info(
notes_count_on_current_page=25,
header_text='Showing 1-25 out of 26 total',
previous_button_enabled=False,
next_button_enabled=True,
current_page_number=1,
total_pages=2
)
def test_search_with_valid_and_invalid_page_number(self):
"""
Scenario: Notes list pagination works as expected for valid & invalid page number
Given that I am a registered user
And I have a course with 27 notes
When I open Notes page
Then I can see notes list contains 25 items
And I should see paging header and footer with correct data
And I should see total page value is 2
When I run the search with "note" query
Then I see that "Search Results" tab appears with 26 notes found
And an event has fired indicating that the Search Results view was selected
And an event has fired recording the search that was performed
When I enter 2 in the page number input
Then I should be navigated to page 2
When I enter 3 in the page number input
Then I should not be navigated away from page 2
"""
self.search_and_verify()
# test pagination with valid page number
self.notes_page.go_to_page(2)
self._verify_pagination_info(
notes_count_on_current_page=1,
header_text='Showing 26-26 out of 26 total',
previous_button_enabled=True,
next_button_enabled=False,
current_page_number=2,
total_pages=2
)
# test pagination with invalid page number
self.notes_page.go_to_page(3)
self._verify_pagination_info(
notes_count_on_current_page=1,
header_text='Showing 26-26 out of 26 total',
previous_button_enabled=True,
next_button_enabled=False,
current_page_number=2,
total_pages=2
)
@attr(shard=4)
class EdxNotesToggleSingleNoteTest(EdxNotesTestMixin):
    """
    Tests for toggling a single annotation open and closed.
    """
    def setUp(self):
        super(EdxNotesToggleSingleNoteTest, self).setUp()
        # Create notes up-front so every test starts on a unit that has them.
        self._add_notes()
        self.note_unit_page.visit()

    def test_can_toggle_by_clicking_on_highlighted_text(self):
        """
        Scenario: User can toggle a single note by clicking on highlighted text.
        Given I have a course with components with notes
        When I click on highlighted text
        And I move mouse out of the note
        Then I see that the note is still shown
        When I click outside the note
        Then I see the note is closed
        """
        note = self.note_unit_page.notes[0]
        note.click_on_highlight()
        # Moving the mouse away must not hide a note pinned open by a click.
        self.note_unit_page.move_mouse_to("body")
        self.assertTrue(note.is_visible)
        # A click outside the note closes it.
        self.note_unit_page.click("body")
        self.assertFalse(note.is_visible)

    def test_can_toggle_by_clicking_on_the_note(self):
        """
        Scenario: User can toggle a single note by clicking on the note.
        Given I have a course with components with notes
        When I click on the note
        And I move mouse out of the note
        Then I see that the note is still shown
        When I click outside the note
        Then I see the note is closed
        """
        note = self.note_unit_page.notes[0]
        note.show().click_on_viewer()
        self.note_unit_page.move_mouse_to("body")
        self.assertTrue(note.is_visible)
        self.note_unit_page.click("body")
        self.assertFalse(note.is_visible)

    def test_interaction_between_notes(self):
        """
        Scenario: Interactions between notes work well.
        Given I have a course with components with notes
        When I click on highlighted text in the first component
        And I move mouse out of the note
        Then I see that the note is still shown
        When I click on highlighted text in the second component
        Then I see that the new note is shown
        """
        note_1 = self.note_unit_page.notes[0]
        note_2 = self.note_unit_page.notes[1]
        note_1.click_on_highlight()
        self.note_unit_page.move_mouse_to("body")
        self.assertTrue(note_1.is_visible)
        # Opening a second note closes the first one.
        note_2.click_on_highlight()
        self.assertFalse(note_1.is_visible)
        self.assertTrue(note_2.is_visible)
@attr(shard=4)
class EdxNotesToggleNotesTest(EdxNotesTestMixin):
    """
    Tests for toggling visibility of all notes at once via the
    "Show notes" checkbox.
    """
    def setUp(self):
        super(EdxNotesToggleNotesTest, self).setUp()
        # Create notes up-front so every test starts on a unit that has them.
        self._add_notes()
        self.note_unit_page.visit()

    def test_can_disable_all_notes(self):
        """
        Scenario: User can disable all notes.
        Given I have a course with components with notes
        And I open the unit with annotatable components
        When I click on "Show notes" checkbox
        Then I do not see any notes on the sequential position
        When I change sequential position to "2"
        Then I still do not see any notes on the sequential position
        When I go to "Test Subsection 2" subsection
        Then I do not see any notes on the subsection
        """
        # Disable all notes; the setting must persist across navigation.
        self.note_unit_page.toggle_visibility()
        self.assertEqual(len(self.note_unit_page.notes), 0)
        self.courseware_page.go_to_sequential_position(2)
        self.assertEqual(len(self.note_unit_page.notes), 0)
        self.course_nav.go_to_section(u"Test Section 1", u"Test Subsection 2")
        self.assertEqual(len(self.note_unit_page.notes), 0)

    def test_can_reenable_all_notes(self):
        """
        Scenario: User can toggle notes visibility.
        Given I have a course with components with notes
        And I open the unit with annotatable components
        When I click on "Show notes" checkbox
        Then I do not see any notes on the sequential position
        When I click on "Show notes" checkbox again
        Then I see that all notes appear
        When I change sequential position to "2"
        Then I still can see all notes on the sequential position
        When I go to "Test Subsection 2" subsection
        Then I can see all notes on the subsection
        """
        # Disable notes
        self.note_unit_page.toggle_visibility()
        self.assertEqual(len(self.note_unit_page.notes), 0)
        # Enable notes to make sure that I can enable notes without refreshing
        # the page.
        self.note_unit_page.toggle_visibility()
        self.assertGreater(len(self.note_unit_page.notes), 0)
        # Re-enabled notes must stay visible across navigation too.
        self.courseware_page.go_to_sequential_position(2)
        self.assertGreater(len(self.note_unit_page.notes), 0)
        self.course_nav.go_to_section(u"Test Section 1", u"Test Subsection 2")
        self.assertGreater(len(self.note_unit_page.notes), 0)
|
highweb-project/highweb-webcl-html5spec
|
refs/heads/highweb-20160310
|
third_party/jinja2/bccache.py
|
241
|
# -*- coding: utf-8 -*-
"""
jinja2.bccache
~~~~~~~~~~~~~~
This module implements the bytecode cache system Jinja is optionally
using. This is useful if you have very complex template situations and
the compilation of all those templates slows down your application too
much.
Situations where this is useful are often forking web applications that
are initialized on the first request.
:copyright: (c) 2010 by the Jinja Team.
:license: BSD.
"""
from os import path, listdir
import sys
import marshal
import tempfile
import fnmatch
from hashlib import sha1
from jinja2.utils import open_if_exists
from jinja2._compat import BytesIO, pickle, PY2, text_type
# marshal works better on 3.x, one hack less required
if not PY2:
    # Python 3's marshal accepts any binary file-like object directly.
    marshal_dump = marshal.dump
    marshal_load = marshal.load
else:
    def marshal_dump(code, f):
        # Python 2's marshal.dump only works with real ``file`` objects;
        # for other file-like objects, serialize to a string and write that.
        if isinstance(f, file):
            marshal.dump(code, f)
        else:
            f.write(marshal.dumps(code))

    def marshal_load(f):
        # Mirror of marshal_dump: real files go straight to marshal.load.
        if isinstance(f, file):
            return marshal.load(f)
        return marshal.loads(f.read())
# Version of the cache file layout; bump to invalidate all existing caches.
bc_version = 2

# magic version used to only change with new jinja versions.  With 2.6
# we change this to also take Python version changes into account.  The
# reason for this is that Python tends to segfault if fed earlier bytecode
# versions because someone thought it would be a good idea to reuse opcodes
# or make Python incompatible with earlier versions.
bc_magic = 'j2'.encode('ascii') + \
    pickle.dumps(bc_version, 2) + \
    pickle.dumps((sys.version_info[0] << 24) | sys.version_info[1])
class Bucket(object):
    """Holds the cached bytecode for exactly one template.

    Buckets are created and initialized by the bytecode cache and passed to
    the loading functions.  The cache assigns each bucket a checksum of the
    template source; a stored entry whose checksum does not match is treated
    as stale and discarded, so individual cache subclasses never have to
    implement invalidation themselves.
    """

    def __init__(self, environment, key, checksum):
        self.environment = environment
        self.key = key
        self.checksum = checksum
        self.reset()

    def reset(self):
        """Resets the bucket (unloads the bytecode)."""
        self.code = None

    def load_bytecode(self, f):
        """Loads bytecode from a file or file like object."""
        # Reject anything not written by a compatible Jinja/Python combo.
        if f.read(len(bc_magic)) != bc_magic:
            self.reset()
            return
        # A checksum mismatch means the template source changed on disk.
        if pickle.load(f) != self.checksum:
            self.reset()
            return
        self.code = marshal_load(f)

    def write_bytecode(self, f):
        """Dump the bytecode into the file or file like object passed."""
        if self.code is None:
            raise TypeError("can't write empty bucket")
        f.write(bc_magic)
        pickle.dump(self.checksum, f, 2)
        marshal_dump(self.code, f)

    def bytecode_from_string(self, string):
        """Load bytecode from a string."""
        self.load_bytecode(BytesIO(string))

    def bytecode_to_string(self):
        """Return the bytecode as string."""
        buf = BytesIO()
        self.write_bytecode(buf)
        return buf.getvalue()
class BytecodeCache(object):
    """To implement your own bytecode cache you have to subclass this class
    and override :meth:`load_bytecode` and :meth:`dump_bytecode`.  Both of
    these methods are passed a :class:`~jinja2.bccache.Bucket`.

    A very basic bytecode cache that saves the bytecode on the file system::

        from os import path

        class MyCache(BytecodeCache):

            def __init__(self, directory):
                self.directory = directory

            def load_bytecode(self, bucket):
                filename = path.join(self.directory, bucket.key)
                if path.exists(filename):
                    with open(filename, 'rb') as f:
                        bucket.load_bytecode(f)

            def dump_bytecode(self, bucket):
                filename = path.join(self.directory, bucket.key)
                with open(filename, 'wb') as f:
                    bucket.write_bytecode(f)

    A more advanced version of a filesystem based bytecode cache is part of
    Jinja2.
    """

    def load_bytecode(self, bucket):
        """Subclasses have to override this method to load bytecode into a
        bucket.  If they are not able to find code in the cache for the
        bucket, it must not do anything.
        """
        raise NotImplementedError()

    def dump_bytecode(self, bucket):
        """Subclasses have to override this method to write the bytecode
        from a bucket back to the cache.  If it is unable to do so it must not
        fail silently but raise an exception.
        """
        raise NotImplementedError()

    def clear(self):
        """Clears the cache.  This method is not used by Jinja2 but should be
        implemented to allow applications to clear the bytecode cache used
        by a particular environment.
        """

    def get_cache_key(self, name, filename=None):
        """Returns the unique hash key for this template name."""
        # IDIOM FIX: local renamed from ``hash`` to avoid shadowing the
        # builtin of the same name.
        digest = sha1(name.encode('utf-8'))
        if filename is not None:
            filename = '|' + filename
            if isinstance(filename, text_type):
                filename = filename.encode('utf-8')
            digest.update(filename)
        return digest.hexdigest()

    def get_source_checksum(self, source):
        """Returns a checksum for the source."""
        return sha1(source.encode('utf-8')).hexdigest()

    def get_bucket(self, environment, name, filename, source):
        """Return a cache bucket for the given template.  All arguments are
        mandatory but filename may be `None`.
        """
        key = self.get_cache_key(name, filename)
        checksum = self.get_source_checksum(source)
        bucket = Bucket(environment, key, checksum)
        self.load_bytecode(bucket)
        return bucket

    def set_bucket(self, bucket):
        """Put the bucket into the cache."""
        self.dump_bytecode(bucket)
class FileSystemBytecodeCache(BytecodeCache):
    """A bytecode cache that stores bytecode on the filesystem.  It accepts
    two arguments: The directory where the cache items are stored and a
    pattern string that is used to build the filename.

    If no directory is specified the system temporary items folder is used.

    The pattern can be used to have multiple separate caches operate on the
    same directory.  The default pattern is ``'__jinja2_%s.cache'``.  ``%s``
    is replaced with the cache key.

    >>> bcc = FileSystemBytecodeCache('/tmp/jinja_cache', '%s.cache')

    This bytecode cache supports clearing of the cache using the clear method.
    """

    def __init__(self, directory=None, pattern='__jinja2_%s.cache'):
        # Fall back to the system temp folder when no directory is given.
        self.directory = tempfile.gettempdir() if directory is None else directory
        self.pattern = pattern

    def _get_cache_filename(self, bucket):
        # ``pattern`` must contain exactly one ``%s`` for the cache key.
        return path.join(self.directory, self.pattern % bucket.key)

    def load_bytecode(self, bucket):
        f = open_if_exists(self._get_cache_filename(bucket), 'rb')
        if f is None:
            return
        try:
            bucket.load_bytecode(f)
        finally:
            f.close()

    def dump_bytecode(self, bucket):
        with open(self._get_cache_filename(bucket), 'wb') as f:
            bucket.write_bytecode(f)

    def clear(self):
        # imported lazily here because google app-engine doesn't support
        # write access on the file system and the function does not exist
        # normally.
        from os import remove
        for filename in fnmatch.filter(listdir(self.directory),
                                       self.pattern % '*'):
            try:
                remove(path.join(self.directory, filename))
            except OSError:
                # best effort: a file vanishing mid-clear is not an error
                pass
class MemcachedBytecodeCache(BytecodeCache):
    """This class implements a bytecode cache that uses a memcache cache for
    storing the information.  It does not enforce a specific memcache library
    (tummy's memcache or cmemcache) but will accept any class that provides
    the minimal interface required.

    Libraries compatible with this class:

    -   `werkzeug <http://werkzeug.pocoo.org/>`_.contrib.cache
    -   `python-memcached <http://www.tummy.com/Community/software/python-memcached/>`_
    -   `cmemcache <http://gijsbert.org/cmemcache/>`_

    (Unfortunately the django cache interface is not compatible because it
    does not support storing binary data, only unicode.  You can however pass
    the underlying cache client to the bytecode cache which is available
    as `django.core.cache.cache._client`.)

    The minimal interface for the client passed to the constructor is this:

    .. class:: MinimalClientInterface

        .. method:: set(key, value[, timeout])

            Stores the bytecode in the cache.  `value` is a string and
            `timeout` the timeout of the key.  If timeout is not provided
            a default timeout or no timeout should be assumed, if it's
            provided it's an integer with the number of seconds the cache
            item should exist.

        .. method:: get(key)

            Returns the value for the cache key.  If the item does not
            exist in the cache the return value must be `None`.

    The other arguments to the constructor are the prefix for all keys that
    is added before the actual cache key and the timeout for the bytecode in
    the cache system.  We recommend a high (or no) timeout.

    This bytecode cache does not support clearing of used items in the cache.
    The clear method is a no-operation function.

    .. versionadded:: 2.7
       Added support for ignoring memcache errors through the
       `ignore_memcache_errors` parameter.
    """

    def __init__(self, client, prefix='jinja2/bytecode/', timeout=None,
                 ignore_memcache_errors=True):
        self.client = client
        self.prefix = prefix
        self.timeout = timeout
        self.ignore_memcache_errors = ignore_memcache_errors

    def load_bytecode(self, bucket):
        code = None
        try:
            code = self.client.get(self.prefix + bucket.key)
        except Exception:
            # Treat a memcache failure as a cache miss unless the user
            # explicitly asked for errors to propagate.
            if not self.ignore_memcache_errors:
                raise
        if code is not None:
            bucket.bytecode_from_string(code)

    def dump_bytecode(self, bucket):
        key = self.prefix + bucket.key
        value = bucket.bytecode_to_string()
        try:
            # Only pass the timeout through when one was configured.
            if self.timeout is not None:
                self.client.set(key, value, self.timeout)
            else:
                self.client.set(key, value)
        except Exception:
            if not self.ignore_memcache_errors:
                raise
|
lebronhkh/pythondotorg
|
refs/heads/master
|
pages/tests/test_views.py
|
14
|
from .base import BasePageTests
from django.contrib.sites.models import Site
from django.contrib.redirects.models import Redirect
class PageViewTests(BasePageTests):
    """Tests for the flat-page view and its interaction with redirects."""

    def test_page_view(self):
        # Published pages are visible to anonymous users.
        r = self.client.get('/one/')
        self.assertEqual(r.context['page'], self.p1)

        # drafts are available only to staff users
        self.p1.is_published = False
        self.p1.save()
        r = self.client.get('/one/')
        self.assertEqual(r.status_code, 404)
        self.client.login(username='staff_user', password='staff_user')
        r = self.client.get('/one/')
        self.assertEqual(r.status_code, 200)

    def test_with_query_string(self):
        # A query string must not affect page resolution.
        r = self.client.get('/one/?foo')
        self.assertEqual(r.context['page'], self.p1)

    def test_redirect(self):
        """
        Check that redirects still have priority over pages.
        """
        redirect = Redirect.objects.create(
            old_path='/%s/' % self.p1.path,
            new_path='http://redirected.example.com',
            site=Site.objects.get_current()
        )
        response = self.client.get(redirect.old_path)
        self.assertEqual(response.status_code, 301)
        self.assertEqual(response['Location'], redirect.new_path)
        redirect.delete()
|
Prasad9/incubator-mxnet
|
refs/heads/master
|
example/reinforcement-learning/parallel_actor_critic/model.py
|
24
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from itertools import chain
import numpy as np
import scipy.signal
import mxnet as mx
class Agent(object):
    """Parallel advantage actor-critic (A2C) agent.

    Builds one shared MLP with a softmax policy head, a scalar value head
    and an entropy regularizer, and updates it from batched rollouts
    collected across ``config.num_envs`` environments.
    """

    def __init__(self, input_size, act_space, config):
        super(Agent, self).__init__()
        self.input_size = input_size
        self.num_envs = config.num_envs
        self.ctx = config.ctx
        self.act_space = act_space
        self.config = config

        # Shared network.
        net = mx.sym.Variable('data')
        net = mx.sym.FullyConnected(
            data=net, name='fc1', num_hidden=config.hidden_size, no_bias=True)
        net = mx.sym.Activation(data=net, name='relu1', act_type="relu")

        # Policy network.
        policy_fc = mx.sym.FullyConnected(
            data=net, name='policy_fc', num_hidden=act_space, no_bias=True)
        policy = mx.sym.SoftmaxActivation(data=policy_fc, name='policy')
        # Clip away exact 0/1 probabilities so the log() below stays finite.
        policy = mx.sym.clip(data=policy, a_min=1e-5, a_max=1 - 1e-5)
        log_policy = mx.sym.log(data=policy, name='log_policy')
        out_policy = mx.sym.BlockGrad(data=policy, name='out_policy')

        # Negative entropy (scaled by entropy_wt) encourages exploration.
        neg_entropy = policy * log_policy
        neg_entropy = mx.sym.MakeLoss(
            data=neg_entropy, grad_scale=config.entropy_wt, name='neg_entropy')

        # Value network.
        value = mx.sym.FullyConnected(data=net, name='value', num_hidden=1)

        self.sym = mx.sym.Group([log_policy, value, neg_entropy, out_policy])
        self.model = mx.mod.Module(self.sym, data_names=('data',),
                                   label_names=None)

        self.paralell_num = config.num_envs * config.t_max
        self.model.bind(
            data_shapes=[('data', (self.paralell_num, input_size))],
            label_shapes=None,
            grad_req="write")

        self.model.init_params(config.init_func)

        optimizer_params = {'learning_rate': config.learning_rate,
                            'rescale_grad': 1.0}
        if config.grad_clip:
            optimizer_params['clip_gradient'] = config.clip_magnitude

        self.model.init_optimizer(
            kvstore='local', optimizer=config.update_rule,
            optimizer_params=optimizer_params)

    def act(self, ps):
        """Sample one action per row of the policy matrix *ps*.

        Inverse-CDF sampling: draw u ~ U(0, 1) per row and pick the first
        action whose cumulative probability exceeds u.
        """
        us = np.random.uniform(size=ps.shape[0])[:, np.newaxis]
        as_ = (np.cumsum(ps, axis=1) > us).argmax(axis=1)
        return as_

    def train_step(self, env_xs, env_as, env_rs, env_vs):
        """Perform one synchronous gradient update from a batch of rollouts."""
        # NOTE(reed): Reshape to set the data shape.
        self.model.reshape([('data', (len(env_xs), self.input_size))])

        xs = mx.nd.array(env_xs, ctx=self.ctx)
        as_ = np.array(list(chain.from_iterable(env_as)))

        # Compute discounted rewards and advantages.
        advs = []
        gamma, lambda_ = self.config.gamma, self.config.lambda_
        # BUG FIX: was ``xrange``, which is Python 2 only; ``range`` behaves
        # identically here and works on both Python 2 and 3.
        for i in range(len(env_vs)):
            # Compute advantages using Generalized Advantage Estimation;
            # see eqn. (16) of [Schulman 2016].
            delta_t = (env_rs[i] + gamma*np.array(env_vs[i][1:]) -
                       np.array(env_vs[i][:-1]))
            advs.extend(self._discount(delta_t, gamma * lambda_))

        # Negative generalized advantage estimations.
        neg_advs_v = -np.asarray(advs)

        # NOTE(reed): Only keeping the grads for selected actions.
        neg_advs_np = np.zeros((len(advs), self.act_space), dtype=np.float32)
        neg_advs_np[np.arange(neg_advs_np.shape[0]), as_] = neg_advs_v
        neg_advs = mx.nd.array(neg_advs_np, ctx=self.ctx)

        # NOTE(reed): The grads of values is actually negative advantages.
        v_grads = mx.nd.array(self.config.vf_wt * neg_advs_v[:, np.newaxis],
                              ctx=self.ctx)

        data_batch = mx.io.DataBatch(data=[xs], label=None)
        self._forward_backward(data_batch=data_batch,
                               out_grads=[neg_advs, v_grads])
        self._update_params()

    def _discount(self, x, gamma):
        """Discounted cumulative sum of *x* via a reversed linear filter."""
        return scipy.signal.lfilter([1], [1, -gamma], x[::-1], axis=0)[::-1]

    def _forward_backward(self, data_batch, out_grads=None):
        # One forward pass in training mode followed by backprop of the
        # externally supplied head gradients.
        self.model.forward(data_batch, is_train=True)
        self.model.backward(out_grads=out_grads)

    def _update_params(self):
        self.model.update()
        # Keep the host-side parameter copy in sync with the devices.
        self.model._sync_params_from_devices()
|
hnakamur/django
|
refs/heads/master
|
django/contrib/gis/forms/__init__.py
|
597
|
from django.forms import * # NOQA
from .fields import ( # NOQA
GeometryCollectionField, GeometryField, LineStringField,
MultiLineStringField, MultiPointField, MultiPolygonField, PointField,
PolygonField,
)
from .widgets import BaseGeometryWidget, OpenLayersWidget, OSMWidget # NOQA
|
monetizeio/django-pgmp
|
refs/heads/master
|
django_pgmp/db/fields/mpz_test/models.py
|
1
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# === django_pgmp.db.fields.mpz_test.models -------------------------------===
# This file is part of django-pgpm. django-pgpm is copyright © 2012, RokuSigma
# Inc. and contributors. See AUTHORS and LICENSE for more details.
#
# django-pgpm is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option)
# any later version.
#
# django-pgpm is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
# for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with django-pgpm. If not, see <http://www.gnu.org/licenses/>.
# ===----------------------------------------------------------------------===
from django.db import models
from django_pgmp.db.fields import MultiPrecisionIntegerField
class MultiPrecisionIntegerModel(models.Model):
    """Minimal model exercising ``MultiPrecisionIntegerField`` in tests."""
    # Single mpz-backed integer column under test.
    value = MultiPrecisionIntegerField()
# ===----------------------------------------------------------------------===
# End of File
# ===----------------------------------------------------------------------===
|
ecell/epdp_ecell4_impl_temporary
|
refs/heads/use-ecell4-namespace
|
test/NetworkRules_test.py
|
6
|
#!/usr/bin/env python
import _gfrd
import unittest
class NetworkRulesTestCase(unittest.TestCase):
    """Tests for adding, removing and querying reaction rules on a model."""

    def setUp(self):
        # Fresh model with two registered species for every test.
        self.m = _gfrd.Model()
        self.s1 = _gfrd.SpeciesType()
        self.m.add_species_type(self.s1)
        self.s2 = _gfrd.SpeciesType()
        self.m.add_species_type(self.s2)

    def tearDown(self):
        pass

    def test_add_reaction_rule(self):
        # Distinct rules can be added without error ...
        self.m.network_rules.add_reaction_rule(
            _gfrd.ReactionRule([self.s1], [self.s1, self.s2]))
        self.m.network_rules.add_reaction_rule(
            _gfrd.ReactionRule([self.s2], [self.s1, self.s2]))
        # ... but re-adding an equivalent rule raises AlreadyExists.
        self.assertRaises(_gfrd.AlreadyExists,
                lambda: self.m.network_rules.add_reaction_rule(
                    _gfrd.ReactionRule([self.s1], [self.s1, self.s2])))
        self.assertRaises(_gfrd.AlreadyExists,
                lambda: self.m.network_rules.add_reaction_rule(
                    _gfrd.ReactionRule([self.s2], [self.s1, self.s2])))

    def test_remove_reaction_rule_1(self):
        # Start with None.
        # BUG FIX: was a bare ``assert ... == None`` (stripped under -O and
        # a PEP 8 E711 violation); use an identity-based unittest assertion.
        self.assertIsNone(self.m.network_rules.query_reaction_rule(self.s1))

        # Add 1.
        rr = _gfrd.ReactionRule([self.s1], [self.s1, self.s2])
        rr['k'] = '0.1'
        self.m.network_rules.add_reaction_rule(rr)
        rules = set(self.m.network_rules.query_reaction_rule(self.s1))
        self.assertEqual(1, len(rules))

        # Remove 1 to get 0.
        self.m.network_rules.remove_reaction_rule(rr)
        gen = self.m.network_rules.query_reaction_rule(self.s1)
        self.assertEqual(0, len(set(gen)))

    def test_query_reaction_rule(self):
        r1 = _gfrd.ReactionRule([self.s1], [self.s1, self.s2])
        self.m.network_rules.add_reaction_rule(r1)
        a = self.m.network_rules.query_reaction_rule(self.s1)
        self.assertIsNotNone(iter(a))
        a = list(a)
        self.assertEqual(1, len(a))
        self.assertIn(r1, a)

        r2 = _gfrd.ReactionRule([self.s1], [self.s1])
        self.m.network_rules.add_reaction_rule(r2)
        a = self.m.network_rules.query_reaction_rule(self.s1)
        self.assertIsNotNone(iter(a))
        a = list(a)
        self.assertEqual(2, len(a))
        self.assertIn(r1, a)
        self.assertIn(r2, a)
if __name__ == "__main__":
    # Allow running this test module directly from the command line.
    unittest.main()
|
stevenewey/wagtail
|
refs/heads/master
|
wagtail/wagtailcore/fields.py
|
5
|
from __future__ import absolute_import, unicode_literals
import json
from django.db import models
from django import forms
from django.core.serializers.json import DjangoJSONEncoder
from django.utils.six import with_metaclass
from wagtail.wagtailcore.rich_text import DbWhitelister, expand_db_html
from wagtail.utils.widgets import WidgetWithScript
from wagtail.wagtailcore.blocks import Block, StreamBlock, StreamValue, BlockField
class RichTextArea(WidgetWithScript, forms.Textarea):
    """Admin textarea widget that round-trips Wagtail's rich-text HTML."""

    def get_panel(self):
        # Imported lazily to avoid pulling admin code in at import time.
        from wagtail.wagtailadmin.edit_handlers import RichTextFieldPanel
        return RichTextFieldPanel

    def render(self, name, value, attrs=None):
        # Expand the DB representation (embedded page/document references
        # etc.) into editor-friendly HTML before rendering the textarea.
        translated_value = (
            None if value is None else expand_db_html(value, for_editor=True)
        )
        return super(RichTextArea, self).render(name, translated_value, attrs)

    def render_js_init(self, id_, name, value):
        return "makeRichTextEditable({0});".format(json.dumps(id_))

    def value_from_datadict(self, data, files, name):
        # Submitted HTML is run through the whitelister before storage.
        original_value = super(RichTextArea, self).value_from_datadict(data, files, name)
        if original_value is None:
            return None
        return DbWhitelister.clean(original_value)
class RichTextField(models.TextField):
    """TextField whose default form widget is the rich text editor."""

    def formfield(self, **kwargs):
        # Callers may still pass their own widget; we only supply a default.
        kwargs.setdefault('widget', RichTextArea)
        return super(RichTextField, self).formfield(**kwargs)
class StreamField(with_metaclass(models.SubfieldBase, models.Field)):
    """Model field storing a heterogeneous sequence of blocks as JSON text.

    ``block_types`` may be a ``Block`` instance, a ``Block`` subclass, or a
    list of ``(name, block)`` pairs which is wrapped into a ``StreamBlock``.
    """
    def __init__(self, block_types, **kwargs):
        if isinstance(block_types, Block):
            self.stream_block = block_types
        elif isinstance(block_types, type):
            self.stream_block = block_types()
        else:
            self.stream_block = StreamBlock(block_types)
        super(StreamField, self).__init__(**kwargs)

    def get_internal_type(self):
        # Stored in the database as plain text (serialized JSON).
        return 'TextField'

    def get_panel(self):
        from wagtail.wagtailadmin.edit_handlers import StreamFieldPanel
        return StreamFieldPanel

    def deconstruct(self):
        # Serialize the child-block list so migrations can recreate the field.
        name, path, _, kwargs = super(StreamField, self).deconstruct()
        block_types = self.stream_block.child_blocks.items()
        args = [block_types]
        return name, path, args, kwargs

    def to_python(self, value):
        """Convert DB / serialized values into a ``StreamValue``."""
        if value is None or value == '':
            return StreamValue(self.stream_block, [])
        elif isinstance(value, StreamValue):
            return value
        else:  # assume string
            try:
                unpacked_value = json.loads(value)
            except ValueError:
                # value is not valid JSON; most likely, this field was previously a
                # rich text field before being migrated to StreamField, and the data
                # was left intact in the migration. Return an empty stream instead.
                # TODO: keep this raw text data around as a property of the StreamValue
                # so that it can be retrieved in data migrations
                return StreamValue(self.stream_block, [])

            if unpacked_value is None:
                # we get here if value is the literal string 'null'. This should probably
                # never happen if the rest of the (de)serialization code is working properly,
                # but better to handle it just in case...
                return StreamValue(self.stream_block, [])

            return self.stream_block.to_python(unpacked_value)

    def get_prep_value(self, value):
        # Serialize back to JSON for database storage.
        return json.dumps(self.stream_block.get_prep_value(value), cls=DjangoJSONEncoder)

    def formfield(self, **kwargs):
        """
        Override formfield to use a plain forms.Field so that we do no transformation on the value
        (as distinct from the usual fallback of forms.CharField, which transforms it into a string).
        """
        defaults = {'form_class': BlockField, 'block': self.stream_block}
        defaults.update(kwargs)
        return super(StreamField, self).formfield(**defaults)

    def value_to_string(self, obj):
        # Used by Django's serialization framework (e.g. dumpdata).
        value = self._get_val_from_obj(obj)
        return self.get_prep_value(value)

    def get_searchable_content(self, value):
        # Delegate search-index extraction to the stream block.
        return self.stream_block.get_searchable_content(value)
|
HLFH/CouchPotatoServer
|
refs/heads/develop
|
couchpotato/core/media/movie/providers/trailer/mechanize/_sockettimeout.py
|
149
|
import socket
try:
_GLOBAL_DEFAULT_TIMEOUT = socket._GLOBAL_DEFAULT_TIMEOUT
except AttributeError:
_GLOBAL_DEFAULT_TIMEOUT = object()
|
drexly/openhgsenti
|
refs/heads/master
|
lib/django/core/serializers/json.py
|
320
|
"""
Serialize data to/from JSON
"""
# Avoid shadowing the standard library json module
from __future__ import absolute_import, unicode_literals
import datetime
import decimal
import json
import sys
import uuid
from django.core.serializers.base import DeserializationError
from django.core.serializers.python import (
Deserializer as PythonDeserializer, Serializer as PythonSerializer,
)
from django.utils import six
from django.utils.timezone import is_aware
class Serializer(PythonSerializer):
    """
    Convert a queryset to JSON.
    Builds on PythonSerializer (which produces plain dicts per object) and
    writes a JSON array to ``self.stream``, one object per queryset entry.
    """
    internal_use_only = False
    def _init_options(self):
        # Split self.options into framework-level options and keyword
        # arguments forwarded verbatim to json.dump().
        # NOTE(review): comparing version components as *strings* is fragile
        # ('10' < '2' lexicographically); fine for the bundled json module's
        # version numbers, but verify if simplejson is substituted.
        if json.__version__.split('.') >= ['2', '1', '3']:
            # Use JS strings to represent Python Decimal instances (ticket #16850)
            self.options.update({'use_decimal': False})
        self._current = None
        self.json_kwargs = self.options.copy()
        # 'stream' and 'fields' are consumed by the serializer framework, not json.
        self.json_kwargs.pop('stream', None)
        self.json_kwargs.pop('fields', None)
        if self.options.get('indent'):
            # Prevent trailing spaces
            self.json_kwargs['separators'] = (',', ': ')
    def start_serialization(self):
        # Open the top-level JSON array.
        self._init_options()
        self.stream.write("[")
    def end_serialization(self):
        # Close the array; when indenting, the bracket goes on its own line.
        if self.options.get("indent"):
            self.stream.write("\n")
        self.stream.write("]")
        if self.options.get("indent"):
            self.stream.write("\n")
    def end_object(self, obj):
        # self._current has the field data
        indent = self.options.get("indent")
        if not self.first:
            # Separate from the previous object; a space follows the comma
            # only in compact (non-indented) mode.
            self.stream.write(",")
            if not indent:
                self.stream.write(" ")
        if indent:
            self.stream.write("\n")
        # DjangoJSONEncoder (defined below in this module) handles
        # date/time, Decimal and UUID values.
        json.dump(self.get_dump_object(obj), self.stream,
            cls=DjangoJSONEncoder, **self.json_kwargs)
        self._current = None
    def getvalue(self):
        # Grand-parent super
        # (deliberately bypasses PythonSerializer's own getvalue override).
        return super(PythonSerializer, self).getvalue()
def Deserializer(stream_or_string, **options):
    """
    Deserialize a stream or string of JSON data.
    Accepts a file-like object, ``bytes`` or text; yields deserialized
    objects produced by PythonDeserializer. Any failure other than normal
    generator shutdown is re-raised as DeserializationError with the
    original traceback attached.
    """
    if not isinstance(stream_or_string, (bytes, six.string_types)):
        # File-like object: read it fully into memory first.
        stream_or_string = stream_or_string.read()
    if isinstance(stream_or_string, bytes):
        stream_or_string = stream_or_string.decode('utf-8')
    try:
        objects = json.loads(stream_or_string)
        for obj in PythonDeserializer(objects, **options):
            yield obj
    except GeneratorExit:
        # Let generator close() propagate untouched (not a real error).
        raise
    except Exception as e:
        # Map to deserializer error
        # (six.reraise preserves the original traceback across py2/py3).
        six.reraise(DeserializationError, DeserializationError(e), sys.exc_info()[2])
class DjangoJSONEncoder(json.JSONEncoder):
    """
    JSONEncoder subclass that knows how to encode date/time, decimal types and UUIDs.
    """
    def default(self, o):
        # See "Date Time String Format" in the ECMA-262 specification.
        if isinstance(o, datetime.datetime):
            encoded = o.isoformat()
            if o.microsecond:
                # Truncate microseconds (6 digits) to milliseconds (3 digits).
                encoded = encoded[:23] + encoded[26:]
            # Rewrite a UTC offset as the ECMA-262 'Z' designator.
            return encoded[:-6] + 'Z' if encoded.endswith('+00:00') else encoded
        if isinstance(o, datetime.date):
            return o.isoformat()
        if isinstance(o, datetime.time):
            if is_aware(o):
                raise ValueError("JSON can't represent timezone-aware times.")
            encoded = o.isoformat()
            if o.microsecond:
                # Keep only millisecond precision (HH:MM:SS.mmm is 12 chars).
                encoded = encoded[:12]
            return encoded
        if isinstance(o, (decimal.Decimal, uuid.UUID)):
            # Both serialize losslessly via their canonical string form.
            return str(o)
        return super(DjangoJSONEncoder, self).default(o)
# Older, deprecated class name (for backwards compatibility purposes).
DateTimeAwareJSONEncoder = DjangoJSONEncoder
|
sodafree/backend
|
refs/heads/master
|
build/selenium/build/lib.linux-i686-2.7/selenium/webdriver/support/events.py
|
32
|
#!/usr/bin/python
#
# Copyright 2011 Software Freedom Conservancy.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from abstract_event_listener import AbstractEventListener
from event_firing_webdriver import EventFiringWebDriver
|
durai145/youtube-dl
|
refs/heads/master
|
youtube_dl/extractor/footyroom.py
|
104
|
# coding: utf-8
from __future__ import unicode_literals
from .common import InfoExtractor
class FootyRoomIE(InfoExtractor):
    """Extractor for footyroom.com match pages.
    Each page embeds a list of clips (presumably Playwire-hosted); the
    extractor returns them as a playlist of url_results delegated to the
    'Playwire' extractor.
    """
    _VALID_URL = r'http://footyroom\.com/(?P<id>[^/]+)'
    _TESTS = [{
        'url': 'http://footyroom.com/schalke-04-0-2-real-madrid-2015-02/',
        'info_dict': {
            'id': 'schalke-04-0-2-real-madrid-2015-02',
            'title': 'Schalke 04 0 – 2 Real Madrid',
        },
        'playlist_count': 3,
    }, {
        'url': 'http://footyroom.com/georgia-0-2-germany-2015-03/',
        'info_dict': {
            'id': 'georgia-0-2-germany-2015-03',
            'title': 'Georgia 0 – 2 Germany',
        },
        'playlist_count': 1,
    }]
    def _real_extract(self, url):
        playlist_id = self._match_id(url)
        webpage = self._download_webpage(url, playlist_id)
        # The page embeds its clip list as a JS array argument to
        # VideoSelector.load([...]); parse that array as JSON.
        playlist = self._parse_json(
            self._search_regex(
                r'VideoSelector\.load\((\[.+?\])\);', webpage, 'video selector'),
            playlist_id)
        playlist_title = self._og_search_title(webpage)
        entries = []
        for video in playlist:
            # 'payload' holds embed HTML for one clip; skip entries without it.
            payload = video.get('payload')
            if not payload:
                continue
            # Delegate playback to the Playwire extractor when an embed config
            # URL is present; protocol-relative URLs are given an http: scheme.
            playwire_url = self._search_regex(
                r'data-config="([^"]+)"', payload,
                'playwire url', default=None)
            if playwire_url:
                entries.append(self.url_result(self._proto_relative_url(
                    playwire_url, 'http:'), 'Playwire'))
        return self.playlist_result(entries, playlist_id, playlist_title)
|
estebanlazza/GestorDeMusicos
|
refs/heads/master
|
vendor/doctrine/orm/docs/en/_exts/configurationblock.py
|
2577
|
#Copyright (c) 2010 Fabien Potencier
#
#Permission is hereby granted, free of charge, to any person obtaining a copy
#of this software and associated documentation files (the "Software"), to deal
#in the Software without restriction, including without limitation the rights
#to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
#copies of the Software, and to permit persons to whom the Software is furnished
#to do so, subject to the following conditions:
#
#The above copyright notice and this permission notice shall be included in all
#copies or substantial portions of the Software.
#
#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
#IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
#FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
#AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
#LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
#OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
#THE SOFTWARE.
from docutils.parsers.rst import Directive, directives
from docutils import nodes
from string import upper
class configurationblock(nodes.General, nodes.Element):
    """Custom docutils node wrapping a rendered configuration-block list."""
    pass
class ConfigurationBlock(Directive):
    """reST directive that groups equivalent configuration snippets.
    Each literal block inside the directive body is wrapped in a bullet-list
    item headed by a human-readable format name (see ``formats``); the whole
    list is emitted inside a ``configurationblock`` node.
    """
    has_content = True
    required_arguments = 0
    optional_arguments = 0
    final_argument_whitespace = True
    option_spec = {}
    # Maps code-block language ids to display titles.
    # NOTE(review): a literal block whose language is missing from this
    # table raises KeyError in run() -- confirm whether that is intended.
    formats = {
        'html': 'HTML',
        'xml': 'XML',
        'php': 'PHP',
        'yaml': 'YAML',
        'jinja': 'Twig',
        'html+jinja': 'Twig',
        'jinja+html': 'Twig',
        'php+html': 'PHP',
        'html+php': 'PHP',
        'ini': 'INI',
        'php-annotations': 'Annotations',
    }
    def run(self):
        env = self.state.document.settings.env  # NOTE(review): unused here; kept as-is.
        node = nodes.Element()
        node.document = self.state.document
        # Parse the directive body so nested literal blocks become child nodes.
        self.state.nested_parse(self.content, self.content_offset, node)
        entries = []
        for i, child in enumerate(node):
            if isinstance(child, nodes.literal_block):
                # add a title (the language name) before each block
                #targetid = "configuration-block-%d" % env.new_serialno('configuration-block')
                #targetnode = nodes.target('', '', ids=[targetid])
                #targetnode.append(child)
                innernode = nodes.emphasis(self.formats[child['language']], self.formats[child['language']])
                para = nodes.paragraph()
                para += [innernode, child]
                entry = nodes.list_item('')
                entry.append(para)
                entries.append(entry)
        resultnode = configurationblock()
        resultnode.append(nodes.bullet_list('', *entries))
        return [resultnode]
def visit_configurationblock_html(self, node):
    """Open the HTML wrapper <div class="configuration-block"> for the node."""
    opening_tag = self.starttag(node, 'div', CLASS='configuration-block')
    self.body.append(opening_tag)
def depart_configurationblock_html(self, node):
    """Close the wrapper opened by visit_configurationblock_html."""
    closing_tag = '</div>\n'
    self.body.append(closing_tag)
def visit_configurationblock_latex(self, node):
    # LaTeX output needs no wrapper markup around configuration blocks.
    pass
def depart_configurationblock_latex(self, node):
    # Nothing was opened in the visit function, so nothing to close.
    pass
def setup(app):
    # Sphinx extension entry point: register the custom node with its
    # per-format renderers, and the directive that produces it.
    app.add_node(configurationblock,
                 html=(visit_configurationblock_html, depart_configurationblock_html),
                 latex=(visit_configurationblock_latex, depart_configurationblock_latex))
    app.add_directive('configuration-block', ConfigurationBlock)
|
romkij/client
|
refs/heads/master
|
vendor/doctrine/orm/docs/en/_exts/configurationblock.py
|
2577
|
#Copyright (c) 2010 Fabien Potencier
#
#Permission is hereby granted, free of charge, to any person obtaining a copy
#of this software and associated documentation files (the "Software"), to deal
#in the Software without restriction, including without limitation the rights
#to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
#copies of the Software, and to permit persons to whom the Software is furnished
#to do so, subject to the following conditions:
#
#The above copyright notice and this permission notice shall be included in all
#copies or substantial portions of the Software.
#
#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
#IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
#FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
#AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
#LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
#OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
#THE SOFTWARE.
from docutils.parsers.rst import Directive, directives
from docutils import nodes
from string import upper
class configurationblock(nodes.General, nodes.Element):
    """Custom docutils node wrapping a rendered configuration-block list."""
    pass
class ConfigurationBlock(Directive):
    """reST directive that groups equivalent configuration snippets.
    Each literal block inside the directive body is wrapped in a bullet-list
    item headed by a human-readable format name (see ``formats``); the whole
    list is emitted inside a ``configurationblock`` node.
    """
    has_content = True
    required_arguments = 0
    optional_arguments = 0
    final_argument_whitespace = True
    option_spec = {}
    # Maps code-block language ids to display titles.
    # NOTE(review): a literal block whose language is missing from this
    # table raises KeyError in run() -- confirm whether that is intended.
    formats = {
        'html': 'HTML',
        'xml': 'XML',
        'php': 'PHP',
        'yaml': 'YAML',
        'jinja': 'Twig',
        'html+jinja': 'Twig',
        'jinja+html': 'Twig',
        'php+html': 'PHP',
        'html+php': 'PHP',
        'ini': 'INI',
        'php-annotations': 'Annotations',
    }
    def run(self):
        env = self.state.document.settings.env  # NOTE(review): unused here; kept as-is.
        node = nodes.Element()
        node.document = self.state.document
        # Parse the directive body so nested literal blocks become child nodes.
        self.state.nested_parse(self.content, self.content_offset, node)
        entries = []
        for i, child in enumerate(node):
            if isinstance(child, nodes.literal_block):
                # add a title (the language name) before each block
                #targetid = "configuration-block-%d" % env.new_serialno('configuration-block')
                #targetnode = nodes.target('', '', ids=[targetid])
                #targetnode.append(child)
                innernode = nodes.emphasis(self.formats[child['language']], self.formats[child['language']])
                para = nodes.paragraph()
                para += [innernode, child]
                entry = nodes.list_item('')
                entry.append(para)
                entries.append(entry)
        resultnode = configurationblock()
        resultnode.append(nodes.bullet_list('', *entries))
        return [resultnode]
def visit_configurationblock_html(self, node):
    # Open the HTML wrapper <div class="configuration-block"> for the node.
    self.body.append(self.starttag(node, 'div', CLASS='configuration-block'))
def depart_configurationblock_html(self, node):
    # Close the wrapper opened by visit_configurationblock_html.
    self.body.append('</div>\n')
def visit_configurationblock_latex(self, node):
    # LaTeX output needs no wrapper markup around configuration blocks.
    pass
def depart_configurationblock_latex(self, node):
    # Nothing was opened in the visit function, so nothing to close.
    pass
def setup(app):
    # Sphinx extension entry point: register the custom node with its
    # per-format renderers, and the directive that produces it.
    app.add_node(configurationblock,
                 html=(visit_configurationblock_html, depart_configurationblock_html),
                 latex=(visit_configurationblock_latex, depart_configurationblock_latex))
    app.add_directive('configuration-block', ConfigurationBlock)
|
yufengg/tensorflow
|
refs/heads/master
|
tensorflow/python/debug/wrappers/dumping_wrapper.py
|
26
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Debugger wrapper session that dumps debug data to file:// URLs."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import threading
import time
# Google-internal import(s).
from tensorflow.core.util import event_pb2
from tensorflow.python.debug.lib import debug_data
from tensorflow.python.debug.wrappers import framework
from tensorflow.python.platform import gfile
class DumpingDebugWrapperSession(framework.NonInteractiveDebugWrapperSession):
  """Debug Session wrapper that dumps debug data to filesystem."""

  def __init__(self,
               sess,
               session_root,
               watch_fn=None,
               thread_name_filter=None,
               log_usage=True):
    """Constructor of DumpingDebugWrapperSession.

    Args:
      sess: The TensorFlow `Session` object being wrapped.
      session_root: (`str`) Path to the session root directory. Must be a
        directory that does not exist or an empty directory. If the directory
        does not exist, it will be created by the debugger core during debug
        @{tf.Session.run}
        calls.
        As the `run()` calls occur, subdirectories will be added to
        `session_root`. The subdirectories' names has the following pattern:
          run_<epoch_time_stamp>_<zero_based_run_counter>
        E.g., run_1480734393835964_ad4c953a85444900ae79fc1b652fb324
      watch_fn: (`Callable`) A Callable that can be used to define per-run
        debug ops and watched tensors. See the doc of
        `NonInteractiveDebugWrapperSession.__init__()` for details.
      thread_name_filter: Regular-expression white list for threads on which the
        wrapper session will be active. See doc of `BaseDebugWrapperSession` for
        more details.
      log_usage: (`bool`) whether the usage of this class is to be logged.

    Raises:
      ValueError: If `session_root` is an existing and non-empty directory or
        if `session_root` is a file.
    """
    if log_usage:
      pass  # No logging for open-source.

    framework.NonInteractiveDebugWrapperSession.__init__(
        self, sess, watch_fn=watch_fn, thread_name_filter=thread_name_filter)

    # Validate session_root: it must be a fresh path or an empty directory.
    if gfile.Exists(session_root):
      if not gfile.IsDirectory(session_root):
        raise ValueError(
            "session_root path points to a file: %s" % session_root)
      elif gfile.ListDirectory(session_root):
        raise ValueError(
            "session_root path points to a non-empty directory: %s" %
            session_root)
    else:
      gfile.MakeDirs(session_root)
    self._session_root = session_root

    # Counter + lock give each concurrent run() call a distinct directory.
    self._run_counter = 0
    self._run_counter_lock = threading.Lock()

  def prepare_run_debug_urls(self, fetches, feed_dict):
    """Implementation of abstract method in superclass.

    See doc of `NonInteractiveDebugWrapperSession.prepare_run_debug_urls()`
    for details. This implementation creates a run-specific subdirectory under
    self._session_root and stores information regarding run `fetches` and
    `feed_dict.keys()` in the subdirectory.

    Args:
      fetches: Same as the `fetches` argument to `Session.run()`
      feed_dict: Same as the `feed_dict` argument to `Session.run()`

    Returns:
      debug_urls: (`str` or `list` of `str`) file:// debug URLs to be used in
        this `Session.run()` call.
    """
    # A microsecond timestamp plus a process-wide counter makes the directory
    # name unique even across concurrent run() calls. Using `with` (instead
    # of explicit acquire()/release()) guarantees the lock is released even
    # if the name construction raises.
    with self._run_counter_lock:
      run_dir = os.path.join(self._session_root, "run_%d_%d" %
                             (int(time.time() * 1e6), self._run_counter))
      self._run_counter += 1
    gfile.MkDir(run_dir)

    # Record a repr() of the fetches for post-hoc inspection.
    fetches_event = event_pb2.Event()
    fetches_event.log_message.message = repr(fetches)
    fetches_path = os.path.join(
        run_dir,
        debug_data.METADATA_FILE_PREFIX + debug_data.FETCHES_INFO_FILE_TAG)
    with gfile.Open(os.path.join(fetches_path), "wb") as f:
      f.write(fetches_event.SerializeToString())

    # Record the feed keys (or the empty feed_dict itself).
    feed_keys_event = event_pb2.Event()
    feed_keys_event.log_message.message = (repr(feed_dict.keys()) if feed_dict
                                           else repr(feed_dict))
    feed_keys_path = os.path.join(
        run_dir,
        debug_data.METADATA_FILE_PREFIX + debug_data.FEED_KEYS_INFO_FILE_TAG)
    with gfile.Open(os.path.join(feed_keys_path), "wb") as f:
      f.write(feed_keys_event.SerializeToString())

    return ["file://" + run_dir]
|
pyconsk/django-konfera
|
refs/heads/master
|
konfera/tests/test_templatetags.py
|
2
|
from decimal import Decimal
from django.test import TestCase
from konfera.templatetags.custom_filters import currency, currency_code
from konfera.settings import CURRENCY
class TestCurrencyTag(TestCase):
    """Tests for the `currency` / `currency_code` template filters.
    The filters render Decimal amounts at two decimal places, suffixed with
    CURRENCY[0] / CURRENCY[1] (presumably symbol and code -- confirm in
    konfera.settings); non-Decimal input raises AttributeError.
    """
    def setUp(self):
        # One representative input of every non-Decimal type the filters reject.
        self.value_empty_str = ''
        self.value_str = 'some string'
        self.value_float = 12.5
        self.value_negative_int = -12582
        self.value_int = 2
        self.value_str_int = '7'
    def test_value_decimal(self):
        # (input, expected 2-dp rendering) pairs.
        # NOTE(review): 43.331 -> 43.34 implies rounding away from zero at
        # the 2nd decimal (not round-half-even) -- confirm against the
        # filter implementation.
        test_subjects = (
            (Decimal('1.54'), '1.54'),
            (Decimal('43.331'), '43.34'),
            (Decimal('12345.67894'), '12345.68'),
            (Decimal('0.9999999'), '1.00'),
            (Decimal('-2.31'), '-2.31'),
            (Decimal('-71.455'), '-71.46'),
        )
        for subj in test_subjects:
            self.assertEqual(currency(subj[0]), '%s %s' % (subj[1], CURRENCY[0]))
            self.assertEqual(currency_code(subj[0]), '%s %s' % (subj[1], CURRENCY[1]))
    def test_value_empty_string(self):
        self.assertRaises(AttributeError, currency, self.value_empty_str)
        self.assertRaises(AttributeError, currency_code, self.value_empty_str)
    def test_value_string(self):
        self.assertRaises(AttributeError, currency, self.value_str)
        self.assertRaises(AttributeError, currency_code, self.value_str)
    def test_value_float(self):
        self.assertRaises(AttributeError, currency, self.value_float)
        self.assertRaises(AttributeError, currency_code, self.value_float)
    def test_value_negative_int(self):
        self.assertRaises(AttributeError, currency, self.value_negative_int)
        self.assertRaises(AttributeError, currency_code, self.value_negative_int)
    def test_value_int(self):
        self.assertRaises(AttributeError, currency, self.value_int)
        self.assertRaises(AttributeError, currency_code, self.value_int)
    def test_value_str_int(self):
        self.assertRaises(AttributeError, currency, self.value_str_int)
        self.assertRaises(AttributeError, currency_code, self.value_str_int)
|
whaleygeek/hack-iotic-sensor
|
refs/heads/master
|
devroom.py
|
1
|
# devroom.py 26/08/2014 D.J.Whale
import piduino
from Copro import *
import RPi.GPIO as GPIO
import time
import datetime
import urllib2 #TODO deprecate
# import IoticLabs.IOTConnector as IOT
# CONFIGURATION ----------------------------------------------------------
LED = 11 # same pin as piduino D13, which is the LED
DOOR = 9 # same pin as piduino D12, wired to a button
POLL_TIME = 0.25 # how long to wait for an ADC reading
REPORT_TIME = 1.0 # fastest to report to web page that data has changed
DOOR_OPEN = True # NOTE(review): appears unused in this module -- confirm before removing
#TODO deprecate
UPDATE_URL = "http://www.thinkingbinaries.com/test/door/doorupdate.php"
def ui(msg):
    """Show a message to the user (currently just prints to stdout)."""
    text = str(msg)
    print(text)
# INIT -------------------------------------------------------------------
# Flash the coprocessor firmware, then open the serial link to it.
piduino.firmware("copro.hex")
piduino.connect(baud=9600)
# Use Broadcom (BCM) pin numbering for the Pi's GPIO header.
GPIO.setmode(GPIO.BCM)
GPIO.setup(LED, GPIO.OUT)
GPIO.setup(DOOR, GPIO.IN)
# Iotic-labs integration is stubbed out below, pending deployment.
# IOT.joinAs("com.iotic-labs/devroom")
# entryDoor = IOT.advertise("door/entry")
# ambientLight = IOT.advertise("light/ambient")
# ambientTemp = IOT.advertuse("temp/ambient")
# BACK-END ---------------------------------------------------------------
#TODO deprecate
def send(t, s):
    """Best-effort push of a status update to the web page; never raises.

    Args:
        t: timestamp string (see timestamp()).
        s: status string, e.g. "STATE:OPEN".
    """
    try:
        urllib2.urlopen(UPDATE_URL + "?time=" + t + "&status=" + s)
    except Exception:
        # A bare `except:` would also swallow KeyboardInterrupt/SystemExit;
        # catching Exception keeps the best-effort behaviour without hiding
        # interpreter shutdown.
        ui("Failed to send to internet, ignoring")
#TODO deprecate
def reportState(t, s):
    """Report the absolute door state (OPEN/CLOSED): echo locally, mirror it
    on the LED, and push it to the web page."""
    is_open = bool(s)
    dstr = "STATE:" + ("OPEN" if is_open else "CLOSED")
    GPIO.output(LED, is_open)
    ui(t + "," + dstr)
    send(t, dstr)
#TODO deprecate
def reportChange(t, d):
    """Report a door state *transition* (OPENED/CLOSED): echo locally,
    mirror it on the LED, and push it to the web page."""
    opened = bool(d)
    dstr = "CHANGE:" + ("OPENED" if opened else "CLOSED")
    GPIO.output(LED, opened)
    ui(t + "," + dstr)
    send(t, dstr)
#TODO deprecate? time in default message?
def timestamp():
    """Return the current local time formatted as YYYY-MM-DD-HH:MM:SS."""
    now = datetime.datetime.now()
    return now.strftime("%Y-%m-%d-%H:%M:%S")
# FRONT-END ------------------------------------------------------------
def main():
    """Main polling loop.

    Reads the coprocessor ADC channels and the door switch, reports the
    initial state once, then reports door-state *changes* at most once per
    REPORT_TIME seconds. Runs until interrupted.
    """
    lasttime = time.time()
    ui("starting...")
    door_state = GPIO.input(DOOR)
    reportState(timestamp(), door_state)

    while True:
        # ACQUIRE DATA (fast)
        #TODO still knitting in ADC readings
        adcvals = readNextAdcValues(timeout=POLL_TIME)
        if adcvals is not None:  # idiom fix: identity comparison with None
            # fix the order (which is a left over from the neopixel demo)
            # we re-ordered adcs because sk wired rgb as gbr!
            a2, a0, a1 = adcvals
            print("A0=" + str(a0) + " A1=" + str(a1) + " A2=" + str(a2))

        door = GPIO.input(DOOR)
        # if ambient light reading has changed in last 5 seconds
        # might want some averaging and stats here (min/max/N)
        # ambientLight.share(a0)
        # if temp has changed in last 5 seconds
        # might want some averaging and stats here (min/max/N)
        # ambientTemp.share(a1)

        # REPORT DATA (slower): rate-limited to one report per REPORT_TIME.
        newtime = time.time()
        if newtime > (lasttime + REPORT_TIME):
            if door != door_state:
                reportChange(timestamp(), door)  # deprecate
                # entryDoor.share(door_state)
            lasttime = newtime
            door_state = door
# MAIN PROGRAM ---------------------------------------------------------
# Run the main loop; always release the GPIO pins on exit (including Ctrl-C).
try:
    main()
finally:
    GPIO.cleanup()
#IOT.cleanup()
# END
|
Mobii/twilio-python
|
refs/heads/master
|
twilio/rest/resources/connection.py
|
48
|
from .imports import (
httplib2,
socks,
PROXY_TYPE_HTTP,
PROXY_TYPE_SOCKS4,
PROXY_TYPE_SOCKS5
)
class Connection(object):
    '''Class for setting proxy configuration to be used for REST calls.'''

    # Shared, class-level proxy configuration; None means "no proxy".
    _proxy_info = None

    @classmethod
    def proxy_info(cls):
        '''Returns the currently-set proxy information
        as an httplib2.ProxyInfo object.
        '''
        return cls._proxy_info

    @classmethod
    def set_proxy_info(cls, proxy_host, proxy_port,
                       proxy_type=PROXY_TYPE_HTTP, proxy_rdns=None,
                       proxy_user=None, proxy_pass=None):
        '''Set proxy configuration for future REST API calls.

        :param str proxy_host: Hostname of the proxy to use.
        :param int proxy_port: Port to connect to.
        :param proxy_type: The proxy protocol to use. One of
            PROXY_TYPE_HTTP, PROXY_TYPE_SOCKS4, PROXY_TYPE_SOCKS5.
            Defaults to connection.PROXY_TYPE_HTTP.
        :param bool proxy_rdns: Use the proxy host's DNS resolver.
        :param str proxy_user: Username for the proxy.
        :param str proxy_pass: Password for the proxy.
        '''
        # Build the ProxyInfo first, then publish it on the class in one step.
        info = httplib2.ProxyInfo(
            proxy_type,
            proxy_host,
            proxy_port,
            proxy_rdns=proxy_rdns,
            proxy_user=proxy_user,
            proxy_pass=proxy_pass,
        )
        cls._proxy_info = info
# Reference otherwise-unused imports so static analyzers (pyflakes) do not
# flag them; they are imported here for re-export to callers.
_hush_pyflakes = [
    socks,
    PROXY_TYPE_SOCKS4,
    PROXY_TYPE_SOCKS5
]
|
spectralDNS/spectralDNS
|
refs/heads/master
|
spectralDNS/config.py
|
2
|
"""Parameters for the spectralDNS solvers
The parameters are kept in dictionary 'params'. The values of this
dictionary may be accessed as attributes, e.g.,
M = config.params.M does the same thing as M = config.params['M']
Generic parameters for all solvers::
precision (str) ('double', 'single')
optimization (str) ('cython', 'numba', None)
make_profile (int) Whether on not to enable profiling
dt (float) Time step for fixed time step integrators
T (float) End time
nu (float) Viscosity
t (float) Time
tstep (int) Time step
L (float, float(, float)) Domain size (2 for 2D, 3 for 3D)
M (int, int(, int)) Mesh size (2 for 2D, 3 for 3D)
write_result (int) Store results as HDF5 every (*) time step
checkpoint (int) Save intermediate result every (*)
dealias (str) ('3/2-rule', '2/3-rule', 'None')
decomposition (str) ('slab', 'pencil')
ntol (int) Tolerance (number of accurate digits used in tests)
threads (int) Number of threads used for FFTs
h5filename (str) Filename for storing HDF5 results
verbose (bool) Print some timings in the end
convection (str) ('Standard', 'Divergence', 'Skewed', 'Vortex')
Parameters for 3D explicit solvers::
integrator (str) ('RK4', 'ForwardEuler', 'AB2', 'BS5_adaptive', 'BS5_fixed')
TOL (float) Accuracy used in BS5_adaptive
Solver specific parameters triply periodic domain::
MHD::
eta (float) Model parameter
Solver specific parameters doubly periodic domain::
Bq2D::
Ri (float) Model parameter (Richardson number)
Pr (float) Model parameter (Prandtl number)
"""
__author__ = "Mikael Mortensen <mikaem@math.uio.no>"
__date__ = "2015-04-08"
__copyright__ = "Copyright (C) 2015-2018 " + __author__
__license__ = "GNU Lesser GPL version 3 or any later version"
import argparse
import collections
import collections.abc
import json

from numpy import pi, array, float32, float64
#pylint: disable=global-statement,redefined-outer-name,exec-used
class AttributeDict(collections.abc.MutableMapping, dict):
    """Dictionary whose values may also be accessed as attributes:
        p = AttributeDict({'M': 2})
        M = p.M
        N = p['M']
        assert M is N

    Implementation note: ``self.__dict__ = self`` aliases the attribute
    namespace to the dict itself, which is what makes attribute and item
    access interchangeable.

    Fixed to derive from ``collections.abc.MutableMapping`` (available since
    Python 3.3): the ``collections.MutableMapping`` alias was removed in
    Python 3.10.
    """
    def __init__(self, *args, **kwargs):
        super(AttributeDict, self).__init__(*args, **kwargs)
        self.__dict__ = self

    def __getattribute__(self, key):
        return dict.__getattribute__(self, key)

    def __setattr__(self, key, val):
        dict.__setattr__(self, key, val)

    def __getitem__(self, key):
        return dict.__getitem__(self, key)

    def __setitem__(self, key, val):
        dict.__setitem__(self, key, val)

    def __delitem__(self, key):
        dict.__delitem__(self, key)

    def __iter__(self):
        return dict.__iter__(self)

    def __len__(self):
        return dict.__len__(self)

    def __contains__(self, x):
        return dict.__contains__(self, x)
class Params(AttributeDict):
    """Parameter collection with typed and derived attribute access:
        p = Params({'M': 2})
        M = p.M
        N = p['M']
        assert M is N

    Derived keys: ``N`` (mesh size, 2**M per direction) and ``dx`` (grid
    spacing, L/N) are computed on demand; float-valued physical parameters
    are returned in the precision selected by ``params['precision']``.
    """
    def __init__(self, *args, **kwargs):
        AttributeDict.__init__(self, *args, **kwargs)

    def __getattr__(self, key):
        # Called only if `key` was missing in __getattribute__.
        if key == 'dx':
            # Grid spacing, derived from domain size and mesh size.
            return self.L / self.N
        elif key == 'N':
            # Mesh size is derived from M (2**M points per direction).
            assert 'M' in self
            mval = self.M
            return 2**mval
        else:
            # Include the key so the failure is diagnosable.
            # Kept as KeyError (not AttributeError) since callers treat
            # missing parameters as missing dict keys.
            raise KeyError(key)

    def __getattribute__(self, key):
        # Physical scalars are coerced to the configured floating precision.
        if key in ('nu', 'dt', 'Ri', 'Pr', 'eta'):
            fl = float32 if self['precision'] == 'single' else float64
            return fl(dict.__getattribute__(self, key))
        return dict.__getattribute__(self, key)

    def __setattr__(self, key, val):
        # Mesh/domain keys go through __setitem__ for validation/conversion.
        if key in ('N', 'M', 'L'):
            self.__setitem__(key, val)
        else:
            dict.__setattr__(self, key, val)

    def __setitem__(self, key, val):
        if key in ('M', 'N'):
            # Store as a read-only integer array.
            val = array([int(str(f)) for f in val], dtype=int)
            val.flags.writeable = False
            dict.__setitem__(self, key, val)
        elif key == 'L':
            # Entries may be expressions such as '2*pi'; evaluated with
            # builtins disabled and only `pi` in scope.
            # NOTE(review): eval of user-supplied strings is still not a real
            # sandbox -- acceptable for trusted command-line input only.
            val = array([eval(str(f), {"__builtins__": None}, {'pi': pi}) for f in val],
                        dtype=float)
            val.flags.writeable = False
            dict.__setitem__(self, key, val)
        else:
            dict.__setitem__(self, key, val)
# Default FFTW planner effort per transform type; any transform not listed
# falls back to "FFTW_MEASURE". Updated in place by PlanAction below.
fft_plans = collections.defaultdict(lambda: "FFTW_MEASURE",
                                    {'dct': "FFTW_MEASURE"})
class PlanAction(argparse.Action):
    """argparse action merging a JSON mapping into the FFT planner table."""

    def __call__(self, parser, namespace, values, option_string=None):
        # `values` is a JSON object string, e.g. '{"dct": "FFTW_EXHAUSTIVE"}'.
        # The module-level table is mutated in place, so `global` is not
        # strictly required here; it is kept for explicitness.
        global fft_plans
        overrides = json.loads(values)
        fft_plans.update(overrides)
        setattr(namespace, self.dest, fft_plans)
# Create an instance of the Params class to hold all parameters for the solvers
params = Params()
# Create the main parser
parser = argparse.ArgumentParser(prog='spectralDNS', add_help=False)
# Arguments used by all solvers
parser.add_argument('--precision', default='double',
choices=('single', 'double'))
parser.add_argument('--optimization', default='',
choices=('cython', 'weave', 'numba', 'pythran'),
help='Choose implementation method for optimization')
parser.add_argument('--make_profile', default=0, type=int,
help='Enable cProfile profiler')
parser.add_argument('--dt', default=0.01, type=float,
help='Time step size')
parser.add_argument('--T', default=0.1, type=float,
help='End time')
parser.add_argument('--write_result', default=1e8, metavar=('tstep'), type=int,
help='Write results to HDF5 every tstep')
parser.add_argument('--checkpoint', default=1e8, type=int,
help='Save intermediate result every...')
parser.add_argument('--nu', default=0.000625, type=float,
help='Viscosity')
parser.add_argument('--t', default=0.0, type=float,
help='Time')
parser.add_argument('--tstep', default=0, type=int,
help='Time step')
parser.add_argument('--filemode', default='w',
choices=('w', 'r', 'a'),
help='Choose mode for opening HDF5 files')
parser.add_argument('--dealias', default='2/3-rule',
choices=('2/3-rule', '3/2-rule', 'None'),
help='Choose dealiasing method')
parser.add_argument('--decomposition', default='slab', choices=('slab', 'pencil'),
help="Choose MPI decomposition between slab and pencil.")
parser.add_argument('--ntol', default=7, type=int,
help='Tolerance - number of accurate digits')
parser.add_argument('--threads', default=1, type=int,
help='Number of threads used for FFTs')
parser.add_argument('--planner_effort', action=PlanAction, default=fft_plans,
help="""Planning effort for FFTs. Usage, e.g., --planner_effort '{"dct":"FFTW_EXHAUSTIVE"}' """)
parser.add_argument('--h5filename', default='results', type=str,
help='Filename of HDF5 datafile used to store intermediate checkpoint data or timeseries results')
parser.add_argument('--verbose', dest='verbose', action='store_true', help='Print timings in the end')
parser.add_argument('--no-verbose', dest='verbose', action='store_false', help='Do not print timings in the end')
parser.set_defaults(verbose=True)
parser.add_argument('--mask_nyquist', dest='mask_nyquist', action='store_true', help='Eliminate Nyquist frequency')
parser.add_argument('--no-mask_nyquist', dest='mask_nyquist', action='store_false', help='Do not eliminate Nyquist frequency')
parser.set_defaults(mask_nyquist=True)
# Arguments for 3D isotropic solvers
triplyperiodic = argparse.ArgumentParser(parents=[parser])
triplyperiodic.add_argument('--convection', default='Vortex',
choices=('Standard', 'Divergence', 'Skewed', 'Vortex'),
help='Choose method for computing the nonlinear convective term')
triplyperiodic.add_argument('--L', default=[2*pi, 2*pi, 2*pi], metavar=("Lx", "Ly", "Lz"), nargs=3,
help='Physical mesh size')
triplyperiodic.add_argument('--M', default=[6, 6, 6], metavar=("Mx", "My", "Mz"), nargs=3,
help='Mesh size is pow(2, M[i]) in direction i. Used if N is missing.')
triplyperiodic.add_argument('--TOL', type=float, default=1e-6,
help='Tolerance for adaptive time integrator')
triplyperiodic.add_argument('--integrator', default='RK4',
choices=('RK4', 'ForwardEuler', 'AB2', 'BS5_adaptive', 'BS5_fixed'),
help='Integrator for triply periodic domain')
trippelsubparsers = triplyperiodic.add_subparsers(dest='solver')
# Remember! Subparser arguments must be invoked after the positional argument
# E.g, python TG.py --M 6 6 6 NS --integrator RK4
parser_NS = trippelsubparsers.add_parser('NS', help='Regular Navier Stokes solver')
parser_VV = trippelsubparsers.add_parser('VV', help='Velocity-Vorticity formulation')
parser_MHD = trippelsubparsers.add_parser('MHD', help='Magnetohydrodynamics solver')
parser_MHD.add_argument('--eta', default=0.01, type=float, help='MHD parameter')
parser_Bq = trippelsubparsers.add_parser('Bq', help='Navier Stokes solver with Boussinesq model')
parser_Bq.add_argument('--Ri', default=0.1, type=float, help='Richardson number')
parser_Bq.add_argument('--Pr', default=1.0, type=float, help='Prandtl number')
# Arguments for 2D periodic solvers
doublyperiodic = argparse.ArgumentParser(parents=[parser])
doublyperiodic.add_argument('--integrator', default='RK4',
choices=('RK4', 'ForwardEuler', 'AB2', 'BS5_fixed', 'BS5_adaptive'),
help='Integrator for doubly periodic domain')
doublyperiodic.add_argument('--L', default=[2*pi, 2*pi], nargs=2, metavar=('Lx', 'Ly'),
help='Physical mesh size')
doublyperiodic.add_argument('--convection', default='Vortex',
choices=('Vortex'),
help='Choose method for computing the nonlinear convective term')
doublyperiodic.add_argument('--TOL', type=float, default=1e-6,
help='Tolerance for adaptive time integrator')
doublyperiodic.add_argument('--M', default=[6, 6], nargs=2, metavar=('Mx', 'My'),
help='Mesh size is pow(2, M[i]) in direction i. Used if N is missing.')
doublesubparsers = doublyperiodic.add_subparsers(dest='solver')
parser_NS2D = doublesubparsers.add_parser('NS2D', help='Regular 2D Navier Stokes solver')
parser_Bq2D = doublesubparsers.add_parser('Bq2D', help='Regular 2D Navier Stokes solver with Boussinesq model.')
parser_Bq2D.add_argument('--Ri', default=0.1, type=float, help='Richardson number')
parser_Bq2D.add_argument('--Pr', default=1.0, type=float, help='Prandtl number')
# Arguments for channel solvers with one inhomogeneous direction
channel = argparse.ArgumentParser(parents=[parser])
channel.add_argument('--convection', default='Vortex',
choices=('Standard', 'Divergence', 'Skew', 'Vortex'),
help='Choose method for computing the nonlinear convective term')
channel.add_argument('--L', default=[2, 2*pi, 2*pi], nargs=3, metavar=('Lx', 'Ly', 'Lz'),
help='Physical mesh size')
channel.add_argument('--M', default=[6, 6, 6], nargs=3, metavar=('Mx', 'My', 'Mz'),
help='Mesh size is pow(2, M[i]) in direction i. Used if N is missing.')
channel.add_argument('--Dquad', default='GC', choices=('GC', 'GL'),
help="Choose quadrature scheme for Dirichlet space. GC = Chebyshev-Gauss (x_k=cos((2k+1)/(2N+2)*pi)) and GL = Gauss-Lobatto (x_k=cos(k*pi/N))")
channel.add_argument('--Bquad', default='GC', choices=('GC', 'GL'),
help="Choose quadrature scheme for Biharmonic space. GC = Chebyshev-Gauss (x_k=cos((2k+1)/(2N+2)*pi)) and GL = Gauss-Lobatto (x_k=cos(k*pi/N))")
channel.add_argument('--Nquad', default='GC', choices=('GC', 'GL'),
help="Choose quadrature scheme for Neumann space. GC = Chebyshev-Gauss (x_k=cos((2k+1)/(2N+2)*pi)) and GL = Gauss-Lobatto (x_k=cos(k*pi/N))")
channelsubparsers = channel.add_subparsers(dest='solver')
KMM = channelsubparsers.add_parser('KMM', help='Kim Moin Moser channel solver with Crank-Nicolson and Adams-Bashforth discretization.')
KMM.add_argument('--integrator', default='implicit', choices=('implicit',), help='Regular Crank-Nicolson/Adams-Bashforth integrator for channel solver')
KMMr = channelsubparsers.add_parser('KMMr', help='Kim Moin Moser channel solver with Crank-Nicolson and Adams-Bashforth discretization. Inhomogeneous space in z-direction.')
KMMr.add_argument('--integrator', default='implicit', choices=('implicit',), help='Regular Crank-Nicolson/Adams-Bashforth integrator for channel solver')
KMMRK3 = channelsubparsers.add_parser('KMMRK3', help='Kim Moin Moser channel solver with third order semi-implicit Runge-Kutta discretization.')
KMMRK3.add_argument('--integrator', default='implicitRK3', choices=('implicitRK3',), help='RK3 integrator for channel solver')
KMM_RB = channelsubparsers.add_parser('KMM_RB', help='Rayleigh-Benard channel solver using KMM')
KMM_RB.add_argument('--integrator', default='implicit', choices=('implicit',), help='Regular Crank-Nicolson/Adams-Bashforth integrator for channel solver')
KMMRK3_RB = channelsubparsers.add_parser('KMMRK3_RB', help='Rayleigh-Benard channel solver using KMMRK3.')
KMMRK3_RB.add_argument('--integrator', default='implicitRK3', choices=('implicitRK3',), help='RK3 integrator for channel solver')
#IPCS = channelsubparsers.add_parser('IPCS', help='Incremental pressure correction channel solver with Crank-Nicolson and Adams-Bashforth discretization.')
#IPCS.add_argument('--integrator', default='implicit', choices=('implicit',), help='Regular Crank-Nicolson/Adams-Bashforth integrator for channel solver')
#IPCSR = channelsubparsers.add_parser('IPCSR', help='Incremental pressure correction channel solver with Crank-Nicolson and Adams-Bashforth discretization.')
#IPCSR.add_argument('--integrator', default='implicit', choices=('implicit',), help='Regular Crank-Nicolson/Adams-Bashforth integrator for channel solver')
Coupled = channelsubparsers.add_parser('Coupled', help='Coupled channel solver with Crank-Nicolson and Adams-Bashforth discretization.')
Coupled.add_argument('--integrator', default='implicit', choices=('implicit',), help='Regular Crank-Nicolson/Adams-Bashforth integrator for channel solver')
CoupledRK3 = channelsubparsers.add_parser('CoupledRK3', help='Coupled channel solver with RK3.')
CoupledRK3.add_argument('--integrator', default='implicit', choices=('implicit',), help='Coupled RK3 integrator for channel solver')
def update(new, mesh="triplyperiodic"):
    """Update spectralDNS parameters"""
    # Merge user-supplied options into the defaults of the parser selected by
    # 'mesh' -- the parser objects ("triplyperiodic", "doublyperiodic",
    # "channel") are looked up by name in this module's globals().
    global fft_plans
    assert isinstance(new, dict)
    if 'planner_effort' in new:
        # Fold the per-transform planner efforts into the module-level dict
        # so previously configured entries are preserved rather than replaced.
        fft_plans.update(new['planner_effort'])
        new['planner_effort'] = fft_plans
    globals()[mesh].set_defaults(**new)
|
chrrrles/ansible-modules-extras
|
refs/heads/devel
|
clustering/consul.py
|
54
|
#!/usr/bin/python
#
# (c) 2015, Steve Gargan <steve.gargan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = """
module: consul
short_description: "Add, modify & delete services within a consul cluster."
description:
- Registers services and checks for an agent with a consul cluster.
A service is some process running on the agent node that should be advertised by
consul's discovery mechanism. It may optionally supply a check definition,
a periodic service test to notify the consul cluster of service's health.
- "Checks may also be registered per node e.g. disk usage, or cpu usage and
notify the health of the entire node to the cluster.
Service level checks do not require a check name or id as these are derived
by Consul from the Service name and id respectively by appending 'service:'
Node level checks require a check_name and optionally a check_id."
- Currently, there is no complete way to retrieve the script, interval or ttl
metadata for a registered check. Without this metadata it is not possible to
tell if the data supplied with ansible represents a change to a check. As a
result this does not attempt to determine changes and will always report that a
change occurred. An api method is planned to supply this metadata so at that
stage change management will be added.
- "See http://consul.io for more details."
requirements:
- "python >= 2.6"
- python-consul
- requests
version_added: "2.0"
author: "Steve Gargan (@sgargan)"
options:
state:
description:
- register or deregister the consul service, defaults to present
required: true
choices: ['present', 'absent']
service_name:
description:
- Unique name for the service on a node, must be unique per node,
required if registering a service. May be omitted if registering
a node level check
required: false
service_id:
description:
- the ID for the service, must be unique per node, defaults to the
service name if the service name is supplied
required: false
default: service_name if supplied
host:
description:
- host of the consul agent defaults to localhost
required: false
default: localhost
port:
description:
- the port on which the consul agent is running
required: false
default: 8500
notes:
description:
- Notes to attach to check when registering it.
required: false
default: None
service_port:
description:
- the port on which the service is listening required for
registration of a service, i.e. if service_name or service_id is set
required: false
tags:
description:
- a list of tags that will be attached to the service registration.
required: false
default: None
script:
description:
- the script/command that will be run periodically to check the health
of the service. Scripts require an interval and vice versa
required: false
default: None
interval:
description:
- the interval at which the service check will be run. This is a number
with a s or m suffix to signify the units of seconds or minutes e.g
15s or 1m. If no suffix is supplied, m will be used by default e.g.
1 will be 1m. Required if the script param is specified.
required: false
default: None
check_id:
description:
- an ID for the service check, defaults to the check name, ignored if
part of a service definition.
required: false
default: None
check_name:
description:
- a name for the service check, defaults to the check id. required if
standalone, ignored if part of service definition.
required: false
default: None
ttl:
description:
- checks can be registered with a ttl instead of a script and interval
this means that the service will check in with the agent before the
ttl expires. If it doesn't the check will be considered failed.
Required if registering a check and the script an interval are missing
Similar to the interval this is a number with a s or m suffix to
signify the units of seconds or minutes e.g 15s or 1m. If no suffix
is supplied, m will be used by default e.g. 1 will be 1m
required: false
default: None
token:
description:
- the token key identifying an ACL rule set. May be required to register services.
required: false
default: None
"""
EXAMPLES = '''
- name: register nginx service with the local consul agent
consul:
name: nginx
service_port: 80
- name: register nginx service with curl check
consul:
name: nginx
service_port: 80
script: "curl http://localhost"
interval: 60s
- name: register nginx with some service tags
consul:
name: nginx
service_port: 80
tags:
- prod
- webservers
- name: remove nginx service
consul:
name: nginx
state: absent
- name: create a node level check to test disk usage
consul:
check_name: Disk usage
check_id: disk_usage
script: "/opt/disk_usage.py"
interval: 5m
'''
import sys

# json is in the stdlib from Python 2.6 on; fall back to simplejson otherwise.
try:
    import json
except ImportError:
    import simplejson as json

# python-consul (and its requests dependency) is optional at import time so a
# clean failure message can be produced later by test_dependencies().
try:
    import consul
    from requests.exceptions import ConnectionError
    python_consul_installed = True
except ImportError:
    # The bound exception name was unused, and the py2-only
    # "except ImportError, e" form is a syntax error on Python 3.
    python_consul_installed = False
def register_with_consul(module):
    '''Dispatch to add or remove based on the requested state.'''
    # 'present' registers the service/check; 'absent' deregisters it.
    action = add if module.params.get('state') == 'present' else remove
    action(module)
def add(module):
    ''' adds a service or a check depending on supplied configuration'''
    check = parse_check(module)
    service = parse_service(module)
    if not (service or check):
        module.fail_json(msg='a name and port are required to register a service')
    elif service:
        # a service definition; attach the check to it when one was supplied
        if check:
            service.add_check(check)
        add_service(module, service)
    else:
        # a standalone, node-level check
        add_check(module, check)
def remove(module):
    ''' removes a service or a check '''
    params = module.params
    # ids fall back to names when not given explicitly
    service_id = params.get('service_id') or params.get('service_name')
    check_id = params.get('check_id') or params.get('check_name')
    if not (service_id or check_id):
        module.fail_json(msg='services and checks are removed by id or name.'
                             ' please supply a service id/name or a check id/name')
    if service_id:
        remove_service(module, service_id)
    else:
        remove_check(module, check_id)
def add_check(module, check):
    '''Register a node-level check with the agent.

    The consul api offers no way to read back a check's full metadata
    (script/interval/ttl), so no change detection is possible and the
    result is always reported as changed.
    '''
    if not check.name:
        module.fail_json(msg='a check name is required for a node level check,'
                             ' one not attached to a service')
    check.register(get_consul_api(module))
    module.exit_json(changed=True,
                     check_id=check.check_id,
                     check_name=check.name,
                     script=check.script,
                     interval=check.interval,
                     ttl=check.ttl)
def remove_check(module, check_id):
    '''Deregister the check identified by check_id from the local agent.'''
    api = get_consul_api(module)
    # deregister only when the agent actually knows the check
    if check_id in api.agent.checks():
        api.agent.check.deregister(check_id)
        # exit_json terminates module execution
        module.exit_json(changed=True, id=check_id)
    module.exit_json(changed=False, id=check_id)
def add_service(module, service):
    '''Register a service with the current agent.

    The consul api cannot return the details of registered checks, so a
    service that carries a check is always re-registered; otherwise
    registration only happens when the service differs from what the agent
    already holds.
    '''
    result = service
    changed = False
    consul_api = get_consul_api(module)
    existing = get_service_by_id(consul_api, service.id)
    # there is no way to retrieve the details of checks so if a check is present
    # in the service it must be reregistered
    if service.has_checks() or not existing or not existing == service:
        service.register(consul_api)
        # check that it registered correctly
        registered = get_service_by_id(consul_api, service.id)
        if registered:
            result = registered
            changed = True
    module.exit_json(changed=changed,
                     service_id=result.id,
                     service_name=result.name,
                     service_port=result.port,
                     # list comprehension instead of map(): under Python 3,
                     # map() returns a lazy iterator that is not
                     # JSON-serializable in the module result
                     checks=[check.to_dict() for check in service.checks],
                     tags=result.tags)
def remove_service(module, service_id):
    '''Deregister the service identified by service_id from the given agent.'''
    api = get_consul_api(module)
    # only deregister when the service is actually registered
    if get_service_by_id(api, service_id):
        api.agent.service.deregister(service_id)
        module.exit_json(changed=True, id=service_id)
    module.exit_json(changed=False, id=service_id)
def get_consul_api(module, token=None):
    # Build a consul client from the module's connection parameters.
    # NOTE(review): the 'token' argument is never used -- the token is always
    # read from module.params, and no caller in this file passes one. Kept to
    # preserve the signature.
    return consul.Consul(host=module.params.get('host'),
                         port=module.params.get('port'),
                         token=module.params.get('token'))
def get_service_by_id(consul_api, service_id):
    ''' iterate the registered services and find one with the given id '''
    # .items() works on both Python 2 and 3; the py2-only .iteritems()
    # raises AttributeError under Python 3.
    for name, service in consul_api.agent.services().items():
        if service['ID'] == service_id:
            return ConsulService(loaded=service)
    # implicit None when no service matches
def parse_check(module):
    '''Build a ConsulCheck from module params, or return None when no
    check-related parameters were supplied.'''
    params = module.params
    # script-driven and ttl-driven checks are mutually exclusive
    if params.get('script') and params.get('ttl'):
        module.fail_json(
            msg='check are either script or ttl driven, supplying both does'
                ' not make sense')
    if any(params.get(key) for key in ('check_id', 'script', 'ttl')):
        return ConsulCheck(
            params.get('check_id'),
            params.get('check_name'),
            params.get('check_node'),
            params.get('check_host'),
            params.get('script'),
            params.get('interval'),
            params.get('ttl'),
            params.get('notes')
        )
def parse_service(module):
    '''Build a ConsulService from module params; fail when a service name is
    given without a port, and return None when no service was requested.'''
    name = module.params.get('service_name')
    port = module.params.get('service_port')
    if name and port:
        return ConsulService(
            module.params.get('service_id'),
            name,
            port,
            module.params.get('tags'),
        )
    if name:
        # a name without a port cannot be registered
        module.fail_json(
            msg="service_name supplied but no service_port, a port is required"
                " to configure a service. Did you configure the 'port' "
                "argument meaning 'service_port'?")
class ConsulService():
    # In-memory representation of a consul service definition. Built either
    # from module params or from a service dict returned by the consul api
    # (the 'loaded' keyword).
    def __init__(self, service_id=None, name=None, port=-1,
                 tags=None, loaded=None):
        # id defaults to the name unless an explicit service_id is given
        self.id = self.name = name
        if service_id:
            self.id = service_id
        self.port = port
        self.tags = tags
        self.checks = []
        # values loaded from the agent win over everything supplied above
        if loaded:
            self.id = loaded['ID']
            self.name = loaded['Service']
            self.port = loaded['Port']
            self.tags = loaded['Tags']
    def register(self, consul_api):
        # Register via the consul api; only the first attached check is sent
        # (the api accepts a single script/interval/ttl set per service).
        if len(self.checks) > 0:
            check = self.checks[0]
            consul_api.agent.service.register(
                self.name,
                service_id=self.id,
                port=self.port,
                tags=self.tags,
                script=check.script,
                interval=check.interval,
                ttl=check.ttl)
        else:
            consul_api.agent.service.register(
                self.name,
                service_id=self.id,
                port=self.port,
                tags=self.tags)
    def add_check(self, check):
        self.checks.append(check)
    def checks(self):
        # NOTE(review): this method is dead code -- __init__ assigns the list
        # attribute self.checks, which shadows the method on every instance,
        # so it can never be called. Kept as-is to preserve behavior.
        return self.checks
    def has_checks(self):
        return len(self.checks) > 0
    def __eq__(self, other):
        # equality ignores attached checks; compares identity fields only
        return (isinstance(other, self.__class__)
                and self.id == other.id
                and self.name == other.name
                and self.port == other.port
                and self.tags == other.tags)
    def __ne__(self, other):
        return not self.__eq__(other)
    def to_dict(self):
        # Serialize for module output; only the first check is included,
        # mirroring register() above.
        data = {'id': self.id, "name": self.name}
        if self.port:
            data['port'] = self.port
        if self.tags and len(self.tags) > 0:
            data['tags'] = self.tags
        if len(self.checks) > 0:
            data['check'] = self.checks[0].to_dict()
        return data
class ConsulCheck():
    '''In-memory representation of a consul health check definition.'''

    def __init__(self, check_id, name, node=None, host='localhost',
                 script=None, interval=None, ttl=None, notes=None):
        # check_id defaults to the name unless given explicitly
        self.check_id = self.name = name
        if check_id:
            self.check_id = check_id
        self.script = script
        self.interval = self.validate_duration('interval', interval)
        self.ttl = self.validate_duration('ttl', ttl)
        self.notes = notes
        self.node = node
        self.host = host

    def validate_duration(self, name, duration):
        '''Return duration unchanged after checking that it carries a consul
        duration unit suffix; raise for a unitless value.'''
        if duration:
            duration_units = ['ns', 'us', 'ms', 's', 'm', 'h']
            if not any((duration.endswith(suffix) for suffix in duration_units)):
                raise Exception('Invalid %s %s you must specify units (%s)' %
                                (name, duration, ', '.join(duration_units)))
        return duration

    def register(self, consul_api):
        '''Register this check with the agent via the consul api.'''
        consul_api.agent.check.register(self.name, check_id=self.check_id,
                                        script=self.script,
                                        interval=self.interval,
                                        ttl=self.ttl, notes=self.notes)

    def __eq__(self, other):
        # BUG FIX: the original compared self.script/self.interval against the
        # undefined global names 'script' and 'interval', raising NameError
        # whenever two checks were compared.
        return (isinstance(other, self.__class__)
                and self.check_id == other.check_id
                and self.name == other.name
                and self.script == other.script
                and self.interval == other.interval)

    def __ne__(self, other):
        return not self.__eq__(other)

    def to_dict(self):
        '''Serialize the check for module output.'''
        data = {}
        self._add(data, 'id', attr='check_id')
        # BUG FIX: the attribute is stored as 'name', not 'check_name'; the
        # original lookup failed silently and always dropped the name key.
        self._add(data, 'name')
        self._add(data, 'script')
        self._add(data, 'node')
        self._add(data, 'notes')
        self._add(data, 'host')
        self._add(data, 'interval')
        self._add(data, 'ttl')
        return data

    def _add(self, data, key, attr=None):
        '''Copy attribute 'attr' (defaulting to 'key') into data, skipping
        attributes that do not exist on the instance.'''
        if attr is None:
            attr = key
        try:
            data[key] = getattr(self, attr)
        except AttributeError:
            pass
def test_dependencies(module):
    '''Abort the module run when the python-consul client is unavailable.'''
    if python_consul_installed:
        return
    module.fail_json(msg="python-consul required for this module. "
                         "see http://python-consul.readthedocs.org/en/latest/#installation")
def main():
    '''Module entry point: parse arguments and register/deregister with consul.'''
    module = AnsibleModule(
        argument_spec=dict(
            host=dict(default='localhost'),
            port=dict(default=8500, type='int'),
            check_id=dict(required=False),
            check_name=dict(required=False),
            check_node=dict(required=False),
            check_host=dict(required=False),
            notes=dict(required=False),
            script=dict(required=False),
            service_id=dict(required=False),
            service_name=dict(required=False),
            service_port=dict(required=False, type='int'),
            state=dict(default='present', choices=['present', 'absent']),
            interval=dict(required=False, type='str'),
            ttl=dict(required=False, type='str'),
            tags=dict(required=False, type='list'),
            token=dict(required=False)
        ),
        supports_check_mode=False,
    )
    test_dependencies(module)
    try:
        register_with_consul(module)
    # BUG FIX: 'except X as e' (valid since Python 2.6, which this module
    # requires) instead of the py2-only 'except X, e' form, which is a
    # syntax error on Python 3.
    except ConnectionError as e:
        module.fail_json(msg='Could not connect to consul agent at %s:%s, error was %s' % (
            module.params.get('host'), module.params.get('port'), str(e)))
    except Exception as e:
        module.fail_json(msg=str(e))
# import module snippets
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()
|
jimi-c/ansible
|
refs/heads/devel
|
lib/ansible/modules/network/avi/avi_scheduler.py
|
20
|
#!/usr/bin/python
#
# @author: Gaurav Rastogi (grastogi@avinetworks.com)
# Eric Anderson (eanderson@avinetworks.com)
# module_check: supported
#
# Copyright: (c) 2017 Gaurav Rastogi, <grastogi@avinetworks.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: avi_scheduler
author: Gaurav Rastogi (grastogi@avinetworks.com)
short_description: Module for setup of Scheduler Avi RESTful Object
description:
- This module is used to configure Scheduler object
- more examples at U(https://github.com/avinetworks/devops)
requirements: [ avisdk ]
version_added: "2.4"
options:
state:
description:
- The state that should be applied on the entity.
default: present
choices: ["absent", "present"]
avi_api_update_method:
description:
- Default method for object update is HTTP PUT.
- Setting to patch will override that behavior to use HTTP PATCH.
version_added: "2.5"
default: put
choices: ["put", "patch"]
avi_api_patch_op:
description:
- Patch operation to use when using avi_api_update_method as patch.
version_added: "2.5"
choices: ["add", "replace", "delete"]
backup_config_ref:
description:
- Backup configuration to be executed by this scheduler.
- It is a reference to an object of type backupconfiguration.
enabled:
description:
- Boolean flag to set enabled.
- Default value when not specified in API or module is interpreted by Avi Controller as True.
type: bool
end_date_time:
description:
- Scheduler end date and time.
frequency:
description:
- Frequency at which custom scheduler will run.
- Allowed values are 0-60.
frequency_unit:
description:
- Unit at which custom scheduler will run.
- Enum options - SCHEDULER_FREQUENCY_UNIT_MIN, SCHEDULER_FREQUENCY_UNIT_HOUR, SCHEDULER_FREQUENCY_UNIT_DAY, SCHEDULER_FREQUENCY_UNIT_WEEK,
- SCHEDULER_FREQUENCY_UNIT_MONTH.
name:
description:
- Name of scheduler.
required: true
run_mode:
description:
- Scheduler run mode.
- Enum options - RUN_MODE_PERIODIC, RUN_MODE_AT, RUN_MODE_NOW.
run_script_ref:
description:
- Control script to be executed by this scheduler.
- It is a reference to an object of type alertscriptconfig.
scheduler_action:
description:
- Define scheduler action.
- Enum options - SCHEDULER_ACTION_RUN_A_SCRIPT, SCHEDULER_ACTION_BACKUP.
- Default value when not specified in API or module is interpreted by Avi Controller as SCHEDULER_ACTION_BACKUP.
start_date_time:
description:
- Scheduler start date and time.
tenant_ref:
description:
- It is a reference to an object of type tenant.
url:
description:
- Avi controller URL of the object.
uuid:
description:
- Unique object identifier of the object.
extends_documentation_fragment:
- avi
'''
EXAMPLES = """
- name: Example to create Scheduler object
avi_scheduler:
controller: 10.10.25.42
username: admin
password: something
state: present
name: sample_scheduler
"""
RETURN = '''
obj:
description: Scheduler (api/scheduler) object
returned: success, changed
type: dict
'''
from ansible.module_utils.basic import AnsibleModule
try:
from ansible.module_utils.network.avi.avi import (
avi_common_argument_spec, HAS_AVI, avi_ansible_api)
except ImportError:
HAS_AVI = False
def main():
    """Entry point: build the argument spec and drive the Avi scheduler API."""
    argument_specs = dict(
        state=dict(default='present',
                   choices=['absent', 'present']),
        avi_api_update_method=dict(default='put',
                                   choices=['put', 'patch']),
        avi_api_patch_op=dict(choices=['add', 'replace', 'delete']),
        backup_config_ref=dict(type='str',),
        enabled=dict(type='bool',),
        end_date_time=dict(type='str',),
        frequency=dict(type='int',),
        frequency_unit=dict(type='str',),
        name=dict(type='str', required=True),
        run_mode=dict(type='str',),
        run_script_ref=dict(type='str',),
        scheduler_action=dict(type='str',),
        start_date_time=dict(type='str',),
        tenant_ref=dict(type='str',),
        url=dict(type='str',),
        uuid=dict(type='str',),
    )
    # fold in the shared Avi connection options (controller, username, ...)
    argument_specs.update(avi_common_argument_spec())
    module = AnsibleModule(
        argument_spec=argument_specs, supports_check_mode=True)
    if not HAS_AVI:
        return module.fail_json(msg=(
            'Avi python API SDK (avisdk>=17.1) is not installed. '
            'For more details visit https://github.com/avinetworks/sdk.'))
    # delegate create/update/delete handling to the generic Avi api helper
    return avi_ansible_api(module, 'scheduler',
                           set([]))


if __name__ == '__main__':
    main()
|
antb/TPT----My-old-mod
|
refs/heads/master
|
src/python/stdlib/ctypes/test/test_keeprefs.py
|
80
|
from ctypes import *
import unittest
class SimpleTestCase(unittest.TestCase):
    """_objects bookkeeping for simple ctypes instances (Python 2 test file)."""
    def test_cint(self):
        # a plain c_int keeps no Python objects alive
        x = c_int()
        self.assertEqual(x._objects, None)
        x.value = 42
        self.assertEqual(x._objects, None)
        x = c_int(99)
        self.assertEqual(x._objects, None)
    def test_ccharp(self):
        # c_char_p must keep the Python string it points to alive,
        # so the string is recorded in _objects
        x = c_char_p()
        self.assertEqual(x._objects, None)
        x.value = "abc"
        self.assertEqual(x._objects, "abc")
        x = c_char_p("spam")
        self.assertEqual(x._objects, "spam")
class StructureTestCase(unittest.TestCase):
    """_objects bookkeeping for Structure fields (Python 2 test file)."""
    def test_cint_struct(self):
        # integer fields are stored by value; nothing to keep alive
        class X(Structure):
            _fields_ = [("a", c_int),
                        ("b", c_int)]
        x = X()
        self.assertEqual(x._objects, None)
        x.a = 42
        x.b = 99
        self.assertEqual(x._objects, None)
    def test_ccharp_struct(self):
        # pointer fields keep the referenced strings alive, keyed by
        # field index as a string
        class X(Structure):
            _fields_ = [("a", c_char_p),
                        ("b", c_char_p)]
        x = X()
        self.assertEqual(x._objects, None)
        x.a = "spam"
        x.b = "foo"
        self.assertEqual(x._objects, {"0": "spam", "1": "foo"})
    def test_struct_struct(self):
        class POINT(Structure):
            _fields_ = [("x", c_int), ("y", c_int)]
        class RECT(Structure):
            _fields_ = [("ul", POINT), ("lr", POINT)]
        # writing through nested fields copies values; nothing kept alive
        r = RECT()
        r.ul.x = 0
        r.ul.y = 1
        r.lr.x = 2
        r.lr.y = 3
        self.assertEqual(r._objects, None)
        # assigning a whole sub-structure records an (empty) entry per field
        r = RECT()
        pt = POINT(1, 2)
        r.ul = pt
        self.assertEqual(r._objects, {'0': {}})
        r.ul.x = 22
        r.ul.y = 44
        self.assertEqual(r._objects, {'0': {}})
        r.lr = POINT()
        self.assertEqual(r._objects, {'0': {}, '1': {}})
class ArrayTestCase(unittest.TestCase):
    """_objects bookkeeping for ctypes arrays (Python 2 test file)."""
    def test_cint_array(self):
        INTARR = c_int * 3
        # element writes copy values; nothing kept alive
        ia = INTARR()
        self.assertEqual(ia._objects, None)
        ia[0] = 1
        ia[1] = 2
        ia[2] = 3
        self.assertEqual(ia._objects, None)
        class X(Structure):
            _fields_ = [("x", c_int),
                        ("a", INTARR)]
        x = X()
        x.x = 1000
        x.a[0] = 42
        x.a[1] = 96
        self.assertEqual(x._objects, None)
        # assigning a whole array field records an entry for field index 1
        x.a = ia
        self.assertEqual(x._objects, {'1': {}})
class PointerTestCase(unittest.TestCase):
    """_objects bookkeeping for pointer() results (Python 2 test file)."""
    def test_p_cint(self):
        # a pointer keeps its target alive under key '1'
        i = c_int(42)
        x = pointer(i)
        self.assertEqual(x._objects, {'1': i})
class DeletePointerTestCase(unittest.TestCase):
    # NOTE: the method is named X_test (not test_*) so unittest never runs
    # it; it is a manual/diagnostic scenario using py2 print statements.
    def X_test(self):
        class X(Structure):
            _fields_ = [("p", POINTER(c_char_p))]
        x = X()
        i = c_char_p("abc def")
        from sys import getrefcount as grc
        print "2?", grc(i)
        x.p = pointer(i)
        print "3?", grc(i)
        # churn allocations while dereferencing the pointer
        for i in range(320):
            c_int(99)
            x.p[0]
        print x.p[0]
##        del x
##        print "2?", grc(i)
##        del i
        import gc
        gc.collect()
        for i in range(320):
            c_int(99)
            x.p[0]
        print x.p[0]
        print x.p.contents
##        print x._objects
        x.p[0] = "spam spam"
##        print x.p[0]
        print "+" * 42
        print x._objects
class PointerToStructure(unittest.TestCase):
    """Pointers to structures: writes through the pointer reach the target."""
    def test(self):
        class POINT(Structure):
            _fields_ = [("x", c_int), ("y", c_int)]
        class RECT(Structure):
            _fields_ = [("a", POINTER(POINT)),
                        ("b", POINTER(POINT))]
        r = RECT()
        p1 = POINT(1, 2)
        # both fields point at the same POINT instance
        r.a = pointer(p1)
        r.b = pointer(p1)
##        from pprint import pprint as pp
##        pp(p1._objects)
##        pp(r._objects)
        r.a[0].x = 42
        r.a[0].y = 99
        # to avoid leaking when tests are run several times
        # clean up the types left in the cache.
        from ctypes import _pointer_type_cache
        del _pointer_type_cache[POINT]
if __name__ == "__main__":
unittest.main()
|
MebiusHKU/flask-web
|
refs/heads/master
|
flask/lib/python2.7/site-packages/pip/_vendor/requests/packages/chardet/utf8prober.py
|
2918
|
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from . import constants
from .charsetprober import CharSetProber
from .codingstatemachine import CodingStateMachine
from .mbcssm import UTF8SMModel
ONE_CHAR_PROB = 0.5
class UTF8Prober(CharSetProber):
    # Prober that runs input bytes through a UTF-8 coding state machine and
    # estimates the probability that the input is UTF-8 (chardet, Python 2).
    def __init__(self):
        CharSetProber.__init__(self)
        self._mCodingSM = CodingStateMachine(UTF8SMModel)
        self.reset()

    def reset(self):
        CharSetProber.reset(self)
        self._mCodingSM.reset()
        # number of complete multi-byte characters seen so far
        self._mNumOfMBChar = 0

    def get_charset_name(self):
        return "utf-8"

    def feed(self, aBuf):
        # Feed a buffer to the state machine and update detection state.
        # NOTE(review): iterating aBuf yields one-byte strings on Python 2;
        # presumably next_state expects that form -- confirm against
        # CodingStateMachine.
        for c in aBuf:
            codingState = self._mCodingSM.next_state(c)
            if codingState == constants.eError:
                # illegal UTF-8 sequence: definitely not UTF-8
                self._mState = constants.eNotMe
                break
            elif codingState == constants.eItsMe:
                self._mState = constants.eFoundIt
                break
            elif codingState == constants.eStart:
                # a complete character was consumed; count it if multi-byte
                if self._mCodingSM.get_current_charlen() >= 2:
                    self._mNumOfMBChar += 1
        if self.get_state() == constants.eDetecting:
            # enough confidence -> shortcut straight to "found it"
            if self.get_confidence() > constants.SHORTCUT_THRESHOLD:
                self._mState = constants.eFoundIt
        return self.get_state()

    def get_confidence(self):
        # Each observed multi-byte character halves 'unlike' (the chance the
        # input is NOT UTF-8): confidence = 1 - 0.99 * 0.5**n for n < 6.
        # From 6 multi-byte characters on, confidence is pinned at 0.99.
        unlike = 0.99
        if self._mNumOfMBChar < 6:
            for i in range(0, self._mNumOfMBChar):
                unlike = unlike * ONE_CHAR_PROB
            return 1.0 - unlike
        else:
            return unlike
|
baberthal/CouchPotatoServer
|
refs/heads/master
|
libs/suds/xsd/__init__.py
|
206
|
# This program is free software; you can redistribute it and/or modify
# it under the terms of the (LGPL) GNU Lesser General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library Lesser General Public License for more details at
# ( http://www.gnu.org/licenses/lgpl.html ).
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
# written by: Jeff Ortel ( jortel@redhat.com )
"""
The I{schema} module provides an intelligent representation of
an XSD schema. The I{raw} model is the XML tree and the I{model}
is the denormalized, objectified and intelligent view of the schema.
Most of the I{value-add} provided by the model is centered around
transparent referenced type resolution and targeted denormalization.
"""
from logging import getLogger
from suds import *
from suds.sax import Namespace, splitPrefix
log = getLogger(__name__)
def qualify(ref, resolvers, defns=Namespace.default):
    """
    Get a reference that is I{qualified} by namespace.
    @param ref: A referenced schema type name.
    @type ref: str
    @param resolvers: A list of objects to be used to resolve types.
    @type resolvers: [L{sax.element.Element},]
    @param defns: An optional target namespace used to qualify references
        when no prefix is specified.
    @type defns: A default namespace I{tuple: (prefix,uri)} used when ref not prefixed.
    @return: A qualified reference.
    @rtype: (name, namespace-uri)
    """
    prefix, name = splitPrefix(ref)
    if prefix is None:
        # unprefixed references resolve against the default namespace
        return (name, defns[1])
    # normalize a single resolver into a sequence
    if not isinstance(resolvers, (list, tuple)):
        resolvers = (resolvers,)
    # the first resolver that knows the prefix wins
    for resolver in resolvers:
        resolved = resolver.resolvePrefix(prefix)
        if resolved[1] is not None:
            return (name, resolved[1])
    raise Exception('prefix (%s) not resolved' % prefix)
def isqref(object):
    """
    Get whether the object is a I{qualified reference}.
    @param object: An object to be tested.
    @type object: I{any}
    @rtype: boolean
    @see: L{qualify}
    """
    if not isinstance(object, tuple) or len(object) != 2:
        return False
    name, uri = object
    # both members must be strings (py2: str or unicode)
    return isinstance(name, basestring) and isinstance(uri, basestring)
class Filter:
    """
    Membership filter over a fixed collection of items.
    In inclusive mode C{x in filter} holds when x is one of the items;
    in exclusive mode (the default) it holds when x is not.
    """
    def __init__(self, inclusive=False, *items):
        self.inclusive = inclusive
        self.items = items
    def __contains__(self, x):
        found = x in self.items
        if self.inclusive:
            return found
        return not found
|
rabipanda/tensorflow
|
refs/heads/master
|
tensorflow/python/kernel_tests/dense_update_ops_test.py
|
76
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorflow.ops.tf.Assign*."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.framework import dtypes
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import state_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
class AssignOpTest(test.TestCase):
  """Tests for state_ops.assign, assign_add and assign_sub."""

  def _initAssignFetch(self, x, y, use_gpu=False):
    """Initialize a param to x, assign y; return (var value, op value)."""
    # NOTE(review): a stray super(AssignOpTest, self).setUp() call was here;
    # the test framework already invokes setUp() once per test, so re-running
    # it on every helper call was redundant and has been removed.
    with self.test_session(use_gpu=use_gpu):
      p = variables.Variable(x)
      assign = state_ops.assign(p, y)
      p.initializer.run()
      new_value = assign.eval()
      return p.eval(), new_value

  def _initAssignAddFetch(self, x, y, use_gpu=False):
    """Initialize a param to x, and compute param += y."""
    with self.test_session(use_gpu=use_gpu):
      p = variables.Variable(x)
      add = state_ops.assign_add(p, y)
      p.initializer.run()
      new_value = add.eval()
      return p.eval(), new_value

  def _initAssignSubFetch(self, x, y, use_gpu=False):
    """Initialize a param to x, and compute param -= y."""
    with self.test_session(use_gpu=use_gpu):
      p = variables.Variable(x)
      sub = state_ops.assign_sub(p, y)
      p.initializer.run()
      new_value = sub.eval()
      return p.eval(), new_value

  def _testTypes(self, vals):
    """Run assign/add/sub round-trips for each supported dtype, CPU then GPU."""
    for dtype in [np.float32, np.float64, np.int32, np.int64]:
      x = np.zeros(vals.shape).astype(dtype)
      y = vals.astype(dtype)
      var_value, op_value = self._initAssignFetch(x, y, use_gpu=False)
      self.assertAllEqual(y, var_value)
      self.assertAllEqual(y, op_value)
      var_value, op_value = self._initAssignAddFetch(x, y, use_gpu=False)
      self.assertAllEqual(x + y, var_value)
      self.assertAllEqual(x + y, op_value)
      var_value, op_value = self._initAssignSubFetch(x, y, use_gpu=False)
      self.assertAllEqual(x - y, var_value)
      self.assertAllEqual(x - y, op_value)
      if test.is_built_with_cuda() and dtype in [np.float32, np.float64]:
        var_value, op_value = self._initAssignFetch(x, y, use_gpu=True)
        self.assertAllEqual(y, var_value)
        self.assertAllEqual(y, op_value)
        var_value, op_value = self._initAssignAddFetch(x, y, use_gpu=True)
        self.assertAllEqual(x + y, var_value)
        self.assertAllEqual(x + y, op_value)
        # BUG FIX: this is the GPU pass, but assign_sub was previously run
        # with use_gpu=False, so the GPU kernel was never exercised.
        var_value, op_value = self._initAssignSubFetch(x, y, use_gpu=True)
        self.assertAllEqual(x - y, var_value)
        self.assertAllEqual(x - y, op_value)

  def testBasic(self):
    """Smoke test over a small 4x5 integer ramp."""
    self._testTypes(np.arange(0, 20).reshape([4, 5]))

  def testAssignNonStrictShapeChecking(self):
    """assign(..., validate_shape=False) may change the variable's shape."""
    with self.test_session():
      data = array_ops.fill([1024, 1024], 0)
      p = variables.Variable([1])
      a = state_ops.assign(p, data, validate_shape=False)
      a.op.run()
      self.assertAllEqual(p.eval(), data.eval())
      # Assign to yet another shape
      data2 = array_ops.fill([10, 10], 1)
      a2 = state_ops.assign(p, data2, validate_shape=False)
      a2.op.run()
      self.assertAllEqual(p.eval(), data2.eval())

  def testInitRequiredAssignAdd(self):
    """assign_add on an uninitialized variable must raise."""
    with self.test_session():
      p = variables.Variable(array_ops.fill([1024, 1024], 1), dtypes.int32)
      a = state_ops.assign_add(p, array_ops.fill([1024, 1024], 0))
      with self.assertRaisesOpError("use uninitialized"):
        a.op.run()

  def testInitRequiredAssignSub(self):
    """assign_sub on an uninitialized variable must raise."""
    with self.test_session():
      p = variables.Variable(array_ops.fill([1024, 1024], 1), dtypes.int32)
      a = state_ops.assign_sub(p, array_ops.fill([1024, 1024], 0))
      with self.assertRaisesOpError("use uninitialized"):
        a.op.run()
if __name__ == "__main__":
  # Hand control to TensorFlow's test runner, which executes AssignOpTest.
  test.main()
|
krisrogers/textisbeautiful
|
refs/heads/master
|
tib/wsgi.py
|
1
|
"""
WSGI config for tib project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
# Default the settings module so WSGI servers need no extra environment;
# an externally supplied DJANGO_SETTINGS_MODULE still takes precedence.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "tib.settings")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
|
matpalm/drivebot
|
refs/heads/master
|
src/reset_robot_pos.py
|
1
|
# various utils for dealing with resetting robots to a random position
import rospy
from geometry_msgs.msg import Pose2D
from stdr_msgs.srv import MoveRobot
import random
import math
class BotPosition(object):
    """Wraps the STDR 'replace' service to teleport a robot around the track."""

    def __init__(self, robot_id):
        service_name = "/robot%s/replace" % robot_id
        rospy.wait_for_service(service_name)
        # TODO: handle rospy.service.ServiceException which can be thrown here
        self.move = rospy.ServiceProxy(service_name, MoveRobot)
        self.starting_random_positions = None
        self.straight_section_poses = None

    def reset_robot_random_pose(self):
        """Teleport the robot to a random track cell with a random heading."""
        if self.starting_random_positions is None:
            positions = []
            positions += [(x, 9) for x in range(1, 10)]   # top straight
            positions += [(9, y) for y in range(1, 9)]    # rhs straight
            # lhs zig / zag, top to bottom
            positions += [(1, y) for y in range(5, 9)]
            positions += [(x, 5) for x in range(2, 5)]
            positions += [(5, y) for y in range(1, 5)]
            positions += [(x, 1) for x in range(6, 9)]
            # sanity: every cell listed exactly once
            assert len(positions) == len(set(positions)),\
                ("%s" % positions)
            self.starting_random_positions = positions
        # pick a random starting pose
        chosen_x, chosen_y = random.choice(self.starting_random_positions)
        pose = Pose2D()
        pose.x = chosen_x
        pose.y = chosen_y
        pose.theta = random.random() * 2 * math.pi
        self.move(pose)

    def reset_robot_on_straight_section(self):
        """Teleport the robot onto one of the known straight sections."""
        if self.straight_section_poses is None:
            self.straight_section_poses = [(3,9,0), (7,9,0), (9,7,4.71),
                                           (9,3,4.71), (7,1,3.14), (5,3,1.57),
                                           (3,5,3.14), (1,7,1.57)]
        chosen_x, chosen_y, chosen_theta = random.choice(self.straight_section_poses)
        pose = Pose2D()
        pose.x = chosen_x
        pose.y = chosen_y
        pose.theta = chosen_theta
        self.move(pose)
|
julien78910/CouchPotatoServer
|
refs/heads/develop
|
libs/html5lib/treewalkers/pulldom.py
|
1729
|
from __future__ import absolute_import, division, unicode_literals
from xml.dom.pulldom import START_ELEMENT, END_ELEMENT, \
COMMENT, IGNORABLE_WHITESPACE, CHARACTERS
from . import _base
from ..constants import voidElements
class TreeWalker(_base.TreeWalker):
    """Tree walker over an xml.dom.pulldom event stream.

    Events are (type, node) pairs. Tokens are emitted one event late so the
    walker can peek at the *next* event, which is needed to decide whether a
    void (empty) element wrongly has children.
    """
    def __iter__(self):
        # Node whose subtree is currently being skipped: after emitting an
        # EmptyTag we ignore events until its matching END_ELEMENT arrives.
        ignore_until = None
        previous = None
        for event in self.tree:
            if previous is not None and \
                    (ignore_until is None or previous[1] is ignore_until):
                if previous[1] is ignore_until:
                    # Reached the END_ELEMENT of the skipped void element.
                    ignore_until = None
                for token in self.tokens(previous, event):
                    yield token
                    if token["type"] == "EmptyTag":
                        ignore_until = previous[1]
            previous = event
        if ignore_until is None or previous[1] is ignore_until:
            # Flush the final buffered event; next=None marks end of stream.
            for token in self.tokens(previous, None):
                yield token
        elif ignore_until is not None:
            raise ValueError("Illformed DOM event stream: void element without END_ELEMENT")

    def tokens(self, event, next):
        """Translate a single pulldom *event* into html5lib tokens.

        *next* is the following event (or None at end of stream); it is used
        to detect whether a void element has content.
        """
        type, node = event
        if type == START_ELEMENT:
            name = node.nodeName
            namespace = node.namespaceURI
            attrs = {}
            for attr in list(node.attributes.keys()):
                attr = node.getAttributeNode(attr)
                attrs[(attr.namespaceURI, attr.localName)] = attr.value
            if name in voidElements:
                # hasChildren is True when the next event is not this node's
                # own END_ELEMENT (i.e. the void element has content).
                for token in self.emptyTag(namespace,
                                           name,
                                           attrs,
                                           not next or next[1] is not node):
                    yield token
            else:
                yield self.startTag(namespace, name, attrs)
        elif type == END_ELEMENT:
            name = node.nodeName
            namespace = node.namespaceURI
            if name not in voidElements:
                yield self.endTag(namespace, name)
        elif type == COMMENT:
            yield self.comment(node.nodeValue)
        elif type in (IGNORABLE_WHITESPACE, CHARACTERS):
            for token in self.text(node.nodeValue):
                yield token
        else:
            yield self.unknown(type)
|
Alshain-Oy/Cloudsnake-Application-Server
|
refs/heads/master
|
clients/get_info.py
|
1
|
#!/usr/bin/env python
# Cloudsnake Application server
# Licensed under Apache License, see license.txt
# Author: Markus Gronholm <markus@alshain.fi> Alshain Oy
import libCloudSnakeClient as SnakeClient
import pprint
# Connect to the local Cloudsnake application server's 'maintenance' realm.
client = SnakeClient.CloudSnakeClient( 'http://localhost:8500', 'maintenance' )
# Bind the server-side method named 'report' as a local callable.
get_report = client.get_method( 'report' )
#print test_001( {'data':[]})
#print get_output()
#client.print_output()
# Invoke the report method (presumably a remote call to the server —
# confirm against libCloudSnakeClient) and pretty-print the result.
pprint.pprint( get_report() )
|
chrisseto/waterbutler
|
refs/heads/develop
|
waterbutler/providers/github/settings.py
|
6
|
try:
    from waterbutler import settings
except ImportError:
    settings = {}

# Provider options, possibly overridden by the hosting configuration.
config = settings.get('GITHUB_PROVIDER_CONFIG', {})


def _get(key, default):
    # Look up a provider option, falling back to the built-in default.
    return config.get(key, default)


BASE_URL = _get('BASE_URL', 'https://api.github.com/')
VIEW_URL = _get('VIEW_URL', 'https://github.com/')
MOVE_MESSAGE = _get('MOVE_MESSAGE', 'Moved on behalf of WaterButler')
COPY_MESSAGE = _get('COPY_MESSAGE', 'Copied on behalf of WaterButler')
DELETE_FILE_MESSAGE = _get('DELETE_FILE_MESSAGE', 'File deleted on behalf of WaterButler')
UPDATE_FILE_MESSAGE = _get('UPDATE_FILE_MESSAGE', 'File updated on behalf of WaterButler')
UPLOAD_FILE_MESSAGE = _get('UPLOAD_FILE_MESSAGE', 'File uploaded on behalf of WaterButler')
DELETE_FOLDER_MESSAGE = _get('DELETE_FOLDER_MESSAGE', 'Folder deleted on behalf of WaterButler')
|
bholmgren/napalm-eos
|
refs/heads/master
|
setup.py
|
2
|
"""setup.py file."""
from setuptools import setup, find_packages

__author__ = 'David Barroso <dbarrosop@dravetech.com>'


def _read_requirements(filename='requirements.txt'):
    """Return requirement specifiers read from *filename*.

    Replaces the former use of pip's private ``pip.req.parse_requirements``,
    which was removed in pip 10 and broke installation. Blank lines and
    ``#`` comment lines are skipped; everything else is passed through
    verbatim to setuptools.
    """
    with open(filename) as reqs_file:
        stripped = (line.strip() for line in reqs_file)
        return [line for line in stripped if line and not line.startswith('#')]


reqs = _read_requirements()

setup(
    name="napalm-eos",
    version="0.6.1",
    packages=find_packages(),
    author="David Barroso, Mircea Ulinic",
    author_email="dbarrosop@dravetech.com, mircea@cloudflare.com",
    description="Network Automation and Programmability Abstraction Layer with Multivendor support",
    classifiers=[
        'Topic :: Utilities',
        'Programming Language :: Python',
        'Operating System :: POSIX :: Linux',
        'Operating System :: MacOS',
    ],
    url="https://github.com/napalm-automation/napalm-eos",
    include_package_data=True,
    install_requires=reqs,
)
|
r8o8s1e0/three.js
|
refs/heads/master
|
utils/exporters/blender/addons/io_three/exporter/api/material.py
|
55
|
from bpy import data, types
from .. import constants, logger
from .constants import MULTIPLY, WIRE, IMAGE
def _material(func):
"""
:param func:
"""
def inner(name, *args, **kwargs):
"""
:param name:
:param *args:
:param **kwargs:
"""
if isinstance(name, types.Material):
material = name
else:
material = data.materials[name]
return func(material, *args, **kwargs)
return inner
@_material
def ambient_color(material):
    """Ambient colour: the diffuse colour scaled by the ambient factor.

    :param material:
    :return: rgb value
    :rtype: tuple
    """
    logger.debug("material.ambient_color(%s)", material)
    factor = material.ambient
    return tuple(factor * channel for channel in diffuse_color(material))
@_material
def blending(material):
    """THREE blending type, defaulting to NORMAL_BLENDING when unset.

    :param material:
    :return: THREE_blending_type value
    """
    logger.debug("material.blending(%s)", material)
    try:
        return material.THREE_blending_type
    except AttributeError:
        logger.debug("No THREE_blending_type attribute found")
        return constants.NORMAL_BLENDING
@_material
def bump_map(material):
    """First texture slot used as a bump map (normal-mapped but not a
    true normal map), if any.

    :param material:
    :return: texture node for bump
    """
    logger.debug("material.bump_map(%s)", material)
    for slot in _valid_textures(material):
        if slot.use_map_normal and not slot.texture.use_normal_map:
            return slot.texture
@_material
def bump_scale(material):
    """Bump scale; alias for L{normal_scale}.

    :param material:
    :rtype: float
    """
    return normal_scale(material)
@_material
def depth_test(material):
    """THREE_depth_test flag, defaulting to True when unset.

    :param material:
    :rtype: bool
    """
    logger.debug("material.depth_test(%s)", material)
    try:
        return material.THREE_depth_test
    except AttributeError:
        logger.debug("No THREE_depth_test attribute found")
        return True
@_material
def depth_write(material):
    """THREE_depth_write flag, defaulting to True when unset.

    :param material:
    :rtype: bool
    """
    logger.debug("material.depth_write(%s)", material)
    try:
        return material.THREE_depth_write
    except AttributeError:
        logger.debug("No THREE_depth_write attribute found")
        return True
@_material
def double_sided(material):
    """THREE_double_sided flag, defaulting to False when unset.

    :param material:
    :rtype: bool
    """
    logger.debug("material.double_sided(%s)", material)
    try:
        return material.THREE_double_sided
    except AttributeError:
        logger.debug("No THREE_double_sided attribute found")
        return False
@_material
def diffuse_color(material):
    """Diffuse colour scaled by the diffuse intensity.

    :param material:
    :return: rgb value
    :rtype: tuple
    """
    logger.debug("material.diffuse_color(%s)", material)
    intensity = material.diffuse_intensity
    color = material.diffuse_color
    return (intensity * color[0],
            intensity * color[1],
            intensity * color[2])
@_material
def diffuse_map(material):
    """First non-multiply diffuse-colour texture slot, if any.

    :param material:
    :return: texture node for map
    """
    logger.debug("material.diffuse_map(%s)", material)
    for slot in _valid_textures(material):
        if slot.use_map_color_diffuse and slot.blend_type != MULTIPLY:
            return slot.texture
@_material
def emissive_color(material):
    """Emissive colour: the diffuse colour scaled by the emit factor.

    :param material:
    :return: rgb value
    :rtype: tuple
    """
    logger.debug("material.emissive_color(%s)", material)
    factor = material.emit
    return tuple(factor * channel for channel in diffuse_color(material))
@_material
def light_map(material):
    """First multiply-blended diffuse texture slot (used as a light map).

    Slots are considered even when their 'use' flag is off.

    :param material:
    :return: texture node for light maps
    """
    logger.debug("material.light_map(%s)", material)
    for slot in _valid_textures(material, strict_use=False):
        if slot.use_map_color_diffuse and slot.blend_type == MULTIPLY:
            return slot.texture
@_material
def normal_scale(material):
    """Normal factor of the first normal-mapped texture slot, if any.

    :param material:
    :rtype: float
    """
    logger.debug("material.normal_scale(%s)", material)
    for slot in _valid_textures(material):
        if slot.use_map_normal:
            return slot.normal_factor
@_material
def normal_map(material):
    """First texture slot used as a true normal map, if any.

    :param material:
    :return: texture node for normals
    """
    logger.debug("material.normal_map(%s)", material)
    for slot in _valid_textures(material):
        if slot.use_map_normal and slot.texture.use_normal_map:
            return slot.texture
@_material
def opacity(material):
    """Material alpha, rounded to two decimals.

    :param material:
    :rtype: float
    """
    logger.debug("material.opacity(%s)", material)
    alpha = material.alpha
    return round(alpha, 2)
@_material
def shading(material):
    """Shading type: PHONG when specular intensity > 0, else LAMBERT.

    :param material:
    :return: shading type (phong or lambert)
    """
    logger.debug("material.shading(%s)", material)
    if material.specular_intensity > 0.0:
        return constants.PHONG
    return constants.LAMBERT
@_material
def specular_coef(material):
    """Specular hardness coefficient.

    :param material:
    :rtype: float
    """
    logger.debug("material.specular_coef(%s)", material)
    hardness = material.specular_hardness
    return hardness
@_material
def specular_color(material):
    """Specular colour scaled by the specular intensity.

    :param material:
    :return: rgb value
    :rtype: tuple
    """
    logger.debug("material.specular_color(%s)", material)
    intensity = material.specular_intensity
    color = material.specular_color
    return (intensity * color[0],
            intensity * color[1],
            intensity * color[2])
@_material
def specular_map(material):
    """First texture slot used as a specular map, if any.

    :param material:
    :return: texture node for specular
    """
    logger.debug("material.specular_map(%s)", material)
    for slot in _valid_textures(material):
        if slot.use_map_specular:
            return slot.texture
@_material
def transparent(material):
    """Whether transparency is enabled on the material.

    :param material:
    :rtype: bool
    """
    logger.debug("material.transparent(%s)", material)
    uses_transparency = material.use_transparency
    return uses_transparency
@_material
def type(material):
    """THREE-compatible shader type for the material.

    Non-Lambert diffuse shaders map to BASIC; Lambert maps to PHONG when
    there is any specular intensity, LAMBERT otherwise.

    :param material:
    :return: THREE compatible shader type
    """
    logger.debug("material.type(%s)", material)
    if material.diffuse_shader != 'LAMBERT':
        return constants.BASIC
    if material.specular_intensity > 0:
        return constants.PHONG
    return constants.LAMBERT
@_material
def use_vertex_colors(material):
    """Whether vertex colour painting is enabled.

    :param material:
    :rtype: bool
    """
    logger.debug("material.use_vertex_colors(%s)", material)
    enabled = material.use_vertex_color_paint
    return enabled
def used_materials():
    """Yield the names of all materials that have at least one user.

    :rtype: generator
    """
    logger.debug("material.used_materials()")
    for mat in data.materials:
        if mat.users <= 0:
            continue
        yield mat.name
@_material
def visible(material):
    """THREE_visible flag, defaulting to True when unset.

    :param material:
    :rtype: bool
    """
    logger.debug("material.visible(%s)", material)
    try:
        return material.THREE_visible
    except AttributeError:
        logger.debug("No THREE_visible attribute found")
        return True
@_material
def wireframe(material):
    """Whether the material is a wireframe material.

    :param material:
    :rtype: bool
    """
    logger.debug("material.wireframe(%s)", material)
    is_wire = material.type == WIRE
    return is_wire
def _valid_textures(material, strict_use=True):
    """Yield the image-texture slots of *material* usable for export.

    :param material:
    :param strict_use: when True, skip slots whose 'use' flag is off
    :rtype: generator
    """
    for slot in material.texture_slots:
        if not slot:
            continue
        if strict_use and not slot.use:
            continue
        if not slot.texture or slot.texture.type != IMAGE:
            logger.warning("Unable to export non-image texture %s", slot)
            continue
        logger.debug("Valid texture found %s", slot)
        yield slot
|
dhermes/foreign-fortran
|
refs/heads/master
|
cython/check_cython.py
|
1
|
from __future__ import print_function
import numpy as np
from check_ctypes import MAKE_UDF_TEMPLATE
from check_ctypes import SEPARATOR
import example
def main():
    """Exercise every binding exposed by the compiled ``example`` module and
    print each result between SEPARATOR lines."""
    print(SEPARATOR)
    # foo(): scalar in, scalar out
    bar = 1.0
    baz = 16.0
    quux = example.foo(bar, baz)
    print("quux = foo({}, {}) = {}".format(bar, baz, quux))
    print(SEPARATOR)
    # make_udf(): build a user-defined Fortran type from two floats and an int
    buzz = 1.25
    broken = 5.0
    how_many = 1337
    quuz = example.make_udf(buzz, broken, how_many)
    msg = MAKE_UDF_TEMPLATE.format(buzz, broken, how_many, quuz)
    print(msg, end="")
    print(SEPARATOR)
    # foo_array(): operate on a Fortran-ordered 2D array
    val = np.asfortranarray([[3.0, 4.5], [1.0, 1.25], [9.0, 0.0], [-1.0, 4.0]])
    two_val = example.foo_array(val)
    print("val =\n{}".format(val))
    print("two_val = foo_array(val)")
    print("two_val =\n{}".format(two_val))
    print(SEPARATOR)
    # udf_ptr(): obtain a user-defined type via a pointer round-trip
    made_it = example.udf_ptr()
    print("made_it = udf_ptr()\n = {}".format(made_it))
    print(SEPARATOR)
    # just_print(): output produced on the Fortran side
    print("just_print()")
    example.just_print()
    print(SEPARATOR)
    # get_include(): header directory bundled with the extension
    include_dir = example.get_include()
    msg = "example.get_include() =\n{}".format(include_dir)
    print(msg)
    print(SEPARATOR)
    # "Turn the knob" module constant: read, mutate, re-read
    knob = example.view_knob()
    print("view_knob() = {}".format(knob))
    new_value = 42
    print("turn_knob({})".format(new_value))
    example.turn_knob(new_value)
    knob = example.view_knob()
    print("view_knob() = {}".format(knob))
# Run the demo only when executed directly (not on import).
if __name__ == "__main__":
    main()
|
eesatfan/vuplus-enigma2
|
refs/heads/vuplus_experimental
|
lib/python/Plugins/SystemPlugins/FactoryTest/plugin.py
|
2
|
from Screens.Screen import Screen
from Screens.MessageBox import MessageBox
from Plugins.Plugin import PluginDescriptor
from Components.MenuList import MenuList
from Tools.Directories import fileExists
from Components.ServiceList import ServiceList
from Components.ActionMap import ActionMap,NumberActionMap
from Components.config import config
from os import system,access,F_OK,R_OK,W_OK
from Components.Label import Label
from Components.AVSwitch import AVSwitch
from time import sleep
from Components.Console import Console
from enigma import eTimer
from Components.HTMLComponent import HTMLComponent
from Components.GUIComponent import GUIComponent
from enigma import eListboxPythonStringContent, eListbox, gFont, eServiceCenter, eDVBResourceManager
from enigma import eServiceReference
from sctest import eSctest
from enigma import eDVBDB
from Components.NimManager import nimmanager
from enigma import eDVBCI_UI,eDVBCIInterfaces
from Tools.Directories import resolveFilename, SCOPE_SYSETC
from Components.Sources.StaticText import StaticText
class TestResultList(MenuList):
    # MenuList variant for the read-only test-result column: selection is
    # disabled so the highlight stays on the test list next to it.
    def postWidgetCreate(self, instance):
        # Display-only list: turn the selection cursor off.
        self.instance.setSelectionEnable(0)
        instance.setContent(self.l)
        instance.selectionChanged.get().append(self.selectionChanged)
        if self.enableWrapAround:
            self.instance.setWrapAround(True)
    def updateList(self, list):
        # Replace the displayed result strings wholesale.
        self.list = list
        self.l.setList(self.list)
class TuneMessageBox(MessageBox):
    # MessageBox variant used during tuner tests: its skin additionally
    # renders live SNR (dB) and AGC readouts from the active frontend.
    # NOTE: the skin string (including the embedded applet code) is consumed
    # verbatim by the skin engine — do not reformat it.
    skin = """
		<screen position="center,center" size="600,10" title="Message">
		<widget name="text" position="65,8" size="420,0" font="Regular;22" />
		<widget name="ErrorPixmap" pixmap="Vu_HD/icons/input_error.png" position="5,5" size="53,53" alphatest="blend" />
		<widget name="QuestionPixmap" pixmap="Vu_HD/icons/input_question.png" position="5,5" size="53,53" alphatest="blend" />
		<widget name="InfoPixmap" pixmap="Vu_HD/icons/input_info.png" position="5,5" size="53,53" alphatest="blend" />
		<widget name="list" position="100,100" size="380,375" transparent="1" backgroundColor="darkgrey" />
		<!-- Signal Quality -->
		<eLabel text="SNR : " position="60,130" size="60,25" font="Regular;25" transparent="1" />
		<widget source="session.FrontendStatus" render="Label" position="120,130" size="60,25" font="Regular;25" transparent="1">
			<convert type="FrontendInfo">SNRdB</convert>
		</widget>
		<!-- AGC -->
		<eLabel text="AGC : " position="200,130" size="60,25" font="Regular;25" transparent="1" noWrap="1" />
		<widget source="session.FrontendStatus" render="Label" position="260,130" size="60,25" font="Regular;25" transparent="1" noWrap="1">
			<convert type="FrontendInfo">AGC</convert>
		</widget>
		<applet type="onLayoutFinish">
			# this should be factored out into some helper code, but currently demonstrates applets.
			from enigma import eSize, ePoint
			orgwidth = self.instance.size().width()
			orgpos = self.instance.position()
			textsize = self["text"].getSize()
			# y size still must be fixed in font stuff...
			textsize = (textsize[0] + 50, textsize[1] + 50)
			offset = 0
			if self.type == self.TYPE_YESNO:
				offset = 60
			wsizex = textsize[0] + 60
			wsizey = textsize[1] + offset
			if (280 > wsizex):
				wsizex = 280
			wsizex = wsizex + 30
			wsizey = wsizey + 30
			wsize = (wsizex, wsizey)
			# resize
			self.instance.resize(eSize(*wsize))
			# resize label
			self["text"].instance.resize(eSize(*textsize))
			# move list
			listsize = (wsizex, 50)
			self["list"].instance.move(ePoint(0, textsize[1]))
			self["list"].instance.resize(eSize(*listsize))
			# center window
			newwidth = wsize[0]
			self.instance.move(ePoint(orgpos.x() + (orgwidth - newwidth)/2, orgpos.y()))
		</applet>
	</screen>"""
    def __init__(self, session, text, type = MessageBox.TYPE_YESNO):
        # Behaves exactly like MessageBox; only the skin differs.
        MessageBox.__init__(self, session, text, type)
class FactoryTestSummary(Screen):
    # Front-panel LCD summary (132x64): mirrors the parent screen's title.
    skin = """
	<screen name="FactoryTestSummary" position="0,0" size="132,64" id="1">
		<widget source="parent.Title" render="Label" position="6,0" size="132,64" font="Regular;18" halign="center" valign="center"/>
	</screen>"""
class FactoryTestSummary_VFD(Screen):
    # VFD-display variant (256x64) of the factory-test summary screen.
    skin = """
	<screen name="FactoryTestSummary" position="0,0" size="256,64" id="1">
		<widget source="parent.Title" render="Label" position="0,0" size="256,64" font="VFD;24" halign="center" valign="center"/>
	</screen>"""
class FactoryTest(Screen):
skin = """
<screen name="FactoryTest" position="300,100" size="660,550" title="Test Menu" >
<widget name="testlist" position="10,0" size="440,455" itemHeight="35" />
<widget name="resultlist" position="470,0" size="60,455" itemHeight="35" />
<widget name="testdate" position="20,470" size="250,35" font="Regular;30" />
<widget name="testversion" position="20,505" size="250,35" font="Regular;30" />
<widget name="mactext" position="320,470" size="340,35" font="Regular;30" />
</screen>"""
def createSummary(self):
if self.model == 4:
return FactoryTestSummary_VFD
else:
return FactoryTestSummary
    def __init__(self, session):
        """Build the factory-test menu, configure tuners for the detected
        model, install the test channel list and start the helper timers."""
        # Key bindings: digits jump straight to a test entry, OK runs the
        # selected test, red powers down, blue starts the second aging mode.
        self["actions"] = NumberActionMap(["OkCancelActions","WizardActions","NumberActions","ColorActions",],
        {
            "left": self.nothing,
            "right":self.nothing,
            "ok": self.TestAction,
            "testexit": self.keyCancel,
            "agingstart": self.Agingmode,
            "up": self.keyup,
            "down": self.keydown,
            "0": self.numberaction,
            "1": self.numberaction,
            "2": self.numberaction,
            "3": self.numberaction,
            "4": self.numberaction,
            "5": self.numberaction,
            "6": self.numberaction,
            "7": self.numberaction,
            "8": self.numberaction,
            "9": self.numberaction,
            "red": self.shutdownaction,
            "blue": self.Agingmode2,
        }, -2)
        Screen.__init__(self, session)
        TESTPROGRAM_DATE = self.getImageVersion() +" (v1.20)"
        TESTPROGRAM_VERSION = "Version 01.20"
        # Hardware model index; 0 is the default, filled in by getModelInfo().
        self.model = 0
        self.getModelInfo()
        self["testdate"]=Label((TESTPROGRAM_DATE))
        self["testversion"]=Label(("Loading version..."))
        self["mactext"]=Label(("Loading mac address..."))
        # Models 0/1: fixed DVB-S setup with hard-coded DiSEqC satellite pairs.
        if self.model == 0 or self.model == 1:
            nimConfig = nimmanager.getNimConfig(0)
            nimConfig.configMode.slot_id=0
            nimConfig.configMode.value= "simple"
            nimConfig.diseqcMode.value="diseqc_a_b"
            nimConfig.diseqcA.value="160"
            nimConfig.diseqcB.value="100"
        if self.model == 0:
            nimConfig = nimmanager.getNimConfig(1)
            nimConfig.configMode.slot_id=1
            nimConfig.configMode.value= "simple"
            nimConfig.diseqcMode.value="diseqc_a_b"
            nimConfig.diseqcA.value="130"
            nimConfig.diseqcB.value="192"
        if self.model == 2:
            pass
        # Models 3/4: probe /proc/bus/nim_sockets and assign two satellites
        # (consumed pairwise from sat_list) to every DVB-S tuner found.
        if self.model == 3 or self.model == 4:
            self.NimType = {}
            sat_list = ["160","100","130","192","620","642","685","720"]
            # ' sat ' : namespace , # satname
            # '160' : '0xA00000', # Eutelsat W2
            # '100' : '0x64af79', # Eutelsat
            # '130' : '0x820000', # Hotbird
            # '192' : '0xC00000', # Astra
            # '620' : '0x26c0000', # Intelsat 902
            # '642' : '0x282AF79' # Intelsat 906
            # '685' : '02ad0000' # Panamsat 7,10 (68.5E)
            # '720' : '02d0af79' # Panamsat 4 (72.0E)
            try:
                nimfile = open("/proc/bus/nim_sockets")
            except IOError:
                nimfile = None
            if nimfile is None:
                self.session.openWithCallback(self.close, MessageBox, _("File not Found!\n/proc/bus/nim_sockets"), MessageBox.TYPE_ERROR)
            # Parse the socket list: "NIM Socket n" opens an entry, "Type:"
            # fills it in, "empty" removes it again.
            for line in nimfile.readlines():
                print line
                if line == "":
                    break
                if line.strip().startswith("NIM Socket"):
                    parts = line.strip().split(" ")
                    current_slot = int(parts[2][:-1])
                    self.NimType[current_slot]={}
                    self.NimType[current_slot]["slot"] = current_slot
                elif line.strip().startswith("Type:"):
                    print str(line.strip())
                    self.NimType[current_slot]["type"] = str(line.strip()[6:])
                    if self.NimType[current_slot]["type"].startswith("DVB-S"):
                        self.NimType[current_slot]["sat1"] = sat_list.pop(0)
                        self.NimType[current_slot]["sat2"] = sat_list.pop(0)
                    else:
                        self.NimType[current_slot]["sat1"] = None
                        self.NimType[current_slot]["sat2"] = None
                elif line.strip().startswith("empty"):
                    self.NimType.pop(current_slot)
            nimfile.close()
            if True:
                for (key, val) in self.NimType.items():
                    print key
                    print val
                    if val["type"].startswith("DVB-S"):
                        print "nimConfig (dvb-s): ",key
                        nimConfig = nimmanager.getNimConfig(key)
                        nimConfig.configMode.slot_id=key
                        nimConfig.configMode.value= "simple"
                        nimConfig.diseqcMode.value="diseqc_a_b"
                        nimConfig.diseqcA.value = val["sat1"]
                        nimConfig.diseqcB.value = val["sat2"]
                    else :
                        nimConfig = nimmanager.getNimConfig(key)
                        print "configMode check : ",nimConfig.configMode.value
        nimmanager.sec.update()
        # Install the dedicated factory-test channel database and reload it.
        system("cp /usr/lib/enigma2/python/Plugins/SystemPlugins/FactoryTest/testdb /etc/enigma2/lamedb")
        db = eDVBDB.getInstance()
        db.reloadServicelist()
        self.createConfig()
        # One ".." placeholder result row per menu entry; last row stays blank.
        self.rlist = []
        for x in range(self.menulength-1):
            self.rlist.append((".."))
        self.rlist.append((" "))
        self["resultlist"] = TestResultList(self.rlist)
        self.avswitch = AVSwitch()
        self.scTest= eSctest()
        self.testing = 0
        self.servicelist = ServiceList()
        # Remember and stop the live service so tests own the frontends.
        self.oldref = session.nav.getCurrentlyPlayingServiceReference()
        print "oldref",self.oldref
        session.nav.stopService() # try to disable foreground service
        # Timers driving the asynchronous test steps.
        self.tunemsgtimer = eTimer()
        self.tunemsgtimer.callback.append(self.tunemsg)
        self.camstep = 1
        self.camtimer = eTimer()
        self.camtimer.callback.append(self.cam_state)
        self.mactry = 1
        self.getmacaddr()
        self.getversion()
        self.tunerlock = 0
        self.tuningtimer = eTimer()
        self.tuningtimer.callback.append(self.updateStatus)
        self.satatry = 8
        self.satatimer = eTimer()
        self.satatimer.callback.append(self.sataCheck)
        self.usbtimer = eTimer()
        self.usbtimer.callback.append(self.usbCheck)
        self.setSourceVar()
        self.FanSpeedUp(255)
def FanSpeedUp(self,value):
if self.model in (3,4): # uno or ultimo
if value <0:
value = 0
elif value >255:
value = 255
print "[FactoryTest, FanSpeedUp] setPWM to : %d"%value
f = open("/proc/stb/fp/fan_pwm", "w")
f.write("%x" % value)
f.close()
	def createConfig(self):
		"""Build the model-specific factory-test menu.

		Assigns one ``...Index`` attribute per available test (indices left
		at -1 never match in TestAction), collects (label, index) rows in
		``tlist`` and publishes them as the "testlist" widget.  For
		uno/ultimo (model 3/4) the tuner rows are generated dynamically
		from the detected self.NimType table.
		"""
		tlist = []
		self.satetestIndex = -1
		self.scarttestIndex = -1
		if self.model == 0:		# duo: fixed 13-entry menu
			self.satetestIndex=0
			tlist.append((" 0. Sata & extend hdd test",self.satetestIndex))
			self.usbtestIndex=1
			tlist.append((" 1. USB test",self.usbtestIndex))
			self.fronttestIndex=2
			tlist.append((" 2. Front test",self.fronttestIndex))
			self.smarttestIndex=3
			tlist.append((" 3. Smartcard test",self.smarttestIndex))
			self.tuner1_1testIndex=4
			tlist.append((" 4. T1/H/22K x /4:3/CVBS",self.tuner1_1testIndex))
			self.tuner1_2testIndex=5
			tlist.append((" 5. T1/V/22k o/16:9/RGB",self.tuner1_2testIndex))
			self.tuner2_1testIndex=6
			tlist.append((" 6. T2/H/22k x/4:3/YC",self.tuner2_1testIndex))
			self.tuner2_2testIndex=7
			tlist.append((" 7. T2/V/22k o/16:9/CVBS/CAM",self.tuner2_2testIndex))
			self.scarttestIndex=8
			tlist.append((" 8. VCR Scart loop",self.scarttestIndex))
			self.rs232testIndex=9
			tlist.append((" 9. RS232 test",self.rs232testIndex))
			self.ethernettestIndex=10
			tlist.append(("10. Ethernet & mac test",self.ethernettestIndex))
			self.fdefaultIndex=11
			tlist.append(("11. Factory default",self.fdefaultIndex))
			self.shutdownIndex=12
			tlist.append(("12. Shutdown(Deep Standby)",self.shutdownIndex))
			self.tuner_test_first_index = 4
			self.tuner_test_last_index = 7
		elif self.model == 1:		# solo: single tuner, no SATA/scart rows
			self.usbtestIndex=0
			tlist.append((" 0. USB test",self.usbtestIndex))
			self.fronttestIndex=1
			tlist.append((" 1. Front test",self.fronttestIndex))
			self.smarttestIndex=2
			tlist.append((" 2. Smartcard test",self.smarttestIndex))
			self.tuner1_1testIndex=3
			tlist.append((" 3. T1/H/22K x/4:3/CVBS",self.tuner1_1testIndex))
			# one physical tuner: both "second pass" aliases share index 4
			self.tuner2_2testIndex = self.tuner1_2testIndex=4
			tlist.append((" 4. T1/V/22k o/16:9/RGB/CAM",self.tuner1_2testIndex))
			self.rs232testIndex=5
			tlist.append((" 5. RS232 test",self.rs232testIndex))
			self.ethernettestIndex=6
			tlist.append((" 6. Ethernet & mac test",self.ethernettestIndex))
			self.fdefaultIndex=7
			tlist.append((" 7. Factory default",self.fdefaultIndex))
			self.shutdownIndex=8
			tlist.append((" 8. Shutdown(Deep Standby)",self.shutdownIndex))
			self.tuner_test_first_index = 3
			self.tuner_test_last_index = 4
		elif self.model == 2:		# combo: DVB-S tuner 1 + DVB-C tuner 2
			self.satetestIndex=0
			tlist.append((" 0. Sata & extend hdd test",self.satetestIndex))
			self.usbtestIndex=1
			tlist.append((" 1. USB test",self.usbtestIndex))
			self.fronttestIndex=2
			tlist.append((" 2. Front test",self.fronttestIndex))
			self.smarttestIndex=3
			tlist.append((" 3. Smartcard test",self.smarttestIndex))
			self.tuner1_1testIndex=4
			tlist.append((" 4. T1 H 22K x 4:3 CVBS",self.tuner1_1testIndex))
			self.tuner1_2testIndex=5
			tlist.append((" 5. T1 V 22k o 16:9 RGB",self.tuner1_2testIndex))
			self.tuner2_1testIndex = -1
			self.tuner2_2testIndex=6
			tlist.append((" 6. T2 DVB-C 4:3 YC CAM",self.tuner2_2testIndex))
			self.rs232testIndex=7
			tlist.append((" 7. RS232 test",self.rs232testIndex))
			self.ethernettestIndex=8
			tlist.append(("8. Ethernet & mac test",self.ethernettestIndex))
			self.fdefaultIndex=9
			tlist.append(("9. Factory default",self.fdefaultIndex))
			self.shutdownIndex=10
			tlist.append(("10. Shutdown",self.shutdownIndex))
			self.tuner_test_first_index = 4
			self.tuner_test_last_index = 6
		elif self.model == 3 or self.model == 4:	# uno / ultimo: dynamic tuner rows
			self.satetestIndex=0
			tlist.append((" 0. Sata & extend hdd test",self.satetestIndex))
			self.usbtestIndex=1
			tlist.append((" 1. USB test",self.usbtestIndex))
			self.fronttestIndex=2
			tlist.append((" 2. Front test",self.fronttestIndex))
			self.smarttestIndex=3
			tlist.append((" 3. Smartcard test",self.smarttestIndex))
			if self.model == 3:
				# uno: tuner entries appear inline in the main menu from 4 on
				self.tuner_test_first_index = current_index = 4
			elif self.model == 4:
				# ultimo: tuner entries go into a submenu, numbered from 0
				self.tuner_test_first_index = 4
				current_index = 0
			# ratio alternates per entry; color formats cycle through this list
			AspectRatio=["4:3", "16:9"]
			ColorFormat=["CVBS","RGB","YC","CVBS","CVBS","CVBS","CVBS","CVBS"]
			self.tuneInfo={}
			tunelist = []
			for (key, val) in self.NimType.items():
				if val["type"].startswith("DVB-S"):
					# Chang : DVB -S setting diseqc A
					getRatio = AspectRatio.pop(0)	# ratio
					AspectRatio.append(getRatio)
					getColorFormat=ColorFormat.pop(0)	# colorFormat
					menuname=" %d. T%d/%s/H/22k x/%s/%s" % (current_index, key+1, val["type"], getRatio, getColorFormat)	#menuname
					print current_index
#					current_index=4
					self.setTuneInfo(index=current_index, slot=key, type=val["type"], sat=val["sat1"], pol="H", tone=False, ratio=getRatio, color=getColorFormat, cam=False) # setTuneInfo
#					self.setTuneInfo(current_index, key, val["type"], val["sat1"], "H", True, getRatio, getColorFormat, False) # setTuneInfo
					tunelist.append((menuname,current_index))
					current_index+=1
					# Chang : DVB -S setting diseqc B
					getRatio = AspectRatio.pop(0)
					AspectRatio.append(getRatio)
					getColorFormat=ColorFormat.pop(0)
					menuname=" %d. T%d/%s/V/22k o/%s/%s" % (current_index, key+1, val["type"], getRatio, getColorFormat)
					if len(self.NimType) == key+1:	# CAM test on/off: only the last tuner runs the CAM check
						menuname+="/CAM"
						camtest = True
					else:
						camtest = False
					self.setTuneInfo( index=current_index, slot=key, type=val["type"], sat=val["sat2"], pol="V", tone=True, ratio=getRatio, color=getColorFormat, cam=camtest)
					tunelist.append((menuname,current_index))
					current_index+=1
				# Chang : DVB -T or DVB-C
				elif val["type"].startswith("DVB-T") or val["type"].startswith("DVB-C"):
					additionalMenu = None
					menulen = 1
					if len(self.NimType) == 1:
						# single-tuner box: add a second pass so the CAM is tested too
						additionalMenu = True
						menulen +=1
					for x in range(menulen):
						getRatio = AspectRatio.pop(0)
						AspectRatio.append(getRatio)
						getColorFormat=ColorFormat.pop(0)
						menuname=" %d. T%d/%s/%s/%s" % (current_index, key+1, val["type"], getRatio, getColorFormat)
						if len(self.NimType) == key+1 and (additionalMenu is None or x != 0):	# CAM test on/off
							menuname+=" CAM"
							camtest = True
						else:
							camtest = False
						self.setTuneInfo( index=current_index, slot=key, type=val["type"], sat=None, pol=None, tone=None, ratio=getRatio, color=getColorFormat, cam=camtest)
						tunelist.append((menuname,current_index))
						current_index+=1
			if self.model == 3:
				tlist.extend(tunelist)
				self.tuner_test_last_index = current_index-1
			elif self.model == 4:
				self.tunelist = tunelist
				self.tuner_test_last_index = 4
				current_index = self.tuner_test_last_index
				tlist.append((" %d. Tuning test" % current_index,self.tuner_test_last_index))
				current_index+=1
			self.rs232testIndex=current_index
			tlist.append((" %d. RS232 test" % current_index,self.rs232testIndex))
			current_index+=1
			self.ethernettestIndex=current_index
			tlist.append((" %d. Ethernet & mac test" % current_index,self.ethernettestIndex))
			current_index+=1
			self.fdefaultIndex=current_index
			tlist.append((" %d. Factory default" % current_index,self.fdefaultIndex))
			current_index+=1
			self.shutdownIndex=current_index
			tlist.append((" %d. Shutdown(Deep Standby)" % current_index,self.shutdownIndex))
		self.menulength= len(tlist)
		self["testlist"] = MenuList(tlist)
def setTuneInfo(self,index=0,slot=0,type="DVB-S2",sat="160",pol="H",tone=True,ratio="4:3",color="CVBS",cam=False):
self.tuneInfo[index]={}
self.tuneInfo[index]["slot"]=slot
self.tuneInfo[index]["type"]=type
self.tuneInfo[index]["sat"]=sat
self.tuneInfo[index]["pol"]=pol
self.tuneInfo[index]["22k"]=tone
self.tuneInfo[index]["ratio"]=ratio
self.tuneInfo[index]["color"]=color
self.tuneInfo[index]["cam"]=cam
def getModelInfo(self):
getmodel = 0
if fileExists("/proc/stb/info/vumodel"):
vumodel = open("/proc/stb/info/vumodel")
info=vumodel.read().strip()
vumodel.close()
if info == "duo":
self.model = 0
getmodel = 1
print "getModelInfo : duo"
if info == "solo":
self.model = 1
getmodel = 1
print "getModelInfo : solo"
if info == "combo":
self.model = 2
getmodel = 1
print "getModelInfo : combo"
if info == "uno":
self.model = 3
getmodel = 1
print "getModelInfo : uno"
if info == "ultimo":
self.model = 4
getmodel = 1
print "getModelInfo : ultimo"
if getmodel == 0 and fileExists("/proc/stb/info/version"):
vesion = open("/proc/stb/info/version")
info=version.read()
version.close()
if info[:2] == "14":
self.model = 1
print "getModelInfo : solo_"
elif info[:2] == "12":
self.model = 0
print "getModelInfo : duo_"
	def nothing(self):
		# Placeholder action for keys that must be swallowed on this screen.
		print "nothing"
def keyup(self):
print "self.menulength = ",self.menulength
print "self[\"testlist\"].getCurrent()[1] = ",self["testlist"].getCurrent()[1]
if self.testing==1:
return
if self["testlist"].getCurrent()[1]==0:
self["testlist"].moveToIndex(self.menulength-1)
self["resultlist"].moveToIndex(self.menulength-1)
else:
self["testlist"].up()
self["resultlist"].up()
def keydown(self):
print "self.menulength = ",self.menulength
print "self[\"testlist\"].getCurrent()[1] = ",self["testlist"].getCurrent()[1]
if self.testing==1:
return
if self["testlist"].getCurrent()[1]==(self.menulength-1):
self["testlist"].moveToIndex(0)
self["resultlist"].moveToIndex(0)
else:
self["testlist"].down()
self["resultlist"].down()
def numberaction(self, number):
if self.testing==1:
return
if number >= self.menulength:
return
index = int(number)
self["testlist"].moveToIndex(index)
self["resultlist"].moveToIndex(index)
def getImageVersion(self):
date = 'xxxx-xx-xx'
file = open(resolveFilename(SCOPE_SYSETC, 'image-version'), 'r')
lines = file.readlines()
for x in lines:
splitted = x.split('=')
if splitted[0] == "version":
# YYYY MM DD hh mm
#0120 2005 11 29 01 16
#0123 4567 89 01 23 45
version = splitted[1]
year = version[4:8]
month = version[8:10]
day = version[10:12]
date = '-'.join((year, month, day))
break;
file.close()
return date
def getversion(self):
try:
fd = open("/proc/stb/info/version","r")
version = fd.read()
fd.close()
self["testversion"].setText(("Version %s"%version))
except:
self["testversion"].setText(("Version no load"))
def getmacaddr(self):
try:
if self.model == 2 or self.model == 3 or self.model == 4:
cmd = "nanddump -s 0x" + str((self.mactry-1)*2) + "0000 -b -o -l 64 -p /dev/mtd5"
elif self.model == 0 or self.model == 1:
cmd = "nanddump -s 0x" + str((self.mactry-1)*2) + "0000 -b -o -l 64 -p /dev/mtd4"
self.macConsole = Console()
self.macConsole.ePopen(cmd, self.readmac,self.checkReadmac)
except:
return
def readmac(self, result, retval,extra_args=None):
(callback) = extra_args
if self.macConsole is not None:
if retval == 0:
self.macConsole = None
macline = None
content =result.splitlines()
for x in content:
if x.startswith('0x000'+str((self.mactry-1)*2)+'0010:'):
macline = x.split()
if macline == None:
callback(0)
elif len(macline) < 10:
callback(1)
else:
mac = macline[5]+":"+macline[6]+":"+macline[7]+":"+macline[8]+":"+macline[9]+":"+macline[10]
self["mactext"].setText(("MAC : "+mac))
callback(2)
def checkReadmac(self,data):
if data == 0:
print "block %d is bad block" % self.mactry
self.mactry = self.mactry + 1
if self.mactry > 4:
self.session.open(MessageBox, _("FLASH IS BROKEN"), type = MessageBox.TYPE_INFO, enable_input = False)
return
else:
self.getmacaddr()
elif data == 1:
print 'mac dump read error'
return
elif data == 2:
print 'mac address read ok'
return
	def TestAction(self):
		# Dispatch the currently selected menu entry to its test routine.
		# The ...Index attributes are assigned by createConfig(); tests not
		# available on this model hold -1 and can never match.
		if self.testing==1:
			return
		print "line - ",self["testlist"].getCurrent()[1]
		self.currentindex = index = self["testlist"].getCurrent()[1]
		result = 0
		if index==self.satetestIndex:
			self.Test0()
		elif index==self.fronttestIndex:
			self.Test1()
		elif index>=self.tuner_test_first_index and index<=self.tuner_test_last_index:
			# duo/solo/combo/uno tune directly; ultimo opens a tuning submenu
			if self.model == 0 or self.model == 1 or self.model == 2 or self.model == 3:
				self.TestTune(index)
			elif self.model == 4:
				self.openTestTuneMenu()
		elif index==self.scarttestIndex:
			self.Test6()
		elif index==self.rs232testIndex:
			self.Test7()
		elif index==self.usbtestIndex:
			self.Test8()
		elif index==self.ethernettestIndex:
			self.Test9()
		elif index == self.smarttestIndex:
			self.Test10()
#		elif index == 11:
#			self.Test11()
#		elif index ==12:
#			self.Test12()
#		elif index==13:
#			self.Test13()
		elif index==self.fdefaultIndex:
			self.Test14()
#		elif index==self.shutdownIndex:
#			self.Test15()
		else:
			pass
	def shutdownaction(self):
		# Only the dedicated shutdown menu entry may trigger a power-off.
		if self["testlist"].getCurrent()[1] == self.shutdownIndex:
			self.Test15()
	def Test0(self):
		# Kick off the SATA / extend-HDD check; sataCheck() polls and retries.
		self.satatry = 8
		self.satatimer.start(100,True)
	def sataCheck(self):
		"""Write/read round-trip on the internal (sdb1) and external (sda1)
		disks; re-arms the timer for up to 8 retries before a failure is
		shown to the operator."""
#		print "try", self.satatry
		if self.satatry == 0:
			displayerror = 1
		else:
			self.rlist[self["testlist"].getCurrent()[1]]="try %d"%self.satatry
			self["resultlist"].updateList(self.rlist)
			self.satatry -= 1
			displayerror = 0
		result =0
		try:
			if fileExists("/autofs/sdb1"):
				if access("/autofs/sdb1",F_OK|R_OK|W_OK):
					dummy=open("/autofs/sdb1/dummy03","w")
					dummy.write("complete")
					dummy.close()
					dummy=open("/autofs/sdb1/dummy03","r")
					if dummy.readline()=="complete":
						print "/autofs/sdb1 - complete"
					else:
						print "/autofs/sdb1 - readline error"
						result = 1
						displayerror = 1
					dummy.close()
					system("rm /autofs/sdb1/dummy03")
				else:
					print "/autofs/sdb1 - rw access error"
					result = 1
					displayerror = 1
			else:
				# not mounted yet: keep retrying silently (displayerror stays 0)
				print "/autofs/sdb1 - file not exist"
				result = 1
		except:
			print "/autofs/sdb1 - exceptional error"
			result = 1
			displayerror = 1
		# same round-trip on the second disk; failures accumulate in result
		try:
			if fileExists("/autofs/sda1"):
				if access("/autofs/sda1",F_OK|R_OK|W_OK):
					dummy=open("/autofs/sda1/dummy03","w")
					dummy.write("complete")
					dummy.close()
					dummy=open("/autofs/sda1/dummy03","r")
					if dummy.readline()=="complete":
						print "/autofs/sda1 - complete"
					else:
						print "/autofs/sda1 - readline error"
						result += 1
						displayerror = 1
					dummy.close()
					system("rm /autofs/sda1/dummy03")
				else:
					print "/autofs/sda1 - rw access error"
					result += 1
					displayerror = 1
			else:
				print "/autofs/sda1 - file not exist"
				result += 1
		except:
			print "/autofs/sda1 - exceptional error"
			result += 1
			displayerror = 1
		# result: 0 = both ok, 1 = one disk failed, 2 = both failed
		if result == 0:
			self.session.open( MessageBox, _("Sata & extend hdd test pass\nPress 'OK' button!"), MessageBox.TYPE_INFO)
			self.rlist[self["testlist"].getCurrent()[1]]="pass"
		elif result == 1:
			if displayerror==1:
				self.session.open( MessageBox, _("One hdd test error\nPress 'EXIT' button!"), MessageBox.TYPE_ERROR)
				self.rlist[self["testlist"].getCurrent()[1]]="fail"
			else:
				self.satatimer.start(1100,True)
		else:
			if displayerror==1:
				self.session.open( MessageBox, _("Sata & extend hdd test error\nPress 'EXIT' button!"), MessageBox.TYPE_ERROR)
				self.rlist[self["testlist"].getCurrent()[1]]="fail"
			else:
				self.satatimer.start(1100,True)
def Test1(self):
if self.model== 0:
self.session.openWithCallback(self.displayresult ,FrontTest)
elif self.model == 1:
self.session.openWithCallback(self.displayresult ,FrontTest_solo)
elif self.model == 2 or self.model == 3:
self.session.openWithCallback(self.displayresult ,FrontTest_uno)
elif self.model == 4:
self.session.openWithCallback(self.displayresult ,FrontTest_ultimo)
def displayresult(self):
global fronttest
if fronttest == 1:
self.rlist[self["testlist"].getCurrent()[1]]="pass"
else:
self.rlist[self["testlist"].getCurrent()[1]]="fail"
	def openTestTuneMenu(self):
		# ultimo only: show the generated tuning submenu instead of tuning
		# straight from the main list.
		self.session.openWithCallback(self.TestTuneMenuResult ,TestTuneMenu, self.tuneInfo, self.tunelist, self.NimType)
def TestTuneMenuResult(self,result):
if result :
self.rlist[self["testlist"].getCurrent()[1]]="pass"
else:
self.rlist[self["testlist"].getCurrent()[1]]="fail"
self["resultlist"].updateList(self.rlist)
	def TestTune(self,index):
		"""Tune the hard-coded test transponder for menu entry ``index``.

		Picks a service reference per tuner/polarity/system, starts
		playback, sets the requested color format and aspect ratio, then
		arms updateStatus (frontend lock check after 2s) and tunemsg
		(operator confirmation dialog after 3s).
		"""
		if self.oldref is None:
			# nothing was playing before the test: fabricate a reference
			eref = eServiceReference("1:0:19:1324:3EF:1:C00000:0:0:0")
			serviceHandler = eServiceCenter.getInstance()
			servicelist = serviceHandler.list(eref)
			if not servicelist is None:
				ref = servicelist.getNext()
			else:
				ref = self.getCurrentSelection() # raise error
				print "servicelist none"
		else:
			ref = self.oldref
		self.session.nav.stopService() # try to disable foreground service
		if self.model == 0 or self.model == 1:
			# duo/solo: fixed menu entries map directly to tuner settings
			if index==self.tuner1_1testIndex:
				ref.setData(0,1)
				ref.setData(1,0x6D3)
				ref.setData(2,0x3)
				ref.setData(3,0xA4)
				ref.setData(4,0xA00000)
				self.session.nav.playService(ref)
				self.avswitch.setColorFormat(0)
				self.avswitch.setAspectRatio(0)
			elif index==self.tuner1_2testIndex:
				if self.model == 1:
					# solo runs the CAM handshake on this (single-tuner) pass
					self.camstep = 1
					self.camtimer.start(100,True)
				ref.setData(0,0x19)
				ref.setData(1,0x1325)
				ref.setData(2,0x3ef)
				ref.setData(3,0x1)
				ref.setData(4,0x64af79)
				self.session.nav.playService(ref)
				self.avswitch.setColorFormat(1)
				self.avswitch.setAspectRatio(6)
			elif index==self.tuner2_1testIndex:
				ref.setData(0,1)
				ref.setData(1,0x6D3)
				ref.setData(2,0x3)
				ref.setData(3,0xA4)
				ref.setData(4,0x820000)
				self.session.nav.playService(ref)
				self.avswitch.setColorFormat(2)
				self.avswitch.setAspectRatio(0)
			elif index==self.tuner2_2testIndex:
				self.camstep = 1
				self.camtimer.start(100,True)
				ref.setData(0,0x19)
				ref.setData(1,0x1325)
				ref.setData(2,0x3ef)
				ref.setData(3,0x1)
				ref.setData(4,0xC00000)
				self.session.nav.playService(ref)
				self.avswitch.setColorFormat(0)
				self.avswitch.setAspectRatio(6)
			self.tuningtimer.start(2000,True)
			self.tunemsgtimer.start(3000, True)
		elif self.model == 3 or self.model == 4:
			# uno/ultimo: all parameters come from the generated tune table
			getTuneInfo=self.tuneInfo[index]
			if getTuneInfo["cam"] is True:
				self.camstep = 1
				self.camtimer.start(100,True)
			if getTuneInfo["type"].startswith("DVB-S"):
				if getTuneInfo["pol"] == "H":
					ref.setData(0,1)
					ref.setData(1,0x6D3)
					ref.setData(2,0x3)
					ref.setData(3,0xA4)
				else:
					ref.setData(0,0x19)
					ref.setData(1,0x1325)
					ref.setData(2,0x3ef)
					ref.setData(3,0x1)
				# namespace (data 4) selects the satellite position
				if getTuneInfo["sat"] == "160": # Eutelsat W2
					ref.setData(4,0xA00000)
				elif getTuneInfo["sat"] == "100": # Eutelsat
					ref.setData(4,0x64af79)
				elif getTuneInfo["sat"] == "130": # Hotbird
					ref.setData(4,0x820000)
				elif getTuneInfo["sat"] == "192": # Astra
					ref.setData(4,0xC00000)
				elif getTuneInfo["sat"] == "620": # Intelsat 902
					ref.setData(4,0x26c0000)
				elif getTuneInfo["sat"] == "642": # Intelsat 906
					ref.setData(4,0x282AF79)
				elif getTuneInfo["sat"] == "685": # Panamsat 7,10 (68.5E)
					ref.setData(4,0x02ad0000)
				elif getTuneInfo["sat"] == "720": # Panamsat 4 (72.0E)
					ref.setData(4,0x02d0af79)
			elif getTuneInfo["type"].startswith("DVB-C"):
				ref.setData(0,0x19)
				ref.setData(1,0x1325)
				ref.setData(2,0x3ef)
				ref.setData(3,0x1)
				ref.setData(4,-64870) # ffff029a
			elif getTuneInfo["type"].startswith("DVB-T"):
				ref.setData(0,0x19)
				ref.setData(1,0x1325)
				ref.setData(2,0x3ef)
				ref.setData(3,0x1)
				ref.setData(4,-286391716) # eeee025c
			self.session.nav.playService(ref)
			if getTuneInfo["color"]=="CVBS":
				self.avswitch.setColorFormat(0)
			elif getTuneInfo["color"]=="RGB":
				self.avswitch.setColorFormat(1)
			elif getTuneInfo["color"]=="YC":
				self.avswitch.setColorFormat(2)
			if getTuneInfo["ratio"] == "4:3":
				self.avswitch.setAspectRatio(0)
			elif getTuneInfo["ratio"] == "16:9":
				self.avswitch.setAspectRatio(6)
			self.tuningtimer.start(2000,True)
			self.tunemsgtimer.start(3000, True)
	def cam_state(self):
		"""Timer-driven CI CAM handshake state machine.

		Steps 1/2 wait for CI slot 0 to report a state and an application
		name; steps 3/4 do the same for slot 1; reaching step 5 (after
		setSource reroutes the stream through the CAM) marks success.
		Any failure stops playback, records "fail" and cancels the pending
		tune-confirmation dialog.
		"""
		current_index = self.currentindex
		if self.camstep == 1:
			slot = 0
			state = eDVBCI_UI.getInstance().getState(slot)
			print '-1-stat',state
			if state > 0:
				self.camstep=2
				self.camtimer.start(100,True)
			else:
				self.session.nav.stopService()
				self.session.open( MessageBox, _("CAM1_NOT_INSERTED\nPress exit!"), MessageBox.TYPE_ERROR)
				self.rlist[current_index]="fail"
				self.tunemsgtimer.stop()
		elif self.camstep == 2:
			slot = 0
			appname = eDVBCI_UI.getInstance().getAppName(slot)
			print 'appname',appname
			if appname is None:
				self.session.nav.stopService()
				self.session.open( MessageBox, _("NO_GET_APPNAME\nPress exit!"), MessageBox.TYPE_ERROR)
				self.rlist[current_index]="fail"
				self.tunemsgtimer.stop()
			else:
				self.camstep=3
				self.camtimer.start(100,True)
		elif self.camstep==3:
			slot = 1
			state = eDVBCI_UI.getInstance().getState(slot)
			print '-2-stat',state
			if state > 0:
				self.camstep=4
				self.camtimer.start(100,True)
			else:
				self.session.nav.stopService()
				self.session.open( MessageBox, _("CAM2_NOT_INSERTED\nPress exit!"), MessageBox.TYPE_ERROR)
				self.rlist[current_index]="fail"
				self.tunemsgtimer.stop()
		elif self.camstep == 4:
			slot = 1
			appname = eDVBCI_UI.getInstance().getAppName(slot)
			print 'appname',appname
			if appname is None:
				self.session.nav.stopService()
				self.session.open( MessageBox, _("NO_GET_APPNAME\nPress exit!"), MessageBox.TYPE_ERROR)
				self.rlist[current_index]="fail"
				self.tunemsgtimer.stop()
			else:
				# both CAMs answered: loop the stream through CI and finish
				self.setSource()
				self.camstep = 5
def updateStatus(self):
current_index = self.currentindex
if self.model == 0 or self.model == 1:
if current_index ==self.tuner1_1testIndex or current_index==self.tuner1_2testIndex:
tunno = 1
result = eSctest.getInstance().getFrontendstatus(0)
else:
tunno = 2
result = eSctest.getInstance().getFrontendstatus(1)
if current_index == self.tuner1_2testIndex or current_index==self.tuner2_2testIndex:
hv = "Ver"
else:
hv = "Hor"
elif self.model == 3 or self.model == 4:
getTuneInfo=self.tuneInfo[current_index]
result = eSctest.getInstance().getFrontendstatus(getTuneInfo["slot"])
tunno = getTuneInfo["slot"]+1
hv = getTuneInfo["pol"]
if hv == "H":
hv = "Hor"
elif hv == "V":
hv = "Ver"
else :
hv == ""
print "eSctest.getInstance().getFrontendstatus - %d"%result
if result == 0 or result == -1:
self.tunerlock = 0
self.tunemsgtimer.stop()
self.session.nav.stopService()
self.avswitch.setColorFormat(0)
self.session.open( MessageBox, _("Tune%d %s Locking Fail..."%(tunno,hv)), MessageBox.TYPE_ERROR)
self.rlist[current_index]="fail"
else :
self.tunerlock = 1
def tuneback(self,yesno):
current_index=self.currentindex
self.session.nav.stopService() # try to disable foreground service
if yesno and self.tunerlock == 1:
if current_index == self.tuner_test_last_index and self.camstep < 5: # need fix to depending about CAM exist
self.rlist[current_index]="fail"
else :
self.rlist[current_index]="pass"
else:
self.rlist[current_index]="fail"
if self.model == 0 and current_index == 6: # YC
self.avswitch.setColorFormat(0)
elif ( self.model == 3 or self.model == 4 ) and self.tuneInfo[current_index]["color"] == "YC":
self.avswitch.setColorFormat(0)
self.resetSource()
self["resultlist"].updateList(self.rlist)
	def tunemsg(self):
		# Lock check already ran; ask the operator to confirm picture/sound.
		self.tuningtimer.stop()
		self.session.openWithCallback(self.tuneback, TuneMessageBox, _("%s ok?" %(self["testlist"].getCurrent()[0])), MessageBox.TYPE_YESNO)
def setSourceVar(self):
if self.model == 0:
self.input_pad_num=1
self.setTuner = 'B'
elif self.model == 1:
self.input_pad_num=0
self.setTuner = 'A'
else:
self.input_pad_num=len(self.NimType)-1
if self.input_pad_num == 0:
self.setTuner = 'A'
elif self.input_pad_num == 1:
self.setTuner = 'B'
elif self.input_pad_num == 2:
self.setTuner = 'C'
# ikseong - for 22000 tp
def setSource(self):
# fix input source
inputname = ("/proc/stb/tsmux/input%d" % self.input_pad_num)
print "<setsource> inputname : ",inputname
fd=open(inputname,"w")
fd.write("CI0")
fd.close()
# fix ci_input Tuner
filename = ("/proc/stb/tsmux/ci0_input")
fd = open(filename,'w')
fd.write(self.setTuner)
print "setTuner(CI0) : ",self.setTuner
fd.close()
print "CI loop test!!!!!!!!!!!!!!"
def resetSource(self):
inputname = ("/proc/stb/tsmux/input%d" % self.input_pad_num)
print "<resetsource> inputname : ",inputname
fd=open(inputname,"w")
fd.write(self.setTuner)
fd.close()
print "CI loop test end!!!!!!!!!!!!!!"
	def Test6(self):
		# Switch AV output to the VCR scart input and ask the operator to
		# confirm the loop-through picture.
		self.avswitch.setInput("SCART")
		sleep(2)
		self.session.openWithCallback(self.check6, MessageBox, _("Scart loop ok?"), MessageBox.TYPE_YESNO)
def check6(self,yesno):
if yesno:
self.rlist[self["testlist"].getCurrent()[1]]="pass"
else:
self.rlist[self["testlist"].getCurrent()[1]]="fail"
self.avswitch.setInput("ENCODER")
def check7(self):
global rstest
if rstest == 1:
self.rlist[self["testlist"].getCurrent()[1]]="pass"
else:
self.rlist[self["testlist"].getCurrent()[1]]="fail"
	def Test7(self):
		# Run the serial-port loopback screen; check7 records the verdict.
		self.session.openWithCallback(self.check7,RS232Test)
	def Agingmode(self):
		# Open the burn-in (aging) test screen for this model.
		self.session.openWithCallback(self.AgingmodeCallback,AgingTest, self.model)
def AgingmodeCallback(self,ret):
if ret == 1:
self["testlist"].moveToIndex(self.fdefaultIndex)
self.Test14()
self["testlist"].moveToIndex(self.shutdownIndex)
	def Agingmode2(self):
		# Open the second burn-in test variant for this model.
		self.session.openWithCallback(self.Agingmode2Callback,AgingTest_mode2, self.model)
	def Agingmode2Callback(self):
		# After aging mode 2: factory-default and park on the shutdown entry.
		self["testlist"].moveToIndex(self.fdefaultIndex)
		self.Test14()
		self["testlist"].moveToIndex(self.shutdownIndex)
	def Test8(self):
		# Kick off the USB check; usbCheck() polls and retries.
		self.usbtry = 9
		self.usbtimer.start(100,True)
	def usbCheck(self):
		"""Write/read round-trip on every expected USB mount point; re-arms
		the timer for up to 9 retries before reporting which devices failed."""
		if self.usbtry == 0:
			displayerror = 1
		else:
			self.rlist[self["testlist"].getCurrent()[1]]="try %d"%self.usbtry
			self["resultlist"].updateList(self.rlist)
			self.usbtry -= 1
			displayerror = 0
		# the set of auto-mounted device nodes differs per model
		if self.model==0 or self.model==3 or self.model==4:
			devices = [ "/autofs/sdc1", "/autofs/sdd1", "/autofs/sde1" ]
		elif self.model==1:
			devices = [ "/autofs/sda1", "/autofs/sdb1" ]
		elif self.model==2:
			devices = [ "/autofs/sdc1", "/autofs/sdd1" ]
		else :
			self.session.open( MessageBox, _("invalid model"), MessageBox.TYPE_ERROR)
			self.rlist[self["testlist"].getCurrent()[1]]="fail"
			return
		result=len(devices)		# counts down by one per failing device
		for dev in devices:
			try:
				if fileExists(dev):
					if access(dev,F_OK|R_OK|W_OK):
						dummy=open(dev+"/dummy03","w")
						dummy.write("complete")
						dummy.close()
						dummy=open(dev+"/dummy03","r")
						if dummy.readline()=="complete":
							print dev," - complete"
						else:
							print dev," - readline error"
							result=result -1
							displayerror = 1
						dummy.close()
						system("rm "+dev+"/dummy03")
					else:
						print dev," - rw access error"
						result=result -1
						displayerror = 1
				else:
					# not mounted yet: retry silently (displayerror stays 0)
					print dev," - file not exist"
					result=result-1
			except:
				print dev," - exceptional error"
				result=result -1
				displayerror = 1
		if result < 0 :
			result = 0
		elif result == len(devices):
			self.session.open( MessageBox, _("USB test pass %d devices\nPress 'OK' button!"%result), MessageBox.TYPE_INFO)
			self.rlist[self["testlist"].getCurrent()[1]]="pass"
		else:
			if displayerror == 1:
				self.session.open( MessageBox, _("USB test error : Success-%d"%result+" Fail-%d\nPress 'EXIT' button!"%(len(devices)-result)), MessageBox.TYPE_ERROR)
				self.rlist[self["testlist"].getCurrent()[1]]="fail"
			else:
				self.usbtimer.start(1100,True)
	def pingtest(self):
		# Force a static address and ping the fixed test host; the outcome
		# arrives via NetworkStatedataAvail.  Locks the menu while running.
		self.testing = 1
#		system("/etc/init.d/networking stop")
		system("ifconfig eth0 192.168.0.10")
#		system("/etc/init.d/networking start")
		cmd1 = "ping -c 1 192.168.0.100"
		self.PingConsole = Console()
		self.PingConsole.ePopen(cmd1, self.checkNetworkStateFinished,self.NetworkStatedataAvail)
	def checkNetworkStateFinished(self, result, retval,extra_args):
		# Console callback for the ping.  Parses the statistics summary line
		# ("N packets transmitted, N received, ...") and reports 1 when the
		# transmitted count equals the received count, else 0.
		(statecallback) = extra_args
		if self.PingConsole is not None:
			if retval == 0:
				self.PingConsole = None
				content = result.splitlines()
#				print 'content',content
				x = content[4].split()	# 5th output line holds the counters
#				print 'x',x
				if x[0]==x[3]:
					statecallback(1)
				else:
					statecallback(0)
			else:
				statecallback(0)
def NetworkStatedataAvail(self,data):
global ethtest
if data == 1:
ethtest = 1
print "success"
self.session.openWithCallback(self.openMacConfig ,MessageBox, _("Ping test pass"), MessageBox.TYPE_INFO,2)
else:
ethtest = 0
print "fail"
self.session.open( MessageBox, _("Ping test fail\nPress exit"), MessageBox.TYPE_ERROR)
self.macresult()
	def Test9(self):
		# Ethernet & MAC test entry point.
		self.pingtest()
	def openMacConfig(self, ret=False):
		# After a successful ping: let the operator program the MAC address.
		self.session.openWithCallback(self.macresult ,MacConfig,mactry=self.mactry)
def macresult(self):
global ethtest
if ethtest == 1:
self.rlist[self.ethernettestIndex]="pass"
else:
self.rlist[self.ethernettestIndex]="fail"
self.getmacaddr()
self.testing = 0
# def MemTest(self, which):
# index = which
# result = 0
# if index==0:
# result = eMemtest.getInstance().dramtest()
# elif index==1:
# result = eMemtest.getInstance().flashtest()
# result = 0 # temp
# else:
# result = eMemtest.getInstance().dramtest()
# result = eMemtest.getInstance().flashtest()
# result = 0 # temp
# index = index+10
# if result == 0:
# print index,self.rlist[index]
# self.rlist[index]="pass"
# else:
# print index,self.rlist[index]
# self.rlist[index]="fail"
# self["resultlist"].updateList(self.rlist)
def scciresult(self):
global smartcardtest
if smartcardtest == 1:
self.rlist[self["testlist"].getCurrent()[1]]="pass"
else:
self.rlist[self["testlist"].getCurrent()[1]]="fail"
	def Test10(self):
		# Run the smartcard reader test screen for this model.
		self.session.openWithCallback(self.scciresult ,SmartCardTest,stbmodel=self.model)
# def Test11(self):
# self.MemTest(1)
# def Test12(self):
# self.MemTest(2)
# def Test13(self):
# self.MemTest(3)
	def Test14(self):
		# Factory default: wipe /etc/enigma2 and restore the shipped defaults.
		try:
			print "test14"
			system("rm -R /etc/enigma2")
			system("ls /")
			system("cp -R /usr/share/enigma2/defaults /etc/enigma2")
			self.rlist[self["testlist"].getCurrent()[1]]="pass"
			self["resultlist"].updateList(self.rlist)
		except:
			print "test14 except"
			self.rlist[self["testlist"].getCurrent()[1]]="fail"
			self["resultlist"].updateList(self.rlist)
			self.session.open( MessageBox, _("Factory reset fail"), MessageBox.TYPE_ERROR)
	def Test15(self):
		# Confirm before powering the box down.
		self.session.openWithCallback(self.shutdown ,MessageBox, _("Do you want to shut down?"), MessageBox.TYPE_YESNO)
def shutdown(self, yesno):
if yesno :
from os import _exit
system("/usr/bin/showiframe /boot/backdrop.mvi")
_exit(1)
else:
return
	def keyCancel(self):
		# Leave the factory test screen; blocked while a test is running.
		if self.testing==1:
			return
		print "exit"
		self.close()
#		if self.oldref is not None:
#			self.session.nav.playService(self.oldref)
# module-level flag holding the last ethernet/ping test result (1 = pass)
ethtest = 0
class MacConfig(Screen):
	"""Screen for reviewing and programming the box MAC address.

	Reads the next MAC to assign from a 'macinfo.txt' on an attached USB
	stick, shows the MAC currently stored in flash, and lets the operator
	step the candidate up/down before writing it back.
	"""
	skin = """
		<screen name="MacConfig" position="center,center" size="520,100" title="Mac Config" >
			<eLabel text="Mac Address " position="10,15" size="200,40" font="Regular;30" />
			<widget name="text" position="230,15" size="230,40" font="Regular;30" halign="right"/>
			<widget name="text1" position="470,15" size="40,40" font="Regular;30" />
			<eLabel text=" " position="5,55" zPosition="-1" size="510,5" backgroundColor="#02e1e8e6" />
			<widget name="stattext" position="30,75" size="450,35" font="Regular;30" />
		</screen>"""
	def __init__(self, session, mactry = 1):
		"""mactry -- 1-based index of the flash block to read the MAC from
		(bad blocks advance it; see checkReadmac)."""
		# NOTE(review): self["actions"] is assigned before Screen.__init__
		# runs — appears to rely on Screen's item assignment being usable
		# pre-init; confirm against the enigma2 Screen implementation.
		self["actions"] = ActionMap(["DirectionActions","OkCancelActions"],
		{
			"ok": self.keyOk,
			"left": self.keyleft,
			"right": self.keyright,
			"cancel": self.keyCancel,
		}, -2)
		Screen.__init__(self, session)
		self.mactry = mactry
		self.model = 0
		self.getModelInfo()
		self.macfd = 0			# handle of macinfo.txt on USB (0 = not open)
		self.macaddr = "000000000000"	# candidate MAC, 12 hex digits
		self.ReadMacinfo = 0		# 1 once a reference MAC was loaded from USB
		self["text"]=Label((self.macaddr))
		self["text1"]= Label(("< >"))
		self["stattext"]= Label((""))
		self.displaymac()
		self.loadmacaddr()
		self.getmacaddr()
		global ethtest
		ethtest = 1
def getModelInfo(self):
getmodel = 0
if fileExists("/proc/stb/info/vumodel"):
vumodel = open("/proc/stb/info/vumodel")
info=vumodel.read().strip()
vumodel.close()
if info == "combo":
self.model = 2
getmodel = 1
print "MacConfig, model : combo"
elif info == "solo":
self.model = 1
getmodel = 1
print "MacConfig, model : solo"
elif info == "duo":
self.model = 0
getmodel = 1
print "MacConfig, model : duo"
elif info == "uno":
self.model = 3
getmodel = 1
print "getModelInfo : uno"
elif info == "ultimo":
self.model = 4
getmodel = 1
print "getModelInfo : ultimo"
if getmodel == 0 and fileExists("/proc/stb/info/version"):
version = open("/proc/stb/info/version")
info=version.read()
version.close()
# print info,info[:2]
if info[:2] == "14":
self.model = 1
print "MacConfig, model : solo_"
elif info[:2] == "12":
self.model = 0
print "MacConfig, model: duo_"
	def loadmacaddr(self):
		"""Locate macinfo.txt on an attached USB stick and read the next MAC
		to program.  Keeps the file handle open in self.macfd so keyOk()
		can write the incremented value back later."""
		try:
			self.macfd = 0
			if self.model==0 or self.model==3 or self.model==4 :
				devices = ["/autofs/sdb1", "/autofs/sdc1", "/autofs/sdd1", "/autofs/sde1" ]
			elif self.model==1:
				devices = [ "/autofs/sda1", "/autofs/sdb1" ]
			elif self.model==2:
				devices = [ "/autofs/sdb1", "/autofs/sdc1", "/autofs/sdd1" ]
			# NOTE(review): an unrecognised model leaves `devices` unbound,
			# which lands in the except handler below.
			for dev in devices:
				print 'try..',dev
				if fileExists(dev+"/macinfo.txt"):
					print "<open>"+dev+"/macinfo.txt"
					self.macfd = open(dev+"/macinfo.txt","r+")
					break
			if self.macfd == 0:
				self["text"].setText(("cannot read usb!!"))
				self["text1"].setText((" "))
				self["stattext"].setText((" Press Exit Key."))
				self.ReadMacinfo=0
				return
			# file format "MAC:xx:xx:xx:xx:xx:xx" -> join the six hex pairs
			macaddr=self.macfd.readline().split(":")
			self.macaddr=macaddr[1]+macaddr[2]+macaddr[3]+macaddr[4]+macaddr[5]+macaddr[6]
			self.displaymac()
			self.ReadMacinfo = 1
		except:
			self["text"].setText(("cannot read usb!!"))
			self["text1"].setText((" "))
			self["stattext"].setText((" Press Exit Key."))
			self.ReadMacinfo=0
def getmacaddr(self):
if self.ReadMacinfo==0:
return
try:
if self.model == 2 or self.model == 3 or self.model == 4:
cmd = "nanddump -s 0x" + str((self.mactry-1)*2) + "0000 -b -o -l 64 -p /dev/mtd5"
elif self.model == 0 or self.model == 1:
cmd = "nanddump -s 0x" + str((self.mactry-1)*2) + "0000 -b -o -l 64 -p /dev/mtd4"
self.macConsole = Console()
self.macConsole.ePopen(cmd, self.readmac,self.checkReadmac)
except:
return
def readmac(self, result, retval,extra_args=None):
(callback) = extra_args
if self.macConsole is not None:
if retval == 0:
self.macConsole = None
macline = None
content =result.splitlines()
for x in content:
if x.startswith('0x000'+str((self.mactry-1)*2)+'0010:'):
macline = x.split()
if macline == None:
callback(0)
elif len(macline) < 10:
callback(1)
else:
mac = macline[5]+":"+macline[6]+":"+macline[7]+":"+macline[8]+":"+macline[9]+":"+macline[10]
self["stattext"].setText(("now : "+mac))
callback(2)
	def checkReadmac(self,data):
		"""Callback from readmac(): react to the NAND dump parse result.

		data 0 = expected line missing (bad block) -> try the next block,
		data 1 = dump read error, data 2 = MAC read successfully.
		"""
		if data == 0:
			print "block %d is bad block" % self.mactry
			self.mactry = self.mactry + 1
			if self.mactry > 4:
				# all four candidate blocks are bad -> flash unusable
				self.session.open(MessageBox, _("FLASH IS BROKEN"), type = MessageBox.TYPE_INFO, enable_input = False)
				return
			else:
				# retry the dump on the next block
				self.getmacaddr()
		elif data == 1:
			print 'mac dump read error'
			return
		elif data == 2:
			print 'mac address read ok'
			return
def keyleft(self):
if self.ReadMacinfo==0 :
return
macaddress = long(self.macaddr,16)-1
if macaddress < 0 :
macaddress = 0xffffffffffff
self.macaddr = "%012x"%macaddress
self.displaymac()
def keyright(self):
if self.ReadMacinfo==0 :
return
macaddress = long(self.macaddr,16)+1
if macaddress > 0xffffffffffff:
macaddress = 0
self.macaddr = "%012x"%macaddress
self.displaymac()
def displaymac(self):
macaddr= self.macaddr
self["text"].setText(("%02x:%02x:%02x:%02x:%02x:%02x"%(int(macaddr[0:2],16),int(macaddr[2:4],16),int(macaddr[4:6],16),int(macaddr[6:8],16),int(macaddr[8:10],16),int(macaddr[10:12],16))))
	def keyOk(self):
		"""Burn the displayed MAC into NAND flash, then store the next
		sequential MAC back into macinfo.txt for the next unit on the line."""
		if self.ReadMacinfo==0 :
			return
		try:
			macaddr = self.macaddr
			#make_mac_sector 00-99-99-99-00-00 > /tmp/mac.sector
			#flash_eraseall /dev/mtd4
			#nandwrite /dev/mtd4 /tmp/mac.sector -p
			cmd = "make_mac_sector %02x-%02x-%02x-%02x-%02x-%02x > /tmp/mac.sector"%(int(macaddr[0:2],16),int(macaddr[2:4],16),int(macaddr[4:6],16),int(macaddr[6:8],16),int(macaddr[8:10],16),int(macaddr[10:12],16))
			system(cmd)
			# target mtd partition depends on the STB model
			if self.model == 2 or self.model == 3 or self.model == 4:
				system("flash_eraseall /dev/mtd5")
				system("nandwrite /dev/mtd5 /tmp/mac.sector -p")
			elif self.model == 0 or self.model ==1 :
				system("flash_eraseall /dev/mtd4")
				system("nandwrite /dev/mtd4 /tmp/mac.sector -p")
			# advance to the next MAC (wrapping past ff:ff:ff:ff:ff:ff) and
			# persist it to the USB stick for the next unit
			macaddress = long(macaddr,16)+1
			if macaddress > 0xffffffffffff:
				macaddress = 0
			macaddr = "%012x"%macaddress
			macwritetext = "MAC:%02x:%02x:%02x:%02x:%02x:%02x"%(int(macaddr[0:2],16),int(macaddr[2:4],16),int(macaddr[4:6],16),int(macaddr[6:8],16),int(macaddr[8:10],16),int(macaddr[10:12],16))
			self.macfd.seek(0)
			self.macfd.write(macwritetext)
			self.macfd.close()
			system("sync")
			self.macaddr = macaddr
			self.close()
		except:
			self.session.open( MessageBox, _("Mac address fail"), MessageBox.TYPE_ERROR)
			global ethtest
			ethtest = 0
			self.close()
def keyCancel(self):
if self.macfd != 0:
self.macfd.close()
global ethtest
ethtest = 0
self.close()
smartcardtest = 0	# module-level result flag: set to 1 when the smart card test passes
class SmartCardTest(Screen):
	"""Factory test for the smart card reader(s) via eSctest.

	Single-slot models test /dev/sci0 only; dual-slot models continue
	with /dev/sci1 after slot 1 passes.  Sets the module-level
	'smartcardtest' flag to 1 on success.
	"""
	skin = """
		<screen name="SmartCardTest" position="center,center" size="300,120" title="SmartCard Test" >
			<widget name="text" position="10,10" size="280,100" font="Regular;30" />
		</screen>"""
	def __init__(self, session, stbmodel = 0):
		self["actions"] = ActionMap(["DirectionActions", "OkCancelActions"],
		{
			"cancel": self.keyCancel,
			"ok" : self.keyOk
		}, -2)
		Screen.__init__(self, session)
		self["text"]=Label(("Testing Smartcard 1..."))
		self.testok = 0
		# timer kicks off the (blocking) card check shortly after show
		self.smartcardtimer = eTimer()
		self.smartcardtimer.callback.append(self.check_smart_card)
		# closetimer auto-dismisses the screen after a result is shown
		self.closetimer = eTimer()
		self.closetimer.callback.append(self.close)
		self.smartcard=0
		global smartcardtest
		smartcardtest = 0
		self.model = stbmodel
		self.smartcardtimer.start(100,True)
	def check_smart_card(self):
		"""Check the slot selected by self.smartcard; on dual-slot models
		chain from slot 0 to slot 1 before declaring success."""
		global smartcardtest
		index = self.smartcard
		result = 0
		if index==0:
			result = eSctest.getInstance().check_smart_card("/dev/sci0")
		elif index ==1:
			result = eSctest.getInstance().check_smart_card("/dev/sci1")
		else:
			result = -1
		print "check smartcard : ", result
		if result == 0:
			print 'pass'
			if(index== 0 and ( self.model== 0 or self.model==2 or self.model == 3 or self.model == 4) ):
				# dual-slot model: slot 1 passed, now test slot 2
				self.smartcard = 1
				self["text"].setText(_("Testing Smartcard 2..."))
				self.smartcardtimer.start(100,True)
				return
			elif (index==1 or self.model==1):
				smartcardtest = 1
				self.testok = 1
				self["text"].setText(_("Smart Card OK!!"))
				self.closetimer.start(2000,True)
				self.smartcardtimer.stop()
			else :
				self["text"].setText(_("Smart Card model type error"))
				self.closetimer.start(2000,True)
				self.smartcardtimer.stop()
		else:
#			if result ==-1:
#				self.session.open( MessageBox, _("%d:NO_DEV_FOUND"%(index+1)), MessageBox.TYPE_ERROR)
#			elif result == -2:
#				self.session.open( MessageBox, _("%d:SC_NOT_INSERTED"%(index+1)), MessageBox.TYPE_ERROR)
#			elif result == -3:
#				self.session.open( MessageBox, _("%d:SC_NOT_VALID_ATR"%(index+1)), MessageBox.TYPE_ERROR)
#			elif result == -5:
#				self.session.open( MessageBox, _("%d:SC_READ_TIMEOUT"%(index+1)), MessageBox.TYPE_ERROR)
			if(index==0):
				self["text"].setText(_("Smart Card 1 Error!\nerrorcode=%d"%result))
			elif (index==1):
				self["text"].setText(_("Smart Card 2 Error!\nerrorcode=%d"%result))
			self.closetimer.start(2000,True)
			self.smartcardtimer.stop()
	def keyCancel(self):
		self.close()
	def keyOk(self):
		# OK only dismisses the screen once the test has passed
		if self.testok == 1:
			self.close()
fronttest = 0	# module-level result flag: set to 1 when the front panel test passes
class FrontTest(Screen):
	"""Front-panel wheel test: LEFT -> RIGHT -> BUTTON, then VFD blink.

	self.step tracks progress (1..4, 0 = failed/timed out).  Each expected
	input must arrive within 5 seconds.  keyCancel() stores the verdict in
	the module-level 'fronttest' flag.
	"""
	skin = """
		<screen name="FrontTest" position="center,center" size="300,180" title="Front Test" >
			<widget name="text" position="10,10" size="280,160" font="Regular;30" />
		</screen>"""
	def __init__(self, session):
		self["actions"] = ActionMap(["DirectionActions", "OkCancelActions"],
		{
			"ok": self.keyOk,
			"up":self.keyUp,
			"down":self.keyDown,
			"cancel": self.keyCancel,
		}, -2)
		Screen.__init__(self, session)
		self["text"]=Label(("Wheel LEFT"))
		self.step = 1
		# fronttimer toggles the VFD on/off once the test has passed
		self.fronttimer= eTimer()
		self.fronttimer.callback.append(self.FrontAnimate)
		self.frontturnonoff = 0
		eSctest.getInstance().VFD_Open()
		# 5s deadline for each expected key press
		self.keytimeout = eTimer()
		self.keytimeout.callback.append(self.KeyTimeOut)
		self.keytimeout.start(5000,True)
	def KeyTimeOut(self):
		# expected input did not arrive in time -> show which step failed
		if self.step == 1:
			self["text"].setText(("Wheel LEFT ERROR"))
		elif self.step ==2 :
			self["text"].setText(("Wheel RIGHT ERROR"))
		elif self.step == 3:
			self["text"].setText(("Wheel BUTTON ERROR"))
		self.step = 0
#		self.keyCancel()
	def keyCancel(self):
		# exit: step 4 means the full sequence completed
		global fronttest
		self.fronttimer.stop()
		eSctest.getInstance().VFD_Close()
		if self.step==4:
			fronttest = 1
		else:
			fronttest = 0
		self.close()
	def keyDown(self):
		# wheel RIGHT event (step 2 -> 3)
		if self.step==2:
			self.keytimeout.stop()
			self.keytimeout.start(5000,True)
			self.step = 3
			self["text"].setText(_("Press Front Wheel"))
	def keyUp(self):
		# wheel LEFT event (step 1 -> 2)
		if self.step==1:
			self.keytimeout.stop()
			self.keytimeout.start(5000,True)
			self.step=2
			self["text"].setText(_("Wheel RIGHT"))
	def keyOk(self):
		# wheel BUTTON event (step 3 -> 4): start the VFD blink animation
		if self.step == 3:
			self.keytimeout.stop()
			self.step =4
			self.fronttimer.start(1000,True)
			self["text"].setText(("Front Test OK!\nPress Exit Key"))
#		elif self.step==4:
#			global fronttest
#			self.fronttimer.stop()
#			eSctest.getInstance().VFD_Close()
#			fronttest = 1
#			self.close()
	def FrontAnimate(self):
		# toggle the VFD once per second so the operator can inspect it
		if (self.frontturnonoff==0):
			eSctest.getInstance().turnon_VFD()
			self.frontturnonoff = 1
		else:
			self.frontturnonoff = 0
			eSctest.getInstance().turnoff_VFD()
		self.fronttimer.start(1000,True)
class FrontTest_solo(Screen):
	"""Front-panel button test for 'solo' models:
	STANDBY -> CH- -> CH+ -> VOL- -> VOL+ -> LED blink confirmation.

	self.step tracks progress (1..6, 0 = failed/timed out); each press has a
	5 second deadline.  Verdict goes into the module-level 'fronttest' flag.
	"""
	skin = """
		<screen name="FrontTest_solo" position="center,center" size="300,180" title="Front Test" >
			<widget name="text" position="10,10" size="280,160" font="Regular;30" />
		</screen>"""
	def __init__(self, session):
		self["actions"] = ActionMap(["DirectionActions", "OkCancelActions","GlobalActions"],
		{
			"ok": self.keyOk,
			"cancel": self.keyCancel,
			"left": self.keyleft,
			"right": self.keyright,
			"power_down": self.keypower,
			"volumeUp": self.keyvolup,
			"volumeDown": self.keyvoldown,
		}, -2)
		Screen.__init__(self, session)
		self["text"]=Label(("Press Front STANDBY"))
		self.step = 1
		# fronttimer drives the LED/VFD blink once all buttons passed
		self.fronttimer= eTimer()
		self.fronttimer.callback.append(self.FrontAnimate)
		self.frontturnonoff = 0
		eSctest.getInstance().VFD_Open()
		# 5s deadline for each expected key press
		self.keytimeout = eTimer()
		self.keytimeout.callback.append(self.KeyTimeOut)
		self.keytimeout.start(5000,True)
	def KeyTimeOut(self):
		# expected press did not arrive in time -> show which step failed
		if self.step == 1:
			self["text"].setText(("Front STANDBY ERROR\nPress exit!"))
		elif self.step == 2 :
			self["text"].setText(("Front CH - ERROR\nPress exit!"))
		elif self.step == 3:
			self["text"].setText(("Front CH + ERROR\nPress exit!"))
		elif self.step == 4 :
			self["text"].setText(("Front VOL - ERROR\nPress exit!"))
		elif self.step == 5:
			self["text"].setText(("Front VOL + ERROR\nPress exit!"))
		self.step = 0
#		self.keyCancel()
	def keypower(self):
		# STANDBY (step 1 -> 2)
		if self.step== 1:
			self.keytimeout.stop()
			self.keytimeout.start(5000,True)
			self.step = 2
			self["text"].setText(_("Press Front CH -"))
	def keyright(self):
		# CH+ (step 3 -> 4)
		if self.step== 3:
			self.keytimeout.stop()
			self.keytimeout.start(5000,True)
			self.step = 4
			self["text"].setText(_("Press Front VOL -"))
	def keyleft(self):
		# CH- (step 2 -> 3)
		if self.step== 2:
			self.keytimeout.stop()
			self.keytimeout.start(5000,True)
			self.step = 3
			self["text"].setText(_("Press Front CH +"))
	def keyvolup(self):
		# VOL+ (step 5 -> 6): start LED blink for visual confirmation
		if self.step== 5:
			self.keytimeout.stop()
			self.step = 6
			self.fronttimer.start(1000,True)
			self["text"].setText(_("Front LED OK?\n\nyes-ok\nno-exit"))
#			self["text"].setText(("Front Test OK!\nPress Exit Key"))
	def keyvoldown(self):
		# VOL- (step 4 -> 5)
		if self.step== 4:
			self.keytimeout.stop()
			self.keytimeout.start(5000,True)
			self.step = 5
			self["text"].setText(_("Press Front VOL +"))
	def checkled(self, yesno):
		if yesno :
			self.step=6
		else:
			self.step=0
			self.keyCancel()
	def keyCancel(self):
		# operator rejected (or aborted): record failure
		global fronttest
		self.fronttimer.stop()
		eSctest.getInstance().VFD_Close()
		fronttest = 0
		self.close()
	def keyOk(self):
		# operator confirmed the LED blink: record success
		global fronttest
		if self.step == 6:
			fronttest = 1
			self.fronttimer.stop()
			eSctest.getInstance().VFD_Close()
			self.close()
	def FrontAnimate(self):
		# toggle the VFD once per second
		if (self.frontturnonoff==0):
			eSctest.getInstance().turnon_VFD()
			self.frontturnonoff = 1
		else:
			self.frontturnonoff = 0
			eSctest.getInstance().turnoff_VFD()
		self.fronttimer.start(1000,True)
class FrontTest_uno(Screen):
	"""Front-panel button test for 'uno' models:
	CH- -> CH+ -> VOL- -> VOL+ -> STANDBY -> LED blink confirmation.

	self.step tracks progress (1..6, 0 = failed/timed out); each press has a
	5 second deadline.  Verdict goes into the module-level 'fronttest' flag.
	"""
	skin = """
		<screen name="FrontTest_uno" position="center,center" size="300,180" title="Front Test" >
			<widget name="text" position="10,10" size="280,160" font="Regular;30" />
		</screen>"""
	def __init__(self, session):
		self["actions"] = ActionMap(["DirectionActions", "OkCancelActions","GlobalActions"],
		{
			"ok": self.keyOk,
			"cancel": self.keyCancel,
			"left": self.keyleft,
			"right": self.keyright,
			"volumeUp": self.keyvolup,
			"volumeDown": self.keyvoldown,
			"power_down": self.keypower,
		}, -2)
		Screen.__init__(self, session)
		self["text"]=Label(("Press Front CH -"))
		self.step = 1
		# fronttimer drives the LED/VFD blink once all buttons passed
		self.fronttimer= eTimer()
		self.fronttimer.callback.append(self.FrontAnimate)
		self.frontturnonoff = 0
		eSctest.getInstance().VFD_Open()
		# 5s deadline for each expected key press
		self.keytimeout = eTimer()
		self.keytimeout.callback.append(self.KeyTimeOut)
		self.keytimeout.start(5000,True)
	def KeyTimeOut(self):
		# expected press did not arrive in time -> show which step failed
		if self.step == 1:
			self["text"].setText(("Front CH - ERROR\nPress exit!"))
		elif self.step == 2:
			self["text"].setText(("Front CH + ERROR\nPress exit!"))
		elif self.step == 3 :
			self["text"].setText(("Front VOL - ERROR\nPress exit!"))
		elif self.step == 4:
			self["text"].setText(("Front VOL + ERROR\nPress exit!"))
		elif self.step == 5:
			self["text"].setText(("Front STANDBY ERROR\nPress exit!"))
		self.step = 0
	def keyleft(self):
		# CH- (step 1 -> 2)
		if self.step== 1:
			self.keytimeout.stop()
			self.keytimeout.start(5000,True)
			self.step = 2
			self["text"].setText(_("Press Front CH +"))
	def keyright(self):
		# CH+ (step 2 -> 3)
		if self.step== 2:
			self.keytimeout.stop()
			self.keytimeout.start(5000,True)
			self.step = 3
			self["text"].setText(_("Press Front VOL -"))
	def keyvoldown(self):
		# VOL- (step 3 -> 4)
		if self.step== 3:
			self.keytimeout.stop()
			self.keytimeout.start(5000,True)
			self.step = 4
			self["text"].setText(_("Press Front VOL +"))
	def keyvolup(self):
		# VOL+ (step 4 -> 5)
		if self.step== 4:
			self.keytimeout.stop()
			self.keytimeout.start(5000,True)
			self.step = 5
			self["text"].setText(_("Press Front STANDBY"))
	def keypower(self):
		# STANDBY (step 5 -> 6): start LED blink for visual confirmation
		if self.step== 5:
			self.keytimeout.stop()
			self.step = 6
			self.fronttimer.start(1000,True)
			self["text"].setText(_("Front LED OK?\n\nyes-ok\nno-exit"))
	def keyCancel(self):
		# operator rejected (or aborted): record failure
		global fronttest
		self.fronttimer.stop()
		eSctest.getInstance().VFD_Close()
		fronttest = 0
		self.close()
	def keyOk(self):
		# operator confirmed the LED blink: record success
		global fronttest
		if self.step == 6:
			fronttest = 1
			self.fronttimer.stop()
			eSctest.getInstance().VFD_Close()
			self.close()
	def FrontAnimate(self):
		# toggle the VFD once per second
		if (self.frontturnonoff==0):
			eSctest.getInstance().turnon_VFD()
			self.frontturnonoff = 1
		else:
			self.frontturnonoff = 0
			eSctest.getInstance().turnoff_VFD()
		self.fronttimer.start(1000,True)
class FrontTest_ultimo_Summary(Screen):
	"""LCD summary screen that mirrors the FrontTest_ultimo prompts."""
	skin = """
	<screen name="FactoryTestSummary" position="0,0" size="256,64" id="1">
		<widget source="parent.Title" render="Label" position="0,0" size="256,24" font="Regular;18" halign="center" valign="center"/>
		<widget name="text" position="0,24" size="256,40" font="VFD;20" halign="center" valign="center"/>
	</screen>"""
	def __init__(self, session, parent):
		Screen.__init__(self, session, parent = parent)
		self["text"] = Label("Press Front STANDBY")
	def setText(self, text = ""):
		# coerce non-string payloads before handing them to the Label
		self["text"].setText(text if isinstance(text, str) else str(text))
class FrontTest_ultimo(FrontTest_solo):
	"""FrontTest_solo variant for 'ultimo' models: same button sequence,
	but every prompt is mirrored on the VFD summary screen and the blink
	animation uses an asymmetric 1.5s/0.5s duty cycle."""
	skin = """
		<screen position="center,center" size="300,180" title="Front Test" >
			<widget name="text" position="10,10" size="280,160" font="Regular;30" />
		</screen>"""
	def createSummary(self):
		return FrontTest_ultimo_Summary
	def KeyTimeOut(self):
		# expected press did not arrive in time -> show which step failed
		if self.step == 1:
			self["text"].setText(("Front STANDBY ERROR\nPress exit!"))
			self.summaries.setText("Front STANDBY ERROR\nPress exit!")
		elif self.step == 2 :
			self["text"].setText(("Front CH - ERROR\nPress exit!"))
			self.summaries.setText("Front CH - ERROR\nPress exit!")
		elif self.step == 3:
			self["text"].setText(("Front CH + ERROR\nPress exit!"))
			self.summaries.setText("Front CH + ERROR\nPress exit!")
		elif self.step == 4 :
			self["text"].setText(("Front VOL - ERROR\nPress exit!"))
			self.summaries.setText("Front VOL - ERROR\nPress exit!")
		elif self.step == 5:
			self["text"].setText(("Front VOL + ERROR\nPress exit!"))
			self.summaries.setText("Front VOL + ERROR\nPress exit!")
		self.step = 0
	def keypower(self):
		# STANDBY (step 1 -> 2)
		if self.step== 1:
			self.keytimeout.stop()
			self.keytimeout.start(5000,True)
			self.step = 2
			self["text"].setText(_("Press Front CH -"))
			self.summaries.setText(_("Press Front CH -"))
	def keyright(self):
		# CH+ (step 3 -> 4)
		if self.step== 3:
			self.keytimeout.stop()
			self.keytimeout.start(5000,True)
			self.step = 4
			self["text"].setText(_("Press Front VOL -"))
			self.summaries.setText(_("Press Front VOL -"))
	def keyleft(self):
		# CH- (step 2 -> 3)
		if self.step== 2:
			self.keytimeout.stop()
			self.keytimeout.start(5000,True)
			self.step = 3
			self["text"].setText(_("Press Front CH +"))
			self.summaries.setText(_("Press Front CH +"))
	def keyvolup(self):
		# VOL+ (step 5 -> 6): start LED blink for visual confirmation
		if self.step== 5:
			self.keytimeout.stop()
			self.step = 6
			self.fronttimer.start(1000,True)
			self["text"].setText(_("Front LED OK?\n\nyes-ok\nno-exit"))
			self.summaries.setText(_("Front LED OK?"))
	def keyvoldown(self):
		# VOL- (step 4 -> 5)
		if self.step== 4:
			self.keytimeout.stop()
			self.keytimeout.start(5000,True)
			self.step = 5
			self["text"].setText(_("Press Front VOL +"))
			self.summaries.setText(_("Press Front VOL +"))
	def FrontAnimate(self):
		# asymmetric blink: 1.5s on, 0.5s off
		if (self.frontturnonoff==0):
			eSctest.getInstance().turnon_VFD()
			self.frontturnonoff = 1
			self.fronttimer.start(1500,True)
		else:
			self.frontturnonoff = 0
			eSctest.getInstance().turnoff_VFD()
			self.fronttimer.start(500,True)
rstest = 0	# module-level result flag: set to 1 when the RS232 test passes
import select
class RS232Test(Screen):
	"""RS232 serial port test: waits up to 10 seconds for a newline byte
	on /dev/ttyS0 (the operator presses Enter on an attached terminal).
	Result is stored in the module-level 'rstest' flag."""
	skin = """
		<screen name="RS232Test" position="center,center" size="260,100" title="RS232 Test" >
			<widget name="text" position="10,10" size="240,80" font="Regular;30" />
		</screen>"""
	step=1
	def __init__(self, session):
		self["actions"] = ActionMap(["DirectionActions", "OkCancelActions"],
		{
			"cancel": self.keyCancel,
		}, -2)
		Screen.__init__(self, session)
		self["text"]=Label(("Press \"Enter\" Key"))
		# defer the blocking serial check until after the screen is shown
		self.timer = eTimer()
		self.timer.callback.append(self.checkrs232)
		self.timer.start(100, True)
	def checkrs232(self):
		"""Block (via select, 10s timeout) for one byte from the serial port;
		pass only if it is a newline."""
		global rstest
		try:
			rs=open('/dev/ttyS0','r')
			rd = [rs]
			r,w,e = select.select(rd, [], [], 10)
			if r:
				input = rs.read(1)
				if input == "\n":
					rstest = 1
				else:
					rstest = 0
			else:
				# nothing received within the timeout
				rstest = 0
			rs.close()
		except:
			# best effort: close the port if it was opened before the failure
			try:
				if rs:
					rs.close()
			except:
				pass
			print 'except error'
			rstest = 0
		if rstest == 0:
			self.session.open( MessageBox, _("RS232 Test Failed!\nPress 'EXIT' button!"), MessageBox.TYPE_ERROR)
		self.close()
	def keyCancel(self):
		self.close()
class AgingTestSummary(Screen):
	"""Minimal LCD summary for AgingTest: shows only the parent screen title."""
	skin = """
	<screen name="AgingTestSummary" position="0,0" size="132,64" id="1">
		<widget source="parent.Title" render="Label" position="6,0" size="132,64" font="Regular;18" halign="center" valign="center"/>
	</screen>"""
class AgingTestSummaryVFD(Screen):
	"""VFD summary for AgingTest: zap mode, countdown, current tuner and
	lock-error labels, plus SNR/AGC rendered from the frontend status."""
	skin = """
	<screen name="AgingTestSummaryVFD" position="0,0" size="256,64" id="1">
		<eLabel text="MODE: " position="0,0" size="50,16" font="VFD;14" />
		<widget name="zapmode" position="51,0" size="70,16" font="VFD;14" halign="left" valign="center"/>
		<widget name="timer" position="152,0" size="124,16" font="VFD;14" halign="left" valign="center"/>
		<eLabel text="TUNER: " position="0,16" size="50,16" font="VFD;14" />
		<widget name="curtuner" position="51,16" size="200,16" font="VFD;14" halign="left" valign="center"/>
		<!-- Signal Quality -->
		<eLabel text="SNR: " position="0,32" size="45,16" font="VFD;14" transparent="1" halign="left" valign="center"/>
		<widget source="session.FrontendStatus" render="Label" position="46,32" size="40,16" font="VFD;14" transparent="1">
			<convert type="FrontendInfo">SNRdB</convert>
		</widget>
		<!-- AGC -->
		<eLabel text="AGC: " position="0,48" size="45,16" font="VFD;14" transparent="1" noWrap="1" />
		<widget source="session.FrontendStatus" render="Label" position="46,48" size="40,16" font="VFD;14" transparent="1" noWrap="1">
			<convert type="FrontendInfo">AGC</convert>
		</widget>
		<widget name="error" position="90,32" size="166,32" font="VFD;18" halign="center" valign="center"/>
	</screen>"""
	def __init__(self, session, parent):
		Screen.__init__(self, session)
		# all four text widgets start out empty
		for widget in ("zapmode", "timer", "curtuner", "error"):
			self[widget] = Label("")
	def setText(self, label = "zapmode", text = ""):
		# coerce non-string payloads before handing them to the Label
		self[label].setText(text if isinstance(text, str) else str(text))
class AgingTest(Screen):
	"""Burn-in ('aging') test screen for tuner 0.

	Zaps through a small per-tuner-type channel list either manually
	(volume keys) or automatically every self.zapping_interval seconds,
	and checks frontend lock 2s after each tune.  Pause exits (close(0)),
	Stop exits and requests a factory reset (close(1)).
	"""
	skin = """
		<screen position="center,center" size="350,220" title="Aging Test" >
			<widget name="text1" position="10,10" size="340,40" font="Regular;30" halign = "center" valign = "center"/>
			<widget name="text2" position="10,60" size="340,40" font="Regular;30" halign = "center" valign = "center"/>
			<!-- Signal Quality -->
			<eLabel text="SNR : " position="40,120" size="60,25" font="Regular;25" transparent="1" />
			<widget source="session.FrontendStatus" render="Label" position="100,120" size="60,25" font="Regular;25" transparent="1">
				<convert type="FrontendInfo">SNRdB</convert>
			</widget>
			<!-- AGC -->
			<eLabel text="AGC : " position="180,120" size="60,25" font="Regular;25" transparent="1" noWrap="1" />
			<widget source="session.FrontendStatus" render="Label" position="240,120" size="60,25" font="Regular;25" transparent="1" noWrap="1">
				<convert type="FrontendInfo">AGC</convert>
			</widget>
			<widget name="text3" position="10,150" size="330,35" font="Regular;28" halign = "center" valign = "center"/>
			<widget name="text4" position="10,185" size="330,35" font="Regular;20" halign = "center" valign = "center"/>
		</screen>"""
	step=1
	def __init__(self, session, model):
		Screen.__init__(self, session)
		self["actions"] = ActionMap(["MediaPlayerActions","GlobalActions", "MediaPlayerSeekActions", "ChannelSelectBaseActions"],
		{
			"pause": self.keyEnd,
			"stop": self.keyFinish,
			"volumeUp": self.keyVolumeup,
			"volumeDown": self.keyVolumedown,
			"volumeMute": self.nothing,
			"seekFwd" : self.keyFFW,
		}, -2)
		self.model = model
		self["text1"]=Label(("Exit - Press Pause Key"))
		self["text2"]=Label(("Reset - Press Stop Key"))
		self["text3"]=Label(("Manual zapping"))
		self["text4"]=Label((" "))
		self.avswitch = AVSwitch()
		self.curzappingmode = 'manual'
		self.zapping_interval = 300	# seconds between automatic zaps
		self.error = 0	# accumulated lock failures
		self.timeout = self.zapping_interval
		self.tunelist = []
		# (label, service reference) candidates per tuner type
		self.zappinglist = {
			'DVB-S2' : [
				('S-1','1:0:19:1325:3EF:1:0x64af79:0:0:0:'), # astra hd
				('S-2','1:0:19:1324:3EF:1:0x64af79:0:0:0:'), # anixe hd
				('S-3','1:0:19:1331:3EF:1:0x64af79:0:0:0:') # servus hd
			],
			'DVB-C': [
				('C-1','1:0:19:1325:3EF:1:FFFF029A:0:0:0:'), # astra hd (DVB-C)
				('C-2','1:0:19:1324:3EF:1:FFFF029A:0:0:0:') # anixe hd (DVB-C)
			]
		}
		# lock is verified 2s after each tune attempt
		self.LockCheckTimer = eTimer()
		self.LockCheckTimer.callback.append(self.LockCheck)
		# 1s tick driving the auto-zap countdown
		self.nextzappingtimer = eTimer()
		self.nextzappingtimer.callback.append(self.checktimeout)
		self.checkTunerType()
		self.makeTunelList()
		self.playservice(service = self.tunelist[0][1])
#		self.logmessage("AGING TEST START")
	def createSummary(self):
		# model 4 has a VFD and gets the richer summary screen
		if self.model == 4:
			self.onShown.append(self.VFDinit)
			return AgingTestSummaryVFD
		else:
			return AgingTestSummary
	def setTextVFD(self, name ,text):
		# forward text to the VFD summary only on models that have one
		if self.model == 4:
			self.summaries.setText(name ,text)
	def VFDinit(self):
		# populate the VFD summary once the screen is shown
		if self.curzappingmode == 'manual' :
			self.summaries.setText("zapmode", 'MANUAL')
		else:
			self.summaries.setText("zapmode", 'AUTO')
		self.summaries.setText("timer", "Timer %d sec"%self.timeout)
		self.summaries.setText("curtuner", "%s, CHANNEL - %s)"%(self.NimType[0], self.tunelist[0][0]))
	def checkTunerType(self):
		"""Parse /proc/bus/nim_sockets into self.NimType {slot: type-string}."""
		self.NimType={}
		nimfile = open("/proc/bus/nim_sockets")
		for line in nimfile.readlines():
			print line
			if line == "":
				break
			if line.strip().startswith("NIM Socket"):
				parts = line.strip().split(" ")
				current_slot = int(parts[2][:-1])
			elif line.strip().startswith("Type:"):
				self.NimType[current_slot]= str(line.strip()[6:])
			elif line.strip().startswith("empty"):
				self.NimType.pop(current_slot)
		nimfile.close()
	def makeTunelList(self):
		"""Pick the zapping list matching tuner 0's type."""
		# NOTE(review): for a DVB-T (or unrecognized) tuner 'tunetype' is
		# never assigned, so the except branch below raises a NameError when
		# formatting the message -- confirm and initialize tunetype.
		if self.NimType[0].startswith("DVB-S"):
			tunetype = "DVB-S2"
		elif self.NimType[0].startswith("DVB-C"):
			tunetype = "DVB-C"
		elif self.NimType[0].startswith("DVB-T"):
#			tunetype = "DVB-T"
			pass # fix later..
		try :
			self.tunelist = self.zappinglist[tunetype]
		except:
			print "[FactoryTest] ERROR, index error (%s)"%tunetype
	def nextZapping(self, zap_rev = False):
		"""Rotate self.tunelist (forward or backward) and tune its head."""
		if zap_rev:
			tunelistlen = len(self.tunelist)
			nextservice = self.tunelist.pop(tunelistlen-1)
			self.tunelist.insert(0,nextservice)
		else:
			currentservice = self.tunelist.pop(0)
			self.tunelist.append(currentservice)
		self.playservice(service=self.tunelist[0][1])
		if self.curzappingmode == 'auto':
			self.timeout = self.zapping_interval
		self.setTextVFD("curtuner", "%s, CHANNEL - %s)"%(self.NimType[0], self.tunelist[0][0]))
	def checktimeout(self):
		# 1s tick: zap when the countdown hits zero, otherwise count down
		if self.timeout == 0:
			self.nextZapping()
		else:
			self.timeout -=1
			self["text4"].setText("remain %d sec for next tuning" %self.timeout)
			self.setTextVFD("timer", "Timer %d sec"%self.timeout)
	def playservice(self,service = '1:0:19:1325:3EF:1:0x64af79:0:0:0:'):
		"""Tune the given service reference and schedule a lock check in 2s."""
		ref = eServiceReference(service)
		self.session.nav.playService(ref)
		self.avswitch.setAspectRatio(6)
		self.avswitch.setColorFormat(0)
		self.LockCheckTimer.start(2000,True)
	def LockCheck(self):
		# 0 / -1 from getFrontendstatus means no lock
		result = eSctest.getInstance().getFrontendstatus(0)
		if result == 0 or result == -1:
			if self.model == 4:
				# VFD models just count errors and keep running
				self.error +=1
				print "AGINGTEST - LOCK FAIL(%d)"%self.error
				self.setTextVFD("error", "LOCK FAIL(%d)"%self.error)
#				logmsg = "[LOCKFAIL][%d] TYPE : %s, CH : %s, ZAPMODE: %s"%(self.error,self.NimType[0],self.tunelist[0][0],self.curzappingmode)
#				self.logmessage(logmsg)
			else:
				self.session.open( MessageBox, _("Locking Fail Error"), MessageBox.TYPE_ERROR)
	def logmessage(self,msg):
		# intentionally a no-op in this variant (see AgingTest_mode2)
		pass
	def nothing(self):
		print "nothing"
	def keyFFW(self):
		"""Toggle between manual and automatic zapping."""
		if self.curzappingmode == 'auto':
			self.curzappingmode = 'manual'
			self.nextzappingtimer.stop()
			self.timeout = self.zapping_interval
			self["text3"].setText("Manual zapping")
			self["text4"].setText("")
			self.setTextVFD("zapmode", 'MANUAL')
			self.setTextVFD("timer", "")
		elif self.curzappingmode == 'manual':
			self.curzappingmode = 'auto'
			self["text3"].setText("Auto zapping")
			self["text4"].setText("remain %d sec for next tuning" %self.timeout)
			self.setTextVFD("zapmode", 'AUTO')
			self.setTextVFD("timer", "Timer %d sec"%self.timeout)
#			self.timeout = self.zapping_interval
			self.nextzappingtimer.start(1000)
	def keyVolumeup(self):
		self.nextZapping(zap_rev = False)
	def keyVolumedown(self):
		self.nextZapping(zap_rev = True)
	# NOTE(review): duplicate definition -- this silently replaces the
	# identical nothing() defined above; one of the two can be removed.
	def nothing(self):
		print "nothing"
	def keyEnd(self):
		self.session.nav.stopService()
		self.close(0)	# exit
	def keyFinish(self):
		self.session.nav.stopService()
		self.close(1)	# exit and reset
class AgingTest_mode2_Summary(Screen):
	"""Minimal LCD summary for AgingTest_mode2: shows only the parent title."""
	skin = """
	<screen name="AgingTest_mode2_Summary" position="0,0" size="132,64" id="1">
		<widget source="parent.Title" render="Label" position="6,0" size="132,64" font="Regular;18" halign="center" valign="center"/>
	</screen>"""
class AgingTest_mode2_Summary_VFD(Screen):
	"""VFD summary for AgingTest_mode2: zap mode, countdown, current tuner
	and lock-error labels, plus SNR/AGC from the frontend status."""
	skin = """
	<screen name="AgingTest_mode2_Summary_VFD" position="0,0" size="256,64" id="1">
		<eLabel text="MODE: " position="0,0" size="50,16" font="VFD;14" />
		<widget name="zapmode" position="51,0" size="70,16" font="VFD;14" halign="left" valign="center"/>
		<widget name="timer" position="152,0" size="124,16" font="VFD;14" halign="left" valign="center"/>
		<eLabel text="TUNER: " position="0,16" size="50,16" font="VFD;14" />
		<widget name="curtuner" position="51,16" size="200,16" font="VFD;14" halign="left" valign="center"/>
		<!-- Signal Quality -->
		<eLabel text="SNR: " position="0,32" size="45,16" font="VFD;14" transparent="1" halign="left" valign="center"/>
		<widget source="session.FrontendStatus" render="Label" position="46,32" size="40,16" font="VFD;14" transparent="1">
			<convert type="FrontendInfo">SNRdB</convert>
		</widget>
		<!-- AGC -->
		<eLabel text="AGC: " position="0,48" size="45,16" font="VFD;14" transparent="1" noWrap="1" />
		<widget source="session.FrontendStatus" render="Label" position="46,48" size="40,16" font="VFD;14" transparent="1" noWrap="1">
			<convert type="FrontendInfo">AGC</convert>
		</widget>
		<widget name="error" position="90,32" size="166,32" font="VFD;18" halign="center" valign="center"/>
	</screen>"""
	def __init__(self, session, parent):
		Screen.__init__(self, session)
		# all four text widgets start out empty
		for widget in ("zapmode", "timer", "curtuner", "error"):
			self[widget] = Label("")
	def setText(self, label = "zapmode", text = ""):
		# coerce non-string payloads before handing them to the Label
		self[label].setText(text if isinstance(text, str) else str(text))
from Components.Input import Input
from Screens.InputBox import InputBox
class AgingTest_mode2(Screen):
skin = """
<screen position="center,center" size="370,190" title="Aging Test 2" >
<widget name="text1" position="10,10" size="350,40" font="Regular;30" halign="center" valign="center"/>
<widget name="text2" position="10,60" size="350,30" font="Regular;25" halign="center" valign="center"/>
<!-- Signal Quality -->
<eLabel text="SNR : " position="50,100" size="60,25" font="Regular;25" transparent="1" />
<widget source="session.FrontendStatus" render="Label" position="110,100" size="60,25" font="Regular;25" transparent="1">
<convert type="FrontendInfo">SNRdB</convert>
</widget>
<!-- AGC -->
<eLabel text="AGC : " position="190,100" size="60,25" font="Regular;25" transparent="1" noWrap="1" />
<widget source="session.FrontendStatus" render="Label" position="250,100" size="60,25" font="Regular;25" transparent="1" noWrap="1">
<convert type="FrontendInfo">AGC</convert>
</widget>
<widget name="text3" position="10,130" size="350,25" font="Regular;18" halign="center" valign="center"/>
<widget name="text4" position="10,155" size="350,25" font="Regular;18" halign="center" valign="center"/>
</screen>"""
step=1
def __init__(self, session,model = 4):
self["actions"] = ActionMap(["MediaPlayerActions","GlobalActions","InfobarMenuActions","ChannelSelectBaseActions"],
{
"pause": self.keyEnd,
"stop": self.keyFinish,
"volumeUp": self.keyVolumeup,
"volumeDown": self.keyVolumedown,
"volumeMute": self.nothing,
"mainMenu" : self.keyMenu,
"nextBouquet" : self.keyChannelup,
"prevBouquet" : self.keyChannelDown,
"showFavourites" : self.keyBlue,
}, -2)
Screen.__init__(self, session)
self.model = model
self.slotindex = { 0 : 'A', 1 : 'B', 2 : 'C', 3: 'D'}
self.curtuner = 0
self.isChangeTuner = True
self.isChangeChannel = False
self.zapping_interval = 300
self.timeout = self.zapping_interval
self.avswitch = AVSwitch()
self.error = 0
self.LockCheckTimer = eTimer()
self.LockCheckTimer.callback.append(self.LockCheck)
self.nextzappingtimer = eTimer()
self.nextzappingtimer.callback.append(self.checktimeout)
self.tunelist_db = {
'DVB-S2' : [
[
('1-1','1:0:19:1325:3EF:1:0x64af79:0:0:0:'), # astra hd
('1-2','1:0:19:1324:3EF:1:0x64af79:0:0:0:'), # anixe hd
('1-3','1:0:19:1331:3EF:1:0x64af79:0:0:0:') # servus hd
],
[
('2-1','1:0:19:1325:3EF:1:0xC00000:0:0:0:'), # astra hd
('2-2','1:0:19:1324:3EF:1:0xC00000:0:0:0:'), # anixe hd
('2-3','1:0:19:1331:3EF:1:0xC00000:0:0:0:') # servus hd
],
[
('3-1','1:0:19:1325:3EF:1:0x282AF79:0:0:0:'), # astra hd
('3-2','1:0:19:1324:3EF:1:0x282AF79:0:0:0:'), # anixe hd
('3-3','1:0:19:1331:3EF:1:0x282AF79:0:0:0:') # servus hd
],
[
('4-1','1:0:19:1325:3EF:1:0x02d0af79:0:0:0:'), # astra hd, Panamsat 4 (72.0E)
('4-2','1:0:19:1324:3EF:1:0x02d0af79:0:0:0:'), # anixe hd
('4-3','1:0:19:1331:3EF:1:0x02d0af79:0:0:0:') # servus hd
]
# namespace : 0x02d0af79, 720 # Panamsat 7,10 (68.5E)
],
'DVB-C': [
[
('C-1','1:0:19:1325:3EF:1:FFFF029A:0:0:0:'), # astra hd (DVB-C)
('C-2','1:0:19:1324:3EF:1:FFFF029A:0:0:0:') # anixe hd (DVB-C)
]
]
}
self.tunelist = {}
self.NimType={}
self.checkTunerType()
self.makeTunelList()
self.playservice(service = self.tunelist[self.curtuner][0][1])
self.curzappingmode = 'auto'
self["text1"]=Label("ZAPPING MODE : AUTO")
self["text2"]=Label("CURRENT TUNER : %s (%s)"%(self.slotindex[self.curtuner], self.NimType[self.curtuner]))
self["text3"]=Label("remain %d sec for next tuning" %self.timeout)
self["text4"]=Label("Press 'stop' key for exit")
self.nextzappingtimer.start(1000)
self.logmessage("AGING TEST START")
def createSummary(self):
if self.model == 4:
self.onShown.append(self.VFDinit)
return AgingTest_mode2_Summary_VFD
else:
return AgingTest_mode2_Summary
def VFDinit(self):
if self.curzappingmode == 'manual' :
self.summaries.setText("zapmode", 'MANUAL')
else:
self.summaries.setText("zapmode", 'AUTO')
self.summaries.setText("timer", "Timer %d sec"%self.timeout)
self.summaries.setText("curtuner", "%s (%s, CHANNEL - %s)"%(self.slotindex[self.curtuner], self.NimType[self.curtuner], self.tunelist[self.curtuner][0][0]))
def setTextVFD(self,name ,text):
if self.model == 4:
self.summaries.setText(name, text)
def checkTunerType(self):
nimfile = open("/proc/bus/nim_sockets")
for line in nimfile.readlines():
print line
if line == "":
break
if line.strip().startswith("NIM Socket"):
parts = line.strip().split(" ")
current_slot = int(parts[2][:-1])
elif line.strip().startswith("Type:"):
self.NimType[current_slot]= str(line.strip()[6:])
elif line.strip().startswith("empty"):
self.NimType.pop(current_slot)
nimfile.close()
def makeTunelList(self):
for slot, type in self.NimType.items():
if type.startswith('DVB-S'):
tunelist_type = 'DVB-S2'
elif type.startswith('DVB-C'):
tunelist_type = 'DVB-C'
elif type.startswith('DVB-T'):
tunelist_type = 'DVB-T'
try :
self.tunelist[slot] = self.tunelist_db[tunelist_type].pop(0)
except:
print "[FactoryTest] ERROR, pop from empty list (%s)"%tunelist_type
print "tunelist : "
print self.tunelist
def nextZapping(self, mode = 'auto', changeTuner = True, changeService = False, reverse_tuner = False, reverse_service = False):
if mode == 'manual' and changeTuner or mode == 'auto' and self.isChangeTuner:
if reverse_tuner:
self.curtuner -=1
else:
self.curtuner +=1
if self.curtuner >= len(self.tunelist):
self.curtuner = 0
if self.curtuner < 0:
self.curtuner = len(self.tunelist)-1
if mode == 'manual' and changeService or mode == 'auto' and self.isChangeChannel:
if reverse_service:
tunelistlen = len(self.tunelist[self.curtuner])
nextservice = self.tunelist[self.curtuner].pop(tunelistlen-1)
self.tunelist[self.curtuner].insert(0,nextservice)
else:
currentservice = self.tunelist[self.curtuner].pop(0)
self.tunelist[self.curtuner].append(currentservice)
self.playservice(service=self.tunelist[self.curtuner][0][1])
if self.curzappingmode == 'auto':
self.timeout = self.zapping_interval
self["text2"].setText("CURRENT TUNER : %s (%s)"%(self.slotindex[self.curtuner], self.NimType[self.curtuner]))
self.setTextVFD("curtuner", "%s (%s, CHANNEL - %s)"%(self.slotindex[self.curtuner], self.NimType[self.curtuner], self.tunelist[self.curtuner][0][0]))
def checktimeout(self):
if self.timeout == 0:
self.nextZapping(mode = 'auto')
else:
self.timeout -=1
self["text3"].setText("remain %d sec for next tuning" %self.timeout)
self.setTextVFD("timer", "Timer %d sec"%self.timeout)
def playservice(self,service = '1:0:19:1325:3EF:1:0x64af79:0:0:0:'):
ref = eServiceReference(service)
self.session.nav.playService(ref)
self.avswitch.setAspectRatio(6)
self.avswitch.setColorFormat(0)
self.LockCheckTimer.start(2000,True)
def LockCheck(self):
    """LockCheckTimer callback: record a lock failure on the current tuner.

    getFrontendstatus() returning 0 or -1 is treated as 'no lock'.
    """
    result = eSctest.getInstance().getFrontendstatus(self.curtuner)
    if result == 0 or result == -1:
        if self.model == 4:
            # model 4: count the failure and show it on the summary widget.
            self.error += 1
            print "AGINGTEST - LOCK FAIL(%d)"%self.error
            self.summaries.setText("error", "LOCK FAIL(%d)"%self.error)
            logmsg = "[LOCKFAIL][%d] SLOT : %d, TYPE : %s, CH : %s, ZAPMODE: %s"%(self.error,self.curtuner,self.NimType[self.curtuner],self.tunelist[self.curtuner][0][0],self.curzappingmode)
            self.logmessage(logmsg)
        else:
            # Other models: log the failure and pop up an error dialog too.
            self.error += 1
            print "AGINGTEST - LOCK FAIL(%d)"%self.error
            logmsg = "[LOCKFAIL][%d] SLOT : %d, TYPE : %s, CH : %s, ZAPMODE: %s"%(self.error,self.curtuner,self.NimType[self.curtuner],self.tunelist[self.curtuner][0][0],self.curzappingmode)
            self.logmessage(logmsg)
            self.session.open( MessageBox, _("Locking Fail Error"), MessageBox.TYPE_ERROR)
def logmessage(self,msg):
    """Append msg to agingTest.log on the first writable USB device found.

    Probes the /autofs/sd?1 mount points with a write/read round-trip of a
    marker file before trusting one; silently does nothing when no writable
    device is present.
    """
    print "[logmessage]",msg
    devpath = None
    checklist = ["/autofs/sda1", "/autofs/sdb1", "/autofs/sdc1", "/autofs/sdd1", "/autofs/sde1"]
    for dev in checklist:
        try:
            if fileExists(dev):
                if access(dev,F_OK|R_OK|W_OK):
                    # Round-trip check: write a marker file, read it back.
                    dummy=open(dev+"/dummy03","w")
                    dummy.write("check")
                    dummy.close()
                    dummy=open(dev+"/dummy03","r")
                    if dummy.readline()=="check":
                        # NOTE(review): on success we break immediately, so
                        # this read handle is never closed and the dummy03
                        # marker file is left behind on the device.
                        print dev," - rw check ok"
                        devpath = dev
                        break
                    else:
                        print dev," - read check error"
                    dummy.close()
                    system("rm "+dev+"/dummy03")
                else:
                    print dev," - rw access error"
            else:
                pass
        except:
            print dev," - exceptional error"
    if devpath:
        # msg is interpolated unquoted into a shell command; acceptable for
        # the fixed log strings built in this file, but not safe for
        # arbitrary text.
        cmd = "echo %s >> %s/agingTest.log" % (msg,devpath)
        print "[logmessage] %s(%s)"%(cmd,devpath)
        system(cmd)
def nothing(self):
    """Placeholder handler for keys that are deliberately ignored."""
    print("nothing")
def keyBlue(self):
    """Blue key: toggle zapping between AUTO (timer driven) and MANUAL."""
    if self.curzappingmode == 'auto':
        # auto -> manual: stop the 1-second countdown timer.
        self.curzappingmode = 'manual'
        self["text1"].setText("ZAPPING MODE : MANUAL")
        self["text3"].setText("Press 'stop' key for exit")
        self["text4"].setText("")
        self.setTextVFD("zapmode", 'MANUAL')
        self.setTextVFD("timer", "")
        self.nextzappingtimer.stop()
        self.timeout = self.zapping_interval
    elif self.curzappingmode == 'manual':
        # manual -> auto: restart the 1-second countdown timer.
        self.curzappingmode = 'auto'
        self["text1"].setText("ZAPPING MODE : AUTO")
        self["text3"].setText("remain %d sec for next tuning" %self.timeout)
        self["text4"].setText("Press 'stop' key for exit")
        self.setTextVFD("zapmode", 'AUTO')
        self.setTextVFD("timer", "Timer %d sec"%self.timeout)
        self.timeout = self.zapping_interval
        self.nextzappingtimer.start(1000)
def keyVolumeup(self):
    """Volume up: next channel on the current tuner (manual mode)."""
    self.nextZapping(mode = 'manual', changeTuner = False, changeService = True)
def keyVolumedown(self):
    """Volume down: previous channel on the current tuner (manual mode)."""
    self.nextZapping(mode = 'manual', changeTuner = False, changeService = True, reverse_service = True)
def keyChannelup(self):
    """Channel up: switch to the next tuner, keep the channel position."""
    self.nextZapping(mode = 'manual', changeTuner = True, changeService = False)
def keyChannelDown(self):
    """Channel down: switch to the previous tuner, keep the channel position."""
    self.nextZapping(mode = 'manual', changeTuner = True, changeService = False, reverse_tuner = True)
def keyMenu(self):
    """Menu key: open the aging-test settings dialog.

    The dialog closes through self.menuCallback with
    (tuner, channel, interval) or (None, None, None) on cancel.
    """
    self.session.openWithCallback(self.menuCallback, AgingTest_mode2_setmenu, tuner = self.isChangeTuner, channel = self.isChangeChannel, interval = self.zapping_interval)
def menuCallback(self, tuner, channel, interval):
    """Apply settings returned by AgingTest_mode2_setmenu.

    Each argument is either the new value or None (meaning: keep the
    current setting, i.e. the dialog was cancelled).
    """
    updates = (("isChangeTuner", tuner), ("isChangeChannel", channel))
    for attr, value in updates:
        if value is not None:
            setattr(self, attr, value)
    if interval is not None:
        self.zapping_interval = interval
        # Restart the countdown with the freshly chosen interval.
        self.timeout = self.zapping_interval
def keyEnd(self):
    """Stop key: stop playback and leave the aging-test screen."""
    self.session.nav.stopService()
    self.close()
def keyFinish(self):
    """Finish key: identical to keyEnd, bound to a different action."""
    self.session.nav.stopService()
    self.close()
from Components.ConfigList import ConfigListScreen
from Components.config import ConfigInteger, ConfigYesNo, getConfigListEntry, NoSave
class AgingTest_mode2_setmenu(Screen,ConfigListScreen):
    """Settings dialog for the aging test.

    Lets the operator toggle tuner/channel hopping and set the zapping
    interval.  Closes with (tuner, channel, interval) on save or
    (None, None, None) on cancel; the opener applies the result in
    menuCallback().
    """
    skin = """
<screen position="center,center" size="370,190" title="Aging Test - settings" >
<widget name="config" zPosition="2" position="10,10" size="360,180" scrollbarMode="showOnDemand" transparent="1" />
<ePixmap pixmap="Vu_HD/buttons/green.png" position="50,135" size="25,25" alphatest="on" />
<ePixmap pixmap="Vu_HD/buttons/red.png" position="215,135" size="25,25" alphatest="on" />
<widget source="key_red" render="Label" position="75,135" zPosition="1" size="90,25" font="Regular;20" halign="center" valign="center" transparent="1" />
<widget source="key_green" render="Label" position="240,135" zPosition="1" size="90,25" font="Regular;20" halign="center" valign="center" transparent="1" />
</screen>"""

    def __init__(self,session, tuner = True, channel = False, interval = 300):
        """tuner/channel: current hop settings; interval: current zap period (s)."""
        Screen.__init__(self,session)
        self.session = session
        self.tuner = tuner
        self.channel = channel
        self.zap_interval = interval
        # NOTE(review): the label texts look swapped relative to the actions
        # below -- "red" triggers keyCancel but key_red shows "Save", and the
        # skin pairs green.png with the key_red widget.  Confirm intended.
        self["key_red"] = StaticText(_("Save"))
        self["key_green"] = StaticText(_("Cancel"))
        self["shortcuts"] = ActionMap(["ShortcutActions", "SetupActions" ],
        {
            "ok": self.keySave,
            "cancel": self.keyCancel,
            "red": self.keyCancel,
            "green": self.keySave,
        }, -2)
        self.list = []
        ConfigListScreen.__init__(self, self.list,session = self.session)
        # NoSave: these are transient test settings, never written to disk.
        self.config_tuner = NoSave(ConfigYesNo(default = self.tuner))
        self.config_channel = NoSave(ConfigYesNo(default = self.channel))
        self.config_zap_interval = NoSave(ConfigInteger(default = self.zap_interval, limits=(5, 9999) ) )
        self.configSetup()

    def configSetup(self):
        """(Re)build the config list shown in the dialog."""
        self.list = []
        self.setupEntryTuner = getConfigListEntry(_("change tuner on timeout"), self.config_tuner )
        self.setupEntryChannel = getConfigListEntry(_("change channel on timeout"), self.config_channel )
        self.setupEntryZapInterval = getConfigListEntry(_("zapping interval (sec) "), self.config_zap_interval )
        self.list.append( self.setupEntryTuner )
        self.list.append( self.setupEntryChannel )
        self.list.append( self.setupEntryZapInterval )
        self["config"].list = self.list
        self["config"].l.setList(self.list)

    def keySave(self):
        """Close the dialog returning the chosen values."""
        self.close(self.config_tuner.value, self.config_channel.value, self.config_zap_interval.value)

    def keyCancel(self):
        """Close the dialog without changing anything (all None)."""
        self.close(None, None, None)
class TestTuneMenu(Screen):
skin = """
<screen position="350,230" size="550,300" title="Tuning Test" >
<widget name="testlist" position="10,0" size="440,250" itemHeight="35"/>
<widget name="resultlist" position="470,0" size="60,250" itemHeight="35"/>
<widget source="text" render="Label" position="100,270" size="450,30" font="Regular;22" />
</screen>"""
def __init__(self ,session ,tuneInfo, tunelist,NimType):
self.session = session
self.NimType = NimType
self.tuneInfo = tuneInfo
self.tunelist = tunelist
self.model = 4
self["actions"] = NumberActionMap(["OkCancelActions","WizardActions","NumberActions"],
{
"left": self.nothing,
"right":self.nothing,
"ok": self.TestAction,
"cancel": self.keyCancel,
"up": self.keyup,
"down": self.keydown,
"0": self.numberaction,
"1": self.numberaction,
"2": self.numberaction,
"3": self.numberaction,
"4": self.numberaction,
"5": self.numberaction,
"6": self.numberaction,
"7": self.numberaction,
"8": self.numberaction,
"9": self.numberaction,
}, -2)
Screen.__init__(self, session)
self.text = _("Press 'EXIT' key to finish tune test.")
self["text"] = StaticText(self.text)
self.createConfig()
session.nav.stopService() # try to disable foreground service
self.tunemsgtimer = eTimer()
self.tunemsgtimer.callback.append(self.tunemsg)
self.camstep = 1
self.camtimer = eTimer()
self.camtimer.callback.append(self.cam_state)
self.tunerlock = 0
self.tuningtimer = eTimer()
self.tuningtimer.callback.append(self.updateStatus)
self.setSourceVar()
self.avswitch = AVSwitch()
def createConfig(self):
self.menulength= len(self.tunelist)
self["testlist"] = MenuList(self.tunelist)
self.rlist = []
for x in range(self.menulength):
self.rlist.append((".."))
self["resultlist"] = TestResultList(self.rlist)
def TestAction(self):
print "line - ",self["testlist"].getCurrent()[1]
self.currentindex = index = self["testlist"].getCurrent()[1]
result = 0
self.TestTune(index)
def nothing(self):
print "nothing"
def keyup(self):
print "self.menulength = ",self.menulength
print "self[\"testlist\"].getCurrent()[1] = ",self["testlist"].getCurrent()[1]
if self["testlist"].getCurrent()[1]==0:
self["testlist"].moveToIndex(self.menulength-1)
self["resultlist"].moveToIndex(self.menulength-1)
else:
self["testlist"].up()
self["resultlist"].up()
def keydown(self):
print "self.menulength = ",self.menulength
print "self[\"testlist\"].getCurrent()[1] = ",self["testlist"].getCurrent()[1]
if self["testlist"].getCurrent()[1]==(self.menulength-1):
self["testlist"].moveToIndex(0)
self["resultlist"].moveToIndex(0)
else:
self["testlist"].down()
self["resultlist"].down()
def numberaction(self, number):
if number >= self.menulength:
return
index = int(number)
self["testlist"].moveToIndex(index)
self["resultlist"].moveToIndex(index)
def keyCancel(self):
print "testtunemenu exit"
if not '..' in self.rlist and not 'fail' in self.rlist:
self.close(True)
else:
self.close(False)
# if self.oldref is not None:
# self.session.nav.playService(self.oldref)
def TestTune(self,index):
ref = eServiceReference("1:0:19:1324:3EF:1:C00000:0:0:0")
self.session.nav.stopService() # try to disable foreground service
getTuneInfo=self.tuneInfo[index]
if getTuneInfo["cam"] is True:
self.camstep = 1
self.camtimer.start(100,True)
if getTuneInfo["type"].startswith("DVB-S"):
if getTuneInfo["pol"] == "H":
ref.setData(0,1)
ref.setData(1,0x6D3)
ref.setData(2,0x3)
ref.setData(3,0xA4)
else:
ref.setData(0,0x19)
ref.setData(1,0x1325)
ref.setData(2,0x3ef)
ref.setData(3,0x1)
if getTuneInfo["sat"] == "160": # Eutelsat W2
ref.setData(4,0xA00000)
elif getTuneInfo["sat"] == "100": # Eutelsat
ref.setData(4,0x64af79)
elif getTuneInfo["sat"] == "130": # Hotbird
ref.setData(4,0x820000)
elif getTuneInfo["sat"] == "192": # Astra
ref.setData(4,0xC00000)
elif getTuneInfo["sat"] == "620": # Intelsat 902
ref.setData(4,0x26c0000) # need to fix later
elif getTuneInfo["sat"] == "642": # Intelsat 906
ref.setData(4,0x282AF79) # need to fix later
elif getTuneInfo["type"].startswith("DVB-C"):
ref.setData(0,0x19)
ref.setData(1,0x1325)
ref.setData(2,0x3ef)
ref.setData(3,0x1)
ref.setData(4,-64870) # ffff029a
elif getTuneInfo["type"].startswith("DVB-T"):
ref.setData(0,0x19)
ref.setData(1,0x1325)
ref.setData(2,0x3ef)
ref.setData(3,0x1)
ref.setData(4,-286391716) # eeee025c
self.session.nav.playService(ref)
if getTuneInfo["color"]=="CVBS":
self.avswitch.setColorFormat(0)
elif getTuneInfo["color"]=="RGB":
self.avswitch.setColorFormat(1)
elif getTuneInfo["color"]=="YC":
self.avswitch.setColorFormat(2)
if getTuneInfo["ratio"] == "4:3":
self.avswitch.setAspectRatio(0)
elif getTuneInfo["ratio"] == "16:9":
self.avswitch.setAspectRatio(6)
self.tuningtimer.start(2000,True)
self.tunemsgtimer.start(3000, True)
def cam_state(self):
current_index = self.currentindex
if self.camstep == 1:
slot = 0
state = eDVBCI_UI.getInstance().getState(slot)
print '-1-stat',state
if state > 0:
self.camstep=2
self.camtimer.start(100,True)
else:
self.session.nav.stopService()
self.session.open( MessageBox, _("CAM1_NOT_INSERTED\nPress exit!"), MessageBox.TYPE_ERROR)
self.rlist[current_index]="fail"
self.tunemsgtimer.stop()
elif self.camstep == 2:
slot = 0
appname = eDVBCI_UI.getInstance().getAppName(slot)
print 'appname',appname
if appname is None:
self.session.nav.stopService()
self.session.open( MessageBox, _("NO_GET_APPNAME\nPress exit!"), MessageBox.TYPE_ERROR)
self.rlist[current_index]="fail"
self.tunemsgtimer.stop()
else:
self.camstep=3
self.camtimer.start(100,True)
elif self.camstep==3:
slot = 1
state = eDVBCI_UI.getInstance().getState(slot)
print '-2-stat',state
if state > 0:
self.camstep=4
self.camtimer.start(100,True)
else:
self.session.nav.stopService()
self.session.open( MessageBox, _("CAM2_NOT_INSERTED\nPress exit!"), MessageBox.TYPE_ERROR)
self.rlist[current_index]="fail"
self.tunemsgtimer.stop()
elif self.camstep == 4:
slot = 1
appname = eDVBCI_UI.getInstance().getAppName(slot)
print 'appname',appname
if appname is None:
self.session.nav.stopService()
self.session.open( MessageBox, _("NO_GET_APPNAME\nPress exit!"), MessageBox.TYPE_ERROR)
self.rlist[current_index]="fail"
self.tunemsgtimer.stop()
else:
self.setSource()
self.camstep = 5
def updateStatus(self):
current_index = self.currentindex
getTuneInfo=self.tuneInfo[current_index]
result = eSctest.getInstance().getFrontendstatus(getTuneInfo["slot"])
tunno = getTuneInfo["slot"]+1
hv = getTuneInfo["pol"]
if hv == "H":
hv = "Hor"
elif hv == "V":
hv = "Ver"
else :
hv == ""
print "eSctest.getInstance().getFrontendstatus - %d"%result
if result == 0 or result == -1:
self.tunerlock = 0
self.tunemsgtimer.stop()
self.session.nav.stopService()
self.avswitch.setColorFormat(0)
self.session.open( MessageBox, _("Tune%d %s Locking Fail..."%(tunno,hv)), MessageBox.TYPE_ERROR)
self.rlist[current_index]="fail"
else :
self.tunerlock = 1
def tuneback(self,yesno):
current_index=self.currentindex
self.session.nav.stopService() # try to disable foreground service
if yesno and self.tunerlock == 1:
getTuneInfo=self.tuneInfo[current_index]
if getTuneInfo["cam"] and self.camstep < 5: # need fix to depending about CAM exist
self.rlist[current_index]="fail"
else :
self.rlist[current_index]="pass"
else:
self.rlist[current_index]="fail"
if self.tuneInfo[current_index]["color"] == "YC":
self.avswitch.setColorFormat(0)
self.resetSource()
self["resultlist"].updateList(self.rlist)
def tunemsg(self):
self.tuningtimer.stop()
self.session.openWithCallback(self.tuneback, TuneMessageBox, _("%s ok?" %(self["testlist"].getCurrent()[0])), MessageBox.TYPE_YESNO)
def setSourceVar(self):
self.input_pad_num=len(self.NimType)-1
if self.input_pad_num == 0:
self.setTuner = 'A'
elif self.input_pad_num == 1:
self.setTuner = 'B'
elif self.input_pad_num == 2:
self.setTuner = 'C'
# ikseong - for 22000 tp
def setSource(self):
# fix input source
inputname = ("/proc/stb/tsmux/input%d" % self.input_pad_num)
print "<setsource> inputname : ",inputname
fd=open(inputname,"w")
fd.write("CI0")
fd.close()
# fix ci_input Tuner
filename = ("/proc/stb/tsmux/ci0_input")
fd = open(filename,'w')
fd.write(self.setTuner)
print "setTuner(CI0) : ",self.setTuner
fd.close()
print "CI loop test!!!!!!!!!!!!!!"
def resetSource(self):
inputname = ("/proc/stb/tsmux/input%d" % self.input_pad_num)
print "<resetsource> inputname : ",inputname
fd=open(inputname,"w")
fd.write(self.setTuner)
fd.close()
print "CI loop test end!!!!!!!!!!!!!!"
# NOTE(review): main()/cleanup() below use the capitalised globals
# ``Session``/``Servicelist``; this lowercase ``session`` is never read
# anywhere in the visible code -- confirm it can be removed.
session = None
def cleanup():
    """Drop the module-level session/servicelist references set by main()."""
    global Session, Servicelist
    Session = None
    Servicelist = None
def main(session, servicelist, **kwargs):
    """Plugin entry point: stash the session/servicelist globally and open
    the FactoryTest screen; cleanup() clears the globals when it closes."""
    global Session
    Session = session
    global Servicelist
    Servicelist = servicelist
    # NOTE(review): bouquets is fetched but never used afterwards in the
    # visible code -- possibly a leftover.
    bouquets = Servicelist.getBouquetList()
    global bouquetSel
    bouquetSel = Session.openWithCallback(cleanup, FactoryTest)
#def Plugins(**kwargs):
# return PluginDescriptor(name=_("Factory Test"), description="Test App for Factory", where = PluginDescriptor.WHERE_EXTENSIONSMENU, fnc=main)
def Plugins(**kwargs):
    """Enigma2 plugin registry hook.

    Returns no descriptors: the factory test is currently not exposed in
    any plugin menu (see the commented-out variant above).
    """
    return list()
|
rjschwei/azure-sdk-for-python
|
refs/heads/master
|
azure-mgmt-network/azure/mgmt/network/models/load_balancing_rule.py
|
1
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .sub_resource import SubResource
class LoadBalancingRule(SubResource):
    """A load balancing rule for a load balancer.

    :param id: Resource ID.
    :type id: str
    :param frontend_ip_configuration: A reference to frontend IP addresses.
    :type frontend_ip_configuration: :class:`SubResource
     <azure.mgmt.network.models.SubResource>`
    :param backend_address_pool: A reference to a pool of DIPs. Inbound
     traffic is randomly load balanced across IPs in the backend IPs.
    :type backend_address_pool: :class:`SubResource
     <azure.mgmt.network.models.SubResource>`
    :param probe: The reference of the load balancer probe used by the load
     balancing rule.
    :type probe: :class:`SubResource <azure.mgmt.network.models.SubResource>`
    :param protocol: The transport protocol for the external endpoint.
     Possible values are 'Udp' or 'Tcp'. Possible values include: 'Udp', 'Tcp'
    :type protocol: str or :class:`TransportProtocol
     <azure.mgmt.network.models.TransportProtocol>`
    :param load_distribution: The load distribution policy for this rule.
     Possible values are 'Default', 'SourceIP', and 'SourceIPProtocol'.
     Possible values include: 'Default', 'SourceIP', 'SourceIPProtocol'
    :type load_distribution: str or :class:`LoadDistribution
     <azure.mgmt.network.models.LoadDistribution>`
    :param frontend_port: The port for the external endpoint. Port numbers for
     each Rule must be unique within the Load Balancer. Acceptable values are
     between 1 and 65534.
    :type frontend_port: int
    :param backend_port: The port used for internal connections on the
     endpoint. Acceptable values are between 1 and 65535.
    :type backend_port: int
    :param idle_timeout_in_minutes: The timeout for the TCP idle connection.
     The value can be set between 4 and 30 minutes. The default value is 4
     minutes. This element is only used when the protocol is set to TCP.
    :type idle_timeout_in_minutes: int
    :param enable_floating_ip: Configures a virtual machine's endpoint for the
     floating IP capability required to configure a SQL AlwaysOn Availability
     Group. This setting is required when using the SQL AlwaysOn Availability
     Groups in SQL server. This setting can't be changed after you create the
     endpoint.
    :type enable_floating_ip: bool
    :param provisioning_state: Gets the provisioning state of the PublicIP
     resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
    :type provisioning_state: str
    :param name: The name of the resource that is unique within a resource
     group. This name can be used to access the resource.
    :type name: str
    :param etag: A unique read-only string that changes whenever the resource
     is updated.
    :type etag: str
    """

    # protocol and frontend_port are the only required constructor arguments.
    _validation = {
        'protocol': {'required': True},
        'frontend_port': {'required': True},
    }

    # Maps Python attribute names onto REST payload paths for the msrest
    # serializer.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'frontend_ip_configuration': {'key': 'properties.frontendIPConfiguration', 'type': 'SubResource'},
        'backend_address_pool': {'key': 'properties.backendAddressPool', 'type': 'SubResource'},
        'probe': {'key': 'properties.probe', 'type': 'SubResource'},
        'protocol': {'key': 'properties.protocol', 'type': 'str'},
        'load_distribution': {'key': 'properties.loadDistribution', 'type': 'str'},
        'frontend_port': {'key': 'properties.frontendPort', 'type': 'int'},
        'backend_port': {'key': 'properties.backendPort', 'type': 'int'},
        'idle_timeout_in_minutes': {'key': 'properties.idleTimeoutInMinutes', 'type': 'int'},
        'enable_floating_ip': {'key': 'properties.enableFloatingIP', 'type': 'bool'},
        'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'etag': {'key': 'etag', 'type': 'str'},
    }

    def __init__(self, protocol, frontend_port, id=None, frontend_ip_configuration=None, backend_address_pool=None, probe=None, load_distribution=None, backend_port=None, idle_timeout_in_minutes=None, enable_floating_ip=None, provisioning_state=None, name=None, etag=None):
        super(LoadBalancingRule, self).__init__(id=id)
        self.frontend_ip_configuration = frontend_ip_configuration
        self.backend_address_pool = backend_address_pool
        self.probe = probe
        self.protocol = protocol
        self.load_distribution = load_distribution
        self.frontend_port = frontend_port
        self.backend_port = backend_port
        self.idle_timeout_in_minutes = idle_timeout_in_minutes
        self.enable_floating_ip = enable_floating_ip
        self.provisioning_state = provisioning_state
        self.name = name
        self.etag = etag
|
abhitopia/tensorflow
|
refs/heads/master
|
tensorflow/python/platform/flags.py
|
85
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Implementation of the flags interface."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse as _argparse
from tensorflow.python.util.all_util import remove_undocumented
_global_parser = _argparse.ArgumentParser()
# pylint: disable=invalid-name
class _FlagValues(object):
"""Global container and accessor for flags and their values."""
def __init__(self):
self.__dict__['__flags'] = {}
self.__dict__['__parsed'] = False
def _parse_flags(self, args=None):
result, unparsed = _global_parser.parse_known_args(args=args)
for flag_name, val in vars(result).items():
self.__dict__['__flags'][flag_name] = val
self.__dict__['__parsed'] = True
return unparsed
def __getattr__(self, name):
"""Retrieves the 'value' attribute of the flag --name."""
if not self.__dict__['__parsed']:
self._parse_flags()
if name not in self.__dict__['__flags']:
raise AttributeError(name)
return self.__dict__['__flags'][name]
def __setattr__(self, name, value):
"""Sets the 'value' attribute of the flag --name."""
if not self.__dict__['__parsed']:
self._parse_flags()
self.__dict__['__flags'][name] = value
def _define_helper(flag_name, default_value, docstring, flagtype):
    """Registers 'flag_name' with 'default_value' and 'docstring' on the
    shared argparse parser, converting parsed values with `flagtype`."""
    _global_parser.add_argument('--' + flag_name,
                                type=flagtype,
                                default=default_value,
                                help=docstring)
# Provides the global object that can be used to access flags.
# Singleton: command-line parsing happens lazily on first attribute access.
FLAGS = _FlagValues()
def DEFINE_string(flag_name, default_value, docstring):
    """Register a string-valued command-line flag.

    Args:
      flag_name: Flag name, without the leading '--'.
      default_value: String value used when the flag is not given.
      docstring: Help text shown for the flag.
    """
    _define_helper(flag_name, default_value, docstring, flagtype=str)
def DEFINE_integer(flag_name, default_value, docstring):
    """Register an int-valued command-line flag.

    Args:
      flag_name: Flag name, without the leading '--'.
      default_value: Integer value used when the flag is not given.
      docstring: Help text shown for the flag.
    """
    _define_helper(flag_name, default_value, docstring, flagtype=int)
def DEFINE_boolean(flag_name, default_value, docstring):
    """Register a boolean command-line flag.

    Both ``--name[=true/false]`` and a negated ``--noname`` form are
    registered, so ``--flag=True`` and ``--noflag`` work as expected.

    Args:
      flag_name: Flag name, without the leading '--'.
      default_value: Boolean value used when the flag is not given.
      docstring: Help text shown for the flag.
    """
    def _parse_bool(value):
        # Accept 'true'/'t'/'1' (any case) as True; everything else is False.
        return value.lower() in ('true', 't', '1')
    _global_parser.add_argument('--' + flag_name,
                                nargs='?',
                                const=True,
                                default=default_value,
                                type=_parse_bool,
                                help=docstring)
    # Negated version; argparse's dest handling mirrors dashes->underscores.
    _global_parser.add_argument('--no' + flag_name,
                                action='store_false',
                                dest=flag_name.replace('-', '_'))


# The internal google library defines the following alias, so we match
# the API for consistency.
DEFINE_bool = DEFINE_boolean  # pylint: disable=invalid-name
def DEFINE_float(flag_name, default_value, docstring):
    """Register a float-valued command-line flag.

    Args:
      flag_name: Flag name, without the leading '--'.
      default_value: Float value used when the flag is not given.
      docstring: Help text shown for the flag.
    """
    _define_helper(flag_name, default_value, docstring, flagtype=float)
# Public API of this module; everything else is stripped below.
_allowed_symbols = [
    # We rely on gflags documentation.
    'DEFINE_bool',
    'DEFINE_boolean',
    'DEFINE_float',
    'DEFINE_integer',
    'DEFINE_string',
    'FLAGS',
]
# Remove every module attribute not listed above from the public surface.
remove_undocumented(__name__, _allowed_symbols)
|
mahendra-r/edx-platform
|
refs/heads/master
|
lms/djangoapps/shoppingcart/exceptions.py
|
191
|
"""
Exceptions for the shoppingcart app
"""
# (Exception Class Names are sort of self-explanatory, so skipping docstring requirement)
# pylint: disable=missing-docstring
class PaymentException(Exception):
    """Root of the shoppingcart payment exception hierarchy."""
class PurchasedCallbackException(PaymentException):
    """Raised when the post-purchase callback fails."""
class InvalidCartItem(PaymentException):
    """Base class for errors about a specific item in the cart."""
class ItemAlreadyInCartException(InvalidCartItem):
    """Raised when adding an item that is already in the cart."""
class AlreadyEnrolledInCourseException(InvalidCartItem):
    """Raised when adding a course the user is already enrolled in."""
class CourseDoesNotExistException(InvalidCartItem):
    """Raised when the referenced course cannot be found."""
class CouponDoesNotExistException(InvalidCartItem):
    """Raised when the supplied coupon code cannot be found."""
class MultipleCouponsNotAllowedException(InvalidCartItem):
    """Raised when more than one coupon is applied to a single cart."""
class RedemptionCodeError(Exception):
    """An error occurs while processing redemption codes."""
class ReportException(Exception):
    """Base class for errors raised while generating reports."""
class ReportTypeDoesNotExistException(ReportException):
    """Raised when an unknown report type is requested."""
class InvalidStatusToRetire(Exception):
    """Raised when an order's status does not allow it to be retired."""
class UnexpectedOrderItemStatus(Exception):
    """Raised when an order item is in a status the caller cannot handle."""
class ItemNotFoundInCartException(Exception):
    """Raised when the requested item is not present in the cart."""
|
CristianBB/SickRage
|
refs/heads/develop
|
lib/unidecode/x06c.py
|
252
|
# Transliteration table for Unicode code points U+6C00..U+6CFF (CJK
# ideographs): data[n] is the ASCII romanization (with trailing space) for
# code point 0x6C00 + n; '[?] ' marks characters with no mapping.
data = (
'Lu ', # 0x00
'Mu ', # 0x01
'Li ', # 0x02
'Tong ', # 0x03
'Rong ', # 0x04
'Chang ', # 0x05
'Pu ', # 0x06
'Luo ', # 0x07
'Zhan ', # 0x08
'Sao ', # 0x09
'Zhan ', # 0x0a
'Meng ', # 0x0b
'Luo ', # 0x0c
'Qu ', # 0x0d
'Die ', # 0x0e
'Shi ', # 0x0f
'Di ', # 0x10
'Min ', # 0x11
'Jue ', # 0x12
'Mang ', # 0x13
'Qi ', # 0x14
'Pie ', # 0x15
'Nai ', # 0x16
'Qi ', # 0x17
'Dao ', # 0x18
'Xian ', # 0x19
'Chuan ', # 0x1a
'Fen ', # 0x1b
'Ri ', # 0x1c
'Nei ', # 0x1d
'[?] ', # 0x1e
'Fu ', # 0x1f
'Shen ', # 0x20
'Dong ', # 0x21
'Qing ', # 0x22
'Qi ', # 0x23
'Yin ', # 0x24
'Xi ', # 0x25
'Hai ', # 0x26
'Yang ', # 0x27
'An ', # 0x28
'Ya ', # 0x29
'Ke ', # 0x2a
'Qing ', # 0x2b
'Ya ', # 0x2c
'Dong ', # 0x2d
'Dan ', # 0x2e
'Lu ', # 0x2f
'Qing ', # 0x30
'Yang ', # 0x31
'Yun ', # 0x32
'Yun ', # 0x33
'Shui ', # 0x34
'San ', # 0x35
'Zheng ', # 0x36
'Bing ', # 0x37
'Yong ', # 0x38
'Dang ', # 0x39
'Shitamizu ', # 0x3a
'Le ', # 0x3b
'Ni ', # 0x3c
'Tun ', # 0x3d
'Fan ', # 0x3e
'Gui ', # 0x3f
'Ting ', # 0x40
'Zhi ', # 0x41
'Qiu ', # 0x42
'Bin ', # 0x43
'Ze ', # 0x44
'Mian ', # 0x45
'Cuan ', # 0x46
'Hui ', # 0x47
'Diao ', # 0x48
'Yi ', # 0x49
'Cha ', # 0x4a
'Zhuo ', # 0x4b
'Chuan ', # 0x4c
'Wan ', # 0x4d
'Fan ', # 0x4e
'Dai ', # 0x4f
'Xi ', # 0x50
'Tuo ', # 0x51
'Mang ', # 0x52
'Qiu ', # 0x53
'Qi ', # 0x54
'Shan ', # 0x55
'Pai ', # 0x56
'Han ', # 0x57
'Qian ', # 0x58
'Wu ', # 0x59
'Wu ', # 0x5a
'Xun ', # 0x5b
'Si ', # 0x5c
'Ru ', # 0x5d
'Gong ', # 0x5e
'Jiang ', # 0x5f
'Chi ', # 0x60
'Wu ', # 0x61
'Tsuchi ', # 0x62
'[?] ', # 0x63
'Tang ', # 0x64
'Zhi ', # 0x65
'Chi ', # 0x66
'Qian ', # 0x67
'Mi ', # 0x68
'Yu ', # 0x69
'Wang ', # 0x6a
'Qing ', # 0x6b
'Jing ', # 0x6c
'Rui ', # 0x6d
'Jun ', # 0x6e
'Hong ', # 0x6f
'Tai ', # 0x70
'Quan ', # 0x71
'Ji ', # 0x72
'Bian ', # 0x73
'Bian ', # 0x74
'Gan ', # 0x75
'Wen ', # 0x76
'Zhong ', # 0x77
'Fang ', # 0x78
'Xiong ', # 0x79
'Jue ', # 0x7a
'Hang ', # 0x7b
'Niou ', # 0x7c
'Qi ', # 0x7d
'Fen ', # 0x7e
'Xu ', # 0x7f
'Xu ', # 0x80
'Qin ', # 0x81
'Yi ', # 0x82
'Wo ', # 0x83
'Yun ', # 0x84
'Yuan ', # 0x85
'Hang ', # 0x86
'Yan ', # 0x87
'Chen ', # 0x88
'Chen ', # 0x89
'Dan ', # 0x8a
'You ', # 0x8b
'Dun ', # 0x8c
'Hu ', # 0x8d
'Huo ', # 0x8e
'Qie ', # 0x8f
'Mu ', # 0x90
'Rou ', # 0x91
'Mei ', # 0x92
'Ta ', # 0x93
'Mian ', # 0x94
'Wu ', # 0x95
'Chong ', # 0x96
'Tian ', # 0x97
'Bi ', # 0x98
'Sha ', # 0x99
'Zhi ', # 0x9a
'Pei ', # 0x9b
'Pan ', # 0x9c
'Zhui ', # 0x9d
'Za ', # 0x9e
'Gou ', # 0x9f
'Liu ', # 0xa0
'Mei ', # 0xa1
'Ze ', # 0xa2
'Feng ', # 0xa3
'Ou ', # 0xa4
'Li ', # 0xa5
'Lun ', # 0xa6
'Cang ', # 0xa7
'Feng ', # 0xa8
'Wei ', # 0xa9
'Hu ', # 0xaa
'Mo ', # 0xab
'Mei ', # 0xac
'Shu ', # 0xad
'Ju ', # 0xae
'Zan ', # 0xaf
'Tuo ', # 0xb0
'Tuo ', # 0xb1
'Tuo ', # 0xb2
'He ', # 0xb3
'Li ', # 0xb4
'Mi ', # 0xb5
'Yi ', # 0xb6
'Fa ', # 0xb7
'Fei ', # 0xb8
'You ', # 0xb9
'Tian ', # 0xba
'Zhi ', # 0xbb
'Zhao ', # 0xbc
'Gu ', # 0xbd
'Zhan ', # 0xbe
'Yan ', # 0xbf
'Si ', # 0xc0
'Kuang ', # 0xc1
'Jiong ', # 0xc2
'Ju ', # 0xc3
'Xie ', # 0xc4
'Qiu ', # 0xc5
'Yi ', # 0xc6
'Jia ', # 0xc7
'Zhong ', # 0xc8
'Quan ', # 0xc9
'Bo ', # 0xca
'Hui ', # 0xcb
'Mi ', # 0xcc
'Ben ', # 0xcd
'Zhuo ', # 0xce
'Chu ', # 0xcf
'Le ', # 0xd0
'You ', # 0xd1
'Gu ', # 0xd2
'Hong ', # 0xd3
'Gan ', # 0xd4
'Fa ', # 0xd5
'Mao ', # 0xd6
'Si ', # 0xd7
'Hu ', # 0xd8
'Ping ', # 0xd9
'Ci ', # 0xda
'Fan ', # 0xdb
'Chi ', # 0xdc
'Su ', # 0xdd
'Ning ', # 0xde
'Cheng ', # 0xdf
'Ling ', # 0xe0
'Pao ', # 0xe1
'Bo ', # 0xe2
'Qi ', # 0xe3
'Si ', # 0xe4
'Ni ', # 0xe5
'Ju ', # 0xe6
'Yue ', # 0xe7
'Zhu ', # 0xe8
'Sheng ', # 0xe9
'Lei ', # 0xea
'Xuan ', # 0xeb
'Xue ', # 0xec
'Fu ', # 0xed
'Pan ', # 0xee
'Min ', # 0xef
'Tai ', # 0xf0
'Yang ', # 0xf1
'Ji ', # 0xf2
'Yong ', # 0xf3
'Guan ', # 0xf4
'Beng ', # 0xf5
'Xue ', # 0xf6
'Long ', # 0xf7
'Lu ', # 0xf8
'[?] ', # 0xf9
'Bo ', # 0xfa
'Xie ', # 0xfb
'Po ', # 0xfc
'Ze ', # 0xfd
'Jing ', # 0xfe
'Yin ', # 0xff
)
|
RCOS-Grading-Server/HWserver
|
refs/heads/master
|
migration/migrator/migrations/course/20190110000000_course_materials_owner.py
|
2
|
import grp
import os
import shutil
from pathlib import Path
def up(config, conn, semester, course):
    """Migrate up: give the course 'uploads' directory to the PHP user.

    :param config: Submitty configuration (provides data dir and usernames).
    :param conn: database connection (unused by this migration).
    :param semester: semester code of the course being migrated.
    :param course: course name being migrated.
    """
    course_dir = Path(config.submitty['submitty_data_dir'], 'courses', semester, course)
    uploads_dir = Path(course_dir, 'uploads')
    php_user = config.submitty_users['php_user']
    # Set the owner.  Use shutil.chown instead of shelling out to chown so
    # unusual user names/paths cannot be misinterpreted by a shell; failures
    # (missing directory, unknown user, insufficient privilege) are ignored,
    # matching the original best-effort os.system() call.
    try:
        shutil.chown(str(uploads_dir), user=php_user)
    except (OSError, LookupError):
        pass
def down(config, conn, semester, course):
    """Migrate down: ownership changes are intentionally not reverted."""
    return None
|
COSMOGRAIL/PyCS
|
refs/heads/master
|
demo/demo1/1_import.py
|
1
|
# In this first script we "import" the data, in this case from a simple text file with
# headers (other formats are also supported, see doc)
# NOTE: this demo uses Python 2 syntax (bare print statement near the end).
import pycs
rdbfile = "data/trialcurves.txt"
# One light curve per quasar image (A-D); columns are magnitude and its error.
lcs = [
    pycs.gen.lc.rdbimport(rdbfile, 'A', 'mag_A', 'magerr_A', "Trial"),
    pycs.gen.lc.rdbimport(rdbfile, 'B', 'mag_B', 'magerr_B', "Trial"),
    pycs.gen.lc.rdbimport(rdbfile, 'C', 'mag_C', 'magerr_C', "Trial"),
    pycs.gen.lc.rdbimport(rdbfile, 'D', 'mag_D', 'magerr_D', "Trial")
]
pycs.gen.mrg.colourise(lcs) # Gives each curve a different colour.
# Let's shift them by the "true" time shifts, for display purposes :
lcs[1].shifttime(-5.0)
lcs[2].shifttime(-20.0)
lcs[3].shifttime(-70.0)
# We show them :
pycs.gen.lc.display(lcs)
# Or if you prefer to save them into a file:
pycs.gen.lc.display(lcs, filename="fig_trialcurves.pdf")
# This function has many more options...
# We undo these shifts, as from now on we "forget" about these true delays.
for l in lcs:
    l.resetshifts()
# The main point of this script : we save the raw curves into a pkl file :
pycs.gen.util.writepickle(lcs, "data/trialcurves.pkl")
# Normally we would stop here.
# In any further scripts, you can now import the data by reading this pickle file :
lcs = pycs.gen.util.readpickle("data/trialcurves.pkl")
# ... and do something with it.
for l in lcs:
    print l.longinfo()
# For instance, we could export the data into a text file.
for l in lcs:
    l.resetshifts()
pycs.gen.util.multilcsexport(lcs, "out_trialcurves.txt", separator="\t", verbose=True, properties=None)
# Which gives in this case the same file as the one from which you read the data in first place.
|
xforce/diorama-native-modding
|
refs/heads/master
|
tools/gyp/test/mac/gyptest-app-assets-catalog.py
|
61
|
#!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that app bundles are built correctly.
"""
import TestGyp
import TestMac
import os
import plistlib
import subprocess
import sys
def ExpectEq(expected, actual):
    """Fail the gyp test when actual != expected.

    Relies on the module-global `test` object created below; prints the
    mismatch to stderr (Python 2 print syntax, like the rest of this file).
    """
    if expected != actual:
        print >>sys.stderr, 'Expected "%s", got "%s"' % (expected, actual)
        test.fail_test()
def ls(path):
  '''Returns a list of all files in a directory, relative to the directory.'''
  found = []
  prefix_len = len(path) + 1  # strip "<path>/" from each walked file name
  for dirpath, _, filenames in os.walk(path):
    found.extend(os.path.join(dirpath, name)[prefix_len:] for name in filenames)
  return found
# Xcode supports for assets catalog was introduced in Xcode 6.0
# This whole test only runs on macOS with a sufficiently new Xcode;
# elsewhere it is a silent no-op.
if sys.platform == 'darwin' and TestMac.Xcode.Version() >= '0600':
  test_gyp_path = 'test-assets-catalog.gyp'
  test_app_path = 'Test App Assets Catalog Gyp.app'
  test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
  test.run_gyp(test_gyp_path, chdir='app-bundle')
  test.build(test_gyp_path, test.ALL, chdir='app-bundle')
  # Binary
  test.built_file_must_exist(
      os.path.join(test_app_path, 'Contents/MacOS/Test App Assets Catalog Gyp'),
      chdir='app-bundle')
  # Info.plist
  info_plist = test.built_file_path(
      os.path.join(test_app_path, 'Contents/Info.plist'),
      chdir='app-bundle')
  test.must_exist(info_plist)
  test.must_contain(
      info_plist,
      'com.google.Test-App-Assets-Catalog-Gyp')  # Variable expansion
  test.must_not_contain(info_plist, '${MACOSX_DEPLOYMENT_TARGET}');
  if test.format != 'make':
    # TODO: Synthesized plist entries aren't hooked up in the make generator.
    machine = subprocess.check_output(['sw_vers', '-buildVersion']).rstrip('\n')
    plist = plistlib.readPlist(info_plist)
    ExpectEq(machine, plist['BuildMachineOSBuild'])
    expected = ''
    version = TestMac.Xcode.SDKVersion()
    expected = 'macosx' + version
    ExpectEq(expected, plist['DTSDKName'])
    sdkbuild = TestMac.Xcode.SDKBuild()
    if not sdkbuild:
      # Above command doesn't work in Xcode 4.2.
      sdkbuild = plist['BuildMachineOSBuild']
    ExpectEq(sdkbuild, plist['DTSDKBuild'])
    ExpectEq(TestMac.Xcode.Version(), plist['DTXcode'])
    ExpectEq(TestMac.Xcode.Build(), plist['DTXcodeBuild'])
  # Resources: the .strings files must survive the copy byte-correctly.
  strings_files = ['InfoPlist.strings', 'utf-16be.strings', 'utf-16le.strings']
  for f in strings_files:
    strings = test.built_file_path(
        os.path.join(test_app_path, 'Contents/Resources/English.lproj', f),
        chdir='app-bundle')
    test.must_exist(strings)
    # Xcodes writes UTF-16LE with BOM.
    contents = open(strings, 'rb').read()
    if not contents.startswith('\xff\xfe' + '/* Localized'.encode('utf-16le')):
      test.fail_test()
  test.built_file_must_exist(
      os.path.join(
          test_app_path, 'Contents/Resources/English.lproj/MainMenu.nib'),
      chdir='app-bundle')
  # make does not supports .xcassets files
  extra_content_files = []
  if test.format != 'make':
    extra_content_files = ['Contents/Resources/Assets.car']
    for f in extra_content_files:
      test.built_file_must_exist(
          os.path.join(test_app_path, f),
          chdir='app-bundle')
  # Packaging
  test.built_file_must_exist(
      os.path.join(test_app_path, 'Contents/PkgInfo'),
      chdir='app-bundle')
  test.built_file_must_match(
      os.path.join(test_app_path, 'Contents/PkgInfo'), 'APPLause',
      chdir='app-bundle')
  # Check that no other files get added to the bundle.
  if set(ls(test.built_file_path(test_app_path, chdir='app-bundle'))) != \
     set(['Contents/MacOS/Test App Assets Catalog Gyp',
          'Contents/Info.plist',
          'Contents/Resources/English.lproj/MainMenu.nib',
          'Contents/PkgInfo',
          ] + extra_content_files +
         [os.path.join('Contents/Resources/English.lproj', f)
          for f in strings_files]):
    test.fail_test()
  test.pass_test()
|
vnsofthe/odoo
|
refs/heads/8.0
|
openerp/addons/base/module/report/__init__.py
|
463
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import ir_module_reference_print
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
plotly/python-api
|
refs/heads/master
|
packages/python/plotly/plotly/validators/histogram2d/hoverlabel/font/_size.py
|
1
|
import _plotly_utils.basevalidators
class SizeValidator(_plotly_utils.basevalidators.NumberValidator):
    """Number validator for the ``histogram2d.hoverlabel.font.size`` property."""

    def __init__(
        self, plotly_name="size", parent_name="histogram2d.hoverlabel.font", **kwargs
    ):
        # Pull each option out of kwargs so callers may override the defaults.
        options = {
            "array_ok": kwargs.pop("array_ok", True),
            "edit_type": kwargs.pop("edit_type", "none"),
            "min": kwargs.pop("min", 1),
            "role": kwargs.pop("role", "style"),
        }
        super(SizeValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            **dict(options, **kwargs)
        )
|
giovaroma/bootstrap4
|
refs/heads/master
|
node_modules/node-gyp/gyp/pylib/gyp/MSVSVersion.py
|
1509
|
# Copyright (c) 2013 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Handle version information related to Visual Stuio."""
import errno
import os
import re
import subprocess
import sys
import gyp
import glob
class VisualStudioVersion(object):
  """Information regarding a version of Visual Studio."""

  def __init__(self, short_name, description,
               solution_version, project_version, flat_sln, uses_vcxproj,
               path, sdk_based, default_toolset=None):
    self.short_name = short_name
    self.description = description
    self.solution_version = solution_version
    self.project_version = project_version
    self.flat_sln = flat_sln
    self.uses_vcxproj = uses_vcxproj
    self.path = path
    self.sdk_based = sdk_based
    self.default_toolset = default_toolset

  def ShortName(self):
    """Get the short name, e.g. '2010e'."""
    return self.short_name

  def Description(self):
    """Get the full description of the version."""
    return self.description

  def SolutionVersion(self):
    """Get the version number of the sln files."""
    return self.solution_version

  def ProjectVersion(self):
    """Get the version number of the vcproj or vcxproj files."""
    return self.project_version

  def FlatSolution(self):
    """Whether projects must be laid out flat in the solution."""
    return self.flat_sln

  def UsesVcxproj(self):
    """Returns true if this version uses a vcxproj file."""
    return self.uses_vcxproj

  def ProjectExtension(self):
    """Returns the file extension for the project."""
    return '.vcxproj' if self.uses_vcxproj else '.vcproj'

  def Path(self):
    """Returns the path to Visual Studio installation."""
    return self.path

  def ToolPath(self, tool):
    """Returns the path to a given compiler tool."""
    return os.path.normpath(os.path.join(self.path, "VC/bin", tool))

  def DefaultToolset(self):
    """Returns the msbuild toolset version that will be used in the absence
    of a user override."""
    return self.default_toolset

  def SetupScript(self, target_arch):
    """Returns a command (with arguments) to be used to set up the
    environment."""
    # |target_arch| must be either 'x86' or 'x64'.
    assert target_arch in ('x86', 'x64')
    # If we are running in the SDK command line environment, prefer the
    # SDK's own setup script.
    sdk_dir = os.environ.get('WindowsSDKDir')
    if self.sdk_based and sdk_dir:
      return [os.path.normpath(os.path.join(sdk_dir, 'Bin/SetEnv.Cmd')),
              '/' + target_arch]
    host_is_64bit = (
        os.environ.get('PROCESSOR_ARCHITECTURE') == 'AMD64' or
        os.environ.get('PROCESSOR_ARCHITEW6432') == 'AMD64')
    is_express = self.short_name[-1] == 'e'
    if target_arch == 'x86':
      # We don't use VC/vcvarsall.bat for x86 because vcvarsall calls
      # vcvars32, which it can only find if VS??COMNTOOLS is set, which it
      # isn't always.
      if self.short_name >= '2013' and not is_express and host_is_64bit:
        # VS2013 and later, non-Express have a x64-x86 cross that we want
        # to prefer.
        return [os.path.normpath(
            os.path.join(self.path, 'VC/vcvarsall.bat')), 'amd64_x86']
      # Otherwise, the standard x86 compiler.
      return [os.path.normpath(
          os.path.join(self.path, 'Common7/Tools/vsvars32.bat'))]
    # target_arch == 'x64': use the 64-on-64 compiler if we're not an
    # express edition and we're running on a 64bit OS; otherwise the
    # x86->x64 cross compiler.
    arg = 'amd64' if not is_express and host_is_64bit else 'x86_amd64'
    return [os.path.normpath(
        os.path.join(self.path, 'VC/vcvarsall.bat')), arg]
def _RegistryQueryBase(sysdir, key, value):
"""Use reg.exe to read a particular key.
While ideally we might use the win32 module, we would like gyp to be
python neutral, so for instance cygwin python lacks this module.
Arguments:
sysdir: The system subdirectory to attempt to launch reg.exe from.
key: The registry key to read from.
value: The particular value to read.
Return:
stdout from reg.exe, or None for failure.
"""
# Skip if not on Windows or Python Win32 setup issue
if sys.platform not in ('win32', 'cygwin'):
return None
# Setup params to pass to and attempt to launch reg.exe
cmd = [os.path.join(os.environ.get('WINDIR', ''), sysdir, 'reg.exe'),
'query', key]
if value:
cmd.extend(['/v', value])
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
# Obtain the stdout from reg.exe, reading to the end so p.returncode is valid
# Note that the error text may be in [1] in some cases
text = p.communicate()[0]
# Check return code from reg.exe; officially 0==success and 1==error
if p.returncode:
return None
return text
def _RegistryQuery(key, value=None):
  r"""Use reg.exe to read a particular key through _RegistryQueryBase.

  First tries to launch from %WinDir%\Sysnative to avoid WoW64 redirection. If
  that fails, it falls back to System32. Sysnative is available on Vista and
  up and available on Windows Server 2003 and XP through KB patch 942589. Note
  that Sysnative will always fail if using 64-bit python due to it being a
  virtual directory and System32 will work correctly in the first place.

  KB 942589 - http://support.microsoft.com/kb/942589/en-us.

  Arguments:
    key: The registry key.
    value: The particular registry value to read (optional).
  Return:
    stdout from reg.exe, or None for failure.
  """
  text = None
  try:
    text = _RegistryQueryBase('Sysnative', key, value)
  except OSError as e:
    # `except ... as e` is valid on Python 2.6+ and required on Python 3;
    # the original `except OSError, e` form is a Python 3 syntax error.
    if e.errno == errno.ENOENT:
      # Sysnative doesn't exist: retry from System32.
      text = _RegistryQueryBase('System32', key, value)
    else:
      raise
  return text
def _RegistryGetValueUsingWinReg(key, value):
"""Use the _winreg module to obtain the value of a registry key.
Args:
key: The registry key.
value: The particular registry value to read.
Return:
contents of the registry key's value, or None on failure. Throws
ImportError if _winreg is unavailable.
"""
import _winreg
try:
root, subkey = key.split('\\', 1)
assert root == 'HKLM' # Only need HKLM for now.
with _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, subkey) as hkey:
return _winreg.QueryValueEx(hkey, value)[0]
except WindowsError:
return None
def _RegistryGetValue(key, value):
  """Use _winreg or reg.exe to obtain the value of a registry key.

  Using _winreg is preferable because it solves an issue on some corporate
  environments where access to reg.exe is locked down. However, we still need
  to fallback to reg.exe for the case where the _winreg module is not available
  (for example in cygwin python).

  Args:
    key: The registry key.
    value: The particular registry value to read.
  Return:
    contents of the registry key's value, or None on failure.
  """
  try:
    return _RegistryGetValueUsingWinReg(key, value)
  except ImportError:
    pass
  # _winreg is unavailable: shell out to reg.exe and parse its output.
  output = _RegistryQuery(key, value)
  if not output:
    return None
  matched = re.search(r'REG_\w+\s+([^\r]+)\r\n', output)
  return matched.group(1) if matched else None
def _CreateVersion(name, path, sdk_based=False):
  """Sets up MSVS project generation.

  Setup is based off the GYP_MSVS_VERSION environment variable or whatever is
  autodetected if GYP_MSVS_VERSION is not explicitly specified. If a version is
  passed in that doesn't match a value in versions python will throw a error.
  """
  if path:
    path = os.path.normpath(path)
  # One entry per supported release; the trailing 'e' marks Express
  # editions, which need flat solutions and have no x64-native tools.
  versions = {
      '2015': VisualStudioVersion('2015',
                                  'Visual Studio 2015',
                                  solution_version='12.00',
                                  project_version='14.0',
                                  flat_sln=False,
                                  uses_vcxproj=True,
                                  path=path,
                                  sdk_based=sdk_based,
                                  default_toolset='v140'),
      '2013': VisualStudioVersion('2013',
                                  'Visual Studio 2013',
                                  solution_version='13.00',
                                  project_version='12.0',
                                  flat_sln=False,
                                  uses_vcxproj=True,
                                  path=path,
                                  sdk_based=sdk_based,
                                  default_toolset='v120'),
      '2013e': VisualStudioVersion('2013e',
                                   'Visual Studio 2013',
                                   solution_version='13.00',
                                   project_version='12.0',
                                   flat_sln=True,
                                   uses_vcxproj=True,
                                   path=path,
                                   sdk_based=sdk_based,
                                   default_toolset='v120'),
      '2012': VisualStudioVersion('2012',
                                  'Visual Studio 2012',
                                  solution_version='12.00',
                                  project_version='4.0',
                                  flat_sln=False,
                                  uses_vcxproj=True,
                                  path=path,
                                  sdk_based=sdk_based,
                                  default_toolset='v110'),
      '2012e': VisualStudioVersion('2012e',
                                   'Visual Studio 2012',
                                   solution_version='12.00',
                                   project_version='4.0',
                                   flat_sln=True,
                                   uses_vcxproj=True,
                                   path=path,
                                   sdk_based=sdk_based,
                                   default_toolset='v110'),
      '2010': VisualStudioVersion('2010',
                                  'Visual Studio 2010',
                                  solution_version='11.00',
                                  project_version='4.0',
                                  flat_sln=False,
                                  uses_vcxproj=True,
                                  path=path,
                                  sdk_based=sdk_based),
      '2010e': VisualStudioVersion('2010e',
                                   'Visual C++ Express 2010',
                                   solution_version='11.00',
                                   project_version='4.0',
                                   flat_sln=True,
                                   uses_vcxproj=True,
                                   path=path,
                                   sdk_based=sdk_based),
      '2008': VisualStudioVersion('2008',
                                  'Visual Studio 2008',
                                  solution_version='10.00',
                                  project_version='9.00',
                                  flat_sln=False,
                                  uses_vcxproj=False,
                                  path=path,
                                  sdk_based=sdk_based),
      '2008e': VisualStudioVersion('2008e',
                                   'Visual Studio 2008',
                                   solution_version='10.00',
                                   project_version='9.00',
                                   flat_sln=True,
                                   uses_vcxproj=False,
                                   path=path,
                                   sdk_based=sdk_based),
      '2005': VisualStudioVersion('2005',
                                  'Visual Studio 2005',
                                  solution_version='9.00',
                                  project_version='8.00',
                                  flat_sln=False,
                                  uses_vcxproj=False,
                                  path=path,
                                  sdk_based=sdk_based),
      '2005e': VisualStudioVersion('2005e',
                                   'Visual Studio 2005',
                                   solution_version='9.00',
                                   project_version='8.00',
                                   flat_sln=True,
                                   uses_vcxproj=False,
                                   path=path,
                                   sdk_based=sdk_based),
  }
  # Raises KeyError for unknown names, which callers treat as a fatal error.
  return versions[str(name)]
def _ConvertToCygpath(path):
"""Convert to cygwin path if we are using cygwin."""
if sys.platform == 'cygwin':
p = subprocess.Popen(['cygpath', path], stdout=subprocess.PIPE)
path = p.communicate()[0].strip()
return path
def _DetectVisualStudioVersions(versions_to_check, force_express):
  """Collect the list of installed visual studio versions.

  Returns:
    A list of visual studio versions installed in descending order of
    usage preference.
  Base this on the registry and a quick check if devenv.exe exists.
  Only versions 8-10 are considered.
  Possibilities are:
    2005(e) - Visual Studio 2005 (8)
    2008(e) - Visual Studio 2008 (9)
    2010(e) - Visual Studio 2010 (10)
    2012(e) - Visual Studio 2012 (11)
    2013(e) - Visual Studio 2013 (12)
    2015    - Visual Studio 2015 (14)
  Where (e) is e for express editions of MSVS and blank otherwise.
  """
  version_to_year = {
      '8.0': '2005',
      '9.0': '2008',
      '10.0': '2010',
      '11.0': '2012',
      '12.0': '2013',
      '14.0': '2015',
  }
  versions = []
  for version in versions_to_check:
    # Old method of searching for which VS version is installed
    # We don't use the 2010-encouraged-way because we also want to get the
    # path to the binaries, which it doesn't offer.
    install_keys = [
        r'HKLM\Software\Microsoft\VisualStudio\%s' % version,
        r'HKLM\Software\Wow6432Node\Microsoft\VisualStudio\%s' % version,
        r'HKLM\Software\Microsoft\VCExpress\%s' % version,
        r'HKLM\Software\Wow6432Node\Microsoft\VCExpress\%s' % version]
    for reg_key in install_keys:
      install_dir = _RegistryGetValue(reg_key, 'InstallDir')
      if not install_dir:
        continue
      install_dir = _ConvertToCygpath(install_dir)
      vs_root = os.path.join(install_dir, '..', '..')
      # Prefer a full edition (devenv.exe present) over an Express one.
      if not force_express and os.path.exists(
          os.path.join(install_dir, 'devenv.exe')):
        versions.append(_CreateVersion(version_to_year[version], vs_root))
      elif glob.glob(os.path.join(install_dir, '*express.exe')):
        versions.append(_CreateVersion(version_to_year[version] + 'e',
                                       vs_root))
    # The old method above does not work when only SDK is installed.
    sdk_keys = [r'HKLM\Software\Microsoft\VisualStudio\SxS\VC7',
                r'HKLM\Software\Wow6432Node\Microsoft\VisualStudio\SxS\VC7']
    for reg_key in sdk_keys:
      vc_dir = _RegistryGetValue(reg_key, version)
      if not vc_dir:
        continue
      vc_dir = _ConvertToCygpath(vc_dir)
      if version != '14.0':  # There is no Express edition for 2015.
        versions.append(_CreateVersion(version_to_year[version] + 'e',
                                       os.path.join(vc_dir, '..'),
                                       sdk_based=True))
  return versions
def SelectVisualStudioVersion(version='auto', allow_fallback=True):
  """Select which version of Visual Studio projects to generate.

  Arguments:
    version: Hook to allow caller to force a particular version (vs auto).
  Returns:
    An object representing a visual studio project format version.
  """
  # In auto mode, the environment variable may override the choice.
  if version == 'auto':
    version = os.environ.get('GYP_MSVS_VERSION', 'auto')
  version_map = {
    'auto': ('14.0', '12.0', '10.0', '9.0', '8.0', '11.0'),
    '2005': ('8.0',),
    '2005e': ('8.0',),
    '2008': ('9.0',),
    '2008e': ('9.0',),
    '2010': ('10.0',),
    '2010e': ('10.0',),
    '2012': ('11.0',),
    '2012e': ('11.0',),
    '2013': ('12.0',),
    '2013e': ('12.0',),
    '2015': ('14.0',),
  }
  override_path = os.environ.get('GYP_MSVS_OVERRIDE_PATH')
  if override_path:
    msvs_version = os.environ.get('GYP_MSVS_VERSION')
    if not msvs_version:
      raise ValueError('GYP_MSVS_OVERRIDE_PATH requires GYP_MSVS_VERSION to be '
                       'set to a particular version (e.g. 2010e).')
    return _CreateVersion(msvs_version, override_path, sdk_based=True)
  version = str(version)
  detected = _DetectVisualStudioVersions(version_map[version], 'e' in version)
  if detected:
    return detected[0]
  if not allow_fallback:
    raise ValueError('Could not locate Visual Studio installation.')
  # Default to 2005 if we couldn't find anything.
  fallback = '2005' if version == 'auto' else version
  return _CreateVersion(fallback, None)
|
jiadaizhao/LeetCode
|
refs/heads/master
|
0301-0400/0363-Max Sum of Rectangle No Larger Than K/0363-Max Sum of Rectangle No Larger Than K.py
|
1
|
import bisect
import math
class Solution:
    """LeetCode 363: fix a pair of columns, then keep a sorted list of the
    running prefix sums over rows; the smallest prefix >= running - k yields
    the largest rectangle sum that is still <= k."""

    def maxSumSubmatrix(self, matrix: 'List[List[int]]', k: 'int') -> 'int':
        rows, cols = len(matrix), len(matrix[0])
        best = -math.inf
        for left in range(cols):
            row_sums = [0] * rows
            for right in range(left, cols):
                prefix_table = [0]
                running = 0
                for row in range(rows):
                    row_sums[row] += matrix[row][right]
                    running += row_sums[row]
                    pos = bisect.bisect_left(prefix_table, running - k)
                    if pos != len(prefix_table):
                        best = max(best, running - prefix_table[pos])
                        # k is the theoretical maximum; stop early on a hit.
                        if best == k:
                            return k
                    bisect.insort_left(prefix_table, running)
        return best
class Solution2:
    """Variant of Solution that precomputes per-row prefix sums so each
    (left, right) column pair reuses them instead of re-accumulating."""

    def maxSumSubmatrix(self, matrix: 'List[List[int]]', k: 'int') -> 'int':
        rows, cols = len(matrix), len(matrix[0])
        best = -math.inf
        # row_prefix[i][j] = sum of matrix[i][0:j]
        row_prefix = [[0] * (cols + 1) for _ in range(rows)]
        for i, row in enumerate(matrix):
            for j, cell in enumerate(row):
                row_prefix[i][j + 1] = row_prefix[i][j] + cell
        for left in range(cols):
            for right in range(left, cols):
                running = 0
                seen = [0]
                for i in range(rows):
                    running += row_prefix[i][right + 1] - row_prefix[i][left]
                    pos = bisect.bisect_left(seen, running - k)
                    if pos != len(seen):
                        best = max(best, running - seen[pos])
                        # k is the theoretical maximum; stop early on a hit.
                        if best == k:
                            return k
                    bisect.insort_left(seen, running)
        return best
|
Dyrcona/asciidoc
|
refs/heads/master
|
filters/music/music2png.py
|
24
|
#!/usr/bin/env python
'''
NAME
music2png - Converts textual music notation to classically notated PNG file
SYNOPSIS
music2png [options] INFILE
DESCRIPTION
This filter reads LilyPond or ABC music notation text from the input file
INFILE (or stdin if INFILE is -), converts it to classical music notation
and writes it to a trimmed PNG image file.
This script is a wrapper for LilyPond and ImageMagick commands.
OPTIONS
-f FORMAT
The INFILE music format. 'abc' for ABC notation, 'ly' for LilyPond
notation. Defaults to 'abc' unless source starts with backslash.
-o OUTFILE
The file name of the output file. If not specified the output file is
named like INFILE but with a .png file name extension.
-m
Skip if the PNG output file is newer that than the INFILE.
Compares timestamps on INFILE and OUTFILE. If
INFILE is - (stdin) then compares MD5 checksum stored in file
named like OUTFILE but with a .md5 file name extension.
The .md5 file is created if the -m option is used and the
INFILE is - (stdin).
-v
Verbosely print processing information to stderr.
--help, -h
Print this documentation.
--version
Print program version number.
SEE ALSO
lilypond(1), abc2ly(1), convert(1)
AUTHOR
Written by Stuart Rackham, <srackham@gmail.com>
COPYING
Copyright (C) 2006 Stuart Rackham. Free use of this software is
granted under the terms of the GNU General Public License (GPL).
'''
# Suppress warning: "the md5 module is deprecated; use hashlib instead"
import warnings
warnings.simplefilter('ignore',DeprecationWarning)
import os, sys, tempfile, md5
VERSION = '0.1.2'
# Globals.
verbose = False
class EApp(Exception):
    """Application specific exception."""
def print_stderr(line):
    """Write *line* plus the platform line separator to stderr."""
    sys.stderr.write('%s%s' % (line, os.linesep))
def print_verbose(line):
    """Write *line* to stderr, but only when global verbose mode is enabled."""
    if not verbose:
        return
    print_stderr(line)
def write_file(filename, data, mode='w'):
    """Write *data* to *filename*; *mode* selects text or binary writing."""
    # The context manager closes the file even on error, mirroring the
    # original try/finally.
    with open(filename, mode) as f:
        f.write(data)
def read_file(filename, mode='r'):
    """Return the full contents of *filename*; *mode* selects text or binary."""
    # The context manager closes the file even on error, mirroring the
    # original try/finally.
    with open(filename, mode) as f:
        return f.read()
def run(cmd):
    """Run shell command *cmd*, raising EApp if it exits non-zero.

    Unless global verbose mode is on, the command's stderr is discarded.
    """
    global verbose
    if not verbose:
        cmd += ' 2>%s' % os.devnull
    print_verbose('executing: %s' % cmd)
    if os.system(cmd):
        # raise EApp(...) works on Python 2 and 3; the original
        # `raise EApp, '...'` form is a Python 3 syntax error.
        raise EApp('failed command: %s' % cmd)
def music2png(format, infile, outfile, modified):
    '''Convert ABC notation in file infile to cropped PNG file named outfile.

    format: 'abc', 'ly', or None to guess from the source text.
    infile: input file name, or '-' for stdin.
    modified: when True, skip the conversion if outfile is already up to date.
    '''
    outfile = os.path.abspath(outfile)
    outdir = os.path.dirname(outfile)
    if not os.path.isdir(outdir):
        # raise EApp(...) works on Python 2 and 3, unlike `raise EApp, '...'`.
        raise EApp('directory does not exist: %s' % outdir)
    basefile = tempfile.mktemp(dir=os.path.dirname(outfile))
    temps = [basefile + ext for ext in ('.abc', '.ly', '.ps', '.midi')]
    skip = False
    if infile == '-':
        source = sys.stdin.read()
        # stdin has no timestamp, so freshness is tracked via an MD5 of the
        # source stored next to the output file.
        # NOTE(review): md5 is a Python 2 only module; hashlib.md5 is the
        # equivalent if this script is ever ported to Python 3.
        checksum = md5.new(source).digest()
        filename = os.path.splitext(outfile)[0] + '.md5'
        if modified:
            if os.path.isfile(filename) and os.path.isfile(outfile) and \
                    checksum == read_file(filename,'rb'):
                skip = True
            else:
                write_file(filename, checksum, 'wb')
    else:
        if not os.path.isfile(infile):
            raise EApp('input file does not exist: %s' % infile)
        if modified and os.path.isfile(outfile) and \
                os.path.getmtime(infile) <= os.path.getmtime(outfile):
            skip = True
        source = read_file(infile)
    if skip:
        print_verbose('skipped: no change: %s' % outfile)
        return
    if format is None:
        if source and source.startswith('\\'): # Guess input format.
            format = 'ly'
        else:
            format = 'abc'
    # Write temporary source file.
    write_file('%s.%s' % (basefile,format), source)
    abc = basefile + '.abc'
    ly = basefile + '.ly'
    png = basefile + '.png'
    saved_pwd = os.getcwd()
    os.chdir(outdir)
    try:
        if format == 'abc':
            run('abc2ly -o "%s" "%s"' % (ly,abc))
        run('lilypond --png -o "%s" "%s"' % (basefile,ly))
        os.rename(png, outfile)
    finally:
        os.chdir(saved_pwd)
    # Chop the bottom 75 pixels off to get rid of the page footer then crop the
    # music image. The -strip option necessary because FOP does not like the
    # custom PNG color profile used by Lilypond.
    run('convert "%s" -strip -gravity South -chop 0x75 -trim "%s"' % (outfile, outfile))
    for f in temps:
        if os.path.isfile(f):
            print_verbose('deleting: %s' % f)
            os.remove(f)
def usage(msg=''):
    """Print an optional error message followed by the usage summary to stderr."""
    if msg:
        print_stderr(msg)
    print_stderr('\n'
        'usage:\n'
        '   music2png [options] INFILE\n'
        '\n'
        'options:\n'
        '   -f FORMAT\n'
        '   -o OUTFILE\n'
        '   -m\n'
        '   -v\n'
        '   --help\n'
        '   --version')
def main():
    """Parse command line options, then convert INFILE to a PNG file."""
    # Process command line options.
    global verbose
    format = None
    outfile = None
    modified = False
    import getopt
    opts,args = getopt.getopt(sys.argv[1:], 'f:o:mhv', ['help','version'])
    for o,v in opts:
        if o in ('--help','-h'):
            # print(x) with a single argument behaves the same on
            # Python 2 and 3; the bare `print __doc__` statement does not.
            print(__doc__)
            sys.exit(0)
        if o =='--version':
            print('music2png version %s' % (VERSION,))
            sys.exit(0)
        if o == '-f': format = v
        if o == '-o': outfile = v
        if o == '-m': modified = True
        if o == '-v': verbose = True
    if len(args) != 1:
        usage()
        sys.exit(1)
    infile = args[0]
    if format not in (None, 'abc', 'ly'):
        usage('invalid FORMAT')
        sys.exit(1)
    if outfile is None:
        if infile == '-':
            usage('OUTFILE must be specified')
            sys.exit(1)
        outfile = os.path.splitext(infile)[0] + '.png'
    # Do the work.
    music2png(format, infile, outfile, modified)
    # Print something to suppress asciidoc 'no output from filter' warnings.
    if infile == '-':
        sys.stdout.write(' ')
if __name__ == "__main__":
    try:
        main()
    except SystemExit:
        raise
    except KeyboardInterrupt:
        sys.exit(1)
    except Exception as e:
        # `except ... as e` replaces the Python 2 only `except Exception, e`.
        print_stderr("%s: %s" % (os.path.basename(sys.argv[0]), str(e)))
        sys.exit(1)
|
phenoxim/cinder
|
refs/heads/master
|
cinder/policies/qos_specs.py
|
5
|
# Copyright (c) 2017 Huawei Technologies Co., Ltd.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_policy import policy
from cinder.policies import base
# Policy action names for the qos-specs admin API.
CREATE_POLICY = 'volume_extension:qos_specs_manage:create'
GET_POLICY = 'volume_extension:qos_specs_manage:get'
GET_ALL_POLICY = 'volume_extension:qos_specs_manage:get_all'
UPDATE_POLICY = 'volume_extension:qos_specs_manage:update'
DELETE_POLICY = 'volume_extension:qos_specs_manage:delete'

# All rules default to admin-only (RULE_ADMIN_API); each entry documents the
# REST operations it guards.
qos_specs_policies = [
    policy.DocumentedRuleDefault(
        name=GET_ALL_POLICY,
        check_str=base.RULE_ADMIN_API,
        description="List qos specs or list all associations.",
        operations=[
            {
                'method': 'GET',
                'path': '/qos-specs'
            },
            {
                'method': 'GET',
                'path': '/qos-specs/{qos_id}/associations'
            }
        ]),
    policy.DocumentedRuleDefault(
        name=GET_POLICY,
        check_str=base.RULE_ADMIN_API,
        description="Show qos specs.",
        operations=[
            {
                'method': 'GET',
                'path': '/qos-specs/{qos_id}'
            }
        ]),
    policy.DocumentedRuleDefault(
        name=CREATE_POLICY,
        check_str=base.RULE_ADMIN_API,
        description="Create qos specs.",
        operations=[
            {
                'method': 'POST',
                'path': '/qos-specs'
            }
        ]),
    policy.DocumentedRuleDefault(
        name=UPDATE_POLICY,
        check_str=base.RULE_ADMIN_API,
        description="Update qos specs (including updating association).",
        operations=[
            {
                'method': 'PUT',
                'path': '/qos-specs/{qos_id}'
            },
            {
                'method': 'GET',
                'path': '/qos-specs/{qos_id}/disassociate_all'
            },
            {
                'method': 'GET',
                'path': '/qos-specs/{qos_id}/associate'
            },
            {
                'method': 'GET',
                'path': '/qos-specs/{qos_id}/disassociate'
            }
        ]),
    policy.DocumentedRuleDefault(
        name=DELETE_POLICY,
        check_str=base.RULE_ADMIN_API,
        description="delete qos specs or unset one specified qos key.",
        operations=[
            {
                'method': 'DELETE',
                'path': '/qos-specs/{qos_id}'
            },
            {
                'method': 'PUT',
                'path': '/qos-specs/{qos_id}/delete_keys'
            }
        ])
]


def list_rules():
    """Return the qos-specs policy rules defined in this module."""
    return qos_specs_policies
|
aspidites/django
|
refs/heads/master
|
tests/i18n/tests.py
|
113
|
# -*- encoding: utf-8 -*-
from __future__ import unicode_literals
import datetime
import decimal
import gettext as gettext_module
import os
import pickle
from contextlib import contextmanager
from importlib import import_module
from threading import local
from unittest import skipUnless
from django import forms
from django.conf import settings
from django.template import Context, Template, TemplateSyntaxError
from django.test import (
RequestFactory, SimpleTestCase, TestCase, override_settings,
)
from django.utils import six, translation
from django.utils._os import upath
from django.utils.formats import (
date_format, get_format, get_format_modules, iter_format_modules, localize,
localize_input, reset_format_cache, sanitize_separators, time_format,
)
from django.utils.numberformat import format as nformat
from django.utils.safestring import SafeBytes, SafeString, SafeText, mark_safe
from django.utils.six import PY3
from django.utils.translation import (
LANGUAGE_SESSION_KEY, activate, check_for_language, deactivate,
get_language, get_language_bidi, get_language_from_request,
get_language_info, gettext, gettext_lazy, ngettext_lazy, npgettext,
npgettext_lazy, pgettext, pgettext_lazy, string_concat, to_locale,
trans_real, ugettext, ugettext_lazy, ungettext, ungettext_lazy,
)
from .forms import CompanyForm, I18nForm, SelectDateForm
from .models import Company, TestModel
here = os.path.dirname(os.path.abspath(upath(__file__)))
extended_locale_paths = settings.LOCALE_PATHS + [
os.path.join(here, 'other', 'locale'),
]
@contextmanager
def patch_formats(lang, **settings):
    """Temporarily seed Django's format cache with the given overrides."""
    from django.utils.formats import _format_cache
    # Populate _format_cache with temporary values
    for name, override in settings.items():
        _format_cache[(name, lang)] = override
    try:
        yield
    finally:
        reset_format_cache()
class TranslationTests(SimpleTestCase):
@translation.override('fr')
def test_plural(self):
"""
Test plurals with ungettext. French differs from English in that 0 is singular.
"""
self.assertEqual(ungettext("%d year", "%d years", 0) % 0, "0 année")
self.assertEqual(ungettext("%d year", "%d years", 2) % 2, "2 années")
self.assertEqual(ungettext("%(size)d byte", "%(size)d bytes", 0) % {'size': 0}, "0 octet")
self.assertEqual(ungettext("%(size)d byte", "%(size)d bytes", 2) % {'size': 2}, "2 octets")
def test_override(self):
activate('de')
try:
with translation.override('pl'):
self.assertEqual(get_language(), 'pl')
self.assertEqual(get_language(), 'de')
with translation.override(None):
self.assertEqual(get_language(), None)
self.assertEqual(get_language(), 'de')
finally:
deactivate()
def test_override_decorator(self):
@translation.override('pl')
def func_pl():
self.assertEqual(get_language(), 'pl')
@translation.override(None)
def func_none():
self.assertEqual(get_language(), None)
try:
activate('de')
func_pl()
self.assertEqual(get_language(), 'de')
func_none()
self.assertEqual(get_language(), 'de')
finally:
deactivate()
def test_override_exit(self):
"""
Test that the language restored is the one used when the function was
called, not the one used when the decorator was initialized. refs #23381
"""
activate('fr')
@translation.override('pl')
def func_pl():
pass
deactivate()
try:
activate('en')
func_pl()
self.assertEqual(get_language(), 'en')
finally:
deactivate()
def test_lazy_objects(self):
"""
Format string interpolation should work with *_lazy objects.
"""
s = ugettext_lazy('Add %(name)s')
d = {'name': 'Ringo'}
self.assertEqual('Add Ringo', s % d)
with translation.override('de', deactivate=True):
self.assertEqual('Ringo hinzuf\xfcgen', s % d)
with translation.override('pl'):
self.assertEqual('Dodaj Ringo', s % d)
# It should be possible to compare *_lazy objects.
s1 = ugettext_lazy('Add %(name)s')
self.assertEqual(s, s1)
s2 = gettext_lazy('Add %(name)s')
s3 = gettext_lazy('Add %(name)s')
self.assertEqual(s2, s3)
self.assertEqual(s, s2)
s4 = ugettext_lazy('Some other string')
self.assertNotEqual(s, s4)
@skipUnless(six.PY2, "No more bytestring translations on PY3")
def test_lazy_and_bytestrings(self):
# On Python 2, (n)gettext_lazy should not transform a bytestring to unicode
self.assertEqual(gettext_lazy(b"test").upper(), b"TEST")
self.assertEqual((ngettext_lazy(b"%d test", b"%d tests") % 1).upper(), b"1 TEST")
# Other versions of lazy functions always return unicode
self.assertEqual(ugettext_lazy(b"test").upper(), "TEST")
self.assertEqual((ungettext_lazy(b"%d test", b"%d tests") % 1).upper(), "1 TEST")
self.assertEqual(pgettext_lazy(b"context", b"test").upper(), "TEST")
self.assertEqual(
(npgettext_lazy(b"context", b"%d test", b"%d tests") % 1).upper(),
"1 TEST"
)
def test_lazy_pickle(self):
s1 = ugettext_lazy("test")
self.assertEqual(six.text_type(s1), "test")
s2 = pickle.loads(pickle.dumps(s1))
self.assertEqual(six.text_type(s2), "test")
@override_settings(LOCALE_PATHS=extended_locale_paths)
def test_ungettext_lazy(self):
    """
    The lazy plural variants pick the correct singular/plural form when
    interpolated, whether the number is bound eagerly or deferred to a
    dict key.
    """
    u_fmt = ungettext_lazy('%d good result', '%d good results')
    s_fmt = ngettext_lazy(str('%d good result'), str('%d good results'))
    ctx_fmt = npgettext_lazy('Exclamation', '%d good result', '%d good results')
    plain = ungettext_lazy('good result', 'good results')
    with translation.override('de'):
        self.assertEqual('1 gutes Resultat', u_fmt % 1)
        self.assertEqual('4 guten Resultate', u_fmt % 4)
        self.assertEqual(str('1 gutes Resultat'), s_fmt % 1)
        self.assertEqual(str('4 guten Resultate'), s_fmt % 4)
        self.assertEqual('1 gutes Resultat!', ctx_fmt % 1)
        self.assertEqual('4 guten Resultate!', ctx_fmt % 4)
        self.assertEqual('gutes Resultat', plain % 1)
        self.assertEqual('guten Resultate', plain % 4)
    # The number can be bound at construction time (4) or resolved later
    # from the interpolation dict (key 'num').
    u_bound = ungettext_lazy('Hi %(name)s, %(num)d good result', 'Hi %(name)s, %(num)d good results', 4)
    u_by_key = ungettext_lazy('Hi %(name)s, %(num)d good result', 'Hi %(name)s, %(num)d good results', 'num')
    s_bound = ngettext_lazy(str('Hi %(name)s, %(num)d good result'), str('Hi %(name)s, %(num)d good results'), 4)
    s_by_key = ngettext_lazy(str('Hi %(name)s, %(num)d good result'), str('Hi %(name)s, %(num)d good results'), 'num')
    ctx_bound = npgettext_lazy('Greeting', 'Hi %(name)s, %(num)d good result', 'Hi %(name)s, %(num)d good results', 4)
    ctx_by_key = npgettext_lazy('Greeting', 'Hi %(name)s, %(num)d good result', 'Hi %(name)s, %(num)d good results', 'num')
    with translation.override('de'):
        self.assertEqual('Hallo Jim, 4 guten Resultate', u_bound % {'num': 4, 'name': 'Jim'})
        self.assertEqual('Hallo Jim, 1 gutes Resultat', u_by_key % {'name': 'Jim', 'num': 1})
        self.assertEqual('Hallo Jim, 5 guten Resultate', u_by_key % {'name': 'Jim', 'num': 5})
        with six.assertRaisesRegex(self, KeyError, 'Your dictionary lacks key.*'):
            u_by_key % {'name': 'Jim'}
        self.assertEqual(str('Hallo Jim, 4 guten Resultate'), s_bound % {'num': 4, 'name': 'Jim'})
        self.assertEqual(str('Hallo Jim, 1 gutes Resultat'), s_by_key % {'name': 'Jim', 'num': 1})
        self.assertEqual(str('Hallo Jim, 5 guten Resultate'), s_by_key % {'name': 'Jim', 'num': 5})
        with six.assertRaisesRegex(self, KeyError, 'Your dictionary lacks key.*'):
            s_by_key % {'name': 'Jim'}
        self.assertEqual('Willkommen Jim, 4 guten Resultate', ctx_bound % {'num': 4, 'name': 'Jim'})
        self.assertEqual('Willkommen Jim, 1 gutes Resultat', ctx_by_key % {'name': 'Jim', 'num': 1})
        self.assertEqual('Willkommen Jim, 5 guten Resultate', ctx_by_key % {'name': 'Jim', 'num': 5})
        with six.assertRaisesRegex(self, KeyError, 'Your dictionary lacks key.*'):
            ctx_by_key % {'name': 'Jim'}
@skipUnless(six.PY2, "PY3 doesn't have distinct int and long types")
def test_ungettext_lazy_long(self):
    """
    Regression test for #22820: int and long should be treated alike in ungettext_lazy.
    """
    singular = '%(name)s has %(num)d good result'
    plural = '%(name)s has %(num)d good results'
    params = {'name': 'Joe', 'num': 4}
    self.assertEqual("Joe has 4 good results", ungettext_lazy(singular, plural, 4) % params)
    # The very same number given as a long must produce the same output.
    self.assertEqual("Joe has 4 good results", ungettext_lazy(singular, plural, long(4)) % params)  # NOQA: long undefined on PY3
@override_settings(LOCALE_PATHS=extended_locale_paths)
def test_pgettext(self):
    # Reset the per-thread translation state so the extended locale
    # paths are picked up freshly.
    trans_real._active = local()
    trans_real._translations = {}
    with translation.override('de'):
        # An unknown context falls back to the untranslated msgid.
        self.assertEqual("May", pgettext("unexisting", "May"))
        self.assertEqual("Mai", pgettext("month name", "May"))
        self.assertEqual("Kann", pgettext("verb", "May"))
        self.assertEqual("4 Resultate", npgettext("search", "%d result", "%d results", 4) % 4)
@override_settings(LOCALE_PATHS=extended_locale_paths)
def test_template_tags_pgettext(self):
    """
    Ensure that message contexts are taken into account the {% trans %} and
    {% blocktrans %} template tags.
    Refs #14806.
    """
    # Reset cached translation state so the extended locale path is reloaded.
    trans_real._active = local()
    trans_real._translations = {}
    with translation.override('de'):
        # {% trans %} -----------------------------------
        # Inexisting context... falls back to the untranslated msgid.
        t = Template('{% load i18n %}{% trans "May" context "unexisting" %}')
        rendered = t.render(Context())
        self.assertEqual(rendered, 'May')
        # Existing context...
        # Using a literal
        t = Template('{% load i18n %}{% trans "May" context "month name" %}')
        rendered = t.render(Context())
        self.assertEqual(rendered, 'Mai')
        t = Template('{% load i18n %}{% trans "May" context "verb" %}')
        rendered = t.render(Context())
        self.assertEqual(rendered, 'Kann')
        # Using a variable
        t = Template('{% load i18n %}{% trans "May" context message_context %}')
        rendered = t.render(Context({'message_context': 'month name'}))
        self.assertEqual(rendered, 'Mai')
        t = Template('{% load i18n %}{% trans "May" context message_context %}')
        rendered = t.render(Context({'message_context': 'verb'}))
        self.assertEqual(rendered, 'Kann')
        # Using a filter
        t = Template('{% load i18n %}{% trans "May" context message_context|lower %}')
        rendered = t.render(Context({'message_context': 'MONTH NAME'}))
        self.assertEqual(rendered, 'Mai')
        t = Template('{% load i18n %}{% trans "May" context message_context|lower %}')
        rendered = t.render(Context({'message_context': 'VERB'}))
        self.assertEqual(rendered, 'Kann')
        # Using 'as'
        t = Template('{% load i18n %}{% trans "May" context "month name" as var %}Value: {{ var }}')
        rendered = t.render(Context())
        self.assertEqual(rendered, 'Value: Mai')
        t = Template('{% load i18n %}{% trans "May" as var context "verb" %}Value: {{ var }}')
        rendered = t.render(Context())
        self.assertEqual(rendered, 'Value: Kann')
        # {% blocktrans %} ------------------------------
        # Inexisting context...
        t = Template('{% load i18n %}{% blocktrans context "unexisting" %}May{% endblocktrans %}')
        rendered = t.render(Context())
        self.assertEqual(rendered, 'May')
        # Existing context...
        # Using a literal
        t = Template('{% load i18n %}{% blocktrans context "month name" %}May{% endblocktrans %}')
        rendered = t.render(Context())
        self.assertEqual(rendered, 'Mai')
        t = Template('{% load i18n %}{% blocktrans context "verb" %}May{% endblocktrans %}')
        rendered = t.render(Context())
        self.assertEqual(rendered, 'Kann')
        # Using a variable
        t = Template('{% load i18n %}{% blocktrans context message_context %}May{% endblocktrans %}')
        rendered = t.render(Context({'message_context': 'month name'}))
        self.assertEqual(rendered, 'Mai')
        t = Template('{% load i18n %}{% blocktrans context message_context %}May{% endblocktrans %}')
        rendered = t.render(Context({'message_context': 'verb'}))
        self.assertEqual(rendered, 'Kann')
        # Using a filter
        t = Template('{% load i18n %}{% blocktrans context message_context|lower %}May{% endblocktrans %}')
        rendered = t.render(Context({'message_context': 'MONTH NAME'}))
        self.assertEqual(rendered, 'Mai')
        t = Template('{% load i18n %}{% blocktrans context message_context|lower %}May{% endblocktrans %}')
        rendered = t.render(Context({'message_context': 'VERB'}))
        self.assertEqual(rendered, 'Kann')
        # Using 'count' -- the context applies to both plural forms.
        t = Template('{% load i18n %}{% blocktrans count number=1 context "super search" %}{{ number }} super result{% plural %}{{ number }} super results{% endblocktrans %}')
        rendered = t.render(Context())
        self.assertEqual(rendered, '1 Super-Ergebnis')
        t = Template('{% load i18n %}{% blocktrans count number=2 context "super search" %}{{ number }} super result{% plural %}{{ number }} super results{% endblocktrans %}')
        rendered = t.render(Context())
        self.assertEqual(rendered, '2 Super-Ergebnisse')
        t = Template('{% load i18n %}{% blocktrans context "other super search" count number=1 %}{{ number }} super result{% plural %}{{ number }} super results{% endblocktrans %}')
        rendered = t.render(Context())
        self.assertEqual(rendered, '1 anderen Super-Ergebnis')
        t = Template('{% load i18n %}{% blocktrans context "other super search" count number=2 %}{{ number }} super result{% plural %}{{ number }} super results{% endblocktrans %}')
        rendered = t.render(Context())
        self.assertEqual(rendered, '2 andere Super-Ergebnisse')
        # Using 'with'
        t = Template('{% load i18n %}{% blocktrans with num_comments=5 context "comment count" %}There are {{ num_comments }} comments{% endblocktrans %}')
        rendered = t.render(Context())
        self.assertEqual(rendered, 'Es gibt 5 Kommentare')
        t = Template('{% load i18n %}{% blocktrans with num_comments=5 context "other comment count" %}There are {{ num_comments }} comments{% endblocktrans %}')
        rendered = t.render(Context())
        self.assertEqual(rendered, 'Andere: Es gibt 5 Kommentare')
        # Using trimmed -- surrounding whitespace/newlines are collapsed.
        t = Template('{% load i18n %}{% blocktrans trimmed %}\n\nThere\n\t are 5 \n\n comments\n{% endblocktrans %}')
        rendered = t.render(Context())
        self.assertEqual(rendered, 'There are 5 comments')
        t = Template('{% load i18n %}{% blocktrans with num_comments=5 context "comment count" trimmed %}\n\nThere are \t\n \t {{ num_comments }} comments\n\n{% endblocktrans %}')
        rendered = t.render(Context())
        self.assertEqual(rendered, 'Es gibt 5 Kommentare')
        t = Template('{% load i18n %}{% blocktrans context "other super search" count number=2 trimmed %}\n{{ number }} super \n result{% plural %}{{ number }} super results{% endblocktrans %}')
        rendered = t.render(Context())
        self.assertEqual(rendered, '2 andere Super-Ergebnisse')
        # Mis-uses -- 'context' with no value is a syntax error.
        self.assertRaises(TemplateSyntaxError, Template, '{% load i18n %}{% blocktrans context with month="May" %}{{ month }}{% endblocktrans %}')
        self.assertRaises(TemplateSyntaxError, Template, '{% load i18n %}{% blocktrans context %}{% endblocktrans %}')
        self.assertRaises(TemplateSyntaxError, Template, '{% load i18n %}{% blocktrans count number=2 context %}{{ number }} super result{% plural %}{{ number }} super results{% endblocktrans %}')
def test_string_concat(self):
    """
    six.text_type(string_concat(...)) should not raise a TypeError - #4796
    """
    joined = string_concat("dja", "ngo")
    self.assertEqual(six.text_type(joined), 'django')
def test_empty_value(self):
    """
    Translating an empty value must yield the same empty value (#23196).
    """
    with translation.override('de'):
        self.assertEqual(ugettext(""), "")
        self.assertEqual(gettext(str("")), str(""))
        empty_safe = mark_safe("")
        self.assertEqual(ugettext(empty_safe), empty_safe)
def test_safe_status(self):
    """
    Translating a string requiring no auto-escaping shouldn't change the "safe" status.
    """
    pw = mark_safe(str('Password'))
    self.assertEqual(type(pw), SafeString)
    with translation.override('de', deactivate=True):
        self.assertEqual(type(ugettext(pw)), SafeText)
    # Concatenation with other safe strings keeps working both ways round.
    self.assertEqual(SafeText('a') + pw, 'aPassword')
    self.assertEqual(pw + SafeText('a'), 'Passworda')
    self.assertEqual(pw + mark_safe('a'), 'Passworda')
    self.assertEqual(mark_safe('a') + pw, 'aPassword')
    self.assertEqual(mark_safe('a') + mark_safe('s'), 'as')
def test_maclines(self):
    """
    Translations on files with mac or dos end of lines will be converted
    to unix eof in .po catalogs, and they have to match when retrieved
    """
    catalog = trans_real.translation('ca')._catalog
    catalog['Mac\nEOF\n'] = 'Catalan Mac\nEOF\n'
    catalog['Win\nEOF\n'] = 'Catalan Win\nEOF\n'
    with translation.override('ca', deactivate=True):
        # Lookups made with \r / \r\n endings must hit the \n entries.
        self.assertEqual(ugettext('Mac\rEOF\r'), 'Catalan Mac\nEOF\n')
        self.assertEqual(ugettext('Win\r\nEOF\r\n'), 'Catalan Win\nEOF\n')
def test_to_locale(self):
    """
    Tests the to_locale function and the special case of Serbian Latin
    (refs #12230 and r11299)
    """
    for language, expected_locale in (('en-us', 'en_US'), ('sr-lat', 'sr_Lat')):
        self.assertEqual(to_locale(language), expected_locale)
def test_to_language(self):
    """
    Test the to_language function
    """
    for locale, expected_language in (('en_US', 'en-us'), ('sr_Lat', 'sr-lat')):
        self.assertEqual(trans_real.to_language(locale), expected_language)
def test_language_bidi(self):
    # The default test language is LTR, and so is an override of None.
    self.assertEqual(False, get_language_bidi())
    with translation.override(None):
        self.assertEqual(False, get_language_bidi())
@override_settings(LOCALE_PATHS=[os.path.join(here, 'other', 'locale')])
def test_bad_placeholder_1(self):
    """
    Error in translation file should not crash template rendering
    (%(person)s is translated as %(personne)s in fr.po)
    Refs #16516.
    """
    with translation.override('fr'):
        template = Template('{% load i18n %}{% blocktrans %}My name is {{ person }}.{% endblocktrans %}')
        output = template.render(Context({'person': 'James'}))
        self.assertEqual(output, 'My name is James.')
@override_settings(LOCALE_PATHS=[os.path.join(here, 'other', 'locale')])
def test_bad_placeholder_2(self):
    """
    Error in translation file should not crash template rendering
    (%(person) misses a 's' in fr.po, causing the string formatting to fail)
    Refs #18393.
    """
    with translation.override('fr'):
        template = Template('{% load i18n %}{% blocktrans %}My other name is {{ person }}.{% endblocktrans %}')
        output = template.render(Context({'person': 'James'}))
        self.assertEqual(output, 'My other name is James.')
class TranslationThreadSafetyTests(SimpleTestCase):
    """
    Regression tests for #14894: translation.activate() must not fail with
    a RuntimeError when the global translations cache is mutated while it
    runs (as could happen from another thread).
    """

    def setUp(self):
        # Save global state mutated below so tearDown() can restore it.
        self._old_language = get_language()
        self._translations = trans_real._translations

        # here we rely on .split() being called inside the _fetch()
        # in trans_real.translation()
        class sideeffect_str(str):
            # str subclass whose split() inserts a new entry into the
            # global translations dict as a side effect, simulating a
            # concurrent mutation during activate().
            def split(self, *args, **kwargs):
                res = str.split(self, *args, **kwargs)
                trans_real._translations['en-YY'] = None
                return res

        trans_real._translations = {sideeffect_str('en-XX'): None}

    def tearDown(self):
        # Restore the globals touched in setUp().
        trans_real._translations = self._translations
        activate(self._old_language)

    def test_bug14894_translation_activate_thread_safety(self):
        translation_count = len(trans_real._translations)
        try:
            translation.activate('pl')
        except RuntimeError:
            self.fail('translation.activate() is not thread-safe')
        # make sure sideeffect_str actually added a new translation
        self.assertLess(translation_count, len(trans_real._translations))
@override_settings(USE_L10N=True)
class FormattingTests(SimpleTestCase):
def setUp(self):
    super(FormattingTests, self).setUp()
    # Sample values exercised by the formatting assertions in the tests.
    self.n = decimal.Decimal('66666.666')
    self.f = 99999.999
    self.d = datetime.date(2009, 12, 31)
    self.dt = datetime.datetime(2009, 12, 31, 20, 50)
    self.t = datetime.time(10, 15, 48)
    self.l = 10000 if PY3 else long(10000)  # NOQA: long undefined on PY3
    # Template context exposing every sample value.
    self.ctxt = Context(dict(
        n=self.n,
        t=self.t,
        d=self.d,
        dt=self.dt,
        f=self.f,
        l=self.l,
    ))
def test_locale_independent(self):
    """
    Localization of numbers
    """
    with self.settings(USE_THOUSAND_SEPARATOR=False):
        self.assertEqual(nformat(self.n, decimal_sep='.', decimal_pos=2, grouping=3, thousand_sep=','), '66666.66')
        self.assertEqual(nformat(self.n, decimal_sep='A', decimal_pos=1, grouping=1, thousand_sep='B'), '66666A6')
        self.assertEqual(nformat(self.n, decimal_sep='X', decimal_pos=0, grouping=1, thousand_sep='Y'), '66666')
    with self.settings(USE_THOUSAND_SEPARATOR=True):
        self.assertEqual(nformat(self.n, decimal_sep='.', decimal_pos=2, grouping=3, thousand_sep=','), '66,666.66')
        self.assertEqual(nformat(self.n, decimal_sep='A', decimal_pos=1, grouping=1, thousand_sep='B'), '6B6B6B6B6A6')
        self.assertEqual(nformat(-66666.666, decimal_sep='.', decimal_pos=1), '-66666.6')
        self.assertEqual(nformat(int('-66666'), decimal_sep='.', decimal_pos=1), '-66666.0')
        self.assertEqual(nformat(self.l, decimal_sep='.', decimal_pos=1), '10000.0')
        # This unusual grouping/force_grouping combination may be triggered by the intcomma filter (#17414)
        self.assertEqual(nformat(self.l, decimal_sep='.', decimal_pos=0, grouping=0, force_grouping=True), '10000')
        # date filter
        self.assertEqual(Template('{{ dt|date:"d.m.Y в H:i" }}').render(self.ctxt), '31.12.2009 в 20:50')
        self.assertEqual(Template('{{ t|time:"⌚ H:i" }}').render(self.ctxt), '⌚ 10:15')
@override_settings(USE_L10N=False)
def test_l10n_disabled(self):
    """
    Catalan locale with format i18n disabled translations will be used,
    but not formats
    """
    with translation.override('ca', deactivate=True):
        self.maxDiff = 3000
        # Formats stay at the (English-style) settings defaults...
        self.assertEqual('N j, Y', get_format('DATE_FORMAT'))
        self.assertEqual(0, get_format('FIRST_DAY_OF_WEEK'))
        self.assertEqual('.', get_format('DECIMAL_SEPARATOR'))
        self.assertEqual('10:15 a.m.', time_format(self.t))
        # ...while the strings themselves (month names) are translated.
        self.assertEqual('des. 31, 2009', date_format(self.d))
        self.assertEqual('desembre 2009', date_format(self.d, 'YEAR_MONTH_FORMAT'))
        self.assertEqual('12/31/2009 8:50 p.m.', date_format(self.dt, 'SHORT_DATETIME_FORMAT'))
        self.assertEqual('No localizable', localize('No localizable'))
        self.assertEqual('66666.666', localize(self.n))
        self.assertEqual('99999.999', localize(self.f))
        self.assertEqual('10000', localize(self.l))
        self.assertEqual('des. 31, 2009', localize(self.d))
        self.assertEqual('des. 31, 2009, 8:50 p.m.', localize(self.dt))
        self.assertEqual('66666.666', Template('{{ n }}').render(self.ctxt))
        self.assertEqual('99999.999', Template('{{ f }}').render(self.ctxt))
        self.assertEqual('des. 31, 2009', Template('{{ d }}').render(self.ctxt))
        self.assertEqual('des. 31, 2009, 8:50 p.m.', Template('{{ dt }}').render(self.ctxt))
        self.assertEqual('66666.67', Template('{{ n|floatformat:2 }}').render(self.ctxt))
        self.assertEqual('100000.0', Template('{{ f|floatformat }}').render(self.ctxt))
        self.assertEqual('10:15 a.m.', Template('{{ t|time:"TIME_FORMAT" }}').render(self.ctxt))
        self.assertEqual('12/31/2009', Template('{{ d|date:"SHORT_DATE_FORMAT" }}').render(self.ctxt))
        self.assertEqual('12/31/2009 8:50 p.m.', Template('{{ dt|date:"SHORT_DATETIME_FORMAT" }}').render(self.ctxt))
        # Catalan-style input is rejected because localized parsing is off.
        form = I18nForm({
            'decimal_field': '66666,666',
            'float_field': '99999,999',
            'date_field': '31/12/2009',
            'datetime_field': '31/12/2009 20:50',
            'time_field': '20:50',
            'integer_field': '1.234',
        })
        self.assertFalse(form.is_valid())
        self.assertEqual(['Introdu\xefu un n\xfamero.'], form.errors['float_field'])
        self.assertEqual(['Introdu\xefu un n\xfamero.'], form.errors['decimal_field'])
        self.assertEqual(['Introdu\xefu una data v\xe0lida.'], form.errors['date_field'])
        self.assertEqual(['Introdu\xefu una data/hora v\xe0lides.'], form.errors['datetime_field'])
        self.assertEqual(['Introdu\xefu un n\xfamero sencer.'], form.errors['integer_field'])
        form2 = SelectDateForm({
            'date_field_month': '12',
            'date_field_day': '31',
            'date_field_year': '2009'
        })
        self.assertTrue(form2.is_valid())
        self.assertEqual(datetime.date(2009, 12, 31), form2.cleaned_data['date_field'])
        # Month names are translated, but the month-day-year order follows
        # the non-localized default.
        self.assertHTMLEqual(
            '<select name="mydate_month" id="id_mydate_month">\n<option value="0">---</option>\n<option value="1">gener</option>\n<option value="2">febrer</option>\n<option value="3">mar\xe7</option>\n<option value="4">abril</option>\n<option value="5">maig</option>\n<option value="6">juny</option>\n<option value="7">juliol</option>\n<option value="8">agost</option>\n<option value="9">setembre</option>\n<option value="10">octubre</option>\n<option value="11">novembre</option>\n<option value="12" selected="selected">desembre</option>\n</select>\n<select name="mydate_day" id="id_mydate_day">\n<option value="0">---</option>\n<option value="1">1</option>\n<option value="2">2</option>\n<option value="3">3</option>\n<option value="4">4</option>\n<option value="5">5</option>\n<option value="6">6</option>\n<option value="7">7</option>\n<option value="8">8</option>\n<option value="9">9</option>\n<option value="10">10</option>\n<option value="11">11</option>\n<option value="12">12</option>\n<option value="13">13</option>\n<option value="14">14</option>\n<option value="15">15</option>\n<option value="16">16</option>\n<option value="17">17</option>\n<option value="18">18</option>\n<option value="19">19</option>\n<option value="20">20</option>\n<option value="21">21</option>\n<option value="22">22</option>\n<option value="23">23</option>\n<option value="24">24</option>\n<option value="25">25</option>\n<option value="26">26</option>\n<option value="27">27</option>\n<option value="28">28</option>\n<option value="29">29</option>\n<option value="30">30</option>\n<option value="31" selected="selected">31</option>\n</select>\n<select name="mydate_year" id="id_mydate_year">\n<option value="0">---</option>\n<option value="2009" selected="selected">2009</option>\n<option value="2010">2010</option>\n<option value="2011">2011</option>\n<option value="2012">2012</option>\n<option value="2013">2013</option>\n<option value="2014">2014</option>\n<option value="2015">2015</option>\n<option value="2016">2016</option>\n<option value="2017">2017</option>\n<option value="2018">2018</option>\n</select>',
            forms.SelectDateWidget(years=range(2009, 2019)).render('mydate', datetime.date(2009, 12, 31))
        )
        # We shouldn't change the behavior of the floatformat filter re:
        # thousand separator and grouping when USE_L10N is False even
        # if the USE_THOUSAND_SEPARATOR, NUMBER_GROUPING and
        # THOUSAND_SEPARATOR settings are specified
        with self.settings(USE_THOUSAND_SEPARATOR=True,
                NUMBER_GROUPING=1, THOUSAND_SEPARATOR='!'):
            self.assertEqual('66666.67', Template('{{ n|floatformat:2 }}').render(self.ctxt))
            self.assertEqual('100000.0', Template('{{ f|floatformat }}').render(self.ctxt))
def test_false_like_locale_formats(self):
    """
    Ensure that the active locale's formats take precedence over the
    default settings even if they would be interpreted as False in a
    conditional test (e.g. 0 or empty string).
    Refs #16938.
    """
    with patch_formats('fr', THOUSAND_SEPARATOR='', FIRST_DAY_OF_WEEK=0), translation.override('fr'):
        with self.settings(USE_THOUSAND_SEPARATOR=True, THOUSAND_SEPARATOR='!'):
            self.assertEqual('', get_format('THOUSAND_SEPARATOR'))
            # Even a second time (after the format has been cached)...
            self.assertEqual('', get_format('THOUSAND_SEPARATOR'))
        with self.settings(FIRST_DAY_OF_WEEK=1):
            self.assertEqual(0, get_format('FIRST_DAY_OF_WEEK'))
            # Even a second time (after the format has been cached)...
            self.assertEqual(0, get_format('FIRST_DAY_OF_WEEK'))
def test_l10n_enabled(self):
    # With USE_L10N=True (set on the class), both translations and
    # locale-specific formats apply.
    self.maxDiff = 3000
    # Catalan locale
    with translation.override('ca', deactivate=True):
        self.assertEqual('j \d\e F \d\e Y', get_format('DATE_FORMAT'))
        self.assertEqual(1, get_format('FIRST_DAY_OF_WEEK'))
        self.assertEqual(',', get_format('DECIMAL_SEPARATOR'))
        self.assertEqual('10:15', time_format(self.t))
        self.assertEqual('31 de desembre de 2009', date_format(self.d))
        self.assertEqual('desembre del 2009', date_format(self.d, 'YEAR_MONTH_FORMAT'))
        self.assertEqual('31/12/2009 20:50', date_format(self.dt, 'SHORT_DATETIME_FORMAT'))
        self.assertEqual('No localizable', localize('No localizable'))
        with self.settings(USE_THOUSAND_SEPARATOR=True):
            self.assertEqual('66.666,666', localize(self.n))
            self.assertEqual('99.999,999', localize(self.f))
            self.assertEqual('10.000', localize(self.l))
            self.assertEqual('True', localize(True))
        with self.settings(USE_THOUSAND_SEPARATOR=False):
            self.assertEqual('66666,666', localize(self.n))
            self.assertEqual('99999,999', localize(self.f))
            self.assertEqual('10000', localize(self.l))
            self.assertEqual('31 de desembre de 2009', localize(self.d))
            self.assertEqual('31 de desembre de 2009 a les 20:50', localize(self.dt))
        with self.settings(USE_THOUSAND_SEPARATOR=True):
            self.assertEqual('66.666,666', Template('{{ n }}').render(self.ctxt))
            self.assertEqual('99.999,999', Template('{{ f }}').render(self.ctxt))
            self.assertEqual('10.000', Template('{{ l }}').render(self.ctxt))
        with self.settings(USE_THOUSAND_SEPARATOR=True):
            # Localized (comma-decimal, dot-thousand) input is accepted.
            form3 = I18nForm({
                'decimal_field': '66.666,666',
                'float_field': '99.999,999',
                'date_field': '31/12/2009',
                'datetime_field': '31/12/2009 20:50',
                'time_field': '20:50',
                'integer_field': '1.234',
            })
            self.assertTrue(form3.is_valid())
            self.assertEqual(decimal.Decimal('66666.666'), form3.cleaned_data['decimal_field'])
            self.assertEqual(99999.999, form3.cleaned_data['float_field'])
            self.assertEqual(datetime.date(2009, 12, 31), form3.cleaned_data['date_field'])
            self.assertEqual(datetime.datetime(2009, 12, 31, 20, 50), form3.cleaned_data['datetime_field'])
            self.assertEqual(datetime.time(20, 50), form3.cleaned_data['time_field'])
            self.assertEqual(1234, form3.cleaned_data['integer_field'])
        with self.settings(USE_THOUSAND_SEPARATOR=False):
            self.assertEqual('66666,666', Template('{{ n }}').render(self.ctxt))
            self.assertEqual('99999,999', Template('{{ f }}').render(self.ctxt))
            self.assertEqual('31 de desembre de 2009', Template('{{ d }}').render(self.ctxt))
            self.assertEqual('31 de desembre de 2009 a les 20:50', Template('{{ dt }}').render(self.ctxt))
            self.assertEqual('66666,67', Template('{{ n|floatformat:2 }}').render(self.ctxt))
            self.assertEqual('100000,0', Template('{{ f|floatformat }}').render(self.ctxt))
            self.assertEqual('10:15', Template('{{ t|time:"TIME_FORMAT" }}').render(self.ctxt))
            self.assertEqual('31/12/2009', Template('{{ d|date:"SHORT_DATE_FORMAT" }}').render(self.ctxt))
            self.assertEqual('31/12/2009 20:50', Template('{{ dt|date:"SHORT_DATETIME_FORMAT" }}').render(self.ctxt))
            self.assertEqual(date_format(datetime.datetime.now(), "DATE_FORMAT"),
                Template('{% now "DATE_FORMAT" %}').render(self.ctxt))
        with self.settings(USE_THOUSAND_SEPARATOR=False):
            form4 = I18nForm({
                'decimal_field': '66666,666',
                'float_field': '99999,999',
                'date_field': '31/12/2009',
                'datetime_field': '31/12/2009 20:50',
                'time_field': '20:50',
                'integer_field': '1234',
            })
            self.assertTrue(form4.is_valid())
            self.assertEqual(decimal.Decimal('66666.666'), form4.cleaned_data['decimal_field'])
            self.assertEqual(99999.999, form4.cleaned_data['float_field'])
            self.assertEqual(datetime.date(2009, 12, 31), form4.cleaned_data['date_field'])
            self.assertEqual(datetime.datetime(2009, 12, 31, 20, 50), form4.cleaned_data['datetime_field'])
            self.assertEqual(datetime.time(20, 50), form4.cleaned_data['time_field'])
            self.assertEqual(1234, form4.cleaned_data['integer_field'])
        form5 = SelectDateForm({
            'date_field_month': '12',
            'date_field_day': '31',
            'date_field_year': '2009'
        })
        self.assertTrue(form5.is_valid())
        self.assertEqual(datetime.date(2009, 12, 31), form5.cleaned_data['date_field'])
        # With L10N on, the widget renders day-month-year for Catalan.
        self.assertHTMLEqual(
            '<select name="mydate_day" id="id_mydate_day">\n<option value="0">---</option>\n<option value="1">1</option>\n<option value="2">2</option>\n<option value="3">3</option>\n<option value="4">4</option>\n<option value="5">5</option>\n<option value="6">6</option>\n<option value="7">7</option>\n<option value="8">8</option>\n<option value="9">9</option>\n<option value="10">10</option>\n<option value="11">11</option>\n<option value="12">12</option>\n<option value="13">13</option>\n<option value="14">14</option>\n<option value="15">15</option>\n<option value="16">16</option>\n<option value="17">17</option>\n<option value="18">18</option>\n<option value="19">19</option>\n<option value="20">20</option>\n<option value="21">21</option>\n<option value="22">22</option>\n<option value="23">23</option>\n<option value="24">24</option>\n<option value="25">25</option>\n<option value="26">26</option>\n<option value="27">27</option>\n<option value="28">28</option>\n<option value="29">29</option>\n<option value="30">30</option>\n<option value="31" selected="selected">31</option>\n</select>\n<select name="mydate_month" id="id_mydate_month">\n<option value="0">---</option>\n<option value="1">gener</option>\n<option value="2">febrer</option>\n<option value="3">mar\xe7</option>\n<option value="4">abril</option>\n<option value="5">maig</option>\n<option value="6">juny</option>\n<option value="7">juliol</option>\n<option value="8">agost</option>\n<option value="9">setembre</option>\n<option value="10">octubre</option>\n<option value="11">novembre</option>\n<option value="12" selected="selected">desembre</option>\n</select>\n<select name="mydate_year" id="id_mydate_year">\n<option value="0">---</option>\n<option value="2009" selected="selected">2009</option>\n<option value="2010">2010</option>\n<option value="2011">2011</option>\n<option value="2012">2012</option>\n<option value="2013">2013</option>\n<option value="2014">2014</option>\n<option value="2015">2015</option>\n<option value="2016">2016</option>\n<option value="2017">2017</option>\n<option value="2018">2018</option>\n</select>',
            forms.SelectDateWidget(years=range(2009, 2019)).render('mydate', datetime.date(2009, 12, 31))
        )
    # Russian locale (with E as month)
    with translation.override('ru', deactivate=True):
        self.assertHTMLEqual(
            '<select name="mydate_day" id="id_mydate_day">\n<option value="0">---</option>\n<option value="1">1</option>\n<option value="2">2</option>\n<option value="3">3</option>\n<option value="4">4</option>\n<option value="5">5</option>\n<option value="6">6</option>\n<option value="7">7</option>\n<option value="8">8</option>\n<option value="9">9</option>\n<option value="10">10</option>\n<option value="11">11</option>\n<option value="12">12</option>\n<option value="13">13</option>\n<option value="14">14</option>\n<option value="15">15</option>\n<option value="16">16</option>\n<option value="17">17</option>\n<option value="18">18</option>\n<option value="19">19</option>\n<option value="20">20</option>\n<option value="21">21</option>\n<option value="22">22</option>\n<option value="23">23</option>\n<option value="24">24</option>\n<option value="25">25</option>\n<option value="26">26</option>\n<option value="27">27</option>\n<option value="28">28</option>\n<option value="29">29</option>\n<option value="30">30</option>\n<option value="31" selected="selected">31</option>\n</select>\n<select name="mydate_month" id="id_mydate_month">\n<option value="0">---</option>\n<option value="1">\u042f\u043d\u0432\u0430\u0440\u044c</option>\n<option value="2">\u0424\u0435\u0432\u0440\u0430\u043b\u044c</option>\n<option value="3">\u041c\u0430\u0440\u0442</option>\n<option value="4">\u0410\u043f\u0440\u0435\u043b\u044c</option>\n<option value="5">\u041c\u0430\u0439</option>\n<option value="6">\u0418\u044e\u043d\u044c</option>\n<option value="7">\u0418\u044e\u043b\u044c</option>\n<option value="8">\u0410\u0432\u0433\u0443\u0441\u0442</option>\n<option value="9">\u0421\u0435\u043d\u0442\u044f\u0431\u0440\u044c</option>\n<option value="10">\u041e\u043a\u0442\u044f\u0431\u0440\u044c</option>\n<option value="11">\u041d\u043e\u044f\u0431\u0440\u044c</option>\n<option value="12" selected="selected">\u0414\u0435\u043a\u0430\u0431\u0440\u044c</option>\n</select>\n<select name="mydate_year" id="id_mydate_year">\n<option value="0">---</option>\n<option value="2009" selected="selected">2009</option>\n<option value="2010">2010</option>\n<option value="2011">2011</option>\n<option value="2012">2012</option>\n<option value="2013">2013</option>\n<option value="2014">2014</option>\n<option value="2015">2015</option>\n<option value="2016">2016</option>\n<option value="2017">2017</option>\n<option value="2018">2018</option>\n</select>',
            forms.SelectDateWidget(years=range(2009, 2019)).render('mydate', datetime.date(2009, 12, 31))
        )
    # English locale
    with translation.override('en', deactivate=True):
        self.assertEqual('N j, Y', get_format('DATE_FORMAT'))
        self.assertEqual(0, get_format('FIRST_DAY_OF_WEEK'))
        self.assertEqual('.', get_format('DECIMAL_SEPARATOR'))
        self.assertEqual('Dec. 31, 2009', date_format(self.d))
        self.assertEqual('December 2009', date_format(self.d, 'YEAR_MONTH_FORMAT'))
        self.assertEqual('12/31/2009 8:50 p.m.', date_format(self.dt, 'SHORT_DATETIME_FORMAT'))
        self.assertEqual('No localizable', localize('No localizable'))
        with self.settings(USE_THOUSAND_SEPARATOR=True):
            self.assertEqual('66,666.666', localize(self.n))
            self.assertEqual('99,999.999', localize(self.f))
            self.assertEqual('10,000', localize(self.l))
        with self.settings(USE_THOUSAND_SEPARATOR=False):
            self.assertEqual('66666.666', localize(self.n))
            self.assertEqual('99999.999', localize(self.f))
            self.assertEqual('10000', localize(self.l))
            self.assertEqual('Dec. 31, 2009', localize(self.d))
            self.assertEqual('Dec. 31, 2009, 8:50 p.m.', localize(self.dt))
        with self.settings(USE_THOUSAND_SEPARATOR=True):
            self.assertEqual('66,666.666', Template('{{ n }}').render(self.ctxt))
            self.assertEqual('99,999.999', Template('{{ f }}').render(self.ctxt))
            self.assertEqual('10,000', Template('{{ l }}').render(self.ctxt))
        with self.settings(USE_THOUSAND_SEPARATOR=False):
            self.assertEqual('66666.666', Template('{{ n }}').render(self.ctxt))
            self.assertEqual('99999.999', Template('{{ f }}').render(self.ctxt))
            self.assertEqual('Dec. 31, 2009', Template('{{ d }}').render(self.ctxt))
            self.assertEqual('Dec. 31, 2009, 8:50 p.m.', Template('{{ dt }}').render(self.ctxt))
            self.assertEqual('66666.67', Template('{{ n|floatformat:2 }}').render(self.ctxt))
            self.assertEqual('100000.0', Template('{{ f|floatformat }}').render(self.ctxt))
            self.assertEqual('12/31/2009', Template('{{ d|date:"SHORT_DATE_FORMAT" }}').render(self.ctxt))
            self.assertEqual('12/31/2009 8:50 p.m.', Template('{{ dt|date:"SHORT_DATETIME_FORMAT" }}').render(self.ctxt))
        form5 = I18nForm({
            'decimal_field': '66666.666',
            'float_field': '99999.999',
            'date_field': '12/31/2009',
            'datetime_field': '12/31/2009 20:50',
            'time_field': '20:50',
            'integer_field': '1234',
        })
        self.assertTrue(form5.is_valid())
        self.assertEqual(decimal.Decimal('66666.666'), form5.cleaned_data['decimal_field'])
        self.assertEqual(99999.999, form5.cleaned_data['float_field'])
        self.assertEqual(datetime.date(2009, 12, 31), form5.cleaned_data['date_field'])
        self.assertEqual(datetime.datetime(2009, 12, 31, 20, 50), form5.cleaned_data['datetime_field'])
        self.assertEqual(datetime.time(20, 50), form5.cleaned_data['time_field'])
        self.assertEqual(1234, form5.cleaned_data['integer_field'])
        form6 = SelectDateForm({
            'date_field_month': '12',
            'date_field_day': '31',
            'date_field_year': '2009'
        })
        self.assertTrue(form6.is_valid())
        self.assertEqual(datetime.date(2009, 12, 31), form6.cleaned_data['date_field'])
        # English renders month-day-year.
        self.assertHTMLEqual(
            '<select name="mydate_month" id="id_mydate_month">\n<option value="0">---</option>\n<option value="1">January</option>\n<option value="2">February</option>\n<option value="3">March</option>\n<option value="4">April</option>\n<option value="5">May</option>\n<option value="6">June</option>\n<option value="7">July</option>\n<option value="8">August</option>\n<option value="9">September</option>\n<option value="10">October</option>\n<option value="11">November</option>\n<option value="12" selected="selected">December</option>\n</select>\n<select name="mydate_day" id="id_mydate_day">\n<option value="0">---</option>\n<option value="1">1</option>\n<option value="2">2</option>\n<option value="3">3</option>\n<option value="4">4</option>\n<option value="5">5</option>\n<option value="6">6</option>\n<option value="7">7</option>\n<option value="8">8</option>\n<option value="9">9</option>\n<option value="10">10</option>\n<option value="11">11</option>\n<option value="12">12</option>\n<option value="13">13</option>\n<option value="14">14</option>\n<option value="15">15</option>\n<option value="16">16</option>\n<option value="17">17</option>\n<option value="18">18</option>\n<option value="19">19</option>\n<option value="20">20</option>\n<option value="21">21</option>\n<option value="22">22</option>\n<option value="23">23</option>\n<option value="24">24</option>\n<option value="25">25</option>\n<option value="26">26</option>\n<option value="27">27</option>\n<option value="28">28</option>\n<option value="29">29</option>\n<option value="30">30</option>\n<option value="31" selected="selected">31</option>\n</select>\n<select name="mydate_year" id="id_mydate_year">\n<option value="0">---</option>\n<option value="2009" selected="selected">2009</option>\n<option value="2010">2010</option>\n<option value="2011">2011</option>\n<option value="2012">2012</option>\n<option value="2013">2013</option>\n<option value="2014">2014</option>\n<option value="2015">2015</option>\n<option value="2016">2016</option>\n<option value="2017">2017</option>\n<option value="2018">2018</option>\n</select>',
            forms.SelectDateWidget(years=range(2009, 2019)).render('mydate', datetime.date(2009, 12, 31))
        )
def test_sub_locales(self):
    """
    Check if sublocales fall back to the main locale
    """
    # 'de-at' has no dedicated formats here, so number formatting must fall
    # back to plain 'de'; likewise 'es-us' falls back to 'es' date formats.
    with self.settings(USE_THOUSAND_SEPARATOR=True):
        with translation.override('de-at', deactivate=True):
            self.assertEqual('66.666,666', Template('{{ n }}').render(self.ctxt))
        with translation.override('es-us', deactivate=True):
            self.assertEqual('31 de Diciembre de 2009', date_format(self.d))
def test_localized_input(self):
    """
    Tests if form input is correctly localized
    """
    self.maxDiff = 1200
    with translation.override('de-at', deactivate=True):
        # Bound form with Python-typed data; rendering must localize values
        # (German date format, comma decimal separator).
        form6 = CompanyForm({
            'name': 'acme',
            'date_added': datetime.datetime(2009, 12, 31, 6, 0, 0),
            'cents_paid': decimal.Decimal('59.47'),
            'products_delivered': 12000,
        })
        self.assertTrue(form6.is_valid())
        self.assertHTMLEqual(
            form6.as_ul(),
            '<li><label for="id_name">Name:</label> <input id="id_name" type="text" name="name" value="acme" maxlength="50" /></li>\n<li><label for="id_date_added">Date added:</label> <input type="text" name="date_added" value="31.12.2009 06:00:00" id="id_date_added" /></li>\n<li><label for="id_cents_paid">Cents paid:</label> <input type="text" name="cents_paid" value="59,47" id="id_cents_paid" /></li>\n<li><label for="id_products_delivered">Products delivered:</label> <input type="text" name="products_delivered" value="12000" id="id_products_delivered" /></li>'
        )
        # Round-trip: localized input string parses back to the original value.
        self.assertEqual(localize_input(datetime.datetime(2009, 12, 31, 6, 0, 0)), '31.12.2009 06:00:00')
        self.assertEqual(datetime.datetime(2009, 12, 31, 6, 0, 0), form6.cleaned_data['date_added'])
        with self.settings(USE_THOUSAND_SEPARATOR=True):
            # Checking for the localized "products_delivered" field
            self.assertInHTML('<input type="text" name="products_delivered" value="12.000" id="id_products_delivered" />', form6.as_ul())
def test_sanitize_separators(self):
    """
    Tests django.utils.formats.sanitize_separators.
    """
    # Non-strings are untouched
    self.assertEqual(sanitize_separators(123), 123)
    with translation.override('ru', deactivate=True):
        # Russian locale has non-breaking space (\xa0) as thousand separator
        # Check that usual space is accepted too when sanitizing inputs
        with self.settings(USE_THOUSAND_SEPARATOR=True):
            self.assertEqual(sanitize_separators('1\xa0234\xa0567'), '1234567')
            self.assertEqual(sanitize_separators('77\xa0777,777'), '77777.777')
            self.assertEqual(sanitize_separators('12 345'), '12345')
            self.assertEqual(sanitize_separators('77 777,777'), '77777.777')
        # With localization off, input is passed through untouched.
        with self.settings(USE_THOUSAND_SEPARATOR=True, USE_L10N=False):
            self.assertEqual(sanitize_separators('12\xa0345'), '12\xa0345')
    with patch_formats(get_language(), THOUSAND_SEPARATOR='.', DECIMAL_SEPARATOR=','):
        with self.settings(USE_THOUSAND_SEPARATOR=True):
            self.assertEqual(sanitize_separators('10.234'), '10234')
            # Suspicion that user entered dot as decimal separator (#22171)
            self.assertEqual(sanitize_separators('10.10'), '10.10')
def test_iter_format_modules(self):
    """
    Tests the iter_format_modules function.
    """
    # Importing some format modules so that we can compare the returned
    # modules with these expected modules
    default_mod = import_module('django.conf.locale.de.formats')
    test_mod = import_module('i18n.other.locale.de.formats')
    test_mod2 = import_module('i18n.other2.locale.de.formats')
    with translation.override('de-at', deactivate=True):
        # Should return the correct default module when no setting is set
        self.assertEqual(list(iter_format_modules('de')), [default_mod])
        # When the setting is a string, should return the given module and
        # the default module
        self.assertEqual(
            list(iter_format_modules('de', 'i18n.other.locale')),
            [test_mod, default_mod])
        # When setting is a list of strings, should return the given
        # modules and the default module
        self.assertEqual(
            list(iter_format_modules('de', ['i18n.other.locale', 'i18n.other2.locale'])),
            [test_mod, test_mod2, default_mod])
def test_iter_format_modules_stability(self):
    """
    Tests the iter_format_modules function always yields format modules in
    a stable and correct order in presence of both base ll and ll_CC formats.
    """
    en_format_mod = import_module('django.conf.locale.en.formats')
    en_gb_format_mod = import_module('django.conf.locale.en_GB.formats')
    # The country-specific module must come before the base language module.
    self.assertEqual(list(iter_format_modules('en-gb')), [en_gb_format_mod, en_format_mod])
def test_get_format_modules_lang(self):
    """An explicit lang argument overrides the currently active language."""
    with translation.override('de', deactivate=True):
        self.assertEqual('.', get_format('DECIMAL_SEPARATOR', lang='en'))
def test_get_format_modules_stability(self):
    """get_format_modules() must return the same value on repeated calls."""
    with self.settings(FORMAT_MODULE_PATH='i18n.other.locale'):
        with translation.override('de', deactivate=True):
            # Compare reprs so module identity/order differences show up.
            old = str("%r") % get_format_modules(reverse=True)
            new = str("%r") % get_format_modules(reverse=True)  # second try
            self.assertEqual(new, old, 'Value returned by get_formats_modules() must be preserved between calls.')
def test_localize_templatetag_and_filter(self):
    """
    Tests the {% localize %} templatetag
    """
    context = Context({'value': 3.14})
    # {% localize %} / {% localize on %} force localization even when
    # USE_L10N is off; {% localize off %} and |unlocalize force it off.
    template1 = Template("{% load l10n %}{% localize %}{{ value }}{% endlocalize %};{% localize on %}{{ value }}{% endlocalize %}")
    template2 = Template("{% load l10n %}{{ value }};{% localize off %}{{ value }};{% endlocalize %}{{ value }}")
    template3 = Template('{% load l10n %}{{ value }};{{ value|unlocalize }}')
    template4 = Template('{% load l10n %}{{ value }};{{ value|localize }}')
    output1 = '3,14;3,14'
    output2 = '3,14;3.14;3,14'
    output3 = '3,14;3.14'
    output4 = '3.14;3,14'
    with translation.override('de', deactivate=True):
        with self.settings(USE_L10N=False):
            self.assertEqual(template1.render(context), output1)
            self.assertEqual(template4.render(context), output4)
        with self.settings(USE_L10N=True):
            self.assertEqual(template1.render(context), output1)
            self.assertEqual(template2.render(context), output2)
            self.assertEqual(template3.render(context), output3)
def test_localized_as_text_as_hidden_input(self):
    """
    Tests if form input with 'as_hidden' or 'as_text' is correctly localized. Ticket #18777
    """
    self.maxDiff = 1200
    with translation.override('de-at', deactivate=True):
        template = Template('{% load l10n %}{{ form.date_added }}; {{ form.cents_paid }}')
        template_as_text = Template('{% load l10n %}{{ form.date_added.as_text }}; {{ form.cents_paid.as_text }}')
        template_as_hidden = Template('{% load l10n %}{{ form.date_added.as_hidden }}; {{ form.cents_paid.as_hidden }}')
        form = CompanyForm({
            'name': 'acme',
            'date_added': datetime.datetime(2009, 12, 31, 6, 0, 0),
            'cents_paid': decimal.Decimal('59.47'),
            'products_delivered': 12000,
        })
        context = Context({'form': form})
        self.assertTrue(form.is_valid())
        # All three rendering modes must produce localized values.
        self.assertHTMLEqual(
            template.render(context),
            '<input id="id_date_added" name="date_added" type="text" value="31.12.2009 06:00:00" />; <input id="id_cents_paid" name="cents_paid" type="text" value="59,47" />'
        )
        self.assertHTMLEqual(
            template_as_text.render(context),
            '<input id="id_date_added" name="date_added" type="text" value="31.12.2009 06:00:00" />; <input id="id_cents_paid" name="cents_paid" type="text" value="59,47" />'
        )
        self.assertHTMLEqual(
            template_as_hidden.render(context),
            '<input id="id_date_added" name="date_added" type="hidden" value="31.12.2009 06:00:00" />; <input id="id_cents_paid" name="cents_paid" type="hidden" value="59,47" />'
        )
class MiscTests(SimpleTestCase):
    """Assorted i18n tests: Accept-Language header parsing and language
    negotiation from headers, cookies, and URL paths."""

    def setUp(self):
        super(MiscTests, self).setUp()
        self.rf = RequestFactory()

    @override_settings(LANGUAGE_CODE='de')
    def test_english_fallback(self):
        """
        With a non-English LANGUAGE_CODE and if the active language is English
        or one of its variants, the untranslated string should be returned
        (instead of falling back to LANGUAGE_CODE) (See #24413).
        """
        self.assertEqual(ugettext("Image"), "Bild")
        with translation.override('en'):
            self.assertEqual(ugettext("Image"), "Image")
        with translation.override('en-us'):
            self.assertEqual(ugettext("Image"), "Image")
        with translation.override('en-ca'):
            self.assertEqual(ugettext("Image"), "Image")

    def test_parse_spec_http_header(self):
        """
        Testing HTTP header parsing. First, we test that we can parse the
        values according to the spec (and that we extract all the pieces in
        the right order).
        """
        p = trans_real.parse_accept_lang_header
        # Good headers.
        self.assertEqual([('de', 1.0)], p('de'))
        self.assertEqual([('en-au', 1.0)], p('en-AU'))
        self.assertEqual([('es-419', 1.0)], p('es-419'))
        self.assertEqual([('*', 1.0)], p('*;q=1.00'))
        self.assertEqual([('en-au', 0.123)], p('en-AU;q=0.123'))
        self.assertEqual([('en-au', 0.5)], p('en-au;q=0.5'))
        self.assertEqual([('en-au', 1.0)], p('en-au;q=1.0'))
        # Entries are returned sorted by descending quality value.
        self.assertEqual([('da', 1.0), ('en', 0.5), ('en-gb', 0.25)], p('da, en-gb;q=0.25, en;q=0.5'))
        self.assertEqual([('en-au-xx', 1.0)], p('en-au-xx'))
        self.assertEqual([('de', 1.0), ('en-au', 0.75), ('en-us', 0.5), ('en', 0.25), ('es', 0.125), ('fa', 0.125)], p('de,en-au;q=0.75,en-us;q=0.5,en;q=0.25,es;q=0.125,fa;q=0.125'))
        self.assertEqual([('*', 1.0)], p('*'))
        self.assertEqual([('de', 1.0)], p('de;q=0.'))
        self.assertEqual([('en', 1.0), ('*', 0.5)], p('en; q=1.0, * ; q=0.5'))
        self.assertEqual([], p(''))
        # Bad headers; should always return [].
        self.assertEqual([], p('en-gb;q=1.0000'))
        self.assertEqual([], p('en;q=0.1234'))
        self.assertEqual([], p('en;q=.2'))
        self.assertEqual([], p('abcdefghi-au'))
        self.assertEqual([], p('**'))
        self.assertEqual([], p('en,,gb'))
        self.assertEqual([], p('en-au;q=0.1.0'))
        self.assertEqual([], p('XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXZ,en'))
        self.assertEqual([], p('da, en-gb;q=0.8, en;q=0.7,#'))
        self.assertEqual([], p('de;q=2.0'))
        self.assertEqual([], p('de;q=0.a'))
        self.assertEqual([], p('12-345'))
        self.assertEqual([], p(''))
        self.assertEqual([], p('en; q=1,'))

    def test_parse_literal_http_header(self):
        """
        Now test that we parse a literal HTTP header correctly.
        """
        g = get_language_from_request
        r = self.rf.get('/')
        r.COOKIES = {}
        r.META = {'HTTP_ACCEPT_LANGUAGE': 'pt-br'}
        self.assertEqual('pt-br', g(r))
        r.META = {'HTTP_ACCEPT_LANGUAGE': 'pt'}
        self.assertEqual('pt', g(r))
        r.META = {'HTTP_ACCEPT_LANGUAGE': 'es,de'}
        self.assertEqual('es', g(r))
        r.META = {'HTTP_ACCEPT_LANGUAGE': 'es-ar,de'}
        self.assertEqual('es-ar', g(r))
        # This test assumes there won't be a Django translation to a US
        # variation of the Spanish language, a safe assumption. When the
        # user sets it as the preferred language, the main 'es'
        # translation should be selected instead.
        r.META = {'HTTP_ACCEPT_LANGUAGE': 'es-us'}
        self.assertEqual(g(r), 'es')
        # This tests the following scenario: there isn't a main language (zh)
        # translation of Django but there is a translation to variation (zh-hans)
        # the user sets zh-hans as the preferred language, it should be selected
        # by Django without falling back nor ignoring it.
        r.META = {'HTTP_ACCEPT_LANGUAGE': 'zh-hans,de'}
        self.assertEqual(g(r), 'zh-hans')
        r.META = {'HTTP_ACCEPT_LANGUAGE': 'NL'}
        self.assertEqual('nl', g(r))
        r.META = {'HTTP_ACCEPT_LANGUAGE': 'fy'}
        self.assertEqual('fy', g(r))
        r.META = {'HTTP_ACCEPT_LANGUAGE': 'ia'}
        self.assertEqual('ia', g(r))
        r.META = {'HTTP_ACCEPT_LANGUAGE': 'sr-latn'}
        self.assertEqual('sr-latn', g(r))
        r.META = {'HTTP_ACCEPT_LANGUAGE': 'zh-hans'}
        self.assertEqual('zh-hans', g(r))
        r.META = {'HTTP_ACCEPT_LANGUAGE': 'zh-hant'}
        self.assertEqual('zh-hant', g(r))

    @override_settings(
        LANGUAGES=[
            ('en', 'English'),
            ('zh-hans', 'Simplified Chinese'),
            ('zh-hant', 'Traditional Chinese'),
        ]
    )
    def test_support_for_deprecated_chinese_language_codes(self):
        """
        Some browsers (Firefox, IE etc) use deprecated language codes. As these
        language codes will be removed in Django 1.9, these will be incorrectly
        matched. For example zh-tw (traditional) will be interpreted as zh-hans
        (simplified), which is wrong. So we should also accept these deprecated
        language codes.
        refs #18419 -- this is explicitly for browser compatibility
        """
        g = get_language_from_request
        r = self.rf.get('/')
        r.COOKIES = {}
        r.META = {'HTTP_ACCEPT_LANGUAGE': 'zh-cn,en'}
        self.assertEqual(g(r), 'zh-hans')
        r.META = {'HTTP_ACCEPT_LANGUAGE': 'zh-tw,en'}
        self.assertEqual(g(r), 'zh-hant')

    def test_special_fallback_language(self):
        """
        Some languages may have special fallbacks that don't follow the simple
        'fr-ca' -> 'fr' logic (notably Chinese codes).
        """
        r = self.rf.get('/')
        r.COOKIES = {}
        r.META = {'HTTP_ACCEPT_LANGUAGE': 'zh-my,en'}
        self.assertEqual(get_language_from_request(r), 'zh-hans')

    def test_parse_language_cookie(self):
        """
        Now test that we parse language preferences stored in a cookie correctly.
        """
        g = get_language_from_request
        r = self.rf.get('/')
        r.COOKIES = {settings.LANGUAGE_COOKIE_NAME: 'pt-br'}
        r.META = {}
        self.assertEqual('pt-br', g(r))
        r.COOKIES = {settings.LANGUAGE_COOKIE_NAME: 'pt'}
        r.META = {}
        self.assertEqual('pt', g(r))
        # The cookie takes precedence over the Accept-Language header.
        r.COOKIES = {settings.LANGUAGE_COOKIE_NAME: 'es'}
        r.META = {'HTTP_ACCEPT_LANGUAGE': 'de'}
        self.assertEqual('es', g(r))
        # This test assumes there won't be a Django translation to a US
        # variation of the Spanish language, a safe assumption. When the
        # user sets it as the preferred language, the main 'es'
        # translation should be selected instead.
        r.COOKIES = {settings.LANGUAGE_COOKIE_NAME: 'es-us'}
        r.META = {}
        self.assertEqual(g(r), 'es')
        # This tests the following scenario: there isn't a main language (zh)
        # translation of Django but there is a translation to variation (zh-hans)
        # the user sets zh-hans as the preferred language, it should be selected
        # by Django without falling back nor ignoring it.
        r.COOKIES = {settings.LANGUAGE_COOKIE_NAME: 'zh-hans'}
        r.META = {'HTTP_ACCEPT_LANGUAGE': 'de'}
        self.assertEqual(g(r), 'zh-hans')

    def test_get_language_from_path_real(self):
        g = trans_real.get_language_from_path
        self.assertEqual(g('/pl/'), 'pl')
        self.assertEqual(g('/pl'), 'pl')
        self.assertEqual(g('/xyz/'), None)

    def test_get_language_from_path_null(self):
        # The trans_null implementation (USE_I18N=False) never detects a language.
        from django.utils.translation.trans_null import get_language_from_path as g
        self.assertEqual(g('/pl/'), None)
        self.assertEqual(g('/pl'), None)
        self.assertEqual(g('/xyz/'), None)

    @override_settings(LOCALE_PATHS=extended_locale_paths)
    def test_percent_in_translatable_block(self):
        t_sing = Template("{% load i18n %}{% blocktrans %}The result was {{ percent }}%{% endblocktrans %}")
        t_plur = Template("{% load i18n %}{% blocktrans count num as number %}{{ percent }}% represents {{ num }} object{% plural %}{{ percent }}% represents {{ num }} objects{% endblocktrans %}")
        with translation.override('de'):
            self.assertEqual(t_sing.render(Context({'percent': 42})), 'Das Ergebnis war 42%')
            self.assertEqual(t_plur.render(Context({'percent': 42, 'num': 1})), '42% stellt 1 Objekt dar')
            self.assertEqual(t_plur.render(Context({'percent': 42, 'num': 4})), '42% stellt 4 Objekte dar')

    @override_settings(LOCALE_PATHS=extended_locale_paths)
    def test_percent_formatting_in_blocktrans(self):
        """
        Test that using Python's %-formatting is properly escaped in blocktrans,
        singular or plural
        """
        t_sing = Template("{% load i18n %}{% blocktrans %}There are %(num_comments)s comments{% endblocktrans %}")
        t_plur = Template("{% load i18n %}{% blocktrans count num as number %}%(percent)s% represents {{ num }} object{% plural %}%(percent)s% represents {{ num }} objects{% endblocktrans %}")
        with translation.override('de'):
            # Strings won't get translated as they don't match after escaping %
            self.assertEqual(t_sing.render(Context({'num_comments': 42})), 'There are %(num_comments)s comments')
            self.assertEqual(t_plur.render(Context({'percent': 42, 'num': 1})), '%(percent)s% represents 1 object')
            self.assertEqual(t_plur.render(Context({'percent': 42, 'num': 4})), '%(percent)s% represents 4 objects')

    def test_cache_resetting(self):
        """
        #14170 after setting LANGUAGE, cache should be cleared and languages
        previously valid should not be used.
        """
        g = get_language_from_request
        r = self.rf.get('/')
        r.COOKIES = {}
        r.META = {'HTTP_ACCEPT_LANGUAGE': 'pt-br'}
        self.assertEqual('pt-br', g(r))
        with self.settings(LANGUAGES=[('en', 'English')]):
            self.assertNotEqual('pt-br', g(r))
class ResolutionOrderI18NTests(SimpleTestCase):
    """Base class for tests of translation catalog resolution order.

    Activates German for each test and provides assertUgettext() to check
    which catalog a given msgid was resolved from.
    """

    def setUp(self):
        super(ResolutionOrderI18NTests, self).setUp()
        activate('de')

    def tearDown(self):
        deactivate()
        super(ResolutionOrderI18NTests, self).tearDown()

    def assertUgettext(self, msgid, msgstr):
        # Substring check: the expected marker must appear in the translation.
        result = ugettext(msgid)
        self.assertIn(msgstr, result, ("The string '%s' isn't in the "
            "translation of '%s'; the actual result is '%s'." % (msgstr, msgid, result)))
class AppResolutionOrderI18NTests(ResolutionOrderI18NTests):
    """App catalogs are merged in INSTALLED_APPS order; earlier apps win."""

    @override_settings(LANGUAGE_CODE='de')
    def test_app_translation(self):
        # Original translation.
        self.assertUgettext('Date/time', 'Datum/Zeit')
        # Different translation.
        with self.modify_settings(INSTALLED_APPS={'append': 'i18n.resolution'}):
            # Force refreshing translations.
            activate('de')
            # Doesn't work because it's added later in the list.
            self.assertUgettext('Date/time', 'Datum/Zeit')
            with self.modify_settings(INSTALLED_APPS={'remove': 'django.contrib.admin.apps.SimpleAdminConfig'}):
                # Force refreshing translations.
                activate('de')
                # Unless the original is removed from the list.
                self.assertUgettext('Date/time', 'Datum/Zeit (APP)')
@override_settings(LOCALE_PATHS=extended_locale_paths)
class LocalePathsResolutionOrderI18NTests(ResolutionOrderI18NTests):
    """LOCALE_PATHS catalogs take precedence over app catalogs."""

    def test_locale_paths_translation(self):
        self.assertUgettext('Time', 'LOCALE_PATHS')

    def test_locale_paths_override_app_translation(self):
        with self.settings(INSTALLED_APPS=['i18n.resolution']):
            self.assertUgettext('Time', 'LOCALE_PATHS')
class DjangoFallbackResolutionOrderI18NTests(ResolutionOrderI18NTests):
    """With no app/project catalogs, Django's own catalog is the fallback."""

    def test_django_fallback(self):
        self.assertEqual(ugettext('Date/time'), 'Datum/Zeit')
class TestModels(TestCase):
    """Model-level i18n regressions: lazy strings and safe strings as field values."""

    def test_lazy(self):
        # A model whose field defaults use lazy translations must save cleanly.
        tm = TestModel()
        tm.save()

    def test_safestr(self):
        # SafeText/SafeBytes values must survive a round trip through save().
        c = Company(cents_paid=12, products_delivered=1)
        c.name = SafeText('Iñtërnâtiônàlizætiøn1')
        c.save()
        c.name = SafeBytes('Iñtërnâtiônàlizætiøn1'.encode('utf-8'))
        c.save()
class TestLanguageInfo(SimpleTestCase):
    """Tests for django.utils.translation.get_language_info()."""

    def test_localized_language_info(self):
        li = get_language_info('de')
        self.assertEqual(li['code'], 'de')
        self.assertEqual(li['name_local'], 'Deutsch')
        self.assertEqual(li['name'], 'German')
        self.assertEqual(li['bidi'], False)

    def test_unknown_language_code(self):
        six.assertRaisesRegex(self, KeyError, r"Unknown language code xx\.", get_language_info, 'xx')

    def test_unknown_only_country_code(self):
        # Unknown country variant falls back to the base language info.
        li = get_language_info('de-xx')
        self.assertEqual(li['code'], 'de')
        self.assertEqual(li['name_local'], 'Deutsch')
        self.assertEqual(li['name'], 'German')
        self.assertEqual(li['bidi'], False)

    def test_unknown_language_code_and_country_code(self):
        six.assertRaisesRegex(self, KeyError, r"Unknown language code xx-xx and xx\.", get_language_info, 'xx-xx')

    def test_fallback_language_code(self):
        """
        get_language_info return the first fallback language info if the lang_info
        struct does not contain the 'name' key.
        """
        li = get_language_info('zh-my')
        self.assertEqual(li['code'], 'zh-hans')
        li = get_language_info('zh-hans')
        self.assertEqual(li['code'], 'zh-hans')
class MultipleLocaleActivationTests(SimpleTestCase):
    """
    Tests for template rendering behavior when multiple locales are activated
    during the lifetime of the same process.
    """

    def setUp(self):
        super(MultipleLocaleActivationTests, self).setUp()
        self._old_language = get_language()

    def tearDown(self):
        super(MultipleLocaleActivationTests, self).tearDown()
        activate(self._old_language)

    def test_single_locale_activation(self):
        """
        Simple baseline behavior with one locale for all the supported i18n constructs.
        """
        with translation.override('fr'):
            self.assertEqual(Template("{{ _('Yes') }}").render(Context({})), 'Oui')
            self.assertEqual(Template("{% load i18n %}{% trans 'Yes' %}").render(Context({})), 'Oui')
            self.assertEqual(Template("{% load i18n %}{% blocktrans %}Yes{% endblocktrans %}").render(Context({})), 'Oui')

    # Literal marked up with _() in a filter expression
    # Each construct is exercised three ways: compile under one locale then
    # render under another (via the original language, via deactivate=True,
    # and via a direct switch). Translation must happen at render time.
    def test_multiple_locale_filter(self):
        with translation.override('de'):
            t = Template("{% load i18n %}{{ 0|yesno:_('yes,no,maybe') }}")
        with translation.override(self._old_language), translation.override('nl'):
            self.assertEqual(t.render(Context({})), 'nee')

    def test_multiple_locale_filter_deactivate(self):
        with translation.override('de', deactivate=True):
            t = Template("{% load i18n %}{{ 0|yesno:_('yes,no,maybe') }}")
        with translation.override('nl'):
            self.assertEqual(t.render(Context({})), 'nee')

    def test_multiple_locale_filter_direct_switch(self):
        with translation.override('de'):
            t = Template("{% load i18n %}{{ 0|yesno:_('yes,no,maybe') }}")
        with translation.override('nl'):
            self.assertEqual(t.render(Context({})), 'nee')

    # Literal marked up with _()
    def test_multiple_locale(self):
        with translation.override('de'):
            t = Template("{{ _('No') }}")
        with translation.override(self._old_language), translation.override('nl'):
            self.assertEqual(t.render(Context({})), 'Nee')

    def test_multiple_locale_deactivate(self):
        with translation.override('de', deactivate=True):
            t = Template("{{ _('No') }}")
        with translation.override('nl'):
            self.assertEqual(t.render(Context({})), 'Nee')

    def test_multiple_locale_direct_switch(self):
        with translation.override('de'):
            t = Template("{{ _('No') }}")
        with translation.override('nl'):
            self.assertEqual(t.render(Context({})), 'Nee')

    # Literal marked up with _(), loading the i18n template tag library
    def test_multiple_locale_loadi18n(self):
        with translation.override('de'):
            t = Template("{% load i18n %}{{ _('No') }}")
        with translation.override(self._old_language), translation.override('nl'):
            self.assertEqual(t.render(Context({})), 'Nee')

    def test_multiple_locale_loadi18n_deactivate(self):
        with translation.override('de', deactivate=True):
            t = Template("{% load i18n %}{{ _('No') }}")
        with translation.override('nl'):
            self.assertEqual(t.render(Context({})), 'Nee')

    def test_multiple_locale_loadi18n_direct_switch(self):
        with translation.override('de'):
            t = Template("{% load i18n %}{{ _('No') }}")
        with translation.override('nl'):
            self.assertEqual(t.render(Context({})), 'Nee')

    # trans i18n tag
    def test_multiple_locale_trans(self):
        with translation.override('de'):
            t = Template("{% load i18n %}{% trans 'No' %}")
        with translation.override(self._old_language), translation.override('nl'):
            self.assertEqual(t.render(Context({})), 'Nee')

    def test_multiple_locale_deactivate_trans(self):
        with translation.override('de', deactivate=True):
            t = Template("{% load i18n %}{% trans 'No' %}")
        with translation.override('nl'):
            self.assertEqual(t.render(Context({})), 'Nee')

    def test_multiple_locale_direct_switch_trans(self):
        with translation.override('de'):
            t = Template("{% load i18n %}{% trans 'No' %}")
        with translation.override('nl'):
            self.assertEqual(t.render(Context({})), 'Nee')

    # blocktrans i18n tag
    def test_multiple_locale_btrans(self):
        with translation.override('de'):
            t = Template("{% load i18n %}{% blocktrans %}No{% endblocktrans %}")
        with translation.override(self._old_language), translation.override('nl'):
            self.assertEqual(t.render(Context({})), 'Nee')

    def test_multiple_locale_deactivate_btrans(self):
        with translation.override('de', deactivate=True):
            t = Template("{% load i18n %}{% blocktrans %}No{% endblocktrans %}")
        with translation.override('nl'):
            self.assertEqual(t.render(Context({})), 'Nee')

    def test_multiple_locale_direct_switch_btrans(self):
        with translation.override('de'):
            t = Template("{% load i18n %}{% blocktrans %}No{% endblocktrans %}")
        with translation.override('nl'):
            self.assertEqual(t.render(Context({})), 'Nee')
@override_settings(
    USE_I18N=True,
    LANGUAGES=[
        ('en', 'English'),
        ('fr', 'French'),
    ],
    MIDDLEWARE_CLASSES=[
        'django.middleware.locale.LocaleMiddleware',
        'django.middleware.common.CommonMiddleware',
    ],
    ROOT_URLCONF='i18n.urls',
)
class LocaleMiddlewareTests(TestCase):
    """Tests of LocaleMiddleware behavior over full request/response cycles."""

    def test_streaming_response(self):
        # Regression test for #5241
        response = self.client.get('/fr/streaming/')
        self.assertContains(response, "Oui/Non")
        response = self.client.get('/en/streaming/')
        self.assertContains(response, "Yes/No")

    @override_settings(
        MIDDLEWARE_CLASSES=[
            'django.contrib.sessions.middleware.SessionMiddleware',
            'django.middleware.locale.LocaleMiddleware',
            'django.middleware.common.CommonMiddleware',
        ],
    )
    def test_language_not_saved_to_session(self):
        """Checks that current language is not automatically saved to
        session on every request."""
        # Regression test for #21473
        self.client.get('/fr/simple/')
        self.assertNotIn(LANGUAGE_SESSION_KEY, self.client.session)
@override_settings(
    USE_I18N=True,
    LANGUAGES=[
        ('bg', 'Bulgarian'),
        ('en-us', 'English'),
        ('pt-br', 'Portugese (Brazil)'),
    ],
    MIDDLEWARE_CLASSES=[
        'django.middleware.locale.LocaleMiddleware',
        'django.middleware.common.CommonMiddleware',
    ],
    ROOT_URLCONF='i18n.urls'
)
class CountrySpecificLanguageTests(SimpleTestCase):
    """Language negotiation when LANGUAGES contains country-specific codes only."""

    def setUp(self):
        super(CountrySpecificLanguageTests, self).setUp()
        self.rf = RequestFactory()

    def test_check_for_language(self):
        self.assertTrue(check_for_language('en'))
        self.assertTrue(check_for_language('en-us'))
        self.assertTrue(check_for_language('en-US'))
        self.assertTrue(check_for_language('be'))
        self.assertTrue(check_for_language('be@latin'))
        self.assertTrue(check_for_language('sr-RS@latin'))
        self.assertTrue(check_for_language('sr-RS@12345'))
        self.assertFalse(check_for_language('en-ü'))
        self.assertFalse(check_for_language('en\x00'))
        self.assertFalse(check_for_language(None))
        self.assertFalse(check_for_language('be@ '))
        # Specifying encoding is not supported (Django enforces UTF-8)
        self.assertFalse(check_for_language('tr-TR.UTF-8'))
        self.assertFalse(check_for_language('tr-TR.UTF8'))
        self.assertFalse(check_for_language('de-DE.utf-8'))

    def test_get_language_from_request(self):
        # issue 19919
        r = self.rf.get('/')
        r.COOKIES = {}
        r.META = {'HTTP_ACCEPT_LANGUAGE': 'en-US,en;q=0.8,bg;q=0.6,ru;q=0.4'}
        lang = get_language_from_request(r)
        self.assertEqual('en-us', lang)
        r = self.rf.get('/')
        r.COOKIES = {}
        r.META = {'HTTP_ACCEPT_LANGUAGE': 'bg-bg,en-US;q=0.8,en;q=0.6,ru;q=0.4'}
        lang = get_language_from_request(r)
        self.assertEqual('bg', lang)

    def test_specific_language_codes(self):
        # issue 11915: a generic 'pt' request should match the country-specific
        # 'pt-br' entry when no plain 'pt' is configured.
        r = self.rf.get('/')
        r.COOKIES = {}
        r.META = {'HTTP_ACCEPT_LANGUAGE': 'pt,en-US;q=0.8,en;q=0.6,ru;q=0.4'}
        lang = get_language_from_request(r)
        self.assertEqual('pt-br', lang)
        r = self.rf.get('/')
        r.COOKIES = {}
        r.META = {'HTTP_ACCEPT_LANGUAGE': 'pt-pt,en-US;q=0.8,en;q=0.6,ru;q=0.4'}
        lang = get_language_from_request(r)
        self.assertEqual('pt-br', lang)
class TranslationFilesMissing(SimpleTestCase):
    """Behavior when gettext catalog files cannot be located on disk."""

    def setUp(self):
        super(TranslationFilesMissing, self).setUp()
        # Keep a reference so tearDown can restore the real gettext.find.
        self.gettext_find_builtin = gettext_module.find

    def tearDown(self):
        gettext_module.find = self.gettext_find_builtin
        super(TranslationFilesMissing, self).tearDown()

    def patchGettextFind(self):
        # Simulate "no .mo file found anywhere".
        gettext_module.find = lambda *args, **kw: None

    def test_failure_finding_default_mo_files(self):
        '''
        Ensure IOError is raised if the default language is unparseable.
        Refs: #18192
        '''
        self.patchGettextFind()
        trans_real._translations = {}
        self.assertRaises(IOError, activate, 'en')
|
wkentaro/conque.vim
|
refs/heads/master
|
autoload/conque_term/conque_sole.py
|
30
|
# FILE: autoload/conque_term/conque_sole.py
# AUTHOR: Nico Raffo <nicoraffo@gmail.com>
# WEBSITE: http://conque.googlecode.com
# MODIFIED: 2011-09-02
# VERSION: 2.3, for Vim 7.0
# LICENSE:
# Conque - Vim terminal/console emulator
# Copyright (C) 2009-2011 Nico Raffo
#
# MIT License
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""
Windows Console Emulator
This is the main interface to the Windows emulator. It reads new output from the background console
and updates the Vim buffer.
"""
import vim
class ConqueSole(Conque):
    """Windows console emulator: pulls output from the background console
    subprocess and mirrors it into the current Vim buffer."""

    # NOTE(review): the dict values below are *class* attributes, shared by
    # all instances. Looks intentional for a one-terminal-per-buffer design,
    # but confirm before running multiple ConqueSole instances per process.
    window_top = None        # buffer row of the top of the visible window
    window_bottom = None     # buffer row of the bottom of the visible window
    color_cache = {}
    attribute_cache = {}     # line_nr -> last attribute string rendered
    color_mode = None        # e.g. 'conceal'; from g:ConqueTerm_ColorMode
    color_conceals = {}
    buffer = None            # the Vim buffer being mirrored
    encoding = None
    # counters for periodic rendering
    buffer_redraw_ct = 1
    screen_redraw_ct = 1
    # line offset, shifts output down
    offset = 0
def open(self):
    """ Start command and initialize this instance
    Arguments:
    command - Command string, e.g. "Powershell.exe"
    options - Dictionary of config options
    python_exe - Path to the python.exe executable. Usually C:\PythonXX\python.exe
    communicator_py - Path to subprocess controller script in user's vimfiles directory
    """
    # get arguments
    # NOTE: "arguments" arrive via vim.eval() of Vimscript variables set by
    # the caller, not as Python parameters.
    command = vim.eval('command')
    options = vim.eval('options')
    python_exe = vim.eval('py_exe')
    communicator_py = vim.eval('py_vim')
    # init size
    self.columns = vim.current.window.width
    self.lines = vim.current.window.height
    self.window_top = 0
    self.window_bottom = vim.current.window.height - 1
    # color mode
    self.color_mode = vim.eval('g:ConqueTerm_ColorMode')
    # line offset
    self.offset = int(options['offset'])
    # init color
    self.enable_colors = options['color'] and not CONQUE_FAST_MODE
    # open command
    self.proc = ConqueSoleWrapper()
    self.proc.open(command, self.lines, self.columns, python_exe, communicator_py, options)
    self.buffer = vim.current.buffer
    self.screen_encoding = vim.eval('&fileencoding')
def read(self, timeout=1, set_cursor=True, return_output=False, update_buffer=True):
    """ Read from console and update Vim buffer.

    Arguments:
    timeout -- unused here; kept for interface compatibility
    set_cursor -- reposition the Vim cursor to the console cursor afterwards
    return_output -- if True, return the newly produced text (see get_new_output)
    update_buffer -- if False, only update internal cursor/window state
    """
    try:
        stats = self.proc.get_stats()
        if not stats:
            return
        # disable screen and buffer redraws in fast mode
        if not CONQUE_FAST_MODE:
            self.buffer_redraw_ct += 1
            self.screen_redraw_ct += 1
        update_top = 0
        update_bottom = 0
        lines = []
        # full buffer redraw, our favorite!
        #if self.buffer_redraw_ct == CONQUE_SOLE_BUFFER_REDRAW:
        #    self.buffer_redraw_ct = 0
        #    update_top = 0
        #    update_bottom = stats['top_offset'] + self.lines
        #    (lines, attributes) = self.proc.read(update_top, update_bottom)
        #    if return_output:
        #        output = self.get_new_output(lines, update_top, stats)
        #    if update_buffer:
        #        for i in range(update_top, update_bottom + 1):
        #            if CONQUE_FAST_MODE:
        #                self.plain_text(i, lines[i], None, stats)
        #            else:
        #                self.plain_text(i, lines[i], attributes[i], stats)
        # full screen redraw
        # Triggered when the cursor left the current line, the window
        # scrolled, or the periodic redraw counter expired.
        if stats['cursor_y'] + 1 != self.l or stats['top_offset'] != self.window_top or self.screen_redraw_ct >= CONQUE_SOLE_SCREEN_REDRAW:
            self.screen_redraw_ct = 0
            update_top = self.window_top
            update_bottom = max([stats['top_offset'] + self.lines + 1, stats['cursor_y']])
            (lines, attributes) = self.proc.read(update_top, update_bottom - update_top + 1)
            if return_output:
                output = self.get_new_output(lines, update_top, stats)
            if update_buffer:
                for i in range(update_top, update_bottom + 1):
                    if CONQUE_FAST_MODE:
                        self.plain_text(i, lines[i - update_top], None, stats)
                    else:
                        self.plain_text(i, lines[i - update_top], attributes[i - update_top], stats)
        # single line redraw
        else:
            update_top = stats['cursor_y']
            (lines, attributes) = self.proc.read(update_top, 1)
            if return_output:
                output = self.get_new_output(lines, update_top, stats)
            if update_buffer:
                # Only rewrite the buffer line if it actually changed.
                if lines[0].rstrip() != u(self.buffer[update_top].rstrip()):
                    if CONQUE_FAST_MODE:
                        self.plain_text(update_top, lines[0], None, stats)
                    else:
                        self.plain_text(update_top, lines[0], attributes[0], stats)
        # reset current position
        self.window_top = stats['top_offset']
        self.l = stats['cursor_y'] + 1
        self.c = stats['cursor_x'] + 1
        # reposition cursor if this seems plausible
        if set_cursor:
            self.set_cursor(self.l, self.c)
        if return_output:
            return output
    except:
        # NOTE(review): bare except deliberately swallows everything so a
        # transient read error never breaks the Vim event loop — but it also
        # hides bugs; consider logging here.
        pass
def get_new_output(self, lines, update_top, stats):
    """ Calculate the "new" output from this read. Fake but useful """
    # Nothing is "new" unless the console cursor has moved past our last
    # known position (self.l / self.c are 1-based line and column).
    if not (stats['cursor_y'] + 1 > self.l or (stats['cursor_y'] + 1 == self.l and stats['cursor_x'] + 1 > self.c)):
        return ""
    try:
        # number of console lines between the old cursor line and the new one
        num_to_return = stats['cursor_y'] - self.l + 2
        # re-base `lines` so index 0 is the line the cursor was previously on
        # (`lines` starts at console row `update_top`)
        lines = lines[self.l - update_top - 1:]
        new_output = []
        # first line: only the text to the right of the old cursor column
        new_output.append(lines[0][self.c - 1:].rstrip())
        # the remaining lines are returned whole
        for i in range(1, num_to_return):
            new_output.append(lines[i].rstrip())
    except:
        # NOTE(review): if an exception fired before new_output was bound,
        # the return below would raise NameError — presumably the slice
        # math keeps us in range in practice; confirm
        pass
    return "\n".join(new_output)
def plain_text(self, line_nr, text, attributes, stats):
    """ Write plain text to Vim buffer. """
    # handle line offset (buffer may start below the console origin)
    line_nr += self.offset
    self.l = line_nr + 1
    # remove trailing whitespace
    text = text.rstrip()
    # if we're using concealed text for color, then s- is weird
    if self.color_mode == 'conceal':
        text = self.add_conceal_color(text, attributes, stats, line_nr)
    # deal with character encoding
    if CONQUE_PYTHON_VERSION == 2:
        val = text.encode(self.screen_encoding)
    else:
        # XXX / Vim's python3 interface doesn't accept bytes object
        val = str(text)
    # update vim buffer: append a new line or overwrite in place
    if len(self.buffer) <= line_nr:
        self.buffer.append(val)
    else:
        self.buffer[line_nr] = val
    # non-conceal coloring path: only re-highlight recent lines, and only
    # when this line's attribute string actually changed since last time
    if self.enable_colors and not self.color_mode == 'conceal' and line_nr > self.l - CONQUE_MAX_SYNTAX_LINES:
        relevant = attributes[0:len(text)]
        if line_nr not in self.attribute_cache or self.attribute_cache[line_nr] != relevant:
            self.do_color(attributes=relevant, stats=stats)
            self.attribute_cache[line_nr] = relevant
def add_conceal_color(self, text, attributes, stats, line_nr):
    """ Add 'conceal' color strings to output text """
    # stop here if coloration is disabled
    if not self.enable_colors:
        return text
    # if no colors for this line, clear everything out
    if len(attributes) == 0 or attributes == u(chr(stats['default_attribute'])) * len(attributes):
        return text
    new_text = ''
    # record the offsets where conceal markers are inserted; set_cursor()
    # uses this list to shift the cursor past the hidden marker text
    self.color_conceals[line_nr] = []
    # split the attribute string into runs of identical attributes
    attribute_chunks = CONQUE_WIN32_REGEX_ATTR.findall(attributes)
    offset = 0
    ends = []
    for attr in attribute_chunks:
        attr_num = ord(attr[1])
        ends = []
        if attr_num != stats['default_attribute']:
            color = self.translate_color(attr_num)
            # open a foreground color region (ESC-prefixed marker text)
            new_text += chr(27) + 'sf' + color['fg_code'] + ';'
            ends.append(chr(27) + 'ef' + color['fg_code'] + ';')
            self.color_conceals[line_nr].append(offset)
            # attribute values above 15 carry background color bits too
            if attr_num > 15:
                new_text += chr(27) + 'sb' + color['bg_code'] + ';'
                ends.append(chr(27) + 'eb' + color['bg_code'] + ';')
                self.color_conceals[line_nr].append(offset)
        # the run of characters this attribute chunk covers
        new_text += text[offset:offset + len(attr[0])]
        # close color regions in reverse nesting order
        ends.reverse()
        for i in range(0, len(ends)):
            self.color_conceals[line_nr].append(len(new_text))
            new_text += ends[i]
        offset += len(attr[0])
    return new_text
def do_color(self, start=0, end=0, attributes='', stats=None):
    """ Convert Windows console attributes into Vim syntax highlighting """
    # if no colors for this line, clear everything out
    if len(attributes) == 0 or attributes == u(chr(stats['default_attribute'])) * len(attributes):
        self.color_changes = {}
        self.apply_color(1, len(attributes), self.l)
        return
    # split the attribute string into runs of identical attributes and
    # apply a highlight region per non-default run (columns are 1-based)
    attribute_chunks = CONQUE_WIN32_REGEX_ATTR.findall(attributes)
    offset = 0
    for attr in attribute_chunks:
        attr_num = ord(attr[1])
        if attr_num != stats['default_attribute']:
            self.color_changes = self.translate_color(attr_num)
            self.apply_color(offset + 1, offset + len(attr[0]) + 1, self.l)
        offset += len(attr[0])
def translate_color(self, attr):
    """ Convert a Windows console attribute byte into Vim-usable colors.

    Returns a dict with 'guifg'/'guibg' hex strings, plus 3-digit
    'fg_code'/'bg_code' short codes when running in conceal color mode.
    Results are memoized in self.color_cache.
    """
    # serve memoized results when possible
    if attr in self.color_cache:
        return self.color_cache[attr]
    # the attribute's low nibble is the foreground, the next nibble the
    # background; within a nibble the bits are [intensity, red, green, blue]
    bits = bin(attr).replace('0b', '')
    fg_bits = bits[-4:].rjust(4, '0')
    bg_bits = bits[-8:-4].rjust(4, '0')
    # a set color bit contributes 204, plus 51 extra when intensified
    fg_rgb = tuple(int(b) * 204 + int(fg_bits[0]) * int(b) * 51 for b in fg_bits[1:])
    bg_rgb = tuple(int(b) * 204 + int(bg_bits[0]) * int(b) * 51 for b in bg_bits[1:])
    fg_str = "#%02x%02x%02x" % fg_rgb
    bg_str = "#%02x%02x%02x" % bg_rgb
    # 3-digit short codes keep the leading hex digit of each channel
    fg_hex = "%02x%02x%02x" % fg_rgb
    bg_hex = "%02x%02x%02x" % bg_rgb
    # build value for color_changes
    color = {'guifg': fg_str, 'guibg': bg_str}
    if self.color_mode == 'conceal':
        color['fg_code'] = fg_hex[0] + fg_hex[2] + fg_hex[4]
        color['bg_code'] = bg_hex[0] + bg_hex[2] + bg_hex[4]
    self.color_cache[attr] = color
    return color
def write_vk(self, vk_code):
    """ write virtual key code to shared memory using proprietary escape seq """
    # delegated to the subprocess proxy, which owns the shared-memory protocol
    self.proc.write_vk(vk_code)
def update_window_size(self):
    """ Resize underlying console if Vim buffer size has changed """
    # only act when the Vim window dimensions differ from our cached values
    if vim.current.window.width != self.columns or vim.current.window.height != self.lines:
        # reset all window size attributes to default
        self.columns = vim.current.window.width
        self.lines = vim.current.window.height
        self.working_columns = vim.current.window.width
        self.working_lines = vim.current.window.height
        self.bottom = vim.current.window.height
        # propagate the new size to the console subprocess
        self.proc.window_resize(vim.current.window.height, vim.current.window.width)
def set_cursor(self, line, column):
    """ Update cursor position in Vim buffer """
    # handle offset
    line += self.offset
    # shift cursor position to handle concealed text: each conceal marker
    # recorded by add_conceal_color() before this column occupies extra
    # characters in the buffer line (7 per marker here)
    if self.enable_colors and self.color_mode == 'conceal':
        if line - 1 in self.color_conceals:
            for c in self.color_conceals[line - 1]:
                if c < column:
                    column += 7
                else:
                    break
    # figure out line: pad the buffer with empty lines if the target
    # line doesn't exist yet
    buffer_line = line
    if buffer_line > len(self.buffer):
        for l in range(len(self.buffer) - 1, buffer_line):
            self.buffer.append('')
    # figure out column: pad the line with spaces so the cursor column exists
    real_column = column
    if len(self.buffer[buffer_line - 1]) < real_column:
        self.buffer[buffer_line - 1] = self.buffer[buffer_line - 1] + ' ' * (real_column - len(self.buffer[buffer_line - 1]))
    # python version is occasionally grumpy
    # NOTE(review): the python path uses real_column - 1 while the ex-command
    # fallback uses real_column — vim's python cursor is 0-based in the
    # column, cursor() is 1-based; confirm both land on the same cell
    try:
        vim.current.window.cursor = (buffer_line, real_column - 1)
    except:
        vim.command('call cursor(' + str(buffer_line) + ', ' + str(real_column) + ')')
def idle(self):
    """ go into idle mode """
    # delegate to the console subprocess proxy
    self.proc.idle()
def resume(self):
    """ resume from idle mode """
    # delegate to the console subprocess proxy
    self.proc.resume()
def close(self):
    """ end console subprocess """
    self.proc.close()
def abort(self):
    """ end subprocess forcefully """
    # NOTE(review): identical to close(); presumably a forceful kill was
    # intended here — confirm against the proc API
    self.proc.close()
def get_buffer_line(self, line):
    """ Map a console line number onto a Vim buffer line number. """
    # identity mapping: this terminal keeps buffer and console in lockstep
    return line
# vim:foldmethod=marker
|
theonion/django-bulbs
|
refs/heads/master
|
bulbs/cms_notifications/south_migrations/__init__.py
|
4
|
__author__ = 'andrewkos'
|
robovm/robovm-studio
|
refs/heads/master
|
python/testData/refactoring/introduceConstant/suggestUniqueNames.py
|
83
|
S = True
STR = True
"<caret>"
|
MorganBauer/gcjcupcake
|
refs/heads/master
|
commands/gcj_cupcake/ZipUtils.py
|
1
|
# -*- coding: utf-8 -*-
#
# GCJ Cupcake by jbernadas
# Copyright (C) 2010 Jorge Bernadas (jbernadas@gmail.com)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import gzip, random, os, sys
from zipfile import ZipFile, ZIP_DEFLATED
# Unzip the specified data in memory using the gzip library.
def unzip_data(zipped_data):
    """Return the decompressed form of gzip-compressed *zipped_data*.

    Logs to stderr and exits with status 1 if decompression fails.
    """
    import io
    try:
        # Decompress directly from a memory buffer.  The previous
        # implementation round-tripped through a randomly named temp file
        # in the working directory, which was race/collision-prone and
        # required explicit cleanup.
        unzipped_file = gzip.GzipFile(fileobj=io.BytesIO(zipped_data), mode='rb')
        unzipped_data = unzipped_file.read()
        unzipped_file.close()
    except IOError as error:
        # Log the IO error and exit with error.
        sys.stderr.write('IO error happened while unzipping '
                         'data: {0}'.format(str(error)))
        sys.exit(1)
    # Return the unzipped string.
    return unzipped_data
# Zip the specified data in memory using the gzip library.
def zip_data(unzipped_data):
    """Return *unzipped_data* compressed with gzip.

    Logs to stderr and exits with status 1 if compression fails.
    """
    import io
    try:
        # Compress into a memory buffer.  The previous implementation
        # round-tripped through a randomly named temp file in the working
        # directory, which was race/collision-prone and required cleanup.
        out_buffer = io.BytesIO()
        compress_file = gzip.GzipFile(fileobj=out_buffer, mode='wb')
        compress_file.write(unzipped_data)
        compress_file.close()
    except IOError as error:
        # Log the IO error and exit with error.
        sys.stderr.write('IO error happened while compressing '
                         'data: {0}'.format(str(error)))
        sys.exit(1)
    # Return the zipped string.
    return out_buffer.getvalue()
# Create a zip file with the specified source files.
def make_zip_file(source_files, zip_filename, ignore_exts=()):
    """Create *zip_filename* containing *source_files*.

    Directories are walked recursively.  Files whose extension appears in
    *ignore_exts* are skipped; the set of skipped paths is returned.
    Logs to stderr and exits with status 1 on OS/IO errors.
    """
    # NOTE: the default was a mutable list ([]); a tuple avoids the shared
    # mutable-default pitfall and supports the same `in` membership tests.
    try:
        ignored_files = set()
        zip_file = ZipFile(zip_filename, 'w', ZIP_DEFLATED)
        try:
            # Put all specified sources in the zip file, ignoring files with
            # the specified extensions.
            for source_filename in source_files:
                if os.path.isdir(source_filename):
                    # Walk over the specified directory, creating each
                    # directory entry and processing all files inside it.
                    for dirpath, dirnames, filenames in os.walk(source_filename):
                        zip_file.write(dirpath)
                        for filename in filenames:
                            base_filename = os.path.join(dirpath, filename)
                            if os.path.splitext(filename)[1] not in ignore_exts:
                                zip_file.write(base_filename)
                            else:
                                ignored_files.add(base_filename)
                else:
                    # Add a plain file unless its extension is ignored.
                    if os.path.splitext(source_filename)[1] not in ignore_exts:
                        zip_file.write(source_filename)
                    else:
                        ignored_files.add(source_filename)
        finally:
            # close the archive even when an error interrupts the loop
            # (the original leaked the handle on exceptions)
            zip_file.close()
        return ignored_files
    except OSError as error:
        # Log the OS error and exit with error.
        sys.stderr.write('OS error happened while creating zip '
                         'file "{0}": {1}'.format(zip_filename, str(error)))
        sys.exit(1)
    except IOError as error:
        # Log the IO error and exit with error.  (Redundant on Python 3,
        # where IOError is OSError; kept for Python 2 parity.)
        sys.stderr.write('IO error happened while creating zip '
                         'file "{0}": {1}'.format(zip_filename, str(error)))
        sys.exit(1)
####################################################################################################
|
Impactstory/total-impact-core
|
refs/heads/master
|
extras/functional_tests/providers_test_proxy.py
|
2
|
#!/usr/bin/env python
#
# Providers Test Proxy
#
# This is a very basic webserver which can be used to simluate commuicating
# providers. It performs basic response replay for known data items. Response
# data is stored in test/data/<provider>
#
from SimpleHTTPServer import SimpleHTTPRequestHandler
from BaseHTTPServer import BaseHTTPRequestHandler
import SocketServer
from optparse import OptionParser
import logging
import os
import re
# Canned (status_code, body) responses keyed by provider name, filled in below
responses = {'dryad':{},'wikipedia':{},'github':{},'mendeley':{},'crossref':{}}
def load_test_data(provider, filename):
    """Return the contents of a canned response file for *provider*.

    Files live under ../test/data/<provider> relative to this script.
    """
    datadir = os.path.join(os.path.split(__file__)[0], "../test/data/", provider)
    # use a context manager so the handle is closed promptly instead of
    # leaking until garbage collection
    with open(os.path.join(datadir, filename)) as data_file:
        return data_file.read()
# Pre-load every canned response as a (status_code, body) tuple
responses['dryad']['aliases'] = (200, load_test_data('dryad', 'sample_extract_aliases_page.xml'))
responses['dryad']['metrics'] = (200, load_test_data('dryad', 'sample_extract_metrics_page.html'))
responses['dryad']['10.5061'] = (200, load_test_data('dryad', 'dryad_info_10.5061.xml'))
responses['wikipedia']['metrics'] = (200, load_test_data('wikipedia', 'wikipedia_response.xml'))
responses['wikipedia']['10.1186'] = (200, load_test_data('wikipedia', 'wikipedia_10.1186_response.xml'))
responses['wikipedia']['10.5061'] = (200, load_test_data('wikipedia', 'wikipedia_10.5061_response.xml'))
responses['wikipedia']['cottagelabs'] = (200, load_test_data('wikipedia', 'wikipedia_cottagelabs.xml'))
responses['github']['members'] = (200, load_test_data('github', 'egonw_gtd_member_response.json'))
responses['github']['metrics'] = (200, load_test_data('github', 'egonw_gtd_metric_response.json'))
# the mendeley fixture simulates a 404 on purpose
responses['mendeley']['aliases-10.5061'] = (404, load_test_data('mendeley', 'mendeley-aliases-10.5061'))
responses['crossref']['aliases-10.5061'] = (200, load_test_data('crossref', 'crossref-aliases-10.5061'))
# Map of request URL -> (status_code, body).  Keys are either exact URL
# strings or compiled regex patterns (matched against the request path).
urlmap = {
    ###################################################################################
    ##
    ## Dryad Provider
    ##
    "http://datadryad.org/solr/search/select/?q=dc.identifier:10.5061/dryad.7898&fl=dc.identifier.uri,dc.title": responses['dryad']['aliases'],
    "http://datadryad.org/solr/search/select/?q=dc.identifier:10.5061/dryad.7898&fl=dc.date.accessioned.year,dc.identifier.uri,dc.title_ac,dc.contributor.author_ac" : responses['dryad']['10.5061'],
    "http://dx.doi.org/10.5061/dryad.7898": responses['dryad']['metrics'],
    ###################################################################################
    ##
    ## Wikipedia Provider
    ##
    # Metrics information for various test items
    "http://en.wikipedia.org/w/api.php?action=query&list=search&srprop=timestamp&format=xml&srsearch='10.1371/journal.pcbi.1000361'": responses['wikipedia']['metrics'],
    "http://en.wikipedia.org/w/api.php?action=query&list=search&srprop=timestamp&format=xml&srsearch='10.1186/1745-6215-11-32'": responses['wikipedia']['10.1186'],
    "http://en.wikipedia.org/w/api.php?action=query&list=search&srprop=timestamp&format=xml&srsearch='10.5061/dryad.7898'": responses['wikipedia']['10.5061'],
    "http://en.wikipedia.org/w/api.php?action=query&list=search&srprop=timestamp&format=xml&srsearch='http://cottagelabs.com'": responses['wikipedia']['cottagelabs'],
    ###################################################################################
    ##
    ## Github Provider
    ##
    # member_items results for egonw
    "https://api.github.com/users/egonw/repos": responses['github']['members'],
    # metrics results for ('github', 'egonw,gtd')
    "https://github.com/api/v2/json/repos/show/egonw/gtd": responses['github']['metrics'],
    ###################################################################################
    ##
    ## Mendeley Provider
    ##
    re.compile(r"http://api.mendeley.com/oapi/documents/details/10.5061%252Fdryad.7898\?type=doi&consumer_key=.*"): responses['mendeley']['aliases-10.5061'],
    ###################################################################################
    ##
    ## Crossref Provider
    ##
    re.compile(r"http://doi.crossref.org/servlet/query\?pid=(.*)&qdata=10.5061/dryad.7898&format=unixref"): responses["crossref"]['aliases-10.5061'],
    ###################################################################################
    ##
    ## Test Item
    ##
    ## This is just so you can check http://proxy:port/test to see if this is running ok
    ##
    "/test": responses['github']['members'],
}
class ProvidersTestProxy(BaseHTTPRequestHandler):
    """HTTP handler that replays canned provider responses (Python 2)."""

    def do_GET(self):
        # Find match, including regex.  String keys must equal the request
        # path exactly; other keys are treated as compiled regex patterns.
        # NOTE(review): the loop does not break on a hit, so when several
        # keys match, the last one iterated wins — confirm this is intended.
        match = None
        for key in urlmap.keys():
            if isinstance(key, str):
                if self.path == key:
                    match = key
            else:
                if key.match(self.path):
                    match = key
        if match:
            print "Found:", self.path
            # replay the canned (status, body) pair
            (code, response) = urlmap[match]
            self.send_response(code)
            self.end_headers()
            self.wfile.write(response)
        else:
            print "Not Found:", self.path
            # unknown URL -> 500 with explanatory status message
            self.send_response(500, "Test Proxy: Unknown URL")
# Command-line entry point: parse options, configure logging, and serve
# canned responses forever on the chosen port (Python 2).
if __name__ == '__main__':
    parser = OptionParser()
    parser.add_option("-p", "--port",
                      action="store", dest="port", default=8081,
                      help="Port to run the server on (default 8081)")
    parser.add_option("-v", "--verbose",
                      action="store_true", dest="verbose", default=False,
                      help="print debugging output")
    parser.add_option("-l", "--log",
                      action="store", dest="log", default=None,
                      help="runtime log")
    parser.add_option("-q", "--quiet",
                      action="store_true", dest="quiet", default=False,
                      help="Only print errors on failures")
    (options, args) = parser.parse_args()
    if options.verbose:
        # verbose: stream DEBUG records from the root logger to the console
        ch = logging.StreamHandler()
        ch.setLevel(logging.DEBUG)
        # Nicer formatting to show different providers
        formatter = logging.Formatter(' %(name)s - %(message)s')
        ch.setFormatter(formatter)
        logger = logging.getLogger('')
        logger.addHandler(ch)
    else:
        # default: quiet the provider logger down to warnings
        logger = logging.getLogger('ti.providers')
        logger.setLevel(logging.WARNING)

    # allow quick restarts without waiting for the TIME_WAIT socket to clear
    class ReuseServer(SocketServer.TCPServer):
        allow_reuse_address = True

    handler = ProvidersTestProxy
    httpd = ReuseServer(("", int(options.port)), handler)
    print "listening on port", options.port
    httpd.serve_forever()
|
repotvsupertuga/tvsupertuga.repository
|
refs/heads/master
|
instal/script.module.resolveurl/lib/resolveurl/plugins/thevid.py
|
2
|
"""
Copyright (C) 2017 tknorris
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import os, thevid_gmu
from resolveurl import common
from resolveurl.resolver import ResolveUrl, ResolverError
# Module logger for this resolver; disabled by default
logger = common.log_utils.Logger.get_logger(__name__)
logger.disable()
# Remote source for the auto-updated helper module, and its local path
VID_SOURCE = 'https://raw.githubusercontent.com/jsergio123/script.module.resolveurl/master/lib/resolveurl/plugins/thevid_gmu.py'
VID_PATH = os.path.join(common.plugins_path, 'thevid_gmu.py')
class TheVidResolver(ResolveUrl):
    """Resolver for thevid.net/.tv/.live video URLs.

    Delegates the actual extraction to the auto-updated thevid_gmu helper
    module.
    """
    name = "TheVid"
    domains = ["thevid.net", "thevid.tv", "thevid.live"]
    pattern = '(?://|\.)(thevid\.(?:net|tv|live))/(?:video|e|v)/([A-Za-z0-9]+)'

    def __init__(self):
        self.net = common.Net()

    def get_media_url(self, host, media_id):
        """Fetch an updated helper, then resolve the stream URL via it."""
        try:
            # refresh the helper module from VID_SOURCE, then reload it so
            # the new code is used on this call
            # NOTE(review): `reload` is a Python 2 builtin — on Python 3 this
            # needs importlib.reload; confirm the runtime target
            self._auto_update(VID_SOURCE, VID_PATH)
            reload(thevid_gmu)
            web_url = self.get_url(host, media_id)
            return thevid_gmu.get_media_url(web_url)
        except Exception as e:
            logger.log_debug('Exception during thevid.net resolve parse: %s' % e)
            raise

    def get_url(self, host, media_id):
        """Build the embed URL for the given host and media id."""
        return self._default_get_url(host, media_id, template='http://{host}/e/{media_id}/')

    @classmethod
    def get_settings_xml(cls):
        """Extend the base settings with auto-update controls."""
        xml = super(cls, cls).get_settings_xml()
        xml.append('<setting id="%s_auto_update" type="bool" label="Automatically update resolver" default="true"/>' % (cls.__name__))
        xml.append('<setting id="%s_etag" type="text" default="" visible="false"/>' % (cls.__name__))
        return xml
|
axbaretto/beam
|
refs/heads/master
|
sdks/python/.tox/docs/lib/python2.7/site-packages/google/protobuf/internal/descriptor_test.py
|
37
|
#! /usr/bin/env python
#
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Unittest for google.protobuf.internal.descriptor."""
__author__ = 'robinson@google.com (Will Robinson)'
import sys
try:
import unittest2 as unittest #PY26
except ImportError:
import unittest
from google.protobuf import unittest_custom_options_pb2
from google.protobuf import unittest_import_pb2
from google.protobuf import unittest_pb2
from google.protobuf import descriptor_pb2
from google.protobuf.internal import api_implementation
from google.protobuf.internal import test_util
from google.protobuf import descriptor
from google.protobuf import descriptor_pool
from google.protobuf import symbol_database
from google.protobuf import text_format
# Expected text-format rendering of the TestEmptyMessage descriptor proto
TEST_EMPTY_MESSAGE_DESCRIPTOR_ASCII = """
name: 'TestEmptyMessage'
"""
class DescriptorTest(unittest.TestCase):
def setUp(self):
    """Build a small FileDescriptorProto (message + enum + service), add it
    to the descriptor pool, and cache handles to the resulting descriptors
    for the tests below."""
    file_proto = descriptor_pb2.FileDescriptorProto(
        name='some/filename/some.proto',
        package='protobuf_unittest')
    message_proto = file_proto.message_type.add(
        name='NestedMessage')
    message_proto.field.add(
        name='bb',
        number=1,
        type=descriptor_pb2.FieldDescriptorProto.TYPE_INT32,
        label=descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL)
    enum_proto = message_proto.enum_type.add(
        name='ForeignEnum')
    enum_proto.value.add(name='FOREIGN_FOO', number=4)
    enum_proto.value.add(name='FOREIGN_BAR', number=5)
    enum_proto.value.add(name='FOREIGN_BAZ', number=6)
    file_proto.message_type.add(name='ResponseMessage')
    service_proto = file_proto.service.add(
        name='Service')
    method_proto = service_proto.method.add(
        name='CallMethod',
        input_type='.protobuf_unittest.NestedMessage',
        output_type='.protobuf_unittest.ResponseMessage')
    # Note: Calling DescriptorPool.Add() multiple times with the same file only
    # works if the input is canonical; in particular, all type names must be
    # fully qualified.
    self.pool = self.GetDescriptorPool()
    self.pool.Add(file_proto)
    # resolve and cache the built descriptors for use by the test methods
    self.my_file = self.pool.FindFileByName(file_proto.name)
    self.my_message = self.my_file.message_types_by_name[message_proto.name]
    self.my_enum = self.my_message.enum_types_by_name[enum_proto.name]
    self.my_service = self.my_file.services_by_name[service_proto.name]
    self.my_method = self.my_service.methods_by_name[method_proto.name]
def GetDescriptorPool(self):
    """Return the pool setUp() should add descriptors to (overridable)."""
    return symbol_database.Default().pool
def testEnumValueName(self):
    """EnumValueName maps (enum name, number) to the value's name."""
    self.assertEqual(self.my_message.EnumValueName('ForeignEnum', 4),
                     'FOREIGN_FOO')
    # same lookup done manually through the by-name/by-number maps
    self.assertEqual(
        self.my_message.enum_types_by_name[
            'ForeignEnum'].values_by_number[4].name,
        self.my_message.EnumValueName('ForeignEnum', 4))
def testEnumFixups(self):
    """Enum values link back to their enum type."""
    self.assertEqual(self.my_enum, self.my_enum.values[0].type)
def testContainingTypeFixups(self):
    """Fields and nested enums link back to their containing message."""
    self.assertEqual(self.my_message, self.my_message.fields[0].containing_type)
    self.assertEqual(self.my_message, self.my_enum.containing_type)
def testContainingServiceFixups(self):
    """Methods link back to their containing service."""
    self.assertEqual(self.my_service, self.my_method.containing_service)
def testGetOptions(self):
    """Descriptors built without options return default option messages."""
    self.assertEqual(self.my_enum.GetOptions(),
                     descriptor_pb2.EnumOptions())
    self.assertEqual(self.my_enum.values[0].GetOptions(),
                     descriptor_pb2.EnumValueOptions())
    self.assertEqual(self.my_message.GetOptions(),
                     descriptor_pb2.MessageOptions())
    self.assertEqual(self.my_message.fields[0].GetOptions(),
                     descriptor_pb2.FieldOptions())
    self.assertEqual(self.my_method.GetOptions(),
                     descriptor_pb2.MethodOptions())
    self.assertEqual(self.my_service.GetOptions(),
                     descriptor_pb2.ServiceOptions())
def testSimpleCustomOptions(self):
    """Custom options set in unittest_custom_options.proto are readable via
    GetOptions().Extensions at every descriptor level (file, message, field,
    oneof, enum, enum value, service, method)."""
    file_descriptor = unittest_custom_options_pb2.DESCRIPTOR
    message_descriptor =\
        unittest_custom_options_pb2.TestMessageWithCustomOptions.DESCRIPTOR
    field_descriptor = message_descriptor.fields_by_name['field1']
    oneof_descriptor = message_descriptor.oneofs_by_name['AnOneof']
    enum_descriptor = message_descriptor.enum_types_by_name['AnEnum']
    enum_value_descriptor =\
        message_descriptor.enum_values_by_name['ANENUM_VAL2']
    service_descriptor =\
        unittest_custom_options_pb2.TestServiceWithCustomOptions.DESCRIPTOR
    method_descriptor = service_descriptor.FindMethodByName('Foo')

    # each assertion checks the literal value declared in the .proto file
    file_options = file_descriptor.GetOptions()
    file_opt1 = unittest_custom_options_pb2.file_opt1
    self.assertEqual(9876543210, file_options.Extensions[file_opt1])
    message_options = message_descriptor.GetOptions()
    message_opt1 = unittest_custom_options_pb2.message_opt1
    self.assertEqual(-56, message_options.Extensions[message_opt1])
    field_options = field_descriptor.GetOptions()
    field_opt1 = unittest_custom_options_pb2.field_opt1
    self.assertEqual(8765432109, field_options.Extensions[field_opt1])
    field_opt2 = unittest_custom_options_pb2.field_opt2
    self.assertEqual(42, field_options.Extensions[field_opt2])
    oneof_options = oneof_descriptor.GetOptions()
    oneof_opt1 = unittest_custom_options_pb2.oneof_opt1
    self.assertEqual(-99, oneof_options.Extensions[oneof_opt1])
    enum_options = enum_descriptor.GetOptions()
    enum_opt1 = unittest_custom_options_pb2.enum_opt1
    self.assertEqual(-789, enum_options.Extensions[enum_opt1])
    enum_value_options = enum_value_descriptor.GetOptions()
    enum_value_opt1 = unittest_custom_options_pb2.enum_value_opt1
    self.assertEqual(123, enum_value_options.Extensions[enum_value_opt1])
    service_options = service_descriptor.GetOptions()
    service_opt1 = unittest_custom_options_pb2.service_opt1
    self.assertEqual(-9876543210, service_options.Extensions[service_opt1])
    method_options = method_descriptor.GetOptions()
    method_opt1 = unittest_custom_options_pb2.method_opt1
    self.assertEqual(unittest_custom_options_pb2.METHODOPT1_VAL2,
                     method_options.Extensions[method_opt1])

    # has_options distinguishes descriptors with explicit options from ones
    # that only have defaults
    message_descriptor = (
        unittest_custom_options_pb2.DummyMessageContainingEnum.DESCRIPTOR)
    self.assertTrue(file_descriptor.has_options)
    self.assertFalse(message_descriptor.has_options)
def testDifferentCustomOptionTypes(self):
    """Custom options round-trip correctly for every scalar wire type, at
    the extreme (min/max) integer values and for float/double/string/bytes/
    enum values."""
    kint32min = -2**31
    kint64min = -2**63
    kint32max = 2**31 - 1
    kint64max = 2**63 - 1
    kuint32max = 2**32 - 1
    kuint64max = 2**64 - 1

    # minimum representable value for each signed type; zero for unsigned
    message_descriptor =\
        unittest_custom_options_pb2.CustomOptionMinIntegerValues.DESCRIPTOR
    message_options = message_descriptor.GetOptions()
    self.assertEqual(False, message_options.Extensions[
        unittest_custom_options_pb2.bool_opt])
    self.assertEqual(kint32min, message_options.Extensions[
        unittest_custom_options_pb2.int32_opt])
    self.assertEqual(kint64min, message_options.Extensions[
        unittest_custom_options_pb2.int64_opt])
    self.assertEqual(0, message_options.Extensions[
        unittest_custom_options_pb2.uint32_opt])
    self.assertEqual(0, message_options.Extensions[
        unittest_custom_options_pb2.uint64_opt])
    self.assertEqual(kint32min, message_options.Extensions[
        unittest_custom_options_pb2.sint32_opt])
    self.assertEqual(kint64min, message_options.Extensions[
        unittest_custom_options_pb2.sint64_opt])
    self.assertEqual(0, message_options.Extensions[
        unittest_custom_options_pb2.fixed32_opt])
    self.assertEqual(0, message_options.Extensions[
        unittest_custom_options_pb2.fixed64_opt])
    self.assertEqual(kint32min, message_options.Extensions[
        unittest_custom_options_pb2.sfixed32_opt])
    self.assertEqual(kint64min, message_options.Extensions[
        unittest_custom_options_pb2.sfixed64_opt])

    # maximum representable value for each type
    message_descriptor =\
        unittest_custom_options_pb2.CustomOptionMaxIntegerValues.DESCRIPTOR
    message_options = message_descriptor.GetOptions()
    self.assertEqual(True, message_options.Extensions[
        unittest_custom_options_pb2.bool_opt])
    self.assertEqual(kint32max, message_options.Extensions[
        unittest_custom_options_pb2.int32_opt])
    self.assertEqual(kint64max, message_options.Extensions[
        unittest_custom_options_pb2.int64_opt])
    self.assertEqual(kuint32max, message_options.Extensions[
        unittest_custom_options_pb2.uint32_opt])
    self.assertEqual(kuint64max, message_options.Extensions[
        unittest_custom_options_pb2.uint64_opt])
    self.assertEqual(kint32max, message_options.Extensions[
        unittest_custom_options_pb2.sint32_opt])
    self.assertEqual(kint64max, message_options.Extensions[
        unittest_custom_options_pb2.sint64_opt])
    self.assertEqual(kuint32max, message_options.Extensions[
        unittest_custom_options_pb2.fixed32_opt])
    self.assertEqual(kuint64max, message_options.Extensions[
        unittest_custom_options_pb2.fixed64_opt])
    self.assertEqual(kint32max, message_options.Extensions[
        unittest_custom_options_pb2.sfixed32_opt])
    self.assertEqual(kint64max, message_options.Extensions[
        unittest_custom_options_pb2.sfixed64_opt])

    # non-integer types: float, double, string (with escapes), bytes
    # (with embedded NUL), and enum values
    message_descriptor =\
        unittest_custom_options_pb2.CustomOptionOtherValues.DESCRIPTOR
    message_options = message_descriptor.GetOptions()
    self.assertEqual(-100, message_options.Extensions[
        unittest_custom_options_pb2.int32_opt])
    self.assertAlmostEqual(12.3456789, message_options.Extensions[
        unittest_custom_options_pb2.float_opt], 6)
    self.assertAlmostEqual(1.234567890123456789, message_options.Extensions[
        unittest_custom_options_pb2.double_opt])
    self.assertEqual("Hello, \"World\"", message_options.Extensions[
        unittest_custom_options_pb2.string_opt])
    self.assertEqual(b"Hello\0World", message_options.Extensions[
        unittest_custom_options_pb2.bytes_opt])
    dummy_enum = unittest_custom_options_pb2.DummyMessageContainingEnum
    self.assertEqual(
        dummy_enum.TEST_OPTION_ENUM_TYPE2,
        message_options.Extensions[unittest_custom_options_pb2.enum_opt])

    # integer literals assigned to floating-point options
    message_descriptor =\
        unittest_custom_options_pb2.SettingRealsFromPositiveInts.DESCRIPTOR
    message_options = message_descriptor.GetOptions()
    self.assertAlmostEqual(12, message_options.Extensions[
        unittest_custom_options_pb2.float_opt], 6)
    self.assertAlmostEqual(154, message_options.Extensions[
        unittest_custom_options_pb2.double_opt])
    message_descriptor =\
        unittest_custom_options_pb2.SettingRealsFromNegativeInts.DESCRIPTOR
    message_options = message_descriptor.GetOptions()
    self.assertAlmostEqual(-12, message_options.Extensions[
        unittest_custom_options_pb2.float_opt], 6)
    self.assertAlmostEqual(-154, message_options.Extensions[
        unittest_custom_options_pb2.double_opt])
def testComplexExtensionOptions(self):
descriptor =\
unittest_custom_options_pb2.VariousComplexOptions.DESCRIPTOR
options = descriptor.GetOptions()
self.assertEqual(42, options.Extensions[
unittest_custom_options_pb2.complex_opt1].foo)
self.assertEqual(324, options.Extensions[
unittest_custom_options_pb2.complex_opt1].Extensions[
unittest_custom_options_pb2.quux])
self.assertEqual(876, options.Extensions[
unittest_custom_options_pb2.complex_opt1].Extensions[
unittest_custom_options_pb2.corge].qux)
self.assertEqual(987, options.Extensions[
unittest_custom_options_pb2.complex_opt2].baz)
self.assertEqual(654, options.Extensions[
unittest_custom_options_pb2.complex_opt2].Extensions[
unittest_custom_options_pb2.grault])
self.assertEqual(743, options.Extensions[
unittest_custom_options_pb2.complex_opt2].bar.foo)
self.assertEqual(1999, options.Extensions[
unittest_custom_options_pb2.complex_opt2].bar.Extensions[
unittest_custom_options_pb2.quux])
self.assertEqual(2008, options.Extensions[
unittest_custom_options_pb2.complex_opt2].bar.Extensions[
unittest_custom_options_pb2.corge].qux)
self.assertEqual(741, options.Extensions[
unittest_custom_options_pb2.complex_opt2].Extensions[
unittest_custom_options_pb2.garply].foo)
self.assertEqual(1998, options.Extensions[
unittest_custom_options_pb2.complex_opt2].Extensions[
unittest_custom_options_pb2.garply].Extensions[
unittest_custom_options_pb2.quux])
self.assertEqual(2121, options.Extensions[
unittest_custom_options_pb2.complex_opt2].Extensions[
unittest_custom_options_pb2.garply].Extensions[
unittest_custom_options_pb2.corge].qux)
self.assertEqual(1971, options.Extensions[
unittest_custom_options_pb2.ComplexOptionType2
.ComplexOptionType4.complex_opt4].waldo)
self.assertEqual(321, options.Extensions[
unittest_custom_options_pb2.complex_opt2].fred.waldo)
self.assertEqual(9, options.Extensions[
unittest_custom_options_pb2.complex_opt3].qux)
self.assertEqual(22, options.Extensions[
unittest_custom_options_pb2.complex_opt3].complexoptiontype5.plugh)
self.assertEqual(24, options.Extensions[
unittest_custom_options_pb2.complexopt6].xyzzy)
# Check that aggregate options were parsed and saved correctly in
# the appropriate descriptors.
  def testAggregateOptions(self):
    """Message-typed (aggregate) custom options are parsed into descriptors.

    Checks one option of every kind (file, message, field, enum, enum value,
    service, method) and the nested/recursive values inside the file option.
    """
    file_descriptor = unittest_custom_options_pb2.DESCRIPTOR
    message_descriptor =\
        unittest_custom_options_pb2.AggregateMessage.DESCRIPTOR
    field_descriptor = message_descriptor.fields_by_name["fieldname"]
    enum_descriptor = unittest_custom_options_pb2.AggregateEnum.DESCRIPTOR
    enum_value_descriptor = enum_descriptor.values_by_name["VALUE"]
    service_descriptor =\
        unittest_custom_options_pb2.AggregateService.DESCRIPTOR
    method_descriptor = service_descriptor.FindMethodByName("Method")
    # Tests for the different types of data embedded in fileopt
    file_options = file_descriptor.GetOptions().Extensions[
        unittest_custom_options_pb2.fileopt]
    self.assertEqual(100, file_options.i)
    self.assertEqual("FileAnnotation", file_options.s)
    # Aggregate option value nested one level deep.
    self.assertEqual("NestedFileAnnotation", file_options.sub.s)
    # The option message recursively carries a fileopt extension of its own.
    self.assertEqual("FileExtensionAnnotation", file_options.file.Extensions[
        unittest_custom_options_pb2.fileopt].s)
    # MessageSet-style extension embedded inside the aggregate option.
    self.assertEqual("EmbeddedMessageSetElement", file_options.mset.Extensions[
        unittest_custom_options_pb2.AggregateMessageSetElement
        .message_set_extension].s)
    # Simple tests for all the other types of annotations
    self.assertEqual(
        "MessageAnnotation",
        message_descriptor.GetOptions().Extensions[
            unittest_custom_options_pb2.msgopt].s)
    self.assertEqual(
        "FieldAnnotation",
        field_descriptor.GetOptions().Extensions[
            unittest_custom_options_pb2.fieldopt].s)
    self.assertEqual(
        "EnumAnnotation",
        enum_descriptor.GetOptions().Extensions[
            unittest_custom_options_pb2.enumopt].s)
    self.assertEqual(
        "EnumValueAnnotation",
        enum_value_descriptor.GetOptions().Extensions[
            unittest_custom_options_pb2.enumvalopt].s)
    self.assertEqual(
        "ServiceAnnotation",
        service_descriptor.GetOptions().Extensions[
            unittest_custom_options_pb2.serviceopt].s)
    self.assertEqual(
        "MethodAnnotation",
        method_descriptor.GetOptions().Extensions[
            unittest_custom_options_pb2.methodopt].s)
  def testNestedOptions(self):
    """Custom options attached to nested entities resolve correctly.

    Covers a nested message, a field of that message, a nested enum, an
    enum value, and an extension declared inside NestedOptionType.
    """
    nested_message =\
        unittest_custom_options_pb2.NestedOptionType.NestedMessage.DESCRIPTOR
    self.assertEqual(1001, nested_message.GetOptions().Extensions[
        unittest_custom_options_pb2.message_opt1])
    nested_field = nested_message.fields_by_name["nested_field"]
    self.assertEqual(1002, nested_field.GetOptions().Extensions[
        unittest_custom_options_pb2.field_opt1])
    outer_message =\
        unittest_custom_options_pb2.NestedOptionType.DESCRIPTOR
    nested_enum = outer_message.enum_types_by_name["NestedEnum"]
    self.assertEqual(1003, nested_enum.GetOptions().Extensions[
        unittest_custom_options_pb2.enum_opt1])
    nested_enum_value = outer_message.enum_values_by_name["NESTED_ENUM_VALUE"]
    self.assertEqual(1004, nested_enum_value.GetOptions().Extensions[
        unittest_custom_options_pb2.enum_value_opt1])
    nested_extension = outer_message.extensions_by_name["nested_extension"]
    self.assertEqual(1005, nested_extension.GetOptions().Extensions[
        unittest_custom_options_pb2.field_opt2])
def testFileDescriptorReferences(self):
self.assertEqual(self.my_enum.file, self.my_file)
self.assertEqual(self.my_message.file, self.my_file)
  def testFileDescriptor(self):
    """Basic FileDescriptor properties: name, package, and owning pool."""
    self.assertEqual(self.my_file.name, 'some/filename/some.proto')
    self.assertEqual(self.my_file.package, 'protobuf_unittest')
    self.assertEqual(self.my_file.pool, self.pool)
    # Generated modules also belong to the default pool.
    self.assertEqual(unittest_pb2.DESCRIPTOR.pool, descriptor_pool.Default())
  @unittest.skipIf(
      api_implementation.Type() != 'cpp' or api_implementation.Version() != 2,
      'Immutability of descriptors is only enforced in v2 implementation')
  def testImmutableCppDescriptor(self):
    """C++ v2 descriptors reject attribute rebinding and container mutation."""
    message_descriptor = unittest_pb2.TestAllTypes.DESCRIPTOR
    # Rebinding a descriptor attribute must fail outright.
    with self.assertRaises(AttributeError):
      message_descriptor.fields_by_name = None
    # The exposed containers are read-only views.
    with self.assertRaises(TypeError):
      message_descriptor.fields_by_name['Another'] = None
    with self.assertRaises(TypeError):
      message_descriptor.fields.append(None)
class NewDescriptorTest(DescriptorTest):
  """Redo the same tests as above, but with a separate DescriptorPool."""

  def GetDescriptorPool(self):
    # Build a brand-new, empty pool instead of reusing the default one.
    pool = descriptor_pool.DescriptorPool()
    return pool
class GeneratedDescriptorTest(unittest.TestCase):
  """Tests for the properties of descriptors in generated code."""

  def CheckMessageDescriptor(self, message_descriptor):
    """Common assertions for the generated TestAllTypes message descriptor."""
    # Basic properties
    self.assertEqual(message_descriptor.name, 'TestAllTypes')
    self.assertEqual(message_descriptor.full_name,
                     'protobuf_unittest.TestAllTypes')
    # Test equality and hashability
    self.assertEqual(message_descriptor, message_descriptor)
    self.assertEqual(message_descriptor.fields[0].containing_type,
                     message_descriptor)
    self.assertIn(message_descriptor, [message_descriptor])
    self.assertIn(message_descriptor, {message_descriptor: None})
    # Test field containers
    self.CheckDescriptorSequence(message_descriptor.fields)
    self.CheckDescriptorMapping(message_descriptor.fields_by_name)
    self.CheckDescriptorMapping(message_descriptor.fields_by_number)
    self.CheckDescriptorMapping(message_descriptor.fields_by_camelcase_name)

  def CheckFieldDescriptor(self, field_descriptor):
    """Common assertions for the TestAllTypes.optional_int32 field."""
    # Basic properties
    self.assertEqual(field_descriptor.name, 'optional_int32')
    self.assertEqual(field_descriptor.camelcase_name, 'optionalInt32')
    self.assertEqual(field_descriptor.full_name,
                     'protobuf_unittest.TestAllTypes.optional_int32')
    self.assertEqual(field_descriptor.containing_type.name, 'TestAllTypes')
    # Test equality and hashability
    self.assertEqual(field_descriptor, field_descriptor)
    self.assertEqual(
        field_descriptor.containing_type.fields_by_name['optional_int32'],
        field_descriptor)
    self.assertEqual(
        field_descriptor.containing_type.fields_by_camelcase_name[
            'optionalInt32'],
        field_descriptor)
    self.assertIn(field_descriptor, [field_descriptor])
    self.assertIn(field_descriptor, {field_descriptor: None})

  def CheckDescriptorSequence(self, sequence):
    # Verifies that a property like 'messageDescriptor.fields' has all the
    # properties of an immutable abc.Sequence.
    self.assertGreater(len(sequence), 0)  # Sized
    self.assertEqual(len(sequence), len(list(sequence)))  # Iterable
    item = sequence[0]
    self.assertEqual(item, sequence[0])
    self.assertIn(item, sequence)  # Container
    self.assertEqual(sequence.index(item), 0)
    self.assertEqual(sequence.count(item), 1)
    reversed_iterator = reversed(sequence)
    self.assertEqual(list(reversed_iterator), list(sequence)[::-1])
    # After one full pass the reversed iterator must be exhausted.
    self.assertRaises(StopIteration, next, reversed_iterator)

  def CheckDescriptorMapping(self, mapping):
    # Verifies that a property like 'messageDescriptor.fields_by_name' has
    # all the properties of an immutable abc.Mapping.
    self.assertGreater(len(mapping), 0)  # Sized
    self.assertEqual(len(mapping), len(list(mapping)))  # Iterable
    if sys.version_info >= (3,):
      key, item = next(iter(mapping.items()))
    else:
      # Python 2: items() returns a concrete list.
      key, item = mapping.items()[0]
    self.assertIn(key, mapping)  # Container
    self.assertEqual(mapping.get(key), item)
    # keys(), iterkeys() &co
    item = (next(iter(mapping.keys())), next(iter(mapping.values())))
    self.assertEqual(item, next(iter(mapping.items())))
    if sys.version_info < (3,):
      def CheckItems(seq, iterator):
        # The iterator must yield exactly the elements of seq, in order.
        self.assertEqual(next(iterator), seq[0])
        self.assertEqual(list(iterator), seq[1:])
      CheckItems(mapping.keys(), mapping.iterkeys())
      CheckItems(mapping.values(), mapping.itervalues())
      CheckItems(mapping.items(), mapping.iteritems())

  def testDescriptor(self):
    """Exercise the generated message and field descriptors end to end."""
    message_descriptor = unittest_pb2.TestAllTypes.DESCRIPTOR
    self.CheckMessageDescriptor(message_descriptor)
    field_descriptor = message_descriptor.fields_by_name['optional_int32']
    self.CheckFieldDescriptor(field_descriptor)
    field_descriptor = message_descriptor.fields_by_camelcase_name[
        'optionalInt32']
    self.CheckFieldDescriptor(field_descriptor)

  def testCppDescriptorContainer(self):
    # Check that the collection is still valid even if the parent disappeared.
    enum = unittest_pb2.TestAllTypes.DESCRIPTOR.enum_types_by_name['NestedEnum']
    values = enum.values
    del enum
    self.assertEqual('FOO', values[0].name)

  def testCppDescriptorContainer_Iterator(self):
    # Same test with the iterator
    enum = unittest_pb2.TestAllTypes.DESCRIPTOR.enum_types_by_name['NestedEnum']
    values_iter = iter(enum.values)
    del enum
    self.assertEqual('FOO', next(values_iter).name)
class DescriptorCopyToProtoTest(unittest.TestCase):
  """Tests for CopyTo functions of Descriptor."""

  def _AssertProtoEqual(self, actual_proto, expected_class, expected_ascii):
    """Assert actual_proto equals expected_ascii parsed into expected_class."""
    expected_proto = expected_class()
    text_format.Merge(expected_ascii, expected_proto)
    self.assertEqual(
        actual_proto, expected_proto,
        'Not equal,\nActual:\n%s\nExpected:\n%s\n'
        % (str(actual_proto), str(expected_proto)))

  def _InternalTestCopyToProto(self, desc, expected_proto_class,
                               expected_proto_ascii):
    """Copy desc into a fresh proto and compare against the ascii fixture."""
    actual = expected_proto_class()
    desc.CopyToProto(actual)
    self._AssertProtoEqual(
        actual, expected_proto_class, expected_proto_ascii)

  def testCopyToProto_EmptyMessage(self):
    self._InternalTestCopyToProto(
        unittest_pb2.TestEmptyMessage.DESCRIPTOR,
        descriptor_pb2.DescriptorProto,
        TEST_EMPTY_MESSAGE_DESCRIPTOR_ASCII)

  def testCopyToProto_NestedMessage(self):
    TEST_NESTED_MESSAGE_ASCII = """
      name: 'NestedMessage'
      field: <
        name: 'bb'
        number: 1
        label: 1 # Optional
        type: 5 # TYPE_INT32
      >
      """
    self._InternalTestCopyToProto(
        unittest_pb2.TestAllTypes.NestedMessage.DESCRIPTOR,
        descriptor_pb2.DescriptorProto,
        TEST_NESTED_MESSAGE_ASCII)

  def testCopyToProto_ForeignNestedMessage(self):
    TEST_FOREIGN_NESTED_ASCII = """
      name: 'TestForeignNested'
      field: <
        name: 'foreign_nested'
        number: 1
        label: 1 # Optional
        type: 11 # TYPE_MESSAGE
        type_name: '.protobuf_unittest.TestAllTypes.NestedMessage'
      >
      """
    self._InternalTestCopyToProto(
        unittest_pb2.TestForeignNested.DESCRIPTOR,
        descriptor_pb2.DescriptorProto,
        TEST_FOREIGN_NESTED_ASCII)

  def testCopyToProto_ForeignEnum(self):
    TEST_FOREIGN_ENUM_ASCII = """
      name: 'ForeignEnum'
      value: <
        name: 'FOREIGN_FOO'
        number: 4
      >
      value: <
        name: 'FOREIGN_BAR'
        number: 5
      >
      value: <
        name: 'FOREIGN_BAZ'
        number: 6
      >
      """
    self._InternalTestCopyToProto(
        unittest_pb2.ForeignEnum.DESCRIPTOR,
        descriptor_pb2.EnumDescriptorProto,
        TEST_FOREIGN_ENUM_ASCII)

  def testCopyToProto_Options(self):
    # Field-level options (here: deprecated) must survive the round trip.
    TEST_DEPRECATED_FIELDS_ASCII = """
      name: 'TestDeprecatedFields'
      field: <
        name: 'deprecated_int32'
        number: 1
        label: 1 # Optional
        type: 5 # TYPE_INT32
        options: <
          deprecated: true
        >
      >
      """
    self._InternalTestCopyToProto(
        unittest_pb2.TestDeprecatedFields.DESCRIPTOR,
        descriptor_pb2.DescriptorProto,
        TEST_DEPRECATED_FIELDS_ASCII)

  def testCopyToProto_AllExtensions(self):
    TEST_EMPTY_MESSAGE_WITH_EXTENSIONS_ASCII = """
      name: 'TestEmptyMessageWithExtensions'
      extension_range: <
        start: 1
        end: 536870912
      >
      """
    self._InternalTestCopyToProto(
        unittest_pb2.TestEmptyMessageWithExtensions.DESCRIPTOR,
        descriptor_pb2.DescriptorProto,
        TEST_EMPTY_MESSAGE_WITH_EXTENSIONS_ASCII)

  def testCopyToProto_SeveralExtensions(self):
    TEST_MESSAGE_WITH_SEVERAL_EXTENSIONS_ASCII = """
      name: 'TestMultipleExtensionRanges'
      extension_range: <
        start: 42
        end: 43
      >
      extension_range: <
        start: 4143
        end: 4244
      >
      extension_range: <
        start: 65536
        end: 536870912
      >
      """
    self._InternalTestCopyToProto(
        unittest_pb2.TestMultipleExtensionRanges.DESCRIPTOR,
        descriptor_pb2.DescriptorProto,
        TEST_MESSAGE_WITH_SEVERAL_EXTENSIONS_ASCII)

  # Disable this test so we can make changes to the proto file.
  # TODO(xiaofeng): Enable this test after cl/55530659 is submitted.
  #
  # def testCopyToProto_FileDescriptor(self):
  #   UNITTEST_IMPORT_FILE_DESCRIPTOR_ASCII = ("""
  #     name: 'google/protobuf/unittest_import.proto'
  #     package: 'protobuf_unittest_import'
  #     dependency: 'google/protobuf/unittest_import_public.proto'
  #     message_type: <
  #       name: 'ImportMessage'
  #       field: <
  #         name: 'd'
  #         number: 1
  #         label: 1  # Optional
  #         type: 5  # TYPE_INT32
  #       >
  #     >
  #     """ +
  #     """enum_type: <
  #       name: 'ImportEnum'
  #       value: <
  #         name: 'IMPORT_FOO'
  #         number: 7
  #       >
  #       value: <
  #         name: 'IMPORT_BAR'
  #         number: 8
  #       >
  #       value: <
  #         name: 'IMPORT_BAZ'
  #         number: 9
  #       >
  #     >
  #     options: <
  #       java_package: 'com.google.protobuf.test'
  #       optimize_for: 1  # SPEED
  #     >
  #     public_dependency: 0
  #  """)
  #  self._InternalTestCopyToProto(
  #      unittest_import_pb2.DESCRIPTOR,
  #      descriptor_pb2.FileDescriptorProto,
  #      UNITTEST_IMPORT_FILE_DESCRIPTOR_ASCII)

  def testCopyToProto_ServiceDescriptor(self):
    TEST_SERVICE_ASCII = """
      name: 'TestService'
      method: <
        name: 'Foo'
        input_type: '.protobuf_unittest.FooRequest'
        output_type: '.protobuf_unittest.FooResponse'
      >
      method: <
        name: 'Bar'
        input_type: '.protobuf_unittest.BarRequest'
        output_type: '.protobuf_unittest.BarResponse'
      >
      """
    # TODO(rocking): enable this test after the proto descriptor change is
    # checked in.
    #self._InternalTestCopyToProto(
    #    unittest_pb2.TestService.DESCRIPTOR,
    #    descriptor_pb2.ServiceDescriptorProto,
    #    TEST_SERVICE_ASCII)
class MakeDescriptorTest(unittest.TestCase):
  """Tests for descriptor.MakeDescriptor on hand-built descriptor protos."""

  def testMakeDescriptorWithNestedFields(self):
    """MakeDescriptor resolves nested message and enum type references."""
    file_descriptor_proto = descriptor_pb2.FileDescriptorProto()
    file_descriptor_proto.name = 'Foo2'
    message_type = file_descriptor_proto.message_type.add()
    message_type.name = file_descriptor_proto.name
    nested_type = message_type.nested_type.add()
    nested_type.name = 'Sub'
    enum_type = nested_type.enum_type.add()
    enum_type.name = 'FOO'
    enum_type_val = enum_type.value.add()
    enum_type_val.name = 'BAR'
    enum_type_val.number = 3
    field = message_type.field.add()
    field.number = 1
    field.name = 'uint64_field'
    field.label = descriptor.FieldDescriptor.LABEL_REQUIRED
    field.type = descriptor.FieldDescriptor.TYPE_UINT64
    field = message_type.field.add()
    field.number = 2
    field.name = 'nested_message_field'
    field.label = descriptor.FieldDescriptor.LABEL_REQUIRED
    field.type = descriptor.FieldDescriptor.TYPE_MESSAGE
    field.type_name = 'Sub'
    enum_field = nested_type.field.add()
    enum_field.number = 2
    enum_field.name = 'bar_field'
    enum_field.label = descriptor.FieldDescriptor.LABEL_REQUIRED
    enum_field.type = descriptor.FieldDescriptor.TYPE_ENUM
    enum_field.type_name = 'Foo2.Sub.FOO'

    result = descriptor.MakeDescriptor(message_type)
    self.assertEqual(result.fields[0].cpp_type,
                     descriptor.FieldDescriptor.CPPTYPE_UINT64)
    self.assertEqual(result.fields[1].cpp_type,
                     descriptor.FieldDescriptor.CPPTYPE_MESSAGE)
    # The nested message type must be re-parented under the result.
    self.assertEqual(result.fields[1].message_type.containing_type,
                     result)
    self.assertEqual(result.nested_types[0].fields[0].full_name,
                     'Foo2.Sub.bar_field')
    self.assertEqual(result.nested_types[0].fields[0].enum_type,
                     result.nested_types[0].enum_types[0])
    # No options were set anywhere in the input proto.
    self.assertFalse(result.has_options)
    self.assertFalse(result.fields[0].has_options)

  def testMakeDescriptorWithUnsignedIntField(self):
    """Unsigned integer fields map to the unsigned C++ type."""
    file_descriptor_proto = descriptor_pb2.FileDescriptorProto()
    file_descriptor_proto.name = 'Foo'
    message_type = file_descriptor_proto.message_type.add()
    message_type.name = file_descriptor_proto.name
    enum_type = message_type.enum_type.add()
    enum_type.name = 'FOO'
    enum_type_val = enum_type.value.add()
    enum_type_val.name = 'BAR'
    enum_type_val.number = 3
    field = message_type.field.add()
    field.number = 1
    field.name = 'uint64_field'
    field.label = descriptor.FieldDescriptor.LABEL_REQUIRED
    field.type = descriptor.FieldDescriptor.TYPE_UINT64
    enum_field = message_type.field.add()
    enum_field.number = 2
    enum_field.name = 'bar_field'
    enum_field.label = descriptor.FieldDescriptor.LABEL_REQUIRED
    enum_field.type = descriptor.FieldDescriptor.TYPE_ENUM
    enum_field.type_name = 'Foo.FOO'

    result = descriptor.MakeDescriptor(message_type)
    self.assertEqual(result.fields[0].cpp_type,
                     descriptor.FieldDescriptor.CPPTYPE_UINT64)

  def testMakeDescriptorWithOptions(self):
    """Custom options survive a CopyToProto/MakeDescriptor round trip."""
    descriptor_proto = descriptor_pb2.DescriptorProto()
    aggregate_message = unittest_custom_options_pb2.AggregateMessage
    aggregate_message.DESCRIPTOR.CopyToProto(descriptor_proto)
    reformed_descriptor = descriptor.MakeDescriptor(descriptor_proto)
    options = reformed_descriptor.GetOptions()
    self.assertEqual(101,
                     options.Extensions[unittest_custom_options_pb2.msgopt].i)

  def testCamelcaseName(self):
    """camelcase_name is derived from the underscored proto field name."""
    descriptor_proto = descriptor_pb2.DescriptorProto()
    descriptor_proto.name = 'Bar'
    names = ['foo_foo', 'FooBar', 'fooBaz', 'fooFoo', 'foobar']
    camelcase_names = ['fooFoo', 'fooBar', 'fooBaz', 'fooFoo', 'foobar']
    # enumerate() replaces the unidiomatic range(len(...)) indexing.
    for index, name in enumerate(names):
      field = descriptor_proto.field.add()
      field.number = index + 1
      field.name = name
    result = descriptor.MakeDescriptor(descriptor_proto)
    for field, expected in zip(result.fields, camelcase_names):
      self.assertEqual(field.camelcase_name, expected)

  def testJsonName(self):
    """json_name values for a mix of field names and an explicit json_name."""
    descriptor_proto = descriptor_pb2.DescriptorProto()
    descriptor_proto.name = 'TestJsonName'
    names = ['field_name', 'fieldName', 'FieldName',
             '_field_name', 'FIELD_NAME', 'json_name']
    # NOTE(review): every field below sets json_name='@type', yet only the
    # last entry expects '@type' back — the rest expect the derived name.
    # Confirm against MakeDescriptor's json_name handling before relying
    # on this behavior.
    json_names = ['fieldName', 'fieldName', 'FieldName',
                  'FieldName', 'FIELDNAME', '@type']
    for index, name in enumerate(names):
      field = descriptor_proto.field.add()
      field.number = index + 1
      field.name = name
      field.json_name = '@type'
    result = descriptor.MakeDescriptor(descriptor_proto)
    for field, expected in zip(result.fields, json_names):
      self.assertEqual(field.json_name, expected)
# Allow running this test module directly.
if __name__ == '__main__':
  unittest.main()
|
mehulsbhatt/nsscache
|
refs/heads/master
|
nss_cache/config_test.py
|
2
|
# Copyright 2007 Google Inc.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""Unit tests for nss_cache/config.py."""
__author__ = 'vasilios@google.com (Vasilios Hoffman)'
import os
import shutil
import tempfile
import unittest
from nss_cache import config
class TestConfig(unittest.TestCase):
  """Unit tests for config.Config()."""

  def testConfigInit(self):
    """NSSCACHE_CONFIG in the environment overrides the config file path."""
    env = {'NSSCACHE_CONFIG': 'test.conf'}
    conf = config.Config(env)
    # assertEqual replaces the deprecated assertEquals alias.
    self.assertEqual(conf.config_file, env['NSSCACHE_CONFIG'],
                     msg='Failed to override NSSCACHE_CONFIG.')
class TestMapOptions(unittest.TestCase):
  """Unit tests for config.MapOptions()."""

  def testMapOptionsInit(self):
    """A fresh MapOptions exposes dict-valued cache and source options."""
    mapconfig = config.MapOptions()
    # assertIsInstance gives a clearer failure message than
    # assertTrue(isinstance(...)).
    self.assertIsInstance(mapconfig.cache, dict)
    self.assertIsInstance(mapconfig.source, dict)
class TestClassMethods(unittest.TestCase):
  """Unit tests for class-level methods in config.py."""

  def setUp(self):
    # create a directory with a writeable copy of nsscache.conf in it
    self.workdir = tempfile.mkdtemp()
    conf_filename = 'nsscache.conf'
    self.conf_filename = os.path.join(self.workdir, conf_filename)
    shutil.copy(conf_filename, self.conf_filename)
    # 0o640 is the portable octal spelling; the old 0640 literal is a
    # syntax error under Python 3.
    os.chmod(self.conf_filename, 0o640)
    # prepare a config object with this config
    self.conf = config.Config({})
    self.conf.config_file = self.conf_filename

  def tearDown(self):
    shutil.rmtree(self.workdir)

  def _WriteConfig(self, contents):
    """Replace the working copy of nsscache.conf with contents."""
    conf_file = open(self.conf_filename, 'w')
    try:
      conf_file.write(contents)
    finally:
      conf_file.close()

  def _WriteNsswitch(self, lines):
    """Write the given lines to an nsswitch.conf; return its path."""
    nsswitch_filename = os.path.join(self.workdir, 'nsswitch.conf')
    nsswitch_file = open(nsswitch_filename, 'w')
    try:
      nsswitch_file.writelines(lines)
    finally:
      nsswitch_file.close()
    return nsswitch_filename

  def testLoadConfigSingleMap(self):
    self._WriteConfig('[DEFAULT]\n'
                      'source = foo\n'
                      'cache = foo\n'
                      'maps = foo\n'
                      'timestamp_dir = foo\n')
    config.LoadConfig(self.conf)
    self.assertEqual(['foo'], self.conf.maps)

  def testLoadConfigTwoMaps(self):
    self._WriteConfig('[DEFAULT]\n'
                      'source = foo\n'
                      'cache = foo\n'
                      'maps = foo, bar\n'
                      'timestamp_dir = foo\n')
    config.LoadConfig(self.conf)
    self.assertEqual(['foo', 'bar'], self.conf.maps)

  def testLoadConfigMapsWhitespace(self):
    # Whitespace around the comma-separated map names must be stripped.
    self._WriteConfig('[DEFAULT]\n'
                      'source = foo\n'
                      'cache = foo\n'
                      'maps = foo, bar , baz\n'
                      'timestamp_dir = foo\n')
    config.LoadConfig(self.conf)
    self.assertEqual(['foo', 'bar', 'baz'], self.conf.maps)

  def testLoadConfigExample(self):
    """Test that we parse and load the example config.

    Note that this also tests MapOptions() creation and our overriding
    of defaults in LoadConfig.

    This requires that nsscache.conf exists in the top of the source tree.
    Changes to the configuration options may break this test.
    """
    conf = self.conf
    config.LoadConfig(conf)
    passwd = conf.options['passwd']
    group = conf.options['group']
    shadow = conf.options['shadow']
    automount = conf.options['automount']
    # Every map in the example config uses the ldap source.
    for mapconfig in (passwd, group, shadow, automount):
      self.assertIsInstance(mapconfig, config.MapOptions)
      self.assertEqual(mapconfig.source['name'], 'ldap')
    self.assertEqual(passwd.cache['name'], 'cache')
    self.assertEqual(group.cache['name'], 'cache')
    self.assertEqual(shadow.cache['name'], 'cache')
    self.assertEqual(automount.cache['name'], 'files')
    self.assertEqual(passwd.source['base'],
                     'ou=people,dc=example,dc=com')
    self.assertEqual(passwd.source['filter'],
                     '(objectclass=posixAccount)')
    self.assertEqual(group.source['base'],
                     'ou=group,dc=example,dc=com')
    self.assertEqual(group.source['filter'],
                     '(objectclass=posixGroup)')

  def testLoadConfigOptionalDefaults(self):
    self._WriteConfig('[DEFAULT]\n'
                      'source = foo\n'
                      'cache = foo\n'
                      'maps = foo, bar , baz\n'
                      'lockfile = foo\n'
                      'timestamp_dir = foo\n')
    config.LoadConfig(self.conf)
    self.assertEqual(self.conf.lockfile, 'foo')

  def testLoadConfigStripQuotesFromStrings(self):
    # Only the outermost matching quotes should be stripped.
    self._WriteConfig('[DEFAULT]\n'
                      'source = "ldap"\n'  # needs to be ldap due to magic
                      'cache = \'b\'ar\'\n'
                      'maps = quux\n'
                      'timestamp_dir = foo\n'
                      'ldap_tls_require_cert = \'blah\'\n'
                      '[quux]\n'
                      'ldap_klingon = "qep\'a\' wa\'maH loS\'DIch"\n')
    config.LoadConfig(self.conf)
    self.assertEqual('ldap', self.conf.options['quux'].source['name'])
    self.assertEqual('b\'ar', self.conf.options['quux'].cache['name'])
    self.assertEqual('blah',
                     self.conf.options['quux'].source['tls_require_cert'])
    self.assertEqual('qep\'a\' wa\'maH loS\'DIch',
                     self.conf.options['quux'].source['klingon'])

  def testLoadConfigConvertsNumbers(self):
    self._WriteConfig('[DEFAULT]\n'
                      'source = foo\n'
                      'cache = foo\n'
                      'maps = foo\n'
                      'timestamp_dir = foo\n'
                      'foo_string = test\n'
                      'foo_float = 1.23\n'
                      'foo_int = 1\n')
    config.LoadConfig(self.conf)
    foo_dict = self.conf.options['foo'].source
    self.assertIsInstance(foo_dict['string'], str)
    self.assertIsInstance(foo_dict['float'], float)
    self.assertIsInstance(foo_dict['int'], int)
    self.assertEqual(foo_dict['string'], 'test')
    self.assertEqual(foo_dict['float'], 1.23)
    self.assertEqual(foo_dict['int'], 1)

  def testOptions(self):
    # check the empty case.
    options = config.Options([], 'foo')
    self.assertEqual(options, {})
    # create a list like from ConfigParser.items()
    items = [('maps', 'foo, bar, foobar'),
             ('nssdb_dir', '/path/to/dir'),
             ('ldap_uri', 'TEST_URI'),
             ('source', 'foo'),
             ('cache', 'bar'),
             ('ldap_base', 'TEST_BASE'),
             ('ldap_filter', 'TEST_FILTER')]
    options = config.Options(items, 'ldap')
    # dict.has_key() was removed in Python 3; use membership tests.
    self.assertIn('uri', options)
    self.assertIn('base', options)
    self.assertIn('filter', options)
    self.assertEqual(options['uri'], 'TEST_URI')
    self.assertEqual(options['base'], 'TEST_BASE')
    self.assertEqual(options['filter'], 'TEST_FILTER')

  def testParseNSSwitchConf(self):
    nsswitch_filename = self._WriteNsswitch(['passwd: files db\n',
                                             'group: files db\n',
                                             'shadow: files db\n'])
    expected_switch = {'passwd': ['files', 'db'],
                       'group': ['files', 'db'],
                       'shadow': ['files', 'db']}
    self.assertEqual(expected_switch,
                     config.ParseNSSwitchConf(nsswitch_filename))
    os.unlink(nsswitch_filename)

  def testVerifyConfiguration(self):
    self._WriteConfig('[DEFAULT]\n'
                      'source = foo\n'
                      'cache = foo\n'
                      'maps = passwd, group, shadow\n'
                      'timestamp_dir = foo\n')
    config.LoadConfig(self.conf)
    nsswitch_filename = self._WriteNsswitch(['passwd: files db\n',
                                             'group: files db\n',
                                             'shadow: files db\n'])
    # (warnings, errors) == (0, 0): config and nsswitch.conf agree.
    self.assertEqual((0, 0),
                     config.VerifyConfiguration(self.conf,
                                                nsswitch_filename))
    os.unlink(nsswitch_filename)

  def testVerifyConfigurationWithCache(self):
    self._WriteConfig('[DEFAULT]\n'
                      'source = foo\n'
                      'cache = files\n'
                      'maps = passwd, group, shadow\n'
                      'timestamp_dir = foo\n'
                      'files_cache_filename_suffix = cache')
    config.LoadConfig(self.conf)
    nsswitch_filename = self._WriteNsswitch(['passwd: cache\n',
                                             'group: cache\n',
                                             'shadow: cache\n'])
    self.assertEqual((0, 0),
                     config.VerifyConfiguration(self.conf,
                                                nsswitch_filename))
    os.unlink(nsswitch_filename)

  def testVerifyConfigurationWithFiles(self):
    self._WriteConfig('[DEFAULT]\n'
                      'source = foo\n'
                      'cache = files\n'
                      'maps = passwd, group, shadow\n'
                      'timestamp_dir = foo\n')
    config.LoadConfig(self.conf)
    nsswitch_filename = self._WriteNsswitch(['passwd: files\n',
                                             'group: files\n',
                                             'shadow: files\n'])
    self.assertEqual((0, 0),
                     config.VerifyConfiguration(self.conf,
                                                nsswitch_filename))
    os.unlink(nsswitch_filename)

  def testVerifyBadConfigurationWithCache(self):
    # 'files' in nsswitch.conf while the cache suffix says 'cache'
    # should produce one warning per configured map.
    self._WriteConfig('[DEFAULT]\n'
                      'source = foo\n'
                      'cache = files\n'
                      'maps = passwd, group, shadow\n'
                      'timestamp_dir = foo\n'
                      'files_cache_filename_suffix = cache')
    config.LoadConfig(self.conf)
    nsswitch_filename = self._WriteNsswitch(['passwd: files\n',
                                             'group: files\n',
                                             'shadow: files\n'])
    self.assertEqual((3, 0),
                     config.VerifyConfiguration(self.conf,
                                                nsswitch_filename))
    os.unlink(nsswitch_filename)

  def testVerifyBadConfigurationIncrementsWarningCount(self):
    self._WriteConfig('[DEFAULT]\n'
                      'source = foo\n'
                      'cache = foo\n'
                      'maps = passwd, group, shadow\n'
                      'timestamp_dir = foo\n')
    config.LoadConfig(self.conf)
    # 'ldap' on the passwd line does not match the configured cache.
    nsswitch_filename = self._WriteNsswitch(['passwd: files ldap\n',
                                             'group: files db\n',
                                             'shadow: files db\n'])
    self.assertEqual((1, 0),
                     config.VerifyConfiguration(self.conf,
                                                nsswitch_filename))
    os.unlink(nsswitch_filename)

  def testVerifyNoMapConfigurationIsError(self):
    self._WriteConfig('[DEFAULT]\n'
                      'source = foo\n'
                      'cache = foo\n'
                      'maps = \n'
                      'timestamp_dir = foo\n')
    config.LoadConfig(self.conf)
    nsswitch_filename = self._WriteNsswitch(['passwd: files ldap\n'])
    # An empty maps list is an error, not a warning.
    self.assertEqual((0, 1),
                     config.VerifyConfiguration(self.conf,
                                                nsswitch_filename))
    os.unlink(nsswitch_filename)
# Allow running this test module directly.
if __name__ == '__main__':
  unittest.main()
|
geekboxzone/mmallow_prebuilts_gcc_darwin-x86_aarch64_aarch64-linux-android-4.9
|
refs/heads/geekbox
|
share/gdb/python/gdb/printing.py
|
75
|
# Pretty-printer utilities.
# Copyright (C) 2010-2014 Free Software Foundation, Inc.
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Utilities for working with pretty-printers."""
import gdb
import gdb.types
import re
import sys
if sys.version_info[0] > 2:
    # Python 3 removed basestring and long; alias them to their Python 3
    # equivalents so the rest of this module can use the Python 2 names
    # unconditionally.
    basestring = str
    long = int
class PrettyPrinter(object):
    """Base class for pretty-printers.

    A printer is identified by `name', which must be unique among all
    printers registered in the same context (an objfile, a progspace, or
    gdb globally) and should describe what it can pretty-print, e.g.
    "StringPiece" or "protobufs".

    `subprinters' is either None or an iterable whose elements carry a
    `name' attribute (and, potentially, an `enabled' attribute).  It
    models the case where "one" pretty-printer is really a collection of
    per-type printers — e.g. the libstdc++ printer, which dispatches on
    regexps.  Keeping the attribute on this base class (rather than in
    subclasses) formalizes that support for commands/pretty_printers.py.

    `enabled' is a boolean flag; printers start out enabled.
    """

    def __init__(self, name, subprinters=None):
        self.enabled = True
        self.subprinters = subprinters
        self.name = name

    def __call__(self, val):
        """Return a printer for VAL; concrete subclasses must override."""
        raise NotImplementedError("PrettyPrinter __call__")
class SubPrettyPrinter(object):
    """Base class for sub-pretty-printers.

    Subprinters are not required to derive from this class, but it
    documents the minimal interface they must provide: a `name' string
    and an `enabled' flag.
    """

    def __init__(self, name):
        self.enabled = True
        self.name = name
def register_pretty_printer(obj, printer, replace=False):
    """Register pretty-printer PRINTER with OBJ.

    The printer is added to the front of the search list, thus one can override
    an existing printer if one needs to.  Use a different name when overriding
    an existing printer, otherwise an exception will be raised; multiple
    printers with the same name are disallowed.

    Arguments:
        obj: Either an objfile, progspace, or None (in which case the printer
            is registered globally).
        printer: Either a function of one argument (old way) or any object
            which has attributes: name, enabled, __call__.
        replace: If True replace any existing copy of the printer.
            Otherwise if the printer already exists raise an exception.

    Returns:
        Nothing.

    Raises:
        TypeError: A problem with the type of the printer.
        ValueError: The printer's name contains a semicolon ";".
        RuntimeError: A printer with the same name is already registered.

    If the caller wants the printer to be listable and disableable, it must
    follow the PrettyPrinter API.  This applies to the old way (functions) too.
    If printer is an object, __call__ is a method of two arguments:
    self, and the value to be pretty-printed.  See PrettyPrinter.
    """
    # Watch for both __name__ and name.
    # Functions get the former for free, but we don't want to use an
    # attribute named __foo__ for pretty-printers-as-objects.
    # If printer has both, we use `name'.
    if not hasattr(printer, "__name__") and not hasattr(printer, "name"):
        raise TypeError("printer missing attribute: name")
    if hasattr(printer, "name") and not hasattr(printer, "enabled"):
        raise TypeError("printer missing attribute: enabled")
    if not hasattr(printer, "__call__"):
        raise TypeError("printer missing attribute: __call__")

    # The checks above guarantee at least one of `name'/`__name__' exists.
    # BUG FIX: the verbose messages below used an undefined local `name',
    # raising NameError with `set verbose on' (and AttributeError for
    # function printers in the per-objfile branch).
    if hasattr(printer, "name"):
        printer_name = printer.name
    else:
        printer_name = printer.__name__

    if obj is None:
        if gdb.parameter("verbose"):
            gdb.write("Registering global %s pretty-printer ...\n" % printer_name)
        obj = gdb
    else:
        if gdb.parameter("verbose"):
            gdb.write("Registering %s pretty-printer for %s ...\n" %
                      (printer_name, obj.filename))

    if hasattr(printer, "name"):
        if not isinstance(printer.name, basestring):
            raise TypeError("printer name is not a string")
        # If printer provides a name, make sure it doesn't contain ";".
        # Semicolon is used by the info/enable/disable pretty-printer commands
        # to delimit subprinters.
        if printer.name.find(";") >= 0:
            raise ValueError("semicolon ';' in printer name")
        # Also make sure the name is unique.
        # Alas, we can't do the same for functions and __name__, they could
        # all have a canonical name like "lookup_function".
        # PERF: gdb records printers in a list, making this inefficient.
        i = 0
        for p in obj.pretty_printers:
            if hasattr(p, "name") and p.name == printer.name:
                if replace:
                    del obj.pretty_printers[i]
                    break
                else:
                    raise RuntimeError("pretty-printer already registered: %s" %
                                       printer.name)
            i = i + 1

    obj.pretty_printers.insert(0, printer)
class RegexpCollectionPrettyPrinter(PrettyPrinter):
    """A pretty-printer that dispatches to regexp-selected subprinters.

    Intended usage:

        pretty_printer = RegexpCollectionPrettyPrinter("my_library")
        pretty_printer.add_printer("myclass1", "^myclass1$", MyClass1Printer)
        ...
        pretty_printer.add_printer("myclassN", "^myclassN$", MyClassNPrinter)
        register_pretty_printer(obj, pretty_printer)
    """

    class RegexpSubprinter(SubPrettyPrinter):
        """One collection entry: a type-name regexp plus a printer factory."""

        def __init__(self, name, regexp, gen_printer):
            super(RegexpCollectionPrettyPrinter.RegexpSubprinter, self).__init__(name)
            self.regexp = regexp
            self.gen_printer = gen_printer
            # Compile once at registration; matching happens per value.
            self.compiled_re = re.compile(regexp)

    def __init__(self, name):
        super(RegexpCollectionPrettyPrinter, self).__init__(name, [])

    def add_printer(self, name, regexp, gen_printer):
        """Append a subprinter to the collection.

        Arguments:
            name: The name of the subprinter.  Deliberately separate from
                the regexp: regexps are cumbersome to pass back to the
                enable/disable commands.
            regexp: The regular expression, as a string.
            gen_printer: A callable that, given a value, returns an
                object which pretty-prints it.

        Returns:
            Nothing.
        """
        entry = self.RegexpSubprinter(name, regexp, gen_printer)
        self.subprinters.append(entry)

    def __call__(self, val):
        """Return a printer for VAL, or None if no subprinter matches."""
        # Printers are matched against the basic type's tag; anonymous
        # types have no tag and cannot be matched.
        typename = gdb.types.get_basic_type(val.type).tag
        if not typename:
            return None

        # First enabled subprinter whose regexp matches the tag wins.
        for sub in self.subprinters:
            if sub.enabled and sub.compiled_re.search(typename):
                return sub.gen_printer(val)

        # Cannot find a pretty printer.
        return None
# A helper class for printing enum types. This class is instantiated
# with a list of enumerators to print a particular Value.
class _EnumInstance:
def __init__(self, enumerators, val):
self.enumerators = enumerators
self.val = val
def to_string(self):
flag_list = []
v = long(self.val)
any_found = False
for (e_name, e_value) in self.enumerators:
if v & e_value != 0:
flag_list.append(e_name)
v = v & ~e_value
any_found = True
if not any_found or v != 0:
# Leftover value.
flag_list.append('<unknown: 0x%x>' % v)
return "0x%x [%s]" % (self.val, " | ".join(flag_list))
class FlagEnumerationPrinter(PrettyPrinter):
    """A pretty-printer which can be used to print a flag-style enumeration.

    A flag-style enumeration is one where the enumerators are or'd
    together to create values.  The new printer will print these
    symbolically using '|' notation.  The printer must be registered
    manually.  This printer is most useful when an enum is flag-like,
    but has some overlap.  GDB's built-in printing will not handle
    this case, but this printer will attempt to.
    """

    def __init__(self, enum_type):
        # The printer name doubles as the type name looked up on first use.
        super(FlagEnumerationPrinter, self).__init__(enum_type)
        self.initialized = False

    def __call__(self, val):
        # Lazily fetch the enumerators the first time a value of this
        # type is printed; the type need not exist at registration time.
        if not self.initialized:
            self.initialized = True
            flags = gdb.lookup_type(self.name)
            self.enumerators = []
            for field in flags.fields():
                self.enumerators.append((field.name, field.enumval))
            # Sorting the enumerators by value usually does the right
            # thing.
            # BUG FIX: entries are (name, value) tuples, so the sort key
            # is x[1]; the old `x.enumval' raised AttributeError on the
            # first value printed.
            self.enumerators.sort(key=lambda x: x[1])

        if self.enabled:
            return _EnumInstance(self.enumerators, val)
        else:
            return None
|
MichaelDoyle/Diamond
|
refs/heads/master
|
src/collectors/ip/ip.py
|
3
|
# coding=utf-8
"""
The IPCollector class collects metrics on IP stats
#### Dependencies
* /proc/net/snmp
#### Allowed Metric Names
<table>
<tr><th>Name</th></tr>
<tr><th>InAddrErrors</th></tr>
<tr><th>InDelivers</th></tr>
<tr><th>InDiscards</th></tr>
<tr><th>InHdrErrors</th></tr>
<tr><th>InReceives</th></tr>
<tr><th>InUnknownProtos</th></tr>
<tr><th>OutDiscards</th></tr>
<tr><th>OutNoRoutes</th></tr>
<tr><th>OutRequests</th></tr>
</table>
"""
import diamond.collector
import os
class IPCollector(diamond.collector.Collector):
    """Collects IP protocol statistics from the "Ip:" lines of /proc/net/snmp.

    Each named field is published as a gauge for the few values that are
    settings (see GAUGES) and as a counter otherwise.
    """

    # Files scanned for an "Ip:" header line followed by a value line.
    PROC = [
        '/proc/net/snmp',
    ]

    # These entries are point-in-time settings, not monotonic counters.
    GAUGES = [
        'Forwarding',
        'DefaultTTL',
    ]

    def process_config(self):
        """Normalize config: a missing allowed_names means "collect all"."""
        super(IPCollector, self).process_config()
        if self.config['allowed_names'] is None:
            self.config['allowed_names'] = []

    def get_default_config_help(self):
        """Describe the collector-specific configuration keys."""
        config_help = super(IPCollector, self).get_default_config_help()
        config_help.update({
            'allowed_names': 'list of entries to collect, empty to collect all'
        })
        return config_help

    def get_default_config(self):
        """ Returns the default collector settings
        """
        config = super(IPCollector, self).get_default_config()
        config.update({
            'path': 'ip',
            'allowed_names': 'InAddrErrors, InDelivers, InDiscards, ' +
            'InHdrErrors, InReceives, InUnknownProtos, OutDiscards, ' +
            'OutNoRoutes, OutRequests'
        })
        return config

    def collect(self):
        """Read each PROC file and publish the Ip metrics found in it."""
        metrics = {}

        for filepath in self.PROC:
            if not os.access(filepath, os.R_OK):
                self.log.error('Permission to access %s denied', filepath)
                continue

            header = ''
            data = ''

            # Seek the file for the lines which start with Ip: the header
            # names the fields and the following line holds the values.
            # FIXES: use a context manager so the handle is closed even if
            # reading raises; drop the old dead `if not file:` check (open()
            # raises on failure, it never returns a falsy object) and stop
            # shadowing the `file` builtin.
            with open(filepath) as proc_file:
                while True:
                    line = proc_file.readline()
                    # Reached EOF?
                    if len(line) == 0:
                        break
                    # Line has metrics?
                    if line.startswith('Ip'):
                        header = line
                        data = proc_file.readline()
                        break

            # No data from the file?
            if header == '' or data == '':
                self.log.error('%s has no lines starting with Ip', filepath)
                continue

            header = header.split()
            data = data.split()

            # Zip up the keys and values, skipping the leading "Ip:" label.
            # range/int (rather than py2-only xrange/long) keep this module
            # working on both Python 2 and Python 3; behavior is identical.
            for i in range(1, len(header)):
                metrics[header[i]] = data[i]

        for metric_name in metrics.keys():
            # An empty allowed_names list means "publish everything".
            if ((len(self.config['allowed_names']) > 0 and
                 metric_name not in self.config['allowed_names'])):
                continue

            value = int(metrics[metric_name])

            # Publish the metric
            if metric_name in self.GAUGES:
                self.publish_gauge(metric_name, value, 0)
            else:
                self.publish_counter(metric_name, value, 0)
|
tjanez/ansible
|
refs/heads/devel
|
lib/ansible/modules/files/patch.py
|
22
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2012, Luis Alberto Perez Lazaro <luisperlazaro@gmail.com>
# (c) 2015, Jakub Jirutka <jakub@jirutka.cz>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Module release metadata consumed by Ansible's documentation tooling.
ANSIBLE_METADATA = {'status': ['stableinterface'],
                    'supported_by': 'community',
                    'version': '1.0'}
DOCUMENTATION = '''
---
module: patch
author:
- "Jakub Jirutka (@jirutka)"
- "Luis Alberto Perez Lazaro (@luisperlaz)"
version_added: 1.9
description:
- Apply patch files using the GNU patch tool.
short_description: Apply patch files using the GNU patch tool.
options:
basedir:
description:
- Path of a base directory in which the patch file will be applied.
May be omitted when C(dest) option is specified, otherwise required.
required: false
dest:
description:
- Path of the file on the remote machine to be patched.
- The names of the files to be patched are usually taken from the patch
file, but if there's just one file to be patched it can specified with
this option.
required: false
aliases: [ "originalfile" ]
src:
description:
- Path of the patch file as accepted by the GNU patch tool. If
C(remote_src) is 'no', the patch source file is looked up from the
module's "files" directory.
required: true
aliases: [ "patchfile" ]
remote_src:
description:
- If C(no), it will search for src at originating/master machine, if C(yes) it will
go to the remote/target machine for the src. Default is C(no).
choices: [ "yes", "no" ]
required: false
default: "no"
strip:
description:
- Number that indicates the smallest prefix containing leading slashes
that will be stripped from each file name found in the patch file.
For more information see the strip parameter of the GNU patch tool.
required: false
type: "int"
default: "0"
backup:
version_added: "2.0"
description:
- passes --backup --version-control=numbered to patch,
producing numbered backup copies
choices: [ 'yes', 'no' ]
default: 'no'
binary:
version_added: "2.0"
description:
- Setting to C(yes) will disable patch's heuristic for transforming CRLF
line endings into LF. Line endings of src and dest must match. If set to
C(no), patch will replace CRLF in src files on POSIX.
required: false
type: "bool"
default: "no"
note:
- This module requires GNU I(patch) utility to be installed on the remote host.
'''
EXAMPLES = '''
- name: apply patch to one file
patch:
src: /tmp/index.html.patch
dest: /var/www/index.html
- name: apply patch to multiple files under basedir
patch:
src: /tmp/customize.patch
basedir: /var/www
strip: 1
'''
import os
from os import path, R_OK, W_OK
class PatchError(Exception):
    """Raised when the GNU patch tool reports a failure."""
def is_already_applied(patch_func, patch_file, basedir, dest_file=None, binary=False, strip=0):
    """Return True when PATCH_FILE is already applied under BASEDIR.

    Runs ``patch --reverse --dry-run`` via PATCH_FUNC: a zero exit status
    means the reverse patch would apply cleanly, i.e. the forward patch
    has already been applied.
    """
    opts = [
        '--quiet',
        '--reverse',
        '--forward',
        '--dry-run',
        "--strip=%s" % strip,
        "--directory='%s'" % basedir,
        "--input='%s'" % patch_file,
    ]
    if binary:
        opts.append('--binary')
    if dest_file:
        opts.append("'%s'" % dest_file)

    rc = patch_func(opts)[0]
    return rc == 0
def apply_patch(patch_func, patch_file, basedir, dest_file=None, binary=False, strip=0, dry_run=False, backup=False):
    """Apply PATCH_FILE under BASEDIR via PATCH_FUNC.

    Raises PatchError (with patch's stderr, falling back to stdout) when
    the tool exits non-zero; returns None on success.
    """
    opts = [
        '--quiet',
        '--forward',
        '--batch',
        '--reject-file=-',
        "--strip=%s" % strip,
        "--directory='%s'" % basedir,
        "--input='%s'" % patch_file,
    ]
    if dry_run:
        opts.append('--dry-run')
    if binary:
        opts.append('--binary')
    if dest_file:
        opts.append("'%s'" % dest_file)
    if backup:
        # Both flags travel as one list element; the caller joins the
        # option list with spaces before running the command.
        opts.append('--backup --version-control=numbered')

    rc, out, err = patch_func(opts)
    if rc != 0:
        raise PatchError(err or out)
def main():
    """Module entry point: validate parameters, then apply the patch idempotently."""
    module = AnsibleModule(
        argument_spec={
            'src': {'required': True, 'aliases': ['patchfile']},
            'dest': {'aliases': ['originalfile']},
            'basedir': {},
            'strip': {'default': 0, 'type': 'int'},
            'remote_src': {'default': False, 'type': 'bool'},
            # NB: for 'backup' parameter, semantics is slightly different from standard
            # since patch will create numbered copies, not strftime("%Y-%m-%d@%H:%M:%S~")
            'backup': {'default': False, 'type': 'bool'},
            'binary': {'default': False, 'type': 'bool'},
        },
        required_one_of=[['dest', 'basedir']],
        supports_check_mode=True
    )

    # Create type object as namespace for module params
    p = type('Params', (), module.params)

    p.src = os.path.expanduser(p.src)
    if not os.access(p.src, R_OK):
        module.fail_json(msg="src %s doesn't exist or not readable" % (p.src))

    if p.dest and not os.access(p.dest, W_OK):
        module.fail_json(msg="dest %s doesn't exist or not writable" % (p.dest))

    if p.basedir and not path.exists(p.basedir):
        module.fail_json(msg="basedir %s doesn't exist" % (p.basedir))

    # required_one_of above guarantees dest is set whenever basedir is absent.
    if not p.basedir:
        p.basedir = path.dirname(p.dest)

    patch_bin = module.get_bin_path('patch')
    if patch_bin is None:
        module.fail_json(msg="patch command not found")
    # Runs the external tool; opts are pre-quoted strings joined with spaces
    # (see is_already_applied/apply_patch for how opts lists are built).
    patch_func = lambda opts: module.run_command("%s %s" % (patch_bin, ' '.join(opts)))

    # patch need an absolute file name
    p.src = os.path.abspath(p.src)

    changed = False
    # Idempotency: report changed=False when the patch is already applied.
    if not is_already_applied(patch_func, p.src, p.basedir, dest_file=p.dest, binary=p.binary, strip=p.strip):
        try:
            # In check mode the patch is only dry-run, never written.
            apply_patch( patch_func, p.src, p.basedir, dest_file=p.dest, binary=p.binary, strip=p.strip,
                         dry_run=module.check_mode, backup=p.backup )
            changed = True
        except PatchError:
            e = get_exception()
            module.fail_json(msg=str(e))

    module.exit_json(changed=changed)
# import module snippets
# (the star import supplies AnsibleModule and get_exception used by main())
from ansible.module_utils.basic import *

if __name__ == '__main__':
    main()
|
heronotears/devops-sso
|
refs/heads/master
|
adapter/sqlite.py
|
1
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import math
import sqlite3
import config
from .base import BaseAdapter, Cache
class SqliteAdapter(BaseAdapter):
    """SQLite implementation of the adapter: paged listings plus host search.

    All queries share one connection/cursor created by connect(); rows use
    sqlite3.Row so callers can index results by column name.
    """

    def __init__(self):
        super(SqliteAdapter, self).__init__()
        self.connect()

    def connect(self):
        """Open the configured SQLite database in autocommit mode."""
        # isolation_level=None -> autocommit; this adapter only reads.
        self.conn = sqlite3.connect(config.DATABASES['default']['NAME'], isolation_level=None)
        self.conn.row_factory = sqlite3.Row
        self.cursor = self.conn.cursor()

    @Cache.boot(['page', 'limit'])
    def pager_groups(self, page, limit=config.PAGE_LIMIT):
        """Return one page of `groups` rows plus paging metadata."""
        page, offset = self.get_offset(page, limit)
        statement = {
            'count': {
                'sql': 'SELECT COUNT(*) AS _count FROM `groups`',
                'parameters': ()
            },
            'data': {
                'sql': 'SELECT * FROM `groups` ORDER BY `group_id` LIMIT ?,?',
                'parameters': (offset, limit)
            }
        }
        return self._pager(statement, page, limit)

    @Cache.boot(['group_id', 'page', 'limit'])
    def pager_hosts(self, group_id, page, limit=config.PAGE_LIMIT):
        """Return one page of `hosts` rows belonging to GROUP_ID."""
        page, offset = self.get_offset(page, limit)
        statement = {
            'count': {
                'sql': 'SELECT COUNT(*) AS _count FROM `hosts` WHERE `group_id`=?',
                'parameters': (group_id,)
            },
            'data': {
                'sql': 'SELECT * FROM `hosts` WHERE `group_id`=? ORDER BY `host_id` LIMIT ?,?',
                'parameters': (group_id, offset, limit)
            }
        }
        return self._pager(statement, page, limit)

    @Cache.boot(['group_id', 'host_id', 'page', 'limit'])
    def pager_users(self, group_id, host_id, page, limit=config.PAGE_LIMIT):
        """Return one page of `users` rows for (GROUP_ID, HOST_ID)."""
        page, offset = self.get_offset(page, limit)
        statement = {
            'count': {
                'sql': 'SELECT COUNT(*) AS _count FROM `users` WHERE `group_id`=? AND `host_id`=?',
                'parameters': (group_id, host_id)
            },
            'data': {
                'sql': 'SELECT * FROM `users` WHERE `group_id`=? AND `host_id`=? ORDER BY `user_id` LIMIT ?,?',
                'parameters': (group_id, host_id, offset, limit)
            }
        }
        return self._pager(statement, page, limit)

    def _pager(self, statement, page, limit):
        """Run STATEMENT's count and data queries; package the page result.

        Returns a dict with 'current' (page number), 'count' (total rows),
        'total' (page count, rounded up) and 'items' (the rows).
        """
        self.cursor.execute(statement['count']['sql'], statement['count']['parameters'])
        row = self.cursor.fetchone()
        count = row['_count']
        # Total pages, rounded up (float division matters on Python 2).
        total = int(math.ceil(float(count) / limit))
        self.cursor.execute(statement['data']['sql'], statement['data']['parameters'])
        data = self.cursor.fetchall()
        result = {
            'current': page,
            'count': count,
            'total': total,
            'items': data
        }
        return result

    @Cache.boot(['text'])
    def search(self, text):
        """Prefix-search host names; blank input returns no rows.

        BUG FIX: the emptiness check previously ran *after* the '%'
        wildcard was appended, so the guard was always true and a blank
        query matched every host.  Strip and test the raw input first.
        """
        text = text.strip()
        result = []
        if text:
            pattern = '%s%%' % text
            self.cursor.execute('SELECT `host` FROM `hosts` WHERE `host` LIKE ? ORDER BY `host_id` ASC', (pattern,))
            rows = self.cursor.fetchall()
            for row in rows:
                result.append(row['host'])
        return result
|
mitsuhiko/django
|
refs/heads/master
|
django/contrib/auth/models.py
|
2
|
import datetime
import hashlib
import urllib
from django.contrib import auth
from django.contrib.auth.signals import user_logged_in
from django.core.exceptions import ImproperlyConfigured
from django.db import models
from django.db.models.manager import EmptyManager
from django.contrib.contenttypes.models import ContentType
from django.utils.encoding import smart_str
from django.utils.translation import ugettext_lazy as _
from django.utils.crypto import constant_time_compare
UNUSABLE_PASSWORD = '!' # This will never be a valid hash
def get_hexdigest(algorithm, salt, raw_password):
    """Hash RAW_PASSWORD with SALT using ALGORITHM ('md5', 'sha1' or 'crypt').

    Returns the hex digest as a string.  Raises ValueError for an unknown
    algorithm, or for 'crypt' on platforms without the crypt module.
    """
    raw_password, salt = smart_str(raw_password), smart_str(salt)

    if algorithm == 'crypt':
        try:
            import crypt
        except ImportError:
            raise ValueError('"crypt" password algorithm not supported in this environment')
        return crypt.crypt(raw_password, salt)

    hashers = {'md5': hashlib.md5, 'sha1': hashlib.sha1}
    if algorithm not in hashers:
        raise ValueError("Got unknown password algorithm type in password.")
    return hashers[algorithm](salt + raw_password).hexdigest()
def check_password(raw_password, enc_password):
    """Return True when RAW_PASSWORD matches ENC_PASSWORD.

    ENC_PASSWORD is stored as 'algorithm$salt$hash'; the comparison is
    constant-time to avoid leaking information about the stored hash.
    """
    algorithm, salt, stored_hash = enc_password.split('$')
    computed = get_hexdigest(algorithm, salt, raw_password)
    return constant_time_compare(stored_hash, computed)
def update_last_login(sender, user, **kwargs):
    """Signal receiver that stamps USER's last_login with the current time
    and saves the user."""
    login_time = datetime.datetime.now()
    user.last_login = login_time
    user.save()
# Keep User.last_login fresh on every successful authentication.
user_logged_in.connect(update_last_login)
class SiteProfileNotAvailable(Exception):
    """Raised when AUTH_PROFILE_MODULE is unset, malformed or cannot be loaded."""
class PermissionManager(models.Manager):
    """Manager adding natural-key lookup (codename plus content type)."""

    def get_by_natural_key(self, codename, app_label, model):
        content_type = ContentType.objects.get_by_natural_key(app_label, model)
        return self.get(codename=codename, content_type=content_type)
class Permission(models.Model):
    """The permissions system provides a way to assign permissions to specific users and groups of users.
    The permission system is used by the Django admin site, but may also be useful in your own code. The Django admin site uses permissions as follows:
    - The "add" permission limits the user's ability to view the "add" form and add an object.
    - The "change" permission limits a user's ability to view the change list, view the "change" form and change an object.
    - The "delete" permission limits the ability to delete an object.
    Permissions are set globally per type of object, not per specific object instance. It is possible to say "Mary may change news stories," but it's not currently possible to say "Mary may change news stories, but only the ones she created herself" or "Mary may only change news stories that have a certain status or publication date."
    Three basic permissions -- add, change and delete -- are automatically created for each Django model.
    """
    # NOTE(review): field declaration order is preserved as-is; Django models
    # are sensitive to declaration ordering.
    name = models.CharField(_('name'), max_length=50)
    content_type = models.ForeignKey(ContentType)
    codename = models.CharField(_('codename'), max_length=100)
    objects = PermissionManager()

    class Meta:
        verbose_name = _('permission')
        verbose_name_plural = _('permissions')
        # A codename is unique within its content type, not globally.
        unique_together = (('content_type', 'codename'),)
        ordering = ('content_type__app_label', 'content_type__model', 'codename')

    def __unicode__(self):
        # Rendered as "app_label | content type | name".
        return u"%s | %s | %s" % (
            unicode(self.content_type.app_label),
            unicode(self.content_type),
            unicode(self.name))

    def natural_key(self):
        # Lets serializers reference permissions without primary keys.
        return (self.codename,) + self.content_type.natural_key()
    natural_key.dependencies = ['contenttypes.contenttype']
class Group(models.Model):
    """Groups are a generic way of categorizing users to apply permissions, or some other label, to those users. A user can belong to any number of groups.
    A user in a group automatically has all the permissions granted to that group. For example, if the group Site editors has the permission can_edit_home_page, any user in that group will have that permission.
    Beyond permissions, groups are a convenient way to categorize users to apply some label, or extended functionality, to them. For example, you could create a group 'Special users', and you could write code that would do special things to those users -- such as giving them access to a members-only portion of your site, or sending them members-only email messages.
    """
    # Group names are unique site-wide.
    name = models.CharField(_('name'), max_length=80, unique=True)
    permissions = models.ManyToManyField(Permission, verbose_name=_('permissions'), blank=True)

    class Meta:
        verbose_name = _('group')
        verbose_name_plural = _('groups')

    def __unicode__(self):
        return self.name
class UserManager(models.Manager):
    """Manager with helpers for creating users and random passwords."""

    def create_user(self, username, email, password=None):
        """
        Creates and saves a User with the given username, email and password.
        """
        now = datetime.datetime.now()
        # Normalize the address by lowercasing the domain part of the email
        # address; a malformed address (no '@') is stored unchanged.
        try:
            email_name, domain_part = email.strip().split('@', 1)
        except ValueError:
            pass
        else:
            email = '@'.join([email_name, domain_part.lower()])
        new_user = self.model(
            username=username,
            email=email,
            is_staff=False,
            is_active=True,
            is_superuser=False,
            last_login=now,
            date_joined=now,
        )
        new_user.set_password(password)
        new_user.save(using=self._db)
        return new_user

    def create_superuser(self, username, email, password):
        """Create a saved user with staff/active/superuser flags all set."""
        user = self.create_user(username, email, password)
        user.is_staff = True
        user.is_active = True
        user.is_superuser = True
        user.save(using=self._db)
        return user

    def make_random_password(self, length=10, allowed_chars='abcdefghjkmnpqrstuvwxyzABCDEFGHJKLMNPQRSTUVWXYZ23456789'):
        """Generate a random password of LENGTH drawn from ALLOWED_CHARS.

        Note that the default alphabet omits "I" and letters that look
        like it -- just to avoid confusion.
        """
        from random import choice
        return ''.join(choice(allowed_chars) for _ in range(length))
# A few helper functions for common logic between User and AnonymousUser.
def _user_get_all_permissions(user, obj):
    """Union of the permissions reported by every capable auth backend.

    Backends lacking get_all_permissions are skipped, as are backends
    that do not support anonymous users when USER is anonymous.  When OBJ
    is given, only backends supporting object permissions contribute.
    """
    permissions = set()
    is_anon = user.is_anonymous()
    for backend in auth.get_backends():
        if is_anon and not backend.supports_anonymous_user:
            continue
        if not hasattr(backend, "get_all_permissions"):
            continue
        if obj is None:
            permissions.update(backend.get_all_permissions(user))
        elif backend.supports_object_permissions:
            permissions.update(backend.get_all_permissions(user, obj))
    return permissions
def _user_has_perm(user, perm, obj):
    """Return True as soon as any suitable backend grants PERM (on OBJ).

    A backend participates when it explicitly supports inactive users
    (for inactive, non-anonymous users) or when the user is not anonymous
    (or the backend supports anonymous users).
    """
    is_anon = user.is_anonymous()
    is_active = user.is_active
    for backend in auth.get_backends():
        usable = ((not is_active and not is_anon and backend.supports_inactive_user) or
                  (not is_anon or backend.supports_anonymous_user))
        if not usable or not hasattr(backend, "has_perm"):
            continue
        if obj is None:
            if backend.has_perm(user, perm):
                return True
        elif (backend.supports_object_permissions and
              backend.has_perm(user, perm, obj)):
            return True
    return False
def _user_has_module_perms(user, app_label):
    """Return True if any suitable backend grants a permission in APP_LABEL.

    Backend eligibility follows the same rules as _user_has_perm.
    """
    is_anon = user.is_anonymous()
    is_active = user.is_active
    for backend in auth.get_backends():
        usable = ((not is_active and not is_anon and backend.supports_inactive_user) or
                  (not is_anon or backend.supports_anonymous_user))
        if usable and hasattr(backend, "has_module_perms"):
            if backend.has_module_perms(user, app_label):
                return True
    return False
class User(models.Model):
    """
    Users within the Django authentication system are represented by this model.
    Username and password are required. Other fields are optional.
    """
    # NOTE(review): field declaration order is preserved as-is; Django models
    # are sensitive to declaration ordering.
    username = models.CharField(_('username'), max_length=30, unique=True, help_text=_("Required. 30 characters or fewer. Letters, numbers and @/./+/-/_ characters"))
    first_name = models.CharField(_('first name'), max_length=30, blank=True)
    last_name = models.CharField(_('last name'), max_length=30, blank=True)
    email = models.EmailField(_('e-mail address'), blank=True)
    # Stored as 'algorithm$salt$hexdigest' (see set_password/check_password).
    password = models.CharField(_('password'), max_length=128, help_text=_("Use '[algo]$[salt]$[hexdigest]' or use the <a href=\"password/\">change password form</a>."))
    is_staff = models.BooleanField(_('staff status'), default=False, help_text=_("Designates whether the user can log into this admin site."))
    is_active = models.BooleanField(_('active'), default=True, help_text=_("Designates whether this user should be treated as active. Unselect this instead of deleting accounts."))
    is_superuser = models.BooleanField(_('superuser status'), default=False, help_text=_("Designates that this user has all permissions without explicitly assigning them."))
    last_login = models.DateTimeField(_('last login'), default=datetime.datetime.now)
    date_joined = models.DateTimeField(_('date joined'), default=datetime.datetime.now)
    groups = models.ManyToManyField(Group, verbose_name=_('groups'), blank=True,
        help_text=_("In addition to the permissions manually assigned, this user will also get all permissions granted to each group he/she is in."))
    user_permissions = models.ManyToManyField(Permission, verbose_name=_('user permissions'), blank=True)
    objects = UserManager()

    class Meta:
        verbose_name = _('user')
        verbose_name_plural = _('users')

    def __unicode__(self):
        return self.username

    def get_absolute_url(self):
        return "/users/%s/" % urllib.quote(smart_str(self.username))

    def is_anonymous(self):
        """
        Always returns False. This is a way of comparing User objects to
        anonymous users.
        """
        return False

    def is_authenticated(self):
        """
        Always return True. This is a way to tell if the user has been
        authenticated in templates.
        """
        return True

    def get_full_name(self):
        "Returns the first_name plus the last_name, with a space in between."
        full_name = u'%s %s' % (self.first_name, self.last_name)
        return full_name.strip()

    def set_password(self, raw_password):
        """Hash raw_password (sha1, 5-char random salt) into self.password.

        A None password marks the account as having no usable password.
        Does not save the model.
        """
        if raw_password is None:
            self.set_unusable_password()
        else:
            import random
            algo = 'sha1'
            # Salt is the first 5 hex chars of a hash of two random floats.
            salt = get_hexdigest(algo, str(random.random()), str(random.random()))[:5]
            hsh = get_hexdigest(algo, salt, raw_password)
            self.password = '%s$%s$%s' % (algo, salt, hsh)

    def check_password(self, raw_password):
        """
        Returns a boolean of whether the raw_password was correct. Handles
        encryption formats behind the scenes.
        """
        # Backwards-compatibility check. Older passwords won't include the
        # algorithm or salt.
        if '$' not in self.password:
            is_correct = (self.password == get_hexdigest('md5', '', raw_password))
            if is_correct:
                # Convert the password to the new, more secure format.
                self.set_password(raw_password)
                self.save()
            return is_correct
        return check_password(raw_password, self.password)

    def set_unusable_password(self):
        # Sets a value that will never be a valid hash
        self.password = UNUSABLE_PASSWORD

    def has_usable_password(self):
        """Return False for unset or explicitly-unusable passwords."""
        if self.password is None \
            or self.password == UNUSABLE_PASSWORD:
            return False
        else:
            return True

    def get_group_permissions(self, obj=None):
        """
        Returns a list of permission strings that this user has through
        his/her groups. This method queries all available auth backends.
        If an object is passed in, only permissions matching this object
        are returned.
        """
        permissions = set()
        for backend in auth.get_backends():
            if hasattr(backend, "get_group_permissions"):
                if obj is not None:
                    if backend.supports_object_permissions:
                        permissions.update(
                            backend.get_group_permissions(self, obj)
                        )
                else:
                    permissions.update(backend.get_group_permissions(self))
        return permissions

    def get_all_permissions(self, obj=None):
        return _user_get_all_permissions(self, obj)

    def has_perm(self, perm, obj=None):
        """
        Returns True if the user has the specified permission. This method
        queries all available auth backends, but returns immediately if any
        backend returns True. Thus, a user who has permission from a single
        auth backend is assumed to have permission in general. If an object
        is provided, permissions for this specific object are checked.
        """
        # Active superusers have all permissions.
        if self.is_active and self.is_superuser:
            return True

        # Otherwise we need to check the backends.
        return _user_has_perm(self, perm, obj)

    def has_perms(self, perm_list, obj=None):
        """
        Returns True if the user has each of the specified permissions.
        If object is passed, it checks if the user has all required perms
        for this object.
        """
        for perm in perm_list:
            if not self.has_perm(perm, obj):
                return False
        return True

    def has_module_perms(self, app_label):
        """
        Returns True if the user has any permissions in the given app
        label. Uses pretty much the same logic as has_perm, above.
        """
        # Active superusers have all permissions.
        if self.is_active and self.is_superuser:
            return True

        return _user_has_module_perms(self, app_label)

    def email_user(self, subject, message, from_email=None):
        "Sends an email to this User."
        from django.core.mail import send_mail
        send_mail(subject, message, from_email, [self.email])

    def get_profile(self):
        """
        Returns site-specific profile for this user. Raises
        SiteProfileNotAvailable if this site does not allow profiles.
        """
        # The loaded profile is cached per-instance in _profile_cache.
        if not hasattr(self, '_profile_cache'):
            from django.conf import settings
            if not getattr(settings, 'AUTH_PROFILE_MODULE', False):
                raise SiteProfileNotAvailable('You need to set AUTH_PROFILE_MO'
                                              'DULE in your project settings')
            try:
                app_label, model_name = settings.AUTH_PROFILE_MODULE.split('.')
            except ValueError:
                raise SiteProfileNotAvailable('app_label and model_name should'
                        ' be separated by a dot in the AUTH_PROFILE_MODULE set'
                        'ting')
            try:
                model = models.get_model(app_label, model_name)
                if model is None:
                    raise SiteProfileNotAvailable('Unable to load the profile '
                        'model, check AUTH_PROFILE_MODULE in your project sett'
                        'ings')
                # Query on the same database this instance came from.
                self._profile_cache = model._default_manager.using(self._state.db).get(user__id__exact=self.id)
                self._profile_cache.user = self
            except (ImportError, ImproperlyConfigured):
                raise SiteProfileNotAvailable
        return self._profile_cache
class AnonymousUser(object):
    """Stand-in for User when no one is authenticated.

    Mirrors the User API: permission checks delegate to the auth backends
    (which may support anonymous users), while persistence and password
    operations are unsupported and raise NotImplementedError.
    """
    id = None
    username = ''
    is_staff = False
    is_active = False
    is_superuser = False
    _groups = EmptyManager()
    _user_permissions = EmptyManager()

    def __init__(self):
        pass

    def __unicode__(self):
        return 'AnonymousUser'

    def __str__(self):
        return unicode(self).encode('utf-8')

    def __eq__(self, other):
        # All AnonymousUser instances compare equal to each other.
        return isinstance(other, self.__class__)

    def __ne__(self, other):
        return not self.__eq__(other)

    def __hash__(self):
        return 1  # instances always return the same hash value

    def save(self):
        raise NotImplementedError

    def delete(self):
        raise NotImplementedError

    def set_password(self, raw_password):
        raise NotImplementedError

    def check_password(self, raw_password):
        raise NotImplementedError

    def _get_groups(self):
        # Always-empty manager: anonymous users belong to no groups.
        return self._groups
    groups = property(_get_groups)

    def _get_user_permissions(self):
        # Always-empty manager: anonymous users hold no direct permissions.
        return self._user_permissions
    user_permissions = property(_get_user_permissions)

    def get_group_permissions(self, obj=None):
        return set()

    def get_all_permissions(self, obj=None):
        return _user_get_all_permissions(self, obj=obj)

    def has_perm(self, perm, obj=None):
        return _user_has_perm(self, perm, obj=obj)

    def has_perms(self, perm_list, obj=None):
        for perm in perm_list:
            if not self.has_perm(perm, obj):
                return False
        return True

    def has_module_perms(self, module):
        return _user_has_module_perms(self, module)

    def is_anonymous(self):
        return True

    def is_authenticated(self):
        return False
|
jocelynj/weboob
|
refs/heads/soge
|
weboob/applications/weboorrents/weboorrents.py
|
1
|
# -*- coding: utf-8 -*-
# Copyright(C) 2010 Romain Bignon
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, version 3 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
from __future__ import with_statement
import sys
from weboob.capabilities.torrent import ICapTorrent
from weboob.tools.application.repl import ReplApplication
from weboob.tools.application.formatters.iformatter import IFormatter
__all__ = ['Weboorrents']
def sizeof_fmt(num):
    """Return *num* bytes as a short human-readable string, e.g. '2.0 KB'.

    The value is rendered with one decimal, left-justified in at least
    four characters, followed by the unit.  Values of 1024 TB or more
    are reported in TB (the original implementation fell off the loop
    and returned None for them).
    """
    for unit in ['bytes', 'KB', 'MB', 'GB', 'TB']:
        if num < 1024.0:
            return "%-4.1f%s" % (num, unit)
        num /= 1024.0
    # num has been divided one time too many for TB; scale back up.
    return "%-4.1f%s" % (num * 1024.0, 'TB')
class TorrentInfoFormatter(IFormatter):
    """Detailed multi-line rendering of a single torrent (``info`` command)."""

    MANDATORY_FIELDS = ('id', 'name', 'size', 'seeders', 'leechers', 'url', 'files', 'description')

    def flush(self):
        # Each torrent is rendered immediately; nothing is buffered.
        pass

    def format_dict(self, item):
        segments = [u'%s%s%s' % (ReplApplication.BOLD, item['name'], ReplApplication.NC)]
        segments.append('ID: %s' % item['id'])
        segments.append('Size: %s' % sizeof_fmt(item['size']))
        segments.append('Seeders: %s' % item['seeders'])
        segments.append('Leechers: %s' % item['leechers'])
        segments.append('URL: %s' % item['url'])
        segments.append('')
        segments.append('%sFiles%s' % (ReplApplication.BOLD, ReplApplication.NC))
        segments.extend(' * %s' % f for f in item['files'])
        segments.append('')
        segments.append('%sDescription%s' % (ReplApplication.BOLD, ReplApplication.NC))
        segments.append(item['description'])
        return '\n'.join(segments)
class TorrentListFormatter(IFormatter):
    """Compact per-torrent listing used by the ``search`` command."""

    MANDATORY_FIELDS = ('id', 'name', 'size', 'seeders', 'leechers')

    count = 0

    def flush(self):
        # Reset the running index so the next listing starts at 1 again.
        self.count = 0

    def format_dict(self, item):
        self.count += 1
        if self.interactive:
            # Interactive sessions show a 1-based index the user can type back.
            backend = item['id'].split('@', 1)[1]
            header = u'%s* (%d) %s (%s)%s\n' % (ReplApplication.BOLD, self.count,
                                                item['name'], backend,
                                                ReplApplication.NC)
        else:
            header = u'%s* (%s) %s%s\n' % (ReplApplication.BOLD, item['id'],
                                           item['name'], ReplApplication.NC)
        stats = '  %10s   (Seed: %2d / Leech: %2d)' % (sizeof_fmt(item['size']),
                                                       item['seeders'],
                                                       item['leechers'])
        return header + stats
class Weboorrents(ReplApplication):
    """Console client to search and fetch torrents through weboob
    ICapTorrent backends.  NOTE: Python 2 syntax (print statements)."""
    # Application metadata consumed by the ReplApplication framework.
    APPNAME = 'weboorrents'
    VERSION = '0.4'
    COPYRIGHT = 'Copyright(C) 2010 Romain Bignon'
    # Only backends implementing the torrent capability are loaded.
    CAPS = ICapTorrent
    EXTRA_FORMATTERS = {'torrent_list': TorrentListFormatter,
                        'torrent_info': TorrentInfoFormatter,
                        }
    COMMANDS_FORMATTERS = {'search': 'torrent_list',
                           'info': 'torrent_info',
                           }
    # Results of the last `search`; do_search rebinds it as an instance
    # attribute, and interactive commands refer to entries by 1-based index.
    torrents = []

    def _complete_id(self):
        # Build "<id>@<backend>" completion candidates from the last search.
        return ['%s@%s' % (torrent.id, torrent.backend) for torrent in self.torrents]

    def complete_info(self, text, line, *ignored):
        # readline completion hook for the "info" command.
        args = line.split(' ')
        if len(args) == 2:
            return self._complete_id()

    def parse_id(self, id):
        # In interactive mode the user may give the 1-based index shown in
        # the last search listing; translate it back to "<id>@<backend>".
        # A bad index silently falls through and `id` is parsed verbatim.
        if self.interactive:
            try:
                torrent = self.torrents[int(id) - 1]
            except (IndexError,ValueError):
                pass
            else:
                id = '%s@%s' % (torrent.id, torrent.backend)
        return ReplApplication.parse_id(self, id)

    def do_info(self, id):
        """
        info ID

        Get information about a torrent.
        """
        _id, backend_name = self.parse_id(id)
        found = 0
        # Query every matching backend; several may know the same id.
        for backend, torrent in self.do('get_torrent', _id, backends=backend_name):
            if torrent:
                self.format(torrent)
                found = 1
        if not found:
            print >>sys.stderr, 'Torrent "%s" not found' % id
        else:
            self.flush()

    def complete_getfile(self, text, line, *ignored):
        # Complete the ID for the first argument, then filesystem paths.
        args = line.split(' ', 2)
        if len(args) == 2:
            return self._complete_id()
        elif len(args) >= 3:
            return self.path_completer(args[2])

    def do_getfile(self, line):
        """
        getfile ID FILENAME

        Get the .torrent file.
        FILENAME is where to write the file. If FILENAME is '-',
        the file is written to stdout.
        """
        id, dest = self.parseargs(line, 2, 2)
        _id, backend_name = self.parse_id(id)
        for backend, buf in self.do('get_torrent_file', _id, backends=backend_name):
            if buf:
                if dest == '-':
                    # '-' dumps the raw torrent contents to stdout.
                    print buf
                else:
                    try:
                        with open(dest, 'w') as f:
                            f.write(buf)
                    except IOError, e:
                        print >>sys.stderr, 'Unable to write .torrent in "%s": %s' % (dest, e)
                        return 1
                return
        print >>sys.stderr, 'Torrent "%s" not found' % id

    def do_search(self, pattern):
        """
        search [PATTERN]

        Search torrents.
        """
        self.torrents = []
        if not pattern:
            # No pattern: backends list their latest torrents instead.
            pattern = None
        self.set_formatter_header(u'Search pattern: %s' % pattern if pattern else u'Latest torrents')
        for backend, torrent in self.do('iter_torrents', pattern=pattern):
            # Remember results so other commands can refer to them by index.
            self.torrents.append(torrent)
            self.format(torrent)
        self.flush()
|
JensRantil/cligh
|
refs/heads/master
|
setup.py
|
1
|
#!/usr/bin/env python
from distutils.core import setup
# Package metadata for distutils; kept in a dict so it reads like a manifest.
metadata = dict(
    name='cligh',
    version='0.1',
    description='Command-line interface to GitHub',
    author='Christopher M. Brannon',
    author_email='cmbrannon79@gmail.com',
    license='BSD',
    packages=['cligh'],
    scripts=['bin/cligh'],
)

setup(**metadata)
|
bealdav/OCB
|
refs/heads/patch-1
|
addons/mrp_repair/__openerp__.py
|
55
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Repairs Management',
'version': '1.0',
'category': 'Manufacturing',
'description': """
The aim is to have a complete module to manage all products repairs.
====================================================================
The following topics should be covered by this module:
------------------------------------------------------
* Add/remove products in the reparation
* Impact for stocks
* Invoicing (products and/or services)
* Warranty concept
* Repair quotation report
* Notes for the technician and for the final customer
""",
'author': 'OpenERP SA',
'images': ['images/repair_order.jpeg'],
'depends': ['mrp', 'sale', 'account'],
'website': 'https://www.odoo.com/page/manufacturing',
'data': [
'security/ir.model.access.csv',
'security/mrp_repair_security.xml',
'mrp_repair_data.xml',
'mrp_repair_sequence.xml',
'wizard/mrp_repair_cancel_view.xml',
'wizard/mrp_repair_make_invoice_view.xml',
'mrp_repair_view.xml',
'mrp_repair_workflow.xml',
'mrp_repair_report.xml',
'views/report_mrprepairorder.xml',
],
'demo': ['mrp_repair_demo.yml'],
'test': ['test/mrp_repair_users.yml',
'test/test_mrp_repair_noneinv.yml',
'test/test_mrp_repair_b4inv.yml',
'test/test_mrp_repair_afterinv.yml',
'test/test_mrp_repair_cancel.yml',
'test/test_mrp_repair_fee.yml',
],
'installable': True,
'auto_install': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
CJ-Jewell/ThinkStats2
|
refs/heads/master
|
code/relay.py
|
68
|
"""This file contains code used in "Think Stats",
by Allen B. Downey, available from greenteapress.com
Copyright 2014 Allen B. Downey
License: GNU GPLv3 http://www.gnu.org/licenses/gpl.html
"""
from __future__ import print_function, division
import numpy as np
import thinkplot
import thinkstats2
"""
Sample line.
Place Div/Tot Div Guntime Nettime Pace Name Ag S Race# City/state
===== ======== ===== ======= ======= ===== ====================== == = ===== =======================
97 26/256 M4049 42:48 42:44 6:53 Allen Downey 42 M 337 Needham MA
"""
def ConvertPaceToSpeed(pace):
    """Converts pace in MM:SS per mile to MPH."""
    minutes, seconds = (int(part) for part in pace.split(':'))
    total_seconds = minutes * 60 + seconds
    # miles-per-second inverted, scaled to miles-per-hour
    return 1 / total_seconds * 60 * 60
def CleanLine(line):
    """Converts a line from coolrunning results to a tuple of values.

    Returns None for lines that do not look like a result row.
    """
    fields = line.split()
    if len(fields) < 6:
        return None
    place, divtot, div, gun, net, pace = fields[:6]

    # A real result row has "n/m" in the division-total column ...
    if '/' not in divtot:
        return None
    # ... and colons in all three time columns.
    if any(':' not in value for value in (gun, net, pace)):
        return None

    return place, divtot, div, gun, net, pace
def ReadResults(filename='Apr25_27thAn_set1.shtml'):
    """Read results from a file and return a list of tuples.

    filename: name of the coolrunning results page to parse

    Lines that CleanLine rejects are skipped.  The file is now opened
    with a context manager so the handle is closed even on error
    (the original leaked the open file object).
    """
    results = []
    with open(filename) as fp:
        for line in fp:
            t = CleanLine(line)
            if t:
                results.append(t)
    return results
def GetSpeeds(results, column=5):
    """Extract the pace column and return a list of speeds in MPH.

    results: sequence of result tuples
    column: index of the pace field within each tuple
    """
    return [ConvertPaceToSpeed(record[column]) for record in results]
def BinData(data, low, high, n):
    """Rounds data off into bins.

    data: sequence of numbers
    low: low value
    high: high value
    n: number of bins

    returns: sequence of numbers
    """
    # Map [low, high] onto [0, n], round to the nearest bin index,
    # then map back to the original scale.
    scaled = (np.array(data) - low) / (high - low) * n
    return np.round(scaled) * (high - low) / n + low
def main():
    """Read the race results, bin the speeds, and plot their PMF."""
    rows = ReadResults()
    speeds_mph = GetSpeeds(rows)
    speeds_mph = BinData(speeds_mph, 3, 12, 100)

    pmf = thinkstats2.Pmf(speeds_mph, 'speeds')
    thinkplot.Pmf(pmf)
    thinkplot.Show(title='PMF of running speed',
                   xlabel='speed (mph)',
                   ylabel='probability')


if __name__ == '__main__':
    main()
|
rhdedgar/openshift-tools
|
refs/heads/stg
|
openshift/installer/vendored/openshift-ansible-3.5.127/roles/lib_openshift/library/oc_serviceaccount_secret.py
|
12
|
#!/usr/bin/env python
# pylint: disable=missing-docstring
# flake8: noqa: T001
# ___ ___ _ _ ___ ___ _ _____ ___ ___
# / __| __| \| | __| _ \ /_\_ _| __| \
# | (_ | _|| .` | _|| / / _ \| | | _|| |) |
# \___|___|_|\_|___|_|_\/_/_\_\_|_|___|___/_ _____
# | \ / _ \ | \| |/ _ \_ _| | __| \_ _|_ _|
# | |) | (_) | | .` | (_) || | | _|| |) | | | |
# |___/ \___/ |_|\_|\___/ |_| |___|___/___| |_|
#
# Copyright 2016 Red Hat, Inc. and/or its affiliates
# and other contributors as indicated by the @author tags.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# -*- -*- -*- Begin included fragment: lib/import.py -*- -*- -*-
'''
OpenShiftCLI class that wraps the oc commands in a subprocess
'''
# pylint: disable=too-many-lines
from __future__ import print_function
import atexit
import copy
import json
import os
import re
import shutil
import subprocess
import tempfile
# pylint: disable=import-error
try:
import ruamel.yaml as yaml
except ImportError:
import yaml
from ansible.module_utils.basic import AnsibleModule
# -*- -*- -*- End included fragment: lib/import.py -*- -*- -*-
# -*- -*- -*- Begin included fragment: doc/serviceaccount_secret -*- -*- -*-
DOCUMENTATION = '''
---
module: oc_serviceaccount_secret
short_description: Module to manage openshift service account secrets
description:
- Manage openshift service account secrets programmatically.
options:
state:
description:
- If present, the service account will be linked with the secret if it is not already. If absent, the service account will be unlinked from the secret if it is already linked. If list, information about the service account secrets will be gathered and returned as part of the Ansible call results.
required: false
default: present
choices: ["present", "absent", "list"]
aliases: []
kubeconfig:
description:
- The path for the kubeconfig file to use for authentication
required: false
default: /etc/origin/master/admin.kubeconfig
aliases: []
debug:
description:
- Turn on debug output.
required: false
default: false
aliases: []
service_account:
description:
- Name of the service account.
required: true
default: None
aliases: []
namespace:
description:
- Namespace of the service account and secret.
required: true
default: None
aliases: []
secret:
description:
- The secret that should be linked to the service account.
required: false
default: None
aliases: []
author:
- "Kenny Woodson <kwoodson@redhat.com>"
extends_documentation_fragment: []
'''
EXAMPLES = '''
- name: get secrets of a service account
oc_serviceaccount_secret:
state: list
service_account: builder
namespace: default
register: sasecretout
- name: Link a service account to a specific secret
oc_serviceaccount_secret:
service_account: builder
secret: mynewsecret
namespace: default
register: sasecretout
'''
# -*- -*- -*- End included fragment: doc/serviceaccount_secret -*- -*- -*-
# -*- -*- -*- Begin included fragment: ../../lib_utils/src/class/yedit.py -*- -*- -*-
# pylint: disable=undefined-variable,missing-docstring
# noqa: E301,E302
class YeditException(Exception):
    '''Raised for any Yedit parsing or editing failure.'''
    pass
# pylint: disable=too-many-public-methods
class Yedit(object):
    ''' Class to modify yaml files '''
    re_valid_key = r"(((\[-?\d+\])|([0-9a-zA-Z%s/_-]+)).?)+$"
    re_key = r"(?:\[(-?\d+)\])|([0-9a-zA-Z%s/_-]+)"
    com_sep = set(['.', '#', '|', ':'])

    # pylint: disable=too-many-arguments
    def __init__(self,
                 filename=None,
                 content=None,
                 content_type='yaml',
                 separator='.',
                 backup=False):
        self.content = content
        self._separator = separator
        self.filename = filename
        self.__yaml_dict = content
        self.content_type = content_type
        self.backup = backup
        self.load(content_type=self.content_type)
        if self.__yaml_dict is None:
            self.__yaml_dict = {}

    @property
    def separator(self):
        ''' getter method for separator '''
        return self._separator

    @separator.setter
    def separator(self, inc_sep):
        ''' setter method for separator '''
        # BUGFIX: the setter previously took no value argument and returned
        # self._separator instead of assigning, so `obj.separator = x`
        # raised TypeError and the separator could never be changed.
        self._separator = inc_sep

    @property
    def yaml_dict(self):
        ''' getter method for yaml_dict '''
        return self.__yaml_dict

    @yaml_dict.setter
    def yaml_dict(self, value):
        ''' setter method for yaml_dict '''
        self.__yaml_dict = value

    @staticmethod
    def parse_key(key, sep='.'):
        '''parse the key allowing the appropriate separator'''
        common_separators = list(Yedit.com_sep - set([sep]))
        return re.findall(Yedit.re_key % ''.join(common_separators), key)

    @staticmethod
    def valid_key(key, sep='.'):
        '''validate the incoming key'''
        common_separators = list(Yedit.com_sep - set([sep]))
        if not re.match(Yedit.re_valid_key % ''.join(common_separators), key):
            return False
        return True

    @staticmethod
    def remove_entry(data, key, sep='.'):
        ''' remove data at location key '''
        if key == '' and isinstance(data, dict):
            data.clear()
            return True
        elif key == '' and isinstance(data, list):
            del data[:]
            return True

        if not (key and Yedit.valid_key(key, sep)) and \
           isinstance(data, (list, dict)):
            return None

        key_indexes = Yedit.parse_key(key, sep)
        for arr_ind, dict_key in key_indexes[:-1]:
            if dict_key and isinstance(data, dict):
                data = data.get(dict_key, None)
            elif (arr_ind and isinstance(data, list) and
                  int(arr_ind) <= len(data) - 1):
                data = data[int(arr_ind)]
            else:
                return None

        # process last index for remove
        # expected list entry
        if key_indexes[-1][0]:
            if isinstance(data, list) and int(key_indexes[-1][0]) <= len(data) - 1:  # noqa: E501
                del data[int(key_indexes[-1][0])]
                return True

        # expected dict entry
        elif key_indexes[-1][1]:
            if isinstance(data, dict):
                del data[key_indexes[-1][1]]
                return True

    @staticmethod
    def add_entry(data, key, item=None, sep='.'):
        ''' Add an item to a dictionary with key notation a.b.c
            d = {'a': {'b': 'c'}}
            key = a#b
            return c
        '''
        if key == '':
            pass
        elif (not (key and Yedit.valid_key(key, sep)) and
              isinstance(data, (list, dict))):
            return None

        key_indexes = Yedit.parse_key(key, sep)
        for arr_ind, dict_key in key_indexes[:-1]:
            if dict_key:
                if isinstance(data, dict) and dict_key in data and data[dict_key]:  # noqa: E501
                    data = data[dict_key]
                    continue

                elif data and not isinstance(data, dict):
                    raise YeditException("Unexpected item type found while going through key " +
                                         "path: {} (at key: {})".format(key, dict_key))

                # create intermediate dicts along the path
                data[dict_key] = {}
                data = data[dict_key]

            elif (arr_ind and isinstance(data, list) and
                  int(arr_ind) <= len(data) - 1):
                data = data[int(arr_ind)]
            else:
                raise YeditException("Unexpected item type found while going through key path: {}".format(key))

        if key == '':
            data = item

        # process last index for add
        # expected list entry
        elif key_indexes[-1][0] and isinstance(data, list) and int(key_indexes[-1][0]) <= len(data) - 1:  # noqa: E501
            data[int(key_indexes[-1][0])] = item

        # expected dict entry
        elif key_indexes[-1][1] and isinstance(data, dict):
            data[key_indexes[-1][1]] = item

        # didn't add/update to an existing list, nor add/update key to a dict
        # so we must have been provided some syntax like a.b.c[<int>] = "data" for a
        # non-existent array
        else:
            raise YeditException("Error adding to object at path: {}".format(key))

        return data

    @staticmethod
    def get_entry(data, key, sep='.'):
        ''' Get an item from a dictionary with key notation a.b.c
            d = {'a': {'b': 'c'}}
            key = a.b
            return c
        '''
        if key == '':
            pass
        elif (not (key and Yedit.valid_key(key, sep)) and
              isinstance(data, (list, dict))):
            return None

        key_indexes = Yedit.parse_key(key, sep)
        for arr_ind, dict_key in key_indexes:
            if dict_key and isinstance(data, dict):
                data = data.get(dict_key, None)
            elif (arr_ind and isinstance(data, list) and
                  int(arr_ind) <= len(data) - 1):
                data = data[int(arr_ind)]
            else:
                return None

        return data

    @staticmethod
    def _write(filename, contents):
        ''' Actually write the file contents to disk. This helps with mocking. '''
        # Write to a sibling temp file then rename for a (same-fs) atomic swap.
        tmp_filename = filename + '.yedit'

        with open(tmp_filename, 'w') as yfd:
            yfd.write(contents)

        os.rename(tmp_filename, filename)

    def write(self):
        ''' write to file '''
        if not self.filename:
            raise YeditException('Please specify a filename.')

        if self.backup and self.file_exists():
            shutil.copy(self.filename, self.filename + '.orig')

        # Try to set format attributes if supported
        try:
            self.yaml_dict.fa.set_block_style()
        except AttributeError:
            pass

        # Try to use RoundTripDumper if supported.
        try:
            Yedit._write(self.filename, yaml.dump(self.yaml_dict, Dumper=yaml.RoundTripDumper))
        except AttributeError:
            Yedit._write(self.filename, yaml.safe_dump(self.yaml_dict, default_flow_style=False))

        return (True, self.yaml_dict)

    def read(self):
        ''' read from file '''
        # check if it exists
        if self.filename is None or not self.file_exists():
            return None

        contents = None
        with open(self.filename) as yfd:
            contents = yfd.read()

        return contents

    def file_exists(self):
        ''' return whether file exists '''
        if os.path.exists(self.filename):
            return True

        return False

    def load(self, content_type='yaml'):
        ''' return yaml file '''
        contents = self.read()

        if not contents and not self.content:
            return None

        if self.content:
            if isinstance(self.content, dict):
                self.yaml_dict = self.content
                return self.yaml_dict
            elif isinstance(self.content, str):
                contents = self.content

        # check if it is yaml
        try:
            if content_type == 'yaml' and contents:
                # Try to set format attributes if supported
                try:
                    self.yaml_dict.fa.set_block_style()
                except AttributeError:
                    pass

                # Try to use RoundTripLoader if supported.
                try:
                    # NOTE(review): the second positional arg is only meaningful
                    # for ruamel.yaml's safe_load signature -- confirm behavior
                    # when plain PyYAML is the fallback import.
                    self.yaml_dict = yaml.safe_load(contents, yaml.RoundTripLoader)
                except AttributeError:
                    self.yaml_dict = yaml.safe_load(contents)

                # Try to set format attributes if supported
                try:
                    self.yaml_dict.fa.set_block_style()
                except AttributeError:
                    pass

            elif content_type == 'json' and contents:
                self.yaml_dict = json.loads(contents)
        except yaml.YAMLError as err:
            # Error loading yaml or json
            raise YeditException('Problem with loading yaml file. %s' % err)

        return self.yaml_dict

    def get(self, key):
        ''' get a specified key'''
        try:
            entry = Yedit.get_entry(self.yaml_dict, key, self.separator)
        except KeyError:
            entry = None

        return entry

    def pop(self, path, key_or_item):
        ''' remove a key, value pair from a dict or an item for a list'''
        try:
            entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
        except KeyError:
            entry = None

        if entry is None:
            return (False, self.yaml_dict)

        if isinstance(entry, dict):
            # AUDIT:maybe-no-member makes sense due to fuzzy types
            # pylint: disable=maybe-no-member
            if key_or_item in entry:
                entry.pop(key_or_item)
                return (True, self.yaml_dict)
            return (False, self.yaml_dict)

        elif isinstance(entry, list):
            # AUDIT:maybe-no-member makes sense due to fuzzy types
            # pylint: disable=maybe-no-member
            ind = None
            try:
                ind = entry.index(key_or_item)
            except ValueError:
                return (False, self.yaml_dict)

            entry.pop(ind)
            return (True, self.yaml_dict)

        return (False, self.yaml_dict)

    def delete(self, path):
        ''' remove path from a dict'''
        try:
            entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
        except KeyError:
            entry = None

        if entry is None:
            return (False, self.yaml_dict)

        result = Yedit.remove_entry(self.yaml_dict, path, self.separator)
        if not result:
            return (False, self.yaml_dict)

        return (True, self.yaml_dict)

    def exists(self, path, value):
        ''' check if value exists at path'''
        try:
            entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
        except KeyError:
            entry = None

        if isinstance(entry, list):
            if value in entry:
                return True
            return False

        elif isinstance(entry, dict):
            if isinstance(value, dict):
                rval = False
                for key, val in value.items():
                    if entry[key] != val:
                        rval = False
                        break
                else:
                    rval = True
                return rval

            return value in entry

        return entry == value

    def append(self, path, value):
        '''append value to a list'''
        try:
            entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
        except KeyError:
            entry = None

        if entry is None:
            self.put(path, [])
            entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
        if not isinstance(entry, list):
            return (False, self.yaml_dict)

        # AUDIT:maybe-no-member makes sense due to loading data from
        # a serialized format.
        # pylint: disable=maybe-no-member
        entry.append(value)
        return (True, self.yaml_dict)

    # pylint: disable=too-many-arguments
    def update(self, path, value, index=None, curr_value=None):
        ''' put path, value into a dict '''
        try:
            entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
        except KeyError:
            entry = None

        if isinstance(entry, dict):
            # AUDIT:maybe-no-member makes sense due to fuzzy types
            # pylint: disable=maybe-no-member
            if not isinstance(value, dict):
                raise YeditException('Cannot replace key, value entry in ' +
                                     'dict with non-dict type. value=[%s] [%s]' % (value, type(value)))  # noqa: E501

            entry.update(value)
            return (True, self.yaml_dict)

        elif isinstance(entry, list):
            # AUDIT:maybe-no-member makes sense due to fuzzy types
            # pylint: disable=maybe-no-member
            ind = None
            if curr_value:
                try:
                    ind = entry.index(curr_value)
                except ValueError:
                    return (False, self.yaml_dict)

            elif index is not None:
                ind = index

            if ind is not None and entry[ind] != value:
                entry[ind] = value
                return (True, self.yaml_dict)

            # see if it exists in the list
            try:
                ind = entry.index(value)
            except ValueError:
                # doesn't exist, append it
                entry.append(value)
                return (True, self.yaml_dict)

            # already exists, return
            if ind is not None:
                return (False, self.yaml_dict)
        return (False, self.yaml_dict)

    def put(self, path, value):
        ''' put path, value into a dict '''
        try:
            entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
        except KeyError:
            entry = None

        if entry == value:
            return (False, self.yaml_dict)

        # deepcopy didn't work
        # Try to use ruamel.yaml and fallback to pyyaml
        try:
            tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict,
                                                      default_flow_style=False),
                                 yaml.RoundTripLoader)
        except AttributeError:
            tmp_copy = copy.deepcopy(self.yaml_dict)

        # set the format attributes if available
        try:
            tmp_copy.fa.set_block_style()
        except AttributeError:
            pass

        result = Yedit.add_entry(tmp_copy, path, value, self.separator)
        if not result:
            return (False, self.yaml_dict)

        self.yaml_dict = tmp_copy

        return (True, self.yaml_dict)

    def create(self, path, value):
        ''' create a yaml file '''
        if not self.file_exists():
            # deepcopy didn't work
            # Try to use ruamel.yaml and fallback to pyyaml
            try:
                tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict,
                                                          default_flow_style=False),
                                     yaml.RoundTripLoader)
            except AttributeError:
                tmp_copy = copy.deepcopy(self.yaml_dict)

            # set the format attributes if available
            try:
                tmp_copy.fa.set_block_style()
            except AttributeError:
                pass

            result = Yedit.add_entry(tmp_copy, path, value, self.separator)
            if result:
                self.yaml_dict = tmp_copy
                return (True, self.yaml_dict)

        return (False, self.yaml_dict)

    @staticmethod
    def get_curr_value(invalue, val_type):
        '''return the current value'''
        if invalue is None:
            return None

        curr_value = invalue
        if val_type == 'yaml':
            curr_value = yaml.load(invalue)
        elif val_type == 'json':
            curr_value = json.loads(invalue)

        return curr_value

    @staticmethod
    def parse_value(inc_value, vtype=''):
        '''determine value type passed'''
        true_bools = ['y', 'Y', 'yes', 'Yes', 'YES', 'true', 'True', 'TRUE',
                      'on', 'On', 'ON', ]
        false_bools = ['n', 'N', 'no', 'No', 'NO', 'false', 'False', 'FALSE',
                       'off', 'Off', 'OFF']

        # It came in as a string but you didn't specify value_type as string
        # we will convert to bool if it matches any of the above cases
        if isinstance(inc_value, str) and 'bool' in vtype:
            if inc_value not in true_bools and inc_value not in false_bools:
                raise YeditException('Not a boolean type. str=[%s] vtype=[%s]'
                                     % (inc_value, vtype))
        elif isinstance(inc_value, bool) and 'str' in vtype:
            inc_value = str(inc_value)

        # If vtype is not str then go ahead and attempt to yaml load it.
        if isinstance(inc_value, str) and 'str' not in vtype:
            try:
                inc_value = yaml.load(inc_value)
            except Exception:
                raise YeditException('Could not determine type of incoming ' +
                                     'value. value=[%s] vtype=[%s]'
                                     % (type(inc_value), vtype))

        return inc_value

    # pylint: disable=too-many-return-statements,too-many-branches
    @staticmethod
    def run_ansible(module):
        '''perform the idempotent crud operations'''
        yamlfile = Yedit(filename=module.params['src'],
                         backup=module.params['backup'],
                         separator=module.params['separator'])

        if module.params['src']:
            rval = yamlfile.load()

            if yamlfile.yaml_dict is None and \
               module.params['state'] != 'present':
                return {'failed': True,
                        'msg': 'Error opening file [%s]. Verify that the ' +
                               'file exists, that it is has correct' +
                               ' permissions, and is valid yaml.'}

        if module.params['state'] == 'list':
            if module.params['content']:
                content = Yedit.parse_value(module.params['content'],
                                            module.params['content_type'])
                yamlfile.yaml_dict = content

            if module.params['key']:
                rval = yamlfile.get(module.params['key']) or {}

            return {'changed': False, 'result': rval, 'state': "list"}

        elif module.params['state'] == 'absent':
            if module.params['content']:
                content = Yedit.parse_value(module.params['content'],
                                            module.params['content_type'])
                yamlfile.yaml_dict = content

            if module.params['update']:
                rval = yamlfile.pop(module.params['key'],
                                    module.params['value'])
            else:
                rval = yamlfile.delete(module.params['key'])

            if rval[0] and module.params['src']:
                yamlfile.write()

            return {'changed': rval[0], 'result': rval[1], 'state': "absent"}

        elif module.params['state'] == 'present':
            # check if content is different than what is in the file
            if module.params['content']:
                content = Yedit.parse_value(module.params['content'],
                                            module.params['content_type'])

                # We had no edits to make and the contents are the same
                if yamlfile.yaml_dict == content and \
                   module.params['value'] is None:
                    return {'changed': False,
                            'result': yamlfile.yaml_dict,
                            'state': "present"}

                yamlfile.yaml_dict = content

            # we were passed a value; parse it
            if module.params['value']:
                value = Yedit.parse_value(module.params['value'],
                                          module.params['value_type'])
                key = module.params['key']
                if module.params['update']:
                    # pylint: disable=line-too-long
                    curr_value = Yedit.get_curr_value(Yedit.parse_value(module.params['curr_value']),  # noqa: E501
                                                      module.params['curr_value_format'])  # noqa: E501

                    rval = yamlfile.update(key, value, module.params['index'], curr_value)  # noqa: E501

                elif module.params['append']:
                    rval = yamlfile.append(key, value)
                else:
                    rval = yamlfile.put(key, value)

                if rval[0] and module.params['src']:
                    yamlfile.write()

                return {'changed': rval[0],
                        'result': rval[1], 'state': "present"}

            # no edits to make
            if module.params['src']:
                # pylint: disable=redefined-variable-type
                rval = yamlfile.write()
                return {'changed': rval[0],
                        'result': rval[1],
                        'state': "present"}

        return {'failed': True, 'msg': 'Unkown state passed'}
# -*- -*- -*- End included fragment: ../../lib_utils/src/class/yedit.py -*- -*- -*-
# -*- -*- -*- Begin included fragment: lib/base.py -*- -*- -*-
# pylint: disable=too-many-lines
# noqa: E301,E302,E303,T001
class OpenShiftCLIError(Exception):
    '''Raised when an openshift CLI operation fails.'''
    pass
ADDITIONAL_PATH_LOOKUPS = ['/usr/local/bin', os.path.expanduser('~/bin')]
def locate_oc_binary():
''' Find and return oc binary file '''
# https://github.com/openshift/openshift-ansible/issues/3410
# oc can be in /usr/local/bin in some cases, but that may not
# be in $PATH due to ansible/sudo
paths = os.environ.get("PATH", os.defpath).split(os.pathsep) + ADDITIONAL_PATH_LOOKUPS
oc_binary = 'oc'
# Use shutil.which if it is available, otherwise fallback to a naive path search
try:
which_result = shutil.which(oc_binary, path=os.pathsep.join(paths))
if which_result is not None:
oc_binary = which_result
except AttributeError:
for path in paths:
if os.path.exists(os.path.join(path, oc_binary)):
oc_binary = os.path.join(path, oc_binary)
break
return oc_binary
# pylint: disable=too-few-public-methods
class OpenShiftCLI(object):
''' Class to wrap the command line tools '''
def __init__(self, namespace, kubeconfig='/etc/origin/master/admin.kubeconfig',
             verbose=False, all_namespaces=False):
    '''Constructor: record CLI settings and resolve the oc binary.'''
    self.namespace = namespace
    self.all_namespaces = all_namespaces
    self.verbose = verbose
    # Work against a disposable copy of the kubeconfig file.
    self.kubeconfig = Utils.create_tmpfile_copy(kubeconfig)
    self.oc_binary = locate_oc_binary()
# Pylint allows only 5 arguments to be passed.
# pylint: disable=too-many-arguments
def _replace_content(self, resource, rname, content, force=False, sep='.'):
    '''Apply *content* key/value edits to the live object and `oc replace` it.'''
    res = self._get(resource, rname)
    if not res['results']:
        return res

    fname = Utils.create_tmpfile(rname + '-')
    yed = Yedit(fname, res['results'][0], separator=sep)
    changes = [yed.put(key, value) for key, value in content.items()]

    if any(change[0] for change in changes):
        yed.write()
        atexit.register(Utils.cleanup, [fname])
        return self._replace(fname, force)

    # Nothing differed from the live object; report no-op success.
    return {'returncode': 0, 'updated': False}
def _replace(self, fname, force=False):
    '''Run `oc replace -f fname`, optionally with --force.'''
    # Strip metadata.resourceVersion first: replacing with a stale
    # version can conflict when the object changed server-side.
    yed = Yedit(fname)
    if yed.delete('metadata.resourceVersion')[0]:
        yed.write()

    cmd = ['replace', '-f', fname] + (['--force'] if force else [])
    return self.openshift_cmd(cmd)
def _create_from_content(self, rname, content):
    '''Dump *content* to a temp file and `oc create` it.'''
    fname = Utils.create_tmpfile(rname + '-')
    Yedit(fname, content=content).write()
    atexit.register(Utils.cleanup, [fname])
    return self._create(fname)
def _create(self, fname):
    '''Run `oc create -f fname`.'''
    return self.openshift_cmd(['create', '-f', fname])
def _delete(self, resource, rname, selector=None):
    '''Run `oc delete` on the named resource, optionally with a selector.'''
    cmd = ['delete', resource, rname] + \
          (['--selector=%s' % selector] if selector else [])
    return self.openshift_cmd(cmd)
def _process(self, template_name, create=False, params=None, template_data=None): # noqa: E501
'''process a template
template_name: the name of the template to process
create: whether to send to oc create after processing
params: the parameters for the template
template_data: the incoming template's data; instead of a file
'''
cmd = ['process']
if template_data:
cmd.extend(['-f', '-'])
else:
cmd.append(template_name)
if params:
param_str = ["%s=%s" % (key, value) for key, value in params.items()]
cmd.append('-v')
cmd.extend(param_str)
results = self.openshift_cmd(cmd, output=True, input_data=template_data)
if results['returncode'] != 0 or not create:
return results
fname = Utils.create_tmpfile(template_name + '-')
yed = Yedit(fname, results['results'])
yed.write()
atexit.register(Utils.cleanup, [fname])
return self.openshift_cmd(['create', '-f', fname])
def _get(self, resource, rname=None, selector=None):
'''return a resource by name '''
cmd = ['get', resource]
if selector:
cmd.append('--selector=%s' % selector)
elif rname:
cmd.append(rname)
cmd.extend(['-o', 'json'])
rval = self.openshift_cmd(cmd, output=True)
# Ensure results are retuned in an array
if 'items' in rval:
rval['results'] = rval['items']
elif not isinstance(rval['results'], list):
rval['results'] = [rval['results']]
return rval
def _schedulable(self, node=None, selector=None, schedulable=True):
''' perform oadm manage-node scheduable '''
cmd = ['manage-node']
if node:
cmd.extend(node)
else:
cmd.append('--selector=%s' % selector)
cmd.append('--schedulable=%s' % schedulable)
return self.openshift_cmd(cmd, oadm=True, output=True, output_type='raw') # noqa: E501
def _list_pods(self, node=None, selector=None, pod_selector=None):
''' perform oadm list pods
node: the node in which to list pods
selector: the label selector filter if provided
pod_selector: the pod selector filter if provided
'''
cmd = ['manage-node']
if node:
cmd.extend(node)
else:
cmd.append('--selector=%s' % selector)
if pod_selector:
cmd.append('--pod-selector=%s' % pod_selector)
cmd.extend(['--list-pods', '-o', 'json'])
return self.openshift_cmd(cmd, oadm=True, output=True, output_type='raw')
# pylint: disable=too-many-arguments
def _evacuate(self, node=None, selector=None, pod_selector=None, dry_run=False, grace_period=None, force=False):
''' perform oadm manage-node evacuate '''
cmd = ['manage-node']
if node:
cmd.extend(node)
else:
cmd.append('--selector=%s' % selector)
if dry_run:
cmd.append('--dry-run')
if pod_selector:
cmd.append('--pod-selector=%s' % pod_selector)
if grace_period:
cmd.append('--grace-period=%s' % int(grace_period))
if force:
cmd.append('--force')
cmd.append('--evacuate')
return self.openshift_cmd(cmd, oadm=True, output=True, output_type='raw')
def _version(self):
''' return the openshift version'''
return self.openshift_cmd(['version'], output=True, output_type='raw')
def _import_image(self, url=None, name=None, tag=None):
''' perform image import '''
cmd = ['import-image']
image = '{0}'.format(name)
if tag:
image += ':{0}'.format(tag)
cmd.append(image)
if url:
cmd.append('--from={0}/{1}'.format(url, image))
cmd.append('-n{0}'.format(self.namespace))
cmd.append('--confirm')
return self.openshift_cmd(cmd)
def _run(self, cmds, input_data):
''' Actually executes the command. This makes mocking easier. '''
curr_env = os.environ.copy()
curr_env.update({'KUBECONFIG': self.kubeconfig})
proc = subprocess.Popen(cmds,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
env=curr_env)
stdout, stderr = proc.communicate(input_data)
return proc.returncode, stdout.decode('utf-8'), stderr.decode('utf-8')
# pylint: disable=too-many-arguments,too-many-branches
def openshift_cmd(self, cmd, oadm=False, output=False, output_type='json', input_data=None):
'''Base command for oc '''
cmds = [self.oc_binary]
if oadm:
cmds.append('adm')
cmds.extend(cmd)
if self.all_namespaces:
cmds.extend(['--all-namespaces'])
elif self.namespace is not None and self.namespace.lower() not in ['none', 'emtpy']: # E501
cmds.extend(['-n', self.namespace])
rval = {}
results = ''
err = None
if self.verbose:
print(' '.join(cmds))
try:
returncode, stdout, stderr = self._run(cmds, input_data)
except OSError as ex:
returncode, stdout, stderr = 1, '', 'Failed to execute {}: {}'.format(subprocess.list2cmdline(cmds), ex)
rval = {"returncode": returncode,
"results": results,
"cmd": ' '.join(cmds)}
if returncode == 0:
if output:
if output_type == 'json':
try:
rval['results'] = json.loads(stdout)
except ValueError as err:
if "No JSON object could be decoded" in err.args:
err = err.args
elif output_type == 'raw':
rval['results'] = stdout
if self.verbose:
print("STDOUT: {0}".format(stdout))
print("STDERR: {0}".format(stderr))
if err:
rval.update({"err": err,
"stderr": stderr,
"stdout": stdout,
"cmd": cmds})
else:
rval.update({"stderr": stderr,
"stdout": stdout,
"results": {}})
return rval
class Utils(object):
    ''' utilities for openshiftcli modules '''

    @staticmethod
    def _write(filename, contents):
        ''' Actually write the file contents to disk. This helps with mocking. '''
        with open(filename, 'w') as sfd:
            sfd.write(contents)

    @staticmethod
    def create_tmp_file_from_contents(rname, data, ftype='yaml'):
        ''' create a file in tmp with name and contents'''

        tmp = Utils.create_tmpfile(prefix=rname)

        if ftype == 'yaml':
            # AUDIT:no-member makes sense here due to ruamel.YAML/PyYAML usage
            # pylint: disable=no-member
            if hasattr(yaml, 'RoundTripDumper'):
                Utils._write(tmp, yaml.dump(data, Dumper=yaml.RoundTripDumper))
            else:
                Utils._write(tmp, yaml.safe_dump(data, default_flow_style=False))

        elif ftype == 'json':
            Utils._write(tmp, json.dumps(data))
        else:
            Utils._write(tmp, data)

        # Register cleanup when module is done
        atexit.register(Utils.cleanup, [tmp])
        return tmp

    @staticmethod
    def create_tmpfile_copy(inc_file):
        '''create a temporary copy of a file'''
        tmpfile = Utils.create_tmpfile('lib_openshift-')
        # Close the source handle deterministically with a context manager;
        # the original leaked the open file object until garbage collection.
        with open(inc_file) as handle:
            Utils._write(tmpfile, handle.read())
        # Cleanup the tmpfile
        atexit.register(Utils.cleanup, [tmpfile])
        return tmpfile

    @staticmethod
    def create_tmpfile(prefix='tmp'):
        ''' Generates and returns a temporary file name '''
        # delete=False: the caller owns the file; cleanup happens via
        # Utils.cleanup registered with atexit.
        with tempfile.NamedTemporaryFile(prefix=prefix, delete=False) as tmp:
            return tmp.name

    @staticmethod
    def create_tmp_files_from_contents(content, content_type=None):
        '''Turn an array of dict: filename, content into a files array'''
        if not isinstance(content, list):
            content = [content]
        files = []
        for item in content:
            path = Utils.create_tmp_file_from_contents(item['path'] + '-',
                                                       item['data'],
                                                       ftype=content_type)
            files.append({'name': os.path.basename(item['path']),
                          'path': path})
        return files

    @staticmethod
    def cleanup(files):
        '''Clean up on exit '''
        for sfile in files:
            if os.path.exists(sfile):
                if os.path.isdir(sfile):
                    shutil.rmtree(sfile)
                elif os.path.isfile(sfile):
                    os.remove(sfile)

    @staticmethod
    def exists(results, _name):
        ''' Check to see if the results include the name '''
        if not results:
            return False

        if Utils.find_result(results, _name):
            return True

        return False

    @staticmethod
    def find_result(results, _name):
        ''' Find the specified result by name'''
        rval = None
        for result in results:
            if 'metadata' in result and result['metadata']['name'] == _name:
                rval = result
                break

        return rval

    @staticmethod
    def get_resource_file(sfile, sfile_type='yaml'):
        ''' return the service file '''
        contents = None
        with open(sfile) as sfd:
            contents = sfd.read()

        if sfile_type == 'yaml':
            # AUDIT:no-member makes sense here due to ruamel.YAML/PyYAML usage
            # pylint: disable=no-member
            if hasattr(yaml, 'RoundTripLoader'):
                contents = yaml.load(contents, yaml.RoundTripLoader)
            else:
                contents = yaml.safe_load(contents)
        elif sfile_type == 'json':
            contents = json.loads(contents)

        return contents

    @staticmethod
    def filter_versions(stdout):
        ''' filter the oc version output '''

        version_dict = {}
        version_search = ['oc', 'openshift', 'kubernetes']

        for line in stdout.strip().split('\n'):
            for term in version_search:
                if not line:
                    continue
                if line.startswith(term):
                    # The version is the last whitespace-separated token.
                    version_dict[term] = line.split()[-1]

        # horrible hack to get openshift version in Openshift 3.2
        # By default "oc version in 3.2 does not return an "openshift" version
        if "openshift" not in version_dict:
            version_dict["openshift"] = version_dict["oc"]

        return version_dict

    @staticmethod
    def add_custom_versions(versions):
        ''' create custom versions strings '''

        versions_dict = {}

        for tech, version in versions.items():
            # clean up "-" from version
            if "-" in version:
                version = version.split("-")[0]

            if version.startswith('v'):
                versions_dict[tech + '_numeric'] = version[1:].split('+')[0]
                # "v3.3.0.33" is what we have, we want "3.3"
                versions_dict[tech + '_short'] = version[1:4]

        return versions_dict

    @staticmethod
    def openshift_installed():
        ''' check if openshift is installed '''
        # Local import: yum is only available on RPM-based hosts.
        import yum

        yum_base = yum.YumBase()
        if yum_base.rpmdb.searchNevra(name='atomic-openshift'):
            return True

        return False

    # Disabling too-many-branches.  This is a yaml dictionary comparison function
    # pylint: disable=too-many-branches,too-many-return-statements,too-many-statements
    @staticmethod
    def check_def_equal(user_def, result_def, skip_keys=None, debug=False):
        ''' Given a user defined definition, compare it with the results given back by our query.  '''

        # Currently these values are autogenerated and we do not need to check them
        skip = ['metadata', 'status']
        if skip_keys:
            skip.extend(skip_keys)

        for key, value in result_def.items():
            if key in skip:
                continue

            # Both are lists
            if isinstance(value, list):
                if key not in user_def:
                    if debug:
                        print('User data does not have key [%s]' % key)
                        print('User data: %s' % user_def)
                    return False

                if not isinstance(user_def[key], list):
                    if debug:
                        print('user_def[key] is not a list key=[%s] user_def[key]=%s' % (key, user_def[key]))
                    return False

                if len(user_def[key]) != len(value):
                    if debug:
                        print("List lengths are not equal.")
                        print("key=[%s]: user_def[%s] != value[%s]" % (key, len(user_def[key]), len(value)))
                        print("user_def: %s" % user_def[key])
                        print("value: %s" % value)
                    return False

                for values in zip(user_def[key], value):
                    if isinstance(values[0], dict) and isinstance(values[1], dict):
                        if debug:
                            print('sending list - list')
                            print(type(values[0]))
                            print(type(values[1]))
                        result = Utils.check_def_equal(values[0], values[1], skip_keys=skip_keys, debug=debug)
                        if not result:
                            print('list compare returned false')
                            return False

                    elif value != user_def[key]:
                        if debug:
                            print('value should be identical')
                            print(user_def[key])
                            print(value)
                        return False

            # recurse on a dictionary
            elif isinstance(value, dict):
                if key not in user_def:
                    if debug:
                        print("user_def does not have key [%s]" % key)
                    return False
                if not isinstance(user_def[key], dict):
                    if debug:
                        print("dict returned false: not instance of dict")
                    return False

                # before passing ensure keys match
                api_values = set(value.keys()) - set(skip)
                user_values = set(user_def[key].keys()) - set(skip)
                if api_values != user_values:
                    if debug:
                        print("keys are not equal in dict")
                        print(user_values)
                        print(api_values)
                    return False

                result = Utils.check_def_equal(user_def[key], value, skip_keys=skip_keys, debug=debug)
                if not result:
                    if debug:
                        print("dict returned false")
                        print(result)
                    return False

            # Verify each key, value pair is the same
            else:
                if key not in user_def or value != user_def[key]:
                    if debug:
                        print("value not equal; user_def does not have key")
                        print(key)
                        print(value)
                        if key in user_def:
                            print(user_def[key])
                    return False

        if debug:
            print('returning true')
        return True
class OpenShiftCLIConfig(object):
    '''Generic holder for CLI option metadata.'''

    def __init__(self, rname, namespace, kubeconfig, options):
        self.kubeconfig = kubeconfig
        self.name = rname
        self.namespace = namespace
        self._options = options

    @property
    def config_options(self):
        '''Expose the raw option dictionary.'''
        return self._options

    def to_option_list(self):
        '''Alias kept for callers that expect the older method name.'''
        return self.stringify()

    def stringify(self):
        '''Render the option hash as a list of --key=value CLI params.'''
        params = []
        for name, spec in self.config_options.items():
            if not spec['include']:
                continue
            # Falsy values are skipped unless they are ints (0 is valid).
            if spec['value'] or isinstance(spec['value'], int):
                params.append('--%s=%s' % (name.replace('_', '-'), spec['value']))
        return params
# -*- -*- -*- End included fragment: lib/base.py -*- -*- -*-
# -*- -*- -*- Begin included fragment: lib/serviceaccount.py -*- -*- -*-
class ServiceAccountConfig(object):
    '''Service account config class

       Stores the options and materialises a default v1 ServiceAccount
       definition in ``self.data``.
    '''

    # pylint: disable=too-many-arguments
    def __init__(self, sname, namespace, kubeconfig, secrets=None, image_pull_secrets=None):
        self.name = sname
        self.kubeconfig = kubeconfig
        self.namespace = namespace
        self.secrets = secrets or []
        self.image_pull_secrets = image_pull_secrets or []
        self.data = {}
        self.create_dict()

    def create_dict(self):
        '''Populate self.data with a properly structured ServiceAccount.'''
        self.data['apiVersion'] = 'v1'
        self.data['kind'] = 'ServiceAccount'
        self.data['metadata'] = {'name': self.name, 'namespace': self.namespace}
        # Each secret name becomes an object reference: {"name": <secret>}.
        self.data['secrets'] = [{'name': sec} for sec in self.secrets]
        self.data['imagePullSecrets'] = [{'name': sec} for sec in self.image_pull_secrets]
class ServiceAccount(Yedit):
    ''' Class to wrap the oc command line tools '''
    image_pull_secrets_path = "imagePullSecrets"
    secrets_path = "secrets"

    def __init__(self, content):
        '''ServiceAccount constructor'''
        super(ServiceAccount, self).__init__(content=content)
        # Lazily-populated caches for the two secret lists.
        self._secrets = None
        self._image_pull_secrets = None

    @property
    def image_pull_secrets(self):
        ''' property for image_pull_secrets '''
        if self._image_pull_secrets is None:
            self._image_pull_secrets = self.get(ServiceAccount.image_pull_secrets_path) or []
        return self._image_pull_secrets

    @image_pull_secrets.setter
    def image_pull_secrets(self, secrets):
        ''' setter for image_pull_secrets '''
        self._image_pull_secrets = secrets

    @property
    def secrets(self):
        ''' property for secrets '''
        if not self._secrets:
            self._secrets = self.get(ServiceAccount.secrets_path) or []
        return self._secrets

    @secrets.setter
    def secrets(self, secrets):
        ''' setter for secrets '''
        self._secrets = secrets

    def delete_secret(self, inc_secret):
        ''' remove a secret; returns True when something was removed '''
        remove_idx = None
        for idx, sec in enumerate(self.secrets):
            if sec['name'] == inc_secret:
                remove_idx = idx
                break

        # Fixed: compare against None explicitly.  The original truthiness
        # test ('if remove_idx:') silently failed to delete the secret when
        # it was the first entry (index 0).
        if remove_idx is not None:
            del self.secrets[remove_idx]
            return True

        return False

    def delete_image_pull_secret(self, inc_secret):
        ''' remove a image_pull_secret; returns True when something was removed '''
        remove_idx = None
        for idx, sec in enumerate(self.image_pull_secrets):
            if sec['name'] == inc_secret:
                remove_idx = idx
                break

        # Same index-0 fix as delete_secret.
        if remove_idx is not None:
            del self.image_pull_secrets[remove_idx]
            return True

        return False

    def find_secret(self, inc_secret):
        '''find secret by name; returns the entry dict or None'''
        for secret in self.secrets:
            if secret['name'] == inc_secret:
                return secret

        return None

    def find_image_pull_secret(self, inc_secret):
        '''find image pull secret by name; returns the entry dict or None'''
        for secret in self.image_pull_secrets:
            if secret['name'] == inc_secret:
                return secret

        return None

    def add_secret(self, inc_secret):
        '''add secret'''
        if self.secrets:
            self.secrets.append({"name": inc_secret})  # pylint: disable=no-member
        else:
            # No list yet: create it through Yedit so the underlying
            # document gains the 'secrets' key.
            self.put(ServiceAccount.secrets_path, [{"name": inc_secret}])

    def add_image_pull_secret(self, inc_secret):
        '''add image_pull_secret'''
        if self.image_pull_secrets:
            self.image_pull_secrets.append({"name": inc_secret})  # pylint: disable=no-member
        else:
            self.put(ServiceAccount.image_pull_secrets_path, [{"name": inc_secret}])
# -*- -*- -*- End included fragment: lib/serviceaccount.py -*- -*- -*-
# -*- -*- -*- Begin included fragment: class/oc_serviceaccount_secret.py -*- -*- -*-
class OCServiceAccountSecret(OpenShiftCLI):
    ''' Class to wrap the oc command line tools '''
    kind = 'sa'

    def __init__(self, config, verbose=False):
        ''' Constructor for OpenshiftOC '''
        super(OCServiceAccountSecret, self).__init__(config.namespace, kubeconfig=config.kubeconfig, verbose=verbose)
        self.config = config
        self.verbose = verbose
        # Lazily-loaded ServiceAccount wrapper; populated on first access.
        self._service_account = None

    @property
    def service_account(self):
        ''' Property for the service account '''
        if not self._service_account:
            self.get()
        return self._service_account

    @service_account.setter
    def service_account(self, data):
        ''' setter for the service account '''
        self._service_account = data

    def exists(self, in_secret):
        ''' verifies if secret exists in the service account '''
        result = self.service_account.find_secret(in_secret)
        if not result:
            return False
        return True

    def get(self):
        ''' get the service account definition from the master '''
        sao = self._get(OCServiceAccountSecret.kind, self.config.name)
        if sao['returncode'] == 0:
            self.service_account = ServiceAccount(content=sao['results'][0])
            sao['results'] = self.service_account.get('secrets')
        return sao

    def delete(self):
        ''' delete secrets '''
        modified = []
        for rem_secret in self.config.secrets:
            modified.append(self.service_account.delete_secret(rem_secret))

        if any(modified):
            return self._replace_content(OCServiceAccountSecret.kind, self.config.name, self.service_account.yaml_dict)

        return {'returncode': 0, 'changed': False}

    def put(self):
        ''' place secrets into sa '''
        modified = False
        for add_secret in self.config.secrets:
            if not self.service_account.find_secret(add_secret):
                self.service_account.add_secret(add_secret)
                modified = True

        if modified:
            return self._replace_content(OCServiceAccountSecret.kind, self.config.name, self.service_account.yaml_dict)

        return {'returncode': 0, 'changed': False}

    @staticmethod
    # pylint: disable=too-many-return-statements,too-many-branches
    # TODO: This function should be refactored into its individual parts.
    def run_ansible(params, check_mode):
        ''' run the ansible idempotent code '''

        sconfig = ServiceAccountConfig(params['service_account'],
                                       params['namespace'],
                                       params['kubeconfig'],
                                       [params['secret']],
                                       None)

        oc_sa_sec = OCServiceAccountSecret(sconfig, verbose=params['debug'])

        state = params['state']

        api_rval = oc_sa_sec.get()

        #####
        # Get
        #####
        if state == 'list':
            return {'changed': False, 'results': api_rval['results'], 'state': "list"}

        ########
        # Delete
        ########
        if state == 'absent':
            if oc_sa_sec.exists(params['secret']):

                if check_mode:
                    # Fixed: the original message literal was broken by a
                    # misplaced quote and rendered a stray '" + "' fragment
                    # in the middle of the user-facing text.
                    return {'changed': True, 'msg': 'Would have removed the secret from the service account.'}

                api_rval = oc_sa_sec.delete()

                return {'changed': True, 'results': api_rval, 'state': "absent"}

            return {'changed': False, 'state': "absent"}

        if state == 'present':
            ########
            # Create
            ########
            if not oc_sa_sec.exists(params['secret']):

                if check_mode:
                    return {'changed': True, 'msg': 'Would have added the ' +
                                                    'secret to the service account.'}

                # Create it here
                api_rval = oc_sa_sec.put()
                if api_rval['returncode'] != 0:
                    return {'failed': True, 'msg': api_rval}

                # return the created object
                api_rval = oc_sa_sec.get()

                if api_rval['returncode'] != 0:
                    return {'failed': True, 'msg': api_rval}

                return {'changed': True, 'results': api_rval, 'state': "present"}

            return {'changed': False, 'results': api_rval, 'state': "present"}

        return {'failed': True,
                'changed': False,
                'msg': 'Unknown state passed. %s' % state,
                'state': 'unknown'}
# -*- -*- -*- End included fragment: class/oc_serviceaccount_secret.py -*- -*- -*-
# -*- -*- -*- Begin included fragment: ansible/oc_serviceaccount_secret.py -*- -*- -*-
def main():
    '''
    ansible oc module to manage service account secrets.
    '''

    module = AnsibleModule(
        argument_spec=dict(
            kubeconfig=dict(default='/etc/origin/master/admin.kubeconfig', type='str'),
            # 'list' reports the current secrets without changing anything.
            state=dict(default='present', type='str',
                       choices=['present', 'absent', 'list']),
            debug=dict(default=False, type='bool'),
            namespace=dict(default=None, required=True, type='str'),
            secret=dict(default=None, type='str'),
            service_account=dict(required=True, type='str'),
        ),
        supports_check_mode=True,
    )

    # All idempotency logic lives in the class helper; this function is
    # only Ansible plumbing.
    rval = OCServiceAccountSecret.run_ansible(module.params, module.check_mode)
    if 'failed' in rval:
        module.fail_json(**rval)

    module.exit_json(**rval)


if __name__ == '__main__':
    main()
# -*- -*- -*- End included fragment: ansible/oc_serviceaccount_secret.py -*- -*- -*-
|
C00kiie/Youtube-Mp3-telegram-bot
|
refs/heads/master
|
youtube_dl/extractor/polskieradio.py
|
50
|
# coding: utf-8
from __future__ import unicode_literals
import itertools
import re
from .common import InfoExtractor
from ..compat import (
compat_str,
compat_urllib_parse_unquote,
compat_urlparse
)
from ..utils import (
extract_attributes,
int_or_none,
strip_or_none,
unified_timestamp,
)
class PolskieRadioIE(InfoExtractor):
    '''Extracts the audio clips embedded in a polskieradio.pl article.'''
    _VALID_URL = r'https?://(?:www\.)?polskieradio\.pl/\d+/\d+/Artykul/(?P<id>[0-9]+)'
    _TESTS = [{
        'url': 'http://www.polskieradio.pl/7/5102/Artykul/1587943,Prof-Andrzej-Nowak-o-historii-nie-da-sie-myslec-beznamietnie',
        'info_dict': {
            'id': '1587943',
            'title': 'Prof. Andrzej Nowak: o historii nie da się myśleć beznamiętnie',
            'description': 'md5:12f954edbf3120c5e7075e17bf9fc5c5',
        },
        'playlist': [{
            'md5': '2984ee6ce9046d91fc233bc1a864a09a',
            'info_dict': {
                'id': '1540576',
                'ext': 'mp3',
                'title': 'md5:d4623290d4ac983bf924061c75c23a0d',
                'timestamp': 1456594200,
                'upload_date': '20160227',
                'duration': 2364,
                'thumbnail': r're:^https?://static\.prsa\.pl/images/.*\.jpg$'
            },
        }],
    }, {
        'url': 'http://www.polskieradio.pl/265/5217/Artykul/1635803,Euro-2016-nie-ma-miejsca-na-blad-Polacy-graja-ze-Szwajcaria-o-cwiercfinal',
        'info_dict': {
            'id': '1635803',
            'title': 'Euro 2016: nie ma miejsca na błąd. Polacy grają ze Szwajcarią o ćwierćfinał',
            'description': 'md5:01cb7d0cad58664095d72b51a1ebada2',
        },
        'playlist_mincount': 12,
    }, {
        'url': 'http://polskieradio.pl/9/305/Artykul/1632955,Bardzo-popularne-slowo-remis',
        'only_matching': True,
    }, {
        'url': 'http://www.polskieradio.pl/7/5102/Artykul/1587943',
        'only_matching': True,
    }, {
        # with mp4 video
        'url': 'http://www.polskieradio.pl/9/299/Artykul/1634903,Brexit-Leszek-Miller-swiat-sie-nie-zawali-Europa-bedzie-trwac-dalej',
        'only_matching': True,
    }]

    def _real_extract(self, url):
        playlist_id = self._match_id(url)

        webpage = self._download_webpage(url, playlist_id)

        content = self._search_regex(
            r'(?s)<div[^>]+class="\s*this-article\s*"[^>]*>(.+?)<div[^>]+class="tags"[^>]*>',
            webpage, 'content')

        timestamp = unified_timestamp(self._html_search_regex(
            r'(?s)<span[^>]+id="datetime2"[^>]*>(.+?)</span>',
            webpage, 'timestamp', fatal=False))

        thumbnail_url = self._og_search_thumbnail(webpage)

        entries = []

        media_urls = set()

        for data_media in re.findall(r'<[^>]+data-media=({[^>]+})', content):
            media = self._parse_json(data_media, playlist_id, fatal=False)
            # Fixed: _parse_json(..., fatal=False) returns None on invalid
            # JSON; guard before the .get() calls or a broken data-media
            # attribute crashed the whole extraction with AttributeError.
            if not media or not media.get('file') or not media.get('desc'):
                continue
            media_url = self._proto_relative_url(media['file'], 'http:')
            # The same clip can be embedded several times; dedupe by URL.
            if media_url in media_urls:
                continue
            media_urls.add(media_url)
            entries.append({
                'id': compat_str(media['id']),
                'url': media_url,
                'title': compat_urllib_parse_unquote(media['desc']),
                'duration': int_or_none(media.get('length')),
                'vcodec': 'none' if media.get('provider') == 'audio' else None,
                'timestamp': timestamp,
                'thumbnail': thumbnail_url
            })

        title = self._og_search_title(webpage).strip()
        description = strip_or_none(self._og_search_description(webpage))

        return self.playlist_result(entries, playlist_id, title, description)
class PolskieRadioCategoryIE(InfoExtractor):
    '''Builds a playlist out of every article in a polskieradio.pl category.'''
    _VALID_URL = r'https?://(?:www\.)?polskieradio\.pl/\d+(?:,[^/]+)?/(?P<id>\d+)'
    _TESTS = [{
        'url': 'http://www.polskieradio.pl/7/5102,HISTORIA-ZYWA',
        'info_dict': {
            'id': '5102',
            'title': 'HISTORIA ŻYWA',
        },
        'playlist_mincount': 38,
    }, {
        'url': 'http://www.polskieradio.pl/7/4807',
        'info_dict': {
            'id': '4807',
            'title': 'Vademecum 1050. rocznicy Chrztu Polski'
        },
        'playlist_mincount': 5
    }, {
        'url': 'http://www.polskieradio.pl/7/129,Sygnaly-dnia?ref=source',
        'only_matching': True
    }, {
        'url': 'http://www.polskieradio.pl/37,RedakcjaKatolicka/4143,Kierunek-Krakow',
        'info_dict': {
            'id': '4143',
            'title': 'Kierunek Kraków',
        },
        'playlist_mincount': 61
    }, {
        'url': 'http://www.polskieradio.pl/10,czworka/214,muzyka',
        'info_dict': {
            'id': '214',
            'title': 'Muzyka',
        },
        'playlist_mincount': 61
    }, {
        'url': 'http://www.polskieradio.pl/7,Jedynka/5102,HISTORIA-ZYWA',
        'only_matching': True,
    }, {
        'url': 'http://www.polskieradio.pl/8,Dwojka/196,Publicystyka',
        'only_matching': True,
    }]

    @classmethod
    def suitable(cls, url):
        # Article URLs are handled by PolskieRadioIE; this extractor only
        # takes the category pages the article extractor rejects.
        return False if PolskieRadioIE.suitable(url) else super(PolskieRadioCategoryIE, cls).suitable(url)

    def _entries(self, url, page, category_id):
        '''Yield url_result entries for every article, following pagination.'''
        content = page
        for page_num in itertools.count(2):
            # Each <article> holds one link of the form /N/N/Artykul/<id>.
            for a_entry, entry_id in re.findall(
                    r'(?s)<article[^>]+>.*?(<a[^>]+href=["\']/\d+/\d+/Artykul/(\d+)[^>]+>).*?</article>',
                    content):
                entry = extract_attributes(a_entry)
                href = entry.get('href')
                if not href:
                    continue
                yield self.url_result(
                    compat_urlparse.urljoin(url, href), PolskieRadioIE.ie_key(),
                    entry_id, entry.get('title'))
            # Follow the "next" pagination link until there is none.
            mobj = re.search(
                r'<div[^>]+class=["\']next["\'][^>]*>\s*<a[^>]+href=(["\'])(?P<url>(?:(?!\1).)+)\1',
                content)
            if not mobj:
                break
            next_url = compat_urlparse.urljoin(url, mobj.group('url'))
            content = self._download_webpage(
                next_url, category_id, 'Downloading page %s' % page_num)

    def _real_extract(self, url):
        category_id = self._match_id(url)
        webpage = self._download_webpage(url, category_id)
        # Page titles look like "<category> - <station> - polskieradio.pl".
        title = self._html_search_regex(
            r'<title>([^<]+) - [^<]+ - [^<]+</title>',
            webpage, 'title', fatal=False)
        return self.playlist_result(
            self._entries(url, webpage, category_id),
            category_id, title)
|
waseem18/oh-mainline
|
refs/heads/master
|
vendor/packages/scrapy/scrapy/conf.py
|
19
|
"""
Scrapy settings manager
See documentation in docs/topics/settings.rst
"""
import os
import cPickle as pickle
from scrapy.settings import CrawlerSettings
from scrapy.utils.conf import init_env
# Environment variable naming the active project's settings module.
ENVVAR = 'SCRAPY_SETTINGS_MODULE'

def get_project_settings():
    '''Build a CrawlerSettings object for the active Scrapy project.'''
    # Initialise the environment from scrapy.cfg when the settings module
    # env var has not been exported yet.
    if ENVVAR not in os.environ:
        project = os.environ.get('SCRAPY_PROJECT', 'default')
        init_env(project)
    settings_module_path = os.environ.get(ENVVAR, 'scrapy_settings')
    try:
        settings_module = __import__(settings_module_path, {}, {}, [''])
    except ImportError:
        settings_module = None
    settings = CrawlerSettings(settings_module)

    # XXX: remove this hack
    # Overrides can be smuggled in pickled through the environment.
    pickled_settings = os.environ.get("SCRAPY_PICKLED_SETTINGS_TO_OVERRIDE")
    settings.overrides = pickle.loads(pickled_settings) if pickled_settings else {}

    # XXX: deprecate and remove this functionality
    # Any SCRAPY_* env var becomes an override with the prefix stripped.
    for k, v in os.environ.items():
        if k.startswith('SCRAPY_'):
            settings.overrides[k[7:]] = v

    return settings

# Module-level singleton evaluated once at import time.
settings = get_project_settings()
|
saquiba2/numpy2
|
refs/heads/master
|
numpy/distutils/command/__init__.py
|
264
|
"""distutils.command
Package containing implementation of all the standard Distutils
commands.
"""
from __future__ import division, absolute_import, print_function
def test_na_writable_attributes_deletion():
a = np.NA(2)
attr = ['payload', 'dtype']
for s in attr:
assert_raises(AttributeError, delattr, a, s)
__revision__ = "$Id: __init__.py,v 1.3 2005/05/16 11:08:49 pearu Exp $"
distutils_all = [ #'build_py',
'clean',
'install_clib',
'install_scripts',
'bdist',
'bdist_dumb',
'bdist_wininst',
]
__import__('distutils.command', globals(), locals(), distutils_all)
__all__ = ['build',
'config_compiler',
'config',
'build_src',
'build_py',
'build_ext',
'build_clib',
'build_scripts',
'install',
'install_data',
'install_headers',
'install_lib',
'bdist_rpm',
'sdist',
] + distutils_all
|
Seichis/seldon-server
|
refs/heads/master
|
external/predictor/python/template/example_predict.py
|
10
|
def init(config):
    '''Perform any one-time initialisation needed by the predictor.

    config: arbitrary configuration object supplied by the caller.
    '''
    # do any initialisation needed here
    # Fixed: use the print() function form so the module also runs on
    # Python 3 (the statement form was Python-2-only syntax).
    print("initialised")


def get_predictions(client, json):
    '''Return predictions for the given request.

    client: identifier of the requesting client.
    json:   the raw request payload.

    Returns a list of 3-tuples of (score, classId, confidence).
    '''
    # take json, convert to format needed and return list of 3-tuples of (score,classId,confidence)
    return [(1.0, 1, 0.9)]
|
netsuileo/sfu-cluster-dashboard
|
refs/heads/master
|
dashboard/app/user/admin.py
|
1
|
from django.contrib import admin

from models import UserProfile

# Expose UserProfile in the Django admin using the default ModelAdmin.
admin.site.register(UserProfile)
|
CumulusNetworks/netshow-cumulus-lib
|
refs/heads/master
|
tests/test_netshowlib/test_cache.py
|
1
|
# disable docstring checking
# pylint: disable=C0111
# disable checking no-self-use
# pylint: disable=R0201
# pylint: disable=W0212
# pylint: disable=W0201
# pylint: disable=F0401
from netshowlib.cumulus import cache as cumulus_cache
import mock
from asserts import assert_equals, mock_open_str, mod_args_generator
from nose.tools import set_trace
class TestCumulusCache(object):
    '''Tests for the Cumulus-specific feature cache.'''

    def setup(self):
        # nose calls this before each test method; gives every test a
        # fresh Cache instance.
        self.cache = cumulus_cache.Cache()

    def test_new_attrs(self):
        # Each Cumulus-only feature must be registered under the
        # 'cumulus' provider name in the cache's feature list.
        assert_equals(self.cache.feature_list.get('counters'), 'cumulus')
        assert_equals(self.cache.feature_list.get('mstpd'), 'cumulus')
        assert_equals(self.cache.feature_list.get('asic'), 'cumulus')
|
sunny414/tryton-client
|
refs/heads/master
|
tryton/gui/window/view_form/view/graph_gtk/parser.py
|
1
|
#This file is part of Tryton. The COPYRIGHT file at the top level of
#this repository contains the full copyright notices and license terms.
from tryton.gui.window.view_form.view.interface import ParserInterface
import tryton.common as common
import gtk
from bar import VerticalBar, HorizontalBar
from line import Line
from pie import Pie
from tryton.config import TRYTON_ICON, CONFIG
import sys
import os
import gettext
_ = gettext.gettext
# Maps the view's "type" attribute to the widget class implementing it.
GRAPH_TYPE = {
    'vbar': VerticalBar,
    'hbar': HorizontalBar,
    'line': Line,
    'pie': Pie,
}
def save(widget, graph):
    '''Prompt for a size and file name, then export the graph as a PNG.'''
    parent = common.get_toplevel_window()
    dia = gtk.Dialog(_('Save As'), parent,
            gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT,
            (gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL, gtk.STOCK_OK, gtk.RESPONSE_OK))
    dia.set_icon(TRYTON_ICON)
    dia.set_has_separator(True)
    dia.set_default_response(gtk.RESPONSE_OK)
    dia.vbox.set_spacing(5)
    dia.vbox.set_homogeneous(False)

    # Image-size section: two spin buttons for width and height.
    title = gtk.Label('<b>' + _('Image Size') + '</b>')
    title.set_alignment(0.0, 0.5)
    title.set_use_markup(True)
    dia.vbox.pack_start(title)
    table = gtk.Table(2, 2)
    table.set_col_spacings(3)
    table.set_row_spacings(3)
    table.set_border_width(1)
    table.attach(gtk.Label(_('Width:')), 0, 1, 0, 1, yoptions=False,
            xoptions=gtk.FILL)
    spinwidth = gtk.SpinButton(gtk.Adjustment(400.0,
        0.0, sys.maxint, 1.0, 10.0))
    spinwidth.set_numeric(True)
    spinwidth.set_activates_default(True)
    table.attach(spinwidth, 1, 2, 0, 1, yoptions=False, xoptions=gtk.FILL)
    table.attach(gtk.Label(_('Height:')), 0, 1, 1, 2, yoptions=False,
            xoptions=gtk.FILL)
    spinheight = gtk.SpinButton(gtk.Adjustment(200.0,
        0.0, sys.maxint, 1.0, 10.0))
    spinheight.set_numeric(True)
    spinheight.set_activates_default(True)
    table.attach(spinheight, 1, 2, 1, 2, yoptions=False, xoptions=gtk.FILL)
    dia.vbox.pack_start(table)

    # File chooser restricted to PNG files.
    filechooser = gtk.FileChooserWidget(gtk.FILE_CHOOSER_ACTION_SAVE, None)
    filechooser.set_current_folder(CONFIG['client.default_path'])
    filter = gtk.FileFilter()
    filter.set_name(_('PNG image (*.png)'))
    filter.add_mime_type('image/png')
    filter.add_pattern('*.png')
    filechooser.add_filter(filter)
    dia.vbox.pack_start(filechooser)
    dia.show_all()

    # Keep the dialog open until the export succeeds or the user cancels.
    while True:
        response = dia.run()
        width = spinwidth.get_value_as_int()
        height = spinheight.get_value_as_int()
        filename = filechooser.get_filename()
        if filename:
            filename = filename.decode('utf-8')
            try:
                # Remember the chosen directory for the next save dialog;
                # failure to persist the config is non-fatal.
                CONFIG['client.default_path'] = \
                        os.path.dirname(filename)
                CONFIG.save()
            except IOError:
                pass
        if response == gtk.RESPONSE_OK:
            if width and height and filename:
                if not filename.endswith('.png'):
                    filename = filename + '.png'
                try:
                    graph.export_png(filename, width, height)
                    break
                except MemoryError:
                    # Export at this size exhausted memory; let the user
                    # pick a smaller size and retry.
                    common.message(_('Image size too large!'), dia,
                            gtk.MESSAGE_ERROR)
        else:
            break
    parent.present()
    dia.destroy()
    return
def button_press(widget, event, graph):
    '''Handle mouse clicks on the graph widget.

    Left click (button 1) triggers the graph's default action; right
    click (button 3) pops up a context menu offering "Save As...".
    '''
    if event.button == 1:
        graph.action()
        return None
    if event.button != 3:
        return None
    # Build the right-click context menu on the fly.
    icon = gtk.Image()
    icon.set_from_stock('tryton-save-as', gtk.ICON_SIZE_MENU)
    entry = gtk.ImageMenuItem(_('Save As...'))
    entry.set_image(icon)
    entry.connect('activate', save, graph)
    entry.show()
    menu = gtk.Menu()
    menu.append(entry)
    menu.popup(None, None, None, event.button, event.time)
    return True
class ParserGraph(ParserInterface):
    '''Builds a graph widget from a <graph> view definition.'''

    def parse(self, model, root_node, fields):
        attrs = common.node_attributes(root_node)
        self.title = attrs.get('string', 'Unknown')
        xfield = None
        yfields = []
        for node in root_node.childNodes:
            if not node.nodeType == node.ELEMENT_NODE:
                continue
            if node.localName == 'x':
                # Only the first element child of <x> is used as the
                # abscissa field definition.
                for child in node.childNodes:
                    if not child.nodeType == child.ELEMENT_NODE:
                        continue
                    xfield = common.node_attributes(child)
                    # Fall back to the field's own label when the view
                    # did not provide one.
                    if not xfield.get('string'):
                        xfield['string'] = fields[xfield['name']
                                ].attrs['string']
                    break
            elif node.localName == 'y':
                for child in node.childNodes:
                    if not child.nodeType == child.ELEMENT_NODE:
                        continue
                    yattrs = common.node_attributes(child)
                    # '#' is a pseudo-field (record count) with no entry
                    # in the fields dict, so it keeps no label.
                    if not yattrs.get('string') and yattrs['name'] != '#':
                        yattrs['string'] = fields[yattrs['name']
                                ].attrs['string']
                    yfields.append(yattrs)
        # Instantiate the widget class matching the view's "type"
        # attribute; vertical bars are the default.
        widget = GRAPH_TYPE[attrs.get('type', 'vbar')
                ](xfield, yfields, attrs, model)
        event = gtk.EventBox()
        event.add(widget)
        event.connect('button-press-event', button_press, widget)
        return event, {'root': widget}, [], '', [], None
|
huor/incubator-hawq
|
refs/heads/master
|
tools/bin/pythonSrc/pychecker-0.8.18/test_input/test35.py
|
11
|
# Test input for pychecker.  The code below is intentionally "bad": it
# exercises the checker's unused-variable and named-argument warnings, so
# the defects here must NOT be fixed.
__pychecker__ = '--no-namedargs --no-import --no-var --no-privatevar --no-moduledoc --funcdoc'
import re
# Deliberately unused module-level names (unused/private-variable checks).
_NOT_USED1 = None
NOT_USED2 = None
def x(a, b) : pass
def y():
    # Keyword arguments passed in swapped order (named-args check).
    x(b=1, a=2)
|
mglukhikh/intellij-community
|
refs/heads/master
|
python/testData/refactoring/introduceVariable/multiReference.after.py
|
83
|
# Expected-output fixture for an IDE "introduce variable" refactoring test:
# the extracted expression P['a'] is bound to a_ and reused below.
P = {'a': 0}
M = [42]
a_ = P['a']
M[a_] += 1
|
Maspear/odoo
|
refs/heads/8.0
|
addons/board/controllers.py
|
348
|
# -*- coding: utf-8 -*-
from xml.etree import ElementTree
from openerp.addons.web.controllers.main import load_actions_from_ir_values
from openerp.http import Controller, route, request
class Board(Controller):
    """JSON HTTP controller for manipulating the user's dashboard."""
    @route('/board/add_to_dashboard', type='json', auth='user')
    def add_to_dashboard(self, menu_id, action_id, context_to_save, domain, view_mode, name=''):
        """Add an action to the current user's dashboard.

        Resolves the dashboard action behind *menu_id*, parses the board
        form view's arch, inserts a new <action> element at the top of the
        first column, and saves the modified arch as an ir.ui.view.custom
        record for the current user.  Returns the result of that create
        call, or False when no suitable dashboard/column was found.
        """
        # FIXME move this method to board.board model
        dashboard_action = load_actions_from_ir_values('action', 'tree_but_open',
                                                       [('ir.ui.menu', menu_id)], False)
        if dashboard_action:
            action = dashboard_action[0][2]
            if action['res_model'] == 'board.board' and action['views'][0][1] == 'form':
                # Maybe should check the content instead of model board.board ?
                view_id = action['views'][0][0]
                board = request.session.model(action['res_model']).fields_view_get(view_id, 'form')
                if board and 'arch' in board:
                    xml = ElementTree.fromstring(board['arch'])
                    column = xml.find('./board/column')
                    if column is not None:
                        # Prepend so the new action shows first in the column.
                        new_action = ElementTree.Element('action', {
                            'name': str(action_id),
                            'string': name,
                            'view_mode': view_mode,
                            'context': str(context_to_save),
                            'domain': str(domain)
                        })
                        column.insert(0, new_action)
                        arch = ElementTree.tostring(xml, 'utf-8')
                        return request.session.model('ir.ui.view.custom').create({
                            'user_id': request.session.uid,
                            'ref_id': view_id,
                            'arch': arch
                        }, request.context)
        return False
|
eneldoserrata/marcos_openerp
|
refs/heads/master
|
oemedical/oemedical_emr/models/oemedical_patient.py
|
3
|
# -*- coding: utf-8 -*-
#/#############################################################################
#
# Tech-Receptives Solutions Pvt. Ltd.
# Copyright (C) 2004-TODAY Tech-Receptives(<http://www.techreceptives.com>)
# Special Credit and Thanks to Thymbra Latinoamericana S.A.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#/#############################################################################
# Note: orm was previously imported twice in the same statement; deduplicated.
from openerp.osv import fields, orm
from openerp.tools.translate import _
from dateutil.relativedelta import relativedelta
from datetime import datetime
class OeMedicalPatient(orm.Model):
    """Extend ``oemedical.patient`` with EMR fields: family link, blood
    typing, care team and the patient's medical history collections."""
    # _name equals _inherit: extends the existing model in place.
    _name= 'oemedical.patient'
    _inherit= 'oemedical.patient'
    _columns={
        'family': fields.many2one('oemedical.family', string='Family', help='Family Code'),
        'blood_type': fields.selection([
            ('A', 'A'),
            ('B', 'B'),
            ('AB', 'AB'),
            ('O', 'O'), ], string='Blood Type'),
        'rh': fields.selection([
            ('+', '+'),
            ('-', '-'), ], string='Rh'),
        'primary_care_doctor': fields.many2one('oemedical.physician', 'Primary Care Doctor', help='Current primary care / family doctor'),
        'childbearing_age': fields.boolean('Potential for Childbearing'),
        'medications': fields.one2many('oemedical.patient.medication', 'patient_id', string='Medications',),
        'evaluations': fields.one2many('oemedical.patient.evaluation', 'patient_id', string='Evaluations',),
        'critical_info': fields.text( string='Important disease, allergy or procedures information', help='Write any important information on the patient\'s disease, surgeries, allergies, ...'),
        # NOTE(review): the help text below ('Mark if the patient has died')
        # looks copy-pasted from a 'deceased' boolean field -- confirm intent.
        'diseases': fields.one2many('oemedical.patient.disease', 'patient_id', string='Diseases', help='Mark if the patient has died'),
        'ethnic_group': fields.many2one('oemedical.ethnicity', string='Ethnic group',),
        'vaccinations': fields.one2many('oemedical.vaccination', 'patient_id', 'Vaccinations',),
        'cod': fields.many2one('oemedical.pathology', string='Cause of Death',),
    }
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
wscullin/spack
|
refs/heads/qmcpack
|
var/spack/repos/builtin/packages/zip/package.py
|
3
|
##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Zip(MakefilePackage):
    """Zip is a compression and file packaging/archive utility."""
    homepage = 'http://www.info-zip.org/Zip.html'
    url = 'http://downloads.sourceforge.net/infozip/zip30.tar.gz'
    version('3.0', '7b74551e63f8ee6aab6fbc86676c0d37')
    depends_on('bzip2')
    def url_for_version(self, version):
        """Return the download URL; Info-ZIP joins the version digits into
        the archive name (3.0 -> zip30.tar.gz)."""
        return 'http://downloads.sourceforge.net/infozip/zip{0}.tar.gz'.format(version.joined)
    # Zip ships per-platform makefiles; use the Unix one and the 'generic'
    # build target.
    make_args = ['-f', 'unix/Makefile']
    build_targets = make_args + ['generic']
    @property
    def install_targets(self):
        """Targets for the install phase, pointing prefix at Spack's prefix."""
        return self.make_args + ['prefix={0}'.format(self.prefix), 'install']
|
Rjtsahu/School-Bus-Tracking
|
refs/heads/restructure
|
BusTrack/repository/main.py
|
1
|
from BusTrack.repository import Base
from BusTrack.repository import engine
# import all relevant db models here.
from BusTrack.repository.models.Bus import Bus
from BusTrack.repository.models.UserType import UserType
from BusTrack.repository.models.User import User
from BusTrack.repository.models.UserLogin import UserLogin
from BusTrack.repository.models.Feedback import Feedback
from BusTrack.repository.models.Kid import Kid
from BusTrack.repository.models.Journey import Journey
from BusTrack.repository.models.Location import Location
from BusTrack.repository.models.Attendance import Attendance
def create_database():
    """Create all tables mapped on ``Base`` and seed the default rows."""
    print('creating database from given mappings')
    Base.metadata.create_all(engine)
    # Seed reference data required by the application.
    Bus.__create_default_bus__()
    UserType.__create_default_role__()
    seeded_models = (User, UserLogin, Feedback, Kid, Journey, Location,
                     Attendance)
    tables = [model.__tablename__ for model in seeded_models]
    print('created mapping for tables:', tables)
|
GunnerJnr/_CodeInstitute
|
refs/heads/master
|
Stream-3/Full-Stack-Development/19.Djangos-Testing-Framework/4.How-To-Test-Models/we_are_social/threads/migrations/0001_initial.py
|
19
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-09-08 09:18
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
import tinymce.models
class Migration(migrations.Migration):
    """Initial schema for the threads app: Subject, Thread and Post models.
    Auto-generated by Django 1.11.4 -- do not hand-edit field definitions."""
    initial = True
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]
    operations = [
        migrations.CreateModel(
            name='Post',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('comment', tinymce.models.HTMLField(blank=True)),
                ('created_at', models.DateTimeField(default=django.utils.timezone.now)),
            ],
        ),
        migrations.CreateModel(
            name='Subject',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=255)),
                ('description', tinymce.models.HTMLField()),
            ],
        ),
        migrations.CreateModel(
            name='Thread',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=255)),
                # NOTE(review): field name 'Subject' is capitalized --
                # presumably mirrors the model attribute it was generated
                # from; renaming would require a new migration.
                ('Subject', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='threads', to='threads.Subject')),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='threads', to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.AddField(
            model_name='post',
            name='thread',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='posts', to='threads.Thread'),
        ),
        migrations.AddField(
            model_name='post',
            name='user',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='posts', to=settings.AUTH_USER_MODEL),
        ),
    ]
|
antinet/cjdns
|
refs/heads/master
|
node_build/dependencies/libuv/build/gyp/test/hard_dependency/gyptest-exported-hard-dependency.py
|
350
|
#!/usr/bin/env python
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verify that a hard_dependency that is exported is pulled in as a dependency
for a target if the target is a static library and if the generator will
remove dependencies between static libraries.
"""
import TestGyp
test = TestGyp.TestGyp()
# This check is only meaningful for generators that prune dependencies
# between static libraries; the JSON dump generator does not.
if test.format == 'dump_dependency_json':
  test.skip_test('Skipping test; dependency JSON does not adjust ' \
                 'static libraries.\n')
test.run_gyp('hard_dependency.gyp', chdir='src')
chdir = 'relocate/src'
test.relocate('src', chdir)
test.build('hard_dependency.gyp', 'c', chdir=chdir)
# The 'a' static library should be built, as it has actions with side-effects
# that are necessary to compile 'c'. Even though 'c' does not directly depend
# on 'a', because 'a' is a hard_dependency that 'b' exports, 'c' should import
# it as a hard_dependency and ensure it is built before building 'c'.
test.built_file_must_exist('a', type=test.STATIC_LIB, chdir=chdir)
test.built_file_must_not_exist('b', type=test.STATIC_LIB, chdir=chdir)
test.built_file_must_exist('c', type=test.STATIC_LIB, chdir=chdir)
test.built_file_must_not_exist('d', type=test.STATIC_LIB, chdir=chdir)
test.pass_test()
|
ProfessionalIT/professionalit-webiste
|
refs/heads/master
|
sdk/google_appengine/lib/django-1.5/django/contrib/messages/api.py
|
321
|
from django.contrib.messages import constants
from django.contrib.messages.storage import default_storage
__all__ = (
'add_message', 'get_messages',
'get_level', 'set_level',
'debug', 'info', 'success', 'warning', 'error',
)
class MessageFailure(Exception):
    """Raised when a message cannot be added and ``fail_silently`` is off."""
    pass
def add_message(request, level, message, extra_tags='', fail_silently=False):
    """
    Attempt to add a message to the request using the 'messages' app.

    Raises ``MessageFailure`` when no message storage is attached to the
    request, unless ``fail_silently`` is set.
    """
    if not hasattr(request, '_messages'):
        if fail_silently:
            return None
        raise MessageFailure('You cannot add messages without installing '
                    'django.contrib.messages.middleware.MessageMiddleware')
    return request._messages.add(level, message, extra_tags)
def get_messages(request):
    """
    Return the request's message storage when present, otherwise an
    empty list.
    """
    return request._messages if hasattr(request, '_messages') else []
def get_level(request):
    """
    Return the minimum level of messages to be recorded.

    The default level is the ``MESSAGE_LEVEL`` setting. If this is not found,
    the ``INFO`` level is used.
    """
    if not hasattr(request, '_messages'):
        # No middleware-attached storage: fall back to the default backend.
        return default_storage(request).level
    return request._messages.level
def set_level(request, level):
    """
    Set the minimum level of messages to be recorded, returning ``True``
    when the level was stored successfully and ``False`` when the request
    has no message storage.

    If set to ``None``, the default level will be used (see ``get_level``).
    """
    if hasattr(request, '_messages'):
        request._messages.level = level
        return True
    return False
def debug(request, message, extra_tags='', fail_silently=False):
    """Add a message at the ``DEBUG`` level."""
    add_message(
        request,
        constants.DEBUG,
        message,
        extra_tags=extra_tags,
        fail_silently=fail_silently,
    )
def info(request, message, extra_tags='', fail_silently=False):
    """Add a message at the ``INFO`` level."""
    add_message(
        request,
        constants.INFO,
        message,
        extra_tags=extra_tags,
        fail_silently=fail_silently,
    )
def success(request, message, extra_tags='', fail_silently=False):
    """Add a message at the ``SUCCESS`` level."""
    add_message(
        request,
        constants.SUCCESS,
        message,
        extra_tags=extra_tags,
        fail_silently=fail_silently,
    )
def warning(request, message, extra_tags='', fail_silently=False):
    """Add a message at the ``WARNING`` level."""
    add_message(
        request,
        constants.WARNING,
        message,
        extra_tags=extra_tags,
        fail_silently=fail_silently,
    )
def error(request, message, extra_tags='', fail_silently=False):
    """Add a message at the ``ERROR`` level."""
    add_message(
        request,
        constants.ERROR,
        message,
        extra_tags=extra_tags,
        fail_silently=fail_silently,
    )
|
vmora/QGIS
|
refs/heads/master
|
python/plugins/processing/algs/qgis/ServiceAreaFromPoint.py
|
4
|
# -*- coding: utf-8 -*-
"""
***************************************************************************
ServiceAreaFromPoint.py
---------------------
Date : December 2016
Copyright : (C) 2016 by Alexander Bruy
Email : alexander dot bruy at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Alexander Bruy'
__date__ = 'December 2016'
__copyright__ = '(C) 2016, Alexander Bruy'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import os
from collections import OrderedDict
from qgis.PyQt.QtCore import QVariant, QCoreApplication
from qgis.PyQt.QtGui import QIcon
from qgis.core import (QgsWkbTypes,
QgsUnitTypes,
QgsFeature,
QgsFeatureSink,
QgsGeometry,
QgsGeometryUtils,
QgsFields,
QgsField,
QgsProcessing,
QgsProcessingException,
QgsProcessingParameterBoolean,
QgsProcessingParameterDistance,
QgsProcessingParameterEnum,
QgsProcessingParameterPoint,
QgsProcessingParameterField,
QgsProcessingParameterNumber,
QgsProcessingParameterString,
QgsProcessingParameterFeatureSink,
QgsProcessingParameterFeatureSource,
QgsProcessingParameterDefinition)
from qgis.analysis import (QgsVectorLayerDirector,
QgsNetworkDistanceStrategy,
QgsNetworkSpeedStrategy,
QgsGraphBuilder,
QgsGraphAnalyzer
)
from processing.algs.qgis.QgisAlgorithm import QgisAlgorithm
pluginPath = os.path.split(os.path.split(os.path.dirname(__file__))[0])[0]
class ServiceAreaFromPoint(QgisAlgorithm):
    """Processing algorithm computing the service area reachable from a
    start point over a line network within a travel-cost budget, emitting
    boundary nodes and/or the reachable line segments."""
    # Parameter / output identifiers registered with the processing framework.
    INPUT = 'INPUT'
    START_POINT = 'START_POINT'
    STRATEGY = 'STRATEGY'
    TRAVEL_COST = 'TRAVEL_COST'
    DIRECTION_FIELD = 'DIRECTION_FIELD'
    VALUE_FORWARD = 'VALUE_FORWARD'
    VALUE_BACKWARD = 'VALUE_BACKWARD'
    VALUE_BOTH = 'VALUE_BOTH'
    DEFAULT_DIRECTION = 'DEFAULT_DIRECTION'
    SPEED_FIELD = 'SPEED_FIELD'
    DEFAULT_SPEED = 'DEFAULT_SPEED'
    TOLERANCE = 'TOLERANCE'
    INCLUDE_BOUNDS = 'INCLUDE_BOUNDS'
    OUTPUT = 'OUTPUT'
    OUTPUT_LINES = 'OUTPUT_LINES'
    def icon(self):
        """Return the network-analysis icon for this algorithm."""
        return QIcon(os.path.join(pluginPath, 'images', 'networkanalysis.svg'))
    def group(self):
        """Translated group label shown in the toolbox."""
        return self.tr('Network analysis')
    def groupId(self):
        """Stable (untranslated) group identifier."""
        return 'networkanalysis'
    def __init__(self):
        super().__init__()
    def initAlgorithm(self, config=None):
        """Declare all input parameters and the two optional outputs."""
        self.DIRECTIONS = OrderedDict([
            (self.tr('Forward direction'), QgsVectorLayerDirector.DirectionForward),
            (self.tr('Backward direction'), QgsVectorLayerDirector.DirectionBackward),
            (self.tr('Both directions'), QgsVectorLayerDirector.DirectionBoth)])
        self.STRATEGIES = [self.tr('Shortest'),
                           self.tr('Fastest')
                           ]
        self.addParameter(QgsProcessingParameterFeatureSource(self.INPUT,
                                                              self.tr('Vector layer representing network'),
                                                              [QgsProcessing.TypeVectorLine]))
        self.addParameter(QgsProcessingParameterPoint(self.START_POINT,
                                                      self.tr('Start point')))
        self.addParameter(QgsProcessingParameterEnum(self.STRATEGY,
                                                     self.tr('Path type to calculate'),
                                                     self.STRATEGIES,
                                                     defaultValue=0))
        self.addParameter(QgsProcessingParameterNumber(self.TRAVEL_COST,
                                                       self.tr('Travel cost (distance for "Shortest", time for "Fastest")'),
                                                       QgsProcessingParameterNumber.Double,
                                                       0.0, False, 0, 99999999.99))
        # The remaining parameters are advanced tuning knobs.
        params = []
        params.append(QgsProcessingParameterField(self.DIRECTION_FIELD,
                                                  self.tr('Direction field'),
                                                  None,
                                                  self.INPUT,
                                                  optional=True))
        params.append(QgsProcessingParameterString(self.VALUE_FORWARD,
                                                   self.tr('Value for forward direction'),
                                                   optional=True))
        params.append(QgsProcessingParameterString(self.VALUE_BACKWARD,
                                                   self.tr('Value for backward direction'),
                                                   optional=True))
        params.append(QgsProcessingParameterString(self.VALUE_BOTH,
                                                   self.tr('Value for both directions'),
                                                   optional=True))
        params.append(QgsProcessingParameterEnum(self.DEFAULT_DIRECTION,
                                                 self.tr('Default direction'),
                                                 list(self.DIRECTIONS.keys()),
                                                 defaultValue=2))
        params.append(QgsProcessingParameterField(self.SPEED_FIELD,
                                                  self.tr('Speed field'),
                                                  None,
                                                  self.INPUT,
                                                  optional=True))
        params.append(QgsProcessingParameterNumber(self.DEFAULT_SPEED,
                                                   self.tr('Default speed (km/h)'),
                                                   QgsProcessingParameterNumber.Double,
                                                   5.0, False, 0, 99999999.99))
        params.append(QgsProcessingParameterDistance(self.TOLERANCE,
                                                     self.tr('Topology tolerance'),
                                                     0.0, self.INPUT, False, 0, 99999999.99))
        params.append(QgsProcessingParameterBoolean(self.INCLUDE_BOUNDS,
                                                    self.tr('Include upper/lower bound points'),
                                                    defaultValue=False))
        for p in params:
            p.setFlags(p.flags() | QgsProcessingParameterDefinition.FlagAdvanced)
            self.addParameter(p)
        lines_output = QgsProcessingParameterFeatureSink(self.OUTPUT_LINES,
                                                         self.tr('Service area (lines)'),
                                                         QgsProcessing.TypeVectorLine, optional=True)
        lines_output.setCreateByDefault(True)
        self.addParameter(lines_output)
        nodes_output = QgsProcessingParameterFeatureSink(self.OUTPUT,
                                                         self.tr('Service area (boundary nodes)'),
                                                         QgsProcessing.TypeVectorPoint, optional=True)
        nodes_output.setCreateByDefault(False)
        self.addParameter(nodes_output)
    def name(self):
        """Stable algorithm identifier."""
        return 'serviceareafrompoint'
    def displayName(self):
        """Translated display name shown in the toolbox."""
        return self.tr('Service area (from point)')
    def processAlgorithm(self, parameters, context, feedback):
        """Build the network graph, run Dijkstra from the snapped start
        point and write the reachable points/lines to the chosen sinks."""
        network = self.parameterAsSource(parameters, self.INPUT, context)
        if network is None:
            raise QgsProcessingException(self.invalidSourceError(parameters, self.INPUT))
        startPoint = self.parameterAsPoint(parameters, self.START_POINT, context, network.sourceCrs())
        strategy = self.parameterAsEnum(parameters, self.STRATEGY, context)
        travelCost = self.parameterAsDouble(parameters, self.TRAVEL_COST, context)
        directionFieldName = self.parameterAsString(parameters, self.DIRECTION_FIELD, context)
        forwardValue = self.parameterAsString(parameters, self.VALUE_FORWARD, context)
        backwardValue = self.parameterAsString(parameters, self.VALUE_BACKWARD, context)
        bothValue = self.parameterAsString(parameters, self.VALUE_BOTH, context)
        defaultDirection = self.parameterAsEnum(parameters, self.DEFAULT_DIRECTION, context)
        speedFieldName = self.parameterAsString(parameters, self.SPEED_FIELD, context)
        defaultSpeed = self.parameterAsDouble(parameters, self.DEFAULT_SPEED, context)
        tolerance = self.parameterAsDouble(parameters, self.TOLERANCE, context)
        include_bounds = True  # default to true to maintain 3.0 API
        if self.INCLUDE_BOUNDS in parameters:
            include_bounds = self.parameterAsBool(parameters, self.INCLUDE_BOUNDS, context)
        # -1 means "no field" for the director below.
        directionField = -1
        if directionFieldName:
            directionField = network.fields().lookupField(directionFieldName)
        speedField = -1
        if speedFieldName:
            speedField = network.fields().lookupField(speedFieldName)
        director = QgsVectorLayerDirector(network,
                                          directionField,
                                          forwardValue,
                                          backwardValue,
                                          bothValue,
                                          defaultDirection)
        distUnit = context.project().crs().mapUnits()
        multiplier = QgsUnitTypes.fromUnitToUnitFactor(distUnit, QgsUnitTypes.DistanceMeters)
        if strategy == 0:
            strategy = QgsNetworkDistanceStrategy()
        else:
            # multiplier * 1000/3600 converts km/h into map units per second.
            strategy = QgsNetworkSpeedStrategy(speedField,
                                               defaultSpeed,
                                               multiplier * 1000.0 / 3600.0)
        director.addStrategy(strategy)
        builder = QgsGraphBuilder(network.sourceCrs(),
                                  True,
                                  tolerance)
        feedback.pushInfo(QCoreApplication.translate('ServiceAreaFromPoint', 'Building graph…'))
        snappedPoints = director.makeGraph(builder, [startPoint], feedback)
        feedback.pushInfo(QCoreApplication.translate('ServiceAreaFromPoint', 'Calculating service area…'))
        graph = builder.graph()
        idxStart = graph.findVertex(snappedPoints[0])
        tree, cost = QgsGraphAnalyzer.dijkstra(graph, idxStart, 0)
        vertices = set()
        points = []
        lines = []
        for vertex, start_vertex_cost in enumerate(cost):
            inbound_edge_index = tree[vertex]
            if inbound_edge_index == -1 and vertex != idxStart:
                # unreachable vertex
                continue
            if start_vertex_cost > travelCost:
                # vertex is too expensive, discard
                continue
            vertices.add(vertex)
            start_point = graph.vertex(vertex).point()
            # find all edges coming from this vertex
            for edge_id in graph.vertex(vertex).outgoingEdges():
                edge = graph.edge(edge_id)
                end_vertex_cost = start_vertex_cost + edge.cost(0)
                end_point = graph.vertex(edge.toVertex()).point()
                if end_vertex_cost <= travelCost:
                    # end vertex is cheap enough to include
                    vertices.add(edge.toVertex())
                    lines.append([start_point, end_point])
                else:
                    # travelCost sits somewhere on this edge, interpolate position
                    interpolated_end_point = QgsGeometryUtils.interpolatePointOnLineByValue(start_point.x(), start_point.y(), start_vertex_cost,
                                                                                           end_point.x(), end_point.y(), end_vertex_cost, travelCost)
                    points.append(interpolated_end_point)
                    lines.append([start_point, interpolated_end_point])
        for i in vertices:
            points.append(graph.vertex(i).point())
        feedback.pushInfo(QCoreApplication.translate('ServiceAreaFromPoint', 'Writing results…'))
        fields = QgsFields()
        fields.append(QgsField('type', QVariant.String, '', 254, 0))
        fields.append(QgsField('start', QVariant.String, '', 254, 0))
        feat = QgsFeature()
        feat.setFields(fields)
        (point_sink, dest_id) = self.parameterAsSink(parameters, self.OUTPUT, context,
                                                     fields, QgsWkbTypes.MultiPoint, network.sourceCrs())
        results = {}
        if point_sink is not None:
            results[self.OUTPUT] = dest_id
            geomPoints = QgsGeometry.fromMultiPointXY(points)
            feat.setGeometry(geomPoints)
            feat['type'] = 'within'
            feat['start'] = startPoint.toString()
            point_sink.addFeature(feat, QgsFeatureSink.FastInsert)
            if include_bounds:
                # Upper/lower boundary points bracket the exact service-area
                # limit: the last reachable vertex and the first unreachable
                # one on each boundary-crossing edge.
                upperBoundary = []
                lowerBoundary = []
                vertices = []
                for i, v in enumerate(cost):
                    if v > travelCost and tree[i] != -1:
                        vertexId = graph.edge(tree[i]).fromVertex()
                        if cost[vertexId] <= travelCost:
                            vertices.append(i)
                for i in vertices:
                    upperBoundary.append(graph.vertex(graph.edge(tree[i]).toVertex()).point())
                    lowerBoundary.append(graph.vertex(graph.edge(tree[i]).fromVertex()).point())
                geomUpper = QgsGeometry.fromMultiPointXY(upperBoundary)
                geomLower = QgsGeometry.fromMultiPointXY(lowerBoundary)
                feat.setGeometry(geomUpper)
                feat['type'] = 'upper'
                feat['start'] = startPoint.toString()
                point_sink.addFeature(feat, QgsFeatureSink.FastInsert)
                feat.setGeometry(geomLower)
                feat['type'] = 'lower'
                feat['start'] = startPoint.toString()
                point_sink.addFeature(feat, QgsFeatureSink.FastInsert)
        (line_sink, line_dest_id) = self.parameterAsSink(parameters, self.OUTPUT_LINES, context,
                                                         fields, QgsWkbTypes.MultiLineString, network.sourceCrs())
        if line_sink is not None:
            results[self.OUTPUT_LINES] = line_dest_id
            geom_lines = QgsGeometry.fromMultiPolylineXY(lines)
            feat.setGeometry(geom_lines)
            feat['type'] = 'lines'
            feat['start'] = startPoint.toString()
            line_sink.addFeature(feat, QgsFeatureSink.FastInsert)
        return results
|
atosorigin/ansible
|
refs/heads/devel
|
test/support/integration/plugins/modules/x509_crl_info.py
|
36
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2020, Felix Fontein <felix@fontein.de>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: x509_crl_info
version_added: "2.10"
short_description: Retrieve information on Certificate Revocation Lists (CRLs)
description:
- This module allows one to retrieve information on Certificate Revocation Lists (CRLs).
requirements:
- cryptography >= 1.2
author:
- Felix Fontein (@felixfontein)
options:
path:
description:
- Remote absolute path where the generated CRL file should be created or is already located.
- Either I(path) or I(content) must be specified, but not both.
type: path
content:
description:
- Content of the X.509 certificate in PEM format.
- Either I(path) or I(content) must be specified, but not both.
type: str
notes:
- All timestamp values are provided in ASN.1 TIME format, i.e. following the C(YYYYMMDDHHMMSSZ) pattern.
They are all in UTC.
seealso:
- module: x509_crl
'''
EXAMPLES = r'''
- name: Get information on CRL
x509_crl_info:
path: /etc/ssl/my-ca.crl
register: result
- debug:
msg: "{{ result }}"
'''
# Module return-value documentation (Ansible RETURN YAML block).
# Fix: the issuer_ordered sample previously mixed dict syntax into a list
# entry (["commonName": "ca.example.com"]), which is invalid; it now uses a
# comma like the first pair.
RETURN = r'''
issuer:
    description:
        - The CRL's issuer.
        - Note that for repeated values, only the last one will be returned.
    returned: success
    type: dict
    sample: '{"organizationName": "Ansible", "commonName": "ca.example.com"}'
issuer_ordered:
    description: The CRL's issuer as an ordered list of tuples.
    returned: success
    type: list
    elements: list
    sample: '[["organizationName", "Ansible"], ["commonName", "ca.example.com"]]'
last_update:
    description: The point in time from which this CRL can be trusted as ASN.1 TIME.
    returned: success
    type: str
    sample: 20190413202428Z
next_update:
    description: The point in time from which a new CRL will be issued and the client has to check for it as ASN.1 TIME.
    returned: success
    type: str
    sample: 20190413202428Z
digest:
    description: The signature algorithm used to sign the CRL.
    returned: success
    type: str
    sample: sha256WithRSAEncryption
revoked_certificates:
    description: List of certificates to be revoked.
    returned: success
    type: list
    elements: dict
    contains:
        serial_number:
            description: Serial number of the certificate.
            type: int
            sample: 1234
        revocation_date:
            description: The point in time the certificate was revoked as ASN.1 TIME.
            type: str
            sample: 20190413202428Z
        issuer:
            description: The certificate's issuer.
            type: list
            elements: str
            sample: '["DNS:ca.example.org"]'
        issuer_critical:
            description: Whether the certificate issuer extension is critical.
            type: bool
            sample: no
        reason:
            description:
                - The value for the revocation reason extension.
                - One of C(unspecified), C(key_compromise), C(ca_compromise), C(affiliation_changed), C(superseded),
                  C(cessation_of_operation), C(certificate_hold), C(privilege_withdrawn), C(aa_compromise), and
                  C(remove_from_crl).
            type: str
            sample: key_compromise
        reason_critical:
            description: Whether the revocation reason extension is critical.
            type: bool
            sample: no
        invalidity_date:
            description: |
                The point in time it was known/suspected that the private key was compromised
                or that the certificate otherwise became invalid as ASN.1 TIME.
            type: str
            sample: 20190413202428Z
        invalidity_date_critical:
            description: Whether the invalidity date extension is critical.
            type: bool
            sample: no
'''
import traceback
from distutils.version import LooseVersion
from ansible.module_utils import crypto as crypto_utils
from ansible.module_utils._text import to_native
from ansible.module_utils.basic import AnsibleModule, missing_required_lib
MINIMAL_CRYPTOGRAPHY_VERSION = '1.2'
CRYPTOGRAPHY_IMP_ERR = None
try:
import cryptography
from cryptography import x509
from cryptography.hazmat.backends import default_backend
CRYPTOGRAPHY_VERSION = LooseVersion(cryptography.__version__)
except ImportError:
CRYPTOGRAPHY_IMP_ERR = traceback.format_exc()
CRYPTOGRAPHY_FOUND = False
else:
CRYPTOGRAPHY_FOUND = True
TIMESTAMP_FORMAT = "%Y%m%d%H%M%SZ"
class CRLError(crypto_utils.OpenSSLObjectError):
    """Error type for CRL processing failures in this module."""
    pass
class CRLInfo(crypto_utils.OpenSSLObject):
"""The main module implementation."""
def __init__(self, module):
super(CRLInfo, self).__init__(
module.params['path'] or '',
'present',
False,
module.check_mode
)
self.content = module.params['content']
self.module = module
self.crl = None
if self.content is None:
try:
with open(self.path, 'rb') as f:
data = f.read()
except Exception as e:
self.module.fail_json(msg='Error while reading CRL file from disk: {0}'.format(e))
else:
data = self.content.encode('utf-8')
try:
self.crl = x509.load_pem_x509_crl(data, default_backend())
except Exception as e:
self.module.fail_json(msg='Error while decoding CRL: {0}'.format(e))
def _dump_revoked(self, entry):
return {
'serial_number': entry['serial_number'],
'revocation_date': entry['revocation_date'].strftime(TIMESTAMP_FORMAT),
'issuer':
[crypto_utils.cryptography_decode_name(issuer) for issuer in entry['issuer']]
if entry['issuer'] is not None else None,
'issuer_critical': entry['issuer_critical'],
'reason': crypto_utils.REVOCATION_REASON_MAP_INVERSE.get(entry['reason']) if entry['reason'] is not None else None,
'reason_critical': entry['reason_critical'],
'invalidity_date':
entry['invalidity_date'].strftime(TIMESTAMP_FORMAT)
if entry['invalidity_date'] is not None else None,
'invalidity_date_critical': entry['invalidity_date_critical'],
}
def get_info(self):
result = {
'changed': False,
'last_update': None,
'next_update': None,
'digest': None,
'issuer_ordered': None,
'issuer': None,
'revoked_certificates': [],
}
result['last_update'] = self.crl.last_update.strftime(TIMESTAMP_FORMAT)
result['next_update'] = self.crl.next_update.strftime(TIMESTAMP_FORMAT)
try:
result['digest'] = crypto_utils.cryptography_oid_to_name(self.crl.signature_algorithm_oid)
except AttributeError:
# Older cryptography versions don't have signature_algorithm_oid yet
dotted = crypto_utils._obj2txt(
self.crl._backend._lib,
self.crl._backend._ffi,
self.crl._x509_crl.sig_alg.algorithm
)
oid = x509.oid.ObjectIdentifier(dotted)
result['digest'] = crypto_utils.cryptography_oid_to_name(oid)
issuer = []
for attribute in self.crl.issuer:
issuer.append([crypto_utils.cryptography_oid_to_name(attribute.oid), attribute.value])
result['issuer_ordered'] = issuer
result['issuer'] = {}
for k, v in issuer:
result['issuer'][k] = v
result['revoked_certificates'] = []
for cert in self.crl:
entry = crypto_utils.cryptography_decode_revoked_certificate(cert)
result['revoked_certificates'].append(self._dump_revoked(entry))
return result
def generate(self):
    """No-op: required by the crypto_utils.OpenSSLObject interface."""
    # Empty method because crypto_utils.OpenSSLObject wants this
    pass
def dump(self):
    """No-op: required by the crypto_utils.OpenSSLObject interface."""
    # Empty method because crypto_utils.OpenSSLObject wants this
    pass
def main():
    """Module entry point: parse arguments, gather CRL facts, and exit.

    Accepts exactly one of 'path' (a file on disk) or 'content' (inline
    CRL data); fails cleanly if the cryptography library is missing or
    the CRL cannot be processed.
    """
    module = AnsibleModule(
        argument_spec=dict(
            path=dict(type='path'),
            content=dict(type='str'),
        ),
        required_one_of=(
            ['path', 'content'],
        ),
        mutually_exclusive=(
            ['path', 'content'],
        ),
        supports_check_mode=True,
    )

    # Bail out early with a standard missing-library message when the
    # required cryptography backend is not importable.
    if not CRYPTOGRAPHY_FOUND:
        module.fail_json(
            msg=missing_required_lib('cryptography >= {0}'.format(MINIMAL_CRYPTOGRAPHY_VERSION)),
            exception=CRYPTOGRAPHY_IMP_ERR,
        )

    try:
        module.exit_json(**CRLInfo(module).get_info())
    except crypto_utils.OpenSSLObjectError as e:
        module.fail_json(msg=to_native(e))
# Run the module entry point only when executed directly, not on import.
if __name__ == "__main__":
    main()
|
datawire/ambassador
|
refs/heads/master
|
python/ambassador/ir/iripallowdeny.py
|
1
|
from typing import ClassVar, Dict, List, Optional, Tuple, TYPE_CHECKING
from typing import cast as typecast
from ..config import Config
from ..envoy.v2.v2cidrrange import CIDRRange
from .irresource import IRResource
from .irfilter import IRFilter
if TYPE_CHECKING:
from .ir import IR # pragma: no cover
class IRIPAllowDeny(IRFilter):
    """
    IRIPAllowDeny is an IRFilter that implements an allow/deny list based
    on IP address.
    """

    # Resource (currently the Ambassador module) where errors are posted.
    parent: IRResource
    # Normalized to "ALLOW" or "DENY" by setup().
    action: str
    # (envoy principal kind, CIDR range) pairs accepted by setup().
    principals: List[Tuple[str, CIDRRange]]

    # Maps the config-level principal kind to the Envoy RBAC principal field.
    EnvoyTypeMap: ClassVar[Dict[str, str]] = {
        "remote": "remote_ip",
        "peer": "direct_remote_ip"
    }

    def __init__(self, ir: 'IR', aconf: Config,
                 rkey: str="ir.ipallowdeny",
                 name: str="ir.ipallowdeny",
                 kind: str="IRIPAllowDeny",
                 parent: Optional[IRResource]=None,
                 action: Optional[str]=None,
                 **kwargs) -> None:
        """
        Initialize an IRIPAllowDeny. In addition to the usual IRFilter parameters,
        parent and action are required:

        parent is the IRResource in which the IRIPAllowDeny is defined; at present,
        this will be the Ambassador module. It's required because it's where errors
        should be posted.

        action must be either "ALLOW" or "DENY". This action will be normalized to
        all-uppercase in setup().
        """
        # parent and action are declared Optional only so callers can use
        # keyword arguments; they are in fact mandatory.
        assert parent is not None
        assert action is not None

        super().__init__(
            ir=ir, aconf=aconf, rkey=rkey, kind=kind, name=name,
            parent=parent, action=action, **kwargs)

    def setup(self, ir: 'IR', aconf: Config) -> bool:
        """
        Set up an IRIPAllowDeny based on the action and principals passed into
        __init__.

        Returns True iff at least one valid principal was accepted.
        """
        assert self.parent

        # These pops will crash if the action or principals are missing. That's
        # OK -- they're required elements.
        action: Optional[str] = self.pop("action")
        principals: Optional[List[Dict[str, str]]] = self.pop("principals")

        assert action is not None
        assert principals is not None

        action = action.upper()

        if (action != "ALLOW") and (action != "DENY"):
            raise RuntimeError(f"IRIPAllowDeny action must be ALLOW or DENY, not {action}")

        self.action = action
        self.principals = []

        ir.logger.debug(f"PRINCIPALS: {principals}")

        # principals looks like
        #
        # [
        #    { 'peer': '127.0.0.1' },
        #    { 'remote': '192.68.0.0/24' },
        #    { 'remote': '::1' }
        # ]
        #
        # or the like, where the key in the dict specifies how Envoy will handle the
        # IP match, and the value is a CIDRRange spec.
        for pdict in principals:
            # If we have more than one thing in the dict, that's an error.
            first = True

            for kind, spec in pdict.items():
                if not first:
                    self.parent.post_error(f"ip{self.action.lower()} principals must be separate list elements")
                    break

                first = False

                envoy_kind = IRIPAllowDeny.EnvoyTypeMap.get(kind, None)

                if not envoy_kind:
                    self.parent.post_error(f"ip{self.action.lower()} principal type {kind} unknown: must be peer or remote")
                    continue

                cidrrange = CIDRRange(spec)

                if cidrrange:
                    self.principals.append((envoy_kind, cidrrange))
                else:
                    self.parent.post_error(f"ip_{self.action.lower()} principal {spec} is not valid: {cidrrange.error}")

        # Active only when at least one principal survived validation.
        return len(self.principals) > 0

    def __str__(self) -> str:
        """Human-readable summary: action plus the accepted principals."""
        pstrs = [ str(x) for x in self.principals ]
        return f"<IPAllowDeny {self.action}: {', '.join(pstrs)}>"

    def as_dict(self) -> dict:
        """Serialize as {action, principals} with CIDR ranges expanded to dicts."""
        return {
            "action": self.action,
            "principals": [ { kind: block.as_dict() } for kind, block in self.principals ]
        }
|
bonewell/sdl_core
|
refs/heads/master
|
src/3rd_party-static/gmock-1.7.0/gtest/test/gtest_help_test.py
|
2968
|
#!/usr/bin/env python
#
# Copyright 2009, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Tests the --help flag of Google C++ Testing Framework.
SYNOPSIS
gtest_help_test.py --build_dir=BUILD/DIR
# where BUILD/DIR contains the built gtest_help_test_ file.
gtest_help_test.py
"""
__author__ = 'wan@google.com (Zhanyong Wan)'
import os
import re
import gtest_test_utils
# Platform detection: some flags are only advertised on certain OSes.
IS_LINUX = os.name == 'posix' and os.uname()[0] == 'Linux'
IS_WINDOWS = os.name == 'nt'

# Path to the gtest_help_test_ binary built alongside this script.
PROGRAM_PATH = gtest_test_utils.GetTestExecutablePath('gtest_help_test_')

# All Google Test flags share this prefix.
FLAG_PREFIX = '--gtest_'
DEATH_TEST_STYLE_FLAG = FLAG_PREFIX + 'death_test_style'
STREAM_RESULT_TO_FLAG = FLAG_PREFIX + 'stream_result_to'
UNKNOWN_FLAG = FLAG_PREFIX + 'unknown_flag_for_testing'
LIST_TESTS_FLAG = FLAG_PREFIX + 'list_tests'

# Malformed spellings of a valid flag ('-', '/', and '-' word separators);
# these should also trigger the help message.
INCORRECT_FLAG_VARIANTS = [re.sub('^--', '-', LIST_TESTS_FLAG),
                           re.sub('^--', '/', LIST_TESTS_FLAG),
                           re.sub('_', '-', LIST_TESTS_FLAG)]

INTERNAL_FLAG_FOR_TESTING = FLAG_PREFIX + 'internal_flag_for_testing'

# Probe the binary's test list to learn whether death tests were compiled in.
SUPPORTS_DEATH_TESTS = "DeathTest" in gtest_test_utils.Subprocess(
    [PROGRAM_PATH, LIST_TESTS_FLAG]).output

# The help message must match this regex.
HELP_REGEX = re.compile(
    FLAG_PREFIX + r'list_tests.*' +
    FLAG_PREFIX + r'filter=.*' +
    FLAG_PREFIX + r'also_run_disabled_tests.*' +
    FLAG_PREFIX + r'repeat=.*' +
    FLAG_PREFIX + r'shuffle.*' +
    FLAG_PREFIX + r'random_seed=.*' +
    FLAG_PREFIX + r'color=.*' +
    FLAG_PREFIX + r'print_time.*' +
    FLAG_PREFIX + r'output=.*' +
    FLAG_PREFIX + r'break_on_failure.*' +
    FLAG_PREFIX + r'throw_on_failure.*' +
    FLAG_PREFIX + r'catch_exceptions=0.*',
    re.DOTALL)
def RunWithFlag(flag):
    """Runs gtest_help_test_ with the given flag.

    Returns:
      the exit code and the text output as a tuple.

    Args:
      flag: the command-line flag to pass to gtest_help_test_, or None.
    """
    # Build the command line; a None flag means "run with no arguments".
    command = [PROGRAM_PATH] if flag is None else [PROGRAM_PATH, flag]
    child = gtest_test_utils.Subprocess(command)
    return child.exit_code, child.output
class GTestHelpTest(gtest_test_utils.TestCase):
  """Tests the --help flag and its equivalent forms."""

  def TestHelpFlag(self, flag):
    """Verifies correct behavior when help flag is specified.

    The right message must be printed and the tests must
    skipped when the given flag is specified.

    Args:
      flag:  A flag to pass to the binary or None.
    """
    exit_code, output = RunWithFlag(flag)
    # Deprecated unittest aliases (assertEquals/assert_) replaced with their
    # modern equivalents; the aliases are removed in Python 3.12.
    self.assertEqual(0, exit_code)
    self.assertTrue(HELP_REGEX.search(output), output)
    # Streaming of results is only advertised on Linux.
    if IS_LINUX:
      self.assertTrue(STREAM_RESULT_TO_FLAG in output, output)
    else:
      self.assertTrue(STREAM_RESULT_TO_FLAG not in output, output)
    # Death-test flags appear only when death tests are compiled in and we
    # are not on Windows.
    if SUPPORTS_DEATH_TESTS and not IS_WINDOWS:
      self.assertTrue(DEATH_TEST_STYLE_FLAG in output, output)
    else:
      self.assertTrue(DEATH_TEST_STYLE_FLAG not in output, output)

  def TestNonHelpFlag(self, flag):
    """Verifies correct behavior when no help flag is specified.

    Verifies that when no help flag is specified, the tests are run
    and the help message is not printed.

    Args:
      flag:  A flag to pass to the binary or None.
    """
    exit_code, output = RunWithFlag(flag)
    # The binary contains intentionally failing tests, so a real test run
    # exits non-zero and must not print the help text.
    self.assertNotEqual(0, exit_code)
    self.assertTrue(not HELP_REGEX.search(output), output)

  def testPrintsHelpWithFullFlag(self):
    self.TestHelpFlag('--help')

  def testPrintsHelpWithShortFlag(self):
    self.TestHelpFlag('-h')

  def testPrintsHelpWithQuestionFlag(self):
    self.TestHelpFlag('-?')

  def testPrintsHelpWithWindowsStyleQuestionFlag(self):
    self.TestHelpFlag('/?')

  def testPrintsHelpWithUnrecognizedGoogleTestFlag(self):
    self.TestHelpFlag(UNKNOWN_FLAG)

  def testPrintsHelpWithIncorrectFlagStyle(self):
    for incorrect_flag in INCORRECT_FLAG_VARIANTS:
      self.TestHelpFlag(incorrect_flag)

  def testRunsTestsWithoutHelpFlag(self):
    """Verifies that when no help flag is specified, the tests are run
    and the help message is not printed."""
    self.TestNonHelpFlag(None)

  def testRunsTestsWithGtestInternalFlag(self):
    """Verifies that the tests are run and no help message is printed when
    a flag starting with Google Test prefix and 'internal_' is supplied."""
    self.TestNonHelpFlag(INTERNAL_FLAG_FOR_TESTING)
# Delegate to the gtest test-runner harness only when executed directly.
if __name__ == '__main__':
  gtest_test_utils.Main()
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.