repo_name
stringlengths 5
100
| ref
stringlengths 12
67
| path
stringlengths 4
244
| copies
stringlengths 1
8
| content
stringlengths 0
1.05M
⌀ |
|---|---|---|---|---|
vicky2135/lucious
|
refs/heads/master
|
oscar/lib/python2.7/site-packages/django/utils/autoreload.py
|
295
|
# Autoreloading launcher.
# Borrowed from Peter Hunt and the CherryPy project (http://www.cherrypy.org).
# Some taken from Ian Bicking's Paste (http://pythonpaste.org/).
#
# Portions copyright (c) 2004, CherryPy Team (team@cherrypy.org)
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the CherryPy Team nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import os
import signal
import sys
import time
import traceback
from django.apps import apps
from django.conf import settings
from django.core.signals import request_finished
from django.utils import six
from django.utils._os import npath
from django.utils.six.moves import _thread as thread
# This import does nothing, but it's necessary to avoid some race conditions
# in the threading module. See http://code.djangoproject.com/ticket/2330 .
try:
    import threading  # NOQA
except ImportError:
    pass

try:
    import termios
except ImportError:
    # termios is POSIX-only; echo restoration is skipped without it.
    termios = None

USE_INOTIFY = False
try:
    # Test whether inotify is enabled and likely to work
    import pyinotify

    fd = pyinotify.INotifyWrapper.create().inotify_init()
    if fd >= 0:
        USE_INOTIFY = True
        os.close(fd)
except ImportError:
    pass

RUN_RELOADER = True

# Change kinds returned by the file checkers.
FILE_MODIFIED = 1
I18N_MODIFIED = 2

# filename -> last seen mtime, used by the stat()-based checker.
_mtimes = {}
_win = (sys.platform == "win32")

# Exception info captured by check_errors(), re-raised by
# raise_last_exception().
_exception = None
# Files whose import raised; watched so that fixing them triggers a reload.
_error_files = []

# Caches for gen_filenames().
_cached_modules = set()
_cached_filenames = []
def gen_filenames(only_new=False):
    """
    Returns a list of filenames referenced in sys.modules and translation
    files.

    If ``only_new`` is True, only filenames added since the previous call
    are returned (plus any previously-erroring files that still exist).
    """
    # N.B. ``list(...)`` is needed, because this runs in parallel with
    # application code which might be mutating ``sys.modules``, and this will
    # fail with RuntimeError: cannot mutate dictionary while iterating
    global _cached_modules, _cached_filenames
    module_values = set(sys.modules.values())
    # Re-clean the cache: drops files deleted since the last call.
    _cached_filenames = clean_files(_cached_filenames)
    if _cached_modules == module_values:
        # No changes in module list, short-circuit the function
        if only_new:
            return []
        else:
            return _cached_filenames + clean_files(_error_files)

    new_modules = module_values - _cached_modules
    new_filenames = clean_files(
        [filename.__file__ for filename in new_modules
         if hasattr(filename, '__file__')])
    if not _cached_filenames and settings.USE_I18N:
        # Add the names of the .mo files that can be generated
        # by compilemessages management command to the list of files watched.
        basedirs = [os.path.join(os.path.dirname(os.path.dirname(__file__)),
                                 'conf', 'locale'),
                    'locale']
        for app_config in reversed(list(apps.get_app_configs())):
            basedirs.append(os.path.join(npath(app_config.path), 'locale'))
        basedirs.extend(settings.LOCALE_PATHS)
        basedirs = [os.path.abspath(basedir) for basedir in basedirs
                    if os.path.isdir(basedir)]
        for basedir in basedirs:
            for dirpath, dirnames, locale_filenames in os.walk(basedir):
                for filename in locale_filenames:
                    if filename.endswith('.mo'):
                        new_filenames.append(os.path.join(dirpath, filename))

    _cached_modules = _cached_modules.union(new_modules)
    _cached_filenames += new_filenames
    if only_new:
        return new_filenames + clean_files(_error_files)
    else:
        return _cached_filenames + clean_files(_error_files)
def clean_files(filelist):
    """Map compiled-file names back to sources and keep only existing files.

    ``.pyc``/``.pyo`` entries become the corresponding ``.py`` file, Jython
    ``$py.class`` entries become ``.py``, and falsy entries or paths that do
    not exist on disk are dropped.
    """
    cleaned = []
    for path in filelist:
        if not path:
            continue
        if path.endswith((".pyc", ".pyo")):
            # Strip the trailing c/o to get the source file name.
            path = path[:-1]
        elif path.endswith("$py.class"):
            path = path[:-9] + ".py"
        if os.path.exists(path):
            cleaned.append(path)
    return cleaned
def reset_translations():
    """Flush all cached gettext/Django translations so changed .mo files
    are picked up without a full restart."""
    import gettext
    from django.utils.translation import trans_real
    gettext._translations = {}
    trans_real._translations = {}
    trans_real._default = None
    trans_real._active = threading.local()
def inotify_code_changed():
    """
    Checks for changed code using inotify. After being called
    it blocks until a change event has been fired.

    Returns FILE_MODIFIED or I18N_MODIFIED (for .mo files).
    """
    class EventHandler(pyinotify.ProcessEvent):
        # Stored on the class so the enclosing function can read it after
        # the notifier has been stopped.
        modified_code = None

        def process_default(self, event):
            if event.path.endswith('.mo'):
                EventHandler.modified_code = I18N_MODIFIED
            else:
                EventHandler.modified_code = FILE_MODIFIED

    wm = pyinotify.WatchManager()
    notifier = pyinotify.Notifier(wm, EventHandler())

    def update_watch(sender=None, **kwargs):
        if sender and getattr(sender, 'handles_files', False):
            # No need to update watches when request serves files.
            # (sender is supposed to be a django.core.handlers.BaseHandler subclass)
            return
        mask = (
            pyinotify.IN_MODIFY |
            pyinotify.IN_DELETE |
            pyinotify.IN_ATTRIB |
            pyinotify.IN_MOVED_FROM |
            pyinotify.IN_MOVED_TO |
            pyinotify.IN_CREATE |
            pyinotify.IN_DELETE_SELF |
            pyinotify.IN_MOVE_SELF
        )
        # Only newly-seen files need watches; existing ones are kept.
        for path in gen_filenames(only_new=True):
            wm.add_watch(path, mask)

    # New modules may get imported when a request is processed.
    request_finished.connect(update_watch)

    # Block until an event happens.
    update_watch()
    notifier.check_events(timeout=None)
    notifier.read_events()
    notifier.process_events()
    notifier.stop()

    # If we are here the code must have changed.
    return EventHandler.modified_code
def code_changed():
    """
    stat()-based change detection (fallback when inotify is unavailable).

    Returns FILE_MODIFIED/I18N_MODIFIED when a watched file's mtime changed
    since the previous call, or False when nothing changed.
    """
    global _mtimes, _win
    for filename in gen_filenames():
        stat = os.stat(filename)
        mtime = stat.st_mtime
        if _win:
            # NOTE(review): on Windows the ctime is subtracted from the
            # mtime before comparison — presumably to also detect file
            # replacement; confirm against upstream Django before changing.
            mtime -= stat.st_ctime
        if filename not in _mtimes:
            # First time we see this file: record a baseline, not a change.
            _mtimes[filename] = mtime
            continue
        if mtime != _mtimes[filename]:
            # A change invalidates all baselines; they are re-recorded on
            # the next call (normally after the process restarts).
            _mtimes = {}
            try:
                # The file imports cleanly now, so stop tracking it as broken.
                del _error_files[_error_files.index(filename)]
            except ValueError:
                pass
            return I18N_MODIFIED if filename.endswith('.mo') else FILE_MODIFIED
    return False
def check_errors(fn):
    """Wrap *fn* so exceptions it raises are recorded for the reloader.

    On error, the exception info is stashed in the module-global
    ``_exception`` and the file that caused it is added to ``_error_files``
    (so fixing that file triggers a reload); the exception is then re-raised
    unchanged. Note the wrapper discards *fn*'s return value.
    """
    def wrapper(*args, **kwargs):
        global _exception
        try:
            fn(*args, **kwargs)
        except Exception:
            _exception = sys.exc_info()
            _, exc_value, tb = _exception
            filename = getattr(exc_value, 'filename', None)
            if filename is None:
                # Fall back to the file of the innermost traceback frame.
                filename = traceback.extract_tb(tb)[-1][0]
            if filename not in _error_files:
                _error_files.append(filename)
            raise
    return wrapper
def raise_last_exception():
    """Re-raise the exception captured by check_errors(), if any."""
    global _exception
    if _exception is not None:
        six.reraise(*_exception)
def ensure_echo_on():
    """
    Re-enable terminal echo on stdin if a crashed child left it disabled.

    No-op when termios is unavailable (e.g. Windows) or stdin is not a tty.
    """
    if termios:
        fd = sys.stdin
        if fd.isatty():
            attr_list = termios.tcgetattr(fd)
            if not attr_list[3] & termios.ECHO:
                attr_list[3] |= termios.ECHO
                if hasattr(signal, 'SIGTTOU'):
                    # Ignore SIGTTOU while touching the terminal so a
                    # background process isn't stopped by the write.
                    old_handler = signal.signal(signal.SIGTTOU, signal.SIG_IGN)
                else:
                    old_handler = None
                termios.tcsetattr(fd, termios.TCSANOW, attr_list)
                if old_handler is not None:
                    signal.signal(signal.SIGTTOU, old_handler)
def reloader_thread():
    """Watch for source changes and react until RUN_RELOADER goes False.

    A plain code change exits the process with status 3 (the signal to
    restart_with_reloader() to respawn); a translation (.mo) change only
    flushes the translation caches in-process.
    """
    ensure_echo_on()
    checker = inotify_code_changed if USE_INOTIFY else code_changed
    while RUN_RELOADER:
        change = checker()
        if change == FILE_MODIFIED:
            sys.exit(3)  # force reload
        elif change == I18N_MODIFIED:
            reset_translations()
        time.sleep(1)
def restart_with_reloader():
    """Respawn the current command as a child process until it exits cleanly.

    The child is marked with RUN_MAIN=true so python_reloader() knows it is
    the worker. Returns the child's exit code once it is anything other than
    3 (the "please restart me" code used by reloader_thread()).
    """
    while True:
        args = [sys.executable]
        args.extend('-W%s' % opt for opt in sys.warnoptions)
        args.extend(sys.argv)
        if sys.platform == "win32":
            # spawnve on Windows needs each argument quoted.
            args = ['"%s"' % a for a in args]
        child_env = dict(os.environ)
        child_env["RUN_MAIN"] = 'true'
        exit_code = os.spawnve(os.P_WAIT, sys.executable, args, child_env)
        if exit_code != 3:
            return exit_code
def python_reloader(main_func, args, kwargs):
    """
    CPython reloader: a parent process respawns a child on every code change.

    The child (identified by RUN_MAIN=true in its environment) runs
    main_func in a background thread while its main thread watches files;
    the parent just loops in restart_with_reloader().
    """
    if os.environ.get("RUN_MAIN") == "true":
        # Child process: run the program and the file watcher.
        thread.start_new_thread(main_func, args, kwargs)
        try:
            reloader_thread()
        except KeyboardInterrupt:
            pass
    else:
        # Parent process: spawn children until one exits without requesting
        # a restart.
        try:
            exit_code = restart_with_reloader()
            if exit_code < 0:
                # Negative spawnve result means the child died from a
                # signal; replicate that death in the parent.
                os.kill(os.getpid(), -exit_code)
            else:
                sys.exit(exit_code)
        except KeyboardInterrupt:
            pass
def jython_reloader(main_func, args, kwargs):
    """
    Jython reloader: raises SystemRestart when code changes.

    NOTE(review): ``kwargs`` is accepted for signature parity with
    python_reloader but is not forwarded to main_func here — confirm
    against upstream before relying on keyword arguments on Jython.
    """
    from _systemrestart import SystemRestart
    thread.start_new_thread(main_func, args)
    while True:
        if code_changed():
            raise SystemRestart
        time.sleep(1)
def main(main_func, args=None, kwargs=None):
    """Run *main_func* under the platform-appropriate auto-reloader.

    *main_func* is wrapped with check_errors() so import-time failures are
    remembered and the offending files are watched for fixes.
    """
    if args is None:
        args = ()
    if kwargs is None:
        kwargs = {}
    on_jython = sys.platform.startswith('java')
    reloader = jython_reloader if on_jython else python_reloader
    reloader(check_errors(main_func), args, kwargs)
|
daisymax/nvda
|
refs/heads/master
|
source/appModules/miranda32.py
|
2
|
#appModules/miranda32.py
#A part of NonVisual Desktop Access (NVDA)
#Copyright (C) 2006-2012 NVDA Contributors
#This file is covered by the GNU General Public License.
#See the file COPYING for more details.
import ui
import config
from ctypes import *
from ctypes.wintypes import *
import winKernel
from NVDAObjects.IAccessible import IAccessible, ContentGenericClient
from NVDAObjects.behaviors import Dialog
import appModuleHandler
import speech
import braille
import controlTypes
from scriptHandler import isScriptWaiting
import api
import mouseHandler
import oleacc
from keyboardHandler import KeyboardInputGesture
import watchdog
#contact list window messages
CLM_FIRST=0x1000 #this is the same as LVM_FIRST
CLM_LAST=0x1100

#messages, compare with equivalent TVM_s in the MSDN
CLM_ENSUREVISIBLE=CLM_FIRST+6 #wParam=hItem, lParam=partialOk

# Expand states used with CLM_EXPAND and returned by CLM_GETEXPAND.
CLE_TOGGLE=-1
CLE_COLLAPSE=0
CLE_EXPAND=1
CLE_INVALID=0xFFFF

CLM_EXPAND=CLM_FIRST+7 #wParam=hItem, lParam=CLE_
CLM_FINDCONTACT=CLM_FIRST+8 #wParam=hContact, returns an hItem
CLM_FINDGROUP=CLM_FIRST+9 #wParam=hGroup, returns an hItem
CLM_GETBKCOLOR=CLM_FIRST+10 #returns a COLORREF
CLM_GETCHECKMARK=CLM_FIRST+11 #wParam=hItem, returns 1 or 0
CLM_GETCOUNT=CLM_FIRST+12 #returns the total number of items
CLM_GETEXPAND=CLM_FIRST+14 #wParam=hItem, returns a CLE_, CLE_INVALID if not a group
CLM_GETEXTRACOLUMNS=CLM_FIRST+15 #returns number of extra columns
CLM_GETEXTRAIMAGE=CLM_FIRST+16 #wParam=hItem, lParam=MAKELPARAM(iColumn (0 based),0), returns iImage or 0xFF
CLM_GETEXTRAIMAGELIST=CLM_FIRST+17 #returns HIMAGELIST
CLM_GETFONT=CLM_FIRST+18 #wParam=fontId, see clm_setfont. returns hFont.
CLM_GETINDENT=CLM_FIRST+19 #wParam=new group indent
CLM_GETISEARCHSTRING=CLM_FIRST+20 #lParam=(char*)pszStr, max 120 bytes, returns number of chars in string

# Buffer size (bytes) for CLM_GETITEMTEXT.
MAXITEMTEXTLEN=120
CLM_GETITEMTEXT=CLM_FIRST+21 #wParam=hItem, lParam=(char*)pszStr, max 120 bytes
CLM_GETSELECTION=CLM_FIRST+23 #returns hItem
CLM_SELECTITEM=CLM_FIRST+26 #wParam=hItem
CLM_GETHIDEOFFLINEROOT=CLM_FIRST+40 #returns TRUE/FALSE
CLM_GETEXSTYLE=CLM_FIRST+44 #returns CLS_EX_ flags
CLM_GETLEFTMARGIN=CLM_FIRST+46 #returns count of pixels

# Item types returned by CLM_GETITEMTYPE.
CLCIT_INVALID=-1
CLCIT_GROUP=0
CLCIT_CONTACT=1
CLCIT_DIVIDER=2
CLCIT_INFO=3
CLM_GETITEMTYPE=CLM_FIRST+49 #wParam=hItem, returns a CLCIT_

# Navigation flags for CLM_GETNEXTITEM.
CLGN_ROOT=0
CLGN_CHILD=1
CLGN_PARENT=2
CLGN_NEXT=3
CLGN_PREVIOUS=4
CLGN_NEXTCONTACT=5
CLGN_PREVIOUSCONTACT=6
CLGN_NEXTGROUP=7
CLGN_PREVIOUSGROUP=8
CLM_GETNEXTITEM=CLM_FIRST+50 #wParam=flag, lParam=hItem, returns an hItem
CLM_GETTEXTCOLOR=CLM_FIRST+51 #wParam=FONTID_, returns COLORREF

# Buffer size (bytes) for CLM_GETSTATUSMSG.
MAXSTATUSMSGLEN=256
CLM_GETSTATUSMSG=CLM_FIRST+105

#other constants
# Control IDs whose RichEdit20A windows hold ANSI text (see AppModule).
ANSILOGS=(1001,1006)
# Control IDs of the message viewer scrollbars watched for new messages.
MESSAGEVIEWERS=(1001,1005,5005)
class AppModule(appModuleHandler.AppModule):
    """App module for the Miranda IM instant messenger."""

    # windowHandle -> last seen text length, per message viewer window.
    lastTextLengths={}
    # Most recent incoming messages, newest first.
    lastMessages=[]
    # Number of messages kept/readable via NVDA+control+<digit>.
    # Must not be > 9.
    MessageHistoryLength=3

    def chooseNVDAObjectOverlayClasses(self, obj, clsList):
        # Map Miranda's custom window classes to the overlay classes below.
        if obj.role == controlTypes.ROLE_WINDOW:
            return
        windowClass = obj.windowClassName
        if windowClass == "CListControl":
            try:
                clsList.remove(ContentGenericClient)
            except ValueError:
                pass
            clsList.insert(0, mirandaIMContactList)
        elif windowClass in ("MButtonClass", "TSButtonClass", "CLCButtonClass"):
            clsList.insert(0, mirandaIMButton)
        elif windowClass == "Hyperlink":
            clsList.insert(0, mirandaIMHyperlink)
        elif isinstance(obj, IAccessible) and obj.IAccessibleRole == oleacc.ROLE_SYSTEM_PROPERTYPAGE:
            clsList.insert(0, MPropertyPage)
        elif isinstance(obj, IAccessible) and obj.IAccessibleRole == oleacc.ROLE_SYSTEM_SCROLLBAR and obj.windowControlID in MESSAGEVIEWERS:
            clsList.insert(0, MirandaMessageViewerScrollbar)
        elif windowClass == "ListBox" and obj.windowControlID == 0:
            clsList.insert(0, DuplicateFocusListBox)

    def event_NVDAObject_init(self,obj):
        if obj.windowClassName=="ColourPicker":
            obj.role=controlTypes.ROLE_COLORCHOOSER
        elif (obj.windowControlID in ANSILOGS) and (obj.windowClassName=="RichEdit20A"):
            # These log controls carry ANSI text despite the unicode window.
            obj._isWindowUnicode=False

    def script_readMessage(self,gesture):
        # The digit at the end of the gesture (NVDA+control+N) selects which
        # recent message to announce; 1 is the newest.
        num=int(gesture.mainKeyName[-1])
        if len(self.lastMessages)>num-1:
            ui.message(self.lastMessages[num-1])
        else:
            # Translators: This is presented to inform the user that no instant message has been received.
            ui.message(_("No message yet"))
    # Translators: The description of an NVDA command to view one of the recent messages.
    script_readMessage.__doc__=_("Displays one of the recent messages")

    def __init__(self, *args, **kwargs):
        super(AppModule, self).__init__(*args, **kwargs)
        # Bind NVDA+control+1..MessageHistoryLength to message review.
        for n in xrange(1, self.MessageHistoryLength + 1):
            self.bindGesture("kb:NVDA+control+%s" % n, "readMessage")
class mirandaIMContactList(IAccessible):
    """Overlay for Miranda's custom contact list control (CListControl)."""

    def _get_name(self):
        # Fetch the selected item's text (plus status message, if any) by
        # sending CLM_* messages and reading the result out of the remote
        # process's memory.
        hItem=watchdog.cancellableSendMessage(self.windowHandle,CLM_GETSELECTION,0,0)
        internalBuf=winKernel.virtualAllocEx(self.processHandle,None,MAXITEMTEXTLEN,winKernel.MEM_COMMIT,winKernel.PAGE_READWRITE)
        try:
            watchdog.cancellableSendMessage(self.windowHandle,CLM_GETITEMTEXT,hItem,internalBuf)
            buf=create_unicode_buffer(MAXITEMTEXTLEN)
            winKernel.readProcessMemory(self.processHandle,internalBuf,buf,MAXITEMTEXTLEN,None)
            text=buf.value
            statusMsgPtr=watchdog.cancellableSendMessage(self.windowHandle,CLM_GETSTATUSMSG,hItem,0)
            if statusMsgPtr>0:
                buf2=create_unicode_buffer(MAXSTATUSMSGLEN)
                winKernel.readProcessMemory(self.processHandle,statusMsgPtr,buf2,MAXSTATUSMSGLEN,None)
                text="%s %s"%(text,buf2.value)
        finally:
            # Always release the buffer allocated in the target process.
            winKernel.virtualFreeEx(self.processHandle,internalBuf,0,winKernel.MEM_RELEASE)
        return text

    def _get_role(self):
        hItem=watchdog.cancellableSendMessage(self.windowHandle,CLM_GETSELECTION,0,0)
        iType=watchdog.cancellableSendMessage(self.windowHandle,CLM_GETITEMTYPE,hItem,0)
        if iType==CLCIT_DIVIDER or iType==CLCIT_INVALID: #some clists treat invalid as divider
            return controlTypes.ROLE_SEPARATOR
        else:
            return controlTypes.ROLE_TREEVIEWITEM

    def _get_states(self):
        newStates=super(mirandaIMContactList,self)._get_states()
        hItem=watchdog.cancellableSendMessage(self.windowHandle,CLM_GETSELECTION,0,0)
        state=watchdog.cancellableSendMessage(self.windowHandle,CLM_GETEXPAND,hItem,0)
        if state==CLE_EXPAND:
            newStates.add(controlTypes.STATE_EXPANDED)
        elif state==CLE_COLLAPSE:
            newStates.add(controlTypes.STATE_COLLAPSED)
        return newStates

    def script_changeItem(self,gesture):
        # Pass the navigation key through, then announce the newly selected
        # item ourselves (presumably the control fires no usable selection
        # events).
        gesture.send()
        if not isScriptWaiting():
            api.processPendingEvents()
            speech.speakObject(self,reason=controlTypes.REASON_FOCUS)
            braille.handler.handleGainFocus(self)

    # Keys that move the selection within the contact list.
    __changeItemGestures = (
        "kb:downArrow",
        "kb:upArrow",
        "kb:leftArrow",
        "kb:rightArrow",
        "kb:home",
        "kb:end",
        "kb:pageUp",
        "kb:pageDown",
    )

    def initOverlayClass(self):
        for gesture in self.__changeItemGestures:
            self.bindGesture(gesture, "changeItem")
class mirandaIMButton(IAccessible):
    """Overlay for Miranda IM's custom button window classes."""

    def _get_name(self):
        # Hover the mouse over the button before asking for its name.
        api.moveMouseToNVDAObject(self)
        return super(mirandaIMButton, self)._get_name()

    def _get_role(self):
        return controlTypes.ROLE_BUTTON

    def getActionName(self):
        # Only focused buttons expose the default action.
        if controlTypes.STATE_FOCUSED in self.states:
            return "Click"

    def doAction(self):
        if controlTypes.STATE_FOCUSED in self.states:
            # Activate by emulating the space key.
            KeyboardInputGesture.fromName("space").send()

    def script_doDefaultAction(self, gesture):
        self.doAction()

    def initOverlayClass(self):
        self.bindGesture("kb:enter", "doDefaultAction")
class mirandaIMHyperlink(mirandaIMButton):
    # Behaves exactly like a Miranda button but reports itself as a link.
    def _get_role(self):
        return controlTypes.ROLE_LINK
class MPropertyPage(Dialog,IAccessible):
    """Property page that borrows its name from the selected tab when the
    page itself is unnamed."""

    def _get_name(self):
        name=super(MPropertyPage,self)._get_name()
        if not name:
            # Look for a sibling tab control and use its selected tab's label.
            try:
                tc=self.parent.next.firstChild
            except AttributeError:
                tc=None
            if tc and tc.role==controlTypes.ROLE_TABCONTROL:
                children=tc.children
                for index in xrange(len(children)):
                    if (children[index].role==controlTypes.ROLE_TAB) and (controlTypes.STATE_SELECTED in children[index].states):
                        name=children[index].name
                        break
        return name
class MirandaMessageViewerScrollbar(IAccessible):
    """Watches a message log's scrollbar to detect newly appended text."""

    def event_valueChange(self):
        # The scrollbar value changes when text is appended to the log;
        # diff the window text length against the last recorded length to
        # extract just the new message.
        curTextLength=len(self.windowText)
        if self.windowHandle not in self.appModule.lastTextLengths:
            # First event for this window: record a baseline only.
            self.appModule.lastTextLengths[self.windowHandle]=curTextLength
        elif self.appModule.lastTextLengths[self.windowHandle]<curTextLength:
            message=self.windowText[self.appModule.lastTextLengths[self.windowHandle]:]
            self.appModule.lastMessages.insert(0,message)
            # Trim the history to the configured maximum.
            self.appModule.lastMessages=self.appModule.lastMessages[:self.appModule.MessageHistoryLength]
            if config.conf["presentation"]["reportDynamicContentChanges"]:
                ui.message(message)
            self.appModule.lastTextLengths[self.windowHandle]=curTextLength
        super(MirandaMessageViewerScrollbar,self).event_valueChange()
class DuplicateFocusListBox(IAccessible):
    """A list box which annoyingly fires focus events every second, even when a menu is open.
    """

    def _get_shouldAllowIAccessibleFocusEvent(self):
        # Stop annoying duplicate focus events, which are fired even if a menu is open.
        focus = api.getFocusObject()
        focusRole = focus.role
        focusStates = focus.states
        # Suppress the event if focus hasn't actually moved, or if a menu is
        # currently active (focused menu item, or visible popup menu).
        if (self == focus or
            (focusRole == controlTypes.ROLE_MENUITEM and controlTypes.STATE_FOCUSED in focusStates) or
            (focusRole == controlTypes.ROLE_POPUPMENU and controlTypes.STATE_INVISIBLE not in focusStates)
        ):
            return False
        return super(DuplicateFocusListBox, self).shouldAllowIAccessibleFocusEvent
|
darjeeling/django
|
refs/heads/master
|
tests/string_lookup/models.py
|
106
|
from django.db import models
class Foo(models.Model):
    """Simple named model with an optional free-text friend field."""
    name = models.CharField(max_length=50)
    friend = models.CharField(max_length=50, blank=True)

    def __str__(self):
        return "Foo {}".format(self.name)
class Bar(models.Model):
    """Model with several relations, including string-named forward refs."""
    name = models.CharField(max_length=50)
    normal = models.ForeignKey(Foo, models.CASCADE, related_name='normal_foo')
    fwd = models.ForeignKey("Whiz", models.CASCADE)
    back = models.ForeignKey("Foo", models.CASCADE)

    def __str__(self):
        # Bug fix: this model has no ``place`` field, so the previous
        # ``self.place.name`` raised AttributeError whenever a Bar was
        # stringified. Report the instance's own name, consistent with the
        # other models in this module.
        return "Bar %s" % self.name
class Whiz(models.Model):
    """Named model referenced elsewhere by its string name ("Whiz")."""
    name = models.CharField(max_length=50)

    def __str__(self):
        return "Whiz {}".format(self.name)
class Child(models.Model):
    """Model with a one-to-one link to Base, declared via a string name."""
    parent = models.OneToOneField('Base', models.CASCADE)
    name = models.CharField(max_length=50)

    def __str__(self):
        return "Child {}".format(self.name)
class Base(models.Model):
    """Target of Child's one-to-one relation."""
    name = models.CharField(max_length=50)

    def __str__(self):
        return "Base {}".format(self.name)
class Article(models.Model):
    """Model exercising text and generic IP address fields."""
    name = models.CharField(max_length=50)
    text = models.TextField()
    submitted_from = models.GenericIPAddressField(blank=True, null=True)

    def __str__(self):
        return "Article {}".format(self.name)
|
hgl888/chromium-crosswalk-efl
|
refs/heads/efl/crosswalk-10/39.0.2171.19
|
media/cast/test/utility/netload.py
|
96
|
#!/usr/bin/env python
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
#
# Simple client/server script for generating an unlimited TCP stream.
# see shadow.sh for how it's intended to be used.
import socket
import sys
import thread
import time
# Global byte counters, updated by the worker threads and sampled by
# PrintStats() for throughput reporting.
sent = 0
received = 0
def Sink(socket):
    """Drain *socket*, adding every received byte to the global counter."""
    global received
    while True:
        chunk = socket.recv(4096)
        received += len(chunk)
        if not chunk:
            # Peer closed the connection.
            break
def Spew(socket):
    """Push filler data into *socket* forever, counting bytes sent."""
    global sent
    data = " " * 4096
    while True:
        written = socket.send(data)
        if written <= 0:
            # Send failed or connection closed.
            break
        sent += written
def PrintStats():
    """Report global send/receive throughput every 5 seconds, forever."""
    global sent
    global received
    last_report = time.time()
    last_sent = 0
    last_received = 0
    while True:
        time.sleep(5)
        now = time.time();
        # Snapshot both counters up front so the deltas are consistent.
        sent_now = sent
        received_now = received
        delta = now - last_report
        # Bytes -> megabits per second over the elapsed interval.
        sent_mbps = ((sent_now - last_sent) * 8.0 / 1000000) / delta
        received_mbps = ((received_now - last_received) * 8.0 / 1000000) / delta
        print "Sent: %5.2f mbps Received: %5.2f mbps" % (sent_mbps, received_mbps)
        last_report = now
        last_sent = sent_now
        last_received = received_now
def Serve(socket, upload=True, download=True):
    """Accept connections on *socket* forever, spawning worker threads."""
    while True:
        conn, addr = socket.accept()
        if upload:
            thread.start_new_thread(Spew, (conn,))
        if download:
            thread.start_new_thread(Sink, (conn,))
def Receiver(port, upload=True, download=True):
    """Listen on *port* and serve clients from a background thread."""
    listener = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    listener.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    listener.bind(('', port))
    listener.listen(5)
    thread.start_new_thread(Serve, (listener, upload, download))
def Connect(to_hostport, upload=True, download=False):
    """Connect to a (host, port) pair and start the requested workers."""
    conn = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    conn.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    conn.connect(to_hostport)
    if upload:
        thread.start_new_thread(Spew, (conn,))
    if download:
        thread.start_new_thread(Sink, (conn,))
def Usage():
print "One of:"
print "%s listen <port>" % sys.arv[0]
print "%s upload <host> <port>" % sys.arv[0]
print "%s download <host> <port>" % sys.arv[0]
print "%s updown <host> <port>" % sys.arv[0]
sys.exit(1)
# Command-line dispatch: choose a mode from argv, then loop in PrintStats()
# on the main thread while the worker threads move data.
if len(sys.argv) < 2:
    Usage()
if sys.argv[1] == "listen":
    Receiver(int(sys.argv[2]))
elif sys.argv[1] == "download":
    Connect((sys.argv[2], int(sys.argv[3])), upload=False, download=True)
elif sys.argv[1] == "upload":
    Connect((sys.argv[2], int(sys.argv[3])), upload=True, download=False)
elif sys.argv[1] == "updown":
    Connect((sys.argv[2], int(sys.argv[3])), upload=True, download=True)
else:
    Usage()
PrintStats()
|
loco-odoo/localizacion_co
|
refs/heads/master
|
openerp/addons-extra/asset_mrp/__openerp__.py
|
3
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2013-2014 CodUP (<http://codup.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
# OpenERP addon manifest: metadata, dependencies and data files for the
# "Assets & Manufacturing" integration module.
{
    'name': 'Assets & Manufacturing',
    'version': '1.1',
    'summary': 'Integrate Asset and Manufacturing',
    'description': """
Integrate manufacturing and maintenance asset management.
===========================
This module allows use the same Assets for manufacturing and maintenance purposes.
Integration take in account following assumption. In manufacturing,
Work Center can be simple logical entity, but also can reference
to equipment that is physical asset. Each physical asset must be maintenable, but
not each can be manufacturing equipment. So, when you create Work Center, you can
reference it to asset.
Keep one entity in one place for escape mistakes!
""",
    'author': 'CodUP',
    'website': 'http://codup.com',
    'images': ['static/description/icon.png'],
    'category': 'Enterprise Asset Management',
    'sequence': 0,
    # Requires both the asset and manufacturing addons to be installed.
    'depends': ['asset','mrp'],
    'demo': ['asset_demo.xml'],
    'data': [
        'security/ir.model.access.csv',
        'mrp_view.xml'
    ],
    'installable': True,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
LethusTI/supportcenter
|
refs/heads/master
|
vendor/django/django/core/management/__init__.py
|
78
|
import collections
import os
import sys
from optparse import OptionParser, NO_DEFAULT
import imp
import warnings
from django.core.management.base import BaseCommand, CommandError, handle_default_options
from django.core.management.color import color_style
from django.utils.importlib import import_module
# For backwards compatibility: get_version() used to be in this module.
from django import get_version
# A cache of loaded commands, so that call_command
# doesn't have to reload every time it's called.
# Maps command name -> app name (or a BaseCommand instance); lazily
# populated by get_commands().
_commands = None
def find_commands(management_dir):
    """
    Given a path to a management directory, returns a list of all the command
    names that are available.

    Returns an empty list if no commands are defined.
    """
    command_dir = os.path.join(management_dir, 'commands')
    try:
        entries = os.listdir(command_dir)
    except OSError:
        # Missing or unreadable directory: this app defines no commands.
        return []
    return [name[:-3] for name in entries
            if name.endswith('.py') and not name.startswith('_')]
def find_management_module(app_name):
    """
    Determines the path to the management module for the given app_name,
    without actually importing the application or the management module.

    Raises ImportError if the management module cannot be found for any reason.
    """
    # Walk the dotted path back-to-front via a reversed stack of parts,
    # appending 'management' as the final component to locate.
    parts = app_name.split('.')
    parts.append('management')
    parts.reverse()
    part = parts.pop()
    path = None

    # When using manage.py, the project module is added to the path,
    # loaded, then removed from the path. This means that
    # testproject.testapp.models can be loaded in future, even if
    # testproject isn't in the path. When looking for the management
    # module, we need look for the case where the project name is part
    # of the app_name but the project directory itself isn't on the path.
    try:
        f, path, descr = imp.find_module(part,path)
    except ImportError,e:
        # Tolerate a missing top-level package only when we're inside the
        # project directory of that name (see comment above).
        if os.path.basename(os.getcwd()) != part:
            raise e
    while parts:
        part = parts.pop()
        # Descend one dotted component at a time without importing anything.
        f, path, descr = imp.find_module(part, path and [path] or None)
    return path
def load_command_class(app_name, name):
    """
    Given a command name and an application name, returns the Command
    class instance. All errors raised by the import process
    (ImportError, AttributeError) are allowed to propagate.
    """
    module_path = '%s.management.commands.%s' % (app_name, name)
    return import_module(module_path).Command()
def get_commands():
    """
    Returns a dictionary mapping command names to their callback applications.

    This works by looking for a management.commands package in django.core, and
    in each installed application -- if a commands package exists, all commands
    in that package are registered.

    Core commands are always included. If a settings module has been
    specified, user-defined commands will also be included.

    The dictionary is in the format {command_name: app_name}. Key-value
    pairs from this dictionary can then be used in calls to
    load_command_class(app_name, command_name)

    If a specific version of a command must be loaded (e.g., with the
    startapp command), the instantiated module can be placed in the
    dictionary in place of the application name.

    The dictionary is cached on the first call and reused on subsequent
    calls.
    """
    global _commands
    if _commands is None:
        # Always start with Django's own core commands.
        _commands = dict([(name, 'django.core') for name in find_commands(__path__[0])])

        # Find the installed apps
        try:
            from django.conf import settings
            apps = settings.INSTALLED_APPS
        except (AttributeError, EnvironmentError, ImportError):
            # Settings missing or broken: expose only the core commands.
            apps = []

        # Find and load the management module for each installed app.
        for app_name in apps:
            try:
                path = find_management_module(app_name)
                # Later apps override earlier ones on name clashes.
                _commands.update(dict([(name, app_name)
                                       for name in find_commands(path)]))
            except ImportError:
                pass # No management module - ignore this app

    return _commands
def call_command(name, *args, **options):
    """
    Calls the given command, with the given options and args/kwargs.

    This is the primary API you should use for calling specific commands.

    Returns whatever the command's execute() returns. Raises CommandError
    for an unknown command name.

    Some examples:
        call_command('syncdb')
        call_command('shell', plain=True)
        call_command('sqlall', 'myapp')
    """
    # Load the command object.
    try:
        app_name = get_commands()[name]
        if isinstance(app_name, BaseCommand):
            # If the command is already loaded, use it directly.
            klass = app_name
        else:
            klass = load_command_class(app_name, name)
    except KeyError:
        raise CommandError("Unknown command: %r" % name)

    # Grab out a list of defaults from the options. optparse does this for us
    # when the script runs from the command line, but since call_command can
    # be called programatically, we need to simulate the loading and handling
    # of defaults (see #10080 for details).
    defaults = {}
    for opt in klass.option_list:
        if opt.default is NO_DEFAULT:
            defaults[opt.dest] = None
        else:
            defaults[opt.dest] = opt.default
    # Caller-supplied options take precedence over option defaults.
    defaults.update(options)

    return klass.execute(*args, **defaults)
class LaxOptionParser(OptionParser):
    """
    An option parser that doesn't raise any errors on unknown options.

    This is needed because the --settings and --pythonpath options affect
    the commands (and thus the options) that are available to the user.
    """
    def error(self, msg):
        # Deliberately swallow parse errors; unknown options are collected
        # in _process_args() instead of aborting the process.
        pass

    def print_help(self):
        """Output nothing.

        The lax options are included in the normal option parser, so under
        normal usage, we don't need to print the lax options.
        """
        pass

    def print_lax_help(self):
        """Output the basic options available to every command.

        This just redirects to the default print_help() behavior.
        """
        OptionParser.print_help(self)

    def _process_args(self, largs, rargs, values):
        """
        Overrides OptionParser._process_args to exclusively handle default
        options and ignore args and other options.

        This overrides the behavior of the super class, which stop parsing
        at the first unrecognized option.
        """
        while rargs:
            arg = rargs[0]
            try:
                if arg[0:2] == "--" and len(arg) > 2:
                    # process a single long option (possibly with value(s))
                    # the superclass code pops the arg off rargs
                    self._process_long_opt(rargs, values)
                elif arg[:1] == "-" and len(arg) > 1:
                    # process a cluster of short options (possibly with
                    # value(s) for the last one only)
                    # the superclass code pops the arg off rargs
                    self._process_short_opts(rargs, values)
                else:
                    # it's either a non-default option or an arg
                    # either way, add it to the args list so we can keep
                    # dealing with options
                    del rargs[0]
                    raise Exception
            except:
                # NOTE(review): this bare except looks deliberate — besides
                # optparse errors and the Exception raised just above, it
                # apparently must also swallow SystemExit raised by the
                # built-in --help/--version actions (error()/print_help()
                # are stubbed out above) so parsing can continue. Confirm
                # before narrowing it to ``except Exception``.
                largs.append(arg)
class ManagementUtility(object):
"""
Encapsulates the logic of the django-admin.py and manage.py utilities.
A ManagementUtility has a number of commands, which can be manipulated
by editing the self.commands dictionary.
"""
    def __init__(self, argv=None):
        # Copy the arguments (falling back to sys.argv) so later mutation
        # cannot affect the caller; prog_name is used in help/error output.
        self.argv = argv or sys.argv[:]
        self.prog_name = os.path.basename(self.argv[0])
    def main_help_text(self, commands_only=False):
        """
        Returns the script's main help text, as a string.

        If ``commands_only`` is True, just the sorted command names are
        returned, one per line.
        """
        if commands_only:
            usage = sorted(get_commands().keys())
        else:
            usage = [
                "",
                "Type '%s help <subcommand>' for help on a specific subcommand." % self.prog_name,
                "",
                "Available subcommands:",
            ]
            commands_dict = collections.defaultdict(lambda: [])
            # Group command names by the app that provides them; Django's
            # own commands are listed under "django".
            for name, app in get_commands().iteritems():
                if app == 'django.core':
                    app = 'django'
                else:
                    app = app.rpartition('.')[-1]
                commands_dict[app].append(name)
            style = color_style()
            for app in sorted(commands_dict.keys()):
                usage.append("")
                usage.append(style.NOTICE("[%s]" % app))
                for name in sorted(commands_dict[app]):
                    usage.append(" %s" % name)
        return '\n'.join(usage)
def fetch_command(self, subcommand):
"""
Tries to fetch the given subcommand, printing a message with the
appropriate command called from the command line (usually
"django-admin.py" or "manage.py") if it can't be found.
"""
try:
app_name = get_commands()[subcommand]
except KeyError:
sys.stderr.write("Unknown command: %r\nType '%s help' for usage.\n" % \
(subcommand, self.prog_name))
sys.exit(1)
if isinstance(app_name, BaseCommand):
# If the command is already loaded, use it directly.
klass = app_name
else:
klass = load_command_class(app_name, subcommand)
return klass
def autocomplete(self):
"""
Output completion suggestions for BASH.
The output of this function is passed to BASH's `COMREPLY` variable and
treated as completion suggestions. `COMREPLY` expects a space
separated string as the result.
The `COMP_WORDS` and `COMP_CWORD` BASH environment variables are used
to get information about the cli input. Please refer to the BASH
man-page for more information about this variables.
Subcommand options are saved as pairs. A pair consists of
the long option string (e.g. '--exclude') and a boolean
value indicating if the option requires arguments. When printing to
stdout, a equal sign is appended to options which require arguments.
Note: If debugging this function, it is recommended to write the debug
output in a separate file. Otherwise the debug output will be treated
and formatted as potential completion suggestions.
"""
# Don't complete if user hasn't sourced bash_completion file.
if 'DJANGO_AUTO_COMPLETE' not in os.environ:
return
cwords = os.environ['COMP_WORDS'].split()[1:]
cword = int(os.environ['COMP_CWORD'])
try:
curr = cwords[cword-1]
except IndexError:
curr = ''
subcommands = get_commands().keys() + ['help']
options = [('--help', None)]
# subcommand
if cword == 1:
print ' '.join(sorted(filter(lambda x: x.startswith(curr), subcommands)))
# subcommand options
# special case: the 'help' subcommand has no options
elif cwords[0] in subcommands and cwords[0] != 'help':
subcommand_cls = self.fetch_command(cwords[0])
# special case: 'runfcgi' stores additional options as
# 'key=value' pairs
if cwords[0] == 'runfcgi':
from django.core.servers.fastcgi import FASTCGI_OPTIONS
options += [(k, 1) for k in FASTCGI_OPTIONS]
# special case: add the names of installed apps to options
elif cwords[0] in ('dumpdata', 'reset', 'sql', 'sqlall',
'sqlclear', 'sqlcustom', 'sqlindexes',
'sqlreset', 'sqlsequencereset', 'test'):
try:
from django.conf import settings
# Get the last part of the dotted path as the app name.
options += [(a.split('.')[-1], 0) for a in settings.INSTALLED_APPS]
except ImportError:
# Fail silently if DJANGO_SETTINGS_MODULE isn't set. The
# user will find out once they execute the command.
pass
options += [(s_opt.get_opt_string(), s_opt.nargs) for s_opt in
subcommand_cls.option_list]
# filter out previously specified options from available options
prev_opts = [x.split('=')[0] for x in cwords[1:cword-1]]
options = filter(lambda (x, v): x not in prev_opts, options)
# filter options by current input
options = sorted([(k, v) for k, v in options if k.startswith(curr)])
for option in options:
opt_label = option[0]
# append '=' to options which require args
if option[1]:
opt_label += '='
print opt_label
sys.exit(1)
def execute(self):
"""
Given the command-line arguments, this figures out which subcommand is
being run, creates a parser appropriate to that command, and runs it.
"""
# Preprocess options to extract --settings and --pythonpath.
# These options could affect the commands that are available, so they
# must be processed early.
parser = LaxOptionParser(usage="%prog subcommand [options] [args]",
version=get_version(),
option_list=BaseCommand.option_list)
self.autocomplete()
try:
options, args = parser.parse_args(self.argv)
handle_default_options(options)
except:
pass # Ignore any option errors at this point.
try:
subcommand = self.argv[1]
except IndexError:
subcommand = 'help' # Display help if no arguments were given.
if subcommand == 'help':
if len(args) <= 2:
parser.print_lax_help()
sys.stdout.write(self.main_help_text() + '\n')
elif args[2] == '--commands':
sys.stdout.write(self.main_help_text(commands_only=True) + '\n')
else:
self.fetch_command(args[2]).print_help(self.prog_name, args[2])
elif subcommand == 'version':
sys.stdout.write(parser.get_version() + '\n')
# Special-cases: We want 'django-admin.py --version' and
# 'django-admin.py --help' to work, for backwards compatibility.
elif self.argv[1:] == ['--version']:
# LaxOptionParser already takes care of printing the version.
pass
elif self.argv[1:] in (['--help'], ['-h']):
parser.print_lax_help()
sys.stdout.write(self.main_help_text() + '\n')
else:
self.fetch_command(subcommand).run_from_argv(self.argv)
def setup_environ(settings_mod, original_settings_path=None):
    """
    Configures the runtime environment. This can also be used by external
    scripts wanting to set up a similar environment to manage.py.
    Returns the project directory (assuming the passed settings module is
    directly in the project directory).
    The "original_settings_path" parameter is optional, but recommended, since
    trying to work out the original path from the module can be problematic.
    """
    warnings.warn(
        "The 'setup_environ' function is deprecated, "
        "you likely need to update your 'manage.py'; "
        "please see the Django 1.4 release notes "
        "(https://docs.djangoproject.com/en/dev/releases/1.4/).",
        PendingDeprecationWarning)
    # Add this project to sys.path so that it's importable in the conventional
    # way. For example, if this file (manage.py) lives in a directory
    # "myproject", this code would add "/path/to/myproject" to sys.path.
    if '__init__.py' in settings_mod.__file__:
        # Settings is a package: use its containing directory.
        p = os.path.dirname(settings_mod.__file__)
    else:
        p = settings_mod.__file__
    project_directory, settings_filename = os.path.split(p)
    if project_directory == os.curdir or not project_directory:
        project_directory = os.getcwd()
    project_name = os.path.basename(project_directory)
    # Strip filename suffix to get the module name.
    settings_name = os.path.splitext(settings_filename)[0]
    # Strip $py for Jython compiled files (like settings$py.class)
    if settings_name.endswith("$py"):
        settings_name = settings_name[:-3]
    # Set DJANGO_SETTINGS_MODULE appropriately.
    if original_settings_path:
        os.environ['DJANGO_SETTINGS_MODULE'] = original_settings_path
    else:
        # If DJANGO_SETTINGS_MODULE is already set, use it.
        os.environ['DJANGO_SETTINGS_MODULE'] = os.environ.get(
            'DJANGO_SETTINGS_MODULE',
            '%s.%s' % (project_name, settings_name)
        )
    # Import the project module. We add the parent directory to PYTHONPATH to
    # avoid some of the path errors new users can have.
    sys.path.append(os.path.join(project_directory, os.pardir))
    import_module(project_name)
    sys.path.pop()
    return project_directory
def execute_from_command_line(argv=None):
    """Entry point: build a ManagementUtility from *argv* and run it."""
    ManagementUtility(argv).execute()
def execute_manager(settings_mod, argv=None):
    """
    Like execute_from_command_line(), but for use by manage.py, a
    project-specific django-admin.py utility.

    Deprecated since Django 1.4; emits a PendingDeprecationWarning.
    """
    warnings.warn(
        "The 'execute_manager' function is deprecated, "
        "you likely need to update your 'manage.py'; "
        "please see the Django 1.4 release notes "
        "(https://docs.djangoproject.com/en/dev/releases/1.4/).",
        PendingDeprecationWarning)
    # Configure DJANGO_SETTINGS_MODULE/sys.path, then dispatch the command.
    setup_environ(settings_mod)
    ManagementUtility(argv).execute()
|
DailyActie/md2sql
|
refs/heads/master
|
examples/20170510-software.esciencecenter.nl-gh-pages/estep/utils.py
|
1
|
# Copyright 2016 Netherlands eScience Center
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import requests
from requests.packages.urllib3 import Retry
import rfc3987
import requests
import mimetypes
import os
class AbstractValidator(object):
    """Base class for document validators.

    Subclasses implement iter_errors(); validate() raises the first error
    yielded, and finalize() may return errors found across all documents.
    """
    def validate(self, instance):
        # Only the first problem is surfaced; iteration stops at the raise.
        for err in self.iter_errors(instance):
            raise err

    def finalize(self):
        """Hook for checks that run after every document has been seen."""
        return []

    def iter_errors(self, instance):
        raise NotImplementedError
class ValidationError(ValueError):
    """Validation failure for one property of one document.

    Carries the document and property names so the error can be rendered
    as ``<path>:<property>: <message>``.
    """
    def __init__(self, message, document_name, property_name):
        super(ValidationError, self).__init__(message)
        self.message = message
        self.document_name = document_name
        self.property_name = property_name
    def __str__(self):
        # Render the document URL as its repository file path.
        return '{}:{}: {}'.format(
            url_to_path(parse_url(self.document_name)), self.property_name,
            self.message)
    def __repr__(self):
        return "ValidationError('{}', '{}', '{}')".format(
            self.message, self.document_name, self.property_name)
    def __eq__(self, other):
        # Bug fix: comparing against a non-ValidationError used to raise
        # AttributeError; returning NotImplemented lets Python fall back to
        # the default comparison (== yields False) instead of crashing.
        if not isinstance(other, ValidationError):
            return NotImplemented
        return (self.message == other.message and
                self.document_name == other.document_name and
                self.property_name == other.property_name)
def check_internal_url(url):
    """Raise ValueError unless *url* (a parsed-URL dict) is internal."""
    if is_internal_url(url):
        return
    raise ValueError('Url {} is not internal'
                     .format(rfc3987.compose(**url)))
def is_internal_url(url):
    """Return True when *url* (a parsed-URL dict) points at this site.

    For internal URLs, also enforce that the path is absolute and that the
    scheme is plain http (the site is not served over https for now).
    """
    internal = url['authority'] in (None, 'software.esciencecenter.nl')
    if internal:
        if not url['path'].startswith('/'):
            raise ValueError('Path {} must start with /'
                             .format(rfc3987.compose(**url)))
        if url['scheme'] == 'https':
            raise ValueError('For the time being, use http instead of https '
                             'prefixes for http://software.esciencecenter.nl')
    return internal
def absolute_url(url):
    """Return *url* as an absolute http URL on software.esciencecenter.nl."""
    parsed = parse_url(url)
    if parsed['authority'] is None:
        # Relative/internal link: pin it to the canonical host over http.
        parsed['scheme'] = 'http'
        parsed['authority'] = 'software.esciencecenter.nl'
    return rfc3987.compose(**parsed)
def url_to_path(url):
    """Map an internal parsed URL to its markdown file path ('_<path>.md')."""
    check_internal_url(url)
    relative = url['path'].lstrip('/')
    return '_{}.md'.format(relative)
def url_to_schema(url):
    """Return the JSON schema URL for an internal document URL."""
    check_internal_url(url)
    base = 'http://software.esciencecenter.nl/schema/'
    return base + url_to_collection_name(url)
def url_to_collection_name(url):
    """Return the first path segment, e.g. '/software/x' -> 'software'."""
    segments = url['path'].split('/')
    return segments[1]
def parse_url(url):
    """Split *url* into its RFC 3987 components (scheme, authority, path, ...)."""
    return rfc3987.parse(url)
def download_file(url, base_path):
    """Download *url* to *base_path* plus an inferred file extension.

    The extension is taken from the URL; failing that, from the response's
    Content-Type header via mimetypes.

    Returns the path written. Raises requests.HTTPError for bad statuses and
    ValueError when no extension can be determined.
    """
    r = requests.get(url)
    r.raise_for_status()
    ext = os.path.splitext(url)[1]
    if not ext:
        content_type = r.headers.get('content-type')
        if content_type is None:
            raise ValueError("Cannot determine file type of {}".format(url))
        ext = mimetypes.guess_extension(content_type.split(';')[0])
        # Bug fix: guess_extension() returns None for unknown MIME types,
        # which previously produced ``base_path + None`` -> TypeError.
        if ext is None:
            raise ValueError("Cannot determine file type of {}".format(url))
    path = base_path + ext
    with open(path, 'wb') as f:
        f.write(r.content)
    return path
def retrying_http_session(retries=3):
    """Return a requests session that retries on DNS, connection and read
    errors, and also when the server responds with a 500 status code.

    Args:
        retries (int): Number of retries for a request

    Returns:
        requests.Session()
    """
    adapter = requests.adapters.HTTPAdapter()
    adapter.max_retries = Retry(total=retries, status_forcelist=[500])
    session = requests.Session()
    # Apply the retrying adapter to both schemes.
    for prefix in ('http://', 'https://'):
        session.mount(prefix, adapter)
    return session
|
troygrosfield/Django-facebook
|
refs/heads/master
|
docs/docs_env/Lib/encodings/rot_13.py
|
497
|
#!/usr/bin/env python
""" Python Character Mapping Codec for ROT13.
See http://ucsub.colorado.edu/~kominek/rot13/ for details.
Written by Marc-Andre Lemburg (mal@lemburg.com).
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
    """Stateless ROT13 codec backed by the module-level charmap tables."""

    def encode(self, input, errors='strict'):
        return codecs.charmap_encode(input, errors, encoding_map)

    def decode(self, input, errors='strict'):
        return codecs.charmap_decode(input, errors, decoding_map)
class IncrementalEncoder(codecs.IncrementalEncoder):
    """Incremental ROT13 encoder; stateless, so each chunk encodes alone."""

    def encode(self, input, final=False):
        encoded, _length = codecs.charmap_encode(input, self.errors, encoding_map)
        return encoded
class IncrementalDecoder(codecs.IncrementalDecoder):
    """Incremental ROT13 decoder; stateless, so each chunk decodes alone."""

    def decode(self, input, final=False):
        decoded, _length = codecs.charmap_decode(input, self.errors, decoding_map)
        return decoded
class StreamWriter(Codec, codecs.StreamWriter):
    """StreamWriter applying ROT13 via the shared Codec.encode."""
    pass
class StreamReader(Codec, codecs.StreamReader):
    """StreamReader applying ROT13 via the shared Codec.decode."""
    pass
### encodings module API
def getregentry():
    """Return the CodecInfo used to register this codec as 'rot-13'."""
    codec = Codec()
    return codecs.CodecInfo(
        name='rot-13',
        encode=codec.encode,
        decode=codec.decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamwriter=StreamWriter,
        streamreader=StreamReader,
    )
### Decoding Map
decoding_map = codecs.make_identity_dict(range(256))
# ROT13 rotates each ASCII letter 13 places within its own case range and
# leaves every other byte unchanged. Generating the 52 substitutions
# programmatically replaces the previous hand-written (and hard-to-audit)
# 52-entry literal table with identical contents.
decoding_map.update({
    code: (code - base + 13) % 26 + base
    for base in (0x41, 0x61)  # 'A' and 'a'
    for code in range(base, base + 26)
})
### Encoding Map
# ROT13 is an involution, so the encoding map mirrors the decoding map.
encoding_map = codecs.make_encoding_map(decoding_map)
### Filter API
def rot13(infile, outfile):
    # ROT13-transform everything read from infile and write it to outfile.
    # Python 2 only: str.encode('rot-13') resolves through the encodings
    # registry; under Python 3 text-to-text codecs cannot be used via
    # str.encode (codecs.encode would be needed instead).
    outfile.write(infile.read().encode('rot-13'))
if __name__ == '__main__':
    # Filter stdin to stdout when run as a script.
    import sys
    rot13(sys.stdin, sys.stdout)
|
zclfly/cgt
|
refs/heads/master
|
cgt/compilation.py
|
3
|
from . import core, utils
import cgt
import ctypes, os.path as osp, hashlib, numpy as np, sys, subprocess, string, os, time, traceback, cPickle
from collections import defaultdict, namedtuple
from StringIO import StringIO
import logging
def function(inputs, outputs, dbg=None, updates=None, givens=None):
    """Compile the expression graph into a callable.

    ``outputs`` may be a single Node or a list of Nodes; the compiled
    function's return value mirrors that choice (single value vs. list).
    """
    assert isinstance(inputs, list), "Inputs must be a list"
    assert all(el.is_argument() for el in inputs), "Invalid input: should be a list of Argument nodes"
    if isinstance(outputs, core.Node):
        # Single output: compile the list form and unwrap the first result.
        listout = _function_listout(inputs, [outputs], dbg, updates, givens)
        def single_out(*args):
            return listout(*args)[0]
        return single_out
    if isinstance(outputs, list):
        assert all(isinstance(el, core.Node) for el in outputs), "Invalid output: should all be symbolic variables"
        return _function_listout(inputs, outputs, dbg, updates, givens)
    raise ValueError("Expected `outputs` to be a Node or a list of Nodes. Got an object of type %s"%type(outputs))
def _function_listout(inputs, outputs, dbg = None, updates=None, givens=None):
    # Validate arguments, then run the full compilation pipeline; always
    # returns a callable whose result is a list (one entry per output).
    if updates is None: updates = []
    else: assert (isinstance(updates, list) and
        all(isinstance(a,tuple) and len(a)==2
            and isinstance(a[0], core.Node) and isinstance(a[1], core.Node)
            for a in updates)), "updates should be a list of pairs (before, after)"
    if givens is None: givens = []
    # NOTE(review): this branch re-validates `updates`, not `givens`, even
    # though it only runs when `givens` is supplied — looks like it was
    # meant to check the givens pairs; confirm intent before changing.
    else: assert all(before.is_data() for (before,_) in updates), "lhs of updates must be Data instances"
    if dbg: raise core.Todo("debug functionality is broken")
    # Plain python tuples among the outputs are wrapped as symbolic tuples
    # so the pipeline only ever sees Nodes.
    outputs = [cgt.make_tuple(*x) if isinstance(x, tuple) else x for x in outputs]
    interp = run_compilation_pipeline(inputs, outputs, updates, givens)
    return interp
# ================================================================
# Execution
# ================================================================
def python_only():
    """True when the compiled extension module (cgt.cycgt) is unavailable."""
    has_native = hasattr(cgt, "cycgt")
    return not has_native
def determine_devices(nodes_sorted, updatetarg2src):
    """Assign an execution device to every node in the graph.

    Constraints come from each Op's available implementations, from data
    nodes (which carry their own device) and from update pairs (a target
    must live on the same device as its source).

    Args:
        nodes_sorted: topologically sorted list of graph nodes.
        updatetarg2src: dict mapping update-target node -> source node.

    Returns:
        dict mapping node -> core.Device.
    """
    # Op definitions (available impls, inplace-ness, etc) define constraints
    # on possible devices for a node
    if python_only():
        # Bug fix: this previously called the unqualified name ``Device``,
        # which is not defined in this module (NameError at runtime); the
        # class lives in ``core``.
        return {node: core.Device() for node in nodes_sorted}
    # (1) Get available devices for nodes, determined by which impls are available and node types
    compile_info = get_compile_info()
    cuda_enabled = compile_info["CGT_ENABLE_CUDA"]
    node2dev = {}
    home_device = core.Device(devtype="cpu", idx=0)
    for node in nodes_sorted:
        default_device = node.props.get("default_device", home_device)
        if node in updatetarg2src:
            # An update target shares the device of its update source.
            device = node2dev[updatetarg2src[node]]
        elif node.is_data():
            device = node.op.device
        elif node.is_argument():
            device = home_device
        else:
            # Prefer the GPU impl when the node asks for gpu or has no
            # native cpu impl; otherwise fall back to cpu.
            if "native_gpu" in node.op.available_impls and (default_device.devtype == "gpu" or "native_cpu" not in node.op.available_impls):
                assert cuda_enabled, "trying to put op on gpu but cuda is disabled"
                device = core.Device("gpu", default_device.idx)
            else:
                device = core.Device(devtype="cpu", idx=default_device.idx)
        node2dev[node] = device
    return node2dev
def is_tensor(x):
    """True when node *x* carries a TensorType (array) value."""
    return isinstance(x.typ, core.TensorType)
def is_tuple(x):
    """True when node *x* carries a TupleType (composite) value."""
    return isinstance(x.typ, core.TupleType)
def create_interpreter(inputs, outputs, eg, node2memloc):
    # Choose the interpreter implementation (python vs native, serial vs
    # parallel) from the global cgt config and bind it to the execution
    # graph plus the memory locations holding the requested outputs.
    assert isinstance(eg, ExecutionGraph)
    input_types = [input.typ for input in inputs] #pylint: disable=W0622
    output_locs = [node2memloc[node] for node in outputs]
    config = cgt.get_config()
    backend = config["backend"]
    parallel = config["parallel"]
    if backend == "python":
        if parallel:
            raise NotImplementedError("For parallel=True, set backend=native")
            # return ParallelInterpreter(eg, output_locs, input_types)
        else:
            return SequentialInterpreter(eg, output_locs, input_types)
    elif backend == "native":
        if parallel:
            # A positive thread count selects the multithreaded interpreter.
            return cgt.cycgt.CppInterpreterWrapper(eg, input_types, output_locs, config["num_threads"])
        else:
            return cgt.cycgt.CppInterpreterWrapper(eg, input_types, output_locs, 0)
    else:
        raise NotImplementedError("invalid backend %s"%backend)
def topsorted_shapes_first(outputs, node2shape):
    # Almost identical to topsorted(...) function
    # But we also need to visit the shape elements of an in-place node
    # before visiting that node
    # Iterative DFS with an explicit stack. marks: 0 = unvisited,
    # 1 = in progress (used for cycle detection), 2 = finished.
    marks = {}
    out = []
    stack = []
    for x in outputs:
        stack.append((x,0))
    while stack:
        # jidx is the index of the next dependency of node i to visit.
        (i,jidx) = stack.pop()
        if jidx == 0:
            m = marks.get(i,0)
            if m == 0:
                marks[i] = 1
            elif m == 1:
                raise ValueError("not a dag")
            else:
                continue
        ps = i.parents
        ###### Changed part ######
        if i.ndim > 0 and not i.is_input() and i.op.return_type=="byref":
            # In-place (byref) tensor nodes additionally depend on their
            # shape-element nodes, which must be computed first.
            if i in node2shape:
                shpels = node2shape[i]
            else:
                raise core.Unreachable
            # shpels = i.op.shp_apply(i.parents)
            ps = ps + shpels
        elif is_tuple(i):
            # Tuple nodes depend on the shape elements of every component.
            for arrshp in node2shape[i]:
                ps = ps + arrshp
        ##########################
        if jidx == len(ps):
            # All dependencies processed: the node is finished and emitted.
            marks[i] = 2
            out.append(i)
        else:
            stack.append((i,jidx+1))
            j = ps[jidx]
            stack.append((j,0))
    return out
def determine_memowner(nodes_sorted, updates, node2dev):
    # For each node, decide whose buffer its result is written into
    # (its "memory owner"), enabling in-place reuse of parent buffers.
    # First determine how many "child" nodes each node has
    node2child = defaultdict(list)
    for node in nodes_sorted:
        for parent in node.parents:
            node2child[parent].append(node)
    # Now traverse graph again and see where we can use the same memory
    node2memowner = {} # mapping node x -> the node that owns its memory
    # For updates, memlocation(RHS) = memlocation(LHS)
    after2before = {after:before for (before,after) in updates}
    enable_inplace_opt = core.get_config()["enable_inplace_opt"]
    for node in nodes_sorted:
        base = node # by default,
        if node.is_argument():
            pass
        elif node.op.writes_to_input >= 0:
            # The op declares it writes into one of its inputs: inherit
            # that input's memory owner.
            base = node2memowner[node.parents[node.op.writes_to_input]]
        elif node in after2before:
            base = after2before[node]
        elif enable_inplace_opt and node.op.return_type == "byref": # TODO think about if we need any other conditions
            nodeshape = node.op.shp_apply(node.parents)
            for parent in node.parents:
                # Reuse a parent's buffer only when this node is its sole
                # consumer and shape/dtype match.
                if (len(node2child[parent])==1
                        and nodeshape==cgt.shape(parent) # XXX not a very robust way to check
                        and node.dtype == parent.dtype
                        and _is_data_mutable(parent)):
                    base = parent
                    break
        # TODO: add optimization for in-place incrementing
        node2memowner[node] = base
    return node2memowner
class MemCounter(object):
    """
    Hands out `MemLocation`s with consecutive indices 0, 1, ...
    The `count` attribute records how many have been issued so far.
    """
    def __init__(self):
        # Next index to hand out == number of locations issued so far.
        self.count = 0
    def new_memloc(self, devtype):
        loc = MemLocation(self.count, devtype)
        self.count += 1
        return loc
def create_execution_graph(inputs, nodes_sorted, node2shape, node2memowner, node2dev):
    # Lower the topologically sorted expression graph into a flat list of
    # interpreter instructions plus a node -> memory-location mapping.
    # node2impltype = copy.copy(node2impltype) # we'll insert transport ops
    instrs = []
    counter = MemCounter()
    node2memloc = {}
    for node in nodes_sorted:
        # Nodes created after device targeting may lack a device entry;
        # default them to cpu on the first parent's device index.
        if node not in node2dev: node2dev[node] = core.Device(devtype="cpu",idx=node2dev[node.parents[0]].idx if len(node.parents)>0 else 0)
        if node.is_argument():
            write_loc = counter.new_memloc(node2dev[node].devtype)
            node2memloc[node] = write_loc
            i = inputs.index(node)
            instrs.append(LoadArgument(i, write_loc))
        else:
            read_locs = [node2memloc[parent] for parent in node.parents]
            if node.op.return_type == "byref":
                if node2memowner[node] is node:
                    # Node owns its memory: emit Alloc instruction(s) first.
                    if is_tensor(node): # just make one memory location for output
                        nodeshape = node2shape[node] if node.ndim > 0 else []
                        shape_locs = [node2memloc[shpel] for shpel in nodeshape]
                        write_loc = counter.new_memloc(node2dev[node].devtype)
                        instrs.append(Alloc(node.dtype, shape_locs, write_loc))
                    else: # if it's a tuple, we need to allocate all of the components, then build tuple
                        nodeshape = node2shape[node]
                        assert isinstance(nodeshape, tuple)
                        arr_locs = []
                        for (arrshp, arrtyp) in utils.safezip(nodeshape, node.typ):
                            arr_loc = counter.new_memloc(node2dev[node].devtype)
                            shape_locs = [node2memloc[shpel] for shpel in arrshp]
                            instrs.append(Alloc(arrtyp.dtype, shape_locs, arr_loc))
                            arr_locs.append(arr_loc)
                        write_loc = counter.new_memloc(node2dev[node].devtype)
                        instrs.append(BuildTup(node.typ, arr_locs, write_loc))
                else:
                    # If this node writes to another node's memory, the devices must be the same
                    # this should have been enforced in determine_devices()
                    assert node2dev[node] == node2dev[node2memowner[node]]
                    write_loc = node2memloc[node2memowner[node]]
                instrs.append(ReturnByRef(node.op, [par.typ for par in node.parents], read_locs, write_loc, node_props=node.props))
            else:
                assert node.op.return_type == "byval"
                write_loc = counter.new_memloc(node2dev[node].devtype)
                instrs.append(ReturnByVal(node.op, [par.typ for par in node.parents], read_locs, write_loc, node_props=node.props))
        node2memloc[node] = write_loc
    return ExecutionGraph(instrs, len(inputs), counter.count), node2memloc
def get_callable(op, input_types, devtype, prefer_python=False):
    # Resolve an Op to a concrete callable (python or native), honoring the
    # global backend config and the Op's declared available_impls.
    assert op.available_impls, "need to set op.available_impls"
    config = core.get_config()
    if (prefer_python or config["force_python_impl"]) and "python" in op.available_impls:
        return op.get_py_callable(input_types)
    elif config["backend"] == "python":
        if "python" in op.available_impls:
            return op.get_py_callable(input_types)
        else:
            assert devtype=="cpu", "can't use devtype=gpu with python backend"
            if "native_cpu" in op.available_impls:
                return get_native_callable(op, input_types, "cpu")
            else:
                raise RuntimeError("Can't find an implementation of %s suitable for python backend. Just have available_impls=%s"%(op,op.available_impls))
    else: # backend = native
        if devtype == "cpu":
            if "native_cpu" in op.available_impls:
                return get_native_callable(op, input_types, "cpu")
            else:
                # No native cpu impl: fall back to python with a notice.
                print "using python impl for",op
                return op.get_py_callable(input_types)
        else:
            if "native_gpu" in op.available_impls:
                return get_native_callable(op, input_types, "gpu")
            else:
                raise RuntimeError("Tried to put Op %s on the GPU but I only have a python impl :("%op)
def get_native_callable(op, input_types, devtype):
    """Build a native (C++/CUDA) callable for *op* on the given device type."""
    nci = op.get_native_compile_info(input_types, devtype)
    # Stash metadata the callable wrapper needs at call time.
    nci.n_in = len(input_types)
    nci.op_str = str(op)
    nci.return_type = op.return_type
    return nci2callable(nci)
def add_transports(nodelist, node2dev, node2shape):
    # Insert Transport nodes wherever a child lives on a different device
    # than its parent, so data is copied across devices at most once per
    # (node, target-device) pair. Mutates node2dev/node2shape in place.
    node2child = defaultdict(list)
    for node in nodelist:
        for par in node.parents:
            node2child[par].append(node)
    # XXX look at native compilation info, gpu deref mask
    for node in nodelist:
        dev = node2dev[node]
        dev2copy = {}  # target device -> the single copy node made for it
        for child in node2child[node]:
            childdev = node2dev[child]
            if not childdev == dev:
                if childdev not in dev2copy:
                    # First consumer on this device: create the transport.
                    nodecopy = core.Result(core.Transport(childdev), [node])
                    node2dev[nodecopy] = childdev
                    dev2copy[childdev] = nodecopy
                    node2shape[nodecopy] = node2shape[node]
                replace_parents(child, node, dev2copy[childdev])
def replace_parents(node, before, after):
    """Swap, in place, every parent of *node* identical to *before* for *after*.

    Comparison is by identity (``is``), not equality, since graph nodes may
    define value-based comparison. The parents list object is preserved.
    """
    parents = node.parents
    idx = 0
    while idx < len(parents):
        if parents[idx] is before:
            parents[idx] = after
        idx += 1
def run_compilation_pipeline(inputs, outputs, updates, givens):
    """
    Compiles the expression graph into an execution graph.

    Returns the interpreter object produced by create_interpreter(), which
    is the callable handed back to the user.
    """
    config = core.get_config()
    # Phase 1: simplification and analysis of expression graph
    # ------------------------------------------------------
    # Add add update targets to outputs
    logging.info("Simplification")
    outputs_updatetargs = outputs + [after for (_before, after) in updates]
    if givens: outputs_updatetargs = core.clone(outputs_updatetargs, dict(givens))
    # Do simplification + analysis pass on expression graph
    outputs_updatetargs_simple, analysis, _ = \
        core.simplify_and_analyze(outputs_updatetargs) if config["enable_simplification"] \
        else (outputs_updatetargs, core.analyze(outputs_updatetargs), {})
    # Phase 2: device targeting
    # ------------------------------------------------------
    logging.info("Device targeting")
    outputs_updatetargs_simple = cgt.core.clone(outputs_updatetargs_simple)
    analysis = core.analyze(outputs_updatetargs_simple)
    # XXX inefficient to just copy the graph and redo analysis
    nodelist = core.topsorted(outputs_updatetargs_simple)
    updatesrcs = [before for (before, _) in updates]
    # Update targets sit at the tail of the combined output list.
    updatetargs_simple = outputs_updatetargs_simple[len(outputs):]
    node2dev = determine_devices(nodelist, {targ:src for (src,targ) in zip(updatesrcs, updatetargs_simple)})
    add_transports(nodelist, node2dev, analysis["node2shape"])
    # Phase 3: build execution graph
    # ------------------------------------------------------
    # Sort nodes so that shape elements appear before a given node
    logging.info("Build execution graph")
    nodes_sorted = topsorted_shapes_first(outputs_updatetargs_simple, analysis["node2shape"])
    # For each node, figure out if its output should be written to a previous node's memory
    # (memowner : "memory owner")
    updatetargs_simple = outputs_updatetargs_simple[len(outputs):]
    node2memowner = determine_memowner(nodes_sorted, zip(updatesrcs, updatetargs_simple), node2dev)
    # Find the outputs we want to return
    outputs_simple = outputs_updatetargs_simple[:len(outputs)] # get rid
    # Generate execution graph
    eg, node2memloc = create_execution_graph(
        inputs, nodes_sorted, analysis["node2shape"], node2memowner, node2dev)
    # print execution graph
    if config["verbose"]:
        print 'begin'
        print '\n'.join(str(i)+'.) \t'+repr(instr) for (i,instr) in enumerate(eg.instrs))
        print 'end'
    # Phase 4: create C or Python interpreter for graph
    # ------------------------------------------------------
    interp = create_interpreter(inputs, outputs_simple, eg, node2memloc)
    # Done!
    return interp
# ================================================================
# Simple numeric eval via traversal
# ================================================================
def numeric_eval(output, arg2val):
    """
    Numerically evaluate symbolic variables without any compilation, by
    assigning each argument a value (via `arg2val`) and traversing the
    computation graph.

    Inputs
    ------
    output: symbolic variable or list of variables to evaluate
    arg2val: dict assigning each argument that output depends on to a value

    Returns
    -------
    Numeric value, or list of numeric values, matching the shape of `output`
    """
    if isinstance(output, core.Node):
        # Single node: evaluate as a one-element list and unwrap.
        return _numeric_eval_listout([output], arg2val)[0]
    if isinstance(output, list):
        assert all(isinstance(x, core.Node) for x in output), "expected a list of Nodes"
        return _numeric_eval_listout(output, arg2val)
    raise ValueError("expected `output` to be a Node or a list of Nodes. Got an object of type %s"%type(output))
def _numeric_eval_listout(outputs, arg2val):
    """
    Evaluate `outputs` numerically by walking the graph in topological order.
    `arg2val` maps each Argument node to its numerical value.
    """
    assert isinstance(outputs, list)
    assert isinstance(arg2val, dict)
    node2val = {}
    for node in core.topsorted(outputs):
        if node.is_argument():
            value = arg2val[node]
        elif node.is_data():
            value = node.op.get_value()
        else:
            # All parents precede the node in topological order, so their
            # values are already available.
            parent_values = [node2val[p] for p in node.parents]
            value = core.py_numeric_apply(node, parent_values)
        node2val[node] = value
        # assert node.get_ndim() == np.array(node2val[node]).ndim
    return [node2val[node] for node in outputs]
################################################################
### Execution graph
################################################################
# A memory location paired with how an instruction accesses it.
MemInfo = namedtuple("MemInfo",["loc","access"])
# Access-mode constants for MemInfo.access.
MEM_OVERWRITE = "overwrite"
MEM_INCREMENT = "increment"
class ExecutionGraph(object):
    """Flat instruction list plus sizing info for the interpreter.

    Attributes:
        instrs: ordered list of instructions to execute.
        n_args: number of function arguments expected.
        n_locs: total number of memory locations referenced.
    """
    def __init__(self, instrs, n_args, n_locs):
        self.instrs = instrs
        self.n_args = n_args
        self.n_locs = n_locs
class MemLocation(object):
    """A slot in the interpreter's memory table, pinned to a device type."""
    def __init__(self, idx, devtype):
        assert isinstance(idx, int) and devtype in ["cpu", "gpu"]
        self.index = idx
        self.devtype = devtype
        # TODO: dtype
    def __repr__(self):
        # e.g. %3/cpu
        return "%%%i/%s" % (self.index, self.devtype)
# ================================================================
# Instructions
# ================================================================
class Instr(object):
    """Abstract interpreter instruction; subclasses implement fire(interp)."""
    def fire(self, interp):
        raise NotImplementedError
class LoadArgument(Instr):
    """Copy function argument number ``ind`` into ``write_loc``."""
    def __init__(self, ind, write_loc):
        self.ind = ind
        self.write_loc = write_loc
    def fire(self, interp):
        value = interp.getarg(self.ind)
        interp.set(self.write_loc, value)
    def __repr__(self):
        return "%s = LoadArg ind:%i" % (self.write_loc, self.ind)
class Alloc(Instr):
    """Ensure ``write_loc`` holds an array of the shape given by ``read_locs``.

    Reallocates only when the existing buffer is missing or mis-shaped.
    """
    def __init__(self, dtype, read_locs, write_loc):
        self.dtype = dtype
        self.read_locs = read_locs
        self.write_loc = write_loc
    def fire(self, interp):
        wanted_shape = tuple(interp.get(loc) for loc in self.read_locs)
        current = interp.get(self.write_loc)
        if current is None or current.shape != wanted_shape:
            # NOTE(review): filled with np.ones rather than np.empty —
            # presumably deliberate (e.g. to expose uninitialized reads);
            # confirm before changing.
            interp.set(self.write_loc, np.ones(wanted_shape, self.dtype))
    def __repr__(self):
        return "%s = Alloc shp:%s dtype:%s" % (self.write_loc, str(self.read_locs), self.dtype)
class BuildTup(Instr):
    """Gather the values at ``read_locs`` into a tuple stored at ``write_loc``."""
    def __init__(self, typ, read_locs, write_loc):
        self.typ = typ
        self.read_locs = read_locs
        self.write_loc = write_loc
    def fire(self, interp):
        components = tuple(interp.get(loc) for loc in self.read_locs)
        interp.set(self.write_loc, components)
    def __repr__(self):
        return "%s = BuildTup args:%s" % (self.write_loc, str(self.read_locs))
class ReturnByRef(Instr):
    """Run an op that writes its result into a preallocated buffer.

    The underlying callable is resolved lazily on first fire() and cached.
    """
    def __init__(self, op, input_types, read_locs, write_loc, node_props=None):
        self.op = op
        self.input_types = input_types
        self.read_locs = read_locs
        self.write_loc = write_loc
        self.node_props = node_props
        self._callable = None  # resolved lazily
    def fire(self, interp):
        if self._callable is None:
            self._callable = self.get_callable()
        args = [interp.get(mem) for mem in self.read_locs]
        self._callable.call(args, interp.get(self.write_loc))
    def get_callable(self):
        return get_callable(self.op, self.input_types, self.write_loc.devtype)
    def __repr__(self):
        return "%s = ReturnByRef op:%s args:%s" % (self.write_loc, str(self.op), str(self.read_locs))
class ReturnByVal(Instr):
    """Run an op that returns its result, storing it at ``write_loc``.

    The underlying callable is resolved lazily on first fire() and cached.
    """
    def __init__(self, op, input_types, read_locs, write_loc, node_props=None):
        self.op = op
        self.input_types = input_types
        self.read_locs = read_locs
        self.write_loc = write_loc
        self.node_props = node_props
        self._callable = None  # resolved lazily
    def fire(self, interp):
        if self._callable is None:
            self._callable = self.get_callable()
        args = [interp.get(mem) for mem in self.read_locs]
        interp.set(self.write_loc, self._callable.call(args))
    def get_callable(self):
        return get_callable(self.op, self.input_types, self.write_loc.devtype)
    def __repr__(self):
        return "%s = ReturnByVal op:%s args:%s" % (self.write_loc, str(self.op), str(self.read_locs))
# ================================================================
# Compiling native code
# ================================================================
def nci2callable(nci):
    # Turn a NativeCompileInfo into a callable: generate templated C++/CUDA
    # source, compile it into a cached shared library (keyed by a content
    # hash), then wrap the exported function pointers.
    template_code = gen_templated_code(nci.includes, nci.closure_triples, nci.func_code)
    compile_info = get_compile_info()
    # The content hash doubles as the cache key and the unique symbol prefix.
    prefix = utils.hash_seq1(template_code, compile_info["CPP_FLAGS"], *(src.code for src in nci.extra_srcs))
    d = dict(function=_funcname(prefix), closure=_closurename(prefix),setup=_setupname(prefix),teardown=_teardownname(prefix))
    fn_srcfile = core.SrcFile("c++",string.Template(template_code).substitute(d))
    srcfiles = [fn_srcfile]
    srcfiles.extend(core.SrcFile(sf.lang, string.Template(sf.code).substitute(d)) for sf in nci.extra_srcs)
    CACHE_ROOT = compile_info["CACHE_ROOT"]
    libpath = osp.join(CACHE_ROOT, prefix+".so")
    if not osp.exists(libpath):
        # Cache miss: compile and link into the cache directory.
        tu = TranslationUnit(srcfiles, nci.link_flags)
        tu.compile(prefix, libpath)
    lib = get_or_load_lib(libpath)
    fptr = getattr(lib, _funcname(prefix))
    # Optional per-op setup/teardown hooks, present only when declared.
    setup_fptr = getattr(lib, _setupname(prefix)) if nci.setup else None
    teardown_fptr = getattr(lib, _teardownname(prefix)) if nci.teardown else None
    cldata = _build_closure(nci.closure_triples)
    return core.NativeCallable(nci.n_in, nci.return_type, nci.op_str, fptr, cldata=cldata, setup_fptr=setup_fptr, teardown_fptr=teardown_fptr,
        store_objects=nci.store_objects)
def _funcname(prefix):
    """Symbol name of the generated entry-point function."""
    return "call_%s" % prefix

def _setupname(prefix):
    """Symbol name of the generated setup function."""
    return "setup_%s" % prefix

def _teardownname(prefix):
    """Symbol name of the generated teardown function."""
    return "teardown_%s" % prefix

def _closurename(prefix):
    """Name of the generated closure struct."""
    return "closure_%s" % prefix
def gen_templated_code(includes, closure_info, func_code):
    """Assemble the full templated source: #include lines, the closure
    struct definition, then the op's function code."""
    out = StringIO()
    # cgt_common.h always comes first, followed by the op's own headers.
    for header in ["cgt_common.h"] + includes:
        out.write('#include "%s"\n' % header)
    gen_struct_code(closure_info, out)
    out.write(func_code)
    return out.getvalue()
def gen_struct_code(triples, outstream):
    """Emit a C typedef for the closure struct described by `triples`
    ((fieldname, ctype, value) tuples) into `outstream`. The struct name
    is left as the $closure template placeholder. No-op when there is no
    closure."""
    if triples is None:
        return
    outstream.write("typedef struct $closure {\n")
    for (fieldname, fieldtype, _unused_val) in triples:
        outstream.write("%s %s;\n" % (_ctypes2str[fieldtype], fieldname))
    outstream.write("} $closure;\n")
_LIBRARIES = {}  # libpath -> loaded ctypes CDLL, so each .so is loaded only once

def get_or_load_lib(libname):
    """Load the shared library at `libname`, memoizing by path."""
    try:
        return _LIBRARIES[libname]
    except KeyError:
        lib = ctypes.cdll.LoadLibrary(libname)
        _LIBRARIES[libname] = lib
        return lib
class TranslationUnit(object):
    """All the input that goes into building a native binary for one or more ops"""
    def __init__(self, srcfiles, link_flags):
        # srcfiles: sequence of (lang, code) source-file pairs (core.SrcFile)
        # link_flags: extra flags appended at link time
        self.srcfiles = srcfiles
        self.link_flags = link_flags
    def compile(self, prefix, libpath):
        """
        Compiles all of the files, places them in the cache directory
        Then links them creating prefix.so
        """
        CACHE_ROOT = get_compile_info()["CACHE_ROOT"]
        cmds = ["cd %s"%CACHE_ROOT]
        objs = []
        for (i,(lang,code)) in enumerate(self.srcfiles):
            # Pick compiler + file extension per source language.
            if lang=="c++":
                srcpath = osp.join(CACHE_ROOT, prefix+"_%i.cpp"%i)
                cmds.append(_make_cpp_compile_cmd(srcpath))
            elif lang=="cuda":
                srcpath = osp.join(CACHE_ROOT, prefix+"_%i.cu"%i)
                cmds.append(_make_cuda_compile_cmd(srcpath))
            else:
                raise NotImplementedError
            # Write the source next to where it will be compiled.
            with open(srcpath,"w") as fh: fh.write(code)
            objs.append(srcpath+".o")
        cmds.append(_make_link_cmd(objs, self.link_flags, libpath))
        # Chain compile + link with && so the first failure aborts the rest.
        bigcmd = " && ".join(cmds)
        call_and_print(bigcmd)
_COMPILE_CONFIG = None
def get_compile_info():
global _COMPILE_CONFIG
if _COMPILE_CONFIG is None:
config = core.get_config()
CGT_BUILD_ROOT = cgt.cycgt.cgt_build_root() #pylint: disable=E1101
cmake_info = {}
with open(osp.join(CGT_BUILD_ROOT,"build_info.txt")) as fh:
lines = fh.readlines()
for line in lines:
if ":=" not in line: print "skipping",line
lhs,rhs = line.split(":=")
lhs = lhs.strip()
rhs = rhs.strip()
cmake_info[lhs] = rhs
CUDA_ROOT = cmake_info["CUDA_ROOT"]
CGT_ENABLE_CUDA = cmake_info["CGT_ENABLE_CUDA"] in ["1","ON"]
CGT_ENABLE_CUDNN = cmake_info["CGT_ENABLE_CUDNN"] in ["1","ON"]
DEFINITIONS = "-DENABLE_CUDA" if CGT_ENABLE_CUDA else ""
CUDNN_ROOT = cmake_info["CUDNN_ROOT"]
_COMPILE_CONFIG = dict(
OPENBLAS_INCLUDE_DIR = osp.join(CGT_BUILD_ROOT,"OpenBLAS"),
CGT_INCLUDE_DIR = cmake_info["CGT_INCLUDE_DIR"],
CGT_LIBRARY_DIR = osp.join(CGT_BUILD_ROOT,"lib"),
CUDA_LIBRARY_DIR = osp.join(CUDA_ROOT,"lib"),
CUDA_INCLUDE_DIR = osp.join(CUDA_ROOT,"include"),
CUDA_LIBRARIES = cmake_info["CUDA_LIBRARIES"],
DEFINITIONS = DEFINITIONS,
CUDA_ROOT = CUDA_ROOT,
CUDNN_ROOT = CUDNN_ROOT,
CACHE_ROOT = osp.expanduser(config["cache_dir"]),
CGT_ENABLE_CUDA = CGT_ENABLE_CUDA,
CGT_ENABLE_CUDNN = CGT_ENABLE_CUDNN,
# CGT_LIBRARY = cmake_info["CGT_LIBRARY"],
)
includes = "-I"+_COMPILE_CONFIG["CGT_INCLUDE_DIR"]
includes += " -I"+_COMPILE_CONFIG["OPENBLAS_INCLUDE_DIR"]
link_flags = ""
if _COMPILE_CONFIG["CGT_ENABLE_CUDA"]: includes += " -I"+_COMPILE_CONFIG["CUDA_INCLUDE_DIR"]
if _COMPILE_CONFIG["CGT_ENABLE_CUDNN"]: includes += " -I"+_COMPILE_CONFIG["CUDNN_ROOT"]
_COMPILE_CONFIG["INCLUDES"] = includes
link_flags = "-lcgt -L"+_COMPILE_CONFIG["CGT_LIBRARY_DIR"]
if _COMPILE_CONFIG["CGT_ENABLE_CUDA"]: link_flags += " -L"+_COMPILE_CONFIG["CUDA_LIBRARY_DIR"]
if _COMPILE_CONFIG["CGT_ENABLE_CUDNN"]:
link_flags += " -L"+_COMPILE_CONFIG["CUDNN_ROOT"]
link_flags += " -Wl,-rpath,"+_COMPILE_CONFIG["CUDNN_ROOT"]
if sys.platform == "darwin":
link_flags += " -dynamiclib -Wl,-headerpad_max_install_names"
else:
link_flags += " -shared -rdynamic"
_COMPILE_CONFIG["LINK_FLAGS"] = link_flags
cpp_flags = "-fvisibility=hidden -std=c++11 -fPIC" + (" -O0 -g" if config["debug_cpp"] else " -O3 -DNDEBUG")
if sys.platform == "darwin": cpp_flags += " -stdlib=libc++"
_COMPILE_CONFIG["CPP_FLAGS"] = cpp_flags
CACHE_ROOT = _COMPILE_CONFIG["CACHE_ROOT"]
if not osp.exists(CACHE_ROOT):
os.makedirs(CACHE_ROOT)
return _COMPILE_CONFIG
def _make_cpp_compile_cmd(srcpath):
    """Shell command compiling one C++ source file to `srcpath`.o."""
    info = get_compile_info()
    return "c++ %s %s -c -o %s.o %s %s" % (
        info["CPP_FLAGS"], srcpath, srcpath, info["INCLUDES"], info["DEFINITIONS"])
def _make_cuda_compile_cmd(srcpath):
    """Shell command compiling one CUDA source file to `srcpath`.o via nvcc,
    forwarding position-independent/optimization flags to the host compiler."""
    info = get_compile_info()
    return ("nvcc %s -c -o %s.o -ccbin cc -m64 -Xcompiler -fPIC -Xcompiler -O3 "
        "-Xcompiler -arch -Xcompiler x86_64 %s %s") % (
        srcpath, srcpath, info["INCLUDES"], info["DEFINITIONS"])
def _make_link_cmd(objs, extra_link_flags, libpath):
    """Shell command linking object files into the shared library at `libpath`."""
    info = get_compile_info()
    # On OSX, bake the install name so the dylib is located by basename.
    iname = "-install_name %s" % osp.basename(libpath) if sys.platform == "darwin" else ""
    link_flags = info["LINK_FLAGS"] + " " + extra_link_flags
    return "c++ %s %s %s %s -o %s" % (
        info["CPP_FLAGS"], " ".join(objs), link_flags, iname, libpath)
def call_and_print(cmd):
    """Echo `cmd` in green, then run it through the shell, raising
    CalledProcessError on a nonzero exit status."""
    print("\x1b[32m%s\x1b[0m" % cmd)
    subprocess.check_call(cmd, shell=True)
# Map from ctypes scalar type to the C type name emitted into generated
# closure-struct code (see gen_struct_code).
_ctypes2str = {
    # NOTE(review): ctypes.c_byte is signed; "uint8_t" matches its size but
    # not its signedness -- confirm this mapping is intentional.
    ctypes.c_byte : "uint8_t",
    ctypes.c_bool : "bool",
    ctypes.c_char : "char",
    ctypes.c_int : "int",
    ctypes.c_long : "long",
    ctypes.c_void_p : "void*",
    ctypes.c_double : "double",
    ctypes.c_float : "float"
}
_struct_cache = {} # because creating ctypes.Structure class is slow for some reason

def _build_closure(triples):
    """Build the ctypes closure struct instance described by `triples`
    ((fieldname, ctype, value) tuples), or a NULL pointer when there is no
    closure. Structure classes are cached by their pickled field layout."""
    if triples is None:
        return ctypes.c_void_p(0)
    fields = [(name, ftype) for (name, ftype, _val) in triples]
    vals = [val for (_name, _ftype, val) in triples]
    key = cPickle.dumps(fields)
    if key in _struct_cache:
        S = _struct_cache[key]
    else:
        class S(ctypes.Structure):
            _fields_ = fields
        _struct_cache[key] = S
    return S(*vals)
################################################################
### Interpreters
################################################################
class Interpreter(object):
    # Abstract base for execution-graph interpreters. Subclasses implement
    # the call protocol plus storage and argument access.
    def __call__(self, args):
        raise NotImplementedError
    def get(self, mem):
        # Read the value stored at memory location `mem`.
        raise NotImplementedError
    def set(self, mem, val):
        # Write `val` to memory location `mem`.
        raise NotImplementedError
    def getarg(self, i):
        # Fetch the i-th argument of the current call.
        raise NotImplementedError
class SequentialInterpreter(Interpreter):
    """
    Runs an execution graph by firing its instructions one at a time,
    keeping intermediate values in a flat storage list indexed by
    memory-location index.
    """
    def __init__(self, eg, output_locs, input_types, copy_outputs=True):
        # eg: execution graph with .instrs and .n_locs
        # output_locs: memory locations holding the final outputs
        # copy_outputs: copy results so the next call can't clobber them
        self.eg = eg
        self.input_types = input_types
        self.output_locs = output_locs
        self.storage = [None for _ in xrange(self.eg.n_locs)]
        self.args = None
        self.copy_outputs = copy_outputs
    def __call__(self, *args):
        """Validate/convert the inputs, fire every instruction in order,
        and return the (optionally copied) output values."""
        assert len(args) == len(self.input_types), "Wrong number of inputs provided"
        self.args = tuple(core.as_valid_array(arg, intype) for (arg, intype) in zip(args, self.input_types))
        for instr in self.eg.instrs:
            if profiler.on: tstart = time.time()
            try:
                instr.fire(self)
            except Exception:
                traceback.print_exc()
                if isinstance(instr, (ReturnByRef,ReturnByVal)):
                    if core.get_config()["debug"]:
                        assert "stack" in instr.node_props
                        utils.colorprint(utils.Color.MAGENTA, "HERE'S THE STACK WHEN THE OFFENDING NODE WAS CREATED\n",o=sys.stderr)
                        print>>sys.stderr, ">>>>>>>>>>>>>>>>>>>>>>>>>>"
                        traceback.print_list(instr.node_props["stack"])
                        print>>sys.stderr, "<<<<<<<<<<<<<<<<<<<<<<<<<<"
                        # BUG FIX: use bare `raise` (here and below) instead of
                        # `raise e`, which in Python 2 re-raised with a fresh
                        # traceback pointing at this line rather than the
                        # original failure site.
                        raise
                    else:
                        utils.error("Didn't save the stack so I can't give you a nice traceback :(. Try running with CGT_FLAGS=debug=True")
                        raise
                else:
                    utils.error("Oy vey, an exception occurred in a %s Instruction. I don't know how to help you debug this one right now :(."%type(instr))
                    raise
            if profiler.on: profiler.update(instr, time.time()-tstart)
        outputs = [self.get(loc) for loc in self.output_locs]
        # need to copy because otherwise we might mess up the data when we call func again
        # todo: add option that prevents this behavior
        if self.copy_outputs: outputs = map(_copy, outputs)
        return outputs
    def get(self, mem):
        # Read the value stored at memory location `mem`.
        return self.storage[mem.index]
    def set(self, mem, val):
        # Write `val` to memory location `mem`.
        self.storage[mem.index] = val
    def getarg(self, i):
        # Fetch the i-th (validated) argument of the current call.
        return self.args[i]
# ================================================================
# Profiler
# ================================================================
class _Profiler(object):
    """
    Profiler for Python backend, i.e. Interpreter
    """
    def __init__(self):
        # instr2stats: instruction -> (fire count, total elapsed seconds)
        self.instr2stats = {}
        self.on = False
        self.t_total = 0.0
    def start(self): self.on = True
    def stop(self): self.on = False
    def update(self, instr, elapsed):
        # Accumulate one timed firing of `instr`.
        (prevcount, prevtime) = self.instr2stats.get(instr, (0,0.0))
        self.instr2stats[instr] = (prevcount+1, prevtime+elapsed)
        self.t_total += elapsed
    def print_stats(self):
        # Aggregate per-instruction stats by op and print a summary table.
        op2stats = {}
        # Collapse by Op, rather than instruction
        for (instr,(count,t)) in self.instr2stats.iteritems():
            if isinstance(instr, (ReturnByRef, ReturnByVal)):
                opkey = str(instr.op)
            elif isinstance(instr, Alloc):
                opkey = "Alloc{dtype=%s,ndim=%i}"%(instr.dtype, len(instr.read_locs))
            else:
                opkey = instr.__class__.__name__
            (prevcount, prevtime) = op2stats.get(opkey, (0, 0.0))
            op2stats[opkey] = (prevcount+count, prevtime+t)
        print "Total time elapsed: %.3g seconds"%self.t_total
        # _print_heading("By instruction")
        # _print_stats(self.instr2stats, self.t_total)
        _print_heading("By Op")
        _print_stats(op2stats, self.t_total)
    def clear_stats(self):
        self.instr2stats = {}
        self.t_total = 0.0
# Module-level singleton consulted by the interpreters.
profiler = _Profiler()
def _print_heading(heading):
    # Print `heading` centered inside a 60-character banner of asterisks.
    heading = " " + heading + " "
    width = 60
    assert len(heading) < width-10
    print
    print "*"*width
    padleft = (width-len(heading))//2
    padright = width-len(heading)-padleft
    print "*"*padleft + heading + "*"*padright
    print "*"*width
def _print_stats(key2stats, t_total):
    # Tabulate {key: (count, time)} stats sorted by total time (slowest
    # first), along with each key's fraction of total runtime and the
    # cumulative fraction.
    rows = []
    for (key, (count,t)) in key2stats.iteritems():
        rows.append([str(key), count, t, t/t_total])
    rows = sorted(rows, key=lambda row: row[2], reverse=True)
    cumsum = 0
    for row in rows:
        cumsum += row[3]
        row.append(cumsum)
    from thirdparty.tabulate import tabulate
    print tabulate(rows, headers=["Instruction","Count","Time","Frac","Frac cumsum"])
def _copy(x):
    """Defensive copy of an interpreter output: ndarray, tuple of ndarrays,
    or scalar. Scalars are immutable so they are returned as-is."""
    if isinstance(x, np.ndarray):
        return x.copy()
    if isinstance(x, tuple):
        return tuple(el.copy() for el in x)
    if np.isscalar(x):
        return x  # xxx is this case ok?
    raise NotImplementedError
def typecheck_args(numargs, types):
    """Assert that the numeric arguments `numargs` match the expected
    `types`: same count, and matching dtype/ndim for tensor types."""
    assert len(numargs)==len(types), "wrong number of arguments. got %i, expected %i"%(len(numargs),len(types))
    for (arg, typ) in zip(numargs, types):
        if not isinstance(typ, core.TensorType):
            continue
        assert arg.dtype==typ.dtype and arg.ndim==typ.ndim
# ================================================================
# Utils
# ================================================================
def _list_to_json(xs):
    """Serialize each element of `xs` via its to_json() method."""
    return [item.to_json() for item in xs]
def _is_data_mutable(node):
    """A node's data may be overwritten unless it is a graph input or a
    constant."""
    if node.is_input():
        return False
    return not isinstance(node.op, core.Constant)
|
jhona22baz/blog-flask
|
refs/heads/master
|
python2.7/lib/python2.7/site-packages/pip/cmdoptions.py
|
361
|
"""
shared options and groups
The principle here is to define options once, but *not* instantiate them globally.
One reason being that options with action='append' can carry state between parses.
pip parses general options twice internally, and shouldn't pass on state.
To be consistent, all options will follow this design.
"""
import copy
from optparse import OptionGroup, SUPPRESS_HELP, Option
from pip.locations import build_prefix, default_log_file
def make_option_group(group, parser):
    """
    Return an OptionGroup object
    group  -- assumed to be dict with 'name' and 'options' keys
    parser -- an optparse Parser
    """
    og = OptionGroup(parser, group['name'])
    # Each entry is an OptionMaker; instantiate a fresh Option per parse.
    for maker in group['options']:
        og.add_option(maker.make())
    return og
class OptionMaker(object):
    """Stores the args/kwargs for constructing an optparse Option, building
    a fresh Option on demand. Deepcopies on each make() so options with
    action='append' never leak state between parses."""

    def __init__(self, *args, **kwargs):
        self.args = args
        self.kwargs = kwargs

    def make(self):
        """Return a brand-new Option built from deep copies of the stored
        arguments."""
        return Option(*copy.deepcopy(self.args), **copy.deepcopy(self.kwargs))
###########
# options #
###########

# Each option is an OptionMaker, instantiated per-parse via make().

help_ = OptionMaker(
    '-h', '--help',
    dest='help',
    action='help',
    help='Show help.')

require_virtualenv = OptionMaker(
    # Run only if inside a virtualenv, bail if not.
    '--require-virtualenv', '--require-venv',
    dest='require_venv',
    action='store_true',
    default=False,
    help=SUPPRESS_HELP)

verbose = OptionMaker(
    '-v', '--verbose',
    dest='verbose',
    action='count',
    default=0,
    help='Give more output. Option is additive, and can be used up to 3 times.')

version = OptionMaker(
    '-V', '--version',
    dest='version',
    action='store_true',
    help='Show version and exit.')

quiet = OptionMaker(
    '-q', '--quiet',
    dest='quiet',
    action='count',
    default=0,
    help='Give less output.')

log = OptionMaker(
    '--log',
    dest='log',
    metavar='path',
    help='Path to a verbose appending log. This log is inactive by default.')

log_explicit_levels = OptionMaker(
    # Writes the log levels explicitly to the log
    '--log-explicit-levels',
    dest='log_explicit_levels',
    action='store_true',
    default=False,
    help=SUPPRESS_HELP)

log_file = OptionMaker(
    # The default log file
    '--log-file', '--local-log',
    dest='log_file',
    metavar='path',
    default=default_log_file,
    help='Path to a verbose non-appending log, that only logs failures. This log is active by default at %default.')

no_input = OptionMaker(
    # Don't ask for input
    '--no-input',
    dest='no_input',
    action='store_true',
    default=False,
    help=SUPPRESS_HELP)

proxy = OptionMaker(
    '--proxy',
    dest='proxy',
    type='str',
    default='',
    help="Specify a proxy in the form [user:passwd@]proxy.server:port.")

timeout = OptionMaker(
    '--timeout', '--default-timeout',
    metavar='sec',
    dest='timeout',
    type='float',
    default=15,
    help='Set the socket timeout (default %default seconds).')

default_vcs = OptionMaker(
    # The default version control system for editables, e.g. 'svn'
    '--default-vcs',
    dest='default_vcs',
    type='str',
    default='',
    help=SUPPRESS_HELP)

skip_requirements_regex = OptionMaker(
    # A regex to be used to skip requirements
    '--skip-requirements-regex',
    dest='skip_requirements_regex',
    type='str',
    default='',
    help=SUPPRESS_HELP)

exists_action = OptionMaker(
    # Option when path already exist
    '--exists-action',
    dest='exists_action',
    type='choice',
    choices=['s', 'i', 'w', 'b'],
    default=[],
    action='append',
    metavar='action',
    help="Default action when a path already exists: "
    "(s)witch, (i)gnore, (w)ipe, (b)ackup.")

cert = OptionMaker(
    '--cert',
    dest='cert',
    type='str',
    default='',
    metavar='path',
    help="Path to alternate CA bundle.")

# Package-index related options follow.

index_url = OptionMaker(
    '-i', '--index-url', '--pypi-url',
    dest='index_url',
    metavar='URL',
    default='https://pypi.python.org/simple/',
    help='Base URL of Python Package Index (default %default).')

extra_index_url = OptionMaker(
    '--extra-index-url',
    dest='extra_index_urls',
    metavar='URL',
    action='append',
    default=[],
    help='Extra URLs of package indexes to use in addition to --index-url.')

no_index = OptionMaker(
    '--no-index',
    dest='no_index',
    action='store_true',
    default=False,
    help='Ignore package index (only looking at --find-links URLs instead).')

find_links = OptionMaker(
    '-f', '--find-links',
    dest='find_links',
    action='append',
    default=[],
    metavar='url',
    help="If a url or path to an html file, then parse for links to archives. If a local path or file:// url that's a directory, then look for archives in the directory listing.")

# TODO: Remove after 1.6
use_mirrors = OptionMaker(
    '-M', '--use-mirrors',
    dest='use_mirrors',
    action='store_true',
    default=False,
    help=SUPPRESS_HELP)

# TODO: Remove after 1.6
mirrors = OptionMaker(
    '--mirrors',
    dest='mirrors',
    metavar='URL',
    action='append',
    default=[],
    help=SUPPRESS_HELP)

allow_external = OptionMaker(
    "--allow-external",
    dest="allow_external",
    action="append",
    default=[],
    metavar="PACKAGE",
    help="Allow the installation of externally hosted files",
)

allow_all_external = OptionMaker(
    "--allow-all-external",
    dest="allow_all_external",
    action="store_true",
    default=False,
    help="Allow the installation of all externally hosted files",
)

# Remove after 1.7
no_allow_external = OptionMaker(
    "--no-allow-external",
    dest="allow_all_external",
    action="store_false",
    default=False,
    help=SUPPRESS_HELP,
)

# Remove --allow-insecure after 1.7
allow_unsafe = OptionMaker(
    "--allow-unverified", "--allow-insecure",
    dest="allow_unverified",
    action="append",
    default=[],
    metavar="PACKAGE",
    help="Allow the installation of insecure and unverifiable files",
)

# Remove after 1.7
no_allow_unsafe = OptionMaker(
    "--no-allow-insecure",
    dest="allow_all_insecure",
    action="store_false",
    default=False,
    help=SUPPRESS_HELP
)

# Remove after 1.5
process_dependency_links = OptionMaker(
    "--process-dependency-links",
    dest="process_dependency_links",
    action="store_true",
    default=False,
    help="Enable the processing of dependency links.",
)

requirements = OptionMaker(
    '-r', '--requirement',
    dest='requirements',
    action='append',
    default=[],
    metavar='file',
    help='Install from the given requirements file. '
    'This option can be used multiple times.')

use_wheel = OptionMaker(
    '--use-wheel',
    dest='use_wheel',
    action='store_true',
    help=SUPPRESS_HELP,
)

no_use_wheel = OptionMaker(
    '--no-use-wheel',
    dest='use_wheel',
    action='store_false',
    default=True,
    help=('Do not Find and prefer wheel archives when searching indexes and '
    'find-links locations.'),
)

download_cache = OptionMaker(
    '--download-cache',
    dest='download_cache',
    metavar='dir',
    default=None,
    help='Cache downloaded packages in <dir>.')

no_deps = OptionMaker(
    '--no-deps', '--no-dependencies',
    dest='ignore_dependencies',
    action='store_true',
    default=False,
    help="Don't install package dependencies.")

build_dir = OptionMaker(
    '-b', '--build', '--build-dir', '--build-directory',
    dest='build_dir',
    metavar='dir',
    default=build_prefix,
    help='Directory to unpack packages into and build in. '
    'The default in a virtualenv is "<venv path>/build". '
    'The default for global installs is "<OS temp dir>/pip_build_<username>".')

install_options = OptionMaker(
    '--install-option',
    dest='install_options',
    action='append',
    metavar='options',
    help="Extra arguments to be supplied to the setup.py install "
    "command (use like --install-option=\"--install-scripts=/usr/local/bin\"). "
    "Use multiple --install-option options to pass multiple options to setup.py install. "
    "If you are using an option with a directory path, be sure to use absolute path.")

global_options = OptionMaker(
    '--global-option',
    dest='global_options',
    action='append',
    metavar='options',
    help="Extra global options to be supplied to the setup.py "
    "call before the install command.")

no_clean = OptionMaker(
    '--no-clean',
    action='store_true',
    default=False,
    help="Don't clean up build directories.")
##########
# groups #
##########

# Option groups are plain dicts consumed by make_option_group().

general_group = {
    'name': 'General Options',
    'options': [
        help_,
        require_virtualenv,
        verbose,
        version,
        quiet,
        log_file,
        log,
        log_explicit_levels,
        no_input,
        proxy,
        timeout,
        default_vcs,
        skip_requirements_regex,
        exists_action,
        cert,
        ]
    }

index_group = {
    'name': 'Package Index Options',
    'options': [
        index_url,
        extra_index_url,
        no_index,
        find_links,
        use_mirrors,
        mirrors,
        allow_external,
        allow_all_external,
        no_allow_external,
        allow_unsafe,
        no_allow_unsafe,
        process_dependency_links,
        ]
    }
|
foxwill/ol-api-tester
|
refs/heads/master
|
env/lib/python2.7/site-packages/requests/packages/chardet/jisfreq.py
|
3130
|
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
# Sampling from about 20M text materials include literature and computer technology
#
# Japanese frequency table, applied to both S-JIS and EUC-JP
# They are sorted in order.
# 128 --> 0.77094
# 256 --> 0.85710
# 512 --> 0.92635
# 1024 --> 0.97130
# 2048 --> 0.99431
#
# Ideal Distribution Ratio = 0.92635 / (1-0.92635) = 12.58
# Random Distribution Ratio = 512 / (2965+62+83+86-512) = 0.191
#
# Typical Distribution Ratio, 25% of IDR
# Typical distribution ratio for Japanese: 25% of the ideal ratio above.
JIS_TYPICAL_DISTRIBUTION_RATIO = 3.0
# Size of the char-to-frequency-order table that follows.
JIS_TABLE_SIZE = 4368
JISCharToFreqOrder = (
40, 1, 6, 182, 152, 180, 295,2127, 285, 381,3295,4304,3068,4606,3165,3510, # 16
3511,1822,2785,4607,1193,2226,5070,4608, 171,2996,1247, 18, 179,5071, 856,1661, # 32
1262,5072, 619, 127,3431,3512,3230,1899,1700, 232, 228,1294,1298, 284, 283,2041, # 48
2042,1061,1062, 48, 49, 44, 45, 433, 434,1040,1041, 996, 787,2997,1255,4305, # 64
2108,4609,1684,1648,5073,5074,5075,5076,5077,5078,3687,5079,4610,5080,3927,3928, # 80
5081,3296,3432, 290,2285,1471,2187,5082,2580,2825,1303,2140,1739,1445,2691,3375, # 96
1691,3297,4306,4307,4611, 452,3376,1182,2713,3688,3069,4308,5083,5084,5085,5086, # 112
5087,5088,5089,5090,5091,5092,5093,5094,5095,5096,5097,5098,5099,5100,5101,5102, # 128
5103,5104,5105,5106,5107,5108,5109,5110,5111,5112,4097,5113,5114,5115,5116,5117, # 144
5118,5119,5120,5121,5122,5123,5124,5125,5126,5127,5128,5129,5130,5131,5132,5133, # 160
5134,5135,5136,5137,5138,5139,5140,5141,5142,5143,5144,5145,5146,5147,5148,5149, # 176
5150,5151,5152,4612,5153,5154,5155,5156,5157,5158,5159,5160,5161,5162,5163,5164, # 192
5165,5166,5167,5168,5169,5170,5171,5172,5173,5174,5175,1472, 598, 618, 820,1205, # 208
1309,1412,1858,1307,1692,5176,5177,5178,5179,5180,5181,5182,1142,1452,1234,1172, # 224
1875,2043,2149,1793,1382,2973, 925,2404,1067,1241, 960,1377,2935,1491, 919,1217, # 240
1865,2030,1406,1499,2749,4098,5183,5184,5185,5186,5187,5188,2561,4099,3117,1804, # 256
2049,3689,4309,3513,1663,5189,3166,3118,3298,1587,1561,3433,5190,3119,1625,2998, # 272
3299,4613,1766,3690,2786,4614,5191,5192,5193,5194,2161, 26,3377, 2,3929, 20, # 288
3691, 47,4100, 50, 17, 16, 35, 268, 27, 243, 42, 155, 24, 154, 29, 184, # 304
4, 91, 14, 92, 53, 396, 33, 289, 9, 37, 64, 620, 21, 39, 321, 5, # 320
12, 11, 52, 13, 3, 208, 138, 0, 7, 60, 526, 141, 151,1069, 181, 275, # 336
1591, 83, 132,1475, 126, 331, 829, 15, 69, 160, 59, 22, 157, 55,1079, 312, # 352
109, 38, 23, 25, 10, 19, 79,5195, 61, 382,1124, 8, 30,5196,5197,5198, # 368
5199,5200,5201,5202,5203,5204,5205,5206, 89, 62, 74, 34,2416, 112, 139, 196, # 384
271, 149, 84, 607, 131, 765, 46, 88, 153, 683, 76, 874, 101, 258, 57, 80, # 400
32, 364, 121,1508, 169,1547, 68, 235, 145,2999, 41, 360,3027, 70, 63, 31, # 416
43, 259, 262,1383, 99, 533, 194, 66, 93, 846, 217, 192, 56, 106, 58, 565, # 432
280, 272, 311, 256, 146, 82, 308, 71, 100, 128, 214, 655, 110, 261, 104,1140, # 448
54, 51, 36, 87, 67,3070, 185,2618,2936,2020, 28,1066,2390,2059,5207,5208, # 464
5209,5210,5211,5212,5213,5214,5215,5216,4615,5217,5218,5219,5220,5221,5222,5223, # 480
5224,5225,5226,5227,5228,5229,5230,5231,5232,5233,5234,5235,5236,3514,5237,5238, # 496
5239,5240,5241,5242,5243,5244,2297,2031,4616,4310,3692,5245,3071,5246,3598,5247, # 512
4617,3231,3515,5248,4101,4311,4618,3808,4312,4102,5249,4103,4104,3599,5250,5251, # 528
5252,5253,5254,5255,5256,5257,5258,5259,5260,5261,5262,5263,5264,5265,5266,5267, # 544
5268,5269,5270,5271,5272,5273,5274,5275,5276,5277,5278,5279,5280,5281,5282,5283, # 560
5284,5285,5286,5287,5288,5289,5290,5291,5292,5293,5294,5295,5296,5297,5298,5299, # 576
5300,5301,5302,5303,5304,5305,5306,5307,5308,5309,5310,5311,5312,5313,5314,5315, # 592
5316,5317,5318,5319,5320,5321,5322,5323,5324,5325,5326,5327,5328,5329,5330,5331, # 608
5332,5333,5334,5335,5336,5337,5338,5339,5340,5341,5342,5343,5344,5345,5346,5347, # 624
5348,5349,5350,5351,5352,5353,5354,5355,5356,5357,5358,5359,5360,5361,5362,5363, # 640
5364,5365,5366,5367,5368,5369,5370,5371,5372,5373,5374,5375,5376,5377,5378,5379, # 656
5380,5381, 363, 642,2787,2878,2788,2789,2316,3232,2317,3434,2011, 165,1942,3930, # 672
3931,3932,3933,5382,4619,5383,4620,5384,5385,5386,5387,5388,5389,5390,5391,5392, # 688
5393,5394,5395,5396,5397,5398,5399,5400,5401,5402,5403,5404,5405,5406,5407,5408, # 704
5409,5410,5411,5412,5413,5414,5415,5416,5417,5418,5419,5420,5421,5422,5423,5424, # 720
5425,5426,5427,5428,5429,5430,5431,5432,5433,5434,5435,5436,5437,5438,5439,5440, # 736
5441,5442,5443,5444,5445,5446,5447,5448,5449,5450,5451,5452,5453,5454,5455,5456, # 752
5457,5458,5459,5460,5461,5462,5463,5464,5465,5466,5467,5468,5469,5470,5471,5472, # 768
5473,5474,5475,5476,5477,5478,5479,5480,5481,5482,5483,5484,5485,5486,5487,5488, # 784
5489,5490,5491,5492,5493,5494,5495,5496,5497,5498,5499,5500,5501,5502,5503,5504, # 800
5505,5506,5507,5508,5509,5510,5511,5512,5513,5514,5515,5516,5517,5518,5519,5520, # 816
5521,5522,5523,5524,5525,5526,5527,5528,5529,5530,5531,5532,5533,5534,5535,5536, # 832
5537,5538,5539,5540,5541,5542,5543,5544,5545,5546,5547,5548,5549,5550,5551,5552, # 848
5553,5554,5555,5556,5557,5558,5559,5560,5561,5562,5563,5564,5565,5566,5567,5568, # 864
5569,5570,5571,5572,5573,5574,5575,5576,5577,5578,5579,5580,5581,5582,5583,5584, # 880
5585,5586,5587,5588,5589,5590,5591,5592,5593,5594,5595,5596,5597,5598,5599,5600, # 896
5601,5602,5603,5604,5605,5606,5607,5608,5609,5610,5611,5612,5613,5614,5615,5616, # 912
5617,5618,5619,5620,5621,5622,5623,5624,5625,5626,5627,5628,5629,5630,5631,5632, # 928
5633,5634,5635,5636,5637,5638,5639,5640,5641,5642,5643,5644,5645,5646,5647,5648, # 944
5649,5650,5651,5652,5653,5654,5655,5656,5657,5658,5659,5660,5661,5662,5663,5664, # 960
5665,5666,5667,5668,5669,5670,5671,5672,5673,5674,5675,5676,5677,5678,5679,5680, # 976
5681,5682,5683,5684,5685,5686,5687,5688,5689,5690,5691,5692,5693,5694,5695,5696, # 992
5697,5698,5699,5700,5701,5702,5703,5704,5705,5706,5707,5708,5709,5710,5711,5712, # 1008
5713,5714,5715,5716,5717,5718,5719,5720,5721,5722,5723,5724,5725,5726,5727,5728, # 1024
5729,5730,5731,5732,5733,5734,5735,5736,5737,5738,5739,5740,5741,5742,5743,5744, # 1040
5745,5746,5747,5748,5749,5750,5751,5752,5753,5754,5755,5756,5757,5758,5759,5760, # 1056
5761,5762,5763,5764,5765,5766,5767,5768,5769,5770,5771,5772,5773,5774,5775,5776, # 1072
5777,5778,5779,5780,5781,5782,5783,5784,5785,5786,5787,5788,5789,5790,5791,5792, # 1088
5793,5794,5795,5796,5797,5798,5799,5800,5801,5802,5803,5804,5805,5806,5807,5808, # 1104
5809,5810,5811,5812,5813,5814,5815,5816,5817,5818,5819,5820,5821,5822,5823,5824, # 1120
5825,5826,5827,5828,5829,5830,5831,5832,5833,5834,5835,5836,5837,5838,5839,5840, # 1136
5841,5842,5843,5844,5845,5846,5847,5848,5849,5850,5851,5852,5853,5854,5855,5856, # 1152
5857,5858,5859,5860,5861,5862,5863,5864,5865,5866,5867,5868,5869,5870,5871,5872, # 1168
5873,5874,5875,5876,5877,5878,5879,5880,5881,5882,5883,5884,5885,5886,5887,5888, # 1184
5889,5890,5891,5892,5893,5894,5895,5896,5897,5898,5899,5900,5901,5902,5903,5904, # 1200
5905,5906,5907,5908,5909,5910,5911,5912,5913,5914,5915,5916,5917,5918,5919,5920, # 1216
5921,5922,5923,5924,5925,5926,5927,5928,5929,5930,5931,5932,5933,5934,5935,5936, # 1232
5937,5938,5939,5940,5941,5942,5943,5944,5945,5946,5947,5948,5949,5950,5951,5952, # 1248
5953,5954,5955,5956,5957,5958,5959,5960,5961,5962,5963,5964,5965,5966,5967,5968, # 1264
5969,5970,5971,5972,5973,5974,5975,5976,5977,5978,5979,5980,5981,5982,5983,5984, # 1280
5985,5986,5987,5988,5989,5990,5991,5992,5993,5994,5995,5996,5997,5998,5999,6000, # 1296
6001,6002,6003,6004,6005,6006,6007,6008,6009,6010,6011,6012,6013,6014,6015,6016, # 1312
6017,6018,6019,6020,6021,6022,6023,6024,6025,6026,6027,6028,6029,6030,6031,6032, # 1328
6033,6034,6035,6036,6037,6038,6039,6040,6041,6042,6043,6044,6045,6046,6047,6048, # 1344
6049,6050,6051,6052,6053,6054,6055,6056,6057,6058,6059,6060,6061,6062,6063,6064, # 1360
6065,6066,6067,6068,6069,6070,6071,6072,6073,6074,6075,6076,6077,6078,6079,6080, # 1376
6081,6082,6083,6084,6085,6086,6087,6088,6089,6090,6091,6092,6093,6094,6095,6096, # 1392
6097,6098,6099,6100,6101,6102,6103,6104,6105,6106,6107,6108,6109,6110,6111,6112, # 1408
6113,6114,2044,2060,4621, 997,1235, 473,1186,4622, 920,3378,6115,6116, 379,1108, # 1424
4313,2657,2735,3934,6117,3809, 636,3233, 573,1026,3693,3435,2974,3300,2298,4105, # 1440
854,2937,2463, 393,2581,2417, 539, 752,1280,2750,2480, 140,1161, 440, 708,1569, # 1456
665,2497,1746,1291,1523,3000, 164,1603, 847,1331, 537,1997, 486, 508,1693,2418, # 1472
1970,2227, 878,1220, 299,1030, 969, 652,2751, 624,1137,3301,2619, 65,3302,2045, # 1488
1761,1859,3120,1930,3694,3516, 663,1767, 852, 835,3695, 269, 767,2826,2339,1305, # 1504
896,1150, 770,1616,6118, 506,1502,2075,1012,2519, 775,2520,2975,2340,2938,4314, # 1520
3028,2086,1224,1943,2286,6119,3072,4315,2240,1273,1987,3935,1557, 175, 597, 985, # 1536
3517,2419,2521,1416,3029, 585, 938,1931,1007,1052,1932,1685,6120,3379,4316,4623, # 1552
804, 599,3121,1333,2128,2539,1159,1554,2032,3810, 687,2033,2904, 952, 675,1467, # 1568
3436,6121,2241,1096,1786,2440,1543,1924, 980,1813,2228, 781,2692,1879, 728,1918, # 1584
3696,4624, 548,1950,4625,1809,1088,1356,3303,2522,1944, 502, 972, 373, 513,2827, # 1600
586,2377,2391,1003,1976,1631,6122,2464,1084, 648,1776,4626,2141, 324, 962,2012, # 1616
2177,2076,1384, 742,2178,1448,1173,1810, 222, 102, 301, 445, 125,2420, 662,2498, # 1632
277, 200,1476,1165,1068, 224,2562,1378,1446, 450,1880, 659, 791, 582,4627,2939, # 1648
3936,1516,1274, 555,2099,3697,1020,1389,1526,3380,1762,1723,1787,2229, 412,2114, # 1664
1900,2392,3518, 512,2597, 427,1925,2341,3122,1653,1686,2465,2499, 697, 330, 273, # 1680
380,2162, 951, 832, 780, 991,1301,3073, 965,2270,3519, 668,2523,2636,1286, 535, # 1696
1407, 518, 671, 957,2658,2378, 267, 611,2197,3030,6123, 248,2299, 967,1799,2356, # 1712
850,1418,3437,1876,1256,1480,2828,1718,6124,6125,1755,1664,2405,6126,4628,2879, # 1728
2829, 499,2179, 676,4629, 557,2329,2214,2090, 325,3234, 464, 811,3001, 992,2342, # 1744
2481,1232,1469, 303,2242, 466,1070,2163, 603,1777,2091,4630,2752,4631,2714, 322, # 1760
2659,1964,1768, 481,2188,1463,2330,2857,3600,2092,3031,2421,4632,2318,2070,1849, # 1776
2598,4633,1302,2254,1668,1701,2422,3811,2905,3032,3123,2046,4106,1763,1694,4634, # 1792
1604, 943,1724,1454, 917, 868,2215,1169,2940, 552,1145,1800,1228,1823,1955, 316, # 1808
1080,2510, 361,1807,2830,4107,2660,3381,1346,1423,1134,4108,6127, 541,1263,1229, # 1824
1148,2540, 545, 465,1833,2880,3438,1901,3074,2482, 816,3937, 713,1788,2500, 122, # 1840
1575, 195,1451,2501,1111,6128, 859, 374,1225,2243,2483,4317, 390,1033,3439,3075, # 1856
2524,1687, 266, 793,1440,2599, 946, 779, 802, 507, 897,1081, 528,2189,1292, 711, # 1872
1866,1725,1167,1640, 753, 398,2661,1053, 246, 348,4318, 137,1024,3440,1600,2077, # 1888
2129, 825,4319, 698, 238, 521, 187,2300,1157,2423,1641,1605,1464,1610,1097,2541, # 1904
1260,1436, 759,2255,1814,2150, 705,3235, 409,2563,3304, 561,3033,2005,2564, 726, # 1920
1956,2343,3698,4109, 949,3812,3813,3520,1669, 653,1379,2525, 881,2198, 632,2256, # 1936
1027, 778,1074, 733,1957, 514,1481,2466, 554,2180, 702,3938,1606,1017,1398,6129, # 1952
1380,3521, 921, 993,1313, 594, 449,1489,1617,1166, 768,1426,1360, 495,1794,3601, # 1968
1177,3602,1170,4320,2344, 476, 425,3167,4635,3168,1424, 401,2662,1171,3382,1998, # 1984
1089,4110, 477,3169, 474,6130,1909, 596,2831,1842, 494, 693,1051,1028,1207,3076, # 2000
606,2115, 727,2790,1473,1115, 743,3522, 630, 805,1532,4321,2021, 366,1057, 838, # 2016
684,1114,2142,4322,2050,1492,1892,1808,2271,3814,2424,1971,1447,1373,3305,1090, # 2032
1536,3939,3523,3306,1455,2199, 336, 369,2331,1035, 584,2393, 902, 718,2600,6131, # 2048
2753, 463,2151,1149,1611,2467, 715,1308,3124,1268, 343,1413,3236,1517,1347,2663, # 2064
2093,3940,2022,1131,1553,2100,2941,1427,3441,2942,1323,2484,6132,1980, 872,2368, # 2080
2441,2943, 320,2369,2116,1082, 679,1933,3941,2791,3815, 625,1143,2023, 422,2200, # 2096
3816,6133, 730,1695, 356,2257,1626,2301,2858,2637,1627,1778, 937, 883,2906,2693, # 2112
3002,1769,1086, 400,1063,1325,3307,2792,4111,3077, 456,2345,1046, 747,6134,1524, # 2128
884,1094,3383,1474,2164,1059, 974,1688,2181,2258,1047, 345,1665,1187, 358, 875, # 2144
3170, 305, 660,3524,2190,1334,1135,3171,1540,1649,2542,1527, 927, 968,2793, 885, # 2160
1972,1850, 482, 500,2638,1218,1109,1085,2543,1654,2034, 876, 78,2287,1482,1277, # 2176
861,1675,1083,1779, 724,2754, 454, 397,1132,1612,2332, 893, 672,1237, 257,2259, # 2192
2370, 135,3384, 337,2244, 547, 352, 340, 709,2485,1400, 788,1138,2511, 540, 772, # 2208
1682,2260,2272,2544,2013,1843,1902,4636,1999,1562,2288,4637,2201,1403,1533, 407, # 2224
576,3308,1254,2071, 978,3385, 170, 136,1201,3125,2664,3172,2394, 213, 912, 873, # 2240
3603,1713,2202, 699,3604,3699, 813,3442, 493, 531,1054, 468,2907,1483, 304, 281, # 2256
4112,1726,1252,2094, 339,2319,2130,2639, 756,1563,2944, 748, 571,2976,1588,2425, # 2272
2715,1851,1460,2426,1528,1392,1973,3237, 288,3309, 685,3386, 296, 892,2716,2216, # 2288
1570,2245, 722,1747,2217, 905,3238,1103,6135,1893,1441,1965, 251,1805,2371,3700, # 2304
2601,1919,1078, 75,2182,1509,1592,1270,2640,4638,2152,6136,3310,3817, 524, 706, # 2320
1075, 292,3818,1756,2602, 317, 98,3173,3605,3525,1844,2218,3819,2502, 814, 567, # 2336
385,2908,1534,6137, 534,1642,3239, 797,6138,1670,1529, 953,4323, 188,1071, 538, # 2352
178, 729,3240,2109,1226,1374,2000,2357,2977, 731,2468,1116,2014,2051,6139,1261, # 2368
1593, 803,2859,2736,3443, 556, 682, 823,1541,6140,1369,2289,1706,2794, 845, 462, # 2384
2603,2665,1361, 387, 162,2358,1740, 739,1770,1720,1304,1401,3241,1049, 627,1571, # 2400
2427,3526,1877,3942,1852,1500, 431,1910,1503, 677, 297,2795, 286,1433,1038,1198, # 2416
2290,1133,1596,4113,4639,2469,1510,1484,3943,6141,2442, 108, 712,4640,2372, 866, # 2432
3701,2755,3242,1348, 834,1945,1408,3527,2395,3243,1811, 824, 994,1179,2110,1548, # 2448
1453, 790,3003, 690,4324,4325,2832,2909,3820,1860,3821, 225,1748, 310, 346,1780, # 2464
2470, 821,1993,2717,2796, 828, 877,3528,2860,2471,1702,2165,2910,2486,1789, 453, # 2480
359,2291,1676, 73,1164,1461,1127,3311, 421, 604, 314,1037, 589, 116,2487, 737, # 2496
837,1180, 111, 244, 735,6142,2261,1861,1362, 986, 523, 418, 581,2666,3822, 103, # 2512
855, 503,1414,1867,2488,1091, 657,1597, 979, 605,1316,4641,1021,2443,2078,2001, # 2528
1209, 96, 587,2166,1032, 260,1072,2153, 173, 94, 226,3244, 819,2006,4642,4114, # 2544
2203, 231,1744, 782, 97,2667, 786,3387, 887, 391, 442,2219,4326,1425,6143,2694, # 2560
633,1544,1202, 483,2015, 592,2052,1958,2472,1655, 419, 129,4327,3444,3312,1714, # 2576
1257,3078,4328,1518,1098, 865,1310,1019,1885,1512,1734, 469,2444, 148, 773, 436, # 2592
1815,1868,1128,1055,4329,1245,2756,3445,2154,1934,1039,4643, 579,1238, 932,2320, # 2608
353, 205, 801, 115,2428, 944,2321,1881, 399,2565,1211, 678, 766,3944, 335,2101, # 2624
1459,1781,1402,3945,2737,2131,1010, 844, 981,1326,1013, 550,1816,1545,2620,1335, # 2640
1008, 371,2881, 936,1419,1613,3529,1456,1395,2273,1834,2604,1317,2738,2503, 416, # 2656
1643,4330, 806,1126, 229, 591,3946,1314,1981,1576,1837,1666, 347,1790, 977,3313, # 2672
764,2861,1853, 688,2429,1920,1462, 77, 595, 415,2002,3034, 798,1192,4115,6144, # 2688
2978,4331,3035,2695,2582,2072,2566, 430,2430,1727, 842,1396,3947,3702, 613, 377, # 2704
278, 236,1417,3388,3314,3174, 757,1869, 107,3530,6145,1194, 623,2262, 207,1253, # 2720
2167,3446,3948, 492,1117,1935, 536,1838,2757,1246,4332, 696,2095,2406,1393,1572, # 2736
3175,1782, 583, 190, 253,1390,2230, 830,3126,3389, 934,3245,1703,1749,2979,1870, # 2752
2545,1656,2204, 869,2346,4116,3176,1817, 496,1764,4644, 942,1504, 404,1903,1122, # 2768
1580,3606,2945,1022, 515, 372,1735, 955,2431,3036,6146,2797,1110,2302,2798, 617, # 2784
6147, 441, 762,1771,3447,3607,3608,1904, 840,3037, 86, 939,1385, 572,1370,2445, # 2800
1336, 114,3703, 898, 294, 203,3315, 703,1583,2274, 429, 961,4333,1854,1951,3390, # 2816
2373,3704,4334,1318,1381, 966,1911,2322,1006,1155, 309, 989, 458,2718,1795,1372, # 2832
1203, 252,1689,1363,3177, 517,1936, 168,1490, 562, 193,3823,1042,4117,1835, 551, # 2848
470,4645, 395, 489,3448,1871,1465,2583,2641, 417,1493, 279,1295, 511,1236,1119, # 2864
72,1231,1982,1812,3004, 871,1564, 984,3449,1667,2696,2096,4646,2347,2833,1673, # 2880
3609, 695,3246,2668, 807,1183,4647, 890, 388,2333,1801,1457,2911,1765,1477,1031, # 2896
3316,3317,1278,3391,2799,2292,2526, 163,3450,4335,2669,1404,1802,6148,2323,2407, # 2912
1584,1728,1494,1824,1269, 298, 909,3318,1034,1632, 375, 776,1683,2061, 291, 210, # 2928
1123, 809,1249,1002,2642,3038, 206,1011,2132, 144, 975, 882,1565, 342, 667, 754, # 2944
1442,2143,1299,2303,2062, 447, 626,2205,1221,2739,2912,1144,1214,2206,2584, 760, # 2960
1715, 614, 950,1281,2670,2621, 810, 577,1287,2546,4648, 242,2168, 250,2643, 691, # 2976
123,2644, 647, 313,1029, 689,1357,2946,1650, 216, 771,1339,1306, 808,2063, 549, # 2992
913,1371,2913,2914,6149,1466,1092,1174,1196,1311,2605,2396,1783,1796,3079, 406, # 3008
2671,2117,3949,4649, 487,1825,2220,6150,2915, 448,2348,1073,6151,2397,1707, 130, # 3024
900,1598, 329, 176,1959,2527,1620,6152,2275,4336,3319,1983,2191,3705,3610,2155, # 3040
3706,1912,1513,1614,6153,1988, 646, 392,2304,1589,3320,3039,1826,1239,1352,1340, # 3056
2916, 505,2567,1709,1437,2408,2547, 906,6154,2672, 384,1458,1594,1100,1329, 710, # 3072
423,3531,2064,2231,2622,1989,2673,1087,1882, 333, 841,3005,1296,2882,2379, 580, # 3088
1937,1827,1293,2585, 601, 574, 249,1772,4118,2079,1120, 645, 901,1176,1690, 795, # 3104
2207, 478,1434, 516,1190,1530, 761,2080, 930,1264, 355, 435,1552, 644,1791, 987, # 3120
220,1364,1163,1121,1538, 306,2169,1327,1222, 546,2645, 218, 241, 610,1704,3321, # 3136
1984,1839,1966,2528, 451,6155,2586,3707,2568, 907,3178, 254,2947, 186,1845,4650, # 3152
745, 432,1757, 428,1633, 888,2246,2221,2489,3611,2118,1258,1265, 956,3127,1784, # 3168
4337,2490, 319, 510, 119, 457,3612, 274,2035,2007,4651,1409,3128, 970,2758, 590, # 3184
2800, 661,2247,4652,2008,3950,1420,1549,3080,3322,3951,1651,1375,2111, 485,2491, # 3200
1429,1156,6156,2548,2183,1495, 831,1840,2529,2446, 501,1657, 307,1894,3247,1341, # 3216
666, 899,2156,1539,2549,1559, 886, 349,2208,3081,2305,1736,3824,2170,2759,1014, # 3232
1913,1386, 542,1397,2948, 490, 368, 716, 362, 159, 282,2569,1129,1658,1288,1750, # 3248
2674, 276, 649,2016, 751,1496, 658,1818,1284,1862,2209,2087,2512,3451, 622,2834, # 3264
376, 117,1060,2053,1208,1721,1101,1443, 247,1250,3179,1792,3952,2760,2398,3953, # 3280
6157,2144,3708, 446,2432,1151,2570,3452,2447,2761,2835,1210,2448,3082, 424,2222, # 3296
1251,2449,2119,2836, 504,1581,4338, 602, 817, 857,3825,2349,2306, 357,3826,1470, # 3312
1883,2883, 255, 958, 929,2917,3248, 302,4653,1050,1271,1751,2307,1952,1430,2697, # 3328
2719,2359, 354,3180, 777, 158,2036,4339,1659,4340,4654,2308,2949,2248,1146,2232, # 3344
3532,2720,1696,2623,3827,6158,3129,1550,2698,1485,1297,1428, 637, 931,2721,2145, # 3360
914,2550,2587, 81,2450, 612, 827,2646,1242,4655,1118,2884, 472,1855,3181,3533, # 3376
3534, 569,1353,2699,1244,1758,2588,4119,2009,2762,2171,3709,1312,1531,6159,1152, # 3392
1938, 134,1830, 471,3710,2276,1112,1535,3323,3453,3535, 982,1337,2950, 488, 826, # 3408
674,1058,1628,4120,2017, 522,2399, 211, 568,1367,3454, 350, 293,1872,1139,3249, # 3424
1399,1946,3006,1300,2360,3324, 588, 736,6160,2606, 744, 669,3536,3828,6161,1358, # 3440
199, 723, 848, 933, 851,1939,1505,1514,1338,1618,1831,4656,1634,3613, 443,2740, # 3456
3829, 717,1947, 491,1914,6162,2551,1542,4121,1025,6163,1099,1223, 198,3040,2722, # 3472
370, 410,1905,2589, 998,1248,3182,2380, 519,1449,4122,1710, 947, 928,1153,4341, # 3488
2277, 344,2624,1511, 615, 105, 161,1212,1076,1960,3130,2054,1926,1175,1906,2473, # 3504
414,1873,2801,6164,2309, 315,1319,3325, 318,2018,2146,2157, 963, 631, 223,4342, # 3520
4343,2675, 479,3711,1197,2625,3712,2676,2361,6165,4344,4123,6166,2451,3183,1886, # 3536
2184,1674,1330,1711,1635,1506, 799, 219,3250,3083,3954,1677,3713,3326,2081,3614, # 3552
1652,2073,4657,1147,3041,1752, 643,1961, 147,1974,3955,6167,1716,2037, 918,3007, # 3568
1994, 120,1537, 118, 609,3184,4345, 740,3455,1219, 332,1615,3830,6168,1621,2980, # 3584
1582, 783, 212, 553,2350,3714,1349,2433,2082,4124, 889,6169,2310,1275,1410, 973, # 3600
166,1320,3456,1797,1215,3185,2885,1846,2590,2763,4658, 629, 822,3008, 763, 940, # 3616
1990,2862, 439,2409,1566,1240,1622, 926,1282,1907,2764, 654,2210,1607, 327,1130, # 3632
3956,1678,1623,6170,2434,2192, 686, 608,3831,3715, 903,3957,3042,6171,2741,1522, # 3648
1915,1105,1555,2552,1359, 323,3251,4346,3457, 738,1354,2553,2311,2334,1828,2003, # 3664
3832,1753,2351,1227,6172,1887,4125,1478,6173,2410,1874,1712,1847, 520,1204,2607, # 3680
264,4659, 836,2677,2102, 600,4660,3833,2278,3084,6174,4347,3615,1342, 640, 532, # 3696
543,2608,1888,2400,2591,1009,4348,1497, 341,1737,3616,2723,1394, 529,3252,1321, # 3712
983,4661,1515,2120, 971,2592, 924, 287,1662,3186,4349,2700,4350,1519, 908,1948, # 3728
2452, 156, 796,1629,1486,2223,2055, 694,4126,1259,1036,3392,1213,2249,2742,1889, # 3744
1230,3958,1015, 910, 408, 559,3617,4662, 746, 725, 935,4663,3959,3009,1289, 563, # 3760
867,4664,3960,1567,2981,2038,2626, 988,2263,2381,4351, 143,2374, 704,1895,6175, # 3776
1188,3716,2088, 673,3085,2362,4352, 484,1608,1921,2765,2918, 215, 904,3618,3537, # 3792
894, 509, 976,3043,2701,3961,4353,2837,2982, 498,6176,6177,1102,3538,1332,3393, # 3808
1487,1636,1637, 233, 245,3962, 383, 650, 995,3044, 460,1520,1206,2352, 749,3327, # 3824
530, 700, 389,1438,1560,1773,3963,2264, 719,2951,2724,3834, 870,1832,1644,1000, # 3840
839,2474,3717, 197,1630,3394, 365,2886,3964,1285,2133, 734, 922, 818,1106, 732, # 3856
480,2083,1774,3458, 923,2279,1350, 221,3086, 85,2233,2234,3835,1585,3010,2147, # 3872
1387,1705,2382,1619,2475, 133, 239,2802,1991,1016,2084,2383, 411,2838,1113, 651, # 3888
1985,1160,3328, 990,1863,3087,1048,1276,2647, 265,2627,1599,3253,2056, 150, 638, # 3904
2019, 656, 853, 326,1479, 680,1439,4354,1001,1759, 413,3459,3395,2492,1431, 459, # 3920
4355,1125,3329,2265,1953,1450,2065,2863, 849, 351,2678,3131,3254,3255,1104,1577, # 3936
227,1351,1645,2453,2193,1421,2887, 812,2121, 634, 95,2435, 201,2312,4665,1646, # 3952
1671,2743,1601,2554,2702,2648,2280,1315,1366,2089,3132,1573,3718,3965,1729,1189, # 3968
328,2679,1077,1940,1136, 558,1283, 964,1195, 621,2074,1199,1743,3460,3619,1896, # 3984
1916,1890,3836,2952,1154,2112,1064, 862, 378,3011,2066,2113,2803,1568,2839,6178, # 4000
3088,2919,1941,1660,2004,1992,2194, 142, 707,1590,1708,1624,1922,1023,1836,1233, # 4016
1004,2313, 789, 741,3620,6179,1609,2411,1200,4127,3719,3720,4666,2057,3721, 593, # 4032
2840, 367,2920,1878,6180,3461,1521, 628,1168, 692,2211,2649, 300, 720,2067,2571, # 4048
2953,3396, 959,2504,3966,3539,3462,1977, 701,6181, 954,1043, 800, 681, 183,3722, # 4064
1803,1730,3540,4128,2103, 815,2314, 174, 467, 230,2454,1093,2134, 755,3541,3397, # 4080
1141,1162,6182,1738,2039, 270,3256,2513,1005,1647,2185,3837, 858,1679,1897,1719, # 4096
2954,2324,1806, 402, 670, 167,4129,1498,2158,2104, 750,6183, 915, 189,1680,1551, # 4112
455,4356,1501,2455, 405,1095,2955, 338,1586,1266,1819, 570, 641,1324, 237,1556, # 4128
2650,1388,3723,6184,1368,2384,1343,1978,3089,2436, 879,3724, 792,1191, 758,3012, # 4144
1411,2135,1322,4357, 240,4667,1848,3725,1574,6185, 420,3045,1546,1391, 714,4358, # 4160
1967, 941,1864, 863, 664, 426, 560,1731,2680,1785,2864,1949,2363, 403,3330,1415, # 4176
1279,2136,1697,2335, 204, 721,2097,3838, 90,6186,2085,2505, 191,3967, 124,2148, # 4192
1376,1798,1178,1107,1898,1405, 860,4359,1243,1272,2375,2983,1558,2456,1638, 113, # 4208
3621, 578,1923,2609, 880, 386,4130, 784,2186,2266,1422,2956,2172,1722, 497, 263, # 4224
2514,1267,2412,2610, 177,2703,3542, 774,1927,1344, 616,1432,1595,1018, 172,4360, # 4240
2325, 911,4361, 438,1468,3622, 794,3968,2024,2173,1681,1829,2957, 945, 895,3090, # 4256
575,2212,2476, 475,2401,2681, 785,2744,1745,2293,2555,1975,3133,2865, 394,4668, # 4272
3839, 635,4131, 639, 202,1507,2195,2766,1345,1435,2572,3726,1908,1184,1181,2457, # 4288
3727,3134,4362, 843,2611, 437, 916,4669, 234, 769,1884,3046,3047,3623, 833,6187, # 4304
1639,2250,2402,1355,1185,2010,2047, 999, 525,1732,1290,1488,2612, 948,1578,3728, # 4320
2413,2477,1216,2725,2159, 334,3840,1328,3624,2921,1525,4132, 564,1056, 891,4363, # 4336
1444,1698,2385,2251,3729,1365,2281,2235,1717,6188, 864,3841,2515, 444, 527,2767, # 4352
2922,3625, 544, 461,6189, 566, 209,2437,3398,2098,1065,2068,3331,3626,3257,2137, # 4368 #last 512
#Everything below is of no interest for detection purpose
2138,2122,3730,2888,1995,1820,1044,6190,6191,6192,6193,6194,6195,6196,6197,6198, # 4384
6199,6200,6201,6202,6203,6204,6205,4670,6206,6207,6208,6209,6210,6211,6212,6213, # 4400
6214,6215,6216,6217,6218,6219,6220,6221,6222,6223,6224,6225,6226,6227,6228,6229, # 4416
6230,6231,6232,6233,6234,6235,6236,6237,3187,6238,6239,3969,6240,6241,6242,6243, # 4432
6244,4671,6245,6246,4672,6247,6248,4133,6249,6250,4364,6251,2923,2556,2613,4673, # 4448
4365,3970,6252,6253,6254,6255,4674,6256,6257,6258,2768,2353,4366,4675,4676,3188, # 4464
4367,3463,6259,4134,4677,4678,6260,2267,6261,3842,3332,4368,3543,6262,6263,6264, # 4480
3013,1954,1928,4135,4679,6265,6266,2478,3091,6267,4680,4369,6268,6269,1699,6270, # 4496
3544,4136,4681,6271,4137,6272,4370,2804,6273,6274,2593,3971,3972,4682,6275,2236, # 4512
4683,6276,6277,4684,6278,6279,4138,3973,4685,6280,6281,3258,6282,6283,6284,6285, # 4528
3974,4686,2841,3975,6286,6287,3545,6288,6289,4139,4687,4140,6290,4141,6291,4142, # 4544
6292,6293,3333,6294,6295,6296,4371,6297,3399,6298,6299,4372,3976,6300,6301,6302, # 4560
4373,6303,6304,3843,3731,6305,4688,4374,6306,6307,3259,2294,6308,3732,2530,4143, # 4576
6309,4689,6310,6311,6312,3048,6313,6314,4690,3733,2237,6315,6316,2282,3334,6317, # 4592
6318,3844,6319,6320,4691,6321,3400,4692,6322,4693,6323,3049,6324,4375,6325,3977, # 4608
6326,6327,6328,3546,6329,4694,3335,6330,4695,4696,6331,6332,6333,6334,4376,3978, # 4624
6335,4697,3979,4144,6336,3980,4698,6337,6338,6339,6340,6341,4699,4700,4701,6342, # 4640
6343,4702,6344,6345,4703,6346,6347,4704,6348,4705,4706,3135,6349,4707,6350,4708, # 4656
6351,4377,6352,4709,3734,4145,6353,2506,4710,3189,6354,3050,4711,3981,6355,3547, # 4672
3014,4146,4378,3735,2651,3845,3260,3136,2224,1986,6356,3401,6357,4712,2594,3627, # 4688
3137,2573,3736,3982,4713,3628,4714,4715,2682,3629,4716,6358,3630,4379,3631,6359, # 4704
6360,6361,3983,6362,6363,6364,6365,4147,3846,4717,6366,6367,3737,2842,6368,4718, # 4720
2628,6369,3261,6370,2386,6371,6372,3738,3984,4719,3464,4720,3402,6373,2924,3336, # 4736
4148,2866,6374,2805,3262,4380,2704,2069,2531,3138,2806,2984,6375,2769,6376,4721, # 4752
4722,3403,6377,6378,3548,6379,6380,2705,3092,1979,4149,2629,3337,2889,6381,3338, # 4768
4150,2557,3339,4381,6382,3190,3263,3739,6383,4151,4723,4152,2558,2574,3404,3191, # 4784
6384,6385,4153,6386,4724,4382,6387,6388,4383,6389,6390,4154,6391,4725,3985,6392, # 4800
3847,4155,6393,6394,6395,6396,6397,3465,6398,4384,6399,6400,6401,6402,6403,6404, # 4816
4156,6405,6406,6407,6408,2123,6409,6410,2326,3192,4726,6411,6412,6413,6414,4385, # 4832
4157,6415,6416,4158,6417,3093,3848,6418,3986,6419,6420,3849,6421,6422,6423,4159, # 4848
6424,6425,4160,6426,3740,6427,6428,6429,6430,3987,6431,4727,6432,2238,6433,6434, # 4864
4386,3988,6435,6436,3632,6437,6438,2843,6439,6440,6441,6442,3633,6443,2958,6444, # 4880
6445,3466,6446,2364,4387,3850,6447,4388,2959,3340,6448,3851,6449,4728,6450,6451, # 4896
3264,4729,6452,3193,6453,4389,4390,2706,3341,4730,6454,3139,6455,3194,6456,3051, # 4912
2124,3852,1602,4391,4161,3853,1158,3854,4162,3989,4392,3990,4731,4732,4393,2040, # 4928
4163,4394,3265,6457,2807,3467,3855,6458,6459,6460,3991,3468,4733,4734,6461,3140, # 4944
2960,6462,4735,6463,6464,6465,6466,4736,4737,4738,4739,6467,6468,4164,2403,3856, # 4960
6469,6470,2770,2844,6471,4740,6472,6473,6474,6475,6476,6477,6478,3195,6479,4741, # 4976
4395,6480,2867,6481,4742,2808,6482,2493,4165,6483,6484,6485,6486,2295,4743,6487, # 4992
6488,6489,3634,6490,6491,6492,6493,6494,6495,6496,2985,4744,6497,6498,4745,6499, # 5008
6500,2925,3141,4166,6501,6502,4746,6503,6504,4747,6505,6506,6507,2890,6508,6509, # 5024
6510,6511,6512,6513,6514,6515,6516,6517,6518,6519,3469,4167,6520,6521,6522,4748, # 5040
4396,3741,4397,4749,4398,3342,2125,4750,6523,4751,4752,4753,3052,6524,2961,4168, # 5056
6525,4754,6526,4755,4399,2926,4169,6527,3857,6528,4400,4170,6529,4171,6530,6531, # 5072
2595,6532,6533,6534,6535,3635,6536,6537,6538,6539,6540,6541,6542,4756,6543,6544, # 5088
6545,6546,6547,6548,4401,6549,6550,6551,6552,4402,3405,4757,4403,6553,6554,6555, # 5104
4172,3742,6556,6557,6558,3992,3636,6559,6560,3053,2726,6561,3549,4173,3054,4404, # 5120
6562,6563,3993,4405,3266,3550,2809,4406,6564,6565,6566,4758,4759,6567,3743,6568, # 5136
4760,3744,4761,3470,6569,6570,6571,4407,6572,3745,4174,6573,4175,2810,4176,3196, # 5152
4762,6574,4177,6575,6576,2494,2891,3551,6577,6578,3471,6579,4408,6580,3015,3197, # 5168
6581,3343,2532,3994,3858,6582,3094,3406,4409,6583,2892,4178,4763,4410,3016,4411, # 5184
6584,3995,3142,3017,2683,6585,4179,6586,6587,4764,4412,6588,6589,4413,6590,2986, # 5200
6591,2962,3552,6592,2963,3472,6593,6594,4180,4765,6595,6596,2225,3267,4414,6597, # 5216
3407,3637,4766,6598,6599,3198,6600,4415,6601,3859,3199,6602,3473,4767,2811,4416, # 5232
1856,3268,3200,2575,3996,3997,3201,4417,6603,3095,2927,6604,3143,6605,2268,6606, # 5248
3998,3860,3096,2771,6607,6608,3638,2495,4768,6609,3861,6610,3269,2745,4769,4181, # 5264
3553,6611,2845,3270,6612,6613,6614,3862,6615,6616,4770,4771,6617,3474,3999,4418, # 5280
4419,6618,3639,3344,6619,4772,4182,6620,2126,6621,6622,6623,4420,4773,6624,3018, # 5296
6625,4774,3554,6626,4183,2025,3746,6627,4184,2707,6628,4421,4422,3097,1775,4185, # 5312
3555,6629,6630,2868,6631,6632,4423,6633,6634,4424,2414,2533,2928,6635,4186,2387, # 5328
6636,4775,6637,4187,6638,1891,4425,3202,3203,6639,6640,4776,6641,3345,6642,6643, # 5344
3640,6644,3475,3346,3641,4000,6645,3144,6646,3098,2812,4188,3642,3204,6647,3863, # 5360
3476,6648,3864,6649,4426,4001,6650,6651,6652,2576,6653,4189,4777,6654,6655,6656, # 5376
2846,6657,3477,3205,4002,6658,4003,6659,3347,2252,6660,6661,6662,4778,6663,6664, # 5392
6665,6666,6667,6668,6669,4779,4780,2048,6670,3478,3099,6671,3556,3747,4004,6672, # 5408
6673,6674,3145,4005,3748,6675,6676,6677,6678,6679,3408,6680,6681,6682,6683,3206, # 5424
3207,6684,6685,4781,4427,6686,4782,4783,4784,6687,6688,6689,4190,6690,6691,3479, # 5440
6692,2746,6693,4428,6694,6695,6696,6697,6698,6699,4785,6700,6701,3208,2727,6702, # 5456
3146,6703,6704,3409,2196,6705,4429,6706,6707,6708,2534,1996,6709,6710,6711,2747, # 5472
6712,6713,6714,4786,3643,6715,4430,4431,6716,3557,6717,4432,4433,6718,6719,6720, # 5488
6721,3749,6722,4006,4787,6723,6724,3644,4788,4434,6725,6726,4789,2772,6727,6728, # 5504
6729,6730,6731,2708,3865,2813,4435,6732,6733,4790,4791,3480,6734,6735,6736,6737, # 5520
4436,3348,6738,3410,4007,6739,6740,4008,6741,6742,4792,3411,4191,6743,6744,6745, # 5536
6746,6747,3866,6748,3750,6749,6750,6751,6752,6753,6754,6755,3867,6756,4009,6757, # 5552
4793,4794,6758,2814,2987,6759,6760,6761,4437,6762,6763,6764,6765,3645,6766,6767, # 5568
3481,4192,6768,3751,6769,6770,2174,6771,3868,3752,6772,6773,6774,4193,4795,4438, # 5584
3558,4796,4439,6775,4797,6776,6777,4798,6778,4799,3559,4800,6779,6780,6781,3482, # 5600
6782,2893,6783,6784,4194,4801,4010,6785,6786,4440,6787,4011,6788,6789,6790,6791, # 5616
6792,6793,4802,6794,6795,6796,4012,6797,6798,6799,6800,3349,4803,3483,6801,4804, # 5632
4195,6802,4013,6803,6804,4196,6805,4014,4015,6806,2847,3271,2848,6807,3484,6808, # 5648
6809,6810,4441,6811,4442,4197,4443,3272,4805,6812,3412,4016,1579,6813,6814,4017, # 5664
6815,3869,6816,2964,6817,4806,6818,6819,4018,3646,6820,6821,4807,4019,4020,6822, # 5680
6823,3560,6824,6825,4021,4444,6826,4198,6827,6828,4445,6829,6830,4199,4808,6831, # 5696
6832,6833,3870,3019,2458,6834,3753,3413,3350,6835,4809,3871,4810,3561,4446,6836, # 5712
6837,4447,4811,4812,6838,2459,4448,6839,4449,6840,6841,4022,3872,6842,4813,4814, # 5728
6843,6844,4815,4200,4201,4202,6845,4023,6846,6847,4450,3562,3873,6848,6849,4816, # 5744
4817,6850,4451,4818,2139,6851,3563,6852,6853,3351,6854,6855,3352,4024,2709,3414, # 5760
4203,4452,6856,4204,6857,6858,3874,3875,6859,6860,4819,6861,6862,6863,6864,4453, # 5776
3647,6865,6866,4820,6867,6868,6869,6870,4454,6871,2869,6872,6873,4821,6874,3754, # 5792
6875,4822,4205,6876,6877,6878,3648,4206,4455,6879,4823,6880,4824,3876,6881,3055, # 5808
4207,6882,3415,6883,6884,6885,4208,4209,6886,4210,3353,6887,3354,3564,3209,3485, # 5824
2652,6888,2728,6889,3210,3755,6890,4025,4456,6891,4825,6892,6893,6894,6895,4211, # 5840
6896,6897,6898,4826,6899,6900,4212,6901,4827,6902,2773,3565,6903,4828,6904,6905, # 5856
6906,6907,3649,3650,6908,2849,3566,6909,3567,3100,6910,6911,6912,6913,6914,6915, # 5872
4026,6916,3355,4829,3056,4457,3756,6917,3651,6918,4213,3652,2870,6919,4458,6920, # 5888
2438,6921,6922,3757,2774,4830,6923,3356,4831,4832,6924,4833,4459,3653,2507,6925, # 5904
4834,2535,6926,6927,3273,4027,3147,6928,3568,6929,6930,6931,4460,6932,3877,4461, # 5920
2729,3654,6933,6934,6935,6936,2175,4835,2630,4214,4028,4462,4836,4215,6937,3148, # 5936
4216,4463,4837,4838,4217,6938,6939,2850,4839,6940,4464,6941,6942,6943,4840,6944, # 5952
4218,3274,4465,6945,6946,2710,6947,4841,4466,6948,6949,2894,6950,6951,4842,6952, # 5968
4219,3057,2871,6953,6954,6955,6956,4467,6957,2711,6958,6959,6960,3275,3101,4843, # 5984
6961,3357,3569,6962,4844,6963,6964,4468,4845,3570,6965,3102,4846,3758,6966,4847, # 6000
3878,4848,4849,4029,6967,2929,3879,4850,4851,6968,6969,1733,6970,4220,6971,6972, # 6016
6973,6974,6975,6976,4852,6977,6978,6979,6980,6981,6982,3759,6983,6984,6985,3486, # 6032
3487,6986,3488,3416,6987,6988,6989,6990,6991,6992,6993,6994,6995,6996,6997,4853, # 6048
6998,6999,4030,7000,7001,3211,7002,7003,4221,7004,7005,3571,4031,7006,3572,7007, # 6064
2614,4854,2577,7008,7009,2965,3655,3656,4855,2775,3489,3880,4222,4856,3881,4032, # 6080
3882,3657,2730,3490,4857,7010,3149,7011,4469,4858,2496,3491,4859,2283,7012,7013, # 6096
7014,2365,4860,4470,7015,7016,3760,7017,7018,4223,1917,7019,7020,7021,4471,7022, # 6112
2776,4472,7023,7024,7025,7026,4033,7027,3573,4224,4861,4034,4862,7028,7029,1929, # 6128
3883,4035,7030,4473,3058,7031,2536,3761,3884,7032,4036,7033,2966,2895,1968,4474, # 6144
3276,4225,3417,3492,4226,2105,7034,7035,1754,2596,3762,4227,4863,4475,3763,4864, # 6160
3764,2615,2777,3103,3765,3658,3418,4865,2296,3766,2815,7036,7037,7038,3574,2872, # 6176
3277,4476,7039,4037,4477,7040,7041,4038,7042,7043,7044,7045,7046,7047,2537,7048, # 6192
7049,7050,7051,7052,7053,7054,4478,7055,7056,3767,3659,4228,3575,7057,7058,4229, # 6208
7059,7060,7061,3660,7062,3212,7063,3885,4039,2460,7064,7065,7066,7067,7068,7069, # 6224
7070,7071,7072,7073,7074,4866,3768,4867,7075,7076,7077,7078,4868,3358,3278,2653, # 6240
7079,7080,4479,3886,7081,7082,4869,7083,7084,7085,7086,7087,7088,2538,7089,7090, # 6256
7091,4040,3150,3769,4870,4041,2896,3359,4230,2930,7092,3279,7093,2967,4480,3213, # 6272
4481,3661,7094,7095,7096,7097,7098,7099,7100,7101,7102,2461,3770,7103,7104,4231, # 6288
3151,7105,7106,7107,4042,3662,7108,7109,4871,3663,4872,4043,3059,7110,7111,7112, # 6304
3493,2988,7113,4873,7114,7115,7116,3771,4874,7117,7118,4232,4875,7119,3576,2336, # 6320
4876,7120,4233,3419,4044,4877,4878,4482,4483,4879,4484,4234,7121,3772,4880,1045, # 6336
3280,3664,4881,4882,7122,7123,7124,7125,4883,7126,2778,7127,4485,4486,7128,4884, # 6352
3214,3887,7129,7130,3215,7131,4885,4045,7132,7133,4046,7134,7135,7136,7137,7138, # 6368
7139,7140,7141,7142,7143,4235,7144,4886,7145,7146,7147,4887,7148,7149,7150,4487, # 6384
4047,4488,7151,7152,4888,4048,2989,3888,7153,3665,7154,4049,7155,7156,7157,7158, # 6400
7159,7160,2931,4889,4890,4489,7161,2631,3889,4236,2779,7162,7163,4891,7164,3060, # 6416
7165,1672,4892,7166,4893,4237,3281,4894,7167,7168,3666,7169,3494,7170,7171,4050, # 6432
7172,7173,3104,3360,3420,4490,4051,2684,4052,7174,4053,7175,7176,7177,2253,4054, # 6448
7178,7179,4895,7180,3152,3890,3153,4491,3216,7181,7182,7183,2968,4238,4492,4055, # 6464
7184,2990,7185,2479,7186,7187,4493,7188,7189,7190,7191,7192,4896,7193,4897,2969, # 6480
4494,4898,7194,3495,7195,7196,4899,4495,7197,3105,2731,7198,4900,7199,7200,7201, # 6496
4056,7202,3361,7203,7204,4496,4901,4902,7205,4497,7206,7207,2315,4903,7208,4904, # 6512
7209,4905,2851,7210,7211,3577,7212,3578,4906,7213,4057,3667,4907,7214,4058,2354, # 6528
3891,2376,3217,3773,7215,7216,7217,7218,7219,4498,7220,4908,3282,2685,7221,3496, # 6544
4909,2632,3154,4910,7222,2337,7223,4911,7224,7225,7226,4912,4913,3283,4239,4499, # 6560
7227,2816,7228,7229,7230,7231,7232,7233,7234,4914,4500,4501,7235,7236,7237,2686, # 6576
7238,4915,7239,2897,4502,7240,4503,7241,2516,7242,4504,3362,3218,7243,7244,7245, # 6592
4916,7246,7247,4505,3363,7248,7249,7250,7251,3774,4506,7252,7253,4917,7254,7255, # 6608
3284,2991,4918,4919,3219,3892,4920,3106,3497,4921,7256,7257,7258,4922,7259,4923, # 6624
3364,4507,4508,4059,7260,4240,3498,7261,7262,4924,7263,2992,3893,4060,3220,7264, # 6640
7265,7266,7267,7268,7269,4509,3775,7270,2817,7271,4061,4925,4510,3776,7272,4241, # 6656
4511,3285,7273,7274,3499,7275,7276,7277,4062,4512,4926,7278,3107,3894,7279,7280, # 6672
4927,7281,4513,7282,7283,3668,7284,7285,4242,4514,4243,7286,2058,4515,4928,4929, # 6688
4516,7287,3286,4244,7288,4517,7289,7290,7291,3669,7292,7293,4930,4931,4932,2355, # 6704
4933,7294,2633,4518,7295,4245,7296,7297,4519,7298,7299,4520,4521,4934,7300,4246, # 6720
4522,7301,7302,7303,3579,7304,4247,4935,7305,4936,7306,7307,7308,7309,3777,7310, # 6736
4523,7311,7312,7313,4248,3580,7314,4524,3778,4249,7315,3581,7316,3287,7317,3221, # 6752
7318,4937,7319,7320,7321,7322,7323,7324,4938,4939,7325,4525,7326,7327,7328,4063, # 6768
7329,7330,4940,7331,7332,4941,7333,4526,7334,3500,2780,1741,4942,2026,1742,7335, # 6784
7336,3582,4527,2388,7337,7338,7339,4528,7340,4250,4943,7341,7342,7343,4944,7344, # 6800
7345,7346,3020,7347,4945,7348,7349,7350,7351,3895,7352,3896,4064,3897,7353,7354, # 6816
7355,4251,7356,7357,3898,7358,3779,7359,3780,3288,7360,7361,4529,7362,4946,4530, # 6832
2027,7363,3899,4531,4947,3222,3583,7364,4948,7365,7366,7367,7368,4949,3501,4950, # 6848
3781,4951,4532,7369,2517,4952,4252,4953,3155,7370,4954,4955,4253,2518,4533,7371, # 6864
7372,2712,4254,7373,7374,7375,3670,4956,3671,7376,2389,3502,4065,7377,2338,7378, # 6880
7379,7380,7381,3061,7382,4957,7383,7384,7385,7386,4958,4534,7387,7388,2993,7389, # 6896
3062,7390,4959,7391,7392,7393,4960,3108,4961,7394,4535,7395,4962,3421,4536,7396, # 6912
4963,7397,4964,1857,7398,4965,7399,7400,2176,3584,4966,7401,7402,3422,4537,3900, # 6928
3585,7403,3782,7404,2852,7405,7406,7407,4538,3783,2654,3423,4967,4539,7408,3784, # 6944
3586,2853,4540,4541,7409,3901,7410,3902,7411,7412,3785,3109,2327,3903,7413,7414, # 6960
2970,4066,2932,7415,7416,7417,3904,3672,3424,7418,4542,4543,4544,7419,4968,7420, # 6976
7421,4255,7422,7423,7424,7425,7426,4067,7427,3673,3365,4545,7428,3110,2559,3674, # 6992
7429,7430,3156,7431,7432,3503,7433,3425,4546,7434,3063,2873,7435,3223,4969,4547, # 7008
4548,2898,4256,4068,7436,4069,3587,3786,2933,3787,4257,4970,4971,3788,7437,4972, # 7024
3064,7438,4549,7439,7440,7441,7442,7443,4973,3905,7444,2874,7445,7446,7447,7448, # 7040
3021,7449,4550,3906,3588,4974,7450,7451,3789,3675,7452,2578,7453,4070,7454,7455, # 7056
7456,4258,3676,7457,4975,7458,4976,4259,3790,3504,2634,4977,3677,4551,4260,7459, # 7072
7460,7461,7462,3907,4261,4978,7463,7464,7465,7466,4979,4980,7467,7468,2213,4262, # 7088
7469,7470,7471,3678,4981,7472,2439,7473,4263,3224,3289,7474,3908,2415,4982,7475, # 7104
4264,7476,4983,2655,7477,7478,2732,4552,2854,2875,7479,7480,4265,7481,4553,4984, # 7120
7482,7483,4266,7484,3679,3366,3680,2818,2781,2782,3367,3589,4554,3065,7485,4071, # 7136
2899,7486,7487,3157,2462,4072,4555,4073,4985,4986,3111,4267,2687,3368,4556,4074, # 7152
3791,4268,7488,3909,2783,7489,2656,1962,3158,4557,4987,1963,3159,3160,7490,3112, # 7168
4988,4989,3022,4990,4991,3792,2855,7491,7492,2971,4558,7493,7494,4992,7495,7496, # 7184
7497,7498,4993,7499,3426,4559,4994,7500,3681,4560,4269,4270,3910,7501,4075,4995, # 7200
4271,7502,7503,4076,7504,4996,7505,3225,4997,4272,4077,2819,3023,7506,7507,2733, # 7216
4561,7508,4562,7509,3369,3793,7510,3590,2508,7511,7512,4273,3113,2994,2616,7513, # 7232
7514,7515,7516,7517,7518,2820,3911,4078,2748,7519,7520,4563,4998,7521,7522,7523, # 7248
7524,4999,4274,7525,4564,3682,2239,4079,4565,7526,7527,7528,7529,5000,7530,7531, # 7264
5001,4275,3794,7532,7533,7534,3066,5002,4566,3161,7535,7536,4080,7537,3162,7538, # 7280
7539,4567,7540,7541,7542,7543,7544,7545,5003,7546,4568,7547,7548,7549,7550,7551, # 7296
7552,7553,7554,7555,7556,5004,7557,7558,7559,5005,7560,3795,7561,4569,7562,7563, # 7312
7564,2821,3796,4276,4277,4081,7565,2876,7566,5006,7567,7568,2900,7569,3797,3912, # 7328
7570,7571,7572,4278,7573,7574,7575,5007,7576,7577,5008,7578,7579,4279,2934,7580, # 7344
7581,5009,7582,4570,7583,4280,7584,7585,7586,4571,4572,3913,7587,4573,3505,7588, # 7360
5010,7589,7590,7591,7592,3798,4574,7593,7594,5011,7595,4281,7596,7597,7598,4282, # 7376
5012,7599,7600,5013,3163,7601,5014,7602,3914,7603,7604,2734,4575,4576,4577,7605, # 7392
7606,7607,7608,7609,3506,5015,4578,7610,4082,7611,2822,2901,2579,3683,3024,4579, # 7408
3507,7612,4580,7613,3226,3799,5016,7614,7615,7616,7617,7618,7619,7620,2995,3290, # 7424
7621,4083,7622,5017,7623,7624,7625,7626,7627,4581,3915,7628,3291,7629,5018,7630, # 7440
7631,7632,7633,4084,7634,7635,3427,3800,7636,7637,4582,7638,5019,4583,5020,7639, # 7456
3916,7640,3801,5021,4584,4283,7641,7642,3428,3591,2269,7643,2617,7644,4585,3592, # 7472
7645,4586,2902,7646,7647,3227,5022,7648,4587,7649,4284,7650,7651,7652,4588,2284, # 7488
7653,5023,7654,7655,7656,4589,5024,3802,7657,7658,5025,3508,4590,7659,7660,7661, # 7504
1969,5026,7662,7663,3684,1821,2688,7664,2028,2509,4285,7665,2823,1841,7666,2689, # 7520
3114,7667,3917,4085,2160,5027,5028,2972,7668,5029,7669,7670,7671,3593,4086,7672, # 7536
4591,4087,5030,3803,7673,7674,7675,7676,7677,7678,7679,4286,2366,4592,4593,3067, # 7552
2328,7680,7681,4594,3594,3918,2029,4287,7682,5031,3919,3370,4288,4595,2856,7683, # 7568
3509,7684,7685,5032,5033,7686,7687,3804,2784,7688,7689,7690,7691,3371,7692,7693, # 7584
2877,5034,7694,7695,3920,4289,4088,7696,7697,7698,5035,7699,5036,4290,5037,5038, # 7600
5039,7700,7701,7702,5040,5041,3228,7703,1760,7704,5042,3229,4596,2106,4089,7705, # 7616
4597,2824,5043,2107,3372,7706,4291,4090,5044,7707,4091,7708,5045,3025,3805,4598, # 7632
4292,4293,4294,3373,7709,4599,7710,5046,7711,7712,5047,5048,3806,7713,7714,7715, # 7648
5049,7716,7717,7718,7719,4600,5050,7720,7721,7722,5051,7723,4295,3429,7724,7725, # 7664
7726,7727,3921,7728,3292,5052,4092,7729,7730,7731,7732,7733,7734,7735,5053,5054, # 7680
7736,7737,7738,7739,3922,3685,7740,7741,7742,7743,2635,5055,7744,5056,4601,7745, # 7696
7746,2560,7747,7748,7749,7750,3923,7751,7752,7753,7754,7755,4296,2903,7756,7757, # 7712
7758,7759,7760,3924,7761,5057,4297,7762,7763,5058,4298,7764,4093,7765,7766,5059, # 7728
3925,7767,7768,7769,7770,7771,7772,7773,7774,7775,7776,3595,7777,4299,5060,4094, # 7744
7778,3293,5061,7779,7780,4300,7781,7782,4602,7783,3596,7784,7785,3430,2367,7786, # 7760
3164,5062,5063,4301,7787,7788,4095,5064,5065,7789,3374,3115,7790,7791,7792,7793, # 7776
7794,7795,7796,3597,4603,7797,7798,3686,3116,3807,5066,7799,7800,5067,7801,7802, # 7792
4604,4302,5068,4303,4096,7803,7804,3294,7805,7806,5069,4605,2690,7807,3026,7808, # 7808
7809,7810,7811,7812,7813,7814,7815,7816,7817,7818,7819,7820,7821,7822,7823,7824, # 7824
7825,7826,7827,7828,7829,7830,7831,7832,7833,7834,7835,7836,7837,7838,7839,7840, # 7840
7841,7842,7843,7844,7845,7846,7847,7848,7849,7850,7851,7852,7853,7854,7855,7856, # 7856
7857,7858,7859,7860,7861,7862,7863,7864,7865,7866,7867,7868,7869,7870,7871,7872, # 7872
7873,7874,7875,7876,7877,7878,7879,7880,7881,7882,7883,7884,7885,7886,7887,7888, # 7888
7889,7890,7891,7892,7893,7894,7895,7896,7897,7898,7899,7900,7901,7902,7903,7904, # 7904
7905,7906,7907,7908,7909,7910,7911,7912,7913,7914,7915,7916,7917,7918,7919,7920, # 7920
7921,7922,7923,7924,3926,7925,7926,7927,7928,7929,7930,7931,7932,7933,7934,7935, # 7936
7936,7937,7938,7939,7940,7941,7942,7943,7944,7945,7946,7947,7948,7949,7950,7951, # 7952
7952,7953,7954,7955,7956,7957,7958,7959,7960,7961,7962,7963,7964,7965,7966,7967, # 7968
7968,7969,7970,7971,7972,7973,7974,7975,7976,7977,7978,7979,7980,7981,7982,7983, # 7984
7984,7985,7986,7987,7988,7989,7990,7991,7992,7993,7994,7995,7996,7997,7998,7999, # 8000
8000,8001,8002,8003,8004,8005,8006,8007,8008,8009,8010,8011,8012,8013,8014,8015, # 8016
8016,8017,8018,8019,8020,8021,8022,8023,8024,8025,8026,8027,8028,8029,8030,8031, # 8032
8032,8033,8034,8035,8036,8037,8038,8039,8040,8041,8042,8043,8044,8045,8046,8047, # 8048
8048,8049,8050,8051,8052,8053,8054,8055,8056,8057,8058,8059,8060,8061,8062,8063, # 8064
8064,8065,8066,8067,8068,8069,8070,8071,8072,8073,8074,8075,8076,8077,8078,8079, # 8080
8080,8081,8082,8083,8084,8085,8086,8087,8088,8089,8090,8091,8092,8093,8094,8095, # 8096
8096,8097,8098,8099,8100,8101,8102,8103,8104,8105,8106,8107,8108,8109,8110,8111, # 8112
8112,8113,8114,8115,8116,8117,8118,8119,8120,8121,8122,8123,8124,8125,8126,8127, # 8128
8128,8129,8130,8131,8132,8133,8134,8135,8136,8137,8138,8139,8140,8141,8142,8143, # 8144
8144,8145,8146,8147,8148,8149,8150,8151,8152,8153,8154,8155,8156,8157,8158,8159, # 8160
8160,8161,8162,8163,8164,8165,8166,8167,8168,8169,8170,8171,8172,8173,8174,8175, # 8176
8176,8177,8178,8179,8180,8181,8182,8183,8184,8185,8186,8187,8188,8189,8190,8191, # 8192
8192,8193,8194,8195,8196,8197,8198,8199,8200,8201,8202,8203,8204,8205,8206,8207, # 8208
8208,8209,8210,8211,8212,8213,8214,8215,8216,8217,8218,8219,8220,8221,8222,8223, # 8224
8224,8225,8226,8227,8228,8229,8230,8231,8232,8233,8234,8235,8236,8237,8238,8239, # 8240
8240,8241,8242,8243,8244,8245,8246,8247,8248,8249,8250,8251,8252,8253,8254,8255, # 8256
8256,8257,8258,8259,8260,8261,8262,8263,8264,8265,8266,8267,8268,8269,8270,8271) # 8272
# flake8: noqa
|
throwable-one/lettuce
|
refs/heads/master
|
tests/integration/lib/Django-1.2.5/tests/regressiontests/syndication/__init__.py
|
12133432
| |
impactlab/eemeter
|
refs/heads/master
|
eemeter/resources/__init__.py
|
12133432
| |
LightStage-Aber/LightStage-Repo
|
refs/heads/master
|
exp/exp_plotting/__init__.py
|
12133432
| |
bhouston/three.js
|
refs/heads/master
|
utils/converters/msgpack/json2msgpack.py
|
331
|
#!/usr/bin/env python
__doc__ = '''
Convert a json file to msgpack.
If fed only an input file the converted will write out a .pack file
of the same base name in the same directory
$ json2msgpack.py -i foo.json
foo.json > foo.pack
Specify an output file path
$ json2msgpack.py -i foo.json -o /bar/tmp/bar.pack
foo.json > /bar/tmp/bar.pack
Dependencies:
https://github.com/msgpack/msgpack-python
'''
import os
import sys
import json
import argparse
sys.path.append(os.path.dirname(os.path.realpath(__file__)))
import msgpack
EXT = '.pack'
def main():
    """Parse command-line flags and run the JSON -> msgpack conversion.

    Flags:
        -i/--infile  (required) input JSON file.
        -o/--outfile optional output path; when omitted, convert() derives
                     it from the input path with a .pack extension.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('-i', '--infile', required=True,
                        help='Input json file to convert to msgpack')
    parser.add_argument('-o', '--outfile',
                        # Fixed typo in the help text: "director" -> "directory".
                        help=('Optional output. If not specified the .pack file '
                              'will write to the same directory as the input file.'))
    args = parser.parse_args()
    convert(args.infile, args.outfile)
def convert(infile, outfile):
    """Read JSON from *infile* and write it to *outfile* as msgpack.

    If *outfile* is falsy it is derived from *infile* by replacing the
    file extension with EXT. The original manual split('.') slicing
    produced just ".pack" for extension-less input names; splitext
    handles that case (and dotted directory names) correctly.
    """
    if not outfile:
        base, _ = os.path.splitext(infile)
        outfile = '%s%s' % (base, EXT)

    print('%s > %s' % (infile, outfile))

    print('reading in JSON')
    with open(infile) as op:
        data = json.load(op)

    print('writing to msgpack')
    with open(outfile, 'wb') as op:
        msgpack.dump(data, op)
# Script entry point: run the CLI only when executed directly.
if __name__ == '__main__':
    main()
|
itzzshirlayyy/Online_Ordering
|
refs/heads/master
|
venv/lib/python2.7/site-packages/pip/_vendor/html5lib/filters/sanitizer.py
|
1734
|
from __future__ import absolute_import, division, unicode_literals
from . import _base
from ..sanitizer import HTMLSanitizerMixin
class Filter(_base.Filter, HTMLSanitizerMixin):
    """Filter stage that sanitizes every token in the stream.

    Each token from the upstream filter is passed through
    HTMLSanitizerMixin.sanitize_token(); tokens the sanitizer rejects
    (falsy results) are dropped from the output.
    """

    def __iter__(self):
        for raw_token in _base.Filter.__iter__(self):
            cleaned = self.sanitize_token(raw_token)
            if cleaned:
                yield cleaned
|
webmedic/booker
|
refs/heads/master
|
src/gdata/alt/__init__.py
|
271
|
#!/usr/bin/python
#
# Copyright (C) 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This package's modules adapt the gdata library to run in other environments
The first example is the appengine module which contains functions and
classes which modify a GDataService object to run on Google App Engine.
"""
|
MOA-2011/enigma2.pli4.0
|
refs/heads/master
|
po/xml2po.py
|
40
|
#!/usr/bin/python
import sys
import os
import string
import re
from xml.sax import make_parser
from xml.sax.handler import ContentHandler, property_lexical_handler
try:
from _xmlplus.sax.saxlib import LexicalHandler
no_comments = False
except ImportError:
class LexicalHandler:
pass
no_comments = True
class parseXML(ContentHandler, LexicalHandler):
    """SAX handler that collects translatable attribute strings.

    Collected entries are (text, comment) tuples added to the set passed
    to the constructor; *comment* is the most recent XML comment
    containing "TRANSLATORS:", or None if there was none.
    """
    def __init__(self, attrlist):
        self.isPointsElement, self.isReboundsElement = 0, 0
        self.attrlist = attrlist
        self.last_comment = None
        # Raw string so the \Z anchor is not interpreted as a (bad)
        # string escape; matches hex colour values like "#ff0000".
        self.ishex = re.compile(r'#[0-9a-fA-F]+\Z')

    def comment(self, comment):
        # Only keep comments explicitly addressed to translators.
        if "TRANSLATORS:" in comment:
            self.last_comment = comment

    def startElement(self, name, attrs):
        for x in ["text", "title", "value", "caption", "description"]:
            try:
                k = str(attrs[x])
                # Skip empty/whitespace-only strings and hex colours.
                if k.strip() != "" and not self.ishex.match(k):
                    # Bug fix: add to the set handed to this instance
                    # (self.attrlist) instead of silently relying on the
                    # module-level global of the same name.
                    self.attrlist.add((attrs[x], self.last_comment))
                    self.last_comment = None
            except KeyError:
                pass
parser = make_parser()
attrlist = set()
contentHandler = parseXML(attrlist)
parser.setContentHandler(contentHandler)
if not no_comments:
parser.setProperty(property_lexical_handler, contentHandler)
for arg in sys.argv[1:]:
if os.path.isdir(arg):
for file in os.listdir(arg):
if (file.endswith(".xml")):
parser.parse(os.path.join(arg, file))
else:
parser.parse(arg)
attrlist = list(attrlist)
attrlist.sort(key=lambda a: a[0])
for (k,c) in attrlist:
print
print '#: ' + arg
string.replace(k, "\\n", "\"\n\"")
if c:
for l in c.split('\n'):
print "#. ", l
print 'msgid "' + str(k) + '"'
print 'msgstr ""'
attrlist = set()
|
richardliaw/ray
|
refs/heads/master
|
rllib/utils/debug.py
|
3
|
import numpy as np
import pprint
from typing import Mapping
from ray.rllib.policy.sample_batch import SampleBatch, MultiAgentBatch
_printer = pprint.PrettyPrinter(indent=2, width=60)
def summarize(obj):
    """Return a pretty-formatted string for an object.

    This has special handling for pretty-formatting of commonly used data
    types in RLlib, such as SampleBatch, numpy arrays, etc.
    """
    condensed = _summarize(obj)
    return _printer.pformat(condensed)
def _summarize(obj):
    """Recursively convert *obj* into a compact, pprint-friendly summary.

    Mappings, namedtuple-likes, lists and tuples are summarized
    element-wise; numpy arrays and RLlib batch types are collapsed into
    short descriptive placeholders; anything else is returned unchanged.
    """
    if isinstance(obj, Mapping):
        return {k: _summarize(v) for k, v in obj.items()}
    elif hasattr(obj, "_asdict"):
        # Namedtuple-like objects.
        return {
            "type": obj.__class__.__name__,
            "data": _summarize(obj._asdict()),
        }
    elif isinstance(obj, list):
        return [_summarize(x) for x in obj]
    elif isinstance(obj, tuple):
        return tuple(_summarize(x) for x in obj)
    elif isinstance(obj, np.ndarray):
        if obj.size == 0:
            return _StringValue("np.ndarray({}, dtype={})".format(
                obj.shape, obj.dtype))
        elif obj.dtype == object or obj.dtype.type is np.str_:
            # Bug fix: `np.object` was deprecated in NumPy 1.20 and removed
            # in 1.24; the builtin `object` is the equivalent dtype spec.
            return _StringValue("np.ndarray({}, dtype={}, head={})".format(
                obj.shape, obj.dtype, _summarize(obj[0])))
        else:
            return _StringValue(
                "np.ndarray({}, dtype={}, min={}, max={}, mean={})".format(
                    obj.shape, obj.dtype, round(float(np.min(obj)), 3),
                    round(float(np.max(obj)), 3), round(
                        float(np.mean(obj)), 3)))
    elif isinstance(obj, MultiAgentBatch):
        return {
            "type": "MultiAgentBatch",
            "policy_batches": _summarize(obj.policy_batches),
            "count": obj.count,
        }
    elif isinstance(obj, SampleBatch):
        return {
            "type": "SampleBatch",
            "data": {k: _summarize(v)
                     for k, v in obj.items()},
        }
    else:
        return obj
class _StringValue:
def __init__(self, value):
self.value = value
def __repr__(self):
return self.value
|
40223136/-2015cd_midterm
|
refs/heads/master
|
static/Brython3.1.1-20150328-091302/Lib/html/entities.py
|
814
|
"""HTML character entity references."""
# maps the HTML entity name to the Unicode codepoint
name2codepoint = {
'AElig': 0x00c6, # latin capital letter AE = latin capital ligature AE, U+00C6 ISOlat1
'Aacute': 0x00c1, # latin capital letter A with acute, U+00C1 ISOlat1
'Acirc': 0x00c2, # latin capital letter A with circumflex, U+00C2 ISOlat1
'Agrave': 0x00c0, # latin capital letter A with grave = latin capital letter A grave, U+00C0 ISOlat1
'Alpha': 0x0391, # greek capital letter alpha, U+0391
'Aring': 0x00c5, # latin capital letter A with ring above = latin capital letter A ring, U+00C5 ISOlat1
'Atilde': 0x00c3, # latin capital letter A with tilde, U+00C3 ISOlat1
'Auml': 0x00c4, # latin capital letter A with diaeresis, U+00C4 ISOlat1
'Beta': 0x0392, # greek capital letter beta, U+0392
'Ccedil': 0x00c7, # latin capital letter C with cedilla, U+00C7 ISOlat1
'Chi': 0x03a7, # greek capital letter chi, U+03A7
'Dagger': 0x2021, # double dagger, U+2021 ISOpub
'Delta': 0x0394, # greek capital letter delta, U+0394 ISOgrk3
'ETH': 0x00d0, # latin capital letter ETH, U+00D0 ISOlat1
'Eacute': 0x00c9, # latin capital letter E with acute, U+00C9 ISOlat1
'Ecirc': 0x00ca, # latin capital letter E with circumflex, U+00CA ISOlat1
'Egrave': 0x00c8, # latin capital letter E with grave, U+00C8 ISOlat1
'Epsilon': 0x0395, # greek capital letter epsilon, U+0395
'Eta': 0x0397, # greek capital letter eta, U+0397
'Euml': 0x00cb, # latin capital letter E with diaeresis, U+00CB ISOlat1
'Gamma': 0x0393, # greek capital letter gamma, U+0393 ISOgrk3
'Iacute': 0x00cd, # latin capital letter I with acute, U+00CD ISOlat1
'Icirc': 0x00ce, # latin capital letter I with circumflex, U+00CE ISOlat1
'Igrave': 0x00cc, # latin capital letter I with grave, U+00CC ISOlat1
'Iota': 0x0399, # greek capital letter iota, U+0399
'Iuml': 0x00cf, # latin capital letter I with diaeresis, U+00CF ISOlat1
'Kappa': 0x039a, # greek capital letter kappa, U+039A
'Lambda': 0x039b, # greek capital letter lambda, U+039B ISOgrk3
'Mu': 0x039c, # greek capital letter mu, U+039C
'Ntilde': 0x00d1, # latin capital letter N with tilde, U+00D1 ISOlat1
'Nu': 0x039d, # greek capital letter nu, U+039D
'OElig': 0x0152, # latin capital ligature OE, U+0152 ISOlat2
'Oacute': 0x00d3, # latin capital letter O with acute, U+00D3 ISOlat1
'Ocirc': 0x00d4, # latin capital letter O with circumflex, U+00D4 ISOlat1
'Ograve': 0x00d2, # latin capital letter O with grave, U+00D2 ISOlat1
'Omega': 0x03a9, # greek capital letter omega, U+03A9 ISOgrk3
'Omicron': 0x039f, # greek capital letter omicron, U+039F
'Oslash': 0x00d8, # latin capital letter O with stroke = latin capital letter O slash, U+00D8 ISOlat1
'Otilde': 0x00d5, # latin capital letter O with tilde, U+00D5 ISOlat1
'Ouml': 0x00d6, # latin capital letter O with diaeresis, U+00D6 ISOlat1
'Phi': 0x03a6, # greek capital letter phi, U+03A6 ISOgrk3
'Pi': 0x03a0, # greek capital letter pi, U+03A0 ISOgrk3
'Prime': 0x2033, # double prime = seconds = inches, U+2033 ISOtech
'Psi': 0x03a8, # greek capital letter psi, U+03A8 ISOgrk3
'Rho': 0x03a1, # greek capital letter rho, U+03A1
'Scaron': 0x0160, # latin capital letter S with caron, U+0160 ISOlat2
'Sigma': 0x03a3, # greek capital letter sigma, U+03A3 ISOgrk3
'THORN': 0x00de, # latin capital letter THORN, U+00DE ISOlat1
'Tau': 0x03a4, # greek capital letter tau, U+03A4
'Theta': 0x0398, # greek capital letter theta, U+0398 ISOgrk3
'Uacute': 0x00da, # latin capital letter U with acute, U+00DA ISOlat1
'Ucirc': 0x00db, # latin capital letter U with circumflex, U+00DB ISOlat1
'Ugrave': 0x00d9, # latin capital letter U with grave, U+00D9 ISOlat1
'Upsilon': 0x03a5, # greek capital letter upsilon, U+03A5 ISOgrk3
'Uuml': 0x00dc, # latin capital letter U with diaeresis, U+00DC ISOlat1
'Xi': 0x039e, # greek capital letter xi, U+039E ISOgrk3
'Yacute': 0x00dd, # latin capital letter Y with acute, U+00DD ISOlat1
'Yuml': 0x0178, # latin capital letter Y with diaeresis, U+0178 ISOlat2
'Zeta': 0x0396, # greek capital letter zeta, U+0396
'aacute': 0x00e1, # latin small letter a with acute, U+00E1 ISOlat1
'acirc': 0x00e2, # latin small letter a with circumflex, U+00E2 ISOlat1
'acute': 0x00b4, # acute accent = spacing acute, U+00B4 ISOdia
'aelig': 0x00e6, # latin small letter ae = latin small ligature ae, U+00E6 ISOlat1
'agrave': 0x00e0, # latin small letter a with grave = latin small letter a grave, U+00E0 ISOlat1
'alefsym': 0x2135, # alef symbol = first transfinite cardinal, U+2135 NEW
'alpha': 0x03b1, # greek small letter alpha, U+03B1 ISOgrk3
'amp': 0x0026, # ampersand, U+0026 ISOnum
'and': 0x2227, # logical and = wedge, U+2227 ISOtech
'ang': 0x2220, # angle, U+2220 ISOamso
'aring': 0x00e5, # latin small letter a with ring above = latin small letter a ring, U+00E5 ISOlat1
'asymp': 0x2248, # almost equal to = asymptotic to, U+2248 ISOamsr
'atilde': 0x00e3, # latin small letter a with tilde, U+00E3 ISOlat1
'auml': 0x00e4, # latin small letter a with diaeresis, U+00E4 ISOlat1
'bdquo': 0x201e, # double low-9 quotation mark, U+201E NEW
'beta': 0x03b2, # greek small letter beta, U+03B2 ISOgrk3
'brvbar': 0x00a6, # broken bar = broken vertical bar, U+00A6 ISOnum
'bull': 0x2022, # bullet = black small circle, U+2022 ISOpub
'cap': 0x2229, # intersection = cap, U+2229 ISOtech
'ccedil': 0x00e7, # latin small letter c with cedilla, U+00E7 ISOlat1
'cedil': 0x00b8, # cedilla = spacing cedilla, U+00B8 ISOdia
'cent': 0x00a2, # cent sign, U+00A2 ISOnum
'chi': 0x03c7, # greek small letter chi, U+03C7 ISOgrk3
'circ': 0x02c6, # modifier letter circumflex accent, U+02C6 ISOpub
'clubs': 0x2663, # black club suit = shamrock, U+2663 ISOpub
'cong': 0x2245, # approximately equal to, U+2245 ISOtech
'copy': 0x00a9, # copyright sign, U+00A9 ISOnum
'crarr': 0x21b5, # downwards arrow with corner leftwards = carriage return, U+21B5 NEW
'cup': 0x222a, # union = cup, U+222A ISOtech
'curren': 0x00a4, # currency sign, U+00A4 ISOnum
'dArr': 0x21d3, # downwards double arrow, U+21D3 ISOamsa
'dagger': 0x2020, # dagger, U+2020 ISOpub
'darr': 0x2193, # downwards arrow, U+2193 ISOnum
'deg': 0x00b0, # degree sign, U+00B0 ISOnum
'delta': 0x03b4, # greek small letter delta, U+03B4 ISOgrk3
'diams': 0x2666, # black diamond suit, U+2666 ISOpub
'divide': 0x00f7, # division sign, U+00F7 ISOnum
'eacute': 0x00e9, # latin small letter e with acute, U+00E9 ISOlat1
'ecirc': 0x00ea, # latin small letter e with circumflex, U+00EA ISOlat1
'egrave': 0x00e8, # latin small letter e with grave, U+00E8 ISOlat1
'empty': 0x2205, # empty set = null set = diameter, U+2205 ISOamso
'emsp': 0x2003, # em space, U+2003 ISOpub
'ensp': 0x2002, # en space, U+2002 ISOpub
'epsilon': 0x03b5, # greek small letter epsilon, U+03B5 ISOgrk3
'equiv': 0x2261, # identical to, U+2261 ISOtech
'eta': 0x03b7, # greek small letter eta, U+03B7 ISOgrk3
'eth': 0x00f0, # latin small letter eth, U+00F0 ISOlat1
'euml': 0x00eb, # latin small letter e with diaeresis, U+00EB ISOlat1
'euro': 0x20ac, # euro sign, U+20AC NEW
'exist': 0x2203, # there exists, U+2203 ISOtech
'fnof': 0x0192, # latin small f with hook = function = florin, U+0192 ISOtech
'forall': 0x2200, # for all, U+2200 ISOtech
'frac12': 0x00bd, # vulgar fraction one half = fraction one half, U+00BD ISOnum
'frac14': 0x00bc, # vulgar fraction one quarter = fraction one quarter, U+00BC ISOnum
'frac34': 0x00be, # vulgar fraction three quarters = fraction three quarters, U+00BE ISOnum
'frasl': 0x2044, # fraction slash, U+2044 NEW
'gamma': 0x03b3, # greek small letter gamma, U+03B3 ISOgrk3
'ge': 0x2265, # greater-than or equal to, U+2265 ISOtech
'gt': 0x003e, # greater-than sign, U+003E ISOnum
'hArr': 0x21d4, # left right double arrow, U+21D4 ISOamsa
'harr': 0x2194, # left right arrow, U+2194 ISOamsa
'hearts': 0x2665, # black heart suit = valentine, U+2665 ISOpub
'hellip': 0x2026, # horizontal ellipsis = three dot leader, U+2026 ISOpub
'iacute': 0x00ed, # latin small letter i with acute, U+00ED ISOlat1
'icirc': 0x00ee, # latin small letter i with circumflex, U+00EE ISOlat1
'iexcl': 0x00a1, # inverted exclamation mark, U+00A1 ISOnum
'igrave': 0x00ec, # latin small letter i with grave, U+00EC ISOlat1
'image': 0x2111, # blackletter capital I = imaginary part, U+2111 ISOamso
'infin': 0x221e, # infinity, U+221E ISOtech
'int': 0x222b, # integral, U+222B ISOtech
'iota': 0x03b9, # greek small letter iota, U+03B9 ISOgrk3
'iquest': 0x00bf, # inverted question mark = turned question mark, U+00BF ISOnum
'isin': 0x2208, # element of, U+2208 ISOtech
'iuml': 0x00ef, # latin small letter i with diaeresis, U+00EF ISOlat1
'kappa': 0x03ba, # greek small letter kappa, U+03BA ISOgrk3
'lArr': 0x21d0, # leftwards double arrow, U+21D0 ISOtech
'lambda': 0x03bb, # greek small letter lambda, U+03BB ISOgrk3
'lang': 0x2329, # left-pointing angle bracket = bra, U+2329 ISOtech
'laquo': 0x00ab, # left-pointing double angle quotation mark = left pointing guillemet, U+00AB ISOnum
'larr': 0x2190, # leftwards arrow, U+2190 ISOnum
'lceil': 0x2308, # left ceiling = apl upstile, U+2308 ISOamsc
'ldquo': 0x201c, # left double quotation mark, U+201C ISOnum
'le': 0x2264, # less-than or equal to, U+2264 ISOtech
'lfloor': 0x230a, # left floor = apl downstile, U+230A ISOamsc
'lowast': 0x2217, # asterisk operator, U+2217 ISOtech
'loz': 0x25ca, # lozenge, U+25CA ISOpub
'lrm': 0x200e, # left-to-right mark, U+200E NEW RFC 2070
'lsaquo': 0x2039, # single left-pointing angle quotation mark, U+2039 ISO proposed
'lsquo': 0x2018, # left single quotation mark, U+2018 ISOnum
'lt': 0x003c, # less-than sign, U+003C ISOnum
'macr': 0x00af, # macron = spacing macron = overline = APL overbar, U+00AF ISOdia
'mdash': 0x2014, # em dash, U+2014 ISOpub
'micro': 0x00b5, # micro sign, U+00B5 ISOnum
'middot': 0x00b7, # middle dot = Georgian comma = Greek middle dot, U+00B7 ISOnum
'minus': 0x2212, # minus sign, U+2212 ISOtech
'mu': 0x03bc, # greek small letter mu, U+03BC ISOgrk3
'nabla': 0x2207, # nabla = backward difference, U+2207 ISOtech
'nbsp': 0x00a0, # no-break space = non-breaking space, U+00A0 ISOnum
'ndash': 0x2013, # en dash, U+2013 ISOpub
'ne': 0x2260, # not equal to, U+2260 ISOtech
'ni': 0x220b, # contains as member, U+220B ISOtech
'not': 0x00ac, # not sign, U+00AC ISOnum
'notin': 0x2209, # not an element of, U+2209 ISOtech
'nsub': 0x2284, # not a subset of, U+2284 ISOamsn
'ntilde': 0x00f1, # latin small letter n with tilde, U+00F1 ISOlat1
'nu': 0x03bd, # greek small letter nu, U+03BD ISOgrk3
'oacute': 0x00f3, # latin small letter o with acute, U+00F3 ISOlat1
'ocirc': 0x00f4, # latin small letter o with circumflex, U+00F4 ISOlat1
'oelig': 0x0153, # latin small ligature oe, U+0153 ISOlat2
'ograve': 0x00f2, # latin small letter o with grave, U+00F2 ISOlat1
'oline': 0x203e, # overline = spacing overscore, U+203E NEW
'omega': 0x03c9, # greek small letter omega, U+03C9 ISOgrk3
'omicron': 0x03bf, # greek small letter omicron, U+03BF NEW
'oplus': 0x2295, # circled plus = direct sum, U+2295 ISOamsb
'or': 0x2228, # logical or = vee, U+2228 ISOtech
'ordf': 0x00aa, # feminine ordinal indicator, U+00AA ISOnum
'ordm': 0x00ba, # masculine ordinal indicator, U+00BA ISOnum
'oslash': 0x00f8, # latin small letter o with stroke, = latin small letter o slash, U+00F8 ISOlat1
'otilde': 0x00f5, # latin small letter o with tilde, U+00F5 ISOlat1
'otimes': 0x2297, # circled times = vector product, U+2297 ISOamsb
'ouml': 0x00f6, # latin small letter o with diaeresis, U+00F6 ISOlat1
'para': 0x00b6, # pilcrow sign = paragraph sign, U+00B6 ISOnum
'part': 0x2202, # partial differential, U+2202 ISOtech
'permil': 0x2030, # per mille sign, U+2030 ISOtech
'perp': 0x22a5, # up tack = orthogonal to = perpendicular, U+22A5 ISOtech
'phi': 0x03c6, # greek small letter phi, U+03C6 ISOgrk3
'pi': 0x03c0, # greek small letter pi, U+03C0 ISOgrk3
'piv': 0x03d6, # greek pi symbol, U+03D6 ISOgrk3
'plusmn': 0x00b1, # plus-minus sign = plus-or-minus sign, U+00B1 ISOnum
'pound': 0x00a3, # pound sign, U+00A3 ISOnum
'prime': 0x2032, # prime = minutes = feet, U+2032 ISOtech
'prod': 0x220f, # n-ary product = product sign, U+220F ISOamsb
'prop': 0x221d, # proportional to, U+221D ISOtech
'psi': 0x03c8, # greek small letter psi, U+03C8 ISOgrk3
'quot': 0x0022, # quotation mark = APL quote, U+0022 ISOnum
'rArr': 0x21d2, # rightwards double arrow, U+21D2 ISOtech
'radic': 0x221a, # square root = radical sign, U+221A ISOtech
'rang': 0x232a, # right-pointing angle bracket = ket, U+232A ISOtech
'raquo': 0x00bb, # right-pointing double angle quotation mark = right pointing guillemet, U+00BB ISOnum
'rarr': 0x2192, # rightwards arrow, U+2192 ISOnum
'rceil': 0x2309, # right ceiling, U+2309 ISOamsc
'rdquo': 0x201d, # right double quotation mark, U+201D ISOnum
'real': 0x211c, # blackletter capital R = real part symbol, U+211C ISOamso
'reg': 0x00ae, # registered sign = registered trade mark sign, U+00AE ISOnum
'rfloor': 0x230b, # right floor, U+230B ISOamsc
'rho': 0x03c1, # greek small letter rho, U+03C1 ISOgrk3
'rlm': 0x200f, # right-to-left mark, U+200F NEW RFC 2070
'rsaquo': 0x203a, # single right-pointing angle quotation mark, U+203A ISO proposed
'rsquo': 0x2019, # right single quotation mark, U+2019 ISOnum
'sbquo': 0x201a, # single low-9 quotation mark, U+201A NEW
'scaron': 0x0161, # latin small letter s with caron, U+0161 ISOlat2
'sdot': 0x22c5, # dot operator, U+22C5 ISOamsb
'sect': 0x00a7, # section sign, U+00A7 ISOnum
'shy': 0x00ad, # soft hyphen = discretionary hyphen, U+00AD ISOnum
'sigma': 0x03c3, # greek small letter sigma, U+03C3 ISOgrk3
'sigmaf': 0x03c2, # greek small letter final sigma, U+03C2 ISOgrk3
'sim': 0x223c, # tilde operator = varies with = similar to, U+223C ISOtech
'spades': 0x2660, # black spade suit, U+2660 ISOpub
'sub': 0x2282, # subset of, U+2282 ISOtech
'sube': 0x2286, # subset of or equal to, U+2286 ISOtech
'sum': 0x2211, # n-ary sumation, U+2211 ISOamsb
'sup': 0x2283, # superset of, U+2283 ISOtech
'sup1': 0x00b9, # superscript one = superscript digit one, U+00B9 ISOnum
'sup2': 0x00b2, # superscript two = superscript digit two = squared, U+00B2 ISOnum
'sup3': 0x00b3, # superscript three = superscript digit three = cubed, U+00B3 ISOnum
'supe': 0x2287, # superset of or equal to, U+2287 ISOtech
'szlig': 0x00df, # latin small letter sharp s = ess-zed, U+00DF ISOlat1
'tau': 0x03c4, # greek small letter tau, U+03C4 ISOgrk3
'there4': 0x2234, # therefore, U+2234 ISOtech
'theta': 0x03b8, # greek small letter theta, U+03B8 ISOgrk3
'thetasym': 0x03d1, # greek small letter theta symbol, U+03D1 NEW
'thinsp': 0x2009, # thin space, U+2009 ISOpub
'thorn': 0x00fe, # latin small letter thorn with, U+00FE ISOlat1
'tilde': 0x02dc, # small tilde, U+02DC ISOdia
'times': 0x00d7, # multiplication sign, U+00D7 ISOnum
'trade': 0x2122, # trade mark sign, U+2122 ISOnum
'uArr': 0x21d1, # upwards double arrow, U+21D1 ISOamsa
'uacute': 0x00fa, # latin small letter u with acute, U+00FA ISOlat1
'uarr': 0x2191, # upwards arrow, U+2191 ISOnum
'ucirc': 0x00fb, # latin small letter u with circumflex, U+00FB ISOlat1
'ugrave': 0x00f9, # latin small letter u with grave, U+00F9 ISOlat1
'uml': 0x00a8, # diaeresis = spacing diaeresis, U+00A8 ISOdia
'upsih': 0x03d2, # greek upsilon with hook symbol, U+03D2 NEW
'upsilon': 0x03c5, # greek small letter upsilon, U+03C5 ISOgrk3
'uuml': 0x00fc, # latin small letter u with diaeresis, U+00FC ISOlat1
'weierp': 0x2118, # script capital P = power set = Weierstrass p, U+2118 ISOamso
'xi': 0x03be, # greek small letter xi, U+03BE ISOgrk3
'yacute': 0x00fd, # latin small letter y with acute, U+00FD ISOlat1
'yen': 0x00a5, # yen sign = yuan sign, U+00A5 ISOnum
'yuml': 0x00ff, # latin small letter y with diaeresis, U+00FF ISOlat1
'zeta': 0x03b6, # greek small letter zeta, U+03B6 ISOgrk3
'zwj': 0x200d, # zero width joiner, U+200D NEW RFC 2070
'zwnj': 0x200c, # zero width non-joiner, U+200C NEW RFC 2070
}
# maps the HTML5 named character references to the equivalent Unicode character(s)
html5 = {
'Aacute': '\xc1',
'aacute': '\xe1',
'Aacute;': '\xc1',
'aacute;': '\xe1',
'Abreve;': '\u0102',
'abreve;': '\u0103',
'ac;': '\u223e',
'acd;': '\u223f',
'acE;': '\u223e\u0333',
'Acirc': '\xc2',
'acirc': '\xe2',
'Acirc;': '\xc2',
'acirc;': '\xe2',
'acute': '\xb4',
'acute;': '\xb4',
'Acy;': '\u0410',
'acy;': '\u0430',
'AElig': '\xc6',
'aelig': '\xe6',
'AElig;': '\xc6',
'aelig;': '\xe6',
'af;': '\u2061',
'Afr;': '\U0001d504',
'afr;': '\U0001d51e',
'Agrave': '\xc0',
'agrave': '\xe0',
'Agrave;': '\xc0',
'agrave;': '\xe0',
'alefsym;': '\u2135',
'aleph;': '\u2135',
'Alpha;': '\u0391',
'alpha;': '\u03b1',
'Amacr;': '\u0100',
'amacr;': '\u0101',
'amalg;': '\u2a3f',
'AMP': '&',
'amp': '&',
'AMP;': '&',
'amp;': '&',
'And;': '\u2a53',
'and;': '\u2227',
'andand;': '\u2a55',
'andd;': '\u2a5c',
'andslope;': '\u2a58',
'andv;': '\u2a5a',
'ang;': '\u2220',
'ange;': '\u29a4',
'angle;': '\u2220',
'angmsd;': '\u2221',
'angmsdaa;': '\u29a8',
'angmsdab;': '\u29a9',
'angmsdac;': '\u29aa',
'angmsdad;': '\u29ab',
'angmsdae;': '\u29ac',
'angmsdaf;': '\u29ad',
'angmsdag;': '\u29ae',
'angmsdah;': '\u29af',
'angrt;': '\u221f',
'angrtvb;': '\u22be',
'angrtvbd;': '\u299d',
'angsph;': '\u2222',
'angst;': '\xc5',
'angzarr;': '\u237c',
'Aogon;': '\u0104',
'aogon;': '\u0105',
'Aopf;': '\U0001d538',
'aopf;': '\U0001d552',
'ap;': '\u2248',
'apacir;': '\u2a6f',
'apE;': '\u2a70',
'ape;': '\u224a',
'apid;': '\u224b',
'apos;': "'",
'ApplyFunction;': '\u2061',
'approx;': '\u2248',
'approxeq;': '\u224a',
'Aring': '\xc5',
'aring': '\xe5',
'Aring;': '\xc5',
'aring;': '\xe5',
'Ascr;': '\U0001d49c',
'ascr;': '\U0001d4b6',
'Assign;': '\u2254',
'ast;': '*',
'asymp;': '\u2248',
'asympeq;': '\u224d',
'Atilde': '\xc3',
'atilde': '\xe3',
'Atilde;': '\xc3',
'atilde;': '\xe3',
'Auml': '\xc4',
'auml': '\xe4',
'Auml;': '\xc4',
'auml;': '\xe4',
'awconint;': '\u2233',
'awint;': '\u2a11',
'backcong;': '\u224c',
'backepsilon;': '\u03f6',
'backprime;': '\u2035',
'backsim;': '\u223d',
'backsimeq;': '\u22cd',
'Backslash;': '\u2216',
'Barv;': '\u2ae7',
'barvee;': '\u22bd',
'Barwed;': '\u2306',
'barwed;': '\u2305',
'barwedge;': '\u2305',
'bbrk;': '\u23b5',
'bbrktbrk;': '\u23b6',
'bcong;': '\u224c',
'Bcy;': '\u0411',
'bcy;': '\u0431',
'bdquo;': '\u201e',
'becaus;': '\u2235',
'Because;': '\u2235',
'because;': '\u2235',
'bemptyv;': '\u29b0',
'bepsi;': '\u03f6',
'bernou;': '\u212c',
'Bernoullis;': '\u212c',
'Beta;': '\u0392',
'beta;': '\u03b2',
'beth;': '\u2136',
'between;': '\u226c',
'Bfr;': '\U0001d505',
'bfr;': '\U0001d51f',
'bigcap;': '\u22c2',
'bigcirc;': '\u25ef',
'bigcup;': '\u22c3',
'bigodot;': '\u2a00',
'bigoplus;': '\u2a01',
'bigotimes;': '\u2a02',
'bigsqcup;': '\u2a06',
'bigstar;': '\u2605',
'bigtriangledown;': '\u25bd',
'bigtriangleup;': '\u25b3',
'biguplus;': '\u2a04',
'bigvee;': '\u22c1',
'bigwedge;': '\u22c0',
'bkarow;': '\u290d',
'blacklozenge;': '\u29eb',
'blacksquare;': '\u25aa',
'blacktriangle;': '\u25b4',
'blacktriangledown;': '\u25be',
'blacktriangleleft;': '\u25c2',
'blacktriangleright;': '\u25b8',
'blank;': '\u2423',
'blk12;': '\u2592',
'blk14;': '\u2591',
'blk34;': '\u2593',
'block;': '\u2588',
'bne;': '=\u20e5',
'bnequiv;': '\u2261\u20e5',
'bNot;': '\u2aed',
'bnot;': '\u2310',
'Bopf;': '\U0001d539',
'bopf;': '\U0001d553',
'bot;': '\u22a5',
'bottom;': '\u22a5',
'bowtie;': '\u22c8',
'boxbox;': '\u29c9',
'boxDL;': '\u2557',
'boxDl;': '\u2556',
'boxdL;': '\u2555',
'boxdl;': '\u2510',
'boxDR;': '\u2554',
'boxDr;': '\u2553',
'boxdR;': '\u2552',
'boxdr;': '\u250c',
'boxH;': '\u2550',
'boxh;': '\u2500',
'boxHD;': '\u2566',
'boxHd;': '\u2564',
'boxhD;': '\u2565',
'boxhd;': '\u252c',
'boxHU;': '\u2569',
'boxHu;': '\u2567',
'boxhU;': '\u2568',
'boxhu;': '\u2534',
'boxminus;': '\u229f',
'boxplus;': '\u229e',
'boxtimes;': '\u22a0',
'boxUL;': '\u255d',
'boxUl;': '\u255c',
'boxuL;': '\u255b',
'boxul;': '\u2518',
'boxUR;': '\u255a',
'boxUr;': '\u2559',
'boxuR;': '\u2558',
'boxur;': '\u2514',
'boxV;': '\u2551',
'boxv;': '\u2502',
'boxVH;': '\u256c',
'boxVh;': '\u256b',
'boxvH;': '\u256a',
'boxvh;': '\u253c',
'boxVL;': '\u2563',
'boxVl;': '\u2562',
'boxvL;': '\u2561',
'boxvl;': '\u2524',
'boxVR;': '\u2560',
'boxVr;': '\u255f',
'boxvR;': '\u255e',
'boxvr;': '\u251c',
'bprime;': '\u2035',
'Breve;': '\u02d8',
'breve;': '\u02d8',
'brvbar': '\xa6',
'brvbar;': '\xa6',
'Bscr;': '\u212c',
'bscr;': '\U0001d4b7',
'bsemi;': '\u204f',
'bsim;': '\u223d',
'bsime;': '\u22cd',
'bsol;': '\\',
'bsolb;': '\u29c5',
'bsolhsub;': '\u27c8',
'bull;': '\u2022',
'bullet;': '\u2022',
'bump;': '\u224e',
'bumpE;': '\u2aae',
'bumpe;': '\u224f',
'Bumpeq;': '\u224e',
'bumpeq;': '\u224f',
'Cacute;': '\u0106',
'cacute;': '\u0107',
'Cap;': '\u22d2',
'cap;': '\u2229',
'capand;': '\u2a44',
'capbrcup;': '\u2a49',
'capcap;': '\u2a4b',
'capcup;': '\u2a47',
'capdot;': '\u2a40',
'CapitalDifferentialD;': '\u2145',
'caps;': '\u2229\ufe00',
'caret;': '\u2041',
'caron;': '\u02c7',
'Cayleys;': '\u212d',
'ccaps;': '\u2a4d',
'Ccaron;': '\u010c',
'ccaron;': '\u010d',
'Ccedil': '\xc7',
'ccedil': '\xe7',
'Ccedil;': '\xc7',
'ccedil;': '\xe7',
'Ccirc;': '\u0108',
'ccirc;': '\u0109',
'Cconint;': '\u2230',
'ccups;': '\u2a4c',
'ccupssm;': '\u2a50',
'Cdot;': '\u010a',
'cdot;': '\u010b',
'cedil': '\xb8',
'cedil;': '\xb8',
'Cedilla;': '\xb8',
'cemptyv;': '\u29b2',
'cent': '\xa2',
'cent;': '\xa2',
'CenterDot;': '\xb7',
'centerdot;': '\xb7',
'Cfr;': '\u212d',
'cfr;': '\U0001d520',
'CHcy;': '\u0427',
'chcy;': '\u0447',
'check;': '\u2713',
'checkmark;': '\u2713',
'Chi;': '\u03a7',
'chi;': '\u03c7',
'cir;': '\u25cb',
'circ;': '\u02c6',
'circeq;': '\u2257',
'circlearrowleft;': '\u21ba',
'circlearrowright;': '\u21bb',
'circledast;': '\u229b',
'circledcirc;': '\u229a',
'circleddash;': '\u229d',
'CircleDot;': '\u2299',
'circledR;': '\xae',
'circledS;': '\u24c8',
'CircleMinus;': '\u2296',
'CirclePlus;': '\u2295',
'CircleTimes;': '\u2297',
'cirE;': '\u29c3',
'cire;': '\u2257',
'cirfnint;': '\u2a10',
'cirmid;': '\u2aef',
'cirscir;': '\u29c2',
'ClockwiseContourIntegral;': '\u2232',
'CloseCurlyDoubleQuote;': '\u201d',
'CloseCurlyQuote;': '\u2019',
'clubs;': '\u2663',
'clubsuit;': '\u2663',
'Colon;': '\u2237',
'colon;': ':',
'Colone;': '\u2a74',
'colone;': '\u2254',
'coloneq;': '\u2254',
'comma;': ',',
'commat;': '@',
'comp;': '\u2201',
'compfn;': '\u2218',
'complement;': '\u2201',
'complexes;': '\u2102',
'cong;': '\u2245',
'congdot;': '\u2a6d',
'Congruent;': '\u2261',
'Conint;': '\u222f',
'conint;': '\u222e',
'ContourIntegral;': '\u222e',
'Copf;': '\u2102',
'copf;': '\U0001d554',
'coprod;': '\u2210',
'Coproduct;': '\u2210',
'COPY': '\xa9',
'copy': '\xa9',
'COPY;': '\xa9',
'copy;': '\xa9',
'copysr;': '\u2117',
'CounterClockwiseContourIntegral;': '\u2233',
'crarr;': '\u21b5',
'Cross;': '\u2a2f',
'cross;': '\u2717',
'Cscr;': '\U0001d49e',
'cscr;': '\U0001d4b8',
'csub;': '\u2acf',
'csube;': '\u2ad1',
'csup;': '\u2ad0',
'csupe;': '\u2ad2',
'ctdot;': '\u22ef',
'cudarrl;': '\u2938',
'cudarrr;': '\u2935',
'cuepr;': '\u22de',
'cuesc;': '\u22df',
'cularr;': '\u21b6',
'cularrp;': '\u293d',
'Cup;': '\u22d3',
'cup;': '\u222a',
'cupbrcap;': '\u2a48',
'CupCap;': '\u224d',
'cupcap;': '\u2a46',
'cupcup;': '\u2a4a',
'cupdot;': '\u228d',
'cupor;': '\u2a45',
'cups;': '\u222a\ufe00',
'curarr;': '\u21b7',
'curarrm;': '\u293c',
'curlyeqprec;': '\u22de',
'curlyeqsucc;': '\u22df',
'curlyvee;': '\u22ce',
'curlywedge;': '\u22cf',
'curren': '\xa4',
'curren;': '\xa4',
'curvearrowleft;': '\u21b6',
'curvearrowright;': '\u21b7',
'cuvee;': '\u22ce',
'cuwed;': '\u22cf',
'cwconint;': '\u2232',
'cwint;': '\u2231',
'cylcty;': '\u232d',
'Dagger;': '\u2021',
'dagger;': '\u2020',
'daleth;': '\u2138',
'Darr;': '\u21a1',
'dArr;': '\u21d3',
'darr;': '\u2193',
'dash;': '\u2010',
'Dashv;': '\u2ae4',
'dashv;': '\u22a3',
'dbkarow;': '\u290f',
'dblac;': '\u02dd',
'Dcaron;': '\u010e',
'dcaron;': '\u010f',
'Dcy;': '\u0414',
'dcy;': '\u0434',
'DD;': '\u2145',
'dd;': '\u2146',
'ddagger;': '\u2021',
'ddarr;': '\u21ca',
'DDotrahd;': '\u2911',
'ddotseq;': '\u2a77',
'deg': '\xb0',
'deg;': '\xb0',
'Del;': '\u2207',
'Delta;': '\u0394',
'delta;': '\u03b4',
'demptyv;': '\u29b1',
'dfisht;': '\u297f',
'Dfr;': '\U0001d507',
'dfr;': '\U0001d521',
'dHar;': '\u2965',
'dharl;': '\u21c3',
'dharr;': '\u21c2',
'DiacriticalAcute;': '\xb4',
'DiacriticalDot;': '\u02d9',
'DiacriticalDoubleAcute;': '\u02dd',
'DiacriticalGrave;': '`',
'DiacriticalTilde;': '\u02dc',
'diam;': '\u22c4',
'Diamond;': '\u22c4',
'diamond;': '\u22c4',
'diamondsuit;': '\u2666',
'diams;': '\u2666',
'die;': '\xa8',
'DifferentialD;': '\u2146',
'digamma;': '\u03dd',
'disin;': '\u22f2',
'div;': '\xf7',
'divide': '\xf7',
'divide;': '\xf7',
'divideontimes;': '\u22c7',
'divonx;': '\u22c7',
'DJcy;': '\u0402',
'djcy;': '\u0452',
'dlcorn;': '\u231e',
'dlcrop;': '\u230d',
'dollar;': '$',
'Dopf;': '\U0001d53b',
'dopf;': '\U0001d555',
'Dot;': '\xa8',
'dot;': '\u02d9',
'DotDot;': '\u20dc',
'doteq;': '\u2250',
'doteqdot;': '\u2251',
'DotEqual;': '\u2250',
'dotminus;': '\u2238',
'dotplus;': '\u2214',
'dotsquare;': '\u22a1',
'doublebarwedge;': '\u2306',
'DoubleContourIntegral;': '\u222f',
'DoubleDot;': '\xa8',
'DoubleDownArrow;': '\u21d3',
'DoubleLeftArrow;': '\u21d0',
'DoubleLeftRightArrow;': '\u21d4',
'DoubleLeftTee;': '\u2ae4',
'DoubleLongLeftArrow;': '\u27f8',
'DoubleLongLeftRightArrow;': '\u27fa',
'DoubleLongRightArrow;': '\u27f9',
'DoubleRightArrow;': '\u21d2',
'DoubleRightTee;': '\u22a8',
'DoubleUpArrow;': '\u21d1',
'DoubleUpDownArrow;': '\u21d5',
'DoubleVerticalBar;': '\u2225',
'DownArrow;': '\u2193',
'Downarrow;': '\u21d3',
'downarrow;': '\u2193',
'DownArrowBar;': '\u2913',
'DownArrowUpArrow;': '\u21f5',
'DownBreve;': '\u0311',
'downdownarrows;': '\u21ca',
'downharpoonleft;': '\u21c3',
'downharpoonright;': '\u21c2',
'DownLeftRightVector;': '\u2950',
'DownLeftTeeVector;': '\u295e',
'DownLeftVector;': '\u21bd',
'DownLeftVectorBar;': '\u2956',
'DownRightTeeVector;': '\u295f',
'DownRightVector;': '\u21c1',
'DownRightVectorBar;': '\u2957',
'DownTee;': '\u22a4',
'DownTeeArrow;': '\u21a7',
'drbkarow;': '\u2910',
'drcorn;': '\u231f',
'drcrop;': '\u230c',
'Dscr;': '\U0001d49f',
'dscr;': '\U0001d4b9',
'DScy;': '\u0405',
'dscy;': '\u0455',
'dsol;': '\u29f6',
'Dstrok;': '\u0110',
'dstrok;': '\u0111',
'dtdot;': '\u22f1',
'dtri;': '\u25bf',
'dtrif;': '\u25be',
'duarr;': '\u21f5',
'duhar;': '\u296f',
'dwangle;': '\u29a6',
'DZcy;': '\u040f',
'dzcy;': '\u045f',
'dzigrarr;': '\u27ff',
'Eacute': '\xc9',
'eacute': '\xe9',
'Eacute;': '\xc9',
'eacute;': '\xe9',
'easter;': '\u2a6e',
'Ecaron;': '\u011a',
'ecaron;': '\u011b',
'ecir;': '\u2256',
'Ecirc': '\xca',
'ecirc': '\xea',
'Ecirc;': '\xca',
'ecirc;': '\xea',
'ecolon;': '\u2255',
'Ecy;': '\u042d',
'ecy;': '\u044d',
'eDDot;': '\u2a77',
'Edot;': '\u0116',
'eDot;': '\u2251',
'edot;': '\u0117',
'ee;': '\u2147',
'efDot;': '\u2252',
'Efr;': '\U0001d508',
'efr;': '\U0001d522',
'eg;': '\u2a9a',
'Egrave': '\xc8',
'egrave': '\xe8',
'Egrave;': '\xc8',
'egrave;': '\xe8',
'egs;': '\u2a96',
'egsdot;': '\u2a98',
'el;': '\u2a99',
'Element;': '\u2208',
'elinters;': '\u23e7',
'ell;': '\u2113',
'els;': '\u2a95',
'elsdot;': '\u2a97',
'Emacr;': '\u0112',
'emacr;': '\u0113',
'empty;': '\u2205',
'emptyset;': '\u2205',
'EmptySmallSquare;': '\u25fb',
'emptyv;': '\u2205',
'EmptyVerySmallSquare;': '\u25ab',
'emsp13;': '\u2004',
'emsp14;': '\u2005',
'emsp;': '\u2003',
'ENG;': '\u014a',
'eng;': '\u014b',
'ensp;': '\u2002',
'Eogon;': '\u0118',
'eogon;': '\u0119',
'Eopf;': '\U0001d53c',
'eopf;': '\U0001d556',
'epar;': '\u22d5',
'eparsl;': '\u29e3',
'eplus;': '\u2a71',
'epsi;': '\u03b5',
'Epsilon;': '\u0395',
'epsilon;': '\u03b5',
'epsiv;': '\u03f5',
'eqcirc;': '\u2256',
'eqcolon;': '\u2255',
'eqsim;': '\u2242',
'eqslantgtr;': '\u2a96',
'eqslantless;': '\u2a95',
'Equal;': '\u2a75',
'equals;': '=',
'EqualTilde;': '\u2242',
'equest;': '\u225f',
'Equilibrium;': '\u21cc',
'equiv;': '\u2261',
'equivDD;': '\u2a78',
'eqvparsl;': '\u29e5',
'erarr;': '\u2971',
'erDot;': '\u2253',
'Escr;': '\u2130',
'escr;': '\u212f',
'esdot;': '\u2250',
'Esim;': '\u2a73',
'esim;': '\u2242',
'Eta;': '\u0397',
'eta;': '\u03b7',
'ETH': '\xd0',
'eth': '\xf0',
'ETH;': '\xd0',
'eth;': '\xf0',
'Euml': '\xcb',
'euml': '\xeb',
'Euml;': '\xcb',
'euml;': '\xeb',
'euro;': '\u20ac',
'excl;': '!',
'exist;': '\u2203',
'Exists;': '\u2203',
'expectation;': '\u2130',
'ExponentialE;': '\u2147',
'exponentiale;': '\u2147',
'fallingdotseq;': '\u2252',
'Fcy;': '\u0424',
'fcy;': '\u0444',
'female;': '\u2640',
'ffilig;': '\ufb03',
'fflig;': '\ufb00',
'ffllig;': '\ufb04',
'Ffr;': '\U0001d509',
'ffr;': '\U0001d523',
'filig;': '\ufb01',
'FilledSmallSquare;': '\u25fc',
'FilledVerySmallSquare;': '\u25aa',
'fjlig;': 'fj',
'flat;': '\u266d',
'fllig;': '\ufb02',
'fltns;': '\u25b1',
'fnof;': '\u0192',
'Fopf;': '\U0001d53d',
'fopf;': '\U0001d557',
'ForAll;': '\u2200',
'forall;': '\u2200',
'fork;': '\u22d4',
'forkv;': '\u2ad9',
'Fouriertrf;': '\u2131',
'fpartint;': '\u2a0d',
'frac12': '\xbd',
'frac12;': '\xbd',
'frac13;': '\u2153',
'frac14': '\xbc',
'frac14;': '\xbc',
'frac15;': '\u2155',
'frac16;': '\u2159',
'frac18;': '\u215b',
'frac23;': '\u2154',
'frac25;': '\u2156',
'frac34': '\xbe',
'frac34;': '\xbe',
'frac35;': '\u2157',
'frac38;': '\u215c',
'frac45;': '\u2158',
'frac56;': '\u215a',
'frac58;': '\u215d',
'frac78;': '\u215e',
'frasl;': '\u2044',
'frown;': '\u2322',
'Fscr;': '\u2131',
'fscr;': '\U0001d4bb',
'gacute;': '\u01f5',
'Gamma;': '\u0393',
'gamma;': '\u03b3',
'Gammad;': '\u03dc',
'gammad;': '\u03dd',
'gap;': '\u2a86',
'Gbreve;': '\u011e',
'gbreve;': '\u011f',
'Gcedil;': '\u0122',
'Gcirc;': '\u011c',
'gcirc;': '\u011d',
'Gcy;': '\u0413',
'gcy;': '\u0433',
'Gdot;': '\u0120',
'gdot;': '\u0121',
'gE;': '\u2267',
'ge;': '\u2265',
'gEl;': '\u2a8c',
'gel;': '\u22db',
'geq;': '\u2265',
'geqq;': '\u2267',
'geqslant;': '\u2a7e',
'ges;': '\u2a7e',
'gescc;': '\u2aa9',
'gesdot;': '\u2a80',
'gesdoto;': '\u2a82',
'gesdotol;': '\u2a84',
'gesl;': '\u22db\ufe00',
'gesles;': '\u2a94',
'Gfr;': '\U0001d50a',
'gfr;': '\U0001d524',
'Gg;': '\u22d9',
'gg;': '\u226b',
'ggg;': '\u22d9',
'gimel;': '\u2137',
'GJcy;': '\u0403',
'gjcy;': '\u0453',
'gl;': '\u2277',
'gla;': '\u2aa5',
'glE;': '\u2a92',
'glj;': '\u2aa4',
'gnap;': '\u2a8a',
'gnapprox;': '\u2a8a',
'gnE;': '\u2269',
'gne;': '\u2a88',
'gneq;': '\u2a88',
'gneqq;': '\u2269',
'gnsim;': '\u22e7',
'Gopf;': '\U0001d53e',
'gopf;': '\U0001d558',
'grave;': '`',
'GreaterEqual;': '\u2265',
'GreaterEqualLess;': '\u22db',
'GreaterFullEqual;': '\u2267',
'GreaterGreater;': '\u2aa2',
'GreaterLess;': '\u2277',
'GreaterSlantEqual;': '\u2a7e',
'GreaterTilde;': '\u2273',
'Gscr;': '\U0001d4a2',
'gscr;': '\u210a',
'gsim;': '\u2273',
'gsime;': '\u2a8e',
'gsiml;': '\u2a90',
'GT': '>',
'gt': '>',
'GT;': '>',
'Gt;': '\u226b',
'gt;': '>',
'gtcc;': '\u2aa7',
'gtcir;': '\u2a7a',
'gtdot;': '\u22d7',
'gtlPar;': '\u2995',
'gtquest;': '\u2a7c',
'gtrapprox;': '\u2a86',
'gtrarr;': '\u2978',
'gtrdot;': '\u22d7',
'gtreqless;': '\u22db',
'gtreqqless;': '\u2a8c',
'gtrless;': '\u2277',
'gtrsim;': '\u2273',
'gvertneqq;': '\u2269\ufe00',
'gvnE;': '\u2269\ufe00',
'Hacek;': '\u02c7',
'hairsp;': '\u200a',
'half;': '\xbd',
'hamilt;': '\u210b',
'HARDcy;': '\u042a',
'hardcy;': '\u044a',
'hArr;': '\u21d4',
'harr;': '\u2194',
'harrcir;': '\u2948',
'harrw;': '\u21ad',
'Hat;': '^',
'hbar;': '\u210f',
'Hcirc;': '\u0124',
'hcirc;': '\u0125',
'hearts;': '\u2665',
'heartsuit;': '\u2665',
'hellip;': '\u2026',
'hercon;': '\u22b9',
'Hfr;': '\u210c',
'hfr;': '\U0001d525',
'HilbertSpace;': '\u210b',
'hksearow;': '\u2925',
'hkswarow;': '\u2926',
'hoarr;': '\u21ff',
'homtht;': '\u223b',
'hookleftarrow;': '\u21a9',
'hookrightarrow;': '\u21aa',
'Hopf;': '\u210d',
'hopf;': '\U0001d559',
'horbar;': '\u2015',
'HorizontalLine;': '\u2500',
'Hscr;': '\u210b',
'hscr;': '\U0001d4bd',
'hslash;': '\u210f',
'Hstrok;': '\u0126',
'hstrok;': '\u0127',
'HumpDownHump;': '\u224e',
'HumpEqual;': '\u224f',
'hybull;': '\u2043',
'hyphen;': '\u2010',
'Iacute': '\xcd',
'iacute': '\xed',
'Iacute;': '\xcd',
'iacute;': '\xed',
'ic;': '\u2063',
'Icirc': '\xce',
'icirc': '\xee',
'Icirc;': '\xce',
'icirc;': '\xee',
'Icy;': '\u0418',
'icy;': '\u0438',
'Idot;': '\u0130',
'IEcy;': '\u0415',
'iecy;': '\u0435',
'iexcl': '\xa1',
'iexcl;': '\xa1',
'iff;': '\u21d4',
'Ifr;': '\u2111',
'ifr;': '\U0001d526',
'Igrave': '\xcc',
'igrave': '\xec',
'Igrave;': '\xcc',
'igrave;': '\xec',
'ii;': '\u2148',
'iiiint;': '\u2a0c',
'iiint;': '\u222d',
'iinfin;': '\u29dc',
'iiota;': '\u2129',
'IJlig;': '\u0132',
'ijlig;': '\u0133',
'Im;': '\u2111',
'Imacr;': '\u012a',
'imacr;': '\u012b',
'image;': '\u2111',
'ImaginaryI;': '\u2148',
'imagline;': '\u2110',
'imagpart;': '\u2111',
'imath;': '\u0131',
'imof;': '\u22b7',
'imped;': '\u01b5',
'Implies;': '\u21d2',
'in;': '\u2208',
'incare;': '\u2105',
'infin;': '\u221e',
'infintie;': '\u29dd',
'inodot;': '\u0131',
'Int;': '\u222c',
'int;': '\u222b',
'intcal;': '\u22ba',
'integers;': '\u2124',
'Integral;': '\u222b',
'intercal;': '\u22ba',
'Intersection;': '\u22c2',
'intlarhk;': '\u2a17',
'intprod;': '\u2a3c',
'InvisibleComma;': '\u2063',
'InvisibleTimes;': '\u2062',
'IOcy;': '\u0401',
'iocy;': '\u0451',
'Iogon;': '\u012e',
'iogon;': '\u012f',
'Iopf;': '\U0001d540',
'iopf;': '\U0001d55a',
'Iota;': '\u0399',
'iota;': '\u03b9',
'iprod;': '\u2a3c',
'iquest': '\xbf',
'iquest;': '\xbf',
'Iscr;': '\u2110',
'iscr;': '\U0001d4be',
'isin;': '\u2208',
'isindot;': '\u22f5',
'isinE;': '\u22f9',
'isins;': '\u22f4',
'isinsv;': '\u22f3',
'isinv;': '\u2208',
'it;': '\u2062',
'Itilde;': '\u0128',
'itilde;': '\u0129',
'Iukcy;': '\u0406',
'iukcy;': '\u0456',
'Iuml': '\xcf',
'iuml': '\xef',
'Iuml;': '\xcf',
'iuml;': '\xef',
'Jcirc;': '\u0134',
'jcirc;': '\u0135',
'Jcy;': '\u0419',
'jcy;': '\u0439',
'Jfr;': '\U0001d50d',
'jfr;': '\U0001d527',
'jmath;': '\u0237',
'Jopf;': '\U0001d541',
'jopf;': '\U0001d55b',
'Jscr;': '\U0001d4a5',
'jscr;': '\U0001d4bf',
'Jsercy;': '\u0408',
'jsercy;': '\u0458',
'Jukcy;': '\u0404',
'jukcy;': '\u0454',
'Kappa;': '\u039a',
'kappa;': '\u03ba',
'kappav;': '\u03f0',
'Kcedil;': '\u0136',
'kcedil;': '\u0137',
'Kcy;': '\u041a',
'kcy;': '\u043a',
'Kfr;': '\U0001d50e',
'kfr;': '\U0001d528',
'kgreen;': '\u0138',
'KHcy;': '\u0425',
'khcy;': '\u0445',
'KJcy;': '\u040c',
'kjcy;': '\u045c',
'Kopf;': '\U0001d542',
'kopf;': '\U0001d55c',
'Kscr;': '\U0001d4a6',
'kscr;': '\U0001d4c0',
'lAarr;': '\u21da',
'Lacute;': '\u0139',
'lacute;': '\u013a',
'laemptyv;': '\u29b4',
'lagran;': '\u2112',
'Lambda;': '\u039b',
'lambda;': '\u03bb',
'Lang;': '\u27ea',
'lang;': '\u27e8',
'langd;': '\u2991',
'langle;': '\u27e8',
'lap;': '\u2a85',
'Laplacetrf;': '\u2112',
'laquo': '\xab',
'laquo;': '\xab',
'Larr;': '\u219e',
'lArr;': '\u21d0',
'larr;': '\u2190',
'larrb;': '\u21e4',
'larrbfs;': '\u291f',
'larrfs;': '\u291d',
'larrhk;': '\u21a9',
'larrlp;': '\u21ab',
'larrpl;': '\u2939',
'larrsim;': '\u2973',
'larrtl;': '\u21a2',
'lat;': '\u2aab',
'lAtail;': '\u291b',
'latail;': '\u2919',
'late;': '\u2aad',
'lates;': '\u2aad\ufe00',
'lBarr;': '\u290e',
'lbarr;': '\u290c',
'lbbrk;': '\u2772',
'lbrace;': '{',
'lbrack;': '[',
'lbrke;': '\u298b',
'lbrksld;': '\u298f',
'lbrkslu;': '\u298d',
'Lcaron;': '\u013d',
'lcaron;': '\u013e',
'Lcedil;': '\u013b',
'lcedil;': '\u013c',
'lceil;': '\u2308',
'lcub;': '{',
'Lcy;': '\u041b',
'lcy;': '\u043b',
'ldca;': '\u2936',
'ldquo;': '\u201c',
'ldquor;': '\u201e',
'ldrdhar;': '\u2967',
'ldrushar;': '\u294b',
'ldsh;': '\u21b2',
'lE;': '\u2266',
'le;': '\u2264',
'LeftAngleBracket;': '\u27e8',
'LeftArrow;': '\u2190',
'Leftarrow;': '\u21d0',
'leftarrow;': '\u2190',
'LeftArrowBar;': '\u21e4',
'LeftArrowRightArrow;': '\u21c6',
'leftarrowtail;': '\u21a2',
'LeftCeiling;': '\u2308',
'LeftDoubleBracket;': '\u27e6',
'LeftDownTeeVector;': '\u2961',
'LeftDownVector;': '\u21c3',
'LeftDownVectorBar;': '\u2959',
'LeftFloor;': '\u230a',
'leftharpoondown;': '\u21bd',
'leftharpoonup;': '\u21bc',
'leftleftarrows;': '\u21c7',
'LeftRightArrow;': '\u2194',
'Leftrightarrow;': '\u21d4',
'leftrightarrow;': '\u2194',
'leftrightarrows;': '\u21c6',
'leftrightharpoons;': '\u21cb',
'leftrightsquigarrow;': '\u21ad',
'LeftRightVector;': '\u294e',
'LeftTee;': '\u22a3',
'LeftTeeArrow;': '\u21a4',
'LeftTeeVector;': '\u295a',
'leftthreetimes;': '\u22cb',
'LeftTriangle;': '\u22b2',
'LeftTriangleBar;': '\u29cf',
'LeftTriangleEqual;': '\u22b4',
'LeftUpDownVector;': '\u2951',
'LeftUpTeeVector;': '\u2960',
'LeftUpVector;': '\u21bf',
'LeftUpVectorBar;': '\u2958',
'LeftVector;': '\u21bc',
'LeftVectorBar;': '\u2952',
'lEg;': '\u2a8b',
'leg;': '\u22da',
'leq;': '\u2264',
'leqq;': '\u2266',
'leqslant;': '\u2a7d',
'les;': '\u2a7d',
'lescc;': '\u2aa8',
'lesdot;': '\u2a7f',
'lesdoto;': '\u2a81',
'lesdotor;': '\u2a83',
'lesg;': '\u22da\ufe00',
'lesges;': '\u2a93',
'lessapprox;': '\u2a85',
'lessdot;': '\u22d6',
'lesseqgtr;': '\u22da',
'lesseqqgtr;': '\u2a8b',
'LessEqualGreater;': '\u22da',
'LessFullEqual;': '\u2266',
'LessGreater;': '\u2276',
'lessgtr;': '\u2276',
'LessLess;': '\u2aa1',
'lesssim;': '\u2272',
'LessSlantEqual;': '\u2a7d',
'LessTilde;': '\u2272',
'lfisht;': '\u297c',
'lfloor;': '\u230a',
'Lfr;': '\U0001d50f',
'lfr;': '\U0001d529',
'lg;': '\u2276',
'lgE;': '\u2a91',
'lHar;': '\u2962',
'lhard;': '\u21bd',
'lharu;': '\u21bc',
'lharul;': '\u296a',
'lhblk;': '\u2584',
'LJcy;': '\u0409',
'ljcy;': '\u0459',
'Ll;': '\u22d8',
'll;': '\u226a',
'llarr;': '\u21c7',
'llcorner;': '\u231e',
'Lleftarrow;': '\u21da',
'llhard;': '\u296b',
'lltri;': '\u25fa',
'Lmidot;': '\u013f',
'lmidot;': '\u0140',
'lmoust;': '\u23b0',
'lmoustache;': '\u23b0',
'lnap;': '\u2a89',
'lnapprox;': '\u2a89',
'lnE;': '\u2268',
'lne;': '\u2a87',
'lneq;': '\u2a87',
'lneqq;': '\u2268',
'lnsim;': '\u22e6',
'loang;': '\u27ec',
'loarr;': '\u21fd',
'lobrk;': '\u27e6',
'LongLeftArrow;': '\u27f5',
'Longleftarrow;': '\u27f8',
'longleftarrow;': '\u27f5',
'LongLeftRightArrow;': '\u27f7',
'Longleftrightarrow;': '\u27fa',
'longleftrightarrow;': '\u27f7',
'longmapsto;': '\u27fc',
'LongRightArrow;': '\u27f6',
'Longrightarrow;': '\u27f9',
'longrightarrow;': '\u27f6',
'looparrowleft;': '\u21ab',
'looparrowright;': '\u21ac',
'lopar;': '\u2985',
'Lopf;': '\U0001d543',
'lopf;': '\U0001d55d',
'loplus;': '\u2a2d',
'lotimes;': '\u2a34',
'lowast;': '\u2217',
'lowbar;': '_',
'LowerLeftArrow;': '\u2199',
'LowerRightArrow;': '\u2198',
'loz;': '\u25ca',
'lozenge;': '\u25ca',
'lozf;': '\u29eb',
'lpar;': '(',
'lparlt;': '\u2993',
'lrarr;': '\u21c6',
'lrcorner;': '\u231f',
'lrhar;': '\u21cb',
'lrhard;': '\u296d',
'lrm;': '\u200e',
'lrtri;': '\u22bf',
'lsaquo;': '\u2039',
'Lscr;': '\u2112',
'lscr;': '\U0001d4c1',
'Lsh;': '\u21b0',
'lsh;': '\u21b0',
'lsim;': '\u2272',
'lsime;': '\u2a8d',
'lsimg;': '\u2a8f',
'lsqb;': '[',
'lsquo;': '\u2018',
'lsquor;': '\u201a',
'Lstrok;': '\u0141',
'lstrok;': '\u0142',
'LT': '<',
'lt': '<',
'LT;': '<',
'Lt;': '\u226a',
'lt;': '<',
'ltcc;': '\u2aa6',
'ltcir;': '\u2a79',
'ltdot;': '\u22d6',
'lthree;': '\u22cb',
'ltimes;': '\u22c9',
'ltlarr;': '\u2976',
'ltquest;': '\u2a7b',
'ltri;': '\u25c3',
'ltrie;': '\u22b4',
'ltrif;': '\u25c2',
'ltrPar;': '\u2996',
'lurdshar;': '\u294a',
'luruhar;': '\u2966',
'lvertneqq;': '\u2268\ufe00',
'lvnE;': '\u2268\ufe00',
'macr': '\xaf',
'macr;': '\xaf',
'male;': '\u2642',
'malt;': '\u2720',
'maltese;': '\u2720',
'Map;': '\u2905',
'map;': '\u21a6',
'mapsto;': '\u21a6',
'mapstodown;': '\u21a7',
'mapstoleft;': '\u21a4',
'mapstoup;': '\u21a5',
'marker;': '\u25ae',
'mcomma;': '\u2a29',
'Mcy;': '\u041c',
'mcy;': '\u043c',
'mdash;': '\u2014',
'mDDot;': '\u223a',
'measuredangle;': '\u2221',
'MediumSpace;': '\u205f',
'Mellintrf;': '\u2133',
'Mfr;': '\U0001d510',
'mfr;': '\U0001d52a',
'mho;': '\u2127',
'micro': '\xb5',
'micro;': '\xb5',
'mid;': '\u2223',
'midast;': '*',
'midcir;': '\u2af0',
'middot': '\xb7',
'middot;': '\xb7',
'minus;': '\u2212',
'minusb;': '\u229f',
'minusd;': '\u2238',
'minusdu;': '\u2a2a',
'MinusPlus;': '\u2213',
'mlcp;': '\u2adb',
'mldr;': '\u2026',
'mnplus;': '\u2213',
'models;': '\u22a7',
'Mopf;': '\U0001d544',
'mopf;': '\U0001d55e',
'mp;': '\u2213',
'Mscr;': '\u2133',
'mscr;': '\U0001d4c2',
'mstpos;': '\u223e',
'Mu;': '\u039c',
'mu;': '\u03bc',
'multimap;': '\u22b8',
'mumap;': '\u22b8',
'nabla;': '\u2207',
'Nacute;': '\u0143',
'nacute;': '\u0144',
'nang;': '\u2220\u20d2',
'nap;': '\u2249',
'napE;': '\u2a70\u0338',
'napid;': '\u224b\u0338',
'napos;': '\u0149',
'napprox;': '\u2249',
'natur;': '\u266e',
'natural;': '\u266e',
'naturals;': '\u2115',
'nbsp': '\xa0',
'nbsp;': '\xa0',
'nbump;': '\u224e\u0338',
'nbumpe;': '\u224f\u0338',
'ncap;': '\u2a43',
'Ncaron;': '\u0147',
'ncaron;': '\u0148',
'Ncedil;': '\u0145',
'ncedil;': '\u0146',
'ncong;': '\u2247',
'ncongdot;': '\u2a6d\u0338',
'ncup;': '\u2a42',
'Ncy;': '\u041d',
'ncy;': '\u043d',
'ndash;': '\u2013',
'ne;': '\u2260',
'nearhk;': '\u2924',
'neArr;': '\u21d7',
'nearr;': '\u2197',
'nearrow;': '\u2197',
'nedot;': '\u2250\u0338',
'NegativeMediumSpace;': '\u200b',
'NegativeThickSpace;': '\u200b',
'NegativeThinSpace;': '\u200b',
'NegativeVeryThinSpace;': '\u200b',
'nequiv;': '\u2262',
'nesear;': '\u2928',
'nesim;': '\u2242\u0338',
'NestedGreaterGreater;': '\u226b',
'NestedLessLess;': '\u226a',
'NewLine;': '\n',
'nexist;': '\u2204',
'nexists;': '\u2204',
'Nfr;': '\U0001d511',
'nfr;': '\U0001d52b',
'ngE;': '\u2267\u0338',
'nge;': '\u2271',
'ngeq;': '\u2271',
'ngeqq;': '\u2267\u0338',
'ngeqslant;': '\u2a7e\u0338',
'nges;': '\u2a7e\u0338',
'nGg;': '\u22d9\u0338',
'ngsim;': '\u2275',
'nGt;': '\u226b\u20d2',
'ngt;': '\u226f',
'ngtr;': '\u226f',
'nGtv;': '\u226b\u0338',
'nhArr;': '\u21ce',
'nharr;': '\u21ae',
'nhpar;': '\u2af2',
'ni;': '\u220b',
'nis;': '\u22fc',
'nisd;': '\u22fa',
'niv;': '\u220b',
'NJcy;': '\u040a',
'njcy;': '\u045a',
'nlArr;': '\u21cd',
'nlarr;': '\u219a',
'nldr;': '\u2025',
'nlE;': '\u2266\u0338',
'nle;': '\u2270',
'nLeftarrow;': '\u21cd',
'nleftarrow;': '\u219a',
'nLeftrightarrow;': '\u21ce',
'nleftrightarrow;': '\u21ae',
'nleq;': '\u2270',
'nleqq;': '\u2266\u0338',
'nleqslant;': '\u2a7d\u0338',
'nles;': '\u2a7d\u0338',
'nless;': '\u226e',
'nLl;': '\u22d8\u0338',
'nlsim;': '\u2274',
'nLt;': '\u226a\u20d2',
'nlt;': '\u226e',
'nltri;': '\u22ea',
'nltrie;': '\u22ec',
'nLtv;': '\u226a\u0338',
'nmid;': '\u2224',
'NoBreak;': '\u2060',
'NonBreakingSpace;': '\xa0',
'Nopf;': '\u2115',
'nopf;': '\U0001d55f',
'not': '\xac',
'Not;': '\u2aec',
'not;': '\xac',
'NotCongruent;': '\u2262',
'NotCupCap;': '\u226d',
'NotDoubleVerticalBar;': '\u2226',
'NotElement;': '\u2209',
'NotEqual;': '\u2260',
'NotEqualTilde;': '\u2242\u0338',
'NotExists;': '\u2204',
'NotGreater;': '\u226f',
'NotGreaterEqual;': '\u2271',
'NotGreaterFullEqual;': '\u2267\u0338',
'NotGreaterGreater;': '\u226b\u0338',
'NotGreaterLess;': '\u2279',
'NotGreaterSlantEqual;': '\u2a7e\u0338',
'NotGreaterTilde;': '\u2275',
'NotHumpDownHump;': '\u224e\u0338',
'NotHumpEqual;': '\u224f\u0338',
'notin;': '\u2209',
'notindot;': '\u22f5\u0338',
'notinE;': '\u22f9\u0338',
'notinva;': '\u2209',
'notinvb;': '\u22f7',
'notinvc;': '\u22f6',
'NotLeftTriangle;': '\u22ea',
'NotLeftTriangleBar;': '\u29cf\u0338',
'NotLeftTriangleEqual;': '\u22ec',
'NotLess;': '\u226e',
'NotLessEqual;': '\u2270',
'NotLessGreater;': '\u2278',
'NotLessLess;': '\u226a\u0338',
'NotLessSlantEqual;': '\u2a7d\u0338',
'NotLessTilde;': '\u2274',
'NotNestedGreaterGreater;': '\u2aa2\u0338',
'NotNestedLessLess;': '\u2aa1\u0338',
'notni;': '\u220c',
'notniva;': '\u220c',
'notnivb;': '\u22fe',
'notnivc;': '\u22fd',
'NotPrecedes;': '\u2280',
'NotPrecedesEqual;': '\u2aaf\u0338',
'NotPrecedesSlantEqual;': '\u22e0',
'NotReverseElement;': '\u220c',
'NotRightTriangle;': '\u22eb',
'NotRightTriangleBar;': '\u29d0\u0338',
'NotRightTriangleEqual;': '\u22ed',
'NotSquareSubset;': '\u228f\u0338',
'NotSquareSubsetEqual;': '\u22e2',
'NotSquareSuperset;': '\u2290\u0338',
'NotSquareSupersetEqual;': '\u22e3',
'NotSubset;': '\u2282\u20d2',
'NotSubsetEqual;': '\u2288',
'NotSucceeds;': '\u2281',
'NotSucceedsEqual;': '\u2ab0\u0338',
'NotSucceedsSlantEqual;': '\u22e1',
'NotSucceedsTilde;': '\u227f\u0338',
'NotSuperset;': '\u2283\u20d2',
'NotSupersetEqual;': '\u2289',
'NotTilde;': '\u2241',
'NotTildeEqual;': '\u2244',
'NotTildeFullEqual;': '\u2247',
'NotTildeTilde;': '\u2249',
'NotVerticalBar;': '\u2224',
'npar;': '\u2226',
'nparallel;': '\u2226',
'nparsl;': '\u2afd\u20e5',
'npart;': '\u2202\u0338',
'npolint;': '\u2a14',
'npr;': '\u2280',
'nprcue;': '\u22e0',
'npre;': '\u2aaf\u0338',
'nprec;': '\u2280',
'npreceq;': '\u2aaf\u0338',
'nrArr;': '\u21cf',
'nrarr;': '\u219b',
'nrarrc;': '\u2933\u0338',
'nrarrw;': '\u219d\u0338',
'nRightarrow;': '\u21cf',
'nrightarrow;': '\u219b',
'nrtri;': '\u22eb',
'nrtrie;': '\u22ed',
'nsc;': '\u2281',
'nsccue;': '\u22e1',
'nsce;': '\u2ab0\u0338',
'Nscr;': '\U0001d4a9',
'nscr;': '\U0001d4c3',
'nshortmid;': '\u2224',
'nshortparallel;': '\u2226',
'nsim;': '\u2241',
'nsime;': '\u2244',
'nsimeq;': '\u2244',
'nsmid;': '\u2224',
'nspar;': '\u2226',
'nsqsube;': '\u22e2',
'nsqsupe;': '\u22e3',
'nsub;': '\u2284',
'nsubE;': '\u2ac5\u0338',
'nsube;': '\u2288',
'nsubset;': '\u2282\u20d2',
'nsubseteq;': '\u2288',
'nsubseteqq;': '\u2ac5\u0338',
'nsucc;': '\u2281',
'nsucceq;': '\u2ab0\u0338',
'nsup;': '\u2285',
'nsupE;': '\u2ac6\u0338',
'nsupe;': '\u2289',
'nsupset;': '\u2283\u20d2',
'nsupseteq;': '\u2289',
'nsupseteqq;': '\u2ac6\u0338',
'ntgl;': '\u2279',
'Ntilde': '\xd1',
'ntilde': '\xf1',
'Ntilde;': '\xd1',
'ntilde;': '\xf1',
'ntlg;': '\u2278',
'ntriangleleft;': '\u22ea',
'ntrianglelefteq;': '\u22ec',
'ntriangleright;': '\u22eb',
'ntrianglerighteq;': '\u22ed',
'Nu;': '\u039d',
'nu;': '\u03bd',
'num;': '#',
'numero;': '\u2116',
'numsp;': '\u2007',
'nvap;': '\u224d\u20d2',
'nVDash;': '\u22af',
'nVdash;': '\u22ae',
'nvDash;': '\u22ad',
'nvdash;': '\u22ac',
'nvge;': '\u2265\u20d2',
'nvgt;': '>\u20d2',
'nvHarr;': '\u2904',
'nvinfin;': '\u29de',
'nvlArr;': '\u2902',
'nvle;': '\u2264\u20d2',
'nvlt;': '<\u20d2',
'nvltrie;': '\u22b4\u20d2',
'nvrArr;': '\u2903',
'nvrtrie;': '\u22b5\u20d2',
'nvsim;': '\u223c\u20d2',
'nwarhk;': '\u2923',
'nwArr;': '\u21d6',
'nwarr;': '\u2196',
'nwarrow;': '\u2196',
'nwnear;': '\u2927',
'Oacute': '\xd3',
'oacute': '\xf3',
'Oacute;': '\xd3',
'oacute;': '\xf3',
'oast;': '\u229b',
'ocir;': '\u229a',
'Ocirc': '\xd4',
'ocirc': '\xf4',
'Ocirc;': '\xd4',
'ocirc;': '\xf4',
'Ocy;': '\u041e',
'ocy;': '\u043e',
'odash;': '\u229d',
'Odblac;': '\u0150',
'odblac;': '\u0151',
'odiv;': '\u2a38',
'odot;': '\u2299',
'odsold;': '\u29bc',
'OElig;': '\u0152',
'oelig;': '\u0153',
'ofcir;': '\u29bf',
'Ofr;': '\U0001d512',
'ofr;': '\U0001d52c',
'ogon;': '\u02db',
'Ograve': '\xd2',
'ograve': '\xf2',
'Ograve;': '\xd2',
'ograve;': '\xf2',
'ogt;': '\u29c1',
'ohbar;': '\u29b5',
'ohm;': '\u03a9',
'oint;': '\u222e',
'olarr;': '\u21ba',
'olcir;': '\u29be',
'olcross;': '\u29bb',
'oline;': '\u203e',
'olt;': '\u29c0',
'Omacr;': '\u014c',
'omacr;': '\u014d',
'Omega;': '\u03a9',
'omega;': '\u03c9',
'Omicron;': '\u039f',
'omicron;': '\u03bf',
'omid;': '\u29b6',
'ominus;': '\u2296',
'Oopf;': '\U0001d546',
'oopf;': '\U0001d560',
'opar;': '\u29b7',
'OpenCurlyDoubleQuote;': '\u201c',
'OpenCurlyQuote;': '\u2018',
'operp;': '\u29b9',
'oplus;': '\u2295',
'Or;': '\u2a54',
'or;': '\u2228',
'orarr;': '\u21bb',
'ord;': '\u2a5d',
'order;': '\u2134',
'orderof;': '\u2134',
'ordf': '\xaa',
'ordf;': '\xaa',
'ordm': '\xba',
'ordm;': '\xba',
'origof;': '\u22b6',
'oror;': '\u2a56',
'orslope;': '\u2a57',
'orv;': '\u2a5b',
'oS;': '\u24c8',
'Oscr;': '\U0001d4aa',
'oscr;': '\u2134',
'Oslash': '\xd8',
'oslash': '\xf8',
'Oslash;': '\xd8',
'oslash;': '\xf8',
'osol;': '\u2298',
'Otilde': '\xd5',
'otilde': '\xf5',
'Otilde;': '\xd5',
'otilde;': '\xf5',
'Otimes;': '\u2a37',
'otimes;': '\u2297',
'otimesas;': '\u2a36',
'Ouml': '\xd6',
'ouml': '\xf6',
'Ouml;': '\xd6',
'ouml;': '\xf6',
'ovbar;': '\u233d',
'OverBar;': '\u203e',
'OverBrace;': '\u23de',
'OverBracket;': '\u23b4',
'OverParenthesis;': '\u23dc',
'par;': '\u2225',
'para': '\xb6',
'para;': '\xb6',
'parallel;': '\u2225',
'parsim;': '\u2af3',
'parsl;': '\u2afd',
'part;': '\u2202',
'PartialD;': '\u2202',
'Pcy;': '\u041f',
'pcy;': '\u043f',
'percnt;': '%',
'period;': '.',
'permil;': '\u2030',
'perp;': '\u22a5',
'pertenk;': '\u2031',
'Pfr;': '\U0001d513',
'pfr;': '\U0001d52d',
'Phi;': '\u03a6',
'phi;': '\u03c6',
'phiv;': '\u03d5',
'phmmat;': '\u2133',
'phone;': '\u260e',
'Pi;': '\u03a0',
'pi;': '\u03c0',
'pitchfork;': '\u22d4',
'piv;': '\u03d6',
'planck;': '\u210f',
'planckh;': '\u210e',
'plankv;': '\u210f',
'plus;': '+',
'plusacir;': '\u2a23',
'plusb;': '\u229e',
'pluscir;': '\u2a22',
'plusdo;': '\u2214',
'plusdu;': '\u2a25',
'pluse;': '\u2a72',
'PlusMinus;': '\xb1',
'plusmn': '\xb1',
'plusmn;': '\xb1',
'plussim;': '\u2a26',
'plustwo;': '\u2a27',
'pm;': '\xb1',
'Poincareplane;': '\u210c',
'pointint;': '\u2a15',
'Popf;': '\u2119',
'popf;': '\U0001d561',
'pound': '\xa3',
'pound;': '\xa3',
'Pr;': '\u2abb',
'pr;': '\u227a',
'prap;': '\u2ab7',
'prcue;': '\u227c',
'prE;': '\u2ab3',
'pre;': '\u2aaf',
'prec;': '\u227a',
'precapprox;': '\u2ab7',
'preccurlyeq;': '\u227c',
'Precedes;': '\u227a',
'PrecedesEqual;': '\u2aaf',
'PrecedesSlantEqual;': '\u227c',
'PrecedesTilde;': '\u227e',
'preceq;': '\u2aaf',
'precnapprox;': '\u2ab9',
'precneqq;': '\u2ab5',
'precnsim;': '\u22e8',
'precsim;': '\u227e',
'Prime;': '\u2033',
'prime;': '\u2032',
'primes;': '\u2119',
'prnap;': '\u2ab9',
'prnE;': '\u2ab5',
'prnsim;': '\u22e8',
'prod;': '\u220f',
'Product;': '\u220f',
'profalar;': '\u232e',
'profline;': '\u2312',
'profsurf;': '\u2313',
'prop;': '\u221d',
'Proportion;': '\u2237',
'Proportional;': '\u221d',
'propto;': '\u221d',
'prsim;': '\u227e',
'prurel;': '\u22b0',
'Pscr;': '\U0001d4ab',
'pscr;': '\U0001d4c5',
'Psi;': '\u03a8',
'psi;': '\u03c8',
'puncsp;': '\u2008',
'Qfr;': '\U0001d514',
'qfr;': '\U0001d52e',
'qint;': '\u2a0c',
'Qopf;': '\u211a',
'qopf;': '\U0001d562',
'qprime;': '\u2057',
'Qscr;': '\U0001d4ac',
'qscr;': '\U0001d4c6',
'quaternions;': '\u210d',
'quatint;': '\u2a16',
'quest;': '?',
'questeq;': '\u225f',
'QUOT': '"',
'quot': '"',
'QUOT;': '"',
'quot;': '"',
'rAarr;': '\u21db',
'race;': '\u223d\u0331',
'Racute;': '\u0154',
'racute;': '\u0155',
'radic;': '\u221a',
'raemptyv;': '\u29b3',
'Rang;': '\u27eb',
'rang;': '\u27e9',
'rangd;': '\u2992',
'range;': '\u29a5',
'rangle;': '\u27e9',
'raquo': '\xbb',
'raquo;': '\xbb',
'Rarr;': '\u21a0',
'rArr;': '\u21d2',
'rarr;': '\u2192',
'rarrap;': '\u2975',
'rarrb;': '\u21e5',
'rarrbfs;': '\u2920',
'rarrc;': '\u2933',
'rarrfs;': '\u291e',
'rarrhk;': '\u21aa',
'rarrlp;': '\u21ac',
'rarrpl;': '\u2945',
'rarrsim;': '\u2974',
'Rarrtl;': '\u2916',
'rarrtl;': '\u21a3',
'rarrw;': '\u219d',
'rAtail;': '\u291c',
'ratail;': '\u291a',
'ratio;': '\u2236',
'rationals;': '\u211a',
'RBarr;': '\u2910',
'rBarr;': '\u290f',
'rbarr;': '\u290d',
'rbbrk;': '\u2773',
'rbrace;': '}',
'rbrack;': ']',
'rbrke;': '\u298c',
'rbrksld;': '\u298e',
'rbrkslu;': '\u2990',
'Rcaron;': '\u0158',
'rcaron;': '\u0159',
'Rcedil;': '\u0156',
'rcedil;': '\u0157',
'rceil;': '\u2309',
'rcub;': '}',
'Rcy;': '\u0420',
'rcy;': '\u0440',
'rdca;': '\u2937',
'rdldhar;': '\u2969',
'rdquo;': '\u201d',
'rdquor;': '\u201d',
'rdsh;': '\u21b3',
'Re;': '\u211c',
'real;': '\u211c',
'realine;': '\u211b',
'realpart;': '\u211c',
'reals;': '\u211d',
'rect;': '\u25ad',
'REG': '\xae',
'reg': '\xae',
'REG;': '\xae',
'reg;': '\xae',
'ReverseElement;': '\u220b',
'ReverseEquilibrium;': '\u21cb',
'ReverseUpEquilibrium;': '\u296f',
'rfisht;': '\u297d',
'rfloor;': '\u230b',
'Rfr;': '\u211c',
'rfr;': '\U0001d52f',
'rHar;': '\u2964',
'rhard;': '\u21c1',
'rharu;': '\u21c0',
'rharul;': '\u296c',
'Rho;': '\u03a1',
'rho;': '\u03c1',
'rhov;': '\u03f1',
'RightAngleBracket;': '\u27e9',
'RightArrow;': '\u2192',
'Rightarrow;': '\u21d2',
'rightarrow;': '\u2192',
'RightArrowBar;': '\u21e5',
'RightArrowLeftArrow;': '\u21c4',
'rightarrowtail;': '\u21a3',
'RightCeiling;': '\u2309',
'RightDoubleBracket;': '\u27e7',
'RightDownTeeVector;': '\u295d',
'RightDownVector;': '\u21c2',
'RightDownVectorBar;': '\u2955',
'RightFloor;': '\u230b',
'rightharpoondown;': '\u21c1',
'rightharpoonup;': '\u21c0',
'rightleftarrows;': '\u21c4',
'rightleftharpoons;': '\u21cc',
'rightrightarrows;': '\u21c9',
'rightsquigarrow;': '\u219d',
'RightTee;': '\u22a2',
'RightTeeArrow;': '\u21a6',
'RightTeeVector;': '\u295b',
'rightthreetimes;': '\u22cc',
'RightTriangle;': '\u22b3',
'RightTriangleBar;': '\u29d0',
'RightTriangleEqual;': '\u22b5',
'RightUpDownVector;': '\u294f',
'RightUpTeeVector;': '\u295c',
'RightUpVector;': '\u21be',
'RightUpVectorBar;': '\u2954',
'RightVector;': '\u21c0',
'RightVectorBar;': '\u2953',
'ring;': '\u02da',
'risingdotseq;': '\u2253',
'rlarr;': '\u21c4',
'rlhar;': '\u21cc',
'rlm;': '\u200f',
'rmoust;': '\u23b1',
'rmoustache;': '\u23b1',
'rnmid;': '\u2aee',
'roang;': '\u27ed',
'roarr;': '\u21fe',
'robrk;': '\u27e7',
'ropar;': '\u2986',
'Ropf;': '\u211d',
'ropf;': '\U0001d563',
'roplus;': '\u2a2e',
'rotimes;': '\u2a35',
'RoundImplies;': '\u2970',
'rpar;': ')',
'rpargt;': '\u2994',
'rppolint;': '\u2a12',
'rrarr;': '\u21c9',
'Rrightarrow;': '\u21db',
'rsaquo;': '\u203a',
'Rscr;': '\u211b',
'rscr;': '\U0001d4c7',
'Rsh;': '\u21b1',
'rsh;': '\u21b1',
'rsqb;': ']',
'rsquo;': '\u2019',
'rsquor;': '\u2019',
'rthree;': '\u22cc',
'rtimes;': '\u22ca',
'rtri;': '\u25b9',
'rtrie;': '\u22b5',
'rtrif;': '\u25b8',
'rtriltri;': '\u29ce',
'RuleDelayed;': '\u29f4',
'ruluhar;': '\u2968',
'rx;': '\u211e',
'Sacute;': '\u015a',
'sacute;': '\u015b',
'sbquo;': '\u201a',
'Sc;': '\u2abc',
'sc;': '\u227b',
'scap;': '\u2ab8',
'Scaron;': '\u0160',
'scaron;': '\u0161',
'sccue;': '\u227d',
'scE;': '\u2ab4',
'sce;': '\u2ab0',
'Scedil;': '\u015e',
'scedil;': '\u015f',
'Scirc;': '\u015c',
'scirc;': '\u015d',
'scnap;': '\u2aba',
'scnE;': '\u2ab6',
'scnsim;': '\u22e9',
'scpolint;': '\u2a13',
'scsim;': '\u227f',
'Scy;': '\u0421',
'scy;': '\u0441',
'sdot;': '\u22c5',
'sdotb;': '\u22a1',
'sdote;': '\u2a66',
'searhk;': '\u2925',
'seArr;': '\u21d8',
'searr;': '\u2198',
'searrow;': '\u2198',
'sect': '\xa7',
'sect;': '\xa7',
'semi;': ';',
'seswar;': '\u2929',
'setminus;': '\u2216',
'setmn;': '\u2216',
'sext;': '\u2736',
'Sfr;': '\U0001d516',
'sfr;': '\U0001d530',
'sfrown;': '\u2322',
'sharp;': '\u266f',
'SHCHcy;': '\u0429',
'shchcy;': '\u0449',
'SHcy;': '\u0428',
'shcy;': '\u0448',
'ShortDownArrow;': '\u2193',
'ShortLeftArrow;': '\u2190',
'shortmid;': '\u2223',
'shortparallel;': '\u2225',
'ShortRightArrow;': '\u2192',
'ShortUpArrow;': '\u2191',
'shy': '\xad',
'shy;': '\xad',
'Sigma;': '\u03a3',
'sigma;': '\u03c3',
'sigmaf;': '\u03c2',
'sigmav;': '\u03c2',
'sim;': '\u223c',
'simdot;': '\u2a6a',
'sime;': '\u2243',
'simeq;': '\u2243',
'simg;': '\u2a9e',
'simgE;': '\u2aa0',
'siml;': '\u2a9d',
'simlE;': '\u2a9f',
'simne;': '\u2246',
'simplus;': '\u2a24',
'simrarr;': '\u2972',
'slarr;': '\u2190',
'SmallCircle;': '\u2218',
'smallsetminus;': '\u2216',
'smashp;': '\u2a33',
'smeparsl;': '\u29e4',
'smid;': '\u2223',
'smile;': '\u2323',
'smt;': '\u2aaa',
'smte;': '\u2aac',
'smtes;': '\u2aac\ufe00',
'SOFTcy;': '\u042c',
'softcy;': '\u044c',
'sol;': '/',
'solb;': '\u29c4',
'solbar;': '\u233f',
'Sopf;': '\U0001d54a',
'sopf;': '\U0001d564',
'spades;': '\u2660',
'spadesuit;': '\u2660',
'spar;': '\u2225',
'sqcap;': '\u2293',
'sqcaps;': '\u2293\ufe00',
'sqcup;': '\u2294',
'sqcups;': '\u2294\ufe00',
'Sqrt;': '\u221a',
'sqsub;': '\u228f',
'sqsube;': '\u2291',
'sqsubset;': '\u228f',
'sqsubseteq;': '\u2291',
'sqsup;': '\u2290',
'sqsupe;': '\u2292',
'sqsupset;': '\u2290',
'sqsupseteq;': '\u2292',
'squ;': '\u25a1',
'Square;': '\u25a1',
'square;': '\u25a1',
'SquareIntersection;': '\u2293',
'SquareSubset;': '\u228f',
'SquareSubsetEqual;': '\u2291',
'SquareSuperset;': '\u2290',
'SquareSupersetEqual;': '\u2292',
'SquareUnion;': '\u2294',
'squarf;': '\u25aa',
'squf;': '\u25aa',
'srarr;': '\u2192',
'Sscr;': '\U0001d4ae',
'sscr;': '\U0001d4c8',
'ssetmn;': '\u2216',
'ssmile;': '\u2323',
'sstarf;': '\u22c6',
'Star;': '\u22c6',
'star;': '\u2606',
'starf;': '\u2605',
'straightepsilon;': '\u03f5',
'straightphi;': '\u03d5',
'strns;': '\xaf',
'Sub;': '\u22d0',
'sub;': '\u2282',
'subdot;': '\u2abd',
'subE;': '\u2ac5',
'sube;': '\u2286',
'subedot;': '\u2ac3',
'submult;': '\u2ac1',
'subnE;': '\u2acb',
'subne;': '\u228a',
'subplus;': '\u2abf',
'subrarr;': '\u2979',
'Subset;': '\u22d0',
'subset;': '\u2282',
'subseteq;': '\u2286',
'subseteqq;': '\u2ac5',
'SubsetEqual;': '\u2286',
'subsetneq;': '\u228a',
'subsetneqq;': '\u2acb',
'subsim;': '\u2ac7',
'subsub;': '\u2ad5',
'subsup;': '\u2ad3',
'succ;': '\u227b',
'succapprox;': '\u2ab8',
'succcurlyeq;': '\u227d',
'Succeeds;': '\u227b',
'SucceedsEqual;': '\u2ab0',
'SucceedsSlantEqual;': '\u227d',
'SucceedsTilde;': '\u227f',
'succeq;': '\u2ab0',
'succnapprox;': '\u2aba',
'succneqq;': '\u2ab6',
'succnsim;': '\u22e9',
'succsim;': '\u227f',
'SuchThat;': '\u220b',
'Sum;': '\u2211',
'sum;': '\u2211',
'sung;': '\u266a',
'sup1': '\xb9',
'sup1;': '\xb9',
'sup2': '\xb2',
'sup2;': '\xb2',
'sup3': '\xb3',
'sup3;': '\xb3',
'Sup;': '\u22d1',
'sup;': '\u2283',
'supdot;': '\u2abe',
'supdsub;': '\u2ad8',
'supE;': '\u2ac6',
'supe;': '\u2287',
'supedot;': '\u2ac4',
'Superset;': '\u2283',
'SupersetEqual;': '\u2287',
'suphsol;': '\u27c9',
'suphsub;': '\u2ad7',
'suplarr;': '\u297b',
'supmult;': '\u2ac2',
'supnE;': '\u2acc',
'supne;': '\u228b',
'supplus;': '\u2ac0',
'Supset;': '\u22d1',
'supset;': '\u2283',
'supseteq;': '\u2287',
'supseteqq;': '\u2ac6',
'supsetneq;': '\u228b',
'supsetneqq;': '\u2acc',
'supsim;': '\u2ac8',
'supsub;': '\u2ad4',
'supsup;': '\u2ad6',
'swarhk;': '\u2926',
'swArr;': '\u21d9',
'swarr;': '\u2199',
'swarrow;': '\u2199',
'swnwar;': '\u292a',
'szlig': '\xdf',
'szlig;': '\xdf',
'Tab;': '\t',
'target;': '\u2316',
'Tau;': '\u03a4',
'tau;': '\u03c4',
'tbrk;': '\u23b4',
'Tcaron;': '\u0164',
'tcaron;': '\u0165',
'Tcedil;': '\u0162',
'tcedil;': '\u0163',
'Tcy;': '\u0422',
'tcy;': '\u0442',
'tdot;': '\u20db',
'telrec;': '\u2315',
'Tfr;': '\U0001d517',
'tfr;': '\U0001d531',
'there4;': '\u2234',
'Therefore;': '\u2234',
'therefore;': '\u2234',
'Theta;': '\u0398',
'theta;': '\u03b8',
'thetasym;': '\u03d1',
'thetav;': '\u03d1',
'thickapprox;': '\u2248',
'thicksim;': '\u223c',
'ThickSpace;': '\u205f\u200a',
'thinsp;': '\u2009',
'ThinSpace;': '\u2009',
'thkap;': '\u2248',
'thksim;': '\u223c',
'THORN': '\xde',
'thorn': '\xfe',
'THORN;': '\xde',
'thorn;': '\xfe',
'Tilde;': '\u223c',
'tilde;': '\u02dc',
'TildeEqual;': '\u2243',
'TildeFullEqual;': '\u2245',
'TildeTilde;': '\u2248',
'times': '\xd7',
'times;': '\xd7',
'timesb;': '\u22a0',
'timesbar;': '\u2a31',
'timesd;': '\u2a30',
'tint;': '\u222d',
'toea;': '\u2928',
'top;': '\u22a4',
'topbot;': '\u2336',
'topcir;': '\u2af1',
'Topf;': '\U0001d54b',
'topf;': '\U0001d565',
'topfork;': '\u2ada',
'tosa;': '\u2929',
'tprime;': '\u2034',
'TRADE;': '\u2122',
'trade;': '\u2122',
'triangle;': '\u25b5',
'triangledown;': '\u25bf',
'triangleleft;': '\u25c3',
'trianglelefteq;': '\u22b4',
'triangleq;': '\u225c',
'triangleright;': '\u25b9',
'trianglerighteq;': '\u22b5',
'tridot;': '\u25ec',
'trie;': '\u225c',
'triminus;': '\u2a3a',
'TripleDot;': '\u20db',
'triplus;': '\u2a39',
'trisb;': '\u29cd',
'tritime;': '\u2a3b',
'trpezium;': '\u23e2',
'Tscr;': '\U0001d4af',
'tscr;': '\U0001d4c9',
'TScy;': '\u0426',
'tscy;': '\u0446',
'TSHcy;': '\u040b',
'tshcy;': '\u045b',
'Tstrok;': '\u0166',
'tstrok;': '\u0167',
'twixt;': '\u226c',
'twoheadleftarrow;': '\u219e',
'twoheadrightarrow;': '\u21a0',
'Uacute': '\xda',
'uacute': '\xfa',
'Uacute;': '\xda',
'uacute;': '\xfa',
'Uarr;': '\u219f',
'uArr;': '\u21d1',
'uarr;': '\u2191',
'Uarrocir;': '\u2949',
'Ubrcy;': '\u040e',
'ubrcy;': '\u045e',
'Ubreve;': '\u016c',
'ubreve;': '\u016d',
'Ucirc': '\xdb',
'ucirc': '\xfb',
'Ucirc;': '\xdb',
'ucirc;': '\xfb',
'Ucy;': '\u0423',
'ucy;': '\u0443',
'udarr;': '\u21c5',
'Udblac;': '\u0170',
'udblac;': '\u0171',
'udhar;': '\u296e',
'ufisht;': '\u297e',
'Ufr;': '\U0001d518',
'ufr;': '\U0001d532',
'Ugrave': '\xd9',
'ugrave': '\xf9',
'Ugrave;': '\xd9',
'ugrave;': '\xf9',
'uHar;': '\u2963',
'uharl;': '\u21bf',
'uharr;': '\u21be',
'uhblk;': '\u2580',
'ulcorn;': '\u231c',
'ulcorner;': '\u231c',
'ulcrop;': '\u230f',
'ultri;': '\u25f8',
'Umacr;': '\u016a',
'umacr;': '\u016b',
'uml': '\xa8',
'uml;': '\xa8',
'UnderBar;': '_',
'UnderBrace;': '\u23df',
'UnderBracket;': '\u23b5',
'UnderParenthesis;': '\u23dd',
'Union;': '\u22c3',
'UnionPlus;': '\u228e',
'Uogon;': '\u0172',
'uogon;': '\u0173',
'Uopf;': '\U0001d54c',
'uopf;': '\U0001d566',
'UpArrow;': '\u2191',
'Uparrow;': '\u21d1',
'uparrow;': '\u2191',
'UpArrowBar;': '\u2912',
'UpArrowDownArrow;': '\u21c5',
'UpDownArrow;': '\u2195',
'Updownarrow;': '\u21d5',
'updownarrow;': '\u2195',
'UpEquilibrium;': '\u296e',
'upharpoonleft;': '\u21bf',
'upharpoonright;': '\u21be',
'uplus;': '\u228e',
'UpperLeftArrow;': '\u2196',
'UpperRightArrow;': '\u2197',
'Upsi;': '\u03d2',
'upsi;': '\u03c5',
'upsih;': '\u03d2',
'Upsilon;': '\u03a5',
'upsilon;': '\u03c5',
'UpTee;': '\u22a5',
'UpTeeArrow;': '\u21a5',
'upuparrows;': '\u21c8',
'urcorn;': '\u231d',
'urcorner;': '\u231d',
'urcrop;': '\u230e',
'Uring;': '\u016e',
'uring;': '\u016f',
'urtri;': '\u25f9',
'Uscr;': '\U0001d4b0',
'uscr;': '\U0001d4ca',
'utdot;': '\u22f0',
'Utilde;': '\u0168',
'utilde;': '\u0169',
'utri;': '\u25b5',
'utrif;': '\u25b4',
'uuarr;': '\u21c8',
'Uuml': '\xdc',
'uuml': '\xfc',
'Uuml;': '\xdc',
'uuml;': '\xfc',
'uwangle;': '\u29a7',
'vangrt;': '\u299c',
'varepsilon;': '\u03f5',
'varkappa;': '\u03f0',
'varnothing;': '\u2205',
'varphi;': '\u03d5',
'varpi;': '\u03d6',
'varpropto;': '\u221d',
'vArr;': '\u21d5',
'varr;': '\u2195',
'varrho;': '\u03f1',
'varsigma;': '\u03c2',
'varsubsetneq;': '\u228a\ufe00',
'varsubsetneqq;': '\u2acb\ufe00',
'varsupsetneq;': '\u228b\ufe00',
'varsupsetneqq;': '\u2acc\ufe00',
'vartheta;': '\u03d1',
'vartriangleleft;': '\u22b2',
'vartriangleright;': '\u22b3',
'Vbar;': '\u2aeb',
'vBar;': '\u2ae8',
'vBarv;': '\u2ae9',
'Vcy;': '\u0412',
'vcy;': '\u0432',
'VDash;': '\u22ab',
'Vdash;': '\u22a9',
'vDash;': '\u22a8',
'vdash;': '\u22a2',
'Vdashl;': '\u2ae6',
'Vee;': '\u22c1',
'vee;': '\u2228',
'veebar;': '\u22bb',
'veeeq;': '\u225a',
'vellip;': '\u22ee',
'Verbar;': '\u2016',
'verbar;': '|',
'Vert;': '\u2016',
'vert;': '|',
'VerticalBar;': '\u2223',
'VerticalLine;': '|',
'VerticalSeparator;': '\u2758',
'VerticalTilde;': '\u2240',
'VeryThinSpace;': '\u200a',
'Vfr;': '\U0001d519',
'vfr;': '\U0001d533',
'vltri;': '\u22b2',
'vnsub;': '\u2282\u20d2',
'vnsup;': '\u2283\u20d2',
'Vopf;': '\U0001d54d',
'vopf;': '\U0001d567',
'vprop;': '\u221d',
'vrtri;': '\u22b3',
'Vscr;': '\U0001d4b1',
'vscr;': '\U0001d4cb',
'vsubnE;': '\u2acb\ufe00',
'vsubne;': '\u228a\ufe00',
'vsupnE;': '\u2acc\ufe00',
'vsupne;': '\u228b\ufe00',
'Vvdash;': '\u22aa',
'vzigzag;': '\u299a',
'Wcirc;': '\u0174',
'wcirc;': '\u0175',
'wedbar;': '\u2a5f',
'Wedge;': '\u22c0',
'wedge;': '\u2227',
'wedgeq;': '\u2259',
'weierp;': '\u2118',
'Wfr;': '\U0001d51a',
'wfr;': '\U0001d534',
'Wopf;': '\U0001d54e',
'wopf;': '\U0001d568',
'wp;': '\u2118',
'wr;': '\u2240',
'wreath;': '\u2240',
'Wscr;': '\U0001d4b2',
'wscr;': '\U0001d4cc',
'xcap;': '\u22c2',
'xcirc;': '\u25ef',
'xcup;': '\u22c3',
'xdtri;': '\u25bd',
'Xfr;': '\U0001d51b',
'xfr;': '\U0001d535',
'xhArr;': '\u27fa',
'xharr;': '\u27f7',
'Xi;': '\u039e',
'xi;': '\u03be',
'xlArr;': '\u27f8',
'xlarr;': '\u27f5',
'xmap;': '\u27fc',
'xnis;': '\u22fb',
'xodot;': '\u2a00',
'Xopf;': '\U0001d54f',
'xopf;': '\U0001d569',
'xoplus;': '\u2a01',
'xotime;': '\u2a02',
'xrArr;': '\u27f9',
'xrarr;': '\u27f6',
'Xscr;': '\U0001d4b3',
'xscr;': '\U0001d4cd',
'xsqcup;': '\u2a06',
'xuplus;': '\u2a04',
'xutri;': '\u25b3',
'xvee;': '\u22c1',
'xwedge;': '\u22c0',
'Yacute': '\xdd',
'yacute': '\xfd',
'Yacute;': '\xdd',
'yacute;': '\xfd',
'YAcy;': '\u042f',
'yacy;': '\u044f',
'Ycirc;': '\u0176',
'ycirc;': '\u0177',
'Ycy;': '\u042b',
'ycy;': '\u044b',
'yen': '\xa5',
'yen;': '\xa5',
'Yfr;': '\U0001d51c',
'yfr;': '\U0001d536',
'YIcy;': '\u0407',
'yicy;': '\u0457',
'Yopf;': '\U0001d550',
'yopf;': '\U0001d56a',
'Yscr;': '\U0001d4b4',
'yscr;': '\U0001d4ce',
'YUcy;': '\u042e',
'yucy;': '\u044e',
'yuml': '\xff',
'Yuml;': '\u0178',
'yuml;': '\xff',
'Zacute;': '\u0179',
'zacute;': '\u017a',
'Zcaron;': '\u017d',
'zcaron;': '\u017e',
'Zcy;': '\u0417',
'zcy;': '\u0437',
'Zdot;': '\u017b',
'zdot;': '\u017c',
'zeetrf;': '\u2128',
'ZeroWidthSpace;': '\u200b',
'Zeta;': '\u0396',
'zeta;': '\u03b6',
'Zfr;': '\u2128',
'zfr;': '\U0001d537',
'ZHcy;': '\u0416',
'zhcy;': '\u0436',
'zigrarr;': '\u21dd',
'Zopf;': '\u2124',
'zopf;': '\U0001d56b',
'Zscr;': '\U0001d4b5',
'zscr;': '\U0001d4cf',
'zwj;': '\u200d',
'zwnj;': '\u200c',
}
# Inverse mapping: Unicode codepoint -> HTML entity name.
codepoint2name = {cp: entity for entity, cp in name2codepoint.items()}

# Mapping: HTML entity name -> the character it denotes
# (or a character reference if the character is outside the Latin-1 range).
entitydefs = {entity: chr(cp) for entity, cp in name2codepoint.items()}
|
StrellaGroup/erpnext
|
refs/heads/develop
|
erpnext/accounts/doctype/payment_order/test_payment_order.py
|
15
|
# -*- coding: utf-8 -*-
# Copyright (c) 2018, Frappe Technologies Pvt. Ltd. and Contributors
# See license.txt
from __future__ import unicode_literals
import frappe
import unittest
class TestPaymentOrder(unittest.TestCase):
    """Placeholder test case for the Payment Order doctype.

    No tests are implemented yet; the class exists so the test runner
    discovers this doctype's test module.
    """
    pass
|
KitKatXperience/platform_external_chromium_org
|
refs/heads/kk
|
native_client_sdk/src/build_tools/parse_dsc.py
|
23
|
#!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import collections
import fnmatch
import optparse
import os
import sys
# Toolchains a project may target; the TOOLS key must draw from this list.
VALID_TOOLCHAINS = ['newlib', 'glibc', 'pnacl', 'win', 'linux', 'mac']

# Schema for .dsc project-description files, consumed by ValidateFormat().
# 'KEY' : ( <TYPE>, [Accepted Values], <Required?>)
DSC_FORMAT = {
    'DISABLE': (bool, [True, False], False),
    'SEL_LDR': (bool, [True, False], False),
    'DISABLE_PACKAGE': (bool, [True, False], False),
    'TOOLS' : (list, VALID_TOOLCHAINS, True),
    'CONFIGS' : (list, ['Debug', 'Release'], False),
    'PREREQ' : (list, '', False),
    'TARGETS' : (list, {
        'NAME': (str, '', True),
        # main = nexe target
        # lib = library target
        # so = shared object target, automatically added to NMF
        # so-standalone = shared object target, not put into NMF
        'TYPE': (str, ['main', 'lib', 'static-lib', 'so', 'so-standalone'],
                 True),
        'SOURCES': (list, '', True),
        'CFLAGS': (list, '', False),
        'CFLAGS_GCC': (list, '', False),
        'CXXFLAGS': (list, '', False),
        'DEFINES': (list, '', False),
        'LDFLAGS': (list, '', False),
        'INCLUDES': (list, '', False),
        'LIBS' : (dict, VALID_TOOLCHAINS, False),
        'DEPS' : (list, '', False)
        }, True),
    'HEADERS': (list, {
        'FILES': (list, '', True),
        'DEST': (str, '', True),
        }, False),
    'SEARCH': (list, '', False),
    'POST': (str, '', False),
    'PRE': (str, '', False),
    'DEST': (str, ['examples/getting_started', 'examples/api',
                   'examples/demo', 'examples/tutorial',
                   'src', 'tests'], True),
    'NAME': (str, '', False),
    'DATA': (list, '', False),
    'TITLE': (str, '', False),
    'GROUP': (str, '', False),
    'EXPERIMENTAL': (bool, [True, False], False),
    'PERMISSIONS': (list, '', False),
    'SOCKET_PERMISSIONS': (list, '', False)
}
class ValidationError(Exception):
  """Raised when a .dsc project description fails schema validation."""
  pass
def ValidateFormat(src, dsc_format):
  """Validate the dict |src| against the schema |dsc_format|.

  Each schema entry maps a key to (expected_type, expected_value, required).
  Raises ValidationError on the first violation found; returns None when
  |src| is valid.
  """
  # Verify all required keys are there
  for key in dsc_format:
    exp_type, exp_value, required = dsc_format[key]
    if required and key not in src:
      raise ValidationError('Missing required key %s.' % key)

  # For each provided key, verify it's valid
  for key in src:
    # Verify the key is known
    if key not in dsc_format:
      raise ValidationError('Unexpected key %s.' % key)

    exp_type, exp_value, required = dsc_format[key]
    value = src[key]

    # Verify the value is non-empty if required
    if required and not value:
      raise ValidationError('Expected non-empty value for %s.' % key)

    # If the expected type is a dict, but the provided type is a list
    # then the list applies to all keys of the dictionary, so we reset
    # the expected type and value.
    if exp_type is dict:
      if type(value) is list:
        exp_type = list
        exp_value = ''

    # Verify the key is of the expected type
    if exp_type != type(value):
      raise ValidationError('Key %s expects %s not %s.' % (
          key, exp_type.__name__.upper(), type(value).__name__.upper()))

    # If it's a bool, the expected values are always True or False.
    if exp_type is bool:
      continue

    # If it's a string and there are expected values, make sure it matches
    if exp_type is str:
      if type(exp_value) is list and exp_value:
        if value not in exp_value:
          raise ValidationError("Value '%s' not expected for %s." %
                                (value, key))
      continue

    # if it's a list, then we need to validate the values
    if exp_type is list:
      # If we expect a dictionary, then call this recursively
      if type(exp_value) is dict:
        for val in value:
          ValidateFormat(val, exp_value)
        continue
      # If we expect a list of strings
      if type(exp_value) is str:
        for val in value:
          if type(val) is not str:
            raise ValidationError('Value %s in %s is not a string.' %
                                  (val, key))
        continue
      # if we expect a particular string
      if type(exp_value) is list:
        for val in value:
          if val not in exp_value:
            raise ValidationError('Value %s not expected in %s.' %
                                  (val, key))
        continue

    # if we are expecting a dict, verify the keys are allowed
    # (BUGFIX: removed a stray debug print that leaked onto stdout here.)
    if exp_type is dict:
      for sub in value:
        if sub not in exp_value:
          raise ValidationError('Sub key %s not expected in %s.' %
                                (sub, key))
      continue

    # If we got this far, it's an unexpected type
    raise ValidationError('Unexpected type %s for key %s.' %
                          (str(type(src[key])), key))
def LoadProject(filename):
  """Load and validate one .dsc project description.

  Returns the description dict (with an absolute FILEPATH added), or
  None if the project sets DISABLE.  Raises ValidationError on schema
  violations.
  """
  with open(filename, 'r') as descfile:
    # SECURITY NOTE(review): eval() executes the .dsc contents as Python.
    # This is only acceptable because .dsc files are checked-in build
    # descriptions; never feed untrusted input through this path.
    desc = eval(descfile.read(), {}, {})
  if desc.get('DISABLE', False):
    return None
  ValidateFormat(desc, DSC_FORMAT)
  desc['FILEPATH'] = os.path.abspath(filename)
  return desc
def LoadProjectTreeUnfiltered(srcpath):
  """Walk |srcpath| and load every *.dsc project description found.

  Returns a dict mapping each project's DEST key to a list of
  description dicts.
  """
  tree = collections.defaultdict(list)
  for dirpath, _, filenames in os.walk(srcpath):
    for dsc_name in filenames:
      if not fnmatch.fnmatch(dsc_name, '*.dsc'):
        continue
      dsc_path = os.path.join(dirpath, dsc_name)
      try:
        desc = LoadProject(dsc_path)
      except ValidationError as e:
        raise ValidationError("Failed to validate: %s: %s" % (dsc_path, e))
      if desc:
        tree[desc['DEST']].append(desc)
  return tree
def LoadProjectTree(srcpath, include, exclude=None):
  """Load all projects under |srcpath|, then filter them by the
  include/exclude description filters."""
  unfiltered = LoadProjectTreeUnfiltered(srcpath)
  filter_fn = MakeDefaultFilterFn(include, exclude)
  return FilterTree(unfiltered, filter_fn)
def GenerateProjects(tree):
  """Yield (branch, description) pairs for every project in |tree|."""
  for branch, descs in tree.items():
    for desc in descs:
      yield branch, desc
def FilterTree(tree, filter_fn):
  """Return a copy of |tree| keeping only descriptions accepted by
  |filter_fn|."""
  filtered = collections.defaultdict(list)
  for branch, desc in GenerateProjects(tree):
    if not filter_fn(desc):
      continue
    filtered[branch].append(desc)
  return filtered
def MakeDefaultFilterFn(include, exclude):
  """Build a predicate over project descriptions.

  A description passes when it matches |include| (or |include| is
  empty/None) and does not match |exclude|.  The exclude list wins over
  the include list.
  """
  def DefaultFilterFn(desc):
    # Exclude list overrides include list.
    if exclude and DescMatchesFilter(desc, exclude):
      return False
    return not include or DescMatchesFilter(desc, include)
  return DefaultFilterFn
def DescMatchesFilter(desc, filters):
  """Return True if |desc| satisfies every key in |filters|.

  For each filter key, the description's value (False when the key is
  absent) must share at least one element with the expected value(s);
  scalars on either side are treated as one-element sets.
  """
  # BUGFIX: iteritems() is Python-2-only; items() is equivalent there and
  # keeps this helper working under Python 3.
  for key, expected in filters.items():
    # For any filtered key which is unspecified, assumed False
    value = desc.get(key, False)

    # If we provide an expected list, match at least one
    if type(expected) != list:
      expected = set([expected])
    if type(value) != list:
      value = set([value])
    if not set(expected) & set(value):
      return False

  # If we fall through, then we matched the filters
  return True
def PrintProjectTree(tree):
  """Print the tree: each DEST branch followed by its project NAMEs,
  one per indented line."""
  for branch in tree:
    print(branch + ':')
    for desc in tree[branch]:
      print('\t' + desc['NAME'])
def main(argv):
  """Command-line entry point: load the project tree under the given
  directory (default '.') and print it.

  Returns a process exit code: 0 on success, 1 on validation failure.
  """
  parser = optparse.OptionParser(usage='%prog [options] <dir>')
  parser.add_option('-e', '--experimental',
      help='build experimental examples and libraries', action='store_true')
  parser.add_option('-t', '--toolchain',
      help='Build using toolchain. Can be passed more than once.',
      action='append')

  options, args = parser.parse_args(argv[1:])
  filters = {}

  load_from_dir = '.'
  if len(args) > 1:
    parser.error('Expected 0 or 1 args, got %d.' % len(args))
  if args:
    load_from_dir = args[0]

  # Restrict to the requested toolchains, if any were given.
  if options.toolchain:
    filters['TOOLS'] = options.toolchain

  # Unless --experimental was passed, hide experimental projects.
  if not options.experimental:
    filters['EXPERIMENTAL'] = False

  try:
    tree = LoadProjectTree(load_from_dir, include=filters)
  except ValidationError as e:
    sys.stderr.write(str(e) + '\n')
    return 1

  PrintProjectTree(tree)
  return 0
if __name__ == '__main__':
  # Run as a script: exit status comes from main().
  sys.exit(main(sys.argv))
|
Dyhzy/selector-shell
|
refs/heads/master
|
appengine_django/tests/memcache_test.py
|
6
|
#!/usr/bin/python2.4
#
# Copyright 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Ensures the App Engine memcache API works as Django's memcache backend."""
import unittest
from django.core.cache import get_cache
from appengine_django import appconfig
from appengine_django import appid
from appengine_django import have_appserver
class AppengineMemcacheTest(unittest.TestCase):
  """Exercises Django's memcache cache backend on App Engine."""

  def setUp(self):
    """Create the memcache-backed cache used by every test."""
    self._cache = get_cache("memcached://")

  def testSimpleSetGet(self):
    """A value stored with set() comes back unchanged from get()."""
    cache = self._cache
    cache.set("test_key", "test_value")
    self.assertEqual(cache.get("test_key"), "test_value")

  def testDelete(self):
    """delete() removes a previously stored key."""
    cache = self._cache
    cache.set("test_key", "test_value")
    self.assertEqual(cache.has_key("test_key"), True)
    cache.delete("test_key")
    self.assertEqual(cache.has_key("test_key"), False)
|
ProfessionalIT/maxigenios-website
|
refs/heads/master
|
sdk/google_appengine/lib/pyasn1/pyasn1/type/tag.py
|
200
|
# ASN.1 types tags
from operator import getitem
from pyasn1 import error
# Tag class constants -- the class bits of a tag's identifier.
tagClassUniversal = 0x00
tagClassApplication = 0x40
tagClassContext = 0x80
tagClassPrivate = 0xC0

# Tag format constants: simple (primitive) vs constructed encoding.
tagFormatSimple = 0x00
tagFormatConstructed = 0x20

# Tagging category constants (implicit/explicit/untagged).
tagCategoryImplicit = 0x01
tagCategoryExplicit = 0x02
tagCategoryUntagged = 0x04
class Tag:
    """Immutable ASN.1 tag: a (tagClass, tagFormat, tagId) triple.

    Equality, ordering and hashing deliberately ignore the format
    component: the public ``uniq`` attribute is (tagClass, tagId).
    """
    def __init__(self, tagClass, tagFormat, tagId):
        if tagId < 0:
            raise error.PyAsn1Error(
                'Negative tag ID (%s) not allowed' % (tagId,)
                )
        self.__tag = (tagClass, tagFormat, tagId)
        self.uniq = (tagClass, tagId)
        self.__hashedUniqTag = hash(self.uniq)

    def __repr__(self):
        return '%s(tagClass=%s, tagFormat=%s, tagId=%s)' % (
            (self.__class__.__name__,) + self.__tag
            )

    # These is really a hotspot -- expose public "uniq" attribute to save on
    # function calls
    def __eq__(self, other): return self.uniq == other.uniq
    def __ne__(self, other): return self.uniq != other.uniq
    def __lt__(self, other): return self.uniq < other.uniq
    def __le__(self, other): return self.uniq <= other.uniq
    def __gt__(self, other): return self.uniq > other.uniq
    def __ge__(self, other): return self.uniq >= other.uniq
    def __hash__(self): return self.__hashedUniqTag
    def __getitem__(self, idx): return self.__tag[idx]

    def __and__(self, otherTag):
        """Component-wise AND with another (class, format, id) triple."""
        (tagClass, tagFormat, tagId) = otherTag
        # BUGFIX: the original AND-ed the whole tuple (self.__tag & x),
        # which raises TypeError; index each component, as __or__ does.
        return self.__class__(
            self.__tag[0]&tagClass, self.__tag[1]&tagFormat, self.__tag[2]&tagId
            )

    def __or__(self, otherTag):
        """Component-wise OR with another (class, format, id) triple."""
        (tagClass, tagFormat, tagId) = otherTag
        return self.__class__(
            self.__tag[0]|tagClass,
            self.__tag[1]|tagFormat,
            self.__tag[2]|tagId
            )

    def asTuple(self): return self.__tag  # __getitem__() is slow
class TagSet:
    """Immutable, ordered chain of Tag objects applied to an ASN.1 type.

    ``baseTag`` records the type's natural tag; ``superTags`` is the full
    tag chain.  Comparison and hashing are driven by the concatenated
    ``uniq`` tuples of the super tags.
    """
    def __init__(self, baseTag=(), *superTags):
        self.__baseTag = baseTag
        self.__superTags = superTags
        self.__hashedSuperTags = hash(superTags)
        _uniq = ()
        for t in superTags:
            _uniq = _uniq + t.uniq
        # Flattened (class, id) pairs of every super tag; mirrors Tag.uniq.
        self.uniq = _uniq
        self.__lenOfSuperTags = len(superTags)

    def __repr__(self):
        return '%s(%s)' % (
            self.__class__.__name__,
            ', '.join([repr(x) for x in self.__superTags])
            )

    def __add__(self, superTag):
        # Append a tag to the chain (returns a new TagSet).
        return self.__class__(
            self.__baseTag, *self.__superTags + (superTag,)
            )

    def __radd__(self, superTag):
        # Prepend a tag to the chain (returns a new TagSet).
        return self.__class__(
            self.__baseTag, *(superTag,) + self.__superTags
            )

    def tagExplicitly(self, superTag):
        """Return a new TagSet with *superTag* appended (explicit tagging).

        Explicit tags are forced to constructed format; UNIVERSAL-class
        tags are rejected.
        """
        tagClass, tagFormat, tagId = superTag
        if tagClass == tagClassUniversal:
            raise error.PyAsn1Error(
                'Can\'t tag with UNIVERSAL-class tag'
                )
        if tagFormat != tagFormatConstructed:
            superTag = Tag(tagClass, tagFormatConstructed, tagId)
        return self + superTag

    def tagImplicitly(self, superTag):
        """Return a new TagSet whose outermost tag is replaced by *superTag*.

        The replacement tag inherits the format of the tag it replaces.
        """
        tagClass, tagFormat, tagId = superTag
        if self.__superTags:
            superTag = Tag(tagClass, self.__superTags[-1][1], tagId)
        return self[:-1] + superTag

    def getBaseTag(self): return self.__baseTag
    def __getitem__(self, idx):
        if isinstance(idx, slice):
            # Slicing yields a TagSet over the selected super tags.
            return self.__class__(
                self.__baseTag, *getitem(self.__superTags, idx)
                )
        return self.__superTags[idx]
    def __eq__(self, other): return self.uniq == other.uniq
    def __ne__(self, other): return self.uniq != other.uniq
    def __lt__(self, other): return self.uniq < other.uniq
    def __le__(self, other): return self.uniq <= other.uniq
    def __gt__(self, other): return self.uniq > other.uniq
    def __ge__(self, other): return self.uniq >= other.uniq
    def __hash__(self): return self.__hashedSuperTags
    def __len__(self): return self.__lenOfSuperTags
    def isSuperTagSetOf(self, tagSet):
        """Return a true value if *tagSet* extends this tag set.

        Compares the first len(self) super tags of both sets; returns 1
        on match, None otherwise (historical truthy/falsy convention).
        """
        if len(tagSet) < self.__lenOfSuperTags:
            return
        idx = self.__lenOfSuperTags - 1
        while idx >= 0:
            if self.__superTags[idx] != tagSet[idx]:
                return
            idx = idx - 1
        return 1
def initTagSet(tag): return TagSet(tag, tag)
|
KevinGoodsell/sympy
|
refs/heads/cache
|
doc/src/modules/galgebra/GA/reciprocalframeGAtest.py
|
12
|
# Geometric algebra example: build the reciprocal frame (E1, E2, E3) of
# basis vectors e1, e2, e3 and verify the defining orthogonality relations.
# NOTE(review): assumes MV (multivector class), metric, and e1/e2/e3 are
# defined by the surrounding document -- confirm against the full example.
MV.setup('e1 e2 e3',metric)
print 'Example: Reciprocal Frames e1, e2, and e3 unit vectors.\n\n'
# Pseudoscalar of the frame and its (scalar) square.
E = e1^e2^e3
Esq = (E*E)()
print 'E =',E
print 'E^2 =',Esq
Esq_inv = 1/Esq
# Reciprocal frame candidates built from duals of the wedge pairs.
E1 = (e2^e3)*E
E2 = (-1)*(e1^e3)*E
E3 = (e1^e2)*E
print 'E1 = (e2^e3)*E =',E1
print 'E2 =-(e1^e3)*E =',E2
print 'E3 = (e1^e2)*E =',E3
# Cross terms Ei|ej (i != j) should reduce to zero after collecting on
# the metric coefficients and expanding.
w = (E1|e2)
w.collect(MV.g)
w = w().expand()
print 'E1|e2 =',w
w = (E1|e3)
w.collect(MV.g)
w = w().expand()
print 'E1|e3 =',w
w = (E2|e1)
w.collect(MV.g)
w = w().expand()
print 'E2|e1 =',w
w = (E2|e3)
w.collect(MV.g)
w = w().expand()
print 'E2|e3 =',w
w = (E3|e1)
w.collect(MV.g)
w = w().expand()
print 'E3|e1 =',w
w = (E3|e2)
w.collect(MV.g)
w = w().expand()
print 'E3|e2 =',w
# Diagonal terms (Ei|ei)/E^2 should each equal 1.
w = (E1|e1)
w = w().expand()
Esq = Esq.expand()
print '(E1|e1)/E^2 =',w/Esq
w = (E2|e2)
w = w().expand()
print '(E2|e2)/E^2 =',w/Esq
w = (E3|e3)
w = w().expand()
print '(E3|e3)/E^2 =',w/Esq
Example: Reciprocal Frames e1, e2, and e3 unit vectors.
E = e1^e2^e3
E^2 = -1 - 2*(e1.e2)*(e1.e3)*(e2.e3) + (e1.e2)**2 + (e1.e3)**2 + (e2.e3)**2
E1 = (e2^e3)*E = {-1 + (e2.e3)**2}e1+{(e1.e2) - (e1.e3)*(e2.e3)}e2+{(e1.e3) - (e1.e2)*(e2.e3)}e3
E2 =-(e1^e3)*E = {(e1.e2) - (e1.e3)*(e2.e3)}e1+{-1 + (e1.e3)**2}e2+{(e2.e3) - (e1.e2)*(e1.e3)}e3
E3 = (e1^e2)*E = {(e1.e3) - (e1.e2)*(e2.e3)}e1+{(e2.e3) - (e1.e2)*(e1.e3)}e2+{-1 + (e1.e2)**2}e3
E1|e2 = 0
E1|e3 = 0
E2|e1 = 0
E2|e3 = 0
E3|e1 = 0
E3|e2 = 0
(E1|e1)/E^2 = 1
(E2|e2)/E^2 = 1
(E3|e3)/E^2 = 1
|
jkandasa/integration_tests
|
refs/heads/master
|
cfme/utils/appliance/implementations/rest.py
|
1
|
# -*- coding: utf-8 -*-
class ViaREST(object):
    """Thin context object: stores its owner and exposes it as `appliance`;
    identifies itself as 'REST'."""

    def __init__(self, owner):
        self.owner = owner

    @property
    def appliance(self):
        """The owning appliance this context was created for."""
        return self.owner

    def __str__(self):
        return 'REST'
|
krishna-pandey-git/django
|
refs/heads/master
|
tests/queryset_pickle/models.py
|
281
|
import datetime
from django.db import DJANGO_VERSION_PICKLE_KEY, models
from django.utils import six
from django.utils.translation import ugettext_lazy as _
def standalone_number():
    """Module-level callable used as a picklable field default
    (Happening.number1)."""
    return 1
class Numbers(object):
    """Namespace for a staticmethod used as a field default
    (Happening.number2, Python 3 only)."""
    @staticmethod
    def get_static_number():
        return 2
class PreviousDjangoVersionQuerySet(models.QuerySet):
    """QuerySet whose pickle state claims it was pickled under Django 1.0."""
    def __getstate__(self):
        state = super(PreviousDjangoVersionQuerySet, self).__getstate__()
        # Overwrite the recorded Django version to simulate an old pickle.
        state[DJANGO_VERSION_PICKLE_KEY] = '1.0'
        return state
class MissingDjangoVersionQuerySet(models.QuerySet):
    """QuerySet whose pickle state omits the Django version marker."""
    def __getstate__(self):
        state = super(MissingDjangoVersionQuerySet, self).__getstate__()
        # Drop the version key to simulate a pickle of unknown origin.
        del state[DJANGO_VERSION_PICKLE_KEY]
        return state
class Group(models.Model):
    # Lazily-translated verbose name exercises lazy-string pickling.
    name = models.CharField(_('name'), max_length=100)
    objects = models.Manager()
    # Managers built from the version-tampering QuerySets defined above.
    previous_django_version_objects = PreviousDjangoVersionQuerySet.as_manager()
    missing_django_version_objects = MissingDjangoVersionQuerySet.as_manager()
class Event(models.Model):
    """Model with a foreign key to Group, for pickling queries that join."""
    title = models.CharField(max_length=100)
    group = models.ForeignKey(Group, models.CASCADE)
class Happening(models.Model):
    """Model whose fields use callable defaults (which must be picklable
    by reference)."""
    when = models.DateTimeField(blank=True, default=datetime.datetime.now)
    name = models.CharField(blank=True, max_length=100, default="test")
    number1 = models.IntegerField(blank=True, default=standalone_number)
    if six.PY3:
        # default serializable on Python 3 only
        number2 = models.IntegerField(blank=True, default=Numbers.get_static_number)
class Container(object):
    """Plain namespace holding a nested model class."""
    # To test pickling we need a class that isn't defined on module, but
    # is still available from app-cache. So, the Container class moves
    # SomeModel outside of module level
    class SomeModel(models.Model):
        somefield = models.IntegerField()
class M2MModel(models.Model):
    """Model with a many-to-many relation, for pickling m2m queries."""
    groups = models.ManyToManyField(Group)
|
chargehound/chargehound-python
|
refs/heads/master
|
test/test_error.py
|
1
|
import chargehound
import requests_mock
import unittest2
from chargehound.error import ChargehoundBadRequestError, ChargehoundError
class ErrorTest(unittest2.TestCase):
    """Tests for the Chargehound error hierarchy."""

    def setUp(self):
        super(ErrorTest, self).setUp()
        chargehound.api_key = 'API_KEY'

    @requests_mock.mock()
    def test_bad_request(self, mock):
        """A 400 response surfaces as ChargehoundBadRequestError carrying
        the status and message from the error payload."""
        mock.post('https://api.chargehound.com/v1/disputes/dp_123/submit',
                  status_code=400,
                  json={
                      'error': {'status': 400, 'message': 'Bad request.'}
                  })
        try:
            chargehound.Disputes.submit('dp_123')
        except ChargehoundBadRequestError as bad:
            assert bad.status == 400
            assert bad.message == 'Bad request.'
        else:
            # BUGFIX: the original passed silently when no exception was
            # raised, so a broken error path went undetected.
            self.fail('Expected ChargehoundBadRequestError')

    def test_propagate_errors(self):
        """Transport-level failures must propagate unwrapped, not as
        ChargehoundError."""
        orig_host = chargehound.host
        chargehound.host = 'test'
        try:
            try:
                chargehound.Disputes.retrieve('dp_123')
            except Exception as e:
                assert not isinstance(e, ChargehoundError)
            else:
                # BUGFIX: fail explicitly if the bogus host did not error.
                self.fail('Expected a transport-level error')
        finally:
            # BUGFIX: restore the module-global host even when the
            # assertions above fail, so later tests are not poisoned.
            chargehound.host = orig_host
|
mozilla-metrics/hadoop-etls
|
refs/heads/master
|
fhr/v3/base_etl_job.py
|
1
|
#!/usr/bin/env python
import sys, os
import codecs
import datetime
import mrjob.job
import mrjob.protocol
try: # workaround
from fhrdata import FHRData
import util
except ImportError:
pass
class BaseETLJob(mrjob.job.MRJob):
    """Base mrjob job for FHR (Firefox Health Report) ETL tasks.

    Reads tab-separated <key, json> lines from a Hadoop sequence file and
    tracks malformed records via Hadoop counters.
    """
    HADOOP_INPUT_FORMAT = 'SequenceFileAsTextInputFormat'
    OUTPUT_PROTOCOL = mrjob.protocol.RawValueProtocol

    def mark_invalid_input(self):
        self.increment_counter("errors", "invalid_input_line")

    def mark_invalid_json(self):
        self.increment_counter("errors", "invalid_json")

    def mark_invalid_report(self):
        self.increment_counter("errors", "invalid_report")

    def mark_invalid_date(self):
        # BUGFIX: use the same "errors" counter group as the other markers
        # (was "error", which split the counts across two groups).
        self.increment_counter("errors", "invalid_date")

    def configure_options(self):
        super(BaseETLJob, self).configure_options()
        self.add_passthrough_option(
            '--field-separator', default=chr(1),
            help="Specify field separator")
        self.add_passthrough_option(
            '--snapshot-date', default=datetime.datetime.now().strftime("%Y-%m-%d"),
            # BUGFIX: help text was copy-pasted from --field-separator.
            help="Specify snapshot date (YYYY-MM-DD)")

    def get_fhr_report(self, line):
        """Parse one tab-separated input line into an FHRData record.

        Returns the record, or None (after bumping the matching error
        counter) when the line has no payload or the JSON fails to parse.
        """
        try:
            raw_json = line.split("\t", 1)[1]
        except Exception:
            # BUGFIX: narrowed from a bare except, which also swallowed
            # KeyboardInterrupt/SystemExit.
            self.mark_invalid_input()
            return
        try:
            rec = FHRData(raw_json)
        except Exception:
            self.mark_invalid_json()
            return
        return rec
|
rio-group/trabalho-ai-pacman
|
refs/heads/master
|
graphicsDisplay.py
|
2
|
# graphicsDisplay.py
# ------------------
# Licensing Information: Please do not distribute or publish solutions to this
# project. You are free to use and extend these projects for educational
# purposes. The Pacman AI projects were developed at UC Berkeley, primarily by
# John DeNero (denero@cs.berkeley.edu) and Dan Klein (klein@cs.berkeley.edu).
# For more info, see http://inst.eecs.berkeley.edu/~cs188/sp09/pacman.html
from graphicsUtils import *
import math, time
from game import Directions
###########################
# GRAPHICS DISPLAY CODE #
###########################
# Most code by Dan Klein and John Denero written or rewritten for cs188, UC Berkeley.
# Some code from a Pacman implementation by LiveWires, and used / modified with permission.
# Rendering parameters; sizes are fractions of a grid cell unless noted.
DEFAULT_GRID_SIZE = 30.0
INFO_PANE_HEIGHT = 35
BACKGROUND_COLOR = formatColor(0,0,0)
WALL_COLOR = formatColor(0.0/255.0, 51.0/255.0, 255.0/255.0)
INFO_PANE_COLOR = formatColor(.4,.4,0)
SCORE_COLOR = formatColor(.9, .9, .9)
PACMAN_OUTLINE_WIDTH = 2
PACMAN_CAPTURE_OUTLINE_WIDTH = 4

# Ghost palette, indexed by agent index.
GHOST_COLORS = []
GHOST_COLORS.append(formatColor(.9,0,0)) # Red
GHOST_COLORS.append(formatColor(0,.3,.9)) # Blue
GHOST_COLORS.append(formatColor(.98,.41,.07)) # Orange
GHOST_COLORS.append(formatColor(.1,.75,.7)) # Green
GHOST_COLORS.append(formatColor(1.0,0.6,0.0)) # Yellow
GHOST_COLORS.append(formatColor(.4,0.13,0.91)) # Purple
TEAM_COLORS = GHOST_COLORS[:2]

# Ghost body outline as (x, y) offsets, scaled by GHOST_SIZE * grid size.
GHOST_SHAPE = [
    ( 0, 0.3 ),
    ( 0.25, 0.75 ),
    ( 0.5, 0.3 ),
    ( 0.75, 0.75 ),
    ( 0.75, -0.5 ),
    ( 0.5, -0.75 ),
    (-0.5, -0.75 ),
    (-0.75, -0.5 ),
    (-0.75, 0.75 ),
    (-0.5, 0.3 ),
    (-0.25, 0.75 )
]
GHOST_SIZE = 0.65
SCARED_COLOR = formatColor(1,1,1)
GHOST_VEC_COLORS = list(map(colorToVector, GHOST_COLORS))

PACMAN_COLOR = formatColor(255.0/255.0,255.0/255.0,61.0/255)
PACMAN_SCALE = 0.5
#pacman_speed = 0.25

# Food
FOOD_COLOR = formatColor(1,1,1)
FOOD_SIZE = 0.1

# Laser
LASER_COLOR = formatColor(1,0,0)
LASER_SIZE = 0.02

# Capsule graphics
CAPSULE_COLOR = formatColor(1,1,1)
CAPSULE_SIZE = 0.25

# Drawing walls
WALL_RADIUS = 0.15
class InfoPane:
    """Status panel drawn beneath the maze: score, team label, and (in
    capture games) per-ghost distance readouts."""

    def __init__(self, layout, gridSize):
        self.gridSize = gridSize
        self.width = (layout.width) * gridSize
        # The pane sits one grid row below the maze.
        self.base = (layout.height + 1) * gridSize
        self.height = INFO_PANE_HEIGHT
        self.fontSize = 24
        self.textColor = PACMAN_COLOR
        self.drawPane()

    def toScreen(self, pos, y = None):
        """
        Translates a point relative from the bottom left of the info pane.
        """
        if y is None:  # idiom fix: identity comparison for None
            x, y = pos
        else:
            x = pos
        x = self.gridSize + x  # Margin
        y = self.base + y
        return x, y

    def drawPane(self):
        """Create the score text object."""
        self.scoreText = text( self.toScreen(0, 0 ), self.textColor, "SCORE: 0", "Times", self.fontSize, "bold")

    def initializeGhostDistances(self, distances):
        """Create one distance readout per ghost, font-scaled to pane width."""
        self.ghostDistanceText = []
        size = 20
        if self.width < 240:
            size = 12
        if self.width < 160:
            size = 10
        for i, d in enumerate(distances):
            t = text( self.toScreen(self.width/2 + self.width/8 * i, 0), GHOST_COLORS[i+1], d, "Times", size, "bold")
            self.ghostDistanceText.append(t)

    def updateScore(self, score):
        changeText(self.scoreText, "SCORE: % 4d" % score)

    def setTeam(self, isBlue):
        # BUGFIX: the original bound the label to a local named `text`,
        # shadowing the graphics text() helper it then tried to call
        # (raising "'str' object is not callable").  Use a distinct name.
        label = "RED TEAM"
        if isBlue: label = "BLUE TEAM"
        self.teamText = text( self.toScreen(300, 0 ), self.textColor, label, "Times", self.fontSize, "bold")

    def updateGhostDistances(self, distances):
        """Lazily create, then refresh, the ghost distance readouts."""
        if len(distances) == 0: return
        if 'ghostDistanceText' not in dir(self): self.initializeGhostDistances(distances)
        else:
            for i, d in enumerate(distances):
                changeText(self.ghostDistanceText[i], d)

    # The remaining hooks are intentionally no-ops in this display.
    def drawGhost(self):
        pass

    def drawPacman(self):
        pass

    def drawWarning(self):
        pass

    def clearIcon(self):
        pass

    def updateMessage(self, message):
        pass

    def clearMessage(self):
        pass
class PacmanGraphics:
    def __init__(self, zoom=1.0, frameTime=0.0, capture=False):
        """Configure display scaling and animation speed.

        zoom scales the grid; frameTime is seconds per animation frame
        (negative means single-step on keypress -- see animatePacman);
        capture toggles capture-the-flag coloring.
        """
        self.have_window = 0
        self.currentGhostImages = {}
        self.pacmanImage = None
        self.zoom = zoom
        self.gridSize = DEFAULT_GRID_SIZE * zoom
        self.capture = capture
        self.frameTime = frameTime
    def initialize(self, state, isBlue = False):
        """Create the window and draw the initial game state."""
        self.isBlue = isBlue
        self.startGraphics(state)
        # self.drawDistributions(state)
        self.distributionImages = None  # Initialized lazily
        self.drawStaticObjects(state)
        self.drawAgentObjects(state)
        # Information
        self.previousState = state
    def startGraphics(self, state):
        """Open a window sized for the layout and build the info pane."""
        self.layout = state.layout
        layout = self.layout
        self.width = layout.width
        self.height = layout.height
        self.make_window(self.width, self.height)
        self.infoPane = InfoPane(layout, self.gridSize)
        self.currentState = layout
    def drawDistributions(self, state):
        """Create one background square per grid cell, stored column-major in
        self.distributionImages (presumably for belief shading -- confirm
        against callers outside this view)."""
        walls = state.layout.walls
        dist = []
        for x in range(walls.width):
            distx = []
            dist.append(distx)
            for y in range(walls.height):
                ( screen_x, screen_y ) = self.to_screen( (x, y) )
                block = square( (screen_x, screen_y),
                                0.5 * self.gridSize,
                                color = BACKGROUND_COLOR,
                                filled = 1, behind=2)
                distx.append(block)
        self.distributionImages = dist
    def drawStaticObjects(self, state):
        """Draw walls, food and capsules once; they only change by removal."""
        layout = self.layout
        self.drawWalls(layout.walls)
        self.food = self.drawFood(layout.food)
        self.capsules = self.drawCapsules(layout.capsules)
        refresh()
    def drawAgentObjects(self, state):
        """Draw every agent, keeping (agentState, image) pairs for later
        animation in self.agentImages."""
        self.agentImages = []  # (agentState, image)
        for index, agent in enumerate(state.agentStates):
            if agent.isPacman:
                image = self.drawPacman(agent, index)
                self.agentImages.append( (agent, image) )
            else:
                image = self.drawGhost(agent, index)
                self.agentImages.append( (agent, image) )
        refresh()
    def swapImages(self, agentIndex, newState):
        """
        Changes an image from a ghost to a pacman or vis versa (for capture)
        """
        prevState, prevImage = self.agentImages[agentIndex]
        # Remove the old drawing before re-drawing as the other agent kind.
        for item in prevImage: remove_from_screen(item)
        if newState.isPacman:
            image = self.drawPacman(newState, agentIndex)
            self.agentImages[agentIndex] = (newState, image )
        else:
            image = self.drawGhost(newState, agentIndex)
            self.agentImages[agentIndex] = (newState, image )
        refresh()
def update(self, newState):
agentIndex = newState._agentMoved
agentState = newState.agentStates[agentIndex]
if self.agentImages[agentIndex][0].isPacman != agentState.isPacman: self.swapImages(agentIndex, agentState)
prevState, prevImage = self.agentImages[agentIndex]
if agentState.isPacman:
self.animatePacman(agentState, prevState, prevImage)
else:
self.moveGhost(agentState, agentIndex, prevState, prevImage)
self.agentImages[agentIndex] = (agentState, prevImage)
if newState._foodEaten != None:
self.removeFood(newState._foodEaten, self.food)
if newState._capsuleEaten != None:
self.removeCapsule(newState._capsuleEaten, self.capsules)
self.infoPane.updateScore(newState.score)
if 'ghostDistances' in dir(newState):
self.infoPane.updateGhostDistances(newState.ghostDistances)
    def make_window(self, width, height):
        """Open the graphics window sized to the grid plus a one-cell
        margin on each side and the info pane below."""
        grid_width = (width-1) * self.gridSize
        grid_height = (height-1) * self.gridSize
        screen_width = 2*self.gridSize + grid_width
        screen_height = 2*self.gridSize + grid_height + INFO_PANE_HEIGHT
        begin_graphics(screen_width,
                       screen_height,
                       BACKGROUND_COLOR,
                       "AI Pacman")
    def drawPacman(self, pacman, index):
        """Draw Pacman as a circle with a mouth arc; returns a one-element
        list holding the image handle."""
        position = self.getPosition(pacman)
        screen_point = self.to_screen(position)
        endpoints = self.getEndpoints(self.getDirection(pacman))
        width = PACMAN_OUTLINE_WIDTH
        outlineColor = PACMAN_COLOR
        fillColor = PACMAN_COLOR
        if self.capture:
            # In capture mode Pacman wears his team's ghost colors.
            outlineColor = TEAM_COLORS[index % 2]
            fillColor = GHOST_COLORS[index]
            width = PACMAN_CAPTURE_OUTLINE_WIDTH
        return [circle(screen_point, PACMAN_SCALE * self.gridSize,
                       fillColor = fillColor, outlineColor = outlineColor,
                       endpoints = endpoints,
                       width = width)]
def getEndpoints(self, direction, position=(0,0)):
x, y = position
pos = x - int(x) + y - int(y)
width = 30 + 80 * math.sin(math.pi* pos)
delta = width / 2
if (direction == 'West'):
endpoints = (180+delta, 180-delta)
elif (direction == 'North'):
endpoints = (90+delta, 90-delta)
elif (direction == 'South'):
endpoints = (270+delta, 270-delta)
else:
endpoints = (0+delta, 0-delta)
return endpoints
def movePacman(self, position, direction, image):
screenPosition = self.to_screen(position)
endpoints = self.getEndpoints( direction, position )
r = PACMAN_SCALE * self.gridSize
moveCircle(image[0], screenPosition, r, endpoints)
refresh()
    def animatePacman(self, pacman, prevPacman, image):
        # Animate pacman's move from its previous position to the new one.
        #
        # Behaviour depends on self.frameTime:
        #   * < 0   -- single-step mode: wait for a keypress each move;
        #              pressing 'q' switches back to continuous play;
        #   * > 0.01 or < 0 -- interpolate the move over 4 frames;
        #   * otherwise     -- jump straight to the new position.
        if self.frameTime < 0:
            print('Press any key to step forward, "q" to play')
            keys = wait_for_keys()
            if 'q' in keys:
                self.frameTime = 0.1
        if self.frameTime > 0.01 or self.frameTime < 0:
            start = time.time()  # NOTE(review): unused; kept as-is
            fx, fy = self.getPosition(prevPacman)
            px, py = self.getPosition(pacman)
            frames = 4.0
            # Linear interpolation between the old and new grid positions.
            for i in range(1,int(frames) + 1):
                pos = px*i/frames + fx*(frames-i)/frames, py*i/frames + fy*(frames-i)/frames
                self.movePacman(pos, self.getDirection(pacman), image)
                refresh()
                sleep(abs(self.frameTime) / frames)
        else:
            self.movePacman(self.getPosition(pacman), self.getDirection(pacman), image)
        refresh()
def getGhostColor(self, ghost, ghostIndex):
if ghost.scaredTimer > 0:
return SCARED_COLOR
else:
return GHOST_COLORS[ghostIndex]
def drawGhost(self, ghost, agentIndex):
pos = self.getPosition(ghost)
dir = self.getDirection(ghost)
(screen_x, screen_y) = (self.to_screen(pos) )
coords = []
for (x, y) in GHOST_SHAPE:
coords.append((x*self.gridSize*GHOST_SIZE + screen_x, y*self.gridSize*GHOST_SIZE + screen_y))
colour = self.getGhostColor(ghost, agentIndex)
body = polygon(coords, colour, filled = 1)
WHITE = formatColor(1.0, 1.0, 1.0)
BLACK = formatColor(0.0, 0.0, 0.0)
dx = 0
dy = 0
if dir == 'North':
dy = -0.2
if dir == 'South':
dy = 0.2
if dir == 'East':
dx = 0.2
if dir == 'West':
dx = -0.2
leftEye = circle((screen_x+self.gridSize*GHOST_SIZE*(-0.3+dx/1.5), screen_y-self.gridSize*GHOST_SIZE*(0.3-dy/1.5)), self.gridSize*GHOST_SIZE*0.2, WHITE, WHITE)
rightEye = circle((screen_x+self.gridSize*GHOST_SIZE*(0.3+dx/1.5), screen_y-self.gridSize*GHOST_SIZE*(0.3-dy/1.5)), self.gridSize*GHOST_SIZE*0.2, WHITE, WHITE)
leftPupil = circle((screen_x+self.gridSize*GHOST_SIZE*(-0.3+dx), screen_y-self.gridSize*GHOST_SIZE*(0.3-dy)), self.gridSize*GHOST_SIZE*0.08, BLACK, BLACK)
rightPupil = circle((screen_x+self.gridSize*GHOST_SIZE*(0.3+dx), screen_y-self.gridSize*GHOST_SIZE*(0.3-dy)), self.gridSize*GHOST_SIZE*0.08, BLACK, BLACK)
ghostImageParts = []
ghostImageParts.append(body)
ghostImageParts.append(leftEye)
ghostImageParts.append(rightEye)
ghostImageParts.append(leftPupil)
ghostImageParts.append(rightPupil)
return ghostImageParts
    def moveEyes(self, pos, dir, eyes):
        """Reposition the four eye/pupil circles of a ghost image.

        ``eyes`` is [leftEye, rightEye, leftPupil, rightPupil] -- the last
        four image parts produced by drawGhost().
        """
        (screen_x, screen_y) = (self.to_screen(pos) )
        # Shift the eyes toward the ghost's direction of travel.
        dx = 0
        dy = 0
        if dir == 'North':
            dy = -0.2
        if dir == 'South':
            dy = 0.2
        if dir == 'East':
            dx = 0.2
        if dir == 'West':
            dx = -0.2
        # Same offset formulas as drawGhost(): whites move by dx/1.5,
        # pupils by the full dx so they "look" in the travel direction.
        moveCircle(eyes[0],(screen_x+self.gridSize*GHOST_SIZE*(-0.3+dx/1.5), screen_y-self.gridSize*GHOST_SIZE*(0.3-dy/1.5)), self.gridSize*GHOST_SIZE*0.2)
        moveCircle(eyes[1],(screen_x+self.gridSize*GHOST_SIZE*(0.3+dx/1.5), screen_y-self.gridSize*GHOST_SIZE*(0.3-dy/1.5)), self.gridSize*GHOST_SIZE*0.2)
        moveCircle(eyes[2],(screen_x+self.gridSize*GHOST_SIZE*(-0.3+dx), screen_y-self.gridSize*GHOST_SIZE*(0.3-dy)), self.gridSize*GHOST_SIZE*0.08)
        moveCircle(eyes[3],(screen_x+self.gridSize*GHOST_SIZE*(0.3+dx), screen_y-self.gridSize*GHOST_SIZE*(0.3-dy)), self.gridSize*GHOST_SIZE*0.08)
def moveGhost(self, ghost, ghostIndex, prevGhost, ghostImageParts):
old_x, old_y = self.to_screen(self.getPosition(prevGhost))
new_x, new_y = self.to_screen(self.getPosition(ghost))
delta = new_x - old_x, new_y - old_y
for ghostImagePart in ghostImageParts:
move_by(ghostImagePart, delta)
refresh()
if ghost.scaredTimer > 0:
color = SCARED_COLOR
else:
color = GHOST_COLORS[ghostIndex]
edit(ghostImageParts[0], ('fill', color), ('outline', color))
self.moveEyes(self.getPosition(ghost), self.getDirection(ghost), ghostImageParts[-4:])
refresh()
def getPosition(self, agentState):
if agentState.configuration == None: return (-1000, -1000)
return agentState.getPosition()
def getDirection(self, agentState):
if agentState.configuration == None: return Directions.STOP
return agentState.configuration.getDirection()
    def finish(self):
        """Close the graphics window."""
        end_graphics()
def to_screen(self, point):
( x, y ) = point
#y = self.height - y
x = (x + 1)*self.gridSize
y = (self.height - y)*self.gridSize
return ( x, y )
# Fixes some TK issue with off-center circles
def to_screen2(self, point):
( x, y ) = point
#y = self.height - y
x = (x + 1)*self.gridSize
y = (self.height - y)*self.gridSize
return ( x, y )
    def drawWalls(self, wallMatrix):
        """Draw the maze walls as rounded, connected segments.

        Each wall cell is drawn one quadrant (NE/NW/SE/SW) at a time; the
        shape of each quadrant (inner arc, straight line, or outer arc)
        depends on which of the eight neighbouring cells are also walls,
        so adjacent walls join into smooth corridors.
        """
        wallColor = WALL_COLOR
        for xNum, x in enumerate(wallMatrix):
            # Capture games color walls by board half (red/blue teams).
            if self.capture and (xNum * 2) < wallMatrix.width: wallColor = TEAM_COLORS[0]
            if self.capture and (xNum * 2) >= wallMatrix.width: wallColor = TEAM_COLORS[1]
            for yNum, cell in enumerate(x):
                if cell: # There's a wall here
                    pos = (xNum, yNum)
                    screen = self.to_screen(pos)
                    screen2 = self.to_screen2(pos)
                    # draw each quadrant of the square based on adjacent walls
                    wIsWall = self.isWall(xNum-1, yNum, wallMatrix)
                    eIsWall = self.isWall(xNum+1, yNum, wallMatrix)
                    nIsWall = self.isWall(xNum, yNum+1, wallMatrix)
                    sIsWall = self.isWall(xNum, yNum-1, wallMatrix)
                    nwIsWall = self.isWall(xNum-1, yNum+1, wallMatrix)
                    swIsWall = self.isWall(xNum-1, yNum-1, wallMatrix)
                    neIsWall = self.isWall(xNum+1, yNum+1, wallMatrix)
                    seIsWall = self.isWall(xNum+1, yNum-1, wallMatrix)
                    # NE quadrant
                    if (not nIsWall) and (not eIsWall):
                        # inner circle
                        circle(screen2, WALL_RADIUS * self.gridSize, wallColor, wallColor, (0,91), 'arc')
                    if (nIsWall) and (not eIsWall):
                        # vertical line
                        line(add(screen, (self.gridSize*WALL_RADIUS, 0)), add(screen, (self.gridSize*WALL_RADIUS, self.gridSize*(-0.5)-1)), wallColor)
                    if (not nIsWall) and (eIsWall):
                        # horizontal line
                        line(add(screen, (0, self.gridSize*(-1)*WALL_RADIUS)), add(screen, (self.gridSize*0.5+1, self.gridSize*(-1)*WALL_RADIUS)), wallColor)
                    if (nIsWall) and (eIsWall) and (not neIsWall):
                        # outer circle
                        circle(add(screen2, (self.gridSize*2*WALL_RADIUS, self.gridSize*(-2)*WALL_RADIUS)), WALL_RADIUS * self.gridSize-1, wallColor, wallColor, (180,271), 'arc')
                        line(add(screen, (self.gridSize*2*WALL_RADIUS-1, self.gridSize*(-1)*WALL_RADIUS)), add(screen, (self.gridSize*0.5+1, self.gridSize*(-1)*WALL_RADIUS)), wallColor)
                        line(add(screen, (self.gridSize*WALL_RADIUS, self.gridSize*(-2)*WALL_RADIUS+1)), add(screen, (self.gridSize*WALL_RADIUS, self.gridSize*(-0.5))), wallColor)
                    # NW quadrant
                    if (not nIsWall) and (not wIsWall):
                        # inner circle
                        circle(screen2, WALL_RADIUS * self.gridSize, wallColor, wallColor, (90,181), 'arc')
                    if (nIsWall) and (not wIsWall):
                        # vertical line
                        line(add(screen, (self.gridSize*(-1)*WALL_RADIUS, 0)), add(screen, (self.gridSize*(-1)*WALL_RADIUS, self.gridSize*(-0.5)-1)), wallColor)
                    if (not nIsWall) and (wIsWall):
                        # horizontal line
                        line(add(screen, (0, self.gridSize*(-1)*WALL_RADIUS)), add(screen, (self.gridSize*(-0.5)-1, self.gridSize*(-1)*WALL_RADIUS)), wallColor)
                    if (nIsWall) and (wIsWall) and (not nwIsWall):
                        # outer circle
                        circle(add(screen2, (self.gridSize*(-2)*WALL_RADIUS, self.gridSize*(-2)*WALL_RADIUS)), WALL_RADIUS * self.gridSize-1, wallColor, wallColor, (270,361), 'arc')
                        line(add(screen, (self.gridSize*(-2)*WALL_RADIUS+1, self.gridSize*(-1)*WALL_RADIUS)), add(screen, (self.gridSize*(-0.5), self.gridSize*(-1)*WALL_RADIUS)), wallColor)
                        line(add(screen, (self.gridSize*(-1)*WALL_RADIUS, self.gridSize*(-2)*WALL_RADIUS+1)), add(screen, (self.gridSize*(-1)*WALL_RADIUS, self.gridSize*(-0.5))), wallColor)
                    # SE quadrant
                    if (not sIsWall) and (not eIsWall):
                        # inner circle
                        circle(screen2, WALL_RADIUS * self.gridSize, wallColor, wallColor, (270,361), 'arc')
                    if (sIsWall) and (not eIsWall):
                        # vertical line
                        line(add(screen, (self.gridSize*WALL_RADIUS, 0)), add(screen, (self.gridSize*WALL_RADIUS, self.gridSize*(0.5)+1)), wallColor)
                    if (not sIsWall) and (eIsWall):
                        # horizontal line
                        line(add(screen, (0, self.gridSize*(1)*WALL_RADIUS)), add(screen, (self.gridSize*0.5+1, self.gridSize*(1)*WALL_RADIUS)), wallColor)
                    if (sIsWall) and (eIsWall) and (not seIsWall):
                        # outer circle
                        circle(add(screen2, (self.gridSize*2*WALL_RADIUS, self.gridSize*(2)*WALL_RADIUS)), WALL_RADIUS * self.gridSize-1, wallColor, wallColor, (90,181), 'arc')
                        line(add(screen, (self.gridSize*2*WALL_RADIUS-1, self.gridSize*(1)*WALL_RADIUS)), add(screen, (self.gridSize*0.5, self.gridSize*(1)*WALL_RADIUS)), wallColor)
                        line(add(screen, (self.gridSize*WALL_RADIUS, self.gridSize*(2)*WALL_RADIUS-1)), add(screen, (self.gridSize*WALL_RADIUS, self.gridSize*(0.5))), wallColor)
                    # SW quadrant
                    if (not sIsWall) and (not wIsWall):
                        # inner circle
                        circle(screen2, WALL_RADIUS * self.gridSize, wallColor, wallColor, (180,271), 'arc')
                    if (sIsWall) and (not wIsWall):
                        # vertical line
                        line(add(screen, (self.gridSize*(-1)*WALL_RADIUS, 0)), add(screen, (self.gridSize*(-1)*WALL_RADIUS, self.gridSize*(0.5)+1)), wallColor)
                    if (not sIsWall) and (wIsWall):
                        # horizontal line
                        line(add(screen, (0, self.gridSize*(1)*WALL_RADIUS)), add(screen, (self.gridSize*(-0.5)-1, self.gridSize*(1)*WALL_RADIUS)), wallColor)
                    if (sIsWall) and (wIsWall) and (not swIsWall):
                        # outer circle
                        circle(add(screen2, (self.gridSize*(-2)*WALL_RADIUS, self.gridSize*(2)*WALL_RADIUS)), WALL_RADIUS * self.gridSize-1, wallColor, wallColor, (0,91), 'arc')
                        line(add(screen, (self.gridSize*(-2)*WALL_RADIUS+1, self.gridSize*(1)*WALL_RADIUS)), add(screen, (self.gridSize*(-0.5), self.gridSize*(1)*WALL_RADIUS)), wallColor)
                        line(add(screen, (self.gridSize*(-1)*WALL_RADIUS, self.gridSize*(2)*WALL_RADIUS-1)), add(screen, (self.gridSize*(-1)*WALL_RADIUS, self.gridSize*(0.5))), wallColor)
def isWall(self, x, y, walls):
if x < 0 or y < 0:
return False
if x >= walls.width or y >= walls.height:
return False
return walls[x][y]
def drawFood(self, foodMatrix ):
foodImages = []
color = FOOD_COLOR
for xNum, x in enumerate(foodMatrix):
if self.capture and (xNum * 2) <= foodMatrix.width: color = TEAM_COLORS[0]
if self.capture and (xNum * 2) > foodMatrix.width: color = TEAM_COLORS[1]
imageRow = []
foodImages.append(imageRow)
for yNum, cell in enumerate(x):
if cell: # There's food here
screen = self.to_screen((xNum, yNum ))
dot = circle( screen,
FOOD_SIZE * self.gridSize,
outlineColor = color, fillColor = color,
width = 1)
imageRow.append(dot)
else:
imageRow.append(None)
return foodImages
def drawCapsules(self, capsules ):
capsuleImages = {}
for capsule in capsules:
( screen_x, screen_y ) = self.to_screen(capsule)
dot = circle( (screen_x, screen_y),
CAPSULE_SIZE * self.gridSize,
outlineColor = CAPSULE_COLOR,
fillColor = CAPSULE_COLOR,
width = 1)
capsuleImages[capsule] = dot
return capsuleImages
def removeFood(self, cell, foodImages ):
x, y = cell
remove_from_screen(foodImages[x][y])
def removeCapsule(self, cell, capsuleImages ):
x, y = cell
remove_from_screen(capsuleImages[(x, y)])
def drawExpandedCells(self, cells):
"""
Draws an overlay of expanded grid positions for search agents
"""
n = float(len(cells))
baseColor = [1.0, 0.0, 0.0]
self.clearExpandedCells()
self.expandedCells = []
for k, cell in enumerate(cells):
screenPos = self.to_screen( cell)
cellColor = formatColor(*[(n-k) * c * .5 / n + .25 for c in baseColor])
block = square(screenPos,
0.5 * self.gridSize,
color = cellColor,
filled = 1, behind=2)
self.expandedCells.append(block)
if self.frameTime < 0:
refresh()
def clearExpandedCells(self):
if 'expandedCells' in dir(self) and len(self.expandedCells) > 0:
for cell in self.expandedCells:
remove_from_screen(cell)
def updateDistributions(self, distributions):
"Draws an agent's belief distributions"
if self.distributionImages == None:
self.drawDistributions(self.previousState)
for x in range(len(self.distributionImages)):
for y in range(len(self.distributionImages[0])):
image = self.distributionImages[x][y]
weights = [dist[ (x,y) ] for dist in distributions]
if sum(weights) != 0:
pass
# Fog of war
color = [0.0,0.0,0.0]
colors = GHOST_VEC_COLORS[1:] # With Pacman
if self.capture: colors = GHOST_VEC_COLORS
for weight, gcolor in zip(weights, colors):
color = [min(1.0, c + 0.95 * g * weight ** .3) for c,g in zip(color, gcolor)]
changeColor(image, formatColor(*color))
refresh()
class FirstPersonPacmanGraphics(PacmanGraphics):
    """Graphics that render the game from pacman's point of view: ghosts
    may be hidden unless currently visible to the agent."""

    def __init__(self, zoom=1.0, showGhosts=True, capture=False, frameTime=0):
        PacmanGraphics.__init__(self, zoom, frameTime=frameTime)
        self.showGhosts = showGhosts
        self.capture = capture

    def initialize(self, state, isBlue=False):
        """Open the window and draw the initial game state."""
        self.isBlue = isBlue
        PacmanGraphics.startGraphics(self, state)
        # (Removed unused locals 'walls' and 'dist' from the original.)
        self.layout = state.layout
        # Belief-distribution images are created lazily on first update.
        self.distributionImages = None
        self.drawStaticObjects(state)
        self.drawAgentObjects(state)
        self.previousState = state

    def lookAhead(self, config, state):
        """Redraw only the ghosts pacman can currently see."""
        if config.getDirection() == 'Stop':
            return
        # Draw relevant ghosts
        allGhosts = state.getGhostStates()
        visibleGhosts = state.getVisibleGhosts()
        for i, ghost in enumerate(allGhosts):
            if ghost in visibleGhosts:
                self.drawGhost(ghost, i)
            else:
                # NOTE(review): currentGhostImages is never initialized in
                # this class -- confirm a superclass or caller sets it.
                self.currentGhostImages[i] = None

    def getGhostColor(self, ghost, ghostIndex):
        # First-person view never shows the scared color.
        return GHOST_COLORS[ghostIndex]

    def getPosition(self, ghostState):
        """Hide non-visible ghosts by reporting an off-screen position."""
        if not self.showGhosts and not ghostState.isPacman and ghostState.getPosition()[1] > 1:
            return (-1000, -1000)
        else:
            return PacmanGraphics.getPosition(self, ghostState)
def add(x, y):
    """Component-wise sum of two 2-tuples (2D vector addition)."""
    ax, ay = x
    bx, by = y
    return (ax + bx, ay + by)
# Saving graphical output
# -----------------------
# Note: to make an animated gif from this postscript output, try the command:
# convert -delay 7 -loop 1 -compress lzw -layers optimize frame* out.gif
# convert is part of imagemagick (freeware)
SAVE_POSTSCRIPT = False
POSTSCRIPT_OUTPUT_DIR = 'frames'
FRAME_NUMBER = 0
import os
def saveFrame():
    "Saves the current graphical output as a postscript file"
    # Only FRAME_NUMBER is rebound; the other module globals are read-only
    # here, so declaring them global was unnecessary.
    global FRAME_NUMBER
    if not SAVE_POSTSCRIPT: return
    # makedirs(exist_ok=True) avoids the exists()/mkdir() race.
    os.makedirs(POSTSCRIPT_OUTPUT_DIR, exist_ok=True)
    name = os.path.join(POSTSCRIPT_OUTPUT_DIR, 'frame_%08d.ps' % FRAME_NUMBER)
    FRAME_NUMBER += 1
    writePostscript(name) # writes the current canvas
|
gabrielfalcao/lettuce
|
refs/heads/master
|
tests/integration/lib/Django-1.2.5/tests/regressiontests/syndication/models.py
|
103
|
from django.db import models
class Entry(models.Model):
    # A dated blog entry used by the syndication-framework regression tests.
    title = models.CharField(max_length=200)
    date = models.DateTimeField()
    class Meta:
        # Feeds depend on chronological ordering (oldest first).
        ordering = ('date',)
    def __unicode__(self):
        # Python 2 string representation (Django 1.2-era code).
        return self.title
    def get_absolute_url(self):
        # Canonical URL, used as the feed item's link.
        return "/blog/%s/" % self.pk
class Article(models.Model):
    # An article attached to an Entry; exercises FK traversal in feeds.
    title = models.CharField(max_length=200)
    entry = models.ForeignKey(Entry)
    def __unicode__(self):
        return self.title
|
xingyepei/edx-platform
|
refs/heads/release
|
lms/djangoapps/instructor/management/__init__.py
|
12133432
| |
helldorado/ansible
|
refs/heads/devel
|
lib/ansible/module_utils/powershell/__init__.py
|
12133432
| |
rogerhu/django
|
refs/heads/master
|
tests/file_storage/__init__.py
|
12133432
| |
wemanuel/smry
|
refs/heads/master
|
smry/server-auth/ls/google-cloud-sdk/platform/gsutil/third_party/boto/boto/swf/layer2.py
|
130
|
"""Object-oriented interface to SWF wrapping boto.swf.layer1.Layer1"""
import time
from functools import wraps
from boto.swf.layer1 import Layer1
from boto.swf.layer1_decisions import Layer1Decisions
# Module-level AWS credentials used by SWFBase when none are supplied.
DEFAULT_CREDENTIALS = {
    'aws_access_key_id': None,
    'aws_secret_access_key': None
}


def set_default_credentials(aws_access_key_id, aws_secret_access_key):
    """Set default credentials."""
    DEFAULT_CREDENTIALS['aws_access_key_id'] = aws_access_key_id
    DEFAULT_CREDENTIALS['aws_secret_access_key'] = aws_secret_access_key
class SWFBase(object):
    """Common attribute handling for all SWF wrapper objects."""

    name = None
    domain = None
    aws_access_key_id = None
    aws_secret_access_key = None
    region = None

    def __init__(self, **kwargs):
        """Populate attributes from module defaults, then keyword args,
        and open the underlying Layer1 connection."""
        # Module-level default credentials take effect first ...
        for credkey in ('aws_access_key_id', 'aws_secret_access_key'):
            if DEFAULT_CREDENTIALS.get(credkey):
                setattr(self, credkey, DEFAULT_CREDENTIALS[credkey])
        # ... and explicit keyword arguments override everything.
        for attr, value in kwargs.items():
            setattr(self, attr, value)
        self._swf = Layer1(self.aws_access_key_id,
                           self.aws_secret_access_key,
                           region=self.region)

    def __repr__(self):
        """Render as <ClassName 'name[-version]' at 0x...>."""
        label = str(self.name)
        if hasattr(self, 'version'):
            label += '-' + str(getattr(self, 'version'))
        return '<%s %r at 0x%x>' % (self.__class__.__name__, label, id(self))
class Domain(SWFBase):
    """Simple Workflow Domain."""
    description = None
    # Workflow-execution history retention period, in days.
    retention = 30
    @wraps(Layer1.describe_domain)
    def describe(self):
        """DescribeDomain."""
        return self._swf.describe_domain(self.name)
    @wraps(Layer1.deprecate_domain)
    def deprecate(self):
        """DeprecateDomain"""
        self._swf.deprecate_domain(self.name)
    @wraps(Layer1.register_domain)
    def register(self):
        """RegisterDomain."""
        # The API expects the retention period as a string.
        self._swf.register_domain(self.name, str(self.retention),
                                  self.description)
    @wraps(Layer1.list_activity_types)
    def activities(self, status='REGISTERED', **kwargs):
        """ListActivityTypes."""
        act_types = self._swf.list_activity_types(self.name, status, **kwargs)
        act_objects = []
        for act_args in act_types['typeInfos']:
            # Flatten the nested 'activityType' dict into the kwargs used
            # to construct an ActivityType wrapper object.
            act_ident = act_args['activityType']
            del act_args['activityType']
            act_args.update(act_ident)
            act_args.update({
                'aws_access_key_id': self.aws_access_key_id,
                'aws_secret_access_key': self.aws_secret_access_key,
                'domain': self.name,
                'region': self.region,
            })
            act_objects.append(ActivityType(**act_args))
        return act_objects
    @wraps(Layer1.list_workflow_types)
    def workflows(self, status='REGISTERED', **kwargs):
        """ListWorkflowTypes."""
        wf_types = self._swf.list_workflow_types(self.name, status, **kwargs)
        wf_objects = []
        for wf_args in wf_types['typeInfos']:
            # Same flattening as in activities(), for workflow types.
            wf_ident = wf_args['workflowType']
            del wf_args['workflowType']
            wf_args.update(wf_ident)
            wf_args.update({
                'aws_access_key_id': self.aws_access_key_id,
                'aws_secret_access_key': self.aws_secret_access_key,
                'domain': self.name,
                'region': self.region,
            })
            wf_objects.append(WorkflowType(**wf_args))
        return wf_objects
    def executions(self, closed=False, **kwargs):
        """List list open/closed executions.
        For a full list of available parameters refer to
        :py:func:`boto.swf.layer1.Layer1.list_closed_workflow_executions` and
        :py:func:`boto.swf.layer1.Layer1.list_open_workflow_executions`
        """
        if closed:
            executions = self._swf.list_closed_workflow_executions(self.name,
                                                                   **kwargs)
        else:
            if 'oldest_date' not in kwargs:
                # Last 24 hours.
                kwargs['oldest_date'] = time.time() - (3600 * 24)
            executions = self._swf.list_open_workflow_executions(self.name,
                                                                 **kwargs)
        exe_objects = []
        for exe_args in executions['executionInfos']:
            # Flatten both nested dicts into WorkflowExecution kwargs.
            for nested_key in ('execution', 'workflowType'):
                nested_dict = exe_args[nested_key]
                del exe_args[nested_key]
                exe_args.update(nested_dict)
            exe_args.update({
                'aws_access_key_id': self.aws_access_key_id,
                'aws_secret_access_key': self.aws_secret_access_key,
                'domain': self.name,
                'region': self.region,
            })
            exe_objects.append(WorkflowExecution(**exe_args))
        return exe_objects
    @wraps(Layer1.count_pending_activity_tasks)
    def count_pending_activity_tasks(self, task_list):
        """CountPendingActivityTasks."""
        return self._swf.count_pending_activity_tasks(self.name, task_list)
    @wraps(Layer1.count_pending_decision_tasks)
    def count_pending_decision_tasks(self, task_list):
        """CountPendingDecisionTasks."""
        return self._swf.count_pending_decision_tasks(self.name, task_list)
class Actor(SWFBase):
    # Task list this actor polls, and the token of the last task received.
    task_list = None
    last_tasktoken = None
    domain = None
    def run(self):
        """To be overloaded by subclasses."""
        raise NotImplementedError()
class ActivityWorker(Actor):
    """Base class for SimpleWorkflow activity workers."""

    @wraps(Layer1.respond_activity_task_canceled)
    def cancel(self, task_token=None, details=None):
        """RespondActivityTaskCanceled."""
        # Fall back on the token of the most recently polled task.
        token = task_token if task_token is not None else self.last_tasktoken
        return self._swf.respond_activity_task_canceled(token, details)

    @wraps(Layer1.respond_activity_task_completed)
    def complete(self, task_token=None, result=None):
        """RespondActivityTaskCompleted."""
        token = task_token if task_token is not None else self.last_tasktoken
        return self._swf.respond_activity_task_completed(token, result)

    @wraps(Layer1.respond_activity_task_failed)
    def fail(self, task_token=None, details=None, reason=None):
        """RespondActivityTaskFailed."""
        token = task_token if task_token is not None else self.last_tasktoken
        return self._swf.respond_activity_task_failed(token, details, reason)

    @wraps(Layer1.record_activity_task_heartbeat)
    def heartbeat(self, task_token=None, details=None):
        """RecordActivityTaskHeartbeat."""
        token = task_token if task_token is not None else self.last_tasktoken
        return self._swf.record_activity_task_heartbeat(token, details)

    @wraps(Layer1.poll_for_activity_task)
    def poll(self, **kwargs):
        """PollForActivityTask."""
        # An explicit 'task_list' kwarg overrides this worker's default.
        task_list = kwargs.pop('task_list', self.task_list)
        task = self._swf.poll_for_activity_task(self.domain, task_list,
                                                **kwargs)
        self.last_tasktoken = task.get('taskToken')
        return task
class Decider(Actor):
    """Base class for SimpleWorkflow deciders."""

    @wraps(Layer1.respond_decision_task_completed)
    def complete(self, task_token=None, decisions=None, **kwargs):
        """RespondDecisionTaskCompleted."""
        if isinstance(decisions, Layer1Decisions):
            # Accept a Layer1Decisions helper as well as a raw list.
            decisions = decisions._data
        token = task_token if task_token is not None else self.last_tasktoken
        return self._swf.respond_decision_task_completed(token, decisions,
                                                         **kwargs)

    @wraps(Layer1.poll_for_decision_task)
    def poll(self, **kwargs):
        """PollForDecisionTask."""
        # An explicit 'task_list' kwarg overrides this decider's default.
        task_list = kwargs.pop('task_list', self.task_list)
        decision_task = self._swf.poll_for_decision_task(self.domain,
                                                         task_list, **kwargs)
        self.last_tasktoken = decision_task.get('taskToken')
        return decision_task
class WorkflowType(SWFBase):
    """A versioned workflow type."""

    version = None
    task_list = None
    child_policy = 'TERMINATE'

    @wraps(Layer1.describe_workflow_type)
    def describe(self):
        """DescribeWorkflowType."""
        return self._swf.describe_workflow_type(self.domain, self.name,
                                                self.version)

    @wraps(Layer1.register_workflow_type)
    def register(self, **kwargs):
        """RegisterWorkflowType."""
        # Service-side defaults, overridable per call.
        args = dict(default_execution_start_to_close_timeout='3600',
                    default_task_start_to_close_timeout='300',
                    default_child_policy='TERMINATE')
        args.update(kwargs)
        self._swf.register_workflow_type(self.domain, self.name, self.version,
                                         **args)

    @wraps(Layer1.deprecate_workflow_type)
    def deprecate(self):
        """DeprecateWorkflowType."""
        self._swf.deprecate_workflow_type(self.domain, self.name, self.version)

    @wraps(Layer1.start_workflow_execution)
    def start(self, **kwargs):
        """StartWorkflowExecution."""
        # Generate a unique workflow id unless the caller supplied one.
        workflow_id = kwargs.pop(
            'workflow_id',
            '%s-%s-%i' % (self.name, self.version, time.time()))
        # Instance attributes provide the defaults for these two kwargs.
        for def_attr in ('task_list', 'child_policy'):
            kwargs[def_attr] = kwargs.get(def_attr, getattr(self, def_attr))
        run_id = self._swf.start_workflow_execution(
            self.domain, workflow_id, self.name, self.version,
            **kwargs)['runId']
        return WorkflowExecution(name=self.name, version=self.version,
                runId=run_id, domain=self.domain, workflowId=workflow_id,
                aws_access_key_id=self.aws_access_key_id,
                aws_secret_access_key=self.aws_secret_access_key)
class WorkflowExecution(SWFBase):
    """An instance of a workflow."""
    # Identifiers assigned when the execution is started.
    workflowId = None
    runId = None
    @wraps(Layer1.signal_workflow_execution)
    def signal(self, signame, **kwargs):
        """SignalWorkflowExecution."""
        self._swf.signal_workflow_execution(self.domain, signame,
                                            self.workflowId, **kwargs)
    @wraps(Layer1.terminate_workflow_execution)
    def terminate(self, **kwargs):
        """TerminateWorkflowExecution (p. 103)."""
        return self._swf.terminate_workflow_execution(self.domain,
                                                      self.workflowId, **kwargs)
    @wraps(Layer1.get_workflow_execution_history)
    def history(self, **kwargs):
        """GetWorkflowExecutionHistory."""
        # Returns only the 'events' list from the API response.
        return self._swf.get_workflow_execution_history(self.domain, self.runId,
                                                        self.workflowId, **kwargs)['events']
    @wraps(Layer1.describe_workflow_execution)
    def describe(self):
        """DescribeWorkflowExecution."""
        return self._swf.describe_workflow_execution(self.domain, self.runId,
                                                     self.workflowId)
    @wraps(Layer1.request_cancel_workflow_execution)
    def request_cancel(self):
        """RequestCancelWorkflowExecution."""
        return self._swf.request_cancel_workflow_execution(self.domain,
                                                           self.workflowId, self.runId)
class ActivityType(SWFBase):
    """A versioned activity type."""
    version = None
    @wraps(Layer1.deprecate_activity_type)
    def deprecate(self):
        """DeprecateActivityType."""
        return self._swf.deprecate_activity_type(self.domain, self.name,
                                                 self.version)
    @wraps(Layer1.describe_activity_type)
    def describe(self):
        """DescribeActivityType."""
        return self._swf.describe_activity_type(self.domain, self.name,
                                                self.version)
    @wraps(Layer1.register_activity_type)
    def register(self, **kwargs):
        """RegisterActivityType."""
        # Service-side default timeouts (seconds, as strings); any keyword
        # argument with the same name overrides its default.
        args = {
            'default_task_heartbeat_timeout': '600',
            'default_task_schedule_to_close_timeout': '3900',
            'default_task_schedule_to_start_timeout': '300',
            'default_task_start_to_close_timeout': '3600',
        }
        args.update(kwargs)
        self._swf.register_activity_type(self.domain, self.name, self.version,
                                         **args)
|
ephes/scikit-learn
|
refs/heads/master
|
doc/datasets/rcv1_fixture.py
|
238
|
"""Fixture module to skip the datasets loading when offline
The RCV1 data is rather large and some CI workers such as travis are
stateless hence will not cache the dataset as regular sklearn users would do.
The following will skip the execution of the rcv1.rst doctests
if the proper environment variable is configured (see the source code of
check_skip_network for more details).
"""
from sklearn.utils.testing import check_skip_network, SkipTest
import os
from sklearn.datasets import get_data_home
def setup_module():
    """Skip the rcv1.rst doctests when offline or when RCV1 is absent."""
    check_skip_network()
    # Only run the doctests if the dataset was already downloaded into
    # the scikit-learn data home.
    data_dir = os.path.join(get_data_home(), "RCV1")
    if not os.path.exists(data_dir):
        raise SkipTest("Download RCV1 dataset to run this test.")
|
dallingham/regenerate
|
refs/heads/master
|
regenerate/ui/columns.py
|
1
|
#
# Manage registers in a hardware design
#
# Copyright (C) 2008 Donald N. Allingham
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""
Provides TreeView column to simplify usage.
"""
import gtk
import pango
class ToggleColumn(gtk.TreeViewColumn):
    """
    A TreeViewColumn that has editable cells. The callback and listmodel
    columns are passed and used to create the CellRenderer.
    """
    def __init__(self, title, change_callback, source_column,
                 visible_callback = None):
        # A checkbox cell; 'toggled' fires change_callback with the model
        # column that backs the checkbox state.
        renderer = gtk.CellRendererToggle()
        renderer.set_property('activatable', True)
        if change_callback:
            renderer.connect('toggled', change_callback, source_column)
        gtk.TreeViewColumn.__init__(self, title, renderer,
                                    active=source_column)
        # Optional per-cell appearance/visibility hook.
        if visible_callback:
            self.set_cell_data_func(renderer, visible_callback)
class EditableColumn(gtk.TreeViewColumn):
    """
    A TreeViewColumn that has editable cells. The callback and listmodel
    columns are passed and used to create the CellRenderer.
    """
    def __init__(self, title, change_callback, source_column, monospace=False,
                 visible_callback = None):
        self.renderer = gtk.CellRendererText()
        # Cells become editable only when a change callback was supplied.
        if change_callback:
            self.renderer.set_property('editable', True)
            self.renderer.connect('edited', change_callback, source_column)
        self.renderer.set_property('ellipsize', pango.ELLIPSIZE_END)
        if monospace:
            self.renderer.set_property('family', "Monospace")
        gtk.TreeViewColumn.__init__(self, title, self.renderer,
                                    text=source_column)
        self.renderer.connect('editing-canceled', self.edit_canceled)
        self.renderer.connect('editing-started', self.edit_started)
        # Path/entry of the cell currently being edited (see edit_started).
        self.path = 0
        self.entry = None
        if visible_callback:
            self.set_cell_data_func(self.renderer, visible_callback)
    def edit_started(self, cell, entry, path):
        # Remember which cell is being edited so a cancel can still commit.
        self.path = path
        self.entry = entry
    def edit_canceled(self, obj):
        # Treat a canceled edit as a commit of the entry's current text.
        # NOTE(review): assumes edit_started always ran first (self.entry
        # is None otherwise) -- confirm with gtk's signal ordering.
        val = self.entry.get_text()
        self.renderer.emit('edited', self.path, val)
class ComboMapColumn(gtk.TreeViewColumn):
    """
    A TreeViewColumn that has a menu of options. The callback and listmodel
    columns are passed and used to create the CellRenderer.
    """
    def __init__(self, title, callback, data_list, source_column, dtype=int,
                 visible_callback = None):
        renderer = gtk.CellRendererCombo()
        # Build the drop-down's backing model from (label, value) rows.
        model = gtk.ListStore(str, dtype)
        for item in data_list:
            model.append(row=item)
        renderer.set_property("text-column", 0)
        renderer.set_property("model", model)
        # Menu-only combo: no free-text entry allowed.
        renderer.set_property("has-entry", False)
        renderer.set_property('editable', True)
        if callback:
            renderer.connect('changed', callback, source_column)
        gtk.TreeViewColumn.__init__(self, title, renderer, text=source_column)
        if visible_callback:
            self.set_cell_data_func(renderer, visible_callback)
class SwitchComboMapColumn(gtk.TreeViewColumn):
    """
    A TreeViewColumn that has a menu of options. The callback and listmodel
    columns are passed and used to create the CellRenderer.

    Three alternative ListStore models are built, one per data list;
    set_mode() switches which model backs the combo renderer.
    """
    def __init__(self, title, callback, data_list0, data_list1, data_list2,
                 source_column, dtype=int):
        self.renderer = gtk.CellRendererCombo()
        # Build one ListStore per option list (replaces three copies of
        # the same loop).
        self.model = []
        for data_list in (data_list0, data_list1, data_list2):
            store = gtk.ListStore(str, dtype)
            for item in data_list:
                store.append(row=item)
            self.model.append(store)
        self.renderer.set_property("text-column", 0)
        self.renderer.set_property("model", self.model[0])
        # Menu-only combo: no free-text entry allowed.
        self.renderer.set_property("has-entry", False)
        self.renderer.set_property('editable', True)
        self.renderer.connect('changed', callback, source_column)
        gtk.TreeViewColumn.__init__(self, title, self.renderer,
                                    text=source_column)
    def set_mode(self, i):
        """Select which of the three option models the combo displays."""
        self.renderer.set_property("model", self.model[i])
|
andriibekker/biddingsbase
|
refs/heads/master
|
django/contrib/admin/models.py
|
228
|
from django.db import models
from django.contrib.contenttypes.models import ContentType
from django.contrib.auth.models import User
from django.contrib.admin.util import quote
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import smart_unicode
from django.utils.safestring import mark_safe
ADDITION = 1
CHANGE = 2
DELETION = 3
class LogEntryManager(models.Manager):
    def log_action(self, user_id, content_type_id, object_id, object_repr, action_flag, change_message=''):
        # Create and save a LogEntry. Positional construction: the two
        # leading Nones are the auto pk and auto_now action_time fields;
        # object_repr is truncated to the field's max_length of 200.
        e = self.model(None, None, user_id, content_type_id, smart_unicode(object_id), object_repr[:200], action_flag, change_message)
        e.save()
class LogEntry(models.Model):
    # Audit record of an admin action (add/change/delete) on an object.
    action_time = models.DateTimeField(_('action time'), auto_now=True)
    user = models.ForeignKey(User)
    # Nullable so entries survive deletion of the referenced content type.
    content_type = models.ForeignKey(ContentType, blank=True, null=True)
    object_id = models.TextField(_('object id'), blank=True, null=True)
    object_repr = models.CharField(_('object repr'), max_length=200)
    # One of the module-level ADDITION / CHANGE / DELETION constants.
    action_flag = models.PositiveSmallIntegerField(_('action flag'))
    change_message = models.TextField(_('change message'), blank=True)
    objects = LogEntryManager()
    class Meta:
        verbose_name = _('log entry')
        verbose_name_plural = _('log entries')
        db_table = 'django_admin_log'
        # Newest entries first.
        ordering = ('-action_time',)
    def __repr__(self):
        return smart_unicode(self.action_time)
    def is_addition(self):
        return self.action_flag == ADDITION
    def is_change(self):
        return self.action_flag == CHANGE
    def is_deletion(self):
        return self.action_flag == DELETION
    def get_edited_object(self):
        "Returns the edited object represented by this log entry"
        return self.content_type.get_object_for_this_type(pk=self.object_id)
    def get_admin_url(self):
        """
        Returns the admin URL to edit the object represented by this log entry.
        This is relative to the Django admin index page.
        """
        if self.content_type and self.object_id:
            return mark_safe(u"%s/%s/%s/" % (self.content_type.app_label, self.content_type.model, quote(self.object_id)))
        return None
|
unho/pootle
|
refs/heads/master
|
pootle/apps/pootle_statistics/models.py
|
5
|
# -*- coding: utf-8 -*-
#
# Copyright (C) Pootle contributors.
#
# This file is a part of the Pootle project. It is distributed under the GPL3
# or later license. See the LICENSE file for a copy of the license and the
# AUTHORS file for copyright and authorship information.
from django.conf import settings
from django.contrib.auth import get_user_model
from django.db import models
from django.template.defaultfilters import truncatechars
from django.urls import reverse
from pootle.core.user import get_system_user
from pootle.core.utils import dateformat
from pootle.i18n.gettext import ugettext_lazy as _
from pootle_checks.constants import CHECK_NAMES
from pootle_store.constants import FUZZY, TRANSLATED
from pootle_store.fields import to_python
# Mute states stored as string values in the DB.
MUTED = "0"
# NOTE(review): not referenced in this chunk — presumably a similarity
# cutoff used elsewhere; confirm against callers.
SIMILARITY_THRESHOLD = 0.5
UNMUTED = "1"
#: These are the values for the 'type' field of Submission
class SubmissionTypes(object):
    """Constants stored in ``Submission.type``."""
    # None/0 = no information
    WEB = 1  # Interactive web editing
    UPLOAD = 4  # Uploading an offline file
    SYSTEM = 5  # Batch actions performed offline

    # Combined types that rely on other types (useful for querying)
    # Please use the `_TYPES` suffix to make it clear they're not core
    # types that are stored in the DB
    EDIT_TYPES = [WEB, SYSTEM, UPLOAD]
    CONTRIBUTION_TYPES = [WEB, SYSTEM]
#: Values for the 'field' field of Submission
class SubmissionFields(object):
    """Identifies which attribute of a Unit a Submission changed."""
    NONE = 0  # non-field submission
    SOURCE = 1  # pootle_store.models.Unit.source
    TARGET = 2  # pootle_store.models.Unit.target
    STATE = 3  # pootle_store.models.Unit.state
    COMMENT = 4  # pootle_store.models.Unit.translator_comment
    CHECK = 5

    # Fields whose submissions count as translation edits.
    TRANSLATION_FIELDS = [TARGET]

    # Human-readable (translatable) labels for each field value.
    NAMES_MAP = {
        NONE: "",
        SOURCE: _("Source"),
        TARGET: _("Target"),
        STATE: _("State"),
        COMMENT: _("Comment"),
        CHECK: (_("Check")),
    }
class TranslationActionTypes(object):
    """Display-only action categories derived in
    Submission.get_submission_info()."""
    TRANSLATED = 0
    EDITED = 1
    PRE_TRANSLATED = 2
    REMOVED = 3
    REVIEWED = 4
    NEEDS_WORK = 5
class SubmissionQuerySet(models.QuerySet):

    def _earliest_or_latest(self, field_name=None, direction="-"):
        """
        Overrides QuerySet._earliest_or_latest to add pk for secondary ordering
        """
        order_by = field_name or getattr(self.model._meta, 'get_latest_by')
        assert bool(order_by), "earliest() and latest() require either a "\
            "field_name parameter or 'get_latest_by' in the model"
        assert self.query.can_filter(), \
            "Cannot change a query once a slice has been taken."
        obj = self._clone()
        obj.query.set_limits(high=1)
        obj.query.clear_ordering(force_empty=True)
        # add pk as secondary ordering for Submissions
        # (creation_time is not unique, so ties would otherwise be broken
        # arbitrarily by the database)
        obj.query.add_ordering('%s%s' % (direction, order_by),
                               '%s%s' % (direction, "pk"))
        return obj.get()

    def earliest(self, field_name=None):
        # Ascending order: the first row is the earliest.
        return self._earliest_or_latest(field_name=field_name, direction="")

    def latest(self, field_name=None):
        # Descending order: the first row is the latest.
        return self._earliest_or_latest(field_name=field_name, direction="-")
class SubmissionManager(models.Manager):

    def get_queryset(self):
        # Use the custom queryset so earliest()/latest() get pk tiebreaking.
        return SubmissionQuerySet(self.model, using=self._db)

    def get_unit_comments(self):
        """Submissions that change a `Unit`'s comment.

        :return: Queryset of `Submissions`s that change a `Unit`'s comment.
        """
        return self.get_queryset().filter(field=SubmissionFields.COMMENT)

    def get_unit_edits(self):
        """`Submission`s that change a `Unit`'s `target`.

        :return: Queryset of `Submissions`s that change a `Unit`'s target.
        """
        return (
            self.get_queryset().exclude(new_value__isnull=True).filter(
                field__in=SubmissionFields.TRANSLATION_FIELDS,
                type__in=SubmissionTypes.EDIT_TYPES,
            )
        )

    def get_unit_state_changes(self):
        """Submissions that change a unit's STATE.

        :return: Queryset of `Submissions`s change a `Unit`'s `STATE`
            - ie FUZZY/TRANSLATED/UNTRANSLATED.
        """
        return self.get_queryset().filter(field=SubmissionFields.STATE)

    def get_unit_suggestion_reviews(self):
        """Submissions that review (reject/accept) `Unit` suggestions.

        :return: Queryset of `Submissions`s that `REJECT`/`ACCEPT`
            `Suggestion`s.
        """
        # reject_suggestion does not set field so we must exclude STATE reviews
        # and it seems there are submissions that use STATE and are in
        # REVIEW_TYPES
        return (self.get_queryset().exclude(
            field=SubmissionFields.STATE).filter(
                suggestion__isnull=False))
class Submission(models.Model):
    """One recorded change to a Unit (target/state/comment/check):
    who made it, when, and the old/new values."""

    class Meta(object):
        ordering = ["creation_time", "pk"]
        index_together = ["submitter", "creation_time"]
        get_latest_by = "creation_time"
        db_table = 'pootle_app_submission'
        base_manager_name = 'objects'

    objects = SubmissionManager()

    creation_time = models.DateTimeField(db_index=True)
    translation_project = models.ForeignKey(
        'pootle_translationproject.TranslationProject',
        db_index=True, on_delete=models.CASCADE)
    # Reassigned to the system user if the submitting account is deleted.
    submitter = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        null=True,
        db_index=False,
        on_delete=models.SET(get_system_user))
    suggestion = models.ForeignKey('pootle_store.Suggestion', blank=True,
                                   null=True, db_index=True,
                                   on_delete=models.CASCADE)
    unit = models.ForeignKey(
        'pootle_store.Unit',
        db_index=True,
        on_delete=models.CASCADE)
    quality_check = models.ForeignKey('pootle_store.QualityCheck', blank=True,
                                      null=True, db_index=True,
                                      on_delete=models.CASCADE)

    #: The field in the unit that changed (a SubmissionFields value)
    field = models.IntegerField(null=True, blank=True, db_index=True)
    # how did this submission come about? (one of the SubmissionTypes
    # constants above)
    type = models.IntegerField(null=True, blank=True, db_index=True)
    # old_value and new_value can store string representations of multistrings
    # in the case where they store values for a unit's source or target. In
    # such cases, the strings might not be usable as is. Use the two helper
    # functions in pootle_store.fields to convert to and from this format.
    old_value = models.TextField(blank=True, default=u"")
    new_value = models.TextField(blank=True, default=u"")

    # Unit revision when submission was created if applicable
    revision = models.IntegerField(
        null=True,
        db_index=True,
        blank=True)

    def __unicode__(self):
        return u"%s (%s)" % (self.creation_time.strftime("%Y-%m-%d %H:%M"),
                             unicode(self.submitter))

    def get_submission_info(self):
        """Returns a dictionary describing the submission.

        The dict includes the user (with link to profile and gravatar),
        a type and translation_action_type describing the action performed,
        and when it was performed.
        """
        result = {}

        if self.unit is not None:
            result.update({
                'unit_source': truncatechars(self.unit, 50),
                'unit_url': self.unit.get_translate_url(),
            })

            if self.quality_check is not None:
                check_name = self.quality_check.name
                result.update({
                    'check_name': check_name,
                    'check_display_name': CHECK_NAMES.get(check_name,
                                                          check_name),
                    'checks_url': reverse('pootle-checks-descriptions'),
                })

        # Sadly we may not have submitter information in all the
        # situations yet
        # TODO check if it is true
        if self.submitter:
            displayuser = self.submitter
        else:
            User = get_user_model()
            displayuser = User.objects.get_nobody_user()

        result.update({
            "profile_url": displayuser.get_absolute_url(),
            "email": displayuser.email_hash,
            "displayname": displayuser.display_name,
            "username": displayuser.username,
            "display_datetime": dateformat.format(self.creation_time),
            "type": self.type,
            "mtime": int(dateformat.format(self.creation_time, 'U')),
        })

        # TODO Fix bug 3011 and remove the following code related to
        # TranslationActionTypes.
        if self.type in SubmissionTypes.EDIT_TYPES:
            translation_action_type = None
            try:
                if self.field == SubmissionFields.TARGET:
                    if self.new_value != '':
                        # Note that we analyze current unit state:
                        # if this submission is not last unit state
                        # can be changed
                        if self.unit.state == TRANSLATED:
                            if self.old_value == '':
                                translation_action_type = \
                                    TranslationActionTypes.TRANSLATED
                            else:
                                translation_action_type = \
                                    TranslationActionTypes.EDITED
                        elif self.unit.state == FUZZY:
                            if self.old_value == '':
                                translation_action_type = \
                                    TranslationActionTypes.PRE_TRANSLATED
                            else:
                                translation_action_type = \
                                    TranslationActionTypes.EDITED
                    else:
                        # Target was emptied out.
                        translation_action_type = \
                            TranslationActionTypes.REMOVED
                elif self.field == SubmissionFields.STATE:
                    # Note that a submission where field is STATE
                    # should be created before a submission where
                    # field is TARGET
                    translation_action_type = {
                        TRANSLATED: TranslationActionTypes.REVIEWED,
                        FUZZY: TranslationActionTypes.NEEDS_WORK
                    }.get(int(to_python(self.new_value)), None)
            except AttributeError:
                # e.g. the related unit has gone away; return what we have.
                return result

            if translation_action_type is not None:
                result['translation_action_type'] = translation_action_type

        return result

    def save(self, *args, **kwargs):
        # Pin the unit's revision at submission time, when available.
        if self.unit:
            self.revision = self.unit.revision
        super(Submission, self).save(*args, **kwargs)
|
ondoheer/flask-admin
|
refs/heads/master
|
flask_admin/tests/sqla/test_inlineform.py
|
25
|
# -*- coding: utf-8 -*-
from nose.tools import eq_, ok_, raises
from wtforms import fields
from flask_admin.contrib.sqla import ModelView
from flask_admin.contrib.sqla.fields import InlineModelFormList
from flask_admin.contrib.sqla.validators import ItemsRequired
from . import setup
def test_inline_form():
    """End-to-end check of a one-to-many inline form (User -> UserInfo):
    form generation, create, edit (update/add/delete rows) and delete."""
    app, db, admin = setup()
    client = app.test_client()

    # Set up models and database
    class User(db.Model):
        __tablename__ = 'users'
        id = db.Column(db.Integer, primary_key=True)
        name = db.Column(db.String, unique=True)

        def __init__(self, name=None):
            self.name = name

    class UserInfo(db.Model):
        __tablename__ = 'user_info'
        id = db.Column(db.Integer, primary_key=True)
        key = db.Column(db.String, nullable=False)
        val = db.Column(db.String)
        user_id = db.Column(db.Integer, db.ForeignKey(User.id))
        user = db.relationship(User, backref=db.backref('info', cascade="all, delete-orphan", single_parent=True))

    db.create_all()

    # Set up Admin
    class UserModelView(ModelView):
        inline_models = (UserInfo,)

    view = UserModelView(User, db.session)
    admin.add_view(view)

    # Basic tests
    ok_(view._create_form_class is not None)
    ok_(view._edit_form_class is not None)
    eq_(view.endpoint, 'user')

    # Verify form
    eq_(view._create_form_class.name.field_class, fields.StringField)
    eq_(view._create_form_class.info.field_class, InlineModelFormList)

    rv = client.get('/admin/user/')
    eq_(rv.status_code, 200)

    rv = client.get('/admin/user/new/')
    eq_(rv.status_code, 200)

    # Create: no inline rows posted -> only the parent row is created
    rv = client.post('/admin/user/new/', data=dict(name=u'äõüxyz'))
    eq_(rv.status_code, 302)
    eq_(User.query.count(), 1)
    eq_(UserInfo.query.count(), 0)

    # Create: one inline row posted via the info-0-* field prefix
    rv = client.post('/admin/user/new/', data={'name': u'fbar', \
        'info-0-key': 'foo', 'info-0-val' : 'bar'})
    eq_(rv.status_code, 302)
    eq_(User.query.count(), 2)
    eq_(UserInfo.query.count(), 1)

    # Edit
    rv = client.get('/admin/user/edit/?id=2')
    eq_(rv.status_code, 200)

    # Edit - update the existing inline row in place
    rv = client.post('/admin/user/edit/?id=2', data={'name': u'barfoo', \
        'info-0-id': 1, 'info-0-key': u'xxx', 'info-0-val':u'yyy'})
    eq_(UserInfo.query.count(), 1)
    eq_(UserInfo.query.one().key, u'xxx')

    # Edit - add & delete in one request (del-info-0 marks row 0 for removal)
    rv = client.post('/admin/user/edit/?id=2', data={'name': u'barf', \
        'del-info-0': 'on', 'info-0-id': '1', 'info-0-key': 'yyy', 'info-0-val': 'xxx',
        'info-1-id': None, 'info-1-key': u'bar', 'info-1-val' : u'foo'})
    eq_(rv.status_code, 302)
    eq_(User.query.count(), 2)
    eq_(User.query.get(2).name, u'barf')
    eq_(UserInfo.query.count(), 1)
    eq_(UserInfo.query.one().key, u'bar')

    # Delete: the delete-orphan cascade removes child UserInfo rows too
    rv = client.post('/admin/user/delete/?id=2')
    eq_(rv.status_code, 302)
    eq_(User.query.count(), 1)
    rv = client.post('/admin/user/delete/?id=1')
    eq_(rv.status_code, 302)
    eq_(User.query.count(), 0)
    eq_(UserInfo.query.count(), 0)
def test_inline_form_required():
    """ItemsRequired() on an inline field rejects a parent posted with
    zero inline rows and accepts one with at least one row."""
    app, db, admin = setup()
    client = app.test_client()

    # Set up models and database
    class User(db.Model):
        __tablename__ = 'users'
        id = db.Column(db.Integer, primary_key=True)
        name = db.Column(db.String, unique=True)

        def __init__(self, name=None):
            self.name = name

    class UserEmail(db.Model):
        __tablename__ = 'user_info'
        id = db.Column(db.Integer, primary_key=True)
        email = db.Column(db.String, nullable=False, unique=True)
        verified_at = db.Column(db.DateTime)
        user_id = db.Column(db.Integer, db.ForeignKey(User.id))
        user = db.relationship(User, backref=db.backref('emails', cascade="all, delete-orphan", single_parent=True))

    db.create_all()

    # Set up Admin
    class UserModelView(ModelView):
        inline_models = (UserEmail,)
        form_args = {
            "emails": {"validators": [ItemsRequired()]}
        }

    view = UserModelView(User, db.session)
    admin.add_view(view)

    # Create without any email: validation fails, form re-rendered (200)
    rv = client.post('/admin/user/new/', data=dict(name=u'no-email'))
    eq_(rv.status_code, 200)
    eq_(User.query.count(), 0)

    # Create with one inline email row: succeeds (302 redirect)
    data = {
        'name': 'hasEmail',
        'emails-0-email': 'foo@bar.com',
    }
    rv = client.post('/admin/user/new/', data=data)
    eq_(rv.status_code, 302)
    eq_(User.query.count(), 1)
    eq_(UserEmail.query.count(), 1)
def test_inline_form_ajax_fk():
    """Inline-model options may carry form_ajax_refs; the generated AJAX
    loader is named '<model>-<field>' and registered on the parent view."""
    app, db, admin = setup()

    # Set up models and database
    class User(db.Model):
        __tablename__ = 'users'
        id = db.Column(db.Integer, primary_key=True)
        name = db.Column(db.String, unique=True)

        def __init__(self, name=None):
            self.name = name

    class Tag(db.Model):
        __tablename__ = 'tags'
        id = db.Column(db.Integer, primary_key=True)
        name = db.Column(db.String, unique=True)

    class UserInfo(db.Model):
        __tablename__ = 'user_info'
        id = db.Column(db.Integer, primary_key=True)
        key = db.Column(db.String, nullable=False)
        val = db.Column(db.String)
        user_id = db.Column(db.Integer, db.ForeignKey(User.id))
        user = db.relationship(User, backref=db.backref('info', cascade="all, delete-orphan", single_parent=True))
        tag_id = db.Column(db.Integer, db.ForeignKey(Tag.id))
        tag = db.relationship(Tag, backref='user_info')

    db.create_all()

    # Set up Admin: the second tuple element is the inline-model options dict
    class UserModelView(ModelView):
        opts = {
            'form_ajax_refs': {
                'tag': {
                    'fields': ['name']
                }
            }
        }

        inline_models = [(UserInfo, opts)]

    view = UserModelView(User, db.session)
    admin.add_view(view)

    # Dig the generated AJAX loader out of the unbound inline form field.
    form = view.create_form()
    user_info_form = form.info.unbound_field.args[0]
    loader = user_info_form.tag.args[0]
    eq_(loader.name, 'userinfo-tag')
    eq_(loader.model, Tag)

    ok_('userinfo-tag' in view._form_ajax_refs)
def test_inline_form_self():
    """A self-referential relationship (Tree.parent) can be used as an
    inline model without infinite recursion."""
    app, db, admin = setup()

    class Tree(db.Model):
        id = db.Column(db.Integer, primary_key=True)
        parent_id = db.Column(db.Integer, db.ForeignKey('tree.id'))
        parent = db.relationship('Tree', remote_side=[id], backref='children')

    db.create_all()

    class TreeView(ModelView):
        inline_models = (Tree,)

    view = TreeView(Tree, db.session)

    parent = Tree()
    child = Tree(parent=parent)
    form = view.edit_form(child)
    eq_(form.parent.data, parent)
|
stanford-ssi/balloons-VALBAL
|
refs/heads/master
|
utils/parser.py
|
1
|
#!/usr/bin/env python
# Stanford Student Space Initiative
# Balloons | VALBAL | May 2017
# Davy Ragland | dragland@stanford.edu
# File: parser.py
# --------------------------
# Server side script to parse incoming messages
# from RockBLOCK
#******************************* SETUP ***************************************
import math
#****************************** GLOBALS **************************************
#binary is the string we get from RockBLOCK
binary = "0000000000010100010010110101001110101011000101001000111101101100000110010001010000011000110010101111111111101001000011000000000000000000000000000000000000000000000000000000000000000001010011100000011101011100011011101000000101011100000000000000000011110010011000100111110000010110010001100100110001001110001011100110011111111000000011100000000001010000101000000011100000011101111101000000000000000000000000001111111100001001111000000000000100111100000000000000001000000001111011000000110000110011111000001110011000001011011011011011001100011111111000110100001010001010000001000001001100110000000000000000010011000100000010000010011001100000000000000000000000000000";
#Regex is what is pasted into "advanced parser"
regex = """lengthBits += compressVariable(data.TIME / 1000, 0, 3000000, 20, lengthBits); // time
lengthBits += compressVariable(data.LAT_GPS, -90, 90, 21, lengthBits); // latitude
lengthBits += compressVariable(data.LONG_GPS, -180, 180, 22, lengthBits); // longitude
lengthBits += compressVariable(data.ALTITUDE_BAROMETER, -2000, 40000, 16, lengthBits); // altitude_barometer
lengthBits += compressVariable(data.ALTITUDE_GPS, -2000, 40000, 14, lengthBits);
lengthBits += compressVariable(data.ASCENT_RATE, -10, 10, 11, lengthBits);
lengthBits += compressVariable(data.VALVE_INCENTIVE, -50, 10, 12, lengthBits);
lengthBits += compressVariable(data.BALLAST_INCENTIVE, -50, 10, 12, lengthBits);
lengthBits += compressVariable(data.VALVE_STATE, 0, 1, 1, lengthBits);
lengthBits += compressVariable(data.BALLAST_STATE, 0, 1, 1, lengthBits);
lengthBits += compressVariable(data.VALVE_QUEUE / 1000, 0, 1023, 10, lengthBits);
lengthBits += compressVariable(data.BALLAST_QUEUE / 1000, 0, 1023, 10, lengthBits);
lengthBits += compressVariable(data.VALVE_TIME_TOTAL / 1000, 0, 16383, 13, lengthBits); // valve time total
lengthBits += compressVariable(data.BALLAST_TIME_TOTAL / 1000, 0, 16383, 13, lengthBits); // ballast time total
lengthBits += compressVariable(data.VALVE_NUM_ACTIONS, 0, 63, 6, lengthBits);
lengthBits += compressVariable(data.BALLAST_NUM_ACTIONS, 0, 63, 6, lengthBits);
lengthBits += compressVariable(data.VALVE_NUM_ATTEMPTS, 0, 63, 6, lengthBits);
lengthBits += compressVariable(data.BALLAST_NUM_ATTEMPTS, 0, 63, 6, lengthBits);
lengthBits += compressVariable(data.BALLAST_NUM_OVERCURRENTS, 0, 63, 6, lengthBits);
lengthBits += compressVariable(data.CUTDOWN_STATE, 0, 1, 1, lengthBits);
lengthBits += compressVariable(data.TEMP_INT, -85, 65, 9, lengthBits);
lengthBits += compressVariable(data.JOULES_TOTAL, 0, 1572863, 18, lengthBits);
lengthBits += compressVariable(data.VOLTAGE_PRIMARY, 0, 6, 9, lengthBits);
lengthBits += compressVariable(data.VOLTAGE_5V, 4, 6, 7, lengthBits);
lengthBits += compressVariable(data.CURRENT_TOTAL_AVG, 0, 4095, 12, lengthBits);
lengthBits += compressVariable(data.CURRENT_TOTAL_MIN, 0, 4095, 12, lengthBits);
lengthBits += compressVariable(data.CURRENT_TOTAL_MAX, 0, 4095, 12, lengthBits);
lengthBits += compressVariable(data.CURRENT_RB_AVG, 0, 1023, 8, lengthBits);
lengthBits += compressVariable(data.CURRENT_RB_MAX, 0, 1023, 8, lengthBits);
lengthBits += compressVariable(data.CURRENT_MOTOR_VALVE_AVG, 0, 1023, 8, lengthBits);
lengthBits += compressVariable(data.CURRENT_MOTOR_VALVE_MAX, 0, 1023, 8, lengthBits);
lengthBits += compressVariable(data.CURRENT_MOTOR_BALLAST_AVG, 0, 1023, 8, lengthBits);
lengthBits += compressVariable(data.CURRENT_MOTOR_BALLAST_MAX, 0, 1023, 8, lengthBits);
lengthBits += compressVariable(data.CURRENT_PAYLOAD_AVG, 0, 1023, 8, lengthBits);
lengthBits += compressVariable(data.CURRENT_PAYLOAD_MAX, 0, 1023, 8, lengthBits);
lengthBits += compressVariable(data.TEMP_EXT, -100, 30, 8, lengthBits);
lengthBits += compressVariable(data.LOOP_TIME_MAX, 0, 10239, 10, lengthBits);
lengthBits += compressVariable(data.RB_SENT_COMMS, 0, 8191, 13, lengthBits);
lengthBits += compressVariable(data.RB_SLEEP_FAILS, 0, 8191, 13, lengthBits);
lengthBits += compressVariable(data.MANUAL_MODE, 0, 1, 1, lengthBits);
lengthBits += compressVariable(data.REPORT_MODE, 0, 2, 2, lengthBits);
lengthBits += compressVariable(data.SHOULD_REPORT, 0, 1, 1, lengthBits);
if (data.SHOULD_REPORT || data.REPORT_MODE != 0) {
lengthBits += compressVariable(data.POWER_STATE_LED, 0, 1, 1, lengthBits); // LED Power state
lengthBits += compressVariable(data.POWER_STATE_RB, 0, 1, 1, lengthBits); // RB Power State
lengthBits += compressVariable(data.POWER_STATE_GPS, 0, 1, 1, lengthBits); // GPS Power State
lengthBits += compressVariable(data.POWER_STATE_PAYLOAD, 0, 1, 1, lengthBits); // Payload Power State
lengthBits += compressVariable(data.NUM_SATS_GPS, 0, 15, 3, lengthBits);
lengthBits += compressVariable(data.INCENTIVE_NOISE, 0, 4, 8, lengthBits);
lengthBits += compressVariable(data.RE_ARM_CONSTANT, 0, 4, 8, lengthBits);
lengthBits += compressVariable(data.VALVE_ALT_LAST, -2000, 50000, 11, lengthBits); // Altitude During Last Venting Event
lengthBits += compressVariable(data.BALLAST_ALT_LAST, -2000, 50000, 11, lengthBits); // Altitude During Last Ballast Event
lengthBits += compressVariable(data.DEBUG_STATE, 0, 1, 1, lengthBits);
lengthBits += compressVariable(data.FORCE_VALVE, 0, 1, 1, lengthBits);
lengthBits += compressVariable(data.FORCE_BALLAST, 0, 1, 1, lengthBits);
lengthBits += compressVariable(data.BMP_1_ENABLE, 0, 1, 1, lengthBits);
lengthBits += compressVariable(data.BMP_2_ENABLE, 0, 1, 1, lengthBits);
lengthBits += compressVariable(data.BMP_3_ENABLE, 0, 1, 1, lengthBits);
lengthBits += compressVariable(data.BMP_4_ENABLE, 0, 1, 1, lengthBits);
lengthBits += compressVariable(log2(data.BMP_1_REJECTIONS + 1), 0, 6, 4, lengthBits); // sensor_1_logrejections
lengthBits += compressVariable(log2(data.BMP_2_REJECTIONS + 1), 0, 6, 4, lengthBits); // sensor_2_logrejections
lengthBits += compressVariable(log2(data.BMP_3_REJECTIONS + 1), 0, 6, 4, lengthBits); // sensor_3_logrejections
lengthBits += compressVariable(log2(data.BMP_4_REJECTIONS + 1), 0, 6, 4, lengthBits); // sensor_4_logrejections
lengthBits += compressVariable(data.BLACK_BODY_TEMP, -100, 30, 8, lengthBits);
}
if (data.SHOULD_REPORT || data.REPORT_MODE == 2) {
lengthBits += compressVariable(data.RB_INTERVAL / 1000, 0, 1023, 10, lengthBits); // RB communication interval
lengthBits += compressVariable(data.GPS_INTERVAL / 1000, 0, 1023, 10, lengthBits); // GPS communication interval
lengthBits += compressVariable(data.RB_SHOULD_SLEEP, 0, 1, 1, lengthBits);
lengthBits += compressVariable(data.PRESS_BASELINE, 0, 131071, 17, lengthBits); // Pressure baseline
lengthBits += compressVariable(data.INCENTIVE_THRESHOLD, 0, 4, 3, lengthBits);
lengthBits += compressVariable(data.BALLAST_ARM_ALT, -2000, 40000, 16, lengthBits); // Ballast Arming Altitude
lengthBits += compressVariable(data.BALLAST_REVERSE_INTERVAL / 1000, 0, 1599, 4, lengthBits); // Ballast reverse interval
lengthBits += compressVariable(data.VALVE_LEAK_INTERVAL / 1000, 0, 1599, 4, lengthBits);
lengthBits += compressVariable(data.BALLAST_STALL_CURRENT, 0, 511, 4, lengthBits);
lengthBits += compressVariable(data.VALVE_OPENING_DURATION / 1000, 0, 10, 5, lengthBits);
lengthBits += compressVariable(data.VALVE_CLOSING_DURATION / 1000, 0, 10, 5, lengthBits);
lengthBits += compressVariable(data.VALVE_SETPOINT, -2000, 50000, 11, lengthBits);
lengthBits += compressVariable(data.VALVE_VENT_DURATION / 1000, 0, 1023, 6, lengthBits);
lengthBits += compressVariable(data.VALVE_FORCE_DURATION / 1000, 0, 1023, 6, lengthBits);
lengthBits += compressVariable(data.VALVE_VELOCITY_CONSTANT, 0, 5, 8, lengthBits); // Valve Speed Constant
lengthBits += compressVariable(1.0 / data.VALVE_ALTITUDE_DIFF_CONSTANT, 0, 4095, 8, lengthBits); // Valve Altitude Difference Constant
lengthBits += compressVariable(1.0 / data.VALVE_LAST_ACTION_CONSTANT, 0, 4095, 8, lengthBits); // Valve last action constant
lengthBits += compressVariable(data.BALLAST_SETPOINT, -2000, 50000, 11, lengthBits);
lengthBits += compressVariable(data.BALLAST_DROP_DURATION / 1000, 0, 1023, 6, lengthBits);
lengthBits += compressVariable(data.BALLAST_FORCE_DURATION / 1000, 0, 1023, 6, lengthBits);
lengthBits += compressVariable(data.BALLAST_VELOCITY_CONSTANT, 0, 5, 8, lengthBits); // Ballast Speed Constant
lengthBits += compressVariable(1.0 / data.BALLAST_ALTITUDE_DIFF_CONSTANT,0, 4095, 8, lengthBits); // Ballast Altitude Difference Constant
lengthBits += compressVariable(1.0 / data.BALLAST_LAST_ACTION_CONSTANT, 0, 4095, 8, lengthBits); // Ballast last action constant"""
# Parallel lists populated by setupREGEX(), one entry per telemetry field.
names = []  # field name extracted from the C++ compressVariable() call
mins = []   # minimum encodable value
maxs = []   # maximum encodable value
bits = []   # number of bits used to encode the field
#****************************** HELPERS *************************************
def setupREGEX():
    """Populate the global names/mins/maxs/bits tables from the pasted
    compressVariable() spec in `regex`.

    The pasted spec also contains C++ control-flow lines such as
    ``if (data.SHOULD_REPORT ...) {`` and ``}``; the original code tried
    to parse those too and crashed with IndexError (no comma-separated
    arguments).  Such lines carry no field and are now skipped.
    """
    for line in regex.split('\n'):
        # Only compressVariable(...) calls describe a field.
        if "compressVariable" not in line:
            continue
        csv = line.split(",")
        # Defensive: skip anything without the expected 4+ arguments or
        # without a data.<NAME> first argument.
        if len(csv) < 4 or "data." not in csv[0]:
            continue
        names.append(csv[0].split("data.")[1])
        mins.append(int(csv[1].replace(" ", "")))
        maxs.append(int(csv[2].replace(" ", "")))
        bits.append(int(csv[3].replace(" ", "")))
def parseMessage(message):
    """Decode the packed bit string and print each telemetry field.

    Each field occupies bits[i] consecutive bits; the raw unsigned value
    is linearly mapped back onto the [mins[i], maxs[i]] range.
    """
    pos = 0
    for name, lo, hi, width in zip(names, mins, maxs, bits):
        chunk = message[pos:pos + width]
        pos += width
        raw = int(chunk, 2)
        step = (hi - lo) / (math.pow(2, width) - 1)
        print(name + ":" + str(lo + raw * step))
#******************************** MAIN ***************************************
# Build the field tables from the pasted spec, then decode the sample packet.
setupREGEX()
parseMessage(binary)
|
moreati/numpy
|
refs/heads/master
|
numpy/lib/tests/test_polynomial.py
|
116
|
from __future__ import division, absolute_import, print_function
'''
>>> p = np.poly1d([1.,2,3])
>>> p
poly1d([ 1., 2., 3.])
>>> print(p)
2
1 x + 2 x + 3
>>> q = np.poly1d([3.,2,1])
>>> q
poly1d([ 3., 2., 1.])
>>> print(q)
2
3 x + 2 x + 1
>>> print(np.poly1d([1.89999+2j, -3j, -5.12345678, 2+1j]))
3 2
(1.9 + 2j) x - 3j x - 5.123 x + (2 + 1j)
>>> print(np.poly1d([-3, -2, -1]))
2
-3 x - 2 x - 1
>>> p(0)
3.0
>>> p(5)
38.0
>>> q(0)
1.0
>>> q(5)
86.0
>>> p * q
poly1d([ 3., 8., 14., 8., 3.])
>>> p / q
(poly1d([ 0.33333333]), poly1d([ 1.33333333, 2.66666667]))
>>> p + q
poly1d([ 4., 4., 4.])
>>> p - q
poly1d([-2., 0., 2.])
>>> p ** 4
poly1d([ 1., 8., 36., 104., 214., 312., 324., 216., 81.])
>>> p(q)
poly1d([ 9., 12., 16., 8., 6.])
>>> q(p)
poly1d([ 3., 12., 32., 40., 34.])
>>> np.asarray(p)
array([ 1., 2., 3.])
>>> len(p)
2
>>> p[0], p[1], p[2], p[3]
(3.0, 2.0, 1.0, 0)
>>> p.integ()
poly1d([ 0.33333333, 1. , 3. , 0. ])
>>> p.integ(1)
poly1d([ 0.33333333, 1. , 3. , 0. ])
>>> p.integ(5)
poly1d([ 0.00039683, 0.00277778, 0.025 , 0. , 0. ,
0. , 0. , 0. ])
>>> p.deriv()
poly1d([ 2., 2.])
>>> p.deriv(2)
poly1d([ 2.])
>>> q = np.poly1d([1.,2,3], variable='y')
>>> print(q)
2
1 y + 2 y + 3
>>> q = np.poly1d([1.,2,3], variable='lambda')
>>> print(q)
2
1 lambda + 2 lambda + 3
>>> np.polydiv(np.poly1d([1,0,-1]), np.poly1d([1,1]))
(poly1d([ 1., -1.]), poly1d([ 0.]))
'''
import numpy as np
from numpy.testing import (
run_module_suite, TestCase, assert_, assert_equal, assert_array_equal,
assert_almost_equal, rundocs
)
class TestDocs(TestCase):
    """Tests for np.poly1d / np.polyfit / np.poly behaviour."""

    def test_doctests(self):
        # Execute the doctests embedded in this module's top-level string.
        return rundocs()

    def test_roots(self):
        assert_array_equal(np.roots([1, 0, 0]), [0, 0])

    def test_str_leading_zeros(self):
        # Coefficients zeroed after construction must be dropped from str().
        p = np.poly1d([4, 3, 2, 1])
        p[3] = 0
        assert_equal(str(p),
                     " 2\n"
                     "3 x + 2 x + 1")

        # All coefficients zeroed: printed as the constant 0.
        p = np.poly1d([1, 2])
        p[0] = 0
        p[1] = 0
        assert_equal(str(p), " \n0")

    def test_polyfit(self):
        c = np.array([3., 2., 1.])
        x = np.linspace(0, 2, 7)
        y = np.polyval(c, x)
        err = [1, -1, 1, -1, 1, -1, 1]
        weights = np.arange(8, 1, -1)**2/7.0

        # check 1D case
        m, cov = np.polyfit(x, y+err, 2, cov=True)
        est = [3.8571, 0.2857, 1.619]
        assert_almost_equal(est, m, decimal=4)
        val0 = [[2.9388, -5.8776, 1.6327],
                [-5.8776, 12.7347, -4.2449],
                [1.6327, -4.2449, 2.3220]]
        assert_almost_equal(val0, cov, decimal=4)

        # weighted fit
        m2, cov2 = np.polyfit(x, y+err, 2, w=weights, cov=True)
        assert_almost_equal([4.8927, -1.0177, 1.7768], m2, decimal=4)
        val = [[8.7929, -10.0103, 0.9756],
               [-10.0103, 13.6134, -1.8178],
               [0.9756, -1.8178, 0.6674]]
        assert_almost_equal(val, cov2, decimal=4)

        # check 2D (n,1) case
        y = y[:, np.newaxis]
        c = c[:, np.newaxis]
        assert_almost_equal(c, np.polyfit(x, y, 2))

        # check 2D (n,2) case
        yy = np.concatenate((y, y), axis=1)
        cc = np.concatenate((c, c), axis=1)
        assert_almost_equal(cc, np.polyfit(x, yy, 2))

        m, cov = np.polyfit(x, yy + np.array(err)[:, np.newaxis], 2, cov=True)
        assert_almost_equal(est, m[:, 0], decimal=4)
        assert_almost_equal(est, m[:, 1], decimal=4)
        assert_almost_equal(val0, cov[:, :, 0], decimal=4)
        assert_almost_equal(val0, cov[:, :, 1], decimal=4)

    def test_objects(self):
        # poly1d must keep working with object arrays (Decimal coefficients).
        from decimal import Decimal
        p = np.poly1d([Decimal('4.0'), Decimal('3.0'), Decimal('2.0')])
        p2 = p * Decimal('1.333333333333333')
        assert_(p2[1] == Decimal("3.9999999999999990"))
        p2 = p.deriv()
        assert_(p2[1] == Decimal('8.0'))
        p2 = p.integ()
        assert_(p2[3] == Decimal("1.333333333333333333333333333"))
        assert_(p2[2] == Decimal('1.5'))
        assert_(np.issubdtype(p2.coeffs.dtype, np.object_))
        p = np.poly([Decimal(1), Decimal(2)])
        assert_equal(np.poly([Decimal(1), Decimal(2)]),
                     [1, Decimal(-3), Decimal(2)])

    def test_complex(self):
        p = np.poly1d([3j, 2j, 1j])
        p2 = p.integ()
        assert_((p2.coeffs == [1j, 1j, 1j, 0]).all())
        p2 = p.deriv()
        assert_((p2.coeffs == [6j, 2j]).all())

    def test_integ_coeffs(self):
        # Integration constants k are applied from highest order downwards.
        p = np.poly1d([3, 2, 1])
        p2 = p.integ(3, k=[9, 7, 6])
        assert_(
            (p2.coeffs == [1/4./5., 1/3./4., 1/2./3., 9/1./2., 7, 6]).all())

    def test_zero_dims(self):
        # NOTE(review): this passes whether or not ValueError is raised; the
        # intent appears to be only that np.poly on an empty 2-D array does
        # not fail with anything other than ValueError.
        try:
            np.poly(np.zeros((0, 0)))
        except ValueError:
            pass

    def test_poly_int_overflow(self):
        """
        Regression test for gh-5096.
        """
        v = np.arange(1, 21)
        assert_almost_equal(np.poly(v), np.poly(np.diag(v)))
# Allow running this test module directly.
if __name__ == "__main__":
    run_module_suite()
|
iDTLabssl/hr
|
refs/heads/8.0
|
__unported__/hr_worked_days_hourly_rate/tests/__init__.py
|
28
|
# -*- coding:utf-8 -*-
##############################################################################
#
# Copyright (C) 2014 Odoo Canada. All Rights Reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import (
    test_hr_worked_days,
)

# Test modules picked up by the OpenERP test runner.
checks = [
    test_hr_worked_days,
]
|
nafex/pyload
|
refs/heads/stable
|
module/lib/beaker/util.py
|
45
|
"""Beaker utilities"""
try:
import thread as _thread
import threading as _threading
except ImportError:
import dummy_thread as _thread
import dummy_threading as _threading
from datetime import datetime, timedelta
import os
import string
import types
import weakref
import warnings
import sys
py3k = getattr(sys, 'py3kwarning', False) or sys.version_info >= (3, 0)
py24 = sys.version_info < (2,5)
jython = sys.platform.startswith('java')
if py3k or jython:
import pickle
else:
import cPickle as pickle
from beaker.converters import asbool
from threading import local as _tlocal
__all__ = ["ThreadLocal", "Registry", "WeakValuedRegistry", "SyncDict",
"encoded_path", "verify_directory"]
def verify_directory(dir):
    """Create *dir* (including parents) if it does not already exist.

    Tolerates races with other threads/processes creating the same
    directory: a failed ``makedirs`` is retried while the directory is
    still absent, and re-raised after several attempts.  The original
    bare ``except:`` also swallowed KeyboardInterrupt/SystemExit; only
    OSError (the filesystem failure) is caught now.
    """
    tries = 0
    while not os.access(dir, os.F_OK):
        try:
            tries += 1
            os.makedirs(dir)
        except OSError:
            # Likely created concurrently (EEXIST) or transient failure;
            # the loop condition re-checks existence before retrying.
            if tries > 5:
                raise
def deprecated(message):
    """Return a decorator that emits *message* as a DeprecationWarning
    each time the wrapped function is called, then delegates to it."""
    def decorate(func):
        def warn_and_call(*args, **kwargs):
            warnings.warn(message, DeprecationWarning, 2)
            return func(*args, **kwargs)
        # TODO: use decorator ? functools.wrapper ?
        warn_and_call.__name__ = func.__name__
        warn_and_call.__doc__ = "%s\n\n%s" % (message, func.__doc__)
        return warn_and_call
    return decorate
class ThreadLocal(object):
    """Stores a single value independently for each thread."""
    __slots__ = '_tlocal'

    def __init__(self):
        # threading.local gives per-thread attribute storage.
        self._tlocal = _tlocal()

    def put(self, value):
        """Set the calling thread's value."""
        self._tlocal.value = value

    def has(self):
        """Return True if the calling thread has stored a value."""
        try:
            self._tlocal.value
            return True
        except AttributeError:
            return False

    def get(self, default=None):
        """Return the calling thread's value, or *default* if unset."""
        try:
            return self._tlocal.value
        except AttributeError:
            return default

    def remove(self):
        """Discard the calling thread's value (AttributeError if unset)."""
        del self._tlocal.value
class SyncDict(object):
    """
    An efficient/threadsafe singleton map algorithm, a.k.a.
    "get a value based on this key, and create if not found or not
    valid" paradigm:

        exists && isvalid ? get : create

    Designed to work with weakref dictionaries to expect items
    to asynchronously disappear from the dictionary.

    Fixed for Python 3: internal membership tests now use ``in``
    instead of ``dict.has_key()``, which no longer exists (this module
    otherwise explicitly supports py3k).  The public ``has_key`` method
    is kept for backward compatibility.
    """
    def __init__(self):
        self.mutex = _thread.allocate_lock()
        self.dict = {}

    def get(self, key, createfunc, *args, **kwargs):
        """Return the value for *key*, creating it via createfunc(...) if
        missing.  The unlocked fast path tolerates the value vanishing
        between the membership test and the lookup (weakref dicts)."""
        try:
            if key in self.dict:
                return self.dict[key]
            else:
                return self.sync_get(key, createfunc, *args, **kwargs)
        except KeyError:
            return self.sync_get(key, createfunc, *args, **kwargs)

    def sync_get(self, key, createfunc, *args, **kwargs):
        # Locked slow path: re-check under the mutex, then create.
        self.mutex.acquire()
        try:
            try:
                if key in self.dict:
                    return self.dict[key]
                else:
                    return self._create(key, createfunc, *args, **kwargs)
            except KeyError:
                return self._create(key, createfunc, *args, **kwargs)
        finally:
            self.mutex.release()

    def _create(self, key, createfunc, *args, **kwargs):
        self[key] = obj = createfunc(*args, **kwargs)
        return obj

    def has_key(self, key):
        # Kept for API compatibility; prefer ``key in syncdict``.
        return key in self.dict

    def __contains__(self, key):
        return self.dict.__contains__(key)

    def __getitem__(self, key):
        return self.dict.__getitem__(key)

    def __setitem__(self, key, value):
        self.dict.__setitem__(key, value)

    def __delitem__(self, key):
        return self.dict.__delitem__(key)

    def clear(self):
        self.dict.clear()
class WeakValuedRegistry(SyncDict):
    # A SyncDict whose values are held weakly: entries disappear once no
    # strong reference to the value remains elsewhere.  Uses a reentrant
    # lock because weakref callbacks can fire while the lock is held.
    def __init__(self):
        self.mutex = _threading.RLock()
        self.dict = weakref.WeakValueDictionary()
# Cached handle to beaker.crypto's sha1; imported lazily on first use.
sha1 = None


def encoded_path(root, identifiers, extension=".enc", depth=3,
                 digest_filenames=True):
    """Generate a unique file-accessible path from the given list of
    identifiers starting at the given root directory."""
    global sha1
    ident = "_".join(identifiers)
    if sha1 is None:
        from beaker.crypto import sha1
    if digest_filenames:
        # Hash the identifier so arbitrary strings become safe filenames.
        if py3k:
            ident = sha1(ident.encode('utf-8')).hexdigest()
        else:
            ident = sha1(ident).hexdigest()
    ident = os.path.basename(ident)
    # Fan the file out across nested prefix directories: "a", "ab", ...
    tokens = [ident[:size] for size in range(1, depth)]
    dir = os.path.join(root, *tokens)
    verify_directory(dir)
    return os.path.join(dir, ident + extension)
def verify_options(opt, types, error):
    """Coerce ``opt`` to one of ``types``, raising ``Exception(error)`` when
    no coercion succeeds.

    ``types`` may be a single type or a tuple of types.  ``list``/``tuple``
    coerce a comma-separated string by splitting and stripping; ``bool``
    coerces through :func:`beaker.converters.asbool`.
    """
    if not isinstance(opt, types):
        if not isinstance(types, tuple):
            types = (types,)
        coerced = False
        for typ in types:
            try:
                if typ in (list, tuple):
                    opt = [x.strip() for x in opt.split(',')]
                else:
                    if typ == bool:
                        typ = asbool
                    opt = typ(opt)
                coerced = True
            except Exception:
                # This coercion failed; fall through to the next accepted
                # type.  ``except Exception`` (not bare ``except:``) keeps
                # KeyboardInterrupt/SystemExit propagating.
                pass
            if coerced:
                break
        if not coerced:
            raise Exception(error)
    elif isinstance(opt, str) and not opt.strip():
        raise Exception("Empty strings are invalid for: %s" % error)
    return opt
def verify_rules(params, ruleset):
    """Coerce/validate every parameter present in ``params`` according to
    its ``(key, types, message)`` rule in ``ruleset``; return ``params``."""
    for name, accepted_types, message in ruleset:
        if name in params:
            params[name] = verify_options(params[name], accepted_types, message)
    return params
def coerce_session_params(params):
    """Coerce and validate session configuration parameters in-place."""
    # ``type(None)`` instead of ``types.NoneType``: the latter does not
    # exist on Python 3 (before 3.10), and this module supports py3k.
    NoneType = type(None)
    rules = [
        ('data_dir', (str, NoneType), "data_dir must be a string "
         "referring to a directory."),
        ('lock_dir', (str, NoneType), "lock_dir must be a string referring to a "
         "directory."),
        ('type', (str, NoneType), "Session type must be a string."),
        ('cookie_expires', (bool, datetime, timedelta), "Cookie expires was "
         "not a boolean, datetime, or timedelta instance."),
        ('cookie_domain', (str, NoneType), "Cookie domain must be a "
         "string."),
        ('id', (str,), "Session id must be a string."),
        ('key', (str,), "Session key must be a string."),
        ('secret', (str, NoneType), "Session secret must be a string."),
        ('validate_key', (str, NoneType), "Session encrypt_key must be "
         "a string."),
        ('encrypt_key', (str, NoneType), "Session validate_key must be "
         "a string."),
        ('secure', (bool, NoneType), "Session secure must be a boolean."),
        ('timeout', (int, NoneType), "Session timeout must be an "
         "integer."),
        ('auto', (bool, NoneType), "Session is created if accessed."),
    ]
    return verify_rules(params, rules)
def coerce_cache_params(params):
    """Coerce and validate cache configuration parameters in-place."""
    # ``type(None)`` instead of ``types.NoneType`` for Python 3 support.
    NoneType = type(None)
    rules = [
        ('data_dir', (str, NoneType), "data_dir must be a string "
         "referring to a directory."),
        ('lock_dir', (str, NoneType), "lock_dir must be a string referring to a "
         "directory."),
        ('type', (str,), "Cache type must be a string."),
        ('enabled', (bool, NoneType), "enabled must be true/false "
         "if present."),
        ('expire', (int, NoneType), "expire must be an integer representing "
         "how many seconds the cache is valid for"),
        ('regions', (list, tuple, NoneType), "Regions must be a "
         "comma seperated list of valid regions")
    ]
    return verify_rules(params, rules)
def parse_cache_config_options(config, include_defaults=True):
    """Parse configuration options and validate for use with the
    CacheManager"""
    # Load default cache options
    if include_defaults:
        options = dict(type='memory', data_dir=None, expire=None,
                       log_file=None)
    else:
        options = {}
    # ``items()`` rather than the Python 2-only ``iteritems()``, in line
    # with the py3k support elsewhere in this module.
    for key, val in config.items():
        if key.startswith('beaker.cache.'):
            options[key[13:]] = val
        if key.startswith('cache.'):
            options[key[6:]] = val
    coerce_cache_params(options)

    # Set cache to enabled if not turned off
    if 'enabled' not in options:
        options['enabled'] = True

    # Configure region dict if regions are available
    regions = options.pop('regions', None)
    if regions:
        region_configs = {}
        for region in regions:
            # Setup the default cache options
            region_options = dict(data_dir=options.get('data_dir'),
                                  lock_dir=options.get('lock_dir'),
                                  type=options.get('type'),
                                  enabled=options['enabled'],
                                  expire=options.get('expire'))
            region_len = len(region) + 1
            # Snapshot the keys: the dict is mutated (pop) inside the loop,
            # which raises RuntimeError under direct iteration on Python 3.
            for key in list(options.keys()):
                if key.startswith('%s.' % region):
                    region_options[key[region_len:]] = options.pop(key)
            coerce_cache_params(region_options)
            region_configs[region] = region_options
        options['cache_regions'] = region_configs
    return options
def func_namespace(func):
    """Generate a dotted namespace string unique to ``func``.

    Python 2 bound/unbound methods are keyed by their defining class;
    plain functions are keyed by their module.
    """
    if hasattr(func, 'im_func'):
        # Python 2 method object: use the class for the namespace.
        target = func.im_class
    else:
        target = func
    return '%s.%s' % (target.__module__, target.__name__)
|
nsnam/ns-3-dev-git
|
refs/heads/master
|
src/uan/bindings/callbacks_list.py
|
7
|
# Signatures used to generate ns-3 Callback<...> Python bindings for the
# uan module.  Each row is [return type, arg1 .. arg9]; unused argument
# slots are padded with 'ns3::empty'.
callback_classes = [
    ['ns3::ObjectBase *', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
    ['void', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
    ['bool', 'ns3::Ptr<ns3::NetDevice>', 'ns3::Ptr<const ns3::Packet>', 'unsigned short', 'const ns3::Address &', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
    ['void', 'ns3::Ptr<ns3::NetDevice>', 'ns3::Ptr<const ns3::Packet>', 'unsigned short', 'const ns3::Address &', 'const ns3::Address &', 'ns3::NetDevice::PacketType', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
    ['void', 'ns3::Ptr<ns3::NetDevice>', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
    ['void', 'int', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
    ['void', 'double', 'double', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
    ['void', 'ns3::Ptr<const ns3::MobilityModel>', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
    ['void', 'ns3::Ptr<const ns3::Packet>', 'ns3::Mac8Address', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
    ['void', 'ns3::Ptr<ns3::Packet>', 'unsigned short', 'const ns3::Mac8Address &', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
    ['void', 'ns3::Ptr<ns3::Packet>', 'double', 'ns3::UanTxMode', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
    ['void', 'ns3::Ptr<ns3::Packet>', 'double', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
    ['void', 'ns3::Ptr<const ns3::Packet>', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
    ['void', 'ns3::Ptr<const ns3::Packet>', 'ns3::UanTxMode', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
    ['void', 'ns3::Ptr<const ns3::Packet>', 'unsigned short', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
    ['void', 'ns3::Time', 'ns3::Time', 'unsigned int', 'unsigned int', 'double', 'unsigned int', 'double', 'ns3::empty', 'ns3::empty'],
    ['void', 'ns3::Ptr<const ns3::Packet>', 'unsigned int', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
    ['void', 'ns3::Ptr<const ns3::Packet>', 'double', 'ns3::UanTxMode', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
]
|
brian-l/django-1.4.10
|
refs/heads/master
|
tests/regressiontests/app_loading/not_installed/__init__.py
|
12133432
| |
mewtaylor/django
|
refs/heads/master
|
tests/model_formsets/__init__.py
|
12133432
| |
mohitsethi/solum
|
refs/heads/master
|
solum/tests/api/__init__.py
|
12133432
| |
tajkhan/pluto
|
refs/heads/master
|
annotations/module/loop/submodule/boundreplace/__init__.py
|
12133432
| |
Tatsh-ansible/ansible
|
refs/heads/devel
|
test/units/parsing/utils/__init__.py
|
12133432
| |
MwanzanFelipe/rockletonfortune
|
refs/heads/master
|
lib/django/contrib/sites/managers.py
|
472
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf import settings
from django.core import checks
from django.core.exceptions import FieldDoesNotExist
from django.db import models
class CurrentSiteManager(models.Manager):
    "Use this to limit objects to those associated with the current site."
    use_in_migrations = True

    def __init__(self, field_name=None):
        super(CurrentSiteManager, self).__init__()
        self.__field_name = field_name

    def check(self, **kwargs):
        # Run the standard manager checks, then our site-field validation.
        errors = super(CurrentSiteManager, self).check(**kwargs)
        errors.extend(self._check_field_name())
        return errors

    def _check_field_name(self):
        # Validate that the configured field exists and is relational.
        name = self._get_field_name()
        try:
            field = self.model._meta.get_field(name)
        except FieldDoesNotExist:
            missing = checks.Error(
                "CurrentSiteManager could not find a field named '%s'." % name,
                hint=None,
                obj=self,
                id='sites.E001',
            )
            return [missing]
        if isinstance(field, (models.ForeignKey, models.ManyToManyField)):
            return []
        wrong_type = checks.Error(
            "CurrentSiteManager cannot use '%s.%s' as it is not a ForeignKey or ManyToManyField." % (
                self.model._meta.object_name, name
            ),
            hint=None,
            obj=self,
            id='sites.E002',
        )
        return [wrong_type]

    def _get_field_name(self):
        """ Return self.__field_name or 'site' or 'sites'. """
        if self.__field_name:
            return self.__field_name
        # Auto-detect: prefer a 'site' field, fall back to 'sites'.
        try:
            self.model._meta.get_field('site')
        except FieldDoesNotExist:
            self.__field_name = 'sites'
        else:
            self.__field_name = 'site'
        return self.__field_name

    def get_queryset(self):
        lookup = {self._get_field_name() + '__id': settings.SITE_ID}
        return super(CurrentSiteManager, self).get_queryset().filter(**lookup)
|
houchj/selenium
|
refs/heads/master
|
py/selenium/webdriver/firefox/firefox_profile.py
|
60
|
# Licensed to the Software Freedom Conservancy (SFC) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import with_statement
import base64
import copy
import json
import os
import re
import shutil
import sys
import tempfile
import zipfile
try:
from cStringIO import StringIO as BytesIO
except ImportError:
from io import BytesIO
from xml.dom import minidom
from selenium.webdriver.common.proxy import ProxyType
from selenium.common.exceptions import WebDriverException
WEBDRIVER_EXT = "webdriver.xpi"
WEBDRIVER_PREFERENCES = "webdriver_prefs.json"
EXTENSION_NAME = "fxdriver@googlecode.com"


class AddonFormatError(Exception):
    """Exception for not well-formed add-on manifest files"""


class FirefoxProfile(object):
    ANONYMOUS_PROFILE_NAME = "WEBDRIVER_ANONYMOUS_PROFILE"
    # Lazily loaded from webdriver_prefs.json on first instantiation and
    # shared by all FirefoxProfile instances.
    DEFAULT_PREFERENCES = None

    def __init__(self, profile_directory=None):
        """
        Initialises a new instance of a Firefox Profile

        :args:
         - profile_directory: Directory of profile that you want to use.
           This defaults to None and will create a new
           directory when object is created.
        """
        if not FirefoxProfile.DEFAULT_PREFERENCES:
            with open(os.path.join(os.path.dirname(__file__),
                                   WEBDRIVER_PREFERENCES)) as default_prefs:
                FirefoxProfile.DEFAULT_PREFERENCES = json.load(default_prefs)

        self.default_preferences = copy.deepcopy(
            FirefoxProfile.DEFAULT_PREFERENCES['mutable'])
        self.native_events_enabled = True
        self.profile_dir = profile_directory
        self.tempfolder = None
        if self.profile_dir is None:
            self.profile_dir = self._create_tempfolder()
        else:
            # Work on a copy so the user's profile directory is never
            # mutated; Firefox lock files must not travel with the copy.
            self.tempfolder = tempfile.mkdtemp()
            newprof = os.path.join(self.tempfolder, "webdriver-py-profilecopy")
            shutil.copytree(self.profile_dir, newprof,
                            ignore=shutil.ignore_patterns("parent.lock", "lock", ".parentlock"))
            self.profile_dir = newprof
            self._read_existing_userjs(os.path.join(self.profile_dir, "user.js"))
        self.extensionsDir = os.path.join(self.profile_dir, "extensions")
        self.userPrefs = os.path.join(self.profile_dir, "user.js")

    # Public Methods
    def set_preference(self, key, value):
        """
        sets the preference that we want in the profile.
        """
        self.default_preferences[key] = value

    def add_extension(self, extension=WEBDRIVER_EXT):
        self._install_extension(extension)

    def update_preferences(self):
        # Frozen preferences always override user-supplied values.
        for key, value in FirefoxProfile.DEFAULT_PREFERENCES['frozen'].items():
            self.default_preferences[key] = value
        self._write_user_prefs(self.default_preferences)

    # Properties
    @property
    def path(self):
        """
        Gets the profile directory that is currently being used
        """
        return self.profile_dir

    @property
    def port(self):
        """
        Gets the port that WebDriver is working on
        """
        return self._port

    @port.setter
    def port(self, port):
        """
        Sets the port that WebDriver will be running on
        """
        # Accept anything that converts cleanly to an integer (e.g. a
        # numeric string).  The previous isinstance() pre-check rejected
        # such values before the int() conversion below could ever run,
        # making the try/except dead code.
        try:
            port = int(port)
        except (ValueError, TypeError):
            raise WebDriverException("Port needs to be an integer")
        if port < 1 or port > 65535:
            raise WebDriverException("Port number must be in the range 1..65535")
        self._port = port
        self.set_preference("webdriver_firefox_port", self._port)

    @property
    def accept_untrusted_certs(self):
        return self.default_preferences["webdriver_accept_untrusted_certs"]

    @accept_untrusted_certs.setter
    def accept_untrusted_certs(self, value):
        if value not in [True, False]:
            raise WebDriverException("Please pass in a Boolean to this call")
        self.set_preference("webdriver_accept_untrusted_certs", value)

    @property
    def assume_untrusted_cert_issuer(self):
        return self.default_preferences["webdriver_assume_untrusted_issuer"]

    @assume_untrusted_cert_issuer.setter
    def assume_untrusted_cert_issuer(self, value):
        if value not in [True, False]:
            raise WebDriverException("Please pass in a Boolean to this call")
        self.set_preference("webdriver_assume_untrusted_issuer", value)

    @property
    def native_events_enabled(self):
        return self.default_preferences['webdriver_enable_native_events']

    @native_events_enabled.setter
    def native_events_enabled(self, value):
        if value not in [True, False]:
            raise WebDriverException("Please pass in a Boolean to this call")
        self.set_preference("webdriver_enable_native_events", value)

    @property
    def encoded(self):
        """
        A zipped, base64 encoded string of profile directory
        for use with remote WebDriver JSON wire protocol
        """
        fp = BytesIO()
        zipped = zipfile.ZipFile(fp, 'w', zipfile.ZIP_DEFLATED)
        path_root = len(self.path) + 1  # account for trailing slash
        for base, dirs, files in os.walk(self.path):
            for fyle in files:
                filename = os.path.join(base, fyle)
                zipped.write(filename, filename[path_root:])
        zipped.close()
        return base64.b64encode(fp.getvalue()).decode('UTF-8')

    def set_proxy(self, proxy):
        import warnings
        warnings.warn(
            "This method has been deprecated. Please pass in the proxy object to the Driver Object",
            DeprecationWarning)
        if proxy is None:
            raise ValueError("proxy can not be None")
        if proxy.proxy_type is ProxyType.UNSPECIFIED:
            return
        self.set_preference("network.proxy.type", proxy.proxy_type['ff_value'])
        if proxy.proxy_type is ProxyType.MANUAL:
            self.set_preference("network.proxy.no_proxies_on", proxy.no_proxy)
            self._set_manual_proxy_preference("ftp", proxy.ftp_proxy)
            self._set_manual_proxy_preference("http", proxy.http_proxy)
            self._set_manual_proxy_preference("ssl", proxy.ssl_proxy)
            self._set_manual_proxy_preference("socks", proxy.socks_proxy)
        elif proxy.proxy_type is ProxyType.PAC:
            self.set_preference("network.proxy.autoconfig_url", proxy.proxy_autoconfig_url)

    def _set_manual_proxy_preference(self, key, setting):
        # Equality, not identity: ``setting is ''`` relied on CPython's
        # interning of the empty string and raises a SyntaxWarning on
        # modern interpreters.
        if setting is None or setting == '':
            return
        host_details = setting.split(":")
        self.set_preference("network.proxy.%s" % key, host_details[0])
        if len(host_details) > 1:
            self.set_preference("network.proxy.%s_port" % key, int(host_details[1]))

    def _create_tempfolder(self):
        """
        Creates a temp folder to store User.js and the extension
        """
        return tempfile.mkdtemp()

    def _write_user_prefs(self, user_prefs):
        """
        writes the current user prefs dictionary to disk
        """
        with open(self.userPrefs, "w") as f:
            for key, value in user_prefs.items():
                f.write('user_pref("%s", %s);\n' % (key, json.dumps(value)))

    def _read_existing_userjs(self, userjs):
        """
        Merges preferences from an existing user.js file into the default
        preference set.  Lines that cannot be parsed are skipped with a
        warning; a missing file is silently ignored.
        """
        import warnings

        PREF_RE = re.compile(r'user_pref\("(.*)",\s(.*)\)')
        try:
            with open(userjs) as f:
                for usr in f:
                    matches = re.search(PREF_RE, usr)
                    if matches is None:
                        # Not a user_pref(...) line -- nothing to merge.
                        continue
                    try:
                        self.default_preferences[matches.group(1)] = json.loads(matches.group(2))
                    except Exception:
                        # ``except Exception`` (not bare ``except:``) so
                        # KeyboardInterrupt/SystemExit still propagate.
                        warnings.warn("(skipping) failed to json.loads existing preference: " +
                                      matches.group(1) + matches.group(2))
        except Exception:
            # The profile given hasn't had any changes made, i.e no users.js
            pass

    def _install_extension(self, addon, unpack=True):
        """
        Installs addon from a filepath, url
        or directory of addons in the profile.
        - path: url, path to .xpi, or directory of addons
        - unpack: whether to unpack unless specified otherwise in the install.rdf
        """
        if addon == WEBDRIVER_EXT:
            addon = os.path.join(os.path.dirname(__file__), WEBDRIVER_EXT)

        tmpdir = None
        xpifile = None
        if addon.endswith('.xpi'):
            # Unzip the XPI into a temp directory so its manifest can be
            # inspected.  close() the archive to avoid a handle leak.
            tmpdir = tempfile.mkdtemp(suffix='.' + os.path.split(addon)[-1])
            compressed_file = zipfile.ZipFile(addon, 'r')
            try:
                for name in compressed_file.namelist():
                    if name.endswith('/'):
                        if not os.path.isdir(os.path.join(tmpdir, name)):
                            os.makedirs(os.path.join(tmpdir, name))
                    else:
                        if not os.path.isdir(os.path.dirname(os.path.join(tmpdir, name))):
                            os.makedirs(os.path.dirname(os.path.join(tmpdir, name)))
                        data = compressed_file.read(name)
                        with open(os.path.join(tmpdir, name), 'wb') as f:
                            f.write(data)
            finally:
                compressed_file.close()
            xpifile = addon
            addon = tmpdir

        # determine the addon id
        addon_details = self._addon_details(addon)
        addon_id = addon_details.get('id')
        # NOTE(review): assert is stripped under ``python -O``; kept for
        # backward compatibility with callers expecting AssertionError.
        assert addon_id, 'The addon id could not be found: %s' % addon

        # copy the addon to the profile
        extensions_path = os.path.join(self.profile_dir, 'extensions')
        addon_path = os.path.join(extensions_path, addon_id)
        if not unpack and not addon_details['unpack'] and xpifile:
            if not os.path.exists(extensions_path):
                os.makedirs(extensions_path)
            shutil.copy(xpifile, addon_path + '.xpi')
        else:
            if not os.path.exists(addon_path):
                shutil.copytree(addon, addon_path, symlinks=True)

        # remove the temporary directory, if any
        if tmpdir:
            shutil.rmtree(tmpdir)

    def _addon_details(self, addon_path):
        """
        Returns a dictionary of details about the addon.

        :param addon_path: path to the add-on directory or XPI

        Returns::

            {'id': u'rainbow@colors.org', # id of the addon
             'version': u'1.4', # version of the addon
             'name': u'Rainbow', # name of the addon
             'unpack': False } # whether to unpack the addon
        """
        details = {
            'id': None,
            'unpack': False,
            'name': None,
            'version': None
        }

        def get_namespace_id(doc, url):
            # Find the prefix bound to namespace ``url`` on the root
            # element; empty string when it is the default namespace.
            attributes = doc.documentElement.attributes
            namespace = ""
            for i in range(attributes.length):
                if attributes.item(i).value == url:
                    if ":" in attributes.item(i).name:
                        # If the namespace is not the default one remove 'xlmns:'
                        namespace = attributes.item(i).name.split(':')[1] + ":"
                    break
            return namespace

        def get_text(element):
            """Retrieve the text value of a given node"""
            rc = []
            for node in element.childNodes:
                if node.nodeType == node.TEXT_NODE:
                    rc.append(node.data)
            return ''.join(rc).strip()

        if not os.path.exists(addon_path):
            raise IOError('Add-on path does not exist: %s' % addon_path)

        try:
            if zipfile.is_zipfile(addon_path):
                # Bug 944361 - We cannot use 'with' together with zipFile because
                # it will cause an exception thrown in Python 2.6.
                try:
                    compressed_file = zipfile.ZipFile(addon_path, 'r')
                    manifest = compressed_file.read('install.rdf')
                finally:
                    compressed_file.close()
            elif os.path.isdir(addon_path):
                with open(os.path.join(addon_path, 'install.rdf'), 'r') as f:
                    manifest = f.read()
            else:
                raise IOError('Add-on path is neither an XPI nor a directory: %s' % addon_path)
        except (IOError, KeyError) as e:
            raise AddonFormatError(str(e), sys.exc_info()[2])

        try:
            doc = minidom.parseString(manifest)
            # Get the namespaces abbreviations
            em = get_namespace_id(doc, 'http://www.mozilla.org/2004/em-rdf#')
            rdf = get_namespace_id(doc, 'http://www.w3.org/1999/02/22-rdf-syntax-ns#')
            description = doc.getElementsByTagName(rdf + 'Description').item(0)
            if description is None:
                description = doc.getElementsByTagName('Description').item(0)
            for node in description.childNodes:
                # Remove the namespace prefix from the tag for comparison
                entry = node.nodeName.replace(em, "")
                if entry in details.keys():
                    details.update({entry: get_text(node)})
            if details.get('id') is None:
                for i in range(description.attributes.length):
                    attribute = description.attributes.item(i)
                    if attribute.name == em + 'id':
                        details.update({'id': attribute.value})
        except Exception as e:
            raise AddonFormatError(str(e), sys.exc_info()[2])

        # turn unpack into a true/false value
        if isinstance(details['unpack'], str):
            details['unpack'] = details['unpack'].lower() == 'true'

        # If no ID is set, the add-on is invalid
        if details.get('id') is None:
            raise AddonFormatError('Add-on id could not be found.')

        return details
|
CTSRD-SOAAP/chromium-42.0.2311.135
|
refs/heads/master
|
testing/gtest/test/gtest_filter_unittest.py
|
2826
|
#!/usr/bin/env python
#
# Copyright 2005 Google Inc. All Rights Reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Unit test for Google Test test filters.
A user can specify which test(s) in a Google Test program to run via either
the GTEST_FILTER environment variable or the --gtest_filter flag.
This script tests such functionality by invoking
gtest_filter_unittest_ (a program written with Google Test) with different
environments and command line flags.
Note that test sharding may also influence which tests are filtered. Therefore,
we test that here also.
"""
__author__ = 'wan@google.com (Zhanyong Wan)'
import os
import re
import sets
import sys
import gtest_test_utils
# Constants.
# Checks if this platform can pass empty environment variables to child
# processes. We set an env variable to an empty string and invoke a python
# script in a subprocess to print whether the variable is STILL in
# os.environ. We then use 'eval' to parse the child's output so that an
# exception is thrown if the input is anything other than 'True' nor 'False'.
# NOTE(review): the '-c' snippets below use the Python 2 'print' statement,
# so sys.executable is assumed to be a Python 2 interpreter.
os.environ['EMPTY_VAR'] = ''
child = gtest_test_utils.Subprocess(
    [sys.executable, '-c', 'import os; print \'EMPTY_VAR\' in os.environ'])
CAN_PASS_EMPTY_ENV = eval(child.output)
# Check if this platform can unset environment variables in child processes.
# We set an env variable to a non-empty string, unset it, and invoke
# a python script in a subprocess to print whether the variable
# is NO LONGER in os.environ.
# We use 'eval' to parse the child's output so that an exception
# is thrown if the input is neither 'True' nor 'False'.
os.environ['UNSET_VAR'] = 'X'
del os.environ['UNSET_VAR']
child = gtest_test_utils.Subprocess(
    [sys.executable, '-c', 'import os; print \'UNSET_VAR\' not in os.environ'])
CAN_UNSET_ENV = eval(child.output)
# Checks if we should test with an empty filter. This doesn't
# make sense on platforms that cannot pass empty env variables (Win32)
# and on platforms that cannot unset variables (since we cannot tell
# the difference between "" and NULL -- Borland and Solaris < 5.10)
CAN_TEST_EMPTY_FILTER = (CAN_PASS_EMPTY_ENV and CAN_UNSET_ENV)
# The environment variable for specifying the test filters.
FILTER_ENV_VAR = 'GTEST_FILTER'
# The environment variables for test sharding.
TOTAL_SHARDS_ENV_VAR = 'GTEST_TOTAL_SHARDS'
SHARD_INDEX_ENV_VAR = 'GTEST_SHARD_INDEX'
SHARD_STATUS_FILE_ENV_VAR = 'GTEST_SHARD_STATUS_FILE'
# The command line flag for specifying the test filters.
FILTER_FLAG = 'gtest_filter'
# The command line flag for including disabled tests.
# NOTE(review): "DISABED" is a long-standing typo in this variable name;
# renaming it would touch every use site below, so it is kept as-is.
ALSO_RUN_DISABED_TESTS_FLAG = 'gtest_also_run_disabled_tests'
# Command to run the gtest_filter_unittest_ program.
COMMAND = gtest_test_utils.GetTestExecutablePath('gtest_filter_unittest_')
# Regex for determining whether parameterized tests are enabled in the binary.
PARAM_TEST_REGEX = re.compile(r'/ParamTest')
# Regex for parsing test case names from Google Test's output.
TEST_CASE_REGEX = re.compile(r'^\[\-+\] \d+ tests? from (\w+(/\w+)?)')
# Regex for parsing test names from Google Test's output.
TEST_REGEX = re.compile(r'^\[\s*RUN\s*\].*\.(\w+(/\w+)?)')
# The command line flag to tell Google Test to output the list of tests it
# will run.
LIST_TESTS_FLAG = '--gtest_list_tests'
# Indicates whether Google Test supports death tests.
SUPPORTS_DEATH_TESTS = 'HasDeathTest' in gtest_test_utils.Subprocess(
    [COMMAND, LIST_TESTS_FLAG]).output
# Full names of all tests in gtest_filter_unittests_.
PARAM_TESTS = [
    'SeqP/ParamTest.TestX/0',
    'SeqP/ParamTest.TestX/1',
    'SeqP/ParamTest.TestY/0',
    'SeqP/ParamTest.TestY/1',
    'SeqQ/ParamTest.TestX/0',
    'SeqQ/ParamTest.TestX/1',
    'SeqQ/ParamTest.TestY/0',
    'SeqQ/ParamTest.TestY/1',
]
DISABLED_TESTS = [
    'BarTest.DISABLED_TestFour',
    'BarTest.DISABLED_TestFive',
    'BazTest.DISABLED_TestC',
    'DISABLED_FoobarTest.Test1',
    'DISABLED_FoobarTest.DISABLED_Test2',
    'DISABLED_FoobarbazTest.TestA',
]
if SUPPORTS_DEATH_TESTS:
  DEATH_TESTS = [
    'HasDeathTest.Test1',
    'HasDeathTest.Test2',
  ]
else:
  DEATH_TESTS = []
# All the non-disabled tests.
ACTIVE_TESTS = [
    'FooTest.Abc',
    'FooTest.Xyz',
    'BarTest.TestOne',
    'BarTest.TestTwo',
    'BarTest.TestThree',
    'BazTest.TestOne',
    'BazTest.TestA',
    'BazTest.TestB',
] + DEATH_TESTS + PARAM_TESTS
# Set lazily in GTestFilterUnitTest.setUp() once the binary has been probed.
param_tests_present = None
# Utilities.
# Private copy of the environment; mutated by SetEnvVar and friends so the
# real os.environ of this process is never polluted.
environ = os.environ.copy()
def SetEnvVar(env_var, value):
  """Sets the env variable to 'value'; unsets it when 'value' is None."""

  if value is None:
    # pop() with a default is a no-op when the variable is absent.
    environ.pop(env_var, None)
  else:
    environ[env_var] = value
def RunAndReturnOutput(args=None):
  """Runs the test program and returns its output."""

  command = [COMMAND] + (args or [])
  return gtest_test_utils.Subprocess(command, env=environ).output
def RunAndExtractTestList(args=None):
  """Runs the test program and returns its exit code and a list of tests run."""

  process = gtest_test_utils.Subprocess([COMMAND] + (args or []), env=environ)
  tests_run = []
  current_case = ''
  for line in process.output.split('\n'):
    case_match = TEST_CASE_REGEX.match(line)
    if case_match is not None:
      # A "[----] N tests from Case" banner: remember the current case.
      current_case = case_match.group(1)
      continue
    run_match = TEST_REGEX.match(line)
    if run_match is not None:
      tests_run.append(current_case + '.' + run_match.group(1))
  return (tests_run, process.exit_code)
def InvokeWithModifiedEnv(extra_env, function, *args, **kwargs):
  """Runs the given function and arguments in a modified environment."""

  # Snapshot BEFORE entering the try block: if the copy were taken inside
  # the try and raised, the finally clause would clear the shared
  # 'environ' and then hit a NameError restoring 'original_env'.
  original_env = environ.copy()
  try:
    environ.update(extra_env)
    return function(*args, **kwargs)
  finally:
    environ.clear()
    environ.update(original_env)
def RunWithSharding(total_shards, shard_index, command):
  """Runs a test program shard and returns exit code and a list of tests run."""

  sharding_env = {
      SHARD_INDEX_ENV_VAR: str(shard_index),
      TOTAL_SHARDS_ENV_VAR: str(total_shards),
  }
  return InvokeWithModifiedEnv(sharding_env, RunAndExtractTestList, command)
# The unit test.
class GTestFilterUnitTest(gtest_test_utils.TestCase):
"""Tests the env variable or the command line flag to filter tests."""
# Utilities.
def AssertSetEqual(self, lhs, rhs):
"""Asserts that two sets are equal."""
for elem in lhs:
self.assert_(elem in rhs, '%s in %s' % (elem, rhs))
for elem in rhs:
self.assert_(elem in lhs, '%s in %s' % (elem, lhs))
def AssertPartitionIsValid(self, set_var, list_of_sets):
"""Asserts that list_of_sets is a valid partition of set_var."""
full_partition = []
for slice_var in list_of_sets:
full_partition.extend(slice_var)
self.assertEqual(len(set_var), len(full_partition))
self.assertEqual(sets.Set(set_var), sets.Set(full_partition))
def AdjustForParameterizedTests(self, tests_to_run):
"""Adjust tests_to_run in case value parameterized tests are disabled."""
global param_tests_present
if not param_tests_present:
return list(sets.Set(tests_to_run) - sets.Set(PARAM_TESTS))
else:
return tests_to_run
def RunAndVerify(self, gtest_filter, tests_to_run):
"""Checks that the binary runs correct set of tests for a given filter."""
tests_to_run = self.AdjustForParameterizedTests(tests_to_run)
# First, tests using the environment variable.
# Windows removes empty variables from the environment when passing it
# to a new process. This means it is impossible to pass an empty filter
# into a process using the environment variable. However, we can still
# test the case when the variable is not supplied (i.e., gtest_filter is
# None).
# pylint: disable-msg=C6403
if CAN_TEST_EMPTY_FILTER or gtest_filter != '':
SetEnvVar(FILTER_ENV_VAR, gtest_filter)
tests_run = RunAndExtractTestList()[0]
SetEnvVar(FILTER_ENV_VAR, None)
self.AssertSetEqual(tests_run, tests_to_run)
# pylint: enable-msg=C6403
# Next, tests using the command line flag.
if gtest_filter is None:
args = []
else:
args = ['--%s=%s' % (FILTER_FLAG, gtest_filter)]
tests_run = RunAndExtractTestList(args)[0]
self.AssertSetEqual(tests_run, tests_to_run)
def RunAndVerifyWithSharding(self, gtest_filter, total_shards, tests_to_run,
args=None, check_exit_0=False):
"""Checks that binary runs correct tests for the given filter and shard.
Runs all shards of gtest_filter_unittest_ with the given filter, and
verifies that the right set of tests were run. The union of tests run
on each shard should be identical to tests_to_run, without duplicates.
Args:
gtest_filter: A filter to apply to the tests.
total_shards: A total number of shards to split test run into.
tests_to_run: A set of tests expected to run.
args : Arguments to pass to the to the test binary.
check_exit_0: When set to a true value, make sure that all shards
return 0.
"""
tests_to_run = self.AdjustForParameterizedTests(tests_to_run)
# Windows removes empty variables from the environment when passing it
# to a new process. This means it is impossible to pass an empty filter
# into a process using the environment variable. However, we can still
# test the case when the variable is not supplied (i.e., gtest_filter is
# None).
# pylint: disable-msg=C6403
if CAN_TEST_EMPTY_FILTER or gtest_filter != '':
SetEnvVar(FILTER_ENV_VAR, gtest_filter)
partition = []
for i in range(0, total_shards):
(tests_run, exit_code) = RunWithSharding(total_shards, i, args)
if check_exit_0:
self.assertEqual(0, exit_code)
partition.append(tests_run)
self.AssertPartitionIsValid(tests_to_run, partition)
SetEnvVar(FILTER_ENV_VAR, None)
# pylint: enable-msg=C6403
def RunAndVerifyAllowingDisabled(self, gtest_filter, tests_to_run):
    """Checks that the binary runs the right tests with disabled tests enabled.

    Runs gtest_filter_unittest_ with the given filter and with disabled
    tests turned on, then verifies that exactly tests_to_run were run.

    Args:
      gtest_filter: A filter to apply to the tests.
      tests_to_run: A set of tests expected to run.
    """
    tests_to_run = self.AdjustForParameterizedTests(tests_to_run)

    # Always pass the "also run disabled tests" flag; add the filter flag
    # only when a filter was actually supplied (None means "no flag").
    args = ['--%s' % ALSO_RUN_DISABED_TESTS_FLAG]
    if gtest_filter is not None:
        args += ['--%s=%s' % (FILTER_FLAG, gtest_filter)]

    tests_run = RunAndExtractTestList(args)[0]
    self.AssertSetEqual(tests_run, tests_to_run)
def setUp(self):
    """Sets up the test case.

    Determines whether value-parameterized tests are compiled into the
    binary and caches the answer in the module-level flag, so the probe
    run happens only once per process.
    """
    global param_tests_present
    if param_tests_present is None:
        # Probe the binary's output on the first test only; reuse afterwards.
        output = RunAndReturnOutput()
        param_tests_present = PARAM_TEST_REGEX.search(output) is not None
# Basic filtering behavior: no filter, empty filter, non-matching filter,
# and an exact full-name filter.
def testDefaultBehavior(self):
"""Tests the behavior of not specifying the filter."""
self.RunAndVerify(None, ACTIVE_TESTS)
def testDefaultBehaviorWithShards(self):
"""Tests the behavior without the filter, with sharding enabled."""
# Shard counts below, at, and above the number of active tests.
self.RunAndVerifyWithSharding(None, 1, ACTIVE_TESTS)
self.RunAndVerifyWithSharding(None, 2, ACTIVE_TESTS)
self.RunAndVerifyWithSharding(None, len(ACTIVE_TESTS) - 1, ACTIVE_TESTS)
self.RunAndVerifyWithSharding(None, len(ACTIVE_TESTS), ACTIVE_TESTS)
self.RunAndVerifyWithSharding(None, len(ACTIVE_TESTS) + 1, ACTIVE_TESTS)
def testEmptyFilter(self):
"""Tests an empty filter."""
# An empty filter matches nothing, sharded or not.
self.RunAndVerify('', [])
self.RunAndVerifyWithSharding('', 1, [])
self.RunAndVerifyWithSharding('', 2, [])
def testBadFilter(self):
"""Tests a filter that matches nothing."""
self.RunAndVerify('BadFilter', [])
self.RunAndVerifyAllowingDisabled('BadFilter', [])
def testFullName(self):
"""Tests filtering by full name."""
self.RunAndVerify('FooTest.Xyz', ['FooTest.Xyz'])
self.RunAndVerifyAllowingDisabled('FooTest.Xyz', ['FooTest.Xyz'])
self.RunAndVerifyWithSharding('FooTest.Xyz', 5, ['FooTest.Xyz'])
def testUniversalFilters(self):
"""Tests filters that match everything."""
self.RunAndVerify('*', ACTIVE_TESTS)
self.RunAndVerify('*.*', ACTIVE_TESTS)
self.RunAndVerifyWithSharding('*.*', len(ACTIVE_TESTS) - 3, ACTIVE_TESTS)
# With disabled tests enabled, a universal filter also picks up DISABLED_ tests.
self.RunAndVerifyAllowingDisabled('*', ACTIVE_TESTS + DISABLED_TESTS)
self.RunAndVerifyAllowingDisabled('*.*', ACTIVE_TESTS + DISABLED_TESTS)
def testFilterByTestCase(self):
"""Tests filtering by test case name."""
self.RunAndVerify('FooTest.*', ['FooTest.Abc', 'FooTest.Xyz'])
BAZ_TESTS = ['BazTest.TestOne', 'BazTest.TestA', 'BazTest.TestB']
self.RunAndVerify('BazTest.*', BAZ_TESTS)
self.RunAndVerifyAllowingDisabled('BazTest.*',
BAZ_TESTS + ['BazTest.DISABLED_TestC'])
def testFilterByTest(self):
"""Tests filtering by test name."""
self.RunAndVerify('*.TestOne', ['BarTest.TestOne', 'BazTest.TestOne'])
def testFilterDisabledTests(self):
"""Select only the disabled tests to run."""
# Disabled tests never run unless disabled-test execution is enabled,
# regardless of how precisely the filter names them.
self.RunAndVerify('DISABLED_FoobarTest.Test1', [])
self.RunAndVerifyAllowingDisabled('DISABLED_FoobarTest.Test1',
['DISABLED_FoobarTest.Test1'])
self.RunAndVerify('*DISABLED_*', [])
self.RunAndVerifyAllowingDisabled('*DISABLED_*', DISABLED_TESTS)
self.RunAndVerify('*.DISABLED_*', [])
# '*.DISABLED_*' matches only tests whose *test name* is disabled.
self.RunAndVerifyAllowingDisabled('*.DISABLED_*', [
'BarTest.DISABLED_TestFour',
'BarTest.DISABLED_TestFive',
'BazTest.DISABLED_TestC',
'DISABLED_FoobarTest.DISABLED_Test2',
])
self.RunAndVerify('DISABLED_*', [])
# 'DISABLED_*' matches only tests whose *case name* is disabled.
self.RunAndVerifyAllowingDisabled('DISABLED_*', [
'DISABLED_FoobarTest.Test1',
'DISABLED_FoobarTest.DISABLED_Test2',
'DISABLED_FoobarbazTest.TestA',
])
def testWildcardInTestCaseName(self):
"""Tests using wildcard in the test case name."""
self.RunAndVerify('*a*.*', [
'BarTest.TestOne',
'BarTest.TestTwo',
'BarTest.TestThree',
'BazTest.TestOne',
'BazTest.TestA',
'BazTest.TestB', ] + DEATH_TESTS + PARAM_TESTS)
def testWildcardInTestName(self):
"""Tests using wildcard in the test name."""
self.RunAndVerify('*.*A*', ['FooTest.Abc', 'BazTest.TestA'])
def testFilterWithoutDot(self):
"""Tests a filter that has no '.' in it."""
# Without a dot, the pattern is matched against the full test name.
self.RunAndVerify('*z*', [
'FooTest.Xyz',
'BazTest.TestOne',
'BazTest.TestA',
'BazTest.TestB',
])
def testTwoPatterns(self):
"""Tests filters that consist of two patterns."""
# ':' separates alternative patterns; a test runs if any pattern matches.
self.RunAndVerify('Foo*.*:*A*', [
'FooTest.Abc',
'FooTest.Xyz',
'BazTest.TestA',
])
# An empty pattern + a non-empty one
self.RunAndVerify(':*A*', ['FooTest.Abc', 'BazTest.TestA'])
def testThreePatterns(self):
"""Tests filters that consist of three patterns."""
self.RunAndVerify('*oo*:*A*:*One', [
'FooTest.Abc',
'FooTest.Xyz',
'BarTest.TestOne',
'BazTest.TestOne',
'BazTest.TestA',
])
# The 2nd pattern is empty.
self.RunAndVerify('*oo*::*One', [
'FooTest.Abc',
'FooTest.Xyz',
'BarTest.TestOne',
'BazTest.TestOne',
])
# The last 2 patterns are empty.
self.RunAndVerify('*oo*::', [
'FooTest.Abc',
'FooTest.Xyz',
])
def testNegativeFilters(self):
# Everything after '-' in a filter is a set of patterns to *exclude*.
self.RunAndVerify('*-BazTest.TestOne', [
'FooTest.Abc',
'FooTest.Xyz',
'BarTest.TestOne',
'BarTest.TestTwo',
'BarTest.TestThree',
'BazTest.TestA',
'BazTest.TestB',
] + DEATH_TESTS + PARAM_TESTS)
self.RunAndVerify('*-FooTest.Abc:BazTest.*', [
'FooTest.Xyz',
'BarTest.TestOne',
'BarTest.TestTwo',
'BarTest.TestThree',
] + DEATH_TESTS + PARAM_TESTS)
self.RunAndVerify('BarTest.*-BarTest.TestOne', [
'BarTest.TestTwo',
'BarTest.TestThree',
])
# Tests without leading '*'.
self.RunAndVerify('-FooTest.Abc:FooTest.Xyz:BazTest.*', [
'BarTest.TestOne',
'BarTest.TestTwo',
'BarTest.TestThree',
] + DEATH_TESTS + PARAM_TESTS)
# Value parameterized tests.
self.RunAndVerify('*/*', PARAM_TESTS)
# Value parameterized tests filtering by the sequence name.
self.RunAndVerify('SeqP/*', [
'SeqP/ParamTest.TestX/0',
'SeqP/ParamTest.TestX/1',
'SeqP/ParamTest.TestY/0',
'SeqP/ParamTest.TestY/1',
])
# Value parameterized tests filtering by the test name.
self.RunAndVerify('*/0', [
'SeqP/ParamTest.TestX/0',
'SeqP/ParamTest.TestY/0',
'SeqQ/ParamTest.TestX/0',
'SeqQ/ParamTest.TestY/0',
])
def testFlagOverridesEnvVar(self):
"""Tests that the filter flag overrides the filtering env. variable."""
SetEnvVar(FILTER_ENV_VAR, 'Foo*')
args = ['--%s=%s' % (FILTER_FLAG, '*One')]
tests_run = RunAndExtractTestList(args)[0]
SetEnvVar(FILTER_ENV_VAR, None)
# '*One' (the flag) must win over 'Foo*' (the environment variable).
self.AssertSetEqual(tests_run, ['BarTest.TestOne', 'BazTest.TestOne'])
def testShardStatusFileIsCreated(self):
"""Tests that the shard file is created if specified in the environment."""
shard_status_file = os.path.join(gtest_test_utils.GetTempDir(),
'shard_status_file')
self.assert_(not os.path.exists(shard_status_file))
extra_env = {SHARD_STATUS_FILE_ENV_VAR: shard_status_file}
try:
InvokeWithModifiedEnv(extra_env, RunAndReturnOutput)
finally:
# The file must exist even if the run above raised; clean it up too.
self.assert_(os.path.exists(shard_status_file))
os.remove(shard_status_file)
def testShardStatusFileIsCreatedWithListTests(self):
"""Tests that the shard file is created with the "list_tests" flag."""
shard_status_file = os.path.join(gtest_test_utils.GetTempDir(),
'shard_status_file2')
self.assert_(not os.path.exists(shard_status_file))
extra_env = {SHARD_STATUS_FILE_ENV_VAR: shard_status_file}
try:
output = InvokeWithModifiedEnv(extra_env,
RunAndReturnOutput,
[LIST_TESTS_FLAG])
finally:
# This assertion ensures that Google Test enumerated the tests as
# opposed to running them.
self.assert_('[==========]' not in output,
'Unexpected output during test enumeration.\n'
'Please ensure that LIST_TESTS_FLAG is assigned the\n'
'correct flag value for listing Google Test tests.')
self.assert_(os.path.exists(shard_status_file))
os.remove(shard_status_file)
# Defined only when the binary was built with death-test support.
if SUPPORTS_DEATH_TESTS:
def testShardingWorksWithDeathTests(self):
"""Tests integration with death tests and sharding."""
gtest_filter = 'HasDeathTest.*:SeqP/*'
expected_tests = [
'HasDeathTest.Test1',
'HasDeathTest.Test2',
'SeqP/ParamTest.TestX/0',
'SeqP/ParamTest.TestX/1',
'SeqP/ParamTest.TestY/0',
'SeqP/ParamTest.TestY/1',
]
# Exercise both death-test styles with two different shard counts.
for flag in ['--gtest_death_test_style=threadsafe',
'--gtest_death_test_style=fast']:
self.RunAndVerifyWithSharding(gtest_filter, 3, expected_tests,
check_exit_0=True, args=[flag])
self.RunAndVerifyWithSharding(gtest_filter, 5, expected_tests,
check_exit_0=True, args=[flag])
if __name__ == '__main__':
gtest_test_utils.Main()
|
LarryHillyer/PoolHost
|
refs/heads/master
|
PoolHost/env/Lib/site-packages/wheel/test/test_signatures.py
|
565
|
from wheel import signatures
from wheel.signatures import djbec, ed25519py
from wheel.util import binary
# Smoke test: the ed25519 backend loader must resolve an implementation
# without raising.
def test_getlib():
signatures.get_ed25519ll()
# Run djbec's built-in self tests; they raise on failure.
def test_djbec():
djbec.dsa_test()
djbec.dh_test()
def test_ed25519py():
    """Exercises the ed25519 keypair/sign/open round trip and its error paths."""

    def expect_value_error(func, *func_args):
        # Every invalid input below must be rejected with ValueError.
        try:
            func(*func_args)
        except ValueError:
            pass
        else:
            raise Exception("Expected ValueError")

    # Deterministic keypair from a fixed seed, plus a random one.
    kp0 = ed25519py.crypto_sign_keypair(binary(' ' * 32))
    kp = ed25519py.crypto_sign_keypair()

    # Round trip: sign with the secret key, open with the verifying key.
    signed = ed25519py.crypto_sign(binary('test'), kp.sk)
    ed25519py.crypto_sign_open(signed, kp.vk)

    # Opening with a different keypair's verifying key must fail.
    expect_value_error(ed25519py.crypto_sign_open, signed, kp0.vk)
    # A seed of the wrong length must be rejected.
    expect_value_error(ed25519py.crypto_sign_keypair, binary(' ' * 33))
    # Signing and verifying keys of the wrong length must be rejected.
    expect_value_error(ed25519py.crypto_sign, binary(''), binary(' ') * 31)
    expect_value_error(ed25519py.crypto_sign_open, binary(''), binary(' ') * 31)
|
charleswhchan/ansible
|
refs/heads/devel
|
lib/ansible/utils/module_docs_fragments/files.py
|
25
|
# (c) 2014, Matt Martz <matt@sivel.net>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Reusable documentation fragment: Ansible modules that manipulate files
# reference this fragment to inherit the standard file-attribute options.
class ModuleDocFragment(object):
# Standard files documentation fragment
# NOTE: DOCUMENTATION is consumed as YAML text by Ansible's documentation
# tooling; its content (option names, defaults, descriptions) is data,
# not code, and must not be reformatted casually.
DOCUMENTATION = """
options:
mode:
required: false
default: null
description:
- mode the file or directory should be. For those used to I(/usr/bin/chmod) remember that modes are actually octal numbers (like 0644). Leaving off the leading zero will likely have unexpected results. As of version 1.8, the mode may be specified as a symbolic mode (for example, C(u+rwx) or C(u=rw,g=r,o=r)).
owner:
required: false
default: null
description:
- name of the user that should own the file/directory, as would be fed to I(chown)
group:
required: false
default: null
description:
- name of the group that should own the file/directory, as would be fed to I(chown)
seuser:
required: false
default: null
description:
- user part of SELinux file context. Will default to system policy, if
applicable. If set to C(_default), it will use the C(user) portion of the
policy if available
serole:
required: false
default: null
description:
- role part of SELinux file context, C(_default) feature works as for I(seuser).
setype:
required: false
default: null
description:
- type part of SELinux file context, C(_default) feature works as for I(seuser).
selevel:
required: false
default: "s0"
description:
- level part of the SELinux file context. This is the MLS/MCS attribute,
sometimes known as the C(range). C(_default) feature works as for
I(seuser).
"""
|
awslabs/aws-big-data-blog
|
refs/heads/master
|
aws-blog-vpc-toptalkers/bin/flowlogs-counter.py
|
2
|
#!/usr/bin/python2.7
import os
import sys
import fileinput
import shlex
## Index
# Zero-based field positions within a whitespace-split VPC Flow Log record
# (presumably srcaddr, dstaddr and dstport — TODO confirm against the
# flow-log record format version this job consumes).
SRC=3
DST=4
PORT=6
def stream_index():
    """Return the field offset introduced by Hadoop streaming's input format.

    When the job reads its input through NLineInputFormat in a map task,
    each record is prefixed with an extra leading field, shifting the real
    flow-log fields one position to the right.

    Returns:
        1 if the offset prefix is present, 0 otherwise (including when the
        Hadoop environment variables are not set at all).
    """
    try:
        if (os.environ['mapred_input_format_class'] ==
                'org.apache.hadoop.mapred.lib.NLineInputFormat' and
                os.environ['mapreduce_task_ismap'] == "true"):
            return 1
    except KeyError:
        # Only a missing environment variable is expected here; the original
        # bare `except:` also hid genuine bugs such as typos in this block.
        pass
    return 0
# Reads flow-log records from stdin and emits one Hadoop streaming
# aggregate key per record: "LongValueSum:src:dst:port\t1".
# NOTE: Python 2 code (print statement).
def main(index=0):
for line in fileinput.input("-"):
try:
i = shlex.split(line.rstrip('\r\n'))
print "LongValueSum:%s:%s:%s\t%s" % (i[SRC+index],
i[DST+index], i[PORT+index], 1)
# NOTE(review): bare except — reports the failing line to the Hadoop
# task tracker, then re-raises; consider narrowing to IndexError.
except:
sys.stderr.write("reporter:status:err:%s" % line)
raise
# -----------------------------------------------------------------------------
# Main
# -----------------------------------------------------------------------------
# The field offset depends on whether we are running inside Hadoop streaming.
if __name__ == "__main__":
main(index=stream_index())
|
mayankcu/Django-social
|
refs/heads/master
|
venv/Lib/site-packages/django/contrib/localflavor/ie/ie_counties.py
|
503
|
"""
Sources:
Irish Counties: http://en.wikipedia.org/wiki/Counties_of_Ireland
"""
from django.utils.translation import ugettext_lazy as _
IE_COUNTY_CHOICES = (
('antrim', _('Antrim')),
('armagh', _('Armagh')),
('carlow', _('Carlow')),
('cavan', _('Cavan')),
('clare', _('Clare')),
('cork', _('Cork')),
('derry', _('Derry')),
('donegal', _('Donegal')),
('down', _('Down')),
('dublin', _('Dublin')),
('fermanagh', _('Fermanagh')),
('galway', _('Galway')),
('kerry', _('Kerry')),
('kildare', _('Kildare')),
('kilkenny', _('Kilkenny')),
('laois', _('Laois')),
('leitrim', _('Leitrim')),
('limerick', _('Limerick')),
('longford', _('Longford')),
('louth', _('Louth')),
('mayo', _('Mayo')),
('meath', _('Meath')),
('monaghan', _('Monaghan')),
('offaly', _('Offaly')),
('roscommon', _('Roscommon')),
('sligo', _('Sligo')),
('tipperary', _('Tipperary')),
('tyrone', _('Tyrone')),
('waterford', _('Waterford')),
('westmeath', _('Westmeath')),
('wexford', _('Wexford')),
('wicklow', _('Wicklow')),
)
|
spxiwh/pycbc-glue
|
refs/heads/nr_precessing_branch
|
test/segdb/segdb_test.py
|
3
|
#!/usr/bin/env python
"""This script tests ligolw_segment_query and ligolw_segments_from_cats:
For ligolw_segment_query, the test runs against E13 data at the time of writing.
1. run test query "ligolw_segments_from_cats --segment-url=https://segdb.ligo.caltech.edu --gps-start-time 924821634 --gps-end-time 924828992 --veto-file=/H1H2-CBC_E13_ONLINE-923682800-2419200.xml --separate-categories"
2. get segment start_time, end_time out from the result test_ligolw_segment_query.xml and put them in a temp time file "segScript"
3. diff segScript against the validated correct results in the "correct_ligolw_segment_query_results.txt"
For ligolw_segments_from_cats, the test runs against E13 data at the time of writing
1. run test query "ligolw_segments_from_cats --segment-url=https://segdb.ligo.caltech.edu --gps-start-time 924821634 --gps-end-time 924828992 --veto-file=/H1H2-CBC_E13_ONLINE-923682800-2419200.xml --separate-categories". This command returns 8 xml files.
2. loop over the 8 xml files to get segment start_time and end_time and put them in a temp time file, for example, result_H1CAT1
3. in the loop, diff the temp time file against its corresponding validated results in, for example, H1CAT1
"""
from glue import segments
# NOTE: `commands` is Python 2 only (removed in Python 3; use subprocess).
import commands
import sys
import os
#--------------------------------------------------------------------------------
# Test ligolw_segment_query without explicit versions
#--------------------------------------------------------------------------------
print "Testing ligolw_segment_query against E13 data (without versions)..."
# run the testing ligolw_segment_query command and generate the result xml file
# The query output is piped through ligolw_print to extract "start end"
# pairs into the temp file segScript.
com = "ligolw_segment_query --segment-url=https://segdb.ligo.caltech.edu --gps-start-time 924821632 --gps-end-time 924921632 --include-segments H1:DMT-SCIENCE --exclude-segments H1:DMT-BADGAMMA --query-segments | ligolw_print -t segment -c start_time -c end_time -d ' ' > segScript"
a = commands.getstatusoutput(com)
# getstatusoutput returns (exit_status, output); 0 means success.
if a[0] == 0:
pass
else:
print "Error executing command to generate result xml file"
sys.exit(1)
# diff result file from ligolw_segment_query and from database
com = 'diff correct_ligolw_segment_query_results.txt segScript'
a = commands.getstatusoutput(com)
if a[0] == 0:
print "Test pass"
print
else:
print "Test fail"
print a[1]
os.remove('segScript')
#--------------------------------------------------------------------------------
# Test ligolw_segment_query with explicit versions
#--------------------------------------------------------------------------------
# Same query as above, but with explicit ":1" versions on the segment names;
# the expected results file is identical.
print "Testing ligolw_segment_query against E13 data (with versions)..."
# run the testing ligolw_segment_query command and generate the result xml file
com = "ligolw_segment_query --segment-url=https://segdb.ligo.caltech.edu --gps-start-time 924821632 --gps-end-time 924921632 --include-segments H1:DMT-SCIENCE:1 --exclude-segments H1:DMT-BADGAMMA:1 --query-segments | ligolw_print -t segment -c start_time -c end_time -d ' ' > segScript"
a = commands.getstatusoutput(com)
if a[0] == 0:
pass
else:
print "Error executing command to generate result xml file"
sys.exit(1)
# diff result file from ligolw_segment_query and from database
com = 'diff correct_ligolw_segment_query_results.txt segScript'
a = commands.getstatusoutput(com)
if a[0] == 0:
print "Test pass"
print
else:
print "Test fail"
print a[1]
os.remove('segScript')
#---------------------------------------------------------------------------------
# Test ligolw_segments_from_cats
#---------------------------------------------------------------------------------
print
print "Testing ligolw_segments_from_cats against E13 data..."
print " It may take a while ..."
# run ligolw_segments_from_cats and get 8 result files back (2 ifos x 4 categories)
com = "ligolw_segments_from_cats --segment-url=https://segdb.ligo.caltech.edu --gps-start-time 924821634 --gps-end-time 924828992 --veto-file=H1H2-CBC_E13_ONLINE-923682800-2419200.xml --separate-categories"
a = commands.getstatusoutput(com)
if a[0] == 0:
pass
else:
print "Error executing ligolw_segments_from_cats command"
sys.exit(1)
ret = 0
for i in ['H1', 'H2']:
for c in [1,2,3,4]: # loop in categories
# get the segment start and end time from the result xml file, and put them in a temp time file
result_file_name = i + '-VETOTIME_CAT' + str(c) + '-924821634-7358.xml'
com = "cat " + result_file_name + " | ligolw_print -t segment -c start_time -c end_time -d ' ' > " + "result_" + i + "CAT" + str(c)
a = commands.getstatusoutput(com)
if a[0] != 0:
print "Error execute command to get segment start and end time from result xml file"
# NOTE(review): this failure path exits with status 0; it probably
# should be sys.exit(1) like the other error paths above.
sys.exit(0)
# diff result file against the correct results
com = 'diff ' + i + 'CAT' + str(c) + " result_" + i + "CAT" + str(c)
a = commands.getstatusoutput(com)
if a[0]!=0:
print "Error diff time file %s from %s" % ("result_" + i + "CAT" + str(c), result_file_name)
ret = 1
# remove the temp result file and the time file
os.remove("result_" + i + "CAT" + str(c))
os.remove(result_file_name)
if ret == 0:
print "test pass"
sys.exit(0)
|
sencha/chromium-spacewalk
|
refs/heads/master
|
mojo/public/tools/bindings/mojom_bindings_generator_unittest.py
|
107
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import unittest
from mojom_bindings_generator import MakeImportStackMessage
class MojoBindingsGeneratorTest(unittest.TestCase):
    """Tests mojo_bindings_generator."""

    def testMakeImportStackMessage(self):
        """Tests MakeImportStackMessage().

        A single-element stack yields no message; deeper stacks produce
        one "was imported by" line per import edge, innermost first.
        """
        # assertEqual: assertEquals is a deprecated unittest alias.
        self.assertEqual(MakeImportStackMessage(["x"]), "")
        self.assertEqual(MakeImportStackMessage(["x", "y"]),
                         "\n y was imported by x")
        self.assertEqual(MakeImportStackMessage(["x", "y", "z"]),
                         "\n z was imported by y\n y was imported by x")
if __name__ == "__main__":
unittest.main()
|
kenshay/ImageScript
|
refs/heads/master
|
ProgramData/SystemFiles/Python/Lib/site-packages/networkx/algorithms/components/tests/test_attracting.py
|
6
|
#!/usr/bin/env python
from nose.tools import *
import networkx as nx
from networkx import NetworkXNotImplemented
# Tests for networkx's attracting-components algorithms (nose-style class).
class TestAttractingComponents(object):
def setUp(self):
# G1: DAG-like digraph whose sinks {2}, {9}, {10} are the attractors.
self.G1 = nx.DiGraph()
self.G1.add_edges_from([(5, 11), (11, 2), (11, 9), (11, 10),
(7, 11), (7, 8), (8, 9), (3, 8), (3, 10)])
# G2: single attracting cycle {1, 2} reachable from 0.
self.G2 = nx.DiGraph()
self.G2.add_edges_from([(0, 1), (0, 2), (1, 1), (1, 2), (2, 1)])
# G3: two attracting cycles, {1, 2} and {3, 4}.
self.G3 = nx.DiGraph()
self.G3.add_edges_from([(0, 1), (1, 2), (2, 1), (0, 3), (3, 4), (4, 3)])
# G4: empty digraph — no attracting components at all.
self.G4 = nx.DiGraph()
def test_attracting_components(self):
ac = list(nx.attracting_components(self.G1))
assert_true({2} in ac)
assert_true({9} in ac)
assert_true({10} in ac)
ac = list(nx.attracting_components(self.G2))
# Sort each component so set ordering cannot affect the comparison.
ac = [tuple(sorted(x)) for x in ac]
assert_true(ac == [(1, 2)])
ac = list(nx.attracting_components(self.G3))
ac = [tuple(sorted(x)) for x in ac]
assert_true((1, 2) in ac)
assert_true((3, 4) in ac)
assert_equal(len(ac), 2)
ac = list(nx.attracting_components(self.G4))
assert_equal(ac, [])
# NOTE(review): method name has a typo ("attacting"); nose still
# collects it because it matches the test_* pattern.
def test_number_attacting_components(self):
assert_equal(nx.number_attracting_components(self.G1), 3)
assert_equal(nx.number_attracting_components(self.G2), 1)
assert_equal(nx.number_attracting_components(self.G3), 2)
assert_equal(nx.number_attracting_components(self.G4), 0)
def test_is_attracting_component(self):
assert_false(nx.is_attracting_component(self.G1))
assert_false(nx.is_attracting_component(self.G2))
assert_false(nx.is_attracting_component(self.G3))
# A subgraph consisting of exactly one attracting cycle qualifies.
g2 = self.G3.subgraph([1, 2])
assert_true(nx.is_attracting_component(g2))
assert_false(nx.is_attracting_component(self.G4))
def test_connected_raise(self):
# All three APIs are directed-only and must reject undirected graphs.
G = nx.Graph()
assert_raises(NetworkXNotImplemented, nx.attracting_components, G)
assert_raises(NetworkXNotImplemented, nx.number_attracting_components, G)
assert_raises(NetworkXNotImplemented, nx.is_attracting_component, G)
# deprecated
assert_raises(NetworkXNotImplemented, nx.attracting_component_subgraphs, G)
|
valkjsaaa/sl4a
|
refs/heads/master
|
python-build/python-libs/gdata/tests/atom_tests/mock_client_test.py
|
128
|
#!/usr/bin/python
#
# Copyright (C) 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Module author metadata (gdata project convention).
__author__ = 'j.s@google.com (Jeff Scudder)'
import unittest
import atom.mock_http
import atom.http
# Tests for atom.mock_http.MockHttpClient: canned responses, record/replay,
# and the error raised for requests with no recording.
class MockHttpClientUnitTest(unittest.TestCase):
def setUp(self):
self.client = atom.mock_http.MockHttpClient()
# NOTE(review): method name has a typo ("Repond"); unittest still
# collects it because it starts with "test".
def testRepondToGet(self):
# NOTE(review): uses atom.http_interface, which is not imported directly
# here — presumably pulled in transitively by atom.mock_http; verify.
mock_response = atom.http_interface.HttpResponse(body='Hooray!',
status=200, reason='OK')
self.client.add_response(mock_response, 'GET',
'http://example.com/hooray')
response = self.client.request('GET', 'http://example.com/hooray')
# The request itself must be recorded, and the canned response returned.
self.assertEquals(len(self.client.recordings), 1)
self.assertEquals(response.status, 200)
self.assertEquals(response.read(), 'Hooray!')
# NOTE(review): this test performs a live HTTP request to google.com and
# will fail without network access.
def testRecordResponse(self):
# Turn on pass-through record mode.
self.client.real_client = atom.http.ProxiedHttpClient()
live_response = self.client.request('GET',
'http://www.google.com/base/feeds/snippets?max-results=1')
live_response_body = live_response.read()
self.assertEquals(live_response.status, 200)
self.assertEquals(live_response_body.startswith('<?xml'), True)
# Requery for the now canned data.
self.client.real_client = None
canned_response = self.client.request('GET',
'http://www.google.com/base/feeds/snippets?max-results=1')
# The canned response should be the stored response.
canned_response_body = canned_response.read()
self.assertEquals(canned_response.status, 200)
self.assertEquals(canned_response_body, live_response_body)
def testUnrecordedRequest(self):
# A request with no matching recording must raise NoRecordingFound.
try:
self.client.request('POST', 'http://example.org')
self.fail()
except atom.mock_http.NoRecordingFound:
pass
# Build the test suite for external runners.
# NOTE(review): unittest.makeSuite is deprecated (removed in Python 3.13);
# unittest.TestLoader().loadTestsFromTestCase is the modern equivalent.
def suite():
return unittest.TestSuite(
(unittest.makeSuite(MockHttpClientUnitTest,'test'),))
# Allow running this test file directly.
if __name__ == '__main__':
unittest.main()
|
mtekel/libcloud
|
refs/heads/trunk
|
libcloud/test/loadbalancer/test_softlayer.py
|
37
|
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import unittest
from libcloud.utils.py3 import httplib
from libcloud.utils.py3 import xmlrpclib
from libcloud.compute.base import NodeLocation
from libcloud.loadbalancer.base import Member, Algorithm
from libcloud.loadbalancer.drivers.softlayer import SoftlayerLBDriver
from libcloud.loadbalancer.types import State
from libcloud.test import MockHttpTestCase
from libcloud.test.secrets import SOFTLAYER_PARAMS
from libcloud.test.file_fixtures import LoadBalancerFileFixtures
# Tests for libcloud's SoftLayer load-balancer driver. All HTTP traffic is
# routed to SoftLayerMockHttp (below), which replays XML fixtures, so the
# asserted ids/datacenters/limits mirror the fixture contents.
class SoftlayerLBTests(unittest.TestCase):
def setUp(self):
# Swap the driver's connection classes for the mock before constructing it.
SoftlayerLBDriver.connectionCls.conn_classes = (SoftLayerMockHttp,
SoftLayerMockHttp)
SoftLayerMockHttp.type = None
self.driver = SoftlayerLBDriver(*SOFTLAYER_PARAMS)
def test_list_protocols(self):
protocols = self.driver.list_protocols()
self.assertEqual(len(protocols), 6)
self.assertTrue('tcp' in protocols)
self.assertTrue('http' in protocols)
def test_list_balancers(self):
balancers = self.driver.list_balancers()
self.assertEqual(len(balancers), 2)
self.assertEqual(balancers[0].id, '76185')
self.assertEqual(balancers[0].extra['datacenter'], 'dal05')
self.assertEqual(balancers[0].extra['connection_limit'], 50)
self.assertEqual(balancers[1].id, '76265')
self.assertEqual(balancers[1].extra['datacenter'], 'par01')
self.assertEqual(balancers[1].extra['connection_limit'], 50)
def test_get_balancer(self):
balancer = self.driver.get_balancer(balancer_id='76185')
self.assertEqual(balancer.id, '76185')
self.assertEqual(balancer.state, State.UNKNOWN)
self.assertEqual(balancer.extra['datacenter'], 'dal05')
self.assertEqual(balancer.extra['protocol'], 'http')
self.assertEqual(balancer.extra['algorithm'], Algorithm.ROUND_ROBIN)
def test_balancer_list_members(self):
balancer = self.driver.get_balancer(balancer_id='76185')
members = balancer.list_members()
self.assertEqual(len(members), 3)
self.assertEqual(members[0].balancer, balancer)
self.assertEqual(members[0].id, '226227')
self.assertEqual(members[0].ip, '10.126.5.34')
self.assertEqual(members[1].balancer, balancer)
self.assertEqual(members[1].id, '226229')
self.assertEqual(members[1].ip, '10.126.5.35')
def test_balancer_attach_member(self):
balancer = self.driver.get_balancer(balancer_id='76185')
member = balancer.attach_member(Member(None, ip='10.126.5.34',
port=8000))
self.assertEqual(member.id, '226227')
self.assertEqual(member.ip, '10.126.5.34')
self.assertEqual(member.port, 8000)
def test_balancer_detach_member(self):
balancer = self.driver.get_balancer(balancer_id='76265')
member = Member('226227', None, None)
self.assertTrue(balancer.detach_member(member))
def test_destroy_balancer(self):
balancer = self.driver.get_balancer(balancer_id='76185')
self.assertTrue(self.driver.destroy_balancer(balancer))
def test_ex_list_balancer_packages(self):
packages = self.driver.ex_list_balancer_packages()
self.assertEqual(len(packages), 9)
def test_ex_place_balancer_order(self):
packages = self.driver.ex_list_balancer_packages()
# Pick the 50-connection package from the fixture data.
lb_package = [p for p in packages if p.capacity == 50][0]
self.assertTrue(self.driver.ex_place_balancer_order(
lb_package, NodeLocation('dal05', None, None, None)))
# Mock HTTP layer for the SoftLayer XML-RPC API. Every request is routed
# through _xmlrpc, which decodes the XML-RPC call and dispatches to a
# handler method named "_xmlrpc_<url-with-slashes-as-underscores>_<method>",
# each of which replays a canned XML fixture.
class SoftLayerMockHttp(MockHttpTestCase):
fixtures = LoadBalancerFileFixtures('softlayer')
# NOTE(review): the `type` parameter shadows the builtin; the signature
# appears to be dictated by the MockHttpTestCase hook — confirm before
# renaming.
def _get_method_name(self, type, use_param, qs, path):
return "_xmlrpc"
def _xmlrpc(self, method, url, body, headers):
# Decode the XML-RPC request body to recover the remote method name,
# then dispatch to the fixture handler derived from URL + method.
params, meth_name = xmlrpclib.loads(body)
url = url.replace("/", "_")
meth_name = "%s_%s" % (url, meth_name)
return getattr(self, meth_name)(method, url, body, headers)
def _xmlrpc_v3_SoftLayer_Account_getAdcLoadBalancers(
self, method, url, body, headers):
body = self.fixtures.load(
'v3__SoftLayer_Account_getAdcLoadBalancers.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _xmlrpc_v3_SoftLayer_Billing_Item_cancelService(self, method, url,
body, headers):
body = self.fixtures.load(
'v3__SoftLayer_Billing_Item_cancelService.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _xmlrpc_v3_SoftLayer_Location_Datacenter_getDatacenters(
self, method, url, body, headers):
body = self.fixtures.load(
'v3__SoftLayer_Location_Datacenter_getDatacenters.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _xmlrpc_v3_SoftLayer_Network_Application_Delivery_Controller_LoadBalancer_Service_deleteObject(
self, method, url, body, headers):
body = self.fixtures.load(
'v3__SoftLayer_Network_Application_Delivery_Controller_'
'LoadBalancer_Service_deleteObject.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _xmlrpc_v3_SoftLayer_Network_Application_Delivery_Controller_LoadBalancer_VirtualIpAddress_editObject(
self, method, url, body, headers):
body = self.fixtures.load(
'v3__SoftLayer_Network_Application_Delivery_Controller_'
'LoadBalancer_VirtualIpAddress_editObject.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _xmlrpc_v3_SoftLayer_Network_Application_Delivery_Controller_LoadBalancer_VirtualIpAddress_getBillingItem(
self, method, url, body, headers):
body = self.fixtures.load(
'v3__SoftLayer_Network_Application_Delivery_Controller_'
'LoadBalancer_VirtualIpAddress_getBillingItem.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _xmlrpc_v3_SoftLayer_Network_Application_Delivery_Controller_LoadBalancer_VirtualIpAddress_getObject(
self, method, url, body, headers):
body = self.fixtures.load(
'v3__SoftLayer_Network_Application_Delivery_Controller_'
'LoadBalancer_VirtualIpAddress_getObject.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _xmlrpc_v3_SoftLayer_Network_Subnet_IpAddress_getByIpAddress(
self, method, url, body, headers):
body = self.fixtures.load(
'v3__SoftLayer_Network_Subnet_IpAddress_getByIpAddress.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _xmlrpc_v3_SoftLayer_Product_Order_placeOrder(self, method, url, body,
headers):
body = self.fixtures.load(
'v3__SoftLayer_Product_Order_placeOrder.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _xmlrpc_v3_SoftLayer_Product_Package_getItems(self, method, url, body,
headers):
body = self.fixtures.load(
'v3__SoftLayer_Product_Package_getItems.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
if __name__ == "__main__":
sys.exit(unittest.main())
|
hydroshare/hydroshare_drupal
|
refs/heads/master
|
sites/all/modules/proj4js/lib/proj4js/tools/mkpjcat.py
|
250
|
#!/usr/bin/env python
# Regenerates the Proj4js definition catalogues (NOTE: Python 2 script).
# Usage: mkpjcat.py [resources_dir] [target_dir]
import sys
# Make the sibling pjjs module importable when run from this directory.
sys.path.append(".")
import pjjs
# Defaults, overridable by the first and second positional arguments.
resourcesDirectory = "catalogues"
targetDirectory = "../lib/defs"
if len(sys.argv) > 1:
resourcesDirectory = sys.argv[1]
if len(sys.argv) > 2:
targetDirectory = sys.argv[2]
print "Generating Proj4js catalogues."
# Clean stale output first, then regenerate the catalogues.
pjjs.pjcat2js_clean(resourcesDirectory,targetDirectory)
pjjs.pjcat2js_run(resourcesDirectory,targetDirectory)
print "Done."
|
automl/paramsklearn
|
refs/heads/master
|
tests/components/feature_preprocessing/test_kernel_pca.py
|
1
|
import unittest
from sklearn.linear_model import RidgeClassifier
from ParamSklearn.components.feature_preprocessing.kernel_pca import \
KernelPCA
from ParamSklearn.util import _test_preprocessing, PreprocessingTestCase, \
get_dataset
import sklearn.metrics
# Tests for the KernelPCA preprocessing component: dense and sparse
# transforms must preserve the sample count, and the transformed features
# must be usable by a downstream classifier.
class KernelPCAComponentTest(PreprocessingTestCase):
def test_default_configuration(self):
transformation, original = _test_preprocessing(KernelPCA,
dataset='digits')
# Same number of samples, and the transform must not be all zeros.
self.assertEqual(transformation.shape[0], original.shape[0])
self.assertFalse((transformation == 0).all())
def test_default_configuration_sparse(self):
transformation, original = _test_preprocessing(KernelPCA,
make_sparse=True,
dataset='digits')
self.assertEqual(transformation.shape[0], original.shape[0])
self.assertFalse((transformation == 0).all())
def test_default_configuration_classify(self):
# Repeated to guard against run-to-run variation in the pipeline.
for i in range(5):
X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits',
make_sparse=False)
configuration_space = KernelPCA.get_hyperparameter_search_space()
default = configuration_space.get_default_configuration()
# Instantiate with the default hyperparameters, dropping unset ones.
preprocessor = KernelPCA(random_state=1,
**{hp_name: default[hp_name] for hp_name in
default if default[hp_name] is not None})
preprocessor.fit(X_train, Y_train)
X_train_trans = preprocessor.transform(X_train)
X_test_trans = preprocessor.transform(X_test)
# fit a classifier on top
classifier = RidgeClassifier()
predictor = classifier.fit(X_train_trans, Y_train)
predictions = predictor.predict(X_test_trans)
accuracy = sklearn.metrics.accuracy_score(predictions, Y_test)
# NOTE(review): pins an exact accuracy; brittle across
# sklearn/library versions — confirm when upgrading dependencies.
self.assertAlmostEqual(accuracy, 0.096539162112932606)
@unittest.skip("Always returns float64")
def test_preprocessing_dtype(self):
super(KernelPCAComponentTest,
self)._test_preprocessing_dtype(KernelPCA)
|
lokeshjindal15/pd-gem5
|
refs/heads/master
|
configs/topologies/Cluster.py
|
41
|
# Copyright (c) 2012 Advanced Micro Devices, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Jason Power
from BaseTopology import BaseTopology
class Cluster(BaseTopology):
    """A cluster is a group of nodes which are all one hop from each other.

    Clusters can also contain other clusters.  When creating this kind of
    topology, return a single cluster (usually the root cluster) from
    create_system in configs/ruby/<protocol>.py.
    """

    # Class-level counters shared by *all* Cluster instances so that link
    # and router IDs are globally unique across the whole topology.
    _num_int_links = 0
    _num_ext_links = 0
    _num_routers = 0

    # Below methods for auto counting
    @classmethod
    def num_int_links(cls):
        # Post-increment: hand out the current ID, then reserve the next one.
        cls._num_int_links += 1
        return cls._num_int_links - 1

    @classmethod
    def num_ext_links(cls):
        cls._num_ext_links += 1
        return cls._num_ext_links - 1

    @classmethod
    def num_routers(cls):
        cls._num_routers += 1
        return cls._num_routers - 1

    def __init__(self, intBW=0, extBW=0, intLatency=0, extLatency=0):
        """internalBandwidth is bandwidth of all links within the cluster;
        externalBandwidth is bandwidth from this cluster to any cluster
        connecting to it; internal/externalLatency are similar.

        **** When creating a cluster with sub-clusters, the sub-cluster
        external bandwidth overrides the internal bandwidth of the
        super cluster.
        """
        self.nodes = []
        self.router = None # created in makeTopology
        self.intBW = intBW
        self.extBW = extBW
        self.intLatency = intLatency
        self.extLatency = extLatency

    def add(self, node):
        # node may be a plain controller or a nested Cluster.
        self.nodes.append(node)

    def makeTopology(self, options, network, IntLink, ExtLink, Router):
        """ Recursively make all of the links and routers
        """
        # make a router to connect all of the nodes
        self.router = Router(router_id=self.num_routers())
        network.routers.append(self.router)
        for node in self.nodes:
            if type(node) == Cluster:
                # Sub-cluster: build it first, then wire its router to ours
                # with an internal link.
                node.makeTopology(options, network, IntLink, ExtLink, Router)
                # connect this cluster to the router
                link = IntLink(link_id=self.num_int_links(), node_a=self.router,
                               node_b=node.router)
                # The sub-cluster's external b/w (if set) takes precedence
                # over this cluster's internal b/w; latency works the same.
                if node.extBW:
                    link.bandwidth_factor = node.extBW
                # if there is an internal b/w for this node
                # and no ext b/w to override
                elif self.intBW:
                    link.bandwidth_factor = self.intBW
                if node.extLatency:
                    link.latency = node.extLatency
                elif self.intLatency:
                    link.latency = self.intLatency
                network.int_links.append(link)
            else:
                # node is just a controller,
                # connect it to the router via a ext_link
                link = ExtLink(link_id=self.num_ext_links(), ext_node=node,
                               int_node=self.router)
                if self.intBW:
                    link.bandwidth_factor = self.intBW
                if self.intLatency:
                    link.latency = self.intLatency
                network.ext_links.append(link)

    def __len__(self):
        # Leaf controllers count as one each; nested Clusters contribute
        # their own recursive length.
        return len([i for i in self.nodes if type(i) != Cluster]) + \
            sum([len(i) for i in self.nodes if type(i) == Cluster])
|
toast38coza/WeddingSite
|
refs/heads/master
|
events/migrations/0002_auto_20150207_0721.py
|
1
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    # Auto-generated schema migration: makes the Event model's ``address``
    # and ``picture`` fields optional (blank=True) and attaches help text
    # to ``address``.

    dependencies = [
        ('events', '0001_initial'),
    ]

    operations = [
        migrations.AlterField(
            model_name='event',
            name='address',
            field=models.TextField(help_text=b'Address where this event will be held', blank=True),
        ),
        migrations.AlterField(
            model_name='event',
            name='picture',
            field=models.ImageField(upload_to=b'events', blank=True),
        ),
    ]
|
hassanabidpk/django
|
refs/heads/master
|
django/core/cache/utils.py
|
585
|
from __future__ import unicode_literals
import hashlib
from django.utils.encoding import force_bytes
from django.utils.http import urlquote
TEMPLATE_FRAGMENT_KEY_TEMPLATE = 'template.cache.%s.%s'
def make_template_fragment_key(fragment_name, vary_on=None):
    """Return the cache key for a ``{% cache %}`` template fragment.

    The key is namespaced by *fragment_name* plus an MD5 digest of the
    colon-joined, URL-quoted *vary_on* values (empty when *vary_on* is None).
    """
    vary_values = () if vary_on is None else vary_on
    digest = hashlib.md5(
        force_bytes(':'.join(urlquote(value) for value in vary_values)))
    return TEMPLATE_FRAGMENT_KEY_TEMPLATE % (fragment_name, digest.hexdigest())
|
deisi/home-assistant
|
refs/heads/master
|
homeassistant/components/alarm_control_panel/__init__.py
|
6
|
"""
Component to interface with an alarm control panel.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/alarm_control_panel/
"""
import logging
import os
import voluptuous as vol
from homeassistant.const import (
ATTR_CODE, ATTR_CODE_FORMAT, ATTR_ENTITY_ID, SERVICE_ALARM_TRIGGER,
SERVICE_ALARM_DISARM, SERVICE_ALARM_ARM_HOME, SERVICE_ALARM_ARM_AWAY)
from homeassistant.config import load_yaml_config_file
from homeassistant.helpers.config_validation import PLATFORM_SCHEMA # noqa
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.entity_component import EntityComponent
# Component domain; entity IDs look like ``alarm_control_panel.<name>``.
DOMAIN = 'alarm_control_panel'
# Seconds between polls for panels whose ``should_poll`` is true.
SCAN_INTERVAL = 30
ENTITY_ID_FORMAT = DOMAIN + '.{}'
# Maps each alarm service name to the AlarmControlPanel method it invokes.
SERVICE_TO_METHOD = {
    SERVICE_ALARM_DISARM: 'alarm_disarm',
    SERVICE_ALARM_ARM_HOME: 'alarm_arm_home',
    SERVICE_ALARM_ARM_AWAY: 'alarm_arm_away',
    SERVICE_ALARM_TRIGGER: 'alarm_trigger'
}
# Entity properties mirrored into the state machine as attributes.
ATTR_TO_PROPERTY = [
    ATTR_CODE,
    ATTR_CODE_FORMAT
]
# Schema shared by all alarm services: optional entity filter and code.
ALARM_SERVICE_SCHEMA = vol.Schema({
    vol.Optional(ATTR_ENTITY_ID): cv.entity_ids,
    vol.Optional(ATTR_CODE): cv.string,
})
def setup(hass, config):
    """Track states and offer events for alarm control panels."""
    component = EntityComponent(
        logging.getLogger(__name__), DOMAIN, hass, SCAN_INTERVAL)
    component.setup(config)

    def handle_alarm_service(service_call):
        """Dispatch a service call to the matching method on each panel."""
        code = service_call.data.get(ATTR_CODE)
        method_name = SERVICE_TO_METHOD[service_call.service]
        for panel in component.extract_from_service(service_call):
            getattr(panel, method_name)(code)
            # Push the new state immediately for panels that are polled.
            if panel.should_poll:
                panel.update_ha_state(True)

    descriptions = load_yaml_config_file(
        os.path.join(os.path.dirname(__file__), 'services.yaml'))
    for service_name in SERVICE_TO_METHOD:
        hass.services.register(DOMAIN, service_name, handle_alarm_service,
                               descriptions.get(service_name),
                               schema=ALARM_SERVICE_SCHEMA)
    return True
def alarm_disarm(hass, code=None, entity_id=None):
    """Send the alarm the command for disarm."""
    # Only include keys whose values are truthy, matching the service schema.
    data = {key: value
            for key, value in ((ATTR_CODE, code), (ATTR_ENTITY_ID, entity_id))
            if value}
    hass.services.call(DOMAIN, SERVICE_ALARM_DISARM, data)
def alarm_arm_home(hass, code=None, entity_id=None):
    """Send the alarm the command for arm home."""
    # Only include keys whose values are truthy, matching the service schema.
    data = {key: value
            for key, value in ((ATTR_CODE, code), (ATTR_ENTITY_ID, entity_id))
            if value}
    hass.services.call(DOMAIN, SERVICE_ALARM_ARM_HOME, data)
def alarm_arm_away(hass, code=None, entity_id=None):
    """Send the alarm the command for arm away."""
    # Only include keys whose values are truthy, matching the service schema.
    data = {key: value
            for key, value in ((ATTR_CODE, code), (ATTR_ENTITY_ID, entity_id))
            if value}
    hass.services.call(DOMAIN, SERVICE_ALARM_ARM_AWAY, data)
def alarm_trigger(hass, code=None, entity_id=None):
    """Send the alarm the command for trigger."""
    # Only include keys whose values are truthy, matching the service schema.
    data = {key: value
            for key, value in ((ATTR_CODE, code), (ATTR_ENTITY_ID, entity_id))
            if value}
    hass.services.call(DOMAIN, SERVICE_ALARM_TRIGGER, data)
# pylint: disable=no-self-use
class AlarmControlPanel(Entity):
    """An abstract class for alarm control devices.

    Platform implementations override the ``alarm_*`` methods; the defaults
    raise NotImplementedError.
    """

    @property
    def code_format(self):
        """Regex for code format or None if no code is required."""
        return None

    def alarm_disarm(self, code=None):
        """Send disarm command."""
        raise NotImplementedError()

    def alarm_arm_home(self, code=None):
        """Send arm home command."""
        raise NotImplementedError()

    def alarm_arm_away(self, code=None):
        """Send arm away command."""
        raise NotImplementedError()

    def alarm_trigger(self, code=None):
        """Send alarm trigger command."""
        raise NotImplementedError()

    @property
    def state_attributes(self):
        """Return the state attributes."""
        # Expose the code format so frontends know whether to prompt for a
        # code and which pattern it must match.
        state_attr = {
            ATTR_CODE_FORMAT: self.code_format,
        }
        return state_attr
|
harshitamistry/calligraRepository
|
refs/heads/master
|
3rdparty/google-breakpad/src/tools/gyp/test/ninja/action_dependencies/gyptest-action-dependencies.py
|
246
|
#!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verify that building an object file correctly depends on running actions in
dependent targets, but not the targets themselves.
"""
import os
import sys
import TestGyp
# NOTE(piman): This test will not work with other generators because:
# - it explicitly tests the optimization, which is not implemented (yet?) on
# other generators
# - it relies on the exact path to output object files, which is generator
# dependent, and actually, relies on the ability to build only that object file,
# which I don't think is available on all generators.
# TODO(piman): Extend to other generators when possible.
test = TestGyp.TestGyp(formats=['ninja'])

test.run_gyp('action_dependencies.gyp', chdir='src')

chdir = 'relocate/src'
test.relocate('src', chdir)

# Object-file extension differs per toolchain.
objext = '.obj' if sys.platform == 'win32' else '.o'

# Build only b's object file: a's actions must run first (to generate b.c's
# inputs), but neither static library should be produced as a side effect.
test.build('action_dependencies.gyp',
           os.path.join('obj', 'b.b' + objext),
           chdir=chdir)

# The 'a' actions should be run (letting b.c compile), but the a static library
# should not be built.
test.built_file_must_not_exist('a', type=test.STATIC_LIB, chdir=chdir)
test.built_file_must_not_exist('b', type=test.STATIC_LIB, chdir=chdir)
test.built_file_must_exist(os.path.join('obj', 'b.b' + objext), chdir=chdir)

test.build('action_dependencies.gyp',
           os.path.join('obj', 'c.c' + objext),
           chdir=chdir)

# 'a' and 'b' should be built, so that the 'c' action succeeds, letting c.c
# compile
test.built_file_must_exist('a', type=test.STATIC_LIB, chdir=chdir)
test.built_file_must_exist('b', type=test.EXECUTABLE, chdir=chdir)
test.built_file_must_exist(os.path.join('obj', 'c.c' + objext), chdir=chdir)

test.pass_test()
|
openstack/poppy
|
refs/heads/master
|
poppy/dns/rackspace/services.py
|
2
|
# Copyright (c) 2014 Rackspace, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import random
import re
try:
set
except NameError: # noqa pragma: no cover
from sets import Set as set # noqa pragma: no cover
from oslo_log import log
import pyrax.exceptions as exc
from poppy.dns import base
LOG = log.getLogger(__name__)
class ServicesController(base.ServicesBase):
    def __init__(self, driver):
        """Store the DNS driver and its Rackspace Cloud DNS client."""
        super(ServicesController, self).__init__(driver)
        self.client = driver.client
def _get_subdomain(self, subdomain_name):
"""Returns a subdomain, if it does not exist, create it
:param subdomain_name
:return subdomain
"""
try:
LOG.info("Fetching DNS Record - {0}".format(subdomain_name))
subdomain = self.client.find(name=subdomain_name)
except exc.NotFound:
subdomain = self.client.create(
name=subdomain_name,
emailAddress=self._driver.rackdns_conf.email,
ttl=900)
return subdomain
    def _create_cname_records(self, links):
        """Create CNAME records mapping operator URLs to provider URLs.

        :param links: map from (domain_name, certificate, old_operator_url)
            tuples to provider access URLs
        :return dns_links: map from those tuples to dicts with
            ``provider_url`` and ``operator_url``
        """
        cdn_domain_name = self._driver.rackdns_conf.url
        shard_prefix = self._driver.rackdns_conf.shard_prefix
        num_shards = self._driver.rackdns_conf.num_shards
        # randomly select a shard
        shard_id = random.randint(1, num_shards)
        # ex. cdnXXX.altcdn.com
        subdomain_name = '{0}{1}.{2}'.format(shard_prefix, shard_id,
                                             cdn_domain_name)
        # create CNAME record for adding
        cname_records = []
        dns_links = {}
        shared_ssl_subdomain_name = None
        for link in links:
            # pick out shared ssl domains here
            domain_name, certificate, old_operator_url = link
            if certificate == "shared":
                # Shared-ssl: the record lives directly under the domain's
                # own parent (everything after the first label).
                shared_ssl_subdomain_name = (
                    '.'.join(domain_name.split('.')[1:]))
                # perform shared ssl cert logic
                name = domain_name
            else:
                if old_operator_url is not None:
                    # A preferred operator URL exists (e.g. http -> https
                    # upgrade): reuse/update that CNAME chain instead of
                    # creating a new sharded one.
                    created_dns_links = self._create_preferred_cname_record(
                        domain_name,
                        certificate,
                        old_operator_url,
                        links[link]
                    )
                    dns_links.update(created_dns_links)
                    continue
                else:
                    name = '{0}.{1}'.format(domain_name, subdomain_name)
            cname_record = {'type': 'CNAME',
                            'name': name,
                            'data': links[link],
                            'ttl': 300}
            dns_links[link] = {
                'provider_url': links[link],
                'operator_url': name
            }
            if certificate == "shared":
                # Shared-ssl records are added one at a time, to the
                # per-domain subdomain computed above.
                LOG.info("Creating Shared SSL DNS Record - {0}".format(name))
                shared_ssl_subdomain = self._get_subdomain(
                    shared_ssl_subdomain_name)
                shared_ssl_subdomain.add_records([cname_record])
            else:
                cname_records.append(cname_record)
        # add the cname records
        if len(cname_records) > 0:
            subdomain = self._get_subdomain(subdomain_name)
            LOG.info("Creating DNS Record - {0}".format(cname_records))
            subdomain.add_records(cname_records)
        return dns_links
def _create_preferred_cname_record(
self, domain_name, certificate, operator_url, provider_url):
"""Creates a CNAME chain with designated operator_url
:param domain_name: domain name that CNAME chain is created for
:param certificate: certificate type
:operator_url: The preferred operator url
:provider_url: provider url
:return dns_links: Map from provider access URL to DNS access URL
"""
shard_prefix = self._driver.rackdns_conf.shard_prefix
cdn_domain_name = self._driver.rackdns_conf.url
dns_links = {}
# verify sub-domain exists
regex_match = re.match(
r'^.*(' + shard_prefix + '[0-9]+\.' +
re.escape(cdn_domain_name) + ')$',
operator_url
)
my_sub_domain_name = regex_match.groups(-1)[0]
if my_sub_domain_name is None:
raise ValueError('Unable to parse old operator url')
# add to cname record
my_sub_domain = self._get_subdomain(my_sub_domain_name)
LOG.info(
"Updating dns record {0}. "
"CNAME create/update from {1} to {2}".format(
my_sub_domain_name,
operator_url,
provider_url
)
)
try:
old_dns_record = my_sub_domain.find_record('CNAME', operator_url)
except exc.DomainRecordNotFound:
my_sub_domain.add_records(
[{
'type': 'CNAME',
'name': operator_url,
'data': provider_url,
'ttl': 300
}]
)
else:
my_sub_domain.update_record(old_dns_record, data=provider_url)
dns_links[(domain_name, certificate, operator_url)] = {
'provider_url': provider_url,
'operator_url': operator_url
}
return dns_links
    def _search_cname_record(self, access_url, shared_ssl_flag):
        """Search for the CNAME record backing a DNS access URL.

        :param access_url: DNS Access URL
        :param shared_ssl_flag: flag indicating if this is a shared ssl domain
        :return records: returns matching records, if any
        """
        # extract shard name
        if shared_ssl_flag:
            suffix = self._driver.rackdns_conf.shared_ssl_domain_suffix
        else:
            suffix = self._driver.rackdns_conf.url
        # Note: use rindex to find last occurrence of the suffix
        # e.g. 'www.x.com.cdn42.altcdn.com' with suffix 'altcdn.com'
        # yields shard_name 'cdn42'.
        shard_name = access_url[:access_url.rindex(suffix)-1].split('.')[-1]
        subdomain_name = '.'.join([shard_name, suffix])
        # for sharding is disabled, the suffix is the subdomain_name
        if shared_ssl_flag and (
                self._driver.rackdns_conf.shared_ssl_num_shards == 0):
            subdomain_name = suffix
        # get subdomain
        subdomain = self.client.find(name=subdomain_name)
        # search and find the CNAME record
        LOG.info('Searching DNS records for : {0}'.format(subdomain))
        name = access_url
        record_type = 'CNAME'
        records = self.client.search_records(subdomain, record_type, name)
        return records
def _delete_cname_record(self, access_url, shared_ssl_flag):
"""Delete a CNAME record
:param access_url: DNS Access URL
:param shared_ssl_flag: flag indicating if this is a shared ssl domain
:return error_msg: returns error message, if any
"""
LOG.info('Attempting to delete DNS records for : {0}'.format(
access_url))
records = self._search_cname_record(access_url, shared_ssl_flag)
# delete the record
# we should get one record,
# or none if it has been deleted already
if not records:
LOG.error('DNS record already deleted: {0}'.format(access_url))
elif len(records) > 1:
error_msg = 'Multiple DNS records found: {0}'.format(access_url)
LOG.error(error_msg)
return error_msg
elif len(records) == 1:
LOG.info('Deleting DNS records for : {0}'.format(access_url))
records[0].delete()
return
def _change_cname_record(self, access_url, target_url, shared_ssl_flag):
"""Change a CNAME record
:param access_url: DNS Access URL
:param target_url: Operator Access URL
:param shared_ssl_flag: flag indicating if this is a shared ssl domain
:return error_msg: returns error message, if any
"""
records = self._search_cname_record(access_url, shared_ssl_flag)
# we should get one record, or none if it has been deleted already
if not records:
LOG.error('DNS record not found for: {0}'.format(access_url))
elif len(records) > 1:
LOG.error('Multiple DNS records found: {0}'.format(access_url))
elif len(records) == 1:
LOG.info('Updating DNS record for : {0}'.format(access_url))
records[0].update(data=target_url)
LOG.info('Updated DNS record for : {0}'.format(access_url))
return
def _generate_sharded_domain_name(self, shard_prefix, num_shards, suffix):
"""Generate a sharded domain name based on the scheme:
'{shard_prefix}{a random shard_id}.{suffix}'
:return A string of sharded domain name
"""
if num_shards == 0:
# shard disabled, just use the suffix
yield suffix
else:
# shard enabled, iterate through shards after
# randomly shuffling them
shard_ids = [i for i in range(1, num_shards + 1)]
random.shuffle(shard_ids)
for shard_id in shard_ids:
yield '{0}{1}.{2}'.format(shard_prefix, shard_id, suffix)
    def generate_shared_ssl_domain_suffix(self):
        """Rackspace DNS scheme to generate a shared ssl domain suffix,
        to be used with manager for shared ssl feature.

        NOTE: despite the name and the original doc, this returns a
        *generator* of candidate sharded domain names (because
        ``_generate_sharded_domain_name`` is a generator function),
        not a single string.
        """
        shared_ssl_domain_name = self._generate_sharded_domain_name(
            self._driver.rackdns_conf.shared_ssl_shard_prefix,
            self._driver.rackdns_conf.shared_ssl_num_shards,
            self._driver.rackdns_conf.shared_ssl_domain_suffix)
        return shared_ssl_domain_name
    def create(self, responders):
        """Create CNAME records for a service.

        :param responders: responders from providers
        :return: created responder with per-provider access URLs, or a
            failed responder when any provider or DNS call errored
        """
        providers = []
        for responder in responders:
            for provider in responder:
                providers.append(provider)
        # Fail fast if any provider reported an error.
        for responder in responders:
            for provider_name in responder:
                if 'error' in responder[provider_name]:
                    error_msg = responder[provider_name]['error_detail']
                    error_dict = {
                        'error_msg': error_msg
                    }
                    return self.responder.failed(providers, error_dict)
        # gather the provider urls and cname them
        links = {}
        for responder in responders:
            for provider_name in responder:
                for link in responder[provider_name]['links']:
                    if link['rel'] == 'access_url':
                        # We need to distinguish shared ssl domains, in
                        # which case we will use a different shard prefix
                        # and shard number
                        links[(
                            link['domain'],
                            link.get('certificate', None),
                            None  # new link, no preferred operator url
                        )] = link['href']
        # create CNAME records
        try:
            dns_links = self._create_cname_records(links)
        except Exception as e:
            msg = 'Rackspace DNS Exception: {0}'.format(e)
            error = {
                'error_msg': msg,
                'error_class': e.__class__
            }
            LOG.error(msg)
            return self.responder.failed(providers, error)
        # gather the CNAMED links
        dns_details = {}
        for responder in responders:
            for provider_name in responder:
                access_urls = []
                for link in responder[provider_name]['links']:
                    if link['rel'] == 'access_url':
                        # Look the link back up by the same tuple key used
                        # when building ``links`` above.
                        access_url = {
                            'domain': link['domain'],
                            'provider_url': dns_links[(
                                link['domain'],
                                link.get('certificate', None),
                                None
                            )]['provider_url'],
                            'operator_url': dns_links[(
                                link['domain'],
                                link.get('certificate', None),
                                None
                            )]['operator_url']}
                        # Need to indicate if this access_url is a shared ssl
                        # access url, since it has a different shard_prefix
                        # and num_shard
                        if link.get('certificate', None) == 'shared':
                            access_url['shared_ssl_flag'] = True
                        access_urls.append(access_url)
                dns_details[provider_name] = {'access_urls': access_urls}
        return self.responder.created(dns_details)
    def delete(self, provider_details):
        """Delete CNAME records for a service.

        :param provider_details: map from provider name to stored details
        :return dns_details: map from provider_name to delete results, or a
            failed responder when any record could not be removed
        """
        providers = []
        for provider in provider_details:
            providers.append(provider)
        dns_details = {}
        # Errors are accumulated across all providers/URLs; any error makes
        # the whole call return a failed responder below.
        error_msg = ''
        error_class = None
        for provider_name in provider_details:
            access_urls = provider_details[provider_name].access_urls
            for access_url in access_urls:
                if 'operator_url' in access_url:
                    try:
                        msg = self._delete_cname_record(
                            access_url['operator_url'],
                            access_url.get('shared_ssl_flag', False))
                        if msg:
                            error_msg += msg
                    except exc.NotFound as e:
                        LOG.error('Can not access the subdomain. Please make '
                                  'sure it exists and you have permissions '
                                  'to CDN subdomain {0}'.format(e))
                        error_msg = (error_msg + 'Can not access subdomain . '
                                     'Exception: {0}'.format(e))
                        error_class = e.__class__
                    except Exception as e:
                        LOG.error('Rackspace DNS Exception: {0}'.format(e))
                        error_msg += 'Rackspace DNS ' \
                                     'Exception: {0}'.format(e)
                        error_class = e.__class__
            # format the error message for this provider
            if not error_msg:
                dns_details[provider_name] = self.responder.deleted({})
        # format the error message
        if error_msg:
            error = {
                'error_msg': error_msg,
                'error_class': error_class
            }
            return self.responder.failed(providers, error)
        return dns_details
    def _update_added_domains(self, responders, added_domains):
        """Create DNS entries for domains newly added to a service.

        :param responders: responders from providers
        :param added_domains: set of domain names added by the update
        :return dns_details: map from provider name to new access URLs, or a
            failed responder on DNS errors
        """
        # if no domains are added, return
        dns_details = {}
        if not added_domains:
            for responder in responders:
                for provider_name in responder:
                    dns_details[provider_name] = {'access_urls': []}
            return dns_details
        providers = []
        for responder in responders:
            for provider in responder:
                providers.append(provider)
        # gather the provider links for the added domains
        links = {}
        for responder in responders:
            for provider_name in responder:
                for link in responder[provider_name]['links']:
                    domain_added = (link['rel'] == 'access_url' and
                                    link['domain'] in added_domains)
                    if domain_added:
                        links[(
                            link['domain'],
                            link.get('certificate', None),
                            link.get('old_operator_url', None)
                        )] = link['href']
        # create CNAME records for added domains
        try:
            dns_links = self._create_cname_records(links)
        except Exception as e:
            error_msg = 'Rackspace DNS Exception: {0}'.format(e)
            error_class = e.__class__
            error = {
                'error_msg': error_msg,
                'error_class': error_class
            }
            LOG.error(error_msg)
            return self.responder.failed(providers, error)
        # gather the CNAMED links for added domains
        for responder in responders:
            for provider_name in responder:
                access_urls = []
                for link in responder[provider_name]['links']:
                    if link['domain'] in added_domains:
                        # Look each link back up by the same tuple key that
                        # was used to build ``links`` above.
                        access_url = {
                            'domain': link['domain'],
                            'provider_url':
                                dns_links[(link['domain'],
                                           link.get('certificate', None),
                                           link.get('old_operator_url', None)
                                           )]['provider_url'],
                            'operator_url':
                                dns_links[(link['domain'],
                                           link.get('certificate', None),
                                           link.get('old_operator_url', None)
                                           )]['operator_url']}
                        # Need to indicate if this access_url is a shared ssl
                        # access url, since it has a different shard_prefix
                        # and num_shard
                        if link.get('certificate', None) == 'shared':
                            access_url['shared_ssl_flag'] = True
                        access_urls.append(access_url)
                dns_details[provider_name] = {'access_urls': access_urls}
        return dns_details
    def _update_removed_domains(self, provider_details, removed_domains):
        """Delete DNS entries for domains removed from a service.

        :param provider_details: map from provider name to stored details
        :param removed_domains: set of domain names removed by the update
        :return dns_details: map from provider name to delete results, or a
            failed responder when deletion errored
        """
        # if no domains are removed, return
        dns_details = {}
        if not removed_domains:
            for provider_name in provider_details:
                dns_details[provider_name] = {'access_urls': []}
            return dns_details
        providers = []
        for provider in provider_details:
            providers.append(provider)
        # delete the records for deleted domains
        error_msg = ''
        error_class = None
        for provider_name in provider_details:
            provider_detail = provider_details[provider_name]
            for access_url in provider_detail.access_urls:
                # log delivery access url does not have domain field
                # NOTE(review): URLs *without* a 'domain' key fall through to
                # the delete below -- confirm that is the intended behavior.
                if 'domain' in access_url and (
                        access_url['domain'] not in removed_domains):
                    continue
                try:
                    msg = self._delete_cname_record(access_url['operator_url'],
                                                    access_url.get(
                                                        'shared_ssl_flag',
                                                        False))
                    if msg:
                        error_msg = error_msg + msg
                except exc.NotFound as e:
                    LOG.error('Can not access the subdomain. Please make sure'
                              ' it exists and you have permissions to CDN '
                              'subdomain {0}'.format(e))
                    error_msg = (error_msg + 'Can not access subdomain. '
                                 'Exception: {0}'.format(e))
                    error_class = e.__class__
                except Exception as e:
                    LOG.error('Exception: {0}'.format(e))
                    error_msg = error_msg + 'Exception: {0}'.format(e)
                    error_class = e.__class__
            # format the success message for this provider
            if not error_msg:
                dns_details[provider_name] = self.responder.deleted({})
        # format the error message
        if error_msg:
            error_dict = {
                'error_msg': error_msg,
                'error_class': error_class
            }
            return self.responder.failed(providers, error_dict)
        return dns_details
    def update(self, service_old, service_updates, responders):
        """Update CNAME records for a service.

        Diffs the old and new domain sets, creates records for added
        domains, deletes records for removed domains, and rebuilds the
        per-provider access-URL lists (including placeholders for
        http -> https+san upgraded domains).

        :param service_old: previous service state
        :param service_updates: updates to service state
        :param responders: responders from providers
        :return: updated responder with per-provider access URLs, the old
            access-URL map when nothing changed, or a failed responder
        """
        # get old domains
        old_domains = set()
        old_access_urls_map = {}
        project_id = service_old.project_id
        service_id = service_old.service_id
        provider_details = service_old.provider_details
        for provider_name in provider_details:
            provider_detail = provider_details[provider_name]
            access_urls = provider_detail.access_urls
            old_access_urls_map[provider_name] = {'access_urls': access_urls}
            for access_url in access_urls:
                if 'domain' in access_url:
                    old_domains.add(access_url['domain'])
        # if there is a provider error, don't try dns update
        for responder in responders:
            for provider_name in responder:
                if 'error' in responder[provider_name]:
                    return old_access_urls_map
        # get new_domains
        new_domains = set()
        for responder in responders:
            for provider_name in responder:
                links = responder[provider_name]['links']
                for link in links:
                    new_domains.add(link['domain'])
        # find http -> https+san upgrade domains
        upgraded_domains = set()
        for domain in service_updates.domains:
            for old_domain in service_old.domains:
                if old_domain.domain == domain.domain:
                    if (
                        old_domain.protocol == 'http' and
                        domain.protocol == 'https' and
                        domain.certificate == 'san'
                    ):
                        upgraded_domains.add(domain.domain)
                    break
        # if domains have not been updated, return
        if not service_updates.domains:
            return old_access_urls_map
        # force dns update when we encounter an upgraded domain
        common_domains = new_domains.intersection(old_domains)
        for domain_name in common_domains:
            upgrade = False
            for responder in responders:
                for provider_name in responder:
                    links = responder[provider_name]['links']
                    for link in links:
                        # NOTE(review): this indexes link['old_operator_url']
                        # directly while code above uses .get() -- confirm
                        # every link carries that key on this path.
                        if (
                            link['domain'] == domain_name and
                            link.get('certificate', None) == 'san' and
                            link['href'] is not None and
                            link['old_operator_url'] is not None
                        ):
                            upgrade = True
            # treat upgraded domains as "new" so their DNS is recreated
            if upgrade is True:
                old_domains.remove(domain_name)
        # if the old set of domains is the same as new set of domains return
        if old_domains == new_domains:
            return old_access_urls_map
        # get the list of added, removed and common domains
        added_domains = new_domains.difference(old_domains)
        removed_domains = old_domains.difference(new_domains)
        common_domains = new_domains.intersection(old_domains)
        # prevent dns records for upgrade domains from being deleted
        retain_domains = removed_domains.intersection(upgraded_domains)
        removed_domains = removed_domains.difference(retain_domains)
        LOG.info("Added Domains : {0} on service_id : {1} "
                 "for project_id: {2}".format(added_domains,
                                              service_id,
                                              project_id))
        LOG.info("Removed Domains : {0} on service_id : {1} "
                 "for project_id: {2}".format(removed_domains,
                                              service_id,
                                              project_id))
        LOG.info("Common Domains : {0} on service_id : {1} "
                 "for project_id: {2}".format(common_domains,
                                              service_id,
                                              project_id))
        # add new domains
        dns_links = self._update_added_domains(responders, added_domains)
        # remove CNAME records for deleted domains
        provider_details = service_old.provider_details
        self._update_removed_domains(provider_details, removed_domains)
        providers = []
        for responder in responders:
            for provider in responder:
                providers.append(provider)
        # in case of DNS error, return
        for provider_name in dns_links:
            if 'error' in dns_links[provider_name]:
                error_msg = dns_links[provider_name]['error_detail']
                error_dict = {
                    'error_msg': error_msg
                }
                if 'error_class' in dns_links[provider_name]:
                    error_dict['error_class'] = \
                        dns_links[provider_name]['error_class']
                return self.responder.failed(providers, error_dict)
        # gather the CNAMED links and remove stale links
        dns_details = {}
        for responder in responders:
            for provider_name in responder:
                provider_detail = service_old.provider_details[provider_name]
                old_access_urls = provider_detail.access_urls
                new_access_urls = dns_links[provider_name]['access_urls']
                access_urls = []
                for link in responder[provider_name]['links']:
                    if link['domain'] in removed_domains:
                        continue
                    elif link['domain'] in added_domains:
                        # iterate through new access urls and get access url
                        operator_url = None
                        for new_access_url in new_access_urls:
                            if new_access_url['domain'] == link['domain']:
                                operator_url = new_access_url['operator_url']
                                break
                        access_url = {
                            'domain': link['domain'],
                            'provider_url': link['href'],
                            'operator_url': operator_url}
                        # if it is a shared ssl access url, we need to store it
                        # NOTE(review): ``new_access_url`` here is the last
                        # loop value -- if no entry matched this domain the
                        # name may be stale or unbound; confirm intended.
                        if new_access_url.get('shared_ssl_flag', False):
                            access_url['shared_ssl_flag'] = True
                        access_urls.append(access_url)
                    elif link['domain'] in common_domains:
                        # iterate through old access urls and get access url
                        operator_url = None
                        old_access_url = None
                        for old_access_url in old_access_urls:
                            if old_access_url['domain'] == link['domain']:
                                operator_url = old_access_url['operator_url']
                                break
                        access_url = {
                            'domain': link['domain'],
                            'provider_url': link['href'],
                            'operator_url': operator_url}
                        # if it is a shared ssl access url, we need to store it
                        # NOTE(review): ``old_access_url`` is None when
                        # old_access_urls is empty -- this .get() would then
                        # raise; confirm callers guarantee non-empty lists.
                        if old_access_url.get('shared_ssl_flag', False):
                            access_url['shared_ssl_flag'] = True
                        access_urls.append(access_url)
                # find upgraded domains and create placeholders for them
                for domain in service_updates.domains:
                    is_upgrade = False
                    for old_domain in service_old.domains:
                        if old_domain.domain == domain.domain:
                            if (
                                old_domain.protocol == 'http' and
                                domain.protocol == 'https' and
                                domain.certificate == 'san'
                            ):
                                is_upgrade = True
                            break
                    if is_upgrade is True:
                        # NOTE: .values()[0] is Python 2 style (dict_values
                        # is not indexable on Python 3).
                        old_access_url_for_domain = (
                            service_old.provider_details.values()[0].
                            get_domain_access_url(domain.domain))
                        # add placeholder access url for upgraded domain
                        # the access_url dict here should be missing an entry
                        # for http san domain since provider url is
                        # determined only after an ssl cert is provisioned
                        access_urls.append({
                            'domain': domain.domain,
                            'provider_url': None,
                            'operator_url': None,
                            'old_operator_url': old_access_url_for_domain[
                                'operator_url'
                            ]
                        })
                # keep log_delivery urls intact when both old and new
                # services have log_delivery enabled
                if (
                    service_old.log_delivery.enabled is True and
                    service_updates.log_delivery.enabled is True
                ):
                    for old_access_url in old_access_urls:
                        if 'log_delivery' in old_access_url:
                            access_urls.append(old_access_url)
                dns_details[provider_name] = {'access_urls': access_urls}
        return self.responder.updated(dns_details)
def gather_cname_links(self, service_obj):
    """Collect the CNAME mapping (domain / operator url / provider url)
    for every provider on the service.

    Log-delivery access urls are excluded. Every returned entry carries
    an explicit boolean ``shared_ssl_flag`` because shared ssl access
    urls use a different shard_prefix and num_shard.
    """
    dns_details = {}
    for provider_name, details in service_obj.provider_details.items():
        cname_links = []
        for link in details.access_urls:
            # Log delivery URLs are not CNAME'd; skip them entirely.
            if 'log_delivery' in link:
                continue
            cname_links.append({
                'domain': link['domain'],
                'provider_url': link['provider_url'],
                'operator_url': link['operator_url'],
                'shared_ssl_flag': bool(link.get('shared_ssl_flag')),
            })
        dns_details[provider_name] = {'access_urls': cname_links}
    return dns_details
def enable(self, service_obj):
    """Point every CNAME for the service back at its provider url.

    On any failure the error is logged and a "failed" responder payload
    is returned; otherwise an "updated" payload with the dns details.
    """
    dns_details = self.gather_cname_links(service_obj)
    try:
        for provider_entry in dns_details.values():
            for access_url in provider_entry['access_urls']:
                self._change_cname_record(
                    access_url['operator_url'],
                    access_url['provider_url'],
                    access_url['shared_ssl_flag'])
    except Exception as e:
        message = 'Rackspace DNS Exception: {0}'.format(e)
        LOG.error(message)
        return self.responder.failed(
            dns_details.keys(),
            {'error_msg': message, 'error_class': e.__class__})
    else:
        return self.responder.updated(dns_details)
def disable(self, service_obj):
    """Point every CNAME for the service at the configured 404 url.

    Mirrors enable(), except all records are redirected to the single
    driver-wide "disabled" target instead of each provider url.
    """
    dns_details = self.gather_cname_links(service_obj)
    try:
        target_url = self._driver.rackdns_conf.url_404
        for provider_entry in dns_details.values():
            for access_url in provider_entry['access_urls']:
                self._change_cname_record(
                    access_url['operator_url'],
                    target_url,
                    access_url['shared_ssl_flag'])
    except Exception as e:
        message = 'Rackspace DNS Exception: {0}'.format(e)
        LOG.error(message)
        return self.responder.failed(
            dns_details.keys(),
            {'error_msg': message, 'error_class': e.__class__})
    else:
        return self.responder.updated(dns_details)
def modify_cname(self, access_url, new_cert):
    """Re-point an existing (non-shared-ssl) CNAME record.

    NOTE(review): despite the name, ``new_cert`` is passed as the CNAME
    target url -- presumably the provider url provisioned for the new
    certificate; confirm against _change_cname_record's signature.
    """
    self._change_cname_record(access_url=access_url,
                              target_url=new_cert, shared_ssl_flag=False)
def is_shard_full(self, shard_name):
    """Return True when the DNS shard has reached the record limit.

    :param shard_name: name of the shard (sub)domain to inspect.
    :return: True when the shard holds at least
        ``rackdns_conf.records_limit`` records, or when the shard
        domain cannot be found at all (treated as full so no new
        records get routed to a misconfigured shard).
    """
    try:
        shard_domain = self.client.find(name=shard_name)
    except exc.NotFound:
        # Bug fix: the message previously logged the literal "{0}"
        # because .format(shard_name) was never applied.
        LOG.error(
            "Shards not configured properly, could not find {0}.".format(
                shard_name))
        return True
    # Count the first page, then keep paging until the API signals the end.
    count = len(shard_domain.list_records(limit=100))
    while True:
        try:
            count += len(self.client.list_records_next_page())
        except exc.NoMoreResults:
            break
    LOG.info(
        "There were a total of {0} record(s) for {1}.".format(
            count,
            shard_name
        ))
    return count >= self._driver.rackdns_conf.records_limit
|
PetrDlouhy/django
|
refs/heads/master
|
tests/swappable_models/__init__.py
|
12133432
| |
rcos/Observatory
|
refs/heads/master
|
observatory/dashboard/templatetags/event_bubble.py
|
2
|
# Copyright (c) 2010, individual contributors (see AUTHORS file)
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
from django import template
from django.template.loader import render_to_string
register = template.Library()
def event_bubble(event, specify_type, show_content=False, extra_classes=""):
    """Render the event-bubble partial for a single event.

    The keyword flags are forwarded straight into the template context:
    ``show_content`` toggles the event body, ``extra_classes`` is extra
    CSS class text for the bubble element.
    """
    context = {
        "event": event,
        "specify_type": specify_type,
        "show_content": show_content,
        "extra_classes": extra_classes,
    }
    return render_to_string("partials/event_bubble.html", context)


# Expose the helper as a template tag.
register.simple_tag(event_bubble)
|
sergecodd/FireFox-OS
|
refs/heads/master
|
B2G/gecko/toolkit/components/telemetry/gen-histogram-enum.py
|
2
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# Write out a C++ enum definition whose members are the names of
# histograms. The histograms are defined in a file provided as a
# command-line argument.
from __future__ import with_statement
import sys
import histogram_tools
banner = """/* This file is auto-generated, see gen-histogram-enum.py. */
"""
def main(argv):
    """Print the C++ ``enum ID`` definition for all histograms to stdout.

    argv[0] is the path to the histogram definition file. This is
    Python 2 code: ``print`` is a statement here.
    """
    filename = argv[0]
    print banner
    print "enum ID {"
    for histogram in histogram_tools.from_file(filename):
        # A histogram may carry a preprocessor guard; wrap its enum
        # member in #if defined(...)/#endif when present.
        cpp_guard = histogram.cpp_guard()
        if cpp_guard:
            print "#if defined(%s)" % cpp_guard
        print "  %s," % histogram.name()
        if cpp_guard:
            print "#endif"
    # Sentinel member giving the total number of histograms.
    print "  HistogramCount"
    print "};"

main(sys.argv[1:])
|
drewhill/Emp-Dir
|
refs/heads/master
|
src/ProjClass.py
|
1
|
# -*- coding: utf-8 -*-
"""
Created on Thu Jul 04 19:57:11 2013
@author: drewhill
"""
import os
class project:
    """A project record persisted to projects/<id>.prj.

    The on-disk format is line-oriented: a header line ("ID", "Name",
    "Lead", "Desc", "Notes", "Sharepoint Link", "Photo Link",
    "EmpList") followed by its value line(s). Empty optional fields are
    written as the header alone, and inFile() detects that case by
    seeing the *next* header where a value was expected.
    """
    def __init__(self,newID,val="tempload"): #constructor
        # In-memory defaults; real values are loaded via inFile().
        self.name = val
        self.id = newID
        self.notes=""
        self.empList=[]  # employee ids (ints)
        self.lead=""     # lead's employee id (int), or "" when unset
        self.desc=""
        self.sharepointLink=""
        self.photo=""
    def setNotes(self,val=""):
        # Load-modify-save: refresh from disk, change one field, persist.
        self.inFile()
        self.notes = val
        self.outFile()
    def setDesc(self,val=""):
        self.inFile()
        self.desc = val
        self.outFile()
    def setSharepointLink(self,val=""):
        self.inFile()
        self.sharepointLink=val
        self.outFile()
    def setPhotoLink(self,val=""):
        self.inFile()
        self.photo=val
        self.outFile()
    def addEmp(self,val):
        # Add an employee id, ignoring duplicates.
        self.inFile()
        if val not in self.empList:
            self.empList.append(val)
        self.outFile()
    def setLead(self,val=""):
        self.inFile()
        self.lead = val
        self.outFile()
    def remEmp(self,val):
        # NOTE(review): list.remove raises ValueError when val is absent
        # -- presumably callers guarantee membership; confirm.
        self.inFile()
        self.empList.remove(val)
        self.outFile()
    def chgLead(self,val=""):
        # Same behavior as setLead().
        self.inFile()
        self.lead = val
        self.outFile()
    def retList(self):
        return self.empList
    def retLead(self):
        return self.lead
    def retID(self):
        return self.id
    def retDesc(self):
        return self.desc
    def retEmailList(self):
        # Build a "; "-separated string of member email addresses,
        # skipping employees without one.
        emList=str()
        from EmpClass import employee
        for empID in self.empList:
            tempEmp=employee(empID)
            tempEmp.inFile()
            empEm=tempEmp.retEmail()
            if not empEm=="":
                emList+=empEm+"; "
        return emList #single "; "-joined string of employee emails
    def retSharepointLink(self):
        return self.sharepointLink
    def retPhotoLink(self):
        return self.photo
    def renameProj(self,val):
        # Rename the project and keep the global project list in sync.
        self.inFile()
        if not self.name == val:
            self.name = val
            self.outFile()
            from ProjListClass import ProjList
            tempList = ProjList()
            tempList.inFile() #get most updated list
            tempList.renamePL(self.id,val)
    def inFile(self):
        """Load this project's fields from projects/<id>.prj.

        Returns -1 when the file does not exist (fields then keep
        their current in-memory values).
        """
        if not os.path.isfile(os.path.join(os.getcwd(),"projects",str(self.id)+".prj")):
            return -1
        with open(os.path.join(os.getcwd(),"projects",str(self.id)+".prj"),'r') as f:
            line=f.readline()
            line=line.rstrip("\n") #"ID" header
            line=f.readline()
            line=line.rstrip("\n") #ID value
            self.id=int(line)
            line=f.readline()
            line=line.rstrip("\n") #"Name" header
            line=f.readline()
            line=line.rstrip("\n") #Name value
            self.name=line
            line=f.readline()
            line=line.rstrip("\n") #"Lead" header
            line=f.readline()
            line=line.rstrip("\n") #Lead value, or "Desc" header when empty
            if not line=="Desc":
                self.lead=int(line)
                line=f.readline()
                line=line.rstrip("\n") #"Desc" header
            else:
                self.lead=""
            line=f.readline()
            line=line.rstrip("\n") #Desc value, or "Notes" header when empty
            if not line=="Notes":
                # Stored newlines are encoded as <br> (see outFile).
                line = "\n".join(line.split("<br>"))
                self.desc=line
                line=f.readline()
                line=line.rstrip("\n") #"Notes" header
            else:
                self.desc=""
            line=f.readline()
            line=line.rstrip("\n") #Notes value, or "Sharepoint Link" header
            if not line=="Sharepoint Link":
                self.notes=line
                line=f.readline()
                line=line.rstrip("\n") #"Sharepoint Link" header
            else:
                self.notes=""
            line=f.readline()
            line=line.rstrip("\n") #sharepointLink value, or "Photo Link" header
            if not line=="Photo Link":
                self.sharepointLink=line
                line=f.readline()
                line=line.rstrip("\n") #"Photo Link" header
            else:
                self.sharepointLink=""
            line=f.readline()
            line=line.rstrip("\n") #photo value, or "EmpList" header when empty
            if not line=="EmpList":
                self.photo=line
                line=f.readline()
                line=line.rstrip("\n") #"EmpList" header
            else:
                self.photo=""
            self.empList=list() #clear it, start over
            # All remaining lines are employee ids, one per line.
            for line in f:
                line=line.rstrip("\n")
                self.empList.append(int(line))
    def outFile(self):
        """Write this project's fields to projects/<id>.prj.

        Empty optional fields are written as their header line only --
        exactly the shape inFile() expects back.
        """
        with open(os.path.join(os.getcwd(),"projects",str(self.id)+".prj"),'w+') as f:
            f.write("ID"+"\n")
            f.write(str(self.id)+"\n")
            f.write("Name"+"\n")
            f.write(str(self.name)+"\n")
            f.write("Lead"+"\n")
            if(not self.lead == ""):
                f.write(str(self.lead)+"\n")
            f.write("Desc"+"\n")
            if(not self.desc == ""):
                # Encode newlines/carriage returns as <br> so the value
                # stays on a single file line.
                tempDesc=self.desc
                tempDesc = "<br>".join(tempDesc.split("\n"))
                tempDesc = "<br>".join(tempDesc.split("\r"))
                f.write(str(tempDesc)+"\n")
            f.write("Notes"+"\n")
            if(not self.notes == ""):
                f.write(str(self.notes)+"\n")
            f.write("Sharepoint Link"+"\n")
            if(not self.sharepointLink == ""):
                f.write(str(self.sharepointLink)+"\n")
            #TODO: Sanitize output for colons and newlines
            f.write("Photo Link"+"\n")
            if(not self.photo == ""):
                f.write(str(self.photo)+"\n")
            f.write("EmpList"+"\n")
            for empID in self.empList:
                f.write(str(empID))
                f.write("\n")
|
dongjoon-hyun/tensorflow
|
refs/heads/master
|
tensorflow/contrib/graph_editor/__init__.py
|
24
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""TensorFlow Graph Editor.
See the
[Graph Editor](https://tensorflow.org/api_guides/python/contrib.graph_editor)
guide.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# pylint: disable=wildcard-import
from tensorflow.contrib.graph_editor.edit import *
from tensorflow.contrib.graph_editor.reroute import *
from tensorflow.contrib.graph_editor.select import *
from tensorflow.contrib.graph_editor.subgraph import *
from tensorflow.contrib.graph_editor.transform import *
from tensorflow.contrib.graph_editor.util import *
# pylint: enable=wildcard-import
# some useful aliases
# pylint: disable=g-bad-import-order
from tensorflow.contrib.graph_editor import subgraph as _subgraph
from tensorflow.contrib.graph_editor import util as _util
# pylint: enable=g-bad-import-order
# Short public aliases for frequently used factory functions.
ph = _util.make_placeholder_from_dtype_and_shape
sgv = _subgraph.make_view
sgv_scope = _subgraph.make_view_from_scope

# Remove the __future__ names so they are not re-exported by this module.
del absolute_import
del division
del print_function
|
Endika/odoomrp-utils
|
refs/heads/8.0
|
partner_address_type_default/__init__.py
|
465
|
# -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in root directory
##############################################################################
from . import models
|
gmartinezramirez/dotfiles
|
refs/heads/master
|
scripts/.config/scripts/i3-wk-switch/i3-wk-switch.py
|
1
|
#!/usr/bin/python
"""Emulates xmonad's workspace switching behavior in i3"""
# pylint: disable=no-member
import logging
import sys
from pprint import pformat
import i3
import time
LOG = logging.getLogger()
def setup_logger(level):
    """Attach a file handler at *level* to the module logger."""
    # The logger itself stays wide open (DEBUG); the handler filters.
    LOG.setLevel(logging.DEBUG)
    handler = logging.FileHandler("/tmp/i3-wk-switcher.log")
    handler.setLevel(level)
    handler.setFormatter(logging.Formatter('[%(levelname)s] %(message)s'))
    LOG.addHandler(handler)
def get_focused_workspace():
    """Return the single workspace that currently has focus."""
    focused = []
    for workspace in i3.get_workspaces():
        if workspace['focused']:
            focused.append(workspace)
    # i3 should report exactly one focused workspace at any time.
    assert len(focused) == 1
    return focused[0]
def get_active_outputs():
    """Return the outputs (monitors) that are currently active."""
    active = []
    for output in i3.get_outputs():
        if output['active']:
            active.append(output)
    return active
def get_workspace(num):
    """Return the workspace numbered *num*, or None when it does not exist."""
    candidates = [ws for ws in i3.get_workspaces() if ws['num'] == num]
    # Workspace numbers are unique, so there is at most one match.
    assert len(candidates) in [0, 1]
    if candidates:
        return candidates[0]
    return None
def switch_workspace(num):
    """Ask i3 to focus workspace *num* (i3 creates it if it does not exist)."""
    i3.workspace('number %d' % num)
def swap_visible_workspaces(wk_a, wk_b):
    """Swap two visible workspaces between their outputs."""
    # Focus each workspace in turn and ship it to the other's output.
    for workspace, other in ((wk_a, wk_b), (wk_b, wk_a)):
        switch_workspace(workspace['num'])
        i3.command('move', 'workspace to output ' + other['output'])
def change_workspace(num):
    """
    Switches to workspace num like xmonad.

    Always sets focused output to workspace num. If the workspace is on
    another output, then the workspaces are "shifted" among the outputs.
    """
    # Allow for string or int type for argument
    num = int(num)
    focused_workspace = get_focused_workspace()
    # Remember which output had focus so it can be re-focused at the end.
    original_output = focused_workspace['output']
    LOG.debug('Switching to workspace:{} on output:{}, display: {}:'.format(
        num, focused_workspace['output'], pformat(focused_workspace, indent=2)))
    # Fast path: already on the requested workspace.
    if int(focused_workspace['num']) == num:
        LOG.debug('Already on correct workspace')
        return
    # Get workspace we want to switch to
    want_workspace = get_workspace(num)
    if want_workspace is None:
        # Workspace does not exist yet; a plain switch makes i3 create it
        # on the focused output.
        LOG.debug('Switching to workspace because it does not exist, i3 will create it')
        switch_workspace(num)
        return
    LOG.debug('Want workspace:\n' + pformat(want_workspace, indent=2))
    # Save workspace originally showing on want_workspace's output
    other_output = [outp for outp in get_active_outputs()
                    if outp['name'] == want_workspace['output']][0]
    LOG.debug('Other_output=%s', pformat(other_output, indent=2))
    other_workspace = [wk for wk in i3.get_workspaces()
                       if wk['name'] == other_output['current_workspace']][0]
    LOG.debug('Other workspace:\n' + pformat(other_workspace, indent=2))
    # Wanted workspace already lives on the focused output: plain switch.
    if focused_workspace['output'] == want_workspace['output']:
        LOG.debug('Wanted workspace already on focused output, '
                  'switching as normal')
        switch_workspace(num)
        return
    # Wanted workspace is on another output and hidden: make it visible
    # there first. NOTE(review): execution deliberately falls through to
    # the swap below (no return) -- presumably so the workspace ends up
    # on the originally focused output; confirm this is intended.
    if not want_workspace['visible']:
        LOG.debug('Workspace to switch to is hidden')
        # Switch to workspace on other output
        switch_workspace(num)
    LOG.debug('Wanted workspace is on other output')
    # Wanted workspace is visible, so swap workspaces
    swap_visible_workspaces(want_workspace, focused_workspace)
    # Focus other_workspace
    switch_workspace(other_workspace['num'])
    # Focus on wanted workspace; the short sleep gives i3 time to settle
    # before the final focus command.
    time.sleep(.15)
    LOG.debug('Setting focus to {}'.format(original_output))
    i3.command('focus', 'output', original_output)
if __name__ == '__main__':
    # Python 2 script entry point (``print`` is a statement here).
    if len(sys.argv) != 2:
        print 'Usage: %s WORKSPACE_NUM' % sys.argv[0]
        sys.exit(1)
    setup_logger(logging.DEBUG)
    try:
        change_workspace(sys.argv[1])
    except Exception:
        # Log the full traceback to the file handler; the switch would
        # otherwise fail silently when run from a keybinding.
        LOG.exception('An error occured')
|
asydorchuk/ml
|
refs/heads/master
|
classes/cs231n/assignment3/cs231n/__init__.py
|
12133432
| |
haxoza/django
|
refs/heads/master
|
tests/staticfiles_tests/apps/no_label/__init__.py
|
12133432
| |
goodwinnk/intellij-community
|
refs/heads/master
|
python/testData/lineMarkerTest/eggs/spam/__init__.py
|
12133432
| |
ironbox360/django
|
refs/heads/master
|
tests/datetimes/tests.py
|
345
|
from __future__ import unicode_literals
import datetime
from unittest import skipIf
from django.test import TestCase, override_settings
from django.utils import timezone
from .models import Article, Category, Comment
try:
import pytz
except ImportError:
pytz = None
class DateTimesTests(TestCase):
    """Tests for ``QuerySet.datetimes()``: truncation kinds, relation
    traversal, ordering, and lazy iteration."""
    def test_related_model_traverse(self):
        # datetimes() must work across forward (comment -> article),
        # reverse (article -> comments) and m2m (category -> articles)
        # relations, returning distinct truncated datetimes.
        a1 = Article.objects.create(
            title="First one",
            pub_date=datetime.datetime(2005, 7, 28, 9, 0, 0),
        )
        a2 = Article.objects.create(
            title="Another one",
            pub_date=datetime.datetime(2010, 7, 28, 10, 0, 0),
        )
        a3 = Article.objects.create(
            title="Third one, in the first day",
            pub_date=datetime.datetime(2005, 7, 28, 17, 0, 0),
        )
        a1.comments.create(
            text="Im the HULK!",
            pub_date=datetime.datetime(2005, 7, 28, 9, 30, 0),
        )
        a1.comments.create(
            text="HULK SMASH!",
            pub_date=datetime.datetime(2005, 7, 29, 1, 30, 0),
        )
        a2.comments.create(
            text="LMAO",
            pub_date=datetime.datetime(2010, 7, 28, 10, 10, 10),
        )
        a3.comments.create(
            text="+1",
            pub_date=datetime.datetime(2005, 8, 29, 10, 10, 10),
        )
        c = Category.objects.create(name="serious-news")
        c.articles.add(a1, a3)
        self.assertQuerysetEqual(
            Comment.objects.datetimes("article__pub_date", "year"), [
                datetime.datetime(2005, 1, 1),
                datetime.datetime(2010, 1, 1),
            ],
            lambda d: d,
        )
        self.assertQuerysetEqual(
            Comment.objects.datetimes("article__pub_date", "month"), [
                datetime.datetime(2005, 7, 1),
                datetime.datetime(2010, 7, 1),
            ],
            lambda d: d
        )
        self.assertQuerysetEqual(
            Comment.objects.datetimes("article__pub_date", "day"), [
                datetime.datetime(2005, 7, 28),
                datetime.datetime(2010, 7, 28),
            ],
            lambda d: d
        )
        self.assertQuerysetEqual(
            Article.objects.datetimes("comments__pub_date", "day"), [
                datetime.datetime(2005, 7, 28),
                datetime.datetime(2005, 7, 29),
                datetime.datetime(2005, 8, 29),
                datetime.datetime(2010, 7, 28),
            ],
            lambda d: d
        )
        # No comment has an approval_date, so the result is empty.
        self.assertQuerysetEqual(
            Article.objects.datetimes("comments__approval_date", "day"), []
        )
        self.assertQuerysetEqual(
            Category.objects.datetimes("articles__pub_date", "day"), [
                datetime.datetime(2005, 7, 28),
            ],
            lambda d: d,
        )
    @skipIf(pytz is None, "this test requires pytz")
    @override_settings(USE_TZ=True)
    def test_21432(self):
        # Regression test for ticket #21432: with USE_TZ, datetimes()
        # truncated to 'second' must round-trip the stored aware value.
        now = timezone.localtime(timezone.now().replace(microsecond=0))
        Article.objects.create(title="First one", pub_date=now)
        qs = Article.objects.datetimes('pub_date', 'second')
        self.assertEqual(qs[0], now)
    def test_datetimes_returns_available_dates_for_given_scope_and_given_field(self):
        pub_dates = [
            datetime.datetime(2005, 7, 28, 12, 15),
            datetime.datetime(2005, 7, 29, 2, 15),
            datetime.datetime(2005, 7, 30, 5, 15),
            datetime.datetime(2005, 7, 31, 19, 15)]
        for i, pub_date in enumerate(pub_dates):
            Article(pub_date=pub_date, title='title #{}'.format(i)).save()
        self.assertQuerysetEqual(
            Article.objects.datetimes('pub_date', 'year'),
            ["datetime.datetime(2005, 1, 1, 0, 0)"])
        self.assertQuerysetEqual(
            Article.objects.datetimes('pub_date', 'month'),
            ["datetime.datetime(2005, 7, 1, 0, 0)"])
        self.assertQuerysetEqual(
            Article.objects.datetimes('pub_date', 'day'),
            ["datetime.datetime(2005, 7, 28, 0, 0)",
             "datetime.datetime(2005, 7, 29, 0, 0)",
             "datetime.datetime(2005, 7, 30, 0, 0)",
             "datetime.datetime(2005, 7, 31, 0, 0)"])
        # Both explicit orderings are exercised; default matches 'ASC'.
        self.assertQuerysetEqual(
            Article.objects.datetimes('pub_date', 'day', order='ASC'),
            ["datetime.datetime(2005, 7, 28, 0, 0)",
             "datetime.datetime(2005, 7, 29, 0, 0)",
             "datetime.datetime(2005, 7, 30, 0, 0)",
             "datetime.datetime(2005, 7, 31, 0, 0)"])
        self.assertQuerysetEqual(
            Article.objects.datetimes('pub_date', 'day', order='DESC'),
            ["datetime.datetime(2005, 7, 31, 0, 0)",
             "datetime.datetime(2005, 7, 30, 0, 0)",
             "datetime.datetime(2005, 7, 29, 0, 0)",
             "datetime.datetime(2005, 7, 28, 0, 0)"])
    def test_datetimes_has_lazy_iterator(self):
        pub_dates = [
            datetime.datetime(2005, 7, 28, 12, 15),
            datetime.datetime(2005, 7, 29, 2, 15),
            datetime.datetime(2005, 7, 30, 5, 15),
            datetime.datetime(2005, 7, 31, 19, 15)]
        for i, pub_date in enumerate(pub_dates):
            Article(pub_date=pub_date, title='title #{}'.format(i)).save()
        # Use iterator() with datetimes() to return a generator that lazily
        # requests each result one at a time, to save memory.
        dates = []
        # Creating the iterator must not hit the database ...
        with self.assertNumQueries(0):
            article_datetimes_iterator = Article.objects.datetimes('pub_date', 'day', order='DESC').iterator()
        # ... consuming it runs exactly one query.
        with self.assertNumQueries(1):
            for article in article_datetimes_iterator:
                dates.append(article)
        self.assertEqual(dates, [
            datetime.datetime(2005, 7, 31, 0, 0),
            datetime.datetime(2005, 7, 30, 0, 0),
            datetime.datetime(2005, 7, 29, 0, 0),
            datetime.datetime(2005, 7, 28, 0, 0)])
|
billyhunt/osf.io
|
refs/heads/develop
|
api_tests/institutions/views/__init__.py
|
12133432
| |
mdaniel/intellij-community
|
refs/heads/master
|
python/testData/addImport/relativeImportTooDeepWithSameLevelUsed/pkg1/pkg2/pkg3/pkg4/__init__.py
|
12133432
| |
mattcaldwell/djangocms-text-ckeditor
|
refs/heads/master
|
djangocms_text_ckeditor/migrations/__init__.py
|
12133432
| |
mavenlin/tensorflow
|
refs/heads/master
|
tensorflow/contrib/seq2seq/python/kernel_tests/__init__.py
|
12133432
| |
maestro-hybrid-cloud/horizon
|
refs/heads/master
|
openstack_dashboard/dashboards/admin/volumes/volumes/__init__.py
|
12133432
| |
studentenportal/web
|
refs/heads/master
|
apps/user_stats/management/__init__.py
|
12133432
| |
MozillaSecurity/FuzzManager
|
refs/heads/master
|
server/ec2spotmanager/common/__init__.py
|
12133432
| |
hef/samba
|
refs/heads/bind9_10
|
python/samba/tests/dcerpc/sam.py
|
27
|
# -*- coding: utf-8 -*-
#
# Unix SMB/CIFS implementation.
# Copyright © Jelmer Vernooij <jelmer@samba.org> 2008
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""Tests for samba.dcerpc.sam."""
from samba.dcerpc import samr, security
from samba.tests import RpcInterfaceTestCase
# FIXME: Pidl should be doing this for us
def toArray((handle, array, num_entries)):
    """Flatten a (handle, SamArray, count) triple -- the shape returned
    by the samr Enum* calls -- into a list of (idx, name) tuples.

    Python 2 only: the signature uses tuple parameter unpacking, which
    was removed in Python 3 (PEP 3113).
    """
    ret = []
    for x in range(num_entries):
        ret.append((array.entries[x].idx, array.entries[x].name))
    return ret
class SamrTests(RpcInterfaceTestCase):
    """Smoke tests for the samr RPC interface over a local ncalrpc pipe."""
    def setUp(self):
        super(SamrTests, self).setUp()
        # Local (ncalrpc) connection to the SAM server for every test.
        self.conn = samr.samr("ncalrpc:", self.get_loadparm())
    def test_connect5(self):
        # Succeeds iff Connect5 returns without raising.
        (level, info, handle) = self.conn.Connect5(None, 0, 1, samr.ConnectInfo1())
    def test_connect2(self):
        handle = self.conn.Connect2(None, security.SEC_FLAG_MAXIMUM_ALLOWED)
        self.assertTrue(handle is not None)
    def test_EnumDomains(self):
        # Enumerate domains and close the handle; succeeds iff no raise.
        handle = self.conn.Connect2(None, security.SEC_FLAG_MAXIMUM_ALLOWED)
        domains = toArray(self.conn.EnumDomains(handle, 0, -1))
        self.conn.Close(handle)
|
edx/lettuce
|
refs/heads/master
|
tests/integration/lib/Django-1.2.5/django/contrib/comments/views/__init__.py
|
12133432
| |
caseyclements/bokeh
|
refs/heads/master
|
bokeh/compat/mplexporter/__init__.py
|
64
|
from .renderers import Renderer
from .exporter import Exporter
|
utkbansal/kuma
|
refs/heads/master
|
kuma/users/migrations/0004_auto_20150722_1243.py
|
6
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from json import loads
from django.core.exceptions import ObjectDoesNotExist
from django.db import models, migrations
def move_to_user(apps, schema_editor):
    """Copy each UserProfile's fields onto its User row (data migration).

    Python 2 era code (``iteritems``). Progress is printed because this
    can run over a large table.
    """
    User = apps.get_model('users', 'User')
    unknowns = []  # ids of users whose profile could not be found
    users = User.objects.all().select_related('userprofile')
    count = users.count()
    if count:
        print('Porting %d user profiles' % count)
    for i, user in enumerate(users.iterator()):
        if i % 10000 == 0:
            print('%d/%d done' % (i, count))
        try:
            profile = user.userprofile_set.all().first()
        except ObjectDoesNotExist:
            continue
        if profile:
            user.bio = profile.bio
            user.content_flagging_email = profile.content_flagging_email
            user.fullname = profile.fullname
            user.homepage = profile.homepage
            user.irc_nickname = profile.irc_nickname
            # Fall back to defaults when the profile left these blank.
            user.locale = profile.locale or 'en-US'
            user.location = profile.location
            user.organization = profile.organization
            user.timezone = profile.timezone or 'US/Pacific'
            user.title = profile.title
            user.tags = profile.tags
            if profile.misc:
                # ``misc`` is a JSON blob; only its 'websites' dict is ported.
                websites = loads(profile.misc).get('websites', {})
                for name, url in websites.iteritems():
                    # make sure the stuff in the websites blob
                    # matches the field names we expect
                    try:
                        field_name = '%s_url' % name
                        user._meta.get_field(field_name)
                    except models.FieldDoesNotExist:
                        # Abort loudly so the unexpected key gets fixed.
                        print('Tried porting profile %s and field %s' %
                              (profile.id, name))
                        raise
                    else:
                        setattr(user, field_name, url)
            user.save()
        else:
            unknowns.append(user.id)
    if unknowns:
        print('Found users whose profile could not be found: %s' % unknowns)
class Migration(migrations.Migration):
    """Data migration: move UserProfile fields onto the User model."""
    dependencies = [
        ('users', '0003_auto_20150722_1242'),
    ]
    operations = [
        # Forward-only: no reverse function is provided.
        migrations.RunPython(move_to_user),
    ]
|
georgemarshall/django
|
refs/heads/master
|
tests/middleware/extra_urls.py
|
61
|
from django.urls import path
from . import views
# URL fixtures for the middleware tests: a path without a trailing slash,
# one with, and one containing a character that needs percent-quoting.
urlpatterns = [
    path('customurlconf/noslash', views.empty_view),
    path('customurlconf/slash/', views.empty_view),
    path('customurlconf/needsquoting#/', views.empty_view),
]
|
teoliphant/numpy-refactor
|
refs/heads/refactor
|
numpy/lib/info.py
|
99
|
"""
Basic functions used by several sub-packages and
useful to have in the main name-space.
Type Handling
-------------
================ ===================
iscomplexobj Test for complex object, scalar result
isrealobj Test for real object, scalar result
iscomplex Test for complex elements, array result
isreal Test for real elements, array result
imag Imaginary part
real Real part
real_if_close Turns complex number with tiny imaginary part to real
isneginf Tests for negative infinity, array result
isposinf Tests for positive infinity, array result
isnan Tests for nans, array result
isinf Tests for infinity, array result
isfinite Tests for finite numbers, array result
isscalar True if argument is a scalar
nan_to_num Replaces NaN's with 0 and infinities with large numbers
cast Dictionary of functions to force cast to each type
common_type Determine the minimum common type code for a group
of arrays
mintypecode Return minimal allowed common typecode.
================ ===================
Index Tricks
------------
================ ===================
mgrid Method which allows easy construction of N-d
'mesh-grids'
``r_`` Append and construct arrays: turns slice objects into
ranges and concatenates them, for 2d arrays appends rows.
index_exp Konrad Hinsen's index_expression class instance which
can be useful for building complicated slicing syntax.
================ ===================
Useful Functions
----------------
================ ===================
select Extension of where to multiple conditions and choices
extract Extract 1d array from flattened array according to mask
insert Insert 1d array of values into Nd array according to mask
linspace Evenly spaced samples in linear space
logspace Evenly spaced samples in logarithmic space
fix Round x to nearest integer towards zero
mod Modulo mod(x,y) = x % y except keeps sign of y
amax Array maximum along axis
amin Array minimum along axis
ptp Array max-min along axis
cumsum Cumulative sum along axis
prod Product of elements along axis
cumprod          Cumulative product along axis
diff Discrete differences along axis
angle Returns angle of complex argument
unwrap Unwrap phase along given axis (1-d algorithm)
sort_complex Sort a complex-array (based on real, then imaginary)
trim_zeros Trim the leading and trailing zeros from 1D array.
vectorize A class that wraps a Python function taking scalar
arguments into a generalized function which can handle
arrays of arguments using the broadcast rules of
numerix Python.
================ ===================
Shape Manipulation
------------------
================ ===================
squeeze Return a with length-one dimensions removed.
atleast_1d Force arrays to be > 1D
atleast_2d Force arrays to be > 2D
atleast_3d Force arrays to be > 3D
vstack Stack arrays vertically (row on row)
hstack Stack arrays horizontally (column on column)
column_stack Stack 1D arrays as columns into 2D array
dstack Stack arrays depthwise (along third dimension)
split Divide array into a list of sub-arrays
hsplit Split into columns
vsplit Split into rows
dsplit Split along third dimension
================ ===================
Matrix (2D Array) Manipulations
-------------------------------
================ ===================
fliplr 2D array with columns flipped
flipud 2D array with rows flipped
rot90 Rotate a 2D array a multiple of 90 degrees
eye Return a 2D array with ones down a given diagonal
diag Construct a 2D array from a vector, or return a given
diagonal from a 2D array.
mat Construct a Matrix
bmat Build a Matrix from blocks
================ ===================
Polynomials
-----------
================ ===================
poly1d A one-dimensional polynomial class
poly Return polynomial coefficients from roots
roots Find roots of polynomial given coefficients
polyint Integrate polynomial
polyder Differentiate polynomial
polyadd Add polynomials
polysub          Subtract polynomials
polymul Multiply polynomials
polydiv Divide polynomials
polyval Evaluate polynomial at given argument
================ ===================
Import Tricks
-------------
================ ===================
ppimport Postpone module import until trying to use it
ppimport_attr Postpone module import until trying to use its attribute
ppresolve Import postponed module and return it.
================ ===================
Machine Arithmetics
-------------------
================ ===================
machar_single Single precision floating point arithmetic parameters
machar_double Double precision floating point arithmetic parameters
================ ===================
Threading Tricks
----------------
================ ===================
ParallelExec Execute commands in parallel thread.
================ ===================
1D Array Set Operations
-----------------------
Set operations for 1D numeric arrays based on sort() function.
================ ===================
ediff1d Array difference (auxiliary function).
unique Unique elements of an array.
intersect1d Intersection of 1D arrays with unique elements.
setxor1d Set exclusive-or of 1D arrays with unique elements.
in1d Test whether elements in a 1D array are also present in
another array.
union1d Union of 1D arrays with unique elements.
setdiff1d Set difference of 1D arrays with unique elements.
================ ===================
"""
# NOTE(review): presumably consumed by numpy's package-loading machinery:
# sub-packages this module depends on, and the names re-exported on
# ``from numpy.lib import *`` ('*' meaning everything public) -- confirm.
depends = ['core','testing']
global_symbols = ['*']
|
mirzadelic/django-social-example
|
refs/heads/master
|
django_social_example/manage.py
|
3
|
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
    # Standard Django manage.py entry point.
    # NOTE(review): the settings module is "django_app.settings" while the
    # project directory is named django_social_example -- confirm the
    # package name is correct.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "django_app.settings")
    try:
        from django.core.management import execute_from_command_line
    except ImportError:
        # The above import may fail for some other reason. Ensure that the
        # issue is really that Django is missing to avoid masking other
        # exceptions on Python 2.
        try:
            import django
        except ImportError:
            raise ImportError(
                "Couldn't import Django. Are you sure it's installed and "
                "available on your PYTHONPATH environment variable? Did you "
                "forget to activate a virtual environment?"
            )
        raise
    execute_from_command_line(sys.argv)
|
TansyArron/pants
|
refs/heads/master
|
src/python/pants/reporting/html_reporter.py
|
9
|
# coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import cgi
import os
import re
import uuid
from collections import defaultdict, namedtuple
from six import string_types
from six.moves import range
from pants.base.build_environment import get_buildroot
from pants.base.mustache import MustacheRenderer
from pants.base.workunit import WorkUnit, WorkUnitLabel
from pants.reporting.linkify import linkify
from pants.reporting.report import Report
from pants.reporting.reporter import Reporter
from pants.reporting.reporting_utils import items_to_report_element
from pants.util.dirutil import safe_mkdir
class HtmlReporter(Reporter):
  """HTML reporting to files.

  The files are intended to be served by the ReportingServer,
  not accessed directly from the filesystem.
  """

  # HTML reporting settings.
  #   html_dir: Where the report files go.
  #   template_dir: Where to find mustache templates.
  Settings = namedtuple('Settings', Reporter.Settings._fields + ('html_dir', 'template_dir'))

  def __init__(self, run_tracker, settings):
    Reporter.__init__(self, run_tracker, settings)
    # The main report, and associated tool outputs, go under this dir.
    self._html_dir = settings.html_dir

    # We render HTML from mustache templates.
    self._renderer = MustacheRenderer(settings.template_dir, __name__)

    # We serve files relative to the build root.
    self._buildroot = get_buildroot()
    self._html_path_base = os.path.relpath(self._html_dir, self._buildroot)

    # We write the main report body to this file object.
    self._report_file = None

    # We redirect stdout, stderr etc. of tool invocations to these files.
    self._output_files = defaultdict(dict)  # workunit_id -> {path -> fileobj}.

    # Memo shared by all linkify() calls so repeated paths are only resolved once.
    self._linkify_memo = {}

  def report_path(self):
    """The path to the main report file."""
    return os.path.join(self._html_dir, 'build.html')

  def open(self):
    """Implementation of Reporter callback."""
    # NOTE(review): this creates the *parent* of html_dir, not html_dir itself;
    # presumably html_dir is guaranteed to exist by this point -- confirm.
    safe_mkdir(os.path.dirname(self._html_dir))
    self._report_file = open(self.report_path(), 'w')

  def close(self):
    """Implementation of Reporter callback."""
    self._report_file.close()
    # Make sure everything's closed.
    for files in self._output_files.values():
      for f in files.values():
        f.close()

  def start_workunit(self, workunit):
    """Implementation of Reporter callback."""
    # We use these properties of the workunit to decide how to render information about it.
    is_bootstrap = workunit.has_label(WorkUnitLabel.BOOTSTRAP)
    is_tool = workunit.has_label(WorkUnitLabel.TOOL)
    is_multitool = workunit.has_label(WorkUnitLabel.MULTITOOL)
    is_test = workunit.has_label(WorkUnitLabel.TEST)

    # Get useful properties from the workunit.
    workunit_dict = workunit.to_dict()
    if workunit_dict['cmd']:
      # Escape '$' so the shell-style cmd renders literally in the page.
      workunit_dict['cmd'] = linkify(self._buildroot, workunit_dict['cmd'].replace('$', '\\\\$'),
                                     self._linkify_memo)

    # Create the template arguments.
    args = {'indent': len(workunit.ancestors()) * 10,
            'html_path_base': self._html_path_base,
            'workunit': workunit_dict,
            'header_text': workunit.name,
            'initially_open': is_test or not (is_bootstrap or is_tool or is_multitool),
            'is_tool': is_tool,
            'is_multitool': is_multitool}
    args.update({'collapsible': lambda x: self._renderer.render_callable('collapsible', x, args)})

    # Render the workunit's div.
    s = self._renderer.render_name('workunit_start', args)

    if is_tool:
      # This workunit is a tool invocation, so render the appropriate content.
      # We use the same args, slightly modified.
      del args['initially_open']
      if is_test:
        # Have test framework stdout open by default, but not that of other tools.
        # This is an arbitrary choice, but one that turns out to be useful to users in practice.
        args['stdout_initially_open'] = True
      s += self._renderer.render_name('tool_invocation_start', args)

    # ... and we're done.
    self._emit(s)

  # CSS classes from pants.css that we use to style the header text to reflect the outcome.
  _outcome_css_classes = ['aborted', 'failure', 'warning', 'success', 'unknown']

  def end_workunit(self, workunit):
    """Implementation of Reporter callback."""
    # Create the template arguments.
    duration = workunit.duration()
    timing = '{:.3f}'.format(duration)
    unaccounted_time = None
    # Background work may be idle a lot, no point in reporting that as unaccounted.
    if self.is_under_main_root(workunit):
      unaccounted_time_secs = workunit.unaccounted_time()
      if unaccounted_time_secs >= 1 and unaccounted_time_secs > 0.05 * duration:
        unaccounted_time = '{:.3f}'.format(unaccounted_time_secs)
    args = {'workunit': workunit.to_dict(),
            'status': HtmlReporter._outcome_css_classes[workunit.outcome()],
            'timing': timing,
            'unaccounted_time': unaccounted_time,
            'aborted': workunit.outcome() == WorkUnit.ABORTED}

    s = ''
    if workunit.has_label(WorkUnitLabel.TOOL):
      s += self._renderer.render_name('tool_invocation_end', args)
    s += self._renderer.render_name('workunit_end', args)
    self._emit(s)

    # Update the timings.
    def render_timings(timings):
      timings_dict = timings.get_all()
      for item in timings_dict:
        item['timing_string'] = '{:.3f}'.format(item['timing'])
      args = {
        'timings': timings_dict
      }
      return self._renderer.render_name('aggregated_timings', args)

    self._overwrite('cumulative_timings', render_timings(self.run_tracker.cumulative_timings))
    self._overwrite('self_timings', render_timings(self.run_tracker.self_timings))

    # Update the artifact cache stats.
    def render_cache_stats(artifact_cache_stats):
      def fix_detail_id(e, _id):
        return e if isinstance(e, string_types) else e + (_id, )

      msg_elements = []
      for cache_name, stat in artifact_cache_stats.stats_per_cache.items():
        msg_elements.extend([
          cache_name + ' artifact cache: ',
          # Explicitly set the detail ids, so their displayed/hidden state survives a refresh.
          fix_detail_id(items_to_report_element(stat.hit_targets, 'hit'), 'cache-hit-details'),
          ', ',
          fix_detail_id(items_to_report_element(stat.miss_targets, 'miss'), 'cache-miss-details'),
          '.'
        ])
      if not msg_elements:
        msg_elements = ['No artifact cache use.']
      return self._render_message(*msg_elements)

    self._overwrite('artifact_cache_stats',
                    render_cache_stats(self.run_tracker.artifact_cache_stats))

    # Close this workunit's redirected tool-output files.
    for f in self._output_files[workunit.id].values():
      f.close()

  def handle_output(self, workunit, label, s):
    """Implementation of Reporter callback."""
    if os.path.exists(self._html_dir):  # Make sure we're not immediately after a clean-all.
      path = os.path.join(self._html_dir, '{}.{}'.format(workunit.id, label))
      output_files = self._output_files[workunit.id]
      if path not in output_files:
        f = open(path, 'w')
        output_files[path] = f
      else:
        f = output_files[path]
      f.write(self._htmlify_text(s).encode('utf-8'))
      # We must flush in the same thread as the write.
      f.flush()

  # Maps a Report log level to the pants.css class used to style it.
  _log_level_css_map = {
    Report.FATAL: 'fatal',
    Report.ERROR: 'error',
    Report.WARN: 'warn',
    Report.INFO: 'info',
    Report.DEBUG: 'debug'
  }

  def do_handle_log(self, workunit, level, *msg_elements):
    """Implementation of Reporter callback."""
    content = '<span class="{}">{}</span>'.format(
      HtmlReporter._log_level_css_map[level], self._render_message(*msg_elements))

    # Generate some javascript that appends the content to the workunit's div.
    args = {
      'content_id': uuid.uuid4(),  # Identifies this content.
      'workunit_id': workunit.id,  # The workunit this reporting content belongs to.
      'content': content,  # The content to append.
    }
    s = self._renderer.render_name('append_to_workunit', args)

    # Emit that javascript to the main report body.
    self._emit(s)

  def _render_message(self, *msg_elements):
    """Render message elements (strings or (text, detail, ...) tuples) to HTML."""
    elements = []
    detail_ids = []
    for element in msg_elements:
      # Each element can be a message or a (message, detail) pair, as received by handle_log().
      #
      # However, as an internal implementation detail, we also allow an element to be a tuple
      # (message, detail, detail_initially_visible[, detail_id])
      #
      # - If the detail exists, clicking on the text will toggle display of the detail and close
      #   all other details in this message.
      # - If detail_initially_visible is True, the detail will be displayed by default.
      #
      # Toggling is managed via detail_ids: when clicking on a detail, it closes all details
      # in this message with detail_ids different than that of the one being clicked on.
      # We allow detail_id to be explicitly specified, so that the open/closed state can be
      # preserved through refreshes. For example, when looking at the artifact cache stats,
      # if "hits" are open and "misses" are closed, we want to remember that even after
      # the cache stats are updated and the message re-rendered.
      if isinstance(element, string_types):
        element = [element]
      defaults = ('', None, None, False)
      # Map assumes None for missing values, so this will pick the default for those.
      # NOTE: this relies on Python 2 map() padding the shorter iterable with None.
      (text, detail, detail_id, detail_initially_visible) = \
        map(lambda x, y: x or y, element, defaults)
      element_args = {'text': self._htmlify_text(text)}
      if detail is not None:
        detail_id = detail_id or uuid.uuid4()
        detail_ids.append(detail_id)
        element_args.update({
          'detail': self._htmlify_text(detail),
          'detail_initially_visible': detail_initially_visible,
          'detail-id': detail_id
        })
      elements.append(element_args)
    args = {'elements': elements,
            'all-detail-ids': detail_ids}
    return self._renderer.render_name('message', args)

  def _emit(self, s):
    """Append content to the main report file."""
    if os.path.exists(self._html_dir):  # Make sure we're not immediately after a clean-all.
      self._report_file.write(s)
      self._report_file.flush()  # We must flush in the same thread as the write.

  def _overwrite(self, filename, s):
    """Overwrite a file with the specified contents."""
    if os.path.exists(self._html_dir):  # Make sure we're not immediately after a clean-all.
      with open(os.path.join(self._html_dir, filename), 'w') as f:
        f.write(s)

  def _htmlify_text(self, s):
    """Make text HTML-friendly."""
    colored = self._handle_ansi_color_codes(cgi.escape(s.decode('utf-8')))
    return linkify(self._buildroot, colored, self._linkify_memo).replace('\n', '</br>')

  _ANSI_COLOR_CODE_RE = re.compile(r'\033\[((?:\d|;)*)m')

  def _handle_ansi_color_codes(self, s):
    """Replace ansi escape sequences with spans of appropriately named css classes."""
    parts = HtmlReporter._ANSI_COLOR_CODE_RE.split(s)
    ret = []
    span_depth = 0
    # Note that len(parts) is always odd: text, code, text, code, ..., text.
    for i in range(0, len(parts), 2):
      ret.append(parts[i])
      if i + 1 < len(parts):
        for code in parts[i + 1].split(';'):
          # Bug fix: split() yields *strings*, so the previous `code == 0`
          # never matched and reset codes opened a bogus <span class="ansi-0">
          # instead of closing open spans. Per ANSI SGR, '0' and the empty
          # code ('\033[m') both mean reset.
          if code == '0' or code == '':  # Reset.
            while span_depth > 0:
              ret.append('</span>')
              span_depth -= 1
          else:
            ret.append('<span class="ansi-{}">'.format(code))
            span_depth += 1
    # Close any spans left dangling at end-of-text.
    while span_depth > 0:
      ret.append('</span>')
      span_depth -= 1
    return ''.join(ret)
|
horizontracy/rpi_tool
|
refs/heads/master
|
api/venv/lib/python2.7/site-packages/flask_restful/reqparse.py
|
8
|
from copy import deepcopy
from flask import current_app, request
from werkzeug.datastructures import MultiDict, FileStorage
from werkzeug import exceptions
import flask_restful
import decimal
import inspect
import six
class Namespace(dict):
    """A dict whose keys are also reachable as attributes.

    Reading a missing attribute raises AttributeError (rather than the
    KeyError a plain dict lookup would give), so the object behaves like a
    normal namespace to callers using getattr/hasattr.
    """

    def __getattr__(self, name):
        if name in self:
            return self[name]
        raise AttributeError(name)

    def __setattr__(self, name, value):
        # Attribute writes are stored as dict items.
        self[name] = value
# Human-readable names for request locations; used to build the
# "Missing required parameter in ..." error message in Argument.parse().
_friendly_location = {
    u'json': u'the JSON body',
    u'form': u'the post body',
    u'args': u'the query string',
    u'values': u'the post body or the query string',
    u'headers': u'the HTTP headers',
    u'cookies': u'the request\'s cookies',
    u'files': u'an uploaded file',
}

# Default converter for argument values: unicode on py2, str on py3.
text_type = lambda x: six.text_type(x)
class Argument(object):
    """
    :param name: Either a name or a list of option strings, e.g. foo or
        -f, --foo.
    :param default: The value produced if the argument is absent from the
        request.
    :param dest: The name of the attribute to be added to the object
        returned by :meth:`~reqparse.RequestParser.parse_args()`.
    :param bool required: Whether or not the argument may be omitted (optionals
        only).
    :param action: The basic type of action to be taken when this argument
        is encountered in the request. Valid options are "store" and "append".
    :param ignore: Whether to ignore cases where the argument fails type
        conversion
    :param type: The type to which the request argument should be
        converted. If a type raises an exception, the message in the
        error will be returned in the response. Defaults to :class:`unicode`
        in python2 and :class:`str` in python3.
    :param location: The attributes of the :class:`flask.Request` object
        to source the arguments from (ex: headers, args, etc.), can be an
        iterator. The last item listed takes precedence in the result set.
    :param choices: A container of the allowable values for the argument.
    :param help: A brief description of the argument, returned in the
        response when the argument is invalid with the name of the argument and
        the message passed to any exception raised by a type converter.
    :param bool case_sensitive: Whether argument values in the request are
        case sensitive or not (this will convert all values to lowercase)
    :param bool store_missing: Whether the arguments default value should
        be stored if the argument is missing from the request.
    :param bool trim: If enabled, trims whitespace around the argument.
    """

    def __init__(self, name, default=None, dest=None, required=False,
                 ignore=False, type=text_type, location=('json', 'values',),
                 choices=(), action='store', help=None, operators=('=',),
                 case_sensitive=True, store_missing=True, trim=False):
        self.name = name
        self.default = default
        self.dest = dest
        self.required = required
        self.ignore = ignore
        self.location = location
        self.type = type
        self.choices = choices
        self.action = action
        self.help = help
        self.case_sensitive = case_sensitive
        self.operators = operators
        self.store_missing = store_missing
        self.trim = trim

    def source(self, request):
        """Pulls values off the request in the provided location
        :param request: The flask request object to parse arguments from
        """
        if isinstance(self.location, six.string_types):
            # Single location: return its MultiDict (or an empty one).
            value = getattr(request, self.location, MultiDict())
            if callable(value):
                value = value()
            if value is not None:
                return value
        else:
            # Multiple locations: merge them all; later locations take
            # precedence because MultiDict.update appends their values last.
            values = MultiDict()
            for l in self.location:
                value = getattr(request, l, None)
                if callable(value):
                    value = value()
                if value is not None:
                    values.update(value)
            return values

        return MultiDict()

    def convert(self, value, op):
        """Convert a raw request value via self.type, trying progressively
        simpler call signatures: (value, name, op) -> (value, name) -> (value).
        """
        # Don't cast None
        if value is None:
            return None

        # and check if we're expecting a filestorage and haven't overridden `type`
        # (required because the below instantiation isn't valid for FileStorage)
        elif isinstance(value, FileStorage) and self.type == FileStorage:
            return value

        try:
            return self.type(value, self.name, op)
        except TypeError:
            try:
                if self.type is decimal.Decimal:
                    # Decimal cannot be constructed from a float reliably;
                    # go through str first.
                    return self.type(str(value), self.name)
                else:
                    return self.type(value, self.name)
            except TypeError:
                return self.type(value)

    def handle_validation_error(self, error, bundle_errors):
        """Called when an error is raised while parsing. Aborts the request
        with a 400 status and an error message

        :param error: the error that was raised
        :param bundle_errors: do not abort when first error occurs, return a
            dict with the name of the argument and the error message to be
            bundled
        """
        help_str = '(%s) ' % self.help if self.help else ''
        error_msg = ' '.join([help_str, str(error)]) if help_str else str(error)
        if current_app.config.get("BUNDLE_ERRORS", False) or bundle_errors:
            # Bundling: hand the (error, message) pair back to the caller
            # instead of aborting immediately.
            msg = {self.name: "%s" % (error_msg)}
            return error, msg
        msg = {self.name: "%s" % (error_msg)}
        flask_restful.abort(400, message=msg)

    def parse(self, request, bundle_errors=False):
        """Parses argument value(s) from the request, converting according to
        the argument's type.

        :param request: The flask request object to parse arguments from
        :param do not abort when first error occurs, return a
            dict with the name of the argument and the error message to be
            bundled
        """
        source = self.source(request)

        results = []

        # Sentinels: returned as the second element of the result tuple so the
        # caller can tell "argument present" from "default substituted".
        _not_found = False
        _found = True

        for operator in self.operators:
            # e.g. operator '>=' yields lookup key 'name>' (one '=' removed).
            name = self.name + operator.replace("=", "", 1)
            if name in source:
                # Account for MultiDict and regular dict
                if hasattr(source, "getlist"):
                    values = source.getlist(name)
                else:
                    values = [source.get(name)]

                for value in values:
                    if hasattr(value, "strip") and self.trim:
                        value = value.strip()
                    if hasattr(value, "lower") and not self.case_sensitive:
                        value = value.lower()

                        # NOTE: mutates self.choices in place so subsequent
                        # case-insensitive comparisons see lowercase choices.
                        if hasattr(self.choices, "__iter__"):
                            self.choices = [choice.lower()
                                            for choice in self.choices]

                    try:
                        value = self.convert(value, operator)
                    except Exception as error:
                        if self.ignore:
                            continue
                        return self.handle_validation_error(error, bundle_errors)

                    if self.choices and value not in self.choices:
                        if current_app.config.get("BUNDLE_ERRORS", False) or bundle_errors:
                            return self.handle_validation_error(
                                ValueError(u"{0} is not a valid choice".format(
                                    value)), bundle_errors)
                        self.handle_validation_error(
                            ValueError(u"{0} is not a valid choice".format(
                                value)), bundle_errors)

                    # Strict mode bookkeeping: mark this argument as consumed.
                    if name in request.unparsed_arguments:
                        request.unparsed_arguments.pop(name)

                    results.append(value)

        if not results and self.required:
            if isinstance(self.location, six.string_types):
                error_msg = u"Missing required parameter in {0}".format(
                    _friendly_location.get(self.location, self.location)
                )
            else:
                friendly_locations = [_friendly_location.get(loc, loc)
                                      for loc in self.location]
                error_msg = u"Missing required parameter in {0}".format(
                    ' or '.join(friendly_locations)
                )
            if current_app.config.get("BUNDLE_ERRORS", False) or bundle_errors:
                return self.handle_validation_error(ValueError(error_msg), bundle_errors)
            self.handle_validation_error(ValueError(error_msg), bundle_errors)

        if not results:
            # Argument absent: substitute the default (call it if callable).
            if callable(self.default):
                return self.default(), _not_found
            else:
                return self.default, _not_found

        if self.action == 'append':
            return results, _found

        if self.action == 'store' or len(results) == 1:
            return results[0], _found
        return results, _found
class RequestParser(object):
    """Enables adding and parsing of multiple arguments in the context of a
    single request. Ex::

        from flask import request

        parser = RequestParser()
        parser.add_argument('foo')
        parser.add_argument('int_bar', type=int)
        args = parser.parse_args()

    :param bool trim: If enabled, trims whitespace on all arguments in this
        parser
    :param bool bundle_errors: If enabled, do not abort when first error occurs,
        return a dict with the name of the argument and the error message to be
        bundled and return all validation errors
    """

    def __init__(self, argument_class=Argument, namespace_class=Namespace,
                 trim=False, bundle_errors=False):
        self.args = []
        self.argument_class = argument_class
        self.namespace_class = namespace_class
        self.trim = trim
        self.bundle_errors = bundle_errors

    def add_argument(self, *args, **kwargs):
        """Adds an argument to be parsed.

        Accepts either a single instance of Argument or arguments to be passed
        into :class:`Argument`'s constructor.

        See :class:`Argument`'s constructor for documentation on the
        available options.
        """
        if len(args) == 1 and isinstance(args[0], self.argument_class):
            self.args.append(args[0])
        else:
            self.args.append(self.argument_class(*args, **kwargs))

        # Do not know what other argument classes are out there
        if self.trim and self.argument_class is Argument:
            # enable trim for appended element
            self.args[-1].trim = True

        return self

    def parse_args(self, req=None, strict=False):
        """Parse all arguments from the provided request and return the results
        as a Namespace

        :param strict: if req includes args not in parser, throw 400 BadRequest exception
        """
        if req is None:
            req = request

        namespace = self.namespace_class()

        # A record of arguments not yet parsed; as each is found
        # among self.args, it will be popped out
        req.unparsed_arguments = dict(self.argument_class('').source(req)) if strict else {}
        errors = {}
        for arg in self.args:
            value, found = arg.parse(req, self.bundle_errors)
            if isinstance(value, ValueError):
                # Bundled error: `found` carries the {name: message} dict.
                errors.update(found)
                found = None
            if found or arg.store_missing:
                namespace[arg.dest or arg.name] = value
        if errors:
            flask_restful.abort(400, message=errors)

        if strict and req.unparsed_arguments:
            raise exceptions.BadRequest('Unknown arguments: %s'
                                        % ', '.join(req.unparsed_arguments.keys()))

        return namespace

    def copy(self):
        """ Creates a copy of this RequestParser with the same set of arguments """
        parser_copy = self.__class__(self.argument_class, self.namespace_class)
        parser_copy.args = deepcopy(self.args)
        # Bug fix: the copy previously dropped the parser-level settings, so a
        # copied parser silently lost its trim/bundle_errors behavior.
        parser_copy.trim = self.trim
        parser_copy.bundle_errors = self.bundle_errors
        return parser_copy

    def replace_argument(self, name, *args, **kwargs):
        """ Replace the argument matching the given name with a new version. """
        new_arg = self.argument_class(name, *args, **kwargs)
        for index, arg in enumerate(self.args[:]):
            if new_arg.name == arg.name:
                del self.args[index]
                self.args.append(new_arg)
                break
        return self

    def remove_argument(self, name):
        """ Remove the argument matching the given name. """
        for index, arg in enumerate(self.args[:]):
            if name == arg.name:
                del self.args[index]
                break
        return self
|
EvanK/ansible
|
refs/heads/devel
|
lib/ansible/modules/cloud/digital_ocean/digital_ocean_tag_facts.py
|
29
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2018, Ansible Project
# Copyright: (c) 2018, Abhijeet Kasurde <akasurde@redhat.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'
}
DOCUMENTATION = '''
---
module: digital_ocean_tag_facts
short_description: Gather facts about DigitalOcean tags
description:
- This module can be used to gather facts about DigitalOcean provided tags.
author: "Abhijeet Kasurde (@Akasurde)"
version_added: "2.6"
options:
tag_name:
description:
- Tag name that can be used to identify and reference a tag.
required: false
requirements:
- "python >= 2.6"
extends_documentation_fragment: digital_ocean.documentation
'''
EXAMPLES = '''
- name: Gather facts about all tags
digital_ocean_tag_facts:
oauth_token: "{{ oauth_token }}"
- name: Gather facts about tag with given name
digital_ocean_tag_facts:
oauth_token: "{{ oauth_token }}"
tag_name: "extra_awesome_tag"
- name: Get resources from tag name
digital_ocean_tag_facts:
register: resp_out
- set_fact:
resources: "{{ item.resources }}"
loop: "{{ resp_out.data|json_query(name) }}"
vars:
name: "[?name=='extra_awesome_tag']"
- debug: var=resources
'''
RETURN = '''
data:
description: DigitalOcean tag facts
returned: success
type: list
sample: [
{
"name": "extra-awesome",
"resources": {
"droplets": {
"count": 1,
...
}
}
},
]
'''
from traceback import format_exc
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.digital_ocean import DigitalOceanHelper
from ansible.module_utils._text import to_native
def core(module):
    """Gather DigitalOcean tag facts and exit the module with the result.

    When ``tag_name`` is supplied, a single tag dict is returned; otherwise
    the full (paginated) list of tags.

    :param module: AnsibleModule instance carrying the DigitalOcean params.
    """
    tag_name = module.params.get('tag_name', None)
    rest = DigitalOceanHelper(module)
    base_url = 'tags?'
    if tag_name is not None:
        # Bug fix: the URL was previously built as "tags?/<name>", which puts
        # the tag name after a query-string separator instead of in the path.
        # The single-tag endpoint is "tags/<name>".
        response = rest.get("tags/%s" % tag_name)
        status_code = response.status_code
        if status_code != 200:
            module.fail_json(msg="Failed to retrieve tags for DigitalOcean")

        resp_json = response.json
        tag = resp_json['tag']
    else:
        # base_url keeps its trailing '?' so the helper can append paging args.
        tag = rest.get_paginated_data(base_url=base_url, data_key_name='tags')

    module.exit_json(changed=False, data=tag)
def main():
    """Module entry point: build the argument spec and run ``core``."""
    argument_spec = DigitalOceanHelper.digital_ocean_argument_spec()
    argument_spec.update(tag_name=dict(type='str', required=False))
    module = AnsibleModule(argument_spec=argument_spec)
    try:
        core(module)
    except Exception as e:
        # Report any unexpected failure through the module result, with
        # the traceback attached for debugging.
        module.fail_json(msg=to_native(e), exception=format_exc())


if __name__ == '__main__':
    main()
|
suyashphadtare/vestasi-erp-1
|
refs/heads/develop
|
erpnext/erpnext/setup/doctype/sms_settings/sms_settings.py
|
32
|
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe, json
from frappe import _, throw, msgprint
from frappe.utils import cstr, nowdate
from frappe.model.document import Document
class SMSSettings(Document):
	# Controller for the 'SMS Settings' single DocType; all behavior lives
	# in the module-level helpers below, so no custom methods are needed.
	pass
def validate_receiver_nos(receiver_list):
	"""Strip formatting characters from each receiver number.

	Spaces, '+', '-', '(' and ')' are removed from every entry; throws if
	the input list itself is empty.
	"""
	strip_chars = (' ', '+', '-', '(', ')')
	validated_receiver_list = [
		''.join(ch for ch in number if ch not in strip_chars)
		for number in receiver_list
	]
	if not validated_receiver_list:
		throw(_("Please enter valid mobile nos"))
	return validated_receiver_list
def get_sender_name():
	"returns name as SMS sender"
	sender_name = frappe.db.get_value('Global Defaults', None, 'sms_sender_name') or \
		'ERPNXT'
	# NOTE(review): the message says "exactly 6 characters" but the check only
	# rejects names *longer* than 6 -- shorter names pass silently; confirm intent.
	if len(sender_name) > 6 and \
		frappe.db.get_default("country") == "India":
		throw("""As per TRAI rule, sender name must be exactly 6 characters.
			Kindly change sender name in Setup --> Global Defaults.
			Note: Hyphen, space, numeric digit, special characters are not allowed.""")
	return sender_name
@frappe.whitelist()
def get_contact_number(contact_name, value, key):
	"returns mobile number of the contact"
	# NOTE(review): `key` is interpolated directly into the SQL as a column
	# name. Since this function is whitelisted (client-callable), that is
	# SQL-injectable; `key` should be validated against an allow-list of
	# tabContact columns before use.
	number = frappe.db.sql("""select mobile_no, phone from tabContact where name=%s and %s=%s""" %
		('%s', key, '%s'), (contact_name, value))
	return number and (number[0][0] or number[0][1]) or ''
@frappe.whitelist()
def send_sms(receiver_list, msg, sender_name = ''):
	"""Validate receiver numbers and dispatch `msg` via the configured gateway.

	`receiver_list` may arrive as a JSON-encoded string (when called from the
	client), a single number, or a list of numbers.
	"""
	# Note: the redundant function-local `import json` was removed; `json`
	# is already imported at module level.
	if isinstance(receiver_list, basestring):
		receiver_list = json.loads(receiver_list)
		if not isinstance(receiver_list, list):
			receiver_list = [receiver_list]

	receiver_list = validate_receiver_nos(receiver_list)

	arg = {
		'receiver_list' : receiver_list,
		'message'       : msg,
		'sender_name'   : sender_name or get_sender_name()
	}

	if frappe.db.get_value('SMS Settings', None, 'sms_gateway_url'):
		ret = send_via_gateway(arg)
		msgprint(ret)
	else:
		msgprint(_("Please Update SMS Settings"))
def send_via_gateway(arg):
	"""Send the message to each receiver through the configured SMS gateway.

	Returns a list of raw gateway responses, one per receiver.
	"""
	ss = frappe.get_doc('SMS Settings', 'SMS Settings')
	args = {ss.message_parameter : arg.get('message')}
	# Static, gateway-specific parameters configured in SMS Settings.
	for d in ss.get("static_parameter_details"):
		args[d.parameter] = d.value
	resp = []
	for d in arg.get('receiver_list'):
		# One request per receiver, reusing the same args dict.
		args[ss.receiver_parameter] = d
		resp.append(send_request(ss.sms_gateway_url, args))
	return resp
# Send Request
# =========================================================
def send_request(gateway_url, args):
	"""Issue a GET request to the SMS gateway with `args` as query params.

	Returns the raw response body from the gateway.
	"""
	import httplib, urllib
	host, api_path = scrub_gateway_url(gateway_url)
	connection = httplib.HTTPConnection(host)  # open connection
	request_headers = {'Accept': "text/plain, text/html, */*"}
	connection.request('GET', api_path + urllib.urlencode(args), headers = request_headers)  # send request
	response = connection.getresponse()  # get response
	return response.read()
# Split gateway url to server and api url
# =========================================================
def scrub_gateway_url(url):
	"""Split a gateway URL into (server, api_url).

	Generalized to strip an 'https://' prefix as well as 'http://' -- the
	previous version left 'https:' behind as a bogus server name. The
	returned api_url always ends with '?' so that query parameters can be
	appended directly.

	>>> scrub_gateway_url('http://example.com/api/send')
	('example.com', '/api/send?')
	"""
	url = url.strip()
	# Drop the scheme, if present, before splitting host from path.
	for scheme in ('http://', 'https://'):
		if url.startswith(scheme):
			url = url[len(scheme):]
			break
	parts = url.strip().split('/')
	server = parts.pop(0)
	api_url = '/' + '/'.join(parts)
	if not api_url.endswith('?'):
		api_url += '?'
	return server, api_url
# Create SMS Log
# =========================================================
def create_sms_log(arg, sent_sms):
	"""Record an outgoing SMS batch in the 'SMS Log' doctype.

	:param arg: dict with 'sender_name', 'receiver_list' and 'message'.
	:param sent_sms: count of messages actually accepted by the gateway.
	"""
	# NOTE(review): frappe.get_doc('SMS Log') normally loads an existing doc;
	# creating a new log entry would usually be frappe.new_doc('SMS Log') --
	# confirm against the frappe version in use.
	sl = frappe.get_doc('SMS Log')
	sl.sender_name = arg['sender_name']
	sl.sent_on = nowdate()
	sl.receiver_list = cstr(arg['receiver_list'])
	sl.message = arg['message']
	sl.no_of_requested_sms = len(arg['receiver_list'])
	sl.no_of_sent_sms = sent_sms
	sl.save()
|
tlksio/tlksio
|
refs/heads/develop
|
env/lib/python3.4/site-packages/setuptools/tests/fixtures.py
|
135
|
try:
from unittest import mock
except ImportError:
import mock
import pytest
from . import contexts
@pytest.yield_fixture
def user_override():
    """
    Override site.USER_BASE and site.USER_SITE with temporary directories in
    a context.
    """
    # Flattened form of the original nested `with` pyramid: managers enter
    # left-to-right, so each patch still sees the tempdir created before it.
    with contexts.tempdir() as user_base, \
            mock.patch('site.USER_BASE', user_base), \
            contexts.tempdir() as user_site, \
            mock.patch('site.USER_SITE', user_site), \
            contexts.save_user_site_setting():
        yield
@pytest.yield_fixture
def tmpdir_cwd(tmpdir):
    # Fixture: run the test with pytest's per-test tmpdir as the working
    # directory; yields the original cwd (restored automatically on exit).
    with tmpdir.as_cwd() as orig:
        yield orig
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.