code stringlengths 3 1.05M | repo_name stringlengths 5 104 | path stringlengths 4 251 | language stringclasses 1 value | license stringclasses 15 values | size int64 3 1.05M |
|---|---|---|---|---|---|
#!/usr/bin/env python
import moose
import yacml
import sys
def main( ):
    """Load the YACML model named on the command line.

    Usage: run_test.py MODEL_FILE
    Exits with a usage message if no model file argument is given
    (previously raised a bare IndexError).
    """
    if len(sys.argv) < 2:
        sys.exit('Usage: %s MODEL_FILE' % sys.argv[0])
    yacmlFile = sys.argv[1]
    yacml.loadModel( yacmlFile )
    print( '[INFO] Done ' )

if __name__ == '__main__':
    main()
| dilawar/moose-chemical | test/run_test.py | Python | gpl-2.0 | 203 |
# -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2014 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Default configuration of the authors module."""
# Acceptable author-identifier types for the authors module.
# `authorid` is the only internal and the only required Invenio author
# identifier; external schemes (e.g. ORCID) would be appended here.
AUTHORS_IDENTIFIERS_TYPES = ['authorid']
| egabancho/invenio | invenio/modules/authors/config.py | Python | gpl-2.0 | 1,020 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# --- BEGIN_HEADER ---
#
# settingsaction - back end handler that saves a user's personal settings
# Copyright (C) 2003-2014 The MiG Project lead by Brian Vinter
#
# This file is part of MiG.
#
# MiG is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# MiG is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# -- END_HEADER ---
#
"""Backe end for personal settings"""
import os
import tempfile
import shared.returnvalues as returnvalues
from shared.functional import validate_input_and_cert
from shared.handlers import correct_handler
from shared.init import initialize_main_variables
from shared.settings import parse_and_save_settings, parse_and_save_widgets, \
parse_and_save_profile, parse_and_save_ssh, parse_and_save_davs, \
parse_and_save_ftps
from shared.profilekeywords import get_keywords_dict as profile_keywords
from shared.settingskeywords import get_keywords_dict as settings_keywords
from shared.widgetskeywords import get_keywords_dict as widgets_keywords
def extend_defaults(defaults, user_args):
    """Extract topic from untrusted user_args dictionary and safely extend
    defaults with topic-specific defaults before validation.

    Returns the (mutated) defaults dictionary: one empty-list default is
    added per field of the selected topic so the later
    validate_input_and_cert call accepts those fields.
    """
    # user_args values are lists of strings; take the last occurrence.
    topic = user_args.get('topic', defaults['topic'])[-1]
    if topic == 'general':
        keywords_dict = settings_keywords()
    elif topic == 'widgets':
        keywords_dict = widgets_keywords()
    elif topic == 'profile':
        keywords_dict = profile_keywords()
    elif topic in ('sftp', 'webdavs', 'ftps'):
        # All three interactive-access topics share the same two fields;
        # consolidated to match the equivalent branch in main().
        keywords_dict = {'publickeys': '', 'password': ''}
    else:
        # should never get here
        keywords_dict = {}
    for keyword in keywords_dict.keys():
        defaults[keyword] = ['']
    return defaults
def signature():
    """Signature of the main function: output kind plus default arguments."""
    return ['text', {'topic': ['general']}]
def main(client_id, user_arguments_dict):
    """Main function used by front end.

    Validates the POSTed settings form for one topic, renders it to an
    mRSL-style temp file (for the parser-based topics) and delegates to
    the matching parse_and_save_* helper.  Returns the usual
    (output_objects, status) tuple.
    """
    (configuration, logger, output_objects, op_name) = \
        initialize_main_variables(client_id, op_header=False)
    output_objects.append({'object_type': 'header', 'text'
                          : '%s settings' % configuration.short_title })
    defaults = signature()[1]
    # Merge the per-topic fields into defaults so validation accepts them.
    extend_defaults(defaults, user_arguments_dict)
    (validate_status, accepted) = validate_input_and_cert(
        user_arguments_dict,
        defaults,
        output_objects,
        client_id,
        configuration,
        allow_rejects=False,
        )
    if not validate_status:
        return (accepted, returnvalues.CLIENT_ERROR)
    # Settings changes are state-mutating, so require POST.
    if not correct_handler('POST'):
        output_objects.append(
            {'object_type': 'error_text', 'text'
             : 'Only accepting POST requests to prevent unintended updates'})
        return (output_objects, returnvalues.CLIENT_ERROR)
    topic = accepted['topic'][-1]
    topic_mrsl = ''
    if topic == 'general':
        keywords_dict = settings_keywords()
    elif topic == 'widgets':
        keywords_dict = widgets_keywords()
    elif topic == 'profile':
        keywords_dict = profile_keywords()
    elif topic in ('sftp', 'webdavs', 'ftps'):
        # We don't use mRSL parser here
        keywords_dict = {}
    else:
        # should never get here
        keywords_dict = {}
    # Render every submitted non-empty field as an ::UPPERCASE:: mRSL section.
    for keyword in keywords_dict.keys():
        received_arguments = accepted[keyword]
        if received_arguments != None and received_arguments != ['\r\n']:
            topic_mrsl += '''::%s::
%s
''' % (keyword.upper(),
       '\n'.join(received_arguments))
    # Save content to temp file so the shared mRSL parsers can read it.
    try:
        (filehandle, tmptopicfile) = tempfile.mkstemp(text=True)
        os.write(filehandle, topic_mrsl)
        os.close(filehandle)
    except Exception:
        output_objects.append(
            {'object_type': 'error_text', 'text':
             'Problem writing temporary topic file on server.'})
        return (output_objects, returnvalues.SYSTEM_ERROR)
    # Parse topic: parser-based topics read the temp file, the access
    # topics (sftp/webdavs/ftps) take keys/password directly.
    if topic == 'general':
        (parse_status, parse_msg) = \
            parse_and_save_settings(tmptopicfile, client_id,
                                    configuration)
    elif topic == 'widgets':
        (parse_status, parse_msg) = \
            parse_and_save_widgets(tmptopicfile, client_id,
                                   configuration)
    elif topic == 'profile':
        (parse_status, parse_msg) = \
            parse_and_save_profile(tmptopicfile, client_id,
                                   configuration)
    elif topic == 'sftp':
        publickeys = '\n'.join(accepted.get('publickeys', ['']))
        password = accepted.get('password', [''])[-1].strip()
        (parse_status, parse_msg) = \
            parse_and_save_ssh(publickeys, password, client_id,
                               configuration)
    elif topic == 'webdavs':
        publickeys = '\n'.join(accepted.get('publickeys', ['']))
        password = accepted.get('password', [''])[-1].strip()
        (parse_status, parse_msg) = \
            parse_and_save_davs(publickeys, password, client_id,
                                configuration)
    elif topic == 'ftps':
        publickeys = '\n'.join(accepted.get('publickeys', ['']))
        password = accepted.get('password', [''])[-1].strip()
        (parse_status, parse_msg) = \
            parse_and_save_ftps(publickeys, password, client_id,
                                configuration)
    else:
        output_objects.append({'object_type': 'error_text', 'text'
                              : 'No such settings topic: %s' % topic
                              })
        return (output_objects, returnvalues.CLIENT_ERROR)
    try:
        os.remove(tmptopicfile)
    except Exception, exc:
        pass # probably deleted by parser!
        # output_objects.append(
        #     {"object_type":"error_text", "text":
        #      "Could not remove temporary topic file %s, exception: %s" % \
        #      (tmptopicfile, exc)})
    if not parse_status:
        output_objects.append({'object_type': 'error_text', 'text'
                              : 'Error parsing %s settings file: %s'
                              % (topic, parse_msg)})
        return (output_objects, returnvalues.CLIENT_ERROR)
    # print saved topic
    output_objects.append({'object_type': 'text', 'text'
                          : 'Saved %s settings:' % topic})
    # Enable next lines for debug
    #for line in topic_mrsl.split('\n'):
    #    output_objects.append({'object_type': 'text', 'text': line})
    output_objects.append({'object_type': 'link',
                           'destination': 'settings.py?topic=%s' % topic,
                           'class': 'backlink',
                           'title': 'Go back to %s settings' % topic,
                           'text': 'Back to %s settings' % topic})
    return (output_objects, returnvalues.OK)
| heromod/migrid | mig/shared/functionality/settingsaction.py | Python | gpl-2.0 | 7,805 |
# When flask_babelex is unavailable, fall back to no-op translation helpers
# so flask_admin keeps working without i18n support.
try:
    from flask_babelex import Domain
except ImportError:
    # No babel: plain %-interpolating stand-ins with identical signatures.
    def gettext(string, **variables):
        return string % variables

    def ngettext(singular, plural, num, **variables):
        variables.setdefault('num', num)
        return (singular if num == 1 else plural) % variables

    def lazy_gettext(string, **variables):
        return gettext(string, **variables)

    class Translations(object):
        ''' dummy Translations class for WTForms, no translation support '''
        def gettext(self, string):
            return string

        def ngettext(self, singular, plural, n):
            return singular if n == 1 else plural
else:
    from flask_admin import translations

    class CustomDomain(Domain):
        # Babel domain that prefers a view-specific translations directory
        # (admin.translations_path) over the bundled flask_admin catalogs.
        def __init__(self):
            super(CustomDomain, self).__init__(translations.__path__[0], domain='admin')

        def get_translations_path(self, ctx):
            view = get_current_view()
            if view is not None:
                dirname = view.admin.translations_path
                if dirname is not None:
                    return dirname
            return super(CustomDomain, self).get_translations_path(ctx)

    domain = CustomDomain()

    gettext = domain.gettext
    ngettext = domain.ngettext
    lazy_gettext = domain.lazy_gettext

    # messages_path moved between WTForms versions; try new location first.
    try:
        from wtforms.i18n import messages_path
    except ImportError:
        from wtforms.ext.i18n.utils import messages_path

    wtforms_domain = Domain(messages_path(), domain='wtforms')

    class Translations(object):
        ''' Fixes WTForms translation support and uses wtforms translations '''
        def gettext(self, string):
            t = wtforms_domain.get_translations()
            return t.ugettext(string)

        def ngettext(self, singular, plural, n):
            t = wtforms_domain.get_translations()
            return t.ungettext(singular, plural, n)

# lazy imports (at the bottom to avoid a circular import with .helpers)
from .helpers import get_current_view
| Widiot/simpleblog | venv/lib/python3.5/site-packages/flask_admin/babel.py | Python | mit | 1,945 |
from .cmd import *
from .io import *
| gautelinga/BERNAISE | common/__init__.py | Python | mit | 37 |
import os
from nav_util import *
def signal():
    # Play an alternating two-tone signal (1200 Hz then 1000 Hz) in a loop
    # for as long as the shared g.signalling flag stays true.
    while g.signalling:
        os.system("(python tone2.py 8000 3000 1200;python tone2.py 8000 3000 1000) 2>/dev/null")
def obstaclebeep():
    # Single blocking 2400 Hz warning beep; stderr is discarded.
    os.system("python tone2.py 16000 3000 2400 2>/dev/null")
def dospeak(s, p):
    """Blocking espeak call: speak s at pitch p; a '#' in the text is
    expanded to the current g.speakcount value."""
    text = s
    if '#' in text:
        text = text.replace('#', str(g.speakcount))
    # NOTE(review): text is interpolated straight into a shell command --
    # safe only for trusted, internally-generated phrases.
    os.system("espeak -a500 -p%d '%s' >/dev/null 2>&1" % (p, text))
def speak(str):
    """Speak the given text asynchronously; car2 gets a higher pitch."""
    pitch = 80 if g.VIN == "car2" else 50
    start_new_thread(dospeak, (str, pitch))
def blinkleds():
    # Advance the LED animation cycling through states 0..6.
    g.ledstate = (g.ledstate + 1)%7
    setleds(0, g.ledstate)
def warningblink(state):
    """Switch hazard blinking on (state == True) or off; does nothing when
    the LEDs are already in the requested state."""
    target = (state == True)
    if g.warningblinking == target:
        return
    if target:
        setleds(7, 0)
    else:
        setleds(0, 7)
    g.warningblinking = target
def setleds(mask, code):
    """Queue an LED mask/code pair in g.ledcmd for the CAN sender thread."""
    print("setleds %d %d" % (mask, code))
    # Direct CAN transmission is disabled in this build; the original
    # (dead) path was:
    #   os.system("/home/pi/can-utils/cansend can0 '461#060000006D3%d3%d00'"
    #             % (mask, code))
    g.ledcmd = (mask, code)
| sics-sse/moped | position/car-control-old/nav_signal.py | Python | gpl-2.0 | 1,125 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
import httplib
# causes httplib to return the partial response from a server in case the read fails to be complete.
def patch_http_response_read(func):
    """Wrap HTTPResponse.read so an IncompleteRead yields the partial body
    instead of propagating the exception (Python 2 httplib)."""
    def inner(*args):
        try:
            return func(*args)
        except httplib.IncompleteRead, e:
            # Server closed the connection early; return what arrived.
            return e.partial
    return inner

# Monkey-patch the stock reader process-wide at import time.
httplib.HTTPResponse.read = patch_http_response_read(httplib.HTTPResponse.read)
# -*- coding: utf-8 -*-
# Generated by Django 1.9.8 on 2017-04-03 14:53
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """AttributeSet schema changes for 4.14:

    - rename ``active`` -> ``enabled`` (and re-declare the field),
    - rename ``attributes`` -> ``included_attributes``,
    - rename ``excludes`` -> ``excluded_attributes``,
    - add an optional ``description`` text field,
    - refresh model verbose names and permissions.

    Each rename is immediately followed by an AlterField re-declaring the
    renamed field so its verbose_name/options match the new name.
    """

    dependencies = [
        ('server', '0012_4_14_fault_definitions'),
    ]

    operations = [
        migrations.RenameField(
            model_name='AttributeSet',
            old_name='active',
            new_name='enabled',
        ),
        migrations.AlterField(
            model_name='AttributeSet',
            name='enabled',
            field=models.BooleanField(default=True, verbose_name='enabled'),
        ),
        migrations.RenameField(
            model_name='AttributeSet',
            old_name='attributes',
            new_name='included_attributes',
        ),
        migrations.AlterField(
            model_name='AttributeSet',
            name='included_attributes',
            field=models.ManyToManyField(
                blank=True, to='server.Attribute',
                verbose_name='included attributes'
            ),
        ),
        migrations.RenameField(
            model_name='AttributeSet',
            old_name='excludes',
            new_name='excluded_attributes',
        ),
        migrations.AlterField(
            model_name='AttributeSet',
            name='excluded_attributes',
            field=models.ManyToManyField(
                blank=True,
                related_name='ExcludedAttributesGroup',
                to='server.Attribute',
                verbose_name='excluded attributes'
            ),
        ),
        migrations.AddField(
            model_name='AttributeSet',
            name='description',
            field=models.TextField(blank=True, null=True, verbose_name='description'),
        ),
        migrations.AlterModelOptions(
            name='AttributeSet',
            options={
                'permissions': (('can_save_attributeset', 'Can save Attributes Set'),),
                'verbose_name': 'Attribute Set',
                'verbose_name_plural': 'Attribute Sets'
            },
        ),
    ]
| migasfree/migasfree | migasfree/server/migrations/0013_4_14_attribute_set.py | Python | gpl-3.0 | 2,093 |
#!/usr/bin/python
# -*- coding: latin-1 -*-
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation; either version 3, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTIBILITY
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
# for more details.
"Simple XML manipulation"
__author__ = "Mariano Reingart (reingart@gmail.com)"
__copyright__ = "Copyright (C) 2008/009 Mariano Reingart"
__license__ = "LGPL 3.0"
__version__ = "1.02c"
import xml.dom.minidom
from decimal import Decimal
import datetime
import time
DEBUG = False

# Functions to serialize/unserialize special immutable types
# (_u = unmarshal from string, _m = marshal to string):
datetime_u = lambda s: datetime.datetime.strptime(s, "%Y-%m-%dT%H:%M:%S")
datetime_m = lambda dt: dt.isoformat('T')
date_u = lambda s: datetime.datetime.strptime(s[0:10], "%Y-%m-%d").date()
date_m = lambda d: d.strftime("%Y-%m-%d")
time_u = lambda s: datetime.datetime.strptime(s, "%H:%M:%S").time()
# NOTE(review): time_m emits "%H%M%S" (no colons) while time_u parses
# "%H:%M:%S" -- a marshalled time cannot be unmarshalled back. Presumably
# matches a peer's wire format; confirm before changing.
time_m = lambda d: d.strftime("%H%M%S")
bool_u = lambda s: {'0':False, 'false': False, '1': True, 'true': True}[s]
# aliases:
class Alias():
    """Callable pairing a Python type with the XML schema type name it
    represents; calling the alias coerces a value via the Python type."""

    def __init__(self, py_type, xml_type):
        self.py_type = py_type
        self.xml_type = xml_type

    def __call__(self, value):
        # Delegate conversion to the wrapped Python type.
        return self.py_type(value)

    def __repr__(self):
        return "<alias '%s' for '%s'>" % (self.xml_type, self.py_type)
# Concrete schema aliases used in TYPE_MAP below (Python 2: long/unicode).
byte = Alias(str,'byte')
short = Alias(int,'short')
double = Alias(float,'double')
integer = Alias(long,'integer')
DateTime = datetime.datetime
Date = datetime.date
Time = datetime.time

# Conversion map (python type) -> xml schema type name
TYPE_MAP = {str:'string',unicode:'string',
            bool:'boolean', short:'short', byte:'byte',
            int:'int', long:'long', integer:'integer',
            float:'float', double:'double',
            Decimal:'decimal',
            datetime.datetime:'dateTime', datetime.date:'date',
            }
# Type-specific (de)serializers; anything absent falls back to str()/type().
TYPE_MARSHAL_FN = {datetime.datetime:datetime_m, datetime.date:date_m,}
TYPE_UNMARSHAL_FN = {datetime.datetime:datetime_u, datetime.date:date_u,
                     bool:bool_u,
                     }
class OrderedDict(dict):
    """Minimal ordered dictionary for xsd:sequences: a plain dict plus a
    parallel list remembering first-insertion order of the keys."""

    def __init__(self):
        self.__order = []
        self.array = False

    def __setitem__(self, key, value):
        # New keys go to the end; re-assignment keeps the existing slot.
        if key not in self.__order:
            self.__order.append(key)
        dict.__setitem__(self, key, value)

    def insert(self, key, value, index=0):
        # Like __setitem__, except a brand-new key is placed at `index`.
        if key not in self.__order:
            self.__order.insert(index, key)
        dict.__setitem__(self, key, value)

    def __delitem__(self, key):
        if key in self.__order:
            self.__order.remove(key)
        dict.__delitem__(self, key)

    def __iter__(self):
        return iter(self.__order)

    def keys(self):
        # Returns the live ordering list (callers rely on list semantics).
        return self.__order

    def items(self):
        return [(k, self[k]) for k in self.__order]

    def update(self, other):
        for key, value in other.items():
            self[key] = value
        if isinstance(other, OrderedDict):
            self.array = other.array

    def __str__(self):
        return "*%s*" % dict.__str__(self)

    def __repr__(self):
        body = ", ".join(['%s: %s' % (repr(k), repr(v)) for k, v in self.items()])
        s = "*{%s}*" % body
        if self.array and False:  # array decoration intentionally disabled
            s = "[%s]" % s
        return s
class SimpleXMLElement(object):
    """Simple XML manipulation (simil PHP's SimpleXML).

    A thin convenience wrapper over xml.dom.minidom: attribute access
    navigates child tags, item access reads/writes XML attributes, and
    marshall/unmarshall convert between python values and XML. Python 2
    only (uses unicode/basestring and print statements).
    """
    def __init__(self, text = None, elements = None, document = None, namespace = None, prefix=None):
        # Either parse `text` into a fresh document, or wrap an existing
        # (elements, document) pair from a previous instance.
        self.__ns = namespace
        self.__prefix = prefix
        if text:
            try:
                self.__document = xml.dom.minidom.parseString(text)
            except:
                if DEBUG: print text
                raise
            self.__elements = [self.__document.documentElement]
        else:
            self.__elements = elements
            self.__document = document

    def add_child(self,name,text=None,ns=True):
        "Adding a child tag to a node"
        # Namespaced creation only when both requested (ns) and configured.
        if not ns or not self.__ns:
            if DEBUG: print "adding %s" % (name)
            element = self.__document.createElement(name)
        else:
            if DEBUG: print "adding %s ns %s %s" % (name, self.__ns,ns)
            if self.__prefix:
                element = self.__document.createElementNS(self.__ns, "%s:%s" % (self.__prefix, name))
            else:
                element = self.__document.createElementNS(self.__ns, name)
        if text:
            if isinstance(text, unicode):
                element.appendChild(self.__document.createTextNode(text))
            else:
                element.appendChild(self.__document.createTextNode(str(text)))
        self._element.appendChild(element)
        return SimpleXMLElement(
            elements=[element],
            document=self.__document,
            namespace=self.__ns,
            prefix=self.__prefix)

    def __setattr__(self, tag, text):
        "Add text child tag node (short form)"
        # Underscore-prefixed names are real instance attributes; anything
        # else becomes a child element.
        if tag.startswith("_"):
            object.__setattr__(self, tag, text)
        else:
            if DEBUG: print "__setattr__(%s,%s)" % (tag, text)
            self.add_child(tag,text)

    def add_comment(self, data):
        "Add an xml comment to this child"
        comment = self.__document.createComment(data)
        self._element.appendChild(comment)

    def as_xml(self,filename=None,pretty=False):
        "Return the XML representation of the document"
        # NOTE(review): the filename parameter is accepted but unused.
        if not pretty:
            return self.__document.toxml('UTF-8')
        else:
            return self.__document.toprettyxml(encoding='UTF-8')

    def __repr__(self):
        "Return the XML representation of this tag"
        return self._element.toxml('UTF-8')

    def get_name(self):
        "Return the tag name of this node"
        return self._element.tagName

    def get_local_name(self):
        "Return the tag local name (without prefix) of this node"
        return self._element.localName

    def get_prefix(self):
        "Return the namespace prefix of this node"
        return self._element.prefix

    def get_namespace_uri(self, ns):
        "Return the namespace uri for a prefix"
        v = self.__document.documentElement.attributes['xmlns:%s' % ns]
        return v.value

    def attributes(self):
        "Return a dict of attributes for this tag"
        #TODO: use slice syntax [:]?
        return self._element.attributes

    def __getitem__(self, item):
        "Return xml tag attribute value or a slice of attributes (iter)"
        if DEBUG: print "__getitem__(%s)" % item
        if isinstance(item,basestring):
            # string key: named XML attribute (None when missing)
            if self._element.hasAttribute(item):
                return self._element.attributes[item].value
        elif isinstance(item, slice):
            # return a list with name:values
            return self._element.attributes.items()[item]
        else:
            # return element by index (position)
            element = self.__elements[item]
            return SimpleXMLElement(
                elements=[element],
                document=self.__document,
                namespace=self.__ns,
                prefix=self.__prefix)

    def add_attribute(self, name, value):
        "Set an attribute value from a string"
        self._element.setAttribute(name, value)

    def __setitem__(self, item, value):
        "Set an attribute value"
        if isinstance(item,basestring):
            self.add_attribute(item, value)
        elif isinstance(item, slice):
            # set multiple attributes at once
            for k, v in value.items():
                self.add_attribute(k, v)

    def __call__(self, tag=None, ns=None, children=False, error=True):
        "Search (even in child nodes) and return a child tag by name"
        # Lookup order: explicit ns argument, then the instance namespace,
        # then a plain (namespace-less) tag search.
        try:
            if tag is None:
                # if no name given, iterate over siblings (same level)
                return self.__iter__()
            if children:
                # future: filter children? by ns?
                return self.children()
            elements = None
            if isinstance(tag, int):
                # return tag by index
                elements=[self.__elements[tag]]
            if ns and not elements:
                for ns_uri in isinstance(ns, (tuple, list)) and ns or (ns, ):
                    if DEBUG: print "searching %s by ns=%s" % (tag,ns_uri)
                    elements = self._element.getElementsByTagNameNS(ns_uri, tag)
                    if elements:
                        break
            if self.__ns and not elements:
                if DEBUG: print "searching %s by ns=%s" % (tag, self.__ns)
                elements = self._element.getElementsByTagNameNS(self.__ns, tag)
            if not elements:
                if DEBUG: print "searching %s " % (tag)
                elements = self._element.getElementsByTagName(tag)
            if not elements:
                if DEBUG: print self._element.toxml()
                if error:
                    raise AttributeError("No elements found")
                else:
                    return
            return SimpleXMLElement(
                elements=elements,
                document=self.__document,
                namespace=self.__ns,
                prefix=self.__prefix)
        except AttributeError, e:
            raise AttributeError("Tag not found: %s (%s)" % (tag, str(e)))

    def __getattr__(self, tag):
        "Shortcut for __call__"
        return self.__call__(tag)

    def __iter__(self):
        "Iterate over xml tags at this level"
        try:
            for __element in self.__elements:
                yield SimpleXMLElement(
                    elements=[__element],
                    document=self.__document,
                    namespace=self.__ns,
                    prefix=self.__prefix)
        except:
            raise

    def __dir__(self):
        "List xml children tags names"
        return [node.tagName for node
                in self._element.childNodes
                if node.nodeType != node.TEXT_NODE]

    def children(self):
        "Return xml children tags element (None when there are no children)"
        elements=[__element for __element in self._element.childNodes
                  if __element.nodeType == __element.ELEMENT_NODE]
        if not elements:
            return None
            #raise IndexError("Tag %s has no children" % self._element.tagName)
        return SimpleXMLElement(
            elements=elements,
            document=self.__document,
            namespace=self.__ns,
            prefix=self.__prefix)

    def __len__(self):
        "Return elements count"
        return len(self.__elements)

    def __contains__( self, item):
        "Search for a tag name in this element or child nodes"
        return self._element.getElementsByTagName(item)

    def __unicode__(self):
        "Returns the unicode text nodes of the current element"
        if self._element.childNodes:
            rc = u""
            for node in self._element.childNodes:
                if node.nodeType == node.TEXT_NODE:
                    rc = rc + node.data
            return rc
        return ''

    def __str__(self):
        "Returns the str text nodes of the current element"
        return unicode(self).encode("utf8","ignore")

    def __int__(self):
        "Returns the integer value of the current element"
        return int(self.__str__())

    def __float__(self):
        "Returns the float value of the current element"
        try:
            return float(self.__str__())
        except:
            raise IndexError(self._element.toxml())

    # first wrapped DOM element; most accessors operate on this one
    _element = property(lambda self: self.__elements[0])

    def unmarshall(self, types):
        "Convert to python values the current serialized xml element"
        # types is a dict of {tag name: conversion function}
        # example: types={'p': {'a': int,'b': int}, 'c': [{'d':str}]}
        # expected xml: <p><a>1</a><b>2</b></p><c><d>hola</d><d>chau</d>
        # returned value: {'p': {'a':1,'b':2}, 'c':[{'d':'hola'},{'d':'chau'}]}
        d = {}
        for node in self():
            name = str(node.get_local_name())
            try:
                fn = types[name]
            except (KeyError, ), e:
                raise TypeError("Tag: %s invalid" % (name,))
            if isinstance(fn,list):
                # repetitive array: unmarshall each child with the element type
                value = []
                children = node.children()
                for child in children and children() or []:
                    value.append(child.unmarshall(fn[0]))
            elif isinstance(fn,dict):
                # nested structure: recurse (None when the node is empty)
                children = node.children()
                value = children and children.unmarshall(fn)
            else:
                if fn is None: # xsd:anyType not unmarshalled
                    value = node
                elif str(node) or fn == str:
                    try:
                        # get special deserialization function (if any)
                        fn = TYPE_UNMARSHAL_FN.get(fn,fn)
                        value = fn(unicode(node))
                    except (ValueError, TypeError), e:
                        raise ValueError("Tag: %s: %s" % (name, unicode(e)))
                else:
                    value = None
            d[name] = value
        return d

    def marshall(self, name, value, add_child=True, add_comments=False, ns=False):
        "Analyze python value and add the serialized XML element using tag name"
        if isinstance(value, dict): # serialize dict (<key>value</key>)
            child = add_child and self.add_child(name,ns=ns) or self
            for k,v in value.items():
                child.marshall(k, v, add_comments=add_comments, ns=ns)
        elif isinstance(value, tuple): # serialize tuple (<key>value</key>)
            child = add_child and self.add_child(name,ns=ns) or self
            for k,v in value:
                getattr(self,name).marshall(k, v, add_comments=add_comments, ns=ns)
        elif isinstance(value, list): # serialize lists
            child=self.add_child(name,ns=ns)
            if add_comments:
                child.add_comment("Repetitive array of:")
            for t in value:
                child.marshall(name,t, False, add_comments=add_comments, ns=ns)
        elif isinstance(value, basestring): # do not convert strings or unicodes
            self.add_child(name,value,ns=ns)
        elif value is None: # sent a empty tag?
            self.add_child(name,ns=ns)
        elif value in TYPE_MAP.keys():
            # add commented placeholders for simple types (for examples/help only)
            child = self.add_child(name,ns=ns)
            child.add_comment(TYPE_MAP[value])
        else: # the rest of object types are converted to string
            # get special serialization function (if any)
            fn = TYPE_MARSHAL_FN.get(type(value),str)
            self.add_child(name,fn(value),ns=ns)

    def import_node(self, other):
        # Deep-copy another element's subtree into this document.
        x = self.__document.importNode(other._element, True) # deep copy
        self._element.appendChild(x)
if __name__ == "__main__":
    # Inline smoke tests exercising parsing, navigation, attribute access,
    # child creation and node import (Python 2 only).
    span = SimpleXMLElement('<span><a href="python.org.ar">pyar</a><prueba><i>1</i><float>1.5</float></prueba></span>')
    assert str(span.a)==str(span('a'))==str(span.a(0))=="pyar"
    assert span.a['href']=="python.org.ar"
    assert int(span.prueba.i)==1 and float(span.prueba.float)==1.5
    span1 = SimpleXMLElement('<span><a href="google.com">google</a><a>yahoo</a><a>hotmail</a></span>')
    assert [str(a) for a in span1.a()] == ['google', 'yahoo', 'hotmail']
    span1.add_child('a','altavista')
    span1.b = "ex msn"
    # slice assignment sets several attributes at once
    d = {'href':'http://www.bing.com/', 'alt': 'Bing'}
    span1.b[:] = d
    assert sorted([(k,v) for k,v in span1.b[:]]) == sorted(d.items())
    print span1.as_xml()
    assert 'b' in span1
    span.import_node(span1)
    print span.as_xml()
| trosa/forca | gluon/contrib/pysimplesoap/simplexml.py | Python | gpl-2.0 | 16,161 |
def sum_values_above_threshold(value_string, threshold):
    ''' (str, int) -> int

    Precondition: value_string.isdigit() returns True

    Return the sum of the individual digits in value_string that are
    greater than threshold.

    >>> sum_values_above_threshold('153382', 4)
    13
    >>> sum_values_above_threshold('12345', 5)
    0
    '''
    # Use the builtin sum over a filtered digit stream instead of the
    # original manual accumulator named `sum`, which shadowed the builtin
    # and converted each character twice.
    return sum(d for d in map(int, value_string) if d > threshold)
| mdnu/snake | csc work/final/w16/Q1.py | Python | mit | 525 |
import select
import Queue
import logging as log
from connection import *
class ConnectionManager:
    """Owns a set of Connection objects: accepts new ones via a thread-safe
    queue, multiplexes reads/writes with select(), and discards dead ones.
    Connection objects must expose fileno() (for select), Connected,
    RecvCommands(), SendCommands(), CheckTimeout() and Close()."""

    def __init__(self):
        self.Connections = []
        # Unbounded queue so AddConnection never blocks the caller's thread.
        self.NewConnectionQueue = Queue.Queue(0)
        self.DeadConnections = []

    def AddConnection(self, connection):
        """Hand a connection over to the manager (thread-safe)."""
        self.NewConnectionQueue.put(connection)

    def RemoveConnection(self, connection):
        """Close and forget a managed connection; unknown ones are logged."""
        try:
            connectionIndex = self.Connections.index(connection)
            connectionObject = self.Connections.pop(connectionIndex)
            connectionObject.Close()
            log.info("ConnectionManager stopped managing a connection")
        except ValueError:
            log.info("ConnectionManager tried to remove connection that didn't exist")
            pass

    def Run(self):
        """Main service loop; never returns."""
        log.info("Connection Manager now running.")
        while 1:
            # Manage any new connections queued by other threads.
            while not self.NewConnectionQueue.empty():
                log.info("Connection Manager got a new connection to manage.")
                self.Connections.append(self.NewConnectionQueue.get())
            # NOTE(review): select() with three empty lists raises on some
            # platforms, hence the guard; an empty manager busy-spins here.
            if self.Connections != []:
                # Read data from connections that have sent us something
                read, write, err = select.select(self.Connections, self.Connections, self.Connections)
                for connection in read:
                    if not connection.Connected or not connection.RecvCommands():
                        self.DeadConnections.append(connection)
                # Send data to ready connections for which we have data
                for connection in write:
                    connection.SendCommands()
                    connection.CheckTimeout()
                    if not connection.Connected:
                        self.DeadConnections.append(connection)
                # Clean up dead connections
                for connection in self.DeadConnections:
                    self.RemoveConnection(connection)
                del self.DeadConnections[:]
| SlashRoot/WHAT | what_apps/push/standalonewebsocketserver/connectionManager.py | Python | mit | 2,116 |
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
    # Point Django at the test project's settings unless the environment
    # already overrides it, then dispatch the CLI command.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "segtest.settings")
    from django.core.management import execute_from_command_line
    execute_from_command_line(sys.argv)
| aldryn/aldryn-segmentation | test_project/manage.py | Python | bsd-3-clause | 250 |
"""
RESTful API for controlling and monitoring EVA.
"""
import eva
import eva.globe
import eva.gpg
import eva.rest.resources
import falcon
import json
import re
import wsgiref.simple_server
class RequireJSON(object):
    """Falcon middleware enforcing JSON on both sides of the exchange:
    clients must accept JSON responses, and POST/PUT bodies must be JSON."""

    def process_request(self, req, resp):
        # Clients unable to accept JSON cannot use this API at all.
        if not req.client_accepts_json:
            raise falcon.HTTPNotAcceptable('This API only supports responses encoded as JSON.')
        # A mutating request carrying a body must declare a JSON content type.
        has_body = req.method in ('POST', 'PUT') and req.content_length not in (None, 0)
        if has_body and 'application/json' not in req.content_type:
            raise falcon.HTTPUnsupportedMediaType('This API only supports requests encoded as JSON.')
class RequireGPGSignedRequests(eva.globe.GlobalMixin):
    """Falcon middleware that rejects any non-GET request unless its body
    carries a valid GPG signature from a whitelisted key."""

    # Maximum accepted skew (seconds) between the signature timestamp and
    # local time; larger differences are treated as replay/clock errors.
    TIME_DIFF_THRESHOLD = 2.0

    def __init__(self, gpg_key_ids):
        # Whitelist of GPG key IDs allowed to issue mutating requests.
        self.gpg_key_ids = gpg_key_ids
        self.header_regex = re.compile(r'^X-EVA-Request-Signature-\d+$', re.IGNORECASE)

    def _gpg_signature_from_headers(self, headers):
        """Reassemble the multi-line signature from the numbered
        X-EVA-Request-Signature-N headers, in sorted header-name order."""
        signature = []
        keys = sorted(headers.keys())
        for key in keys:
            if not self.header_regex.match(key):
                continue
            signature += [headers[key]]
        return signature

    def _check_signature(self, payload, signature):
        """Verify payload against signature; raise falcon.HTTPUnauthorized
        on bad signature, unparsable/unknown key, or excessive clock skew."""
        checker = eva.gpg.GPGSignatureChecker(payload, signature)
        result = checker.verify()
        if result.exit_code != 0:
            self.logger.warning('GPG verification of request failed: %s', result.stderr[0])
            for line in result.stderr:
                self.logger.warning(line)
            raise falcon.HTTPUnauthorized('GPG verification of request failed.')
        if result.key_id is None:
            self.logger.warning('GPG key ID not parsed correctly from GPG output, dropping request.')
            raise falcon.HTTPUnauthorized('GPG verification of request failed.')
        self.logger.info('Request is signed by %s with %s key %s at %s', result.signer, result.key_type, result.key_id, eva.strftime_iso8601(result.timestamp))
        if result.key_id not in self.gpg_key_ids:
            self.logger.warning("GPG key ID '%s' is not in whitelist, dropping request.", result.key_id)
            raise falcon.HTTPUnauthorized('Only few of mere mortals may try to enter the twilight zone.')
        time_diff = eva.now_with_timezone() - result.timestamp
        time_diff_secs = abs(time_diff.total_seconds())
        if time_diff_secs > self.TIME_DIFF_THRESHOLD:
            self.logger.warning("GPG signature differs from local time with %.1f seconds, over threshold of %.1f seconds, dropping request.", time_diff_secs, self.TIME_DIFF_THRESHOLD)
            raise falcon.HTTPUnauthorized('Too high time difference between server and client; is your clock correct?')
        self.logger.info('Permitting access to %s with %s key %s', result.signer, result.key_type, result.key_id)

    def process_request(self, req, resp):
        # Read-only requests pass unauthenticated; everything else must be
        # signed over the raw body placed in req.context by JSONTranslator.
        if req.method == 'GET':
            return
        signature = self._gpg_signature_from_headers(req.headers)
        self.logger.info('Verifying request signature:')
        [self.logger.info(s) for s in signature]
        self._check_signature(req.context['body'], signature)
class JSONTranslator(object):
    """Falcon middleware: decode JSON request bodies into req.context
    ('body' raw text, 'doc' parsed) and serialize req.context['result']
    into the response body."""

    def process_request(self, req, resp):
        # Always expose a raw-body slot, even for body-less requests.
        req.context['body'] = ''
        if req.content_length in (None, 0):
            return
        raw = req.stream.read()
        if not raw:
            raise falcon.HTTPBadRequest('Empty request body', 'A valid JSON document is required.')
        try:
            text = raw.decode('utf-8')
            req.context['body'] = text
            req.context['doc'] = json.loads(text)
        except (ValueError, UnicodeDecodeError):
            raise falcon.HTTPError(
                falcon.HTTP_753,
                'Malformed JSON', 'Could not decode the request body. The JSON was incorrect or not encoded as UTF-8.',
            )

    def process_response(self, req, resp, resource):
        # Only serialize when a resource actually stored a result.
        if 'result' in req.context:
            resp.body = json.dumps(req.context['result'])
class Server(eva.config.ConfigurableObject, eva.globe.GlobalMixin):
    """
    Run a HTTP REST API based on Falcon web framework.
    """

    # Configuration schema consumed by eva.config.ConfigurableObject.
    CONFIG = {
        'gpg_key_ids': {
            'type': 'list_string',
            'default': '',
        }
    }

    OPTIONAL_CONFIG = [
        'gpg_key_ids',
    ]

    def init(self):
        """Build the Falcon application, its middleware chain and routes."""
        gpg_middleware = RequireGPGSignedRequests(self.env['gpg_key_ids'])
        gpg_middleware.set_globe(self.globe)
        # Middleware order matters: JSONTranslator must populate
        # req.context['body'] before the GPG middleware verifies it.
        self.app = falcon.API(middleware=[
            RequireJSON(),
            JSONTranslator(),
            gpg_middleware,
        ])
        self._resources = []
        self._setup_resources()
        self.server = None

    def start(self, host, port):
        """Bind a simple WSGI server on host:port. The tiny timeout keeps
        respond_to_next_request() from blocking the caller's loop."""
        self.server = wsgiref.simple_server.make_server(host, port, self.app)
        self.server.timeout = 0.001

    def _setup_resources(self):
        # Route table: name, URI template, resource object.
        self._add_resource('control', '/control/{method}', eva.rest.resources.ControlResource())
        self._add_resource('health', '/health', eva.rest.resources.HealthResource())
        self._add_resource('job', '/jobs/{job_id}', eva.rest.resources.JobResource())
        self._add_resource('jobs', '/jobs', eva.rest.resources.JobsResource())
        self._add_resource('process', '/process/{method}', eva.rest.resources.ProcessResource())

    def _add_resource(self, name, path, resource):
        # Remember the name so set_eventloop_instance can reach the
        # resource later via getattr.
        self._resources += [name]
        setattr(self, name, resource)
        resource.set_globe(self.globe)
        self.app.add_route(path, resource)

    def set_eventloop_instance(self, eventloop):
        """Hand the event loop to every registered resource."""
        for resource in self._resources:
            instance = getattr(self, resource)
            instance.set_eventloop_instance(eventloop)

    def respond_to_next_request(self):
        """Serve at most one pending HTTP request; no-op before start()."""
        if not self.server:
            return
        self.server.handle_request()
| metno/EVA | eva/rest/__init__.py | Python | gpl-2.0 | 5,891 |
# Copyright (C) 2006-2007 Red Hat, Inc.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place - Suite 330,
# Boston, MA 02111-1307, USA.
"""
STABLE.
"""
import random
import logging
from gi.repository import Gio
colors = [['#B20008', '#FF2B34'],
['#FF2B34', '#B20008'],
['#E6000A', '#FF2B34'],
['#FF2B34', '#E6000A'],
['#FFADCE', '#FF2B34'],
['#9A5200', '#FF2B34'],
['#FF2B34', '#9A5200'],
['#FF8F00', '#FF2B34'],
['#FF2B34', '#FF8F00'],
['#FFC169', '#FF2B34'],
['#807500', '#FF2B34'],
['#FF2B34', '#807500'],
['#BE9E00', '#FF2B34'],
['#FF2B34', '#BE9E00'],
['#F8E800', '#FF2B34'],
['#008009', '#FF2B34'],
['#FF2B34', '#008009'],
['#00B20D', '#FF2B34'],
['#FF2B34', '#00B20D'],
['#8BFF7A', '#FF2B34'],
['#00588C', '#FF2B34'],
['#FF2B34', '#00588C'],
['#005FE4', '#FF2B34'],
['#FF2B34', '#005FE4'],
['#BCCDFF', '#FF2B34'],
['#5E008C', '#FF2B34'],
['#FF2B34', '#5E008C'],
['#7F00BF', '#FF2B34'],
['#FF2B34', '#7F00BF'],
['#D1A3FF', '#FF2B34'],
['#9A5200', '#FF8F00'],
['#FF8F00', '#9A5200'],
['#C97E00', '#FF8F00'],
['#FF8F00', '#C97E00'],
['#FFC169', '#FF8F00'],
['#807500', '#FF8F00'],
['#FF8F00', '#807500'],
['#BE9E00', '#FF8F00'],
['#FF8F00', '#BE9E00'],
['#F8E800', '#FF8F00'],
['#008009', '#FF8F00'],
['#FF8F00', '#008009'],
['#00B20D', '#FF8F00'],
['#FF8F00', '#00B20D'],
['#8BFF7A', '#FF8F00'],
['#00588C', '#FF8F00'],
['#FF8F00', '#00588C'],
['#005FE4', '#FF8F00'],
['#FF8F00', '#005FE4'],
['#BCCDFF', '#FF8F00'],
['#5E008C', '#FF8F00'],
['#FF8F00', '#5E008C'],
['#A700FF', '#FF8F00'],
['#FF8F00', '#A700FF'],
['#D1A3FF', '#FF8F00'],
['#B20008', '#FF8F00'],
['#FF8F00', '#B20008'],
['#FF2B34', '#FF8F00'],
['#FF8F00', '#FF2B34'],
['#FFADCE', '#FF8F00'],
['#807500', '#F8E800'],
['#F8E800', '#807500'],
['#BE9E00', '#F8E800'],
['#F8E800', '#BE9E00'],
['#FFFA00', '#EDDE00'],
['#008009', '#F8E800'],
['#F8E800', '#008009'],
['#00EA11', '#F8E800'],
['#F8E800', '#00EA11'],
['#8BFF7A', '#F8E800'],
['#00588C', '#F8E800'],
['#F8E800', '#00588C'],
['#00A0FF', '#F8E800'],
['#F8E800', '#00A0FF'],
['#BCCEFF', '#F8E800'],
['#5E008C', '#F8E800'],
['#F8E800', '#5E008C'],
['#AC32FF', '#F8E800'],
['#F8E800', '#AC32FF'],
['#D1A3FF', '#F8E800'],
['#B20008', '#F8E800'],
['#F8E800', '#B20008'],
['#FF2B34', '#F8E800'],
['#F8E800', '#FF2B34'],
['#FFADCE', '#F8E800'],
['#9A5200', '#F8E800'],
['#F8E800', '#9A5200'],
['#FF8F00', '#F8E800'],
['#F8E800', '#FF8F00'],
['#FFC169', '#F8E800'],
['#008009', '#00EA11'],
['#00EA11', '#008009'],
['#00B20D', '#00EA11'],
['#00EA11', '#00B20D'],
['#8BFF7A', '#00EA11'],
['#00588C', '#00EA11'],
['#00EA11', '#00588C'],
['#005FE4', '#00EA11'],
['#00EA11', '#005FE4'],
['#BCCDFF', '#00EA11'],
['#5E008C', '#00EA11'],
['#00EA11', '#5E008C'],
['#7F00BF', '#00EA11'],
['#00EA11', '#7F00BF'],
['#D1A3FF', '#00EA11'],
['#B20008', '#00EA11'],
['#00EA11', '#B20008'],
['#FF2B34', '#00EA11'],
['#00EA11', '#FF2B34'],
['#FFADCE', '#00EA11'],
['#9A5200', '#00EA11'],
['#00EA11', '#9A5200'],
['#FF8F00', '#00EA11'],
['#00EA11', '#FF8F00'],
['#FFC169', '#00EA11'],
['#807500', '#00EA11'],
['#00EA11', '#807500'],
['#BE9E00', '#00EA11'],
['#00EA11', '#BE9E00'],
['#F8E800', '#00EA11'],
['#00588C', '#00A0FF'],
['#00A0FF', '#00588C'],
['#005FE4', '#00A0FF'],
['#00A0FF', '#005FE4'],
['#BCCDFF', '#00A0FF'],
['#5E008C', '#00A0FF'],
['#00A0FF', '#5E008C'],
['#9900E6', '#00A0FF'],
['#00A0FF', '#9900E6'],
['#D1A3FF', '#00A0FF'],
['#B20008', '#00A0FF'],
['#00A0FF', '#B20008'],
['#FF2B34', '#00A0FF'],
['#00A0FF', '#FF2B34'],
['#FFADCE', '#00A0FF'],
['#9A5200', '#00A0FF'],
['#00A0FF', '#9A5200'],
['#FF8F00', '#00A0FF'],
['#00A0FF', '#FF8F00'],
['#FFC169', '#00A0FF'],
['#807500', '#00A0FF'],
['#00A0FF', '#807500'],
['#BE9E00', '#00A0FF'],
['#00A0FF', '#BE9E00'],
['#F8E800', '#00A0FF'],
['#008009', '#00A0FF'],
['#00A0FF', '#008009'],
['#00B20D', '#00A0FF'],
['#00A0FF', '#00B20D'],
['#8BFF7A', '#00A0FF'],
['#5E008C', '#AC32FF'],
['#AC32FF', '#5E008C'],
['#7F00BF', '#AC32FF'],
['#AC32FF', '#7F00BF'],
['#D1A3FF', '#AC32FF'],
['#B20008', '#AC32FF'],
['#AC32FF', '#B20008'],
['#FF2B34', '#AC32FF'],
['#AC32FF', '#FF2B34'],
['#FFADCE', '#AC32FF'],
['#9A5200', '#AC32FF'],
['#AC32FF', '#9A5200'],
['#FF8F00', '#AC32FF'],
['#AC32FF', '#FF8F00'],
['#FFC169', '#AC32FF'],
['#807500', '#AC32FF'],
['#AC32FF', '#807500'],
['#BE9E00', '#AC32FF'],
['#AC32FF', '#BE9E00'],
['#F8E800', '#AC32FF'],
['#008009', '#AC32FF'],
['#AC32FF', '#008009'],
['#00B20D', '#AC32FF'],
['#AC32FF', '#00B20D'],
['#8BFF7A', '#AC32FF'],
['#00588C', '#AC32FF'],
['#AC32FF', '#00588C'],
['#005FE4', '#AC32FF'],
['#AC32FF', '#005FE4'],
['#BCCDFF', '#AC32FF']]
def _parse_string(color_string):
if not isinstance(color_string, (str, unicode)):
logging.error('Invalid color string: %r', color_string)
return None
if color_string == 'white':
return ['#ffffff', '#414141']
elif color_string == 'insensitive':
return ['#ffffff', '#e2e2e2']
splitted = color_string.split(',')
if len(splitted) == 2:
return [splitted[0], splitted[1]]
else:
return None
class XoColor:
    """A stroke/fill color pair identifying a Sugar user ("XO colors")."""
    def __init__(self, color_string=None):
        parsed_color = None
        if color_string is None:
            # Fall back to the user's configured color.
            settings = Gio.Settings('org.sugarlabs.user')
            color_string = settings.get_string('color')
        if color_string is not None:
            parsed_color = _parse_string(color_string)
        if parsed_color is None:
            # Unknown or unparsable string: pick a random standard pair.
            n = int(random.random() * (len(colors) - 1))
            [self.stroke, self.fill] = colors[n]
        else:
            [self.stroke, self.fill] = parsed_color
    def __cmp__(self, other):
        # Python 2 comparison hook; 0 means equal, -1 otherwise.
        if isinstance(other, XoColor):
            if self.stroke == other.stroke and self.fill == other.fill:
                return 0
        return -1
    def __eq__(self, other):
        # __cmp__ is ignored on Python 3; provide explicit equality with
        # the same semantics (equal iff both stroke and fill match).
        return (isinstance(other, XoColor) and
                self.stroke == other.stroke and self.fill == other.fill)
    def __ne__(self, other):
        return not self.__eq__(other)
    def __hash__(self):
        # Keep instances hashable (defining __eq__ alone would disable
        # hashing on Python 3); consistent with __eq__.
        return hash((self.stroke, self.fill))
    def get_stroke_color(self):
        return self.stroke
    def get_fill_color(self):
        return self.fill
    def to_string(self):
        return '%s,%s' % (self.stroke, self.fill)
if __name__ == '__main__':
    # Developer utility: regenerate the `colors` table above from a text
    # file of "fill: XXXXXX stroke: XXXXXX" lines (one pair per line).
    # Uses print() calls and a context manager so it runs on both
    # Python 2 and Python 3 and never leaks the file handle.
    import sys
    import re
    print('colors = [')
    with open(sys.argv[1], 'r') as f:
        for line in f:
            match = re.match(r'fill: ([A-Z0-9]*) stroke: ([A-Z0-9]*)', line)
            if match is None:
                # Skip lines that do not describe a color pair instead of
                # crashing with AttributeError.
                continue
            print("['#%s', '#%s'], \\" % (match.group(2), match.group(1)))
    print(']')
| manuq/sugar-toolkit-gtk3 | src/sugar3/graphics/xocolor.py | Python | lgpl-2.1 | 8,657 |
#!/usr/bin/env python
"""OSX specific actions.
Most of these actions share an interface (in/out rdfvalues) with linux actions
of the same name. OSX-only actions are registered with the server via
libs/server_stubs.py
"""
import ctypes
import logging
import os
import re
import shutil
import StringIO
import sys
import tarfile
import pytsk3
from grr.client import actions
from grr.client import client_utils_common
from grr.client import client_utils_osx
from grr.client.client_actions import standard
from grr.client.osx.objc import ServiceManagement
from grr.lib import config_lib
from grr.lib import utils
from grr.lib.rdfvalues import client as rdf_client
from grr.lib.rdfvalues import protodict as rdf_protodict
from grr.parsers import osx_launchd
# Exception hierarchy for this module: all errors derive from Error so
# callers can catch everything with a single except clause.
class Error(Exception):
  """Base error class."""
class UnsupportedOSVersionError(Error):
  """This action not supported on this os version."""
# struct sockaddr_dl {
# u_char sdl_len; /* Total length of sockaddr */
# u_char sdl_family; /* AF_LINK */
# u_short sdl_index; /* if != 0, system given index for interface */
# u_char sdl_type; /* interface type */
# u_char sdl_nlen; /* interface name length, no trailing 0 reqd. */
# u_char sdl_alen; /* link level address length */
# u_char sdl_slen; /* link layer selector length */
# char sdl_data[12]; /* minimum work area, can be larger;
# contains both if name and ll address */
# };
# Interfaces can have names up to 15 chars long and sdl_data contains name + mac
# but no separators - we need to make sdl_data at least 15+6 bytes.
class Sockaddrdl(ctypes.Structure):
  """The sockaddr_dl struct."""
  _fields_ = [
      ("sdl_len", ctypes.c_ubyte),
      ("sdl_family", ctypes.c_ubyte),
      ("sdl_index", ctypes.c_ushort),
      ("sdl_type", ctypes.c_ubyte),
      ("sdl_nlen", ctypes.c_ubyte),
      ("sdl_alen", ctypes.c_ubyte),
      ("sdl_slen", ctypes.c_ubyte),
      # Declared as 12 bytes in the C header but sized 24 here so it can
      # hold a 15-char interface name plus a 6-byte MAC (see comment above).
      ("sdl_data", ctypes.c_ubyte * 24),
  ]
# struct sockaddr_in {
# __uint8_t sin_len;
# sa_family_t sin_family;
# in_port_t sin_port;
# struct in_addr sin_addr;
# char sin_zero[8];
# };
class Sockaddrin(ctypes.Structure):
  """The sockaddr_in struct."""
  _fields_ = [
      ("sin_len", ctypes.c_ubyte),
      ("sin_family", ctypes.c_ubyte),
      ("sin_port", ctypes.c_ushort),
      # IPv4 address as 4 raw bytes.
      ("sin_addr", ctypes.c_ubyte * 4),
      ("sin_zero", ctypes.c_char * 8)
  ]
# struct sockaddr_in6 {
# __uint8_t sin6_len; /* length of this struct */
# sa_family_t sin6_family; /* AF_INET6 (sa_family_t) */
# in_port_t sin6_port; /* Transport layer port */
# __uint32_t sin6_flowinfo; /* IP6 flow information */
# struct in6_addr sin6_addr; /* IP6 address */
# __uint32_t sin6_scope_id; /* scope zone index */
# };
class Sockaddrin6(ctypes.Structure):
  """The sockaddr_in6 struct."""
  _fields_ = [
      ("sin6_len", ctypes.c_ubyte),
      ("sin6_family", ctypes.c_ubyte),
      ("sin6_port", ctypes.c_ushort),
      ("sin6_flowinfo", ctypes.c_ubyte * 4),
      # IPv6 address as 16 raw bytes.
      ("sin6_addr", ctypes.c_ubyte * 16),
      ("sin6_scope_id", ctypes.c_ubyte * 4)
  ]
# struct ifaddrs *ifa_next; /* Pointer to next struct */
# char *ifa_name; /* Interface name */
# u_int ifa_flags; /* Interface flags */
# struct sockaddr *ifa_addr; /* Interface address */
# struct sockaddr *ifa_netmask; /* Interface netmask */
# struct sockaddr *ifa_broadaddr; /* Interface broadcast address */
# struct sockaddr *ifa_dstaddr; /* P2P interface destination */
# void *ifa_data; /* Address specific data */
class Ifaddrs(ctypes.Structure):
  """The ifaddrs struct (see getifaddrs(3) and the field list above).

  The fields are attached after the class statement because ifa_next is
  a pointer to the struct itself, which must already exist by name.
  """
Ifaddrs._fields_ = [
    ("ifa_next", ctypes.POINTER(Ifaddrs)),
    ("ifa_name", ctypes.POINTER(ctypes.c_char)),
    ("ifa_flags", ctypes.c_uint),
    ("ifa_addr", ctypes.POINTER(ctypes.c_char)),
    ("ifa_netmask", ctypes.POINTER(ctypes.c_char)),
    ("ifa_broadaddr", ctypes.POINTER(ctypes.c_char)),
    ("ifa_destaddr", ctypes.POINTER(ctypes.c_char)),
    ("ifa_data", ctypes.POINTER(ctypes.c_char))
]
class EnumerateInterfaces(actions.ActionPlugin):
  """Enumerate all MAC addresses of all NICs."""
  out_rdfvalue = rdf_client.Interface

  def Run(self, unused_args):
    """Walk the getifaddrs(3) linked list and send one reply per NIC.

    Each reply carries the interface name plus, when available, its MAC
    address and its IPv4/IPv6 addresses.
    """
    # "import ctypes" alone does not make ctypes.util available; import it
    # explicitly instead of relying on another module having done so.
    import ctypes.util
    libc = ctypes.cdll.LoadLibrary(ctypes.util.find_library("c"))
    ifa = Ifaddrs()
    p_ifa = ctypes.pointer(ifa)
    libc.getifaddrs(ctypes.pointer(p_ifa))

    addresses = {}
    macs = {}
    ifs = set()

    m = p_ifa
    while m:
      ifname = ctypes.string_at(m.contents.ifa_name)
      ifs.add(ifname)
      try:
        # Byte 1 of the sockaddr is sa_family (sa_len comes first on OS X).
        iffamily = ord(m.contents.ifa_addr[1])
        if iffamily == 0x2:  # AF_INET
          data = ctypes.cast(m.contents.ifa_addr, ctypes.POINTER(Sockaddrin))
          ip4 = "".join(map(chr, data.contents.sin_addr))
          address_type = rdf_client.NetworkAddress.Family.INET
          address = rdf_client.NetworkAddress(address_type=address_type,
                                              packed_bytes=ip4)
          addresses.setdefault(ifname, []).append(address)

        if iffamily == 0x12:  # AF_LINK
          data = ctypes.cast(m.contents.ifa_addr, ctypes.POINTER(Sockaddrdl))
          iflen = data.contents.sdl_nlen
          addlen = data.contents.sdl_alen
          # sdl_data holds the interface name followed by the link-level
          # address; slice the MAC out from behind the name.
          macs[ifname] = "".join(
              map(chr, data.contents.sdl_data[iflen:iflen + addlen]))

        if iffamily == 0x1E:  # AF_INET6
          data = ctypes.cast(m.contents.ifa_addr, ctypes.POINTER(Sockaddrin6))
          ip6 = "".join(map(chr, data.contents.sin6_addr))
          address_type = rdf_client.NetworkAddress.Family.INET6
          address = rdf_client.NetworkAddress(address_type=address_type,
                                              packed_bytes=ip6)
          addresses.setdefault(ifname, []).append(address)
      except ValueError:
        # Some interfaces don't have a iffamily and will raise a null pointer
        # exception. We still want to send back the name.
        pass
      m = m.contents.ifa_next

    libc.freeifaddrs(p_ifa)

    for interface in ifs:
      mac = macs.setdefault(interface, "")
      address_list = addresses.setdefault(interface, "")
      args = {"ifname": interface}
      if mac:
        args["mac_address"] = mac
      if address_list:
        args["addresses"] = address_list
      self.SendReply(**args)
class GetInstallDate(actions.ActionPlugin):
  """Estimate the install date of this system."""
  out_rdfvalue = rdf_protodict.DataBlob

  def Run(self, unused_args):
    """Send the ctime of the first statable candidate path, or 0."""
    # Candidates in decreasing order of accuracy for the install time.
    candidates = ["/var/log/CDIS.custom", "/var", "/private"]
    for path in candidates:
      try:
        ctime = os.stat(path).st_ctime
      except OSError:
        continue
      self.SendReply(integer=int(ctime))
      return
    # Nothing could be statted; report "unknown".
    self.SendReply(integer=0)
class EnumerateFilesystems(actions.ActionPlugin):
  """Enumerate all unique filesystems local to the system."""
  out_rdfvalue = rdf_client.Filesystem
  def Run(self, unused_args):
    """List all local filesystems mounted on this system."""
    # First report everything the OS itself considers mounted.
    for fs_struct in client_utils_osx.GetFileSystems():
      self.SendReply(device=fs_struct.f_mntfromname,
                     mount_point=fs_struct.f_mntonname,
                     type=fs_struct.f_fstypename)
    # Then probe raw /dev disk nodes with TSK to find partitions that may
    # not be mounted.
    drive_re = re.compile("r?disk[0-9].*")
    for drive in os.listdir("/dev"):
      if not drive_re.match(drive):
        continue
      path = os.path.join("/dev", drive)
      try:
        img_inf = pytsk3.Img_Info(path)
        # This is a volume or a partition - we send back a TSK device.
        self.SendReply(device=path)
        vol_inf = pytsk3.Volume_Info(img_inf)
        for volume in vol_inf:
          if volume.flags == pytsk3.TSK_VS_PART_FLAG_ALLOC:
            # Byte offset of the partition within the device.
            offset = volume.start * vol_inf.info.block_size
            self.SendReply(device=path + ":" + str(offset),
                           type="partition")
      except (IOError, RuntimeError):
        # Unreadable device or no volume system; skip it silently.
        continue
class OSXEnumerateRunningServices(actions.ActionPlugin):
  """Enumerate all running launchd jobs."""
  # No request payload is needed; replies are OSXServiceInformation values.
  in_rdfvalue = None
  out_rdfvalue = rdf_client.OSXServiceInformation
  def GetRunningLaunchDaemons(self):
    """Get running launchd jobs from objc ServiceManagement framework."""
    sm = ServiceManagement()
    return sm.SMGetJobDictionaries("kSMDomainSystemLaunchd")
  def Run(self, unused_arg):
    """Get running launchd jobs.
    Raises:
      UnsupportedOSVersionError: for OS X earlier than 10.6
    """
    osxversion = client_utils_osx.OSXVersion()
    version_array = osxversion.VersionAsMajorMinor()
    # SMGetJobDictionaries only exists on OS X 10.6 and later.
    if version_array[:2] < [10, 6]:
      raise UnsupportedOSVersionError(
          "ServiceManagment API unsupported on < 10.6. This client is %s" %
          osxversion.VersionString())
    launchd_list = self.GetRunningLaunchDaemons()
    self.parser = osx_launchd.OSXLaunchdJobDict(launchd_list)
    for job in self.parser.Parse():
      response = self.CreateServiceProto(job)
      self.SendReply(response)
  def CreateServiceProto(self, job):
    """Create the Service protobuf.
    Args:
      job: Launchdjobdict from servicemanagement framework.
    Returns:
      sysinfo_pb2.OSXServiceInformation proto
    """
    service = rdf_client.OSXServiceInformation(
        label=job.get("Label"), program=job.get("Program"),
        sessiontype=job.get("LimitLoadToSessionType"),
        lastexitstatus=int(job["LastExitStatus"]),
        timeout=int(job["TimeOut"]), ondemand=bool(job["OnDemand"]))
    # stringify=False keeps the native CF containers so they can be
    # iterated -- presumably CFArray/CFDictionary; TODO confirm.
    for arg in job.get("ProgramArguments", "", stringify=False):
      # Returns CFArray of CFStrings
      service.args.Append(unicode(arg))
    mach_dict = job.get("MachServices", {}, stringify=False)
    for key, value in mach_dict.iteritems():
      service.machservice.Append("%s:%s" % (key, value))
    job_mach_dict = job.get("PerJobMachServices", {}, stringify=False)
    for key, value in job_mach_dict.iteritems():
      service.perjobmachservice.Append("%s:%s" % (key, value))
    if "PID" in job:
      service.pid = job["PID"].value
    return service
class Uninstall(actions.ActionPlugin):
  """Remove the service that starts us at startup."""
  out_rdfvalue = rdf_protodict.DataBlob

  def Run(self, unused_arg):
    """This kills us with no cleanups."""
    logging.debug("Disabling service")
    if hasattr(sys, "frozen"):
      # Running from a pyinstaller bundle.
      binary_path = os.path.abspath(sys.executable)
    elif __file__:
      binary_path = os.path.abspath(__file__)
    # Collect failure messages instead of rewriting one msg string.
    failures = []
    try:
      os.remove(binary_path)
    except OSError:
      failures.append("Could not remove binary.")
    try:
      os.remove(config_lib.CONFIG["Client.plist_path"])
    except OSError:
      failures.append("Could not remove plist file.")
    if failures:
      msg = " ".join(failures)
    else:
      msg = "Service disabled."
    # Get the directory we are running in from pyinstaller. This is either the
    # GRR directory which we should delete (onedir mode) or a generated temp
    # directory which we can delete without problems in onefile mode.
    bundle_dir = getattr(sys, "_MEIPASS", None)
    if bundle_dir:
      shutil.rmtree(bundle_dir, ignore_errors=True)
    self.SendReply(string=msg)
class InstallDriver(actions.ActionPlugin):
  """Installs a driver.
  Note that only drivers with a signature that validates with
  client_config.DRIVER_SIGNING_CERT can be loaded.
  """
  in_rdfvalue = rdf_client.DriverInstallTemplate
  def _FindKext(self, path):
    """Find the .kext directory under path.
    Args:
      path: path string to search
    Returns:
      kext directory path string or raises if not found.
    Raises:
      RuntimeError: if there is no kext under the path.
    """
    for directory, _, _ in os.walk(path):
      if directory.endswith(".kext"):
        return directory
    raise RuntimeError("No .kext directory under %s" % path)
  def Run(self, args):
    """Initializes the driver."""
    # This action might crash the box so we need to flush the transaction log.
    self.SyncTransactionLog()
    if not args.driver:
      raise IOError("No driver supplied.")
    # Refuse to load anything not signed with the configured key.
    pub_key = config_lib.CONFIG.Get("Client.driver_signing_public_key")
    if not args.driver.Verify(pub_key):
      raise OSError("Driver signature signing failure.")
    if args.force_reload:
      # Unload any previously loaded instance before reloading.
      client_utils_osx.UninstallDriver(args.driver_name)
    # Wrap the tarball in a file like object for tarfile to handle it.
    driver_buf = StringIO.StringIO(args.driver.data)
    # Unpack it to a temporary directory.
    with utils.TempDirectory() as kext_tmp_dir:
      driver_archive = tarfile.open(fileobj=driver_buf, mode="r:gz")
      driver_archive.extractall(kext_tmp_dir)
      # Now load it.
      kext_path = self._FindKext(kext_tmp_dir)
      logging.debug("Loading kext %s", kext_path)
      client_utils_osx.InstallDriver(kext_path)
class UninstallDriver(actions.ActionPlugin):
  """Unloads a memory driver.

  The driver is uninstalled only when the request carries a valid
  signature made with the configured driver-signing key.
  """
  in_rdfvalue = rdf_client.DriverInstallTemplate

  def Run(self, args):
    """Verify the request signature, then unload the named driver."""
    signing_key = config_lib.CONFIG["Client.driver_signing_public_key"]
    if not args.driver.Verify(signing_key):
      raise OSError("Driver signature signing failure.")
    # Exceptions raised by the unload are deliberately not caught here.
    client_utils_osx.UninstallDriver(args.driver_name)
class UpdateAgent(standard.ExecuteBinaryCommand):
  """Updates the GRR agent to a new version."""

  # Cap captured output so our reply doesn't get too big (10MB per stream).
  _MAX_OUTPUT = 10 * 1024 * 1024

  def ProcessFile(self, path, args):
    """Run the macOS installer on the downloaded package and report back."""
    stdout, stderr, status, time_used = client_utils_common.Execute(
        "/usr/sbin/installer", ["-pkg", path, "-target", "/"],
        time_limit=args.time_limit, bypass_whitelist=True)
    self.SendReply(rdf_client.ExecuteBinaryResponse(
        stdout=stdout[:self._MAX_OUTPUT],
        stderr=stderr[:self._MAX_OUTPUT],
        exit_status=status,
        # Execute() reports seconds; the response wants microseconds.
        time_used=int(1e6 * time_used)))
| statik/grr | client/client_actions/osx/osx.py | Python | apache-2.0 | 14,584 |
#!/usr/bin/env python
import sys
sys.path.insert(0, '../Tools')
import fitzhugh as model
import prcNetwork
import torus_2D
import numpy as np
class torus(torus_2D.torus_2D):
    """2-D phase-torus view that can overlay the PRC coupling vector field."""

    model = model
    V_trigger = 0.
    figsize = (13, 6.5)

    def __init__(self, system, network, traces, info=None, position=None):
        torus_2D.torus_2D.__init__(self, system, network, traces, info, position)
        # Handle of the currently drawn quiver plot (None when not shown).
        self.quiver = None

    def erase_traces(self):
        """Clear the base-class traces and drop any quiver overlay."""
        torus_2D.torus_2D.erase_traces(self)
        if self.quiver:
            self.quiver.remove()
            self.quiver = None

    def vectorField_prc(self):
        """Compute the PRC-based coupling field and draw it as a quiver plot."""
        self.erase_traces()
        phase, coupling = self.system.threeCellCoupling(0.03, self.network.coupling_strength)
        field = prcNetwork.interp_torus_vec(phase, phase, coupling)
        field.findRoots(GRID=self.GRID)
        self.quiver = field.plot(self.GRID, self.ax_traces, period=1.)
        self.fig.canvas.draw()
if __name__ == "__main__":
    # Demo: build the full system/network/traces stack, attach the torus
    # view, draw the PRC vector field, and hand control to pylab.
    import pylab as pl
    import system as sys
    import network3N as netw
    import traces as tra
    import info as nf

    info = nf.info()
    system = sys.system(info=info)
    network = netw.network(info=info)
    traces = tra.traces(system, network, info=info)
    t = torus(system, network, traces, info=info)
    system.torus = t
    t.vectorField_prc()
    pl.show()
| jusjusjus/Motiftoolbox | Fitzhugh_PRC/torus.py | Python | gpl-2.0 | 1,328 |
from typing import Generic, TypeVar, List
import pytest
import jedi
from jedi.inference.value import ModuleValue
def interpreter(code, namespace, *args, **kwargs):
    """Shortcut for building a jedi.Interpreter over a single namespace dict."""
    return jedi.Interpreter(code, [namespace], *args, **kwargs)
def test_on_code():
    # Inferring an attribute of a compiled object (a function's __code__)
    # must yield at least one definition.
    from functools import wraps
    i = interpreter("wraps.__code__", {'wraps': wraps})
    assert i.infer()
def test_generics_without_definition():
    # Used to raise a recursion error
    T = TypeVar('T')
    class Stack(Generic[T]):
        def __init__(self):
            self.items = []  # type: List[T]
        def push(self, item):
            self.items.append(item)
        def pop(self):
            # type: () -> T
            return self.items.pop()
    class StackWrapper():
        def __init__(self):
            self.stack = Stack()
            self.stack.push(1)
    s = StackWrapper()
    # pop() returns the unbound TypeVar T (Stack is never subscripted), so
    # completing after the trailing dot should yield nothing.
    assert not interpreter('s.stack.pop().', locals()).complete()
@pytest.mark.parametrize(
    'code, expected', [
        ('Foo().method()', 'str'),
        ('Foo.method()', 'str'),
        ('foo.method()', 'str'),
        ('Foo().read()', 'str'),
        ('Foo.read()', 'str'),
        ('foo.read()', 'str'),
    ]
)
def test_generics_methods(code, expected, class_findable):
    """Methods returning T must resolve it from the subscripted base (Reader[str])."""
    T = TypeVar("T")
    class Reader(Generic[T]):
        @classmethod
        def read(cls) -> T:
            return cls()
        def method(self) -> T:
            return 1
    class Foo(Reader[str]):
        def transform(self) -> int:
            return 42
    foo = Foo()
    defs = jedi.Interpreter(code, [locals()]).infer()
    # class_findable is a fixture -- presumably whether jedi can locate the
    # source of interpreter-defined classes; TODO confirm in conftest.
    if class_findable:
        def_, = defs
        assert def_.name == expected
    else:
        assert not defs
def test_mixed_module_cache():
    """Caused by #1479"""
    interpreter = jedi.Interpreter('jedi', [{'jedi': jedi}])
    d, = interpreter.infer()
    assert d.name == 'jedi'
    inference_state = interpreter._inference_state
    jedi_module, = inference_state.module_cache.get(('jedi',))
    # The cached entry must be the tree-based ModuleValue, not a mixed
    # interpreter wrapper.
    assert isinstance(jedi_module, ModuleValue)
def test_signature():
    """
    For performance reasons we use the signature of the compiled object and not
    the tree object.
    """
    def some_signature(foo):
        pass
    from inspect import Signature, Parameter
    # The attached __signature__ must win over the source-level (foo) one.
    some_signature.__signature__ = Signature([
        Parameter('bar', kind=Parameter.KEYWORD_ONLY, default=1)
    ])
    s, = jedi.Interpreter('some_signature', [locals()]).goto()
    assert s.docstring() == 'some_signature(*, bar=1)'
def test_compiled_signature_annotation_string():
    """typing annotations must render readably in signature param descriptions."""
    import typing
    def func(x: typing.Type, y: typing.Union[typing.Type, int]):
        pass
    # Rename so jedi cannot simply find the source definition by name.
    func.__name__ = 'not_func'
    s, = jedi.Interpreter('func()', [locals()]).get_signatures(1, 5)
    assert s.params[0].description == 'param x: Type'
    assert s.params[1].description == 'param y: Union[Type, int]'
| snakeleon/YouCompleteMe-x64 | third_party/ycmd/third_party/jedi_deps/jedi/test/test_inference/test_mixed.py | Python | gpl-3.0 | 2,874 |
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorflow.python.framework.traceable_stack."""
from tensorflow.python.framework import test_util
from tensorflow.python.framework import traceable_stack
from tensorflow.python.platform import googletest
from tensorflow.python.util import tf_inspect as inspect
_LOCAL_OBJECT = lambda x: x
_THIS_FILENAME = inspect.getsourcefile(_LOCAL_OBJECT)
class TraceableObjectTest(test_util.TensorFlowTestCase):
  """Tests for TraceableObject's capture of the caller's file/line."""
  def testSetFilenameAndLineFromCallerUsesCallersStack(self):
    t_obj = traceable_stack.TraceableObject(17)
    # Do not separate placeholder from the set_filename_and_line_from_caller()
    # call one line below it as it is used to calculate the latter's line
    # number.
    placeholder = lambda x: x
    result = t_obj.set_filename_and_line_from_caller()
    expected_lineno = inspect.getsourcelines(placeholder)[1] + 1
    self.assertEqual(expected_lineno, t_obj.lineno)
    self.assertEqual(_THIS_FILENAME, t_obj.filename)
    self.assertEqual(t_obj.SUCCESS, result)
  def testSetFilenameAndLineFromCallerRespectsOffset(self):
    def call_set_filename_and_line_from_caller(t_obj):
      # We expect to retrieve the line number from _our_ caller.
      return t_obj.set_filename_and_line_from_caller(offset=1)
    t_obj = traceable_stack.TraceableObject(None)
    # Do not separate placeholder from the
    # call_set_filename_and_line_from_caller() call one line below it as it is
    # used to calculate the latter's line number.
    placeholder = lambda x: x
    result = call_set_filename_and_line_from_caller(t_obj)
    expected_lineno = inspect.getsourcelines(placeholder)[1] + 1
    self.assertEqual(expected_lineno, t_obj.lineno)
    self.assertEqual(t_obj.SUCCESS, result)
  def testSetFilenameAndLineFromCallerHandlesRidiculousOffset(self):
    t_obj = traceable_stack.TraceableObject('The quick brown fox.')
    # This line shouldn't die.
    result = t_obj.set_filename_and_line_from_caller(offset=300)
    # We expect a heuristic to be used because we are not currently 300 frames
    # down on the stack.  The filename and lineno of the outermost frame are not
    # predictable -- in some environments the filename is this test file, but in
    # other environments it is not (e.g. due to a test runner calling this
    # file).  Therefore we only test that the called function knows it applied a
    # heuristic for the ridiculous stack offset.
    self.assertEqual(t_obj.HEURISTIC_USED, result)
class TraceableStackTest(test_util.TensorFlowTestCase):
  """Tests for TraceableStack's LIFO behavior and caller-location capture."""
  def testPushPeekPopObj(self):
    t_stack = traceable_stack.TraceableStack()
    t_stack.push_obj(42.0)
    t_stack.push_obj('hope')
    # peek_objs yields newest first.
    expected_lifo_peek = ['hope', 42.0]
    self.assertEqual(expected_lifo_peek, list(t_stack.peek_objs()))
    self.assertEqual('hope', t_stack.pop_obj())
    self.assertEqual(42.0, t_stack.pop_obj())
  def testPushPeekTopObj(self):
    t_stack = traceable_stack.TraceableStack()
    t_stack.push_obj(42.0)
    t_stack.push_obj('hope')
    self.assertEqual('hope', t_stack.peek_top_obj())
  def testPushPopPreserveLifoOrdering(self):
    t_stack = traceable_stack.TraceableStack()
    t_stack.push_obj(0)
    t_stack.push_obj(1)
    t_stack.push_obj(2)
    t_stack.push_obj(3)
    obj_3 = t_stack.pop_obj()
    obj_2 = t_stack.pop_obj()
    obj_1 = t_stack.pop_obj()
    obj_0 = t_stack.pop_obj()
    self.assertEqual(3, obj_3)
    self.assertEqual(2, obj_2)
    self.assertEqual(1, obj_1)
    self.assertEqual(0, obj_0)
  def testPushObjSetsFilenameAndLineInfoForCaller(self):
    t_stack = traceable_stack.TraceableStack()
    # We expect that the line number recorded for the 1-object will come from
    # the call to t_stack.push_obj(1).  Do not separate the next two lines!
    placeholder_1 = lambda x: x
    t_stack.push_obj(1)
    # We expect that the line number recorded for the 2-object will come from
    # the call to call_push_obj() and _not_ the call to t_stack.push_obj().
    def call_push_obj(obj):
      t_stack.push_obj(obj, offset=1)
    # Do not separate the next two lines!
    placeholder_2 = lambda x: x
    call_push_obj(2)
    expected_lineno_1 = inspect.getsourcelines(placeholder_1)[1] + 1
    expected_lineno_2 = inspect.getsourcelines(placeholder_2)[1] + 1
    t_obj_2, t_obj_1 = t_stack.peek_traceable_objs()
    self.assertEqual(expected_lineno_2, t_obj_2.lineno)
    self.assertEqual(expected_lineno_1, t_obj_1.lineno)
# Allow running this test file directly.
if __name__ == '__main__':
  googletest.main()
| tensorflow/tensorflow | tensorflow/python/framework/traceable_stack_test.py | Python | apache-2.0 | 5,145 |
import flask
from flask import render_template
from flask import request
from flask import url_for
from flask import jsonify
import uuid
import json
import logging
# Date handling
import arrow # Replacement for datetime, based on moment.js
import datetime # But we still need time
from dateutil import tz # For interpreting local times
# OAuth2 - Google library implementation for convenience
from oauth2client import client
import httplib2 # used in oauth2 flow
# Google API for services
from apiclient import discovery
###
# Globals
###
import CONFIG
app = flask.Flask(__name__)
SCOPES = 'https://www.googleapis.com/auth/calendar.readonly'
CLIENT_SECRET_FILE = CONFIG.GOOGLE_LICENSE_KEY ## You'll need this
APPLICATION_NAME = 'MeetMe class project'
#############################
#
# Pages (routed from URLs)
#
#############################
@app.route("/")
@app.route("/index")
def index():
    """Landing page; records itself so /choose can return here later."""
    app.logger.debug("Entering index")
    if 'begin_date' not in flask.session:
        init_session_values()
    flask.session.update(current_page='index.html', current_url='index')
    return render_template('index.html')
@app.route("/choose")
def choose():
    """List the user's Google calendars, running the OAuth2 flow first if needed."""
    # The authorization redirect has to be *returned* from this view, which
    # is why the credential check cannot live in a helper function.
    app.logger.debug("Checking credentials for Google calendar access")
    credentials = valid_credentials()
    if not credentials:
        app.logger.debug("Redirecting to authorization")
        return flask.redirect(flask.url_for('oauth2callback'))
    gcal_service = get_gcal_service(credentials)
    app.logger.debug("Returned from get_gcal_service")
    flask.session['calendars'] = list_calendars(gcal_service)
    # Re-render whichever page sent the user here.
    return render_template(flask.session['current_page'])
@app.route("/meeting/create")
def create():
    """Meeting-creation page."""
    app.logger.debug("Entering create")
    if 'begin_date' not in flask.session:
        init_session_values()
    flask.session.update(current_page='create.html',
                         current_url='/meeting/create')
    return render_template('create.html')
@app.route("/meeting/participate/<id>")
def add_particepant(id):
    """Page for joining an existing meeting identified by `id`."""
    app.logger.debug("Entering participate")
    if 'begin_date' not in flask.session:
        init_session_values()
    # NOTE(review): current_page is set to 'edit.html' although this view
    # renders 'add_particepant.html' -- confirm which template /choose
    # should re-render after authorization.
    flask.session.update(current_page='edit.html',
                         current_url='/meeting/participate/' + id)
    return render_template('add_particepant.html')
@app.route("/test")
def test():
    # Debug helper route: render the create page without touching the session.
    return render_template('create.html')
####
#
# Google calendar authorization:
# Returns us to the main /choose screen after inserting
# the calendar_service object in the session state. May
# redirect to OAuth server first, and may take multiple
# trips through the oauth2 callback function.
#
# Protocol for use ON EACH REQUEST:
# First, check for valid credentials
# If we don't have valid credentials
# Get credentials (jump to the oauth2 protocol)
# (redirects back to /choose, this time with credentials)
# If we do have valid credentials
# Get the service object
#
# The final result of successful authorization is a 'service'
# object. We use a 'service' object to actually retrieve data
# from the Google services. Service objects are NOT serializable ---
# we can't stash one in a cookie. Instead, on each request we
#  get a fresh service object from our credentials, which are
# serializable.
#
# Note that after authorization we always redirect to /choose;
# If this is unsatisfactory, we'll need a session variable to use
# as a 'continuation' or 'return address' to use instead.
#
####
def valid_credentials():
    """
    Return OAuth2 credentials from the session if they are present and
    still usable (a 'truthy' value).  Return None (a 'falsy' value) when
    no credentials are stored, or when the stored ones are invalid or
    their access token has expired.
    """
    serialized = flask.session.get('credentials')
    if serialized is None:
        return None
    credentials = client.OAuth2Credentials.from_json(serialized)
    if credentials.invalid or credentials.access_token_expired:
        return None
    return credentials
def get_gcal_service(credentials):
  """
  We need a Google calendar 'service' object to obtain
  list of calendars, busy times, etc.  This requires
  authorization. If authorization is already in effect,
  we'll just return with the authorization. Otherwise,
  control flow will be interrupted by authorization, and we'll
  end up redirected back to /choose *without a service object*.
  Then the second call will succeed without additional authorization.
  """
  app.logger.debug("Entering get_gcal_service")
  # Wrap an HTTP client with the OAuth2 credentials so every request
  # carries the access token.
  http_auth = credentials.authorize(httplib2.Http())
  # Build the Calendar API v3 service object from the discovery document.
  service = discovery.build('calendar', 'v3', http=http_auth)
  app.logger.debug("Returning service")
  return service
@app.route('/oauth2callback')
def oauth2callback():
  """
  The 'flow' has this one place to call back to.  We'll enter here
  more than once as steps in the flow are completed, and need to keep
  track of how far we've gotten. The first time we'll do the first
  step, the second time we'll skip the first step and do the second,
  and so on.
  """
  app.logger.debug("Entering oauth2callback")
  flow =  client.flow_from_clientsecrets(
      CLIENT_SECRET_FILE,
      scope= SCOPES,
      redirect_uri=flask.url_for('oauth2callback', _external=True))
  ## Note we are *not* redirecting above.  We are noting *where*
  ## we will redirect to, which is this function.
  ## The *second* time we enter here, it's a callback
  ## with 'code' set in the URL parameter.  If we don't
  ## see that, it must be the first time through, so we
  ## need to do step 1.
  app.logger.debug("Got flow")
  if 'code' not in flask.request.args:
    app.logger.debug("Code not in flask.request.args")
    auth_uri = flow.step1_get_authorize_url()
    # Send the user to Google's consent screen; Google redirects back
    # to this same route with ?code=... appended.
    return flask.redirect(auth_uri)
    ## This will redirect back here, but the second time through
    ## we'll have the 'code' parameter set
  else:
    ## It's the second time through ... we can tell because
    ## we got the 'code' argument in the URL.
    app.logger.debug("Code was in flask.request.args")
    auth_code = flask.request.args.get('code')
    # Exchange the one-time authorization code for long-lived
    # credentials, then stash them (JSON-serialized) in the session.
    credentials = flow.step2_exchange(auth_code)
    flask.session['credentials'] = credentials.to_json()
    ## Now I can build the service and execute the query,
    ## but for the moment I'll just log it and go back to
    ## the main screen
    app.logger.debug("Got credentials")
    return flask.redirect(flask.url_for('choose'))
#####
#
# Option setting: Buttons or forms that add some
# information into session state. Don't do the
# computation here; use of the information might
# depend on what other information we have.
# Setting an option sends us back to the main display
# page, where we may put the new information to use.
#
#####
@app.route('/setrange', methods=['POST'])
def setrange():
    """
    Handle the POST from the bootstrap daterange widget: parse the
    'daterange' form field ("MM/DD/YYYY - MM/DD/YYYY") into ISO
    begin/end dates stored in the session, then return to /choose.
    """
    app.logger.debug("Entering setrange")
    # NOTE(review): bare 'request' here (vs flask.request elsewhere) --
    # presumably imported at module top; confirm.
    flask.flash("Setrange gave us '{}'".format(
        request.form.get('daterange')))
    daterange = request.form.get('daterange')
    flask.session['daterange'] = daterange
    # parts are [begin, "-", end]; index 1 is the literal separator.
    daterange_parts = daterange.split()
    flask.session['begin_date'] = interpret_date(daterange_parts[0])
    flask.session['end_date'] = interpret_date(daterange_parts[2])
    # Bug fix: previously logged daterange_parts[1] (the "-" separator)
    # as the end date; log the actual end date instead.
    app.logger.debug("Setrange parsed {} - {} dates as {} - {}".format(
        daterange_parts[0], daterange_parts[2],
        flask.session['begin_date'], flask.session['end_date']))
    return flask.redirect(flask.url_for("choose"))
####
#
# Initialize session variables
#
####
def init_session_values():
    """
    Start with some reasonable defaults for date and time ranges.
    Note this must be run in app context ... can't call from main.
    """
    # Default date span = tomorrow to 1 week from now
    now = arrow.now('local')
    tomorrow = now.replace(days=+1)
    nextweek = now.replace(days=+7)
    flask.session["begin_date"] = tomorrow.floor('day').isoformat()
    flask.session["end_date"] = nextweek.ceil('day').isoformat()
    # Human-readable form matching what the daterange widget displays.
    flask.session["daterange"] = "{} - {}".format(
        tomorrow.format("MM/DD/YYYY"),
        nextweek.format("MM/DD/YYYY"))
    # Default time span each day, 9am to 5pm (stored as ISO strings).
    flask.session["begin_time"] = interpret_time("9am")
    flask.session["end_time"] = interpret_time("5pm")
    app.logger.debug(flask.session["begin_time"] )
    app.logger.debug(flask.session["end_time"] )
def interpret_time( text ):
    """
    Read time in a human-compatible format (e.g. "9am", "1:30pm",
    "13:30") and interpret as ISO format with local timezone.

    Raises the underlying parse exception if the time can't be
    interpreted; in that case a message explaining the accepted
    formats is also flashed.
    """
    app.logger.debug("Decoding time '{}'".format(text))
    time_formats = ["ha", "h:mma", "h:mm a", "H:mm"]
    try:
        as_arrow = arrow.get(text, time_formats).replace(tzinfo=tz.tzlocal())
        app.logger.debug("Succeeded interpreting time")
    except Exception:
        # Narrowed from a bare 'except:' so KeyboardInterrupt/SystemExit
        # are not intercepted just to flash a parse hint.
        app.logger.debug("Failed to interpret time")
        flask.flash("Time '{}' didn't match accepted formats 13:30 or 1:30pm"
                    .format(text))
        raise
    return as_arrow.isoformat()
def interpret_date( text ):
    """
    Convert text of a date (MM/DD/YYYY) to the ISO format used
    internally, with the local time zone.

    Raises the underlying parse exception on failure, after flashing
    a hint about the expected format.
    """
    try:
        as_arrow = arrow.get(text, "MM/DD/YYYY").replace(
            tzinfo=tz.tzlocal())
    except Exception:
        # Narrowed from a bare 'except:'; also fixed the flash message,
        # which had a '{}' placeholder but never called .format(text).
        flask.flash("Date '{}' didn't fit expected format 12/31/2001"
                    .format(text))
        raise
    return as_arrow.isoformat()
def next_day(isotext):
    """
    ISO date + 1 day (used in query to Google calendar)
    """
    as_arrow = arrow.get(isotext)
    return as_arrow.replace(days=+1).isoformat()
def end_of_day(isotext):
    """
    Last second of the given ISO date, i.e. + 1 day - 1 second
    (used as the upper bound in queries to Google calendar).
    """
    as_arrow = arrow.get(isotext)
    return as_arrow.replace(days=+1,seconds=-1).isoformat()
####
#
# Functions (NOT pages) that return some information
#
####
def list_calendars(service):
    """
    Given a google 'service' object, return a list of
    calendars.  Each calendar is represented by a dict so that
    it can be stored in the session object and converted to
    json for cookies.  The returned list is sorted to have
    the primary calendar first, and selected (that is, displayed in
    Google Calendars web app) calendars before unselected calendars.
    """
    app.logger.debug("Entering list_calendars")
    calendar_list = service.calendarList().list().execute()["items"]
    result = []
    for cal in calendar_list:
        # 'selected' and 'primary' are optional binary attributes with
        # False as default; 'description' was previously read here but
        # never used, so the dead code has been removed.
        result.append(
            { "kind": cal["kind"],
              "id": cal["id"],
              "summary": cal["summary"],
              "selected": bool(cal.get("selected", False)),
              "primary": bool(cal.get("primary", False))
              })
    return sorted(result, key=cal_sort_key)
def cal_sort_key( cal ):
    """
    Sort key for the list of calendars: primary calendar first,
    then other selected calendars, then unselected calendars.
    (" " sorts before "X", and tuples are compared piecewise)
    """
    primary_key = " " if cal["primary"] else "X"
    selected_key = " " if cal["selected"] else "X"
    return (primary_key, selected_key, cal["summary"])
################
#
# Busytime code starts here
#
################
@app.route("/_calc_busy_time",methods=['post'])
def _calc_busy_time():
    """Form endpoint: store the calendars the user ticked, compute the
    busy events for them, and redirect back to the page we came from."""
    app.logger.debug("Checking credentials for Google calendar access")
    credentials = valid_credentials()
    if not credentials:
      app.logger.debug("Redirecting to authorization")
      return flask.redirect(flask.url_for('oauth2callback'))
    gcal_service = get_gcal_service(credentials)
    # Checkbox values from the calendar-selection form.
    flask.session['selected_cal']= request.form.getlist('calendars')
    app.logger.debug(flask.session['selected_cal'])
    flask.session['events'] = get_busy_time(gcal_service)
    return flask.redirect(flask.session['current_url'])
#    return jsonify(result=flask.session[events])
def get_busy_time(service):
    """
    Query each calendar the user selected for events in the session's
    date range, then drop events that fall entirely outside the chosen
    daily time window or are marked 'transparent' (shown as "Available"
    in Google Calendar).  Returns the remaining (busy) event dicts.
    """
    app.logger.debug(flask.session['begin_date'])
    from_date = flask.session['begin_date']
    to_time = end_of_day(flask.session['end_date'])
    start_time = format_arrow_time(flask.session['begin_time'])
    end_time = format_arrow_time(flask.session['end_time'])
    candidates = list()
    for cal in flask.session['selected_cal']:
        eventsResult = service.events().list(
            calendarId=cal,
            timeMin=from_date,
            timeMax=to_time,
            maxResults=100, singleEvents=True,
            orderBy='startTime').execute()
        candidates.extend(eventsResult.get('items', []))
    # Bug fix: the old code called events.remove(event) while iterating
    # the same list, which skips the element following each removal and
    # let some out-of-window events slip through.  Filter into a new
    # list instead.
    events = list()
    for event in candidates:
        # NOTE(review): assumes timed events; all-day events carry
        # 'date' instead of 'dateTime' and would raise KeyError here --
        # confirm upstream guarantees timed events only.
        event_start_time = format_arrow_time(event['start']['dateTime'])
        event_end_time = format_arrow_time(event['end']['dateTime'])
        app.logger.debug("Rules start time {} - end time {} | {} start time {} - end time {}".format(start_time, end_time, event["summary"], event_start_time, event_end_time))
        if event_start_time > end_time:
            app.logger.debug("too late event starts: " + event_start_time)
        elif event_end_time < start_time:
            app.logger.debug("too early event ends: " + event_end_time)
        elif 'transparency' in event and event['transparency'] == "transparent":
            app.logger.debug('Event is Available')
        else:
            events.append(event)
    return events
#################
#
# Functions used within the templates
#
#################
@app.template_filter( 'fmtdate' )
def format_arrow_date( date ):
    """Template filter: render an ISO date as e.g. 'Mon 12/31/2001';
    falls back to a placeholder on unparseable input."""
    try:
        normal = arrow.get( date )
        return normal.format("ddd MM/DD/YYYY")
    except Exception:
        # Narrowed from a bare 'except:' so KeyboardInterrupt/SystemExit
        # are not swallowed into a "(bad date)" placeholder.
        return "(bad date)"
@app.template_filter( 'fmttime' )
def format_arrow_time( time ):
    """Template filter: render an ISO timestamp's time-of-day as 'HH:mm';
    falls back to a placeholder on unparseable input."""
    try:
        normal = arrow.get( time )
        return normal.format("HH:mm")
    except Exception:
        return "(bad time)"
#############
if __name__ == "__main__":
    # App is created above so that it will
    # exist whether this is 'main' or not
    # (e.g., if we are running in a CGI script)
    # NOTE: a fresh random secret key on every start invalidates all
    # existing session cookies across restarts.
    app.secret_key = str(uuid.uuid4())
    app.debug=CONFIG.DEBUG
    app.logger.setLevel(logging.DEBUG)
    # We run on localhost only if debugging,
    # otherwise accessible to world
    if CONFIG.DEBUG:
        # Reachable only from the same computer
        app.run(port=CONFIG.PORT)
    else:
        # Reachable from anywhere
        app.run(port=CONFIG.PORT,host="0.0.0.0")
| Oalamoud/proj7-Gcal | main.py | Python | artistic-2.0 | 15,064 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
gateway tests - Object Wrappers
Copyright 2009-2013 Glencoe Software, Inc. All rights reserved.
Use is subject to license terms supplied in LICENSE.txt
"""
from omero.gateway.utils import ServiceOptsDict
import pytest
class TestServiceOptsDict (object):
    """Unit tests for omero.gateway.utils.ServiceOptsDict.

    ServiceOptsDict stores service options (omero.group / omero.user /
    omero.share, ...) and coerces numeric values to strings on insertion.

    Fixes over the original version:
    - ``self.fail(...)`` replaced by ``pytest.fail(...)`` -- this class
      does not derive from unittest.TestCase, so ``self.fail`` raised an
      AttributeError instead of a proper test failure.
    - ``assert d.get("omero.user", "5"), "5"`` (a vacuous assert with a
      message) corrected to an equality assertion.
    - Bare ``except:`` clauses narrowed to ``except Exception:``.
    """

    def test_constructor(self):
        assert ServiceOptsDict() == {}
        assert ServiceOptsDict() is not {}
        assert ServiceOptsDict() is not dict()

        d = {"omero.group":-1}
        d = ServiceOptsDict(d)
        resd = d.get("omero.group")
        assert isinstance(resd, str)
        assert d.get("omero.group") == str(d["omero.group"])

        d = ServiceOptsDict(x=1,y=2)
        assert d.get("x") == "1"
        assert d.get("y") == "2"

        # ServiceOptsDict can be passed initializing data, but it needs
        # to be a dict
        pytest.raises(AttributeError, ServiceOptsDict,
                      kwargs={"data":"data"})
        s = ServiceOptsDict(data={'option':'a'})

    def test_keys(self):
        d = ServiceOptsDict()
        assert d.keys() == []

        d = ServiceOptsDict({"omero.group":-1})
        k = d.keys()
        assert d.has_key('omero.group')
        pytest.raises(TypeError, d.keys, None)

    def test_values(self):
        d = ServiceOptsDict()
        assert d.values() == []

        d = ServiceOptsDict({"omero.group":-1})
        assert d.values() == ["-1"]
        pytest.raises(TypeError, d.values, None)

        # Non-string/non-numeric values (None, bools, lists, dicts) are
        # silently dropped on construction.
        d = ServiceOptsDict({"a":None, "b":True, "c":"foo", "d":1,
                             "e":1.45, "f":[], "g":{}})
        assert d.values() == ['foo', '1.45', '1']

    def test_items(self):
        d = ServiceOptsDict()
        assert d.items() == []

        d = ServiceOptsDict({"omero.group":-1})
        assert d.items() == [("omero.group", "-1")]
        pytest.raises(TypeError, d.items, None)

    def test_has_key(self):
        d = ServiceOptsDict()
        assert not d.has_key('omero')

        d = ServiceOptsDict({"omero.group":-1, "omero.user": 1})
        k = d.keys()
        k.sort()
        assert k == ['omero.group', 'omero.user']
        pytest.raises(TypeError, d.has_key)

    def test_contains(self):
        d = ServiceOptsDict()
        assert not ('omero.group' in d)
        assert 'omero.group' not in d

        d = ServiceOptsDict({"omero.group":-1, "omero.user": 1})
        assert 'omero.group' in d
        assert 'omero.user' in d
        assert 'omero.share' not in d

    def test_len(self):
        d = ServiceOptsDict()
        assert len(d) == 0

        d = ServiceOptsDict({"omero.group":-1, "omero.user": 1})
        assert len(d) == 2

    def test_getitem(self):
        d = ServiceOptsDict({"omero.group":-1, "omero.user": 1})
        assert d["omero.group"] == "-1"
        assert d["omero.user"] == "1"

        d["omero.share"] = 2
        d["foo"] = "bar"
        assert d["omero.share"] == "2"
        assert d["foo"] == "bar"

        del d["omero.user"]
        assert d == {"omero.group": "-1", 'foo': 'bar',
                     "omero.share": "2"}

        pytest.raises(TypeError, d.__getitem__)
        assert d.get("omero.user") == None
        # Was 'assert d.get(...), "5"' -- the comma made it an
        # assert-with-message that never checked the default value.
        assert d.get("omero.user", "5") == "5"

    def test_setitem(self):
        # string
        d = ServiceOptsDict({"omero.share": "2","omero.user": "1"})
        d["omero.group"] = "-1"

        # unicode
        d = ServiceOptsDict({"omero.share": u'2',"omero.user": u'1'})
        d["omero.group"] = u'-1'

        # int
        d = ServiceOptsDict({"omero.share": 1,"omero.user": 2})
        d["omero.group"] = -1

        # long
        import sys
        maxint = sys.maxint
        d = ServiceOptsDict({"omero.group": (maxint+1)})
        d["omero.user"] = (maxint+1)

        # Setter passed None as value remove from internal dict
        d = ServiceOptsDict({"omero.share": "2","omero.user": "1"})
        assert d.get("omero.share") != None
        d.setOmeroShare()
        assert d.get("omero.share") == None
        assert d.get("omero.user") != None
        d.setOmeroUser()
        assert d.get("omero.user") == None

        try:
            d = ServiceOptsDict({"omero.group": True})
            d["omero.user"] = True
        except Exception:
            pass
        else:
            pytest.fail("AttributeError: ServiceOptsDict argument must be a string, unicode or numeric type")

        try:
            d = ServiceOptsDict({"omero.group": []})
            d["omero.user"] = []
        except Exception:
            pass
        else:
            pytest.fail("AttributeError: ServiceOptsDict argument must be a string, unicode or numeric type")

        try:
            d = ServiceOptsDict({"omero.group": {}})
            d["omero.user"] = {}
        except Exception:
            pass
        else:
            pytest.fail("AttributeError: ServiceOptsDict argument must be a string, unicode or numeric type")

    def test_clear(self):
        d = ServiceOptsDict({"omero.group":-1, "omero.user": 1,
                             "omero.share": 2})
        d.clear()
        assert d == {}
        pytest.raises(TypeError, d.clear, None)

    def test_repr(self):
        d = ServiceOptsDict()
        assert repr(d) == '<ServiceOptsDict: {}>'
        d["omero.group"] = -1
        assert repr(d) == "<ServiceOptsDict: {'omero.group': '-1'}>"

    def test_copy(self):
        def getHash(obj):
            return hex(id(obj))

        d = ServiceOptsDict({"omero.group":-1, "omero.user": 1,
                             "omero.share": 2})
        # A copy compares equal but is a distinct object.
        assert d.copy() == d
        assert getHash(d.copy()) != getHash(d)
        assert ServiceOptsDict().copy() == ServiceOptsDict()
        assert getHash(ServiceOptsDict().copy()) != \
            getHash(ServiceOptsDict())
        pytest.raises(TypeError, d.copy, None)

    def test_setter_an_getter(self):
        group = -1
        user = 1
        share = 2

        d = ServiceOptsDict()
        d.set("omero.group", group)
        assert d.get("omero.group") == d.getOmeroGroup()

        d.setOmeroGroup(group)
        assert d.get("omero.group") == d.getOmeroGroup()

        d.set("omero.user", user)
        assert d.get("omero.user") == d.getOmeroUser()

        d.setOmeroUser(user)
        assert d.get("omero.user") == d.getOmeroUser()

        d.set("omero.share", share)
        assert d.get("omero.share") == d.getOmeroShare()

        d.setOmeroShare(share)
        assert d.get("omero.share") == d.getOmeroShare()
| jballanc/openmicroscopy | components/tools/OmeroPy/test/gatewaytest/test_utils.py | Python | gpl-2.0 | 6,683 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: refreshes help_text/verbose_name metadata
    on three Question boolean fields (no schema/data change)."""

    dependencies = [
        ('applications', '0001_initial'),
    ]
    operations = [
        migrations.AlterField(
            model_name='question',
            name='has_option_other',
            field=models.BooleanField(default=False, help_text=b"Used only with 'Choices' question type", verbose_name=b"Allow for 'Other' answer?"),
        ),
        migrations.AlterField(
            model_name='question',
            name='is_multiple_choice',
            field=models.BooleanField(default=False, help_text=b"Used only with 'Choices' question type", verbose_name=b'Are there multiple choices allowed?'),
        ),
        migrations.AlterField(
            model_name='question',
            name='is_required',
            field=models.BooleanField(default=True, verbose_name=b'Is the answer to the question required?'),
        ),
    ]
| patjouk/djangogirls | applications/migrations/0002_auto_20150308_2229.py | Python | bsd-3-clause | 1,002 |
def agts(queue):
    """Submit the adenine-thymine stacking vdW test to the AGTS queue."""
    script = 'adenine-thymine_complex_stack.py'
    queue.add(script, ncpus=4, walltime=40)
| qsnake/gpaw | gpaw/test/big/vdw/submit.adenine-thymine_complex_stack.agts.py | Python | gpl-3.0 | 89 |
# Copyright 2017 Intel Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron_lib.api import converters
from neutron_lib.db import constants as const
def validate_string(String):
    """Coerce None to the empty string; any other value passes through
    unchanged.  Used as a 'convert_to' hook in the resource maps below."""
    return '' if String is None else String
# Attribute map for the classification-group resource: a named, shareable
# container that combines classifications (and nested groups) with an
# AND/OR operator.
CLASSIFICATION_GROUP_RESOURCE_MAP = {
    'id': {
        'allow_post': False, 'allow_put': False,
        'validate': {'type:uuid': None},
        'is_visible': True, 'primary_key': True},
    'name': {
        'allow_post': True, 'allow_put': True,
        'is_visible': True, 'default': '',
        'validate': {'type:string': const.NAME_FIELD_SIZE}},
    'description': {
        'allow_post': True, 'allow_put': True,
        'is_visible': True, 'default': '',
        'validate': {'type:string': const.DESCRIPTION_FIELD_SIZE},
        'convert_to': validate_string},
    'project_id': {
        'allow_post': True, 'allow_put': False,
        'required_by_policy': True,
        'validate': {'type:string': const.PROJECT_ID_FIELD_SIZE},
        'is_visible': True},
    'shared': {
        'allow_post': True, 'allow_put': True,
        'is_visible': True, 'default': False,
        'convert_to': converters.convert_to_boolean},
    # How member classifications are combined; defaults to logical AND.
    'operator': {
        'allow_post': True, 'allow_put': True,
        'is_visible': True, 'default': 'AND',
        'validate': {'type:string': const.NAME_FIELD_SIZE},
        'convert_to': validate_string},
    'classification': {
        'allow_post': True, 'allow_put': True,
        'is_visible': True, 'default': [],
        'convert_to': converters.convert_to_list},
    'classification_group': {
        'allow_post': True, 'allow_put': True,
        'is_visible': True, 'default': []},
}
# Attribute map for an individual classification: a typed, optionally
# negated match definition.
CLASSIFICATION_RESOURCE_MAP = {
    'id': {
        'allow_post': False, 'allow_put': False,
        'validate': {'type:uuid': None},
        'is_visible': True, 'primary_key': True},
    'name': {
        'allow_post': True, 'allow_put': True,
        'is_visible': True, 'default': None,
        'validate': {'type:string': const.NAME_FIELD_SIZE},
        'convert_to': validate_string},
    'description': {
        'allow_post': True, 'allow_put': True,
        'is_visible': True, 'default': None,
        'validate': {'type:string': const.DESCRIPTION_FIELD_SIZE},
        'convert_to': validate_string},
    'project_id': {
        'allow_post': True, 'allow_put': False,
        'required_by_policy': True,
        'validate': {'type:string': const.PROJECT_ID_FIELD_SIZE},
        'is_visible': True},
    'shared': {
        'allow_post': True, 'allow_put': True,
        'is_visible': True, 'default': False,
        'convert_to': converters.convert_to_boolean},
    # The classification type name (see CLASSIFICATION_TYPE_RESOURCE_MAP).
    'c_type': {
        'allow_post': True, 'allow_put': True,
        'is_visible': True, 'default': None,
        'validate': {'type:string': const.NAME_FIELD_SIZE},
        'convert_to': validate_string},
    'negated': {
        'allow_post': True, 'allow_put': True,
        'is_visible': True, 'default': False,
        'convert_to': converters.convert_to_boolean},
    'definition': {
        'allow_post': True, 'allow_put': True,
        'is_visible': True,
        'convert_to': converters.convert_none_to_empty_dict},
}
# Attribute map for a classification type: the type name plus the
# parameters it supports.
CLASSIFICATION_TYPE_RESOURCE_MAP = {
    'type': {
        'allow_post': True, 'allow_put': True,
        'is_visible': True, 'default': None,
        'validate': {'type:string': const.NAME_FIELD_SIZE},
        'convert_to': validate_string},
    'supported_parameters': {
        'allow_post': True, 'allow_put': True,
        'is_visible': True, 'default': [],
        'convert_to': converters.convert_to_list},
}
| openstack/neutron-classifier | neutron_classifier/common/resources.py | Python | apache-2.0 | 4,144 |
##
## This file is part of the libsigrokdecode project.
##
## Copyright (C) 2013 Uwe Hermann <uwe@hermann-uwe.de>
##
## This program is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 2 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with this program; if not, write to the Free Software
## Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
##
'''
I2S dump: stacks on top of the 'i2s' protocol decoder and dumps the
decoded I2S audio data (see pd.py for details and supported options).
'''
from .pd import *
| martinling/libsigrokdecode | decoders/i2s_dump/__init__.py | Python | gpl-3.0 | 868 |
#!/usr/bin/python -W ignore
#
# This script provides named entity recogntion with the NER-tagger from Stanford NLP.
#
# Each line is tokenized into sentences -- a few lines are not correctly split into sentences by cc-segment-stories.
#
# http://www-nlp.stanford.edu/software/CRF-NER.shtml
# http://www.nltk.org/api/nltk.tag.html#module-nltk.tag.stanford
#
# 20130101012604.000|20130101012626.000|NER_03|Bill/PERSON|Clinton/PERSON|U.S./LOCATION
# Start time|End time|Primary tag(|Word/ner tag)*
#
# Written by FFS, 2014-08-09
#
# Changelog:
#
# 2014-08-13 Set #!/usr/bin/python -W ignore to turn off Unicode warnings
# 2014-08-10 Server mode via pyner -- note dict output
# 2014-08-09 Forked from PartsOfSpeech-StanfordNLP-01.py
#
# -----------------------------------------------------------------------------------------------------------------
# User input
import sys, os.path
scriptname = os.path.basename(sys.argv[0])
filename = sys.argv[1]
# Help screen
if filename == "-h" :
print "".join([ "\n","\t","This is a production script for named entity recognition -- issue:","\n" ])
print "".join([ "\t","\t",scriptname," $FIL.seg > $FIL.pos or" ])
print "".join([ "\t","\t",scriptname," $FIL.seg | sponge $FIL.seg" ])
print "".join([ "\n","\t","or use the seg-NER bash script for bulk processing." ])
print "".join([ "\n","\t","Limit to six instances to avoid socket errors.","\n" ])
quit()
# Libraries
import datetime, re
# Server mode (TCP/IP sockets, mixed and caseless, supported by pyner)
import ner
# Two tagger instances on different ports: 'Mix' for mixed-case text and
# 'UPP' -- presumably a caseless model -- for ALL-CAPS/all-lower
# sentences (see the isupper()/islower() branch below); confirm which
# models the servers on 2020/2021 actually load.
Mix = ner.SocketNER(host='localhost', port=2020, output_format='slashTags')
UPP = ner.SocketNER(host='localhost', port=2021, output_format='slashTags')
# Pattern for tokenizing
# http://www.clips.ua.ac.be/pages/pattern-en
from pattern.en import tokenize
# Counter -- n flips to 1 after the credit line has been emitted once.
n = 0
# A. Get the lines from the file
# (Python 2 script: byte-string processing and 'print x,' throughout.)
with open(filename) as fp:
    for line in fp:
        # B. Split each line into fields
        field = line.split("|")
        # Pretty debug
        # print('\n'.join('{}: {}'.format(*k) for k in enumerate(field)))
        # C. Header and footer
        if len(field[0]) != 18:
            print line,
            continue
        # D. Program credit
        if n == 0:
            credit=["NER_03|",datetime.datetime.now().strftime("%Y-%m-%d %H:%M"),"|Source_Program=stanford-ner 3.4, ",scriptname,"|Source_Person=Jenny Rose Finkel, FFS|Codebook=Category/Entity"]
            print "".join(credit)
            n=1
        # E. Segment tags and other non-caption tags
        if field[2] == "SEG":
            print line,
            continue
        elif len(field[2]) != 3:
            print line,
            continue
        # F. Get the text, clean leading chevrons -- if BOM, strip non-ascii, otherwise remove individually
        try:
            text = re.sub('^[>,\ ]{0,6}','', field[3])
            if re.search("(\xef\xbf\xbd)", text): text = ''.join([x for x in text if ord(x) < 128])
            text = str(text).replace('\x00 ','').replace('\xef\xbf\xbd','')
            text = str(text).replace('\xf7','').replace('\xc3\xba','').replace('\xb6','').replace('\xa9','').replace('\xe2\x99\xaa','')
            text = str(text).replace('\xc3\xaf','').replace('\x5c','').replace('\xf1','').replace('\xe1','').replace('\xe7','').replace('\xfa','')
            text = str(text).replace('\xf3','').replace('\xed','').replace('\xe9','').replace('\xe0','').replace('\xae','').replace('\xc2','')
            text = str(text).replace('\xc3','').replace('\xa2','').replace('\xbf','')
            # print text
        except IndexError:
            print line
            continue
        # G. Remove clearly wrong unicode characters -- BOM, NULL (only utf8 hex works)
        line = str(line).replace('\x00 ','').replace('\xef\xbf\xbd','')
        print line,
        # H. Ensure the text is split into sentences
        # tokenize(string, punctuation=".,;:!?()[]{}`''\"@#$^&*+-|=~_", replace={})
        for sentence in tokenize(text):
            # NOTE(review): 'all' shadows the builtin; it accumulates the
            # "|CATEGORY/name/name" output for this sentence.
            all = ""
            # I. Select the parser
            if sentence.isupper() or sentence.islower(): st = UPP
            else: st = Mix
            # J. Parts of speech with stanford-ner via pyner
            reply = st.get_entities(sentence)
            # {u'PERSON': [u'Bill Clinton'], u'LOCATION': [u'U.S.'], u'O': [u'was President of the']}
            try:
                for tup in reply.items():
                    names = ""
                    if tup[0] == "O" or not tup[0] : continue
                    for name in tup[1]:
                        names = "".join([names,"/",name])
                    all = "".join([all,"|",tup[0],names])
                if all != "": print "".join([field[0],"|",field[1],"|NER_03",all])
                # I/PRP|'M/MD|JOHN/NNP|DOE/NNP
            except (UnicodeDecodeError, UnicodeEncodeError, IndexError, AssertionError):
                print "".join([field[0],"|",field[1],"|NER_03","|NA"])
                continue
# K. Close the file (redundant: the 'with' block already closed it, and
# close() on a closed file is a harmless no-op)
fp.close()
# EOF
| RedHenLab/NLP | NER-StanfordNLP-annotate.py | Python | gpl-2.0 | 4,821 |
import logging
import logging.config
from sha3 import sha3_256
from bitcoin import privtopub
import struct
import os
import sys
import rlp
import db
import random
from rlp import big_endian_to_int, int_to_big_endian
logger = logging.getLogger(__name__)
# decorator
def debug(label):
    # Decorator factory (Python 2): wraps a function so each call prints
    # '<label> <random id> start <args>' before and '... end <result>'
    # after, letting interleaved calls be matched up by the random id.
    def deb(f):
        def inner(*args, **kwargs):
            i = random.randrange(1000000)
            print label, i, 'start', args
            x = f(*args, **kwargs)
            print label, i, 'end', x
            return x
        return inner
    return deb
def sha3(seed):
    # 32-byte digest via the 'sha3' package's sha3_256 (imported above).
    # NOTE(review): Ethereum uses Keccak-256, which the old pysha3
    # provided; do not swap in hashlib's NIST SHA3-256 -- different output.
    return sha3_256(seed).digest()
def privtoaddr(x):
    # Derive a 20-byte address (hex-encoded) from a private key given as
    # raw bytes or as a hex string (> 32 chars means hex-encoded).
    if len(x) > 32:
        x = x.decode('hex')
    return sha3(privtopub(x)[1:])[12:].encode('hex')
def zpad(x, l):
    """Left-pad *x* with NUL bytes so its length is at least *l*;
    longer inputs are returned unchanged."""
    return x.rjust(l, '\x00')
def coerce_addr_to_bin(x):
    # Normalize an address given as int/long, 40-char hex string, or raw
    # bytes to a 20-byte binary form.
    # NOTE(review): the int branch returns a hex-encoded string, not
    # binary -- looks inconsistent with the function name; confirm.
    if isinstance(x, (int, long)):
        return zpad(int_to_big_endian(x), 20).encode('hex')
    elif len(x) == 40 or len(x) == 0:
        return x.decode('hex')
    else:
        return zpad(x, 20)[-20:]
def coerce_addr_to_hex(x):
    # Normalize an address (int/long, hex string, or raw bytes) to a
    # 40-char hex string.
    if isinstance(x, (int, long)):
        return zpad(int_to_big_endian(x), 20).encode('hex')
    elif len(x) == 40 or len(x) == 0:
        return x
    else:
        return zpad(x, 20)[-20:].encode('hex')
def coerce_to_int(x):
    # Accept an int/long, a 40-char hex address, or big-endian bytes and
    # return the integer value.
    if isinstance(x, (int, long)):
        return x
    elif len(x) == 40:
        return big_endian_to_int(x.decode('hex'))
    else:
        return big_endian_to_int(x)
def coerce_to_bytes(x):
    # Accept an int/long, a 40-char hex address, or raw bytes and return
    # big-endian bytes.
    if isinstance(x, (int, long)):
        return int_to_big_endian(x)
    elif len(x) == 40:
        return x.decode('hex')
    else:
        return x
def int_to_big_endian4(integer):
    ''' 4 bytes big endian integer

    Raises struct.error if the value does not fit in an unsigned 32-bit
    integer.
    '''
    return struct.Struct('>I').pack(integer)
def recursive_int_to_big_endian(item):
    ''' convert all int to int_to_big_endian recursively
    '''
    if isinstance(item, (int, long)):
        return int_to_big_endian(item)
    elif isinstance(item, (list, tuple)):
        res = []
        # NOTE(review): the loop variable shadows the 'item' parameter;
        # harmless here because the parameter is not used afterwards.
        for item in item:
            res.append(recursive_int_to_big_endian(item))
        return res
    return item
def rlp_encode(item):
    '''
    item can be nested string/integer/list of string/integer
    '''
    # Integers are converted to big-endian byte strings first, since RLP
    # itself only encodes byte strings and lists.
    return rlp.encode(recursive_int_to_big_endian(item))
# Format encoders/decoders for bin, addr, int
def decode_hash(v):
    '''decodes a bytearray from hash'''
    # Empty hash maps to empty value; otherwise look the value up in the
    # content-addressed database (see encode_hash below).
    if v == '':
        return ''
    return db_get(v)
def decode_bin(v):
    '''decodes a bytearray from serialization'''
    if not isinstance(v, (str, unicode)):
        raise Exception("Value must be binary, not RLP array")
    return v
def decode_addr(v):
    '''decodes an address from serialization'''
    if len(v) not in [0, 20]:
        raise Exception("Serialized addresses must be empty or 20 bytes long!")
    return v.encode('hex')
def decode_int(v):
    '''decodes and integer from serialization'''
    # Leading zero bytes would make the encoding non-canonical.
    if len(v) > 0 and v[0] == '\x00':
        raise Exception("No leading zero bytes allowed for integers")
    return big_endian_to_int(v)
def decode_root(root):
    # A trie root is either a short (<32 byte) inline RLP list or an
    # empty/32-byte hash string.
    if isinstance(root, list):
        if len(rlp.encode(root)) >= 32:
            raise Exception("Direct RLP roots must have length <32")
    elif isinstance(root, (str, unicode)):
        if len(root) != 0 and len(root) != 32:
            raise Exception("String roots must be empty or length-32")
    else:
        raise Exception("Invalid root")
    return root
def encode_hash(v):
    '''encodes a bytearray into hash'''
    # Store the value under its sha3 hash (content addressing) and
    # return the hash; the empty value stays empty.
    if v == '':
        return ''
    k = sha3(v)
    db_put(k, v)
    return k
def encode_bin(v):
    '''encodes a bytearray into serialization'''
    return v
def encode_root(v):
    '''encodes a trie root into serialization'''
    return v
def encode_addr(v):
    '''encodes an address into serialization'''
    # Addresses travel as 40-char hex strings; serialize to raw bytes.
    if not isinstance(v, (str, unicode)) or len(v) not in [0, 40]:
        raise Exception("Address must be empty or 40 chars long")
    return v.decode('hex')
def encode_int(v):
    '''encodes an integer into serialization'''
    # Only unsigned 256-bit integers are representable.
    if not isinstance(v, (int, long)) or v < 0 or v >= 2 ** 256:
        raise Exception("Integer invalid or out of range")
    return int_to_big_endian(v)
# Dispatch tables mapping a format name ("hash", "bin", "addr", "int",
# "trie_root") to its decoder, encoder, and pretty-printer.
decoders = {
    "hash": decode_hash,
    "bin": decode_bin,
    "addr": decode_addr,
    "int": decode_int,
    "trie_root": decode_root,
}
encoders = {
    "hash": encode_hash,
    "bin": encode_bin,
    "addr": encode_addr,
    "int": encode_int,
    "trie_root": encode_root,
}
printers = {
    "hash": lambda v: '0x'+v.encode('hex'),
    "bin": lambda v: '0x'+v.encode('hex'),
    "addr": lambda v: v,
    "int": lambda v: str(v),
    "trie_root": lambda v: v.encode('hex')
}
def print_func_call(ignore_first_arg=False, max_call_number=100):
    ''' utility function to facilitate debug, it will print input args before
    function call, and print return value after function call

    usage:

        @print_func_call()
        def some_func_to_be_debugged():
            pass

    :param ignore_first_arg: whether print the first arg or not.
    useful when ignore the `self` parameter of an object method call
    :param max_call_number: raise an Exception once the decorated
    function has been called more than this many times (guards against
    runaway recursion while debugging).  Previously this parameter was
    accepted but ignored -- the limit was hardcoded to 100.
    '''
    from functools import wraps

    def display(x):
        # Render a value for printing; anything that isn't plain ASCII
        # is replaced by a placeholder.
        x = str(x)
        try:
            x.decode('ascii')
        except Exception:
            return 'NON_PRINTABLE'
        return x

    local = {'call_number': 0}

    def inner(f):

        @wraps(f)
        def wrapper(*args, **kwargs):
            local['call_number'] = local['call_number'] + 1
            tmp_args = args[1:] if ignore_first_arg and len(args) else args
            this_call_number = local['call_number']
            print('{0}#{1} args: {2}, {3}'.format(
                f.__name__,
                this_call_number,
                ', '.join([display(x) for x in tmp_args]),
                ', '.join(display(key) + '=' + str(value)
                          for key, value in kwargs.items())))
            res = f(*args, **kwargs)
            print('{0}#{1} return: {2}'.format(
                f.__name__,
                this_call_number,
                display(res)))
            if local['call_number'] > max_call_number:
                raise Exception("Touch max call number!")
            return res
        return wrapper
    return inner
class DataDir(object):
    """Resolve (and lazily create) the application data directory."""

    # Per-platform default locations; unknown platforms fall back to the
    # 'linux2' entry (see _set_default).
    ethdirs = {
        "linux2": "~/.pyethereum",
        "darwin": "~/Library/Application Support/Pyethereum/",
        "win32": "~/AppData/Roaming/Pyethereum",
        "win64": "~/AppData/Roaming/Pyethereum",
    }
    def __init__(self):
        self._path = None
    def set(self, path):
        # Normalize to an absolute path, creating the directory if it
        # does not exist yet.
        path = os.path.abspath(path)
        if not os.path.exists(path):
            os.makedirs(path)
        assert os.path.isdir(path)
        self._path = path
    def _set_default(self):
        p = self.ethdirs.get(sys.platform, self.ethdirs['linux2'])
        self.set(os.path.expanduser(os.path.normpath(p)))
    @property
    def path(self):
        # Lazily initialized: first access creates the platform default
        # directory.
        if not self._path:
            self._set_default()
        return self._path
# Module-level singleton used by the path helpers below.
data_dir = DataDir()
def get_db_path():
    # Location of the state database inside the data directory.
    return os.path.join(data_dir.path, 'statedb')
def get_index_path():
    # Location of the index database inside the data directory.
    return os.path.join(data_dir.path, 'indexdb')
def db_put(key, value):
    # Opens a fresh DB handle per call and commits immediately.
    database = db.DB(get_db_path())
    res = database.put(key, value)
    database.commit()
    return res
def db_get(key):
    database = db.DB(get_db_path())
    return database.get(key)
def dump_state(trie):
    """Render the trie's key/value pairs, one hex-encoded 'key':'value'
    line per entry, and return the whole dump as a single string."""
    lines = ['%r:%r\n' % (key.encode('hex'), value.encode('hex'))
             for key, value in trie.to_dict().items()]
    return ''.join(lines)
def configure_logging(loggerlevels=':DEBUG', verbosity=1):
    # Configure stdlib logging from a comma-separated "name:LEVEL" spec.
    # Entries with an explicit name get the verbose (thread/module)
    # formatter; if no entries are given, the root logger gets a minimal
    # formatter at a level derived from `verbosity` (0..3).
    logconfig = dict(
        version=1,
        disable_existing_loggers=False,
        formatters=dict(
            debug=dict(
                format='%(threadName)s:%(module)s: %(message)s'
            ),
            minimal=dict(
                format='%(message)s'
            ),
        ),
        handlers=dict(
            default={
                'level': 'INFO',
                'class': 'logging.StreamHandler',
                'formatter': 'minimal'
            },
            verbose={
                'level': 'DEBUG',
                'class': 'logging.StreamHandler',
                'formatter': 'debug'
            },
        ),
        loggers=dict()
    )
    # Only "name:LEVEL" entries are honored; malformed ones are skipped.
    for loggerlevel in filter(lambda _: ':' in _, loggerlevels.split(',')):
        name, level = loggerlevel.split(':')
        logconfig['loggers'][name] = dict(
            handlers=['verbose'], level=level, propagate=False)
    if len(logconfig['loggers']) == 0:
        logconfig['loggers'][''] = dict(
            handlers=['default'],
            level={0: 'ERROR', 1: 'WARNING', 2: 'INFO', 3: 'DEBUG'}.get(
                verbosity),
            propagate=True)
    logging.config.dictConfig(logconfig)
    # logging.debug("logging set up like that: %r", logconfig)
class Denoms():
    """Ether denominations, each expressed as a number of wei."""

    def __init__(self):
        for name, value in [('wei', 1),
                            ('babbage', 10 ** 3),
                            ('lovelace', 10 ** 6),
                            ('shannon', 10 ** 9),
                            ('szabo', 10 ** 12),
                            ('finney', 10 ** 15),
                            ('ether', 10 ** 18),
                            ('turing', 2 ** 256)]:
            setattr(self, name, value)
denoms = Denoms()
| ethers/pyethereum | pyethereum/utils.py | Python | mit | 9,141 |
#!/usr/bin/env python
"""Tests for the stats_store classes."""
import math
import pandas
# pylint: disable=unused-import,g-bad-import-order
from grr.lib import server_plugins
# pylint: enable=unused-import,g-bad-import-order
from grr.lib import aff4
from grr.lib import data_store
from grr.lib import flags
from grr.lib import rdfvalue
from grr.lib import stats
from grr.lib import test_lib
from grr.lib.aff4_objects import stats_store
class StatsStoreTest(test_lib.GRRBaseTest):
  """Tests writing and reading metrics through the StatsStore AFF4 object."""
  def setUp(self):
    """Creates a writable StatsStore fixture and a fixed process id."""
    super(StatsStoreTest, self).setUp()
    self.process_id = "some_pid"
    self.stats_store = aff4.FACTORY.Create(
        None, "StatsStore", mode="w", token=self.token)
  def testCountersAreWrittenToDataStore(self):
    """A plain counter value is serialized into the process row."""
    stats.STATS.RegisterCounterMetric("counter")
    stats.STATS.IncrementCounter("counter")
    self.stats_store.WriteStats(process_id=self.process_id, timestamp=42,
                                sync=True)
    row = data_store.DB.ResolveRegex("aff4:/stats_store/some_pid", ".*",
                                     token=self.token)
    counter = [x for x in row if x[0] == "aff4:stats_store/counter"]
    self.assertTrue(counter)
    stored_value = rdfvalue.StatsStoreValue(
        value_type=rdfvalue.MetricMetadata.ValueType.INT,
        int_value=1)
    self.assertEqual(counter[0], ("aff4:stats_store/counter",
                                  stored_value.SerializeToString(),
                                  42))
  def testCountersWithFieldsAreWrittenToDataStore(self):
    """Counters with fields store one value per field combination."""
    stats.STATS.RegisterCounterMetric("counter", fields=[("source", str)])
    stats.STATS.IncrementCounter("counter", fields=["http"])
    stats.STATS.IncrementCounter("counter", delta=2, fields=["rpc"])
    self.stats_store.WriteStats(process_id=self.process_id, timestamp=42,
                                sync=True)
    row = data_store.DB.ResolveRegex("aff4:/stats_store/some_pid", ".*",
                                     token=self.token)
    # Check that no plain counter is written.
    values = [rdfvalue.StatsStoreValue(x[1]) for x in row
              if x[0] == "aff4:stats_store/counter"]
    self.assertEqual(len(values), 2)
    http_field_value = rdfvalue.StatsStoreFieldValue(
        field_type=rdfvalue.MetricFieldDefinition.FieldType.STR,
        str_value="http")
    rpc_field_value = rdfvalue.StatsStoreFieldValue(
        field_type=rdfvalue.MetricFieldDefinition.FieldType.STR,
        str_value="rpc")
    # Check that counter with source=http is written.
    http_counter = [x for x in values
                    if x.fields_values == [http_field_value]]
    self.assertTrue(http_counter)
    self.assertEqual(http_counter[0].value_type,
                     rdfvalue.MetricMetadata.ValueType.INT)
    self.assertEqual(http_counter[0].int_value, 1)
    # Check that counter with source=rpc is written.
    rpc_counter = [x for x in values
                   if x.fields_values == [rpc_field_value]]
    self.assertTrue(rpc_counter)
    self.assertEqual(rpc_counter[0].value_type,
                     rdfvalue.MetricMetadata.ValueType.INT)
    self.assertEqual(rpc_counter[0].int_value, 2)
  def testEventMetricsAreWrittenToDataStore(self):
    """Event metrics are stored as distributions (count and sum)."""
    stats.STATS.RegisterEventMetric("foo_event")
    stats.STATS.RecordEvent("foo_event", 5)
    stats.STATS.RecordEvent("foo_event", 15)
    self.stats_store.WriteStats(process_id=self.process_id, timestamp=42,
                                sync=True)
    row = data_store.DB.ResolveRegex("aff4:/stats_store/some_pid", ".*",
                                     token=self.token)
    values = [rdfvalue.StatsStoreValue(x[1]) for x in row
              if x[0] == "aff4:stats_store/foo_event"]
    self.assertEqual(len(values), 1)
    stored_value = values[0]
    self.assertEqual(stored_value.value_type,
                     rdfvalue.MetricMetadata.ValueType.DISTRIBUTION)
    self.assertEqual(stored_value.distribution_value.count, 2)
    self.assertEqual(stored_value.distribution_value.sum, 20)
  def testEventMetricsWithFieldsAreWrittenToDataStore(self):
    """Event metrics with fields store one distribution per field value."""
    stats.STATS.RegisterEventMetric("foo_event", fields=[("source", str)])
    stats.STATS.RecordEvent("foo_event", 5, fields=["http"])
    stats.STATS.RecordEvent("foo_event", 15, fields=["rpc"])
    self.stats_store.WriteStats(process_id=self.process_id, timestamp=42,
                                sync=True)
    row = data_store.DB.ResolveRegex("aff4:/stats_store/some_pid", ".*",
                                     token=self.token)
    values = [rdfvalue.StatsStoreValue(x[1]) for x in row
              if x[0] == "aff4:stats_store/foo_event"]
    self.assertEqual(len(values), 2)
    http_field_value = rdfvalue.StatsStoreFieldValue(
        field_type=rdfvalue.MetricFieldDefinition.FieldType.STR,
        str_value="http")
    rpc_field_value = rdfvalue.StatsStoreFieldValue(
        field_type=rdfvalue.MetricFieldDefinition.FieldType.STR,
        str_value="rpc")
    # Check that distribution with source=http is written.
    http_events = [x for x in values
                   if x.fields_values == [http_field_value]]
    self.assertTrue(http_events)
    self.assertEqual(http_events[0].value_type,
                     rdfvalue.MetricMetadata.ValueType.DISTRIBUTION)
    self.assertEqual(http_events[0].distribution_value.count, 1)
    self.assertEqual(http_events[0].distribution_value.sum, 5)
    # Check that distribution with source=rpc is written.
    rpc_events = [x for x in values
                  if x.fields_values == [rpc_field_value]]
    self.assertTrue(rpc_events)
    self.assertEqual(rpc_events[0].value_type,
                     rdfvalue.MetricMetadata.ValueType.DISTRIBUTION)
    self.assertEqual(rpc_events[0].distribution_value.count, 1)
    self.assertEqual(rpc_events[0].distribution_value.sum, 15)
  def testStringGaugeValuesAreWrittenToDataStore(self):
    """String gauges round-trip with a STR value type."""
    stats.STATS.RegisterGaugeMetric("str_gauge", str)
    stats.STATS.SetGaugeValue("str_gauge", "some_value")
    self.stats_store.WriteStats(process_id=self.process_id, timestamp=42,
                                sync=True)
    row = data_store.DB.ResolveRegex("aff4:/stats_store/some_pid", ".*",
                                     token=self.token)
    counter = [x for x in row if x[0] == "aff4:stats_store/str_gauge"]
    self.assertTrue(counter)
    stored_value = rdfvalue.StatsStoreValue(
        value_type=rdfvalue.MetricMetadata.ValueType.STR,
        str_value="some_value")
    self.assertEqual(counter[0], ("aff4:stats_store/str_gauge",
                                  stored_value.SerializeToString(),
                                  42))
  def testIntGaugeValuesAreWrittenToDataStore(self):
    """Integer gauges round-trip with an INT value type."""
    stats.STATS.RegisterGaugeMetric("int_gauge", int)
    stats.STATS.SetGaugeValue("int_gauge", 4242)
    self.stats_store.WriteStats(process_id=self.process_id, timestamp=42,
                                sync=True)
    row = data_store.DB.ResolveRegex("aff4:/stats_store/some_pid", ".*",
                                     token=self.token)
    counter = [x for x in row if x[0] == "aff4:stats_store/int_gauge"]
    self.assertTrue(counter)
    stored_value = rdfvalue.StatsStoreValue(
        value_type=rdfvalue.MetricMetadata.ValueType.INT,
        int_value=4242)
    self.assertEqual(counter[0], ("aff4:stats_store/int_gauge",
                                  stored_value.SerializeToString(),
                                  42))
  def testLaterValuesDoNotOverridePrevious(self):
    """Writes at later timestamps append instead of overwriting."""
    stats.STATS.RegisterCounterMetric("counter")
    stats.STATS.IncrementCounter("counter")
    self.stats_store.WriteStats(process_id=self.process_id, timestamp=42,
                                sync=True)
    stats.STATS.IncrementCounter("counter")
    self.stats_store.WriteStats(process_id=self.process_id, timestamp=43,
                                sync=True)
    row = data_store.DB.ResolveRegex("aff4:/stats_store/some_pid", ".*",
                                     token=self.token)
    counters = [x for x in row if x[0] == "aff4:stats_store/counter"]
    self.assertEqual(len(counters), 2)
    counters = sorted(counters, key=lambda x: x[2])
    stored_value = rdfvalue.StatsStoreValue(
        value_type=rdfvalue.MetricMetadata.ValueType.INT,
        int_value=1)
    self.assertEqual(counters[0], ("aff4:stats_store/counter",
                                   stored_value.SerializeToString(),
                                   42))
    stored_value = rdfvalue.StatsStoreValue(
        value_type=rdfvalue.MetricMetadata.ValueType.INT,
        int_value=2)
    self.assertEqual(counters[1], ("aff4:stats_store/counter",
                                   stored_value.SerializeToString(),
                                   43))
  def testValuesAreFetchedCorrectly(self):
    """ReadStats returns full (value, timestamp) histories per metric."""
    stats.STATS.RegisterCounterMetric("counter")
    stats.STATS.RegisterGaugeMetric("int_gauge", int)
    stats.STATS.SetGaugeValue("int_gauge", 4242)
    stats.STATS.IncrementCounter("counter")
    self.stats_store.WriteStats(process_id=self.process_id, timestamp=42,
                                sync=True)
    stats.STATS.IncrementCounter("counter")
    self.stats_store.WriteStats(process_id=self.process_id, timestamp=43,
                                sync=True)
    stats_history = self.stats_store.ReadStats(
        process_id=self.process_id,
        timestamp=self.stats_store.ALL_TIMESTAMPS)
    self.assertEqual(stats_history["counter"], [(1, 42), (2, 43)])
    self.assertEqual(stats_history["int_gauge"], [(4242, 42), (4242, 43)])
  def testFetchedValuesCanBeLimitedByTimeRange(self):
    """ReadStats honors a (start, end) timestamp tuple."""
    stats.STATS.RegisterCounterMetric("counter")
    stats.STATS.RegisterGaugeMetric("int_gauge", int)
    stats.STATS.SetGaugeValue("int_gauge", 4242)
    stats.STATS.IncrementCounter("counter")
    self.stats_store.WriteStats(process_id=self.process_id, timestamp=42,
                                sync=True)
    stats.STATS.IncrementCounter("counter")
    self.stats_store.WriteStats(process_id=self.process_id, timestamp=43,
                                sync=True)
    stats_history = self.stats_store.ReadStats(process_id=self.process_id,
                                               timestamp=(0, 42))
    self.assertEqual(stats_history["counter"], [(1, 42)])
    self.assertEqual(stats_history["int_gauge"], [(4242, 42)])
  def testFetchedValuesCanBeLimitedByName(self):
    """ReadStats honors a predicate regex over metric names."""
    stats.STATS.RegisterCounterMetric("counter")
    stats.STATS.RegisterGaugeMetric("int_gauge", int)
    stats.STATS.SetGaugeValue("int_gauge", 4242)
    stats.STATS.IncrementCounter("counter")
    self.stats_store.WriteStats(process_id=self.process_id, timestamp=42,
                                sync=True)
    stats.STATS.IncrementCounter("counter")
    self.stats_store.WriteStats(process_id=self.process_id, timestamp=43,
                                sync=True)
    stats_history = self.stats_store.ReadStats(process_id=self.process_id,
                                               predicate_regex="counter")
    self.assertEqual(stats_history["counter"], [(1, 42), (2, 43)])
    self.assertTrue("int_gauge" not in stats_history)
  def testDeleteStatsInTimeRangeWorksCorrectly(self):
    """DeleteStats removes only data points inside the time range."""
    stats.STATS.RegisterCounterMetric("counter")
    stats.STATS.RegisterGaugeMetric("int_gauge", int)
    stats.STATS.SetGaugeValue("int_gauge", 4242)
    stats.STATS.IncrementCounter("counter")
    self.stats_store.WriteStats(process_id=self.process_id, timestamp=42,
                                sync=True)
    stats.STATS.IncrementCounter("counter")
    self.stats_store.WriteStats(process_id=self.process_id, timestamp=44,
                                sync=True)
    self.stats_store.DeleteStats(process_id=self.process_id, timestamp=(0, 43),
                                 sync=True)
    stats_history = self.stats_store.ReadStats(process_id=self.process_id)
    self.assertEqual(stats_history["counter"], [(2, 44)])
    self.assertEqual(stats_history["int_gauge"], [(4242, 44)])
  def testDeleteStatsInTimeRangeWorksCorrectlyWithFields(self):
    """DeleteStats respects time ranges for per-field counter values."""
    stats.STATS.RegisterCounterMetric("counter", fields=[("source", str)])
    stats.STATS.IncrementCounter("counter", fields=["http"])
    self.stats_store.WriteStats(process_id=self.process_id, timestamp=42,
                                sync=True)
    stats.STATS.IncrementCounter("counter", fields=["http"])
    stats.STATS.IncrementCounter("counter", fields=["rpc"])
    self.stats_store.WriteStats(process_id=self.process_id, timestamp=44,
                                sync=True)
    self.stats_store.DeleteStats(process_id=self.process_id, timestamp=(0, 43),
                                 sync=True)
    stats_history = self.stats_store.ReadStats(process_id=self.process_id)
    self.assertEqual(stats_history["counter"]["http"], [(2, 44)])
    self.assertEqual(stats_history["counter"]["rpc"], [(1, 44)])
  def testReturnsListOfAllUsedProcessIds(self):
    """ListUsedProcessIds reflects every process id written to."""
    stats.STATS.RegisterCounterMetric("counter")
    stats.STATS.RegisterGaugeMetric("int_gauge", int)
    self.stats_store.WriteStats(process_id="pid1", sync=True)
    self.stats_store.WriteStats(process_id="pid2", sync=True)
    self.assertEqual(sorted(self.stats_store.ListUsedProcessIds()),
                     ["pid1", "pid2"])
  def testMultiReadStatsWorksCorrectly(self):
    """MultiReadStats returns per-process metric histories."""
    stats.STATS.RegisterCounterMetric("counter")
    stats.STATS.IncrementCounter("counter")
    self.stats_store.WriteStats(process_id="pid1", timestamp=42, sync=True)
    self.stats_store.WriteStats(process_id="pid2", timestamp=42, sync=True)
    self.stats_store.WriteStats(process_id="pid2", timestamp=43, sync=True)
    stats.STATS.IncrementCounter("counter")
    self.stats_store.WriteStats(process_id="pid1", timestamp=43, sync=True)
    results = self.stats_store.MultiReadStats()
    self.assertEqual(sorted(results.keys()), ["pid1", "pid2"])
    self.assertEqual(results["pid1"]["counter"], [(1, 42), (2, 43)])
    self.assertEqual(results["pid2"]["counter"], [(1, 42), (1, 43)])
  def testMultiReadStatsLimitsResultsByTimeRange(self):
    """MultiReadStats honors a (start, end) timestamp tuple."""
    stats.STATS.RegisterCounterMetric("counter")
    stats.STATS.IncrementCounter("counter")
    self.stats_store.WriteStats(process_id="pid1", timestamp=42, sync=True)
    self.stats_store.WriteStats(process_id="pid2", timestamp=42, sync=True)
    self.stats_store.WriteStats(process_id="pid2", timestamp=44, sync=True)
    stats.STATS.IncrementCounter("counter")
    self.stats_store.WriteStats(process_id="pid1", timestamp=44, sync=True)
    results = self.stats_store.MultiReadStats(
        timestamp=(43, 100))
    self.assertEqual(sorted(results.keys()), ["pid1", "pid2"])
    self.assertEqual(results["pid1"]["counter"], [(2, 44)])
    self.assertEqual(results["pid2"]["counter"], [(1, 44)])
  def testReadMetadataReturnsAllUsedMetadata(self):
    """Metadata for every metric type is persisted on WriteStats."""
    # Register metrics
    stats.STATS.RegisterCounterMetric("counter")
    stats.STATS.RegisterCounterMetric("counter_with_fields",
                                      fields=[("source", str)])
    stats.STATS.RegisterEventMetric("events")
    stats.STATS.RegisterEventMetric("events_with_fields",
                                    fields=[("source", str)])
    stats.STATS.RegisterGaugeMetric("str_gauge", str)
    stats.STATS.RegisterGaugeMetric("str_gauge_with_fields", str,
                                    fields=[("task", int)])
    # Check that there are no metadata for registered metrics.
    metadata = self.stats_store.ReadMetadata(process_id=self.process_id)
    self.assertFalse("counter" in metadata)
    self.assertFalse("counter_with_fields" in metadata)
    self.assertFalse("events" in metadata)
    self.assertFalse("events_with_fields" in metadata)
    self.assertFalse("str_gauge" in metadata)
    self.assertFalse("str_gauge_with_fields" in metadata)
    # Write stats to the data store. Metadata should be
    # written as well.
    self.stats_store.WriteStats(process_id=self.process_id,
                                timestamp=42, sync=True)
    # Check that metadata were written into the store.
    metadata = self.stats_store.ReadMetadata(process_id=self.process_id)
    # Field definitions used in assertions below.
    source_field_def = rdfvalue.MetricFieldDefinition(
        field_name="source",
        field_type=rdfvalue.MetricFieldDefinition.FieldType.STR)
    task_field_def = rdfvalue.MetricFieldDefinition(
        field_name="task",
        field_type=rdfvalue.MetricFieldDefinition.FieldType.INT)
    self.assertTrue("counter" in metadata)
    self.assertEqual(metadata["counter"].varname, "counter")
    self.assertEqual(metadata["counter"].metric_type, stats.MetricType.COUNTER)
    self.assertEqual(metadata["counter"].value_type,
                     rdfvalue.MetricMetadata.ValueType.INT)
    self.assertListEqual(list(metadata["counter"].fields_defs), [])
    self.assertTrue("counter_with_fields" in metadata)
    self.assertEqual(metadata["counter_with_fields"].varname,
                     "counter_with_fields")
    self.assertEqual(metadata["counter_with_fields"].metric_type,
                     stats.MetricType.COUNTER)
    self.assertEqual(metadata["counter_with_fields"].value_type,
                     rdfvalue.MetricMetadata.ValueType.INT)
    self.assertListEqual(list(metadata["counter_with_fields"].fields_defs),
                         [source_field_def])
    self.assertTrue("events" in metadata)
    self.assertEqual(metadata["events"].varname, "events")
    self.assertEqual(metadata["events"].metric_type, stats.MetricType.EVENT)
    self.assertEqual(metadata["events"].value_type,
                     rdfvalue.MetricMetadata.ValueType.DISTRIBUTION)
    self.assertListEqual(list(metadata["events"].fields_defs), [])
    self.assertTrue("events_with_fields" in metadata)
    self.assertEqual(metadata["events_with_fields"].varname,
                     "events_with_fields")
    self.assertEqual(metadata["events_with_fields"].metric_type,
                     stats.MetricType.EVENT)
    self.assertEqual(metadata["events_with_fields"].value_type,
                     rdfvalue.MetricMetadata.ValueType.DISTRIBUTION)
    self.assertListEqual(list(metadata["events_with_fields"].fields_defs),
                         [source_field_def])
    self.assertTrue("str_gauge" in metadata)
    self.assertEqual(metadata["str_gauge"].varname, "str_gauge")
    self.assertEqual(metadata["str_gauge"].metric_type, stats.MetricType.GAUGE)
    self.assertEqual(metadata["str_gauge"].value_type,
                     rdfvalue.MetricMetadata.ValueType.STR)
    self.assertListEqual(list(metadata["str_gauge"].fields_defs), [])
    self.assertTrue("str_gauge_with_fields" in metadata)
    self.assertEqual(metadata["str_gauge_with_fields"].varname,
                     "str_gauge_with_fields")
    self.assertEqual(metadata["str_gauge_with_fields"].metric_type,
                     stats.MetricType.GAUGE)
    self.assertEqual(metadata["str_gauge_with_fields"].value_type,
                     rdfvalue.MetricMetadata.ValueType.STR)
    self.assertListEqual(list(metadata["str_gauge_with_fields"].fields_defs),
                         [task_field_def])
  def testMultiReadMetadataReturnsAllUsedMetadata(self):
    """Metadata is tracked independently per process id."""
    stats.STATS.RegisterCounterMetric("counter")
    # Check that there are no metadata for registered metrics.
    metadata_by_id = self.stats_store.MultiReadMetadata(
        process_ids=["pid1", "pid2"])
    self.assertFalse("counter" in metadata_by_id["pid1"])
    self.assertFalse("counter" in metadata_by_id["pid2"])
    # Write stats to the data store. Metadata should be
    # written as well.
    self.stats_store.WriteStats(process_id="pid1",
                                timestamp=42, sync=True)
    # Now metadata should be found only for the pid1.
    metadata_by_id = self.stats_store.MultiReadMetadata(
        process_ids=["pid1", "pid2"])
    self.assertTrue("counter" in metadata_by_id["pid1"])
    self.assertFalse("counter" in metadata_by_id["pid2"])
    # Write stats for the pid2 and check again.
    self.stats_store.WriteStats(process_id="pid2",
                                timestamp=42, sync=True)
    metadata_by_id = self.stats_store.MultiReadMetadata(
        process_ids=["pid1", "pid2"])
    self.assertTrue("counter" in metadata_by_id["pid1"])
    self.assertTrue("counter" in metadata_by_id["pid2"])
class StatsStoreDataQueryTest(test_lib.GRRBaseTest):
"""Tests for StatsStoreDataQuery class."""
  def setUp(self):
    """Creates a writable StatsStore fixture and a fixed process id."""
    super(StatsStoreDataQueryTest, self).setUp()
    self.process_id = "some_pid"
    self.stats_store = aff4.FACTORY.Create(
        None, "StatsStore", mode="w", token=self.token)
  def testUsingInCallNarrowsQuerySpace(self):
    """In()/InAll() narrow the set of matched time series."""
    # Create sample data.
    stats.STATS.RegisterCounterMetric("counter")
    stats.STATS.RegisterCounterMetric("counter_with_fields",
                                      fields=[("source", str)])
    stats.STATS.IncrementCounter("counter")
    stats.STATS.IncrementCounter("counter_with_fields",
                                 fields=["http"])
    stats.STATS.IncrementCounter("counter_with_fields",
                                 fields=["rpc"])
    # Write to data store.
    self.stats_store.WriteStats(process_id=self.process_id,
                                timestamp=42, sync=True)
    # Read them back and apply queries with In() and InAll() calls.
    stats_data = self.stats_store.ReadStats(process_id=self.process_id)
    query = stats_store.StatsStoreDataQuery(stats_data)
    self.assertEqual(query.In("counter").SeriesCount(), 1)
    query = stats_store.StatsStoreDataQuery(stats_data)
    self.assertEqual(query.In("counter_with_fields").InAll().SeriesCount(), 2)
    query = stats_store.StatsStoreDataQuery(stats_data)
    self.assertEqual(query.In("counter_with_fields").In("http").SeriesCount(),
                     1)
  def testInCallAcceptsRegularExpressions(self):
    """In() treats its argument as a regular expression over names."""
    # Initialize and write test data.
    stats.STATS.RegisterCounterMetric("counter")
    stats.STATS.IncrementCounter("counter")
    self.stats_store.WriteStats(
        process_id="pid1",
        timestamp=rdfvalue.RDFDatetime().FromSecondsFromEpoch(0),
        sync=True)
    stats.STATS.IncrementCounter("counter")
    self.stats_store.WriteStats(
        process_id="pid1",
        timestamp=rdfvalue.RDFDatetime().FromSecondsFromEpoch(90),
        sync=True)
    self.stats_store.WriteStats(
        process_id="pid2",
        timestamp=rdfvalue.RDFDatetime().FromSecondsFromEpoch(90),
        sync=True)
    stats_data = self.stats_store.MultiReadStats(process_ids=["pid1", "pid2"])
    query = stats_store.StatsStoreDataQuery(stats_data)
    self.assertEqual(query.In("pid1").In("counter").SeriesCount(), 1)
    query = stats_store.StatsStoreDataQuery(stats_data)
    self.assertEqual(query.In("pid2").In("counter").SeriesCount(), 1)
    query = stats_store.StatsStoreDataQuery(stats_data)
    self.assertEqual(query.In("pid.*").In("counter").SeriesCount(), 2)
  def testInTimeRangeLimitsQueriesByTime(self):
    """InTimeRange() keeps only data points inside the given window."""
    # Initialize and write test data.
    stats.STATS.RegisterCounterMetric("counter")
    stats.STATS.IncrementCounter("counter")
    self.stats_store.WriteStats(
        process_id=self.process_id,
        timestamp=rdfvalue.RDFDatetime().FromSecondsFromEpoch(42),
        sync=True)
    stats.STATS.IncrementCounter("counter")
    self.stats_store.WriteStats(
        process_id=self.process_id,
        timestamp=rdfvalue.RDFDatetime().FromSecondsFromEpoch(100),
        sync=True)
    stats.STATS.IncrementCounter("counter")
    self.stats_store.WriteStats(
        process_id=self.process_id,
        timestamp=rdfvalue.RDFDatetime().FromSecondsFromEpoch(140),
        sync=True)
    # Read data back.
    stats_data = self.stats_store.ReadStats(process_id=self.process_id)
    # Check that InTimeRange works as expected.
    query = stats_store.StatsStoreDataQuery(stats_data)
    ts = query.In("counter").TakeValue().InTimeRange(
        rdfvalue.RDFDatetime().FromSecondsFromEpoch(80),
        rdfvalue.RDFDatetime().FromSecondsFromEpoch(120)).ts
    self.assertListEqual(list(ts), [2])
    self.assertListEqual(list(ts.index), [pandas.Timestamp(100 * 1e9)])
  def testInTimeRangeRaisesIfAppliedBeforeTakeMethod(self):
    """InTimeRange() requires a preceding Take*() call."""
    stats_data = self.stats_store.ReadStats(process_id=self.process_id)
    query = stats_store.StatsStoreDataQuery(stats_data)
    self.assertRaises(RuntimeError, query.In("counter").InTimeRange,
                      rdfvalue.RDFDatetime().FromSecondsFromEpoch(80),
                      rdfvalue.RDFDatetime().FromSecondsFromEpoch(120))
  def testTakeValueUsesPlainValuesToBuildTimeSeries(self):
    """TakeValue() builds a time series from plain metric values."""
    # Initialize and write test data.
    stats.STATS.RegisterCounterMetric("counter")
    stats.STATS.IncrementCounter("counter")
    self.stats_store.WriteStats(
        process_id=self.process_id,
        timestamp=rdfvalue.RDFDatetime().FromSecondsFromEpoch(42),
        sync=True)
    stats.STATS.IncrementCounter("counter")
    self.stats_store.WriteStats(
        process_id=self.process_id,
        timestamp=rdfvalue.RDFDatetime().FromSecondsFromEpoch(100),
        sync=True)
    # Read data back.
    stats_data = self.stats_store.ReadStats(process_id=self.process_id)
    # Get time series generated with TakeValue().
    query = stats_store.StatsStoreDataQuery(stats_data)
    ts = query.In("counter").TakeValue().ts
    self.assertListEqual(list(ts), [1, 2])
    self.assertListEqual(list(ts.index), [pandas.Timestamp(42 * 1e9),
                                          pandas.Timestamp(100 * 1e9)])
  def testTakeValueRaisesIfDistributionIsEncountered(self):
    """TakeValue() rejects distribution-valued metrics."""
    # Initialize and write test data.
    stats.STATS.RegisterEventMetric("events")
    stats.STATS.RecordEvent("events", 42)
    self.stats_store.WriteStats(
        process_id=self.process_id,
        timestamp=rdfvalue.RDFDatetime().FromSecondsFromEpoch(42),
        sync=True)
    # Read data back.
    stats_data = self.stats_store.ReadStats(process_id=self.process_id)
    query = stats_store.StatsStoreDataQuery(stats_data)
    self.assertRaises(ValueError, query.In("events").TakeValue)
  def testTakeDistributionCountUsesDistributionCountsToBuildTimeSeries(self):
    """TakeDistributionCount() uses distribution counts as data points."""
    # Initialize and write test data.
    stats.STATS.RegisterEventMetric("events")
    stats.STATS.RecordEvent("events", 42)
    self.stats_store.WriteStats(
        process_id=self.process_id,
        timestamp=rdfvalue.RDFDatetime().FromSecondsFromEpoch(42),
        sync=True)
    stats.STATS.RecordEvent("events", 43)
    self.stats_store.WriteStats(
        process_id=self.process_id,
        timestamp=rdfvalue.RDFDatetime().FromSecondsFromEpoch(100),
        sync=True)
    # Read data back.
    stats_data = self.stats_store.ReadStats(process_id=self.process_id)
    query = stats_store.StatsStoreDataQuery(stats_data)
    ts = query.In("events").TakeDistributionCount().ts
    self.assertListEqual(list(ts), [1, 2])
    self.assertListEqual(list(ts.index), [pandas.Timestamp(42 * 1e9),
                                          pandas.Timestamp(100 * 1e9)])
  def testTakeDistributionCountRaisesIfPlainValueIsEncountered(self):
    """TakeDistributionCount() rejects plain-valued metrics."""
    # Initialize and write test data.
    stats.STATS.RegisterCounterMetric("counter")
    stats.STATS.IncrementCounter("counter")
    self.stats_store.WriteStats(
        process_id=self.process_id,
        timestamp=rdfvalue.RDFDatetime().FromSecondsFromEpoch(42),
        sync=True)
    # Read data back.
    stats_data = self.stats_store.ReadStats(process_id=self.process_id)
    query = stats_store.StatsStoreDataQuery(stats_data)
    self.assertRaises(ValueError, query.In("counter").TakeDistributionCount)
  def testTakeDistributionSumUsesDistributionSumsToBuildTimeSeries(self):
    """TakeDistributionSum() uses distribution sums as data points."""
    # Initialize and write test data.
    stats.STATS.RegisterEventMetric("events")
    stats.STATS.RecordEvent("events", 42)
    self.stats_store.WriteStats(
        process_id=self.process_id,
        timestamp=rdfvalue.RDFDatetime().FromSecondsFromEpoch(42),
        sync=True)
    stats.STATS.RecordEvent("events", 43)
    self.stats_store.WriteStats(
        process_id=self.process_id,
        timestamp=rdfvalue.RDFDatetime().FromSecondsFromEpoch(100),
        sync=True)
    # Read data back.
    stats_data = self.stats_store.ReadStats(process_id=self.process_id)
    query = stats_store.StatsStoreDataQuery(stats_data)
    ts = query.In("events").TakeDistributionSum().ts
    self.assertListEqual(list(ts), [42, 85])
    self.assertListEqual(list(ts.index), [pandas.Timestamp(42 * 1e9),
                                          pandas.Timestamp(100 * 1e9)])
  def testTakeDistributionSumRaisesIfPlainValueIsEncountered(self):
    """TakeDistributionSum() rejects plain-valued metrics."""
    # Initialize and write test data.
    stats.STATS.RegisterCounterMetric("counter")
    stats.STATS.IncrementCounter("counter")
    self.stats_store.WriteStats(
        process_id=self.process_id,
        timestamp=rdfvalue.RDFDatetime().FromSecondsFromEpoch(42),
        sync=True)
    # Read data back.
    stats_data = self.stats_store.ReadStats(process_id=self.process_id)
    query = stats_store.StatsStoreDataQuery(stats_data)
    self.assertRaises(ValueError, query.In("counter").TakeDistributionSum)
  def testResampleCallResamplesTimeSeries(self):
    """Resample() averages values into fixed-size time buckets."""
    # Initialize and write test data.
    stats.STATS.RegisterCounterMetric("counter")
    stats.STATS.IncrementCounter("counter")
    self.stats_store.WriteStats(
        process_id=self.process_id,
        timestamp=rdfvalue.RDFDatetime().FromSecondsFromEpoch(0),
        sync=True)
    stats.STATS.IncrementCounter("counter")
    self.stats_store.WriteStats(
        process_id=self.process_id,
        timestamp=rdfvalue.RDFDatetime().FromSecondsFromEpoch(15),
        sync=True)
    stats.STATS.IncrementCounter("counter")
    self.stats_store.WriteStats(
        process_id=self.process_id,
        timestamp=rdfvalue.RDFDatetime().FromSecondsFromEpoch(45),
        sync=True)
    # Read data back.
    stats_data = self.stats_store.ReadStats(process_id=self.process_id)
    query = stats_store.StatsStoreDataQuery(stats_data)
    ts = query.In("counter").TakeValue().Resample(
        rdfvalue.Duration("30s")).ts
    self.assertAlmostEqual(ts[0], 1.5)
    self.assertAlmostEqual(ts[1], 3.0)
    self.assertListEqual(list(ts.index), [pandas.Timestamp(0 * 1e9),
                                          pandas.Timestamp(30 * 1e9)])
  def testResampleCallDoesNotFillGaps(self):
    """Resample() leaves NaN buckets where no data points fall."""
    # Initialize and write test data.
    stats.STATS.RegisterCounterMetric("counter")
    stats.STATS.IncrementCounter("counter")
    self.stats_store.WriteStats(
        process_id=self.process_id,
        timestamp=rdfvalue.RDFDatetime().FromSecondsFromEpoch(0),
        sync=True)
    stats.STATS.IncrementCounter("counter")
    self.stats_store.WriteStats(
        process_id=self.process_id,
        timestamp=rdfvalue.RDFDatetime().FromSecondsFromEpoch(75),
        sync=True)
    # Read data back.
    stats_data = self.stats_store.ReadStats(process_id=self.process_id)
    query = stats_store.StatsStoreDataQuery(stats_data)
    ts = query.In("counter").TakeValue().Resample(
        rdfvalue.Duration("30s")).ts
    self.assertAlmostEqual(ts[0], 1.0)
    self.assertTrue(math.isnan(ts[1]))
    self.assertAlmostEqual(ts[2], 2.0)
    self.assertListEqual(list(ts.index), [pandas.Timestamp(0 * 1e9),
                                          pandas.Timestamp(30 * 1e9),
                                          pandas.Timestamp(60 * 1e9)])
  def testResampleRaisesIfAppliedBeforeTakeMethod(self):
    """Resample() requires a preceding Take*() call."""
    stats_data = self.stats_store.ReadStats(process_id=self.process_id)
    query = stats_store.StatsStoreDataQuery(stats_data)
    self.assertRaises(RuntimeError, query.In("counter").Resample,
                      rdfvalue.Duration("30s"))
  def testFillMissingCallFillsGapsInTimeSeries(self):
    """FillMissing() forward-fills gaps up to the given time window."""
    # Initialize and write test data.
    stats.STATS.RegisterCounterMetric("counter")
    stats.STATS.IncrementCounter("counter")
    self.stats_store.WriteStats(
        process_id=self.process_id,
        timestamp=rdfvalue.RDFDatetime().FromSecondsFromEpoch(0),
        sync=True)
    stats.STATS.IncrementCounter("counter")
    self.stats_store.WriteStats(
        process_id=self.process_id,
        timestamp=rdfvalue.RDFDatetime().FromSecondsFromEpoch(120),
        sync=True)
    # Read data back.
    stats_data = self.stats_store.ReadStats(process_id=self.process_id)
    query = stats_store.StatsStoreDataQuery(stats_data)
    ts = query.In("counter").TakeValue().Resample(
        rdfvalue.Duration("30s")).FillMissing(rdfvalue.Duration("60s")).ts
    self.assertAlmostEqual(ts[0], 1.0)
    self.assertAlmostEqual(ts[1], 1.0)
    self.assertAlmostEqual(ts[2], 1.0)
    self.assertTrue(math.isnan(ts[3]))
    self.assertAlmostEqual(ts[4], 2.0)
    self.assertListEqual(list(ts.index), [pandas.Timestamp(0 * 1e9),
                                          pandas.Timestamp(30 * 1e9),
                                          pandas.Timestamp(60 * 1e9),
                                          pandas.Timestamp(90 * 1e9),
                                          pandas.Timestamp(120 * 1e9)])
  def testFillMissingRaisesIfAppliedBeforeTakeMethod(self):
    """FillMissing() requires a preceding Take*() call."""
    stats_data = self.stats_store.ReadStats(process_id=self.process_id)
    query = stats_store.StatsStoreDataQuery(stats_data)
    self.assertRaises(RuntimeError, query.In("counter").FillMissing, 3)
  def testFillMissingRaisesIfTimeWindowIsNotDivisibleBySamplingInterval(self):
    """FillMissing() window must be a multiple of the sampling interval."""
    stats_data = self.stats_store.ReadStats(process_id=self.process_id)
    query = stats_store.StatsStoreDataQuery(stats_data)
    self.assertRaises(RuntimeError, query.In("counter").TakeValue().Resample(
        rdfvalue.Duration("25s")).FillMissing, rdfvalue.Duration("60s"))
  def testAggregateViaSumAggregatesMultipleTimeSeriesIntoOne(self):
    """AggregateViaSum() sums multiple matched series into one."""
    # Initialize and write test data.
    stats.STATS.RegisterCounterMetric("counter")
    stats.STATS.IncrementCounter("counter")
    self.stats_store.WriteStats(
        process_id="pid1",
        timestamp=rdfvalue.RDFDatetime().FromSecondsFromEpoch(0),
        sync=True)
    stats.STATS.IncrementCounter("counter")
    self.stats_store.WriteStats(
        process_id="pid1",
        timestamp=rdfvalue.RDFDatetime().FromSecondsFromEpoch(90),
        sync=True)
    self.stats_store.WriteStats(
        process_id="pid2",
        timestamp=rdfvalue.RDFDatetime().FromSecondsFromEpoch(90),
        sync=True)
    stats_data = self.stats_store.MultiReadStats(process_ids=["pid1", "pid2"])
    query = stats_store.StatsStoreDataQuery(stats_data)
    ts = query.In("pid.*").In("counter").TakeValue().Resample(
        rdfvalue.Duration("30s")).FillMissing(
            rdfvalue.Duration("10m")).AggregateViaSum().ts
    # We expect 2 time series in the query:
    # 1970-01-01 00:00:00    1
    # 1970-01-01 00:00:30    1
    # 1970-01-01 00:01:00    1
    # 1970-01-01 00:01:30    2
    #
    # and:
    # 1970-01-01 00:01:30    2
    #
    # Therefore we expect the sum to look like:
    # 1970-01-01 00:00:00    1
    # 1970-01-01 00:00:30    1
    # 1970-01-01 00:01:00    1
    # 1970-01-01 00:01:30    4
    self.assertAlmostEqual(ts[0], 1)
    self.assertAlmostEqual(ts[1], 1)
    self.assertAlmostEqual(ts[2], 1)
    self.assertAlmostEqual(ts[3], 4)
    self.assertListEqual(list(ts.index), [pandas.Timestamp(0 * 1e9),
                                          pandas.Timestamp(30 * 1e9),
                                          pandas.Timestamp(60 * 1e9),
                                          pandas.Timestamp(90 * 1e9)])
  def testSeriesCountReturnsNumberOfDataSeriesInCurrentQuery(self):
    """SeriesCount() reports the number of series matched so far."""
    # Initialize and write test data.
    stats.STATS.RegisterCounterMetric("counter")
    stats.STATS.IncrementCounter("counter")
    self.stats_store.WriteStats(
        process_id="pid1",
        timestamp=rdfvalue.RDFDatetime().FromSecondsFromEpoch(0),
        sync=True)
    self.stats_store.WriteStats(
        process_id="pid2",
        timestamp=rdfvalue.RDFDatetime().FromSecondsFromEpoch(90),
        sync=True)
    stats_data = self.stats_store.MultiReadStats(process_ids=["pid1", "pid2"])
    query = stats_store.StatsStoreDataQuery(stats_data)
    self.assertEqual(query.In("pid.*").SeriesCount(), 2)
    query = stats_store.StatsStoreDataQuery(stats_data)
    self.assertEqual(query.In("pid1").In("counter").SeriesCount(), 1)
    query = stats_store.StatsStoreDataQuery(stats_data)
    self.assertEqual(query.In("pid.*").In("counter").SeriesCount(), 2)
  def testRateAppliesRateRollingFunctionToSingleTimeSerie(self):
    """Rate() computes a rolling rate of change over the given window."""
    # Initialize and write test data.
    stats.STATS.RegisterCounterMetric("counter")
    for i in range(5):
      for _ in range(i):
        stats.STATS.IncrementCounter("counter")
      self.stats_store.WriteStats(
          process_id=self.process_id,
          timestamp=rdfvalue.RDFDatetime().FromSecondsFromEpoch(10 * i),
          sync=True)
    # Read data back.
    stats_data = self.stats_store.ReadStats(process_id=self.process_id)
    # Get time series generated with TakeValue().
    query = stats_store.StatsStoreDataQuery(stats_data)
    ts = query.In("counter").TakeValue().Resample(
        rdfvalue.Duration("10s")).Rate(rdfvalue.Duration("30s")).ts
    # We expect following time serie:
    # 1970-01-01 00:00:00    0
    # 1970-01-01 00:00:10    1
    # 1970-01-01 00:00:20    3
    # 1970-01-01 00:00:30    6
    # 1970-01-01 00:00:40    10
    #
    # Therefore we expect the following after applying Rate():
    # 1970-01-01 00:00:30    0.2
    # 1970-01-01 00:00:40    0.3
    self.assertAlmostEqual(ts[0], 0.2)
    self.assertAlmostEqual(ts[1], 0.3)
    self.assertListEqual(list(ts.index), [pandas.Timestamp(30 * 1e9),
                                          pandas.Timestamp(40 * 1e9)])
  def testScaleAppliesScaleFunctionToSingleTimeSerie(self):
    """Scale(k) multiplies every value of the series by k."""
    # Initialize and write test data: counter is 1 at t=42s, 2 at t=100s.
    stats.STATS.RegisterCounterMetric("counter")
    stats.STATS.IncrementCounter("counter")
    self.stats_store.WriteStats(
        process_id=self.process_id,
        timestamp=rdfvalue.RDFDatetime().FromSecondsFromEpoch(42),
        sync=True)
    stats.STATS.IncrementCounter("counter")
    self.stats_store.WriteStats(
        process_id=self.process_id,
        timestamp=rdfvalue.RDFDatetime().FromSecondsFromEpoch(100),
        sync=True)
    # Read data back.
    stats_data = self.stats_store.ReadStats(process_id=self.process_id)
    # Get time series generated with TakeValue().
    query = stats_store.StatsStoreDataQuery(stats_data)
    ts = query.In("counter").TakeValue().Scale(3).ts
    # Values [1, 2] scaled by 3; timestamps are preserved.
    self.assertListEqual(list(ts), [3, 6])
    self.assertListEqual(list(ts.index), [pandas.Timestamp(42 * 1e9),
                                          pandas.Timestamp(100 * 1e9)])
  def testMeanReturnsZeroIfQueryHasNoTimeSeries(self):
    """Mean() of an empty query result is 0 rather than an error."""
    # Read data back.  Nothing was ever written for this process id.
    stats_data = self.stats_store.ReadStats(process_id=self.process_id)
    # Get time series generated with TakeValue().
    query = stats_store.StatsStoreDataQuery(stats_data)
    self.assertEqual(query.In("counter").TakeValue().Mean(), 0)
  def testMeanRaisesIfCalledOnMultipleTimeSeries(self):
    """Mean() is only defined for a single series; multiple -> RuntimeError."""
    # Initialize and write test data for two process ids so that the
    # "pid.*" filter matches two series.
    stats.STATS.RegisterCounterMetric("counter")
    stats.STATS.IncrementCounter("counter")
    self.stats_store.WriteStats(
        process_id="pid1",
        timestamp=rdfvalue.RDFDatetime().FromSecondsFromEpoch(0),
        sync=True)
    self.stats_store.WriteStats(
        process_id="pid2",
        timestamp=rdfvalue.RDFDatetime().FromSecondsFromEpoch(90),
        sync=True)
    stats_data = self.stats_store.MultiReadStats(process_ids=["pid1", "pid2"])
    query = stats_store.StatsStoreDataQuery(stats_data)
    self.assertRaises(RuntimeError,
                      query.In("pid.*").In("counter").TakeValue().Mean)
  def testMeanReducesTimeSerieToSingleNumber(self):
    """Mean() reduces the whole series to its arithmetic mean."""
    # Initialize and write test data: counter values 1..5 at 10s intervals,
    # whose mean is 3.
    stats.STATS.RegisterCounterMetric("counter")
    for i in range(5):
      stats.STATS.IncrementCounter("counter")
      self.stats_store.WriteStats(
          process_id=self.process_id,
          timestamp=rdfvalue.RDFDatetime().FromSecondsFromEpoch(10 * i),
          sync=True)
    # Read data back.
    stats_data = self.stats_store.ReadStats(process_id=self.process_id)
    # Get time series generated with TakeValue().
    query = stats_store.StatsStoreDataQuery(stats_data)
    self.assertAlmostEqual(query.In("counter").TakeValue().Mean(), 3)
def main(argv):
  # Delegate to the GRR test runner.
  test_lib.main(argv)

# Standard GRR test entry point.
if __name__ == "__main__":
  flags.StartMain(main)
| pchaigno/grreat | lib/aff4_objects/stats_store_test.py | Python | apache-2.0 | 40,610 |
import unittest
def wrap(s, max_len):
    """Split *s* at the last space seen at or before index *max_len*.

    Returns a ``(head, tail)`` pair; the space at the split point is
    consumed.  If *s* fits within *max_len* characters, the tail is
    the empty string.
    """
    break_at = 0
    for idx, ch in enumerate(s):
        if ch == ' ':
            break_at = idx
        if idx == max_len:
            return s[:break_at], s[break_at + 1:]
    return s, ''
def wrap_to_lines(s, max_len):
    """Yield successive wrapped lines of *s*, splitting via wrap()."""
    remaining = s
    while remaining:
        line, remaining = wrap(remaining, max_len)
        yield line
def _prop(obj, prop):
if hasattr(obj, prop):
return getattr(obj, prop)
else:
return '<%s>' % prop
def pronoun_sub(text, who=None, **kwargs):
    """Expand %-codes in *text* using pronoun/name attributes.

    Codes: %s/%o/%p/%r -> subject/object/possessive/reflexive pronoun of
    *who*; %n -> *who*'s name; %d/%i/%t -> name of the ``dobj``/``iobj``/
    ``this`` keyword argument; %% -> a literal '%'.  Unknown codes are
    dropped.  Missing attributes render as '<attr>' via _prop().
    """
    if len(text) <= 1:
        return text
    # Guarantee the optional object slots exist so unset ones fall back to
    # _prop(None, ...) placeholders.
    for k in ['player', 'dobj', 'iobj', 'this']:
        if not k in kwargs:
            kwargs[k] = None
    if who is None:
        who = kwargs['player']
    output = ''
    i = 0
    while i < len(text):
        if text[i] == '%':
            if i + 1 >= len(text):
                # BUG FIX: a lone trailing '%' used to raise IndexError on
                # text[i + 1]; emit it literally instead.
                output += '%'
                break
            code = text[i + 1]
            if code == '%':
                output += '%'
            elif code == 's':
                output += _prop(who, 'ps')
            elif code == 'o':
                output += _prop(who, 'po')
            elif code == 'p':
                output += _prop(who, 'pp')
            elif code == 'r':
                output += _prop(who, 'pr')
            elif code == 'n':
                output += _prop(who, 'name')
            elif code == 'd':
                output += _prop(kwargs['dobj'], 'name')
            elif code == 'i':
                output += _prop(kwargs['iobj'], 'name')
            elif code == 't':
                output += _prop(kwargs['this'], 'name')
            i += 1  # consume the code character as well as the '%'
        else:
            output += text[i]
        i += 1
    return output
def english_list(what, empty='nothing', and_=' and ', sep=', ', penum=','):
    """Join *what* into an English enumeration, e.g. '1, 2, and 3'.

    *empty* is returned for an empty sequence; *penum* is the serial
    (Oxford) comma placed before the final conjunction.
    """
    items = [str(x) for x in what]
    if not items:
        return empty
    if len(items) == 1:
        return items[0]
    if len(items) == 2:
        return items[0] + and_ + items[1]
    return sep.join(items[:-1]) + penum + and_ + items[-1]
class Foo:
    # Test fixture exposing the pronoun/name attributes that
    # pronoun_sub() reads via _prop().
    def __init__(self):
        self.ps = 'he'        # subject pronoun (%s)
        self.po = 'him'       # object pronoun (%o)
        self.pp = 'his'       # possessive pronoun (%p)
        self.pr = 'himself'   # reflexive pronoun (%r)
        self.name = 'Foo'     # display name (%n)
        self.bar = 'quux'     # extra attribute, not read by any %-code
        self.plural = False   # not read by pronoun_sub()
class TestCase(unittest.TestCase):
    # Unit tests for the helpers defined above.
    def test_substition(self):
        # Each %-code expands to the matching attribute of `who`; %d has
        # no dobj bound, so _prop() yields the '<name>' placeholder.
        who = Foo()
        cases = [
            ('_%s_', '_he_'),
            ('_%o_', '_him_'),
            ('_%p_', '_his_'),
            ('_%r_', '_himself_'),
            ('_%n_', '_Foo_'),
            ('_%d_', '_<name>_') ]
        for text, expected in cases:
            actual = pronoun_sub(text, who)
            self.assertEqual(actual, expected)
    def test_wrap(self):
        # The split point is the last space at or before max_len.
        s = '1 3 5 7 9'
        self.assertEqual(wrap(s, 1), ('1', '3 5 7 9'))
        self.assertEqual(wrap(s, 2), ('1', '3 5 7 9'))
        self.assertEqual(wrap(s, 3), ('1 3', '5 7 9'))
        self.assertEqual(wrap(s, 4), ('1 3', '5 7 9'))
    def test_english_list(self):
        # Covers the empty, pair, and serial-comma cases.
        cases = [
            ([], 'nothing'),
            ([1,2], '1 and 2'),
            ([1,2,3], '1, 2, and 3'),
            ([1,2,3,4], '1, 2, 3, and 4') ]
        for xs, expected in cases:
            actual = english_list(xs)
            self.assertEqual(actual, expected)
if __name__ == '__main__':
unittest.main() | basp/neko | string_utils.py | Python | mit | 3,313 |
from django.shortcuts import render
#using generics
from django.views.generic import TemplateView
# Create your views here.
# Static pages: each view only renders its template, so the generic
# TemplateView is sufficient.
# NOTE(review): names like `port1View` break PEP 8 class naming
# (CamelCase), but renaming them would break the URLconf that
# references these classes.
class BaseView(TemplateView):
    template_name = 'base.html'
class port1View(TemplateView):
    template_name = 'port1.html'
class port2View(TemplateView):
    template_name = 'port2.html'
class port3View(TemplateView):
    template_name = 'port3.html'
class port4View(TemplateView):
    template_name = 'port4.html'
class port5View(TemplateView):
    template_name = 'port5.html'
class port6View(TemplateView):
template_name = 'port6.html' | nandosarracino/mymainsite | mainsite/views.py | Python | mit | 557 |
# -*- encoding: utf-8 -*-
def select_all_but_last_logical_tie_in_pitched_runs(expr=None):
    r'''Selects all but last logical tie in pitched runs.

    ..  container:: example

        ::

            >>> selector = selectortools.select_all_but_last_logical_tie_in_pitched_runs()
            >>> print(format(selector))
            selectortools.Selector(
                callbacks=(
                    selectortools.PrototypeSelectorCallback(
                        prototype=scoretools.Leaf,
                        ),
                    selectortools.RunSelectorCallback(
                        prototype=(
                            scoretools.Note,
                            scoretools.Chord,
                            ),
                        ),
                    selectortools.LogicalTieSelectorCallback(
                        flatten=False,
                        pitched=False,
                        trivial=True,
                        ),
                    selectortools.SliceSelectorCallback(
                        stop=-1,
                        apply_to_each=True,
                        ),
                    selectortools.FlattenSelectorCallback(
                        depth=1,
                        ),
                    ),
                )

        ::

            >>> staff = Staff("c' d' ~ d' e' r f' g' r a' b' ~ b' c''")
            >>> tuplet = Tuplet((2, 3), staff[2:5])
            >>> tuplet = Tuplet((2, 3), staff[5:8])
            >>> print(format(staff))
            \new Staff {
                c'4
                d'4 ~
                \times 2/3 {
                    d'4
                    e'4
                    r4
                }
                f'4
                g'4
                \times 2/3 {
                    r4
                    a'4
                    b'4 ~
                }
                b'4
                c''4
            }

        ::

            >>> for x in selector(staff):
            ...     x
            ...
            LogicalTie(Note("c'4"),)
            LogicalTie(Note("d'4"), Note("d'4"))
            LogicalTie(Note("f'4"),)
            LogicalTie(Note("a'4"),)
            LogicalTie(Note("b'4"), Note("b'4"))

    '''
    from abjad.tools import selectortools
    # Select runs of notes/chords, group each run into logical ties, drop
    # the final tie of every run, then flatten one level so the ties
    # themselves are yielded.
    selector = selectortools.select_pitched_runs()
    selector = selector.by_logical_tie(flatten=False)
    selector = selector[:-1]
    selector = selector.flatten(depth=1)
    if expr is None:
        # With no expression, hand back the selector for later application.
        return selector
return selector(expr) | mscuthbert/abjad | abjad/tools/selectortools/select_all_but_last_logical_tie_in_pitched_runs.py | Python | gpl-3.0 | 2,516 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
import gps
#from time import sleep
# Wait until gpsd reports a fix (fix.mode > 1) for more than three
# consecutive reads, then print the number of satellites in use.
session = gps.gps()
session.read()
session.stream()
cycles = 0
while 1:
    # NOTE(review): read() presumably blocks until the next gpsd report --
    # confirm against the installed gps module version.
    session.read()
    #print 'Fix mode ' , session.fix.mode
    #print 'Fix ' , ("NO_FIX","FIX","DGPS_FIX")[session.fix.mode - 1]
    #print 'Satellites in use: ', session.satellites_used
    if (session.fix.mode > 1):
        cycles = cycles + 1
        if cycles > 3: # 1 GSV per 4 RMC on Sirf
            print session.satellites_used
            break
    else:
        # Fix lost: restart the stability count.
        cycles = 0
    #sleep(1)
| lyusupov/Argus | src/init/GPSWaitFix.py | Python | gpl-2.0 | 572 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2010, Monash e-Research Centre
# (Monash University, Australia)
# Copyright (c) 2010, VeRSI Consortium
# (Victorian eResearch Strategic Initiative, Australia)
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the VeRSI, the VeRSI Consortium members, nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE7
# DISCLAIMED. IN NO EVENT SHALL THE REGENTS AND CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
"""
test_models.py
http://docs.djangoproject.com/en/dev/topics/testing/
.. moduleauthor:: Russell Sim <russell.sim@monash.edu>
"""
from compare import ensure
from django.test import TestCase
from nose.plugins.skip import SkipTest
from tardis.tardis_portal.forms import RightsForm
from tardis.tardis_portal.models import Experiment, License
class RightsFormTestCase(TestCase):
    def setUp(self):
        # Licence fixtures covering the three cases the tests exercise:
        # distribution-restricted, distribution-permitted, and inactive.
        self.restrictiveLicense = License(name="Restrictive License",
                                          url="http://example.test/rl",
                                          internal_description="Description...",
                                          allows_distribution=False)
        self.restrictiveLicense.save()
        self.permissiveLicense = License(name="Permissive License",
                                         url="http://example.test/pl",
                                         internal_description="Description...",
                                         allows_distribution=True)
        self.permissiveLicense.save()
        self.inactiveLicense = License(name="Inactive License",
                                       url="http://example.test/ial",
                                       internal_description="Description...",
                                       allows_distribution=True,
                                       is_active=False)
        self.inactiveLicense.save()
    def test_ensures_suitable_license(self):
        """RightsForm must pair the public-access level with a compatible licence."""
        suitableCombinations = (
            (Experiment.PUBLIC_ACCESS_NONE, ''),
            (Experiment.PUBLIC_ACCESS_METADATA, ''),
            (Experiment.PUBLIC_ACCESS_NONE, self.restrictiveLicense.id),
            (Experiment.PUBLIC_ACCESS_METADATA, self.restrictiveLicense.id),
            (Experiment.PUBLIC_ACCESS_FULL, self.permissiveLicense.id),
        )
        unsuitableCombinations = (
            (Experiment.PUBLIC_ACCESS_NONE, self.permissiveLicense.id),
            (Experiment.PUBLIC_ACCESS_METADATA, self.permissiveLicense.id),
            (Experiment.PUBLIC_ACCESS_METADATA, self.inactiveLicense.id),
            (Experiment.PUBLIC_ACCESS_FULL, self.inactiveLicense.id),
            (Experiment.PUBLIC_ACCESS_FULL, ''),
            (Experiment.PUBLIC_ACCESS_FULL, self.restrictiveLicense.id),
        )
        # Check we accept valid input
        for public_access, license_id in suitableCombinations:
            print "Suitable combination: %d %s" % (public_access, license_id)
            data = {'public_access': str(public_access),
                    'license': license_id }
            form = RightsForm(data)
            ensure(form.is_valid(), True, form.errors)
        # Check we reject invalid input
        for public_access, license_id in unsuitableCombinations:
            print "Unsuitable combination: %d %s" % (public_access, license_id)
            data = {'public_access': str(public_access),
                    'license': license_id }
            form = RightsForm(data)
            ensure(form.is_valid(), False)
def test_needs_confirmation(self):
suitable_data = {'public_access': str(Experiment.PUBLIC_ACCESS_NONE),
'license': ''}
| pansapiens/mytardis | tardis/tardis_portal/tests/test_forms.py | Python | bsd-3-clause | 4,976 |
import numpy as np
def order_to_match(array, reference):
    """
    Return indices ``idx`` such that ``array[idx]`` equals ``reference``.

    Both inputs should contain the same values (e.g. be permutations of
    one another); ties are matched in argsort order.
    """
    sort_order = np.argsort(array)                        # puts ``array`` in sorted order
    reference_ranks = np.argsort(np.argsort(reference))   # rank of each reference element
    return sort_order[reference_ranks]
| astrofrog/sedfitter | sedfitter/utils/misc.py | Python | bsd-2-clause | 236 |
# -*- coding: utf-8 -*-
from __future__ import with_statement
from django.forms import *
from django.test import TestCase
from django.utils.translation import ugettext_lazy, override
from regressiontests.forms.models import Cheese
class FormsRegressionsTestCase(TestCase):
    """Regression tests guarding against recurrences of historical form bugs
    (ticket numbers referenced in the individual test names)."""
    def test_class(self):
        # Tests to prevent against recurrences of earlier bugs.
        extra_attrs = {'class': 'special'}
        class TestForm(Form):
            f1 = CharField(max_length=10, widget=TextInput(attrs=extra_attrs))
            f2 = CharField(widget=TextInput(attrs=extra_attrs))
        self.assertHTMLEqual(TestForm(auto_id=False).as_p(), u'<p>F1: <input type="text" class="special" name="f1" maxlength="10" /></p>\n<p>F2: <input type="text" class="special" name="f2" /></p>')
    def test_regression_3600(self):
        # Tests for form i18n #
        # There were some problems with form translations in #3600
        class SomeForm(Form):
            username = CharField(max_length=10, label=ugettext_lazy('Username'))
        f = SomeForm()
        self.assertHTMLEqual(f.as_p(), '<p><label for="id_username">Username:</label> <input id="id_username" type="text" name="username" maxlength="10" /></p>')
        # Translations are done at rendering time, so multi-lingual apps can define forms)
        with override('de'):
            self.assertHTMLEqual(f.as_p(), '<p><label for="id_username">Benutzername:</label> <input id="id_username" type="text" name="username" maxlength="10" /></p>')
        with override('pl', deactivate=True):
            self.assertHTMLEqual(f.as_p(), u'<p><label for="id_username">Nazwa u\u017cytkownika:</label> <input id="id_username" type="text" name="username" maxlength="10" /></p>')
    def test_regression_5216(self):
        # There was some problems with form translations in #5216
        class SomeForm(Form):
            field_1 = CharField(max_length=10, label=ugettext_lazy('field_1'))
            field_2 = CharField(max_length=10, label=ugettext_lazy('field_2'), widget=TextInput(attrs={'id': 'field_2_id'}))
        f = SomeForm()
        self.assertHTMLEqual(f['field_1'].label_tag(), '<label for="id_field_1">field_1</label>')
        self.assertHTMLEqual(f['field_2'].label_tag(), '<label for="field_2_id">field_2</label>')
        # Unicode decoding problems...
        GENDERS = ((u'\xc5', u'En tied\xe4'), (u'\xf8', u'Mies'), (u'\xdf', u'Nainen'))
        class SomeForm(Form):
            somechoice = ChoiceField(choices=GENDERS, widget=RadioSelect(), label=u'\xc5\xf8\xdf')
        f = SomeForm()
        self.assertHTMLEqual(f.as_p(), u'<p><label for="id_somechoice_0">\xc5\xf8\xdf:</label> <ul>\n<li><label for="id_somechoice_0"><input type="radio" id="id_somechoice_0" value="\xc5" name="somechoice" /> En tied\xe4</label></li>\n<li><label for="id_somechoice_1"><input type="radio" id="id_somechoice_1" value="\xf8" name="somechoice" /> Mies</label></li>\n<li><label for="id_somechoice_2"><input type="radio" id="id_somechoice_2" value="\xdf" name="somechoice" /> Nainen</label></li>\n</ul></p>')
        # Testing choice validation with UTF-8 bytestrings as input (these are the
        # Russian abbreviations "мес." and "шт.".
        UNITS = (('\xd0\xbc\xd0\xb5\xd1\x81.', '\xd0\xbc\xd0\xb5\xd1\x81.'), ('\xd1\x88\xd1\x82.', '\xd1\x88\xd1\x82.'))
        f = ChoiceField(choices=UNITS)
        self.assertEqual(f.clean(u'\u0448\u0442.'), u'\u0448\u0442.')
        self.assertEqual(f.clean('\xd1\x88\xd1\x82.'), u'\u0448\u0442.')
        # Translated error messages used to be buggy.
        with override('ru'):
            f = SomeForm({})
            self.assertHTMLEqual(f.as_p(), u'<ul class="errorlist"><li>\u041e\u0431\u044f\u0437\u0430\u0442\u0435\u043b\u044c\u043d\u043e\u0435 \u043f\u043e\u043b\u0435.</li></ul>\n<p><label for="id_somechoice_0">\xc5\xf8\xdf:</label> <ul>\n<li><label for="id_somechoice_0"><input type="radio" id="id_somechoice_0" value="\xc5" name="somechoice" /> En tied\xe4</label></li>\n<li><label for="id_somechoice_1"><input type="radio" id="id_somechoice_1" value="\xf8" name="somechoice" /> Mies</label></li>\n<li><label for="id_somechoice_2"><input type="radio" id="id_somechoice_2" value="\xdf" name="somechoice" /> Nainen</label></li>\n</ul></p>')
        # Deep copying translated text shouldn't raise an error)
        from django.utils.translation import gettext_lazy
        class CopyForm(Form):
            degree = IntegerField(widget=Select(choices=((1, gettext_lazy('test')),)))
        f = CopyForm()
    def test_misc(self):
        # There once was a problem with Form fields called "data". Let's make sure that
        # doesn't come back.
        class DataForm(Form):
            data = CharField(max_length=10)
        f = DataForm({'data': 'xyzzy'})
        self.assertTrue(f.is_valid())
        self.assertEqual(f.cleaned_data, {'data': u'xyzzy'})
        # A form with *only* hidden fields that has errors is going to be very unusual.
        class HiddenForm(Form):
            data = IntegerField(widget=HiddenInput)
        f = HiddenForm({})
        self.assertHTMLEqual(f.as_p(), u'<ul class="errorlist"><li>(Hidden field data) This field is required.</li></ul>\n<p> <input type="hidden" name="data" id="id_data" /></p>')
        self.assertHTMLEqual(f.as_table(), u'<tr><td colspan="2"><ul class="errorlist"><li>(Hidden field data) This field is required.</li></ul><input type="hidden" name="data" id="id_data" /></td></tr>')
    def test_xss_error_messages(self):
        ###################################################
        # Tests for XSS vulnerabilities in error messages #
        ###################################################
        # The forms layer doesn't escape input values directly because error messages
        # might be presented in non-HTML contexts. Instead, the message is just marked
        # for escaping by the template engine. So we'll need to construct a little
        # silly template to trigger the escaping.
        from django.template import Template, Context
        t = Template('{{ form.errors }}')
        class SomeForm(Form):
            field = ChoiceField(choices=[('one', 'One')])
        f = SomeForm({'field': '<script>'})
        self.assertHTMLEqual(t.render(Context({'form': f})), u'<ul class="errorlist"><li>field<ul class="errorlist"><li>Select a valid choice. &lt;script&gt; is not one of the available choices.</li></ul></li></ul>')
        class SomeForm(Form):
            field = MultipleChoiceField(choices=[('one', 'One')])
        f = SomeForm({'field': ['<script>']})
        self.assertHTMLEqual(t.render(Context({'form': f})), u'<ul class="errorlist"><li>field<ul class="errorlist"><li>Select a valid choice. &lt;script&gt; is not one of the available choices.</li></ul></li></ul>')
        from regressiontests.forms.models import ChoiceModel
        class SomeForm(Form):
            field = ModelMultipleChoiceField(ChoiceModel.objects.all())
        f = SomeForm({'field': ['<script>']})
        self.assertHTMLEqual(t.render(Context({'form': f})), u'<ul class="errorlist"><li>field<ul class="errorlist"><li>"&lt;script&gt;" is not a valid value for a primary key.</li></ul></li></ul>')
    def test_regression_14234(self):
        """
        Re-cleaning an instance that was added via a ModelForm should not raise
        a pk uniqueness error.
        """
        class CheeseForm(ModelForm):
            class Meta:
                model = Cheese
        form = CheeseForm({
            'name': 'Brie',
        })
        self.assertTrue(form.is_valid())
        obj = form.save()
        obj.name = 'Camembert'
        obj.full_clean()
| LethusTI/supportcenter | vendor/django/tests/regressiontests/forms/tests/regressions.py | Python | gpl-3.0 | 7,700 |
#!/usr/bin/env python2.7
# Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Build and upload docker images to Google Container Registry per matrix."""
from __future__ import print_function
import argparse
import atexit
import multiprocessing
import os
import shutil
import subprocess
import sys
import tempfile
# Langauage Runtime Matrix
import client_matrix
python_util_dir = os.path.abspath(os.path.join(
os.path.dirname(__file__), '../run_tests/python_utils'))
sys.path.append(python_util_dir)
import dockerjob
import jobset
# Module-level configuration: builder script, language/release matrices and
# the CLI for selecting what to build.
_IMAGE_BUILDER = 'tools/run_tests/dockerize/build_interop_image.sh'
_LANGUAGES = client_matrix.LANG_RUNTIME_MATRIX.keys()
# All gRPC release tags, flattened, deduped and sorted.
_RELEASES = sorted(list(set(
    i for l in client_matrix.LANG_RELEASE_MATRIX.values() for i in l)))
# Destination directory inside docker image to keep extra info from build time.
_BUILD_INFO = '/var/local/build_info'
argp = argparse.ArgumentParser(description='Run interop tests.')
argp.add_argument('--gcr_path',
                  default='gcr.io/grpc-testing',
                  help='Path of docker images in Google Container Registry')
argp.add_argument('--release',
                  default='master',
                  choices=['all', 'master'] + _RELEASES,
                  help='github commit tag to checkout. When building all '
                  'releases defined in client_matrix.py, use "all". Valid only '
                  'with --git_checkout.')
argp.add_argument('-l', '--language',
                  choices=['all'] + sorted(_LANGUAGES),
                  nargs='+',
                  default=['all'],
                  help='Test languages to build docker images for.')
argp.add_argument('--git_checkout',
                  action='store_true',
                  help='Use a separate git clone tree for building grpc stack. '
                  'Required when using --release flag. By default, current'
                  'tree and the sibling will be used for building grpc stack.')
argp.add_argument('--git_checkout_root',
                  default='/export/hda3/tmp/grpc_matrix',
                  help='Directory under which grpc-go/java/main repo will be '
                  'cloned. Valid only with --git_checkout.')
argp.add_argument('--keep',
                  action='store_true',
                  help='keep the created local images after uploading to GCR')
args = argp.parse_args()
def add_files_to_image(image, with_files, label=None):
  """Add files to a docker image.

  image: docker image name, i.e. grpc_interop_java:26328ad8
  with_files: additional files to include in the docker image.
  label: label string to attach to the image.
  """
  tag_idx = image.find(':')
  if tag_idx == -1:
    jobset.message('FAILED', 'invalid docker image %s' % image, do_newline=True)
    sys.exit(1)
  # Re-tag the original image (trailing '_') so the rebuilt image can take
  # over the original name.
  orig_tag = '%s_' % image
  subprocess.check_output(['docker', 'tag', image, orig_tag])
  lines = ['FROM ' + orig_tag]
  if label:
    lines.append('LABEL %s' % label)
  temp_dir = tempfile.mkdtemp()
  atexit.register(lambda: subprocess.call(['rm', '-rf', temp_dir]))
  # Copy with_files inside the tmp directory, which will be the docker build
  # context.
  for f in with_files:
    shutil.copy(f, temp_dir)
    lines.append('COPY %s %s/' % (os.path.basename(f), _BUILD_INFO))
  # Create a Dockerfile.
  with open(os.path.join(temp_dir, 'Dockerfile'), 'w') as f:
    f.write('\n'.join(lines))
  jobset.message('START', 'Repackaging %s' % image, do_newline=True)
  build_cmd = ['docker', 'build', '--rm', '--tag', image, temp_dir]
  subprocess.check_output(build_cmd)
  # The intermediate '<image>_' tag is no longer needed.
  dockerjob.remove_image(orig_tag, skip_nonexistent=True)
def build_image_jobspec(runtime, env, gcr_tag):
  """Build interop docker image for a language with runtime.

  runtime: a <lang><version> string, for example go1.8.
  env: dictionary of env to passed to the build script.
  gcr_tag: the tag for the docker image (i.e. v1.3.0).
  """
  basename = 'grpc_interop_%s' % runtime
  tag = '%s/%s:%s' % (args.gcr_path, basename, gcr_tag)
  build_env = {
      'INTEROP_IMAGE': tag,
      'BASE_NAME': basename,
      'TTY_FLAG': '-t'
  }
  build_env.update(env)
  build_job = jobset.JobSpec(
      cmdline=[_IMAGE_BUILDER],
      environ=build_env,
      shortname='build_docker_%s' % runtime,
      timeout_seconds=30*60)
  # Attach the full GCR tag so callers can collect and push the image later.
  build_job.tag = tag
  return build_job
def build_all_images_for_lang(lang):
  """Build all docker images for a language across releases and runtimes."""
  if not args.git_checkout:
    # Without a separate git checkout we can only build from the current tree.
    if args.release != 'master':
      print('WARNING: --release is set but will be ignored\n')
    releases = ['master']
  else:
    if args.release == 'all':
      releases = client_matrix.LANG_RELEASE_MATRIX[lang]
    else:
      # Build a particular release.
      if args.release not in ['master'] + client_matrix.LANG_RELEASE_MATRIX[lang]:
        jobset.message('SKIPPED',
                       '%s for %s is not defined' % (args.release, lang),
                       do_newline=True)
        return []
      releases = [args.release]
  images = []
  for release in releases:
    images += build_all_images_for_release(lang, release)
  jobset.message('SUCCESS',
                 'All docker images built for %s at %s.' % (lang, releases),
                 do_newline=True)
  return images
def build_all_images_for_release(lang, release):
  """Build all docker images for a release across all runtimes."""
  docker_images = []
  build_jobs = []
  env = {}
  # If we not using current tree or the sibling for grpc stack, do checkout.
  if args.git_checkout:
    stack_base = checkout_grpc_stack(lang, release)
    var ={'go': 'GRPC_GO_ROOT', 'java': 'GRPC_JAVA_ROOT'}.get(lang, 'GRPC_ROOT')
    env[var] = stack_base
  for runtime in client_matrix.LANG_RUNTIME_MATRIX[lang]:
    job = build_image_jobspec(runtime, env, release)
    docker_images.append(job.tag)
    build_jobs.append(job)
  jobset.message('START', 'Building interop docker images.', do_newline=True)
  print('Jobs to run: \n%s\n' % '\n'.join(str(j) for j in build_jobs))
  num_failures, _ = jobset.run(
      build_jobs, newline_on_success=True, maxjobs=multiprocessing.cpu_count())
  if num_failures:
    jobset.message('FAILED', 'Failed to build interop docker images.',
                   do_newline=True)
    # Queue the (possibly partially-built) images for cleanup on exit.
    docker_images_cleanup.extend(docker_images)
    sys.exit(1)
  jobset.message('SUCCESS',
                 'All docker images built for %s at %s.' % (lang, release),
                 do_newline=True)
  if release != 'master':
    # NOTE(review): stack_base is only bound when --git_checkout is set.
    # build_all_images_for_lang guarantees release != 'master' implies
    # --git_checkout, but a direct caller could hit UnboundLocalError here.
    commit_log = os.path.join(stack_base, 'commit_log')
    if os.path.exists(commit_log):
      for image in docker_images:
        add_files_to_image(image, [commit_log], 'release=%s' % release)
  return docker_images
def cleanup():
  # Remove images created during this run unless --keep was given.
  if not args.keep:
    for image in docker_images_cleanup:
      dockerjob.remove_image(image, skip_nonexistent=True)

# Images appended here are deleted at interpreter exit via atexit.
docker_images_cleanup = []
atexit.register(cleanup)
def checkout_grpc_stack(lang, release):
  """Invokes 'git check' for the lang/release and returns directory created."""
  assert args.git_checkout and args.git_checkout_root
  if not os.path.exists(args.git_checkout_root):
    os.makedirs(args.git_checkout_root)
  repo = client_matrix.get_github_repo(lang)
  # Get the subdir name part of repo
  # For example, 'git@github.com:grpc/grpc-go.git' should use 'grpc-go'.
  repo_dir = os.path.splitext(os.path.basename(repo))[0]
  stack_base = os.path.join(args.git_checkout_root, repo_dir)
  # Assume the directory is reusable for git checkout: clone only if absent.
  if not os.path.exists(stack_base):
    subprocess.check_call(['git', 'clone', '--recursive', repo],
                          cwd=os.path.dirname(stack_base))
  # git checkout.
  jobset.message('START', 'git checkout %s from %s' % (release, stack_base),
                 do_newline=True)
  # We should NEVER do checkout on current tree !!!
  assert not os.path.dirname(__file__).startswith(stack_base)
  output = subprocess.check_output(
      ['git', 'checkout', release], cwd=stack_base, stderr=subprocess.STDOUT)
  commit_log = subprocess.check_output(['git', 'log', '-1'], cwd=stack_base)
  jobset.message('SUCCESS', 'git checkout', output + commit_log, do_newline=True)
  # Write git log to commit_log so it can be packaged with the docker image.
  with open(os.path.join(stack_base, 'commit_log'), 'w') as f:
    f.write(commit_log)
  return stack_base
# Entry point: build images for every requested language, then push each
# one to Google Container Registry.
languages = args.language if args.language != ['all'] else _LANGUAGES
for lang in languages:
  docker_images = build_all_images_for_lang(lang)
  for image in docker_images:
    jobset.message('START', 'Uploading %s' % image, do_newline=True)
    # docker image name must be in the format <gcr_path>/<image>:<gcr_tag>
    assert image.startswith(args.gcr_path) and image.find(':') != -1
    subprocess.call(['gcloud', 'docker', '--', 'push', image])
| quizlet/grpc | tools/interop_matrix/create_matrix_images.py | Python | apache-2.0 | 9,421 |
import subprocess
def kill_processes_as(user, process_name=None):
    """
    Kill all processes of a given name belonging to a given user.

    :param user: Owner whose processes should be killed.
    :param process_name: Name of process to kill; if not specified or None,
        all of the user's processes are killed.
    :return:
    """
    # killall is run through sudo as the target user; -9 sends SIGKILL.
    command = ["sudo", "-H", "-u", user, "-s", "killall", "-9", "-u", user]
    if process_name is not None:
        command.append(process_name)
    subprocess.call(command,
                    stderr=subprocess.PIPE,
                    stdout=subprocess.PIPE)
| HaliteChallenge/Halite-II | apiserver/worker/util.py | Python | mit | 601 |
# -*- coding: utf-8 -*-
"""
Search on Nyaa using RSS.
"""
__author__ = 'Sohhla'
from shared.log import LoggerManager
import xml.etree.ElementTree as XMLParser
from datetime import datetime
import common
class NyaaRSS():
    """
    Search on Nyaa using RSS.
    """
    def __init__(self, network):
        # `network` must expose get_data(url) returning the response body.
        self.log = LoggerManager().get_logger("NyaaRSS")
        self.network = network

    def search(self, text="", dic=None, category="1_37"): #"1_37" = English translated anime
        """
        Returned dictionary struct:
        dict[RESULT_NUMBER]["title"] = str
        dict[RESULT_NUMBER]["link"] = str
        dict[RESULT_NUMBER]["date"] = datetime
        dict[RESULT_NUMBER]["downloads"] = int

        :type text: str
        :param text: search_terms
        :type dic: dict
        :param dic: where the data will be stored. If None, a new dict is created.
        :type category: str
        :param category: site specific filter to search only for anime.
        """
        if dic is None:
            dic = {}
        search_terms = text.strip().split(" ")
        url = self.__get_url(search_terms, category)
        xml = self.network.get_data(url)
        return self.__parse_rss(xml, dic, search_terms)

    @staticmethod
    def __get_url(search_terms, category):
        """Build the Nyaa RSS search URL for the given terms/category."""
        url = ""
        if len(search_terms) > 0:
            # Without using quotes (i.e. -"10-bit"), Nyaa reads "10-bit" as "10bit"
            nyaa_terms = []
            for term in search_terms:
                if term.rfind('-') > 0:
                    if term.startswith('-'):
                        # Exclusion term: -foo-bar -> -"foo-bar"
                        nyaa_terms.append("-\"%s\"" % term[1:])
                    else:
                        # BUG FIX: an inclusion term containing a dash must be
                        # quoted whole; the old code used term[1:], silently
                        # dropping the first character ("10-bit" -> "0-bit").
                        nyaa_terms.append("\"%s\"" % term)
                else:
                    nyaa_terms.append(term)
            text = "+".join(nyaa_terms)
            url = "https://www.nyaa.se/?page=rss&term=%s&cats=%s" % (text, category)
        return url

    @staticmethod
    def __parse_rss(xml, dic, search_terms):
        """Fill `dic` with matching items parsed from the RSS payload."""
        # BUG FIX: was `xml is not ""` -- an identity comparison against a
        # string literal, which only works by CPython interning accident.
        if xml != "":
            try:
                cont = 0
                xml = xml.encode("utf8", "ignore")
                rss = XMLParser.fromstring(xml)
                channel = rss[0]
                for item in channel.findall('item'):
                    title = item.find('title').text
                    link = item.find('link').text
                    date = item.find('pubDate').text
                    date_parsed = datetime.strptime(date, "%a, %d %b %Y %H:%M:%S +0000")
                    description = item.find('description').text
                    # Description embeds "<seeders> <leechers> <downloads>".
                    values = [int(s) for s in description.split() if s.isdigit()]
                    # TODO: use seeders/leechers
                    seeders = values[0]  # not yet used...
                    leechers = values[1]  # not yet used...
                    downloads = values[2]
                    if common.terms_match(title, search_terms):
                        dic["n" + str(cont)] = {}
                        dic["n" + str(cont)]["title"] = title
                        dic["n" + str(cont)]["link"] = link
                        dic["n" + str(cont)]["date"] = date_parsed
                        dic["n" + str(cont)]["downloads"] = downloads
                        cont += 1
            # Best-effort: malformed feeds are ignored.  BUG FIX: was a bare
            # `except:`, which also swallowed SystemExit/KeyboardInterrupt.
            # TODO: check all exceptions possible instead of just "Exception"
            except Exception:
                pass
return dic | geosohh/AnimeTorr | animetorr/downloader/anime/nyaa_rss.py | Python | gpl-2.0 | 3,385 |
import logging
import urllib2
from StringIO import StringIO
class MockLoggingHandler(logging.Handler):
    """Mock logging handler to check for expected logs.

    Emitted messages are collected in the ``messages`` dict, keyed by
    lower-cased level name; call ``reset()`` to clear them.
    """

    _LEVELS = ('debug', 'info', 'warning', 'error', 'critical')

    def __init__(self, *args, **kwargs):
        self.reset()
        logging.Handler.__init__(self, *args, **kwargs)

    def emit(self, record):
        # File the fully-formatted message under its level name.
        level = record.levelname.lower()
        self.messages[level].append(record.getMessage())

    def reset(self):
        # Fresh, empty bucket per level.
        self.messages = {level: [] for level in self._LEVELS}
class MockOpener(object):
    # Stand-in for a urllib2 opener: serves a canned response body, or
    # raises HTTPError 404 carrying that body when error=True.
    def __init__(self, msg, error=False, verify_data=lambda x: x.get_data() is None):
        self.msg = msg                  # canned response payload
        self.error = error              # if True, open() raises HTTPError
        self.verify_data = verify_data  # predicate applied to the request
    def open(self, req):
        # Mirror a real opener: only urllib2.Request objects whose payload
        # satisfies verify_data are served.
        if not isinstance(req, urllib2.Request):
            raise TypeError
        if not self.verify_data(req):
            raise ValueError
        if self.error:
            raise urllib2.HTTPError('http://example.com', 404,
                                    'nothing to see', {}, StringIO(self.msg))
        return StringIO(self.msg)
| chrigl/docker-library | plone-chrigl-debian/src/collective.flattr/collective/flattr/tests/mocks.py | Python | apache-2.0 | 1,128 |
# -*- coding: utf-8 -*-
# (c) 2015 Tuomas Airaksinen
#
# This file is part of automate-wsgi.
#
# automate-wsgi is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# automate-wsgi is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with automate-wsgi. If not, see <http://www.gnu.org/licenses/>.
# Distribution metadata for the automate-wsgi package.
__author__ = "Tuomas Airaksinen"
__copyright__ = "Copyright 2015, Tuomas Airaksinen"
__credits__ = []
__license__ = "GPL"
__version__ = '0.9.2'
__maintainer__ = "Tuomas Airaksinen"
__email__ = "tuomas.airaksinen@gmail.com"
__status__ = "Beta"

# Re-export the public service class at package level.
from .abstractwsgi import TornadoService
| tuomas2/automate-wsgi | automate_wsgi/__init__.py | Python | gpl-3.0 | 1,018 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from base import BaseHandler
import time
import sys
sys.path.append('..')
from models.user import User
class SigninHandler(BaseHandler):
    """Handle account registration (this app's "signin" page is sign-up)."""

    def get(self):
        """Render the registration form."""
        self.render("signin.html")

    def post(self):
        """Create the account and log the new user in via secure cookies."""
        uname = self.get_argument("name")
        email = self.get_argument("email")
        password = self.get_argument("password")
        npassword = self.get_argument("npassword")
        # Proceed only when both password fields match and neither the
        # email nor the user name is already taken.
        if (password == npassword and not User.is_email_exist(email)
                and not User.is_uname_exist(uname)):
            sdate = time.strftime("%Y-%m-%d", time.localtime())
            # NOTE(review): the password is stored as-is; consider hashing
            # (e.g. bcrypt) before persisting.
            User.add_one_user(email, uname, password, sdate, '',
                              "default.jpg", "N")
            user = User.get_user_by_name(uname)
            self.set_secure_cookie("user", user[0].uname)
            # BUG FIX: previously the literal string "user[0].uid" was
            # stored; store the actual id (cookie values must be strings).
            self.set_secure_cookie("uid", str(user[0].uid))
            self.redirect("/")
        else:
            self.redirect("/signin")
| oujiaqi/suiyue | routes/signin.py | Python | apache-2.0 | 947 |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
from neutron.common import config as neutron_config # noqa
from gbpservice.neutron.services.grouppolicy import config
from gbpservice.neutron.tests.unit import common as cm
from gbpservice.neutron.tests.unit.services.grouppolicy import (
extensions as test_ext)
from gbpservice.neutron.tests.unit.services.grouppolicy import (
test_grouppolicy_plugin as test_plugin)
class ExtensionDriverTestBase(test_plugin.GroupPolicyPluginTestCase):
    """Base test case that wires the 'test' extension driver into GBP."""

    _extension_drivers = ['test']
    _extension_path = os.path.dirname(os.path.abspath(test_ext.__file__))

    def setUp(self, policy_drivers=None, core_plugin=None,
              l3_plugin=None, ml2_options=None,
              sc_plugin=None, qos_plugin=None, trunk_plugin=None):
        """Register the extension driver (and its API path) before setup."""
        conf = config.cfg.CONF
        conf.set_override('extension_drivers',
                          self._extension_drivers,
                          group='group_policy')
        if self._extension_path:
            conf.set_override('api_extensions_path', self._extension_path)
        super(ExtensionDriverTestBase, self).setUp(
            core_plugin=core_plugin, l3_plugin=l3_plugin,
            ml2_options=ml2_options, sc_plugin=sc_plugin,
            qos_plugin=qos_plugin, trunk_plugin=trunk_plugin)
class ExtensionDriverTestCase(ExtensionDriverTestBase):
    """Verify the '<acronym>_extension' attribute round-trips per resource.

    CONSISTENCY FIX: the original class duplicated the same
    create/show/list/update scenario in nine hand-written methods while a
    generic ``_test_attr`` helper already existed (used only for external
    segments, external policies and NAT pools).  ``_test_attr`` is now
    generalized (backward-compatibly) and every test routes through it,
    preserving the exact coverage of the original methods.
    """

    def test_pt_attr(self):
        self._test_attr('policy_target')

    def test_ptg_attr(self):
        self._test_attr('policy_target_group')

    def test_l2p_attr(self):
        # Acronym given explicitly: _acronim('l2_policy') would yield 'lp'.
        self._test_attr('l2_policy', acronim='l2p')

    def test_l3p_attr(self):
        self._test_attr('l3_policy', acronim='l3p')

    def test_pc_attr(self):
        self._test_attr('policy_classifier')

    def test_pa_attr(self):
        self._test_attr('policy_action')

    def test_pr_attr(self):
        # A policy rule requires an existing classifier.
        classifier = self.create_policy_classifier(
            name="class1", protocol="tcp", direction="out",
            port_range="50:100")
        self._test_attr(
            'policy_rule',
            policy_classifier_id=classifier['policy_classifier']['id'])

    def test_prs_attr(self):
        self._test_attr('policy_rule_set', policy_rules=[])

    def test_nsp_attr(self):
        self._test_attr('network_service_policy')

    def test_es_attr(self):
        self._test_attr('external_segment')

    def test_ep_attr(self):
        self._test_attr('external_policy')

    def test_np_attr(self):
        self._test_attr('nat_pool')

    def _test_attr(self, type, acronim=None, **create_kwargs):
        """Round-trip the extension attribute of one GBP resource.

        Checks the default (None) on create/show/list, an explicit value
        on create/show, and an updated value on update/show.

        :param type: singular resource name, e.g. 'policy_target'.
        :param acronim: attribute prefix; derived via _acronim() when None.
        :param create_kwargs: extra arguments required to create *type*.
        """
        acronim = acronim or _acronim(type)
        attr = acronim + '_extension'
        plural = cm.get_resource_plural(type)
        create = getattr(self, 'create_%s' % type)
        # Test create with default value.
        obj = create(**create_kwargs)
        obj_id = obj[type]['id']
        self.assertIsNone(obj[type][attr])
        req = self.new_show_request(plural, obj_id)
        res = self.deserialize(self.fmt, req.get_response(self.ext_api))
        self.assertIsNone(res[type][attr])
        # Test list.
        res = self._list(plural)
        self.assertIsNone(res[plural][0][attr])
        # Test create with explicit value.
        obj = create(**dict(create_kwargs, **{attr: "abc"}))
        obj_id = obj[type]['id']
        self.assertEqual("abc", obj[type][attr])
        req = self.new_show_request(plural, obj_id)
        res = self.deserialize(self.fmt, req.get_response(self.ext_api))
        self.assertEqual("abc", res[type][attr])
        # Test update.
        data = {type: {attr: "def"}}
        req = self.new_update_request(plural, data, obj_id)
        res = self.deserialize(self.fmt, req.get_response(self.ext_api))
        self.assertEqual("def", res[type][attr])
        req = self.new_show_request(plural, obj_id)
        res = self.deserialize(self.fmt, req.get_response(self.ext_api))
        self.assertEqual("def", res[type][attr])
def _acronim(type):
return ''.join([x[0] for x in type.split('_')])
| noironetworks/group-based-policy | gbpservice/neutron/tests/unit/services/grouppolicy/test_extension_driver_api.py | Python | apache-2.0 | 19,823 |
#!/usr/bin/env python
#########################################################################################
#
# Test function for sct_spinalcord script
#
# replace the shell test script in sct 1.0
#
# ---------------------------------------------------------------------------------------
# Copyright (c) 2014 Polytechnique Montreal <www.neuro.polymtl.ca>
# Author: Augustin Roux
# modified: 2014-08-10
#
# About the license: see the file LICENSE.TXT
#########################################################################################
import sct_utils as sct
import commands
def test(path_data=''):
    """Run sct_smooth_spinalcord on the test t2 data.

    BUG FIX: the module-level guard invokes ``test()`` with no argument,
    which raised a TypeError; ``path_data`` now defaults to the current
    directory so the script remains runnable either way.

    :param path_data: path to the testing data directory (ending with a
        separator), prepended to the t2 file names.
    :return: (exit status, command output) tuple.
    """
    # parameters
    folder_data = 't2/'
    file_data = ['t2.nii.gz', 't2_seg.nii.gz']
    # define command
    cmd = ('sct_smooth_spinalcord'
           ' -i ' + path_data + folder_data + file_data[0] +
           ' -s ' + path_data + folder_data + file_data[1] +
           ' -smooth 5')
    # run via commands.getstatusoutput (Python 2); sct.run(cmd, 0) is the
    # toolbox-native alternative if its return convention is preferred.
    return commands.getstatusoutput(cmd)
# call to function
if __name__ == "__main__":
    # BUG FIX: test() was called without the required path_data argument;
    # take the data path from the command line, defaulting to the current
    # directory.
    import sys
    test(sys.argv[1] if len(sys.argv) > 1 else '')
| 3324fr/spinalcordtoolbox | testing/test_sct_smooth_spinalcord.py | Python | mit | 1,053 |
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from selenium.webdriver.support.ui import WebDriverWait
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from unittest import skip
import time
import sys
class FunctionalTest(StaticLiveServerTestCase):
    """Selenium-driven functional tests, optionally against a real server.

    Passing ``--liveserver=host:port`` on the command line makes the suite
    target that server instead of starting Django's own live server.
    """

    @classmethod
    def setUpClass(cls):
        """Pick the server to test against before any test runs."""
        override = next(
            (arg for arg in sys.argv if 'liveserver' in arg), None)
        if override is not None:
            cls.server_url = "http://" + override.split("=")[1]
            ## inflicts cheat method when testing live server
            cls.live_server_url = cls.server_url
            return
        super().setUpClass()
        cls.server_url = cls.live_server_url

    @classmethod
    def tearDownClass(cls):
        # Only shut down the live server when we actually started one.
        if cls.server_url == cls.live_server_url:
            super().tearDownClass()

    def setUp(self):
        self.browser = webdriver.Firefox()
        self.browser.implicitly_wait(3)

    def tearDown(self):
        self.browser.quit()

    def check_for_row_in_list_table(self, row_text):
        '''
        Not a test function
        Helper Function to find the given text in table
        '''
        table = self.browser.find_element_by_id('id_list_table')
        row_texts = [row.text
                     for row in table.find_elements_by_tag_name('tr')]
        self.assertIn(row_text, row_texts)

    def get_item_input_box(self):
        """Return the to-do item input element."""
        return self.browser.find_element_by_id('id_text')

    def wait_for_element_with_id(self, element_id):
        """Block (up to 30s) until the element exists, with a helpful message."""
        WebDriverWait(self.browser, 30).until(
            lambda b: b.find_element_by_id(element_id),
            "Could not find element with id {}: Page text was:\n{}".format(
                element_id, self.browser.find_element_by_tag_name('body').text
            )
        )

    def wait_to_be_logged_in(self, email):
        """Wait for login to complete and the email to show in the navbar."""
        self.wait_for_element_with_id('id_logout')
        navbar = self.browser.find_element_by_css_selector(".navbar")
        self.assertIn(email, navbar.text)

    def wait_to_be_logged_out(self, email):
        """Wait for logout to complete and the email to leave the navbar."""
        self.wait_for_element_with_id('id_login')
        navbar = self.browser.find_element_by_css_selector(".navbar")
        self.assertNotIn(email, navbar.text)
| naveenpanwar/tgoat | functional_tests/base.py | Python | gpl-3.0 | 2,208 |
"""The tests for the Sonarr platform."""
import unittest
import time
from datetime import datetime
import pytest
from homeassistant.components.sensor import sonarr
from tests.common import get_test_home_assistant
def mocked_exception(*args, **kwargs):
    """Stand-in for requests.get that always fails with OSError."""
    raise OSError
def mocked_requests_get(*args, **kwargs):
    """Mock requests.get invocations.

    Dispatches on the requested URL (first positional argument) and
    returns a canned Sonarr API payload wrapped in ``MockResponse``;
    any unrecognized URL yields a 401 "Unauthorized" response.
    """
    class MockResponse:
        """Class to represent a mocked response."""

        def __init__(self, json_data, status_code):
            """Initialize the mock response class."""
            self.json_data = json_data
            self.status_code = status_code

        def json(self):
            """Return the json of the response."""
            return self.json_data

    # Local "today" so the calendar fixture always airs on the current date.
    today = datetime.date(datetime.fromtimestamp(time.time()))
    url = str(args[0])
    # /api/calendar: one upcoming episode (Bob's Burgers) airing today.
    if 'api/calendar' in url:
        return MockResponse([
            {
                "seriesId": 3,
                "episodeFileId": 0,
                "seasonNumber": 4,
                "episodeNumber": 11,
                "title": "Easy Com-mercial, Easy Go-mercial",
                "airDate": str(today),
                "airDateUtc": "2014-01-27T01:30:00Z",
                "overview": "To compete with fellow “restaurateur,” Ji...",
                "hasFile": "false",
                "monitored": "true",
                "sceneEpisodeNumber": 0,
                "sceneSeasonNumber": 0,
                "tvDbEpisodeId": 0,
                "series": {
                    "tvdbId": 194031,
                    "tvRageId": 24607,
                    "imdbId": "tt1561755",
                    "title": "Bob's Burgers",
                    "cleanTitle": "bobsburgers",
                    "status": "continuing",
                    "overview": "Bob's Burgers follows a third-generation ...",
                    "airTime": "5:30pm",
                    "monitored": "true",
                    "qualityProfileId": 1,
                    "seasonFolder": "true",
                    "lastInfoSync": "2014-01-26T19:25:55.4555946Z",
                    "runtime": 30,
                    "images": [
                        {
                            "coverType": "banner",
                            "url": "http://slurm.trakt.us/images/bann.jpg"
                        },
                        {
                            "coverType": "poster",
                            "url": "http://slurm.trakt.us/images/poster00.jpg"
                        },
                        {
                            "coverType": "fanart",
                            "url": "http://slurm.trakt.us/images/fan6.jpg"
                        }
                    ],
                    "seriesType": "standard",
                    "network": "FOX",
                    "useSceneNumbering": "false",
                    "titleSlug": "bobs-burgers",
                    "path": "T:\\Bob's Burgers",
                    "year": 0,
                    "firstAired": "2011-01-10T01:30:00Z",
                    "qualityProfile": {
                        "value": {
                            "name": "SD",
                            "allowed": [
                                {
                                    "id": 1,
                                    "name": "SDTV",
                                    "weight": 1
                                },
                                {
                                    "id": 8,
                                    "name": "WEBDL-480p",
                                    "weight": 2
                                },
                                {
                                    "id": 2,
                                    "name": "DVD",
                                    "weight": 3
                                }
                            ],
                            "cutoff": {
                                "id": 1,
                                "name": "SDTV",
                                "weight": 1
                            },
                            "id": 1
                        },
                        "isLoaded": "true"
                    },
                    "seasons": [
                        {
                            "seasonNumber": 4,
                            "monitored": "true"
                        },
                        {
                            "seasonNumber": 3,
                            "monitored": "true"
                        },
                        {
                            "seasonNumber": 2,
                            "monitored": "true"
                        },
                        {
                            "seasonNumber": 1,
                            "monitored": "true"
                        },
                        {
                            "seasonNumber": 0,
                            "monitored": "false"
                        }
                    ],
                    "id": 66
                },
                "downloading": "false",
                "id": 14402
            }
        ], 200)
    # /api/command: one pending RescanSeries command.
    elif 'api/command' in url:
        return MockResponse([
            {
                "name": "RescanSeries",
                "startedOn": "0001-01-01T00:00:00Z",
                "stateChangeTime": "2014-02-05T05:09:09.2366139Z",
                "sendUpdatesToClient": "true",
                "state": "pending",
                "id": 24
            }
        ], 200)
    # /api/wanted/missing: paged result with one missing Archer episode.
    elif 'api/wanted/missing' in url or 'totalRecords' in url:
        return MockResponse(
            {
                "page": 1,
                "pageSize": 15,
                "sortKey": "airDateUtc",
                "sortDirection": "descending",
                "totalRecords": 1,
                "records": [
                    {
                        "seriesId": 1,
                        "episodeFileId": 0,
                        "seasonNumber": 5,
                        "episodeNumber": 4,
                        "title": "Archer Vice: House Call",
                        "airDate": "2014-02-03",
                        "airDateUtc": "2014-02-04T03:00:00Z",
                        "overview": "Archer has to stage an that ... ",
                        "hasFile": "false",
                        "monitored": "true",
                        "sceneEpisodeNumber": 0,
                        "sceneSeasonNumber": 0,
                        "tvDbEpisodeId": 0,
                        "absoluteEpisodeNumber": 50,
                        "series": {
                            "tvdbId": 110381,
                            "tvRageId": 23354,
                            "imdbId": "tt1486217",
                            "title": "Archer (2009)",
                            "cleanTitle": "archer2009",
                            "status": "continuing",
                            "overview": "At ISIS, an international spy ...",
                            "airTime": "7:00pm",
                            "monitored": "true",
                            "qualityProfileId": 1,
                            "seasonFolder": "true",
                            "lastInfoSync": "2014-02-05T04:39:28.550495Z",
                            "runtime": 30,
                            "images": [
                                {
                                    "coverType": "banner",
                                    "url": "http://slurm.trakt.us//57.12.jpg"
                                },
                                {
                                    "coverType": "poster",
                                    "url": "http://slurm.trakt.u/57.12-300.jpg"
                                },
                                {
                                    "coverType": "fanart",
                                    "url": "http://slurm.trakt.us/image.12.jpg"
                                }
                            ],
                            "seriesType": "standard",
                            "network": "FX",
                            "useSceneNumbering": "false",
                            "titleSlug": "archer-2009",
                            "path": "E:\\Test\\TV\\Archer (2009)",
                            "year": 2009,
                            "firstAired": "2009-09-18T02:00:00Z",
                            "qualityProfile": {
                                "value": {
                                    "name": "SD",
                                    "cutoff": {
                                        "id": 1,
                                        "name": "SDTV"
                                    },
                                    "items": [
                                        {
                                            "quality": {
                                                "id": 1,
                                                "name": "SDTV"
                                            },
                                            "allowed": "true"
                                        },
                                        {
                                            "quality": {
                                                "id": 8,
                                                "name": "WEBDL-480p"
                                            },
                                            "allowed": "true"
                                        },
                                        {
                                            "quality": {
                                                "id": 2,
                                                "name": "DVD"
                                            },
                                            "allowed": "true"
                                        },
                                        {
                                            "quality": {
                                                "id": 4,
                                                "name": "HDTV-720p"
                                            },
                                            "allowed": "false"
                                        },
                                        {
                                            "quality": {
                                                "id": 9,
                                                "name": "HDTV-1080p"
                                            },
                                            "allowed": "false"
                                        },
                                        {
                                            "quality": {
                                                "id": 10,
                                                "name": "Raw-HD"
                                            },
                                            "allowed": "false"
                                        },
                                        {
                                            "quality": {
                                                "id": 5,
                                                "name": "WEBDL-720p"
                                            },
                                            "allowed": "false"
                                        },
                                        {
                                            "quality": {
                                                "id": 6,
                                                "name": "Bluray-720p"
                                            },
                                            "allowed": "false"
                                        },
                                        {
                                            "quality": {
                                                "id": 3,
                                                "name": "WEBDL-1080p"
                                            },
                                            "allowed": "false"
                                        },
                                        {
                                            "quality": {
                                                "id": 7,
                                                "name": "Bluray-1080p"
                                            },
                                            "allowed": "false"
                                        }
                                    ],
                                    "id": 1
                                },
                                "isLoaded": "true"
                            },
                            "seasons": [
                                {
                                    "seasonNumber": 5,
                                    "monitored": "true"
                                },
                                {
                                    "seasonNumber": 4,
                                    "monitored": "true"
                                },
                                {
                                    "seasonNumber": 3,
                                    "monitored": "true"
                                },
                                {
                                    "seasonNumber": 2,
                                    "monitored": "true"
                                },
                                {
                                    "seasonNumber": 1,
                                    "monitored": "true"
                                },
                                {
                                    "seasonNumber": 0,
                                    "monitored": "false"
                                }
                            ],
                            "id": 1
                        },
                        "downloading": "false",
                        "id": 55
                    }
                ]
            }, 200)
    # /api/queue: one Game of Thrones episode currently downloading.
    elif 'api/queue' in url:
        return MockResponse([
            {
                "series": {
                    "title": "Game of Thrones",
                    "sortTitle": "game thrones",
                    "seasonCount": 6,
                    "status": "continuing",
                    "overview": "Seven noble families fight for land ...",
                    "network": "HBO",
                    "airTime": "21:00",
                    "images": [
                        {
                            "coverType": "fanart",
                            "url": "http://thetvdb.com/banners/fanart/-83.jpg"
                        },
                        {
                            "coverType": "banner",
                            "url": "http://thetvdb.com/banners/-g19.jpg"
                        },
                        {
                            "coverType": "poster",
                            "url": "http://thetvdb.com/banners/posters-34.jpg"
                        }
                    ],
                    "seasons": [
                        {
                            "seasonNumber": 0,
                            "monitored": "false"
                        },
                        {
                            "seasonNumber": 1,
                            "monitored": "false"
                        },
                        {
                            "seasonNumber": 2,
                            "monitored": "true"
                        },
                        {
                            "seasonNumber": 3,
                            "monitored": "false"
                        },
                        {
                            "seasonNumber": 4,
                            "monitored": "false"
                        },
                        {
                            "seasonNumber": 5,
                            "monitored": "true"
                        },
                        {
                            "seasonNumber": 6,
                            "monitored": "true"
                        }
                    ],
                    "year": 2011,
                    "path": "/Volumes/Media/Shows/Game of Thrones",
                    "profileId": 5,
                    "seasonFolder": "true",
                    "monitored": "true",
                    "useSceneNumbering": "false",
                    "runtime": 60,
                    "tvdbId": 121361,
                    "tvRageId": 24493,
                    "tvMazeId": 82,
                    "firstAired": "2011-04-16T23:00:00Z",
                    "lastInfoSync": "2016-02-05T16:40:11.614176Z",
                    "seriesType": "standard",
                    "cleanTitle": "gamethrones",
                    "imdbId": "tt0944947",
                    "titleSlug": "game-of-thrones",
                    "certification": "TV-MA",
                    "genres": [
                        "Adventure",
                        "Drama",
                        "Fantasy"
                    ],
                    "tags": [],
                    "added": "2015-12-28T13:44:24.204583Z",
                    "ratings": {
                        "votes": 1128,
                        "value": 9.4
                    },
                    "qualityProfileId": 5,
                    "id": 17
                },
                "episode": {
                    "seriesId": 17,
                    "episodeFileId": 0,
                    "seasonNumber": 3,
                    "episodeNumber": 8,
                    "title": "Second Sons",
                    "airDate": "2013-05-19",
                    "airDateUtc": "2013-05-20T01:00:00Z",
                    "overview": "King’s Landing hosts a wedding, and ...",
                    "hasFile": "false",
                    "monitored": "false",
                    "absoluteEpisodeNumber": 28,
                    "unverifiedSceneNumbering": "false",
                    "id": 889
                },
                "quality": {
                    "quality": {
                        "id": 7,
                        "name": "Bluray-1080p"
                    },
                    "revision": {
                        "version": 1,
                        "real": 0
                    }
                },
                "size": 4472186820,
                "title": "Game.of.Thrones.S03E08.Second.Sons.2013.1080p.",
                "sizeleft": 0,
                "timeleft": "00:00:00",
                "estimatedCompletionTime": "2016-02-05T22:46:52.440104Z",
                "status": "Downloading",
                "trackedDownloadStatus": "Ok",
                "statusMessages": [],
                "downloadId": "SABnzbd_nzo_Mq2f_b",
                "protocol": "usenet",
                "id": 1503378561
            }
        ], 200)
    # /api/series: one fully-downloaded series (Marvel's Daredevil).
    elif 'api/series' in url:
        return MockResponse([
            {
                "title": "Marvel's Daredevil",
                "alternateTitles": [{
                    "title": "Daredevil",
                    "seasonNumber": -1
                }],
                "sortTitle": "marvels daredevil",
                "seasonCount": 2,
                "totalEpisodeCount": 26,
                "episodeCount": 26,
                "episodeFileCount": 26,
                "sizeOnDisk": 79282273693,
                "status": "continuing",
                "overview": "Matt Murdock was blinded in a tragic accident...",
                "previousAiring": "2016-03-18T04:01:00Z",
                "network": "Netflix",
                "airTime": "00:01",
                "images": [
                    {
                        "coverType": "fanart",
                        "url": "/sonarr/MediaCover/7/fanart.jpg?lastWrite="
                    },
                    {
                        "coverType": "banner",
                        "url": "/sonarr/MediaCover/7/banner.jpg?lastWrite="
                    },
                    {
                        "coverType": "poster",
                        "url": "/sonarr/MediaCover/7/poster.jpg?lastWrite="
                    }
                ],
                "seasons": [
                    {
                        "seasonNumber": 1,
                        "monitored": "false",
                        "statistics": {
                            "previousAiring": "2015-04-10T04:01:00Z",
                            "episodeFileCount": 13,
                            "episodeCount": 13,
                            "totalEpisodeCount": 13,
                            "sizeOnDisk": 22738179333,
                            "percentOfEpisodes": 100
                        }
                    },
                    {
                        "seasonNumber": 2,
                        "monitored": "false",
                        "statistics": {
                            "previousAiring": "2016-03-18T04:01:00Z",
                            "episodeFileCount": 13,
                            "episodeCount": 13,
                            "totalEpisodeCount": 13,
                            "sizeOnDisk": 56544094360,
                            "percentOfEpisodes": 100
                        }
                    }
                ],
                "year": 2015,
                "path": "F:\\TV_Shows\\Marvels Daredevil",
                "profileId": 6,
                "seasonFolder": "true",
                "monitored": "true",
                "useSceneNumbering": "false",
                "runtime": 55,
                "tvdbId": 281662,
                "tvRageId": 38796,
                "tvMazeId": 1369,
                "firstAired": "2015-04-10T04:00:00Z",
                "lastInfoSync": "2016-09-09T09:02:49.4402575Z",
                "seriesType": "standard",
                "cleanTitle": "marvelsdaredevil",
                "imdbId": "tt3322312",
                "titleSlug": "marvels-daredevil",
                "certification": "TV-MA",
                "genres": [
                    "Action",
                    "Crime",
                    "Drama"
                ],
                "tags": [],
                "added": "2015-05-15T00:20:32.7892744Z",
                "ratings": {
                    "votes": 461,
                    "value": 8.9
                },
                "qualityProfileId": 6,
                "id": 7
            }
        ], 200)
    # /api/diskspace: a single mount with ~263 GB free of ~465 GB.
    elif 'api/diskspace' in url:
        return MockResponse([
            {
                "path": "/data",
                "label": "",
                "freeSpace": 282500067328,
                "totalSpace": 499738734592
            }
        ], 200)
    # /api/system/status: static server/system information.
    elif 'api/system/status' in url:
        return MockResponse({
            "version": "2.0.0.1121",
            "buildTime": "2014-02-08T20:49:36.5560392Z",
            "isDebug": "false",
            "isProduction": "true",
            "isAdmin": "true",
            "isUserInteractive": "false",
            "startupPath": "C:\\ProgramData\\NzbDrone\\bin",
            "appData": "C:\\ProgramData\\NzbDrone",
            "osVersion": "6.2.9200.0",
            "isMono": "false",
            "isLinux": "false",
            "isWindows": "true",
            "branch": "develop",
            "authentication": "false",
            "startOfWeek": 0,
            "urlBase": ""
        }, 200)
    # Anything else: unauthorized.
    else:
        return MockResponse({
            "error": "Unauthorized"
        }, 401)
class TestSonarrSetup(unittest.TestCase):
    """Test the Sonarr platform."""
    # pylint: disable=invalid-name
    # Collects every entity handed to add_devices(); reset per test in
    # setUp() so individual tests stay independent.
    DEVICES = []
    def add_devices(self, devices, update):
        """Mock add devices."""
        # 'update' mirrors the real add_devices callback signature; unused.
        for device in devices:
            self.DEVICES.append(device)
    def setUp(self):
        """Initialize values for this testcase class."""
        self.DEVICES = []
        self.hass = get_test_home_assistant()
        # Fixed timezone so the date-based sensors (upcoming/wanted) are
        # deterministic regardless of where the tests run.
        self.hass.config.time_zone = 'America/Los_Angeles'
    def tearDown(self): # pylint: disable=invalid-name
        """Stop everything that was started."""
        self.hass.stop()
    # NOTE(review): in every test below the assertions run inside the
    # "for device in self.DEVICES" loop — if setup_platform produced no
    # devices the assertions would be skipped silently. Confirm intended.
    @unittest.mock.patch('requests.get', side_effect=mocked_requests_get)
    def test_diskspace_no_paths(self, req_mock):
        """Test getting all disk space."""
        config = {
            'platform': 'sonarr',
            'api_key': 'foo',
            'days': '2',
            'unit': 'GB',
            "include_paths": [],
            'monitored_conditions': [
                'diskspace'
            ]
        }
        sonarr.setup_platform(self.hass, config, self.add_devices, None)
        for device in self.DEVICES:
            device.update()
            self.assertEqual('263.10', device.state)
            self.assertEqual('mdi:harddisk', device.icon)
            self.assertEqual('GB', device.unit_of_measurement)
            self.assertEqual('Sonarr Disk Space', device.name)
            self.assertEqual(
                '263.10/465.42GB (56.53%)',
                device.device_state_attributes["/data"]
            )
    @unittest.mock.patch('requests.get', side_effect=mocked_requests_get)
    def test_diskspace_paths(self, req_mock):
        """Test getting diskspace for included paths."""
        config = {
            'platform': 'sonarr',
            'api_key': 'foo',
            'days': '2',
            'unit': 'GB',
            "include_paths": [
                '/data'
            ],
            'monitored_conditions': [
                'diskspace'
            ]
        }
        sonarr.setup_platform(self.hass, config, self.add_devices, None)
        for device in self.DEVICES:
            device.update()
            self.assertEqual('263.10', device.state)
            self.assertEqual('mdi:harddisk', device.icon)
            self.assertEqual('GB', device.unit_of_measurement)
            self.assertEqual('Sonarr Disk Space', device.name)
            self.assertEqual(
                '263.10/465.42GB (56.53%)',
                device.device_state_attributes["/data"]
            )
    @unittest.mock.patch('requests.get', side_effect=mocked_requests_get)
    def test_commands(self, req_mock):
        """Test getting running commands."""
        config = {
            'platform': 'sonarr',
            'api_key': 'foo',
            'days': '2',
            'unit': 'GB',
            "include_paths": [
                '/data'
            ],
            'monitored_conditions': [
                'commands'
            ]
        }
        sonarr.setup_platform(self.hass, config, self.add_devices, None)
        for device in self.DEVICES:
            device.update()
            self.assertEqual(1, device.state)
            self.assertEqual('mdi:code-braces', device.icon)
            self.assertEqual('Commands', device.unit_of_measurement)
            self.assertEqual('Sonarr Commands', device.name)
            self.assertEqual(
                'pending',
                device.device_state_attributes["RescanSeries"]
            )
    @unittest.mock.patch('requests.get', side_effect=mocked_requests_get)
    def test_queue(self, req_mock):
        """Test getting downloads in the queue."""
        config = {
            'platform': 'sonarr',
            'api_key': 'foo',
            'days': '2',
            'unit': 'GB',
            "include_paths": [
                '/data'
            ],
            'monitored_conditions': [
                'queue'
            ]
        }
        sonarr.setup_platform(self.hass, config, self.add_devices, None)
        for device in self.DEVICES:
            device.update()
            self.assertEqual(1, device.state)
            self.assertEqual('mdi:download', device.icon)
            self.assertEqual('Episodes', device.unit_of_measurement)
            self.assertEqual('Sonarr Queue', device.name)
            self.assertEqual(
                '100.00%',
                device.device_state_attributes["Game of Thrones S03E08"]
            )
    @unittest.mock.patch('requests.get', side_effect=mocked_requests_get)
    def test_series(self, req_mock):
        """Test getting the number of series."""
        config = {
            'platform': 'sonarr',
            'api_key': 'foo',
            'days': '2',
            'unit': 'GB',
            "include_paths": [
                '/data'
            ],
            'monitored_conditions': [
                'series'
            ]
        }
        sonarr.setup_platform(self.hass, config, self.add_devices, None)
        for device in self.DEVICES:
            device.update()
            self.assertEqual(1, device.state)
            self.assertEqual('mdi:television', device.icon)
            self.assertEqual('Shows', device.unit_of_measurement)
            self.assertEqual('Sonarr Series', device.name)
            self.assertEqual(
                '26/26 Episodes',
                device.device_state_attributes["Marvel's Daredevil"]
            )
    @unittest.mock.patch('requests.get', side_effect=mocked_requests_get)
    def test_wanted(self, req_mock):
        """Test getting wanted episodes."""
        config = {
            'platform': 'sonarr',
            'api_key': 'foo',
            'days': '2',
            'unit': 'GB',
            "include_paths": [
                '/data'
            ],
            'monitored_conditions': [
                'wanted'
            ]
        }
        sonarr.setup_platform(self.hass, config, self.add_devices, None)
        for device in self.DEVICES:
            device.update()
            self.assertEqual(1, device.state)
            self.assertEqual('mdi:television', device.icon)
            self.assertEqual('Episodes', device.unit_of_measurement)
            self.assertEqual('Sonarr Wanted', device.name)
            self.assertEqual(
                '2014-02-03',
                device.device_state_attributes["Archer (2009) S05E04"]
            )
    @unittest.mock.patch('requests.get', side_effect=mocked_requests_get)
    def test_upcoming_multiple_days(self, req_mock):
        """Test the upcoming episodes for multiple days."""
        config = {
            'platform': 'sonarr',
            'api_key': 'foo',
            'days': '2',
            'unit': 'GB',
            "include_paths": [
                '/data'
            ],
            'monitored_conditions': [
                'upcoming'
            ]
        }
        sonarr.setup_platform(self.hass, config, self.add_devices, None)
        for device in self.DEVICES:
            device.update()
            self.assertEqual(1, device.state)
            self.assertEqual('mdi:television', device.icon)
            self.assertEqual('Episodes', device.unit_of_measurement)
            self.assertEqual('Sonarr Upcoming', device.name)
            self.assertEqual(
                'S04E11',
                device.device_state_attributes["Bob's Burgers"]
            )
    # Skipped: the mocked calendar endpoint cannot express a single-day
    # window (see docstring below).
    @pytest.mark.skip
    @unittest.mock.patch('requests.get', side_effect=mocked_requests_get)
    def test_upcoming_today(self, req_mock):
        """Test filtering for a single day.
        Sonarr needs to respond with at least 2 days
        """
        config = {
            'platform': 'sonarr',
            'api_key': 'foo',
            'days': '1',
            'unit': 'GB',
            "include_paths": [
                '/data'
            ],
            'monitored_conditions': [
                'upcoming'
            ]
        }
        sonarr.setup_platform(self.hass, config, self.add_devices, None)
        for device in self.DEVICES:
            device.update()
            self.assertEqual(1, device.state)
            self.assertEqual('mdi:television', device.icon)
            self.assertEqual('Episodes', device.unit_of_measurement)
            self.assertEqual('Sonarr Upcoming', device.name)
            self.assertEqual(
                'S04E11',
                device.device_state_attributes["Bob's Burgers"]
            )
    @unittest.mock.patch('requests.get', side_effect=mocked_requests_get)
    def test_system_status(self, req_mock):
        """Test getting system status."""
        config = {
            'platform': 'sonarr',
            'api_key': 'foo',
            'days': '2',
            'unit': 'GB',
            "include_paths": [
                '/data'
            ],
            'monitored_conditions': [
                'status'
            ]
        }
        sonarr.setup_platform(self.hass, config, self.add_devices, None)
        for device in self.DEVICES:
            device.update()
            self.assertEqual('2.0.0.1121', device.state)
            self.assertEqual('mdi:information', device.icon)
            self.assertEqual('Sonarr Status', device.name)
            self.assertEqual(
                '6.2.9200.0',
                device.device_state_attributes['osVersion'])
    @pytest.mark.skip
    @unittest.mock.patch('requests.get', side_effect=mocked_requests_get)
    def test_ssl(self, req_mock):
        """Test SSL being enabled."""
        config = {
            'platform': 'sonarr',
            'api_key': 'foo',
            'days': '1',
            'unit': 'GB',
            "include_paths": [
                '/data'
            ],
            'monitored_conditions': [
                'upcoming'
            ],
            "ssl": "true"
        }
        sonarr.setup_platform(self.hass, config, self.add_devices, None)
        for device in self.DEVICES:
            device.update()
            self.assertEqual(1, device.state)
            # device.ssl is the scheme suffix: 's' means https is used.
            self.assertEqual('s', device.ssl)
            self.assertEqual('mdi:television', device.icon)
            self.assertEqual('Episodes', device.unit_of_measurement)
            self.assertEqual('Sonarr Upcoming', device.name)
            self.assertEqual(
                'S04E11',
                device.device_state_attributes["Bob's Burgers"]
            )
    # Uses mocked_exception so every request raises; the sensor must
    # degrade to an unknown (None) state instead of propagating.
    @unittest.mock.patch('requests.get', side_effect=mocked_exception)
    def test_exception_handling(self, req_mock):
        """Test exception being handled."""
        config = {
            'platform': 'sonarr',
            'api_key': 'foo',
            'days': '1',
            'unit': 'GB',
            "include_paths": [
                '/data'
            ],
            'monitored_conditions': [
                'upcoming'
            ]
        }
        sonarr.setup_platform(self.hass, config, self.add_devices, None)
        for device in self.DEVICES:
            device.update()
            self.assertEqual(None, device.state)
| LinuxChristian/home-assistant | tests/components/sensor/test_sonarr.py | Python | apache-2.0 | 34,600 |
import d
class A:
    """Toy class: one class attribute and a method that prints (Python 2)."""
    a= "a"
    def test(self):
        """Print a fixed marker string."""
        # Python 2 print statement; this file is not Python 3 compatible.
        print "aa"
class B:
    """Toy class mirroring A; passed (as the class object) to d.helper()."""
    b= "b"
    def test(self):
        """Print a fixed marker string (Python 2 print statement)."""
        print "bb"
# Hand the class object B (not an instance) to the helper module, then
# instantiate A and exercise its test() method.
d.helper(B)
a=A()
a.test()
| h2oloopan/easymerge | EasyMerge/testcase/self_written/c.py | Python | mit | 134 |
import os
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
# Long description is assembled from the README and changelog shipped
# next to this setup script.
with open(os.path.join(here, 'README.txt')) as f:
    README = f.read()
with open(os.path.join(here, 'CHANGES.txt')) as f:
    CHANGES = f.read()
# Runtime dependencies installed alongside the package.
requires = [
    'pyramid',
    'pyramid_chameleon',
    'pyramid_debugtoolbar',
    'pyramid_tm',
    'SQLAlchemy',
    'transaction',
    'zope.sqlalchemy',
    'waitress',
    'pytz',
    'dogpile.cache',
    'pyramid_dogpile_cache',
    'Flask>=0.10.1',
    'flask-admin',
    'psycopg2',
    'pymemcache',
    'mock',
    'alembic'
]
setup(name='gamification-engine',
      version='0.1.18',
      description='gamification-engine',
      long_description=README + '\n\n' + CHANGES,
      classifiers=[
        "Programming Language :: Python",
        "Framework :: Pyramid",
        "Topic :: Internet :: WWW/HTTP",
        "Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
        "Topic :: Software Development :: Libraries",
        "Programming Language :: Python :: 2.7",
        "License :: OSI Approved :: GNU Affero General Public License v3"
        ],
      author='Marcel Sander, Jens Janiuk',
      author_email='marcel@gamification-software.com',
      # NOTE(review): license='MIT' conflicts with the AGPLv3 trove
      # classifier above — confirm which license actually applies.
      license='MIT',
      url='https://www.gamification-software.com',
      keywords='web wsgi bfg pylons pyramid gamification',
      packages=find_packages()+["gengine_quickstart_template",],
      include_package_data=True,
      zip_safe=False,
      test_suite='gengine',
      install_requires=requires,
      # Exposes the WSGI app factory plus two CLI helpers.
      entry_points="""\
      [paste.app_factory]
      main = gengine:main
      [console_scripts]
      initialize_gengine_db = gengine.scripts.initializedb:main
      gengine_quickstart = gengine.scripts.quickstart:main
      """,
      )
| ArneBab/gamification-engine | setup.py | Python | mit | 1,784 |
#!/usr/bin/env python
#
# euclid graphics maths module
#
# Copyright (c) 2006 Alex Holkner
# Alex.Holkner@mail.google.com
#
# This library is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation; either version 2.1 of the License, or (at your
# option) any later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
# for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this library; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
'''euclid graphics maths module
Documentation and tests are included in the file "euclid.txt", or online
at http://code.google.com/p/pyeuclid
'''
__docformat__ = 'restructuredtext'
__version__ = '$Id$'
__revision__ = '$Revision$'
import math
import operator
import types
# Some magic here. If _use_slots is True, the classes will derive from
# object and will define a __slots__ class variable. If _use_slots is
# False, classes will be old-style and will not define __slots__.
#
# _use_slots = True: Memory efficient, probably faster in future versions
# of Python, "better".
# _use_slots = False: Ordinary classes, much faster than slots in current
# versions of Python (2.4 and 2.5).
# Implementation switch consumed by _EuclidMetaclass below:
# True -> new-style classes that keep their __slots__.
_use_slots = True
# If True, allows components of Vector2 and Vector3 to be set via swizzling;
# e.g. v.xyz = (1, 2, 3). This is much, much slower than the more verbose
# v.x = 1; v.y = 2; v.z = 3, and slows down ordinary element setting as
# well. Recommended setting is False.
_enable_swizzle_set = False
# Requires class to derive from object.
if _enable_swizzle_set:
    _use_slots = True
# Implement _use_slots magic.
class _EuclidMetaclass(type):
    """Metaclass implementing the ``_use_slots`` switch (Python 2 only).

    With ``_use_slots`` True, classes are rebuilt as new-style classes
    (``bases + (object,)``) and keep ``__slots__``; because slotted
    instances lack ``__dict__``, pickling support is synthesized from
    the slot names.  With ``_use_slots`` False, ``__slots__`` is dropped
    and an old-style class is produced via ``types.ClassType``.
    """
    def __new__(cls, name, bases, dct):
        if '__slots__' in dct:
            # Slotted instances have no __dict__, so pickle needs these.
            dct['__getstate__'] = cls._create_getstate(dct['__slots__'])
            dct['__setstate__'] = cls._create_setstate(dct['__slots__'])
        if _use_slots:
            return type.__new__(cls, name, bases + (object,), dct)
        else:
            if '__slots__' in dct:
                del dct['__slots__']
            return types.ClassType.__new__(types.ClassType, name, bases, dct)
    @classmethod
    def _create_getstate(cls, slots):
        # Build a __getstate__ that snapshots every slot into a dict.
        def __getstate__(self):
            d = {}
            for slot in slots:
                d[slot] = getattr(self, slot)
            return d
        return __getstate__
    @classmethod
    def _create_setstate(cls, slots):
        # Build a __setstate__ that restores attributes from that dict.
        def __setstate__(self, state):
            for name, value in state.items():
                setattr(self, name, value)
        return __setstate__
# Module-level default metaclass: applies to every class defined below
# in this module (a Python 2 mechanism; ignored by Python 3).
__metaclass__ = _EuclidMetaclass
class Vector2:
    """Mutable 2-D vector.

    Arithmetic is Point2-aware per the inline comments in __add__/__sub__
    (Vector + Point -> Point, Point - Point -> Vector).  Components can be
    read via swizzling (``v.yx``); writing via swizzling is only compiled
    in when ``_enable_swizzle_set`` is True.  Python 2 only code
    (``long``, ``operator.div``, ``raise E, v`` syntax).
    """
    __slots__ = ['x', 'y']
    # Mutable, therefore explicitly unhashable.
    __hash__ = None
    def __init__(self, x=0, y=0):
        self.x = x
        self.y = y
    def __copy__(self):
        return self.__class__(self.x, self.y)
    copy = __copy__
    def __repr__(self):
        return 'Vector2(%.2f, %.2f)' % (self.x, self.y)
    def __eq__(self, other):
        # Compares against another Vector2 or any 2-element sequence.
        if isinstance(other, Vector2):
            return self.x == other.x and \
                   self.y == other.y
        else:
            assert hasattr(other, '__len__') and len(other) == 2
            return self.x == other[0] and \
                   self.y == other[1]
    def __ne__(self, other):
        return not self.__eq__(other)
    def __nonzero__(self):
        # Python 2 truth protocol: zero vector is falsy.
        return self.x != 0 or self.y != 0
    def __len__(self):
        return 2
    def __getitem__(self, key):
        return (self.x, self.y)[key]
    def __setitem__(self, key, value):
        l = [self.x, self.y]
        l[key] = value
        self.x, self.y = l
    def __iter__(self):
        return iter((self.x, self.y))
    def __getattr__(self, name):
        # Swizzle read access: v.xy -> (v.x, v.y), v.yx -> (v.y, v.x), ...
        try:
            return tuple([(self.x, self.y)['xy'.index(c)] \
                          for c in name])
        except ValueError:
            raise AttributeError, name
    if _enable_swizzle_set:
        # This has detrimental performance on ordinary setattr as well
        # if enabled
        def __setattr__(self, name, value):
            if len(name) == 1:
                object.__setattr__(self, name, value)
            else:
                try:
                    l = [self.x, self.y]
                    for c, v in map(None, name, value):
                        l['xy'.index(c)] = v
                    self.x, self.y = l
                except ValueError:
                    raise AttributeError, name
    def __add__(self, other):
        if isinstance(other, Vector2):
            # Vector + Vector -> Vector
            # Vector + Point -> Point
            # Point + Point -> Vector
            if self.__class__ is other.__class__:
                _class = Vector2
            else:
                _class = Point2
            return _class(self.x + other.x,
                          self.y + other.y)
        else:
            assert hasattr(other, '__len__') and len(other) == 2
            return Vector2(self.x + other[0],
                           self.y + other[1])
    __radd__ = __add__
    def __iadd__(self, other):
        # In-place add; accepts a Vector2 or any 2-element sequence.
        if isinstance(other, Vector2):
            self.x += other.x
            self.y += other.y
        else:
            self.x += other[0]
            self.y += other[1]
        return self
    def __sub__(self, other):
        if isinstance(other, Vector2):
            # Vector - Vector -> Vector
            # Vector - Point -> Point
            # Point - Point -> Vector
            if self.__class__ is other.__class__:
                _class = Vector2
            else:
                _class = Point2
            return _class(self.x - other.x,
                          self.y - other.y)
        else:
            assert hasattr(other, '__len__') and len(other) == 2
            return Vector2(self.x - other[0],
                           self.y - other[1])
    def __rsub__(self, other):
        if isinstance(other, Vector2):
            return Vector2(other.x - self.x,
                           other.y - self.y)
        else:
            assert hasattr(other, '__len__') and len(other) == 2
            return Vector2(other.x - self[0],
                           other.y - self[1])
    def __mul__(self, other):
        # Scalar multiplication only (unlike Vector3, which is also
        # component-wise for vector operands).
        assert type(other) in (int, long, float)
        return Vector2(self.x * other,
                       self.y * other)
    __rmul__ = __mul__
    def __imul__(self, other):
        assert type(other) in (int, long, float)
        self.x *= other
        self.y *= other
        return self
    def __div__(self, other):
        # Python 2 classic division by a scalar.
        assert type(other) in (int, long, float)
        return Vector2(operator.div(self.x, other),
                       operator.div(self.y, other))
    def __rdiv__(self, other):
        assert type(other) in (int, long, float)
        return Vector2(operator.div(other, self.x),
                       operator.div(other, self.y))
    def __floordiv__(self, other):
        assert type(other) in (int, long, float)
        return Vector2(operator.floordiv(self.x, other),
                       operator.floordiv(self.y, other))
    def __rfloordiv__(self, other):
        assert type(other) in (int, long, float)
        return Vector2(operator.floordiv(other, self.x),
                       operator.floordiv(other, self.y))
    def __truediv__(self, other):
        assert type(other) in (int, long, float)
        return Vector2(operator.truediv(self.x, other),
                       operator.truediv(self.y, other))
    def __rtruediv__(self, other):
        assert type(other) in (int, long, float)
        return Vector2(operator.truediv(other, self.x),
                       operator.truediv(other, self.y))
    def __neg__(self):
        return Vector2(-self.x,
                       -self.y)
    __pos__ = __copy__
    def __abs__(self):
        # Euclidean length.
        return math.sqrt(self.x ** 2 + \
                         self.y ** 2)
    magnitude = __abs__
    def magnitude_squared(self):
        # Avoids the sqrt; useful for comparisons.
        return self.x ** 2 + \
               self.y ** 2
    def normalize(self):
        # In-place; a zero vector is left unchanged (no division by 0).
        d = self.magnitude()
        if d:
            self.x /= d
            self.y /= d
        return self
    def normalized(self):
        # Non-mutating variant; returns a copy for the zero vector.
        d = self.magnitude()
        if d:
            return Vector2(self.x / d,
                           self.y / d)
        return self.copy()
    def dot(self, other):
        assert isinstance(other, Vector2)
        return self.x * other.x + \
               self.y * other.y
    def cross(self):
        # 2-D "cross": returns the perpendicular vector (y, -x).
        return Vector2(self.y, -self.x)
    def reflect(self, normal):
        # assume normal is normalized
        assert isinstance(normal, Vector2)
        d = 2 * (self.x * normal.x + self.y * normal.y)
        return Vector2(self.x - d * normal.x,
                       self.y - d * normal.y)
class Vector3:
    """Mutable 3-D vector.

    Arithmetic is Point3-aware per the inline comments in __add__/__sub__;
    multiplication is component-wise for vector operands and scalar
    otherwise.  Swizzle reads (``v.zyx``) are always available; swizzle
    writes only when ``_enable_swizzle_set`` is True.  Python 2 only code
    (``long``, ``operator.div``, ``raise E, v`` syntax).
    """
    __slots__ = ['x', 'y', 'z']
    # Mutable, therefore explicitly unhashable.
    __hash__ = None
    def __init__(self, x=0, y=0, z=0):
        self.x = x
        self.y = y
        self.z = z
    def __copy__(self):
        return self.__class__(self.x, self.y, self.z)
    copy = __copy__
    def __repr__(self):
        return 'Vector3(%.2f, %.2f, %.2f)' % (self.x,
                                              self.y,
                                              self.z)
    def __eq__(self, other):
        # Compares against another Vector3 or any 3-element sequence.
        if isinstance(other, Vector3):
            return self.x == other.x and \
                   self.y == other.y and \
                   self.z == other.z
        else:
            assert hasattr(other, '__len__') and len(other) == 3
            return self.x == other[0] and \
                   self.y == other[1] and \
                   self.z == other[2]
    def __ne__(self, other):
        return not self.__eq__(other)
    def __nonzero__(self):
        # Python 2 truth protocol: zero vector is falsy.
        return self.x != 0 or self.y != 0 or self.z != 0
    def __len__(self):
        return 3
    def __getitem__(self, key):
        return (self.x, self.y, self.z)[key]
    def __setitem__(self, key, value):
        l = [self.x, self.y, self.z]
        l[key] = value
        self.x, self.y, self.z = l
    def __iter__(self):
        return iter((self.x, self.y, self.z))
    def __getattr__(self, name):
        # Swizzle read access: v.xz -> (v.x, v.z), etc.
        try:
            return tuple([(self.x, self.y, self.z)['xyz'.index(c)] \
                          for c in name])
        except ValueError:
            raise AttributeError, name
    if _enable_swizzle_set:
        # This has detrimental performance on ordinary setattr as well
        # if enabled
        def __setattr__(self, name, value):
            if len(name) == 1:
                object.__setattr__(self, name, value)
            else:
                try:
                    l = [self.x, self.y, self.z]
                    for c, v in map(None, name, value):
                        l['xyz'.index(c)] = v
                    self.x, self.y, self.z = l
                except ValueError:
                    raise AttributeError, name
    def __add__(self, other):
        if isinstance(other, Vector3):
            # Vector + Vector -> Vector
            # Vector + Point -> Point
            # Point + Point -> Vector
            if self.__class__ is other.__class__:
                _class = Vector3
            else:
                _class = Point3
            return _class(self.x + other.x,
                          self.y + other.y,
                          self.z + other.z)
        else:
            assert hasattr(other, '__len__') and len(other) == 3
            return Vector3(self.x + other[0],
                           self.y + other[1],
                           self.z + other[2])
    __radd__ = __add__
    def __iadd__(self, other):
        # In-place add; accepts a Vector3 or any 3-element sequence.
        if isinstance(other, Vector3):
            self.x += other.x
            self.y += other.y
            self.z += other.z
        else:
            self.x += other[0]
            self.y += other[1]
            self.z += other[2]
        return self
    def __sub__(self, other):
        if isinstance(other, Vector3):
            # Vector - Vector -> Vector
            # Vector - Point -> Point
            # Point - Point -> Vector
            if self.__class__ is other.__class__:
                _class = Vector3
            else:
                _class = Point3
            return Vector3(self.x - other.x,
                           self.y - other.y,
                           self.z - other.z)
        else:
            assert hasattr(other, '__len__') and len(other) == 3
            return Vector3(self.x - other[0],
                           self.y - other[1],
                           self.z - other[2])
    def __rsub__(self, other):
        if isinstance(other, Vector3):
            return Vector3(other.x - self.x,
                           other.y - self.y,
                           other.z - self.z)
        else:
            assert hasattr(other, '__len__') and len(other) == 3
            return Vector3(other.x - self[0],
                           other.y - self[1],
                           other.z - self[2])
    def __mul__(self, other):
        if isinstance(other, Vector3):
            # TODO component-wise mul/div in-place and on Vector2; docs.
            if self.__class__ is Point3 or other.__class__ is Point3:
                _class = Point3
            else:
                _class = Vector3
            return _class(self.x * other.x,
                          self.y * other.y,
                          self.z * other.z)
        else:
            assert type(other) in (int, long, float)
            return Vector3(self.x * other,
                           self.y * other,
                           self.z * other)
    __rmul__ = __mul__
    def __imul__(self, other):
        # In-place multiply is scalar-only (no component-wise variant).
        assert type(other) in (int, long, float)
        self.x *= other
        self.y *= other
        self.z *= other
        return self
    def __div__(self, other):
        # Python 2 classic division by a scalar.
        assert type(other) in (int, long, float)
        return Vector3(operator.div(self.x, other),
                       operator.div(self.y, other),
                       operator.div(self.z, other))
    def __rdiv__(self, other):
        assert type(other) in (int, long, float)
        return Vector3(operator.div(other, self.x),
                       operator.div(other, self.y),
                       operator.div(other, self.z))
    def __floordiv__(self, other):
        assert type(other) in (int, long, float)
        return Vector3(operator.floordiv(self.x, other),
                       operator.floordiv(self.y, other),
                       operator.floordiv(self.z, other))
    def __rfloordiv__(self, other):
        assert type(other) in (int, long, float)
        return Vector3(operator.floordiv(other, self.x),
                       operator.floordiv(other, self.y),
                       operator.floordiv(other, self.z))
    def __truediv__(self, other):
        assert type(other) in (int, long, float)
        return Vector3(operator.truediv(self.x, other),
                       operator.truediv(self.y, other),
                       operator.truediv(self.z, other))
    def __rtruediv__(self, other):
        assert type(other) in (int, long, float)
        return Vector3(operator.truediv(other, self.x),
                       operator.truediv(other, self.y),
                       operator.truediv(other, self.z))
    def __neg__(self):
        return Vector3(-self.x,
                       -self.y,
                       -self.z)
    __pos__ = __copy__
    def __abs__(self):
        # Euclidean length.
        return math.sqrt(self.x ** 2 + \
                         self.y ** 2 + \
                         self.z ** 2)
    magnitude = __abs__
    def magnitude_squared(self):
        # Avoids the sqrt; useful for comparisons.
        return self.x ** 2 + \
               self.y ** 2 + \
               self.z ** 2
    def normalize(self):
        # In-place; a zero vector is left unchanged (no division by 0).
        d = self.magnitude()
        if d:
            self.x /= d
            self.y /= d
            self.z /= d
        return self
    def normalized(self):
        # Non-mutating variant; returns a copy for the zero vector.
        d = self.magnitude()
        if d:
            return Vector3(self.x / d,
                           self.y / d,
                           self.z / d)
        return self.copy()
    def dot(self, other):
        assert isinstance(other, Vector3)
        return self.x * other.x + \
               self.y * other.y + \
               self.z * other.z
    def cross(self, other):
        assert isinstance(other, Vector3)
        return Vector3(self.y * other.z - self.z * other.y,
                       -self.x * other.z + self.z * other.x,
                       self.x * other.y - self.y * other.x)
    def reflect(self, normal):
        # assume normal is normalized
        assert isinstance(normal, Vector3)
        d = 2 * (self.x * normal.x + self.y * normal.y + self.z * normal.z)
        return Vector3(self.x - d * normal.x,
                       self.y - d * normal.y,
                       self.z - d * normal.z)
# a b c
# e f g
# i j k
class Matrix3:
    """3x3 matrix for 2-D affine transforms.

    Elements are named row-wise a..k (see the layout comment above the
    class); __getitem__/__setitem__ expose them flattened in
    column-major order [a, e, i, b, f, j, c, g, k].  Multiplication
    supports Matrix3 * Matrix3, Matrix3 * Point2 (translation applied)
    and Matrix3 * Vector2 (translation ignored); any other operand is
    copied and given the transform via its _apply_transform() hook.
    """
    __slots__ = list('abcefgijk')
    def __init__(self):
        # New matrices start as the identity.
        self.identity()
    def __copy__(self):
        M = Matrix3()
        M.a = self.a
        M.b = self.b
        M.c = self.c
        M.e = self.e
        M.f = self.f
        M.g = self.g
        M.i = self.i
        M.j = self.j
        M.k = self.k
        return M
    copy = __copy__
    def __repr__(self):
        return ('Matrix3([% 8.2f % 8.2f % 8.2f\n' \
                ' % 8.2f % 8.2f % 8.2f\n' \
                ' % 8.2f % 8.2f % 8.2f])') \
                % (self.a, self.b, self.c,
                   self.e, self.f, self.g,
                   self.i, self.j, self.k)
    def __getitem__(self, key):
        # Column-major flattening.
        return [self.a, self.e, self.i,
                self.b, self.f, self.j,
                self.c, self.g, self.k][key]
    def __setitem__(self, key, value):
        L = self[:]
        L[key] = value
        (self.a, self.e, self.i,
         self.b, self.f, self.j,
         self.c, self.g, self.k) = L
    def __mul__(self, other):
        if isinstance(other, Matrix3):
            # Caching repeatedly accessed attributes in local variables
            # apparently increases performance by 20%. Attrib: Will McGugan.
            Aa = self.a
            Ab = self.b
            Ac = self.c
            Ae = self.e
            Af = self.f
            Ag = self.g
            Ai = self.i
            Aj = self.j
            Ak = self.k
            Ba = other.a
            Bb = other.b
            Bc = other.c
            Be = other.e
            Bf = other.f
            Bg = other.g
            Bi = other.i
            Bj = other.j
            Bk = other.k
            C = Matrix3()
            C.a = Aa * Ba + Ab * Be + Ac * Bi
            C.b = Aa * Bb + Ab * Bf + Ac * Bj
            C.c = Aa * Bc + Ab * Bg + Ac * Bk
            C.e = Ae * Ba + Af * Be + Ag * Bi
            C.f = Ae * Bb + Af * Bf + Ag * Bj
            C.g = Ae * Bc + Af * Bg + Ag * Bk
            C.i = Ai * Ba + Aj * Be + Ak * Bi
            C.j = Ai * Bb + Aj * Bf + Ak * Bj
            C.k = Ai * Bc + Aj * Bg + Ak * Bk
            return C
        elif isinstance(other, Point2):
            # Points pick up the translation column (c, g).
            A = self
            B = other
            P = Point2(0, 0)
            P.x = A.a * B.x + A.b * B.y + A.c
            P.y = A.e * B.x + A.f * B.y + A.g
            return P
        elif isinstance(other, Vector2):
            # Vectors are direction-only: translation is ignored.
            A = self
            B = other
            V = Vector2(0, 0)
            V.x = A.a * B.x + A.b * B.y
            V.y = A.e * B.x + A.f * B.y
            return V
        else:
            other = other.copy()
            other._apply_transform(self)
            return other
    def __imul__(self, other):
        assert isinstance(other, Matrix3)
        # Cache attributes in local vars (see Matrix3.__mul__).
        Aa = self.a
        Ab = self.b
        Ac = self.c
        Ae = self.e
        Af = self.f
        Ag = self.g
        Ai = self.i
        Aj = self.j
        Ak = self.k
        Ba = other.a
        Bb = other.b
        Bc = other.c
        Be = other.e
        Bf = other.f
        Bg = other.g
        Bi = other.i
        Bj = other.j
        Bk = other.k
        self.a = Aa * Ba + Ab * Be + Ac * Bi
        self.b = Aa * Bb + Ab * Bf + Ac * Bj
        self.c = Aa * Bc + Ab * Bg + Ac * Bk
        self.e = Ae * Ba + Af * Be + Ag * Bi
        self.f = Ae * Bb + Af * Bf + Ag * Bj
        self.g = Ae * Bc + Af * Bg + Ag * Bk
        self.i = Ai * Ba + Aj * Be + Ak * Bi
        self.j = Ai * Bb + Aj * Bf + Ak * Bj
        self.k = Ai * Bc + Aj * Bg + Ak * Bk
        return self
    def identity(self):
        # Reset to the identity matrix in place; returns self for chaining.
        self.a = self.f = self.k = 1.
        self.b = self.c = self.e = self.g = self.i = self.j = 0
        return self
    def scale(self, x, y):
        # In-place post-multiply by a scale matrix; returns self.
        self *= Matrix3.new_scale(x, y)
        return self
    def translate(self, x, y):
        # In-place post-multiply by a translation matrix; returns self.
        self *= Matrix3.new_translate(x, y)
        return self
    def rotate(self, angle):
        # In-place post-multiply by a rotation (angle in radians).
        self *= Matrix3.new_rotate(angle)
        return self
    # Static constructors
    def new_identity(cls):
        self = cls()
        return self
    new_identity = classmethod(new_identity)
    def new_scale(cls, x, y):
        self = cls()
        self.a = x
        self.f = y
        return self
    new_scale = classmethod(new_scale)
    def new_translate(cls, x, y):
        self = cls()
        self.c = x
        self.g = y
        return self
    new_translate = classmethod(new_translate)
    def new_rotate(cls, angle):
        # Counter-clockwise rotation by 'angle' radians.
        self = cls()
        s = math.sin(angle)
        c = math.cos(angle)
        self.a = self.f = c
        self.b = -s
        self.e = s
        return self
    new_rotate = classmethod(new_rotate)
# a b c d
# e f g h
# i j k l
# m n o p
class Matrix4:
__slots__ = list('abcdefghijklmnop')
def __init__(self):
self.identity()
def __copy__(self):
M = Matrix4()
M.a = self.a
M.b = self.b
M.c = self.c
M.d = self.d
M.e = self.e
M.f = self.f
M.g = self.g
M.h = self.h
M.i = self.i
M.j = self.j
M.k = self.k
M.l = self.l
M.m = self.m
M.n = self.n
M.o = self.o
M.p = self.p
return M
copy = __copy__
def __repr__(self):
return ('Matrix4([% 8.2f % 8.2f % 8.2f % 8.2f\n' \
' % 8.2f % 8.2f % 8.2f % 8.2f\n' \
' % 8.2f % 8.2f % 8.2f % 8.2f\n' \
' % 8.2f % 8.2f % 8.2f % 8.2f])') \
% (self.a, self.b, self.c, self.d,
self.e, self.f, self.g, self.h,
self.i, self.j, self.k, self.l,
self.m, self.n, self.o, self.p)
def __getitem__(self, key):
return [self.a, self.e, self.i, self.m,
self.b, self.f, self.j, self.n,
self.c, self.g, self.k, self.o,
self.d, self.h, self.l, self.p][key]
def __setitem__(self, key, value):
L = self[:]
L[key] = value
(self.a, self.e, self.i, self.m,
self.b, self.f, self.j, self.n,
self.c, self.g, self.k, self.o,
self.d, self.h, self.l, self.p) = L
def __mul__(self, other):
if isinstance(other, Matrix4):
# Cache attributes in local vars (see Matrix3.__mul__).
Aa = self.a
Ab = self.b
Ac = self.c
Ad = self.d
Ae = self.e
Af = self.f
Ag = self.g
Ah = self.h
Ai = self.i
Aj = self.j
Ak = self.k
Al = self.l
Am = self.m
An = self.n
Ao = self.o
Ap = self.p
Ba = other.a
Bb = other.b
Bc = other.c
Bd = other.d
Be = other.e
Bf = other.f
Bg = other.g
Bh = other.h
Bi = other.i
Bj = other.j
Bk = other.k
Bl = other.l
Bm = other.m
Bn = other.n
Bo = other.o
Bp = other.p
C = Matrix4()
C.a = Aa * Ba + Ab * Be + Ac * Bi + Ad * Bm
C.b = Aa * Bb + Ab * Bf + Ac * Bj + Ad * Bn
C.c = Aa * Bc + Ab * Bg + Ac * Bk + Ad * Bo
C.d = Aa * Bd + Ab * Bh + Ac * Bl + Ad * Bp
C.e = Ae * Ba + Af * Be + Ag * Bi + Ah * Bm
C.f = Ae * Bb + Af * Bf + Ag * Bj + Ah * Bn
C.g = Ae * Bc + Af * Bg + Ag * Bk + Ah * Bo
C.h = Ae * Bd + Af * Bh + Ag * Bl + Ah * Bp
C.i = Ai * Ba + Aj * Be + Ak * Bi + Al * Bm
C.j = Ai * Bb + Aj * Bf + Ak * Bj + Al * Bn
C.k = Ai * Bc + Aj * Bg + Ak * Bk + Al * Bo
C.l = Ai * Bd + Aj * Bh + Ak * Bl + Al * Bp
C.m = Am * Ba + An * Be + Ao * Bi + Ap * Bm
C.n = Am * Bb + An * Bf + Ao * Bj + Ap * Bn
C.o = Am * Bc + An * Bg + Ao * Bk + Ap * Bo
C.p = Am * Bd + An * Bh + Ao * Bl + Ap * Bp
return C
elif isinstance(other, Point3):
A = self
B = other
P = Point3(0, 0, 0)
P.x = A.a * B.x + A.b * B.y + A.c * B.z + A.d
P.y = A.e * B.x + A.f * B.y + A.g * B.z + A.h
P.z = A.i * B.x + A.j * B.y + A.k * B.z + A.l
return P
elif isinstance(other, Vector3):
A = self
B = other
V = Vector3(0, 0, 0)
V.x = A.a * B.x + A.b * B.y + A.c * B.z
V.y = A.e * B.x + A.f * B.y + A.g * B.z
V.z = A.i * B.x + A.j * B.y + A.k * B.z
return V
else:
other = other.copy()
other._apply_transform(self)
return other
def __imul__(self, other):
assert isinstance(other, Matrix4)
# Cache attributes in local vars (see Matrix3.__mul__).
Aa = self.a
Ab = self.b
Ac = self.c
Ad = self.d
Ae = self.e
Af = self.f
Ag = self.g
Ah = self.h
Ai = self.i
Aj = self.j
Ak = self.k
Al = self.l
Am = self.m
An = self.n
Ao = self.o
Ap = self.p
Ba = other.a
Bb = other.b
Bc = other.c
Bd = other.d
Be = other.e
Bf = other.f
Bg = other.g
Bh = other.h
Bi = other.i
Bj = other.j
Bk = other.k
Bl = other.l
Bm = other.m
Bn = other.n
Bo = other.o
Bp = other.p
self.a = Aa * Ba + Ab * Be + Ac * Bi + Ad * Bm
self.b = Aa * Bb + Ab * Bf + Ac * Bj + Ad * Bn
self.c = Aa * Bc + Ab * Bg + Ac * Bk + Ad * Bo
self.d = Aa * Bd + Ab * Bh + Ac * Bl + Ad * Bp
self.e = Ae * Ba + Af * Be + Ag * Bi + Ah * Bm
self.f = Ae * Bb + Af * Bf + Ag * Bj + Ah * Bn
self.g = Ae * Bc + Af * Bg + Ag * Bk + Ah * Bo
self.h = Ae * Bd + Af * Bh + Ag * Bl + Ah * Bp
self.i = Ai * Ba + Aj * Be + Ak * Bi + Al * Bm
self.j = Ai * Bb + Aj * Bf + Ak * Bj + Al * Bn
self.k = Ai * Bc + Aj * Bg + Ak * Bk + Al * Bo
self.l = Ai * Bd + Aj * Bh + Ak * Bl + Al * Bp
self.m = Am * Ba + An * Be + Ao * Bi + Ap * Bm
self.n = Am * Bb + An * Bf + Ao * Bj + Ap * Bn
self.o = Am * Bc + An * Bg + Ao * Bk + Ap * Bo
self.p = Am * Bd + An * Bh + Ao * Bl + Ap * Bp
return self
def transform(self, other):
A = self
B = other
P = Point3(0, 0, 0)
P.x = A.a * B.x + A.b * B.y + A.c * B.z + A.d
P.y = A.e * B.x + A.f * B.y + A.g * B.z + A.h
P.z = A.i * B.x + A.j * B.y + A.k * B.z + A.l
w = A.m * B.x + A.n * B.y + A.o * B.z + A.p
if w != 0:
P.x /= w
P.y /= w
P.z /= w
return P
def identity(self):
self.a = self.f = self.k = self.p = 1.
self.b = self.c = self.d = self.e = self.g = self.h = \
self.i = self.j = self.l = self.m = self.n = self.o = 0
return self
def scale(self, x, y, z):
self *= Matrix4.new_scale(x, y, z)
return self
def translate(self, x, y, z):
self *= Matrix4.new_translate(x, y, z)
return self
def rotatex(self, angle):
self *= Matrix4.new_rotatex(angle)
return self
def rotatey(self, angle):
self *= Matrix4.new_rotatey(angle)
return self
def rotatez(self, angle):
self *= Matrix4.new_rotatez(angle)
return self
def rotate_axis(self, angle, axis):
self *= Matrix4.new_rotate_axis(angle, axis)
return self
def rotate_euler(self, heading, attitude, bank):
self *= Matrix4.new_rotate_euler(heading, attitude, bank)
return self
def rotate_triple_axis(self, x, y, z):
self *= Matrix4.new_rotate_triple_axis(x, y, z)
return self
def transpose(self):
(self.a, self.e, self.i, self.m,
self.b, self.f, self.j, self.n,
self.c, self.g, self.k, self.o,
self.d, self.h, self.l, self.p) = \
(self.a, self.b, self.c, self.d,
self.e, self.f, self.g, self.h,
self.i, self.j, self.k, self.l,
self.m, self.n, self.o, self.p)
def transposed(self):
M = self.copy()
M.transpose()
return M
# Static constructors
def new(cls, *values):
M = cls()
M[:] = values
return M
new = classmethod(new)
def new_identity(cls):
self = cls()
return self
new_identity = classmethod(new_identity)
def new_scale(cls, x, y, z):
self = cls()
self.a = x
self.f = y
self.k = z
return self
new_scale = classmethod(new_scale)
def new_translate(cls, x, y, z):
self = cls()
self.d = x
self.h = y
self.l = z
return self
new_translate = classmethod(new_translate)
def new_rotatex(cls, angle):
self = cls()
s = math.sin(angle)
c = math.cos(angle)
self.f = self.k = c
self.g = -s
self.j = s
return self
new_rotatex = classmethod(new_rotatex)
def new_rotatey(cls, angle):
self = cls()
s = math.sin(angle)
c = math.cos(angle)
self.a = self.k = c
self.c = s
self.i = -s
return self
new_rotatey = classmethod(new_rotatey)
def new_rotatez(cls, angle):
self = cls()
s = math.sin(angle)
c = math.cos(angle)
self.a = self.f = c
self.b = -s
self.e = s
return self
new_rotatez = classmethod(new_rotatez)
def new_rotate_axis(cls, angle, axis):
assert(isinstance(axis, Vector3))
vector = axis.normalized()
x = vector.x
y = vector.y
z = vector.z
self = cls()
s = math.sin(angle)
c = math.cos(angle)
c1 = 1. - c
# from the glRotate man page
self.a = x * x * c1 + c
self.b = x * y * c1 - z * s
self.c = x * z * c1 + y * s
self.e = y * x * c1 + z * s
self.f = y * y * c1 + c
self.g = y * z * c1 - x * s
self.i = x * z * c1 - y * s
self.j = y * z * c1 + x * s
self.k = z * z * c1 + c
return self
new_rotate_axis = classmethod(new_rotate_axis)
def new_rotate_euler(cls, heading, attitude, bank):
# from http://www.euclideanspace.com/
ch = math.cos(heading)
sh = math.sin(heading)
ca = math.cos(attitude)
sa = math.sin(attitude)
cb = math.cos(bank)
sb = math.sin(bank)
self = cls()
self.a = ch * ca
self.b = sh * sb - ch * sa * cb
self.c = ch * sa * sb + sh * cb
self.e = sa
self.f = ca * cb
self.g = -ca * sb
self.i = -sh * ca
self.j = sh * sa * cb + ch * sb
self.k = -sh * sa * sb + ch * cb
return self
new_rotate_euler = classmethod(new_rotate_euler)
def new_rotate_triple_axis(cls, x, y, z):
m = cls()
m.a, m.b, m.c = x.x, y.x, z.x
m.e, m.f, m.g = x.y, y.y, z.y
m.i, m.j, m.k = x.z, y.z, z.z
return m
new_rotate_triple_axis = classmethod(new_rotate_triple_axis)
def new_look_at(cls, eye, at, up):
z = (eye - at).normalized()
x = up.cross(z).normalized()
y = z.cross(x)
m = cls.new_rotate_triple_axis(x, y, z)
m.d, m.h, m.l = eye.x, eye.y, eye.z
return m
new_look_at = classmethod(new_look_at)
def new_perspective(cls, fov_y, aspect, near, far):
# from the gluPerspective man page
f = 1 / math.tan(fov_y / 2)
self = cls()
assert near != 0.0 and near != far
self.a = f / aspect
self.f = f
self.k = (far + near) / (near - far)
self.l = 2 * far * near / (near - far)
self.o = -1
self.p = 0
return self
new_perspective = classmethod(new_perspective)
    def determinant(self):
        """Return the determinant of this 4x4 matrix.

        Expands via products of 2x2 minors (Laplace expansion over pairs
        of rows/columns); cells are the a..p attributes.
        """
        return ((self.a * self.f - self.e * self.b)
              * (self.k * self.p - self.o * self.l)
              - (self.a * self.j - self.i * self.b)
              * (self.g * self.p - self.o * self.h)
              + (self.a * self.n - self.m * self.b)
              * (self.g * self.l - self.k * self.h)
              + (self.e * self.j - self.i * self.f)
              * (self.c * self.p - self.o * self.d)
              - (self.e * self.n - self.m * self.f)
              * (self.c * self.l - self.k * self.d)
              + (self.i * self.n - self.m * self.j)
              * (self.c * self.h - self.g * self.d))
    def inverse(self):
        """Return a new Matrix4 that is the inverse of this one.

        Uses the adjugate/cofactor formula.  If the determinant's
        magnitude is below 0.001 the matrix is treated as singular and
        the identity matrix is returned instead of raising.
        """
        tmp = Matrix4()
        d = self.determinant();
        if abs(d) < 0.001:
            # No inverse, return identity
            return tmp
        else:
            d = 1.0 / d;
            # Each cell is a signed 3x3 cofactor scaled by 1/det.
            tmp.a = d * (self.f * (self.k * self.p - self.o * self.l) + self.j * (self.o * self.h - self.g * self.p) + self.n * (self.g * self.l - self.k * self.h));
            tmp.e = d * (self.g * (self.i * self.p - self.m * self.l) + self.k * (self.m * self.h - self.e * self.p) + self.o * (self.e * self.l - self.i * self.h));
            tmp.i = d * (self.h * (self.i * self.n - self.m * self.j) + self.l * (self.m * self.f - self.e * self.n) + self.p * (self.e * self.j - self.i * self.f));
            tmp.m = d * (self.e * (self.n * self.k - self.j * self.o) + self.i * (self.f * self.o - self.n * self.g) + self.m * (self.j * self.g - self.f * self.k));
            tmp.b = d * (self.j * (self.c * self.p - self.o * self.d) + self.n * (self.k * self.d - self.c * self.l) + self.b * (self.o * self.l - self.k * self.p));
            tmp.f = d * (self.k * (self.a * self.p - self.m * self.d) + self.o * (self.i * self.d - self.a * self.l) + self.c * (self.m * self.l - self.i * self.p));
            tmp.j = d * (self.l * (self.a * self.n - self.m * self.b) + self.p * (self.i * self.b - self.a * self.j) + self.d * (self.m * self.j - self.i * self.n));
            tmp.n = d * (self.i * (self.n * self.c - self.b * self.o) + self.m * (self.b * self.k - self.j * self.c) + self.a * (self.j * self.o - self.n * self.k));
            tmp.c = d * (self.n * (self.c * self.h - self.g * self.d) + self.b * (self.g * self.p - self.o * self.h) + self.f * (self.o * self.d - self.c * self.p));
            tmp.g = d * (self.o * (self.a * self.h - self.e * self.d) + self.c * (self.e * self.p - self.m * self.h) + self.g * (self.m * self.d - self.a * self.p));
            tmp.k = d * (self.p * (self.a * self.f - self.e * self.b) + self.d * (self.e * self.n - self.m * self.f) + self.h * (self.m * self.b - self.a * self.n));
            tmp.o = d * (self.m * (self.f * self.c - self.b * self.g) + self.a * (self.n * self.g - self.f * self.o) + self.e * (self.b * self.o - self.n * self.c));
            tmp.d = d * (self.b * (self.k * self.h - self.g * self.l) + self.f * (self.c * self.l - self.k * self.d) + self.j * (self.g * self.d - self.c * self.h));
            tmp.h = d * (self.c * (self.i * self.h - self.e * self.l) + self.g * (self.a * self.l - self.i * self.d) + self.k * (self.e * self.d - self.a * self.h));
            tmp.l = d * (self.d * (self.i * self.f - self.e * self.j) + self.h * (self.a * self.j - self.i * self.b) + self.l * (self.e * self.b - self.a * self.f));
            tmp.p = d * (self.a * (self.f * self.k - self.j * self.g) + self.e * (self.j * self.c - self.b * self.k) + self.i * (self.b * self.g - self.f * self.c));
        return tmp;
class Quaternion:
    """Unit-quaternion rotation type.

    All methods and naming conventions based off
    http://www.euclideanspace.com/maths/algebra/realNormedAlgebra/quaternions

    w is the real part, (x, y, z) are the imaginary parts.
    """
    __slots__ = ['w', 'x', 'y', 'z']

    def __init__(self, w=1, x=0, y=0, z=0):
        # Defaults produce the identity rotation.
        self.w = w
        self.x = x
        self.y = y
        self.z = z

    def __copy__(self):
        """Return a shallow copy (components are plain numbers)."""
        Q = Quaternion()
        Q.w = self.w
        Q.x = self.x
        Q.y = self.y
        Q.z = self.z
        return Q

    copy = __copy__

    def __repr__(self):
        return 'Quaternion(real=%.2f, imag=<%.2f, %.2f, %.2f>)' % \
            (self.w, self.x, self.y, self.z)

    def __mul__(self, other):
        """Multiply: quaternion composition, vector rotation, or transform.

        * Quaternion * Quaternion -> composed rotation (new Quaternion).
        * Quaternion * Vector3    -> rotated vector (same class as input).
        * Anything else is assumed to expose _apply_transform (e.g. the
          geometry classes below) and is copied then transformed.
        """
        if isinstance(other, Quaternion):
            Ax = self.x
            Ay = self.y
            Az = self.z
            Aw = self.w
            Bx = other.x
            By = other.y
            Bz = other.z
            Bw = other.w
            Q = Quaternion()
            Q.x = Ax * Bw + Ay * Bz - Az * By + Aw * Bx
            Q.y = -Ax * Bz + Ay * Bw + Az * Bx + Aw * By
            Q.z = Ax * By - Ay * Bx + Az * Bw + Aw * Bz
            Q.w = -Ax * Bx - Ay * By - Az * Bz + Aw * Bw
            return Q
        elif isinstance(other, Vector3):
            w = self.w
            x = self.x
            y = self.y
            z = self.z
            Vx = other.x
            Vy = other.y
            Vz = other.z
            # Expanded form of q * v * q⁻¹ with shared sub-products.
            ww = w * w
            w2 = w * 2
            wx2 = w2 * x
            wy2 = w2 * y
            wz2 = w2 * z
            xx = x * x
            x2 = x * 2
            xy2 = x2 * y
            xz2 = x2 * z
            yy = y * y
            yz2 = 2 * y * z
            zz = z * z
            return other.__class__(\
               ww * Vx + wy2 * Vz - wz2 * Vy + \
               xx * Vx + xy2 * Vy + xz2 * Vz - \
               zz * Vx - yy * Vx,
               xy2 * Vx + yy * Vy + yz2 * Vz + \
               wz2 * Vx - zz * Vy + ww * Vy - \
               wx2 * Vz - xx * Vy,
               xz2 * Vx + yz2 * Vy + \
               zz * Vz - wy2 * Vx - yy * Vz + \
               wx2 * Vy - xx * Vz + ww * Vz)
        else:
            other = other.copy()
            other._apply_transform(self)
            return other

    def __imul__(self, other):
        """In-place quaternion composition (only Quaternion allowed)."""
        assert isinstance(other, Quaternion)
        Ax = self.x
        Ay = self.y
        Az = self.z
        Aw = self.w
        Bx = other.x
        By = other.y
        Bz = other.z
        Bw = other.w
        self.x = Ax * Bw + Ay * Bz - Az * By + Aw * Bx
        self.y = -Ax * Bz + Ay * Bw + Az * Bx + Aw * By
        self.z = Ax * By - Ay * Bx + Az * Bw + Aw * Bz
        self.w = -Ax * Bx - Ay * By - Az * Bz + Aw * Bw
        return self

    def __abs__(self):
        """Return the quaternion's Euclidean norm."""
        return math.sqrt(self.w ** 2 + \
                         self.x ** 2 + \
                         self.y ** 2 + \
                         self.z ** 2)

    magnitude = __abs__

    def magnitude_squared(self):
        # Cheaper than magnitude(); avoids the sqrt.
        return self.w ** 2 + \
               self.x ** 2 + \
               self.y ** 2 + \
               self.z ** 2

    def identity(self):
        """Reset this quaternion to the identity rotation, in place."""
        self.w = 1
        self.x = 0
        self.y = 0
        self.z = 0
        return self

    def rotate_axis(self, angle, axis):
        """Compose an axis/angle rotation onto this quaternion, in place."""
        self *= Quaternion.new_rotate_axis(angle, axis)
        return self

    def rotate_euler(self, heading, attitude, bank):
        """Compose an Euler-angle rotation onto this quaternion, in place."""
        self *= Quaternion.new_rotate_euler(heading, attitude, bank)
        return self

    def rotate_matrix(self, m):
        """Compose a rotation extracted from matrix *m*, in place."""
        self *= Quaternion.new_rotate_matrix(m)
        return self

    def conjugated(self):
        """Return a new quaternion with negated imaginary parts."""
        Q = Quaternion()
        Q.w = self.w
        Q.x = -self.x
        Q.y = -self.y
        Q.z = -self.z
        return Q

    def normalize(self):
        """Scale this quaternion to unit length, in place (no-op if zero)."""
        d = self.magnitude()
        if d != 0:
            self.w /= d
            self.x /= d
            self.y /= d
            self.z /= d
        return self

    def normalized(self):
        """Return a unit-length copy (plain copy if magnitude is zero)."""
        d = self.magnitude()
        if d != 0:
            Q = Quaternion()
            Q.w = self.w / d
            Q.x = self.x / d
            Q.y = self.y / d
            Q.z = self.z / d
            return Q
        else:
            return self.copy()

    def get_angle_axis(self):
        """Return (angle, axis) where axis is a unit Vector3.

        Falls back to the X axis when the rotation is (near) identity,
        where the axis is numerically undefined (s < 0.001).
        """
        if self.w > 1:
            self = self.normalized()
        angle = 2 * math.acos(self.w)
        s = math.sqrt(1 - self.w ** 2)
        if s < 0.001:
            return angle, Vector3(1, 0, 0)
        else:
            return angle, Vector3(self.x / s, self.y / s, self.z / s)

    def get_euler(self):
        """Return (heading, attitude, bank) Euler angles in radians.

        The 0.4999 thresholds guard the gimbal-lock singularities at
        attitude = ±pi/2 (euclideanspace.com convention).
        """
        t = self.x * self.y + self.z * self.w
        if t > 0.4999:
            heading = 2 * math.atan2(self.x, self.w)
            attitude = math.pi / 2
            bank = 0
        elif t < -0.4999:
            heading = -2 * math.atan2(self.x, self.w)
            attitude = -math.pi / 2
            bank = 0
        else:
            sqx = self.x ** 2
            sqy = self.y ** 2
            sqz = self.z ** 2
            heading = math.atan2(2 * self.y * self.w - 2 * self.x * self.z,
                                 1 - 2 * sqy - 2 * sqz)
            attitude = math.asin(2 * t)
            bank = math.atan2(2 * self.x * self.w - 2 * self.y * self.z,
                              1 - 2 * sqx - 2 * sqz)
        return heading, attitude, bank

    def get_matrix(self):
        """Return this rotation as a Matrix4 (rotation part only)."""
        xx = self.x ** 2
        xy = self.x * self.y
        xz = self.x * self.z
        xw = self.x * self.w
        yy = self.y ** 2
        yz = self.y * self.z
        yw = self.y * self.w
        zz = self.z ** 2
        zw = self.z * self.w
        M = Matrix4()
        M.a = 1 - 2 * (yy + zz)
        M.b = 2 * (xy - zw)
        M.c = 2 * (xz + yw)
        M.e = 2 * (xy + zw)
        M.f = 1 - 2 * (xx + zz)
        M.g = 2 * (yz - xw)
        M.i = 2 * (xz - yw)
        M.j = 2 * (yz + xw)
        M.k = 1 - 2 * (xx + yy)
        return M

    # Static constructors
    def new_identity(cls):
        """Return the identity quaternion."""
        return cls()
    new_identity = classmethod(new_identity)

    def new_rotate_axis(cls, angle, axis):
        """Return a quaternion rotating *angle* radians about *axis* (Vector3)."""
        assert(isinstance(axis, Vector3))
        axis = axis.normalized()
        s = math.sin(angle / 2)
        Q = cls()
        Q.w = math.cos(angle / 2)
        Q.x = axis.x * s
        Q.y = axis.y * s
        Q.z = axis.z * s
        return Q
    new_rotate_axis = classmethod(new_rotate_axis)

    def new_rotate_euler(cls, heading, attitude, bank):
        """Return a quaternion from Euler angles (radians)."""
        Q = cls()
        c1 = math.cos(heading / 2)
        s1 = math.sin(heading / 2)
        c2 = math.cos(attitude / 2)
        s2 = math.sin(attitude / 2)
        c3 = math.cos(bank / 2)
        s3 = math.sin(bank / 2)
        Q.w = c1 * c2 * c3 - s1 * s2 * s3
        Q.x = s1 * s2 * c3 + c1 * c2 * s3
        Q.y = s1 * c2 * c3 + c1 * s2 * s3
        Q.z = c1 * s2 * c3 - s1 * c2 * s3
        return Q
    new_rotate_euler = classmethod(new_rotate_euler)

    def new_rotate_matrix(cls, m):
        """Return a quaternion from a rotation matrix *m*.

        *m* is indexed flat as m[row*4 + col]; the four branches pick the
        numerically largest diagonal combination to keep sqrt(t) stable.
        """
        if m[0*4 + 0] + m[1*4 + 1] + m[2*4 + 2] > 0.00000001:
            t = m[0*4 + 0] + m[1*4 + 1] + m[2*4 + 2] + 1.0
            s = 0.5/math.sqrt(t)
            return cls(
                s*t,
                (m[1*4 + 2] - m[2*4 + 1])*s,
                (m[2*4 + 0] - m[0*4 + 2])*s,
                (m[0*4 + 1] - m[1*4 + 0])*s
                )
        elif m[0*4 + 0] > m[1*4 + 1] and m[0*4 + 0] > m[2*4 + 2]:
            t = m[0*4 + 0] - m[1*4 + 1] - m[2*4 + 2] + 1.0
            s = 0.5/math.sqrt(t)
            return cls(
                (m[1*4 + 2] - m[2*4 + 1])*s,
                s*t,
                (m[0*4 + 1] + m[1*4 + 0])*s,
                (m[2*4 + 0] + m[0*4 + 2])*s
                )
        elif m[1*4 + 1] > m[2*4 + 2]:
            t = -m[0*4 + 0] + m[1*4 + 1] - m[2*4 + 2] + 1.0
            s = 0.5/math.sqrt(t)
            return cls(
                (m[2*4 + 0] - m[0*4 + 2])*s,
                (m[0*4 + 1] + m[1*4 + 0])*s,
                s*t,
                (m[1*4 + 2] + m[2*4 + 1])*s
                )
        else:
            t = -m[0*4 + 0] - m[1*4 + 1] + m[2*4 + 2] + 1.0
            s = 0.5/math.sqrt(t)
            return cls(
                (m[0*4 + 1] - m[1*4 + 0])*s,
                (m[2*4 + 0] + m[0*4 + 2])*s,
                (m[1*4 + 2] + m[2*4 + 1])*s,
                s*t
                )
    new_rotate_matrix = classmethod(new_rotate_matrix)

    def new_interpolate(cls, q1, q2, t):
        """Spherical linear interpolation (slerp) between q1 and q2 at t.

        Takes the shorter arc (flips q1 when the dot product is negative)
        and falls back to endpoint copy / linear blend when the angle
        between the quaternions is too small for a stable slerp.
        """
        assert isinstance(q1, Quaternion) and isinstance(q2, Quaternion)
        Q = cls()
        costheta = q1.w * q2.w + q1.x * q2.x + q1.y * q2.y + q1.z * q2.z
        if costheta < 0.:
            costheta = -costheta
            q1 = q1.conjugated()
        elif costheta > 1:
            costheta = 1
        theta = math.acos(costheta)
        if abs(theta) < 0.01:
            Q.w = q2.w
            Q.x = q2.x
            Q.y = q2.y
            Q.z = q2.z
            return Q
        sintheta = math.sqrt(1.0 - costheta * costheta)
        if abs(sintheta) < 0.01:
            Q.w = (q1.w + q2.w) * 0.5
            Q.x = (q1.x + q2.x) * 0.5
            Q.y = (q1.y + q2.y) * 0.5
            Q.z = (q1.z + q2.z) * 0.5
            return Q
        ratio1 = math.sin((1 - t) * theta) / sintheta
        ratio2 = math.sin(t * theta) / sintheta
        Q.w = q1.w * ratio1 + q2.w * ratio2
        Q.x = q1.x * ratio1 + q2.x * ratio2
        Q.y = q1.y * ratio1 + q2.y * ratio2
        Q.z = q1.z * ratio1 + q2.z * ratio2
        return Q
    new_interpolate = classmethod(new_interpolate)
# Geometry
# Much maths thanks to Paul Bourke, http://astronomy.swin.edu.au/~pbourke
# ---------------------------------------------------------------------------
class Geometry:
    """Base class for all 2D/3D shapes; implements double-dispatch stubs.

    Subclasses override intersect()/connect() to dispatch on their own
    type, and override the _intersect_*/_connect_* hooks they support.
    Unsupported pairings fall through to the *_unimplemented stubs,
    which raise AttributeError naming both classes.
    """
    def _connect_unimplemented(self, other):
        raise AttributeError, 'Cannot connect %s to %s' % \
            (self.__class__, other.__class__)

    def _intersect_unimplemented(self, other):
        raise AttributeError, 'Cannot intersect %s and %s' % \
            (self.__class__, other.__class__)

    # Every pairing defaults to "unimplemented"; subclasses override.
    _intersect_point2 = _intersect_unimplemented
    _intersect_line2 = _intersect_unimplemented
    _intersect_circle = _intersect_unimplemented
    _connect_point2 = _connect_unimplemented
    _connect_line2 = _connect_unimplemented
    _connect_circle = _connect_unimplemented
    _intersect_point3 = _intersect_unimplemented
    _intersect_line3 = _intersect_unimplemented
    _intersect_sphere = _intersect_unimplemented
    _intersect_plane = _intersect_unimplemented
    _connect_point3 = _connect_unimplemented
    _connect_line3 = _connect_unimplemented
    _connect_sphere = _connect_unimplemented
    _connect_plane = _connect_unimplemented

    def intersect(self, other):
        raise NotImplementedError

    def connect(self, other):
        raise NotImplementedError

    def distance(self, other):
        """Length of the shortest connecting segment; 0.0 when touching."""
        c = self.connect(other)
        if c:
            return c.length
        return 0.0
def _intersect_point2_circle(P, C):
    """Return True when point P lies on or inside circle C."""
    distance = abs(P - C.c)
    return distance <= C.r
def _intersect_line2_line2(A, B):
    """Intersect two Line2-like objects; return a Point2 or None.

    d is the 2D cross product of the direction vectors; zero means
    parallel.  Each parameter (ua for A, ub for B) is validated against
    that object's _u_in(), so rays and segments reject out-of-range hits.
    """
    d = B.v.y * A.v.x - B.v.x * A.v.y
    if d == 0:
        return None
    dy = A.p.y - B.p.y
    dx = A.p.x - B.p.x
    ua = (B.v.x * dy - B.v.y * dx) / d
    if not A._u_in(ua):
        return None
    ub = (A.v.x * dy - A.v.y * dx) / d
    if not B._u_in(ub):
        return None
    return Point2(A.p.x + ua * A.v.x,
                  A.p.y + ua * A.v.y)
def _intersect_line2_circle(L, C):
    """Intersect a Line2-like object with a circle.

    Solves the quadratic |L.p + u*L.v - C.c|^2 = r^2.  Returns None when
    there is no real solution, a Point2 for a tangent hit, otherwise the
    chord as a LineSegment2.  Parameters falling outside the object's
    valid u-range are clamped to [0, 1] rather than rejected.
    """
    a = L.v.magnitude_squared()
    b = 2 * (L.v.x * (L.p.x - C.c.x) + \
             L.v.y * (L.p.y - C.c.y))
    c = C.c.magnitude_squared() + \
        L.p.magnitude_squared() - \
        2 * C.c.dot(L.p) - \
        C.r ** 2
    det = b ** 2 - 4 * a * c
    if det < 0:
        return None
    sq = math.sqrt(det)
    u1 = (-b + sq) / (2 * a)
    u2 = (-b - sq) / (2 * a)
    if not L._u_in(u1):
        u1 = max(min(u1, 1.0), 0.0)
    if not L._u_in(u2):
        u2 = max(min(u2, 1.0), 0.0)
    # Tangent
    if u1 == u2:
        return Point2(L.p.x + u1 * L.v.x,
                      L.p.y + u1 * L.v.y)
    return LineSegment2(Point2(L.p.x + u1 * L.v.x,
                               L.p.y + u1 * L.v.y),
                        Point2(L.p.x + u2 * L.v.x,
                               L.p.y + u2 * L.v.y))
def _connect_point2_line2(P, L):
    """Return the shortest LineSegment2 from point P to line/ray/segment L.

    u is the projection parameter of P onto L, clamped to [0, 1] when it
    falls outside L's valid range (ray/segment endpoints).
    """
    d = L.v.magnitude_squared()
    assert d != 0
    u = ((P.x - L.p.x) * L.v.x + \
         (P.y - L.p.y) * L.v.y) / d
    if not L._u_in(u):
        u = max(min(u, 1.0), 0.0)
    return LineSegment2(P,
                        Point2(L.p.x + u * L.v.x,
                               L.p.y + u * L.v.y))
def _connect_point2_circle(P, C):
    """Return the LineSegment2 from P to the nearest point on circle C.

    The endpoint is the circle-surface point along the center-to-P ray.
    """
    v = P - C.c
    v.normalize()
    v *= C.r
    return LineSegment2(P, Point2(C.c.x + v.x, C.c.y + v.y))
def _connect_line2_line2(A, B):
    """Return the shortest LineSegment2 connecting two Line2-like objects.

    Parallel inputs are connected through an endpoint; otherwise the
    closest-approach parameters are computed and clamped to each
    object's valid u-range.
    """
    d = B.v.y * A.v.x - B.v.x * A.v.y
    if d == 0:
        # Parallel, connect an endpoint with a line
        if isinstance(B, Ray2) or isinstance(B, LineSegment2):
            # BUG FIX: the original unpacked the LineSegment2 returned by
            # _connect_point2_line2 into two names (LineSegment2 defines no
            # __iter__, so that raised TypeError) and would have returned a
            # tuple instead of a segment.  Use _swap() to orient the result
            # from A to B, exactly as the 3D twin _connect_line3_line3 does.
            return _connect_point2_line2(B.p, A)._swap()
        # No endpoint (or endpoint is on A), possibly choose arbitrary point
        # on line.
        return _connect_point2_line2(A.p, B)
    dy = A.p.y - B.p.y
    dx = A.p.x - B.p.x
    ua = (B.v.x * dy - B.v.y * dx) / d
    if not A._u_in(ua):
        ua = max(min(ua, 1.0), 0.0)
    ub = (A.v.x * dy - A.v.y * dx) / d
    if not B._u_in(ub):
        ub = max(min(ub, 1.0), 0.0)
    return LineSegment2(Point2(A.p.x + ua * A.v.x, A.p.y + ua * A.v.y),
                        Point2(B.p.x + ub * B.v.x, B.p.y + ub * B.v.y))
def _connect_circle_line2(C, L):
    """Return the shortest LineSegment2 from circle C's surface to L.

    Projects the circle's center onto L (clamped), then walks from the
    center toward that projection by one radius to find the surface point.
    """
    d = L.v.magnitude_squared()
    assert d != 0
    u = ((C.c.x - L.p.x) * L.v.x + (C.c.y - L.p.y) * L.v.y) / d
    if not L._u_in(u):
        u = max(min(u, 1.0), 0.0)
    point = Point2(L.p.x + u * L.v.x, L.p.y + u * L.v.y)
    v = (point - C.c)
    v.normalize()
    v *= C.r
    return LineSegment2(Point2(C.c.x + v.x, C.c.y + v.y), point)
def _connect_circle_circle(A, B):
    """Return the LineSegment2 joining the nearest surface points of A and B.

    Walks one radius out of A toward B, and one radius back out of B
    toward A, along the unit center-to-center direction.
    """
    v = B.c - A.c
    v.normalize()
    return LineSegment2(Point2(A.c.x + v.x * A.r, A.c.y + v.y * A.r),
                        Point2(B.c.x - v.x * B.r, B.c.y - v.y * B.r))
class Point2(Vector2, Geometry):
    """A 2D point: a Vector2 that also speaks the Geometry protocol."""

    def __repr__(self):
        return 'Point2(%.2f, %.2f)' % (self.x, self.y)

    def intersect(self, other):
        # Double dispatch: the other shape selects the concrete routine.
        return other._intersect_point2(self)

    def _intersect_circle(self, other):
        return _intersect_point2_circle(self, other)

    def connect(self, other):
        return other._connect_point2(self)

    def _connect_point2(self, other):
        return LineSegment2(other, self)

    def _connect_line2(self, other):
        # Helper returns a segment from self to the line; reverse it so
        # the segment runs from the dispatching shape toward self.
        segment = _connect_point2_line2(self, other)
        return segment._swap() if segment else None

    def _connect_circle(self, other):
        segment = _connect_point2_circle(self, other)
        return segment._swap() if segment else None
class Line2(Geometry):
    """An infinite 2D line through point p with direction vector v.

    Constructor accepts:
      * (Point2, Vector2, float)  -- point, direction, explicit length
      * (Point2, Point2)          -- two points
      * (Point2, Vector2)         -- point and direction
      * (Line2,)                  -- copy constructor
    """
    __slots__ = ['p', 'v']

    def __init__(self, *args):
        if len(args) == 3:
            assert isinstance(args[0], Point2) and \
                   isinstance(args[1], Vector2) and \
                   type(args[2]) == float
            self.p = args[0].copy()
            # Rescale the direction to the requested length.
            self.v = args[1] * args[2] / abs(args[1])
        elif len(args) == 2:
            if isinstance(args[0], Point2) and isinstance(args[1], Point2):
                self.p = args[0].copy()
                self.v = args[1] - args[0]
            elif isinstance(args[0], Point2) and isinstance(args[1], Vector2):
                self.p = args[0].copy()
                self.v = args[1].copy()
            else:
                raise AttributeError, '%r' % (args,)
        elif len(args) == 1:
            if isinstance(args[0], Line2):
                self.p = args[0].p.copy()
                self.v = args[0].v.copy()
            else:
                raise AttributeError, '%r' % (args,)
        else:
            raise AttributeError, '%r' % (args,)
        if not self.v:
            raise AttributeError, 'Line has zero-length vector'

    def __copy__(self):
        return self.__class__(self.p, self.v)

    copy = __copy__

    def __repr__(self):
        return 'Line2(<%.2f, %.2f> + u<%.2f, %.2f>)' % \
            (self.p.x, self.p.y, self.v.x, self.v.y)

    # Endpoints as properties: p1 is the anchor, p2 = p + v.
    p1 = property(lambda self: self.p)
    p2 = property(lambda self: Point2(self.p.x + self.v.x,
                                      self.p.y + self.v.y))

    def _apply_transform(self, t):
        self.p = t * self.p
        self.v = t * self.v

    def _u_in(self, u):
        # An infinite line accepts any parameter; Ray2/LineSegment2 restrict.
        return True

    def intersect(self, other):
        return other._intersect_line2(self)

    def _intersect_line2(self, other):
        return _intersect_line2_line2(self, other)

    def _intersect_circle(self, other):
        return _intersect_line2_circle(self, other)

    def connect(self, other):
        return other._connect_line2(self)

    def _connect_point2(self, other):
        return _connect_point2_line2(other, self)

    def _connect_line2(self, other):
        return _connect_line2_line2(other, self)

    def _connect_circle(self, other):
        return _connect_circle_line2(other, self)
class Ray2(Line2):
    """A Line2 anchored at p and extending only along +v (u >= 0)."""

    def __repr__(self):
        return 'Ray2(<%.2f, %.2f> + u<%.2f, %.2f>)' % \
            (self.p.x, self.p.y, self.v.x, self.v.y)

    def _u_in(self, u):
        # Only the non-negative half of the parameter line lies on the ray.
        return 0.0 <= u
class LineSegment2(Line2):
    """A Line2 restricted to the parameter interval 0 <= u <= 1."""

    def __repr__(self):
        return 'LineSegment2(<%.2f, %.2f> to <%.2f, %.2f>)' % \
            (self.p.x, self.p.y, self.p.x + self.v.x, self.p.y + self.v.y)

    def _u_in(self, u):
        return 0.0 <= u <= 1.0

    def __abs__(self):
        return abs(self.v)

    def magnitude_squared(self):
        return self.v.magnitude_squared()

    def _swap(self):
        # used by connect methods to switch order of points:
        # re-anchor at the far endpoint and flip the direction vector.
        self.p = self.p2
        self.v *= -1
        return self

    @property
    def length(self):
        return abs(self.v)
class Circle(Geometry):
    """A circle defined by a center point c and radius r (float required)."""
    __slots__ = ['c', 'r']

    def __init__(self, center, radius):
        assert isinstance(center, Vector2) and type(radius) == float
        self.c = center.copy()
        self.r = radius

    def __copy__(self):
        return self.__class__(self.c, self.r)

    copy = __copy__

    def __repr__(self):
        return 'Circle(<%.2f, %.2f>, radius=%.2f)' % \
            (self.c.x, self.c.y, self.r)

    def _apply_transform(self, t):
        # Only the center moves; the radius is not scaled by transforms.
        self.c = t * self.c

    def intersect(self, other):
        return other._intersect_circle(self)

    def _intersect_point2(self, other):
        return _intersect_point2_circle(other, self)

    def _intersect_line2(self, other):
        return _intersect_line2_circle(other, self)

    def connect(self, other):
        return other._connect_circle(self)

    def _connect_point2(self, other):
        return _connect_point2_circle(other, self)

    def _connect_line2(self, other):
        # Helper returns line->circle; swap so the segment starts at self.
        c = _connect_circle_line2(self, other)
        if c:
            return c._swap()

    def _connect_circle(self, other):
        return _connect_circle_circle(other, self)
# 3D Geometry
# -------------------------------------------------------------------------
def _connect_point3_line3(P, L):
    """Return the shortest LineSegment3 from point P to line/ray/segment L.

    u is the projection parameter of P onto L, clamped to [0, 1] when it
    falls outside L's valid range.
    """
    d = L.v.magnitude_squared()
    assert d != 0
    u = ((P.x - L.p.x) * L.v.x + \
         (P.y - L.p.y) * L.v.y + \
         (P.z - L.p.z) * L.v.z) / d
    if not L._u_in(u):
        u = max(min(u, 1.0), 0.0)
    return LineSegment3(P, Point3(L.p.x + u * L.v.x,
                                  L.p.y + u * L.v.y,
                                  L.p.z + u * L.v.z))
def _connect_point3_sphere(P, S):
    """Return the LineSegment3 from P to the nearest point on sphere S."""
    v = P - S.c
    v.normalize()
    v *= S.r
    return LineSegment3(P, Point3(S.c.x + v.x, S.c.y + v.y, S.c.z + v.z))
def _connect_point3_plane(p, plane):
    """Return the perpendicular LineSegment3 from point p to the plane.

    d is p's signed distance from the plane (n.p = k form); subtracting
    d along the unit normal drops p onto the plane.
    """
    n = plane.n.normalized()
    d = p.dot(plane.n) - plane.k
    return LineSegment3(p, Point3(p.x - n.x * d, p.y - n.y * d, p.z - n.z * d))
def _connect_line3_line3(A, B):
    """Return the shortest LineSegment3 connecting two Line3-like objects.

    Closest-approach formulation (Paul Bourke); denom == 0 means the
    lines are parallel, in which case an endpoint connection is used.
    Parameters are clamped to each object's valid u-range.
    """
    assert A.v and B.v
    p13 = A.p - B.p
    d1343 = p13.dot(B.v)
    d4321 = B.v.dot(A.v)
    d1321 = p13.dot(A.v)
    d4343 = B.v.magnitude_squared()
    denom = A.v.magnitude_squared() * d4343 - d4321 ** 2
    if denom == 0:
        # Parallel, connect an endpoint with a line
        if isinstance(B, Ray3) or isinstance(B, LineSegment3):
            return _connect_point3_line3(B.p, A)._swap()
        # No endpoint (or endpoint is on A), possibly choose arbitrary
        # point on line.
        return _connect_point3_line3(A.p, B)
    ua = (d1343 * d4321 - d1321 * d4343) / denom
    if not A._u_in(ua):
        ua = max(min(ua, 1.0), 0.0)
    ub = (d1343 + d4321 * ua) / d4343
    if not B._u_in(ub):
        ub = max(min(ub, 1.0), 0.0)
    return LineSegment3(Point3(A.p.x + ua * A.v.x,
                               A.p.y + ua * A.v.y,
                               A.p.z + ua * A.v.z),
                        Point3(B.p.x + ub * B.v.x,
                               B.p.y + ub * B.v.y,
                               B.p.z + ub * B.v.z))
def _connect_line3_plane(L, P):
    """Return the shortest LineSegment3 from line L to plane P, or None.

    None means L actually intersects the plane within its valid u-range
    (there is nothing to connect).
    """
    d = P.n.dot(L.v)
    if not d:
        # Parallel, choose an endpoint
        return _connect_point3_plane(L.p, P)
    u = (P.k - P.n.dot(L.p)) / d
    if not L._u_in(u):
        # intersects out of range, choose nearest endpoint
        u = max(min(u, 1.0), 0.0)
        return _connect_point3_plane(Point3(L.p.x + u * L.v.x,
                                            L.p.y + u * L.v.y,
                                            L.p.z + u * L.v.z), P)
    # Intersection
    return None
def _connect_sphere_line3(S, L):
    """Return the shortest LineSegment3 from sphere S's surface to L.

    Projects the sphere's center onto L (clamped), then walks one radius
    from the center toward that projection to find the surface point.
    """
    d = L.v.magnitude_squared()
    assert d != 0
    u = ((S.c.x - L.p.x) * L.v.x + \
         (S.c.y - L.p.y) * L.v.y + \
         (S.c.z - L.p.z) * L.v.z) / d
    if not L._u_in(u):
        u = max(min(u, 1.0), 0.0)
    point = Point3(L.p.x + u * L.v.x, L.p.y + u * L.v.y, L.p.z + u * L.v.z)
    v = (point - S.c)
    v.normalize()
    v *= S.r
    return LineSegment3(Point3(S.c.x + v.x, S.c.y + v.y, S.c.z + v.z),
                        point)
def _connect_sphere_sphere(A, B):
    """Return the LineSegment3 joining the nearest surface points of A and B.

    Walks one radius out of A toward B, and one radius back out of B
    toward A, along the unit center-to-center direction -- the direct 3D
    analogue of _connect_circle_circle.
    """
    v = B.c - A.c
    v.normalize()
    # BUG FIX: the original built both z coordinates from the centers'
    # .x attribute (copy/paste from the x line), and walked *into*
    # sphere B with "+ v * B.r" instead of back onto its near surface.
    # Mirror the 2D version: A.c + v*A.r  and  B.c - v*B.r.
    return LineSegment3(Point3(A.c.x + v.x * A.r,
                               A.c.y + v.y * A.r,
                               A.c.z + v.z * A.r),
                        Point3(B.c.x - v.x * B.r,
                               B.c.y - v.y * B.r,
                               B.c.z - v.z * B.r))
def _connect_sphere_plane(S, P):
    """Return the shortest LineSegment3 from sphere S's surface to plane P.

    Drops the center onto the plane, then walks one radius from the
    center toward that foot point to find the surface endpoint.
    """
    c = _connect_point3_plane(S.c, P)
    if not c:
        return None
    p2 = c.p2
    v = p2 - S.c
    v.normalize()
    v *= S.r
    return LineSegment3(Point3(S.c.x + v.x, S.c.y + v.y, S.c.z + v.z),
                        p2)
def _connect_plane_plane(A, B):
    """Connect two planes; None when they intersect (nothing to connect).

    A non-zero normal cross product means the planes meet in a line.
    """
    if A.n.cross(B.n):
        # Planes intersect
        return None
    else:
        # Planes are parallel, connect to arbitrary point
        return _connect_point3_plane(A._get_point(), B)
def _intersect_point3_sphere(P, S):
    """Return True when point P lies on or inside sphere S."""
    distance = abs(P - S.c)
    return distance <= S.r
def _intersect_line3_sphere(L, S):
    """Intersect a Line3-like object with a sphere.

    Solves |L.p + u*L.v - S.c|^2 = r^2; returns None for a miss,
    otherwise the chord as a LineSegment3 (degenerate when tangent).
    Out-of-range parameters are clamped to [0, 1].
    """
    a = L.v.magnitude_squared()
    b = 2 * (L.v.x * (L.p.x - S.c.x) + \
             L.v.y * (L.p.y - S.c.y) + \
             L.v.z * (L.p.z - S.c.z))
    c = S.c.magnitude_squared() + \
        L.p.magnitude_squared() - \
        2 * S.c.dot(L.p) - \
        S.r ** 2
    det = b ** 2 - 4 * a * c
    if det < 0:
        return None
    sq = math.sqrt(det)
    u1 = (-b + sq) / (2 * a)
    u2 = (-b - sq) / (2 * a)
    if not L._u_in(u1):
        u1 = max(min(u1, 1.0), 0.0)
    if not L._u_in(u2):
        u2 = max(min(u2, 1.0), 0.0)
    return LineSegment3(Point3(L.p.x + u1 * L.v.x,
                               L.p.y + u1 * L.v.y,
                               L.p.z + u1 * L.v.z),
                        Point3(L.p.x + u2 * L.v.x,
                               L.p.y + u2 * L.v.y,
                               L.p.z + u2 * L.v.z))
def _intersect_line3_plane(L, P):
    """Intersect a Line3-like object with plane P; Point3 or None.

    d == 0 means the line is parallel to the plane; an out-of-range u
    means the hit is beyond a ray/segment endpoint.
    """
    d = P.n.dot(L.v)
    if not d:
        # Parallel
        return None
    u = (P.k - P.n.dot(L.p)) / d
    if not L._u_in(u):
        return None
    return Point3(L.p.x + u * L.v.x,
                  L.p.y + u * L.v.y,
                  L.p.z + u * L.v.z)
def _intersect_plane_plane(A, B):
    """Intersect two planes; returns their common Line3, or None if parallel.

    The returned line's point is a linear combination of the two normals
    (c1, c2 solve the two plane equations); its direction is n1 x n2.
    """
    n1_m = A.n.magnitude_squared()
    n2_m = B.n.magnitude_squared()
    n1d2 = A.n.dot(B.n)
    det = n1_m * n2_m - n1d2 ** 2
    if det == 0:
        # Parallel
        return None
    c1 = (A.k * n2_m - B.k * n1d2) / det
    c2 = (B.k * n1_m - A.k * n1d2) / det
    return Line3(Point3(c1 * A.n.x + c2 * B.n.x,
                        c1 * A.n.y + c2 * B.n.y,
                        c1 * A.n.z + c2 * B.n.z),
                 A.n.cross(B.n))
class Point3(Vector3, Geometry):
    """A 3D point: a Vector3 that also speaks the Geometry protocol."""

    def __repr__(self):
        return 'Point3(%.2f, %.2f, %.2f)' % (self.x, self.y, self.z)

    def intersect(self, other):
        # Double dispatch: the other shape selects the concrete routine.
        return other._intersect_point3(self)

    def _intersect_sphere(self, other):
        return _intersect_point3_sphere(self, other)

    def connect(self, other):
        return other._connect_point3(self)

    def _connect_point3(self, other):
        # Coincident points have no connecting segment.
        if self == other:
            return None
        return LineSegment3(other, self)

    def _connect_line3(self, other):
        segment = _connect_point3_line3(self, other)
        return segment._swap() if segment else None

    def _connect_sphere(self, other):
        segment = _connect_point3_sphere(self, other)
        return segment._swap() if segment else None

    def _connect_plane(self, other):
        segment = _connect_point3_plane(self, other)
        return segment._swap() if segment else None
class Line3:
    """An infinite 3D line through point p with direction vector v.

    Constructor accepts the same argument shapes as Line2:
      * (Point3, Vector3, float)  -- point, direction, explicit length
      * (Point3, Point3)          -- two points
      * (Point3, Vector3)         -- point and direction
      * (Line3,)                  -- copy constructor

    NOTE(review): unlike Line2, this class does not inherit Geometry, so
    it lacks distance(); confirm whether that is intentional upstream.
    """
    __slots__ = ['p', 'v']

    def __init__(self, *args):
        if len(args) == 3:
            assert isinstance(args[0], Point3) and \
                   isinstance(args[1], Vector3) and \
                   type(args[2]) == float
            self.p = args[0].copy()
            self.v = args[1] * args[2] / abs(args[1])
        elif len(args) == 2:
            if isinstance(args[0], Point3) and isinstance(args[1], Point3):
                self.p = args[0].copy()
                self.v = args[1] - args[0]
            elif isinstance(args[0], Point3) and isinstance(args[1], Vector3):
                self.p = args[0].copy()
                self.v = args[1].copy()
            else:
                raise AttributeError, '%r' % (args,)
        elif len(args) == 1:
            if isinstance(args[0], Line3):
                self.p = args[0].p.copy()
                self.v = args[0].v.copy()
            else:
                raise AttributeError, '%r' % (args,)
        else:
            raise AttributeError, '%r' % (args,)
        # XXX This is annoying.
        #if not self.v:
        #    raise AttributeError, 'Line has zero-length vector'

    def __copy__(self):
        return self.__class__(self.p, self.v)

    copy = __copy__

    def __repr__(self):
        return 'Line3(<%.2f, %.2f, %.2f> + u<%.2f, %.2f, %.2f>)' % \
            (self.p.x, self.p.y, self.p.z, self.v.x, self.v.y, self.v.z)

    # Endpoints as properties: p1 is the anchor, p2 = p + v.
    p1 = property(lambda self: self.p)
    p2 = property(lambda self: Point3(self.p.x + self.v.x,
                                      self.p.y + self.v.y,
                                      self.p.z + self.v.z))

    def _apply_transform(self, t):
        self.p = t * self.p
        self.v = t * self.v

    def _u_in(self, u):
        # An infinite line accepts any parameter; Ray3/LineSegment3 restrict.
        return True

    def intersect(self, other):
        return other._intersect_line3(self)

    def _intersect_sphere(self, other):
        return _intersect_line3_sphere(self, other)

    def _intersect_plane(self, other):
        return _intersect_line3_plane(self, other)

    def connect(self, other):
        return other._connect_line3(self)

    def _connect_point3(self, other):
        return _connect_point3_line3(other, self)

    def _connect_line3(self, other):
        return _connect_line3_line3(other, self)

    def _connect_sphere(self, other):
        return _connect_sphere_line3(other, self)

    def _connect_plane(self, other):
        c = _connect_line3_plane(self, other)
        if c:
            return c
class Ray3(Line3):
    """A Line3 anchored at p and extending only along +v (u >= 0)."""

    def __repr__(self):
        return 'Ray3(<%.2f, %.2f, %.2f> + u<%.2f, %.2f, %.2f>)' % \
            (self.p.x, self.p.y, self.p.z, self.v.x, self.v.y, self.v.z)

    def _u_in(self, u):
        # Only the non-negative half of the parameter line lies on the ray.
        return 0.0 <= u
class LineSegment3(Line3):
    """A Line3 restricted to the parameter interval 0 <= u <= 1."""

    def __repr__(self):
        return 'LineSegment3(<%.2f, %.2f, %.2f> to <%.2f, %.2f, %.2f>)' % \
            (self.p.x, self.p.y, self.p.z,
             self.p.x + self.v.x, self.p.y + self.v.y, self.p.z + self.v.z)

    def _u_in(self, u):
        return 0.0 <= u <= 1.0

    def __abs__(self):
        return abs(self.v)

    def magnitude_squared(self):
        return self.v.magnitude_squared()

    def _swap(self):
        # used by connect methods to switch order of points:
        # re-anchor at the far endpoint and flip the direction vector.
        self.p = self.p2
        self.v *= -1
        return self

    @property
    def length(self):
        return abs(self.v)
class Sphere:
    """A sphere defined by a center point c and radius r (float required).

    NOTE(review): does not inherit Geometry (Circle does); it therefore
    lacks the shared distance() helper -- confirm against upstream.
    """
    __slots__ = ['c', 'r']

    def __init__(self, center, radius):
        assert isinstance(center, Vector3) and type(radius) == float
        self.c = center.copy()
        self.r = radius

    def __copy__(self):
        return self.__class__(self.c, self.r)

    copy = __copy__

    def __repr__(self):
        return 'Sphere(<%.2f, %.2f, %.2f>, radius=%.2f)' % \
            (self.c.x, self.c.y, self.c.z, self.r)

    def _apply_transform(self, t):
        # Only the center moves; the radius is not scaled by transforms.
        self.c = t * self.c

    def intersect(self, other):
        return other._intersect_sphere(self)

    def _intersect_point3(self, other):
        return _intersect_point3_sphere(other, self)

    def _intersect_line3(self, other):
        return _intersect_line3_sphere(other, self)

    def connect(self, other):
        return other._connect_sphere(self)

    def _connect_point3(self, other):
        return _connect_point3_sphere(other, self)

    def _connect_line3(self, other):
        # Helper returns sphere->line; swap so the segment starts at other.
        c = _connect_sphere_line3(self, other)
        if c:
            return c._swap()

    def _connect_sphere(self, other):
        return _connect_sphere_sphere(other, self)

    def _connect_plane(self, other):
        c = _connect_sphere_plane(self, other)
        if c:
            return c
class Plane:
    """An infinite plane in normal form: n.p = k.

    n is the (unit) normal, k the constant scalar.  Constructor accepts:
      * (Point3, Point3, Point3) -- three non-colinear points
      * (Point3, Vector3)        -- a point on the plane and the normal
      * (Vector3, float)         -- the normal and the constant k
    """
    # n.p = k, where n is normal, p is point on plane, k is constant scalar
    __slots__ = ['n', 'k']

    def __init__(self, *args):
        if len(args) == 3:
            assert isinstance(args[0], Point3) and \
                   isinstance(args[1], Point3) and \
                   isinstance(args[2], Point3)
            self.n = (args[1] - args[0]).cross(args[2] - args[0])
            self.n.normalize()
            self.k = self.n.dot(args[0])
        elif len(args) == 2:
            if isinstance(args[0], Point3) and isinstance(args[1], Vector3):
                self.n = args[1].normalized()
                self.k = self.n.dot(args[0])
            elif isinstance(args[0], Vector3) and type(args[1]) == float:
                self.n = args[0].normalized()
                self.k = args[1]
            else:
                raise AttributeError, '%r' % (args,)
        else:
            raise AttributeError, '%r' % (args,)
        if not self.n:
            raise AttributeError, 'Points on plane are colinear'

    def __copy__(self):
        return self.__class__(self.n, self.k)

    copy = __copy__

    def __repr__(self):
        return 'Plane(<%.2f, %.2f, %.2f>.p = %.2f)' % \
            (self.n.x, self.n.y, self.n.z, self.k)

    def _get_point(self):
        # Return an arbitrary point on the plane
        if self.n.z:
            return Point3(0., 0., self.k / self.n.z)
        elif self.n.y:
            return Point3(0., self.k / self.n.y, 0.)
        else:
            return Point3(self.k / self.n.x, 0., 0.)

    def _apply_transform(self, t):
        # Transform a point on the plane and the normal, then rebuild k.
        p = t * self._get_point()
        self.n = t * self.n
        self.k = self.n.dot(p)

    def intersect(self, other):
        return other._intersect_plane(self)

    def _intersect_line3(self, other):
        return _intersect_line3_plane(other, self)

    def _intersect_plane(self, other):
        return _intersect_plane_plane(self, other)

    def connect(self, other):
        return other._connect_plane(self)

    def _connect_point3(self, other):
        return _connect_point3_plane(other, self)

    def _connect_line3(self, other):
        return _connect_line3_plane(other, self)

    def _connect_sphere(self, other):
        return _connect_sphere_plane(other, self)

    def _connect_plane(self, other):
        return _connect_plane_plane(other, self)
| RT-Thread/rtthread_fsl | utils/mdp/eMPL-pythonclient/euclid.py | Python | lgpl-2.1 | 69,409 |
# -*- coding: utf-8 -*-
# api.py
# Copyright (C) 2015 LEAP
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
import json
import base64
from six import StringIO
from uuid import uuid4
from twisted.internet import defer
from twisted.web.http_headers import Headers
from twisted.web.client import FileBodyProducer
from leap.soledad.client.http_target.support import readBody
from leap.soledad.common.errors import InvalidAuthTokenError
from leap.soledad.common.l2db.errors import HTTPError
from leap.soledad.common.l2db import SyncTarget
# we may want to collect statistics from the sync process
# DO_STATS is toggled at import time by the SOLEDAD_STATS environment
# variable (any non-empty value enables it).
DO_STATS = False
if os.environ.get('SOLEDAD_STATS'):
    DO_STATS = True
class SyncTargetAPI(SyncTarget):
"""
Declares public methods and implements u1db.SyncTarget.
"""
    @property
    def uuid(self):
        """The user uuid set by the last set_creds() call."""
        return self._uuid
    def set_creds(self, creds):
        """
        Update credentials.

        :param creds: A dictionary containing the uuid and token, shaped
                      {'token': {'uuid': ..., 'token': ...}}.
        :type creds: dict
        """
        uuid = creds['token']['uuid']
        token = creds['token']['token']
        self._uuid = uuid
        # Token auth scheme: base64("uuid:token") in an Authorization
        # header.  NOTE(review): b64encode on a str is Python-2 only;
        # under Python 3 this would need bytes -- confirm target runtime.
        auth = '%s:%s' % (uuid, token)
        b64_token = base64.b64encode(auth)
        self._auth_header = {'Authorization': ['Token %s' % b64_token]}
    @property
    def _base_header(self):
        """A fresh copy of the auth header dict, or {} before set_creds()."""
        return self._auth_header.copy() if self._auth_header else {}
    def _http_request(self, url, method='GET', body=None, headers=None,
                      content_type=None, body_reader=readBody,
                      body_producer=None):
        """Issue an HTTP request through the Twisted agent in self._http.

        :param body_reader: callback applied to the response (defaults to
            readBody, which collects the full body).
        :param body_producer: optional factory wrapping *body* for
            streamed uploads (see send.py); otherwise a plain string body
            is wrapped in a FileBodyProducer.
        :return: A deferred firing with the processed response body;
            HTTP 401/403 errors are translated to InvalidAuthTokenError
            by _unauth_to_invalid_token_error (defined elsewhere in this
            module) -- TODO confirm its exact mapping.
        :rtype: twisted.internet.defer.Deferred
        """
        headers = headers or self._base_header
        if content_type:
            headers.update({'content-type': [content_type]})
        if not body_producer and body:
            body = FileBodyProducer(StringIO(body))
        elif body_producer:
            # Upload case, check send.py
            body = body_producer(body)
        d = self._http.request(
            method, url, headers=Headers(headers), bodyProducer=body)
        d.addCallback(body_reader)
        d.addErrback(_unauth_to_invalid_token_error)
        return d
    @defer.inlineCallbacks
    def get_sync_info(self, source_replica_uid):
        """
        Return information about known state of remote database.

        Return the replica_uid and the current database generation of the
        remote database, and its last-seen database generation for the client
        replica.

        :param source_replica_uid: The client-size replica uid.
        :type source_replica_uid: str

        :return: A deferred which fires with (target_replica_uid,
                 target_replica_generation, target_trans_id,
                 source_replica_last_known_generation,
                 source_replica_last_known_transaction_id)
        :rtype: twisted.internet.defer.Deferred
        """
        # A plain GET on the sync endpoint returns the state as JSON.
        raw = yield self._http_request(self._url)
        res = json.loads(raw)
        defer.returnValue((
            res['target_replica_uid'],
            res['target_replica_generation'],
            res['target_replica_transaction_id'],
            res['source_replica_generation'],
            res['source_transaction_id']
        ))
    def record_sync_info(
            self, source_replica_uid, source_replica_generation,
            source_replica_transaction_id):
        """
        Record tip information for another replica.

        After sync_exchange has been processed, the caller will have
        received new content from this replica. This call allows the
        source replica instigating the sync to inform us what their
        generation became after applying the documents we returned.

        This is used to allow future sync operations to not need to repeat data
        that we just talked about. It also means that if this is called at the
        wrong time, there can be database records that will never be
        synchronized.

        :param source_replica_uid: The identifier for the source replica.
        :type source_replica_uid: str
        :param source_replica_generation: The database generation for the
                                          source replica.
        :type source_replica_generation: int
        :param source_replica_transaction_id: The transaction id associated
                                              with the source replica
                                              generation.
        :type source_replica_transaction_id: str

        :return: A deferred which fires with the result of the query.
        :rtype: twisted.internet.defer.Deferred

        NOTE: source_replica_uid is accepted for interface compatibility
        but not sent in the request body.
        """
        data = json.dumps({
            'generation': source_replica_generation,
            'transaction_id': source_replica_transaction_id
        })
        # PUT the new tip state as JSON to the sync endpoint.
        return self._http_request(
            self._url,
            method='PUT',
            body=data,
            content_type='application/json')
    @defer.inlineCallbacks
    def sync_exchange(self, docs_by_generation, source_replica_uid,
                      last_known_generation, last_known_trans_id,
                      insert_doc_cb, ensure_callback=None,
                      sync_id=None):
        """
        Find out which documents the remote database does not know about,
        encrypt and send them. After that, receive documents from the remote
        database.

        :param docs_by_generation: A list of (doc_id, generation, trans_id)
                                   of local documents that were changed since
                                   the last local generation the remote
                                   replica knows about.
        :type docs_by_generation: list of tuples
        :param source_replica_uid: The uid of the source replica.
        :type source_replica_uid: str
        :param last_known_generation: Target's last known generation.
        :type last_known_generation: int
        :param last_known_trans_id: Target's last known transaction id.
        :type last_known_trans_id: str
        :param insert_doc_cb: A callback for inserting received documents from
                              target. If not overriden, this will call u1db
                              insert_doc_from_target in synchronizer, which
                              implements the TAKE OTHER semantics.
        :type insert_doc_cb: function
        :param ensure_callback: A callback that ensures we know the target
                                replica uid if the target replica was just
                                created.
        :type ensure_callback: function
        :param sync_id: An identifier for this sync session; generated fresh
                        when not supplied by the caller.
        :type sync_id: str

        :return: A deferred which fires with the new generation and
                 transaction id of the target replica.
        :rtype: twisted.internet.defer.Deferred
        """
        # ---------- phase 1: send docs to server ----------------------------
        if DO_STATS:
            # NOTE(review): sync_exchange_phase[0] appears to hold the current
            # phase number for stats reporting -- confirm with DO_STATS users.
            self.sync_exchange_phase[0] += 1
        # --------------------------------------------------------------------
        self._ensure_callback = ensure_callback
        if sync_id is None:
            sync_id = str(uuid4())
        self.source_replica_uid = source_replica_uid
        # save a reference to the callback so we can use it after decrypting
        self._insert_doc_cb = insert_doc_cb
        gen_after_send, trans_id_after_send = yield self._send_docs(
            docs_by_generation,
            last_known_generation,
            last_known_trans_id,
            sync_id)
        # ---------- phase 2: receive docs -----------------------------------
        if DO_STATS:
            self.sync_exchange_phase[0] += 1
        # --------------------------------------------------------------------
        cur_target_gen, cur_target_trans_id = yield self._receive_docs(
            last_known_generation, last_known_trans_id,
            ensure_callback, sync_id)
        # update gen and trans id info in case we just sent and did not
        # receive docs.
        if gen_after_send is not None and gen_after_send > cur_target_gen:
            cur_target_gen = gen_after_send
            cur_target_trans_id = trans_id_after_send
        # ---------- phase 3: sync exchange is over --------------------------
        if DO_STATS:
            self.sync_exchange_phase[0] += 1
        # --------------------------------------------------------------------
        defer.returnValue([cur_target_gen, cur_target_trans_id])
def _unauth_to_invalid_token_error(failure):
    """
    Translate HTTP 401 failures into our own invalid-token error.

    Non-HTTP failures are re-raised by ``trap``; HTTP failures with any
    status other than 401 are passed through untouched.

    :param failure: The original failure.
    :type failure: twisted.python.failure.Failure

    :return: Either the original failure or an invalid auth token error.
    :rtype: twisted.python.failure.Failure
    """
    failure.trap(HTTPError)
    if failure.value.status != 401:
        return failure
    raise InvalidAuthTokenError
| leapcode/soledad | src/leap/soledad/client/http_target/api.py | Python | gpl-3.0 | 9,467 |
from django.contrib import admin
from .models import (
Provider,
OsFamily,
OperatingSystem,
Instance,
ServerPlan)
# Expose every computing model through the Django admin interface.
for _model in (Provider, OsFamily, OperatingSystem, Instance, ServerPlan):
    admin.site.register(_model)
| jcbalmeida/onecloud-store | store/computing/admin.py | Python | mit | 296 |
# UrbanFootprint v1.5
# Copyright (C) 2017 Calthorpe Analytics
#
# This file is part of UrbanFootprint version 1.5
#
# UrbanFootprint is distributed under the terms of the GNU General
# Public License version 3, as published by the Free Software Foundation. This
# code is distributed WITHOUT ANY WARRANTY, without implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License v3 for more details; see <http://www.gnu.org/licenses/>.
__author__ = 'calthorpe_analytics'
# Sample placetypes as (id, category, name) rows, listed in id order.
sample_placetypes = [
    (1, "Mixed Use Centers & Corridors", "Urban Mixed Use"),
    (2, "Mixed Use Centers & Corridors", "Urban Residential"),
    (3, "Mixed Use Centers & Corridors", "Urban Commercial"),
    (4, "Mixed Use Centers & Corridors", "City Mixed Use"),
    (5, "Mixed Use Centers & Corridors", "City Residential"),
    (6, "Mixed Use Centers & Corridors", "City Commercial"),
    (7, "Mixed Use Centers & Corridors", "Town Mixed Use"),
    (8, "Mixed Use Centers & Corridors", "Town Residential"),
    (9, "Mixed Use Centers & Corridors", "Town Commercial"),
    (10, "Mixed Use Centers & Corridors", "Village Mixed Use"),
    (11, "Mixed Use Centers & Corridors", "Village Residential"),
    (12, "Mixed Use Centers & Corridors", "Village Commercial"),
    (13, "Mixed Use Centers & Corridors", "Neighborhood Residential"),
    (14, "Mixed Use Centers & Corridors", "Neighborhood Low"),
    (15, "Employment Areas", "Office Focus"),
    (16, "Employment Areas", "Mixed Office and R&D"),
    (17, "Employment Areas", "Office/Industrial"),
    (18, "Employment Areas", "Industrial Focus"),
    (19, "Employment Areas", "Low-Density Employment Park"),
    (20, "Suburban", "High Intensity Activity Center"),
    (21, "Suburban", "Mid Intensity Activity Center"),
    (22, "Suburban", "Low Intensity Retail-Centered N'Hood"),
    (23, "Suburban", "Retail: Strip Mall/ Big Box"),
    (24, "Suburban", "Industrial/Office/Res Mixed High"),
    (25, "Suburban", "Industrial/Office/Res Mixed Low"),
    (26, "Suburban Residential", "Suburban Multifamily"),
    (27, "Suburban Residential", "Suburban Mixed Residential"),
    (28, "Suburban Residential", "Residential Subdivision"),
    (29, "Suburban Residential", "Large Lot Residential Area"),
    (30, "Rural", "Rural Residential"),
    (31, "Rural", "Rural Ranchettes"),
    (32, "Rural", "Rural Employment"),
    (33, "Institutional", "Campus/ University"),
    (34, "Institutional", "Institutional"),
    (35, "Parks", "Parks & Open Space"),
]
# Create a dict {buildingtype_name:percent, ...} for the given placetype index
def building_type_percent_mix(placetype_index):
    """Return {buildingtype_name: percent} of nonzero shares for a placetype.

    :param placetype_index: zero-based column index into the percentage lists
        of ``sample_placetype_buildingtype_mix``.
    """
    nonzero_rows = filter_dict(
        lambda name, percents: percents[placetype_index] != 0,
        sample_placetype_buildingtype_mix)
    return map_dict_to_dict(
        lambda name, percents: [name, percents[placetype_index]],
        nonzero_rows)
# Each key here is a buildingtype name. Each array contains a percent of each of the placetypes. Note that some super categories like "MIXED USE" won't be used, but were part of the import process
# Maps building type name -> list of 35 mix fractions, one per placetype in
# sample_placetypes (index 0 corresponds to placetype id 1).  Values are
# written as 0.01*<percent> so each entry is a 0.0-1.0 fraction.  The
# commented-out rows are category aggregates and streets/parks line items
# retained from the original spreadsheet import for reference.
sample_placetype_buildingtype_mix = {
    #"MIXED USE":[0.01*57.0,0.01*15.0,0.01*12.0,0.01*44.0,0.01*14.0,0.01*5.0,0.01*38.0,0.01*12.5,0.01*20.0,0.01*20.0,0.01*0.0,0.01*10.0,0.01*2.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*45.0,0.01*5.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0],
    "Skyscraper Mixed Use":[0.01*7,0.01*0,0.01*1,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0],
    "High-Rise Mixed Use":[0.01*15,0.01*2,0.01*1,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0],
    "Mid-Rise Mixed Use":[0.01*17,0.01*3,0.01*2,0.01*10,0.01*2,0.01*1,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0],
    "Low-Rise Mixed Use":[0.01*12,0.01*3,0.01*3,0.01*9,0.01*4,0.01*1,0.01*10,0.01*0,0.01*2,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0],
    "Parking Structure/Mixed Use":[0.01*0,0.01*2,0.01*0,0.01*5,0.01*1,0.01*1,0.01*5,0.01*0,0.01*3,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*5,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0],
    "Main Street Commercial/MU High (3-5 Floors)":[0.01*3,0.01*5,0.01*5,0.01*15,0.01*5,0.01*1,0.01*13,0.01*0,0.01*5,0.01*5,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*40,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0],
    "Main Street Commercial/MU Low (1-2 Floors)":[0.01*3,0.01*0,0.01*0,0.01*5,0.01*2,0.01*1,0.01*10,0.01*13,0.01*10,0.01*15,0.01*0,0.01*10,0.01*2,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*5,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0],
    #"RESIDENTIAL":[0.01*23.0,0.01*80.0,0.01*0.0,0.01*35.0,0.01*81.0,0.01*0.0,0.01*35.0,0.01*87.5,0.01*0.0,0.01*60.0,0.01*100.0,0.01*0.0,0.01*98.0,0.01*100.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*15.0,0.01*25.0,0.01*58.0,0.01*0.0,0.01*62.0,0.01*45.0,0.01*100.0,0.01*95.0,0.01*95.0,0.01*97.0,0.01*99.0,0.01*99.0,0.01*5.0,0.01*95.0,0.01*15.0,0.01*0.0],
    "Skyscraper Residential":[0.01*5,0.01*12,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0],
    "High-Rise Residential":[0.01*7,0.01*15,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0],
    "Urban Mid-Rise Residential":[0.01*8,0.01*34,0.01*0,0.01*10,0.01*20,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*21,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*45,0.01*0,0.01*0],
    "Urban Podium Multi-Family":[0.01*3,0.01*14,0.01*0,0.01*10,0.01*26,0.01*0,0.01*10,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*10,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*25,0.01*5,0.01*0],
    "Standard Podium Multi-Family":[0.01*0,0.01*5,0.01*0,0.01*5,0.01*10,0.01*0,0.01*5,0.01*20,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*11,0.01*0,0.01*35,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*25,0.01*0,0.01*0],
    "Suburban Multifamily Apt/Condo":[0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*5,0.01*5,0.01*2,0.01*0,0.01*9,0.01*10,0.01*40,0.01*20,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0],
    "Urban Townhome/Live-Work":[0.01*0,0.01*0,0.01*0,0.01*10,0.01*20,0.01*0,0.01*20,0.01*45,0.01*0,0.01*30,0.01*55,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0],
    "Standard Townhome":[0.01*0,0.01*0,0.01*0,0.01*0,0.01*5,0.01*0,0.01*0,0.01*10,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*10,0.01*5,0.01*0,0.01*0,0.01*11,0.01*15,0.01*20,0.01*10,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0],
    "Garden Apartment":[0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*13,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*15,0.01*3,0.01*0,0.01*0,0.01*11,0.01*5,0.01*15,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0],
    "Very Small Lot 3000":[0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*10,0.01*35,0.01*0,0.01*68,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*5,0.01*0,0.01*5,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0],
    "Small Lot 4000":[0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*20,0.01*10,0.01*0,0.01*25,0.01*30,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*15,0.01*0,0.01*0,0.01*4,0.01*0,0.01*18,0.01*35,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*10,0.01*0],
    "Medium Lot 5500":[0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*5,0.01*20,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*28,0.01*0,0.01*0,0.01*0,0.01*0,0.01*15,0.01*50,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0],
    "Large Lot 7500":[0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*50,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*10,0.01*0,0.01*0,0.01*0,0.01*0,0.01*12,0.01*5,0.01*40,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0],
    "Estate Lot":[0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*5,0.01*30,0.01*5,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0],
    "Rural Residential":[0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*27,0.01*45,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0],
    "Rural Ranchette":[0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*49,0.01*99,0.01*5,0.01*0,0.01*0,0.01*0],
    #"COMMERCIAL/INDUSTRIAL":[0.01*20.0,0.01*5.0,0.01*88.0,0.01*21.0,0.01*5.0,0.01*95.0,0.01*27.0,0.01*0.0,0.01*80.0,0.01*20.0,0.01*0.0,0.01*90.0,0.01*0.0,0.01*0.0,0.01*100.0,0.01*100.0,0.01*100.0,0.01*100.0,0.01*100.0,0.01*40.0,0.01*70.0,0.01*42.0,0.01*100.0,0.01*38.0,0.01*55.0,0.01*0.0,0.01*5.0,0.01*5.0,0.01*3.0,0.01*0.0,0.01*1.0,0.01*95.0,0.01*5.0,0.01*85.0,0.01*100.0],
    "Skyscraper Office":[0.01*0,0.01*0,0.01*8,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0],
    "High-Rise Office":[0.01*5,0.01*0,0.01*10,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0],
    "Mid-Rise Office":[0.01*0,0.01*0,0.01*13,0.01*2,0.01*0,0.01*10,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0],
    "Low-Rise Office":[0.01*0,0.01*0,0.01*39,0.01*3,0.01*0,0.01*42,0.01*19,0.01*0,0.01*15,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*8,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*30,0.01*0],
    "Main Street Commercial (Retail + Office/Medical)":[0.01*5,0.01*0,0.01*8,0.01*5,0.01*0,0.01*31,0.01*5,0.01*0,0.01*50,0.01*15,0.01*0,0.01*80,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*25,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0],
    "Parking Structure+Ground-Floor Retail":[0.01*5,0.01*0,0.01*5,0.01*3,0.01*0,0.01*5,0.01*0,0.01*0,0.01*5,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*10,0.01*10,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0],
    "Parking Structure":[0.01*0,0.01*0,0.01*0,0.01*3,0.01*0,0.01*0,0.01*0,0.01*0,0.01*5,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*5,0.01*5,0.01*0,0.01*0,0.01*0,0.01*0,0.01*10,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*5,0.01*10,0.01*0],
    "Office Park High":[0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*50,0.01*31,0.01*5,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*10,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0],
    "Office Park Low":[0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*10,0.01*33,0.01*20,0.01*0,0.01*5,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*10,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0],
    "Industrial High":[0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*8,0.01*5,0.01*5,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*10,0.01*10,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*5,0.01*0],
    "Industrial Low":[0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*3,0.01*15,0.01*20,0.01*20,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*10,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*5,0.01*0],
    "Warehouse High":[0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*10,0.01*7,0.01*12,0.01*10,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*9,0.01*24,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*5,0.01*0],
    "Warehouse Low":[0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*5,0.01*3,0.01*33,0.01*45,0.01*70,0.01*0,0.01*0,0.01*0,0.01*0,0.01*1,0.01*1,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*5,0.01*0],
    "Hotel High":[0.01*5,0.01*5,0.01*5,0.01*3,0.01*3,0.01*5,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*5,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0],
    "Hotel Low":[0.01*0,0.01*0,0.01*0,0.01*2,0.01*2,0.01*2,0.01*3,0.01*0,0.01*3,0.01*5,0.01*0,0.01*5,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*5,0.01*2,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*100],
    "Regional Mall":[0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*15,0.01*0,0.01*15,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0],
    "Medium Intensity Strip Commercial (weighted avg)":[0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*2,0.01*0,0.01*0,0.01*3,0.01*0,0.01*0,0.01*20,0.01*5,0.01*3,0.01*0,0.01*0,0.01*0,0.01*0,0.01*5,0.01*50,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*5,0.01*0],
    "Low Intensity Strip Commercial (weighted avg)":[0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*2,0.01*0,0.01*0,0.01*0,0.01*5,0.01*7,0.01*20,0.01*5,0.01*0,0.01*30,0.01*35,0.01*35,0.01*0,0.01*0,0.01*0,0.01*5,0.01*5,0.01*3,0.01*0,0.01*0,0.01*0,0.01*0,0.01*5,0.01*0],
    "Rural Employment":[0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*1,0.01*95,0.01*0,0.01*15,0.01*0],
    #"INSTITUTIONAL (Itemized Civic)":[0.01*5.0,0.01*5.0,0.01*1.0,0.01*5.0,0.01*5.0,0.01*1.0,0.01*8.0,0.01*8.0,0.01*1.0,0.01*8.0,0.01*8.0,0.01*2.0,0.01*6.0,0.01*6.0,0.01*5.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*12.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*4.0,0.01*9.0,0.01*12.0,0.01*8.0,0.01*0.1,0.01*0.0,0.01*0.0,0.01*42.0,0.01*40.0,0.01*0.0],
    "Campus/College High":[0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*40,0.01*0,0.01*0],
    "Campus/College Low":[0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0],
    "Hospital/Civic/Other Institutional":[0.01*1,0.01*1,0.01*1,0.01*1,0.01*1,0.01*1,0.01*1,0.01*1,0.01*1,0.01*1,0.01*1,0.01*2,0.01*1,0.01*1,0.01*5,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*0,0.01*2,0.01*40,0.01*0],
    "Urban Elementary School":[0.01*2.0,0.01*2.0,0.01*0.0,0.01*2.0,0.01*2.0,0.01*0.0,0.01*4.0,0.01*4.0,0.01*0.0,0.01*4.0,0.01*4.0,0.01*0.0,0.01*2.0,0.01*2.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0],
    "Non-Urban Elementary School":[0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*6.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*2.0,0.01*3.0,0.01*6.0,0.01*4.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0],
    "Urban Middle School":[0.01*1.0,0.01*1.0,0.01*0.0,0.01*1.0,0.01*1.0,0.01*0.0,0.01*2.0,0.01*2.0,0.01*0.0,0.01*2.0,0.01*2.0,0.01*0.0,0.01*2.0,0.01*2.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0],
    "Non-Urban Middle School":[0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*4.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*1.0,0.01*3.0,0.01*3.0,0.01*2.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0],
    "Urban High School":[0.01*1.0,0.01*1.0,0.01*0.0,0.01*1.0,0.01*1.0,0.01*0.0,0.01*1.0,0.01*1.0,0.01*0.0,0.01*1.0,0.01*1.0,0.01*0.0,0.01*1.0,0.01*1.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0],
    "Non-Urban High School":[0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*2.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*1.0,0.01*3.0,0.01*3.0,0.01*2.0,0.01*0.1,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0,0.01*0.0],
    #"STREETS/PARKS/OTHER CIVIC":[0.01*44.6,0.01*44.6,0.01*44.6,0.01*44.0,0.01*44.0,0.01*44.0,0.01*44.0,0.01*35.6,0.01*44.0,0.01*43.0,0.01*37.8,0.01*52.0,0.01*36.2,0.01*36.2,0.01*30.1,0.01*30.1,0.01*24.4,0.01*25.5,0.01*44.0,0.01*30.7,0.01*30.7,0.01*25.8,0.01*23.6,0.01*22.6,0.01*23.6,0.01*29.1,0.01*29.1,0.01*29.1,0.01*22.4,0.01*14.9,0.01*7.4,0.01*6.3,0.01*31.1,0.01*37.5,0.01*90.8],
    #"Streets":[0.01*37,0.01*37,0.01*37,0.01*36,0.01*36,0.01*36,0.01*36,0.01*28,0.01*36,0.01*32,0.01*27,0.01*32,0.01*25,0.01*25,0.01*21,0.01*21,0.01*17,0.01*16,0.01*35,0.01*25,0.01*25,0.01*20,0.01*18,0.01*18,0.01*18,0.01*23,0.01*23,0.01*23,0.01*16,0.01*10,0.01*4,0.01*3,0.01*19,0.01*26,0.01*11],
    #"Park":[0.01*7.0,0.01*7.0,0.01*7.0,0.01*7.0,0.01*7.0,0.01*7.0,0.01*7.0,0.01*7.0,0.01*7.0,0.01*10.0,0.01*10.0,0.01*15.0,0.01*10.0,0.01*10.0,0.01*4.0,0.01*4.0,0.01*4.0,0.01*4.0,0.01*4.0,0.01*4.0,0.01*4.0,0.01*4.0,0.01*4.0,0.01*3.0,0.01*4.0,0.01*4.0,0.01*4.0,0.01*4.0,0.01*4.0,0.01*1.0,0.01*1.0,0.01*1.0,0.01*10.0,0.01*10.0,0.01*78.0],
    #"Detention/Utilities":[0.01*1,0.01*1,0.01*1,0.01*1,0.01*1,0.01*1,0.01*1,0.01*1,0.01*1,0.01*1,0.01*1,0.01*5,0.01*1,0.01*1,0.01*5,0.01*5,0.01*3,0.01*5,0.01*5,0.01*2,0.01*2,0.01*2,0.01*2,0.01*2,0.01*2,0.01*2,0.01*2,0.01*2,0.01*2,0.01*4,0.01*2,0.01*2,0.01*2,0.01*2,0.01*2],
    #"Total Civic":[0.01*6,0.01*6,0.01*2,0.01*6,0.01*6,0.01*2,0.01*9,0.01*9,0.01*2,0.01*9,0.01*9,0.01*7,0.01*7,0.01*7,0.01*10,0.01*5,0.01*3,0.01*5,0.01*5,0.01*2,0.01*2,0.01*14,0.01*2,0.01*2,0.01*2,0.01*6,0.01*11,0.01*14,0.01*10,0.01*4,0.01*2,0.01*2,0.01*44,0.01*42,0.01*2]
}
# Creates a structure {buildingtype_id=1, name=Building Type Name, buildings={name=Building Name, percent=Percent of Buildingtype}, ...}
sample_buildingtype_buildings = map(lambda tup:
{'buildingtype_id':tup[0],
'name':tup[2][0],
'buildings':map(lambda building_tup: {
'name':building_tup[0],
'percent':building_tup[1]}, tup[1])
}, [
(1, (
("181 Fremont Street",0.01*30),
("Encinal Tower (Oakland, CA)",0.01*30),
("Skyscraper Mixed (John Hancock Center, Chicago)",0.01*40),
),
("Skyscraper Mixed Use",0.01*100),
),
(2, (
("High-Rise Mixed (Langstaff W-01)",0.01*0),
("High-Rise Mixed (SCAG DT Mixed Use)",0.01*0),
("High-Rise Residential (Visionaire, New York)",0.01*5),
("Skyscraper Mixed (Presidential Towers, Chicago)",0.01*5),
("High-Rise Mixed (The Infinity, San Francisco)",0.01*5),
("High-Rise Mixed (201 Folsom, San Francisco)",0.01*7),
("High-Rise Mixed (Atwater Place, Portland",0.01*28),
("High-Rise Mixed (601 4th Ave, Seattle)",0.01*50),
),
("High-Rise Mixed Use",0.01*100),
),
(3, (
("Mid-Rise Mixed (SCAG City Center MU)",0.01*0),
("Mid-Rise Mixed (937 Glisan, Portland)",0.01*20),
("Mid-Rise Mixed (The Edge, Portland)",0.01*40),
("Mid-Rise Mixed (The Gregory Lofts, Portland)",0.01*40),
),
("Mid-Rise Mixed Use",0.01*100),
),
(4, (
("Low-Rise Mixed (SCAG Dist. Center MU)",0.01*0),
("Mid-Rise Mixed (Museum Place, Portland OR)",0.01*36),
("Mid-Rise Mixed (Gaia Bldg, Berkeley)",0.01*2),
("Mid-Rise Mixed (Fine Arts, Berkeley)",0.01*2),
("Mid-Rise Mixed (East End Gateway, Sacramento)",0.01*2),
("Mid-Rise Mixed (Site 17, Seattle)",0.01*2),
("Mid-Rise Mixed (Alcyone, Seattle)",0.01*2),
("Mid-Rise Mixed (1885 University/New Californian, Berkeley)",0.01*2),
("Mid-Rise Mixed (Touriel Bldg, Berkeley)",0.01*2),
("Low-Rise Mixed (Cap Metro City Center MU)",0.01*0),
("Low-Rise Mixed (Stone Way Apts, Seattle)",0.01*20),
("Low-Rise Mixed (200 Second Street, Oakland)",0.01*5),
("Low-Rise Mixed (Cabrini First Hill Apts, Seattle)",0.01*5),
("Low-Rise Mixed (Kinsey Flats, Cincinnati, OH)",0.01*15),
("Low-Rise Mixed (Shattuck Lofts, Berkeley)",0.01*5),
),
("Low-Rise Mixed Use",0.01*100),
),
(5, (
("Parking Structure/Mixed Use (Fahrenheit Condos + Petco Parkade, San Diego CA)",0.01*40),
("Parking Structure/Mixed Use (2)",0.01*35),
("Parking Structure/Mixed Use (3)",0.01*25),
),
("Parking Structure/Mixed Use",0.01*100),
),
(6, (
("Main Street Commercial/MU (SACOG 19. MU Res Focus)",0.01*0),
("Main Street Commercial/MU (SACOG 18. MU Emp Focus)",0.01*0),
("Main Street Commercial/MU (SACOG 43. Natomas MU)",0.01*0),
("Main Street Commercial/MU (3400 Cesar Chavez St, SF, CA)",0.01*10),
("Main Street Commercial/MU (Belmont Dairy, Portland OR)",0.01*20),
("Main Street Commercial/MU (Venice Renaissance, Venice CA)",0.01*10),
("Main Street Commercial/MU (International Place, Harrisburg, PN)",0.01*40),
("Main Street Commercial/MU (Heilig-Levine, Raleigh NC)",0.01*20),
("Main Street Commercial/MU (SCAG Lifestyle Main Street)",0.01*0),
),
("Main Street Commercial/MU High (3-5 Floors)",0.01*100),
),
(7, (
("Main Street Commercial/MU Low (2100 Vine Street, Berkeley, CA)",0.01*0),
("Main Street Commercial/MU Low (480 Castro Street, San Francisco) (MU Walgreens)",0.01*50),
("Fill Me 2-story low-density mixed use ground floor retail",0.01*0),
("Fill Me 2-story low-density mixed use ground floor retail",0.01*0),
("Main Street Commercial/MU Low (5488 College Avenue, Oakland) (MU Retail)",0.01*50),
("Main Street Commercial/MU Low (300 30th Street at Church, San Francisco) (MU Church Produce)",0.01*0),
),
("Main Street Commercial/MU Low (1-2 Floors)",0.01*100),
),
(8, (
("Skyscraper Residential (AC Hi Rise Res, 50+5 floors)",0.01*0),
("Eureka Tower (Melbourne, AU)",0.01*0),
("Millenium Tower (San Francisco)",0.01*0),
("Skyscraper Residential (Rincon One, San Francisco)",0.01*50),
("Skyscraper Residential (Langstaff W-12)",0.01*50),
),
("Skyscraper Residential",0.01*100),
),
(9, (
("High-Rise Residential (AC Canal Tower Res, 20+5 floors)",0.01*0),
("High-Rise Residential (SCAG DT Res, 30-50 floors))",0.01*0),
("High-Rise Residential (AC Mid-Rise Res, 27+5 floors)",0.01*20),
("High-Rise Residential (Pacifica Honolulu, Oahu)",0.01*10),
("High-Rise Residential (Viridian, Nashville TN)",0.01*10),
("High-Rise Residential (199 New Montgomery, SF)",0.01*20),
("High-Rise Residential (The Metropolitan, SF)",0.01*20),
("High-Rise Residential (Pine & Franklin, SF)",0.01*20),
),
("High-Rise Residential",0.01*100),
),
(10, (
("Mid-Rise Residential (Langstaff E-09)",0.01*30),
("Mid-Rise Mixed (Eddy + Taylor Family Housing, SF)",0.01*3),
("Mid-Rise Mixed (Cubix Yerba Buena, SF)",0.01*1),
("Mid-Rise Residential (AC Res Type 2/MU)",0.01*1),
("Mid-Rise Residential (CapMetro Apt/Condo Hi)",0.01*65),
),
("Urban Mid-Rise Residential",0.01*100),
),
(11, (
("Low-Rise Residential (AC Low Rise Res/MU)",0.01*10),
("Low-Rise Residential (Alameda MF with Ret)",0.01*5),
("Low-Rise Residential (SACOG 45. Intense Urban Res)",0.01*5),
("Low-Rise Residential (SCAG Apt/Condo Hi)",0.01*5),
("Low-Rise Residential (Avalon Apts (Cahill Park), San Jose)",0.01*0),
("Low-Rise Residential (25-35 Dolores, SF)",0.01*20),
("Low-Rise Residential (Ironhorse Family Apartments, Oakland)",0.01*10),
("Low-Rise Residential (MODA Lofts, Stapleton, Denver)",0.01*25),
("Low-Rise Residential (Darling Florist Bldg, Berkeley)",0.01*20),
),
("Urban Podium Multi-Family",0.01*100),
),
(12, (
("Low-Rise Residential (AC Low Rise Res/MU)",0.01*0),
("Low-Rise Residential (Alameda MF with Ret)",0.01*0),
("Low-Rise Residential (SACOG 45. Intense Urban Res)",0.01*0),
("Low-Rise Residential (SCAG Apt/Condo Hi)",0.01*0),
("Low-Rise Residential (Avalon Apts (Cahill Park), San Jose)",0.01*55),
("Low-Rise Residential (25-35 Dolores, SF)",0.01*0),
("Low-Rise Residential (Ironhorse Family Apartments, Oakland)",0.01*20),
("Low-Rise Residential (MODA Lofts, Stapleton, Denver)",0.01*25),
("Low-Rise Residential (Darling Florist Bldg, Berkeley)",0.01*0),
),
("Standard Podium Multi-Family",0.01*100),
),
(13, (
("Multifamily Apt/Condo (SEGA Apt/Condo Low)",0.01*0),
("Multifamily Apt/Condo (SACOG 5. Med Hi-Den Res)",0.01*0),
("Multifamily Apt/Condo (Lenzen Square, San Jose)",0.01*20),
("Multifamily Apt/Condo (Linden Court, Oakland)",0.01*20),
("Multi-Family Apt/Condo (Sonoma Villero, Bothell, WA)",0.01*20),
("Multi-Family Apt/Condo (Town Lofts, Stapleton, Denver)",0.01*20),
("Multifamily Apt/Condo (Mabuhay Court, San Jose)",0.01*20),
("Multifamily Apt/Condo (SCAG Apt/Condo Med)",0.01*0),
),
("Suburban Multifamily Apt/Condo",0.01*100),
),
(14, (
("Townhome/Live-Work (Alameda Small Townhouse)",0.01*10),
("Townhome/Live-Work (SEGA Townhouse)",0.01*0),
("Townhome/Live-Work (Denver Brownstone, Stapleton, Denver)",0.01*10),
("Townhome/Live-Work (Pearl Townhome, Portland)",0.01*30),
("Townhome/Live-Work (Penthouse Row Homes, Stapleton, Denver)",0.01*25),
("Townhome/Live-Work (Backyard Row Home, Stapleton, Denver)",0.01*20),
("Townhome/Live-Work (Sky Terrace, Stapleton, Denver)",0.01*5),
("Townhome/Live-Work (SACOG D. Attached Res, 2-4 floors)",0.01*0),
),
("Urban Townhome/Live-Work",0.01*100),
),
(15, (
("Townhome/Live-Work (Alameda Small Townhouse)",0.01*10),
("Townhome/Live-Work (SEGA Townhouse)",0.01*25),
("Townhome/Live-Work (Denver Brownstone, Stapleton, Denver)",0.01*0),
("Townhome/Live-Work (Pearl Townhome, Portland)",0.01*0),
("Townhome/Live-Work (Penthouse Row Homes, Stapleton, Denver)",0.01*0),
("Townhome/Live-Work (Backyard Row Home, Stapleton, Denver)",0.01*20),
("Townhome/Live-Work (Sky Terrace, Stapleton, Denver)",0.01*45),
("Townhome/Live-Work (SACOG D. Attached Res, 2-4 floors)",0.01*0),
),
("Standard Townhome",0.01*100),
),
(16, (
("Garden Apartment (Corte Bella, Irthorn CA)",0.01*35),
("Garden Apartment (Victoria Townhomes, Seattle WA)",0.01*30),
("Stapleton Garden Apts (F1 Affordable Townhomes)",0.01*35),
),
("Garden Apartment",0.01*100),
),
(17, (
("Very Small Lot 2500 (SEGA Res 2500 sf, 1-2 floors)",0.01*0),
("Very Small Lot 2500 (The Boulders, Seattle WA)",0.01*0),
("Very Small Lot 2500 (Inverness Square, Murray UT)",0.01*0),
("Very Small Lot 2500 (Wild Sage Cohousing, Boulder CO)",0.01*0),
("Find new 3000-square-foot lot single family @ ~15 du/acre",0.01*0),
("Find new 3000-square-foot lot single family @ ~15 du/acre",0.01*0),
("Very Small Lot 2500 (Discovery Collection at Riverdale, Sacramento)",0.01*0),
("Very Small Lot 2500 (Coach House, Stapleton, Denver)",0.01*0),
("Very Small Lot 2500 (Garden Courts, Stapleton, Denver)",0.01*100),
),
("Very Small Lot 3000",0.01*100),
),
(18, (
("Small Lot 4000 (SEGA Res 4000 sf, 1-2 floors)",0.01*0),
("Small Lot 4000 (SACOG C. SF Small Lot, 1-2 floors)",0.01*0),
("Small Lot 4000 (John Laing SF, Stapleton, Denver)",0.01*30),
("Small Lot 4000 (Town Square, Sapleton, Denver)",0.01*30),
("Small Lot 4000 (Average, Albany)",0.01*30),
("Small Lot 4000 (Alameda SF Detached, 1-2 floors)",0.01*10),
),
("Small Lot 4000",0.01*100),
),
(19, (
("Medium Lot 5500 (SEGA Res 5500 sf, 1-2 floors)",0.01*0),
("Medium Lot 5500 (SACOG 3. Low Den Res, 1-2 floors)",0.01*0),
("Daybreak 5500",0.01*20),
("Medium Lot 5500 (Laguna West-Plan 3, Laguna West)",0.01*0),
("Medium Lot (Average, St. Francis Wood, San Francisco)",0.01*20),
("Medium Lot 5500 (Brentwood, Brentwood)",0.01*0),
("Medium Lot 5500 (Kentlands, Stapleton, Denver)",0.01*60),
),
("Medium Lot 5500",0.01*100),
),
(20, (
("Large Lot 7500 sf (SEGA Res 7500 sf, 1-2 floors)",0.01*0),
("Large Lot 7500 sf (SACOG B. SF Large Lot, 1-2 floors)",0.01*20),
("Large Lot 7500 (Average, View Park, Los Angeles)",0.01*20),
("Large Lot (Average, Gold Coast, Alameda, CA",0.01*20),
("Large Lot 7500 (Estate Home, Stapleton, Denver)",0.01*40),
),
("Large Lot 7500",0.01*100),
),
(21, (
("Estate Lot (SACOG 2. Very Low Den Res, , 1-2 floors)",0.01*10),
("Estate Lot (SCAG Large Lot, 1-2 floors)",0.01*10),
("Estate Lot (Average, Beverly Hills)",0.01*20),
("Estate Lot (Average, Old Palo Alto)",0.01*20),
("Estate Lot (Daybreak Estate, South Jordan)",0.01*20),
("Estate Lot (Windemere Estate, San Ramon)",0.01*20),
),
("Estate Lot",0.01*100),
),
(22, (
("Rural Residential (SACOG 1. Rural Res, 1-2 floors)",0.01*10),
("Rural Residential (SCAG Rural, 1-2 floors)",0.01*40),
("Rural Residential (Prairie Crossing Rural SF, Grayslake)",0.01*10),
("Rural Residential (SEGA Rural, 1-2 floors)",0.01*40),
),
("Rural Residential",0.01*100),
),
(23, (
("Rural/Ranchette (AFT 1.5 acre lot)",0.01*5),
("Ranchette 1 (near Fresno)",0.01*5),
("Ranchette 2",0.01*5),
("Rural/Ranchette (AFT 5 acre lot)",0.01*5),
("Ranchette 6 (Near Fresno)",0.01*10),
("Rural/Ranchette (AFT 10 acre lot)",0.01*10),
("Ranchette 4 (near Chowchilla)",0.01*10),
("Rural/Ranchette (AFT 20 acre lot)",0.01*10),
("Ranchette 5 (near Fresno)",0.01*20),
("Ranchette 3 (near Boonville)",0.01*20),
),
("Rural Ranchette",0.01*100),
),
(24, (
("Transbay Tower",0.01*20),
("Skyscraper Office (US Bank Tower, Los Angeles)",0.01*20),
("Skyscraper Office (Washington Mutual Tower, Seattle)",0.01*20),
("Aon Center (Chicago, IL)",0.01*0),
("Aon Center (Los Angeles, CA)",0.01*20),
("Bank of America Center (Los Angeles, CA)",0.01*20),
("Bank of America Tower (New York, NY)",0.01*0),
("120 Collins Street (Melbourne, AU)",0.01*0),
),
("Skyscraper Office",0.01*100),
),
(25, (
("High-Rise Office (AC Hi Rise Comm/MU, 36+5 floors)",0.01*0),
("High-Rise Office (AC Mid Rise Comm/MU, 22+5 floors)",0.01*0),
("High-Rise Mixed (Tabor Center, Denver)",0.01*30),
("High-Rise Office (560 Mission Street, San Francisco)",0.01*20),
("High Rise Office (555 Mission Street, San Francisco)",0.01*20),
("High-Rise Office (55 Second Street, San Francisco)",0.01*10),
("High-Rise Office (SACOG 46. CBD Ofice)",0.01*20),
),
("High-Rise Office",0.01*100),
),
(26, (
("Mid-Rise Office (Langstaff W-05)",0.01*20),
("Mid-Rise Mixed (AC Midrise Comm/MU)",0.01*0),
("Mid-Rise Office (Langstaff W-04)",0.01*20),
("Mid-Rise Office (AC Midrise Comm/MU, 10+4 floors)",0.01*0),
("Mid-Rise Office (SCAG City Center Office, 6-15 floors)",0.01*20),
("Mid-Rise Office (EPA Headquarters (Region 8), Denver)",0.01*20),
("Mid-Rise Office (SACOG 8. Hi Intensity Office)",0.01*20),
),
("Mid-Rise Office",0.01*100),
),
(27, (
("Low-Rise Office (AC Low Rise Office)",0.01*5),
("Low-Rise Office (CalPERS Headquarters, Sacramento)",0.01*5),
("Low-Rise Office (The Terry Thomas, Seattle)",0.01*20),
("Low-Rise Office (223 Yale @ Alley24, Seattle)",0.01*20),
("Low-Rise Office (Symantec Headquarters, Culver City)",0.01*30),
("Low-Rise Office (SACOG 98. Mod Inten. Office)",0.01*5),
("Low-Rise Office (R.D. Merrill Building, Seattle)",0.01*5),
("Low-Rise Office (SEGA Low Rise Office, 4-6 floors)",0.01*10),
),
("Low-Rise Office",0.01*100),
),
(28, (
("Main Street Commercial/MU Low (4185 Piedmont Avenue, Oakland) (Dentist Office)",0.01*5),
("Main Street Commercial/MU Low (1853 Solano Avenue, Berkeley) (Zachary's Pizza)",0.01*5),
("Main Street Commercial/MU Low (3170 College Avenue, Berkeley) (MU Noah's Bagels)",0.01*5),
("Main Street Commercial (Mechanics Bank, Kensington CA)",0.01*20),
("Main Street Commercial/MU Low (960 Cole Street, San Francisco) (Alpha Market)",0.01*25),
("Main Street Commercial/MU Low (1601 N Main Street, Walnut Creek) (MU Instrument Sales)",0.01*25),
("Main Street Commercial/MU Low (1616 N Main Street, Walnut Creek) (MU Crepe Vine)",0.01*15),
),
("Main Street Commercial (Retail + Office/Medical)",0.01*100),
),
(29, (
("Parking Structure + Ground-Floor Retail (15th and Pearl Structure, Boulder, CO))",0.01*30),
("Parking Structure + Ground-Floor Retail (8th and Hope, Los Angeles CA)",0.01*30),
("Parking Structure + Ground-Floor Retail (3)",0.01*40),
),
("Parking Structure+Ground-Floor Retail",0.01*100),
),
(30, (
("Parking Structure (1)",0.01*20),
("Parking Structure (Oak & Central, Alameda)",0.01*20),
("Parking Structure (2)",0.01*20),
("Parking Structure (Jack London Market, Oakland)",0.01*20),
("Parking Structure (3)",0.01*20),
),
("Parking Structure",0.01*100),
),
(31, (
("Office Park High (AC Low Rise Office)",0.01*0),
("Office Park High (SCAG Office Park, 2-4 floors)",0.01*0),
("Office Park High (SACOG Light Indus/Office, 2-4 floors)",0.01*0),
("Office Park High (SEGA Office Park 0.35, 2-4 floors)",0.01*10),
("Office Park High (SACOG 98. Mod Inten. Office)",0.01*30),
("Office Park High (Bishop Ranch BR-3, San Ramon)",0.01*20),
("Office Park High (Bishop Ranch BR-6, San Ramon",0.01*10),
("Office Park High (SEGA Low Rise Office, 4-6 floors)",0.01*30),
),
("Office Park High",0.01*100),
),
(32, (
("Office Park Low (Redwood Business Park, Petaluma)",0.01*25),
("Office Park Low (Nanometrics Bldg, Milpitas)",0.01*10),
("Office Park Low (Sonoma Technology Bldg, Petaluma)",0.01*50),
("Office Park Low (Bestronics Bldg, San Jose)",0.01*15),
),
("Office Park Low",0.01*100),
),
(33, (
("Industrial High (SEGA Flex R&D, 1-2 floors)",0.01*0),
("Industrial High (SACOG 13. Light Indus, 1-2 floors)",0.01*0),
("Harte-Hanks Building (Valencia Commerce Center)",0.01*15),
("FedEx Building,Gateway Office Park (South SF)",0.01*10),
("Industrial High (SF Produce Markets, San Francisco)",0.01*30),
("Industrial High (Odwalla Distribution Center, Berkeley)",0.01*20),
("Industrial High (Lyons Magnus Plant #1, Fresno)",0.01*25),
("Industrial High (SCAG Light Indus, 1-2 floors)",0.01*0),
),
("Industrial High",0.01*100),
),
(34, (
("Industrial Low (SEGA Heavy Ind, 1-2 floors)",0.01*20),
("Industrial Low (SACOG 14. Heavy Indus, 1-2 floors)",0.01*5),
("Industrial Low (Pacific Business Center, Fremont CA)",0.01*10),
("Industrial Low (Tank Farm Light Industrial, San Luis Obispo)",0.01*35),
("Industrial Low (SCAG Heavy Indus, 1-2 floors)",0.01*30),
),
("Industrial Low",0.01*100),
),
(35, (
("120 11th Street, San Francisco, CA 94103",0.01*20),
("1360 Egbert, San Francisco, CA 94124",0.01*20),
("Dynagraphics - 300 NW 14th Avenue, Portland, OR 97209",0.01*5),
("2181 NW Nicolai, Portland, OR 97210",0.01*5),
("NW Trunk & Bag Building - 522 N Thompson, Portland, OR 97227",0.01*5),
("McClaskey Building - 2755 NW 31st Avenue, Portland, OR 97210",0.01*20),
("525 SE Pine St, Portland, OR 97214",0.01*5),
("111 SE Madison Ave, Portland, OR 97214",0.01*5),
("WorkSpace - 2150 Folsom, San Francisco, CA 94110",0.01*10),
("1154-1158 Howard Street, San Francisco, CA 94103",0.01*5),
),
("Warehouse High",0.01*100),
),
(36, (
("9040 Carroll Way, San Diego, CA 92121 (Propertyline.com)",0.01*50),
("2003 West Avenue 140th, San Leandro, CA 94577 (Loopnet.com)",0.01*10),
("2300 Cesar Chavez, San Francisco, CA 94124",0.01*10),
("Warehouse 3 - Proposed Emeryville IKEA (in 1.2/1.6 FAR district)",0.01*30),
),
("Warehouse Low",0.01*100),
),
(37, (
("Hotel High (Four Seasons, San Francisco)",0.01*40),
("Hotel High (Walt Disney World Dolphin, Orlando)",0.01*20),
("Hotel High (Sheraton Grand, Sacramento)",0.01*40),
),
("Hotel High",0.01*100),
),
(38, (
("Hotel Low (Holiday Inn Express, Truckee)",0.01*30),
("Hotel Low (La Quinta Inn, Redding)",0.01*30),
("Hotel Low (Holiday Inn, Woodland Hills)",0.01*40),
),
("Hotel Low",0.01*100),
),
(39, (
("Regional Mall (SEGA General Commerical, 1-2 floors)",0.01*10),
("Regional Mall (SACOG 11. Regional Retail, 1-2 floors)",0.01*10),
("Regional Mall (Montclair Plaza, San Bernardino)",0.01*20),
("Regional Mall (Westfield Galleria, Roseville)",0.01*25),
("Regional Mall (Westfield Mission Valley, San Diego)",0.01*25),
("Regional Mall (SCAG Regional Mall, 1-2 floors)",0.01*10),
),
("Regional Mall",0.01*100),
),
(40, (
("Strip Commercial (SACOG 10. Comm/Nhood Retail, 1-2 floors)",0.01*10),
("Med-Intensity Strip Commercial (Plaza Cienega, Los Angeles)",0.01*20),
("Med-Intensity Strip Commercial (Greenway Plaza, Yonkers NY)",0.01*20),
("Med-Intensity Strip Commercial (Tanner Market, Pasadena)",0.01*20),
("Strip Commercial (SCAG Strip Commerical, 1-2 floors)",0.01*10),
("Strip Commercial (Cap Metro Strip Commerical, 1-2 floors)",0.01*20),
),
("Medium Intensity Strip Commercial (weighted avg)",0.01*100),
),
(41, (
("Strip Commercial (Gilroy Crossing, Gilroy)",0.01*5),
("Strip Commercial (Paso Robles Strip Retail, Paso Robles)",0.01*10),
("Strip Commercial (Renaissance Center West, Las Vegas)",0.01*15),
("Strip Commercial (Mission Viejo Commerce Center)",0.01*10),
("Strip Commercial (Mechanics Bank, Kensington CA)",0.01*0),
("Strip Commercial (Guernville Rd McDonald's, Santa Rosa CA)",0.01*40),
("Strip Commercial (Stanford Ranch, Roseville)",0.01*20),
),
("Low Intensity Strip Commercial (weighted avg)",0.01*100),
),
(42, (
("Oil Field",0.01*0),
("Occidential Elk Hills Oil Field",0.01*5),
("Farm",0.01*0),
("Very Large Farm",0.01*0),
("Large Farm (Near Watsonville)",0.01*5),
("Mid-sized farm 1300x1300 (near Manteca)",0.01*3),
("Small farm 1300x650 (near Modesto)",0.01*2),
("Very Small Farm 650x650 (Near Modesto)",0.01*1),
("Orchard",0.01*0),
("Very Large Orchard (Near Tracy)",0.01*5),
("Medium Orchard (Near Tracy)",0.01*3),
("Small Orchard (Near Ojai)",0.01*2),
("Organic Farm",0.01*0),
("Large Organic Farm (Frog Hollow, Brentwood)",0.01*3),
("Medium Organic Farm (Live Power Farm, Covelo)",0.01*2),
("Small Organic Farm (Gospel Flat Farm, Bollinas)",0.01*1),
("Livestock",0.01*0),
("Livestock Farm: Grassfed beef (Chilleno Valley Ranch, Petaluma)",0.01*27),
("Livestock Farm: Harris Ranch Feedlot (I-5 CA-145 Interchange)",0.01*20),
("Vineyard",0.01*0),
("Vineyard, Small (Martin Stelling Vineyard)",0.01*5),
("Vineyward, Medium (Quintessa Vineyard)",0.01*5),
("Vineyard, Large (Napa Valley Wine Company)",0.01*5),
("Prison",0.01*0),
("Recreation",0.01*0),
("Resource Extraction",0.01*0),
("Liberty Quarry Proposal (Temecula, CA)",0.01*5),
("Wind Farms",0.01*0),
("Castle & Cook Resorts Wind Farm Proposal (Lanai, HI)",0.01*1),
),
("Rural Employment",0.01*100),
),
(43, (
("Campus/College High",0.01*0),
("Campus/College High (LA City College, Los Angeles)",0.01*100),
("",0.01*0),
),
("Campus/College High",0.01*100),
),
(44, (
("Campus/College Low",0.01*100),
("",0.01*0),
("",0.01*0),
),
("Campus/College Low",0.01*100),
),
(45, (
("Hospital/Civic/Other Institutional",0.01*0),
("Hospital (Children's Hospital, Los Angeles)",0.01*100),
("",0.01*0),
),
("Hospital/Civic/Other Institutional",0.01*100),
),
(46, (
("Urban Elementary School",0.01*0),
("Urban Elementary School (Horace Mann ES, San Jose)",0.01*100),
("Urban Elementary School (Cragmont, Berkeley CA)",0.01*0),
("Ibarra Elementary School (San Diego)",0.01*0),
),
("Urban Elementary School",0.01*100),
),
(47, (
("Non-Urban Elementary School",0.01*100),
("",0.01*0),
("",0.01*0),
),
("Non-Urban Elementary School",0.01*100),
),
(48, (
("Urban Middle School",0.01*0),
("Urban Middle School (Willard, Berkeley CA)",0.01*0),
("Central Los Angeles Middle School",0.01*100),
),
("Urban Middle School",0.01*100),
),
(49, (
("Non-Urban Middle School",0.01*100),
("",0.01*0),
("",0.01*0),
),
("Non-Urban Middle School",0.01*100),
),
(50, (
("Urban High School",0.01*0),
("Urban High School (Berkeley High School, Berkeley CA)",0.01*0),
("",0.01*100),
),
("Urban High School",0.01*100),
),
(51, (
("Non-Urban High School",0.01*100),
("",0.01*0),
("",0.01*0),
),
("Non-Urban High School",0.01*100)
)
])
# Per-placetype sample building use mixes, keyed by buildingtype_id.
# Each value is a tuple of rows: (building name, pct, pct, pct, pct).
# The four percentages appear to be (residential, retail, office, industrial)
# shares summing to ~1.0 -- presumed from the value patterns (pure-residential
# rows are (1,0,0,0), office towers put ~1 in the 3rd slot, industrial rows
# in the 4th) and from construct_buildingtypes_old(), which zips row[1:]
# against a BuildingUseDefinition queryset; TODO confirm the actual ordering
# of that queryset matches this column order.
# NOTE(review): several rows carry fewer or more than four percent values
# (e.g. the single-value rows under id 8, and ids 55, 57, 58, 59) --
# dual_map() in construct_buildingtypes_old() pairs row[1:] with the use
# definitions, so these rows will not produce a full set of use percents;
# verify they are intentional.
sample_building_uses = OrderedDict([
(1, (("181 Fremont Street",0.33,0,0.67,0),
("Encinal Tower (Oakland, CA)",0.45,0.05,0.5,0),
("Skyscraper Mixed (John Hancock Center, Chicago)",0.6,0.06,0.34,0)
)),
(2, (("High-Rise Mixed (Langstaff W-01)",0.64,0.04,0.32,0),
("High-Rise Mixed (SCAG DT Mixed Use)",0.55,0.03,0.42,0),
("High-Rise Residential (Visionaire, New York)",0.92,0.01,0.07,0),
("Skyscraper Mixed (Presidential Towers, Chicago)",0.89,0.05,0.06,0),
("High-Rise Mixed (The Infinity, San Francisco)",0.989,0.011,0,0),
("High-Rise Mixed (201 Folsom, San Francisco)",0.972,0.028,0,0),
("High-Rise Mixed (Atwater Place, Portland",0.971,0.029,0,0),
("High-Rise Mixed (601 4th Ave, Seattle)",0.222,0.039,0.739,0),
)),
(3, (("Mid-Rise Mixed (SCAG City Center MU)",0.8,0.2,0,0),
("Mid-Rise Mixed (937 Glisan, Portland)",0.938,0.062,0,0),
("Mid-Rise Mixed (The Edge, Portland)",0.571,0.124,0.306,0),
("Mid-Rise Mixed (The Gregory Lofts, Portland)",0.841,0.061,0.098,0),
)),
(4, (("Low-Rise Mixed (SCAG Dist. Center MU)",0.75,0.25,0,0),
("Mid-Rise Mixed (Museum Place, Portland OR)",0.7,0.3,0,0),
("Mid-Rise Mixed (Gaia Bldg, Berkeley)",0.87,0.13,0,0),
("Mid-Rise Mixed (Fine Arts, Berkeley)",0.903,0.097,0,0),
("Mid-Rise Mixed (East End Gateway, Sacramento)",0.932,0.068,0,0),
("Mid-Rise Mixed (Site 17, Seattle)",0.976,0.024,0,0),
("Mid-Rise Mixed (Alcyone, Seattle)",0.989,0.011,0,0),
("Mid-Rise Mixed (1885 University/New Californian, Berkeley)",0.9,0.1,0,0),
("Mid-Rise Mixed (Touriel Bldg, Berkeley)",0.938,0.062,0,0),
("Low-Rise Mixed (Cap Metro City Center MU)",0.4,0.2,0.4,0),
("Low-Rise Mixed (Stone Way Apts, Seattle)",0.903,0.097,0,0),
("Low-Rise Mixed (200 Second Street, Oakland)",0.893,0.107,0,0),
("Low-Rise Mixed (Cabrini First Hill Apts, Seattle)",0.854,0.146,0,0),
("Low-Rise Mixed (Kinsey Flats, Cincinnati, OH)",0.761,0.238,0,0),
("Low-Rise Mixed (Shattuck Lofts, Berkeley)",0.9,0.1,0,0),
)),
(5, (("Parking Structure/Mixed Use (Fahrenheit Condos + Petco Parkade, San Diego CA)",0.5,0.5,0,0),
("Parking Structure/Mixed Use (2)",0.5,0.5,0,0),
("Parking Structure/Mixed Use (3)",0.5,0.5,0,0),
)),
(6, (("Main Street Commercial/MU (SACOG 19. MU Res Focus)",0.7,0.25,0.05,0),
("Main Street Commercial/MU (SACOG 18. MU Emp Focus)",0.45,0.4,0.15,0),
("Main Street Commercial/MU (SACOG 43. Natomas MU)",0.7,0.25,0.05,0),
("Main Street Commercial/MU (3400 Cesar Chavez St, SF, CA)",0.81,0.19,0,0),
("Main Street Commercial/MU (Belmont Dairy, Portland OR)",0.78,0.22,0,0),
("Main Street Commercial/MU (Venice Renaissance, Venice CA)",0.77,0.23,0,0),
("Main Street Commercial/MU (International Place, Harrisburg, PN)",0.69,0.31,0,0),
("Main Street Commercial/MU (Heilig-Levine, Raleigh NC)",0,0.48,0.52,0),
("Main Street Commercial/MU (SCAG Lifestyle Main Street)",0.4,0.6,0,0),
)),
(7, (("Main Street Commercial/MU Low (2100 Vine Street, Berkeley, CA)",0.52,0.484139397,0,0),
("Main Street Commercial/MU Low (480 Castro Street, San Francisco) (MU Walgreens)",0.52,0.48,0,0),
("Fill Me 2-story low-density mixed use ground floor retail",0.52,0.48,0,0),
("Fill Me 2-story low-density mixed use ground floor retail",0.52,0.48,0,0),
("Main Street Commercial/MU Low (5488 College Avenue, Oakland) (MU Retail)",0.52,0.48,0,0),
("Main Street Commercial/MU Low (300 30th Street at Church, San Francisco) (MU Church Produce)",0.6,0.4,0,0),
("Main Street Commercial/MU Low (1-2 Floors)",0.536,0.464827879,0,0),
)),
(8, (("Skyscraper Residential (AC Hi Rise Res, 50+5 floors)",0.94,0.06,0,0),
("Eureka Tower (Melbourne, AU)",1),  # NOTE(review): single percent value -- shorter than the 4-use pattern
("Millenium Tower (San Francisco)",1),  # NOTE(review): single percent value -- shorter than the 4-use pattern
("Skyscraper Residential (Rincon One, San Francisco)",1,0,0,0),
("Skyscraper Residential (Langstaff W-12)",0.98,0.02,0,0),
)),
(9, (("High-Rise Residential (AC Canal Tower Res, 20+5 floors)",0.99,0.01,0,0),
("High-Rise Residential (SCAG DT Res, 30-50 floors))",0.97,0.03,0,0),
("High-Rise Residential (AC Mid-Rise Res, 27+5 floors)",0.94,0.06,0,0),
("High-Rise Residential (Pacifica Honolulu, Oahu)",0.9740319,0,0.01038724,0.01558086),
("High-Rise Residential (Viridian, Nashville TN)",0.98,0.02,0,0),
("High-Rise Residential (199 New Montgomery, SF)",0.974,0.026,0,0),
("High-Rise Residential (The Metropolitan, SF)",0.998,0.002,0,0),
("High-Rise Residential (Pine & Franklin, SF)",0.985,0.015,0,0),
)),
(10, (("Mid-Rise Residential (Langstaff E-09)",0.96,0.02,0.02,0),
("Mid-Rise Mixed (Eddy + Taylor Family Housing, SF)",0.922,0.078,0,0),
("Mid-Rise Mixed (Cubix Yerba Buena, SF)",0.91,0.09,0,0),
("Mid-Rise Residential (AC Res Type 2/MU)",0.94,0.06,0,0),
("Mid-Rise Residential (CapMetro Apt/Condo Hi)",0.9,0.1,0,0),
)),
(11, (("Low-Rise Residential (AC Low Rise Res/MU)",0.97,0.03,0,0),
("Low-Rise Residential (Alameda MF with Ret)",0.9,0.1,0,0),
("Low-Rise Residential (SACOG 45. Intense Urban Res)",0.95,0.05,0,0),
("Low-Rise Residential (SCAG Apt/Condo Hi)",1,0,0,0),
("Low-Rise Residential (Avalon Apts (Cahill Park), San Jose)",0.92,0.080320916,0,0),
("Low-Rise Residential (25-35 Dolores, SF)",1,0,0,0),
("Low-Rise Residential (Ironhorse Family Apartments, Oakland)",1,0,0,0),
("Low-Rise Residential (MODA Lofts, Stapleton, Denver)",1,0,0,0),
("Low-Rise Residential (Darling Florist Bldg, Berkeley)",0.9,0.1,0,0),
)),
(12, (("Low-Rise Residential (AC Low Rise Res/MU)",0.97,0.03,0,0),
("Low-Rise Residential (Alameda MF with Ret)",0.9,0.1,0,0),
("Low-Rise Residential (SACOG 45. Intense Urban Res)",0.95,0.05,0,0),
("Low-Rise Residential (SCAG Apt/Condo Hi)",1,0,0,0),
("Low-Rise Residential (Avalon Apts (Cahill Park), San Jose)",0.92,0.080320916,0,0),
("Low-Rise Residential (25-35 Dolores, SF)",1,0,0,0),
("Low-Rise Residential (Ironhorse Family Apartments, Oakland)",1,0,0,0),
("Low-Rise Residential (MODA Lofts, Stapleton, Denver)",1,0,0,0),
("Low-Rise Residential (Darling Florist Bldg, Berkeley)",0.9,0.1,0,0),
)),
(13, (("Multifamily Apt/Condo (SEGA Apt/Condo Low)",1,0,0,0),
("Multifamily Apt/Condo (SACOG 5. Med Hi-Den Res)",1,0,0,0),
("Multifamily Apt/Condo (Lenzen Square, San Jose)",1,0,0,0),
("Multifamily Apt/Condo (Linden Court, Oakland)",1,0,0,0),
("Multi-Family Apt/Condo (Sonoma Villero, Bothell, WA)",1,0,0,0),
("Multi-Family Apt/Condo (Town Lofts, Stapleton, Denver)",1,0,0,0),
("Multifamily Apt/Condo (Mabuhay Court, San Jose)",1,0,0,0),
("Multifamily Apt/Condo (SCAG Apt/Condo Med)",1,0,0,0),
("Suburban Multifamily Apt/Condo ",1,0,0,0),
)),
(14, (("Townhome/Live-Work (Alameda Small Townhouse)",1,0,0,0),
("Townhome/Live-Work (SEGA Townhouse)",1,0,0,0),
("Townhome/Live-Work (Denver Brownstone, Stapleton, Denver)",1,0,0,0),
("Townhome/Live-Work (Pearl Townhome, Portland)",1,0,0,0),
("Townhome/Live-Work (Penthouse Row Homes, Stapleton, Denver)",1,0,0,0),
("Townhome/Live-Work (Backyard Row Home, Stapleton, Denver)",1,0,0,0),
("Townhome/Live-Work (Sky Terrace, Stapleton, Denver)",1,0,0,0),
("Townhome/Live-Work (SACOG D. Attached Res, 2-4 floors)",1,0,0,0),
)),
(15, (("Townhome/Live-Work (Alameda Small Townhouse)",1,0,0,0),
("Townhome/Live-Work (SEGA Townhouse)",1,0,0,0),
("Townhome/Live-Work (Denver Brownstone, Stapleton, Denver)",1,0,0,0),
("Townhome/Live-Work (Pearl Townhome, Portland)",1,0,0,0),
("Townhome/Live-Work (Penthouse Row Homes, Stapleton, Denver)",1,0,0,0),
("Townhome/Live-Work (Backyard Row Home, Stapleton, Denver)",1,0,0,0),
("Townhome/Live-Work (Sky Terrace, Stapleton, Denver)",1,0,0,0),
("Townhome/Live-Work (SACOG D. Attached Res, 2-4 floors)",1,0,0,0),
)),
(16, (("Garden Apartment (Corte Bella, Irthorn CA)",1,0,0,0),
("Garden Apartment (Victoria Townhomes, Seattle WA)",1,0,0,0),
("Stapleton Garden Apts (F1 Affordable Townhomes)",1,0,0,0),
)),
(17, (("Very Small Lot 2500 (SEGA Res 2500 sf, 1-2 floors)",1,0,0,0),
("Very Small Lot 2500 (The Boulders, Seattle WA)",1,0,0,0),
("Very Small Lot 2500 (Inverness Square, Murray UT)",1,0,0,0),
("Very Small Lot 2500 (Wild Sage Cohousing, Boulder CO)",1,0,0,0),
("Find new 3000-square-foot lot single family @ ~15 du/acre",1,0,0,0),
("Find new 3000-square-foot lot single family @ ~15 du/acre",1,0,0,0),
("Very Small Lot 2500 (Discovery Collection at Riverdale, Sacramento)",1,0,0,0),
("Very Small Lot 2500 (Coach House, Stapleton, Denver)",1,0,0,0),
("Very Small Lot 2500 (Garden Courts, Stapleton, Denver)",1,0,0,0),
)),
(18, (("Small Lot 4000 (SEGA Res 4000 sf, 1-2 floors)",1,0,0,0),
("Small Lot 4000 (SACOG C. SF Small Lot, 1-2 floors)",1,0,0,0),
("Small Lot 4000 (John Laing SF, Stapleton, Denver)",1,0,0,0),
("Small Lot 4000 (Town Square, Sapleton, Denver)",1,0,0,0),
("Small Lot 4000 (Average, Albany)",1,0,0,0),
("Small Lot 4000 (Alameda SF Detached, 1-2 floors)",1,0,0,0),
)),
(19, (("Medium Lot 5500 (SEGA Res 5500 sf, 1-2 floors)",1,0,0,0),
("Medium Lot 5500 (SACOG 3. Low Den Res, 1-2 floors)",1,0,0,0),
("Daybreak 5500",1,0,0,0),
("Medium Lot 5500 (Laguna West-Plan 3, Laguna West)",1,0,0,0),
("Medium Lot (Average, St. Francis Wood, San Francisco)",1,0,0,0),
("Medium Lot 5500 (Brentwood, Brentwood)",1,0,0,0),
("Medium Lot 5500 (Kentlands, Stapleton, Denver)",1,0,0,0),
)),
(20, (("Large Lot 7500 sf (SEGA Res 7500 sf, 1-2 floors)",1,0,0,0),
("Large Lot 7500 sf (SACOG B. SF Large Lot, 1-2 floors)",1,0,0,0),
("Large Lot 7500 (Average, View Park, Los Angeles)",1,0,0,0),
("Large Lot (Average, Gold Coast, Alameda, CA",1,0,0,0),
("Large Lot 7500 (Estate Home, Stapleton, Denver)",1,0,0,0),
)),
(21, (("Estate Lot (SACOG 2. Very Low Den Res, , 1-2 floors)",1,0,0,0),
("Estate Lot (SCAG Large Lot, 1-2 floors)",1,0,0,0),
("Estate Lot (Average, Beverly Hills)",1,0,0,0),
("Estate Lot (Average, Old Palo Alto)",1,0,0,0),
("Estate Lot (Daybreak Estate, South Jordan)",1,0,0,0),
("Estate Lot (Windemere Estate, San Ramon)",1,0,0,0),
)),
(22, (("Rural Residential (SACOG 1. Rural Res, 1-2 floors)",1,0,0,0),
("Rural Residential (SCAG Rural, 1-2 floors)",1,0,0,0),
("Rural Residential (Prairie Crossing Rural SF, Grayslake)",1,0,0,0),
("Rural Residential (SEGA Rural, 1-2 floors)",1,0,0,0),
)),
(23, (("Rural/Ranchette (AFT 1.5 acre lot)",1,0,0,0),
("Ranchette 1 (near Fresno)",1,0,0,0),
("Ranchette 2 ",1,0,0,0),
("Rural/Ranchette (AFT 5 acre lot)",1,0,0,0),
("Ranchette 6 (Near Fresno)",1,0,0,0),
("Rural/Ranchette (AFT 10 acre lot)",1,0,0,0),
("Ranchette 4 (near Chowchilla)",1,0,0,0),
("Rural/Ranchette (AFT 20 acre lot)",1,0,0,0),
("Ranchette 5 (near Fresno)",1,0,0,0),
("Ranchette 3 (near Boonville)",1,0,0,0),
)),
(24, (("Transbay Tower",0,0.1,0.9,0),
("Skyscraper Office (US Bank Tower, Los Angeles)",0,0.03,0.97,0),
("Skyscraper Office (Washington Mutual Tower, Seattle)",0,0.03,0.97,0),
("Aon Center (Chicago, IL)",0,0.02,0.98,0),
("Aon Center (Los Angeles, CA)",0,0.02,0.98,0),
("Bank of America Center (Los Angeles, CA)",0,0.02,0.98,0),
("Bank of America Tower (New York, NY)",0,0.02,0.98,0),
("120 Collins Street (Melbourne, AU)",0,0,1,0),
)),
(25, (("High-Rise Office (AC Hi Rise Comm/MU, 36+5 floors)",0,0.06,0.94,0),
("High-Rise Office (AC Mid Rise Comm/MU, 22+5 floors)",0,0.08,0.92,0),
("High-Rise Mixed (Tabor Center, Denver)",0,0.1,0.9,0),
("High-Rise Office (560 Mission Street, San Francisco)",0,0.008,0.992,0),
("High Rise Office (555 Mission Street, San Francisco)",0,0.006,0.994,0),
("High-Rise Office (55 Second Street, San Francisco)",0,0,1,0),
("High-Rise Office (SACOG 46. CBD Ofice)",0,0.05,0.95,0),
)),
(26, (("Mid-Rise Office (Langstaff W-05)",0,0,1,0),
("Mid-Rise Mixed (AC Midrise Comm/MU)",0,0.1,0.9,0),
("Mid-Rise Office (Langstaff W-04)",0,0.1,0.9,0),
("Mid-Rise Office (AC Midrise Comm/MU, 10+4 floors)",0,0.1,0.9,0),
("Mid-Rise Office (SCAG City Center Office, 6-15 floors)",0,0.05,0.95,0),
("Mid-Rise Office (EPA Headquarters (Region 8), Denver)",0,0,1,0),
("Mid-Rise Office (SACOG 8. Hi Intensity Office)",0,0.05,0.95,0),
)),
(27, (("Low-Rise Office (AC Low Rise Office)",0,0,1,0),
("Low-Rise Office (CalPERS Headquarters, Sacramento)",0,0,1,0),
("Low-Rise Office (The Terry Thomas, Seattle)",0,0.075,0.925,0),
("Low-Rise Office (223 Yale @ Alley24, Seattle)",0,0.111,0.889,0),
("Low-Rise Office (Symantec Headquarters, Culver City)",0,0,1,0),
("Low-Rise Office (SACOG 98. Mod Inten. Office)",0,0.05,0.95,0),
("Low-Rise Office (R.D. Merrill Building, Seattle)",0,0,1,0),
("Low-Rise Office (SEGA Low Rise Office, 4-6 floors)",0,0,1,0),
)),
(28, (("Main Street Commercial/MU Low (4185 Piedmont Avenue, Oakland) (Dentist Office)",0,0,1,0),
("Main Street Commercial/MU Low (1853 Solano Avenue, Berkeley) (Zachary's Pizza)",0,1,0,0),
("Main Street Commercial/MU Low (3170 College Avenue, Berkeley) (MU Noah's Bagels)",0,0.5,0.5,0),
("Main Street Commercial (Mechanics Bank, Kensington CA)",0,0.2,0.8,0),
("Main Street Commercial/MU Low (960 Cole Street, San Francisco) (Alpha Market)",0,1,0,0),
("Main Street Commercial/MU Low (1601 N Main Street, Walnut Creek) (MU Instrument Sales)",0,0.75,0.25,0),
("Main Street Commercial/MU Low (1616 N Main Street, Walnut Creek) (MU Crepe Vine)",0,0.6,0.4,0),
)),
(29, (("Parking Structure + Ground-Floor Retail (15th and Pearl Structure, Boulder, CO))",0,0.5,0.5,0),
("Parking Structure + Ground-Floor Retail (8th and Hope, Los Angeles CA)",0,1,0,0),
("Parking Structure + Ground-Floor Retail (3)",0,1,0,0),
("Parking Structure+Ground-Floor Retail",0,0.85,0.15,0),
)),
(30, (("Parking Structure (1)",0,1,0,0),
("Parking Structure (Oak & Central, Alameda)",0,1,0,0),
("Parking Structure (2)",0,1,0,0),
("Parking Structure (Jack London Market, Oakland)",0,1,0,0),
("Parking Structure (3)",0,1,0,0),
)),
(31, (("Office Park High (AC Low Rise Office)",0,0,1,0),
("Office Park High (SCAG Office Park, 2-4 floors)",0,0,1,0),
("Office Park High (SACOG Light Indus/Office, 2-4 floors)",0,0,0.75,0.25),
("Office Park High (SEGA Office Park 0.35, 2-4 floors)",0,0,1,0),
("Office Park High (SACOG 98. Mod Inten. Office)",0,0.05,0.95,0),
("Office Park High (Bishop Ranch BR-3, San Ramon)",0,0,1,0),
("Office Park High (Bishop Ranch BR-6, San Ramon",0,0,1,0),
("Office Park High (SEGA Low Rise Office, 4-6 floors)",0,0,1,0),
)),
(32, (("Office Park Low (Redwood Business Park, Petaluma)",0,0,0.8,0.2),
("Office Park Low (Nanometrics Bldg, Milpitas)",0,0,1,0),
("Office Park Low (Sonoma Technology Bldg, Petaluma)",0,0,1,0),
("Office Park Low (Bestronics Bldg, San Jose)",0,0,1,0),
)),
(33, (("Industrial High (SEGA Flex R&D, 1-2 floors)",0,0.02,0,0.98),
("Industrial High (SACOG 13. Light Indus, 1-2 floors)",0,0,0.25,0.75),
("Harte-Hanks Building (Valencia Commerce Center)",0,0,0.15,0.85),
("FedEx Building,Gateway Office Park (South SF)",0,0,0,1),
("Industrial High (SF Produce Markets, San Francisco)",0,0,0,1),
("Industrial High (Odwalla Distribution Center, Berkeley)",0,0,0,1),
("Industrial High (Lyons Magnus Plant #1, Fresno)",0,0,0,1),
("Industrial High (SCAG Light Indus, 1-2 floors)",0,0.03,0,0.97),
)),
(34, (("Industrial Low (SEGA Heavy Ind, 1-2 floors)",0,0,0,1),
("Industrial Low (SACOG 14. Heavy Indus, 1-2 floors)",0,0,0,1),
("Industrial Low (Pacific Business Center, Fremont CA)",0,0,0,1),
("Industrial Low (Tank Farm Light Industrial, San Luis Obispo)",0,0,0,1),
("Industrial Low (SCAG Heavy Indus, 1-2 floors)",0,0.02,0,0.98),
)),
(35, (("120 11th Street, San Francisco, CA 94103",0,0,0.1547,0.8453),
("1360 Egbert, San Francisco, CA 94124",0,0,0,1),
("Dynagraphics - 300 NW 14th Avenue, Portland, OR 97209",0,0,0.3,0.7),
("2181 NW Nicolai, Portland, OR 97210",0,0,0.3,0.7),
("NW Trunk & Bag Building - 522 N Thompson, Portland, OR 97227",0,0,0,1),
("McClaskey Building - 2755 NW 31st Avenue, Portland, OR 97210",0,0,0,1),
("525 SE Pine St, Portland, OR 97214",0,0,0.5,0.5),
("111 SE Madison Ave, Portland, OR 97214",0,0,0.3,0.7),
("WorkSpace - 2150 Folsom, San Francisco, CA 94110",0,0,0,1),
("1154-1158 Howard Street, San Francisco, CA 94103",0,0,0,1),
)),
(36, (("9040 Carroll Way, San Diego, CA 92121 (Propertyline.com)",0,0,0.15,0.85),
("2003 West Avenue 140th, San Leandro, CA 94577 (Loopnet.com)",0,0,0.1,0.9),
("2300 Cesar Chavez, San Francisco, CA 94124",0,0,0.1,0.9),
("Warehouse 3 - Proposed Emeryville IKEA (in 1.2/1.6 FAR district)",0,0,0,1),
)),
(37, (("Hotel High (Four Seasons, San Francisco)",0.645,0.355,0,0),
("Hotel High (Walt Disney World Dolphin, Orlando)",0.9,0.01,0.09,0),
("Hotel High (Sheraton Grand, Sacramento)",0.843,0.157,0,0),
)),
(38, (("Hotel Low (Holiday Inn Express, Truckee)",0.95,0.05,0,0),
("Hotel Low (La Quinta Inn, Redding)",0.95,0.05,0,0),
("Hotel Low (Holiday Inn, Woodland Hills)",0.95,0.05,0,0),
)),
(39, (("Regional Mall (SEGA General Commerical, 1-2 floors)",0,1,0,0),
("Regional Mall (SACOG 11. Regional Retail, 1-2 floors)",0,0.95,0.05,0),
("Regional Mall (Montclair Plaza, San Bernardino)",0,1,0,0),
("Regional Mall (Westfield Galleria, Roseville)",0,1,0,0),
("Regional Mall (Westfield Mission Valley, San Diego)",0,1,0,0),
("Regional Mall (SCAG Regional Mall, 1-2 floors)",0,1,0,0),
)),
(40, (("Strip Commercial (SACOG 10. Comm/Nhood Retail, 1-2 floors)",0,1,0,0),
("Med-Intensity Strip Commercial (Plaza Cienega, Los Angeles)",0,1,0,0),
("Med-Intensity Strip Commercial (Greenway Plaza, Yonkers NY)",0,1,0,0),
("Med-Intensity Strip Commercial (Tanner Market, Pasadena)",0,0.955,0.045,0),
("Strip Commercial (SCAG Strip Commerical, 1-2 floors)",0,0.7,0.3,0),
("Strip Commercial (Cap Metro Strip Commerical, 1-2 floors)",0,0.6,0.4,0),
)),
(41, (("Strip Commercial (Gilroy Crossing, Gilroy)",0,1,0,0),
("Strip Commercial (Paso Robles Strip Retail, Paso Robles)",0,1,0,0),
("Strip Commercial (Renaissance Center West, Las Vegas)",0,0.983,0.017,0),
("Strip Commercial (Mission Viejo Commerce Center)",0,1,0,0),
("Strip Commercial (Mechanics Bank, Kensington CA)",0,1,0,0),
("Strip Commercial (Guernville Rd McDonald's, Santa Rosa CA)",0,1,0,0),
("Strip Commercial (Stanford Ranch, Roseville)",0,1,0,0),
)),
(42, (("Oil Field",0,0,0.05,0.95),
("Occidential Elk Hills Oil Field",0,0,0,1),
("Farm",0,0.05,0.05,0.9),
("Very Large Farm",0.01,0,0,0.99),
("Large Farm (Near Watsonville)",0.01,0,0,0.99),
("Mid-sized farm 1300x1300 (near Manteca)",0.01,0,0,0.99),
("Small farm 1300x650 (near Modesto)",0.02,0,0,0.99),
("Very Small Farm 650x650 (Near Modesto)",0.03,0,0,0.99),
("Orchard",0,0,0,1),
("Very Large Orchard (Near Tracy)",0,0,0,1),
("Medium Orchard (Near Tracy)",0,0,0,1),
("Small Orchard (Near Ojai)",0.01,0,0,0.99),
("Organic Farm",0,0,0,1),
("Large Organic Farm (Frog Hollow, Brentwood)",0.01,0,0,0.99),
("Medium Organic Farm (Live Power Farm, Covelo)",0.02,0,0,0.98),
("Small Organic Farm (Gospel Flat Farm, Bollinas)",0.03,0,0,0.97),
("Livestock",0,0,0,1),
("Livestock Farm: Grassfed beef (Chilleno Valley Ranch, Petaluma)",0,0,0,1),
("Livestock Farm: Harris Ranch Feedlot (I-5 CA-145 Interchange)",0,0,0,1),
("Vineyard",0,0,0,1),
("Vineyard, Small (Martin Stelling Vineyard)",0.03,0,0,0.97),
("Vineyward, Medium (Quintessa Vineyard)",0.02,0,0,0.98),
("Vineyard, Large (Napa Valley Wine Company)",0.01,0,0,0.99),
("Prison",0,0,0,1),
("Recreation",0,0,0.05,0.95),
("Resource Extraction",0,0.05,0.1,0.85),
("Liberty Quarry Proposal (Temecula, CA)",0,0,0,1),
("Wind Farms",0,0,0.05,0.95),
("Castle & Cook Resorts Wind Farm Proposal (Lanai, HI)",0,0,0,1),
)),
(43, (
("Campus/College High (LA City College, Los Angeles)",0,0,1,0),
)),
(45,(("Hospital (Children's Hospital, Los Angeles)",0,0,1,0),
)),
(46, (("Urban Elementary School (Horace Mann ES, San Jose)",0,0,1),
("Urban Elementary School (Cragmont, Berkeley CA)",0,0,1,0),
)),
(48,(("Urban Middle School (Willard, Berkeley CA)",0,0,1,0),
("Central Los Angeles Middle School",0,0,1),
)),
(50, (("Urban High School (Berkeley High School, Berkeley CA)",0,0,1,0),
)),
(51, (
("Non-Urban High School ",0,0,1,0),
)),
(52, (("Urban City Hall (Oakland City Hall, Oakland, CA)",0,0,1,0),
("Urban City Hall (Long Beach City Hall and Civic Center, Long Beach, CA)",0,0,1,0),
)),
(53, (("Urban Public Library - Main Branch (Oakland Public Library, Oakland, CA)",0,0,0,1),
)),
(54, (("Urban Courthouse (Rene C. Davidson Courthouse, Oakland, CA)",0,0,0,1),
("Urban Courthouse (Long Beach Superior Court, Long Beach, CA)",0,0,0,1),
)),
# NOTE(review): ids 55, 57, 58 and 59 below deviate from the 4-value pattern
# (2, 5, 5 and 3 percent values respectively) -- verify against the consumer.
(55, (("Urban Convention Center (Oakland Convention Center, Oakland, CA),",0.1,0.9),
("Urban Convention Center (Long Beach Convention and Entertainment Center, Long Beach, CA),",0.2,0.8),
("Urban Convention Center (San Diego Convention Center),",0.2,0.8),
)),
(56, (("Suburban Civic Complex (City Hall, Library, Rec Center, Menlo Park, CA)",0,0,0,1),
("Suburban Civic Complex (City Hall, Library, Gym and Teen Center, Walnut, CA)",0,0,0,1),
("Suburban Civic Buildings (Police Station, Community Services, Walnut, CA)",0,0,0,1),
)),
(57, (("Town Civic Complex (City Hall, Police Dept., St. Helena, CA)",0,0,0,1,0),
("Town Civic Complex (City Hall, Police and Fire Dept., Bishop, CA)",0,0,0,1,0),
)),
(58, (("Town Library (St Helena Public Library, St. Helena, CA)",0,0,0,1,0),
("Town Library (Bishop Branch Library, Bishop, CA)",0,0,0,1,0),
)),
(59, (("Church 1,",0.05,0.95,0),
("Church 2,",0.05,0.95,0),
("Church 3,",0.05,0.95,),)
)])
def construct_buildingtypes_old():
    """Build (unsaved) building-type association objects from the sample data.

    Walks ``sample_buildingtype_buildings``, creating a ``Buildingtype`` per
    entry, matching each listed building against the instances imported from
    csv, and wiring up ``BuildingPercent`` and ``BuildingUsePercent`` rows.

    :return: dict with keys 'buildingtypes', 'buildings', 'building_percents'
        and 'building_use_percents', each a list of model instances.
    """
    use_definitions = BuildingUseDefinition.objects.filter(name__in=[
        Keys.BUILDING_USE_INDUSTRIAL,
        Keys.BUILDING_USE_OFFICE,
        Keys.BUILDING_USE_RESIDENTIAL,
        Keys.BUILDING_USE_RETAIL])
    all_buildingtypes = []
    all_buildings = []
    all_building_percents = []
    all_use_percents = []
    for type_dict in sample_buildingtype_buildings:
        current_type = Buildingtype(name=type_dict['name'])
        all_buildingtypes.append(current_type)
        uses = sample_building_uses.get(type_dict['buildingtype_id'], [])
        # Map each building name to its tuple of use percentages.
        percent_by_name = map_to_dict(
            lambda use: (use[0], use[1:]), uses)
        for building_dict in type_dict['buildings']:
            # Match the building name to our Building instances that were
            # created from csv; fall back to a fresh, unsaved Building.
            building = imported_building_lookup.get(
                building_dict['name'], Building(name=building_dict['name']))
            all_buildings.append(building)
            all_building_percents.append(BuildingPercent(
                buildingtype=current_type, building=building,
                percent=building_dict['percent']))
            use_percents = percent_by_name.get(building.name, None)
            if use_percents:
                all_use_percents.extend(dual_map(
                    lambda use_percent, use_definition: BuildingUsePercent(
                        building=building,
                        building_use_definition=use_definition,
                        percent=use_percent),
                    use_percents, use_definitions))
    return {'buildingtypes': all_buildingtypes,
            'buildings': all_buildings,
            'building_percents': all_building_percents,
            'building_use_percents': all_use_percents}
| CalthorpeAnalytics/urbanfootprint | footprint/client/configuration/default/built_form/old_placetypes.py | Python | gpl-3.0 | 71,803 |
from __future__ import print_function
from flask import render_template
import threading
from bokeh.models import ColumnDataSource
from bokeh.plotting import figure, cursession, push, output_server
from bokeh.embed import autoload_server
from devviz import data_handler, app
from devviz.views import View
class BokehLinechart(View):
    """Live line-chart view that streams data through a running bokeh-server.

    One background thread per subscribed variable pushes new samples into
    the server-side data source; the page embeds the chart via
    ``autoload_server``.
    """
    # CDN assets the page template injects for this view.
    script = ('<script src="http://cdn.pydata.org/bokeh/release/'
              'bokeh-0.9.1.min.js"></script>')
    # NOTE(review): 'ref=' below looks like a typo for 'rel=' -- confirm
    # before changing, templates may rely on the exact string.
    style = ('<link ref="stylesheet" type="text/css" '
             'href="http://cdn.pydata.org/bokeh/release/'
             'bokeh-0.9.1.min.css" />')
    url = 'bokeh'
    name = 'Bokeh Linechart'
    def __init__(self, variables=None, viewid=None):
        super(BokehLinechart, self).__init__(variables, viewid)
        # NOTE(review): _updated/_is_updated are written by update() but never
        # read in this class -- presumably hooks for change notification;
        # confirm against callers.
        self._updated = {}
        self._is_updated = threading.Event()
    @property
    def content(self):
        """Create the plot, start one updater thread per variable, and
        return the rendered HTML fragment embedding the live chart."""
        plot, session = self.create_plot()
        divs = autoload_server(plot, session)
        for var in self.variables:
            thread = threading.Thread(target=self.update_plot,
                                      args=(plot, session, var))
            thread.start()
        return render_template('bokeh_linechart.html', divs=divs,
                               variables=self.variables, viewid=self.viewid)
    def update_plot(self, plot, session, var):
        """Subscribe to *var* and stream each incoming sample into the plot,
        keeping a sliding window of at most 100 points."""
        data_handler.subscribe(var)
        for item in data_handler.listen():
            y = float(item['data'])
            renderer = plot.select(dict(name='line'))
            ds = renderer[0].data_source
            try:
                # x advances by one past the last recorded sample ...
                ds.data["x"].append(ds.data["x"][-1] + 1)
            except IndexError:
                # ... or starts at 0 for the very first sample.
                ds.data['x'].append(0)
            ds.data["y"].append(y)
            if len(ds.data["x"]) > 100:
                ds.data["x"].pop(0)
                ds.data["y"].pop(0)
            # Push the mutated data source back to the bokeh server.
            session.store_objects(ds)
    def update(self, source_name):
        """Mark *source_name* as having fresh data and wake any waiters."""
        self._updated[source_name] = True
        self._is_updated.set()
    def create_plot(self):
        """Create an empty line plot on the bokeh server.

        :return: tuple of (figure, server session).
        """
        output_server('animation')
        source = ColumnDataSource(data=dict(x=[], y=[]))
        p = figure(plot_width=800, plot_height=400)
        p.line(x='x', y='y', source=source, name='line')
        push()
        return p, cursession()
# Register this view with the application under its URL key.
app.views[BokehLinechart.url] = BokehLinechart
| hildensia/devviz | devviz/views/bokeh_backend.py | Python | bsd-2-clause | 2,327 |
try:
import json
assert json # workaround for pyflakes issue #13
except ImportError:
import simplejson as json
from rdflib.py3compat import PY3
from os import sep
from os.path import normpath
if PY3:
from urllib.parse import urljoin, urlsplit, urlunsplit
else:
from urlparse import urljoin, urlsplit, urlunsplit
from rdflib.parser import create_input_source
if PY3:
from io import StringIO
def source_to_json(source):
    """Resolve *source* through rdflib's input machinery and return the
    parsed JSON document. The underlying stream is always closed, whether
    or not parsing succeeds.
    """
    # TODO: conneg for JSON (fix support in rdflib's URLInputSource!)
    input_source = create_input_source(source, format='json-ld')
    stream = input_source.getByteStream()
    try:
        if not PY3:
            return json.load(stream)
        # On py3 the byte stream must be decoded before json can parse it.
        return json.load(StringIO(stream.read().decode('utf-8')))
    finally:
        stream.close()
VOCAB_DELIMS = ('#', '/', ':')
def split_iri(iri):
    """Split *iri* into ``(namespace, term)`` parts.

    Delimiters are tried in order ('#', then '/', then ':'); the split
    happens at the last occurrence of the first delimiter present, with
    the delimiter kept on the namespace side. Returns ``(iri, None)``
    when no delimiter occurs at all.
    """
    for delim in VOCAB_DELIMS:
        cut = iri.rfind(delim)
        if cut != -1:
            return iri[:cut + 1], iri[cut + 1:]
    return iri, None
def norm_url(base, url):
    """Resolve *url* against *base* and normalize the path component.

    A trailing '/' or '#' that ``normpath``/``urlunsplit`` would drop is
    restored, since both are significant in IRIs.
    """
    joined = urljoin(base, url)
    scheme, netloc, raw_path, query, fragment = urlsplit(joined)
    path = normpath(raw_path)
    if sep != '/':
        # normpath uses the OS separator; force URL-style slashes.
        path = '/'.join(path.split(sep))
    if raw_path.endswith('/') and not path.endswith('/'):
        path += '/'
    result = urlunsplit((scheme, netloc, path, query, fragment))
    if joined.endswith('#') and not result.endswith('#'):
        result += '#'
    return result
def context_from_urlinputsource(source):
    """Return the JSON-LD context URL advertised in *source*'s Link headers.

    Returns None when the source is not plain JSON, when the InputSource
    predates ``response_info`` (added in rdflib 4.2), or when no matching
    Link header is present.
    """
    if source.content_type != 'application/json':
        return None
    try:
        link_headers = source.response_info.getallmatchingheaders('Link')
    except AttributeError:
        return None
    for header in link_headers:
        if ' rel="http://www.w3.org/ns/json-ld#context"' not in header:
            continue
        start, end = header.index('<'), header.index('>')
        if start > -1 and end > -1:
            return urljoin(source.url, header[start + 1:end])
    return None
| dahuo2013/rdflib-jsonld | rdflib_jsonld/util.py | Python | bsd-3-clause | 1,936 |
# Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
from oslo_concurrency import lockutils
from oslo_log import log as logging
from oslo_utils.secretutils import md5
import yaml
from tempest.lib import auth
from tempest.lib.common import cred_provider
from tempest.lib.common import fixed_network
from tempest.lib import exceptions as lib_exc
from tempest.lib.services import clients
LOG = logging.getLogger(__name__)
def read_accounts_yaml(path):
    """Return the pre-provisioned accounts parsed from the YAML file at *path*.

    :raises lib_exc.InvalidConfiguration: when the file cannot be opened/read.
    """
    try:
        with open(path, 'r') as yaml_file:
            return yaml.safe_load(yaml_file)
    except IOError:
        raise lib_exc.InvalidConfiguration(
            'The path for the test accounts file: %s '
            'could not be found' % path)
class PreProvisionedCredentialProvider(cred_provider.CredentialProvider):
    """Credentials provider using pre-provisioned accounts

    This credentials provider loads the details of pre-provisioned
    accounts from a YAML file, in the format specified by
    ``etc/accounts.yaml.sample``. It locks accounts while in use, using the
    external locking mechanism, allowing for multiple python processes
    to share a single account file, and thus running tests in parallel.

    The accounts_lock_dir must be generated using `lockutils.get_lock_path`
    from the oslo.concurrency library. For instance::

        accounts_lock_dir = os.path.join(lockutils.get_lock_path(CONF),
                                         'test_accounts')

    Role names for object storage are optional as long as the
    `operator` and `reseller_admin` credential types are not used in the
    accounts file.

    :param identity_version: identity version of the credentials
    :param admin_role: name of the admin role
    :param test_accounts_file: path to the accounts YAML file
    :param accounts_lock_dir: the directory for external locking
    :param name: name of the hash file (optional)
    :param credentials_domain: name of the domain credentials belong to
                               (if no domain is configured)
    :param object_storage_operator_role: name of the role
    :param object_storage_reseller_admin_role: name of the role
    :param identity_uri: Identity URI of the target cloud
    """

    # Exclude from the hash fields specific to v2 or v3 identity API
    # i.e. only include user*, project*, tenant* and password
    HASH_CRED_FIELDS = (set(auth.KeystoneV2Credentials.ATTRIBUTES) &
                        set(auth.KeystoneV3Credentials.ATTRIBUTES))

    def __init__(self, identity_version, test_accounts_file,
                 accounts_lock_dir, name=None, credentials_domain=None,
                 admin_role=None, object_storage_operator_role=None,
                 object_storage_reseller_admin_role=None, identity_uri=None):
        super(PreProvisionedCredentialProvider, self).__init__(
            identity_version=identity_version, name=name,
            admin_role=admin_role, credentials_domain=credentials_domain,
            identity_uri=identity_uri)
        self.test_accounts_file = test_accounts_file
        if test_accounts_file:
            accounts = read_accounts_yaml(self.test_accounts_file)
        else:
            raise lib_exc.InvalidCredentials("No accounts file specified")
        # Lookup of creds/roles/networks, keyed by a hash of each account.
        self.hash_dict = self.get_hash_dict(
            accounts, admin_role, object_storage_operator_role,
            object_storage_reseller_admin_role)
        self.accounts_dir = accounts_lock_dir
        # Cache of already-allocated credentials, keyed by credential kind.
        self._creds = {}

    @classmethod
    def _append_role(cls, role, account_hash, hash_dict):
        """Record that the account *account_hash* carries *role*."""
        if role in hash_dict['roles']:
            hash_dict['roles'][role].append(account_hash)
        else:
            hash_dict['roles'][role] = [account_hash]
        return hash_dict

    @classmethod
    def _append_scoped_role(cls, scope, role, account_hash, hash_dict):
        """Record that *account_hash* carries *role* at *scope* (e.g. project)."""
        key = "%s_%s" % (scope, role)
        hash_dict['scoped_roles'].setdefault(key, [])
        hash_dict['scoped_roles'][key].append(account_hash)
        return hash_dict

    @classmethod
    def get_hash_dict(cls, accounts, admin_role,
                      object_storage_operator_role=None,
                      object_storage_reseller_admin_role=None):
        """Build the account lookup dict, keyed by a hash of each account.

        :raises lib_exc.InvalidCredentials: when an 'operator' or
            'reseller_admin' type is used without the matching role name.
        """
        hash_dict = {'roles': {}, 'creds': {}, 'networks': {},
                     'scoped_roles': {}}
        # Loop over the accounts read from the yaml file
        for account in accounts:
            roles = []
            types = []
            scope = None
            resources = []
            if 'roles' in account:
                roles = account.pop('roles')
            if 'types' in account:
                types = account.pop('types')
            if 'resources' in account:
                resources = account.pop('resources')
            # Scope is inferred from which name field the account defines.
            if 'project_name' in account:
                scope = 'project'
            elif 'domain_name' in account:
                scope = 'domain'
            elif 'system' in account:
                scope = 'system'
            # Hash only the credential fields common to v2/v3 identity.
            temp_hash = md5(usedforsecurity=False)
            account_for_hash = dict((k, v) for (k, v) in account.items()
                                    if k in cls.HASH_CRED_FIELDS)
            temp_hash.update(str(account_for_hash).encode('utf-8'))
            temp_hash_key = temp_hash.hexdigest()
            hash_dict['creds'][temp_hash_key] = account
            for role in roles:
                hash_dict = cls._append_role(role, temp_hash_key,
                                             hash_dict)
                if scope:
                    hash_dict = cls._append_scoped_role(
                        scope, role, temp_hash_key, hash_dict)
            # If types are set for the account append the matching role
            # subdict with the hash
            for type in types:
                if type == 'admin':
                    hash_dict = cls._append_role(admin_role, temp_hash_key,
                                                 hash_dict)
                elif type == 'operator':
                    if object_storage_operator_role:
                        hash_dict = cls._append_role(
                            object_storage_operator_role, temp_hash_key,
                            hash_dict)
                    else:
                        msg = ("Type 'operator' configured, but no "
                               "object_storage_operator_role specified")
                        raise lib_exc.InvalidCredentials(msg)
                elif type == 'reseller_admin':
                    if object_storage_reseller_admin_role:
                        hash_dict = cls._append_role(
                            object_storage_reseller_admin_role,
                            temp_hash_key,
                            hash_dict)
                    else:
                        msg = ("Type 'reseller_admin' configured, but no "
                               "object_storage_reseller_admin_role specified")
                        raise lib_exc.InvalidCredentials(msg)
            # Populate the network subdict
            for resource in resources:
                if resource == 'network':
                    hash_dict['networks'][temp_hash_key] = resources[resource]
                else:
                    LOG.warning(
                        'Unknown resource type %s, ignoring this field',
                        resource
                    )
        return hash_dict

    def is_multi_user(self):
        """Return True when more than one account is available."""
        return len(self.hash_dict['creds']) > 1

    def is_multi_tenant(self):
        """Multi-tenancy mirrors multi-user for pre-provisioned accounts."""
        return self.is_multi_user()

    def _create_hash_file(self, hash_string):
        """Create the lock file for *hash_string*.

        :return: True when the file was newly created (account was free),
            False when it already existed (account is in use).
        """
        path = os.path.join(self.accounts_dir, hash_string)
        if not os.path.isfile(path):
            with open(path, 'w') as fd:
                fd.write(self.name)
            return True
        return False

    @lockutils.synchronized('test_accounts_io', external=True)
    def _get_free_hash(self, hashes):
        """Pick and lock a free account hash out of *hashes*.

        Serialized across processes by the external lock so two workers
        cannot grab the same account.

        :raises lib_exc.InvalidCredentials: when every account is in use.
        """
        # Cast as a list because in some edge cases a set will be passed in
        hashes = list(hashes)
        if not os.path.isdir(self.accounts_dir):
            os.mkdir(self.accounts_dir)
            # Create File from first hash (since none are in use)
            self._create_hash_file(hashes[0])
            return hashes[0]
        names = []
        for _hash in hashes:
            res = self._create_hash_file(_hash)
            if res:
                return _hash
            else:
                path = os.path.join(self.accounts_dir, _hash)
                with open(path, 'r') as fd:
                    names.append(fd.read())
        msg = ('Insufficient number of users provided. %s have allocated all '
               'the credentials for this allocation request' % ','.join(names))
        raise lib_exc.InvalidCredentials(msg)

    def _get_match_hash_list(self, roles=None, scope=None):
        """Return the hashes of accounts carrying all *roles* (at *scope*).

        Admin accounts are excluded unless the admin role was explicitly
        requested.
        """
        hashes = []
        if roles:
            # Loop over all the creds for each role in the subdict and generate
            # a list of cred lists for each role
            for role in roles:
                if scope:
                    key = "%s_%s" % (scope, role)
                    temp_hashes = self.hash_dict['scoped_roles'].get(key)
                    if not temp_hashes:
                        raise lib_exc.InvalidCredentials(
                            "No credentials matching role: %s, scope: %s "
                            "specified in the accounts file" % (role, scope))
                else:
                    temp_hashes = self.hash_dict['roles'].get(role, None)
                    if not temp_hashes:
                        raise lib_exc.InvalidCredentials(
                            "No credentials with role: %s specified in the "
                            "accounts file" % role)
                hashes.append(temp_hashes)
            # Take the list of lists and do a boolean and between each list to
            # find the creds which fall under all the specified roles
            temp_list = set(hashes[0])
            for hash_list in hashes[1:]:
                temp_list = temp_list & set(hash_list)
            hashes = temp_list
        else:
            hashes = self.hash_dict['creds'].keys()
        # NOTE(mtreinish): admin is a special case because of the increased
        # privilege set which could potentially cause issues on tests where
        # that is not expected. So unless the admin role isn't specified do
        # not allocate admin.
        admin_hashes = self.hash_dict['roles'].get(self.admin_role,
                                                   None)
        if ((not roles or self.admin_role not in roles) and
                admin_hashes):
            useable_hashes = [x for x in hashes if x not in admin_hashes]
        else:
            useable_hashes = hashes
        return useable_hashes

    def _sanitize_creds(self, creds):
        """Return a copy of *creds* with the password removed (for logging)."""
        temp_creds = creds.copy()
        temp_creds.pop('password')
        return temp_creds

    def _get_creds(self, roles=None, scope=None):
        """Allocate (lock) one matching account and wrap it in TestResources."""
        useable_hashes = self._get_match_hash_list(roles, scope)
        if not useable_hashes:
            msg = 'No users configured for type/roles %s' % roles
            raise lib_exc.InvalidCredentials(msg)
        free_hash = self._get_free_hash(useable_hashes)
        clean_creds = self._sanitize_creds(
            self.hash_dict['creds'][free_hash])
        LOG.info('%s allocated creds:\n%s', self.name, clean_creds)
        return self._wrap_creds_with_network(free_hash)

    @lockutils.synchronized('test_accounts_io', external=True)
    def remove_hash(self, hash_string):
        """Release the lock file for *hash_string* (and the dir when empty)."""
        hash_path = os.path.join(self.accounts_dir, hash_string)
        if not os.path.isfile(hash_path):
            LOG.warning('Expected an account lock file %s to remove, but '
                        'one did not exist', hash_path)
        else:
            os.remove(hash_path)
            if not os.listdir(self.accounts_dir):
                os.rmdir(self.accounts_dir)

    def get_hash(self, creds):
        """Return the hash key of the account matching *creds*.

        :raises AttributeError: when no configured account matches.
        """
        for _hash in self.hash_dict['creds']:
            # Comparing on the attributes that are expected in the YAML
            init_attributes = creds.get_init_attributes()
            # Only use the attributes initially used to calculate the hash
            init_attributes = [x for x in init_attributes if
                               x in self.HASH_CRED_FIELDS]
            hash_attributes = self.hash_dict['creds'][_hash].copy()
            # NOTE(andreaf) Not all fields may be available on all credentials
            # so defaulting to None for that case.
            if all([getattr(creds, k, None) == hash_attributes.get(k, None) for
                    k in init_attributes]):
                return _hash
        raise AttributeError('Invalid credentials %s' % creds)

    def remove_credentials(self, creds):
        """Return *creds* to the pool by removing their lock file."""
        _hash = self.get_hash(creds)
        clean_creds = self._sanitize_creds(self.hash_dict['creds'][_hash])
        self.remove_hash(_hash)
        LOG.info("%s returned allocated creds:\n%s", self.name, clean_creds)

    # The get_*_creds methods below lazily allocate an account with the
    # requested role/scope and cache it for the life of this provider.

    # TODO(gmann): Remove this method in favor of get_project_member_creds()
    # after the deprecation phase.
    def get_primary_creds(self):
        if self._creds.get('primary'):
            return self._creds.get('primary')
        net_creds = self._get_creds()
        self._creds['primary'] = net_creds
        return net_creds

    # TODO(gmann): Replace this method with more appropriate name.
    # like get_project_alt_member_creds()
    def get_alt_creds(self):
        if self._creds.get('alt'):
            return self._creds.get('alt')
        net_creds = self._get_creds()
        self._creds['alt'] = net_creds
        return net_creds

    def get_system_admin_creds(self):
        if self._creds.get('system_admin'):
            return self._creds.get('system_admin')
        system_admin = self._get_creds(['admin'], scope='system')
        self._creds['system_admin'] = system_admin
        return system_admin

    def get_system_member_creds(self):
        if self._creds.get('system_member'):
            return self._creds.get('system_member')
        system_member = self._get_creds(['member'], scope='system')
        self._creds['system_member'] = system_member
        return system_member

    def get_system_reader_creds(self):
        if self._creds.get('system_reader'):
            return self._creds.get('system_reader')
        system_reader = self._get_creds(['reader'], scope='system')
        self._creds['system_reader'] = system_reader
        return system_reader

    def get_domain_admin_creds(self):
        if self._creds.get('domain_admin'):
            return self._creds.get('domain_admin')
        domain_admin = self._get_creds(['admin'], scope='domain')
        self._creds['domain_admin'] = domain_admin
        return domain_admin

    def get_domain_member_creds(self):
        if self._creds.get('domain_member'):
            return self._creds.get('domain_member')
        domain_member = self._get_creds(['member'], scope='domain')
        self._creds['domain_member'] = domain_member
        return domain_member

    def get_domain_reader_creds(self):
        if self._creds.get('domain_reader'):
            return self._creds.get('domain_reader')
        domain_reader = self._get_creds(['reader'], scope='domain')
        self._creds['domain_reader'] = domain_reader
        return domain_reader

    def get_project_admin_creds(self):
        if self._creds.get('project_admin'):
            return self._creds.get('project_admin')
        project_admin = self._get_creds(['admin'], scope='project')
        self._creds['project_admin'] = project_admin
        return project_admin

    def get_project_alt_admin_creds(self):
        # TODO(gmann): Implement alt admin hash.
        return

    def get_project_member_creds(self):
        if self._creds.get('project_member'):
            return self._creds.get('project_member')
        project_member = self._get_creds(['member'], scope='project')
        self._creds['project_member'] = project_member
        return project_member

    def get_project_alt_member_creds(self):
        # TODO(gmann): Implement alt member hash.
        return

    def get_project_reader_creds(self):
        if self._creds.get('project_reader'):
            return self._creds.get('project_reader')
        project_reader = self._get_creds(['reader'], scope='project')
        self._creds['project_reader'] = project_reader
        return project_reader

    def get_project_alt_reader_creds(self):
        # TODO(gmann): Implement alt reader hash.
        return

    def get_creds_by_roles(self, roles, force_new=False, scope=None):
        """Allocate credentials carrying all of *roles*.

        :param force_new: when True, allocate a fresh account even if one
            with the same role list was already allocated.
        """
        roles = list(set(roles))
        exist_creds = self._creds.get(str(roles).encode(
            'utf-8'), None)
        # The force kwarg is used to allocate an additional set of creds with
        # the same role list. The index used for the previously allocation
        # in the _creds dict will be moved.
        if exist_creds and not force_new:
            return exist_creds
        elif exist_creds and force_new:
            # NOTE(andreaf) In py3.x encode returns bytes, and b'' is bytes
            # In py2.7 encode returns strings, and b'' is still string
            new_index = str(roles).encode('utf-8') + b'-' + \
                str(len(self._creds)).encode('utf-8')
            self._creds[new_index] = exist_creds
        net_creds = self._get_creds(roles=roles)
        self._creds[str(roles).encode('utf-8')] = net_creds
        return net_creds

    def clear_creds(self):
        """Release every credential allocated by this provider."""
        for creds in self._creds.values():
            self.remove_credentials(creds)

    # TODO(gmann): Remove this method in favor of get_project_admin_creds()
    # after the deprecation phase.
    def get_admin_creds(self):
        return self.get_creds_by_roles([self.admin_role])

    def is_role_available(self, role):
        """Return True when at least one account carries *role*."""
        if self.hash_dict['roles'].get(role):
            return True
        return False

    def admin_available(self):
        """Return True when an admin account is configured."""
        return self.is_role_available(self.admin_role)

    def _wrap_creds_with_network(self, hash):
        """Build a TestResources for account *hash*, attaching its network."""
        creds_dict = self.hash_dict['creds'][hash]
        # Make sure a domain scope if defined for users in case of V3
        # Make sure a tenant is available in case of V2
        creds_dict = self._extend_credentials(creds_dict)
        # This just builds a Credentials object, it does not validate
        # nor fill with missing fields.
        credential = auth.get_credentials(
            auth_url=None, fill_in=False,
            identity_version=self.identity_version, **creds_dict)
        net_creds = cred_provider.TestResources(credential)
        net_clients = clients.ServiceClients(credentials=credential,
                                             identity_uri=self.identity_uri)
        networks_client = net_clients.network.NetworksClient()
        net_name = self.hash_dict['networks'].get(hash, None)
        try:
            network = fixed_network.get_network_from_name(
                net_name, networks_client)
        except lib_exc.InvalidTestResource:
            network = {}
        net_creds.set_resources(network=network)
        return net_creds

    def _extend_credentials(self, creds_dict):
        # Add or remove credential domain fields to fit the identity version
        domain_fields = set(x for x in auth.KeystoneV3Credentials.ATTRIBUTES
                            if 'domain' in x)
        msg = 'Assuming they are valid in the default domain.'
        if self.identity_version == 'v3':
            if not domain_fields.intersection(set(creds_dict.keys())):
                msg = 'Using credentials %s for v3 API calls. ' + msg
                LOG.warning(msg, self._sanitize_creds(creds_dict))
                creds_dict['domain_name'] = self.credentials_domain
        if self.identity_version == 'v2':
            if domain_fields.intersection(set(creds_dict.keys())):
                msg = 'Using credentials %s for v2 API calls. ' + msg
                LOG.warning(msg, self._sanitize_creds(creds_dict))
                # Remove all valid domain attributes
                for attr in domain_fields.intersection(set(creds_dict.keys())):
                    creds_dict.pop(attr)
        return creds_dict
| openstack/tempest | tempest/lib/common/preprov_creds.py | Python | apache-2.0 | 20,875 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from . import TraceParameters
__author__ = 'Shamal Faily'
class DotTraceParameters(TraceParameters.TraceParameters):
  """Parameters for a dot (visual) trace link between two model objects.

  Thin subclass that currently adds no behaviour beyond TraceParameters;
  it exists to give dot traces their own distinct type.
  """
  def __init__(self,fromObjt,fromName,toObjt,toName):
    # :param fromObjt: dimension/type of the source object
    # :param fromName: name of the source object
    # :param toObjt: dimension/type of the destination object
    # :param toName: name of the destination object
    TraceParameters.TraceParameters.__init__(self,fromObjt,fromName,toObjt,toName)
| nathanbjenx/cairis | cairis/core/DotTraceParameters.py | Python | apache-2.0 | 1,055 |
import json
import sys
import logging
import logging.handlers
def load_config():
    '''Loads application configuration from a JSON file.

    Reads ``config.json`` from the current working directory and returns
    the decoded object. On any failure (missing file, invalid JSON) an
    explanatory message is printed and the process exits with status 1.
    '''
    try:
        # 'with' guarantees the file is closed even when json.load raises;
        # the original left the handle open on a parse error.
        with open('config.json') as json_data:
            return json.load(json_data)
    except Exception:
        # print(...) with a single argument behaves identically on
        # Python 2 (parenthesized expression) and Python 3.
        print("""There was an error loading config.json.
        Make sure that the file exists and it's a valid JSON file.""")
        sys.exit(1)
def init_logger(file_name='clouddump.log'):
    '''
    Initializes the 'clouddump' logger.

    Attaches a rotating file handler (max ~1 GB per file) at DEBUG level;
    when the process was started with -v/--verbose as its first argument,
    an additional INFO-level console handler is attached.

    parameters
    ----------
    file_name: A string with the name of the file to write the logs in
    '''
    log = logging.getLogger('clouddump')
    file_handler = logging.handlers.RotatingFileHandler(file_name,
                                                        maxBytes=10 ** 9)
    file_handler.setFormatter(
        logging.Formatter('%(asctime)s - %(levelname)s - %(message)s'))
    log.addHandler(file_handler)
    log.setLevel(logging.DEBUG)
    if len(sys.argv) > 1 and sys.argv[1] in ('-v', '--verbose'):
        console = logging.StreamHandler()
        console.setLevel(logging.INFO)
        log.addHandler(console)
'''
Implementation of Bitcoin's p2p protocol
'''
import random
import sys
import time
from twisted.internet import protocol
import p2pool
from . import data as bitcoin_data
from p2pool.util import deferral, p2protocol, pack, variable
class Protocol(p2protocol.Protocol):
    """Client-side speaker of Bitcoin's P2P wire protocol.

    ``message_*`` class attributes describe each command's payload layout;
    the p2protocol base class uses them to decode incoming messages into
    calls on the matching ``handle_*`` method and to synthesize the
    corresponding ``send_*`` methods for outgoing messages.
    """
    def __init__(self, net):
        # 1000000 = max payload size; trailing payload bytes are tolerated.
        p2protocol.Protocol.__init__(self, net.P2P_PREFIX, 1000000, ignore_trailing_payload=True)
    def connectionMade(self):
        """Open the handshake by advertising ourselves to the peer."""
        self.send_version(
            version=70007,
            services=1,
            time=int(time.time()),
            addr_to=dict(
                services=1,
                address=self.transport.getPeer().host,
                port=self.transport.getPeer().port,
            ),
            addr_from=dict(
                services=1,
                address=self.transport.getHost().host,
                port=self.transport.getHost().port,
            ),
            nonce=random.randrange(2**64),
            sub_version_num='/P2Pool:%s/' % (p2pool.__version__,),
            start_height=0,
        )
    message_version = pack.ComposedType([
        ('version', pack.IntType(32)),
        ('services', pack.IntType(64)),
        ('time', pack.IntType(64)),
        ('addr_to', bitcoin_data.address_type),
        ('addr_from', bitcoin_data.address_type),
        ('nonce', pack.IntType(64)),
        ('sub_version_num', pack.VarStrType()),
        ('start_height', pack.IntType(32)),
    ])
    def handle_version(self, version, services, time, addr_to, addr_from, nonce, sub_version_num, start_height):
        # Acknowledge the peer's version to complete the handshake.
        self.send_verack()
    message_verack = pack.ComposedType([])
    def handle_verack(self):
        """Handshake complete: set up request matchers and keepalive pings."""
        # ReplyMatcher pairs our outgoing getdata/getheaders requests with
        # the block/header responses arriving later.
        self.get_block = deferral.ReplyMatcher(lambda hash: self.send_getdata(requests=[dict(type='block', hash=hash)]))
        self.get_block_header = deferral.ReplyMatcher(lambda hash: self.send_getheaders(version=1, have=[], last=hash))
        if hasattr(self.factory, 'resetDelay'):
            self.factory.resetDelay()
        if hasattr(self.factory, 'gotConnection'):
            self.factory.gotConnection(self)
        # Ping every 30s to keep the connection alive.
        self.pinger = deferral.RobustLoopingCall(self.send_ping, nonce=1234)
        self.pinger.start(30)
    message_inv = pack.ComposedType([
        ('invs', pack.ListType(pack.ComposedType([
            ('type', pack.EnumType(pack.IntType(32), {1: 'tx', 2: 'block'})),
            ('hash', pack.IntType(256)),
        ]))),
    ])
    def handle_inv(self, invs):
        """Peer advertised inventory: fetch txs, signal new blocks."""
        for inv in invs:
            if inv['type'] == 'tx':
                self.send_getdata(requests=[inv])
            elif inv['type'] == 'block':
                self.factory.new_block.happened(inv['hash'])
            else:
                print 'Unknown inv type', inv
    message_getdata = pack.ComposedType([
        ('requests', pack.ListType(pack.ComposedType([
            ('type', pack.EnumType(pack.IntType(32), {1: 'tx', 2: 'block'})),
            ('hash', pack.IntType(256)),
        ]))),
    ])
    message_getblocks = pack.ComposedType([
        ('version', pack.IntType(32)),
        ('have', pack.ListType(pack.IntType(256))),
        ('last', pack.PossiblyNoneType(0, pack.IntType(256))),
    ])
    message_getheaders = pack.ComposedType([
        ('version', pack.IntType(32)),
        ('have', pack.ListType(pack.IntType(256))),
        ('last', pack.PossiblyNoneType(0, pack.IntType(256))),
    ])
    message_getaddr = pack.ComposedType([])
    message_addr = pack.ComposedType([
        ('addrs', pack.ListType(pack.ComposedType([
            ('timestamp', pack.IntType(32)),
            ('address', bitcoin_data.address_type),
        ]))),
    ])
    def handle_addr(self, addrs):
        # Peer addresses are ignored; we only connect to the configured node.
        for addr in addrs:
            pass
    message_tx = pack.ComposedType([
        ('tx', bitcoin_data.tx_type),
    ])
    def handle_tx(self, tx):
        self.factory.new_tx.happened(tx)
    message_block = pack.ComposedType([
        ('block', bitcoin_data.block_type),
    ])
    def handle_block(self, block):
        # The block hash is the double-SHA256 of the packed header.
        block_hash = bitcoin_data.hash256(bitcoin_data.block_header_type.pack(block['header']))
        self.get_block.got_response(block_hash, block)
        self.get_block_header.got_response(block_hash, block['header'])
    message_block_old = pack.ComposedType([
        ('block', bitcoin_data.block_type_old),
    ])
    def handle_block_old(self, block):
        # Same as handle_block but for the pre-upgrade block layout.
        block_hash = bitcoin_data.hash256(bitcoin_data.block_header_type.pack(block['header']))
        self.get_block.got_response(block_hash, block)
        self.get_block_header.got_response(block_hash, block['header'])
    message_headers = pack.ComposedType([
        ('headers', pack.ListType(bitcoin_data.block_type_old)),
    ])
    def handle_headers(self, headers):
        # Each entry is a block wrapper; the actual header is nested
        # under its 'header' key.
        for header in headers:
            header = header['header']
            self.get_block_header.got_response(bitcoin_data.hash256(bitcoin_data.block_header_type.pack(header)), header)
        self.factory.new_headers.happened([header['header'] for header in headers])
    message_ping = pack.ComposedType([
        ('nonce', pack.IntType(64)),
    ])
    def handle_ping(self, nonce):
        # Echo the nonce back so the peer knows we are alive.
        self.send_pong(nonce=nonce)
    message_pong = pack.ComposedType([
        ('nonce', pack.IntType(64)),
    ])
    def handle_pong(self, nonce):
        pass
    message_alert = pack.ComposedType([
        ('message', pack.VarStrType()),
        ('signature', pack.VarStrType()),
    ])
    def handle_alert(self, message, signature):
        pass # print 'ALERT:', (message, signature)
    def connectionLost(self, reason):
        """Tear down: clear the factory's connection and stop the pinger."""
        if hasattr(self.factory, 'gotConnection'):
            self.factory.gotConnection(None)
        if hasattr(self, 'pinger'):
            self.pinger.stop()
        if p2pool.DEBUG:
            print >>sys.stderr, 'Bitcoin connection lost. Reason:', reason.getErrorMessage()
class ClientFactory(protocol.ReconnectingClientFactory):
    """Reconnecting TCP factory for the Bitcoin P2P protocol.

    Exposes the live connection via ``conn`` (None while disconnected)
    and fires ``new_block``/``new_tx``/``new_headers`` events as the
    protocol receives data.
    """
    protocol = Protocol
    # Retry quickly: cap the reconnect backoff at one second.
    maxDelay = 1
    def __init__(self, net):
        self.net = net
        # Current Protocol instance, or None while disconnected.
        self.conn = variable.Variable(None)
        self.new_block = variable.Event()
        self.new_tx = variable.Event()
        self.new_headers = variable.Event()
    def buildProtocol(self, addr):
        """Instantiate the protocol and link it back to this factory."""
        proto = self.protocol(self.net)
        proto.factory = self
        return proto
    def gotConnection(self, conn):
        """Record the (dis)connected protocol instance."""
        self.conn.set(conn)
    def getProtocol(self):
        """Wait for and return a live connection."""
        return self.conn.get_not_none()
| dragosbdi/p2pool | p2pool/bitcoin/p2p.py | Python | gpl-3.0 | 6,499 |
import logging
# Configuración del módulo logging
# loguear a la stdout
logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(levelname)s - %(message)s')
# loguear a un archivo
# logging.basicConfig(filename='s12l36.log', level=logging.DEBUG, format='%(asctime)s - %(levelname)s - %(message)s')
'''
#Programa sin logging
def factorial(numero):
total = 1
for i in range(numero + 1):
total *= i
return total
'''
# Programa con logging
# logging.disable(logging.CRITICAL) # Con esto se deshabilitan los logs de CRITICAL para abajo
logging.debug('Comienzo')
def factorial(numero):
    """Return numero! (factorial), logging every step at DEBUG level."""
    logging.debug('Función factorial, numero es %s' % (numero))
    acumulado = 1
    for k in range(1, numero + 1):
        acumulado *= k
        logging.debug('i es %s. total es %s' % (k, acumulado))
    logging.debug('Retora un total de %s' % (acumulado))
    return acumulado
# Exercise the instrumented function and mark the end of the run.
print(factorial(6))
logging.debug('Final')
| sistemasmarcelocastro/pruebas-python | cursoPython1/s12l36.py | Python | mit | 926 |
# This file is part of uSSync.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Copyright 2013 Erdal Sivri, Oguzhan Unlu
"""
This module synchronizes files in one folder to another one in a remote computer over a NeTwork.
"""
import os
import sys
def sync_folder(source, target, excluded):
    """Synchronize files from *source* into *target*, skipping *excluded*.

    NOTE(review): stub implementation -- all parameters are currently
    ignored and the function always returns 1; the actual synchronization
    logic has not been written yet.
    """
    return 1
| oguzhanunlu/uSSync | sync/nt_sync.py | Python | gpl-3.0 | 908 |
#! /usr/bin/python
from twisted.spread import pb
from twisted.internet import reactor
class Two(pb.Referenceable):
    """Remotely-referenceable object; peers invoke its remote_* methods."""
    def remote_print(self, arg):
        # Invoked over the wire via callRemote("print", ...).
        print "Two.print() called with", arg
def main():
    """Connect to the PB server on localhost:8800 and hand it a Two reference."""
    referenceable = Two()
    pb_factory = pb.PBClientFactory()
    reactor.connectTCP("localhost", 8800, pb_factory)
    root_deferred = pb_factory.getRootObject()
    # hands our 'two' to the callback
    root_deferred.addCallback(got_obj, referenceable)
    reactor.run()
def got_obj(obj, two):
    """Callback fired with the server's root object; passes it our Two."""
    print "got One:", obj
    print "giving it our two"
    obj.callRemote("takeTwo", two)
# Runs unconditionally at import time; the original has no __main__ guard.
main()
| hortonworks/hortonworks-sandbox | desktop/core/ext-py/Twisted/doc/core/howto/listings/pb/pb3client.py | Python | apache-2.0 | 555 |
#!/usr/bin/env python
import argparse
import math
import time
import fiona
import multiprocessing
from rasterio.plot import show
import math
import os
import click
#import matplotlib.pyplot as plt
import numpy as np
import numpy.ma as ma
import rasterio
from rasterio.plot import show, show_hist
from projections.rasterset import RasterSet, Raster
from projections.simpleexpr import SimpleExpr
import projections.predicts as predicts
import projections.r2py.modelr as modelr
import projections.utils as utils
# Bounds of the predictor variables observed when the model was fitted;
# used to clamp the rasters when --clip is given (road distance in the
# raster's native units -- TODO confirm metres -- and human population
# density).
RD_DIST_MIN = 0
RD_DIST_MAX = 195274.3
HPD_MIN = 0
HPD_MAX = 22490
# Command-line interface: choose mainland vs. island model coefficients,
# and whether to clip predictors to the range seen during fitting.
parser = argparse.ArgumentParser(description="ab.py -- abundance projections")
parser.add_argument('--mainland', '-m', dest='mainland', default=False,
                    action='store_true',
                    help='Project using mainland coefficients '
                    '(default: islands)')
parser.add_argument('--clip', '-c', dest='clip', default=False,
                    action='store_true',
                    help='Clip predictor variables to max value seen '
                    'during model fitting')
args = parser.parse_args()
# ISLMAIN feeds the ISL_MAINLMAINLAND model term below; the mask raster
# restricts the projection to either mainlands or islands.
if args.mainland:
  ISLMAIN = 1
  mask_file = os.path.join(utils.data_root(),
                           '1km/mainland-from-igor-edited-at.tif')
else:
  ISLMAIN = 0
  mask_file = os.path.join(utils.data_root(),
                           '1km/islands-from-igor-edited-at.tif')
# Open the mask raster file (mainlands or islands, per the flag above)
mask_ds = rasterio.open(mask_file)
# Read Katia's fitted abundance model (R model serialized as .rds)
mod = modelr.load('/home/vagrant/katia/models/best_model_abund.rds')
predicts.predictify(mod)
# Import standard PREDICTS rasters
rasters = predicts.rasterset('1km', 'medium', year = 2005)
# create an ISL_MAINL raster
# set it to Mainlands this time round (set Mainlands to 1 and Islands to 0)
rasters['ISL_MAINLMAINLAND'] = SimpleExpr('ISL_MAINLMAINLAND', ISLMAIN)
# specify the plantation forest maps as 0
# not sure why it's plantations_pri rather than plantation, but hey ho
rasters['plantation_pri'] = SimpleExpr('plantation_pri', 0)
rasters['plantation_pri_minimal'] = SimpleExpr('plantation_pri_minimal', 0)
rasters['plantation_pri_light'] = SimpleExpr('plantation_pri_light', 0)
rasters['plantation_pri_intense'] = SimpleExpr('plantation_pri_intense', 0)
## If CLIP is true, limit the predictor variable values to the max seen
## when fitting the model
if args.clip:
  rasters['clip_hpd'] = SimpleExpr('clip_hpd',
                                   'clip(hpd_ref, %f, %f)' %(HPD_MIN, HPD_MAX))
else:
  rasters['clip_hpd'] = SimpleExpr('clip_hpd', 'hpd_ref')
### log human population density, then rescale to [0, 1] using the min/max
### log values seen during model fitting (10.02087 == log(HPD_MAX + 1)).
# we need to check whether the logHPD.rs automatically produced uses the
# same values we use if not, manually create logHPD.rs
rasters['logHPD_rs'] = SimpleExpr('logHPD_rs',
                                  'scale(log(clip_hpd + 1), 0.0, 1.0, 0.0, 10.02087)')
# Same treatment for distance-to-road (logDistRd_rs below).
rasters['DistRd'] = Raster('DistRd', os.path.join(utils.data_root(), '1km/rddistwgs.tif')) ###Use new raster
## If args.clip is true, limit the predictor variable values to the max seen
## when fitting the model
if args.clip:
  rasters['clipDistRd'] = SimpleExpr('clipDistRd',
                                     'clip(DistRd, %f, %f)' %(RD_DIST_MIN,
                                                              RD_DIST_MAX))
else:
  rasters['clipDistRd'] = SimpleExpr('clipDistRd', 'DistRd')
rasters['logDistRd_rs'] = SimpleExpr('logDistRd_rs',
                                     'scale(log(clipDistRd + 100),'
                                     '0.0, 1.0, -1.120966, 12.18216)')
### +100 added to DistRd to avoid taking the log of zero.
# set up the rasterset, cropping to the mask (mainlands or islands)
rs = RasterSet(rasters, mask = mask_ds, maskval=0, crop = True)
# if you're projecting the whole world, use this code instead
# rs = RasterSet(rasters)
# Evaluate the model.  The model predicts square-root abundance, so the
# output is squared.  The intercept is computed with the model's partial
# evaluation at the baseline land use, with DistRd at its maximum
# (logDistRd_rs == 1), and is used to normalize the projection below.
intercept = mod.partial({'ISL_MAINLMAINLAND': ISLMAIN,
                         'logDistRd_rs': 1.0})
print("intercept: %.5f" % intercept)
# Sanity check: the intercept must match the value computed in R when the
# model was fitted.
if args.mainland:
  assert math.isclose(intercept, 0.67184, rel_tol=0.001)
else:
  ## FIXME: Replace RHS with the R calculated value
  assert math.isclose(intercept, 0.7270164, rel_tol=0.001)
rs[mod.output] = mod
# Square the sqrt-abundance prediction and express it relative to the
# squared intercept (abundance relative to baseline).
rs['output'] = SimpleExpr('output', '(pow(%s, 2) / pow(%f, 2))' % (mod.output, intercept))
fname = 'ab-%s.tif' % ('mainland' if args.mainland else 'islands')
path = ('katia', 'clip' if args.clip else 'no-clip', fname)
rs.write('output', utils.outfn(*path))
| ricardog/raster-project | user-scripts/katia/ab.py | Python | apache-2.0 | 4,820 |
#
# Copyright (c) 2013 Markus Eliasson, http://www.quarterapp.com/
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from .quarterapp import *
import plugin | OlofFredriksson/quarterapp | quarterapp/__init__.py | Python | gpl-3.0 | 756 |
# -*- coding: utf-8 -*-
"""
@author: Fabio Erculiani <lxnay@sabayon.org>
@contact: lxnay@sabayon.org
@copyright: Fabio Erculiani
@license: GPL-2
B{Entropy Framework repository database module}.
Entropy repositories (server and client) are implemented as relational
databases. Currently, EntropyRepository class is the object that wraps
sqlite3 database queries and repository logic: there are no more
abstractions between the two because there is only one implementation
available at this time. In future, entropy.db will feature more backends
such as MySQL embedded, SparQL, remote repositories support via TCP socket,
etc. This will require a new layer between the repository interface now
offered by EntropyRepository and the underlying data retrieval logic.
Every repository interface available inherits from EntropyRepository
class and has to reimplement its own Schema subclass and its get_init
method (see EntropyRepository documentation for more information).
I{EntropyRepository} is the sqlite3 implementation of the repository
interface, as written above.
"""
from entropy.db.sqlite import EntropySQLiteRepository as EntropyRepository
from entropy.db.mysql import EntropyMySQLRepository
from entropy.db.cache import EntropyRepositoryCacher
__all__ = ["EntropyRepository", "EntropyMySQLRepository",
"EntropyRepositoryCacher"]
| kidaa/entropy | lib/entropy/db/__init__.py | Python | gpl-2.0 | 1,423 |
"""transdate -- Python implementation of Asian lunisolar calendar
Copyright (c) 2004-2006, Kang Seonghoon aka Tokigun.
This module declares lunardate class which represents a day of Asian
lunisolar calendar. lunardate class is compatible with datetime.date
class, so you can use both lunardate and date interchangeably.
lunardate class can handle date between 1881-01-30 (lunar 1881-01-01)
and 2051-02-10 (lunar 2050-12-29). Since lunisolar calendar table is
based on Korea Astronomy & Space Science Institute, it can be
different with calendars used by other countries.
In order to reduce size of bytecode, all of numeric table is stored as
Unicode string (capable for range between 0 and 65535). If your Python
is not compiled with Unicode, use transdate_nounicode.py instead.
"""
__author__ = 'Kang Seonghoon aka Tokigun'
__version__ = '1.1.1 (2006-06-25)'
__copyright__ = 'Copyright (c) 2004-2006 Kang Seonghoon aka Tokigun'
__license__ = 'LGPL'
__all__ = ['sol2lun', 'lun2sol', 'date', 'timedelta', 'solardate',
'lunardate', 'getganzistr', 'strftime']
from datetime import date, timedelta
import locale, time
###################################################################################
## Lunisolar Calendar Table
# First year covered by the lunisolar tables below.
_BASEYEAR = 1881
# Supported range as Gregorian ordinals (datetime.date.toordinal values).
_MINDATE = 686686 # 1881.1.30 (lunar 1881.1.1)
_MAXDATE = 748788 # 2051.2.10 (lunar 2050.12.29)
# Language part of the default locale, e.g. 'ko' from 'ko_KR'.
_DEFAULTLOCALE = locale.getdefaultlocale()[0].split('_')[0]
# Pattern matching the %L<x> extensions of strftime(); an odd number of
# preceding '%' escapes the sequence.  Falls back to None when re is absent,
# which disables the lunar extensions.
try: import re; _STRFTIMEREGEXP = re.compile('(?<!%)((?:%%)*)%L(.)')
except ImportError: _STRFTIMEREGEXP = None
_MONTHTABLE = u"\0\u001D\u003B\u0058\u0076\u0093\u00B1\u00CF\u00EC\u010A\u0128\
\u0145\u0163\u0180\u019D\u01BB\u01D8\u01F6\u0213\u0231\u024E\u026C\u028A\u02A7\
\u02C5\u02E3\u0300\u031D\u033B\u0358\u0375\u0393\u03B0\u03CE\u03EC\u040A\u0427\
\u0445\u0463\u0480\u049D\u04BB\u04D8\u04F5\u0513\u0530\u054E\u056C\u0589\u05A7\
\u05C5\u05E3\u0600\u061D\u063B\u0658\u0675\u0693\u06B0\u06CE\u06EB\u0709\u0727\
\u0745\u0762\u0780\u079D\u07BB\u07D8\u07F5\u0813\u0830\u084E\u086B\u0889\u08A7\
\u08C5\u08E2\u0900\u091D\u093B\u0958\u0975\u0993\u09B0\u09CE\u09EB\u0A09\u0A27\
\u0A44\u0A62\u0A80\u0A9D\u0ABB\u0AD8\u0AF5\u0B13\u0B30\u0B4E\u0B6B\u0B89\u0BA6\
\u0BC4\u0BE2\u0BFF\u0C1D\u0C3A\u0C58\u0C75\u0C93\u0CB0\u0CCE\u0CEB\u0D09\u0D26\
\u0D44\u0D61\u0D7F\u0D9D\u0DBA\u0DD8\u0DF5\u0E13\u0E30\u0E4E\u0E6B\u0E89\u0EA6\
\u0EC4\u0EE1\u0EFF\u0F1C\u0F3A\u0F58\u0F75\u0F93\u0FB1\u0FCE\u0FEB\u1009\u1026\
\u1043\u1061\u107E\u109C\u10BA\u10D7\u10F5\u1113\u1131\u114E\u116B\u1189\u11A6\
\u11C3\u11E1\u11FE\u121C\u1239\u1257\u1275\u1293\u12B0\u12CE\u12EB\u1309\u1326\
\u1343\u1361\u137E\u139C\u13B9\u13D7\u13F5\u1413\u1430\u144E\u146B\u1489\u14A6\
\u14C3\u14E1\u14FE\u151C\u1539\u1557\u1574\u1592\u15B0\u15CE\u15EB\u1609\u1626\
\u1643\u1661\u167E\u169C\u16B9\u16D7\u16F4\u1712\u1730\u174D\u176B\u1788\u17A6\
\u17C3\u17E1\u17FE\u181C\u1839\u1857\u1874\u1892\u18AF\u18CD\u18EB\u1908\u1926\
\u1943\u1961\u197E\u199C\u19B9\u19D7\u19F4\u1A12\u1A2F\u1A4D\u1A6A\u1A88\u1AA6\
\u1AC3\u1AE1\u1AFE\u1B1C\u1B39\u1B57\u1B74\u1B91\u1BAF\u1BCC\u1BEA\u1C08\u1C25\
\u1C43\u1C61\u1C7E\u1C9C\u1CB9\u1CD7\u1CF4\u1D11\u1D2F\u1D4C\u1D6A\u1D87\u1DA5\
\u1DC3\u1DE1\u1DFE\u1E1C\u1E39\u1E57\u1E74\u1E91\u1EAF\u1ECC\u1EEA\u1F07\u1F25\
\u1F43\u1F61\u1F7E\u1F9C\u1FB9\u1FD7\u1FF4\u2011\u202F\u204C\u2069\u2087\u20A5\
\u20C2\u20E0\u20FE\u211C\u2139\u2157\u2174\u2191\u21AF\u21CC\u21E9\u2207\u2225\
\u2242\u2260\u227E\u229B\u22B9\u22D7\u22F4\u2311\u232F\u234C\u236A\u2387\u23A5\
\u23C2\u23E0\u23FE\u241B\u2439\u2456\u2474\u2491\u24AF\u24CC\u24EA\u2507\u2525\
\u2542\u2560\u257D\u259B\u25B8\u25D6\u25F4\u2611\u262F\u264C\u266A\u2687\u26A5\
\u26C2\u26DF\u26FD\u271B\u2738\u2756\u2773\u2791\u27AF\u27CC\u27EA\u2807\u2825\
\u2842\u285F\u287D\u289A\u28B8\u28D5\u28F3\u2911\u292F\u294C\u296A\u2987\u29A5\
\u29C2\u29DF\u29FD\u2A1A\u2A38\u2A55\u2A73\u2A91\u2AAF\u2ACC\u2AEA\u2B07\u2B25\
\u2B42\u2B5F\u2B7D\u2B9A\u2BB7\u2BD5\u2BF3\u2C10\u2C2E\u2C4C\u2C6A\u2C87\u2CA5\
\u2CC2\u2CDF\u2CFD\u2D1A\u2D37\u2D55\u2D73\u2D90\u2DAE\u2DCC\u2DEA\u2E07\u2E25\
\u2E42\u2E5F\u2E7D\u2E9A\u2EB7\u2ED5\u2EF2\u2F10\u2F2E\u2F4C\u2F69\u2F87\u2FA5\
\u2FC2\u2FDF\u2FFD\u301A\u3038\u3055\u3072\u3090\u30AE\u30CB\u30E9\u3107\u3124\
\u3142\u315F\u317D\u319A\u31B8\u31D5\u31F3\u3210\u322E\u324B\u3269\u3286\u32A4\
\u32C2\u32DF\u32FD\u331A\u3338\u3355\u3373\u3390\u33AD\u33CB\u33E8\u3406\u3424\
\u3441\u345F\u347D\u349A\u34B8\u34D5\u34F3\u3510\u352D\u354B\u3568\u3586\u35A3\
\u35C1\u35DF\u35FD\u361A\u3638\u3655\u3673\u3690\u36AD\u36CB\u36E8\u3706\u3723\
\u3741\u375F\u377C\u379A\u37B8\u37D5\u37F3\u3810\u382D\u384B\u3868\u3885\u38A3\
\u38C1\u38DE\u38FC\u391A\u3938\u3955\u3973\u3990\u39AD\u39CB\u39E8\u3A05\u3A23\
\u3A40\u3A5E\u3A7C\u3A9A\u3AB7\u3AD5\u3AF3\u3B10\u3B2D\u3B4B\u3B68\u3B85\u3BA3\
\u3BC0\u3BDE\u3BFC\u3C19\u3C37\u3C55\u3C72\u3C90\u3CAD\u3CCB\u3CE8\u3D06\u3D23\
\u3D40\u3D5E\u3D7C\u3D99\u3DB7\u3DD4\u3DF2\u3E10\u3E2D\u3E4B\u3E68\u3E86\u3EA3\
\u3EC0\u3EDE\u3EFB\u3F19\u3F37\u3F54\u3F72\u3F8F\u3FAD\u3FCB\u3FE8\u4006\u4023\
\u4041\u405E\u407B\u4099\u40B6\u40D4\u40F1\u410F\u412D\u414A\u4168\u4186\u41A3\
\u41C1\u41DE\u41FB\u4219\u4236\u4254\u4271\u428F\u42AD\u42CA\u42E8\u4306\u4323\
\u4341\u435E\u437B\u4399\u43B6\u43D3\u43F1\u440F\u442C\u444A\u4468\u4486\u44A3\
\u44C1\u44DE\u44FB\u4519\u4536\u4553\u4571\u458E\u45AC\u45CA\u45E8\u4605\u4623\
\u4641\u465E\u467B\u4699\u46B6\u46D3\u46F1\u470E\u472C\u474A\u4767\u4785\u47A3\
\u47C1\u47DE\u47FB\u4819\u4836\u4853\u4871\u488E\u48AC\u48C9\u48E7\u4905\u4923\
\u4940\u495E\u497B\u4999\u49B6\u49D3\u49F1\u4A0E\u4A2C\u4A49\u4A67\u4A85\u4AA2\
\u4AC0\u4ADE\u4AFB\u4B19\u4B36\u4B54\u4B71\u4B8E\u4BAC\u4BC9\u4BE7\u4C04\u4C22\
\u4C40\u4C5D\u4C7B\u4C99\u4CB6\u4CD4\u4CF1\u4D0F\u4D2C\u4D49\u4D67\u4D84\u4DA2\
\u4DBF\u4DDD\u4DFB\u4E18\u4E36\u4E54\u4E71\u4E8F\u4EAC\u4EC9\u4EE7\u4F04\u4F22\
\u4F3F\u4F5D\u4F7A\u4F98\u4FB6\u4FD4\u4FF1\u500F\u502C\u5049\u5067\u5084\u50A1\
\u50BF\u50DC\u50FA\u5118\u5136\u5153\u5171\u518F\u51AC\u51C9\u51E7\u5204\u5221\
\u523F\u525C\u527A\u5298\u52B5\u52D3\u52F1\u530F\u532C\u5349\u5367\u5384\u53A1\
\u53BF\u53DC\u53FA\u5417\u5435\u5453\u5471\u548E\u54AC\u54C9\u54E7\u5504\u5521\
\u553F\u555C\u557A\u5597\u55B5\u55D3\u55F0\u560E\u562C\u5649\u5667\u5684\u56A1\
\u56BF\u56DC\u56FA\u5717\u5735\u5752\u5770\u578E\u57AB\u57C9\u57E7\u5804\u5822\
\u583F\u585C\u587A\u5897\u58B5\u58D2\u58F0\u590D\u592B\u5949\u5966\u5984\u59A2\
\u59BF\u59DD\u59FA\u5A17\u5A35\u5A52\u5A70\u5A8D\u5AAB\u5AC8\u5AE6\u5B04\u5B21\
\u5B3F\u5B5D\u5B7A\u5B97\u5BB5\u5BD2\u5BEF\u5C0D\u5C2A\u5C48\u5C66\u5C84\u5CA1\
\u5CBF\u5CDD\u5CFA\u5D17\u5D35\u5D52\u5D6F\u5D8D\u5DAA\u5DC8\u5DE6\u5E03\u5E21\
\u5E3F\u5E5D\u5E7A\u5E97\u5EB5\u5ED2\u5EEF\u5F0D\u5F2A\u5F48\u5F65\u5F83\u5FA1\
\u5FBF\u5FDC\u5FFA\u6017\u6035\u6052\u606F\u608D\u60AA\u60C8\u60E5\u6103\u6121\
\u613F\u615C\u617A\u6197\u61B5\u61D2\u61EF\u620D\u622A\u6248\u6265\u6283\u62A1\
\u62BE\u62DC\u62FA\u6317\u6335\u6352\u636F\u638D\u63AA\u63C8\u63E5\u6403\u6420\
\u643E\u645C\u6479\u6497\u64B4\u64D2\u64EF\u650D\u652A\u6548\u6565\u6583\u65A0\
\u65BE\u65DB\u65F9\u6617\u6634\u6652\u666F\u668D\u66AA\u66C8\u66E5\u6703\u6720\
\u673D\u675B\u6779\u6796\u67B4\u67D2\u67EF\u680D\u682B\u6848\u6865\u6883\u68A0\
\u68BD\u68DB\u68F8\u6916\u6934\u6951\u696F\u698D\u69AB\u69C8\u69E5\u6A03\u6A20\
\u6A3D\u6A5B\u6A78\u6A96\u6AB3\u6AD1\u6AEF\u6B0D\u6B2A\u6B48\u6B65\u6B83\u6BA0\
\u6BBD\u6BDB\u6BF8\u6C16\u6C33\u6C51\u6C6F\u6C8D\u6CAA\u6CC8\u6CE5\u6D03\u6D20\
\u6D3D\u6D5B\u6D78\u6D96\u6DB3\u6DD1\u6DEF\u6E0C\u6E2A\u6E48\u6E65\u6E83\u6EA0\
\u6EBD\u6EDB\u6EF8\u6F16\u6F33\u6F51\u6F6E\u6F8C\u6FAA\u6FC7\u6FE5\u7002\u7020\
\u703D\u705B\u7078\u7096\u70B3\u70D1\u70EE\u710C\u7129\u7147\u7165\u7182\u71A0\
\u71BD\u71DB\u71F8\u7216\u7233\u7251\u726E\u728C\u72A9\u72C7\u72E4\u7302\u7320\
\u733D\u735B\u7378\u7396\u73B3\u73D1\u73EE\u740B\u7429\u7446\u7464\u7482\u749F\
\u74BD\u74DB\u74F8\u7516\u7533\u7551\u756E\u758B\u75A9\u75C6\u75E4\u7601\u761F\
\u763D\u765B\u7678\u7696\u76B3\u76D1\u76EE\u770B\u7729\u7746\u7764\u7781\u779F\
\u77BD\u77DB\u77F8\u7816\u7833\u7851\u786E\u788B\u78A9\u78C6\u78E3\u7901\u791F\
\u793D\u795A\u7978\u7996\u79B3\u79D1\u79EE\u7A0B\u7A29\u7A46\u7A63\u7A81\u7A9F\
\u7ABC\u7ADA\u7AF8\u7B15\u7B33\u7B51\u7B6E\u7B8B\u7BA9\u7BC6\u7BE4\u7C01\u7C1F\
\u7C3C\u7C5A\u7C78\u7C95\u7CB3\u7CD0\u7CEE\u7D0B\u7D29\u7D46\u7D64\u7D81\u7D9F\
\u7DBC\u7DDA\u7DF7\u7E15\u7E32\u7E50\u7E6E\u7E8B\u7EA9\u7EC6\u7EE4\u7F01\u7F1F\
\u7F3C\u7F59\u7F77\u7F95\u7FB2\u7FD0\u7FED\u800B\u8029\u8046\u8064\u8081\u809F\
\u80BC\u80D9\u80F7\u8114\u8132\u814F\u816D\u818B\u81A9\u81C6\u81E4\u8201\u821F\
\u823C\u8259\u8277\u8294\u82B2\u82CF\u82ED\u830B\u8329\u8346\u8364\u8381\u839F\
\u83BC\u83D9\u83F7\u8414\u8431\u844F\u846D\u848B\u84A8\u84C6\u84E4\u8501\u851F\
\u853C\u8559\u8577\u8594\u85B1\u85CF\u85ED\u860A\u8628\u8646\u8664\u8681\u869F\
\u86BC\u86D9\u86F7\u8714\u8731\u874F\u876C\u878A\u87A8\u87C6\u87E3\u8801\u881E\
\u883C\u8859\u8877\u8894\u88B2\u88CF\u88EC\u890A\u8928\u8945\u8963\u8981\u899E\
\u89BC\u89D9\u89F7\u8A14\u8A32\u8A4F\u8A6C\u8A8A\u8AA8\u8AC5\u8AE3\u8B00\u8B1E\
\u8B3C\u8B59\u8B77\u8B94\u8BB2\u8BCF\u8BED\u8C0A\u8C27\u8C45\u8C62\u8C80\u8C9E\
\u8CBB\u8CD9\u8CF7\u8D14\u8D32\u8D4F\u8D6D\u8D8A\u8DA7\u8DC5\u8DE2\u8E00\u8E1D\
\u8E3B\u8E59\u8E76\u8E94\u8EB2\u8ECF\u8EED\u8F0A\u8F27\u8F45\u8F62\u8F7F\u8F9D\
\u8FBB\u8FD8\u8FF6\u9014\u9032\u904F\u906D\u908A\u90A7\u90C5\u90E2\u90FF\u911D\
\u913B\u9158\u9176\u9194\u91B2\u91CF\u91ED\u920A\u9227\u9245\u9262\u927F\u929D\
\u92BA\u92D8\u92F6\u9314\u9331\u934F\u936D\u938A\u93A7\u93C5\u93E2\u93FF\u941D\
\u943A\u9458\u9476\u9493\u94B1\u94CF\u94EC\u950A\u9527\u9545\u9562\u957F\u959D\
\u95BA\u95D8\u95F5\u9613\u9631\u964E\u966C\u968A\u96A7\u96C5\u96E2\u9700\u971D\
\u973A\u9758\u9775\u9793\u97B1\u97CE\u97EC\u9809\u9827\u9845\u9862\u9880\u989D\
\u98BB\u98D8\u98F5\u9913\u9930\u994E\u996B\u9989\u99A7\u99C4\u99E2\u9A00\u9A1D\
\u9A3B\u9A58\u9A75\u9A93\u9AB0\u9ACE\u9AEB\u9B09\u9B27\u9B44\u9B62\u9B80\u9B9D\
\u9BBB\u9BD8\u9BF5\u9C13\u9C30\u9C4D\u9C6B\u9C89\u9CA6\u9CC4\u9CE2\u9D00\u9D1D\
\u9D3B\u9D58\u9D75\u9D93\u9DB0\u9DCD\u9DEB\u9E08\u9E26\u9E44\u9E62\u9E7F\u9E9D\
\u9EBB\u9ED8\u9EF5\u9F13\u9F30\u9F4D\u9F6B\u9F88\u9FA6\u9FC4\u9FE1\u9FFF\uA01D\
\uA03A\uA058\uA075\uA093\uA0B0\uA0CD\uA0EB\uA108\uA126\uA143\uA161\uA17F\uA19D\
\uA1BA\uA1D8\uA1F5\uA213\uA230\uA24D\uA26B\uA288\uA2A6\uA2C3\uA2E1\uA2FF\uA31C\
\uA33A\uA358\uA375\uA393\uA3B0\uA3CE\uA3EB\uA408\uA426\uA443\uA461\uA47E\uA49C\
\uA4BA\uA4D7\uA4F5\uA512\uA530\uA54E\uA56B\uA589\uA5A6\uA5C3\uA5E1\uA5FE\uA61C\
\uA639\uA657\uA675\uA692\uA6B0\uA6CE\uA6EB\uA709\uA726\uA743\uA761\uA77E\uA79B\
\uA7B9\uA7D7\uA7F4\uA812\uA830\uA84E\uA86B\uA889\uA8A6\uA8C3\uA8E1\uA8FE\uA91B\
\uA939\uA956\uA974\uA992\uA9B0\uA9CD\uA9EB\uAA09\uAA26\uAA43\uAA61\uAA7E\uAA9B\
\uAAB9\uAAD6\uAAF4\uAB12\uAB2F\uAB4D\uAB6B\uAB89\uABA6\uABC3\uABE1\uABFE\uAC1B\
\uAC39\uAC56\uAC74\uAC91\uACAF\uACCD\uACEB\uAD08\uAD26\uAD43\uAD61\uAD7E\uAD9B\
\uADB9\uADD6\uADF4\uAE11\uAE2F\uAE4D\uAE6A\uAE88\uAEA6\uAEC3\uAEE1\uAEFE\uAF1B\
\uAF39\uAF56\uAF74\uAF91\uAFAF\uAFCC\uAFEA\uB008\uB025\uB043\uB060\uB07E\uB09B\
\uB0B9\uB0D6\uB0F4\uB111\uB12F\uB14C\uB16A\uB187\uB1A5\uB1C3\uB1E0\uB1FE\uB21C\
\uB239\uB256\uB274\uB291\uB2AF\uB2CC\uB2EA\uB307\uB325\uB342\uB360\uB37E\uB39B\
\uB3B9\uB3D7\uB3F4\uB411\uB42F\uB44C\uB469\uB487\uB4A4\uB4C2\uB4E0\uB4FE\uB51B\
\uB539\uB557\uB574\uB591\uB5AF\uB5CC\uB5E9\uB607\uB624\uB642\uB660\uB67D\uB69B\
\uB6B9\uB6D7\uB6F4\uB711\uB72F\uB74C\uB769\uB787\uB7A4\uB7C2\uB7DF\uB7FD\uB81B\
\uB839\uB856\uB874\uB891\uB8AF\uB8CC\uB8E9\uB907\uB924\uB942\uB95F\uB97D\uB99B\
\uB9B8\uB9D6\uB9F4\uBA11\uBA2F\uBA4C\uBA69\uBA87\uBAA4\uBAC2\uBADF\uBAFD\uBB1A\
\uBB38\uBB56\uBB74\uBB91\uBBAF\uBBCC\uBBE9\uBC07\uBC24\uBC42\uBC5F\uBC7D\uBC9A\
\uBCB8\uBCD6\uBCF3\uBD11\uBD2E\uBD4C\uBD69\uBD87\uBDA4\uBDC2\uBDDF\uBDFD\uBE1A\
\uBE38\uBE55\uBE73\uBE90\uBEAE\uBECC\uBEE9\uBF07\uBF24\uBF42\uBF5F\uBF7D\uBF9A\
\uBFB7\uBFD5\uBFF2\uC010\uC02E\uC04C\uC069\uC087\uC0A4\uC0C2\uC0DF\uC0FD\uC11A\
\uC137\uC155\uC172\uC190\uC1AE\uC1CB\uC1E9\uC207\uC224\uC242\uC25F\uC27D\uC29A\
\uC2B7\uC2D5\uC2F2\uC310\uC32D\uC34B\uC369\uC387\uC3A4\uC3C2\uC3DF\uC3FD\uC41A\
\uC437\uC455\uC472\uC490\uC4AD\uC4CB\uC4E9\uC507\uC524\uC542\uC55F\uC57D\uC59A\
\uC5B7\uC5D5\uC5F2\uC610\uC62D\uC64B\uC669\uC686\uC6A4\uC6C2\uC6DF\uC6FD\uC71A\
\uC737\uC755\uC772\uC790\uC7AD\uC7CB\uC7E8\uC806\uC824\uC841\uC85F\uC87C\uC89A\
\uC8B7\uC8D5\uC8F2\uC910\uC92D\uC94B\uC968\uC986\uC9A3\uC9C1\uC9DF\uC9FC\uCA1A\
\uCA37\uCA55\uCA72\uCA90\uCAAD\uCACB\uCAE8\uCB06\uCB23\uCB41\uCB5E\uCB7C\uCB9A\
\uCBB7\uCBD5\uCBF2\uCC10\uCC2D\uCC4B\uCC68\uCC85\uCCA3\uCCC0\uCCDE\uCCFC\uCD19\
\uCD37\uCD55\uCD72\uCD90\uCDAD\uCDCB\uCDE8\uCE05\uCE23\uCE40\uCE5E\uCE7B\uCE99\
\uCEB7\uCED5\uCEF2\uCF10\uCF2D\uCF4B\uCF68\uCF85\uCFA3\uCFC0\uCFDE\uCFFB\uD019\
\uD037\uD055\uD072\uD090\uD0AD\uD0CB\uD0E8\uD105\uD123\uD140\uD15D\uD17B\uD199\
\uD1B7\uD1D4\uD1F2\uD210\uD22D\uD24B\uD268\uD285\uD2A3\uD2C0\uD2DD\uD2FB\uD319\
\uD336\uD354\uD372\uD38F\uD3AD\uD3CB\uD3E8\uD405\uD423\uD440\uD45D\uD47B\uD499\
\uD4B6\uD4D4\uD4F1\uD50F\uD52D\uD54A\uD568\uD585\uD5A3\uD5C0\uD5DE\uD5FB\uD619\
\uD636\uD654\uD671\uD68F\uD6AC\uD6CA\uD6E8\uD705\uD723\uD740\uD75E\uD77B\uD799\
\uD7B6\uD7D3\uD7F1\uD80E\uD82C\uD84A\uD867\uD885\uD8A3\uD8C0\uD8DE\uD8FB\uD919\
\uD936\uD953\uD971\uD98E\uD9AC\uD9C9\uD9E7\uDA05\uDA23\uDA40\uDA5E\uDA7B\uDA99\
\uDAB6\uDAD3\uDAF1\uDB0E\uDB2C\uDB49\uDB67\uDB85\uDBA3\uDBC0\uDBDE\uDBFB\uDC19\
\uDC36\uDC53\uDC71\uDC8E\uDCAB\uDCC9\uDCE7\uDD04\uDD22\uDD40\uDD5E\uDD7B\uDD99\
\uDDB6\uDDD3\uDDF1\uDE0E\uDE2B\uDE49\uDE67\uDE84\uDEA2\uDEC0\uDEDE\uDEFB\uDF19\
\uDF36\uDF53\uDF71\uDF8E\uDFAB\uDFC9\uDFE6\uE004\uE022\uE040\uE05D\uE07B\uE098\
\uE0B6\uE0D3\uE0F1\uE10E\uE12B\uE149\uE166\uE184\uE1A2\uE1BF\uE1DD\uE1FB\uE218\
\uE236\uE253\uE271\uE28E\uE2AC\uE2C9\uE2E6\uE304\uE321\uE33F\uE35D\uE37A\uE398\
\uE3B6\uE3D3\uE3F1\uE40E\uE42C\uE449\uE467\uE484\uE4A1\uE4BF\uE4DC\uE4FA\uE518\
\uE535\uE553\uE571\uE58E\uE5AC\uE5C9\uE5E7\uE604\uE621\uE63F\uE65C\uE67A\uE697\
\uE6B5\uE6D3\uE6F0\uE70E\uE72C\uE749\uE767\uE784\uE7A1\uE7BF\uE7DC\uE7F9\uE817\
\uE835\uE852\uE870\uE88E\uE8AC\uE8C9\uE8E7\uE904\uE921\uE93F\uE95C\uE979\uE997\
\uE9B4\uE9D2\uE9F0\uEA0E\uEA2C\uEA49\uEA67\uEA84\uEAA1\uEABF\uEADC\uEAF9\uEB17\
\uEB34\uEB52\uEB70\uEB8E\uEBAB\uEBC9\uEBE7\uEC04\uEC21\uEC3F\uEC5C\uEC79\uEC97\
\uECB4\uECD2\uECF0\uED0D\uED2B\uED49\uED66\uED84\uEDA1\uEDBF\uEDDC\uEDF9\uEE17\
\uEE34\uEE52\uEE6F\uEE8D\uEEAB\uEEC8\uEEE6\uEF04\uEF21\uEF3F\uEF5C\uEF7A\uEF97\
\uEFB4\uEFD2\uEFEF\uF00D\uF02A\uF048\uF066\uF083\uF0A1\uF0BF\uF0DC\uF0FA\uF117\
\uF135\uF152\uF16F\uF18D\uF1AA\uF1C8\uF1E5\uF203\uF221\uF23E\uF25C\uF27A\uF297"
_YEARTABLE = u"\00\u000D\u0019\u0025\u0032\u003E\u004A\u0057\u0063\u006F\u007C\
\u0088\u0095\u00A1\u00AD\u00BA\u00C6\u00D2\u00DF\u00EB\u00F8\u0104\u0110\u011D\
\u0129\u0135\u0142\u014E\u015A\u0167\u0173\u0180\u018C\u0198\u01A5\u01B1\u01BD\
\u01CA\u01D6\u01E3\u01EF\u01FB\u0208\u0214\u0220\u022D\u0239\u0245\u0252\u025E\
\u026B\u0277\u0283\u0290\u029C\u02A8\u02B5\u02C1\u02CE\u02DA\u02E6\u02F3\u02FF\
\u030B\u0318\u0324\u0330\u033D\u0349\u0356\u0362\u036E\u037B\u0387\u0393\u03A0\
\u03AC\u03B9\u03C5\u03D1\u03DE\u03EA\u03F6\u0403\u040F\u041B\u0428\u0434\u0441\
\u044D\u0459\u0466\u0472\u047E\u048B\u0497\u04A4\u04B0\u04BC\u04C9\u04D5\u04E1\
\u04EE\u04FA\u0507\u0513\u051F\u052C\u0538\u0544\u0551\u055D\u0569\u0576\u0582\
\u058F\u059B\u05A7\u05B4\u05C0\u05CC\u05D9\u05E5\u05F1\u05FE\u060A\u0617\u0623\
\u062F\u063C\u0648\u0654\u0661\u066D\u067A\u0686\u0692\u069F\u06AB\u06B7\u06C4\
\u06D0\u06DC\u06E9\u06F5\u0702\u070E\u071A\u0727\u0733\u073F\u074C\u0758\u0765\
\u0771\u077D\u078A\u0796\u07A2\u07AF\u07BB\u07C7\u07D4\u07E0\u07ED\u07F9\u0805\
\u0812\u081E\u082A"
_LEAPTABLE = "\7\0\0\5\0\0\4\0\0\2\0\6\0\0\5\0\0\3\0\10\0\0\5\0\0\4\0\0\2\0\6\
\0\0\5\0\0\2\0\7\0\0\5\0\0\4\0\0\2\0\6\0\0\5\0\0\3\0\7\0\0\6\0\0\4\0\0\2\0\7\0\
\0\5\0\0\3\0\10\0\0\6\0\0\4\0\0\3\0\7\0\0\5\0\0\4\0\10\0\0\6\0\0\4\0\12\0\0\6\
\0\0\5\0\0\3\0\10\0\0\5\0\0\4\0\0\2\0\7\0\0\5\0\0\3\0\11\0\0\5\0\0\4\0\0\2\0\6\
\0\0\5\0\0\3\0\13\0\0\6\0\0\5\0\0\2\0\7\0\0\5\0\0\3"
_GANZIMAP = {
'ko': u'\uac11\uc744\ubcd1\uc815\ubb34\uae30\uacbd\uc2e0\uc784\uacc4\uc790'
u'\ucd95\uc778\ubb18\uc9c4\uc0ac\uc624\ubbf8\uc2e0\uc720\uc220\ud574',
'ja': u'\u7532\u4e59\u4e19\u4e01\u620a\u5df1\u5e9a\u8f9b\u58ec\u7678\u5b50'
u'\u4e11\u5bc5\u536f\u8fb0\u5df3\u5348\u672a\u7533\u9149\u620c\u4ea5',
'zh': u'\u7532\u4e59\u4e19\u4e01\u620a\u5df1\u5e9a\u8f9b\u58ec\u7678\u5b50'
u'\u4e11\u5bc5\u536f\u8fb0\u5df3\u5348\u672a\u7533\u9149\u620c\u4ea5',
}
###################################################################################
## Basic Functions
def _bisect(a, x):
lo = 0; hi = len(a)
while lo < hi:
mid = (lo + hi) // 2
if x < ord(a[mid]): hi = mid
else: lo = mid + 1
return lo - 1
def sol2lun(year, month, day, leap=False):
    """sol2lun(year, month, day, leap=False) -> (year, month, day, leap)

Returns corresponding date in lunar calendar. leap will be ignored."""
    days = date(year, month, day).toordinal()
    if not _MINDATE <= days <= _MAXDATE:
        raise ValueError, "year is out of range"
    # Offset in days from the start of the table (lunar 1881-01-01).
    days -= _MINDATE
    # Locate the lunar month (index into _MONTHTABLE) and lunar year
    # (index into _YEARTABLE) containing this day.
    month = _bisect(_MONTHTABLE, days)
    year = _bisect(_YEARTABLE, month)
    # Convert absolute month/day offsets into 1-based values within the year.
    month, day = month - ord(_YEARTABLE[year]) + 1, days - ord(_MONTHTABLE[month]) + 1
    # _LEAPTABLE[year] is the leap month number for that year (0 = none).
    # Months past the leap month are shifted down by one; the month equal
    # to the leap number is flagged as the leap month.
    if (ord(_LEAPTABLE[year]) or 13) < month:
        month -= 1
        leap = (ord(_LEAPTABLE[year]) == month)
    else:
        leap = False
    return (year + _BASEYEAR, month, day, leap)
def lun2sol(year, month, day, leap=False):
    """lun2sol(year, month, day, leap=False) -> (year, month, day, leap)

Returns corresponding date in solar calendar."""
    year -= _BASEYEAR
    if not 0 <= year < len(_YEARTABLE):
        raise ValueError, "year is out of range"
    if not 1 <= month <= 12:
        raise ValueError, "wrong month"
    # A leap month is only valid if it matches this year's leap month number.
    if leap and ord(_LEAPTABLE[year]) != month:
        raise ValueError, "wrong leap month"
    # Absolute month index into _MONTHTABLE; months at or past the leap
    # month (and the leap month itself) are shifted up by one entry.
    months = ord(_YEARTABLE[year]) + month - 1
    if leap or (ord(_LEAPTABLE[year]) or 13) < month:
        months += 1
    days = ord(_MONTHTABLE[months]) + day - 1
    # Reject days outside the month (the next table entry starts the
    # following month).
    if day < 1 or days >= ord(_MONTHTABLE[months + 1]):
        raise ValueError, "wrong day"
    return date.fromordinal(days + _MINDATE).timetuple()[:3] + (False,)
def getganzistr(index, locale=None):
    """getganzistr(index, locale=None) -> unicode string

Returns corresponding unicode string of ganzi.
locale can be "ko", "ja", "zh". Uses default locale when locale is ignored."""
    # Fall back to the module-level default locale when none is given.
    loc = locale or _DEFAULTLOCALE
    # The first 10 characters are the heavenly stems, the next 12 the
    # earthly branches; a ganzi pairs one of each.
    chars = _GANZIMAP[loc]
    return chars[index % 10] + chars[10 + index % 12]
def strftime(format, t=None):
    """strftime(format, t=None) -> string

Returns formatted string of given timestamp. If timestamp is omitted,
current timestamp (return value of time.localtime()) is used.

Similar to time.strftime, but has the following extensions:

%LC - (year / 100) as a decimal number (at least 2 digits)
%Ld - lunar day of the month as a decimal number [01,30]
%Le - same as %Ld, but preceding blank instead of zero
%LF - same as "%LY-%Lm-%Ld"
%Lj - day of the lunar year as a decimal number [001,390]
%Ll - 0 for non-leap month, 1 for leap month
%Lm - lunar month as a decimal number [01,12]
%Ly - lunar year without century as a decimal number [00,99]
%LY - lunar year with century as a decimal number
"""
    if t is None: t = time.localtime()
    # The %L extensions require the re module; without it (see the guarded
    # import at module top) only the standard directives are expanded.
    if _STRFTIMEREGEXP is not None:
        lt = sol2lun(*t[:3])
        # Day offset of t from the table origin, and day-of-lunar-year.
        lord = date(t[0], t[1], t[2]).toordinal() - _MINDATE
        ldoy = lord - ord(_MONTHTABLE[ord(_YEARTABLE[lt[0] - _BASEYEAR])]) + 1
        lmap = {'Y': '%04d' % lt[0], 'm': '%02d' % lt[1], 'd': '%02d' % lt[2],
                'y': '%02d' % (lt[0] % 100), 'C': '%02d' % (lt[0] // 100),
                'F': '%04d-%02d-%02d' % lt[:3], 'e': str(lt[2]),
                'l': '%d' % lt[3], 'j': '%03d' % ldoy}
        # Halve the run of escaped percent signs captured in group 1
        # (Python 2: '/' is integer division here), then substitute the
        # lunar value for the directive letter in group 2.
        format = _STRFTIMEREGEXP.sub(lambda m: '%' * (len(m.group(1)) / 2) +
                                     lmap.get(m.group(2), ''), format)
    return time.strftime(format, t)
###################################################################################
## Class Declaration
# solardate is simply an alias for datetime.date, provided for symmetry
# with lunardate.
solardate = date
class lunardate(date):
"""lunardate(year, month, day, leap=False) -> new lunardate object"""
def __new__(cls, year, month, day, leap=False):
obj = date.__new__(cls, *lun2sol(year, month, day, leap)[:3])
object.__setattr__(obj, 'lunaryear', year)
object.__setattr__(obj, 'lunarmonth', month)
object.__setattr__(obj, 'lunarday', day)
object.__setattr__(obj, 'lunarleap', leap)
return obj
def __repr__(self):
return '%s.%s(%d, %d, %d, %s)' % \
(self.__class__.__module__, self.__class__.__name__,
self.lunaryear, self.lunarmonth, self.lunarday, self.lunarleap)
min = type('propertyproxy', (object,), {
'__doc__': 'lunardate.min -> The earliest representable date',
'__get__': lambda self, inst, cls: cls.fromordinal(_MINDATE)})()
max = type('propertyproxy', (object,), {
'__doc__': 'lunardate.max -> The latest representable date',
'__get__': lambda self, inst, cls: cls.fromordinal(_MAXDATE)})()
def __setattr__(self, name, value):
raise AttributeError, "can't set attribute."
def __add__(self, other):
return self.fromsolardate(date.__add__(self, other))
def __radd__(self, other):
return self.fromsolardate(date.__radd__(self, other))
def __sub__(self, other):
result = date.__sub__(self, other)
if not isinstance(result, timedelta):
result = self.fromsolardate(result)
return result
def replace(self, year=None, month=None, day=None, leap=None):
"""lunardate.replace(year, month, day, leap) -> new lunardate object
Same as date.replace, but returns lunardate object instead of date object."""
if leap is None: leap = self.lunarleap
return self.__class__(year or self.lunaryear, month or self.lunarmonth,
day or self.month, leap)
def tosolardate(self):
"""lunardate.tosolardate() -> date object
Returns corresponding date object."""
return date(self.year, self.month, self.day)
def today(self):
"""lunardate.today() -> new lunardate object
Returns lunardate object which represents today."""
return self.fromsolardate(date.today())
def fromsolardate(self, solardate):
"""lunardate.fromsolardate(solardate) -> new lunardate object
Returns corresponding lunardate object from date object."""
return self(*sol2lun(*solardate.timetuple()[:3]))
def fromtimestamp(self, timestamp):
"""lunardate.fromtimestamp(timestamp) -> new lunardate object
Returns corresponding lunardate object from UNIX timestamp."""
return self.fromsolardate(date.fromtimestamp(timestamp))
def fromordinal(self, ordinal):
"""lunardate.fromordinal(ordinal) -> new lunardate object
Returns corresponding lunardate object from Gregorian ordinal."""
return self.fromsolardate(date.fromordinal(ordinal))
def getganzi(self):
"""lunardate.getganzi() -> (year_ganzi, month_ganzi, day_ganzi)
Returns ganzi index between 0..59 from lunardate object.
Ganzi index can be converted using getganzistr function."""
return ((self.lunaryear + 56) % 60,
(self.lunaryear * 12 + self.lunarmonth + 13) % 60,
(self.toordinal() + 14) % 60)
def getganzistr(self, locale=None):
"""lunardate.getganzistr(locale=None) -> 3-tuple of unicode string
Returns unicode string of ganzi from lunardate object.
See getganzistr global function for detail."""
return tuple([getganzistr(i, locale) for i in self.getganzi()])
def strftime(self, format):
"""lunardate.strftime(format) -> string
Returns formatted string of lunardate object.
See strftime global function for detail."""
return strftime(format, self.timetuple())
today = classmethod(today)
fromsolardate = classmethod(fromsolardate)
fromtimestamp = classmethod(fromtimestamp)
fromordinal = classmethod(fromordinal)
# we create new lunardate class from old lunardate class using typeproxy,
# because default type class always allows setting class variable.
# __slots__ is added later to forbid descriptor initialization by type.
class typeproxy(type):
    # Metaclass that makes class attributes of lunardate read-only.
    def __setattr__(self, name, value):
        raise AttributeError, "can't set attribute."
# Rebuild lunardate under the read-only metaclass, adding __slots__ so
# instances carry no per-instance __dict__.
clsdict = dict(lunardate.__dict__)
clsdict['__slots__'] = ['lunaryear', 'lunarmonth', 'lunarday', 'lunarleap']
lunardate = typeproxy(lunardate.__name__, lunardate.__bases__, clsdict)
# The metaclass is an implementation detail; drop the name from the module.
del typeproxy
###################################################################################
## Command Line Interface
if __name__ == '__main__':
    import sys
    # Tiny CLI: "today" prints both calendars for today; "solar"/"lunar"
    # convert an explicit date.  Bad arguments fall through to the usage
    # message via RuntimeError/IndexError.
    try:
        mode = sys.argv[1].lower()
        if mode == 'today':
            if len(sys.argv) != 2: raise RuntimeError
            today = lunardate.today()
            isleap = today.lunarleap and ' (leap)' or ''
            print today.strftime('Today: solar %Y-%m-%d %a = lunar %LY-%Lm-%Ld' + isleap)
        elif mode == 'solar':
            if len(sys.argv) != 5: raise RuntimeError
            solar = lunardate.fromsolardate(date(*map(int, sys.argv[2:])))
            isleap = solar.lunarleap and ' (leap)' or ''
            print solar.strftime('solar %Y-%m-%d %a -> lunar %LY-%Lm-%Ld' + isleap)
        elif mode == 'lunar':
            if len(sys.argv) not in (5, 6): raise RuntimeError
            # Optional trailing "leap" argument marks a leap month.
            leap = (len(sys.argv) == 6 and sys.argv[5].lower() == 'leap')
            solar = lunardate(*(map(int, sys.argv[2:5]) + [leap]))
            isleap = leap and ' (leap)' or ''
            print solar.strftime('lunar %LY-%Lm-%Ld' + isleap + ' -> solar %Y-%m-%d %a')
        else:
            raise RuntimeError
    except (IndexError, RuntimeError):
        app = sys.argv[0]
        print 'Usage:'
        print ' for today - python %s today' % app
        print ' for solar to lunar - python %s solar <year> <month> <day>' % app
        print ' for lunar to solar - python %s lunar <year> <month> <day> [leap]' % app
    except:
        # Report conversion errors (e.g. out-of-range dates) briefly.
        print 'Error: %s' % sys.exc_info()[1]
| lifthrasiir/transdate | transdate.py | Python | lgpl-2.1 | 26,449 |
from PyQt5 import QtCore, QtWidgets
from PyQt5.QtGui import QIcon
from PyQt5.QtCore import pyqtSlot
from mazeui.Maze import Config
from mazeui.core.MazeBBLTable import MazeBBLTable
class MazeUIBBLWindow(QtWidgets.QMainWindow):
    """Main window listing orphaned BBLs in a MazeBBLTable.

    Widget hierarchy: central widget > horizontal splitter > container
    widget holding the BBL table."""
    def __init__(self, parent=None):
        """
        Constructor
        """
        # NOTE(review): parent is stored but not forwarded to
        # QMainWindow.__init__ -- confirm whether that is intentional.
        QtWidgets.QMainWindow.__init__(self)
        self.name = "Orphaned BBLs"
        self.parent = parent
        self.icon = QIcon(Config().icons_path + 'radar-icon.png')
        # Central widget with a horizontal layout hosting the splitter.
        self.central_widget = QtWidgets.QWidget()
        self.setCentralWidget(self.central_widget)
        main_layout = QtWidgets.QHBoxLayout()
        self.main_splitter = QtWidgets.QSplitter(QtCore.Qt.Horizontal)
        main_layout.addWidget(self.main_splitter)
        self.central_widget.setLayout(main_layout)
        # Container widget for the BBL table, added as a splitter pane.
        layout = QtWidgets.QVBoxLayout()
        self.BBLTableWG = QtWidgets.QWidget()
        self.BBLTableWG.setLayout(layout)
        self.bblTlb = MazeBBLTable()
        layout.addWidget(self.bblTlb)
        self.main_splitter.addWidget(self.BBLTableWG)
    @pyqtSlot()
    def OnDataLoad(self):
        # Slot: populate the table when data is first loaded.
        self.bblTlb.populate()
    @pyqtSlot()
    def OnDataReLoad(self):
        # Slot: clear existing rows, then repopulate from scratch.
        self.bblTlb.setRowCount(0)
        self.bblTlb.populate()
| 0xPhoeniX/MazeWalker | MazeUI/mazeui/widgets/MazeUIBBLWindow.py | Python | lgpl-3.0 | 1,255 |
"""
WSGI config for gdgapi project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
from os.path import abspath, dirname
from sys import path
# Make the project root importable before any Django settings are loaded.
SITE_ROOT = dirname(dirname(abspath(__file__)))
path.append(SITE_ROOT)
# We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks
# if running multiple sites in the same mod_wsgi process. To fix this, use
# mod_wsgi daemon mode with each site in its own daemon process, or use
# os.environ["DJANGO_SETTINGS_MODULE"] = "jajaja.settings"
# os.environ.setdefault("DJANGO_SETTINGS_MODULE", "gdgapi.settings.local")
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "gdgapi.settings.production")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
| mrmmm/gdgapi | gdgapi/gdgapi/wsgi.py | Python | mit | 1,635 |
# Smoke-test script for a debug build of numpy: force-import the compiled
# extension modules, then run the full numpy test suite.
import sys
import platform
import numpy
#if platform.machine() != 'armv6l':
#import numpy.core._dotblas
import numpy.core.multiarray
import numpy.core.multiarray_tests
import numpy.core.scalarmath
import numpy.core.umath
import numpy.core.umath_tests
import numpy.fft.fftpack_lite
import numpy.lib._compiled_base
import numpy.linalg.lapack_lite
import numpy.random.mtrand
#from numpy.fft import using_mklfft
# sys.gettotalrefcount only exists on debug (Py_DEBUG) interpreter builds,
# so this doubles as a check that we are running under a debug Python.
sys.gettotalrefcount()
if sys.platform == 'win32' and sys.version_info[0] == 3:
    print('Not running numpy tests Windows on Py3k')
else:
    numpy.test()
# Report whether this numpy build was linked against MKL.
try:
    print('MKL: %r' % numpy.__mkl_version__)
except AttributeError:
    print('NO MKL')
#print('USING MKLFFT: %s' % using_mklfft)
| pitrou/conda-recipes | numpydbg-1.9.2/run_test.py | Python | mit | 710 |
import cassiopeia as cass
from cassiopeia import Map, Maps
def get_maps():
    """Print name and id of every NA-region map, then resolve one by name.

    Demonstrates both the bulk `cass.get_maps` query and constructing a
    single `Map` by name to look up its id.
    """
    maps = cass.get_maps(region="NA")
    # Renamed from `map`, which shadowed the builtin of the same name.
    for game_map in maps:
        print(game_map.name, game_map.id)
    summoners_rift = Map(name="Summoner's Rift", region="NA")
    print(summoners_rift.id)


if __name__ == "__main__":
    get_maps()
| robrua/cassiopeia | examples/map.py | Python | mit | 282 |
import asyncio
import pytest
import aioredis
async def _reader(channel, output, waiter, conn):
    # Helper: subscribe `conn` to `channel`, signal readiness through
    # `waiter`, then pump every received message into the `output` queue
    # until the channel is closed.
    await conn.execute('subscribe', channel)
    ch = conn.pubsub_channels[channel]
    waiter.set_result(conn)
    while await ch.wait_message():
        msg = await ch.get()
        await output.put(msg)
# PUBLISH delivers the raw payload to a subscribed reader.
@pytest.mark.run_loop
async def test_publish(create_connection, redis, server, loop):
    out = asyncio.Queue(loop=loop)
    fut = loop.create_future()
    conn = await create_connection(
        server.tcp_address, loop=loop)
    sub = asyncio.ensure_future(_reader('chan:1', out, fut, conn), loop=loop)
    await fut
    await redis.publish('chan:1', 'Hello')
    msg = await out.get()
    assert msg == b'Hello'
    sub.cancel()
# PUBLISH_JSON serializes the object and reports the receiver count.
@pytest.mark.run_loop
async def test_publish_json(create_connection, redis, server, loop):
    out = asyncio.Queue(loop=loop)
    fut = loop.create_future()
    conn = await create_connection(
        server.tcp_address, loop=loop)
    sub = asyncio.ensure_future(_reader('chan:1', out, fut, conn), loop=loop)
    await fut
    res = await redis.publish_json('chan:1', {"Hello": "world"})
    assert res == 1  # receivers
    msg = await out.get()
    assert msg == b'{"Hello": "world"}'
    sub.cancel()
# SUBSCRIBE registers plain (non-pattern) channels and UNSUBSCRIBE
# reports the remaining subscription count per channel.
@pytest.mark.run_loop
async def test_subscribe(redis):
    res = await redis.subscribe('chan:1', 'chan:2')
    assert redis.in_pubsub == 2
    ch1 = redis.channels['chan:1']
    ch2 = redis.channels['chan:2']
    assert res == [ch1, ch2]
    assert ch1.is_pattern is False
    assert ch2.is_pattern is False
    res = await redis.unsubscribe('chan:1', 'chan:2')
    assert res == [[b'unsubscribe', b'chan:1', 1],
                   [b'unsubscribe', b'chan:2', 0]]
# Same as above, but on a pool whose free connections were cleared first.
@pytest.mark.parametrize('create_redis', [
    pytest.param(aioredis.create_redis_pool, id='pool'),
])
@pytest.mark.run_loop
async def test_subscribe_empty_pool(create_redis, server, loop, _closable):
    redis = await create_redis(server.tcp_address, loop=loop)
    _closable(redis)
    await redis.connection.clear()
    res = await redis.subscribe('chan:1', 'chan:2')
    assert redis.in_pubsub == 2
    ch1 = redis.channels['chan:1']
    ch2 = redis.channels['chan:2']
    assert res == [ch1, ch2]
    assert ch1.is_pattern is False
    assert ch2.is_pattern is False
    res = await redis.unsubscribe('chan:1', 'chan:2')
    assert res == [[b'unsubscribe', b'chan:1', 1],
                   [b'unsubscribe', b'chan:2', 0]]
# PSUBSCRIBE registers glob patterns; messages arrive tagged with the
# concrete channel name that matched.
@pytest.mark.run_loop
async def test_psubscribe(redis, create_redis, server, loop):
    sub = redis
    res = await sub.psubscribe('patt:*', 'chan:*')
    assert sub.in_pubsub == 2
    pat1 = sub.patterns['patt:*']
    pat2 = sub.patterns['chan:*']
    assert res == [pat1, pat2]
    pub = await create_redis(
        server.tcp_address, loop=loop)
    await pub.publish_json('chan:123', {"Hello": "World"})
    res = await pat2.get_json()
    assert res == (b'chan:123', {"Hello": "World"})
    res = await sub.punsubscribe('patt:*', 'patt:*', 'chan:*')
    assert res == [[b'punsubscribe', b'patt:*', 1],
                   [b'punsubscribe', b'patt:*', 1],
                   [b'punsubscribe', b'chan:*', 0],
                   ]
# Pattern subscription must also work on a pool with no free connections.
@pytest.mark.parametrize('create_redis', [
    pytest.param(aioredis.create_redis_pool, id='pool'),
])
@pytest.mark.run_loop
async def test_psubscribe_empty_pool(create_redis, server, loop, _closable):
    sub = await create_redis(server.tcp_address, loop=loop)
    pub = await create_redis(server.tcp_address, loop=loop)
    _closable(sub)
    _closable(pub)
    await sub.connection.clear()
    res = await sub.psubscribe('patt:*', 'chan:*')
    assert sub.in_pubsub == 2
    pat1 = sub.patterns['patt:*']
    pat2 = sub.patterns['chan:*']
    assert res == [pat1, pat2]
    await pub.publish_json('chan:123', {"Hello": "World"})
    res = await pat2.get_json()
    assert res == (b'chan:123', {"Hello": "World"})
    res = await sub.punsubscribe('patt:*', 'patt:*', 'chan:*')
    assert res == [[b'punsubscribe', b'patt:*', 1],
                   [b'punsubscribe', b'patt:*', 1],
                   [b'punsubscribe', b'chan:*', 0],
                   ]
# PUBSUB CHANNELS lists active non-pattern subscriptions only.
@pytest.redis_version(
    2, 8, 0, reason='PUBSUB CHANNELS is available since redis>=2.8.0')
@pytest.mark.run_loop
async def test_pubsub_channels(create_redis, server, loop):
    redis = await create_redis(
        server.tcp_address, loop=loop)
    res = await redis.pubsub_channels()
    assert res == []
    res = await redis.pubsub_channels('chan:*')
    assert res == []
    sub = await create_redis(
        server.tcp_address, loop=loop)
    await sub.subscribe('chan:1')
    res = await redis.pubsub_channels()
    assert res == [b'chan:1']
    res = await redis.pubsub_channels('ch*')
    assert res == [b'chan:1']
    await sub.unsubscribe('chan:1')
    await sub.psubscribe('chan:*')
    res = await redis.pubsub_channels()
    assert res == []
# PUBSUB NUMSUB returns subscriber counts per named channel; pattern
# subscriptions are not counted.
@pytest.redis_version(
    2, 8, 0, reason='PUBSUB NUMSUB is available since redis>=2.8.0')
@pytest.mark.run_loop
async def test_pubsub_numsub(create_redis, server, loop):
    redis = await create_redis(
        server.tcp_address, loop=loop)
    res = await redis.pubsub_numsub()
    assert res == {}
    res = await redis.pubsub_numsub('chan:1')
    assert res == {b'chan:1': 0}
    sub = await create_redis(
        server.tcp_address, loop=loop)
    await sub.subscribe('chan:1')
    res = await redis.pubsub_numsub()
    assert res == {}
    res = await redis.pubsub_numsub('chan:1')
    assert res == {b'chan:1': 1}
    res = await redis.pubsub_numsub('chan:2')
    assert res == {b'chan:2': 0}
    res = await redis.pubsub_numsub('chan:1', 'chan:2')
    assert res == {b'chan:1': 1, b'chan:2': 0}
    await sub.unsubscribe('chan:1')
    await sub.psubscribe('chan:*')
    res = await redis.pubsub_numsub()
    assert res == {}
# PUBSUB NUMPAT counts pattern subscriptions only.
@pytest.redis_version(
    2, 8, 0, reason='PUBSUB NUMPAT is available since redis>=2.8.0')
@pytest.mark.run_loop
async def test_pubsub_numpat(create_redis, server, loop, redis):
    sub = await create_redis(
        server.tcp_address, loop=loop)
    res = await redis.pubsub_numpat()
    assert res == 0
    await sub.subscribe('chan:1')
    res = await redis.pubsub_numpat()
    assert res == 0
    await sub.psubscribe('chan:*')
    res = await redis.pubsub_numpat()
    assert res == 1
# Closing the client must wake channel waiters with a False result.
@pytest.mark.run_loop
async def test_close_pubsub_channels(redis, loop):
    ch, = await redis.subscribe('chan:1')
    async def waiter(ch):
        assert not await ch.wait_message()
    tsk = asyncio.ensure_future(waiter(ch), loop=loop)
    redis.close()
    await redis.wait_closed()
    await tsk
# Same close semantics for pattern subscriptions.
@pytest.mark.run_loop
async def test_close_pubsub_patterns(redis, loop):
    ch, = await redis.psubscribe('chan:*')
    async def waiter(ch):
        assert not await ch.wait_message()
    tsk = asyncio.ensure_future(waiter(ch), loop=loop)
    redis.close()
    await redis.wait_closed()
    await tsk
@pytest.mark.run_loop
async def test_close_cancelled_pubsub_channel(redis, loop):
    """Cancelling a task blocked on wait_message() must raise CancelledError
    inside that task."""
    ch, = await redis.subscribe('chan:1')
    async def waiter(ch):
        # The cancellation must surface inside wait_message().
        with pytest.raises(asyncio.CancelledError):
            await ch.wait_message()
    tsk = asyncio.ensure_future(waiter(ch), loop=loop)
    # Yield once so the waiter actually starts and blocks before we cancel.
    await asyncio.sleep(0, loop=loop)
    tsk.cancel()
    # Bug fix: await the task.  pytest.raises swallows the CancelledError
    # inside waiter(), so this completes normally — and it guarantees the
    # assertion actually ran before the test returns (the original left the
    # task pending, so a failure in waiter() could go unnoticed).
    await tsk
# After close, get() first drains to None and then raises ChannelClosedError.
@pytest.mark.run_loop
async def test_channel_get_after_close(create_redis, loop, server):
    sub = await create_redis(
        server.tcp_address, loop=loop)
    pub = await create_redis(
        server.tcp_address, loop=loop)
    ch, = await sub.subscribe('chan:1')
    await pub.publish('chan:1', 'message')
    assert await ch.get() == b'message'
    loop.call_soon(sub.close)
    assert await ch.get() is None
    with pytest.raises(aioredis.ChannelClosedError):
        assert await ch.get()
# Interleaved subscribe/publish calls on one loop must not corrupt state.
@pytest.mark.run_loop
async def test_subscribe_concurrency(create_redis, server, loop):
    sub = await create_redis(
        server.tcp_address, loop=loop)
    pub = await create_redis(
        server.tcp_address, loop=loop)
    async def subscribe(*args):
        return await sub.subscribe(*args)
    async def publish(*args):
        await asyncio.sleep(0, loop=loop)
        return await pub.publish(*args)
    res = await asyncio.gather(
        subscribe('channel:0'),
        publish('channel:0', 'Hello'),
        subscribe('channel:1'),
        loop=loop)
    (ch1,), subs, (ch2,) = res
    assert ch1.name == b'channel:0'
    assert subs == 1
    assert ch2.name == b'channel:1'
# PING is allowed while in subscribe mode since redis 3.2.
@pytest.redis_version(
    3, 2, 0, reason='PUBSUB PING is available since redis>=3.2.0')
@pytest.mark.run_loop
async def test_pubsub_ping(redis):
    await redis.subscribe('chan:1', 'chan:2')
    res = await redis.ping()
    assert res == b'PONG'
    res = await redis.ping('Hello')
    assert res == b'Hello'
    res = await redis.ping('Hello', encoding='utf-8')
    assert res == 'Hello'
    await redis.unsubscribe('chan:1', 'chan:2')
# Async iteration over a channel stops when the channel is closed.
@pytest.mark.run_loop
async def test_pubsub_channel_iter(create_redis, server, loop):
    sub = await create_redis(server.tcp_address, loop=loop)
    pub = await create_redis(server.tcp_address, loop=loop)
    ch, = await sub.subscribe('chan:1')
    async def coro(ch):
        lst = []
        async for msg in ch.iter():
            lst.append(msg)
        return lst
    tsk = asyncio.ensure_future(coro(ch), loop=loop)
    await pub.publish_json('chan:1', {'Hello': 'World'})
    await pub.publish_json('chan:1', ['message'])
    await asyncio.sleep(0, loop=loop)
    ch.close()
    assert await tsk == [b'{"Hello": "World"}', b'["message"]']
| ymap/aioredis | tests/pubsub_commands_test.py | Python | mit | 9,612 |
# -*- coding: utf-8
from __future__ import unicode_literals, absolute_import
import django
DEBUG = True
USE_TZ = True
# SECURITY WARNING: keep the secret key used in production secret!
# (This placeholder value is fine here: these settings are only used by
# the test suite.)
SECRET_KEY = "$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$"
# In-memory SQLite keeps the test database fast and throwaway.
DATABASES = {
    "default": {
        "ENGINE": "django.db.backends.sqlite3",
        "NAME": ":memory:",
    }
}
ROOT_URLCONF = "tests.urls"
INSTALLED_APPS = [
    "django.contrib.auth",
    "django.contrib.contenttypes",
    "django.contrib.sites",
    "django_oscar_stripe",
]
SITE_ID = 1
# Django 1.10 renamed MIDDLEWARE_CLASSES to MIDDLEWARE; set whichever
# the running version expects.
if django.VERSION >= (1, 10):
    MIDDLEWARE = ()
else:
    MIDDLEWARE_CLASSES = ()
| artforlife/django-oscar-stripe | tests/settings.py | Python | mit | 636 |
# coding=utf-8
# Copyright 2022 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Densnet handler.
Adapted from
https://github.com/pytorch/vision/blob/master/torchvision/models/densenet.py
"""
import functools
import numpy as np
import torch.nn as nn
import torch.nn.functional as F
from cascaded_networks.models import custom_ops
from cascaded_networks.models import dense_blocks
from cascaded_networks.models import model_utils
class DenseNet(nn.Module):
  """Densenet.

  Four dense stages separated by transition layers, followed by a linear
  classifier.  When constructed with cascaded=True, batch norm comes from
  custom_ops.BatchNorm2d and is indexed by a timestep t that forward()
  propagates to every block.
  """
  def __init__(self,
               name,
               block,
               block_arch,
               growth_rate=12,
               reduction=0.5,
               num_classes=10,
               **kwargs):
    """Initialize dense net.

    Args:
      name: Model name.
      block: Block class used inside each dense stage.
      block_arch: Sequence of four per-stage block counts.
      growth_rate: Channels each block adds within its stage.
      reduction: Channel compression factor applied by transitions.
      num_classes: Output size of the final linear layer.
      **kwargs: Must contain 'cascaded'; may contain 'bn_opts'.  The rest
        is forwarded to the block/transition constructors.
    """
    super(DenseNet, self).__init__()
    self.name = name
    self.growth_rate = growth_rate
    self._cascaded = kwargs['cascaded']
    self.block_arch = block_arch
    # The norm factory must exist before _build_net, which hands it to
    # every block and transition.
    self._norm_layer_op = self._setup_bn_op(**kwargs)
    self._build_net(block, block_arch, growth_rate,
                    reduction, num_classes, **kwargs)
  def _build_net(self,
                 block,
                 block_arch,
                 growth_rate,
                 reduction,
                 num_classes,
                 **kwargs):
    # Builds: stem conv -> (dense stage + transition) x3 -> dense stage
    # -> BN -> linear classifier.  self.layers collects every block and
    # transition so _set_time can reach them all.
    self.layers = []
    num_planes = 2 * growth_rate
    # Stem: 3x3 conv from RGB to the initial channel count.
    self.conv1 = nn.Conv2d(3, num_planes, kernel_size=3, padding=1, bias=False)
    self.dense1 = self._make_dense_layers(block, num_planes,
                                          block_arch[0], **kwargs)
    num_planes += block_arch[0] * growth_rate
    # Each transition compresses channels by `reduction`.
    out_planes = int(np.floor(num_planes * reduction))
    self.trans1 = dense_blocks.Transition(num_planes,
                                          out_planes,
                                          norm_layer=self._norm_layer_op,
                                          **kwargs)
    num_planes = out_planes
    self.dense2 = self._make_dense_layers(block, num_planes,
                                          block_arch[1], **kwargs)
    num_planes += block_arch[1] * growth_rate
    out_planes = int(np.floor(num_planes * reduction))
    self.trans2 = dense_blocks.Transition(num_planes,
                                          out_planes,
                                          norm_layer=self._norm_layer_op,
                                          **kwargs)
    num_planes = out_planes
    self.dense3 = self._make_dense_layers(block, num_planes,
                                          block_arch[2], **kwargs)
    num_planes += block_arch[2] * growth_rate
    out_planes = int(np.floor(num_planes * reduction))
    self.trans3 = dense_blocks.Transition(num_planes,
                                          out_planes,
                                          norm_layer=self._norm_layer_op,
                                          **kwargs)
    num_planes = out_planes
    # Fourth/final dense stage has no trailing transition.
    self.dense4 = self._make_dense_layers(block, num_planes,
                                          block_arch[3], **kwargs)
    num_planes += block_arch[3] * growth_rate
    self.bn = self._norm_layer_op(num_planes)
    self.fc = nn.Linear(num_planes, num_classes)
    self.layers.append(self.trans1)
    self.layers.append(self.trans2)
    self.layers.append(self.trans3)
  def _make_dense_layers(self, block, in_planes, n_blocks, **kwargs):
    """Build one dense stage of `n_blocks` blocks; input width grows by
    growth_rate per block."""
    layers = []
    for _ in range(n_blocks):
      block_i = block(in_planes,
                      self.growth_rate,
                      norm_layer=self._norm_layer_op,
                      **kwargs)
      self.layers.append(block_i)
      layers.append(block_i)
      in_planes += self.growth_rate
    return nn.Sequential(*layers)
  @property
  def timesteps(self):
    """Number of cascaded timesteps: total block count plus one."""
    return sum(self.block_arch) + 1
  def _setup_bn_op(self, **kwargs):
    """Return the norm-layer factory: time-indexed BN when cascaded,
    plain nn.BatchNorm2d otherwise."""
    if self._cascaded:
      self._norm_layer = custom_ops.BatchNorm2d
      # Setup batchnorm opts
      self.bn_opts = kwargs.get('bn_opts', {
          'affine': False,
          'standardize': False
      })
      self.bn_opts['n_timesteps'] = self.timesteps
      norm_layer_op = functools.partial(self._norm_layer, self.bn_opts)
    else:
      self._norm_layer = nn.BatchNorm2d
      norm_layer_op = self._norm_layer
    return norm_layer_op
  def _set_time(self, t):
    """Propagate timestep t to every block and transition."""
    for block in self.layers:
      block.set_time(t)
  def forward(self, x, t=0):
    """Forward pass; `t` selects the batch-norm timestep in cascaded mode."""
    # Set time on all blocks
    if self._cascaded:
      self._set_time(t)
    # Feature extraction
    out = self.conv1(x)
    out = self.dense1(out)
    out = self.trans1(out)
    out = self.dense2(out)
    out = self.trans2(out)
    out = self.dense3(out)
    out = self.trans3(out)
    out = self.dense4(out)
    # Classifier
    out = self.bn(out) if not self._cascaded else self.bn(out, t)
    out = F.avg_pool2d(F.relu(out), 4)
    out = out.view(out.size(0), -1)
    out = self.fc(out)
    return out
def make_densenet(name, block, layers, pretrained, growth_rate, **kwargs):
  """Construct a DenseNet and optionally load stored weights.

  Args:
    name: Model name, also used to locate pretrained weights.
    block: Block class used inside the dense stages.
    layers: Per-stage block counts.
    pretrained: When True, load weights via model_utils.load_model.
    growth_rate: Channels added by each dense block.
    **kwargs: Forwarded to the DenseNet constructor.

  Returns:
    The (possibly pretrained) DenseNet instance.
  """
  model = DenseNet(name, block, layers, growth_rate=growth_rate, **kwargs)
  if not pretrained:
    return model
  kwargs['model_name'] = name
  return model_utils.load_model(model, kwargs)
def densenet121(pretrained=False, **kwargs):
  """DenseNet-121: stages [6, 12, 24, 16], growth rate 32."""
  arch = [6, 12, 24, 16]
  return make_densenet('densenet121', dense_blocks.Bottleneck, arch,
                       pretrained, growth_rate=32, **kwargs)
def densenet161(pretrained=False, **kwargs):
  """DenseNet-161: stages [6, 12, 36, 24], growth rate 48."""
  arch = [6, 12, 36, 24]
  return make_densenet('densenet161', dense_blocks.Bottleneck, arch,
                       pretrained, growth_rate=48, **kwargs)
def densenet169(pretrained=False, **kwargs):
  """DenseNet-169: stages [6, 12, 32, 32], growth rate 32."""
  arch = [6, 12, 32, 32]
  return make_densenet('densenet169', dense_blocks.Bottleneck, arch,
                       pretrained, growth_rate=32, **kwargs)
def densenet201(pretrained=False, **kwargs):
  """DenseNet-201: stages [6, 12, 48, 32], growth rate 32."""
  arch = [6, 12, 48, 32]
  return make_densenet('densenet201', dense_blocks.Bottleneck, arch,
                       pretrained, growth_rate=32, **kwargs)
def densenet_cifar(pretrained=False, **kwargs):
  """DenseNet-121 layout sized for CIFAR: growth rate 16."""
  arch = [6, 12, 24, 16]
  return make_densenet('densenet121_cifar', dense_blocks.Bottleneck, arch,
                       pretrained, growth_rate=16, **kwargs)
| google-research/google-research | cascaded_networks/models/densenet.py | Python | apache-2.0 | 6,633 |
from setuptools import setup, find_packages
# Packaging metadata for the BioModels model archive; ships the model's
# XML files and README inside the package.
setup(name='MODEL8568434338',
      # NOTE(review): setuptools expects `version` as a string, not an int
      # — confirm '20140916' works with the tooling consuming this.
      version=20140916,
      description='MODEL8568434338 from BioModels',
      url='http://www.ebi.ac.uk/biomodels-main/MODEL8568434338',
      maintainer='Stanley Gu',
      # NOTE(review): 'maintainer_url' is not a setuptools keyword and the
      # value is an email address — this was probably meant to be
      # `maintainer_email`.  Left unchanged pending confirmation.
      maintainer_url='stanleygu@gmail.com',
      packages=find_packages(),
      package_data={'': ['*.xml', 'README.md']},
      ) | biomodels/MODEL8568434338 | setup.py | Python | cc0-1.0 | 377
import unittest
import iot_button
class FunctionalTest(unittest.TestCase):
    """End-to-end check of the IoT-button Lambda handler with fake inputs."""
    def test_1(self):
        # Event deliberately lacks a real 'clickType' key to exercise the
        # handler's tolerance of unexpected payload shapes.
        event = {"batteryVoltage": "testing", "serialNumber": "testing", "not_a_real_clickType": "LONG"}
        # Minimal stand-in for the AWS Lambda context object.
        context = {"aws_request_id": "foo",
                   "log_stream_name": "foo",
                   "invoked_function_arn": "foo",
                   "client_context": "",
                   "log_group_name": "foo",
                   "function_name": "foo",
                   "function_version": "$LATEST",
                   "identity": TestingCognitoIdentity(),
                   "memory_limit_in_mb": "128",
                   }
        self.assertEqual(iot_button.lambda_handler(event, context), 'success')
class TestingCognitoIdentity:
    """Minimal stand-in for the Cognito identity object on a Lambda context."""
    # Dummy attributes for handler code that inspects the identity object.
    foo = "foo"
    bar = "bar"
    def some_method(self):
        """Always succeed; exists only so the identity exposes a callable."""
        return True
if __name__ == '__main__':
    # Allow running this module directly as a test script.
    unittest.main()
| nanderson/iot_button_lambda_handler | test.py | Python | apache-2.0 | 889 |
from __future__ import absolute_import, unicode_literals
from .models import BlockedUser
from django.contrib import admin
# Expose BlockedUser in the Django admin using the default ModelAdmin.
admin.site.register(BlockedUser)
| emacsway/django-login-secure | login_secure/admin.py | Python | bsd-3-clause | 156 |
import logging
logger = logging.getLogger(__name__)
import trollius as asyncio
from winlib import (
INPUT_RECORD, DWORD, ReadConsoleInput, pointer, WinError,
GetStdHandle, STD_INPUT_HANDLE,
)
def platform_setup(driver):
    # Windows-specific setup: a ProactorEventLoop (required for IOCP-based
    # I/O on Windows) plus a console-input handle for keyboard polling.
    driver.loop = asyncio.ProactorEventLoop()
    asyncio.set_event_loop(driver.loop)
    driver.kbd_h = GetStdHandle(STD_INPUT_HANDLE)
def create_tap(driver):
    # Stub: Windows TAP-device creation is not implemented yet.  When it
    # is, it should populate driver.tap_h and driver.tap_name.
    #driver.tap_h = ??
    #driver.tap_name = ??
    raise NotImplementedError
def wait_for_keypress(driver):
    # Block until a key-down console event arrives (or driver.running goes
    # False) and return the pressed key's ASCII character.
    input_rec = INPUT_RECORD()
    rec_count = DWORD(0)
    # EventType 1 is KEY_EVENT in the Win32 console API; skip everything
    # else (mouse/resize events) and key-release events.
    while (driver.running and
           (input_rec.EventType != 1 or
            not input_rec.Event.KeyEvent.bKeyDown)):
        if not ReadConsoleInput(driver.kbd_h,
                                pointer(input_rec),
                                1,
                                pointer(rec_count)):
            raise WinError()
    return input_rec.Event.KeyEvent.uChar.AsciiChar
def register_int_callback(driver, callback):
    # Stub: interrupt-callback registration not implemented on Windows yet.
    raise NotImplementedError
def register_tapin_callback(driver, callback):
    # Stub: TAP-input callback registration not implemented on Windows yet.
    raise NotImplementedError
| sstjohn/thundergate | py/tap/win_fun.py | Python | gpl-3.0 | 1,103 |
# -*- coding: utf-8 -*-
"""
AsciiDoc Reader
===============
This plugin allows you to use AsciiDoc to write your posts.
File extension should be ``.asc``, ``.adoc``, or ``asciidoc``.
"""
from pelican.readers import BaseReader
from pelican import signals
import os
import re
import subprocess
def call(cmd):
    """Run *cmd* through the shell and return its stdout as text.

    stderr is captured and discarded.  Note: shell=True means *cmd* must
    never contain untrusted input.
    """
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE, shell=True)
    stdout, _ = proc.communicate()
    return stdout.decode('utf-8')
def default():
    """Return the first available AsciiDoc utility, or None.

    A utility counts as available when invoking it with --help produces
    any output.
    """
    for candidate in ALLOWED_CMDS:
        if call(candidate + " --help"):
            return candidate
    return None
# CLI renderers this plugin knows how to drive, in preference order.
ALLOWED_CMDS = ["asciidoc", "asciidoctor"]
# Enable the reader only when one of the renderers is actually installed.
# (`default() is not None` replaces the unidiomatic `None != default()`.)
ENABLED = default() is not None
class AsciiDocReader(BaseReader):
    """Reader for AsciiDoc files.

    Renders the document body by shelling out to an AsciiDoc utility
    (``asciidoc`` or ``asciidoctor``) and parses metadata (the title and
    ``:key: value`` attribute entries) directly from the source file.
    """
    enabled = ENABLED
    file_extensions = ['asc', 'adoc', 'asciidoc']
    default_options = ['--no-header-footer']
    # Attribute-entry lines look like ":name: value".  Compiled once here
    # instead of per line, and using [A-Za-z] rather than the original
    # [A-z]: that ASCII range also (wrongly) matched '[', '\', ']', '^',
    # '_' and '`', which sit between 'Z' and 'a'.
    METADATA_RE = re.compile(r"^:[A-Za-z]+:\s*[A-Za-z0-9]")
    def read(self, source_path):
        """Parse content and metadata of AsciiDoc files.

        Returns a ``(content, metadata)`` tuple where ``content`` is the
        rendered output (empty when no AsciiDoc utility is available).
        """
        cmd = self._get_cmd()
        content = ""
        if cmd:
            optlist = self.settings.get('ASCIIDOC_OPTIONS', []) + self.default_options
            options = " ".join(optlist)
            content = call("%s %s -o - %s" % (cmd, options, source_path))
        metadata = self._read_metadata(source_path)
        return content, metadata
    def _get_cmd(self):
        """Return the AsciiDoc utility command to use for rendering, or
        None if one cannot be found.  An explicit ASCIIDOC_CMD setting
        wins when it names a known utility."""
        if self.settings.get('ASCIIDOC_CMD') in ALLOWED_CMDS:
            return self.settings.get('ASCIIDOC_CMD')
        return default()
    def _read_metadata(self, source_path):
        """Parse the AsciiDoc file at ``source_path`` and return found
        metadata.

        Recognises both title forms (a "= Title" line and a title
        underlined with '=') plus ":key: value" attribute entries.
        """
        metadata = {}
        with open(source_path) as fi:
            prev = ""
            for line in fi.readlines():
                # Parse for doc title (only the first one found wins).
                if 'title' not in metadata:
                    title = ""
                    if line.startswith("= "):
                        # One-line form: "= My Title"
                        title = line[2:].strip()
                    elif line.count("=") == len(prev.strip()):
                        # Two-line form: previous line underlined by a row
                        # of '=' of matching length.
                        title = prev.strip()
                    if title:
                        metadata['title'] = self.process_metadata('title', title)
                # Parse for other metadata.
                if self.METADATA_RE.search(line):
                    toks = line.split(":", 2)
                    key = toks[1].strip().lower()
                    val = toks[2].strip()
                    metadata[key] = self.process_metadata(key, val)
                prev = line
        return metadata
def add_reader(readers):
    # Map every supported AsciiDoc extension to this reader class.
    for ext in AsciiDocReader.file_extensions:
        readers.reader_classes[ext] = AsciiDocReader
def register():
    # Pelican plugin entry point: hook reader registration at init time.
    signals.readers_init.connect(add_reader)
| benjaminabel/pelican-plugins | asciidoc_reader/asciidoc_reader.py | Python | agpl-3.0 | 2,946 |
# coding: utf-8
import datetime
import sys
import unittest
try:
import ezcf
except ImportError:
sys.path.append('../')
import ezcf
from subdir.sample_json import *
from subdir.sample_yaml import *
from subdir.sample_ini import *
from subdir.sample_xml import *
class TestProto(unittest.TestCase):
    # Every name asserted below is injected by the wildcard imports above:
    # ezcf turns the JSON/YAML/INI/XML files under subdir/ into importable
    # module attributes.
    def test_import_all(self):
        # From the JSON sample.
        self.assertEqual(hello, "world")
        self.assertEqual(a_list, [1, 2, 3])
        self.assertEqual(a_dict, {
            "key1": 1000,
            "key2": [u"你好", 100]
        })
        # From the YAML sample (dates parse into datetime objects).
        self.assertEqual(Date, datetime.datetime(2001, 11, 23, 20, 3, 17))
        self.assertEqual(Fatal, 'Unknown variable "bar"')
        self.assertEqual(
            Stack,
            [{'code': 'x = MoreObject("345\\n")\n',
              'file': 'TopClass.py',
              'line': 23},
             {'code': 'foo = bar', 'file': 'MoreClass.py', 'line': 58}])
        self.assertEqual(Time, datetime.datetime(2001, 11, 23, 20, 2, 31))
        self.assertEqual(User, 'ed')
        self.assertEqual(warning, 'A slightly different error message.')
        # From the INI sample (sections become nested dicts).
        self.assertEqual(keyword1, 'value1')
        self.assertEqual(keyword2, 'value2')
        self.assertEqual(
            section1,
            {
                'keyword1': 'value1', 'keyword2': 'value2',
                'sub-section': {
                    'keyword1': 'value1', 'keyword2': 'value2',
                    'nested section': {
                        'keyword1': 'value1', 'keyword2': 'value2',
                    },
                },
                'sub-section2': {
                    'keyword1': 'value1', 'keyword2': 'value2',
                },
            }
        )
        self.assertEqual(section2,
                         {'keyword1': 'value1', 'keyword2': 'value2'})
        # From the XML sample.
        self.assertEqual(note, {"to": u"我", "from": "you"}) | laike9m/ezcf | tests/test_wildcard_sub.py | Python | mit | 1879
self.assertEqual(note, {"to": u"我", "from": "you"}) | laike9m/ezcf | tests/test_wildcard_sub.py | Python | mit | 1,879 |
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
import requests
from brickclient import client
from brickclient import exceptions
from brickclient.tests import utils
# Canned HTTP responses and mock `requests.request` callables shared by
# the tests below.
# Successful response with a JSON body.
fake_response = utils.TestResponse({
    "status_code": 200,
    "text": '{"hi": "there"}',
})
# Successful auth response that lacks the management-url header/body.
fake_response_empty = utils.TestResponse({
    "status_code": 200,
    "text": '{"access": {}}'
})
mock_request = mock.Mock(return_value=(fake_response))
mock_request_empty = mock.Mock(return_value=(fake_response_empty))
# Client error: should not be retried.
bad_400_response = utils.TestResponse({
    "status_code": 400,
    "text": '{"error": {"message": "n/a", "details": "Terrible!"}}',
})
bad_400_request = mock.Mock(return_value=(bad_400_response))
# Unauthorized: triggers re-authentication.
bad_401_response = utils.TestResponse({
    "status_code": 401,
    "text": '{"error": {"message": "FAILED!", "details": "DETAILS!"}}',
})
bad_401_request = mock.Mock(return_value=(bad_401_response))
# Server error: retried when retries > 0.
bad_500_response = utils.TestResponse({
    "status_code": 500,
    "text": '{"error": {"message": "FAILED!", "details": "DETAILS!"}}',
})
bad_500_request = mock.Mock(return_value=(bad_500_response))
# Simulates a network-level failure.
connection_error_request = mock.Mock(
    side_effect=requests.exceptions.ConnectionError)
def get_client(retries=0):
    """Build an unauthenticated test HTTPClient with dummy credentials."""
    return client.HTTPClient("username", "password",
                             "project_id", "auth_test", retries=retries)
def get_authed_client(retries=0):
    """Return a test client pre-populated with a token and endpoint."""
    authed = get_client(retries=retries)
    authed.management_url = "http://example.com"
    authed.auth_token = "token"
    return authed
class ClientTest(utils.TestCase):
    """Tests HTTPClient request dispatch, retry behaviour and auth errors.

    The retry tests queue canned responses in self.requests; the local
    `request` shim pops one per call, so an empty list at the end proves
    the expected number of HTTP round-trips happened.
    """
    def test_get(self):
        cl = get_authed_client()
        @mock.patch.object(requests, "request", mock_request)
        @mock.patch('time.time', mock.Mock(return_value=1234))
        def test_get_call():
            resp, body = cl.get("/hi")
            headers = {"X-Auth-Token": "token",
                       "X-Auth-Project-Id": "project_id",
                       "User-Agent": cl.USER_AGENT,
                       'Accept': 'application/json', }
            mock_request.assert_called_with(
                "GET",
                "http://example.com/hi",
                headers=headers,
                **self.TEST_REQUEST_BASE)
            # Automatic JSON parsing
            self.assertEqual({"hi": "there"}, body)
        test_get_call()
    def test_get_reauth_0_retries(self):
        # A 401 must trigger re-authentication even with retries=0.
        cl = get_authed_client(retries=0)
        self.requests = [bad_401_request, mock_request]
        def request(*args, **kwargs):
            next_request = self.requests.pop(0)
            return next_request(*args, **kwargs)
        def reauth():
            cl.management_url = "http://example.com"
            cl.auth_token = "token"
        @mock.patch.object(cl, 'authenticate', reauth)
        @mock.patch.object(requests, "request", request)
        @mock.patch('time.time', mock.Mock(return_value=1234))
        def test_get_call():
            resp, body = cl.get("/hi")
        test_get_call()
        self.assertEqual([], self.requests)
    def test_get_retry_500(self):
        # A 500 is retried once when retries=1.
        cl = get_authed_client(retries=1)
        self.requests = [bad_500_request, mock_request]
        def request(*args, **kwargs):
            next_request = self.requests.pop(0)
            return next_request(*args, **kwargs)
        @mock.patch.object(requests, "request", request)
        @mock.patch('time.time', mock.Mock(return_value=1234))
        def test_get_call():
            resp, body = cl.get("/hi")
        test_get_call()
        self.assertEqual([], self.requests)
    def test_get_retry_connection_error(self):
        # A ConnectionError is retried like a server error.
        cl = get_authed_client(retries=1)
        self.requests = [connection_error_request, mock_request]
        def request(*args, **kwargs):
            next_request = self.requests.pop(0)
            return next_request(*args, **kwargs)
        @mock.patch.object(requests, "request", request)
        @mock.patch('time.time', mock.Mock(return_value=1234))
        def test_get_call():
            resp, body = cl.get("/hi")
        test_get_call()
        self.assertEqual(self.requests, [])
    def test_retry_limit(self):
        # Two consecutive 500s exceed retries=1: the error propagates and
        # the final canned success is never consumed.
        cl = get_authed_client(retries=1)
        self.requests = [bad_500_request, bad_500_request, mock_request]
        def request(*args, **kwargs):
            next_request = self.requests.pop(0)
            return next_request(*args, **kwargs)
        @mock.patch.object(requests, "request", request)
        @mock.patch('time.time', mock.Mock(return_value=1234))
        def test_get_call():
            resp, body = cl.get("/hi")
        self.assertRaises(exceptions.ClientException, test_get_call)
        self.assertEqual([mock_request], self.requests)
    def test_get_no_retry_400(self):
        # Client errors (400) are not retried with retries=0.
        cl = get_authed_client(retries=0)
        self.requests = [bad_400_request, mock_request]
        def request(*args, **kwargs):
            next_request = self.requests.pop(0)
            return next_request(*args, **kwargs)
        @mock.patch.object(requests, "request", request)
        @mock.patch('time.time', mock.Mock(return_value=1234))
        def test_get_call():
            resp, body = cl.get("/hi")
        self.assertRaises(exceptions.BadRequest, test_get_call)
        self.assertEqual([mock_request], self.requests)
    def test_get_retry_400_socket(self):
        # With retries=1 even a 400 is retried once.
        cl = get_authed_client(retries=1)
        self.requests = [bad_400_request, mock_request]
        def request(*args, **kwargs):
            next_request = self.requests.pop(0)
            return next_request(*args, **kwargs)
        @mock.patch.object(requests, "request", request)
        @mock.patch('time.time', mock.Mock(return_value=1234))
        def test_get_call():
            resp, body = cl.get("/hi")
        test_get_call()
        self.assertEqual([], self.requests)
    def test_post(self):
        cl = get_authed_client()
        @mock.patch.object(requests, "request", mock_request)
        def test_post_call():
            cl.post("/hi", body=[1, 2, 3])
            headers = {
                "X-Auth-Token": "token",
                "X-Auth-Project-Id": "project_id",
                "Content-Type": "application/json",
                'Accept': 'application/json',
                "User-Agent": cl.USER_AGENT
            }
            mock_request.assert_called_with(
                "POST",
                "http://example.com/hi",
                headers=headers,
                data='[1, 2, 3]',
                **self.TEST_REQUEST_BASE)
        test_post_call()
    def test_auth_failure(self):
        cl = get_client()
        # response must not have x-server-management-url header
        @mock.patch.object(requests, "request", mock_request_empty)
        def test_auth_call():
            self.assertRaises(exceptions.AuthorizationFailure,
                              cl.authenticate)
        test_auth_call()
    def test_auth_not_implemented(self):
        cl = get_client()
        # response must not have x-server-management-url header
        # {'hi': 'there'} is neither V2 or V3
        @mock.patch.object(requests, "request", mock_request)
        def test_auth_call():
            self.assertRaises(NotImplementedError, cl.authenticate)
        test_auth_call()
| e0ne/python-brickagentclient | brickclient/tests/test_http.py | Python | apache-2.0 | 7,748 |
# enable or disable the whole program
ENABLED = True
# if we're in testing mode, output more debug and allow testers to add their own email
DEBUG = True
# used with above, you can check the output of emails that would have been sent
SEND_EMAILS = True
# iSAMS Batch API key
API_KEY = "11D497FF-A7D9-4646-A6B8-D9D1B8718FAC"
# iSAMS URL
URL = 'https://isams.school.com'
# Choose which connection method from: JSON, XML, MSSQL
CONNECTION_METHOD = 'JSON'
# Database settings (only needed for the MSSQL connection method)
DATABASE = ''
DATABASE_SERVER = ''
DATABASE_USER = ''
DATABASE_PASSWORD = ''
# specify your own dates to use when testing, e.g. a date that has already had the register taken for
DEBUG_START_DATE = '2016-09-18'
DEBUG_END_DATE = '2016-09-19'
# allows you to specify a file with XML or JSON content to test with rather than using live data
DEBUG_DATA = 'test_data.xml'
# outgoing SMTP details
EMAIL = {
    'server': 'smtp.example.com',
    'port': 465,
    'username': 'john@company.com',
    'password': 'p455w0rd',
    'subject': 'Register not completed',
    'from': 'isams@company.com',
    'to': 'isams@company.com',
    'cc': 'reception@company.com',
    'bcc': 'manager@company.com'
}
# whether to log into the SMTP server
EMAIL_LOGIN = True
# whether to create an SSL connection or not
EMAIL_SSL = True
# Default: Monday - Friday, 0 = Mon, 6 = Sun
WORKING_DAYS = (0, 1, 2, 3, 4)
# weekdays which are not school days
# for help generating these:
# import pandas
# pandas.bdate_range('2016-12-15', '2017-01-07')
HOLIDAYS = (
    # Winter break
    '2016-12-15', '2016-12-16', '2016-12-19', '2016-12-20',
    '2016-12-21', '2016-12-22', '2016-12-23', '2016-12-26',
    '2016-12-27', '2016-12-28', '2016-12-29', '2016-12-30',
    '2017-01-02', '2017-01-03', '2017-01-04', '2017-01-05',
    '2017-01-06',
)
# email templates
FIRST_EMAIL = """
Dear Teacher,
This is a friendly reminder to complete your register. One or more of your students has not yet been registered.
If you are having problems completing it, please email XXX
If this message is in error, please forward to the helpdesk.
Regards,
iSAMS Bot
"""
SECOND_EMAIL = """
Dear Teacher,
One or more of your students have still not been registered.
If you are having problems completing it, please email XXX
If this message is in error, please forward to the helpdesk.
Regards,
iSAMS Bot
"""
# You can use %list_of_missing_registers% for a list in the template
FINAL_EMAIL = """
Here is a list of forms that are still outstanding:
%list_of_missing_registers%
Regards,
iSAMS Bot
"""
# separate with commas if you want more than one recipient
FINAL_EMAIL_TO = "reception@company.com"
#######################
# Data Check Settings #
#######################
# NOTE(review): name is misspelled ("ENABED"); kept as-is because other
# modules read settings by this exact name — rename in a coordinated change.
DATA_CHECK_ENABED = True
# who to email when it fails
DATA_CHECK_FAIL_EMAIL = "manager@company.com"
# list of subjects to ignore from checks in single quotes
DATA_CHECK_IGNORE_SUBJECTS = ["Games", "Physical Education"] | CranleighAD/isams-tools | settings_example.py | Python | gpl-3.0 | 2,947 |
"""
Write a method to replace all spaces in a string with '%20'. You may assume that the string has suffcient space at the
end of the string to hold the additional characters, and that you are given the "true" length of the string.
Example:
Input: "Mr John Smith "
Output: "Mr%20John%20Smith"
"""
def replace_spaces(str_mod):
    """Return *str_mod* with every space in its true content replaced by '%20'.

    In the classic formulation the input ends with trailing-space padding that
    exists only to make room for the expansion; that padding is not part of
    the content.  The original implementation shifted characters one-by-one
    (O(n^2)), redefined a helper function on every loop iteration, and -- by
    accident of enumerating a stale list -- left the padding spaces in the
    returned string, contradicting its own documented example.

    Example:
        replace_spaces('Mr John Smith    ') -> 'Mr%20John%20Smith'
    """
    # Trailing spaces are buffer padding, not content: drop them first,
    # then expand the remaining (interior) spaces in one linear pass.
    true_content = str_mod.rstrip(' ')
    return true_content.replace(' ', '%20')
def main():
    """Ad-hoc demo: print replace_spaces() results for a few sample inputs."""
    # NOTE(review): the trailing spaces in these literals are the "buffer
    # padding" from the problem statement; their exact count (and the
    # inconsistent spacing in the 'Input:'/'Output:' labels) is preserved
    # as-is -- verify against the original source if it matters.
    test = "Mr John "
    print('Input: ', test)
    print('Output: ', replace_spaces(test))
    test = 'Mr John Smith '
    print('Input:', test)
    print('Output: ', replace_spaces(test))
    test = ' '
    print('Input: ', test)
    print('Output:', replace_spaces(test))
if __name__ == '__main__':
    main()
| MFry/pyAlgoDataStructures | Cracking the Coding Interview/Arrays_and_Strings/question4.py | Python | mit | 1,278 |
import web
from time import sleep
stat_and_cap='''
{
"ma-status-and-capabilities": {
"ma-agent-id": "550e8400-e29b-41d4-a716-446655440000",
"ma-device-id": "urn:dev:mac:0024befffe804ff1",
"ma-hardware": "mfr-home-gateway-v10",
"ma-firmware": "25637748-rev2a",
"ma-version": "ispa-v1.01",
"ma-interfaces": [
{
"ma-interface-name": "broadband",
"ma-interface-type": "PPPoE"
}
],
"ma-last-measurement": "",
"ma-last-report": "",
"ma-last-instruction": "",
"ma-last-configuration": "2014-06-08T22:47:31+00:00",
"ma-supported-tasks": [
{
"ma-task-name": "Controller configuration",
"ma-task-registry": "urn:ietf:lmap:control:http_controller_configuration"
},
{
"ma-task-name": "Controller status and capabilities",
"ma-task-registry": "urn:ietf:lmap:control:http_controller_status_and_capabilities"
},
{
"ma-task-name": "Controller instruction",
"ma-task-registry": "urn:ietf:lmap:control:http_controller_instruction"
},
{
"ma-task-name": "Report",
"ma-task-registry": "urn:ietf:lmap:report:http_report"
},
{
"ma-task-name": "UDP Latency",
"ma-task-registry": "urn:ietf:ippm:measurement:UDPLatency-Poisson-XthPercentileMean"
}
]
}
}
'''
urls= (
"/ma/config", "config",
"/ma/ins", "ins",
"/ma/cap", "cap"
)
class cap:
    """Handler for GET /ma/cap: serve the canned status-and-capabilities JSON."""
    def GET(self):
        """Return the static capabilities document as application/json."""
        payload = stat_and_cap
        web.header('Content-Type', 'application/json')
        return payload
class ins:
    """Handler for POST /ma/ins: receive an LMAP instruction from the controller."""
    def POST(self):
        # Raw request body -- the instruction payload pushed by the controller.
        data=web.data()
        web.header('Content-Type', 'application/json')
        # Python 2 print statement: echo the payload to stdout for debugging.
        print data
class config:
    """Handler for PUT /ma/config: receive a configuration document."""
    def PUT(self):
        web.header('Content-Type', 'application/json')
        # Raw request body -- the configuration pushed by the controller.
        data=web.data()
        # Python 2 print statement: echo the payload to stdout for debugging.
        print data
def my_loadhook():
    # Runs before each request (installed via web.loadhook in __main__).
    print "my load hook"
def my_unloadhook():
    # Runs after each request; the 10 s sleep artificially delays response
    # teardown -- presumably to exercise client-side timeouts (verify).
    print "my unload hook"; sleep(10)
if __name__ == "__main__":
app = web.application(urls, globals())
app.add_processor(web.loadhook(my_loadhook))
app.add_processor(web.unloadhook(my_unloadhook))
app.run()
| andrewleu/controller-and-server | ma.py | Python | apache-2.0 | 2,145 |
#!/usr/bin/env python
#
# Copyright 2012 Nick Foster
#
# This file is part of gr-air-modes
#
# gr-air-modes is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# gr-air-modes is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with gr-air-modes; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
# azimuthal projection widget to plot reception range vs. azimuth
from PyQt4 import QtCore, QtGui
import threading
import math
import air_modes
# model has max range vs. azimuth in n-degree increments
# contains separate max range for a variety of altitudes so
# you can determine your altitude dropouts by bearing
# assumes that if you can hear ac at 1000', you can hear at 5000'+.
class az_map_model(QtCore.QObject):
    """Max-range-vs-azimuth model.

    Stores the furthest distance at which an aircraft has been heard,
    bucketed by 5-degree bearing sector (rows) and altitude band (columns --
    the floors listed in _altitudes).  Access is serialized with a lock;
    dataChanged is emitted whenever a stored maximum grows.
    """
    dataChanged = QtCore.pyqtSignal(name='dataChanged')
    # BUGFIX: use floor division so npoints stays an int under Python 3;
    # plain 360/5 would be the float 72.0 there and break range() and the
    # row-index modulo.  Value is identical (72) under Python 2.
    npoints = 360//5
    def __init__(self, parent=None):
        super(az_map_model, self).__init__(parent)
        self._data = []
        self.lock = threading.Lock()
        # Altitude band ceilings in feet; column i holds the max range for
        # aircraft at or below _altitudes[i].
        self._altitudes = [0, 1000, 2000, 5000, 10000, 15000, 20000, 25000, 30000]
        #initialize everything to 0
        for i in range(0,az_map_model.npoints):
            self._data.append([0] * len(self._altitudes))
    def rowCount(self):
        """Number of bearing sectors."""
        return len(self._data)
    def columnCount(self):
        """Number of altitude bands."""
        return len(self._altitudes)
    def data(self, row, col):
        """Max range recorded for bearing sector `row`, altitude band `col`."""
        return self._data[row][col]
    def addRecord(self, bearing, altitude, distance):
        """Fold one report (bearing deg, altitude ft, distance) into the model."""
        with self.lock:
            #round up to nearest altitude in altitudes list
            #there's probably another way to do it
            if altitude >= max(self._altitudes):
                col = self.columnCount()-1
            else:
                col = self._altitudes.index(min([alt for alt in self._altitudes if alt >= altitude]))
            #find which bearing row we sit in
            row = int(int(bearing+(180./az_map_model.npoints)) / (360./az_map_model.npoints)) % az_map_model.npoints
            #set max range for all alts >= the ac alt
            #this expresses the assumption that higher ac can be heard further
            update = False
            for i in range(col, len(self._altitudes)):
                if distance > self._data[row][i]:
                    self._data[row][i] = distance
                    update = True
            if update:
                self.dataChanged.emit()
    def reset(self):
        """Zero out all recorded maxima and notify listeners."""
        with self.lock:
            self._data = []
            for i in range(0,az_map_model.npoints):
                self._data.append([0] * len(self._altitudes))
            self.dataChanged.emit()
# the azimuth map widget
class az_map(QtGui.QWidget):
    """Polar coverage widget: plots max reception range vs. azimuth.

    Draws one outline path per altitude band from the attached az_map_model
    (brighter for higher bands) over concentric range rings.
    """
    maxrange = 450
    ringsize = 100
    bgcolor = QtCore.Qt.black
    ringpen = QtGui.QPen(QtGui.QColor(0, 96, 127, 255), 1.3)
    #rangepen = QtGui.QPen(QtGui.QColor(255, 255, 0, 255), 1.0)
    def __init__(self, parent=None):
        super(az_map, self).__init__(parent)
        self._model = None
        self._paths = []
        self.maxrange = az_map.maxrange
        self.ringsize = az_map.ringsize
    def minimumSizeHint(self):
        return QtCore.QSize(50, 50)
    def sizeHint(self):
        return QtCore.QSize(300, 300)
    def setModel(self, model):
        """Attach an az_map_model; repaint whenever its data changes."""
        self._model = model
        self._model.dataChanged.connect(self.repaint)
    def paintEvent(self, event):
        painter = QtGui.QPainter(self)
        painter.setRenderHint(QtGui.QPainter.Antialiasing)
        #TODO: make it not have to redraw paths EVERY repaint
        #drawing paths is VERY SLOW
        #maybe use a QTimer to limit repaints
        self.drawPaths()
        #set background
        painter.fillRect(event.rect(), QtGui.QBrush(az_map.bgcolor))
        #draw the range rings
        self.drawRangeRings(painter)
        for i in range(len(self._paths)):
            # fade lower altitude bands; highest band drawn brightest
            alpha = 230 * (i+1) / (len(self._paths)) + 25
            painter.setPen(QtGui.QPen(QtGui.QColor(alpha,alpha,0,255), 1.0))
            painter.drawPath(self._paths[i])
    def drawPaths(self):
        """Rebuild the per-altitude-band outline paths from the model data."""
        self._paths = []
        if(self._model):
            for alt in range(0, self._model.columnCount()):
                path = QtGui.QPainterPath()
                for i in range(az_map_model.npoints-1,-1,-1):
                    #bearing is to start point of arc (clockwise)
                    bearing = (i+0.5) * 360./az_map_model.npoints
                    distance = self._model._data[i][alt]
                    radius = min(self.width(), self.height()) / 2.0
                    scale = radius * distance / self.maxrange
                    #convert bearing,distance to x,y
                    xpts = scale * math.sin(bearing * math.pi / 180)
                    ypts = scale * math.cos(bearing * math.pi / 180)
                    #get the bounding rectangle of the arc
                    arcrect = QtCore.QRectF(QtCore.QPointF(0-scale, 0-scale),
                                            QtCore.QPointF(scale, scale))
                    if path.isEmpty():
                        path.moveTo(xpts, 0-ypts) #so we don't get a line from 0,0 to the first point
                    else:
                        path.lineTo(xpts, 0-ypts)
                    path.arcTo(arcrect, 90-bearing, 360./az_map_model.npoints)
                self._paths.append(path)
    def drawRangeRings(self, painter):
        painter.translate(self.width()/2, self.height()/2)
        painter.setPen(az_map.ringpen)
        for i in range(0, self.maxrange, self.ringsize):
            # BUGFIX: scale rings by the instance's maxrange, not the class
            # attribute, so rings stay correct after setMaxRange().
            diameter = (float(i) / self.maxrange) * min(self.width(), self.height())
            painter.drawEllipse(QtCore.QRectF(-diameter / 2.0,
                                -diameter / 2.0, diameter, diameter))
    def setMaxRange(self, maxrange):
        """Set the full-scale plot range (same units as model distances)."""
        self.maxrange = maxrange
        # BUGFIX: was self.drawPath(), which doesn't exist (AttributeError);
        # the path-rebuild method is drawPaths().
        self.drawPaths()
    def setRingSize(self, ringsize):
        """Set the spacing between range rings."""
        self.ringsize = ringsize
        # BUGFIX: was self.drawPath() -- see setMaxRange().
        self.drawPaths()
self.drawPath()
class az_map_output(air_modes.parse):
    """Mode S parser sink that feeds decoded positions into an az_map_model."""
    def __init__(self, mypos, model):
        air_modes.parse.__init__(self, mypos)
        self.model = model
    def output(self, msg):
        # msg is a whitespace-separated record: hex payload, hex ecc,
        # reference level, timestamp (presumably as emitted by the
        # gr-air-modes demodulator -- verify against the framer).
        [data, ecc, reference, timestamp] = msg.split()
        data = air_modes.modes_reply(long(data, 16))  # Python 2 long()
        ecc = long(ecc, 16)
        # NOTE(review): rssi and now are computed but never used below.
        rssi = 10.*math.log10(float(reference))
        msgtype = data["df"]
        now = time.time()
        # DF17: ADS-B extended squitter
        if msgtype == 17:
            icao = data["aa"]
            subtype = data["ftc"]
            distance, altitude, bearing = [0,0,0]
            # format type codes 5-8: surface position -- altitude taken as 0
            if 5 <= subtype <= 8:
                (ground_track, decoded_lat, decoded_lon, distance, bearing) = self.parseBDS06(data)
                altitude = 0
            # format type codes 9-18: airborne position
            elif 9 <= subtype <= 18:
                (altitude, decoded_lat, decoded_lon, distance, bearing) = self.parseBDS05(data)
            self.model.addRecord(bearing, altitude, distance)
##############################
# Test stuff
##############################
import random, time
class model_updater(threading.Thread):
    """Test helper: daemon thread that fills an az_map_model with random data."""
    def __init__(self, model):
        super(model_updater, self).__init__()
        self.model = model
        self.setDaemon(1)
        self.done = False
        # Start immediately on construction.
        self.start()
    def run(self):
        # Sweep every bearing sector once, adding a random range for each
        # altitude band (scaled so higher bands reach further out).
        for i in range(az_map_model.npoints):
            time.sleep(0.005)
            if(self.model):
                for alt in self.model._altitudes:
                    self.model.addRecord(i*360./az_map_model.npoints, alt, random.randint(0,az_map.maxrange)*alt / max(self.model._altitudes))
        self.done = True
class Window(QtGui.QWidget):
    """Test harness window: an az_map widget wired to a randomly-fed model."""
    def __init__(self):
        super(Window, self).__init__()
        layout = QtGui.QGridLayout()
        self.model = az_map_model()
        mymap = az_map(None)
        mymap.setModel(self.model)
        # Background thread that pumps random records into the model.
        self.updater = model_updater(self.model)
        layout.addWidget(mymap, 0, 1)
        self.setLayout(layout)
if __name__ == '__main__':
    # Manual test entry point: show the harness window and run the Qt loop.
    import sys
    app = QtGui.QApplication(sys.argv)
    window = Window()
    window.show()
    window.update()
    sys.exit(app.exec_())
| rsenykoff/gr-air-modes-kml | python/az_map.py | Python | gpl-3.0 | 8,512 |
from __future__ import absolute_import
from .base import flatten, url_test
URLS = flatten((
# bug 832348 **/index.html -> **/
url_test('/any/random/url/with/index.html', '/any/random/url/with/'),
# bug 774675
url_test('/en/', '/en-US/'),
url_test('/es/', '/es-ES/'),
url_test('/pt/', '/pt-BR/'),
# bug 795970 - lowercase to uppercase, e.g. en-us to en-US
url_test('/en-us/firefox/', '/en-US/firefox/'),
url_test('/es-es/firefox/', '/es-ES/firefox/'),
url_test('/pt-br/MPL/', '/pt-BR/MPL/'),
# bug 880182
url_test('/ja-JP-mac/', '/ja/'),
# bug 795970 - lowercase to uppercase, e.g. en-us to en-US
url_test('/en-us/', '/en-US/'),
url_test('/pt-br/', '/pt-BR/'),
# bug 845988 - remove double slashes in URLs
url_test('/en-US/firefox//all/', '/en-US/firefox/all/'),
url_test('/pt-BR/////thunderbird/', '/pt-BR/thunderbird/'),
# bug 755826, 1222348
url_test('/zh-CN/', 'http://www.firefox.com.cn/', query={
'utm_medium': 'referral',
'utm_source': 'mozilla.org'
}),
# bug 764261, 841393, 996608, 1008162, 1067691, 1113136, 1119022, 1131680, 1115626
url_test('/zh-TW/', 'http://mozilla.com.tw/'),
url_test('/zh-TW/mobile/', 'http://mozilla.com.tw/firefox/mobile/'),
url_test('/zh-TW/download/', 'http://mozilla.com.tw/firefox/download/'),
# bug 874913
url_test('/en-US/products/download.html{,?stuff=whatnot}', '/en-US/firefox/new/'),
# bug 845580
url_test('/en-US/home/', '/en-US/firefox/new/'),
# bug 948605
url_test('/en-US/firefox/xp-any-random-thing', '/en-US/firefox/new/'),
url_test('/en-US/products/firefox/start/', 'http://start.mozilla.org'),
url_test('/start/the-sm-one', 'http://www.seamonkey-project.org/start/',
req_headers={'User-Agent': 'mozilla seamonkey'},
resp_headers={'vary': 'user-agent'}),
url_test('/start/any-random-thing', '/firefox/new/',
resp_headers={'vary': 'user-agent'}),
# bug 856081 redirect /about/drivers https://wiki.mozilla.org/Firefox/Drivers
url_test('/about/drivers{/,.html}', 'https://wiki.mozilla.org/Firefox/Drivers'),
# community
# bug 885797
url_test('/community/{directory,wikis,blogs,websites}.html',
'https://wiki.mozilla.org/Websites/Directory'),
# bug 885856
url_test('/projects/index.{de,fr,hr,sq}.html', '/{de,fr,hr,sq}/firefox/products/'),
# bug 856075
url_test('/projects/technologies.html',
'https://developer.mozilla.org/docs/Mozilla/Using_Mozilla_code_in_other_projects'),
# bug 787269
url_test('/projects/security/components/signed-script{s,-example}.html',
'https://developer.mozilla.org/docs/Bypassing_Security_Restrictions_and_Signing_Code'),
# bug 874526, 877698
url_test('/projects/security/components/any-random-thing',
'http://www-archive.mozilla.org/projects/security/components/any-random-thing'),
# bug 876889
url_test('/projects/testopia/',
'https://developer.mozilla.org/docs/Mozilla/Bugzilla/Testopia'),
# bug 874525
url_test('/projects/security/pki/{n,j}ss/random-thing',
'https://developer.mozilla.org/docs/{N,J}SS'),
# bug 866190
url_test('/projects/security/pki/python-nss/',
'https://developer.mozilla.org/docs/Python_binding_for_NSS'),
# bug 1043035
url_test('/projects/security/pki/{,index.html}',
'https://developer.mozilla.org/docs/PKI'),
url_test('/projects/security/pki/pkcs11-random-thing',
'https://developer.mozilla.org/docs/Mozilla/Projects/NSS#PKCS_.2311_information'),
url_test('/projects/security/pki/psm-random-thing',
'https://developer.mozilla.org/docs/Mozilla/Projects/PSM'),
url_test('/projects/security/pki/src-random-thing',
'https://developer.mozilla.org/docs/Mozilla/Projects/NSS/NSS_Sources_Building_Testing'),
# bug 975476
url_test('/projects/security/pki/python-nss/doc/api/current/html/random/stuff/',
'https://mozilla.github.io/python-nss-docs/random/stuff/'),
# bug 780672
url_test('/firefox/webhero/random/stuff/', '/firefox/new/'),
# bug 964107
url_test('/firefox/video/random/stuff/', 'https://www.youtube.com/firefoxchannel'),
# bug 948520
url_test('/firefox/livebookmarks/random/stuff/',
'https://support.mozilla.org/kb/Live%20Bookmarks'),
# bug 782333
url_test('/firefox/backtoschool/',
'https://addons.mozilla.org/firefox/collections/mozilla/back-to-school/'),
url_test('/firefox/backtoschool/firstrun/', '/firefox/firstrun/'),
# bug 824126, 837942
url_test('/ports/qtmozilla/{,index.html}', 'https://wiki.mozilla.org/Qt'),
url_test('/ports/os2/', 'https://wiki.mozilla.org/Ports/os2'),
url_test('/ports/other-things/', 'http://www-archive.mozilla.org/ports/other-things/'),
# bug 1013082
url_test('/ja/', 'http://www.mozilla.jp/'),
# bug 1051686
url_test('/ja/firefox/organizations/', 'http://www.mozilla.jp/business/downloads/'),
# bug 1205632
url_test('/js/language/',
'https://developer.mozilla.org/docs/Web/JavaScript/Language_Resources'),
url_test('/js/language/js20/', 'http://www.ecmascript-lang.org'),
url_test('/js/language/es4/', 'http://www.ecmascript-lang.org'),
url_test('/js/language/E262-3-errata.html',
'http://www-archive.mozilla.org/js/language/E262-3-errata.html'),
# bug 1138280
url_test('/ja/firefox/beta/notes/', 'http://www.mozilla.jp/firefox/beta/notes/'),
url_test('/ja/thunderbird/notes/', 'http://www.mozilla.jp/thunderbird/notes/'),
url_test('/ja/thunderbird/android/2.2beta/releasenotes/',
'http://www.mozilla.jp/thunderbird/android/2.2beta/releasenotes/'),
# bug 987059, 1050149, 1072170, 1208358
url_test('/ja/about/', 'http://www.mozilla.jp/about/mozilla/'),
url_test('/ja/about/japan/', 'http://www.mozilla.jp/about/japan/'),
# bug 927442
url_test('{/firefox,}/community/', '/contribute/'),
# bug 925551
url_test('/plugincheck/more_info.html', '/plugincheck/'),
# bug 854561
url_test('/projects/mozilla-based{.html,/}', '/about/mozilla-based/'),
# bug 851727
url_test('/projects/powered-by{.html,/}', '/about/powered-by/'),
# bug 957664
url_test('/press/awards{/,.html}', 'https://blog.mozilla.org/press/awards/'),
url_test('/firefox/aurora/all/', '/firefox/developer/all/'),
url_test('/projects/firefox/3.6.10/whatsnew/bunny-lebowski/',
'/firefox/3.6.10/whatsnew/bunny-lebowski/'),
url_test('/projects/firefox/4.0/firstrun/', '/firefox/4.0/firstrun/'),
url_test('/projects/firefox/4.0a2/{firstrun,whatsnew}/stuff',
'/firefox/nightly/firstrun/stuff'),
url_test('/{{firefox,mobile}/,}beta/', '/firefox/channel/#beta'),
url_test('/{{firefox,mobile}/,}aurora/', '/firefox/channel/#developer'),
url_test('/firefox/unsupported-systems.html', '/firefox/unsupported-systems/'),
url_test('/download/', '/firefox/new/'),
url_test('/firefox/firefox.exe', '/'),
# should be case insensitive
url_test('/pt-BR/FireFox/Firefox.EXE', '/pt-BR/'),
# bug 821006
url_test('/firefox/all.html', '/firefox/all/'),
# bug 727561
url_test('/firefox/search{,.html}', '/firefox/new/'),
# bug 860865, 1101220
url_test('/firefox/all-{beta,rc}{/,.html}', '/firefox/beta/all/'),
url_test('/firefox/all-aurora{/,.html}', '/firefox/developer/all/'),
url_test('/firefox/aurora/{all,notes,system-requirements}/'
'/firefox/developer/{all,notes,system-requirements}/'),
url_test('/firefox/organizations/all.html', '/firefox/organizations/all/'),
# bug 729329
url_test('/mobile/sync/is/da/best/', '/firefox/sync/'),
# bug 882845
url_test('/firefox/toolkit/download-to-your-devices/because-i-say-so/', '/firefox/new/'),
# bug 1091977
url_test('/ja/contribute/random/stuff/', 'http://www.mozilla.jp/community/'),
# bug 1014823
url_test('/pt-BR/firefox/releases/whatsnew/', '/pt-BR/firefox/whatsnew/'),
# bug 929775
url_test('/firefox/update/and/stuff/', '/firefox/new/', query={
'utm_source': 'firefox-browser',
'utm_medium': 'firefox-browser',
'utm_campaign': 'firefox-update-redirect',
}),
# bug 868182
url_test('/firefox/mobile/faq/?os=firefox-os', '/firefox/os/faq/'),
# Bug 986174
url_test('/{m,{firefox/,}mobile}/features/', '/firefox/android/'),
url_test('/{m,{firefox/,}mobile}/faq/', '/firefox/android/faq/'),
# bug 885799, 952429
url_test('/projects/calendar/holidays.html', '/projects/calendar/holidays/'),
url_test('/en-US/projects/calendar/random/stuff/', '/projects/calendar/'),
# redirects don't catch real urls
url_test('/en-US/projects/calendar/', status_code=200),
url_test('/en-US/projects/calendar/holidays/', status_code=200),
# bug 1124038
url_test('/thunderbird/organizations/{all-esr.html,faq/}', '/thunderbird/organizations/'),
# bug 1123399, 1150649
url_test('/thunderbird/all.htm', '/thunderbird/all/'),
url_test('/thunderbird/all-beta.html', '/thunderbird/beta/all/'),
url_test('/thunderbird/early_releases/downloads/', '/thunderbird/beta/all/'),
url_test('/thunderbird/early_releases/', '/thunderbird/channel/'),
# bug 1081917, 1029829, 1029838
url_test('/thunderbird/releases/0.9.html',
'http://website-archive.mozilla.org/www.mozilla.org/thunderbird_releasenotes'
'/en-US/thunderbird/releases/0.9.html'),
# should catch everything 1.* to 29.*
url_test('/thunderbird/{1,5,15,29}.0beta/{releasenotes,system-requirements}/',
'http://website-archive.mozilla.org/www.mozilla.org/thunderbird_releasenotes'
'/en-US/thunderbird/{1,5,15,29}.0beta/{releasenotes,system-requirements}/'),
# bug 1124042
url_test('/thunderbird/features/email_providers.html', '/thunderbird/email-providers/'),
# bug 1133266
url_test('/thunderbird/legal/privacy/', '/privacy/thunderbird/'),
url_test('/thunderbird/about/privacy-policy/', '/privacy/archive/thunderbird/2010-06/'),
# bug 1196578
url_test('/thunderbird/about/legal/eula/', '/about/legal/eula/'),
url_test('/thunderbird/about/legal/eula/thunderbird2.html', '/about/legal/eula/thunderbird-2/'),
url_test('/thunderbird/about/legal/eula/thunderbird.html', '/about/legal/eula/thunderbird-1.5/'),
# bug 1204579
url_test('/thunderbird/2.0.0.0/eula/', '/about/legal/eula/thunderbird-2/'),
url_test('/thunderbird/about/legal/', '/about/legal/terms/mozilla/'),
url_test('/thunderbird/download/', '/thunderbird/'),
url_test('/thunderbird/about/', 'https://wiki.mozilla.org/Thunderbird'),
url_test('/thunderbird/about/mission/', 'https://wiki.mozilla.org/Thunderbird'),
url_test('/thunderbird/about/{careers,contact,get-involved}/',
'https://wiki.mozilla.org/Thunderbird#Contributing'),
url_test('/thunderbird/community/', 'https://wiki.mozilla.org/Thunderbird#Contributing'),
url_test('/thunderbird/3.1{a,b,rc}{1,2}/',
'http://website-archive.mozilla.org/www.mozilla.org/thunderbird/thunderbird/3.1{a,b,rc}{1,2}/'),
url_test('/thunderbird/{6,7,8,9}.0beta/',
'http://website-archive.mozilla.org/www.mozilla.org/thunderbird/thunderbird/{6,7,8,9}.0beta/'),
url_test('/thunderbird/about/{board,press,staff}/',
'http://website-archive.mozilla.org/www.mozilla.org/thunderbird/thunderbird/about/{board,press,staff}/'),
# bug 1121082
url_test('/hello/', '/firefox/hello/'),
# bug 1148127
url_test('/products/', '/firefox/products/'),
# Bug 1110927
url_test('/firefox/start/central.html', '/firefox/new/'),
url_test('/firefox/sync/firstrun.html', '/firefox/sync/'),
url_test('/firefox/panorama/', 'https://support.mozilla.org/kb/tab-groups-organize-tabs'),
# bug 876810
url_test('/hacking/commit-access-policy/',
'/about/governance/policies/commit/access-policy/'),
url_test('/hacking/committer/{,faq.html}', '/about/governance/policies/commit/'),
url_test('/hacking/notification/', '/about/governance/policies/commit/'),
url_test('/hacking/committer/committers-agreement.{odt,pdf,txt}',
'https://static.mozilla.com/foundation/documents/'
'commit-access/committers-agreement.{odt,pdf,txt}'),
url_test('/hacking/notification/acceptance-email.txt',
'https://static.mozilla.com/foundation/documents/commit-access/acceptance-email.txt'),
# bug 1165344
url_test('/hacking/CVS-Contributor-Form.{pdf,ps}', '/about/governance/policies/commit/'),
url_test('/hacking/{form,getting-cvs-write-access}.html',
'/about/governance/policies/commit/'),
url_test('/hacking/portable-cpp.html',
'https://developer.mozilla.org/docs/Mozilla/C++_Portability_Guide'),
url_test('/hacking/rules.html', 'https://developer.mozilla.org/docs/mozilla-central'),
url_test('/hacking/{module-ownership,reviewers}.html',
'/about/governance/policies/{module-ownership,reviewers}/'),
url_test('/hacking/regression-policy.html', '/about/governance/policies/regressions/'),
# Bug 1040970
url_test('/mozillacareers', 'https://wiki.mozilla.org/People/mozillacareers', query={
'utm_medium': 'redirect',
'utm_source': 'mozillacareers-vanity',
}),
# Bug 987852 & 1201914
url_test('/MPL/Revision-FAQ.html', '/MPL/Revision-FAQ/'),
url_test('/MPL/2.0/index.txt', '/media/MPL/2.0/index.txt'),
# Bug 1090468
url_test('/security/transition.txt', '/media/security/transition.txt'),
# Bug 920212
url_test('/firefox/fx/', '/firefox/new/'),
# Bug 979670, 979531, 1003727, 979664, 979654, 979660, 1150713
url_test('/firefox/features/', '/firefox/desktop/'),
url_test('/firefox/customize/', '/firefox/desktop/customize/'),
url_test('/firefox/{performance,happy,speed,memory}/', '/firefox/desktop/fast/'),
url_test('/firefox/security/', '/firefox/desktop/trust/'),
url_test('/firefox/technology/', 'https://developer.mozilla.org/docs/Tools'),
url_test('/firefox/sms/{,sent}', '/firefox/products/'),
# Bug 979527
url_test('/firefox/central/', '/firefox/new/',
req_headers={'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64; rv:17.0) '
'Gecko/20121202 Firefox/17.0 Iceweasel/17.0.1'},
resp_headers={'vary': 'user-agent'}),
url_test('/firefox/central/',
'https://support.mozilla.org/kb/get-started-firefox-overview-main-features',
req_headers={'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:42.0) '
'Gecko/20100101 Firefox/42.0'},
resp_headers={'vary': 'user-agent'}),
# bug 868169
url_test('/mobile/android-download.html?dude=abiding',
'https://play.google.com/store/apps/details', query={'id': 'org.mozilla.firefox',
'dude': 'abiding'}),
url_test('/mobile/android-download-beta.html?walter=raging',
'https://play.google.com/store/apps/details', query={'id': 'org.mozilla.firefox_beta',
'walter': 'raging'}),
# bug 877198
url_test('/press/news.html', 'http://blog.mozilla.org/press/'),
url_test('/press/mozilla-2003-10-15.html',
'http://blog.mozilla.org/press/2003/10/mozilla-foundation-launches-new-web-browser-and-end-user-services/'),
url_test('/press/mozilla-2004-02-09.html',
'https://blog.mozilla.org/press/2004/02/new-round-of-releases-extends-mozilla-project%C2%92s-standards-based-open-source-offerings/'),
url_test('/press/mozilla-2004-02-17.html',
'http://blog.mozilla.org/press/2004/02/mozilla-gains-stronghold-in-europe/'),
url_test('/press/mozilla-2004-02-26.html',
'https://blog.mozilla.org/press/2004/02/mozilla-foundation-rallies-supporters-to-take-back-the-web/'),
url_test('/press/mozilla-2004-05-03.html',
'http://blog.mozilla.org/press/2004/05/mozilla-foundation-releases-thunderbird-0-6/'),
url_test('/press/mozilla-2004-06-15.html',
'http://blog.mozilla.org/press/2004/06/mozilla-reloads-firefox/'),
url_test('/press/mozilla-2004-06-16.html',
'http://blog.mozilla.org/press/2004/06/mozilla-foundation-releases-thunderbird-0-7/'),
url_test('/press/mozilla-2004-06-30.html',
'http://blog.mozilla.org/press/2013/11/mozilla-foundation-announces-more-open-scriptable-plugins/'),
url_test('/press/mozilla-2004-08-02.html',
'http://blog.mozilla.org/press/2004/08/mozilla-foundation-announces-security-bug-bounty-program/'),
url_test('/press/mozilla-2004-08-10.html',
'http://blog.mozilla.org/press/2004/08/mozilla-foundation-announces-xforms-development-project/'),
url_test('/press/mozilla-2004-08-18.html',
'http://blog.mozilla.org/press/2004/08/mozilla-affiliate-in-japan-kicks-off/'),
url_test('/press/mozilla-2004-09-14-01.html',
'http://blog.mozilla.org/press/2004/09/mozilla-foundation-announces-first-payments-of-security-bug-bounty-program-further-strengthens-browser-security/'),
url_test('/press/mozilla-2004-09-14-02.html',
'http://blog.mozilla.org/press/2013/11/firefox-preview-release-and-thunderbird-0-8-released/'),
url_test('/press/mozilla-2004-09-20.html',
'http://blog.mozilla.org/press/2004/09/mozilla-firefox-preview-release-hits-one-million-downloads-in-first-four-days-of-availability/'),
url_test('/press/mozilla-2004-10-01-02.html',
'http://blog.mozilla.org/press/2004/10/important-security-update-for-firefox-available/'),
url_test('/press/mozilla-2004-11-09.html',
'http://blog.mozilla.org/press/2004/11/mozilla-foundation-releases-the-highly-anticipated-mozilla-firefox-1-0-web-browser/'),
url_test('/press/mozilla-2004-11-22.html',
'http://blog.mozilla.org/press/2004/11/important-update-to-german-language-version-of-firefox-1-0/'),
url_test('/press/mozilla-2004-12-15.html',
'http://blog.mozilla.org/press/2004/12/mozilla-foundation-places-two-page-advocacy-ad-in-the-new-york-times/'),
url_test('/press/mozilla-2004-12-7.html',
'http://blog.mozilla.org/press/2004/12/mozilla-thunderbird-1-0-email-client-has-landed/'),
url_test('/press/mozilla-2005-01-07.html',
'http://blog.mozilla.org/press/2005/01/mozilla-firefox-and-thunderbird-to-support-new-open-standard-platform-for-usb-drives/'),
url_test('/press/mozilla-2005-02-02.html',
'http://blog.mozilla.org/press/2005/02/mozilla-foundation-announces-beta-release-of-xforms-1-0-recommendation/'),
url_test('/press/mozilla-2005-02-16.html',
'http://blog.mozilla.org/press/2005/01/mozilla-firefox-and-thunderbird-to-support-new-open-standard-platform-for-usb-drives/'),
url_test('/press/mozilla-2005-02-24.html',
'http://blog.mozilla.org/press/2005/02/mozilla-foundation-announces-update-to-firefox/'),
url_test('/press/mozilla-2005-03-04.html',
'http://blog.mozilla.org/press/2005/03/mozilla-foundation-expands-with-launch-of-mozilla-china/'),
url_test('/press/mozilla-2005-03-23.html',
'http://blog.mozilla.org/press/2005/03/mozilla-foundation-releases-security-update-to-firefox/'),
url_test('/press/mozilla-2005-03-28.html',
'http://blog.mozilla.org/press/2005/03/mozilla-foundation-awards-bug-bounties/'),
url_test('/press/mozilla-2005-05-13.html',
'http://blog.mozilla.org/press/2005/05/mozilla-foundation-co-hosts-europes-leading-xml-and-web-developer-conference/'),
url_test('/press/mozilla-2005-07-28.html',
'http://blog.mozilla.org/press/2005/07/mozilla-headlines-two-key-open-source-development-conferences-in-august/'),
url_test('/press/mozilla-2005-08-03.html',
'http://blog.mozilla.org/press/2005/08/mozilla-foundation-forms-new-organization-to-further-the-creation-of-free-open-source-internet-software-including-the-award-winning-mozilla-firefox-browser/'),
url_test('/press/mozilla-2005-10-03.html',
'http://blog.mozilla.org/press/2005/10/mozilla-launches-beta-of-comprehensive-online-developer-center/'),
url_test('/press/mozilla-2005-10-19.html',
'http://blog.mozilla.org/press/2005/10/firefox-surpasses-100-million-downloads/'),
url_test('/press/mozilla-2005-11-29.html',
'http://blog.mozilla.org/press/2005/11/mozilla-introduces-firefox-1-5-and-ups-the-ante-in-web-browsing/'),
url_test('/press/mozilla-2005-11-3.html',
'http://blog.mozilla.org/press/2005/11/mozilla-kicks-off-extend-firefox-competition/'),
url_test('/press/mozilla-2005-11-30.html',
'http://blog.mozilla.org/press/2005/11/firefox-1-5-adds-answers-com-for-quick-reference/'),
url_test('/press/mozilla-2005-12-2.html',
'http://blog.mozilla.org/press/2005/12/mozilla-launches-firefox-flicks-campaign/'),
url_test('/press/mozilla-2005-12-22.html',
'http://blog.mozilla.org/press/2005/12/mozilla-launches-firefox-flicks-ad-contest/'),
url_test('/press/mozilla-2006-01-12.html',
'http://blog.mozilla.org/press/2006/01/mozilla-releases-thunderbird-1-5-email-client/'),
url_test('/press/mozilla-2006-01-24.html',
'http://blog.mozilla.org/press/2006/01/firefox-1-5-adoption-rising-as-browser-garners-acclaim/'),
url_test('/press/mozilla-2006-01-25.html',
'http://blog.mozilla.org/press/2006/01/indie-film-all-stars-foin-firefox-flicks-crew/'),
url_test('/press/mozilla-2006-02-03.html',
'http://blog.mozilla.org/press/2006/02/mozilla-releases-preview-of-application-framework-for-development-of-cross-platform-internet-client-applications/'),
url_test('/press/mozilla-2006-03-02.html',
'http://blog.mozilla.org/press/2006/03/mozilla-announces-winners-of-extend-firefox-competition/'),
url_test('/press/mozilla-2006-04-12.html',
'http://blog.mozilla.org/press/2006/04/mozilla-showcases-first-round-of-community-produced-firefox-flicks-videos/'),
url_test('/press/mozilla-2006-04-18.html',
'http://blog.mozilla.org/press/2006/04/mozilla-receives-over-280-community-produced-videos-for-firefox-flicks/'),
url_test('/press/mozilla-2006-04-27.html',
'http://blog.mozilla.org/press/2006/04/firefox-flicks-video-contest-winners-announced/'),
url_test('/press/mozilla-2006-06-14.html',
'http://blog.mozilla.org/press/2006/06/mozilla-feeds-soccer-fans-passion-with-new-firefox-add-on/'),
url_test('/press/mozilla-2006-10-11.html',
'http://blog.mozilla.org/press/2006/10/qualcomm-launches-project-in-collaboration-with-mozilla-foundation-to-develop-open-source-version-of-eudora-email-program/'),
url_test('/press/mozilla-2006-10-24-02.html',
'http://blog.mozilla.org/press/2006/10/firefox-moving-the-internet-forward/'),
url_test('/press/mozilla-2006-10-24.html',
'http://blog.mozilla.org/press/2006/10/mozilla-releases-major-update-to-firefox-and-raises-the-bar-for-online-experience/'),
url_test('/press/mozilla-2006-11-07.html',
'http://blog.mozilla.org/press/2006/11/adobe-and-mozilla-foundation-to-open-source-flash-player-scripting-engine/'),
url_test('/press/mozilla-2006-12-04.html',
'http://blog.mozilla.org/press/2006/12/the-world-economic-forum-announces-technology-pioneers-2007-mozilla-selected/'),
url_test('/press/mozilla-2006-12-11.html',
'http://blog.mozilla.org/press/2006/12/mozilla-firefox-headed-for-primetime/'),
url_test('/press/mozilla-2007-02-07.html',
'http://blog.mozilla.org/press/2007/02/kodak-and-mozilla-join-forces-to-make-sharing-photos-even-easier/'),
url_test('/press/mozilla-2007-03-27.html',
'http://blog.mozilla.org/press/2007/03/mozilla-launches-new-firefox-add-ons-web-site/'),
url_test('/press/mozilla-2007-03-28.html',
'http://blog.mozilla.org/press/2007/03/mozilla-and-ebay-working-together-to-make-the-auction-experience-easier-for-firefox-users-in-france-germany-and-the-uk/'),
url_test('/press/mozilla-2007-04-19.html',
'http://blog.mozilla.org/press/2007/04/mozilla-thunderbird-2-soars-to-new-heights/'),
url_test('/press/mozilla-2007-05-16.html',
'http://blog.mozilla.org/press/2007/05/united-nations-agency-awards-mozilla-world-information-society-award/'),
url_test('/press/mozilla-2007-07-04.html',
'http://blog.mozilla.org/press/2007/07/mozilla-and-ebay-launch-firefox-companion-for-ebay-users/'),
url_test('/press/mozilla-2007-08-10.html',
'http://blog.mozilla.org/press/2007/08/mozilla-to-host-24-hour-worldwide-community-event/'),
url_test('/press/mozilla-2007-08-28.html',
'http://blog.mozilla.org/press/2007/08/mozilla-welcomes-students-back-to-school-with-firefox-campus-edition/'),
url_test('/press/mozilla-2007-09-17-faq.html',
'http://blog.mozilla.org/press/2007/09/mozilla-launches-internet-mail-and-communications-initiative/'),
url_test('/press/mozilla-2007-09-17.html',
'http://blog.mozilla.org/press/2007/09/mozilla-launches-internet-mail-and-communications-initiative/'),
url_test('/press/mozilla-2008-01-07-faq.html',
'http://blog.mozilla.org/press/2008/01/mozilla-appoints-john-lilly-as-chief-executive-officer/'),
url_test('/press/mozilla-2008-01-07.html',
'http://blog.mozilla.org/press/2008/01/mozilla-appoints-john-lilly-as-chief-executive-officer/'),
url_test('/press/mozilla-2008-02-19-faq.html',
'http://blog.mozilla.org/press/2008/02/mozilla-messaging-starts-up-operations/'),
url_test('/press/mozilla-2008-02-19.html',
'http://blog.mozilla.org/press/2008/02/mozilla-messaging-starts-up-operations/'),
url_test('/press/mozilla-2008-05-28.html',
'http://blog.mozilla.org/press/2008/05/mozilla-aims-to-set-guinness-world-record-on-firefox-3-download-day/'),
url_test('/press/mozilla-2008-06-17-faq.html',
'http://blog.mozilla.org/press/2008/06/mozilla-releases-firefox-3-and-redefines-the-web-experience/'),
url_test('/press/mozilla-2008-06-17.html',
'http://blog.mozilla.org/press/2008/06/mozilla-releases-firefox-3-and-redefines-the-web-experience/'),
url_test('/press/mozilla-2008-07-02.html',
'http://blog.mozilla.org/press/2008/07/mozilla-sets-new-guinness-world-record-with-firefox-3-downloads/'),
url_test('/press/mozilla-2008-11-18.html',
'http://blog.mozilla.org/press/2008/11/mozilla-launches-fashion-your-firefox-and-makes-it-easy-to-customize-the-browsing-experience/'),
url_test('/press/mozilla-2008-12-03.html',
'http://blog.mozilla.org/press/2008/12/mozilla-and-zazzle-announce-strategic-relationship-for-apparel-on-demand/'),
url_test('/press/mozilla-2009-03-31.html',
'https://blog.mozilla.org/press/2009/03/%C2%AD%C2%ADmozilla-adds-style-and-star-power-to-firefox-with-new-personas/'),
url_test('/press/mozilla-2009-06-30-faq.html',
'http://blog.mozilla.org/press/2009/04/mozilla-advances-the-web-with-firefox-3-5/'),
url_test('/press/mozilla-2009-06-30.html',
'http://blog.mozilla.org/press/2009/04/mozilla-advances-the-web-with-firefox-3-5/'),
url_test('/press/mozilla-foundation.html',
'http://blog.mozilla.org/press/2003/07/mozilla-org-announces-launch-of-the-mozilla-foundation-to-lead-open-source-browser-efforts/'),
url_test('/press/mozilla1.0.html',
'http://blog.mozilla.org/press/2002/06/mozilla-org-launches-mozilla-1-0/'),
url_test('/press/open-source-security.html',
'http://blog.mozilla.org/press/2000/01/open-source-development-of-security-products-possible-worldwide-enhancing-security-and-privacy-for-e-commerce-and-communication/'),
# Bug 608370, 957664
url_test('/press/kit{.html,s/}', 'https://blog.mozilla.org/press/kits/'),
# bug 957637
url_test('/sopa/',
'https://blog.mozilla.org/blog/2012/01/19/firefox-users-engage-congress-sopa-strike-stats/'),
# bug 675031
url_test('/projects/fennec/is/a/pretty/fox.html',
'http://website-archive.mozilla.org/www.mozilla.org/fennec_releasenotes/projects/fennec/is/a/pretty/fox.html'),
# bug 924687
url_test('/opportunities{,/,/index.html}', 'https://careers.mozilla.org/'),
# bug 884933
url_test('/{m,{firefox/,}mobile}/platforms/',
'https://support.mozilla.org/kb/will-firefox-work-my-mobile-device'),
url_test('/m/', '/firefox/new/'),
# bug 876581
url_test('/firefox/phishing-protection/',
'https://support.mozilla.org/kb/how-does-phishing-and-malware-protection-work'),
# bug 1006079
url_test('/mobile/home/{,index.html}',
'https://blog.mozilla.org/services/2012/08/31/retiring-firefox-home/'),
# bug 949562
url_test('/mobile/home/1.0/releasenotes/{,index.html}',
'http://website-archive.mozilla.org/www.mozilla.org/firefox_home/mobile/home/1.0/releasenotes/'),
url_test('/mobile/home/1.0.2/releasenotes/{,index.html}',
'http://website-archive.mozilla.org/www.mozilla.org/firefox_home/mobile/home/1.0.2/releasenotes/'),
url_test('/mobile/home/faq/{,index.html}',
'http://website-archive.mozilla.org/www.mozilla.org/firefox_home/mobile/home/faq/'),
# bug 960064
url_test('/firefox/vpat-1.5.html',
'http://website-archive.mozilla.org/www.mozilla.org/firefox_vpat/firefox-vpat-1.5.html'),
url_test('/firefox/vpat.html',
'http://website-archive.mozilla.org/www.mozilla.org/firefox_vpat/firefox-vpat-3.html'),
# bug 1068931
url_test('/advocacy/', 'https://advocacy.mozilla.org/'),
# bug 887426
url_test('/about/policies/', '/about/governance/policies/'),
url_test('/about/policies/participation.html', '/about/governance/policies/participation/'),
url_test('/about/policies/policies.html', '/about/governance/policies/'),
# bug 882923
url_test('/opt-out.html', '/privacy/websites/#user-choices'),
# bug 818321
url_test('/projects/security/tld-idn-policy-list.html',
'/about/governance/policies/security-group/tld-idn/'),
url_test('/projects/security/membership-policy.html',
'/about/governance/policies/security-group/membership/'),
url_test('/projects/security/secgrouplist.html',
'/about/governance/policies/security-group/'),
url_test('/projects/security/security-bugs-policy.html',
'/about/governance/policies/security-group/bugs/'),
# bug 818316, 1128579
url_test('/projects/security/certs/', '/about/governance/policies/security-group/certs/'),
url_test('/projects/security/certs/included/', 'https://wiki.mozilla.org/CA:IncludedCAs'),
url_test('/projects/security/certs/pending/', 'https://wiki.mozilla.org/CA:PendingCAs'),
url_test('/about/governance/policies/security-group/certs/included/',
'https://wiki.mozilla.org/CA:IncludedCAs'),
url_test('/about/governance/policies/security-group/certs/pending/',
'https://wiki.mozilla.org/CA:PendingCAs'),
url_test('/projects/security/certs/policy/',
'/about/governance/policies/security-group/certs/policy/'),
url_test('/projects/security/certs/policy/EnforcementPolicy.html',
'/about/governance/policies/security-group/certs/policy/enforcement/'),
url_test('/projects/security/certs/policy/MaintenancePolicy.html',
'/about/governance/policies/security-group/certs/policy/maintenance/'),
url_test('/projects/security/certs/policy/InclusionPolicy.html',
'/about/governance/policies/security-group/certs/policy/inclusion/'),
# bug 926629
url_test('/newsletter/about_mobile/', '/newsletter/'),
url_test('/newsletter/about_mozilla/', '/contribute/'),
url_test('/newsletter/new/', '/newsletter/'),
# bug 818323
url_test('/projects/security/known-vulnerabilities.html', '/security/known-vulnerabilities/'),
url_test('/projects/security/older-vulnerabilities.html',
'/security/known-vulnerabilities/older-vulnerabilities/'),
# bug 1017564
url_test('/mobile/RANDOM-STUFF/system-requirements/',
'https://support.mozilla.org/kb/will-firefox-work-my-mobile-device'),
# bug 1041712, 1069335, 1069902
url_test('/{firefox,mobile}/{2,19,27}.0{a2,beta,.2}/{release,aurora}notes/{,stuff}',
'http://website-archive.mozilla.org/www.mozilla.org/firefox_releasenotes/en-US'
'/{firefox,mobile}/{2,19,27}.0{a2,beta,.2}/{release,aurora}notes/{,stuff}'),
# bug 1090468
url_test('/security/{older-alerts,security-announcement,phishing-test{,-results}}.html',
'http://website-archive.mozilla.org/www.mozilla.org/security/security'
'/{older-alerts,security-announcement,phishing-test{,-results}}.html'),
url_test('/security/iSECPartners_Phishing.pdf',
'http://website-archive.mozilla.org/www.mozilla.org/security/security'
'/iSECPartners_Phishing.pdf'),
# bug 878039
url_test('/access/', 'https://developer.mozilla.org/docs/Web/Accessibility'),
url_test('/access/architecture.html',
'https://developer.mozilla.org/docs/Mozilla/Accessibility/Accessibility_architecture'),
url_test('/access/at-vendors.html',
'https://developer.mozilla.org/docs/Mozilla/Accessibility/Information_for_Assistive_Technology_Vendors'),
url_test('/access/authors.html',
'https://developer.mozilla.org/docs/Web/Accessibility/Information_for_Web_authors'),
url_test('/access/core-developers.html',
'https://developer.mozilla.org/docs/Mozilla/Accessibility/Accessibility_Information_for_Core_Gecko_Developer'),
url_test('/access/evaluators.html',
'https://developer.mozilla.org/docs/Mozilla/Accessibility/Information_for_Governments_and_Other_Organization'),
url_test('/access/event-flow.html',
'https://developer.mozilla.org/docs/Mozilla/Accessibility/Event_Process_Procedure'),
url_test('/access/external-developers.html',
'https://developer.mozilla.org/docs/Mozilla/Accessibility/Information_for_External_Developers_Dealing_with_A#community'),
url_test('/access/features.html',
'https://developer.mozilla.org/docs/Mozilla/Accessibility/Accessibility_Features_in_Firefox'),
url_test('/access/highlevel.html',
'https://developer.mozilla.org/docs/Mozilla/Accessibility/CSUN_Firefox_Materials'),
url_test('/access/platform-apis.html',
'https://developer.mozilla.org/docs/Mozilla/Accessibility/Accessibility_API_cross-reference#Accessible_Roles'),
url_test('/access/plugins-work.html',
'https://developer.mozilla.org/docs/Mozilla/Accessibility/Mozilla_Plugin_Accessibility'),
url_test('/access/prefs-and-apis.html',
'https://developer.mozilla.org/docs/Mozilla/Accessibility/Embedding_API_for_Accessibility'),
url_test('/access/resources.html',
'https://developer.mozilla.org/docs/Mozilla/Accessibility/Links_and_Resources'),
url_test('/access/section508.html',
'https://developer.mozilla.org/docs/Mozilla/Accessibility/Mozilla_s_Section_508_Compliance'),
url_test('/access/today.html',
'https://developer.mozilla.org/docs/Mozilla/Accessibility/Software_accessibility_today'),
url_test('/access/toolkit-checklist.html',
'https://developer.mozilla.org/docs/Mozilla/Accessibility/What_needs_to_be_done_when_building_new_toolkits'),
url_test('/access/ui-developers.html',
'https://developer.mozilla.org/docs/Mozilla/Accessibility/Accessibility_information_for_UI_designers'),
url_test('/access/users.html',
'https://developer.mozilla.org/docs/Mozilla/Accessibility/Information_for_users'),
url_test('/access/w3c-uaag.html',
'https://developer.mozilla.org/docs/Mozilla/Accessibility/UAAG_evaluation_report'),
url_test('/access/w4a.html',
'https://developer.mozilla.org/docs/Mozilla/Accessibility/W4A'),
url_test('/access/windows/at-apis.html',
'https://developer.mozilla.org/docs/Mozilla/Accessibility/Gecko_info_for_Windows_accessibility_vendors'),
url_test('/access/windows/msaa-server.html',
'https://developer.mozilla.org/docs/Web/Accessibility/Implementing_MSAA_server'),
url_test('/access/windows/zoomtext.html',
'https://developer.mozilla.org/docs/Mozilla/Accessibility/ZoomText'),
# bug 1148187
url_test('/access/unix.html',
'http://website-archive.mozilla.org/www.mozilla.org/access/access/unix.html'),
# bug 1216953
url_test('/MPL/MPL-1.0.html',
'http://website-archive.mozilla.org/www.mozilla.org/mpl/MPL/1.0/'),
url_test('/MPL/MPL-1.1.html', '/MPL/1.1/'),
# bug 987852
url_test('/MPL/0.95/stuff.html',
'http://website-archive.mozilla.org/www.mozilla.org/mpl/MPL/0.95/stuff.html'),
url_test('/MPL/1.0/stuff.html',
'http://website-archive.mozilla.org/www.mozilla.org/mpl/MPL/1.0/stuff.html'),
url_test('/MPL/2.0/process/stuff.html',
'http://website-archive.mozilla.org/www.mozilla.org/mpl/MPL/2.0/process/stuff.html'),
url_test('/MPL/NPL/stuff.html',
'http://website-archive.mozilla.org/www.mozilla.org/mpl/MPL/NPL/stuff.html'),
url_test('/MPL/boilerplate-1.1/stuff.html',
'http://website-archive.mozilla.org/www.mozilla.org/mpl/MPL/boilerplate-1.1/stuff.html'),
url_test('/MPL/missing.html',
'http://website-archive.mozilla.org/www.mozilla.org/mpl/MPL/missing.html'),
# bug 858315
url_test('/projects/devpreview/firstrun/', '/firefox/firstrun/'),
url_test('/projects/devpreview/stuff.html',
'http://website-archive.mozilla.org/www.mozilla.org/devpreview_releasenotes/projects/devpreview/stuff.html'),
# bug 947890, 1069902
url_test('/firefox/releases/{0.9.1,1.5.0.1}.html',
'http://website-archive.mozilla.org/www.mozilla.org/firefox_releasenotes/en-US'
'/firefox/releases/{0.9.1,1.5.0.1}.html'),
url_test('/{firefox,mobile}/{2,9,18,25}.0/releasenotes/',
'http://website-archive.mozilla.org/www.mozilla.org/firefox_releasenotes/en-US'
'/{firefox,mobile}/{2,9,18,25}.0/releasenotes/'),
# bug 988746, 989423, 994186, 1153351
url_test('/mobile/{23,28,29}.0/releasenotes/',
'/firefox/android/{23,28,29}.0/releasenotes/'),
url_test('/mobile/{3,4}2.0beta/{aurora,release}notes/',
'/firefox/android/{3,4}2.0beta/{aurora,release}notes/'),
# bug 724682
url_test('/projects/mathml/demo/texvsmml.html',
'https://developer.mozilla.org/docs/Mozilla_MathML_Project/MathML_Torture_Test'),
url_test('/projects/mathml/{,demo/}',
'https://developer.mozilla.org/en-US/docs/Mozilla/MathML_Project'),
url_test('/projects/mathml/fonts/',
'https://developer.mozilla.org/Mozilla_MathML_Project/Fonts'),
url_test('/projects/mathml/screenshots/',
'https://developer.mozilla.org/Mozilla_MathML_Project/Screenshots'),
# bug 961010
url_test('/mobile/credits/credits-people-list.html', '/credits/'),
url_test('/{mobile,fennec}/', '/firefox/partners/'),
# bug 876668
url_test('/mobile/customize/', '/firefox/android/'),
# bug 736934, 860865, 1101220, 1153351
url_test('/mobile/{{beta,aurora}/,}notes/', '/firefox/android/{{beta,aurora}/,}notes/'),
url_test('/firefox/{{beta,aurora,organizations}/,}system-requirements.html',
'/firefox/{{beta,aurora,organizations}/,}system-requirements/'),
# bug 897082
url_test('/about/mozilla-spaces/stuff.html', '/contact/spaces/'),
url_test('/about/contact/stuff.html', '/contact/spaces/'),
url_test('/contribute/local/', '/contact/communities/'),
url_test('/contribute/local/northamerica.html', '/contact/communities/north-america/'),
url_test('/contribute/local/europe.html', '/contact/communities/europe/'),
url_test('/contribute/local/latinamerica.html', '/contact/communities/latin-america/'),
url_test('/contribute/local/africamideast.html', '/contact/communities/africa-middle-east/'),
url_test('/contribute/local/asia.html', '/contact/communities/asia-south-pacific/'),
url_test('/contribute/local/southpole.html', '/contact/communities/antarctica/'),
# bug 875052
url_test('/about/get-involved/whanot/', '/contribute/'),
# bug 1155870
url_test('/firefox/os/{releases,notes}/',
'https://developer.mozilla.org/Firefox_OS/Releases'),
url_test('/firefox/os/{release,}notes/2.0/',
'https://developer.mozilla.org/Firefox_OS/Releases/2.0'),
# bug 878871
url_test('/firefoxos/is.great/', '/firefox/os/'),
# bug 831810 & 1142583
url_test('/{mwc,MWC}/', '/firefox/partners/', query={
'utm_campaign': 'mwc-redirect',
'utm_medium': 'referral',
'utm_source': 'mozilla.org',
}),
# bug 878926
url_test('/{de/,}firefoxflicks/{,stuff}',
'https://firefoxflicks.mozilla.org/{de/,}{,stuff}'),
# bug 849426
url_test('/about/history.html', '/about/history/'),
url_test('/about/bookmarks.html', 'https://wiki.mozilla.org/Historical_Documents'),
url_test('/about/timeline.html', 'https://wiki.mozilla.org/Timeline'),
# bug 1016400
url_test('/about/careers.html', 'https://careers.mozilla.org/'),
# bug 861243 and bug 869489
url_test('/about/manifesto.html', '/about/manifesto/'),
url_test('/about/manifesto.{de,pt-BR}.html', '/{de,pt-BR}/about/manifesto/'),
# bug 856077
url_test('/projects/toolkit/', 'https://developer.mozilla.org/docs/Toolkit_API'),
# bug 877165
url_test('/firefox/connect/random/stuff', '/'),
# bug 657049
url_test('/firefox/accountmanager/', '/persona/'),
# bug 841846
url_test('/firefox/nightly/', 'https://nightly.mozilla.org/'),
# bug 1209720
url_test('/thunderbird/releasenotes' '/thunderbird/notes/'),
url_test('/rhino/download.html',
'https://developer.mozilla.org/docs/Mozilla/Projects/Rhino/Download_Rhino'),
url_test('/rhino/doc.html',
'https://developer.mozilla.org/docs/Mozilla/Projects/Rhino/Documentation'),
url_test('/rhino/random/stuff/', 'https://developer.mozilla.org/docs/Mozilla/Projects/Rhino'),
# Bug 730488 deprecate /firefox/all-older.html
url_test('/firefox/all-older.html', '/firefox/new/'),
# Bug 1209643
url_test('/legal/bylaws_amendment_1.html', '/foundation/documents/bylaws-amendment-1/'),
url_test('/legal/bylaws_amendment_2.html', '/foundation/documents/bylaws-amendment-2/'),
url_test('/legal/articles.html', '/foundation/documents/articles-of-incorporation/'),
url_test('/legal/amendment.html', '/foundation/documents/articles-of-incorporation/amendment/'),
url_test('/legal/bylaws.html', '/foundation/documents/bylaws/'),
# bug 1211007
url_test('/thunderbird/download', '/thunderbird/'),
# bug 1211907
url_test('/firefox/independent', '/firefox/new/'),
url_test('/firefox/personal', '/firefox/new/'),
# bug 960689, 1013349, 896474
url_test('/about/legal.html', '/about/legal/'),
url_test('/about/partnerships.html', '/about/partnerships/'),
# bug 846362
url_test('/community/{index{.{de,fr},}.html,}', '/contribute/'),
# bug 1003703, 1009630
url_test('/firefox/42.0/firstrun/eu/', '/firefox/42.0/firstrun/', query={
'utm_source': 'direct',
'utm_medium': 'none',
'utm_campaign': 'redirect',
'utm_content': 'eu-firstrun-redirect',
}),
# bug 845983
url_test('/metrofirefox/random/stuff/', '/firefox/random/stuff/'),
# bug 860532 - Reidrects for governance pages
url_test('/about/governance.html', '/about/governance/'),
url_test('/about/roles.html', '/about/governance/roles/'),
url_test('/about/organizations.html', '/about/governance/organizations/'),
# bug 876233
url_test('/about/participate/', '/contribute/'),
# bug 790784
url_test('/{about/policies/,foundation/,}privacy-policy{/,.html,}', '/privacy/websites/'),
url_test('/privacy-policy.pdf',
'https://static.mozilla.com/moco/en-US/pdf/mozilla_privacypolicy.pdf'),
# bug 1074354
url_test('/legal/', '/about/legal/'),
# bug 963816
url_test('/legal/privacy/', '/privacy/'),
url_test('/legal/privacy/firefox{/,.html}', '/privacy/firefox/'),
url_test('/legal/privacy/oct-2006', '/privacy/archive/firefox/2006-10/'),
url_test('/legal/privacy/june-2008', '/privacy/archive/firefox/2008-06/'),
url_test('/legal/privacy/jan-2009', '/privacy/archive/firefox/2009-01/'),
url_test('/legal/privacy/sept-2009', '/privacy/archive/firefox/2009-09/'),
url_test('/legal/privacy/jan-2010', '/privacy/archive/firefox/2010-01/'),
url_test('/legal/privacy/dec-2010', '/privacy/archive/firefox/2010-12/'),
url_test('/legal/privacy/june-2011', '/privacy/archive/firefox/2011-06/'),
url_test('/legal/privacy/june-2012', '/privacy/archive/firefox/2012-06/'),
url_test('/legal/privacy/sept-2012', '/privacy/archive/firefox/2012-09/'),
url_test('/legal/privacy/dec-2012', '/privacy/archive/firefox/2012-12/'),
url_test('/legal/privacy/firefox-third-party', '/privacy/archive/firefox/third-party/'),
url_test('/legal/privacy/notices-firefox', '/legal/firefox/'),
url_test('/privacy/policies/{facebook,firefox-os,websites}/',
'/privacy/{facebook,firefox-os,websites}/'),
# bug 1034859
url_test('/en-US/about/buttons/dude.jpg', '/media/img/careers/buttons/dude.jpg'),
# bug 1003737
url_test('/de/impressum/', '/de/about/legal/impressum/'),
# bug 960543
url_test('/firefox/{2,3}.0/eula/random/stuff/', '/legal/eula/firefox-{2,3}/'),
# bug 724633 - Porting foundation pages
# Add redirects for the pdfs that were under /foundation/documents/
# that will now be served from static.mozilla.com/foundation/documents/
# (The links within the foundation pages have been updated, but there are
# probably many links to them from other pages and sites that need to keep
# working.)
url_test('/foundation/documents/random-stuff.pdf',
'https://static.mozilla.com/foundation/documents/random-stuff.pdf'),
url_test('/foundation/donate_form.pdf',
'https://static.mozilla.com/foundation/documents/donate_form.pdf'),
# openwebfund/ and openwebfund/index.html redirect to another site. Careful because
# there are other pages under openwebfund that still need to be served from Bedrock.
url_test('/foundation/openwebfund/',
'https://donate.mozilla.org/?source=owf_redirect'),
url_test('/foundation/donate.html',
'https://donate.mozilla.org/?source=donate_redirect'),
# FIXUPs for changing foo/bar.html to foo/bar/
# Redirect foundation/foo.html to foundation/foo/, with a redirect for the nice search engines
url_test('/foundation/{about,careers,licensing,moco,mocosc}.html',
'/foundation/{about,careers,licensing,moco,mocosc}/'),
# Redirect foundation/anything/foo.html to foundation/anything/foo/,
# with a redirect for the nice search engines
url_test('/foundation/{annualreport,documents,feed-icon-guidelines,'
'licensing,openwebfund,trademarks}/random-stuff.html',
'/foundation/{annualreport,documents,feed-icon-guidelines,'
'licensing,openwebfund,trademarks}/random-stuff/'),
url_test('/foundation/documents/{index,mozilla-2002-financial-faq}.html',
'/foundation/{index,mozilla-2002-financial-faq}/'),
# bug 442671
url_test('/foundation/trademarks/l10n-policy/', '/foundation/trademarks/'),
# Bug 1186373
url_test('/firefox/hello/npssurvey/',
'https://www.surveygizmo.com/s3/2227372/Firefox-Hello-Product-Survey',
status_code=302),
# Bug 1221739
url_test('/firefox/hello/feedbacksurvey/',
'https://www.surveygizmo.com/s3/2319863/d2b7dc4b5687',
status_code=302),
# bug 1224060
url_test('/ja/firefox/ios/1.0/{releasenotes,system-requirements}/',
'http://www.mozilla.jp/firefox/ios/1.0/{releasenotes,system-requirements}/'),
))
| davehunt/bedrock | test_redirects/map_globalconf.py | Python | mpl-2.0 | 49,116 |
# Copyright (c) 2020, Frappe and Contributors
# License: GNU General Public License v3. See license.txt
import frappe
def execute():
    """Rename the misspelled Singles field 'enable_synch' to 'enable_sync'
    for the 'Amazon MWS Settings' doctype, then reload its schema.

    The UPDATE only runs when no 'enable_sync' row exists yet, which makes
    the patch safe to re-run (idempotent).
    """
    existing_rows = frappe.db.sql(
        "SELECT COUNT(*) FROM `tabSingles` WHERE doctype='Amazon MWS Settings' AND field='enable_sync';"
    )[0][0]
    if not existing_rows:
        # No correctly-named row yet: rename the misspelled one in place.
        frappe.db.sql(
            "UPDATE `tabSingles` SET field='enable_sync' WHERE doctype='Amazon MWS Settings' AND field='enable_synch';"
        )
    # Reload the doctype definition so the renamed field is picked up.
    frappe.reload_doc("ERPNext Integrations", "doctype", "Amazon MWS Settings")
| gsnbng/erpnext | erpnext/patches/v12_0/rename_mws_settings_fields.py | Python | agpl-3.0 | 481 |
# -*- coding: utf-8 -*-
from .tests import (
FacetListTestCase,
FacetTestCase,
QueryFacetTestCase,
FacetItemTestCase,
)
from .factories import (
FacetListFactory,
FacetFactory,
FacetItemFactory,
) | Squishymedia/feedingdb | django-faceted-search/faceted_search/tests/__init__.py | Python | gpl-3.0 | 226 |
# UDP clients and server.
# Note that UDP packets may be lost, in which case server may not
# receive all requests (and program may need to be terminated
# explicitly).
import sys, socket
import asyncoro
def server_proc(n, sock, coro=None):
    """Coroutine: receive *n* UDP datagrams on *sock*, printing each
    sender's message and address, then close the socket.

    Note (from the module header): UDP packets may be lost, in which case
    fewer than *n* datagrams arrive and this coroutine keeps waiting.
    """
    remaining = n
    while remaining > 0:
        # Suspend until a datagram arrives; asyncoro resumes us with
        # the (message, address) pair.
        msg, addr = yield sock.recvfrom(1024)
        host, port = addr[0], addr[1]
        print('Received "%s" from %s:%s' % (msg, host, port))
        remaining -= 1
    sock.close()
def client_proc(host, port, coro=None):
    """Coroutine: send one UDP datagram identifying this client's socket
    file descriptor to (host, port), then close the socket."""
    udp_sock = asyncoro.AsyncSocket(
        socket.socket(socket.AF_INET, socket.SOCK_DGRAM))
    payload = 'client socket: %s' % (udp_sock.fileno())
    if sys.version_info.major >= 3:
        # Python 3 sockets send bytes, not str.
        payload = bytes(payload, 'ascii')
    yield udp_sock.sendto(payload, (host, port))
    udp_sock.close()
if __name__ == '__main__':
    # Bind a UDP server socket on an OS-assigned port (port 0) so the
    # clients know where to send their datagrams.
    sock = asyncoro.AsyncSocket(socket.socket(socket.AF_INET, socket.SOCK_DGRAM))
    sock.bind(('127.0.0.1', 0))
    host, port = sock.getsockname()
    n = 50
    # Start the server coroutine first, then launch one client coroutine
    # per expected message.
    server_coro = asyncoro.Coro(server_proc, n, sock)
    for i in range(n):
        asyncoro.Coro(client_proc, host, port)
    # Block until the server coroutine finishes (i.e. has received all n
    # messages).  As the module header warns, lost UDP packets can leave
    # this waiting forever, requiring explicit termination.
    server_coro.value()
| pgiri/asyncoro | examples/udp.py | Python | mit | 1,035 |
from django.contrib import admin
from models import Datalogger
class DataloggerAdmin(admin.ModelAdmin):
    """Admin configuration for Datalogger audit-log entries.

    Shows every audit column in the change list and enables free-text
    search over the identifying/value columns (the create/update
    timestamps are display-only).
    """

    list_display = (
        'event_id',
        'event_type',
        'object_id',
        'model_name',
        'field_name',
        'before_value',
        'after_value',
        'operator',
        'create_at',
        'update_at',
    )
    search_fields = (
        'event_id',
        'event_type',
        'object_id',
        'model_name',
        'field_name',
        'before_value',
        'after_value',
        'operator',
    )


admin.site.register(Datalogger, DataloggerAdmin)
# __about__.py
#
# Copyright (C) 2006-2016 wolfSSL Inc.
#
# This file is part of wolfSSL. (formerly known as CyaSSL)
#
# wolfSSL is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# wolfSSL is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
# Package metadata, kept in one mapping and then promoted to module-level
# dunder attributes (e.g. wolfcrypt.__version__) via globals().update().
metadata = {
    "__name__": "wolfcrypt",
    "__version__": "0.1.8",
    "__license__": "GPLv2 or Commercial License",
    "__author__": "wolfSSL Inc.",
    "__author_email__": "info@wolfssl.com",
    "__url__": "https://wolfssl.github.io/wolfcrypt-py",
    "__description__":
        u"A Python library that encapsulates wolfSSL's wolfCrypt API.",
    "__keywords__": "security, cryptography, ssl, embedded, embedded ssl",
    "__classifiers__": [
        u"License :: OSI Approved :: GNU General Public License v2 (GPLv2)",
        u"License :: Other/Proprietary License",
        u"Operating System :: OS Independent",
        u"Programming Language :: Python :: 2.7",
        u"Programming Language :: Python :: 3.5",
        u"Topic :: Security",
        u"Topic :: Security :: Cryptography",
        u"Topic :: Software Development",
    ],
}

# Expose each metadata entry as a module-level attribute and advertise
# exactly those names through __all__.
globals().update(metadata)

__all__ = list(metadata)
# @file lassdata.py
# @brief load sensor data from lass. management sensor data
# README:
# MODULE_ARCH:
# CLASS_ARCH:
# GLOBAL USAGE:
#standard
import logging
#extend
import urllib
import simplejson
import requests #requests need to load after simplejson
#library
import lib.globalclasses as gc
from lib.const import *
##### Code section #####
#Spec: one lass site
#How/NeedToKnow:
class Site():
    """One LASS/AirBox sensor site.

    Keeps the site's identity and GPS position plus a history of sensor
    readings keyed by timestamp string in '%Y-%m-%dT%H:%M:%SZ' format.
    Each reading is a dict with keys:
      s_t0 -- temperature
      s_d0 -- dust (PM2.5)
      s_h0 -- humidity
    """

    def __init__(self, site_data):
        """site_data: one entry of the 'feeds' list from the LASS snapshot JSON."""
        self.device_id = site_data['device_id']
        self.gps_lat = float(site_data['gps_lat'])
        self.gps_lon = float(site_data['gps_lon'])
        self.site_name = site_data['SiteName']
        # Coerce the initial reading to float for consistency with
        # update_his(), which already stored floats while __init__ kept
        # whatever type the JSON delivered.
        self.sensor_data = {}
        self.sensor_data[site_data['timestamp']] = {
            's_t0': float(site_data['s_t0']),
            's_d0': float(site_data['s_d0']),
            's_h0': float(site_data['s_h0']),
        }
        self.pos_idx = "0@0"  # "x@y" cell index used by the map model

    def in_area(self, area):
        """Return True if this site lies inside *area* = [lon1, lat1, lon2, lat2]
        (bounds inclusive)."""
        return (area[1] <= self.gps_lat <= area[3] and
                area[0] <= self.gps_lon <= area[2])

    def update_his(self, json_data):
        """Merge a 2-day hourly-history JSON ({'feeds': [...]}) into
        sensor_data, skipping malformed entries."""
        for feeds in json_data['feeds']:
            try:
                self.sensor_data[feeds['timestamp']] = {
                    's_t0': float(feeds['temperature']),
                    's_d0': float(feeds['PM2_5']),
                    's_h0': float(feeds['humidity']),
                }
            except (KeyError, TypeError, ValueError):
                # Was a bare ``except:``, which also swallowed SystemExit and
                # KeyboardInterrupt; only data errors should be skipped here.
                print("update_his exception:%s" % (self.device_id))

    def get_data_bytime(self, ts):
        """Return the reading recorded at datetime *ts*, or None if absent."""
        fmt = '%Y-%m-%dT%H:%M:%SZ'
        return self.sensor_data.get(ts.strftime(fmt))

    def desc(self, did):
        """Return a one-line description string.

        *did* is unused; the parameter is kept for backward compatibility
        with existing callers (e.g. ``site.desc(0)``).
        """
        return "device_id=%s,gps_lon=%f,gps_lat=%f,SiteName=%s,pos_idx=%s" % (
            self.device_id, self.gps_lon, self.gps_lat,
            self.site_name, self.pos_idx)
#Spec: lass data mgr
#How/NeedToKnow:
class LassDataMgr():
def __init__(self):
#private
#global: these variables allow to direct access from outside.
self.sites_link={'lass':"http://nrl.iis.sinica.edu.tw/LASS/last-all-lass.json",
'airbox':"http://nrl.iis.sinica.edu.tw/LASS/last-all-airbox.json"
}
self.cur_json={} #lass:data, airbox:data. json load from sites_link
self.his2day_json={} #device_id:data, json load from 2 day history
self.sites = {} #device_id: Site(), all sites
self.site_tag = {} #tag_name: [ device_id list ]
self.load_test_tag()
#to show the latest submission of all alive devices of the PM25 app:
#http://nrl.iis.sinica.edu.tw/LASS/last-all-lass.json
#to show the latest air quality data imported from TPE AirBox Open Data
#http://nrl.iis.sinica.edu.tw/LASS/last-all-airbox.json
#
#{
# "source": "last-all-lass by IIS-NRL",
# "feeds": [
# {
# "gps_lat": 23.705523,
# "s_t0": 26.9,
# "SiteName": "FT3_999",
# "timestamp": "2016-10-23T20:58:04Z",
# "gps_lon": 120.547142,
# "s_d0": 43.0,
# "s_h0": 85.5,
# "device_id": "FT3_999"
# },
# ...
# ],
# "version": "2016-10-23T21:10:02Z",
# "num_of_records": 49
#}
#load from lass original, airbox
# for test purpose
def load_test_tag(self):
self.site_tag['t']=['74DA3895C2B4','74DA3895C214']
#to show the latest submission of all alive devices of the PM25 app:
#http://nrl.iis.sinica.edu.tw/LASS/last-all-lass.json
#to show the latest air quality data imported from TPE AirBox Open Data
#http://nrl.iis.sinica.edu.tw/LASS/last-all-airbox.json
#
#load from lass original, airbox
def load_site_list(self):
for link_key in self.sites_link.keys():
response = urllib.request.urlopen(self.sites_link[link_key])
data = simplejson.load(response)
self.cur_json[link_key] = data
for site_data in data['feeds']:
device_id = site_data['device_id']
site = Site(site_data)
self.sites[device_id] = site
[x,y] = gc.MODEL.map.gps_to_idx([site.gps_lon,site.gps_lat])
site.pos_idx = "%i@%i" %(x,y)
#print(site.desc(0))
#print(data["version"])
#print(data["feeds"])
#print(len(data["feeds"]))
#print(data["feeds"][0]['device_id'])
#show the hourly average of a partucylar PM25 device in the past two days:
#http://nrl.iis.sinica.edu.tw/LASS/history-hourly.php?device_id=FT1_001
#{
# "device_id": "FT1_001",
# "feeds": [
# {
# "timestamp": "2016-10-25T00:00:00Z",
# "temperature": 31.61,
# "humidity": 81.14,
# "PM2_5": 7.62,
# "PM10": 9.15
# },
# ...
# ]
#}
#load from 2 day history json
def load_site_history_of_2day(self,device_id):
json_link = "http://nrl.iis.sinica.edu.tw/LASS/history-hourly.php?device_id=%s" %(device_id)
response = urllib.request.urlopen(json_link)
his_json_data = simplejson.load(response)
self.his2day_json[device_id] = his_json_data
site = self.sites[device_id]
site.update_his(his_json_data)
#load all devices_id that list by tag_name
def load_his_by_tag(self,tag_name):
for device_id in self.site_tag[tag_name]:
logging.info("loading history json for %s" %(device_id))
self.load_site_history_of_2day(device_id)
# find site by area and tag a name
def tag_site_by_area(self,name,area ): #area = [long1,lat1,long2,lat2]
for site_key in self.sites:
site = self.sites[site_key]
if site.in_area(area):
if name in self.site_tag:
self.site_tag[name].append(site.device_id)
#print("%s" %(site.device_id) )
else:
self.site_tag[name]=[site.device_id]
#by using map_time to get sensor data and update to the map
def apply_to_map(self,map,map_time,tag_name): #map_time: datetime
for device_id in self.site_tag[tag_name]:
site = self.sites[device_id]
sensor_data = site.get_data_bytime(map_time)
if sensor_data:
[x,y] = map.gps_to_idx([site.gps_lon,site.gps_lat])
pos_idx = "%i@%i" % (x,y)
#print("apply_to_map pos_idx=%s" %(pos_idx))
map.poss[pos_idx].pm_set(sensor_data['s_d0'])
# find all device's pos_idx
def get_posidx_by_tag(self,tag_name): # return [ pos_idx_string ]
ret=[]
for device_id in self.site_tag[tag_name]:
site = self.sites[device_id]
ret.append( site.pos_idx)
return ret
def save_csv(self,tag_name,pathname):
header="timestamp,device_Id, SiteName, gps_lon , gps_lat, PM2_5, PM10, temperature, humidity\n"
str_output = ""
try:
for device_id in self.site_tag[tag_name]:
site = self.sites[device_id]
for feeds in self.his2day_json[device_id]['feeds']:
#timestamp,device_Id, SiteName, gps_lon , gps_lat, PM2_5, PM10, temperature, humidity
#ts_format = "yyyy-MM-dd HH:mm:ss" #2016-10-25T00:00:00Z
ts_format = feeds['timestamp'].replace('T',' ').replace('Z','')
str_output +="%s,%s,%s,%f,%f,%f,%f,%f,%f\n" %(ts_format,site.device_id, site.site_name, site.gps_lon , site.gps_lat, feeds['PM2_5'], feeds['PM10'], feeds['temperature'], feeds['humidity'])
except :
print("load history from %s have problem!" %(device_id))
with open(pathname, "w") as text_file:
text_file.write("%s%s" % (header,str_output))
    def desc(self,did):
        """Dump a human-readable summary of all loaded LASS data to stdout.

        NOTE(review): the *did* parameter is unused -- confirm whether it
        was meant to filter the output to a single device.
        """
        print("LASS data - All SiteName" )
        # current snapshot: one entry per data source, each with its feeds
        for data_key in self.cur_json.keys():
            print("data_key=%s,count=%i" % (data_key,len(self.cur_json[data_key]['feeds'])))
            for site in self.cur_json[data_key]['feeds']:
                print(site['SiteName'])
        # tag membership: which device ids belong to each tag
        for tag_key in self.site_tag:
            print("tag %s count=%i, include:\n%s" % (tag_key,len(self.site_tag[tag_key]),self.site_tag[tag_key]))
        # 2-day history: one "value@timestamp" line per record
        for data_key in self.his2day_json.keys():
            for record in self.his2day_json[data_key]['feeds']:
                print("%s@%s" %(record['PM2_5'],record['timestamp'])) | LinkItONEDevGroup/LASS | LASS-Simulator/codes/lassdata.py | Python | mit | 8,420 |
"""
The DoInterestManager keeps track of which parent/zones that we currently
have interest in. When you want to "look" into a zone you add an interest
to that zone. When you want to get rid of, or ignore, the objects in that
zone, remove interest in that zone.
p.s. A great deal of this code is just code moved from ClientRepository.py.
"""
from panda3d.core import *
from panda3d.direct import *
from .MsgTypes import *
from direct.showbase.PythonUtil import *
from direct.showbase import DirectObject
from .PyDatagram import PyDatagram
from direct.directnotify.DirectNotifyGlobal import directNotify
import types
from direct.showbase.PythonUtil import report
class InterestState:
    """Book-keeping record for one open interest (zone subscription)."""

    StateActive = 'Active'
    StatePendingDel = 'PendingDel'

    def __init__(self, desc, state, context, event, parentId, zoneIdList,
                 eventCounter, auto=False):
        self.desc = desc
        self.state = state
        self.context = context
        self.parentId = parentId
        self.zoneIdList = zoneIdList
        self.auto = auto
        # Several change/removal requests may be in flight for the same
        # interest at once, so every pending completion event is tracked,
        # not just the most recent one.
        self.events = []
        self.eventCounter = eventCounter
        if event:
            self.addEvent(event)

    def addEvent(self, event):
        """Queue a completion event and bump the shared outstanding count."""
        self.events.append(event)
        self.eventCounter.num += 1

    def getEvents(self):
        """Return a snapshot copy of the queued completion events."""
        return list(self.events)

    def clearEvents(self):
        """Drop all queued events, releasing their share of the counter."""
        self.eventCounter.num -= len(self.events)
        assert self.eventCounter.num >= 0
        self.events = []

    def sendEvents(self):
        """Fire every queued event through the messenger, then clear them."""
        for event in self.events:
            messenger.send(event)
        self.clearEvents()

    def setDesc(self, desc):
        self.desc = desc

    def isPendingDelete(self):
        """True while this interest awaits server removal confirmation."""
        return self.state == InterestState.StatePendingDel

    def __repr__(self):
        return 'InterestState(desc=%s, state=%s, context=%s, event=%s, parentId=%s, zoneIdList=%s)' % (
            self.desc, self.state, self.context, self.events, self.parentId, self.zoneIdList)
class InterestHandle:
    """This class helps to ensure that valid handles get passed in to DoInterestManager funcs"""

    def __init__(self, id):
        self._id = id

    def asInt(self):
        """Return the raw integer handle value."""
        return self._id

    def __eq__(self, other):
        # Compares equal both to another InterestHandle and to a bare int.
        if type(self) == type(other):
            return self._id == other._id
        return self._id == other

    def __hash__(self):
        # Defining __eq__ alone would make instances unhashable under
        # Python 3; hash on the wrapped id so it stays consistent with
        # the int/handle equality above.
        return hash(self._id)

    def __repr__(self):
        return '%s(%s)' % (self.__class__.__name__, self._id)
# context value for interest changes that have no complete event
NO_CONTEXT = 0
class DoInterestManager(DirectObject.DirectObject):
"""
Top level Interest Manager
"""
notify = directNotify.newCategory("DoInterestManager")
InterestDebug = ConfigVariableBool('interest-debug', False)
# 'handle' is a number that represents a single interest set that the
# client has requested; the interest set may be modified
_HandleSerialNum = 0
# high bit is reserved for server interests
_HandleMask = 0x7FFF
# 'context' refers to a single request to change an interest set
_ContextIdSerialNum = 100
_ContextIdMask = 0x3FFFFFFF # avoid making Python create a long
_interests = {}
if __debug__:
_debug_interestHistory = []
_debug_maxDescriptionLen = 40
_SerialGen = SerialNumGen()
_SerialNum = serialNum()
    def __init__(self):
        """Set up per-instance interest-tracking state and event names."""
        assert DoInterestManager.notify.debugCall()
        DirectObject.DirectObject.__init__(self)
        # unique messenger event names fired when interests are added/removed
        self._addInterestEvent = uniqueName('DoInterestManager-Add')
        self._removeInterestEvent = uniqueName('DoInterestManager-Remove')
        # when True, addInterest/alterInterest requests are refused
        self._noNewInterests = False
        # FrameDelayedCall used to debounce the all-interests-complete event
        self._completeDelayedCallback = None
        # keep track of request contexts that have not completed
        self._completeEventCount = ScratchPad(num=0)
        # callbacks to fire once every outstanding interest completes
        self._allInterestsCompleteCallbacks = []
def __verbose(self):
return self.InterestDebug.getValue() or self.getVerbose()
def _getAnonymousEvent(self, desc):
return 'anonymous-%s-%s' % (desc, DoInterestManager._SerialGen.next())
def setNoNewInterests(self, flag):
self._noNewInterests = flag
def noNewInterests(self):
return self._noNewInterests
def setAllInterestsCompleteCallback(self, callback):
if ((self._completeEventCount.num == 0) and
(self._completeDelayedCallback is None)):
callback()
else:
self._allInterestsCompleteCallbacks.append(callback)
def getAllInterestsCompleteEvent(self):
return 'allInterestsComplete-%s' % DoInterestManager._SerialNum
def resetInterestStateForConnectionLoss(self):
DoInterestManager._interests.clear()
self._completeEventCount = ScratchPad(num=0)
if __debug__:
self._addDebugInterestHistory("RESET", "", 0, 0, 0, [])
def isValidInterestHandle(self, handle):
# pass in a handle (or anything else) and this will return true if it is
# still a valid interest handle
if not isinstance(handle, InterestHandle):
return False
return handle.asInt() in DoInterestManager._interests
def updateInterestDescription(self, handle, desc):
iState = DoInterestManager._interests.get(handle.asInt())
if iState:
iState.setDesc(desc)
    def addInterest(self, parentId, zoneIdList, description, event=None):
        """
        Look into a (set of) zone(s).

        parentId    -- doId of the object the zones hang under
        zoneIdList  -- a single zoneId or a list of zoneIds
        description -- human-readable label for debugging
        event       -- optional messenger event fired once the server
                       confirms the interest is open

        Returns an InterestHandle, or None when new interests are
        currently disallowed (see setNoNewInterests).
        """
        assert DoInterestManager.notify.debugCall()
        handle = self._getNextHandle()
        # print 'base.cr.addInterest(',description,',',handle,'):',globalClock.getFrameCount()
        if self._noNewInterests:
            DoInterestManager.notify.warning(
                "addInterest: addingInterests on delete: %s" % (handle))
            return
        # make sure we've got parenting rules set in the DC
        if parentId not in (self.getGameDoId(),):
            parent = self.getDo(parentId)
            if not parent:
                DoInterestManager.notify.error(
                    'addInterest: attempting to add interest under unknown object %s' % parentId)
            else:
                if not parent.hasParentingRules():
                    DoInterestManager.notify.error(
                        'addInterest: no setParentingRules defined in the DC for object %s (%s)'
                        '' % (parentId, parent.__class__.__name__))
        # a context id is only allocated when the caller wants a
        # completion event; context 0 (NO_CONTEXT) means "no notice"
        if event:
            contextId = self._getNextContextId()
        else:
            contextId = 0
            # event = self._getAnonymousEvent('addInterest')
        DoInterestManager._interests[handle] = InterestState(
            description, InterestState.StateActive, contextId, event, parentId, zoneIdList, self._completeEventCount)
        if self.__verbose():
            print('CR::INTEREST.addInterest(handle=%s, parentId=%s, zoneIdList=%s, description=%s, event=%s)' % (
                handle, parentId, zoneIdList, description, event))
        self._sendAddInterest(handle, contextId, parentId, zoneIdList, description)
        if event:
            messenger.send(self._getAddInterestEvent(), [event])
        assert self.printInterestsIfDebug()
        return InterestHandle(handle)
def addAutoInterest(self, parentId, zoneIdList, description):
"""
Look into a (set of) zone(s).
"""
assert DoInterestManager.notify.debugCall()
handle = self._getNextHandle()
if self._noNewInterests:
DoInterestManager.notify.warning(
"addInterest: addingInterests on delete: %s" % (handle))
return
# make sure we've got parenting rules set in the DC
if parentId not in (self.getGameDoId(),):
parent = self.getDo(parentId)
if not parent:
DoInterestManager.notify.error(
'addInterest: attempting to add interest under unknown object %s' % parentId)
else:
if not parent.hasParentingRules():
DoInterestManager.notify.error(
'addInterest: no setParentingRules defined in the DC for object %s (%s)'
'' % (parentId, parent.__class__.__name__))
DoInterestManager._interests[handle] = InterestState(
description, InterestState.StateActive, 0, None, parentId, zoneIdList, self._completeEventCount, True)
if self.__verbose():
print('CR::INTEREST.addInterest(handle=%s, parentId=%s, zoneIdList=%s, description=%s)' % (
handle, parentId, zoneIdList, description))
assert self.printInterestsIfDebug()
return InterestHandle(handle)
def removeInterest(self, handle, event = None):
"""
Stop looking in a (set of) zone(s)
"""
# print 'base.cr.removeInterest(',handle,'):',globalClock.getFrameCount()
assert DoInterestManager.notify.debugCall()
assert isinstance(handle, InterestHandle)
existed = False
if not event:
event = self._getAnonymousEvent('removeInterest')
handle = handle.asInt()
if handle in DoInterestManager._interests:
existed = True
intState = DoInterestManager._interests[handle]
if event:
messenger.send(self._getRemoveInterestEvent(),
[event, intState.parentId, intState.zoneIdList])
if intState.isPendingDelete():
self.notify.warning(
'removeInterest: interest %s already pending removal' %
handle)
# this interest is already pending delete, so let's just tack this
# callback onto the list
if event is not None:
intState.addEvent(event)
else:
if len(intState.events) > 0:
# we're not pending a removal, but we have outstanding events?
# probably we are waiting for an add/alter complete.
# should we send those events now?
assert self.notify.warning('removeInterest: abandoning events: %s' %
intState.events)
intState.clearEvents()
intState.state = InterestState.StatePendingDel
contextId = self._getNextContextId()
intState.context = contextId
if event:
intState.addEvent(event)
self._sendRemoveInterest(handle, contextId)
if not event:
self._considerRemoveInterest(handle)
if self.__verbose():
print('CR::INTEREST.removeInterest(handle=%s, event=%s)' % (
handle, event))
else:
DoInterestManager.notify.warning(
"removeInterest: handle not found: %s" % (handle))
assert self.printInterestsIfDebug()
return existed
def removeAutoInterest(self, handle):
"""
Stop looking in a (set of) zone(s)
"""
assert DoInterestManager.notify.debugCall()
assert isinstance(handle, InterestHandle)
existed = False
handle = handle.asInt()
if handle in DoInterestManager._interests:
existed = True
intState = DoInterestManager._interests[handle]
if intState.isPendingDelete():
self.notify.warning(
'removeInterest: interest %s already pending removal' %
handle)
# this interest is already pending delete, so let's just tack this
# callback onto the list
else:
if len(intState.events) > 0:
# we're not pending a removal, but we have outstanding events?
# probably we are waiting for an add/alter complete.
# should we send those events now?
self.notify.warning('removeInterest: abandoning events: %s' %
intState.events)
intState.clearEvents()
intState.state = InterestState.StatePendingDel
self._considerRemoveInterest(handle)
if self.__verbose():
print('CR::INTEREST.removeAutoInterest(handle=%s)' % (handle))
else:
DoInterestManager.notify.warning(
"removeInterest: handle not found: %s" % (handle))
assert self.printInterestsIfDebug()
return existed
@report(types = ['args'], dConfigParam = 'guildmgr')
def removeAIInterest(self, handle):
"""
handle is NOT an InterestHandle. It's just a bare integer representing an
AI opened interest. We're making the client close down this interest since
the AI has trouble removing interests(that its opened) when the avatar goes
offline. See GuildManager(UD) for how it's being used.
"""
self._sendRemoveAIInterest(handle)
def alterInterest(self, handle, parentId, zoneIdList, description=None,
event=None):
"""
Removes old interests and adds new interests.
Note that when an interest is changed, only the most recent
change's event will be triggered. Previous events are abandoned.
If this is a problem, consider opening multiple interests.
"""
assert DoInterestManager.notify.debugCall()
assert isinstance(handle, InterestHandle)
#assert not self._noNewInterests
handle = handle.asInt()
if self._noNewInterests:
DoInterestManager.notify.warning(
"alterInterest: addingInterests on delete: %s" % (handle))
return
exists = False
if event is None:
event = self._getAnonymousEvent('alterInterest')
if handle in DoInterestManager._interests:
if description is not None:
DoInterestManager._interests[handle].desc = description
else:
description = DoInterestManager._interests[handle].desc
# are we overriding an existing change?
if DoInterestManager._interests[handle].context != NO_CONTEXT:
DoInterestManager._interests[handle].clearEvents()
contextId = self._getNextContextId()
DoInterestManager._interests[handle].context = contextId
DoInterestManager._interests[handle].parentId = parentId
DoInterestManager._interests[handle].zoneIdList = zoneIdList
DoInterestManager._interests[handle].addEvent(event)
if self.__verbose():
print('CR::INTEREST.alterInterest(handle=%s, parentId=%s, zoneIdList=%s, description=%s, event=%s)' % (
handle, parentId, zoneIdList, description, event))
self._sendAddInterest(handle, contextId, parentId, zoneIdList, description, action='modify')
exists = True
assert self.printInterestsIfDebug()
else:
DoInterestManager.notify.warning(
"alterInterest: handle not found: %s" % (handle))
return exists
def openAutoInterests(self, obj):
if hasattr(obj, '_autoInterestHandle'):
# must be multiple inheritance
self.notify.debug('openAutoInterests(%s): interests already open' % obj.__class__.__name__)
return
autoInterests = obj.getAutoInterests()
obj._autoInterestHandle = None
if not len(autoInterests):
return
obj._autoInterestHandle = self.addAutoInterest(obj.doId, autoInterests, '%s-autoInterest' % obj.__class__.__name__)
def closeAutoInterests(self, obj):
if not hasattr(obj, '_autoInterestHandle'):
# must be multiple inheritance
self.notify.debug('closeAutoInterests(%s): interests already closed' % obj)
return
if obj._autoInterestHandle is not None:
self.removeAutoInterest(obj._autoInterestHandle)
del obj._autoInterestHandle
# events for InterestWatcher
def _getAddInterestEvent(self):
return self._addInterestEvent
def _getRemoveInterestEvent(self):
return self._removeInterestEvent
def _getInterestState(self, handle):
return DoInterestManager._interests[handle]
    def _getNextHandle(self):
        """Allocate the next free interest handle.

        Handles are 15-bit values (the high bit is reserved for
        server-side interests), so the serial number wraps via
        _HandleMask; handles still in use are skipped with a warning.
        """
        handle = DoInterestManager._HandleSerialNum
        while True:
            handle = (handle + 1) & DoInterestManager._HandleMask
            # skip handles that are already in use
            if handle not in DoInterestManager._interests:
                break
            DoInterestManager.notify.warning(
                'interest %s already in use' % handle)
        DoInterestManager._HandleSerialNum = handle
        return DoInterestManager._HandleSerialNum
    def _getNextContextId(self):
        """Allocate the next context id for an interest-change request.

        Wraps via _ContextIdMask and never returns NO_CONTEXT (0), which
        is reserved to mean "no completion event expected".
        """
        contextId = DoInterestManager._ContextIdSerialNum
        while True:
            contextId = (contextId + 1) & DoInterestManager._ContextIdMask
            # skip over the 'no context' id
            if contextId != NO_CONTEXT:
                break
        DoInterestManager._ContextIdSerialNum = contextId
        return DoInterestManager._ContextIdSerialNum
    def _considerRemoveInterest(self, handle):
        """
        Consider whether we should cull the interest set.

        An interest is actually deleted only once it is both marked
        pending-delete and has no outstanding request context (i.e. the
        server has already acknowledged the removal).
        """
        assert DoInterestManager.notify.debugCall()
        if handle in DoInterestManager._interests:
            if DoInterestManager._interests[handle].isPendingDelete():
                # make sure there is no pending event for this interest
                if DoInterestManager._interests[handle].context == NO_CONTEXT:
                    assert len(DoInterestManager._interests[handle].events) == 0
                    del DoInterestManager._interests[handle]
if __debug__:
def printInterestsIfDebug(self):
if DoInterestManager.notify.getDebug():
self.printInterests()
return 1 # for assert
def _addDebugInterestHistory(self, action, description, handle,
contextId, parentId, zoneIdList):
if description is None:
description = ''
DoInterestManager._debug_interestHistory.append(
(action, description, handle, contextId, parentId, zoneIdList))
DoInterestManager._debug_maxDescriptionLen = max(
DoInterestManager._debug_maxDescriptionLen, len(description))
def printInterestHistory(self):
print("***************** Interest History *************")
format = '%9s %' + str(DoInterestManager._debug_maxDescriptionLen) + 's %6s %6s %9s %s'
print(format % (
"Action", "Description", "Handle", "Context", "ParentId",
"ZoneIdList"))
for i in DoInterestManager._debug_interestHistory:
print(format % tuple(i))
print("Note: interests with a Context of 0 do not get" \
" done/finished notices.")
def printInterestSets(self):
print("******************* Interest Sets **************")
format = '%6s %' + str(DoInterestManager._debug_maxDescriptionLen) + 's %11s %11s %8s %8s %8s'
print(format % (
"Handle", "Description",
"ParentId", "ZoneIdList",
"State", "Context",
"Event"))
for id, state in DoInterestManager._interests.items():
if len(state.events) == 0:
event = ''
elif len(state.events) == 1:
event = state.events[0]
else:
event = state.events
print(format % (id, state.desc,
state.parentId, state.zoneIdList,
state.state, state.context,
event))
print("************************************************")
def printInterests(self):
self.printInterestHistory()
self.printInterestSets()
def _sendAddInterest(self, handle, contextId, parentId, zoneIdList, description,
action=None):
"""
Part of the new otp-server code.
handle is a client-side created number that refers to
a set of interests. The same handle number doesn't
necessarily have any relationship to the same handle
on another client.
"""
assert DoInterestManager.notify.debugCall()
if __debug__:
if isinstance(zoneIdList, list):
zoneIdList.sort()
if action is None:
action = 'add'
self._addDebugInterestHistory(
action, description, handle, contextId, parentId, zoneIdList)
if parentId == 0:
DoInterestManager.notify.error(
'trying to set interest to invalid parent: %s' % parentId)
datagram = PyDatagram()
# Add message type
if isinstance(zoneIdList, list):
vzl = list(zoneIdList)
vzl.sort()
uniqueElements(vzl)
datagram.addUint16(CLIENT_ADD_INTEREST_MULTIPLE)
datagram.addUint32(contextId)
datagram.addUint16(handle)
datagram.addUint32(parentId)
datagram.addUint16(len(vzl))
for zone in vzl:
datagram.addUint32(zone)
else:
datagram.addUint16(CLIENT_ADD_INTEREST)
datagram.addUint32(contextId)
datagram.addUint16(handle)
datagram.addUint32(parentId)
datagram.addUint32(zoneIdList)
self.send(datagram)
def _sendRemoveInterest(self, handle, contextId):
"""
handle is a client-side created number that refers to
a set of interests. The same handle number doesn't
necessarily have any relationship to the same handle
on another client.
"""
assert DoInterestManager.notify.debugCall()
assert handle in DoInterestManager._interests
datagram = PyDatagram()
# Add message type
datagram.addUint16(CLIENT_REMOVE_INTEREST)
datagram.addUint32(contextId)
datagram.addUint16(handle)
self.send(datagram)
if __debug__:
state = DoInterestManager._interests[handle]
self._addDebugInterestHistory(
"remove", state.desc, handle, contextId,
state.parentId, state.zoneIdList)
def _sendRemoveAIInterest(self, handle):
"""
handle is a bare int, NOT an InterestHandle. Use this to
close an AI opened interest.
"""
datagram = PyDatagram()
# Add message type
datagram.addUint16(CLIENT_REMOVE_INTEREST)
datagram.addUint16((1<<15) + handle)
self.send(datagram)
def cleanupWaitAllInterestsComplete(self):
if self._completeDelayedCallback is not None:
self._completeDelayedCallback.destroy()
self._completeDelayedCallback = None
def queueAllInterestsCompleteEvent(self, frames=5):
# wait for N frames, if no new interests, send out all-done event
# calling this is OK even if there are no pending interest completes
def checkMoreInterests():
# if there are new interests, cancel this delayed callback, another
# will automatically be scheduled when all interests complete
# print 'checkMoreInterests(',self._completeEventCount.num,'):',globalClock.getFrameCount()
return self._completeEventCount.num > 0
def sendEvent():
messenger.send(self.getAllInterestsCompleteEvent())
for callback in self._allInterestsCompleteCallbacks:
callback()
self._allInterestsCompleteCallbacks = []
self.cleanupWaitAllInterestsComplete()
self._completeDelayedCallback = FrameDelayedCall(
'waitForAllInterestCompletes',
callback=sendEvent,
frames=frames,
cancelFunc=checkMoreInterests)
checkMoreInterests = None
sendEvent = None
def handleInterestDoneMessage(self, di):
"""
This handles the interest done messages and may dispatch an event
"""
assert DoInterestManager.notify.debugCall()
contextId = di.getUint32()
handle = di.getUint16()
if self.__verbose():
print('CR::INTEREST.interestDone(handle=%s)' % handle)
DoInterestManager.notify.debug(
"handleInterestDoneMessage--> Received handle %s, context %s" % (
handle, contextId))
if handle in DoInterestManager._interests:
eventsToSend = []
# if the context matches, send out the event
if contextId == DoInterestManager._interests[handle].context:
DoInterestManager._interests[handle].context = NO_CONTEXT
# the event handlers may call back into the interest manager. Send out
# the events after we're once again in a stable state.
#DoInterestManager._interests[handle].sendEvents()
eventsToSend = list(DoInterestManager._interests[handle].getEvents())
DoInterestManager._interests[handle].clearEvents()
else:
DoInterestManager.notify.debug(
"handleInterestDoneMessage--> handle: %s: Expecting context %s, got %s" % (
handle, DoInterestManager._interests[handle].context, contextId))
if __debug__:
state = DoInterestManager._interests[handle]
self._addDebugInterestHistory(
"finished", state.desc, handle, contextId, state.parentId,
state.zoneIdList)
self._considerRemoveInterest(handle)
for event in eventsToSend:
messenger.send(event)
else:
DoInterestManager.notify.warning(
"handleInterestDoneMessage: handle not found: %s" % (handle))
# if there are no more outstanding interest-completes, send out global all-done event
if self._completeEventCount.num == 0:
self.queueAllInterestsCompleteEvent()
assert self.printInterestsIfDebug()
if __debug__:
import unittest
class AsyncTestCase(unittest.TestCase):
def setCompleted(self):
self._async_completed = True
def isCompleted(self):
return getattr(self, '_async_completed', False)
class AsyncTestSuite(unittest.TestSuite):
pass
class AsyncTestLoader(unittest.TestLoader):
suiteClass = AsyncTestSuite
class AsyncTextTestRunner(unittest.TextTestRunner):
def run(self, testCase):
result = self._makeResult()
startTime = time.time()
test(result)
stopTime = time.time()
timeTaken = stopTime - startTime
result.printErrors()
self.stream.writeln(result.separator2)
run = result.testsRun
self.stream.writeln("Ran %d test%s in %.3fs" %
(run, run != 1 and "s" or "", timeTaken))
self.stream.writeln()
if not result.wasSuccessful():
self.stream.write("FAILED (")
failed, errored = map(len, (result.failures, result.errors))
if failed:
self.stream.write("failures=%d" % failed)
if errored:
if failed: self.stream.write(", ")
self.stream.write("errors=%d" % errored)
self.stream.writeln(")")
else:
self.stream.writeln("OK")
return result
class TestInterestAddRemove(AsyncTestCase, DirectObject.DirectObject):
def testInterestAdd(self):
event = uniqueName('InterestAdd')
self.acceptOnce(event, self.gotInterestAddResponse)
self.handle = base.cr.addInterest(base.cr.GameGlobalsId, 100, 'TestInterest', event=event)
def gotInterestAddResponse(self):
event = uniqueName('InterestRemove')
self.acceptOnce(event, self.gotInterestRemoveResponse)
base.cr.removeInterest(self.handle, event=event)
def gotInterestRemoveResponse(self):
self.setCompleted()
def runTests():
suite = unittest.makeSuite(TestInterestAddRemove)
unittest.AsyncTextTestRunner(verbosity=2).run(suite)
| chandler14362/panda3d | direct/src/distributed/DoInterestManager.py | Python | bsd-3-clause | 29,162 |
# -*- coding: utf-8 -*-
"""
uds.is.mysql
~~~~~~~~~~~~
:copyright: Copyright (c) 2015, National Institute of Information and Communications Technology.All rights reserved.
:license: GPL2, see LICENSE for more details.
"""
import MySQLdb
import uds.logging
from uds.io.base import M2MDataDao
class MySQLDao(M2MDataDao):
"""
"""
def __init__(self, client, table_name):
super(MySQLDao, self).__init__()
self._client = client
self._table_name = table_name
def reconnect(self):
self._client.disconnect()
self._client.connect()
def select_last(self, key_data):
pass
def insert(self, m2m_data):
columns = [] # Column Names
values = [] # Datum Values
for datum in m2m_data.data_values:
for key, value in datum.items():
if value is not None:
columns.append(key)
values.append(value)
for key, value in m2m_data.device_info.items():
if value is not None:
columns.append(key)
values.append(value)
for key, value in m2m_data.data_units.items():
if key in ['time', 'longitude', 'latitude']:
continue
if value is not None:
columns.append('unit_' + key)
values.append(value)
for key, value in m2m_data.info_summary.items():
if value is not None:
columns.append(key)
values.append(value)
if 'timezone' in columns:
pass
else:
columns.append('timezone')
values.append(m2m_data.dict['primary']['timezone'])
self.insert_values(columns, values)
self._client.commit()
def insert_json(self, hash):
"""Insert JSON data.
"""
t_str = "INSERT INTO " + self._table_name
info_str = "( "
value_str = "VALUES ( "
for info, value in hash.iteritems():
info_str += info
info_str += ", "
value_str += ("\'" + value + "\'")
value_str += ", "
info_str = info_str.rstrip(", ")
value_str = value_str.rstrip(", ")
info_str += ")"
value_str += ")"
t_str = t_str + info_str + value_str
res = None
try:
# print "debug: ", t_str
self._client.send(t_str)
insert_id = self._client.insert_id()
res = self._client.commit()
if res is None:
res = insert_id
except Exception, e:
print "DB Insert Fail: {0} {1}".format(e, t_str)
return res
def insert_values(self, columns, values):
# print values
com_header = "INSERT INTO"
t_str = com_header + " " + self._table_name + "("
for id in range(0, len(columns)):
if id < (len(columns) - 1):
t_str += "`" + columns[id] + "`, "
else:
t_str += "`" + columns[id] + "`) "
t_str += "values ("
for id in range(0, len(values)):
if columns[id] != 'timezone' and _type_to_string(values[id]) == 'float':
t_str += str(values[id]).replace("'", "\\'")
else:
values[id] = str(values[id]).replace("'", "\\'")
if values[id].find("Point") >= 0 or values[id].find("Polygon") >= 0:
values[id] = "GeomFromText(\'" + values[id] + "\')"
# print values[id]
t_str += values[id]
else:
t_str += "'" + values[id] + "'"
if id < (len(values) - 1):
t_str += ", "
else:
t_str += "); "
# print "Insert Query: ", t_str
# exit()
res = None
try:
# print "debug: ", t_str
self._client.send(t_str)
insert_id = self._client.insert_id()
res = self._client.commit()
if res is None:
res = insert_id
except Exception, e:
print "DB Insert Fail: {0} {1}".format(e, t_str)
return res
def get_values(self, columns, condition, limit=""):
com_header = "SELECT "
t_str = com_header
for id in range(0, len(columns)):
if id < (len(columns) - 1):
t_str += columns[id] + ", "
else:
t_str += columns[id] + " "
t_str += "FROM " + self._table_name + " "
if condition != "":
t_str += "WHERE " + condition
if limit != "":
t_str += limit
return self._client.send(t_str)
def get_count(self, condition):
com_header = "SELECT count(*) "
t_str = com_header
t_str += "FROM " + self._table_name
if condition != "":
t_str += " WHERE " + condition
return self._client.send(t_str)
def clear_table(self):
t_str = "DELETE FROM " + self._table_name
return self._client.send(t_str)
def exist(self, attr, value):
condition = attr + "=" + value
res = self.get_count(self._table_name, condition)
return res
def exist_in_pattern(self, attr, value):
condition = attr + " Like " + value
res = self.get_count(self._table_name, condition)
return res
def update(self, attr, value, condition):
t_str = "UPDATE " + self._table_name + " "
t_str += "SET " + attr + "=" + value + " "
t_str += "WHERE " + condition
return self._client.send(t_str)
def delete(self, condition):
t_str = "DELETE FROM " + self._table_name + " WHERE " + condition
return self._client.send(t_str)
class MySQLClient(object):
"""
"""
def __init__(self, user, password, host, db):
self._user = user
self._password = password
self._host = host
self._db = db
self._con = None
self._cur = None
@property
def db_name(self):
return self._db
def connect(self):
"""Connect to DB.
:return: None
"""
self._con = MySQLdb.connect(
user=self._user,
passwd=self._password,
host=self._host,
db=self._db,
use_unicode=True,
charset="utf8")
self._cur = self._con.cursor(cursorclass=MySQLdb.cursors.SSCursor)
uds.logging.info("Connect to MySQL. db_name=%s", self._db)
def disconnect(self):
"""Disconnect DB.
:return: None
"""
self._cur.close()
self._con.close()
def send(self, query):
"""Send SQL statement.
:param query: SQL statement
:return: response of execute query
"""
try:
# print "Query:", query
'''
if query.find("INSERT") != -1:
import os
f = open('sql_log_{0}.txt'.format(os.getpid()), 'a')
f.write("{0}\n".format(query))
f.close()'''
self._cur.execute(query)
res = self._cur.fetchall()
except Exception, e:
uds.logging.error('sql:%s,\nmessage:%s', query, e)
return []
return res
def commit(self):
"""Commit connection.
:return: response of commit
"""
return self._con.commit()
def insert_id(self):
return self._con.insert_id()
def try_create_database(user_name, password, host_name, db_name):
"""Create database if not exist.
:param user_name:
:param password:
:param host_name:
:param db_name:
:return: None
"""
con = MySQLdb.connect(user=user_name,
passwd=password,
host=host_name,
use_unicode=True,
charset="utf8")
cur = con.cursor(cursorclass=MySQLdb.cursors.SSCursor)
cur.execute('create database if not exists ' + db_name)
cur.execute('use ' + db_name)
print "Try to create database. db_name=" + db_name # Not use uds.logging
def try_create_m2m_table(client, table_name, m2m_data):
"""Create M2MData table, if not exist.
Query example::
CREATE TABLE `RainSensor` (
`id` bigint(20) NOT NULL AUTO_INCREMENT,
`pointid` varchar(100) DEFAULT NULL,
`time` datetime DEFAULT NULL,
`prefname` varchar(100) DEFAULT NULL,
`pointname` varchar(100) DEFAULT NULL,
`value` float DEFAULT NULL,
`value_unit` varchar(100) DEFAULT NULL,
`geo_loc` geometry NOT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `time_loc` (`pointid`,`time`),
KEY `idx_pointid` (`pointid`),
KEY `idx_time` (`time`),
KEY `idx_prefname` (`prefname`),
KEY `idx_pointname` (`pointname`),
KEY `idx_value` (`value`),
SPATIAL KEY `idx_geo_loc` (`geo_loc`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8
:param client:
:param table_name:
:param m2m_data:
:return: None, or response of execute query
"""
assert m2m_data.size != 0
if check_table(client, table_name):
return
columns = '`time` datetime DEFAULT NULL,'
columns += '`timezone` varchar(10) DEFAULT NULL,'
columns += '`MySQLIndex` bigint(20) NOT NULL AUTO_INCREMENT,'
keys = ''
# Create columns for datum_values
for columnName, data in m2m_data[0].items():
keys = keys + 'KEY `idx_{columnName}` (`{columnName}`),'.format(columnName=columnName)
if columnName == 'time':
continue
columns = columns + '`{columnName}` {type} DEFAULT NULL,'.format(columnName=str(columnName),
type=_type_to_string(data))
# Create columns for device_info
for columnName, data in m2m_data.device_info.items():
keys = keys + 'KEY `idx_{columnName}` (`{columnName}`),'.format(columnName=columnName)
if columnName == 'latitude' or columnName == 'longitude':
columns = columns + '`{columnName}` decimal(13,10) NOT NULL,'.format(columnName=str(columnName))
else:
columns = columns + '`{columnName}` {type} DEFAULT NULL,'.format(columnName=str(columnName),
type=_type_to_string(data))
# Create columns for data_units
for columnName, data in m2m_data.data_units.items():
if columnName in ['time', 'longitude', 'latitude']:
continue
columns = columns + '`unit_{columnName}` {type} NULL,'.format(columnName=str(columnName),
type=_type_to_string(data))
pass
# Create columns for m2m_info
for columnName, data in m2m_data.info_summary.items():
keys = keys + 'KEY `idx_{columnName}` (`{columnName}`),'.format(columnName=columnName)
columns = columns + '`{columnName}` {type} NULL,'.format(columnName=str(columnName),
type=_type_to_string(data))
pass
query = 'CREATE TABLE `{0}` ('.format(table_name)
query += columns
query += keys
query += 'PRIMARY KEY (`MySQLIndex`),'
query += 'UNIQUE KEY `time_loc` (`time`, `latitude`, `longitude`) USING BTREE'
query += ') ENGINE=MyISAM DEFAULT CHARSET=utf8'
result = client.send(query)
return result
def check_table(client, table_name):
    """Return True if *table_name* already exists in the client's database.

    :param client: connected MySQLClient
    :param table_name: name of the table to look for
    :return: bool
    """
    # SHOW TABLES yields one-element row tuples, hence the (name,) probe.
    return (table_name, ) in show_tables(client)
def show_tables(client):
    """List the tables of the client's configured database.

    :param client: database client exposing ``send`` and ``db_name``
    :return: response of the executed ``SHOW TABLES`` query
    """
    return client.send('SHOW TABLES FROM %s' % client.db_name)
def _type_to_string(data):
if isinstance(data, int) or isinstance(data, float) or isinstance(data, long) or isinstance(data, complex):
return 'float'
else:
return 'VARCHAR(200)' | nict-isp/uds-sdk | uds/io/mysql.py | Python | gpl-2.0 | 12,337 |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack Foundation
# Copyright 2012 Justin Santa Barbara
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from lxml import etree
import mox
from oslo.config import cfg
import webob
from nova.api.openstack.compute.contrib import security_groups
from nova.api.openstack import wsgi
from nova.api.openstack import xmlutil
from nova import compute
import nova.db
from nova import exception
from nova.openstack.common import jsonutils
from nova import quota
from nova import test
from nova.tests.api.openstack import fakes
from nova.tests import utils
# Global nova configuration; quota_security_groups is read from it below.
CONF = cfg.CONF
# Fixed instance UUIDs reused by the fake DB helpers and tests below.
FAKE_UUID1 = 'a47ae74e-ab08-447f-8eee-ffd43fc46c16'
FAKE_UUID2 = 'c6e6430a-6563-4efa-9542-5e93c9e97d18'
class AttrDict(dict):
    """Dict whose keys are also readable as attributes (``d.name``)."""
    def __getattr__(self, k):
        # __getattr__ must raise AttributeError (not KeyError) for missing
        # names so that hasattr()/getattr()-with-default work correctly.
        try:
            return self[k]
        except KeyError:
            raise AttributeError(k)
def security_group_template(**kwargs):
    """Build a security-group dict with overridable default fields."""
    template = {'tenant_id': '123',
                'name': 'test',
                'description': 'test-description'}
    template.update(kwargs)
    return template
def security_group_db(security_group, id=None):
    """Convert a security-group template into its fake DB representation."""
    db_entry = dict(security_group)
    if 'tenant_id' in db_entry:
        db_entry['project_id'] = db_entry.pop('tenant_id')
    if id is not None:
        db_entry['id'] = id
    for list_field in ('rules', 'instances'):
        db_entry.setdefault(list_field, [])
    return AttrDict(db_entry)
def security_group_rule_template(**kwargs):
    """Build a security-group-rule dict with overridable default fields."""
    template = {'ip_protocol': 'tcp',
                'from_port': 22,
                'to_port': 22,
                'parent_group_id': 2}
    template.update(kwargs)
    return template
def security_group_rule_db(rule, id=None):
    """Convert a rule template into its fake DB representation."""
    db_rule = dict(rule)
    if 'ip_protocol' in db_rule:
        # DB rows store the protocol under 'protocol', not 'ip_protocol'.
        db_rule['protocol'] = db_rule.pop('ip_protocol')
    return AttrDict(db_rule)
def return_server(context, server_id):
    """Fake nova.db.instance_get returning a running server."""
    return dict(id=int(server_id),
                power_state=0x01,
                host='localhost',
                uuid=FAKE_UUID1,
                name='asdf')
def return_server_by_uuid(context, server_uuid):
    """Fake instance_get_by_uuid returning a running server with *server_uuid*."""
    server = dict(id=1,
                  power_state=0x01,
                  host='localhost',
                  name='asdf')
    server['uuid'] = server_uuid
    return server
def return_non_running_server(context, server_id):
    """Fake instance_get returning a stopped (non-running) server."""
    return dict(id=server_id,
                power_state=0x02,
                uuid=FAKE_UUID1,
                host='localhost',
                name='asdf')
def return_security_group_by_name(context, project_id, group_name):
    """Fake security_group_get_by_name: group already attached to one instance."""
    return {'id': 1,
            'name': group_name,
            'instances': [{'id': 1, 'uuid': FAKE_UUID1}]}
def return_security_group_without_instances(context, project_id, group_name):
    """Fake security_group_get_by_name: group with no instances attached."""
    return dict(id=1, name=group_name)
def return_server_nonexistent(context, server_id):
    # Fake instance lookup that always fails, for exercising 404 paths.
    raise exception.InstanceNotFound(instance_id=server_id)
class TestSecurityGroups(test.TestCase):
    """Tests for the os-security-groups API extension controllers.

    Exercises SecurityGroupController (CRUD on groups),
    ServerSecurityGroupController (listing a server's groups) and
    SecurityGroupActionController (addSecurityGroup/removeSecurityGroup
    server actions), using stubbed nova.db calls.
    """
    def setUp(self):
        super(TestSecurityGroups, self).setUp()
        self.controller = security_groups.SecurityGroupController()
        self.server_controller = (
            security_groups.ServerSecurityGroupController())
        self.manager = security_groups.SecurityGroupActionController()
        # This needs to be done here to set fake_id because the derived
        # class needs to be called first if it wants to set
        # 'security_group_api' and this setUp method needs to be called.
        if self.controller.security_group_api.id_is_uuid:
            self.fake_id = '11111111-1111-1111-1111-111111111111'
        else:
            self.fake_id = '11111111'
    def _assert_no_security_groups_reserved(self, context):
        """Check that no reservations are leaked during tests."""
        result = quota.QUOTAS.get_project_quotas(context, context.project_id)
        self.assertEqual(result['security_groups']['reserved'], 0)
    def test_create_security_group(self):
        sg = security_group_template()
        req = fakes.HTTPRequest.blank('/v2/fake/os-security-groups')
        res_dict = self.controller.create(req, {'security_group': sg})
        self.assertEqual(res_dict['security_group']['name'], 'test')
        self.assertEqual(res_dict['security_group']['description'],
                         'test-description')
    def test_create_security_group_with_no_name(self):
        sg = security_group_template()
        del sg['name']
        req = fakes.HTTPRequest.blank('/v2/fake/os-security-groups')
        self.assertRaises(webob.exc.HTTPUnprocessableEntity,
                          self.controller.create, req, sg)
        self._assert_no_security_groups_reserved(req.environ['nova.context'])
    def test_create_security_group_with_no_description(self):
        sg = security_group_template()
        del sg['description']
        req = fakes.HTTPRequest.blank('/v2/fake/os-security-groups')
        self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
                          req, {'security_group': sg})
        self._assert_no_security_groups_reserved(req.environ['nova.context'])
    def test_create_security_group_with_blank_name(self):
        sg = security_group_template(name='')
        req = fakes.HTTPRequest.blank('/v2/fake/os-security-groups')
        self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
                          req, {'security_group': sg})
        self._assert_no_security_groups_reserved(req.environ['nova.context'])
    def test_create_security_group_with_whitespace_name(self):
        sg = security_group_template(name=' ')
        req = fakes.HTTPRequest.blank('/v2/fake/os-security-groups')
        self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
                          req, {'security_group': sg})
        self._assert_no_security_groups_reserved(req.environ['nova.context'])
    def test_create_security_group_with_blank_description(self):
        sg = security_group_template(description='')
        req = fakes.HTTPRequest.blank('/v2/fake/os-security-groups')
        self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
                          req, {'security_group': sg})
        self._assert_no_security_groups_reserved(req.environ['nova.context'])
    def test_create_security_group_with_whitespace_description(self):
        sg = security_group_template(description=' ')
        req = fakes.HTTPRequest.blank('/v2/fake/os-security-groups')
        self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
                          req, {'security_group': sg})
        self._assert_no_security_groups_reserved(req.environ['nova.context'])
    def test_create_security_group_with_duplicate_name(self):
        sg = security_group_template()
        # FIXME: Stub out _get instead of creating twice
        req = fakes.HTTPRequest.blank('/v2/fake/os-security-groups')
        self.controller.create(req, {'security_group': sg})
        req = fakes.HTTPRequest.blank('/v2/fake/os-security-groups')
        self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
                          req, {'security_group': sg})
        self._assert_no_security_groups_reserved(req.environ['nova.context'])
    def test_create_security_group_with_no_body(self):
        req = fakes.HTTPRequest.blank('/v2/fake/os-security-groups')
        self.assertRaises(webob.exc.HTTPUnprocessableEntity,
                          self.controller.create, req, None)
        self._assert_no_security_groups_reserved(req.environ['nova.context'])
    def test_create_security_group_with_no_security_group(self):
        body = {'no-securityGroup': None}
        req = fakes.HTTPRequest.blank('/v2/fake/os-security-groups')
        self.assertRaises(webob.exc.HTTPUnprocessableEntity,
                          self.controller.create, req, body)
        self._assert_no_security_groups_reserved(req.environ['nova.context'])
    def test_create_security_group_above_255_characters_name(self):
        sg = security_group_template(name='1234567890' * 26)
        req = fakes.HTTPRequest.blank('/v2/fake/os-security-groups')
        self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
                          req, {'security_group': sg})
        self._assert_no_security_groups_reserved(req.environ['nova.context'])
    def test_create_security_group_above_255_characters_description(self):
        sg = security_group_template(description='1234567890' * 26)
        req = fakes.HTTPRequest.blank('/v2/fake/os-security-groups')
        self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
                          req, {'security_group': sg})
        self._assert_no_security_groups_reserved(req.environ['nova.context'])
    def test_create_security_group_non_string_name(self):
        sg = security_group_template(name=12)
        req = fakes.HTTPRequest.blank('/v2/fake/os-security-groups')
        self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
                          req, {'security_group': sg})
        self._assert_no_security_groups_reserved(req.environ['nova.context'])
    def test_create_security_group_non_string_description(self):
        sg = security_group_template(description=12)
        req = fakes.HTTPRequest.blank('/v2/fake/os-security-groups')
        self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
                          req, {'security_group': sg})
        self._assert_no_security_groups_reserved(req.environ['nova.context'])
    def test_create_security_group_quota_limit(self):
        req = fakes.HTTPRequest.blank('/v2/fake/os-security-groups')
        for num in range(1, CONF.quota_security_groups + 1):
            name = 'test%s' % num
            sg = security_group_template(name=name)
            res_dict = self.controller.create(req, {'security_group': sg})
            self.assertEqual(res_dict['security_group']['name'], name)
        sg = security_group_template()
        self.assertRaises(exception.SecurityGroupLimitExceeded,
                          self.controller.create,
                          req, {'security_group': sg})
    def test_get_security_group_list(self):
        groups = []
        for i, name in enumerate(['default', 'test']):
            sg = security_group_template(id=i + 1,
                                         name=name,
                                         description=name + '-desc',
                                         rules=[])
            groups.append(sg)
        expected = {'security_groups': groups}
        def return_security_groups(context, project_id):
            return [security_group_db(sg) for sg in groups]
        self.stubs.Set(nova.db, 'security_group_get_by_project',
                       return_security_groups)
        req = fakes.HTTPRequest.blank('/v2/fake/os-security-groups')
        res_dict = self.controller.index(req)
        self.assertEquals(res_dict, expected)
    def test_get_security_group_list_all_tenants(self):
        all_groups = []
        tenant_groups = []
        for i, name in enumerate(['default', 'test']):
            sg = security_group_template(id=i + 1,
                                         name=name,
                                         description=name + '-desc',
                                         rules=[])
            all_groups.append(sg)
            if name == 'default':
                tenant_groups.append(sg)
        all = {'security_groups': all_groups}
        tenant_specific = {'security_groups': tenant_groups}
        def return_all_security_groups(context):
            return [security_group_db(sg) for sg in all_groups]
        self.stubs.Set(nova.db, 'security_group_get_all',
                       return_all_security_groups)
        def return_tenant_security_groups(context, project_id):
            return [security_group_db(sg) for sg in tenant_groups]
        self.stubs.Set(nova.db, 'security_group_get_by_project',
                       return_tenant_security_groups)
        path = '/v2/fake/os-security-groups'
        req = fakes.HTTPRequest.blank(path, use_admin_context=True)
        res_dict = self.controller.index(req)
        self.assertEquals(res_dict, tenant_specific)
        req = fakes.HTTPRequest.blank('%s?all_tenants=1' % path,
                                      use_admin_context=True)
        res_dict = self.controller.index(req)
        self.assertEquals(res_dict, all)
    def test_get_security_group_by_instance(self):
        groups = []
        for i, name in enumerate(['default', 'test']):
            sg = security_group_template(id=i + 1,
                                         name=name,
                                         description=name + '-desc',
                                         rules=[])
            groups.append(sg)
        expected = {'security_groups': groups}
        def return_instance(context, server_id):
            self.assertEquals(server_id, FAKE_UUID1)
            return return_server_by_uuid(context, server_id)
        self.stubs.Set(nova.db, 'instance_get_by_uuid',
                       return_instance)
        def return_security_groups(context, instance_id):
            self.assertEquals(instance_id, 1)
            return [security_group_db(sg) for sg in groups]
        self.stubs.Set(nova.db, 'security_group_get_by_instance',
                       return_security_groups)
        req = fakes.HTTPRequest.blank('/v2/%s/servers/%s/os-security-groups' %
                                      ('fake', FAKE_UUID1))
        res_dict = self.server_controller.index(req, FAKE_UUID1)
        self.assertEquals(res_dict, expected)
    def test_get_security_group_by_instance_non_existing(self):
        self.stubs.Set(nova.db, 'instance_get', return_server_nonexistent)
        self.stubs.Set(nova.db, 'instance_get_by_uuid',
                       return_server_nonexistent)
        req = fakes.HTTPRequest.blank('/v2/fake/servers/1/os-security-groups')
        self.assertRaises(webob.exc.HTTPNotFound,
                          self.server_controller.index, req, '1')
    def test_get_security_group_by_instance_invalid_id(self):
        req = fakes.HTTPRequest.blank(
            '/v2/fake/servers/invalid/os-security-groups')
        self.assertRaises(webob.exc.HTTPNotFound,
                          self.server_controller.index, req, 'invalid')
    def test_get_security_group_by_id(self):
        sg = security_group_template(id=2, rules=[])
        def return_security_group(context, group_id):
            self.assertEquals(sg['id'], group_id)
            return security_group_db(sg)
        self.stubs.Set(nova.db, 'security_group_get',
                       return_security_group)
        req = fakes.HTTPRequest.blank('/v2/fake/os-security-groups/2')
        res_dict = self.controller.show(req, '2')
        expected = {'security_group': sg}
        self.assertEquals(res_dict, expected)
    def test_get_security_group_by_invalid_id(self):
        req = fakes.HTTPRequest.blank('/v2/fake/os-security-groups/invalid')
        self.assertRaises(webob.exc.HTTPBadRequest, self.controller.delete,
                          req, 'invalid')
    def test_get_security_group_by_non_existing_id(self):
        req = fakes.HTTPRequest.blank('/v2/fake/os-security-groups/%s' %
                                      self.fake_id)
        self.assertRaises(webob.exc.HTTPNotFound, self.controller.delete,
                          req, self.fake_id)
    def test_delete_security_group_by_id(self):
        sg = security_group_template(id=1, rules=[])
        self.called = False
        def security_group_destroy(context, id):
            self.called = True
        def return_security_group(context, group_id):
            self.assertEquals(sg['id'], group_id)
            return security_group_db(sg)
        self.stubs.Set(nova.db, 'security_group_destroy',
                       security_group_destroy)
        self.stubs.Set(nova.db, 'security_group_get',
                       return_security_group)
        req = fakes.HTTPRequest.blank('/v2/fake/os-security-groups/1')
        self.controller.delete(req, '1')
        self.assertTrue(self.called)
    def test_delete_security_group_by_invalid_id(self):
        req = fakes.HTTPRequest.blank('/v2/fake/os-security-groups/invalid')
        self.assertRaises(webob.exc.HTTPBadRequest, self.controller.delete,
                          req, 'invalid')
    def test_delete_security_group_by_non_existing_id(self):
        req = fakes.HTTPRequest.blank('/v2/fake/os-security-groups/%s'
                                      % self.fake_id)
        self.assertRaises(webob.exc.HTTPNotFound, self.controller.delete,
                          req, self.fake_id)
    def test_delete_security_group_in_use(self):
        sg = security_group_template(id=1, rules=[])
        def security_group_in_use(context, id):
            return True
        def return_security_group(context, group_id):
            self.assertEquals(sg['id'], group_id)
            return security_group_db(sg)
        self.stubs.Set(nova.db, 'security_group_in_use',
                       security_group_in_use)
        self.stubs.Set(nova.db, 'security_group_get',
                       return_security_group)
        req = fakes.HTTPRequest.blank('/v2/fake/os-security-groups/1')
        self.assertRaises(webob.exc.HTTPBadRequest, self.controller.delete,
                          req, '1')
    # addSecurityGroup server-action tests.
    def test_associate_by_non_existing_security_group_name(self):
        self.stubs.Set(nova.db, 'instance_get', return_server)
        self.assertEquals(return_server(None, '1'),
                          nova.db.instance_get(None, '1'))
        body = dict(addSecurityGroup=dict(name='non-existing'))
        req = fakes.HTTPRequest.blank('/v2/fake/servers/1/action')
        self.assertRaises(webob.exc.HTTPNotFound,
                          self.manager._addSecurityGroup, req, '1', body)
    def test_associate_by_invalid_server_id(self):
        body = dict(addSecurityGroup=dict(name='test'))
        req = fakes.HTTPRequest.blank('/v2/fake/servers/invalid/action')
        self.assertRaises(webob.exc.HTTPNotFound,
                          self.manager._addSecurityGroup, req, 'invalid', body)
    def test_associate_without_body(self):
        self.stubs.Set(nova.db, 'instance_get', return_server)
        body = dict(addSecurityGroup=None)
        req = fakes.HTTPRequest.blank('/v2/fake/servers/1/action')
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.manager._addSecurityGroup, req, '1', body)
    def test_associate_no_security_group_name(self):
        self.stubs.Set(nova.db, 'instance_get', return_server)
        body = dict(addSecurityGroup=dict())
        req = fakes.HTTPRequest.blank('/v2/fake/servers/1/action')
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.manager._addSecurityGroup, req, '1', body)
    def test_associate_security_group_name_with_whitespaces(self):
        self.stubs.Set(nova.db, 'instance_get', return_server)
        body = dict(addSecurityGroup=dict(name="   "))
        req = fakes.HTTPRequest.blank('/v2/fake/servers/1/action')
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.manager._addSecurityGroup, req, '1', body)
    def test_associate_non_existing_instance(self):
        self.stubs.Set(nova.db, 'instance_get', return_server_nonexistent)
        self.stubs.Set(nova.db, 'instance_get_by_uuid',
                       return_server_nonexistent)
        body = dict(addSecurityGroup=dict(name="test"))
        req = fakes.HTTPRequest.blank('/v2/fake/servers/1/action')
        self.assertRaises(webob.exc.HTTPNotFound,
                          self.manager._addSecurityGroup, req, '1', body)
    def test_associate_non_running_instance(self):
        self.stubs.Set(nova.db, 'instance_get', return_non_running_server)
        self.stubs.Set(nova.db, 'instance_get_by_uuid',
                       return_non_running_server)
        self.stubs.Set(nova.db, 'security_group_get_by_name',
                       return_security_group_without_instances)
        body = dict(addSecurityGroup=dict(name="test"))
        req = fakes.HTTPRequest.blank('/v2/fake/servers/1/action')
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.manager._addSecurityGroup, req, '1', body)
    def test_associate_already_associated_security_group_to_instance(self):
        self.stubs.Set(nova.db, 'instance_get', return_server)
        self.stubs.Set(nova.db, 'instance_get_by_uuid',
                       return_server_by_uuid)
        self.stubs.Set(nova.db, 'security_group_get_by_name',
                       return_security_group_by_name)
        body = dict(addSecurityGroup=dict(name="test"))
        req = fakes.HTTPRequest.blank('/v2/fake/servers/1/action')
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.manager._addSecurityGroup, req, '1', body)
    def test_associate(self):
        self.stubs.Set(nova.db, 'instance_get', return_server)
        self.stubs.Set(nova.db, 'instance_get_by_uuid',
                       return_server_by_uuid)
        self.mox.StubOutWithMock(nova.db, 'instance_add_security_group')
        nova.db.instance_add_security_group(mox.IgnoreArg(),
                                            mox.IgnoreArg(),
                                            mox.IgnoreArg())
        self.stubs.Set(nova.db, 'security_group_get_by_name',
                       return_security_group_without_instances)
        self.mox.ReplayAll()
        body = dict(addSecurityGroup=dict(name="test"))
        req = fakes.HTTPRequest.blank('/v2/fake/servers/1/action')
        self.manager._addSecurityGroup(req, '1', body)
    # removeSecurityGroup server-action tests.
    def test_disassociate_by_non_existing_security_group_name(self):
        self.stubs.Set(nova.db, 'instance_get', return_server)
        self.assertEquals(return_server(None, '1'),
                          nova.db.instance_get(None, '1'))
        body = dict(removeSecurityGroup=dict(name='non-existing'))
        req = fakes.HTTPRequest.blank('/v2/fake/servers/1/action')
        self.assertRaises(webob.exc.HTTPNotFound,
                          self.manager._removeSecurityGroup, req, '1', body)
    def test_disassociate_by_invalid_server_id(self):
        self.stubs.Set(nova.db, 'security_group_get_by_name',
                       return_security_group_by_name)
        body = dict(removeSecurityGroup=dict(name='test'))
        req = fakes.HTTPRequest.blank('/v2/fake/servers/invalid/action')
        self.assertRaises(webob.exc.HTTPNotFound,
                          self.manager._removeSecurityGroup, req, 'invalid',
                          body)
    def test_disassociate_without_body(self):
        self.stubs.Set(nova.db, 'instance_get', return_server)
        body = dict(removeSecurityGroup=None)
        req = fakes.HTTPRequest.blank('/v2/fake/servers/1/action')
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.manager._removeSecurityGroup, req, '1', body)
    def test_disassociate_no_security_group_name(self):
        self.stubs.Set(nova.db, 'instance_get', return_server)
        body = dict(removeSecurityGroup=dict())
        req = fakes.HTTPRequest.blank('/v2/fake/servers/1/action')
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.manager._removeSecurityGroup, req, '1', body)
    def test_disassociate_security_group_name_with_whitespaces(self):
        self.stubs.Set(nova.db, 'instance_get', return_server)
        body = dict(removeSecurityGroup=dict(name="   "))
        req = fakes.HTTPRequest.blank('/v2/fake/servers/1/action')
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.manager._removeSecurityGroup, req, '1', body)
    def test_disassociate_non_existing_instance(self):
        self.stubs.Set(nova.db, 'instance_get', return_server_nonexistent)
        self.stubs.Set(nova.db, 'security_group_get_by_name',
                       return_security_group_by_name)
        body = dict(removeSecurityGroup=dict(name="test"))
        req = fakes.HTTPRequest.blank('/v2/fake/servers/1/action')
        self.assertRaises(webob.exc.HTTPNotFound,
                          self.manager._removeSecurityGroup, req, '1', body)
    def test_disassociate_non_running_instance(self):
        self.stubs.Set(nova.db, 'instance_get', return_non_running_server)
        self.stubs.Set(nova.db, 'instance_get_by_uuid',
                       return_non_running_server)
        self.stubs.Set(nova.db, 'security_group_get_by_name',
                       return_security_group_by_name)
        body = dict(removeSecurityGroup=dict(name="test"))
        req = fakes.HTTPRequest.blank('/v2/fake/servers/1/action')
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.manager._removeSecurityGroup, req, '1', body)
    def test_disassociate_already_associated_security_group_to_instance(self):
        self.stubs.Set(nova.db, 'instance_get', return_server)
        self.stubs.Set(nova.db, 'instance_get_by_uuid',
                       return_server_by_uuid)
        self.stubs.Set(nova.db, 'security_group_get_by_name',
                       return_security_group_without_instances)
        body = dict(removeSecurityGroup=dict(name="test"))
        req = fakes.HTTPRequest.blank('/v2/fake/servers/1/action')
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.manager._removeSecurityGroup, req, '1', body)
    def test_disassociate(self):
        self.stubs.Set(nova.db, 'instance_get', return_server)
        self.stubs.Set(nova.db, 'instance_get_by_uuid',
                       return_server_by_uuid)
        self.mox.StubOutWithMock(nova.db, 'instance_remove_security_group')
        nova.db.instance_remove_security_group(mox.IgnoreArg(),
                                               mox.IgnoreArg(),
                                               mox.IgnoreArg())
        self.stubs.Set(nova.db, 'security_group_get_by_name',
                       return_security_group_by_name)
        self.mox.ReplayAll()
        body = dict(removeSecurityGroup=dict(name="test"))
        req = fakes.HTTPRequest.blank('/v2/fake/servers/1/action')
        self.manager._removeSecurityGroup(req, '1', body)
class TestSecurityGroupRules(test.TestCase):
def setUp(self):
super(TestSecurityGroupRules, self).setUp()
self.controller = security_groups.SecurityGroupController()
if self.controller.security_group_api.id_is_uuid:
id1 = '11111111-1111-1111-1111-111111111111'
id2 = '22222222-2222-2222-2222-222222222222'
self.invalid_id = '33333333-3333-3333-3333-333333333333'
else:
id1 = 1
id2 = 2
self.invalid_id = '33333333'
self.sg1 = security_group_template(id=id1)
self.sg2 = security_group_template(
id=id2, name='authorize_revoke',
description='authorize-revoke testing')
db1 = security_group_db(self.sg1)
db2 = security_group_db(self.sg2)
def return_security_group(context, group_id):
if group_id == db1['id']:
return db1
if group_id == db2['id']:
return db2
raise exception.NotFound()
self.stubs.Set(nova.db, 'security_group_get',
return_security_group)
self.parent_security_group = db2
self.controller = security_groups.SecurityGroupRulesController()
def test_create_by_cidr(self):
rule = security_group_rule_template(cidr='10.2.3.124/24',
parent_group_id=self.sg2['id'])
req = fakes.HTTPRequest.blank('/v2/fake/os-security-group-rules')
res_dict = self.controller.create(req, {'security_group_rule': rule})
security_group_rule = res_dict['security_group_rule']
self.assertNotEquals(security_group_rule['id'], 0)
self.assertEquals(security_group_rule['parent_group_id'],
self.sg2['id'])
self.assertEquals(security_group_rule['ip_range']['cidr'],
"10.2.3.124/24")
def test_create_by_group_id(self):
rule = security_group_rule_template(group_id=self.sg1['id'],
parent_group_id=self.sg2['id'])
req = fakes.HTTPRequest.blank('/v2/fake/os-security-group-rules')
res_dict = self.controller.create(req, {'security_group_rule': rule})
security_group_rule = res_dict['security_group_rule']
self.assertNotEquals(security_group_rule['id'], 0)
self.assertEquals(security_group_rule['parent_group_id'],
self.sg2['id'])
def test_create_by_same_group_id(self):
rule1 = security_group_rule_template(group_id=self.sg1['id'],
from_port=80, to_port=80,
parent_group_id=self.sg2['id'])
self.parent_security_group['rules'] = [security_group_rule_db(rule1)]
rule2 = security_group_rule_template(group_id=self.sg1['id'],
from_port=81, to_port=81,
parent_group_id=self.sg2['id'])
req = fakes.HTTPRequest.blank('/v2/fake/os-security-group-rules')
res_dict = self.controller.create(req, {'security_group_rule': rule2})
security_group_rule = res_dict['security_group_rule']
self.assertNotEquals(security_group_rule['id'], 0)
self.assertEquals(security_group_rule['parent_group_id'],
self.sg2['id'])
self.assertEquals(security_group_rule['from_port'], 81)
self.assertEquals(security_group_rule['to_port'], 81)
def test_create_by_invalid_cidr_json(self):
rule = security_group_rule_template(
ip_protocol="tcp",
from_port=22,
to_port=22,
parent_group_id=self.sg2['id'],
cidr="10.2.3.124/2433")
req = fakes.HTTPRequest.blank('/v2/fake/os-security-group-rules')
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
req, {'security_group_rule': rule})
def test_create_by_invalid_tcp_port_json(self):
rule = security_group_rule_template(
ip_protocol="tcp",
from_port=75534,
to_port=22,
parent_group_id=self.sg2['id'],
cidr="10.2.3.124/24")
req = fakes.HTTPRequest.blank('/v2/fake/os-security-group-rules')
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
req, {'security_group_rule': rule})
def test_create_by_invalid_icmp_port_json(self):
rule = security_group_rule_template(
ip_protocol="icmp",
from_port=1,
to_port=256,
parent_group_id=self.sg2['id'],
cidr="10.2.3.124/24")
req = fakes.HTTPRequest.blank('/v2/fake/os-security-group-rules')
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
req, {'security_group_rule': rule})
def test_create_add_existing_rules_by_cidr(self):
rule = security_group_rule_template(cidr='10.0.0.0/24',
parent_group_id=self.sg2['id'])
self.parent_security_group['rules'] = [security_group_rule_db(rule)]
req = fakes.HTTPRequest.blank('/v2/fake/os-security-group-rules')
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
req, {'security_group_rule': rule})
def test_create_add_existing_rules_by_group_id(self):
rule = security_group_rule_template(group_id=1)
self.parent_security_group['rules'] = [security_group_rule_db(rule)]
req = fakes.HTTPRequest.blank('/v2/fake/os-security-group-rules')
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
req, {'security_group_rule': rule})
def test_create_with_no_body(self):
req = fakes.HTTPRequest.blank('/v2/fake/os-security-group-rules')
self.assertRaises(webob.exc.HTTPUnprocessableEntity,
self.controller.create, req, None)
def test_create_with_no_security_group_rule_in_body(self):
rules = {'test': 'test'}
req = fakes.HTTPRequest.blank('/v2/fake/os-security-group-rules')
self.assertRaises(webob.exc.HTTPUnprocessableEntity,
self.controller.create, req, rules)
def test_create_with_invalid_parent_group_id(self):
rule = security_group_rule_template(parent_group_id='invalid')
req = fakes.HTTPRequest.blank('/v2/fake/os-security-group-rules')
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
req, {'security_group_rule': rule})
def test_create_with_non_existing_parent_group_id(self):
rule = security_group_rule_template(group_id='invalid',
parent_group_id=self.invalid_id)
req = fakes.HTTPRequest.blank('/v2/fake/os-security-group-rules')
self.assertRaises(webob.exc.HTTPNotFound, self.controller.create,
req, {'security_group_rule': rule})
def test_create_with_invalid_protocol(self):
rule = security_group_rule_template(ip_protocol='invalid-protocol',
cidr='10.2.2.0/24',
parent_group_id=self.sg2['id'])
req = fakes.HTTPRequest.blank('/v2/fake/os-security-group-rules')
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
req, {'security_group_rule': rule})
def test_create_with_no_protocol(self):
rule = security_group_rule_template(cidr='10.2.2.0/24',
parent_group_id=self.sg2['id'])
del rule['ip_protocol']
req = fakes.HTTPRequest.blank('/v2/fake/os-security-group-rules')
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
req, {'security_group_rule': rule})
def test_create_with_invalid_from_port(self):
rule = security_group_rule_template(from_port='666666',
cidr='10.2.2.0/24',
parent_group_id=self.sg2['id'])
req = fakes.HTTPRequest.blank('/v2/fake/os-security-group-rules')
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
req, {'security_group_rule': rule})
def test_create_with_invalid_to_port(self):
rule = security_group_rule_template(to_port='666666',
cidr='10.2.2.0/24',
parent_group_id=self.sg2['id'])
req = fakes.HTTPRequest.blank('/v2/fake/os-security-group-rules')
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
req, {'security_group_rule': rule})
def test_create_with_non_numerical_from_port(self):
rule = security_group_rule_template(from_port='invalid',
cidr='10.2.2.0/24',
parent_group_id=self.sg2['id'])
req = fakes.HTTPRequest.blank('/v2/fake/os-security-group-rules')
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
req, {'security_group_rule': rule})
def test_create_with_non_numerical_to_port(self):
rule = security_group_rule_template(to_port='invalid',
cidr='10.2.2.0/24',
parent_group_id=self.sg2['id'])
req = fakes.HTTPRequest.blank('/v2/fake/os-security-group-rules')
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
req, {'security_group_rule': rule})
def test_create_with_no_from_port(self):
rule = security_group_rule_template(cidr='10.2.2.0/24',
parent_group_id=self.sg2['id'])
del rule['from_port']
req = fakes.HTTPRequest.blank('/v2/fake/os-security-group-rules')
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
req, {'security_group_rule': rule})
def test_create_with_no_to_port(self):
rule = security_group_rule_template(cidr='10.2.2.0/24',
parent_group_id=self.sg2['id'])
del rule['to_port']
req = fakes.HTTPRequest.blank('/v2/fake/os-security-group-rules')
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
req, {'security_group_rule': rule})
def test_create_with_invalid_cidr(self):
rule = security_group_rule_template(cidr='10.2.2222.0/24',
parent_group_id=self.sg2['id'])
req = fakes.HTTPRequest.blank('/v2/fake/os-security-group-rules')
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
req, {'security_group_rule': rule})
def test_create_with_no_cidr_group(self):
rule = security_group_rule_template(parent_group_id=self.sg2['id'])
req = fakes.HTTPRequest.blank('/v2/fake/os-security-group-rules')
res_dict = self.controller.create(req, {'security_group_rule': rule})
security_group_rule = res_dict['security_group_rule']
self.assertNotEquals(security_group_rule['id'], 0)
self.assertEquals(security_group_rule['parent_group_id'],
self.parent_security_group['id'])
self.assertEquals(security_group_rule['ip_range']['cidr'],
"0.0.0.0/0")
def test_create_with_invalid_group_id(self):
rule = security_group_rule_template(group_id='invalid',
parent_group_id=self.sg2['id'])
req = fakes.HTTPRequest.blank('/v2/fake/os-security-group-rules')
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
req, {'security_group_rule': rule})
def test_create_with_empty_group_id(self):
rule = security_group_rule_template(group_id='',
parent_group_id=self.sg2['id'])
req = fakes.HTTPRequest.blank('/v2/fake/os-security-group-rules')
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
req, {'security_group_rule': rule})
def test_create_with_nonexist_group_id(self):
rule = security_group_rule_template(group_id=self.invalid_id,
parent_group_id=self.sg2['id'])
req = fakes.HTTPRequest.blank('/v2/fake/os-security-group-rules')
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
req, {'security_group_rule': rule})
    def test_create_with_same_group_parent_id_and_group_id(self):
        """A security group may reference itself as the rule's source group."""
        rule = security_group_rule_template(group_id=self.sg1['id'],
                                            parent_group_id=self.sg1['id'])
        req = fakes.HTTPRequest.blank('/v2/fake/os-security-group-rules')
        res_dict = self.controller.create(req, {'security_group_rule': rule})
        security_group_rule = res_dict['security_group_rule']
        self.assertNotEquals(security_group_rule['id'], 0)
        self.assertEquals(security_group_rule['parent_group_id'],
                          self.sg1['id'])
        self.assertEquals(security_group_rule['group']['name'],
                          self.sg1['name'])
    def _test_create_with_no_ports_and_no_group(self, proto):
        """Helper: no ports AND no source group for ``proto`` is an HTTP 400."""
        rule = {'ip_protocol': proto, 'parent_group_id': self.sg2['id']}
        req = fakes.HTTPRequest.blank('/v2/fake/os-security-group-rules')
        self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
                          req, {'security_group_rule': rule})
def _test_create_with_no_ports(self, proto):
rule = {'ip_protocol': proto, 'parent_group_id': self.sg2['id'],
'group_id': self.sg1['id']}
req = fakes.HTTPRequest.blank('/v2/fake/os-security-group-rules')
res_dict = self.controller.create(req, {'security_group_rule': rule})
security_group_rule = res_dict['security_group_rule']
expected_rule = {
'from_port': 1, 'group': {'tenant_id': '123', 'name': 'test'},
'ip_protocol': proto, 'to_port': 65535, 'parent_group_id':
self.sg2['id'], 'ip_range': {}, 'id': security_group_rule['id']
}
if proto == 'icmp':
expected_rule['to_port'] = -1
expected_rule['from_port'] = -1
self.assertTrue(security_group_rule == expected_rule)
    def test_create_with_no_ports_icmp(self):
        """ICMP rules may omit ports, but only when a source group is given."""
        self._test_create_with_no_ports_and_no_group('icmp')
        self._test_create_with_no_ports('icmp')
    def test_create_with_no_ports_tcp(self):
        """TCP rules may omit ports, but only when a source group is given."""
        self._test_create_with_no_ports_and_no_group('tcp')
        self._test_create_with_no_ports('tcp')
    def test_create_with_no_ports_udp(self):
        """UDP rules may omit ports, but only when a source group is given."""
        self._test_create_with_no_ports_and_no_group('udp')
        self._test_create_with_no_ports('udp')
def _test_create_with_ports(self, proto, from_port, to_port):
rule = {
'ip_protocol': proto, 'from_port': from_port, 'to_port': to_port,
'parent_group_id': self.sg2['id'], 'group_id': self.sg1['id']
}
req = fakes.HTTPRequest.blank('/v2/fake/os-security-group-rules')
res_dict = self.controller.create(req, {'security_group_rule': rule})
security_group_rule = res_dict['security_group_rule']
expected_rule = {
'from_port': from_port,
'group': {'tenant_id': '123', 'name': 'test'},
'ip_protocol': proto, 'to_port': to_port, 'parent_group_id':
self.sg2['id'], 'ip_range': {}, 'id': security_group_rule['id']
}
self.assertTrue(security_group_rule['ip_protocol'] == proto)
self.assertTrue(security_group_rule['from_port'] == from_port)
self.assertTrue(security_group_rule['to_port'] == to_port)
self.assertTrue(security_group_rule == expected_rule)
    def test_create_with_ports_icmp(self):
        """ICMP accepts type/code pairs including zero values."""
        self._test_create_with_ports('icmp', 0, 1)
        self._test_create_with_ports('icmp', 0, 0)
        self._test_create_with_ports('icmp', 1, 0)
    def test_create_with_ports_tcp(self):
        """TCP accepts single ports and ranges across the full 1-65535 span."""
        self._test_create_with_ports('tcp', 1, 1)
        self._test_create_with_ports('tcp', 1, 65535)
        self._test_create_with_ports('tcp', 65535, 65535)
    def test_create_with_ports_udp(self):
        """UDP accepts single ports and ranges across the full 1-65535 span."""
        self._test_create_with_ports('udp', 1, 1)
        self._test_create_with_ports('udp', 1, 65535)
        self._test_create_with_ports('udp', 65535, 65535)
    def test_delete(self):
        """Deleting an existing rule succeeds (db layer stubbed out)."""
        rule = security_group_rule_template(id=self.sg2['id'],
                                            parent_group_id=self.sg2['id'])

        def security_group_rule_get(context, id):
            """Stub: return the canned rule regardless of the id asked for."""
            return security_group_rule_db(rule)

        def security_group_rule_destroy(context, id):
            """Stub: accept the destroy call without touching a database."""
            pass

        self.stubs.Set(nova.db, 'security_group_rule_get',
                       security_group_rule_get)
        self.stubs.Set(nova.db, 'security_group_rule_destroy',
                       security_group_rule_destroy)
        req = fakes.HTTPRequest.blank('/v2/fake/os-security-group-rules/%s'
                                      % self.sg2['id'])
        self.controller.delete(req, self.sg2['id'])
    def test_delete_invalid_rule_id(self):
        """A non-numeric rule id yields HTTP 400 on delete."""
        req = fakes.HTTPRequest.blank('/v2/fake/os-security-group-rules' +
                                      '/invalid')
        self.assertRaises(webob.exc.HTTPBadRequest, self.controller.delete,
                          req, 'invalid')
    def test_delete_non_existing_rule_id(self):
        """An unknown rule id yields HTTP 404 on delete."""
        req = fakes.HTTPRequest.blank('/v2/fake/os-security-group-rules/%s'
                                      % self.invalid_id)
        self.assertRaises(webob.exc.HTTPNotFound, self.controller.delete,
                          req, self.invalid_id)
    def test_create_rule_quota_limit(self):
        """Creating one rule past the configured quota raises the limit error."""
        req = fakes.HTTPRequest.blank('/v2/fake/os-security-group-rules')
        # Fill the quota exactly, using distinct ports to keep rules unique.
        for num in range(100, 100 + CONF.quota_security_group_rules):
            rule = {
                'ip_protocol': 'tcp', 'from_port': num,
                'to_port': num, 'parent_group_id': self.sg2['id'],
                'group_id': self.sg1['id']
            }
            self.controller.create(req, {'security_group_rule': rule})
        rule = {
            'ip_protocol': 'tcp', 'from_port': '121', 'to_port': '121',
            'parent_group_id': self.sg2['id'], 'group_id': self.sg1['id']
        }
        self.assertRaises(exception.SecurityGroupLimitExceeded,
                          self.controller.create,
                          req, {'security_group_rule': rule})
    def test_create_rule_cidr_allow_all(self):
        """A 0.0.0.0/0 (allow-all) cidr is accepted and echoed back."""
        rule = security_group_rule_template(cidr='0.0.0.0/0',
                                            parent_group_id=self.sg2['id'])
        req = fakes.HTTPRequest.blank('/v2/fake/os-security-group-rules')
        res_dict = self.controller.create(req, {'security_group_rule': rule})
        security_group_rule = res_dict['security_group_rule']
        self.assertNotEquals(security_group_rule['id'], 0)
        self.assertEquals(security_group_rule['parent_group_id'],
                          self.parent_security_group['id'])
        self.assertEquals(security_group_rule['ip_range']['cidr'],
                          "0.0.0.0/0")
    def test_create_rule_cidr_allow_some(self):
        """A restricted cidr (/8 here) is accepted and echoed back."""
        rule = security_group_rule_template(cidr='15.0.0.0/8',
                                            parent_group_id=self.sg2['id'])
        req = fakes.HTTPRequest.blank('/v2/fake/os-security-group-rules')
        res_dict = self.controller.create(req, {'security_group_rule': rule})
        security_group_rule = res_dict['security_group_rule']
        self.assertNotEquals(security_group_rule['id'], 0)
        self.assertEquals(security_group_rule['parent_group_id'],
                          self.parent_security_group['id'])
        self.assertEquals(security_group_rule['ip_range']['cidr'],
                          "15.0.0.0/8")
    def test_create_rule_cidr_bad_netmask(self):
        """A /0 netmask on a non-zero network is rejected with HTTP 400."""
        rule = security_group_rule_template(cidr='15.0.0.0/0')
        req = fakes.HTTPRequest.blank('/v2/fake/os-security-group-rules')
        self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
                          req, {'security_group_rule': rule})
class TestSecurityGroupRulesXMLDeserializer(test.TestCase):
    """XML -> dict deserialization tests for security group rule bodies."""

    def setUp(self):
        super(TestSecurityGroupRulesXMLDeserializer, self).setUp()
        self.deserializer = security_groups.SecurityGroupRulesXMLDeserializer()

    def test_create_request(self):
        """Every element of a full rule document is copied through as text."""
        serial_request = """
<security_group_rule>
<parent_group_id>12</parent_group_id>
<from_port>22</from_port>
<to_port>22</to_port>
<group_id></group_id>
<ip_protocol>tcp</ip_protocol>
<cidr>10.0.0.0/24</cidr>
</security_group_rule>"""
        request = self.deserializer.deserialize(serial_request)
        expected = {
            "security_group_rule": {
                "parent_group_id": "12",
                "from_port": "22",
                "to_port": "22",
                "ip_protocol": "tcp",
                "group_id": "",
                "cidr": "10.0.0.0/24",
            },
        }
        self.assertEquals(request['body'], expected)

    def test_create_no_protocol_request(self):
        """ip_protocol is optional; it is simply absent from the result."""
        serial_request = """
<security_group_rule>
<parent_group_id>12</parent_group_id>
<from_port>22</from_port>
<to_port>22</to_port>
<group_id></group_id>
<cidr>10.0.0.0/24</cidr>
</security_group_rule>"""
        request = self.deserializer.deserialize(serial_request)
        expected = {
            "security_group_rule": {
                "parent_group_id": "12",
                "from_port": "22",
                "to_port": "22",
                "group_id": "",
                "cidr": "10.0.0.0/24",
            },
        }
        self.assertEquals(request['body'], expected)

    def test_corrupt_xml(self):
        """Should throw a 400 error on corrupt xml."""
        self.assertRaises(
                exception.MalformedRequestBody,
                self.deserializer.deserialize,
                utils.killer_xml_body())
class TestSecurityGroupXMLDeserializer(test.TestCase):
    """XML -> dict deserialization tests for security group create bodies."""

    def setUp(self):
        super(TestSecurityGroupXMLDeserializer, self).setUp()
        self.deserializer = security_groups.SecurityGroupXMLDeserializer()

    def test_create_request(self):
        """The name attribute and description element are both captured."""
        serial_request = """
<security_group name="test">
<description>test</description>
</security_group>"""
        request = self.deserializer.deserialize(serial_request)
        expected = {
            "security_group": {
                "name": "test",
                "description": "test",
            },
        }
        self.assertEquals(request['body'], expected)

    def test_create_no_description_request(self):
        """A missing description element simply leaves the key out."""
        serial_request = """
<security_group name="test">
</security_group>"""
        request = self.deserializer.deserialize(serial_request)
        expected = {
            "security_group": {
                "name": "test",
            },
        }
        self.assertEquals(request['body'], expected)

    def test_create_no_name_request(self):
        """A missing name attribute simply leaves the key out."""
        serial_request = """
<security_group>
<description>test</description>
</security_group>"""
        request = self.deserializer.deserialize(serial_request)
        expected = {
            "security_group": {
                "description": "test",
            },
        }
        self.assertEquals(request['body'], expected)

    def test_corrupt_xml(self):
        """Should throw a 400 error on corrupt xml."""
        self.assertRaises(
                exception.MalformedRequestBody,
                self.deserializer.deserialize,
                utils.killer_xml_body())
class TestSecurityGroupXMLSerializer(test.TestCase):
    """Checks the XML templates emit well-formed security group documents."""

    def setUp(self):
        super(TestSecurityGroupXMLSerializer, self).setUp()
        self.namespace = wsgi.XMLNS_V11
        self.rule_serializer = security_groups.SecurityGroupRuleTemplate()
        self.index_serializer = security_groups.SecurityGroupsTemplate()
        self.default_serializer = security_groups.SecurityGroupTemplate()

    def _tag(self, elem):
        """Assert the element is in the v1.1 namespace; return the bare tag."""
        tagname = elem.tag
        self.assertEqual(tagname[0], '{')
        tmp = tagname.partition('}')
        namespace = tmp[0][1:]
        self.assertEqual(namespace, self.namespace)
        return tmp[2]

    def _verify_security_group_rule(self, raw_rule, tree):
        """Check every child element of a serialized rule against its dict."""
        self.assertEqual(raw_rule['id'], tree.get('id'))
        self.assertEqual(raw_rule['parent_group_id'],
                         tree.get('parent_group_id'))
        seen = set()
        expected = set(['ip_protocol', 'from_port', 'to_port',
                        'group', 'group/name', 'group/tenant_id',
                        'ip_range', 'ip_range/cidr'])
        for child in tree:
            child_tag = self._tag(child)
            self.assertTrue(child_tag in raw_rule)
            seen.add(child_tag)
            # group and ip_range are nested one level deeper.
            if child_tag in ('group', 'ip_range'):
                for gr_child in child:
                    gr_child_tag = self._tag(gr_child)
                    self.assertTrue(gr_child_tag in raw_rule[child_tag])
                    seen.add('%s/%s' % (child_tag, gr_child_tag))
                    self.assertEqual(gr_child.text,
                                     raw_rule[child_tag][gr_child_tag])
            else:
                self.assertEqual(child.text, raw_rule[child_tag])
        # Simple fields and both nested pairs must all have appeared.
        self.assertEqual(seen, expected)

    def _verify_security_group(self, raw_group, tree):
        """Check a serialized security_group element and its nested rules."""
        rules = raw_group['rules']
        self.assertEqual('security_group', self._tag(tree))
        self.assertEqual(raw_group['id'], tree.get('id'))
        self.assertEqual(raw_group['tenant_id'], tree.get('tenant_id'))
        self.assertEqual(raw_group['name'], tree.get('name'))
        self.assertEqual(2, len(tree))
        for child in tree:
            child_tag = self._tag(child)
            if child_tag == 'rules':
                self.assertEqual(2, len(child))
                for idx, gr_child in enumerate(child):
                    self.assertEqual(self._tag(gr_child), 'rule')
                    self._verify_security_group_rule(rules[idx], gr_child)
            else:
                self.assertEqual('description', child_tag)
                self.assertEqual(raw_group['description'], child.text)

    def test_rule_serializer(self):
        """A single rule serializes to a <security_group_rule> root."""
        raw_rule = dict(
            id='123',
            parent_group_id='456',
            ip_protocol='tcp',
            from_port='789',
            to_port='987',
            group=dict(name='group', tenant_id='tenant'),
            ip_range=dict(cidr='10.0.0.0/8'))
        rule = dict(security_group_rule=raw_rule)
        text = self.rule_serializer.serialize(rule)
        tree = etree.fromstring(text)
        self.assertEqual('security_group_rule', self._tag(tree))
        self._verify_security_group_rule(raw_rule, tree)

    def test_group_serializer(self):
        """A group with two nested rules serializes completely."""
        rules = [dict(
            id='123',
            parent_group_id='456',
            ip_protocol='tcp',
            from_port='789',
            to_port='987',
            group=dict(name='group1', tenant_id='tenant1'),
            ip_range=dict(cidr='10.55.44.0/24')),
            dict(
                id='654',
                parent_group_id='321',
                ip_protocol='udp',
                from_port='234',
                to_port='567',
                group=dict(name='group2', tenant_id='tenant2'),
                ip_range=dict(cidr='10.44.55.0/24'))]
        raw_group = dict(
            id='890',
            description='description',
            name='name',
            tenant_id='tenant',
            rules=rules)
        sg_group = dict(security_group=raw_group)
        text = self.default_serializer.serialize(sg_group)
        tree = etree.fromstring(text)
        self._verify_security_group(raw_group, tree)

    def test_groups_serializer(self):
        """An index of two groups (two rules each) serializes completely."""
        rules = [dict(
            id='123',
            parent_group_id='1234',
            ip_protocol='tcp',
            from_port='12345',
            to_port='123456',
            group=dict(name='group1', tenant_id='tenant1'),
            ip_range=dict(cidr='10.123.0.0/24')),
            dict(
                id='234',
                parent_group_id='2345',
                ip_protocol='udp',
                from_port='23456',
                to_port='234567',
                group=dict(name='group2', tenant_id='tenant2'),
                ip_range=dict(cidr='10.234.0.0/24')),
            dict(
                id='345',
                parent_group_id='3456',
                ip_protocol='tcp',
                from_port='34567',
                to_port='345678',
                group=dict(name='group3', tenant_id='tenant3'),
                ip_range=dict(cidr='10.345.0.0/24')),
            dict(
                id='456',
                parent_group_id='4567',
                ip_protocol='udp',
                from_port='45678',
                to_port='456789',
                group=dict(name='group4', tenant_id='tenant4'),
                ip_range=dict(cidr='10.456.0.0/24'))]
        groups = [dict(
            id='567',
            description='description1',
            name='name1',
            tenant_id='tenant1',
            rules=rules[0:2]),
            dict(
                id='678',
                description='description2',
                name='name2',
                tenant_id='tenant2',
                rules=rules[2:4])]
        sg_groups = dict(security_groups=groups)
        text = self.index_serializer.serialize(sg_groups)
        tree = etree.fromstring(text)
        self.assertEqual('security_groups', self._tag(tree))
        self.assertEqual(len(groups), len(tree))
        for idx, child in enumerate(tree):
            self._verify_security_group(groups[idx], child)
# Canned instance UUIDs returned by the fake compute API stubs below.
UUID1 = '00000000-0000-0000-0000-000000000001'
UUID2 = '00000000-0000-0000-0000-000000000002'
UUID3 = '00000000-0000-0000-0000-000000000003'
def fake_compute_get_all(*args, **kwargs):
    """Fake compute_api.get_all: two stub instances with two groups each."""
    return [
        fakes.stub_instance(1, uuid=UUID1,
                            security_groups=[{'name': 'fake-0-0'},
                                             {'name': 'fake-0-1'}]),
        fakes.stub_instance(2, uuid=UUID2,
                            security_groups=[{'name': 'fake-1-0'},
                                             {'name': 'fake-1-1'}])
    ]
def fake_compute_get(*args, **kwargs):
    """Fake compute_api.get: one stub instance carrying two groups."""
    return fakes.stub_instance(1, uuid=UUID3,
                               security_groups=[{'name': 'fake-2-0'},
                                                {'name': 'fake-2-1'}])
def fake_compute_create(*args, **kwargs):
    """Fake compute_api.create: ([instances], reservation_id) shaped result."""
    return ([fake_compute_get()], '')
def fake_get_instances_security_groups_bindings(inst, context):
    """Fake network-API bindings lookup keyed by instance uuid."""
    return {UUID1: [{'name': 'fake-0-0'}, {'name': 'fake-0-1'}],
            UUID2: [{'name': 'fake-1-0'}, {'name': 'fake-1-1'}]}
class SecurityGroupsOutputTest(test.TestCase):
    """Server API responses should include each instance's security groups."""
    content_type = 'application/json'

    def setUp(self):
        super(SecurityGroupsOutputTest, self).setUp()
        self.controller = security_groups.SecurityGroupController()
        fakes.stub_out_nw_api(self.stubs)
        self.stubs.Set(compute.api.API, 'get', fake_compute_get)
        self.stubs.Set(compute.api.API, 'get_all', fake_compute_get_all)
        self.stubs.Set(compute.api.API, 'create', fake_compute_create)
        self.flags(
            osapi_compute_extension=[
                'nova.api.openstack.compute.contrib.select_extensions'],
            osapi_compute_ext_list=['Security_groups'])

    def _make_request(self, url, body=None):
        # POST when a body is supplied, plain GET otherwise.
        req = webob.Request.blank(url)
        if body:
            req.method = 'POST'
            req.body = self._encode_body(body)
        req.content_type = self.content_type
        req.headers['Accept'] = self.content_type
        res = req.get_response(fakes.wsgi_app(init_only=('servers',)))
        return res

    def _encode_body(self, body):
        return jsonutils.dumps(body)

    def _get_server(self, body):
        return jsonutils.loads(body).get('server')

    def _get_servers(self, body):
        return jsonutils.loads(body).get('servers')

    def _get_groups(self, server):
        return server.get('security_groups')

    def test_create(self):
        """Create responses carry the fake instance's 'fake-2-*' groups."""
        url = '/v2/fake/servers'
        image_uuid = 'c905cedb-7281-47e4-8a62-f26bc5fc4c77'
        server = dict(name='server_test', imageRef=image_uuid, flavorRef=2)
        res = self._make_request(url, {'server': server})
        self.assertEqual(res.status_int, 202)
        server = self._get_server(res.body)
        for i, group in enumerate(self._get_groups(server)):
            name = 'fake-2-%s' % i
            self.assertEqual(group.get('name'), name)

    def test_show(self):
        """Show responses carry the fake instance's 'fake-2-*' groups."""
        url = '/v2/fake/servers/%s' % UUID3
        res = self._make_request(url)
        self.assertEqual(res.status_int, 200)
        server = self._get_server(res.body)
        for i, group in enumerate(self._get_groups(server)):
            name = 'fake-2-%s' % i
            self.assertEqual(group.get('name'), name)

    def test_detail(self):
        """Detail responses carry per-instance 'fake-<i>-<j>' groups."""
        url = '/v2/fake/servers/detail'
        res = self._make_request(url)
        self.assertEqual(res.status_int, 200)
        for i, server in enumerate(self._get_servers(res.body)):
            for j, group in enumerate(self._get_groups(server)):
                name = 'fake-%s-%s' % (i, j)
                self.assertEqual(group.get('name'), name)

    def test_no_instance_passthrough_404(self):
        """A missing instance still surfaces as a 404 through the extension."""
        def fake_compute_get(*args, **kwargs):
            raise exception.InstanceNotFound(instance_id='fake')

        self.stubs.Set(compute.api.API, 'get', fake_compute_get)
        url = '/v2/fake/servers/70f6db34-de8d-4fbd-aafb-4065bdfa6115'
        res = self._make_request(url)
        self.assertEqual(res.status_int, 404)
class SecurityGroupsOutputXmlTest(SecurityGroupsOutputTest):
    """XML variant: reruns the JSON tests with XML encode/decode helpers."""
    content_type = 'application/xml'

    class MinimalCreateServerTemplate(xmlutil.TemplateBuilder):
        """Just enough of a server template to serialize a create request."""
        def construct(self):
            root = xmlutil.TemplateElement('server', selector='server')
            root.set('name')
            root.set('id')
            root.set('imageRef')
            root.set('flavorRef')
            return xmlutil.MasterTemplate(root, 1,
                                          nsmap={None: xmlutil.XMLNS_V11})

    def _encode_body(self, body):
        serializer = self.MinimalCreateServerTemplate()
        return serializer.serialize(body)

    def _get_server(self, body):
        return etree.XML(body)

    def _get_servers(self, body):
        return etree.XML(body).getchildren()

    def _get_groups(self, server):
        # NOTE(vish): we are adding security groups without an extension
        #             namespace so we don't break people using the existing
        #             functionality, but that means we need to use find with
        #             the existing server namespace.
        namespace = server.nsmap[None]
        return server.find('{%s}security_groups' % namespace).getchildren()
| Triv90/Nova | nova/tests/api/openstack/compute/contrib/test_security_groups.py | Python | apache-2.0 | 62,744 |
import copy
from hearthbreaker.tags.base import Status, Action, Aura, Condition, AuraUntil, CardQuery, \
CARD_SOURCE, Effect, Buff, BuffUntil, Amount, Picker, Selector
from hearthbreaker.tags.condition import IsSecret
from hearthbreaker.tags.selector import AllPicker, ConstantSelector
class Give(Action):
    """Attach one or more buffs to the targets of the tag's selector.

    ``buffs`` may be a bare Status, a single Buff, or a list of Buffs; the
    optional ``picker`` narrows which of the buffs are actually applied.
    """
    def __init__(self, buffs, picker=AllPicker()):
        # NOTE(review): the default AllPicker() is one shared instance across
        # all Give objects -- safe only if AllPicker is stateless; confirm.
        if isinstance(buffs, Status):
            self.buffs = [Buff(buffs)]
        elif isinstance(buffs, list):
            self.buffs = buffs
            # Only the first element is checked; assumes a homogeneous list.
            if isinstance(buffs[0], Aura):
                raise TypeError("Aura passed where buff was expected")
        elif isinstance(buffs, Aura):
            raise TypeError("Aura passed where buff was expected")
        else:
            self.buffs = [buffs]
        self.picker = picker

    def act(self, actor, target, other=None):
        buffs = self.picker.pick(actor, self.buffs)
        for buff in buffs:
            target.add_buff(buff.to_instance(target))

    def __to_json__(self):
        # The default AllPicker is omitted from the serialized form.
        if isinstance(self.picker, AllPicker):
            return {
                'name': 'give',
                'buffs': self.buffs
            }
        return {
            'name': 'give',
            'buffs': self.buffs,
            'picker': self.picker,
        }

    def __from_json__(self, buffs=None, effects=None, auras=None, picker=None):
        if effects:  # To allow for give to work with effects as well, we check at load time
            return GiveEffect.__new__(GiveEffect).__from_json__(effects)
        if auras:  # To allow for give to work with auras as well, we check at load time
            return GiveAura.__new__(GiveAura).__from_json__(auras)
        self.buffs = []
        for buff in buffs:
            # A serialized "until" key marks a temporary (BuffUntil) buff.
            if "until" in buff:
                self.buffs.append(BuffUntil.from_json(**buff))
            else:
                self.buffs.append(Buff.from_json(**buff))
        if not picker:
            self.picker = AllPicker()
        else:
            self.picker = Picker.from_json(**picker)
        return self
class GiveAura(Action):
    """Attach one or more auras to the target."""

    def __init__(self, auras):
        self.auras = auras if isinstance(auras, list) else [auras]

    def act(self, actor, target, other=None):
        for aura in self.auras:
            target.add_aura(aura)

    def __to_json__(self):
        return {
            'name': 'give',
            'auras': self.auras
        }

    def __from_json__(self, auras):
        # A serialized "until" key marks a temporary (AuraUntil) aura.
        self.auras = [AuraUntil.from_json(**aura) if "until" in aura
                      else Aura.from_json(**aura)
                      for aura in auras]
        return self
class GiveEffect(Action):
    """Attach one or more effects to the target."""

    def __init__(self, effects):
        if isinstance(effects, Effect):
            self.effects = [effects]
        else:
            self.effects = effects

    def act(self, actor, target, other=None):
        for effect in self.effects:
            # Freeze each nested action's selector to the concrete objects it
            # would choose right now (identified by their 'born' ids), so the
            # effect keeps pointing at the same objects after transfer.
            for tag in effect.tags:
                for action in tag.actions:
                    if hasattr(action, "selector"):
                        action.selector = ConstantSelector([obj.born for obj in
                                                            action.selector.choose_targets(actor, target)])
            target.add_effect(effect)

    def __to_json__(self):
        return {
            'name': 'give',
            'effects': self.effects
        }

    def __from_json__(self, effects):
        self.effects = [Effect.from_json(**effect) for effect in effects]
        return self
class Summon(Action):
    """Summon ``count`` minions from ``card`` for the target player."""

    def __init__(self, card, count=1):
        if isinstance(card, CardQuery):
            self.card = card
        else:
            self.card = CardQuery(card.ref_name)
        self.count = count

    def act(self, actor, target, other=None):
        card = self.card.get_card(target, target, actor)
        if card is None:
            return
        if actor.is_minion() and actor.player is target:
            # When a minion is summoned around another minion, they alternate between left and right,
            # starting on the right
            if actor.removed:
                c = 0
            else:
                c = 1
            for summon in range(self.count):
                index = actor.index + (c % 2)
                card.summon(target, target.game, index)
                if not actor.removed:
                    c += 1
        else:
            # Summons not anchored to a minion go to the board's right end.
            for summon in range(self.count):
                card.summon(target, target.game, len(target.minions))

    def __to_json__(self):
        if self.count > 1:
            return {
                'name': 'summon',
                'card': self.card,
                'count': self.count
            }
        return {
            'name': 'summon',
            'card': self.card
        }

    def __from_json__(self, card, count=1):
        self.card = CardQuery.from_json(**card)
        self.count = count
        return self
class ReplaceHeroWithMinion(Action):
    """Replace the target hero with a hero created from a minion card."""
    # Used only for Jaraxxus currently
    def __init__(self, card):
        if isinstance(card, CardQuery):
            self.card = card
        else:
            self.card = CardQuery(card.ref_name)

    def act(self, actor, target, other=None):
        card = self.card.get_card(target, target.player, actor)
        hero = card.create_hero(target.player)
        hero.card = card
        target.player.trigger("minion_played", actor)
        # Carry over buffs and current health from the old hero.
        hero.buffs = copy.deepcopy(actor.buffs)
        hero.health = actor.health
        target.replace(hero)
        # The inherited health may already be lethal for the new hero.
        if hero.health <= 0:
            hero.die(None)

    def __to_json__(self):
        return {
            'name': 'replace_hero_with_minion',
            'card': self.card
        }

    def __from_json__(self, card):
        self.card = CardQuery.from_json(**card)
        return self
class Transform(Action):
    """Transform the target (card, minion or hero) into the given card."""

    def __init__(self, card):
        if isinstance(card, CardQuery):
            self.card = card
        else:
            self.card = CardQuery(card.ref_name)

    def act(self, actor, target, other=None):
        card = self.card.get_card(target, target.player, actor)
        # Dispatch on what kind of object is being transformed.
        if target.is_card():
            target.replace(card)
        elif target.is_minion():
            minion = card.create_minion(target.player)
            minion.card = card
            target.replace(minion)
        elif target.is_hero():
            hero = card.create_hero(target.player)
            target.replace(hero)

    def __to_json__(self):
        return {
            'name': 'transform',
            'card': self.card
        }

    def __from_json__(self, card):
        self.card = CardQuery.from_json(**card)
        return self
class Kill(Action):
    """Kill the targeted character outright (no attacker is credited)."""

    def act(self, actor, target, other=None):
        target.die(None)

    def __to_json__(self):
        return {'name': 'kill'}
class Heal(Action, metaclass=Amount):
    """Heal the target; the Amount metaclass supplies get_amount()."""

    def __init__(self):
        super().__init__()

    def act(self, actor, target, other=None):
        # The raw amount is scaled by the acting player's effective heal power.
        target.heal(actor.player.effective_heal_power(self.get_amount(actor, target, other)), actor)

    def __to_json__(self):
        return {
            'name': 'heal',
        }
class SetHealth(Action, metaclass=Amount):
    """Set the target's health to the metaclass-supplied amount."""

    def __init__(self):
        super().__init__()

    def act(self, actor, target, other=None):
        target.set_health_to(self.get_amount(actor, target, other))

    def __to_json__(self):
        return {
            'name': 'set_health'
        }
class Damage(Action, metaclass=Amount):
    """Deal the metaclass-supplied amount of damage to the target."""

    def __init__(self):
        super().__init__()

    def act(self, actor, target, other=None):
        target.damage(self.get_amount(actor, target, other), actor)

    def __to_json__(self):
        return {
            'name': 'damage',
        }
class Draw(Action, metaclass=Amount):
    """Make the target player draw the metaclass-supplied number of cards."""

    def __init__(self):
        super().__init__()

    def act(self, actor, target, other=None):
        amount = self.get_amount(actor, target, other)
        for _ in range(amount):
            target.draw()

    def __to_json__(self):
        return {
            'name': 'draw',
        }
class Discard(Action, metaclass=Amount):
    """Discard cards chosen by a CardQuery from the acting player's hand.

    The number of cards discarded is supplied by the Amount metaclass.
    """

    def __init__(self, query=None):
        super().__init__()
        # Build a fresh CardQuery per instance: the previous default of
        # `query=CardQuery(source=CARD_SOURCE.MY_HAND)` was a single object
        # shared by every Discard created without an explicit query
        # (the classic mutable-default-argument pitfall).
        if query is None:
            query = CardQuery(source=CARD_SOURCE.MY_HAND)
        self.query = query

    def act(self, actor, target, other=None):
        for index in range(0, self.get_amount(actor, target, other)):
            card = self.query.get_card(target, actor.player, actor)
            if card:
                actor.player.trigger("discard", card)

    def __to_json__(self):
        return {
            'name': 'discard',
            'query': self.query,
        }

    def __from_json__(self, query):
        self.query = CardQuery.from_json(**query)
        return self
class IncreaseArmor(Action, metaclass=Amount):
    """Add the metaclass-supplied amount of armor to the target."""

    def __init__(self):
        super().__init__()

    def act(self, actor, target, other=None):
        target.armor += self.get_amount(actor, target, other)

    def __to_json__(self):
        return {
            'name': 'increase_armor'
        }
class ChangeTarget(Action):
    """Redirect the target's pending action to a selector-chosen character."""

    def __init__(self, selector):
        self.selector = selector

    def act(self, actor, target, other=None):
        # Take the first candidate the selector yields, if there is one.
        for candidate in self.selector.choose_targets(target, target.current_target):
            target.current_target = candidate
            break

    def __to_json__(self):
        return {
            'name': 'change_target',
            'selector': self.selector,
        }

    def __from_json__(self, selector):
        from hearthbreaker.tags.base import Selector
        self.selector = Selector.from_json(**selector)
        return self
class AddCard(Action):
    """Add copies of a card to the target player's hand or deck.

    Hand additions respect the 10-card hand limit; overflow is dropped.
    """

    def __init__(self, card, count=1, add_to_deck=False):
        if isinstance(card, CardQuery):
            self.card = card
        else:
            self.card = CardQuery(card.ref_name)
        self.add_to_deck = add_to_deck
        self.count = count

    def act(self, actor, target, other=None):
        if self.add_to_deck:
            for i in range(self.count):
                target.deck.put_back(self.card.get_card(target, target, actor))
        else:
            for i in range(self.count):
                if len(target.hand) < 10:
                    card = self.card.get_card(target, target, actor)
                    if card:
                        target.hand.append(copy.copy(card))
                        # NOTE(review): 'drawn' is set on the queried card,
                        # not on the copy placed in hand -- verify intended.
                        card.drawn = True

    def __to_json__(self):
        if self.add_to_deck:
            return {
                'name': 'add_card',
                'card': self.card,
                'count': self.count,
                'add_to_deck': self.add_to_deck,
            }
        return {
            'name': 'add_card',
            'card': self.card,
            'count': self.count
        }

    def __from_json__(self, card, count=1, add_to_deck=False):
        self.card = CardQuery.from_json(**card)
        self.count = count
        self.add_to_deck = add_to_deck
        return self
class ResurrectFriendly(Action):
    """Re-summon every friendly minion that died this turn (Kel'Thuzad)."""

    def __to_json__(self):
        return {
            'name': 'resurrect_friendly'
        }

    def act(self, actor, target, other=None):
        # Will be called once per Kel'Thuzad on the board
        # http://www.hearthhead.com/card=1794/kelthuzad#comments
        # Sorting by 'born' resurrects minions in the order they were played.
        for minion in sorted(target.dead_this_turn, key=lambda m: m.born):
            minion.card.summon(target, target.game, len(target.minions))
class Bounce(Action):
    """Return the target minion to its owner's hand."""

    def __init__(self):
        super().__init__()

    def act(self, actor, target, other=None):
        target.bounce()

    def __to_json__(self):
        return {'name': 'bounce'}
class SwapWithHand(Action):
    """Return the actor to hand, summoning a random minion card in its place.

    ``condition``, when given, restricts which minion cards in hand qualify.
    """

    def __init__(self, condition=None):
        self.condition = condition

    def act(self, actor, target, other=None):
        if actor.is_valid():
            if self.condition:
                chosen_card = target.game.random_draw(target.hand,
                                                      lambda c: self.condition.evaluate(c) and c.is_minion())
            else:
                chosen_card = target.game.random_draw(target.hand, lambda c: c.is_minion())
            if chosen_card:
                # Summon first, then remove from hand, then bounce the actor.
                chosen_card.summon(target, target.game, len(target.minions))
                chosen_card.unattach()
                target.hand.remove(chosen_card)
                actor.bounce()

    def __to_json__(self):
        if self.condition:
            return {
                'name': 'swap_with_hand',
                'condition': self.condition
            }
        return {
            'name': 'swap_with_hand'
        }

    def __from_json__(self, condition=None):
        if condition:
            self.condition = Condition.from_json(**condition)
        else:
            self.condition = None
        return self
class ApplySecret(Action):
    """Put a random secret from the given card source into play for the target."""

    def __init__(self, source):
        self.source = source
        self._query = CardQuery(conditions=[IsSecret()], source=source)

    def act(self, actor, target, other=None):
        secret = self._query.get_card(target, target, actor)
        if secret:
            target.secrets.append(secret)
            secret.player = target
            if target is target.game.other_player:
                secret.player = target
                # To allow for Mad Scientist not to be redeemed or duplicated as a result of its death,
                # but still allow other minions that die during the same cycle to be duplicated.
                # Based on testing for patch 2.1.0.7785
                if actor.dead:
                    target.bind_once("after_death", secret.activate)
                else:
                    secret.activate(target)

    def __to_json__(self):
        return {
            'name': 'apply_secret',
            'source': CARD_SOURCE.to_str(self.source)
        }

    def __from_json__(self, source):
        self.source = CARD_SOURCE.from_str(source)
        self._query = CardQuery(conditions=[IsSecret()], source=self.source)
        return self
class Equip(Action):
    """Create the given weapon card's weapon and equip it on the target."""

    def __init__(self, weapon):
        if isinstance(weapon, CardQuery):
            self.weapon = weapon
        else:
            self.weapon = CardQuery(weapon.ref_name)

    def act(self, actor, target, other=None):
        card = self.weapon.get_card(target, target, actor)
        weapon = card.create_weapon(target)
        weapon.card = card
        weapon.equip(target)

    def __to_json__(self):
        return {
            'name': 'equip',
            'weapon': self.weapon
        }

    def __from_json__(self, weapon):
        self.weapon = CardQuery.from_json(**weapon)
        return self
class Destroy(Action):
    """Destroy the target."""

    def act(self, actor, target, other=None):
        target.destroy()

    def __to_json__(self):
        return {'name': 'destroy'}
class Steal(Action):
    """Move the target minion to the actor's side of the board."""

    def act(self, actor, target, other=None):
        # Copy under the new owner, then remove the original from its board.
        new_minion = target.copy(actor.player)
        target.unattach()
        target.remove_from_board()
        new_minion.add_to_board(len(actor.player.minions))

    def __to_json__(self):
        return {
            'name': 'steal'
        }
class Duplicate(Action):
    """Place a copy of each selector-chosen minion next to the original.

    Copies are only made while the owner has board space (fewer than 7).
    """

    def __init__(self, selector):
        super().__init__()
        self.selector = selector

    def act(self, actor, target, other=None):
        for minion in self.selector.choose_targets(actor, target):
            if len(minion.player.minions) < 7:
                dup = minion.copy(minion.player)
                dup.add_to_board(minion.index + 1)

    def __to_json__(self):
        return {
            "name": "duplicate",
            "selector": self.selector,
        }

    def __from_json__(self, selector):
        self.selector = Selector.from_json(**selector)
        return self
class Replace(Action):
    """Replace the actor with a copy of the target owned by the actor's player."""

    def act(self, actor, target, other=None):
        clone = target.copy(actor.player)
        actor.replace(clone)

    def __to_json__(self):
        return {'name': 'replace'}
class Silence(Action):
    """Silence the target."""

    def act(self, actor, target, other=None):
        target.silence()

    def __to_json__(self):
        return {'name': 'silence'}
class DestroyManaCrystal(Action):
    """Remove one mana crystal from the target player.

    A filled crystal is removed in preference when any mana is available.
    """

    def act(self, actor, target, other=None):
        target.max_mana -= 1
        if target.mana > 0:
            target.mana -= 1

    def __to_json__(self):
        return {'name': 'destroy_mana_crystal'}
class GiveManaCrystal(Action):
    """Grant the target player mana crystals, optionally empty ones."""

    def __init__(self, count=1, empty=False):
        self.count = count
        self.empty = empty

    def act(self, actor, target, other=None):
        # Total crystals are capped at 10; filled crystals also grant
        # immediately usable mana.
        target.max_mana = min(target.max_mana + self.count, 10)
        if not self.empty:
            target.mana += self.count

    def __to_json__(self):
        return {
            'name': 'give_mana_crystal',
            'count': self.count,
            'empty': self.empty,
        }
class IncreaseDurability(Action):
    """Add one point of durability to the target weapon."""

    def act(self, actor, target, other=None):
        target.durability += 1

    def __to_json__(self):
        return {'name': 'increase_durability'}
class DecreaseDurability(Action):
    """Remove one durability from the target weapon, destroying it at zero."""

    def act(self, actor, target, other=None):
        target.durability -= 1
        if target.durability <= 0:
            target.destroy()

    def __to_json__(self):
        return {'name': 'decrease_durability'}
class IncreaseWeaponAttack(Action, metaclass=Amount):
    """Raise the target weapon's base attack by the metaclass-supplied amount."""

    def __init__(self):
        # NOTE(review): unlike the other Amount-based actions this skips
        # super().__init__() -- confirm the Amount metaclass tolerates that.
        pass

    def act(self, actor, target, other=None):
        target.base_attack += self.get_amount(actor, target, other)

    def __to_json__(self):
        return {
            'name': 'increase_weapon_attack'
        }
class RemoveDivineShields(Action):
    """Strip divine shield from the target: both the flag and its buffs."""

    def act(self, actor, target, other=None):
        from hearthbreaker.tags.status import DivineShield
        if target.divine_shield:
            # Remove the granting buffs as well as resetting the counter.
            target.buffs = [buff for buff in target.buffs if not isinstance(buff.status, DivineShield)]
            target.divine_shield = 0

    def __to_json__(self):
        return {
            "name": "remove_divine_shields"
        }
# class SwapStats(Action):
# def act(self, actor, target, other=None):
# temp_attack = target.calculate_attack()
# temp_health = target.health
# if temp_attack == 0:
# target.die(None)
# else:
# target.set_attack_to(temp_health)
# target.set_health_to(temp_attack)
#
# def __to_json__(self):
# return {
# 'name': 'swap_stats',
# }
class Remove(Action):
    """Detach the target and take it off the board."""

    def act(self, actor, target, other=None):
        target.unattach()
        target.remove_from_board()

    def __to_json__(self):
        return {
            'name': 'remove'
        }
class SwapStats(Action):
    """Swap one stat ('attack', 'health', 'damage' or 'mana') with another.

    When ``swap_with_owner`` is True the swap happens between the acting
    minion and the target; otherwise both stats are read from and written
    back to the target itself. Characters left at 0 health die immediately.
    """

    def __init__(self, source_stat, dest_stat, swap_with_owner):
        self.source_stat = source_stat
        self.dest_stat = dest_stat
        self.swap_with_owner = swap_with_owner

    def act(self, actor, target, other=None):
        if self.swap_with_owner:
            source = actor
        else:
            source = target
        temp = self.get_attribute(source, self.source_stat)
        self.set_attribute(source, self.source_stat, self.get_attribute(target, self.dest_stat))
        self.set_attribute(target, self.dest_stat, temp)
        if source.health == 0:
            source.die(None)
        if target is not source and target.health == 0:
            target.die(None)
        actor.player.game.check_delayed()

    @staticmethod
    def get_attribute(obj, attribute):
        """Read a stat; 'damage' is derived as max health minus current health."""
        if attribute == "damage":
            return obj.calculate_max_health() - obj.health
        elif attribute == 'mana':
            return obj.card.mana
        elif attribute == "attack":
            return obj.calculate_attack()
        elif attribute == "health":
            return obj.health

    @staticmethod
    def set_attribute(obj, attribute, value):
        """Write a stat, triggering enrage when damage becomes positive."""
        from hearthbreaker.tags.status import ManaChange, SetAttack
        if attribute == "damage":
            was_enraged = obj.enraged
            # BUG FIX: this previously called obj.clculate_max_health()
            # (typo), raising AttributeError on every 'damage' swap.
            obj.health = max(0, obj.calculate_max_health() - value)
            if value > 0:
                obj.enraged = True
                if not was_enraged:
                    obj._do_enrage()
        elif attribute == 'mana':
            obj.add_buff(Buff(ManaChange(value - obj.mana_cost())))
        elif attribute == "attack":
            obj.add_buff(Buff(SetAttack(value)))
        elif attribute == "health":
            obj.set_health_to(value)

    def __to_json__(self):
        return {
            'name': 'swap_stats',
            'source_stat': self.source_stat,
            'dest_stat': self.dest_stat,
            'swap_with_owner': self.swap_with_owner,
        }
| Ragowit/hearthbreaker | hearthbreaker/tags/action.py | Python | mit | 20,965 |
# -*- coding: utf-8 -*-
from ldtools import __version__, url, author_email
import datetime
import logging
import mimetypes
import os
import shutil
import socket
import rdflib
from ldtools.utils import urllib2
# set socket timeout. URLError will occur if time passed
# (applies process-wide to every socket created without an explicit timeout)
socket.setdefaulttimeout(5)

# User-Agent string sent with every outgoing HTTP request.
__useragent__ = 'ldtools-{version} ({url}, {author_email})'.format(
    version=__version__, url=url, author_email=author_email
)

# add mimetypes python does not know yet
# (all three RDF serialisation media types map onto the ".n3" extension)
mimetypes.add_type("text/n3", ".n3")
mimetypes.add_type("text/rdf+n3", ".n3")
mimetypes.add_type("text/turtle", ".n3")
class FiletypeMappingError(Exception):
    """Raised when no file type can be derived from an HTTP response."""
    pass
class ContentNegotiationError(Exception):
    """Raised when no rdflib parser plugin exists for a detected format."""
    pass
logger = logging.getLogger("ldtools")
def get_file_extension(filename):
    """Return the last dot-separated suffix of ``filename``, or "" if none."""
    parts = filename.split(".")
    if len(parts) > 1:
        return str(parts[-1])
    return ""
def assure_parser_plugin_exists(format):
    """Raise ContentNegotiationError unless rdflib can parse ``format``."""
    try:
        rdflib.graph.plugin.get(name=format, kind=rdflib.parser.Parser)
    except rdflib.plugin.PluginException:
        message = "No parser plugin found for %s" % format
        logger.error(message)
        raise ContentNegotiationError(message)
def guess_format_from_filename(file_name):
    """Return the extension of ``file_name``, or None if it has no dot."""
    suffix = file_name.split(".")[-1]
    # When there is no ".", split() returns the whole name unchanged.
    if suffix != file_name:
        return suffix
    return None
class AbstractBackend(object):
    """Abstract Backend. Overwrite in subclasses.

    Concrete backends implement GET(uri, ...) and PUT(data).
    """
    pass
class RestBackend(AbstractBackend):
    """Backend that fetches and stores RDF data over HTTP."""

    GET_headers = {
        'User-agent': __useragent__,
        'Accept': (
            'text/n3,'
            'text/rdf+n3,'
            # Bug fix: the ',' after "q=0.8" was missing, fusing this entry
            # with the next one into a single malformed media range.
            'application/rdf+xml;q=0.8,'
            'text/turtle;q=0.7'
            # 'application/xhtml+xml;q=0.5'
            # '*/*;q=0.1'
            # XHTML+RDFa
        )
    }
    PUT_headers = {"User-Agent": __useragent__}

    def GET(
        self,
        uri,
        extra_headers=None,
        httphandler=None,
    ):
        """Lookup URI and follow redirects. Return data.

        Also records self.content_type, self.format and self.lookup_time.
        Returns None on socket timeouts or unencodable URIs.
        """
        if not hasattr(self, "uri"):
            self.uri = uri
        else:
            if not self.uri == uri:
                raise Exception("You cannot pass different uris to the same "
                                "backend")

        if httphandler:
            if isinstance(httphandler, list):
                opener = urllib2.build_opener(*httphandler)
            else:
                opener = urllib2.build_opener(httphandler)
        else:
            opener = urllib2.build_opener()

        if extra_headers:
            self.GET_headers.update(extra_headers)

        reference_time = datetime.datetime.now()

        request = urllib2.Request(url=uri, headers=self.GET_headers)
        try:
            resultF = opener.open(request)
        except (UnicodeEncodeError, socket.timeout):
            return None

        now = datetime.datetime.now()
        self.lookup_time = now - reference_time

        if resultF.geturl() != uri:
            logger.info(
                "%s was redirected. Content url: %r" % (
                    uri, resultF.geturl()))

        if "Content-Length" in resultF.headers:
            logger.info(
                "Content-Length: %s" % resultF.headers["Content-Length"])

        if "Content-Type" not in resultF.headers:
            raise FiletypeMappingError("No Content-Type specified in response")

        self.content_type = resultF.headers['Content-Type'].split(";")[0]

        # Many servers don't do content negotiation: if one of the following
        # content_types are returned by server, assume the mapped type
        overwrite_content_type_map = {
            "text/plain": "application/rdf+xml",
        }
        if self.content_type in overwrite_content_type_map:
            self.content_type = overwrite_content_type_map[self.content_type]

        try:
            file_extension = mimetypes.guess_extension(self.content_type)
            assert file_extension
        except AssertionError:
            logger.error(
                "{} not supported by ldtools".format(
                    resultF.headers['Content-Type']))
            raise FiletypeMappingError(
                "No mimetype found for %s" % self.content_type)

        format = file_extension.strip(".")

        # assure format is correct
        if format in ["rdf", "ksh"]:
            format = "xml"

        # check if rdflib parser exists for format
        assure_parser_plugin_exists(format)

        self.format = format

        return resultF.read()

    def PUT(self, data):
        """PUT ``data`` back to the URI previously fetched with GET."""
        assert self.uri, "GET has to be called before PUT possible"

        self.PUT_headers.update({
            "Content-Type": self.content_type,
            "Content-Length": str(len(data)),
        })

        opener = urllib2.build_opener(urllib2.HTTPHandler)
        request = urllib2.Request(self.uri,
                                  data=data,
                                  headers=self.PUT_headers)
        request.get_method = lambda: 'PUT'
        response = opener.open(request)
        return response
class FileBackend(AbstractBackend):
    """Manages one xml file as a data basis"""

    def __init__(self, filename,
                 format=None,
                 store_old_versions=True):
        assert os.path.exists(filename)

        format = format if format else guess_format_from_filename(filename)
        assure_parser_plugin_exists(format)
        self.format = format

        self.filename = filename
        self.store_old_versions = store_old_versions

    def GET(self,
            uri,
            extra_headers=None,
            httphandler=None,
            ):
        """Return the file's content; ``uri`` is only recorded for PUT."""
        assert not extra_headers, "Not Implemented"
        assert not httphandler, "Not Implemented"

        if not hasattr(self, "uri"):
            self.uri = uri
        else:
            if not self.uri == uri:
                raise Exception("You cannot pass different uris to the same "
                                "backend")

        with open(self.filename, "r") as f:
            data = f.read()
        return data

    def PUT(self, data):
        """Write ``data`` to the file, keeping a timestamped backup copy."""
        assert self.uri, "GET has to be called before PUT"

        if os.path.exists(self.filename) and self.store_old_versions:
            # File already exists. Make backup copy
            now = datetime.datetime.strftime(datetime.datetime.utcnow(),
                                             '%Y%m%d-%H%M%S')

            file_extension = get_file_extension(self.filename)
            if file_extension:
                # Bug fix: str.strip(file_extension) removed *characters*
                # from both ends, not the suffix, mangling names such as
                # "max.xml". Cut off the real ".<ext>" suffix instead.
                base = self.filename[:-(len(file_extension) + 1)]
                old_version = u"%s.%s.%s" % (base, now, file_extension)
            else:
                old_version = u"%s_%s" % (self.filename, now)

            self.old_version = old_version
            shutil.copy(self.filename, old_version)

        with open(self.filename, "w") as f:
            f.write(data)

    def revert_to_old_version(self):
        """Restore the most recent backup created by PUT and delete it."""
        assert self.store_old_versions, (
            "This FileBackend is not configured to store old versions")
        if hasattr(self, "old_version"):
            logger.info("Reverting to version before last saved version")
            shutil.copy(self.old_version, self.filename)
            os.remove(self.old_version)
            delattr(self, "old_version")
class MemoryBackend(AbstractBackend):
    """Backend that keeps its data in memory only."""

    def __init__(self, data=None, format="xml"):
        # Falsy data (None, "") is normalised to the empty string.
        if data:
            self.data = data
        else:
            self.data = ""
        assure_parser_plugin_exists(format)
        self.format = format

    def GET(self,
            uri,
            extra_headers=None,
            httphandler=None,
            ):
        """Return the stored data; ``uri`` is ignored."""
        assert not extra_headers, "Not Implemented"
        assert not httphandler, "Not Implemented"
        return self.data

    def PUT(self, data):
        """Replace the stored data."""
        self.data = data
| dmr/Ldtools | ldtools/backends.py | Python | bsd-2-clause | 7,763 |
import fnmatch
import argparse
import sys, traceback
import os
import sqlite3
import xml.etree.ElementTree as ET
import csv
# The workbench database metric definitions use official CHaMP tier 1 and tier 2 types.
# However the CRITFC metric result files use their customized tier types. See critfc_2018.py
# for definitions.
#
# Use this list to specify substitutions. The first item in each tuple is the part of an
# CHaMP XPath and the second tuple item is the CRITFC alternate to use. Any metrics that
# contain these CHaMP XPath parts will be substituted. Use an empty CRITFC string to
# skip a metric altogether.
# Currently empty: no CRITFC substitutions are applied. Uncomment or add
# (champ_xpath_part, critfc_xpath_part) tuples to activate substitutions;
# a None second element skips the metric entirely.
metricExceptions = [
    # ('ChannelUnitsTier1/SlowPool/', 'ChannelUnitsTier1/SlowWater/'),
    # ('ChannelUnitsTier1/FastTurbulent/', 'ChannelUnitsTier2/FT/'),
    # ('ChannelUnitsTier1/FastNonTurbulent/', 'ChannelUnitsTier2/FNT/'),
    # ('ChannelUnitsTier2/FNTGlide/', 'ChannelUnitsTier2/FNT/'),
    # ('ChannelUnitsTier1/FastNonTurbulentGlide/', 'ChannelUnitsTier2/FNT/'),
    # ('ChannelUnitsTier1/SmallSideChannel/', None)
]

# These are the schemas from the Workbench that we want to process
# Note that they are the topo metric QA schemas, not the final schemas
# Maps schema display name -> Workbench SchemaID (used by getMetricDefs).
schemas = {'Visit': 1, 'ChannelUnit': 2, 'Tier1': 3, 'Tier2': 4}
def BatchRun(workbench, outputDir):
    """Export one CSV of topo metric values per schema from a CHaMP Workbench.

    workbench -- path to the SQLite Workbench database
    outputDir -- folder scanned recursively for topo_metrics.xml files;
                 also receives one output CSV per schema
    (Python 2 code: uses print statements and dict.iteritems().)
    """
    # Open the CHaMP Workbench
    dbCon = sqlite3.connect(workbench)
    dbCurs = dbCon.cursor()

    # Load a list of topo metric result XML file tuples.
    resultXMLFiles = getMetricResultFilePaths(outputDir)

    # Loop over all schemas
    for schemaName, schemaID in schemas.iteritems():
        print 'Processing schema {0}...'.format(schemaName)

        # Get all the active metrics for this schema
        metricDefs = getMetricDefs(dbCurs, schemaID)

        # Create an ordered list of CSV column headings
        csvHeaders = ['Visit', 'Site']
        csvHeaders.extend(metric[1] for metric in metricDefs)

        # This will hold all the metric instances that will be written to CSV
        toCSV = []

        # Loop over all topo metric result XML files
        for resultFile in resultXMLFiles:
            # Get the root node for this schema. List will contain
            # single item for visit level. Multiple items for other dimensions.
            tree = ET.parse(resultFile[2])
            nodRoot = tree.findall(metricDefs[0][2])
            if len(nodRoot) == 1:
                # Visit level metrics
                instance = getVisitMetrics(metricDefs, resultFile[0], resultFile[1], nodRoot[0])
                if instance:
                    toCSV.append(instance)
            else:
                # Channel unit, tier 1, tier 2
                instanceList = getRepeatingMetrics(metricDefs, resultFile[0], resultFile[1], nodRoot)
                if instanceList:
                    toCSV.extend(instanceList)

        # Write a single CSV for this schema that contains all metric instances
        outputCSV = os.path.join(outputDir, '2019_yankee_fork_topo_{0}_metrics.csv'.format(schemaName))
        with open(outputCSV, 'wb') as f:  # Just use 'w' mode in 3.x
            w = csv.DictWriter(f, csvHeaders)
            w.writeheader()
            for instance in toCSV:
                w.writerow(instance)

    print "Process Completed successfully."
def getVisitMetrics(metrics, visit, site, nodRoot):
    """Build one metric instance dict from the XML beneath ``nodRoot``.

    Used for visit level metrics and also for individual instances of
    higher dimensional metrics such as tier 1, tier 2 and channel unit.
    """
    # Every instance carries the visit and site identifiers.
    instance = {'Visit': visit, 'Site': site}

    for metricID, displayName, rootPath, relPath in metrics:
        # Translate to the CRITFC XPath; a falsy result means "skip".
        xpath = getCRITFCXPath(relPath)
        if not xpath:
            continue

        nodMetric = nodRoot.find(xpath)
        if nodMetric is None:
            print('Missing metric ' + displayName + ': ' + xpath)
        else:
            instance[displayName] = nodMetric.text

    return instance
def getRepeatingMetrics(metrics, visit, site, nodRoots):
    """Collect one metric instance per root node (channel unit / tier type)."""
    collected = [getVisitMetrics(metrics, visit, site, nodRoot)
                 for nodRoot in nodRoots]
    # Drop any empty/falsy instances.
    return [inst for inst in collected if inst]
def getCRITFCXPath(xpath):
    """Map a CHaMP XPath to its CRITFC equivalent via metricExceptions.

    Returns None when CRITFC does not use the metric at all, otherwise the
    (possibly substituted) XPath.
    """
    for champPart, critfcPart in metricExceptions:
        if champPart not in xpath:
            continue
        # An empty/None CRITFC part means the metric is skipped entirely.
        if not critfcPart:
            return None
        return str(xpath).replace(champPart, critfcPart)
    return xpath
def getMetricResultFilePaths(parentFolder):
    """Recursively find topo_metrics.xml files under ``parentFolder``.

    Returns a list of (visit, site, path) tuples: the visit number comes
    from the second "_"-separated part of the containing folder's name and
    the site from that folder's parent folder name.
    """
    found = []
    for root, dirnames, filenames in os.walk(parentFolder):
        for filename in fnmatch.filter(filenames, 'topo_metrics.xml'):
            folderParts = os.path.basename(root).split('_')
            visit = int(folderParts[1])
            site = os.path.basename(os.path.dirname(root))
            found.append((visit, site, os.path.join(root, filename)))
    return found
def getMetricDefs(dbCurs, schema):
    """Load all active metrics for the specified schema that have a valid XPath.

    Returns a list of (MetricID, DisplayNameShort, rootPath, relativePath)
    tuples, where rootPath is the RootXPath rewritten as a relative "./..."
    path and relativePath is the metric XPath relative to that root.
    """
    # Improvement: bind the schema id as a SQL parameter instead of
    # interpolating it into the statement with str.format().
    dbCurs.execute(
        'SELECT M.MetricID, DisplayNameShort, RootXPath, XPath'
        ' FROM Metric_definitions M'
        ' INNER JOIN Metric_Schema_Definitions MS ON M.MetricID = MS.MetricID'
        ' INNER JOIN Metric_Schemas S ON MS.SchemaID = S.SchemaID'
        ' WHERE (IsActive <> 0)'
        ' AND (XPath IS NOT NULL) AND (S.SchemaID = ?)', (schema,))

    metrics = []
    for row in dbCurs.fetchall():
        # "/Root/Level/..." -> "./Level/..."; XPath relative to that root.
        rootPath = './' + '/'.join(row[2].split('/')[2:])
        relativePath = row[3].replace(row[2], '')[1:]
        metrics.append((row[0], row[1], rootPath, relativePath))
    return metrics
def main():
    """Command line entry point: parse arguments and run the batch export."""
    argParser = argparse.ArgumentParser()
    argParser.add_argument('workbench', help='Path to CHaMP Workbench.', type=str)
    argParser.add_argument('outputDir', help='Top level folder containing topo metric XML files to process.', type=str)
    options = argParser.parse_args()

    try:
        BatchRun(options.workbench, options.outputDir)
    except Exception as e:
        # Print the full traceback to stdout instead of crashing silently.
        traceback.print_exc(file=sys.stdout)
if __name__ == "__main__":
main()
| SouthForkResearch/CHaMP_Metrics | scripts/critfc_scrape.py | Python | gpl-3.0 | 7,086 |
#!/usr/bin/python
# -*- coding: UTF-8 -*-
from scrapy.spiders import Spider
from scrapy.selector import Selector
from webcrawl.items import Supplements
class SupplementsSpider(Spider):
    """Spider that scrapes the supplement category links from iherb.cn."""

    name = "supplements"
    allowed_domains = ["iherb.cn"]
    start_urls = (
        'http://www.iherb.cn/Supplements',
    )

    def parse(self, response):
        """Yield one Supplements item per category anchor on the page."""
        selector = Selector(response)
        anchors = selector.xpath('//div[@id="divCategories"]/div[@class="content"]/ul[@class="categories"]/li/a')

        collected = []
        for anchor in anchors:
            item = Supplements()
            item['name'] = anchor.xpath('text()').extract()
            item['url'] = anchor.xpath('@href').extract()
            collected.append(item)
        return collected
| ideaalloc/web-crawl | webcrawl/spiders/supplements.py | Python | mit | 766 |
# -*- encoding: utf-8 -*-
################################################################################
# #
# Copyright (C) 2013-Today Carlos Eduardo Vercelino - CLVsol #
# #
# This program is free software: you can redistribute it and/or modify #
# it under the terms of the GNU Affero General Public License as published by #
# the Free Software Foundation, either version 3 of the License, or #
# (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU Affero General Public License for more details. #
# #
# You should have received a copy of the GNU Affero General Public License #
# along with this program. If not, see <http://www.gnu.org/licenses/>. #
################################################################################
from openerp.osv import orm, fields
class clv_medicament(orm.Model):
    """Extends the existing clv_medicament model with a clv_cmed reference."""
    _inherit = 'clv_medicament'

    _columns = {
        # Link each medicament to at most one clv_cmed record.
        'cmed_id': fields.many2one('clv_cmed', string='CMED'),
    }
class clv_cmed(orm.Model):
    """Extends the existing clv_cmed model with the reverse medicament list."""
    _inherit = 'clv_cmed'

    _columns = {
        # Inverse side of clv_medicament.cmed_id: all medicaments pointing
        # at this clv_cmed record.
        'medicament_ids': fields.one2many('clv_medicament',
                                          'cmed_id',
                                          'Medicaments'),
    }
| CLVsol/odoo_addons_l10n_br | l10n_br_clv_cmed/clv_medicament/clv_medicament.py | Python | agpl-3.0 | 1,842 |
# -*- coding: utf-8 -*-
from distutils.core import setup
from setuptools import find_packages
LONGDOC = """
A very simple python library, used to format datetime with *** time ago statement.
Install
pip install timeago
Usage
import timeago, datetime
d = datetime.datetime.now() + datetime.timedelta(seconds = 60 * 3.4)
# locale
print (timeago.format(d, locale='zh_CN')) # will print 3分钟后
"""
setup(name = 'timeago',
version = '1.0.7',
description = 'A very simple python library, used to format datetime with `*** time ago` statement. eg: "3 hours ago".',
long_description = LONGDOC,
author = 'hustcc',
author_email = 'i@hust.cc',
url = 'https://github.com/hustcc/timeago',
license = 'MIT',
install_requires = [],
classifiers = [
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Natural Language :: Chinese (Simplified)',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Utilities'
],
keywords = 'timeago, seconds ago, minutes ago, hours ago, just now',
packages = find_packages('src'),
package_dir = {'':'src'},
)
| avrong/timeago | setup.py | Python | mit | 1,607 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import rdkit
from rdkit.Chem import AllChem
from rdkit import DataStructs
__license__ = "X11"
METADATA = {
"id": "method_rdkit_fcfp2_1024_tanimoto",
"representation": "fcfp2_1024",
"similarity": "tanimoto"
}
def _compute_fingerprint(molecule):
    """Return the feature-based Morgan bit-vector fingerprint
    (radius 1, 1024 bits) of an RDKit molecule."""
    return AllChem.GetMorganFingerprintAsBitVect(
        molecule, 1, nBits=1024, useFeatures=True)
def _compute_similarity(left, right):
    """Return the Tanimoto similarity of two RDKit bit-vector fingerprints."""
    return DataStructs.TanimotoSimilarity(left, right)
def create_model(train_ligands, train_decoys):
    """Build the model: name + fingerprint for every training ligand.

    The decoys are not used by this similarity-based method.
    """
    model = [{
        "name": molecule.GetProp("_Name"),
        "fingerprint": _compute_fingerprint(molecule)
    } for molecule in train_ligands]
    model_information = {}
    return model, model_information
def compute_score(model, molecule):
    """Score ``molecule`` as its best Tanimoto similarity to any model ligand.

    Returns the maximum similarity together with the name of the closest
    training molecule (the first one, on ties).
    """
    fingerprint = _compute_fingerprint(molecule)
    scores = [_compute_similarity(fingerprint, entry["fingerprint"])
              for entry in model]
    best = max(scores)
    # list.index returns the first occurrence, i.e. the first best match.
    closest = model[scores.index(best)]
    return {
        "value": best,
        "info": {
            "closest": closest["name"]
        }
    }
def compute_similarity(left, right):
    """Return the Tanimoto similarity of two molecules' fingerprints."""
    fp_left = _compute_fingerprint(left)
    fp_right = _compute_fingerprint(right)
    return _compute_similarity(fp_left, fp_right)
| skodapetr/lbvs-environment | methods/fcfp/fcfp2_1024_tanimoto.py | Python | mit | 1,423 |
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Converting code to AST.
Adapted from Tangent.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import textwrap
import gast
from tensorflow.python.util import tf_inspect
def parse_entity(entity):
  """Returns the AST of given entity.

  Args:
    entity: A Python function, method, class or lambda.

  Returns:
    A (node, source) tuple where node is the parsed gast AST and source is
    the text that was actually parsed (for lambdas this may be a
    stripped-down fragment of the original source line).
  """
  source = tf_inspect.getsource(entity)

  def fail(comment):
    raise ValueError(
        'Failed to parse source code of {}, which Python reported as:\n{}\n'
        '{}'.format(entity, source, comment))

  # Comments and multiline strings can appear at arbitrary indentation levels,
  # causing textwrap.dedent to not correctly dedent source code.
  # TODO(b/115884650): Automatic handling of comments/multiline strings.
  source = textwrap.dedent(source)

  try:
    return parse_str(source), source

  except IndentationError:
    # The text below lists the causes of this error known to us. There may
    # be more.
    fail('This may be caused by multiline strings or comments not indented at'
         'the same level as the code.')

  except SyntaxError as e:
    # Only attempt recovery for lambdas; anything else is a genuine error.
    if not tf_inspect.isfunction(entity) or entity.__name__ != '<lambda>':
      raise

    # Certain entities, like lambdas, only hold the raw code lines which defined
    # them, which may include surrounding tokens and may be syntactically
    # invalid out of context. For example:
    #
    #     l = (
    #         lambda x: x,)[0]
    #
    # will have the dedented source "lambda x: x,)[0]"
    # Here we make an attempt to stip away the garbage by looking at the
    # information in the syntax error.
    lines = source.split('\n')
    lineno, offset = e.lineno, e.offset  # 1-based

    # Give up if there's nothing we can chip away.
    if len(lines) == lineno and len(lines[-1]) == offset:
      fail('If this is a lambda function, the error may be avoided by creating'
           ' the lambda in a standalone statement.')

    # Drop all lines following the error location
    # TODO(mdan): What's with the pylint errors?
    lines = lines[:lineno]  # pylint:disable=invalid-slice-index
    # Drop all characters following the error location
    lines[-1] = lines[-1][:offset - 1]  # pylint:disable=invalid-slice-index
    new_source = '\n'.join(lines)

    try:
      return parse_str(new_source), new_source
    except SyntaxError as e:
      fail('If this is a lambda function, the error may be avoided by creating'
           ' the lambda in a standalone statement. Tried to strip down the'
           ' source to:\n{}\nBut that did not work.'.format(new_source))
def parse_str(src):
  """Returns the AST of given piece of code, as a gast module node."""
  # TODO(mdan): This should exclude the module things are autowrapped in.
  return gast.parse(src)
def parse_expression(src):
  """Returns the AST of given identifier.

  Args:
    src: A piece of code that represents a single Python expression
  Returns:
    A gast.AST object.
  Raises:
    ValueError: if src does not consist of a single Expression.
  """
  node = parse_str(src)
  assert isinstance(node, gast.Module)
  # Bug fix: the original condition used 'and', so a single non-expression
  # statement (e.g. an assignment) slipped past the check and failed later
  # with an AttributeError instead of the documented ValueError.
  if len(node.body) != 1 or not isinstance(node.body[0], gast.Expr):
    raise ValueError(
        'Expected a single expression, found instead %s' % node.body)
  return node.body[0].value
| hehongliang/tensorflow | tensorflow/python/autograph/pyct/parser.py | Python | apache-2.0 | 3,936 |
"""
AskHandlers related to order relations: positive, negative, etc.
"""
from sympy.assumptions import Q, ask
from sympy.assumptions.handlers import CommonHandler
class AskNegativeHandler(CommonHandler):
    """
    This is called by ask() when key='negative'

    Test that an expression is less (strict) than zero.

    Examples:

    >>> from sympy import ask, Q, pi
    >>> ask(Q.negative(pi+1)) # this calls AskNegativeHandler.Add
    False
    >>> ask(Q.negative(pi**2)) # this calls AskNegativeHandler.Pow
    False

    """

    @staticmethod
    def _number(expr, assumptions):
        # Negative requires a purely real value evaluating strictly < 0.
        if not expr.as_real_imag()[1]:
            return expr.evalf() < 0
        else:
            return False

    @staticmethod
    def Basic(expr, assumptions):
        if expr.is_number:
            return AskNegativeHandler._number(expr, assumptions)

    @staticmethod
    def Add(expr, assumptions):
        """
        Positive + Positive -> Positive,
        Negative + Negative -> Negative
        """
        if expr.is_number:
            return AskNegativeHandler._number(expr, assumptions)
        for arg in expr.args:
            if not ask(Q.negative(arg), assumptions):
                break
        else:
            # if all argument's are negative
            return True

    @staticmethod
    def Mul(expr, assumptions):
        if expr.is_number:
            return AskNegativeHandler._number(expr, assumptions)
        result = None
        for arg in expr.args:
            if result is None:
                result = False
            if ask(Q.negative(arg), assumptions):
                # Every negative factor flips the sign of the product.
                result = not result
            elif ask(Q.positive(arg), assumptions):
                pass
            else:
                # Unknown sign of a factor -> unknown sign of the product.
                return
        return result

    @staticmethod
    def Pow(expr, assumptions):
        """
        Real ** Even -> NonNegative
        Real ** Odd -> same_as_base
        NonNegative ** Positive -> NonNegative
        """
        if expr.is_number:
            return AskNegativeHandler._number(expr, assumptions)
        if ask(Q.real(expr.base), assumptions):
            if ask(Q.positive(expr.base), assumptions):
                return False
            if ask(Q.even(expr.exp), assumptions):
                return False
            if ask(Q.odd(expr.exp), assumptions):
                return ask(Q.negative(expr.base), assumptions)

    # The imaginary unit and absolute values are never negative.
    ImaginaryUnit, Abs = [staticmethod(CommonHandler.AlwaysFalse)]*2

    @staticmethod
    def exp(expr, assumptions):
        # exp of a real argument is positive, hence never negative.
        if ask(Q.real(expr.args[0]), assumptions):
            return False
class AskNonZeroHandler(CommonHandler):
    """
    Handler for key 'zero'

    Test that an expression is not identically zero
    """

    @staticmethod
    def Basic(expr, assumptions):
        if expr.is_number:
            # if there are no symbols just evalf
            return expr.evalf() != 0

    @staticmethod
    def Add(expr, assumptions):
        # A sum of all-positive or all-negative terms cannot cancel to zero.
        if all(ask(Q.positive(x), assumptions) for x in expr.args) \
                or all(ask(Q.negative(x), assumptions) for x in expr.args):
            return True

    @staticmethod
    def Mul(expr, assumptions):
        for arg in expr.args:
            result = ask(Q.nonzero(arg), assumptions)
            if result:
                continue
            # A zero (False) or unknown (None) factor decides the answer.
            return result
        return True

    @staticmethod
    def Pow(expr, assumptions):
        # A power is nonzero exactly when its base is nonzero.
        return ask(Q.nonzero(expr.base), assumptions)

    NaN = staticmethod(CommonHandler.AlwaysTrue)

    @staticmethod
    def Abs(expr, assumptions):
        # |x| is nonzero exactly when x is nonzero.
        return ask(Q.nonzero(expr.args[0]), assumptions)
class AskPositiveHandler(CommonHandler):
    """
    Handler for key 'positive'

    Test that an expression is greater (strict) than zero
    """

    @staticmethod
    def _number(expr, assumptions):
        # Positive requires a purely real value evaluating strictly > 0.
        if not expr.as_real_imag()[1]:
            return expr.evalf() > 0
        else:
            return False

    @staticmethod
    def Basic(expr, assumptions):
        if expr.is_number:
            return AskPositiveHandler._number(expr, assumptions)

    @staticmethod
    def Mul(expr, assumptions):
        if expr.is_number:
            return AskPositiveHandler._number(expr, assumptions)
        result = True
        for arg in expr.args:
            if ask(Q.positive(arg), assumptions):
                continue
            elif ask(Q.negative(arg), assumptions):
                # Every negative factor flips the sign of the product.
                result = result ^ True
            else:
                # Unknown sign of a factor -> unknown sign of the product.
                return
        return result

    @staticmethod
    def Add(expr, assumptions):
        if expr.is_number:
            return AskPositiveHandler._number(expr, assumptions)

        for arg in expr.args:
            if ask(Q.positive(arg), assumptions) is not True:
                break
        else:
            # if all argument's are positive
            return True

    @staticmethod
    def Pow(expr, assumptions):
        if expr.is_number:
            return expr.evalf() > 0
        if ask(Q.positive(expr.base), assumptions):
            return True
        if ask(Q.negative(expr.base), assumptions):
            # negative base to an even power is positive
            if ask(Q.even(expr.exp), assumptions):
                return True
        if ask(Q.even(expr.exp), assumptions):
            return False

    @staticmethod
    def exp(expr, assumptions):
        # exp of a real argument is strictly positive.
        if ask(Q.real(expr.args[0]), assumptions):
            return True

    ImaginaryUnit = staticmethod(CommonHandler.AlwaysFalse)

    @staticmethod
    def Abs(expr, assumptions):
        # |x| > 0 exactly when x is nonzero.
        return ask(Q.nonzero(expr), assumptions)

    @staticmethod
    def Trace(expr, assumptions):
        if ask(Q.positive_definite(expr.arg), assumptions):
            return True

    @staticmethod
    def Determinant(expr, assumptions):
        if ask(Q.positive_definite(expr.arg), assumptions):
            return True

    @staticmethod
    def MatrixElement(expr, assumptions):
        # Diagonal entries of a positive definite matrix are positive.
        if (expr.i == expr.j
                and ask(Q.positive_definite(expr.parent), assumptions)):
            return True
| lidavidm/mathics-heroku | venv/lib/python2.7/site-packages/sympy/assumptions/handlers/order.py | Python | gpl-3.0 | 5,970 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.