repo_name
stringlengths 5
100
| ref
stringlengths 12
67
| path
stringlengths 4
244
| copies
stringlengths 1
8
| content
stringlengths 0
1.05M
⌀ |
|---|---|---|---|---|
okfish/django-oscar
|
refs/heads/master
|
src/oscar/apps/catalogue/views.py
|
15
|
import warnings
from django.contrib import messages
from django.core.paginator import InvalidPage
from django.http import Http404, HttpResponsePermanentRedirect
from django.shortcuts import get_object_or_404, redirect
from django.utils.http import urlquote
from django.utils.translation import ugettext_lazy as _
from django.views.generic import DetailView, TemplateView
from oscar.apps.catalogue.signals import product_viewed
from oscar.core.loading import get_class, get_model
# Resolve models, forms and the search-handler factory through Oscar's
# dynamic class-loading layer (get_class/get_model) instead of direct
# imports, so overriding apps are picked up.
Product = get_model('catalogue', 'product')
Category = get_model('catalogue', 'category')
ProductAlert = get_model('customer', 'ProductAlert')
ProductAlertForm = get_class('customer.forms', 'ProductAlertForm')
get_product_search_handler_class = get_class(
    'catalogue.search_handlers', 'get_product_search_handler_class')
class ProductDetailView(DetailView):
    """
    Display a single product, redirecting to its canonical URL first
    if the request arrived on a stale or non-canonical path.
    """
    context_object_name = 'product'
    model = Product
    view_signal = product_viewed
    template_folder = "catalogue"

    # Whether to redirect to the URL with the right path
    enforce_paths = True

    # Whether to redirect child products to their parent's URL
    enforce_parent = True

    def get(self, request, **kwargs):
        """
        Ensures that the correct URL is used before rendering a response
        """
        self.object = product = self.get_object()

        # Renamed from ``redirect`` so the ``django.shortcuts.redirect``
        # helper imported at module level is no longer shadowed.
        potential_redirect = self.redirect_if_necessary(request.path, product)
        if potential_redirect is not None:
            return potential_redirect

        response = super(ProductDetailView, self).get(request, **kwargs)
        self.send_signal(request, response, product)
        return response

    def get_object(self, queryset=None):
        # Check if self.object is already set to prevent unnecessary DB calls
        if hasattr(self, 'object'):
            return self.object
        return super(ProductDetailView, self).get_object(queryset)

    def redirect_if_necessary(self, current_path, product):
        """
        Return a permanent redirect when this request should be served at a
        different URL (a child product, or a changed slug), else None.
        """
        if self.enforce_parent and product.is_child:
            # Child products are presented on their parent's page.
            return HttpResponsePermanentRedirect(
                product.parent.get_absolute_url())

        if self.enforce_paths:
            expected_path = product.get_absolute_url()
            if expected_path != urlquote(current_path):
                return HttpResponsePermanentRedirect(expected_path)

    def get_context_data(self, **kwargs):
        ctx = super(ProductDetailView, self).get_context_data(**kwargs)
        ctx['alert_form'] = self.get_alert_form()
        ctx['has_active_alert'] = self.get_alert_status()
        return ctx

    def get_alert_status(self):
        # Check if this user already has an active alert for this product
        has_alert = False
        if self.request.user.is_authenticated():
            alerts = ProductAlert.objects.filter(
                product=self.object, user=self.request.user,
                status=ProductAlert.ACTIVE)
            has_alert = alerts.exists()
        return has_alert

    def get_alert_form(self):
        return ProductAlertForm(
            user=self.request.user, product=self.object)

    def send_signal(self, request, response, product):
        # Fires ``product_viewed`` so listeners (e.g. history tracking)
        # can react to this page view.
        self.view_signal.send(
            sender=self, product=product, user=request.user, request=request,
            response=response)

    def get_template_names(self):
        """
        Return a list of possible templates.

        If an overriding class sets a template name, we use that. Otherwise,
        we try 2 options before defaulting to catalogue/detail.html:

            1). detail-for-upc-<upc>.html
            2). detail-for-class-<classname>.html

        This allows alternative templates to be provided for a per-product
        and a per-item-class basis.
        """
        if self.template_name:
            return [self.template_name]
        return [
            '%s/detail-for-upc-%s.html' % (
                self.template_folder, self.object.upc),
            '%s/detail-for-class-%s.html' % (
                self.template_folder, self.object.get_product_class().slug),
            '%s/detail.html' % (self.template_folder)]
class CatalogueView(TemplateView):
    """
    Browse all products in the catalogue
    """
    context_object_name = "products"
    template_name = 'catalogue/browse.html'

    def get(self, request, *args, **kwargs):
        # Building the search handler eagerly lets a bad ?page= value
        # surface here as InvalidPage, which we turn into a redirect.
        try:
            self.search_handler = self.get_search_handler(
                self.request.GET, request.get_full_path(), [])
        except InvalidPage:
            # Redirect to page one.
            messages.error(request, _('The given page number was invalid.'))
            return redirect('catalogue:index')
        return super(CatalogueView, self).get(request, *args, **kwargs)

    def get_search_handler(self, *args, **kwargs):
        handler_class = get_product_search_handler_class()
        return handler_class(*args, **kwargs)

    def get_context_data(self, **kwargs):
        search_context = self.search_handler.get_search_context_data(
            self.context_object_name)
        ctx = {'summary': _("All products")}
        ctx.update(search_context)
        return ctx
class ProductCategoryView(TemplateView):
    """
    Browse products in a given category
    """
    context_object_name = "products"
    template_name = 'catalogue/category.html'
    # Whether to redirect when the category slug in the URL is stale.
    enforce_paths = True

    def get(self, request, *args, **kwargs):
        # Fetch the category; return 404 or redirect as needed
        self.category = self.get_category()
        potential_redirect = self.redirect_if_necessary(
            request.path, self.category)
        if potential_redirect is not None:
            return potential_redirect

        try:
            self.search_handler = self.get_search_handler(
                request.GET, request.get_full_path(), self.get_categories())
        except InvalidPage:
            messages.error(request, _('The given page number was invalid.'))
            return redirect(self.category.get_absolute_url())

        return super(ProductCategoryView, self).get(request, *args, **kwargs)

    def get_category(self):
        """
        Return the category for this request, raising Http404 when it
        cannot be resolved.
        """
        if 'pk' in self.kwargs:
            # Usual way to reach a category page. We just look at the primary
            # key, which is easy on the database. If the slug changed, get()
            # will redirect appropriately.

            # WARNING: Category.get_absolute_url needs to look up its parents
            # to compute the URL. As this is slightly expensive, Oscar's
            # default implementation caches the method. That's pretty safe
            # as ProductCategoryView does the lookup by primary key, which
            # will work even if the cache is stale. But if you override this
            # logic, consider if that still holds true.
            return get_object_or_404(Category, pk=self.kwargs['pk'])
        elif 'category_slug' in self.kwargs:
            # DEPRECATED. TODO: Remove in Oscar 1.2.

            # For SEO and legacy reasons, we allow chopping off the primary
            # key from the URL. In that case, we have the target category slug
            # and it's ancestors' slugs concatenated together.
            # To save on queries, we pick the last slug, look up all matching
            # categories and only then compare.
            # Note that currently we enforce uniqueness of slugs, but as that
            # might feasibly change soon, it makes sense to be forgiving here.
            concatenated_slugs = self.kwargs['category_slug']
            slugs = concatenated_slugs.split(Category._slug_separator)
            try:
                last_slug = slugs[-1]
            except IndexError:
                raise Http404
            else:
                for category in Category.objects.filter(slug=last_slug):
                    if category.full_slug == concatenated_slugs:
                        # Fixed the missing "and" in this warning message.
                        message = (
                            "Accessing categories without a primary key"
                            " is deprecated and will be removed in Oscar 1.2.")
                        warnings.warn(message, DeprecationWarning)
                        return category

        raise Http404

    def redirect_if_necessary(self, current_path, category):
        """
        Return a permanent redirect to the canonical category URL when the
        requested path is stale, else None.
        """
        if self.enforce_paths:
            # Categories are fetched by primary key to allow slug changes.
            # If the slug has changed, issue a redirect.
            expected_path = category.get_absolute_url()
            if expected_path != urlquote(current_path):
                return HttpResponsePermanentRedirect(expected_path)

    def get_search_handler(self, *args, **kwargs):
        return get_product_search_handler_class()(*args, **kwargs)

    def get_categories(self):
        """
        Return a list of the current category and its descendants
        """
        # Docstring fixed: get_descendants_and_self() returns descendants,
        # not ancestors.
        return self.category.get_descendants_and_self()

    def get_context_data(self, **kwargs):
        context = super(ProductCategoryView, self).get_context_data(**kwargs)
        context['category'] = self.category

        search_context = self.search_handler.get_search_context_data(
            self.context_object_name)
        context.update(search_context)
        return context
|
skearnes/pylearn2
|
refs/heads/master
|
pylearn2/sandbox/cuda_convnet/tests/test_rop_pool.py
|
5
|
import copy
import numpy
import theano
from theano.tensor import grad
from theano.tests import unittest_tools
import theano.sandbox.cuda as tcn
import warnings
# These are GPU tests; skip the whole module when CUDA is unavailable.
if not tcn.cuda_available:
    from nose.plugins.skip import SkipTest
    raise SkipTest('Optional package cuda disabled.')

from pylearn2.sandbox.cuda_convnet.pool import MaxPool, MaxPoolGrad
from pylearn2.models.mlp import max_pool_c01b as gold_max_pool_c01b

# Build a GPU-including and a GPU-excluding compilation mode. When the
# configured mode is FAST_COMPILE, base both on FAST_RUN instead.
if theano.config.mode == 'FAST_COMPILE':
    mode_with_gpu = theano.compile.mode.get_mode('FAST_RUN').including('gpu')
    mode_without_gpu = theano.compile.mode.get_mode(
        'FAST_RUN').excluding('gpu')
else:
    mode_with_gpu = theano.compile.mode.get_default_mode().including('gpu')
    mode_without_gpu = theano.compile.mode.get_default_mode().excluding('gpu')

#The CPU tests already compare C/Py, so we only check C/GPU
mode_with_gpu = copy.copy(mode_with_gpu)
mode_without_gpu = copy.copy(mode_without_gpu)
mode_with_gpu.check_py_code = False
mode_without_gpu.check_py_code = False
def my_rand(*shape):
    """Return a float32 theano-compatible array of uniform random values."""
    values = numpy.random.rand(*shape)
    return theano._asarray(values, dtype='float32')
def test_pool():
    """Smoke-test the R-op of cuda-convnet MaxPool on the GPU over a
    sweep of pool sizes and strides (unfinished -- see warning below)."""
    #(batch, channel, x, y)
    shps = [(1, 1, 2, 2),
            ]
    # Reorder to the (channel, x, y, batch) layout cuda-convnet expects.
    shps = [(channel, x, y, batch) for (batch, channel, x, y) in shps]
    #numpy.random.RandomState(unittest_tools.fetch_seed()).shuffle(shps)
    warnings.warn("TODO: Razvan needs to finish this")
    for shp in shps:
        for ds in range(1, min(4, shp[2] + 1)):
            for start in [0]:
                for stride in range(1, min(shp[2], ds, 4) + 1):
                    #print 'test_pool shape=%s, ds=%d, stride=%d start=%d' % (
                    #    str(shp), ds, stride, start)
                    va = my_rand(*shp)
                    tva = va.flatten()
                    #print 'va', tva, tva.max(), tva.argmax()
                    vb = my_rand(*shp)
                    tvb = vb.flatten()
                    #print 'vb', tvb, tvb.max(), tvb.argmax(),\
                    #    tvb[tva.argmax()]
                    a = tcn.shared_constructor(va, 'a')
                    b = tcn.shared_constructor(vb, 'b')
                    op = MaxPool(ds=ds, stride=stride)
                    v = op(a)
                    # Directional derivative of the pooling output w.r.t.
                    # ``a`` along direction ``b``.
                    rval = theano.tensor.Rop(v, a, b)
                    f = theano.function([], rval,
                                        mode=mode_with_gpu)
                    print f.maker.fgraph.toposort()
                    #ssert any([isinstance(node.op, MaxPool)
                    #    for node in f.maker.fgraph.toposort()])
                    out = numpy.asarray(f())
                    #print out
                    #print
                    #print
|
vipins/ccccms
|
refs/heads/master
|
env/Lib/encodings/mac_croatian.py
|
593
|
""" Python Character Mapping Codec mac_croatian generated from 'MAPPINGS/VENDORS/APPLE/CROATIAN.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
    """Stateless codec converting between bytes and text using the
    module's charmap tables."""

    def encode(self,input,errors='strict'):
        # Delegates to the C-level charmap codec with this module's
        # ``encoding_table`` (built from ``decoding_table`` below).
        return codecs.charmap_encode(input,errors,encoding_table)

    def decode(self,input,errors='strict'):
        # Inverse direction: bytes -> text via ``decoding_table``.
        return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
    def encode(self, input, final=False):
        # Charmap encoding is stateless, so each chunk is encoded
        # independently; ``self.errors`` comes from the base class.
        return codecs.charmap_encode(input,self.errors,encoding_table)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
    def decode(self, input, final=False):
        # Stateless charmap decoding of each chunk.
        return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
    # Stream API wrapper; encode() is inherited from Codec above.
    pass
class StreamReader(Codec,codecs.StreamReader):
    # Stream API wrapper; decode() is inherited from Codec above.
    pass
### encodings module API
def getregentry():
    """Return the CodecInfo the ``encodings`` package uses to register
    this codec under the name 'mac-croatian'."""
    return codecs.CodecInfo(
        name='mac-croatian',
        encode=Codec().encode,
        decode=Codec().decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
### Decoding Table
decoding_table = (
u'\x00' # 0x00 -> CONTROL CHARACTER
u'\x01' # 0x01 -> CONTROL CHARACTER
u'\x02' # 0x02 -> CONTROL CHARACTER
u'\x03' # 0x03 -> CONTROL CHARACTER
u'\x04' # 0x04 -> CONTROL CHARACTER
u'\x05' # 0x05 -> CONTROL CHARACTER
u'\x06' # 0x06 -> CONTROL CHARACTER
u'\x07' # 0x07 -> CONTROL CHARACTER
u'\x08' # 0x08 -> CONTROL CHARACTER
u'\t' # 0x09 -> CONTROL CHARACTER
u'\n' # 0x0A -> CONTROL CHARACTER
u'\x0b' # 0x0B -> CONTROL CHARACTER
u'\x0c' # 0x0C -> CONTROL CHARACTER
u'\r' # 0x0D -> CONTROL CHARACTER
u'\x0e' # 0x0E -> CONTROL CHARACTER
u'\x0f' # 0x0F -> CONTROL CHARACTER
u'\x10' # 0x10 -> CONTROL CHARACTER
u'\x11' # 0x11 -> CONTROL CHARACTER
u'\x12' # 0x12 -> CONTROL CHARACTER
u'\x13' # 0x13 -> CONTROL CHARACTER
u'\x14' # 0x14 -> CONTROL CHARACTER
u'\x15' # 0x15 -> CONTROL CHARACTER
u'\x16' # 0x16 -> CONTROL CHARACTER
u'\x17' # 0x17 -> CONTROL CHARACTER
u'\x18' # 0x18 -> CONTROL CHARACTER
u'\x19' # 0x19 -> CONTROL CHARACTER
u'\x1a' # 0x1A -> CONTROL CHARACTER
u'\x1b' # 0x1B -> CONTROL CHARACTER
u'\x1c' # 0x1C -> CONTROL CHARACTER
u'\x1d' # 0x1D -> CONTROL CHARACTER
u'\x1e' # 0x1E -> CONTROL CHARACTER
u'\x1f' # 0x1F -> CONTROL CHARACTER
u' ' # 0x20 -> SPACE
u'!' # 0x21 -> EXCLAMATION MARK
u'"' # 0x22 -> QUOTATION MARK
u'#' # 0x23 -> NUMBER SIGN
u'$' # 0x24 -> DOLLAR SIGN
u'%' # 0x25 -> PERCENT SIGN
u'&' # 0x26 -> AMPERSAND
u"'" # 0x27 -> APOSTROPHE
u'(' # 0x28 -> LEFT PARENTHESIS
u')' # 0x29 -> RIGHT PARENTHESIS
u'*' # 0x2A -> ASTERISK
u'+' # 0x2B -> PLUS SIGN
u',' # 0x2C -> COMMA
u'-' # 0x2D -> HYPHEN-MINUS
u'.' # 0x2E -> FULL STOP
u'/' # 0x2F -> SOLIDUS
u'0' # 0x30 -> DIGIT ZERO
u'1' # 0x31 -> DIGIT ONE
u'2' # 0x32 -> DIGIT TWO
u'3' # 0x33 -> DIGIT THREE
u'4' # 0x34 -> DIGIT FOUR
u'5' # 0x35 -> DIGIT FIVE
u'6' # 0x36 -> DIGIT SIX
u'7' # 0x37 -> DIGIT SEVEN
u'8' # 0x38 -> DIGIT EIGHT
u'9' # 0x39 -> DIGIT NINE
u':' # 0x3A -> COLON
u';' # 0x3B -> SEMICOLON
u'<' # 0x3C -> LESS-THAN SIGN
u'=' # 0x3D -> EQUALS SIGN
u'>' # 0x3E -> GREATER-THAN SIGN
u'?' # 0x3F -> QUESTION MARK
u'@' # 0x40 -> COMMERCIAL AT
u'A' # 0x41 -> LATIN CAPITAL LETTER A
u'B' # 0x42 -> LATIN CAPITAL LETTER B
u'C' # 0x43 -> LATIN CAPITAL LETTER C
u'D' # 0x44 -> LATIN CAPITAL LETTER D
u'E' # 0x45 -> LATIN CAPITAL LETTER E
u'F' # 0x46 -> LATIN CAPITAL LETTER F
u'G' # 0x47 -> LATIN CAPITAL LETTER G
u'H' # 0x48 -> LATIN CAPITAL LETTER H
u'I' # 0x49 -> LATIN CAPITAL LETTER I
u'J' # 0x4A -> LATIN CAPITAL LETTER J
u'K' # 0x4B -> LATIN CAPITAL LETTER K
u'L' # 0x4C -> LATIN CAPITAL LETTER L
u'M' # 0x4D -> LATIN CAPITAL LETTER M
u'N' # 0x4E -> LATIN CAPITAL LETTER N
u'O' # 0x4F -> LATIN CAPITAL LETTER O
u'P' # 0x50 -> LATIN CAPITAL LETTER P
u'Q' # 0x51 -> LATIN CAPITAL LETTER Q
u'R' # 0x52 -> LATIN CAPITAL LETTER R
u'S' # 0x53 -> LATIN CAPITAL LETTER S
u'T' # 0x54 -> LATIN CAPITAL LETTER T
u'U' # 0x55 -> LATIN CAPITAL LETTER U
u'V' # 0x56 -> LATIN CAPITAL LETTER V
u'W' # 0x57 -> LATIN CAPITAL LETTER W
u'X' # 0x58 -> LATIN CAPITAL LETTER X
u'Y' # 0x59 -> LATIN CAPITAL LETTER Y
u'Z' # 0x5A -> LATIN CAPITAL LETTER Z
u'[' # 0x5B -> LEFT SQUARE BRACKET
u'\\' # 0x5C -> REVERSE SOLIDUS
u']' # 0x5D -> RIGHT SQUARE BRACKET
u'^' # 0x5E -> CIRCUMFLEX ACCENT
u'_' # 0x5F -> LOW LINE
u'`' # 0x60 -> GRAVE ACCENT
u'a' # 0x61 -> LATIN SMALL LETTER A
u'b' # 0x62 -> LATIN SMALL LETTER B
u'c' # 0x63 -> LATIN SMALL LETTER C
u'd' # 0x64 -> LATIN SMALL LETTER D
u'e' # 0x65 -> LATIN SMALL LETTER E
u'f' # 0x66 -> LATIN SMALL LETTER F
u'g' # 0x67 -> LATIN SMALL LETTER G
u'h' # 0x68 -> LATIN SMALL LETTER H
u'i' # 0x69 -> LATIN SMALL LETTER I
u'j' # 0x6A -> LATIN SMALL LETTER J
u'k' # 0x6B -> LATIN SMALL LETTER K
u'l' # 0x6C -> LATIN SMALL LETTER L
u'm' # 0x6D -> LATIN SMALL LETTER M
u'n' # 0x6E -> LATIN SMALL LETTER N
u'o' # 0x6F -> LATIN SMALL LETTER O
u'p' # 0x70 -> LATIN SMALL LETTER P
u'q' # 0x71 -> LATIN SMALL LETTER Q
u'r' # 0x72 -> LATIN SMALL LETTER R
u's' # 0x73 -> LATIN SMALL LETTER S
u't' # 0x74 -> LATIN SMALL LETTER T
u'u' # 0x75 -> LATIN SMALL LETTER U
u'v' # 0x76 -> LATIN SMALL LETTER V
u'w' # 0x77 -> LATIN SMALL LETTER W
u'x' # 0x78 -> LATIN SMALL LETTER X
u'y' # 0x79 -> LATIN SMALL LETTER Y
u'z' # 0x7A -> LATIN SMALL LETTER Z
u'{' # 0x7B -> LEFT CURLY BRACKET
u'|' # 0x7C -> VERTICAL LINE
u'}' # 0x7D -> RIGHT CURLY BRACKET
u'~' # 0x7E -> TILDE
u'\x7f' # 0x7F -> CONTROL CHARACTER
u'\xc4' # 0x80 -> LATIN CAPITAL LETTER A WITH DIAERESIS
u'\xc5' # 0x81 -> LATIN CAPITAL LETTER A WITH RING ABOVE
u'\xc7' # 0x82 -> LATIN CAPITAL LETTER C WITH CEDILLA
u'\xc9' # 0x83 -> LATIN CAPITAL LETTER E WITH ACUTE
u'\xd1' # 0x84 -> LATIN CAPITAL LETTER N WITH TILDE
u'\xd6' # 0x85 -> LATIN CAPITAL LETTER O WITH DIAERESIS
u'\xdc' # 0x86 -> LATIN CAPITAL LETTER U WITH DIAERESIS
u'\xe1' # 0x87 -> LATIN SMALL LETTER A WITH ACUTE
u'\xe0' # 0x88 -> LATIN SMALL LETTER A WITH GRAVE
u'\xe2' # 0x89 -> LATIN SMALL LETTER A WITH CIRCUMFLEX
u'\xe4' # 0x8A -> LATIN SMALL LETTER A WITH DIAERESIS
u'\xe3' # 0x8B -> LATIN SMALL LETTER A WITH TILDE
u'\xe5' # 0x8C -> LATIN SMALL LETTER A WITH RING ABOVE
u'\xe7' # 0x8D -> LATIN SMALL LETTER C WITH CEDILLA
u'\xe9' # 0x8E -> LATIN SMALL LETTER E WITH ACUTE
u'\xe8' # 0x8F -> LATIN SMALL LETTER E WITH GRAVE
u'\xea' # 0x90 -> LATIN SMALL LETTER E WITH CIRCUMFLEX
u'\xeb' # 0x91 -> LATIN SMALL LETTER E WITH DIAERESIS
u'\xed' # 0x92 -> LATIN SMALL LETTER I WITH ACUTE
u'\xec' # 0x93 -> LATIN SMALL LETTER I WITH GRAVE
u'\xee' # 0x94 -> LATIN SMALL LETTER I WITH CIRCUMFLEX
u'\xef' # 0x95 -> LATIN SMALL LETTER I WITH DIAERESIS
u'\xf1' # 0x96 -> LATIN SMALL LETTER N WITH TILDE
u'\xf3' # 0x97 -> LATIN SMALL LETTER O WITH ACUTE
u'\xf2' # 0x98 -> LATIN SMALL LETTER O WITH GRAVE
u'\xf4' # 0x99 -> LATIN SMALL LETTER O WITH CIRCUMFLEX
u'\xf6' # 0x9A -> LATIN SMALL LETTER O WITH DIAERESIS
u'\xf5' # 0x9B -> LATIN SMALL LETTER O WITH TILDE
u'\xfa' # 0x9C -> LATIN SMALL LETTER U WITH ACUTE
u'\xf9' # 0x9D -> LATIN SMALL LETTER U WITH GRAVE
u'\xfb' # 0x9E -> LATIN SMALL LETTER U WITH CIRCUMFLEX
u'\xfc' # 0x9F -> LATIN SMALL LETTER U WITH DIAERESIS
u'\u2020' # 0xA0 -> DAGGER
u'\xb0' # 0xA1 -> DEGREE SIGN
u'\xa2' # 0xA2 -> CENT SIGN
u'\xa3' # 0xA3 -> POUND SIGN
u'\xa7' # 0xA4 -> SECTION SIGN
u'\u2022' # 0xA5 -> BULLET
u'\xb6' # 0xA6 -> PILCROW SIGN
u'\xdf' # 0xA7 -> LATIN SMALL LETTER SHARP S
u'\xae' # 0xA8 -> REGISTERED SIGN
u'\u0160' # 0xA9 -> LATIN CAPITAL LETTER S WITH CARON
u'\u2122' # 0xAA -> TRADE MARK SIGN
u'\xb4' # 0xAB -> ACUTE ACCENT
u'\xa8' # 0xAC -> DIAERESIS
u'\u2260' # 0xAD -> NOT EQUAL TO
u'\u017d' # 0xAE -> LATIN CAPITAL LETTER Z WITH CARON
u'\xd8' # 0xAF -> LATIN CAPITAL LETTER O WITH STROKE
u'\u221e' # 0xB0 -> INFINITY
u'\xb1' # 0xB1 -> PLUS-MINUS SIGN
u'\u2264' # 0xB2 -> LESS-THAN OR EQUAL TO
u'\u2265' # 0xB3 -> GREATER-THAN OR EQUAL TO
u'\u2206' # 0xB4 -> INCREMENT
u'\xb5' # 0xB5 -> MICRO SIGN
u'\u2202' # 0xB6 -> PARTIAL DIFFERENTIAL
u'\u2211' # 0xB7 -> N-ARY SUMMATION
u'\u220f' # 0xB8 -> N-ARY PRODUCT
u'\u0161' # 0xB9 -> LATIN SMALL LETTER S WITH CARON
u'\u222b' # 0xBA -> INTEGRAL
u'\xaa' # 0xBB -> FEMININE ORDINAL INDICATOR
u'\xba' # 0xBC -> MASCULINE ORDINAL INDICATOR
u'\u03a9' # 0xBD -> GREEK CAPITAL LETTER OMEGA
u'\u017e' # 0xBE -> LATIN SMALL LETTER Z WITH CARON
u'\xf8' # 0xBF -> LATIN SMALL LETTER O WITH STROKE
u'\xbf' # 0xC0 -> INVERTED QUESTION MARK
u'\xa1' # 0xC1 -> INVERTED EXCLAMATION MARK
u'\xac' # 0xC2 -> NOT SIGN
u'\u221a' # 0xC3 -> SQUARE ROOT
u'\u0192' # 0xC4 -> LATIN SMALL LETTER F WITH HOOK
u'\u2248' # 0xC5 -> ALMOST EQUAL TO
u'\u0106' # 0xC6 -> LATIN CAPITAL LETTER C WITH ACUTE
u'\xab' # 0xC7 -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\u010c' # 0xC8 -> LATIN CAPITAL LETTER C WITH CARON
u'\u2026' # 0xC9 -> HORIZONTAL ELLIPSIS
u'\xa0' # 0xCA -> NO-BREAK SPACE
u'\xc0' # 0xCB -> LATIN CAPITAL LETTER A WITH GRAVE
u'\xc3' # 0xCC -> LATIN CAPITAL LETTER A WITH TILDE
u'\xd5' # 0xCD -> LATIN CAPITAL LETTER O WITH TILDE
u'\u0152' # 0xCE -> LATIN CAPITAL LIGATURE OE
u'\u0153' # 0xCF -> LATIN SMALL LIGATURE OE
u'\u0110' # 0xD0 -> LATIN CAPITAL LETTER D WITH STROKE
u'\u2014' # 0xD1 -> EM DASH
u'\u201c' # 0xD2 -> LEFT DOUBLE QUOTATION MARK
u'\u201d' # 0xD3 -> RIGHT DOUBLE QUOTATION MARK
u'\u2018' # 0xD4 -> LEFT SINGLE QUOTATION MARK
u'\u2019' # 0xD5 -> RIGHT SINGLE QUOTATION MARK
u'\xf7' # 0xD6 -> DIVISION SIGN
u'\u25ca' # 0xD7 -> LOZENGE
u'\uf8ff' # 0xD8 -> Apple logo
u'\xa9' # 0xD9 -> COPYRIGHT SIGN
u'\u2044' # 0xDA -> FRACTION SLASH
u'\u20ac' # 0xDB -> EURO SIGN
u'\u2039' # 0xDC -> SINGLE LEFT-POINTING ANGLE QUOTATION MARK
u'\u203a' # 0xDD -> SINGLE RIGHT-POINTING ANGLE QUOTATION MARK
u'\xc6' # 0xDE -> LATIN CAPITAL LETTER AE
u'\xbb' # 0xDF -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\u2013' # 0xE0 -> EN DASH
u'\xb7' # 0xE1 -> MIDDLE DOT
u'\u201a' # 0xE2 -> SINGLE LOW-9 QUOTATION MARK
u'\u201e' # 0xE3 -> DOUBLE LOW-9 QUOTATION MARK
u'\u2030' # 0xE4 -> PER MILLE SIGN
u'\xc2' # 0xE5 -> LATIN CAPITAL LETTER A WITH CIRCUMFLEX
u'\u0107' # 0xE6 -> LATIN SMALL LETTER C WITH ACUTE
u'\xc1' # 0xE7 -> LATIN CAPITAL LETTER A WITH ACUTE
u'\u010d' # 0xE8 -> LATIN SMALL LETTER C WITH CARON
u'\xc8' # 0xE9 -> LATIN CAPITAL LETTER E WITH GRAVE
u'\xcd' # 0xEA -> LATIN CAPITAL LETTER I WITH ACUTE
u'\xce' # 0xEB -> LATIN CAPITAL LETTER I WITH CIRCUMFLEX
u'\xcf' # 0xEC -> LATIN CAPITAL LETTER I WITH DIAERESIS
u'\xcc' # 0xED -> LATIN CAPITAL LETTER I WITH GRAVE
u'\xd3' # 0xEE -> LATIN CAPITAL LETTER O WITH ACUTE
u'\xd4' # 0xEF -> LATIN CAPITAL LETTER O WITH CIRCUMFLEX
u'\u0111' # 0xF0 -> LATIN SMALL LETTER D WITH STROKE
u'\xd2' # 0xF1 -> LATIN CAPITAL LETTER O WITH GRAVE
u'\xda' # 0xF2 -> LATIN CAPITAL LETTER U WITH ACUTE
u'\xdb' # 0xF3 -> LATIN CAPITAL LETTER U WITH CIRCUMFLEX
u'\xd9' # 0xF4 -> LATIN CAPITAL LETTER U WITH GRAVE
u'\u0131' # 0xF5 -> LATIN SMALL LETTER DOTLESS I
u'\u02c6' # 0xF6 -> MODIFIER LETTER CIRCUMFLEX ACCENT
u'\u02dc' # 0xF7 -> SMALL TILDE
u'\xaf' # 0xF8 -> MACRON
u'\u03c0' # 0xF9 -> GREEK SMALL LETTER PI
u'\xcb' # 0xFA -> LATIN CAPITAL LETTER E WITH DIAERESIS
u'\u02da' # 0xFB -> RING ABOVE
u'\xb8' # 0xFC -> CEDILLA
u'\xca' # 0xFD -> LATIN CAPITAL LETTER E WITH CIRCUMFLEX
u'\xe6' # 0xFE -> LATIN SMALL LETTER AE
u'\u02c7' # 0xFF -> CARON
)
### Encoding table
# Inverse mapping (text -> byte), built once at import time from the
# decoding table above.
encoding_table=codecs.charmap_build(decoding_table)
|
airbnb/s3cmd
|
refs/heads/master
|
S3/FileDict.py
|
18
|
# -*- coding: utf-8 -*-
## Amazon S3 manager
## Author: Michal Ludvig <michal@logix.cz>
## http://www.logix.cz/michal
## License: GPL Version 2
## Copyright: TGRMN Software and contributors
import logging
from SortedDict import SortedDict
import Utils
import Config
# MD5 of the empty string; zero-length files are excluded from md5 indexing.
zero_length_md5 = "d41d8cd98f00b204e9800998ecf8427e"
cfg = Config.Config()
class FileDict(SortedDict):
    """A SortedDict of file entries that additionally indexes files by MD5
    checksum and by hardlink group (dev, inode), so checksums can be reused
    instead of re-reading file contents.
    """

    def __init__(self, mapping=None, ignore_case=True, **kwargs):
        # ``mapping=None`` instead of ``mapping={}``: a mutable default
        # would be shared between every FileDict constructed without an
        # explicit mapping.
        if mapping is None:
            mapping = {}
        SortedDict.__init__(self, mapping=mapping, ignore_case=ignore_case, **kwargs)
        self.hardlinks = dict()  # { dev: { inode : {'md5':, 'relative_files':}}}
        self.by_md5 = dict()  # {md5: set(relative_files)}

    def record_md5(self, relative_file, md5):
        """Index ``relative_file`` under checksum ``md5``."""
        if md5 is None:
            return
        if md5 == zero_length_md5:
            # All empty files share this md5; indexing them would lump
            # unrelated files together.
            return
        if md5 not in self.by_md5:
            self.by_md5[md5] = set()
        self.by_md5[md5].add(relative_file)

    def find_md5_one(self, md5):
        """Return one file recorded with checksum ``md5``, or None."""
        if md5 is None:
            return None
        # Explicit emptiness check replaces a bare ``except:`` that
        # silently swallowed every exception, not just the IndexError
        # it was guarding against.
        files = self.by_md5.get(md5)
        if not files:
            return None
        return next(iter(files))

    def get_md5(self, relative_file):
        """returns md5 if it can, or raises IOError if file is unreadable"""
        md5 = None
        if 'md5' in self[relative_file]:
            return self[relative_file]['md5']
        # Try the cheap route first: another path hardlinked to the same
        # inode may already have a recorded md5.
        md5 = self.get_hardlink_md5(relative_file)
        if md5 is None and 'md5' in cfg.sync_checks:
            logging.debug(u"doing file I/O to read md5 of %s" % relative_file)
            md5 = Utils.hash_file_md5(self[relative_file]['full_name'])
        self.record_md5(relative_file, md5)
        self[relative_file]['md5'] = md5
        return md5

    def record_hardlink(self, relative_file, dev, inode, md5, size):
        """Group ``relative_file`` with other paths sharing (dev, inode)."""
        if md5 is None:
            return
        if size == 0:
            return  # don't record 0-length files
        if dev == 0 or inode == 0:
            return  # Windows
        if dev not in self.hardlinks:
            self.hardlinks[dev] = dict()
        if inode not in self.hardlinks[dev]:
            self.hardlinks[dev][inode] = dict(md5=md5, relative_files=set())
        self.hardlinks[dev][inode]['relative_files'].add(relative_file)

    def get_hardlink_md5(self, relative_file):
        """Return the md5 recorded for this file's (dev, inode), or None."""
        md5 = None
        try:
            dev = self[relative_file]['dev']
            inode = self[relative_file]['inode']
            md5 = self.hardlinks[dev][inode]['md5']
        except KeyError:
            pass
        return md5
|
curzona/pytest-bdd
|
refs/heads/master
|
tests/feature/test_wrong.py
|
1
|
"""Test wrong feature syntax."""
import pytest
from pytest_bdd import scenario
from pytest_bdd.feature import FeatureError
@pytest.fixture(params=[
    'When after then',
    'Then first',
    'Given after When',
    'Given after Then',
])
def scenario_name(request):
    # Parametrizes tests over the names of the malformed scenarios
    # defined in wrong.feature.
    return request.param
def test_wrong(request, scenario_name):
    """Test wrong feature scenarios."""
    sc = scenario('wrong.feature', scenario_name)
    # Running a scenario with malformed step order must raise FeatureError.
    with pytest.raises(FeatureError):
        sc(request)
def test_verbose_output(request):
    """Test verbose output of failed feature scenario"""
    sc = scenario('wrong.feature', 'When after then')
    with pytest.raises(FeatureError) as excinfo:
        sc(request)
    # FeatureError.args carries (message, line number, offending line text).
    msg, line_number, line = excinfo.value.args
    assert line_number == 4
    assert line == 'When I do it again'
|
cidles/poio-api
|
refs/heads/master
|
src/poioapi/io/memory.py
|
1
|
# -*- coding: utf-8 -*-
#
# Poio Tools for Linguists
#
# Copyright (C) 2009-2014 Poio Project
# Author: Peter Bouda <pbouda@cidles.eu>
# URL: <http://media.cidles.eu/poio/>
# For license information, see LICENSE.TXT
"""
This module provides classes to store information from Parsers to a data
structure in memory. The data that is stored is equivalent to a GrAF graph but
without the overhead of Python objects. The AnnotationGraph object thus can be
used with memory or GrAF data storage.
"""
import os

import redis
class MemoryConverter:
    """This class handles the conversion of different file formats into memory
    data types. It uses a sub-class of BaseParser to get the
    annotations and the tier hierarchies.
    """

    def __init__(self, parser, writer=None):
        self.parser = parser
        self.tier_hierarchies = []
        self.meta_information = None
        self.primary_data = None
        self.original_file = None
        # (parent annotation id, tier name) -> annotation
        self.annotations_for_parent = dict()
        # annotation id -> region
        self.region_for_annotation = dict()

    def parse(self):
        """Walk the parser's tiers, recording annotations and regions, and
        build the tier hierarchies.
        """
        self._tiers_parent_list = []
        self.root_tiers = []
        tiers_hierarchy_map = {}

        for tier in self.parser.get_root_tiers():
            self.root_tiers.append(tier.name)
            self._convert_tier(tier, None)

        i = 0
        for t in self._tiers_parent_list:
            if t[1] is None:
                # Each parentless tier starts a new hierarchy.
                i += 1
                tiers_hierarchy_map[str(i)] = [t[0]]
            else:
                self._append_tier_to_hierarchy(tiers_hierarchy_map[str(i)],
                                               t[1], t[0])

        for i, hierarchy in tiers_hierarchy_map.items():
            self.tier_hierarchies.append(hierarchy)

        if hasattr(self.parser, 'meta_information'):
            self.meta_information = self.parser.meta_information

        self.primary_data = self.parser.get_primary_data()
        if hasattr(self.parser, 'filepath') and \
                isinstance(self.parser.filepath, str):
            # ``os`` was used here without being imported (NameError at
            # runtime); it is now imported at the top of the module.
            self.original_file = os.path.abspath(self.parser.filepath)

    def _convert_tier(self, tier, parent_annotation, parent_prefix=None):
        """Recursively record annotations/regions for ``tier`` and its
        children under ``parent_annotation``."""
        child_tiers = self.parser.get_child_tiers_for_tier(tier)

        if tier.annotation_space is None:
            prefix = tier.name
            annotation_name = prefix
        else:
            annotation_name = tier.annotation_space.replace(' ', '_')
            # NOTE(review): GRAFSEPARATOR is neither defined nor imported in
            # this module, so this branch raises NameError at runtime.
            # Presumably it should come from poioapi's graf module -- confirm
            # and add the import.
            prefix = "{0}{1}{2}".format(annotation_name, GRAFSEPARATOR,
                                        tier.name)

        has_regions = False
        if self.parser.tier_has_regions(tier):
            has_regions = True

        self._add_tier_in_hierarchy_list(prefix, parent_prefix)

        annotations = self.parser.get_annotations_for_tier(tier,
                                                          parent_annotation)

        for annotation in annotations:
            region = None
            if has_regions:
                region = self.parser.region_for_annotation(annotation)
                self.region_for_annotation[annotation.id] = region

            parent_annotation_id = None
            if parent_annotation is not None:
                parent_annotation_id = parent_annotation.id
            # NOTE(review): each iteration overwrites the entry for this
            # (parent, tier) key, so only the last annotation survives --
            # confirm whether a list of annotations was intended.
            self.annotations_for_parent[(parent_annotation_id, tier.name)] = \
                annotation

            if child_tiers:
                for t in child_tiers:
                    self._convert_tier(t, annotation, prefix)

        if annotations == [] and child_tiers:
            # Tier has no annotations of its own; still descend so child
            # tiers are registered in the hierarchy.
            for t in child_tiers:
                self._convert_tier(t, None, prefix)

    def _add_tier_in_hierarchy_list(self, prefix, parent_prefix):
        # Record (tier, parent) once; order of first appearance matters
        # when hierarchies are assembled in parse().
        if not (prefix, parent_prefix) in self._tiers_parent_list:
            self._tiers_parent_list.append((prefix, parent_prefix))

    def _append_tier_to_hierarchy(self, tiers_list, parent_tier, tier):
        # Depth-first search for ``parent_tier`` in the nested list; the
        # new tier is appended as a singleton child list next to it.
        for t in tiers_list:
            if isinstance(t, list):
                self._append_tier_to_hierarchy(t, parent_tier, tier)
            else:
                if t == parent_tier:
                    tiers_list.append([tier])
# def _add_node(self, node_id, annotation, annotation_name, regions,
# from_node_id):
# self._add_node_to_graph(node_id, regions, from_node_id)
# self._add_graf_annotation(annotation_name, annotation.id, node_id,
# annotation.value, annotation.features)
# def _add_root_nodes(self, prefix, node_id):
# if prefix in self.root_tiers:
# self.graf.header.roots.append(node_id.to_str())
# def _add_graf_annotation(self, annotation_name, annotation_id,
# annotation_ref, annotation_value, annotation_features=None):
# annotation = graf.Annotation(annotation_name, annotation_features,
# annotation_id)
# if annotation_value is not None:
# annotation.features['annotation_value'] = annotation_value
# self.graf.nodes[annotation_ref.to_str()].annotations.add(annotation)
# if annotation_name in self.graf.annotation_spaces:
# #if annotation not in self.graf.annotation_spaces[annotation_name]:
# self.graf.annotation_spaces[annotation_name].add(annotation)
# else:
# annotation_space = graf.AnnotationSpace(annotation_name)
# annotation_space.add(annotation)
# self.graf.annotation_spaces.add(annotation_space)
# def _add_node_to_graph(self, node_id, regions=None,
# from_node_id=None):
# node = graf.Node(node_id.to_str())
# if from_node_id is not None:
# edge_id = node_id.str_edge()
# self.graf.create_edge(self.graf.nodes[from_node_id.to_str()], node,
# edge_id)
# if regions is not None:
# region_id = node_id.str_region()
# region = graf.Region(region_id, *regions)
# node.add_region(region)
# self.graf.regions.add(region)
# self.graf.nodes.add(node)
|
nhomar/odoo-mirror
|
refs/heads/8.0
|
addons/l10n_pa/__openerp__.py
|
117
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2011 Cubic ERP - Teradata SAC (<http://cubicerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
# Module manifest consumed by the OpenERP module loader.
{
    "name": "Panama Localization Chart Account",
    "version": "1.0",
    "description": """
Panamenian accounting chart and tax localization.
Plan contable panameño e impuestos de acuerdo a disposiciones vigentes
Con la Colaboración de
- AHMNET CORP http://www.ahmnet.com
""",
    "author": "Cubic ERP",
    "website": "http://cubicERP.com",
    "category": "Localization/Account Charts",
    "depends": [
        "account_chart",
    ],
    # Data files loaded on installation, in order.
    "data":[
        "account_tax_code.xml",
        "l10n_pa_chart.xml",
        "account_tax.xml",
        "l10n_pa_wizard.xml",
    ],
    "demo_xml": [
    ],
    "active": False,
    "installable": True,
    "certificate" : "",
    # NOTE(review): these image paths reference the Chilean (l10n_cl)
    # localization -- presumably copy-pasted; confirm the intended images.
    'images': ['images/config_chart_l10n_cl.jpeg','images/l10n_cl_chart.jpeg'],
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
cfbraun/django-admintools-bootstrap
|
refs/heads/master
|
admintools_bootstrap/settings.py
|
8
|
from appconf import AppConf
class AdminToolsBootstrapConf(AppConf):
    # Link target for the site name in the admin header; with the Meta
    # prefix below this is overridable as ADMINTOOLS_BOOTSTRAP_SITE_LINK.
    SITE_LINK = '/'

    class Meta:
        # Settings-name prefix applied by django-appconf.
        prefix = 'ADMINTOOLS_BOOTSTRAP'
|
mark47/OESandbox
|
refs/heads/master
|
liquibase/HaitiLNSPMassive/scripts/uom.py
|
2
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Read the units of measure we already have, one name per line.
old = []
with open("currentUOM.txt") as old_file:
    for line in old_file:
        old.append(line.strip())

# Emit an INSERT for every unit in newUOM.txt (comma-separated per line)
# that is not already present; ``with`` guarantees both files are closed
# (the original leaked the newUOM.txt handle).
with open("newUOM.txt") as new_file:
    with open("MassiveUOM.sql", 'w') as result:
        for line in new_file:
            if len(line) > 1:
                values = line.split(',')
                for value in values:
                    name = value.strip()
                    if name not in old:
                        # Track it so duplicates later in the file are skipped.
                        old.append(name)
                        # NOTE(review): the name is interpolated directly into
                        # SQL; fine for a trusted one-off fixture file, but any
                        # apostrophe in the input breaks the statement.
                        result.write("INSERT INTO unit_of_measure( id, name , description, lastupdated) \n\t")
                        result.write("VALUES ( nextval( 'unit_of_measure_seq' ) , '" + name + "' , '" + name + "' , now());\n")

# Parenthesized print works as a statement on Python 2 and a call on Python 3.
print("Done check MassiveUOM.sql for values")
|
aroche/django
|
refs/heads/master
|
tests/save_delete_hooks/models.py
|
409
|
"""
Adding hooks before/after saving and deleting
To execute arbitrary code around ``save()`` and ``delete()``, just subclass
the methods.
"""
from __future__ import unicode_literals
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class Person(models.Model):
    """Fixture model proving that overriding save()/delete() lets arbitrary
    code run around the inherited behaviour.

    Hook invocations are recorded, in order, in the per-instance ``data``
    list so tests can assert the exact sequence.
    """
    first_name = models.CharField(max_length=20)
    last_name = models.CharField(max_length=20)

    def __init__(self, *args, **kwargs):
        super(Person, self).__init__(*args, **kwargs)
        # In-memory log of hook invocations; never persisted.
        self.data = []

    def __str__(self):
        return "%s %s" % (self.first_name, self.last_name)

    def save(self, *args, **kwargs):
        # Record, run the real save, record again.
        self.data.append("Before save")
        # Call the "real" save() method
        super(Person, self).save(*args, **kwargs)
        self.data.append("After save")

    def delete(self):
        # Record, run the real delete, record again.
        self.data.append("Before deletion")
        # Call the "real" delete() method
        super(Person, self).delete()
        self.data.append("After deletion")
|
FHannes/intellij-community
|
refs/heads/master
|
python/lib/Lib/site-packages/django/contrib/gis/db/backends/mysql/operations.py
|
312
|
from django.db.backends.mysql.base import DatabaseOperations
from django.contrib.gis.db.backends.adapter import WKTAdapter
from django.contrib.gis.db.backends.base import BaseSpatialOperations
class MySQLOperations(DatabaseOperations, BaseSpatialOperations):
    """GeoDjango database operations for MySQL.

    MySQL only provides MBR (minimum bounding rectangle) based spatial
    predicates, so every supported lookup maps onto an ``MBR*`` function.
    """
    compiler_module = 'django.contrib.gis.db.models.sql.compiler'
    mysql = True
    name = 'mysql'
    select = 'AsText(%s)'
    from_wkb = 'GeomFromWKB'
    from_text = 'GeomFromText'

    Adapter = WKTAdapter
    Adaptor = Adapter  # Backwards-compatibility alias.

    # Lookup name -> MySQL MBR function (names chosen for consistency
    # with the PostGIS API).
    geometry_functions = {
        'bbcontains': 'MBRContains',
        'bboverlaps': 'MBROverlaps',
        'contained': 'MBRWithin',
        'contains': 'MBRContains',
        'disjoint': 'MBRDisjoint',
        'equals': 'MBREqual',
        'exact': 'MBREqual',
        'intersects': 'MBRIntersects',
        'overlaps': 'MBROverlaps',
        'same_as': 'MBREqual',
        'touches': 'MBRTouches',
        'within': 'MBRWithin',
    }

    # All recognised lookup terms (plus 'isnull'), each mapped to None.
    gis_terms = dict.fromkeys(list(geometry_functions) + ['isnull'])

    def geo_db_type(self, f):
        "Geometry fields use their geometry type name as the column type."
        return f.geom_type

    def get_geom_placeholder(self, value, srid):
        """
        The placeholder here has to include MySQL's WKT constructor. Because
        MySQL does not support spatial transformations, there is no need to
        modify the placeholder based on the contents of the given value.
        """
        if hasattr(value, 'expression'):
            return '%s.%s' % tuple(map(self.quote_name, value.cols[value.expression]))
        return '%s(%%s)' % self.from_text

    def spatial_lookup_sql(self, lvalue, lookup_type, value, field, qn):
        "Build the SQL fragment for one spatial lookup."
        alias, col, db_type = lvalue
        geo_col = '%s.%s' % (qn(alias), qn(col))

        func = self.geometry_functions.get(lookup_type, False)
        if func:
            return "%s(%s, %s)" % (func, geo_col,
                                   self.get_geom_placeholder(value, field.srid))

        # TODO: Is this really necessary? MySQL can't handle NULL geometries
        # in its spatial indexes anyways.
        if lookup_type == 'isnull':
            return "%s IS %sNULL" % (geo_col, '' if value else 'NOT ')

        raise TypeError("Got invalid lookup_type: %s" % repr(lookup_type))
|
ayushagrawal288/zamboni
|
refs/heads/master
|
mkt/regions/tests/test_utils_.py
|
19
|
# -*- coding: utf-8 -*-
from nose.tools import eq_
from mkt.constants import regions
from mkt.regions.utils import parse_region, remove_accents
def test_parse_region():
    """parse_region() accepts slugs, names, numeric ids (str or int) and
    region objects, and returns the canonical region (None for '')."""
    cases = [
        ('restofworld', regions.RESTOFWORLD),
        ('br', regions.BRA),
        ('brazil', regions.BRA),
        ('bRaZiL', regions.BRA),
        ('7', regions.BRA),
        (7, regions.BRA),
        (regions.BRA, regions.BRA),
        ('', None),
    ]
    for raw, expected in cases:
        eq_(parse_region(raw), expected)
def test_parse_worldwide_region_as_restofworld():
    # The legacy 'worldwide' slug must keep resolving to RESTOFWORLD.
    eq_(parse_region('worldwide'), regions.RESTOFWORLD)
def test_remove_accents():
    """remove_accents() maps accented Latin characters to plain ASCII."""
    for accented, plain in [
        (u'café', u'cafe'),
        (u'Équateur', u'Equateur'),
        (u'Pérou', u'Perou'),
        (u'Węgry', u'Wegry'),
    ]:
        eq_(remove_accents(accented), plain)
    # This hits the limitations of what's possible with built-in
    # functions but shows that if the diacritic isn't found the
    # string remains un-molested.
    eq_(remove_accents(u'Włochy'), u'Włochy')
|
coordcn/LuaIO
|
refs/heads/master
|
tools/gyp/pylib/gyp/common_test.py
|
2542
|
#!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Unit tests for the common.py file."""
import gyp.common
import unittest
import sys
class TestTopologicallySorted(unittest.TestCase):
    """Tests for gyp.common.TopologicallySorted."""

    def test_Valid(self):
        """Test that sorting works on a valid graph with one possible order."""
        graph = {
            'a': ['b', 'c'],
            'b': [],
            'c': ['d'],
            'd': ['b'],
        }
        self.assertEqual(
            gyp.common.TopologicallySorted(graph.keys(),
                                           lambda node: tuple(graph[node])),
            ['a', 'c', 'd', 'b'])

    def test_Cycle(self):
        """Test that an exception is thrown on a cyclic graph."""
        graph = {
            'a': ['b'],
            'b': ['c'],
            'c': ['d'],
            'd': ['a'],
        }
        self.assertRaises(
            gyp.common.CycleError, gyp.common.TopologicallySorted,
            graph.keys(), lambda node: tuple(graph[node]))
class TestGetFlavor(unittest.TestCase):
    """Test that gyp.common.GetFlavor works as intended"""
    original_platform = ''

    def setUp(self):
        # Remember the real platform; assertFlavor overwrites it.
        self.original_platform = sys.platform

    def tearDown(self):
        sys.platform = self.original_platform

    def assertFlavor(self, expected, argument, param):
        """Assert GetFlavor(param) == expected while sys.platform == argument."""
        sys.platform = argument
        self.assertEqual(expected, gyp.common.GetFlavor(param))

    def test_platform_default(self):
        # Flavor is derived from sys.platform when no 'flavor' param is given.
        # (Cleaned up: the original mixed trailing semicolons and ragged
        # spacing into half of these calls.)
        self.assertFlavor('freebsd', 'freebsd9', {})
        self.assertFlavor('freebsd', 'freebsd10', {})
        self.assertFlavor('openbsd', 'openbsd5', {})
        self.assertFlavor('solaris', 'sunos5', {})
        self.assertFlavor('solaris', 'sunos', {})
        self.assertFlavor('linux', 'linux2', {})
        self.assertFlavor('linux', 'linux3', {})

    def test_param(self):
        # An explicit 'flavor' parameter overrides sys.platform.
        self.assertFlavor('foobar', 'linux2', {'flavor': 'foobar'})
if __name__ == '__main__':
    # Allow running this test module directly.
    unittest.main()
|
Metaswitch/calico-nova
|
refs/heads/calico-readme
|
nova/virt/disk/vfs/__init__.py
|
129
|
# Copyright 2012 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Operations on virtual filesystems
"""
|
Alzemand/ubi-virtual-assistant
|
refs/heads/master
|
lib/notify-module.py
|
2
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Author: Edilson Alzemand
# Shows a single desktop notification greeting the user via pynotify
# (libnotify bindings, Python 2 era).
import pynotify

# First argument is the application name registered with the notification
# daemon.
pynotify.init("Aplicativo")
# Title (Portuguese: "Hello Edilson, how can I help?"), empty body, and the
# assistant's icon.
# NOTE(review): the icon path is hard-coded to one developer's home
# directory -- presumably should be made relative or configurable.
notify = pynotify.Notification("Olá Edilson, em que eu posso ajudar?", "", "/home/edilson/Projetos/ubi-virtual-assistant/ubi.svg")
notify.show()
|
PrefPy/opra
|
refs/heads/master
|
compsocsite/groups/migrations/0002_group_open.py
|
1
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2017-01-09 15:05
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):
    """Add the integer ``open`` field (default 0) to the ``group`` model.

    Auto-generated by ``makemigrations``; do not edit by hand.
    """

    dependencies = [
        ('groups', '0001_initial'),
    ]

    operations = [
        migrations.AddField(
            model_name='group',
            name='open',
            field=models.IntegerField(default=0),
        ),
    ]
|
CeltonMcGrath/TACTIC
|
refs/heads/master
|
src/tactic/ui/sync/sync_filter.py
|
6
|
############################################################
#
# Copyright (c) 2011, Southpaw Technology
# All Rights Reserved
#
# PROPRIETARY INFORMATION. This software is proprietary to
# Southpaw Technology, and is not to be reproduced, transmitted,
# or disclosed in any way without written permission.
#
#
__all__ = ['SyncFilter']
import tacticenv
from pyasm.common import Environment, Xml, TacticException
from pyasm.biz import Project
from pyasm.search import SearchType, Search
from pyasm.security import AccessManager
import os, codecs
class SyncFilter(object):
    """Filters a transaction log's XML against TACTIC security rules so
    that only permitted actions are kept for replication.

    TACTIC convention: ``my`` is used in place of ``self``.
    """

    def __init__(my, **kwargs):
        my.kwargs = kwargs
        # "transaction": the transaction log sobject whose XML is filtered.
        my.log = my.kwargs.get("transaction")
        # XML access rules fed to the AccessManager in execute().
        my.rules = my.kwargs.get("rules")
        # Human-readable reason set when actions were filtered out.
        my.message = ""

    def execute(my):
        log = my.log
        rules = my.rules

        # Give rules. Only notes will get through
        # we need heirarchical rules. This will ensure that only notes
        # for project/assets will pass
        # Here, the second one is much more difficult to do.
        # NOTE(review): ``rulesXXX`` below is an unused illustrative example
        # of hierarchical rules; it is never passed to the AccessManager.
        rulesXXX = '''
        <rule group='heirarchy' key='project/asset.sthpw/note' access='allow'/>
        <rule group='heirarchy' key="project/asset.sthpw/note['assigned','beth']" access='allow'/>"
        '''

        access_manager = AccessManager()
        access_manager.add_xml_rules(rules)

        # filter out project
        namespace = log.get_value("namespace")
        key1 = {'code': namespace}
        key2 = {'code': '*'}
        keys = [key1, key2]
        if not access_manager.check_access("project", keys, "allow", default="deny"):
            # Project denied entirely: emit an empty transaction document.
            my.filtered_xml = Xml()
            my.filtered_xml.read_string("<transaction/>")
            my.message = "Transaction prevented due to project restriction"
            return

        # filter the transaction against the security model
        xml = log.get_xml_value("transaction")

        my.filtered_xml = Xml()
        my.filtered_xml.create_doc("transaction")
        root2 = my.filtered_xml.get_root_node()

        nodes = xml.get_nodes("transaction/*")
        num_nodes = len(nodes)
        count = 0
        for node in nodes:
            if Xml.get_node_name(node) == "sobject":
                # Strip any "?key=value" suffix from the search type.
                search_type = xml.get_attribute(node, "search_type")
                parts = search_type.split("?")
                search_type = parts[0]

                # filter search types
                key1 = {'code': search_type}
                key2 = {'code': "*"}
                keys = [key1, key2]
                if not access_manager.check_access("search_type", keys, "allow", default="deny"):
                    continue

                # check hierachical rule
                # NOTE(review): ``key`` is computed but never used -- the
                # hierarchical check appears unfinished.
                parent_type = xml.get_attribute(node, "parent_type")
                key = "%s.%s" % (parent_type, search_type)

                my.filtered_xml.append_child(root2, node)
                count += 1
            else:
                # Non-sobject nodes pass through unfiltered.
                my.filtered_xml.append_child(root2, node)
                count += 1

        if len(nodes) != 0 and len(my.filtered_xml.get_nodes("transaction/*")) == 0:
            my.message = "All actions filtered due to security restrictions (%s actions)" % num_nodes

    def get_filtered_xml(my):
        # Result of execute(): the filtered transaction XML document.
        return my.filtered_xml

    def get_message(my):
        return my.message
if __name__ == '__main__':
    # Ad-hoc manual test: run the filter inside a batch scripting session.
    from pyasm.security import Batch
    Batch(project_code='new_project')
    filter = SyncFilter()
    filter.execute()
|
osiloke/Flumotion-Transcoder
|
refs/heads/master
|
flumotion/transcoder/admin/datastore/activity.py
|
1
|
# vi:si:et:sw=4:sts=4:ts=4
# Flumotion - a streaming media server
# Copyright (C) 2004,2005,2006,2007,2008,2009 Fluendo, S.L.
# Copyright (C) 2010,2011 Flumotion Services, S.A.
# All rights reserved.
#
# This file may be distributed and/or modified under the terms of
# the GNU Lesser General Public License version 2.1 as published by
# the Free Software Foundation.
# This file is distributed without any warranty; without even the implied
# warranty of merchantability or fitness for a particular purpose.
# See "LICENSE.LGPL" in the source distribution for more information.
#
# Headers in this file shall remain intact.
import datetime
from zope.interface import implements, Attribute
from flumotion.inhouse import log, utils, annotate
from flumotion.transcoder.admin import interfaces
from flumotion.transcoder.admin.enums import NotificationTriggerEnum
from flumotion.transcoder.admin.enums import ActivityTypeEnum
from flumotion.transcoder.admin.enums import NotificationTypeEnum
from flumotion.transcoder.admin.enums import TranscodingTypeEnum
from flumotion.transcoder.admin.datastore import base, profile, notification
class IActivityStore(base.IBaseStore):
    """Interface for persisted admin activities (transcodings, notifications)."""

    type = Attribute("The type of activity")
    subtype = Attribute("The sub-type of activity")
    startTime = Attribute("The time the activity was started")
    lastTime = Attribute("The last time the activity was attempted")
    state = Attribute("Activity's state")

    def store(self):
        """Persist this activity."""
        pass

    def delete(self):
        """Remove this activity from storage."""
        pass

    def reset(self):
        """Reset this activity's persistent state."""
        pass
class ITranscodingActivityStore(IActivityStore):
    """Activity describing one transcoding job."""

    inputRelPath = Attribute("Transcoded file relative path")

    def getCustomerStore(self):
        """Return the customer store this transcoding belongs to."""
        pass

    def getProfileStore(self):
        """Return the transcoding profile store used."""
        pass
class INotificationActivityStore(IActivityStore):
    """Activity describing a notification and its retry bookkeeping."""

    trigger = Attribute("What has triggered this notification")
    timeout = Attribute("Timeout to perform the notification")
    retryCount = Attribute("How many times the notification has been attempted")
    retryMax = Attribute("Maximum time the notification should be attempted")
    retrySleep = Attribute("Time to wait between notification attempts")
class IHTTPActivityStore(INotificationActivityStore):
    """Notification performed as an HTTP request."""

    url = Attribute("URL used to notify over HTTP")
class IMailActivityStore(INotificationActivityStore):
    """Notification performed by sending an e-mail."""

    senderAddr = Attribute("Sender e-mail addresse")
    subject = Attribute("Mail subject")
    body = Attribute("Mail body")
class ISQLActivityStore(INotificationActivityStore):
    """Notification performed by executing an SQL statement."""

    databaseURI = Attribute("Database connection URI")
    sqlStatement = Attribute("SQL statement to execute")
## Proxy Descriptors ##
class ReadWriteProxy(base.ReadOnlyProxy):
    """Descriptor adding write access on top of ReadOnlyProxy: assignment
    deep-copies the value into ``obj._data.<fieldName>``."""

    def __init__(self, fieldName, default=None):
        base.ReadOnlyProxy.__init__(self, fieldName, default)

    def __set__(self, obj, value):
        # Writing to a deleted store is a programming error.
        assert not obj._deleted
        setattr(obj._data, self._fieldName, utils.deepCopy(value))
        try:
            obj._touche()
        except AttributeError:
            # Best effort: owners without a _touche() hook are left alone.
            pass
class ReadWriteDataProxy(object):
    """Descriptor mapping an attribute onto the free-form dict
    ``obj._data.data[fieldName]``, deep-copying values in both directions
    to avoid aliasing.  Deletion is not supported."""

    def __init__(self, fieldName, default=None):
        self._fieldName = fieldName
        self._default = default

    def __get__(self, obj, type=None):
        stored = obj._data.data.get(self._fieldName, self._default)
        return utils.deepCopy(stored)

    def __set__(self, obj, value):
        # Writing to a deleted store is a programming error.
        assert not obj._deleted
        obj._data.data[self._fieldName] = utils.deepCopy(value)
        try:
            obj._touche()
        except AttributeError:
            # Best effort: owners without a _touche() hook are left alone.
            pass

    def __delete__(self, obj):
        raise AttributeError("Attribute cannot be deleted")
class ActivityStore(base.DataStore, log.LoggerProxy):
    """Base store for admin activities backed by a parent state store."""
    implements(IActivityStore)

    type = base.ReadOnlyProxy("type")
    subtype = base.ReadOnlyProxy("subtype")
    startTime = base.ReadOnlyProxy("startTime")
    lastTime = base.ReadOnlyProxy("lastTime")
    state = ReadWriteProxy("state")

    def __init__(self, logger, stateStore, data, isNew=True):
        log.LoggerProxy.__init__(self, logger)
        base.DataStore.__init__(self, stateStore, data, label=data.label)
        self._deleted = False
        # True until the activity has been stored once.
        self._isNew = isNew

    def getAdminStore(self):
        return self.parent.getAdminStore()

    def getStateStore(self):
        return self.parent

    def store(self):
        """Persist the activity asynchronously; failures are only logged."""
        assert not self._deleted
        d = self.parent._storeActivity(self, self._isNew)
        d.addErrback(self.__ebActivityStoreFailed)
        # BUGFIX: this was ``self._new = False``, which set an attribute
        # nothing reads; ``_isNew`` was never cleared, so every subsequent
        # store() was still flagged as an initial insert.
        self._isNew = False

    def delete(self):
        """Mark this store deleted and remove it from the parent asynchronously."""
        assert not self._deleted
        self._deleted = True
        d = self.parent._deleteActivity(self)
        d.addErrback(self.__ebActivityDeleteFailed)

    def reset(self):
        assert not self._deleted
        return self.parent._resetActivity(self)

    ## Protected Methods ##

    def _touche(self):
        # Stamp the data with the time of the last modification.
        self._data.lastTime = datetime.datetime.now()

    def _getData(self):
        return self._data

    ## Private Methods ##

    def __ebActivityStoreFailed(self, failure):
        log.notifyFailure(self, failure,
                          "Fail to store %s activity '%s'",
                          self._data and self._data.type and self._data.type.nick,
                          self._data and self._data.label)

    def __ebActivityDeleteFailed(self, failure):
        log.notifyFailure(self, failure,
                          "Fail to delete %s activity '%s'",
                          self._data and self._data.type and self._data.type.nick,
                          self._data and self._data.label)
class TranscodingActivityStore(ActivityStore):
    """Activity store recording one transcoding job."""
    implements(ITranscodingActivityStore)

    inputRelPath = base.ReadOnlyProxy("inputRelPath")

    def __init__(self, logger, stateStore, data, isNew=True):
        ActivityStore.__init__(self, logger, stateStore, data, isNew)

    def getCustomerStore(self):
        """Look up the customer referenced by this activity's data (or None)."""
        assert not self._deleted
        custIdent = self._data.customerIdentifier
        adminStore = self.getAdminStore()
        return adminStore.getCustomerStore(custIdent, None)

    def getProfileStore(self):
        """Look up the referenced profile; None when the customer is unknown."""
        custStore = self.getCustomerStore()
        if not custStore:
            return None
        profIdent = self._data.profileIdentifier
        profStore = custStore.getProfileStore(profIdent, None)
        return profStore

    ## Protected Methods ##

    def _setup(self, profStore, relPath):
        # Record which customer/profile the job belongs to plus the input
        # file's relative path, then stamp the modification time.
        assert isinstance(profStore, profile.ProfileStore)
        assert (not relPath) or isinstance(relPath, str)
        custStore = profStore.getCustomerStore()
        self._data.customerIdentifier = custStore.identifier
        self._data.profileIdentifier = profStore.identifier
        self._data.inputRelPath = relPath
        self._touche()
class NotificationActivityStore(ActivityStore):
    """Activity store recording one notification and its retry bookkeeping."""
    implements(INotificationActivityStore)

    trigger = base.ReadOnlyProxy("trigger")
    timeout = ReadWriteProxy("timeout")
    retryCount = ReadWriteProxy("retryCount")
    retryMax = ReadWriteProxy("retryMax")
    retrySleep = ReadWriteProxy("retrySleep")

    def __init__(self, logger, stateStore, data, isNew=True):
        ActivityStore.__init__(self, logger, stateStore, data, isNew)

    def incRetryCount(self):
        """Increment the attempt counter and stamp the modification time."""
        assert not self._deleted
        self._data.retryCount += 1
        self._touche()

    ## Protected Methods ##

    def _setup(self, notifStore, trigger):
        # Record the trigger and the customer/profile/target identifiers of
        # the originating notification, and start the retry counter at zero.
        assert isinstance(notifStore, notification.NotificationStore)
        assert isinstance(trigger, NotificationTriggerEnum)
        self._data.trigger = trigger
        custStore = notifStore.getCustomerStore()
        profStore = notifStore.getProfileStore()
        targStore = notifStore.getTargetStore()
        self._data.customerIdentifier = custStore and custStore.identifier
        self._data.profileIdentifier = profStore and profStore.identifier
        self._data.targetIdentifier = targStore and targStore.identifier
        self._data.retryCount = 0
        self._touche()
class HTTPActivityStore(NotificationActivityStore):
    """Notification activity performed as an HTTP request."""
    implements(IHTTPActivityStore)

    # Stored in the activity's free-form data dict under "request-url".
    url = ReadWriteDataProxy("request-url")

    def __init__(self, logger, stateStore, data, isNew=True):
        NotificationActivityStore.__init__(self, logger, stateStore, data, isNew)
class MailActivityStore(NotificationActivityStore):
    """Notification activity performed by sending an e-mail."""
    implements(IMailActivityStore)

    senderAddr = ReadWriteDataProxy("sender-addr")
    subject = ReadWriteDataProxy("subject")
    body = ReadWriteDataProxy("body")

    def __init__(self, logger, stateStore, data, isNew=True):
        NotificationActivityStore.__init__(self, logger, stateStore, data, isNew)

    def _getRecipientsAddr(self):
        """
        Not created by metaclass because it convert from str to list
        """
        recipients = self._data.data.get("recipients", "")
        # ROBUSTNESS: split on ',' rather than the exact ', ' separator, so
        # addresses stored without a space after the comma are still split.
        # For values written by _setRecipientsAddr (", "-joined) the result
        # is identical, since each element is stripped anyway.
        return [e.strip() for e in recipients.split(",")]

    def _setRecipientsAddr(self, recipients):
        """
        Not created by metaclass because it convert from list to str
        """
        data = ", ".join([e.strip() for e in recipients])
        self._data.data["recipients"] = data

    # Read/write property over the two converters above.
    recipientsAddr = property(_getRecipientsAddr, _setRecipientsAddr)
class SQLActivityStore(NotificationActivityStore):
    """Notification activity performed by executing an SQL statement."""
    implements(ISQLActivityStore)

    databaseURI = ReadWriteDataProxy("uri")
    sqlStatement = ReadWriteDataProxy("sql")

    def __init__(self, logger, stateStore, data, isNew=True):
        NotificationActivityStore.__init__(self, logger, stateStore, data, isNew)

    ## Protected Methods ##

    def _setup(self, notifStore, trigger):
        NotificationActivityStore._setup(self, notifStore, trigger)
        # Copy the database URI from the notification definition, if set.
        uri = notifStore.databaseURI
        if uri is not None: self._data.data["uri"] = uri
# Maps (activity type, subtype) to the concrete store class; used by
# ActivityFactory below.
_activityLookup = {ActivityTypeEnum.transcoding:
                       {TranscodingTypeEnum.normal: TranscodingActivityStore},
                   ActivityTypeEnum.notification:
                       {NotificationTypeEnum.http_request: HTTPActivityStore,
                        NotificationTypeEnum.email: MailActivityStore,
                        NotificationTypeEnum.sql: SQLActivityStore}}
def ActivityFactory(logger, parent, data, isNew=True):
    """Instantiate the store class matching ``data.type``/``data.subtype``."""
    assert data.type in _activityLookup
    return _activityLookup[data.type][data.subtype](logger, parent, data, isNew)
|
MoKee/android_kernel_sony_tianchi
|
refs/heads/kk_mkt
|
tools/perf/util/setup.py
|
4998
|
#!/usr/bin/python2
from distutils.core import setup, Extension
from os import getenv
from distutils.command.build_ext import build_ext as _build_ext
from distutils.command.install_lib import install_lib as _install_lib
class build_ext(_build_ext):
    """build_ext that writes build output into the directories given by
    the PYTHON_EXTBUILD_LIB/PYTHON_EXTBUILD_TMP environment variables
    (see the module-level ``build_lib``/``build_tmp``)."""
    def finalize_options(self):
        _build_ext.finalize_options(self)
        self.build_lib = build_lib
        self.build_temp = build_tmp
class install_lib(_install_lib):
    """install_lib that installs straight from PYTHON_EXTBUILD_LIB."""
    def finalize_options(self):
        _install_lib.finalize_options(self)
        self.build_dir = build_lib
cflags = ['-fno-strict-aliasing', '-Wno-write-strings']
cflags += getenv('CFLAGS', '').split()

# Build directories provided by the kernel Makefile.
build_lib = getenv('PYTHON_EXTBUILD_LIB')
build_tmp = getenv('PYTHON_EXTBUILD_TMP')

# Read the list of C sources, skipping blank lines and '#' comments.
# (Was the py2-only ``file(...)`` builtin, and the handle was never
# closed; ``open`` with try/finally is portable and leak-free.)
_sources_file = open('util/python-ext-sources')
try:
    ext_sources = [f.strip() for f in _sources_file
                   if len(f.strip()) > 0 and f[0] != '#']
finally:
    _sources_file.close()

perf = Extension('perf',
                 sources=ext_sources,
                 include_dirs=['util/include'],
                 extra_compile_args=cflags,
                 )

setup(name='perf',
      version='0.1',
      description='Interface with the Linux profiling infrastructure',
      author='Arnaldo Carvalho de Melo',
      author_email='acme@redhat.com',
      license='GPLv2',
      url='http://perf.wiki.kernel.org',
      ext_modules=[perf],
      cmdclass={'build_ext': build_ext, 'install_lib': install_lib})
|
shuggiefisher/django-on-google-app-engine-base
|
refs/heads/master
|
django/db/models/related.py
|
231
|
from django.utils.encoding import smart_unicode
from django.db.models.fields import BLANK_CHOICE_DASH
class BoundRelatedObject(object):
    """Wraps a RelatedObject together with the form field mappings for one
    bound context."""
    def __init__(self, related_object, field_mapping, original):
        # NOTE(review): ``original`` is accepted but never stored here;
        # presumably subclasses use it -- confirm before removing.
        self.relation = related_object
        self.field_mappings = field_mapping[related_object.name]

    def template_name(self):
        # Subclasses must supply the template used to render this object.
        raise NotImplementedError

    def __repr__(self):
        return repr(self.__dict__)
class RelatedObject(object):
    """Describes the reverse side of a relation: ``model`` points at
    ``parent_model`` through ``field``."""

    def __init__(self, parent_model, model, field):
        self.parent_model = parent_model
        self.model = model
        self.opts = model._meta
        self.field = field
        self.name = '%s:%s' % (self.opts.app_label, self.opts.module_name)
        self.var_name = self.opts.object_name.lower()

    def get_choices(self, include_blank=True, blank_choice=BLANK_CHOICE_DASH,
                    limit_to_currently_related=False):
        """Returns choices with a default blank choices included, for use
        as SelectField choices for this field.

        Analogue of django.db.models.fields.Field.get_choices, provided
        initially for utilisation by RelatedFilterSpec.
        """
        # IDIOM FIX: was ``include_blank and blank_choice or []`` -- the
        # fragile cond-and-or pattern that silently yields [] for a falsy
        # blank_choice; a conditional expression states the intent directly.
        first_choice = blank_choice if include_blank else []
        queryset = self.model._default_manager.all()
        if limit_to_currently_related:
            # Restrict to objects actually related to some parent instance.
            queryset = queryset.complex_filter(
                {'%s__isnull' % self.parent_model._meta.module_name: False})
        lst = [(x._get_pk_val(), smart_unicode(x)) for x in queryset]
        return first_choice + lst

    def get_db_prep_lookup(self, lookup_type, value, connection, prepared=False):
        # Defer to the actual field definition for db prep
        return self.field.get_db_prep_lookup(lookup_type, value,
                                             connection=connection, prepared=prepared)

    def editable_fields(self):
        "Get the fields in this class that should be edited inline."
        return [f for f in self.opts.fields + self.opts.many_to_many
                if f.editable and f != self.field]

    def __repr__(self):
        return "<RelatedObject: %s related to %s>" % (self.name, self.field.name)

    def bind(self, field_mapping, original, bound_related_object_class=BoundRelatedObject):
        return bound_related_object_class(self, field_mapping, original)

    def get_accessor_name(self):
        # This method encapsulates the logic that decides what name to give an
        # accessor descriptor that retrieves related many-to-one or
        # many-to-many objects. It uses the lower-cased object_name + "_set",
        # but this can be overridden with the "related_name" option.
        if self.field.rel.multiple:
            # If this is a symmetrical m2m relation on self, there is no reverse accessor.
            if getattr(self.field.rel, 'symmetrical', False) and self.model == self.parent_model:
                return None
            return self.field.rel.related_name or (self.opts.object_name.lower() + '_set')
        else:
            return self.field.rel.related_name or (self.opts.object_name.lower())

    def get_cache_name(self):
        return "_%s_cache" % self.get_accessor_name()
|
mwrock/boxstarter
|
refs/heads/master
|
BuildPackages/example-light/tools/sublime/Packages/PowershellUtils/tests/sublimeplugin.py
|
20
|
class Plugin(object):
    """Base class for Sublime Text plugin callbacks.

    Every hook is a no-op returning None; subclasses override only the
    events they care about.
    """

    # --- view events --------------------------------------------------
    def onNew(self, view):
        """No-op hook; override in subclasses."""

    def onClone(self, view):
        """No-op hook; override in subclasses."""

    def onLoad(self, view):
        """No-op hook; override in subclasses."""

    def onClose(self, view):
        """No-op hook; override in subclasses."""

    def onPreSave(self, view):
        """No-op hook; override in subclasses."""

    def onPostSave(self, view):
        """No-op hook; override in subclasses."""

    def onModified(self, view):
        """No-op hook; override in subclasses."""

    def onSelectionModified(self, view):
        """No-op hook; override in subclasses."""

    def onActivated(self, view):
        """No-op hook; override in subclasses."""

    # --- window events ------------------------------------------------
    def onProjectLoad(self, window):
        """No-op hook; override in subclasses."""

    def onProjectClose(self, window):
        """No-op hook; override in subclasses."""
class ApplicationCommand(Plugin):
    # Marker base class: commands run at application scope.
    pass
class WindowCommand(Plugin):
    # Marker base class: commands run at window scope.
    pass
# BUGFIX: TextCommand was defined twice in a row; the first (empty)
# definition was immediately shadowed by the second.  A single definition
# with the run()/isEnabled() hooks is kept.
class TextCommand(Plugin):
    """Command bound to a text view."""

    def run(self, view, args):
        """No-op default; subclasses implement the command body."""
        pass

    def isEnabled(self, view, args):
        """No-op default (returns None); subclasses may report enablement."""
        pass
|
ppoile/athletica-adapter
|
refs/heads/master
|
main/migrations/0002_auto_20150304_0224.py
|
1
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import models, migrations


class Migration(migrations.Migration):
    """Adjust explicit ``db_column`` mappings on rundenset/serie/start
    fields (legacy Athletica schema uses x-prefixed column names).

    Auto-generated migration; do not edit by hand.
    """

    dependencies = [
        ('main', '0001_squashed_0005_auto_20150304_0020'),
    ]

    operations = [
        migrations.AlterField(
            model_name='rundenset',
            name='rundenset',
            field=models.IntegerField(db_column='xRundenset'),
        ),
        migrations.AlterField(
            model_name='serie',
            name='anlage',
            field=models.ForeignKey(to='main.Anlage', db_column='xAnlage'),
        ),
        migrations.AlterField(
            model_name='start',
            name='staffel',
            field=models.ForeignKey(to='main.Staffel', db_column='xStaffel'),
        ),
    ]
|
yuecong/dd-agent
|
refs/heads/master
|
tests/checks/integration/test_zk.py
|
27
|
# stdlib
from nose.plugins.attrib import attr
# project
from checks import AgentCheck
from tests.checks.common import AgentCheckTest
@attr(requires='zookeeper')
class ZooKeeperTestCase(AgentCheckTest):
    """Integration tests for the ZooKeeper agent check."""
    CHECK_NAME = 'zk'

    # A standalone ZooKeeper is expected on the default client port.
    CONFIG = {
        'host': "127.0.0.1",
        'port': 2181,
        'expected_mode': "standalone",
        'tags': ["mytag"]
    }

    # Same server, but we claim it should be a follower -> mode check fails.
    WRONG_EXPECTED_MODE = {
        'host': "127.0.0.1",
        'port': 2181,
        'expected_mode': "follower",
        'tags': []
    }

    # Nothing listens on port 2182, so the connection must fail.
    CONNECTION_FAILURE_CONFIG = {
        'host': "127.0.0.1",
        'port': 2182,
        'expected_mode': "follower",
        'tags': []
    }

    METRICS = [
        'zookeeper.latency.min',
        'zookeeper.latency.avg',
        'zookeeper.latency.max',
        'zookeeper.bytes_received',
        'zookeeper.bytes_sent',
        # BUGFIX: 'zookeeper.connections' appeared twice in this list,
        # asserting the identical metric a second time for no benefit.
        'zookeeper.connections',
        'zookeeper.bytes_outstanding',
        'zookeeper.outstanding_requests',
        'zookeeper.zxid.epoch',
        'zookeeper.zxid.count',
        'zookeeper.nodes',
    ]

    def test_check(self):
        """
        Collect ZooKeeper metrics.
        """
        config = {
            'instances': [self.CONFIG]
        }
        self.run_check(config)

        # Test metrics
        for mname in self.METRICS:
            self.assertMetric(mname, tags=["mode:standalone", "mytag"], count=1)

        # Test service checks
        self.assertServiceCheck("zookeeper.ruok", status=AgentCheck.OK)
        self.assertServiceCheck("zookeeper.mode", status=AgentCheck.OK)
        self.coverage_report()

    def test_wrong_expected_mode(self):
        """
        Raise a 'critical' service check when ZooKeeper is not in the expected mode
        """
        config = {
            'instances': [self.WRONG_EXPECTED_MODE]
        }
        self.run_check(config)

        # Test service checks
        self.assertServiceCheck("zookeeper.mode", status=AgentCheck.CRITICAL)

    def test_error_state(self):
        """
        Raise a 'critical' service check when ZooKeeper is in an error state
        """
        config = {
            'instances': [self.CONNECTION_FAILURE_CONFIG]
        }
        self.assertRaises(
            Exception,
            lambda: self.run_check(config)
        )

        # Test service checks
        self.assertServiceCheck("zookeeper.ruok", status=AgentCheck.CRITICAL)
|
unnikrishnankgs/va
|
refs/heads/master
|
venv/lib/python3.5/site-packages/IPython/frontend.py
|
9
|
"""
Shim to maintain backwards compatibility with old frontend imports.
We have moved all contents of the old `frontend` subpackage into top-level
subpackages (`html`, `qt` and `terminal`), and flattened the notebook into
just `IPython.html`, formerly `IPython.frontend.html.notebook`.
This will let code that was making `from IPython.frontend...` calls continue
working, though a warning will be printed.
"""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
import sys
from warnings import warn

from IPython.utils.shimmodule import ShimModule, ShimWarning

# Emit a deprecation warning as soon as anything imports this module.
warn("The top-level `frontend` package has been deprecated since IPython 1.0. "
     "All its subpackages have been moved to the top `IPython` level.", ShimWarning)

# Unconditionally insert the shim into sys.modules so that further import calls
# trigger the custom attribute access above
sys.modules['IPython.frontend.html.notebook'] = ShimModule(
    src='IPython.frontend.html.notebook', mirror='IPython.html')
sys.modules['IPython.frontend'] = ShimModule(
    src='IPython.frontend', mirror='IPython')
|
jtoppins/beaker
|
refs/heads/master
|
Server/bkr/server/needpropertyxml.py
|
1
|
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
import operator
from sqlalchemy import or_, and_, not_, exists
from sqlalchemy.orm.exc import NoResultFound
from sqlalchemy.orm import aliased
import datetime
from lxml import etree
from bkr.server.model import (Arch, Distro, DistroTree, DistroTag,
OSMajor, OSVersion, SystemPool, System, User,
Key, Key_Value_Int, Key_Value_String,
LabController, LabControllerDistroTree,
Hypervisor, Cpu, CpuFlag, Numa, Device,
DeviceClass, Disk, Power, PowerType)
# This follows the SI conventions used in disks and networks --
# *not* applicable to computer memory!
def bytes_multiplier(units):
    """Return the number of bytes denoted by the unit string *units*
    (e.g. 'MiB' -> 1024**2), or None for an unrecognised unit."""
    si = 1000
    iec = 1024
    multipliers = {
        'bytes': 1,
        'B': 1,
        'kB': si,
        'KB': si,
        'KiB': iec,
        'MB': si ** 2,
        'MiB': iec ** 2,
        'GB': si ** 3,
        'GiB': iec ** 3,
        'TB': si ** 4,
        'TiB': iec ** 4,
    }
    return multipliers.get(units)
# convert a date to a datetime range
def get_dtrange(dt):
    """Return (start, end) datetimes spanning the whole day *dt*:
    00:00:00 through 23:59:59 (second resolution)."""
    day_start = datetime.datetime.combine(dt, datetime.time.min)
    day_end = datetime.datetime.combine(dt, datetime.time(23, 59, 59))
    return day_start, day_end
# Common special query processing specific to
# System.date_added and System.date_lastcheckin
def date_filter(col, op, value):
    """Build a SQLAlchemy clause comparing the datetime column *col*
    against the 'YYYY-MM-DD' string *value* using the comparison method
    named by *op* (e.g. '__eq__', '__gt__').

    Raises ValueError when *value* is not a valid YYYY-MM-DD date.
    """
    try:
        dt = datetime.datetime.strptime(value, '%Y-%m-%d').date()
    except ValueError:
        raise ValueError('Invalid date format: %s. '
                         'Use YYYY-MM-DD.' % value)
    if op in ('__eq__', '__ne__'):
        # Date (in)equality means "(not) anywhere within that day".
        start_dt, end_dt = get_dtrange(dt)
        within_day = and_(col.__ge__(start_dt), col.__le__(end_dt))
        return within_day if op == '__eq__' else not_(within_day)
    if op == '__gt__':
        # Strictly after the day: compare against its final second.
        return col.__gt__(datetime.datetime.combine(dt, datetime.time(23, 59, 59)))
    # Remaining operators compare against the start of the day.
    return getattr(col, op)(datetime.datetime.combine(dt, datetime.time(0, 0, 0)))
class ElementWrapper(object):
    """Wraps an lxml element and dispatches child elements to the filter
    class registered for their tag in ``subclassDict``."""

    # Operator translation table: XML attribute spelling -> Python
    # comparison method name.
    op_table = { '=' : '__eq__',
                 '==' : '__eq__',
                 'like': 'like',
                 '!=' : '__ne__',
                 '>' : '__gt__',
                 '>=' : '__ge__',
                 '<' : '__lt__',
                 '<=' : '__le__'}

    # NOTE(review): the class-level default is an empty *list* although it
    # is used as a mapping ('in' + subscript in get_subclass); subclasses
    # override it with None, in which case __init__ installs the dict
    # passed to it.
    subclassDict = []

    def get_subclass(self, element):
        """Return the wrapper class registered for *element*'s tag."""
        name = element.tag
        if name in self.subclassDict:
            return self.subclassDict[name]
        # As a kindness to the user we treat unrecognised elements like <and/>,
        # so that valid elements inside the unrecognised one are not ignored.
        return XmlAnd

    def __init__(self, wrappedEl, subclassDict=None):
        self.wrappedEl = wrappedEl
        # IDIOM FIX: compare against None with ``is``, not ``==``.
        if self.subclassDict is None:
            self.subclassDict = subclassDict

    def __repr__(self):
        return '%s("%s")' % (self.__class__, repr(self.wrappedEl))

    def __iter__(self):
        # Wrap element children; pass other nodes (e.g. text) through as-is.
        for child in self.wrappedEl:
            if isinstance(child, etree._Element):
                yield self.get_subclass(child)(child, self.subclassDict)
            else:
                yield child

    def __getitem__(self, n):
        child = self.wrappedEl[n]
        if isinstance(child, etree._Element):
            return self.get_subclass(child)(child, self.subclassDict)
        else:
            return child

    def get_xml_attr(self, attr, typeCast, defaultValue):
        """Return attribute *attr* converted with *typeCast*, or
        *defaultValue* when the attribute is absent."""
        attributes = self.wrappedEl.attrib
        if attr in attributes:
            return typeCast(attributes[attr])
        else:
            return defaultValue

    # These are the default behaviours for each element.
    # Note that unrecognised elements become XmlAnd!

    def apply_filter(self, query):
        # Apply this element's clause (if any) to the query.
        query, clause = self.filter(query)
        if clause is not None:
            query = query.filter(clause)
        return query

    def filter(self, joins):
        return (joins, None)

    def filter_disk(self):
        return None

    def filter_openstack_flavors(self, flavors, lab_controller):
        return []

    def virtualisable(self):
        """
        In addition to the flavor filtering, we have this simple boolean check as
        an extra optimisation. This should return False if the host requirements
        could *never* be satisfied by a dynamic virt guest. That way we can bail
        out early and avoid going to OpenStack at all for this recipe.
        """
        return False
class XmlAnd(ElementWrapper):
    """Conjunction element: AND together the clauses of all children."""
    subclassDict = None

    def filter(self, joins):
        clauses = []
        for child in self:
            if callable(getattr(child, 'filter', None)):
                joins, clause = child.filter(joins)
                if clause is not None:
                    clauses.append(clause)
        if not clauses:
            return (joins, None)
        return (joins, and_(*clauses))

    def filter_disk(self):
        clauses = []
        for child in self:
            if callable(getattr(child, 'filter_disk', None)):
                clause = child.filter_disk()
                if clause is not None:
                    clauses.append(clause)
        return and_(*clauses)

    def filter_openstack_flavors(self, flavors, lab_controller):
        # Intersection: keep only flavors acceptable to every child.
        acceptable = set(flavors)
        for child in self:
            acceptable.intersection_update(
                child.filter_openstack_flavors(flavors, lab_controller))
        return list(acceptable)

    def virtualisable(self):
        # AND semantics: every child must be virtualisable.
        return all(child.virtualisable() for child in self)
class XmlOr(ElementWrapper):
    """
    Combine sub queries into or_ statements
    """
    subclassDict = None
    def filter(self, joins):
        queries = []
        # Note: joins is threaded through each child so joins accumulate,
        # even though only one branch of the OR needs to match.
        for child in self:
            if callable(getattr(child, 'filter', None)):
                (joins, query) = child.filter(joins)
                if query is not None:
                    queries.append(query)
        if not queries:
            return (joins, None)
        return (joins, or_(*queries))
    def filter_disk(self):
        queries = []
        for child in self:
            if callable(getattr(child, 'filter_disk', None)):
                query = child.filter_disk()
                if query is not None:
                    queries.append(query)
        return or_(*queries)
    def filter_openstack_flavors(self, flavors, lab_controller):
        # Union: a flavor satisfying any child filter is acceptable.
        result = set()
        for child in self:
            child_result = child.filter_openstack_flavors(flavors, lab_controller)
            result.update(child_result)
        return list(result)
    def virtualisable(self):
        # Disjunction: one virtualisable child suffices.
        for child in self:
            if child.virtualisable():
                return True
        return False
class XmlNot(ElementWrapper):
    """
    Combines sub-filters with not_(and_()).
    """
    subclassDict = None
    def filter(self, joins):
        queries = []
        for child in self:
            if callable(getattr(child, 'filter', None)):
                (joins, query) = child.filter(joins)
                if query is not None:
                    queries.append(query)
        if not queries:
            # Nothing to negate: no constraint at all.
            return (joins, None)
        return (joins, not_(and_(*queries)))
    def filter_disk(self):
        queries = []
        for child in self:
            if callable(getattr(child, 'filter_disk', None)):
                query = child.filter_disk()
                if query is not None:
                    queries.append(query)
        return not_(and_(*queries))
class XmlDistroArch(ElementWrapper):
    """
    Filter distro tree based on Arch
    """
    def filter(self, joins):
        comparison = self.op_table[self.get_xml_attr('op', unicode, '==')]
        wanted = self.get_xml_attr('value', unicode, None)
        if not (comparison and wanted):
            return (joins, None)
        # Join through to the arch table before comparing.
        joins = joins.join(DistroTree.arch)
        return (joins, getattr(Arch.arch, comparison)(wanted))
class XmlDistroFamily(ElementWrapper):
    """
    Filter distro tree based on Family
    """
    def filter(self, joins):
        comparison = self.op_table[self.get_xml_attr('op', unicode, '==')]
        wanted = self.get_xml_attr('value', unicode, None)
        if not (comparison and wanted):
            return (joins, None)
        # Walk distro -> osversion -> osmajor to reach the family name.
        joins = joins.join(DistroTree.distro, Distro.osversion, OSVersion.osmajor)
        return (joins, getattr(OSMajor.osmajor, comparison)(wanted))
class XmlDistroTag(ElementWrapper):
    """
    Filter distro tree based on Tag
    """
    # Tags only support equality tests.
    op_table = { '=' : '__eq__',
                 '==' : '__eq__',
                 '!=' : '__ne__'}
    def filter(self, joins):
        op = self.op_table[self.get_xml_attr('op', unicode, '==')]
        value = self.get_xml_attr('value', unicode, None)
        query = None
        if value:
            joins = joins.join(DistroTree.distro)
            if op == '__ne__':
                # "Not tagged with value" means NOT EXISTS a matching tag,
                # not "has some tag whose name differs".
                query = not_(Distro._tags.any(DistroTag.tag == value))
            else:
                query = Distro._tags.any(getattr(DistroTag.tag, op)(value))
        return (joins, query)
class XmlDistroVariant(ElementWrapper):
    """
    Filter distro tree based on variant
    """
    def filter(self, joins):
        comparison = self.op_table[self.get_xml_attr('op', unicode, '==')]
        wanted = self.get_xml_attr('value', unicode, None)
        if not (comparison and wanted):
            return (joins, None)
        # Variant lives directly on the distro tree row; no join needed.
        return (joins, getattr(DistroTree.variant, comparison)(wanted))
class XmlDistroName(ElementWrapper):
    """
    Filter distro tree based on distro name
    """
    def filter(self, joins):
        comparison = self.op_table[self.get_xml_attr('op', unicode, '==')]
        wanted = self.get_xml_attr('value', unicode, None)
        if not (comparison and wanted):
            return (joins, None)
        joins = joins.join(DistroTree.distro)
        return (joins, getattr(Distro.name, comparison)(wanted))
class XmlDistroVirt(ElementWrapper):
    """
    This is a noop, since we don't have virt distros anymore.
    """
    # Kept only so old XML containing <distro_virt/> still parses;
    # inherits the do-nothing defaults from ElementWrapper.
    pass
class XmlPool(ElementWrapper):
    """
    Filter based on pool
    """
    # Pools only support equality tests.
    op_table = { '=' : '__eq__',
                 '==' : '__eq__',
                 '!=' : '__ne__'}
    def filter(self, joins):
        op = self.op_table[self.get_xml_attr('op', unicode, '==')]
        value = self.get_xml_attr('value', unicode, None)
        if value:
            # - '==' - search for system which is member of given pool
            # - '!=' - search for system which is not member of given pool
            try:
                pool = SystemPool.by_name(value)
            except NoResultFound:
                # Unknown pool name: impose no constraint at all.
                return (joins, None)
            if op == '__eq__':
                query = System.pools.any(SystemPool.id == pool.id)
            else:
                query = not_(System.pools.any(SystemPool.id == pool.id))
        else:
            # - '!=' - search for system which is member of any pool
            # - '==' - search for system which is not member of any pool
            if op == '__eq__':
                query = System.pools == None
            else:
                query = System.pools != None
        return (joins, query)
class XmlKeyValue(ElementWrapper):
    """
    Filter based on key_value
    """
    def filter(self, joins):
        key = self.get_xml_attr('key', unicode, None)
        op = self.op_table[self.get_xml_attr('op', unicode, '==')]
        value = self.get_xml_attr('value', unicode, None)
        try:
            _key = Key.by_name(key)
        except NoResultFound:
            # Unknown key name: impose no constraint at all.
            return (joins, None)
        if op not in ('__eq__', '__ne__') and not value:
            # makes no sense, discard
            return (joins, None)
        # Numeric and string key/values live in separate tables.
        if _key.numeric:
            key_value_cls = Key_Value_Int
            collection = System.key_values_int
        else:
            key_value_cls = Key_Value_String
            collection = System.key_values_string
        # <key_value key="THING" op="==" /> -- must have key with any value
        # <key_value key="THING" op="==" value="VALUE" /> -- must have key with given value
        # <key_value key="THING" op="!=" /> -- must not have key
        # <key_value key="THING" op="!=" value="VALUE" /> -- must not have key with given value
        if op == '__ne__' and value is None:
            query = not_(collection.any(key_value_cls.key == _key))
        elif op == '__ne__':
            query = not_(collection.any(and_(
                    key_value_cls.key == _key,
                    key_value_cls.key_value == value)))
        elif op == '__eq__' and value is None:
            query = collection.any(key_value_cls.key == _key)
        elif op == '__eq__':
            query = collection.any(and_(
                    key_value_cls.key == _key,
                    key_value_cls.key_value == value))
        else:
            # Ordering/like operators compare against the key's value.
            query = collection.any(and_(
                    key_value_cls.key == _key,
                    getattr(key_value_cls.key_value, op)(value)))
        return (joins, query)
class XmlAutoProv(ElementWrapper):
    """
    Verify that a system has the ability to power cycle and is connected to a
    lab controller
    """
    def filter(self, joins):
        requested = self.get_xml_attr('value', unicode, False)
        if not requested:
            return (joins, None)
        # Inner-joining System.power drops systems with no power config;
        # the clause additionally requires an attached lab controller.
        joins = joins.join(System.power)
        return (joins, System.lab_controller != None)
class XmlHostLabController(ElementWrapper):
    """
    Pick a system from this lab controller
    """
    # Lab controllers only support equality tests.
    op_table = { '=' : '__eq__',
                 '==' : '__eq__',
                 '!=' : '__ne__'}
    def filter(self, joins):
        comparison = self.op_table[self.get_xml_attr('op', unicode, '==')]
        fqdn = self.get_xml_attr('value', unicode, None)
        if not fqdn:
            return (joins, None)
        joins = joins.join(System.lab_controller)
        return (joins, getattr(LabController.fqdn, comparison)(fqdn))
    def filter_openstack_flavors(self, flavors, lab_controller):
        comparison = self.op_table[self.get_xml_attr('op', unicode, '==')]
        fqdn = self.get_xml_attr('value', unicode, None)
        if not fqdn:
            return []
        # All flavors come from the single OpenStack lab controller,
        # so this matches either all of them or none of them.
        if getattr(lab_controller.fqdn, comparison)(fqdn):
            return flavors
        return []
    def virtualisable(self):
        return True
class XmlDistroLabController(ElementWrapper):
    """
    Pick a distro tree available on this lab controller
    """
    # Lab controllers only support equality tests.
    op_table = { '=' : '__eq__',
                 '==' : '__eq__',
                 '!=' : '__ne__'}
    def filter(self, joins):
        op = self.op_table[self.get_xml_attr('op', unicode, '==')]
        value = self.get_xml_attr('value', unicode, None)
        if not value:
            return (joins, None)
        # Correlated EXISTS subquery against the distro-tree/lab-controller
        # association table, rather than a join, so that '!=' can be
        # expressed as NOT EXISTS.
        if op == '__eq__':
            query = exists([1],
                    from_obj=[LabControllerDistroTree.__table__.join(LabController.__table__)])\
                    .where(LabControllerDistroTree.distro_tree_id == DistroTree.id)\
                    .where(LabController.fqdn == value)
        else:
            query = not_(exists([1],
                    from_obj=[LabControllerDistroTree.__table__.join(LabController.__table__)])\
                    .where(LabControllerDistroTree.distro_tree_id == DistroTree.id)\
                    .where(LabController.fqdn == value))
        return (joins, query)
class XmlHypervisor(ElementWrapper):
    """
    Pick a system based on the hypervisor.
    """
    def filter(self, joins):
        op = self.op_table[self.get_xml_attr('op', unicode, '==')]
        # Empty string is normalised to None (bare metal).
        value = self.get_xml_attr('value', unicode, None) or None
        # Outer join: systems with no hypervisor row must still be
        # considered (they represent bare metal).
        joins = joins.outerjoin(System.hypervisor)
        if op == '__ne__' and value:
            # "Not this hypervisor" should also match bare metal (NULL),
            # which a plain != comparison would exclude.
            query = or_(
                    Hypervisor.hypervisor == None,
                    getattr(Hypervisor.hypervisor, op)(value))
        else:
            query = getattr(Hypervisor.hypervisor, op)(value)
        return (joins, query)
    def filter_openstack_flavors(self, flavors, lab_controller):
        if self._matches_kvm():
            return flavors
        else:
            return []
    def virtualisable(self):
        return self._matches_kvm()
    def _matches_kvm(self):
        # XXX 'KVM' is hardcoded here assuming that is what OpenStack is using,
        # but we should have a better solution
        op = self.op_table[self.get_xml_attr('op', unicode, '==')]
        value = self.get_xml_attr('value', unicode, None) or None
        return getattr(operator, op)('KVM', value)
class XmlSystemType(ElementWrapper):
    """
    Pick a system with the correct system type.
    """
    def filter(self, joins):
        wanted = self.get_xml_attr('value', unicode, None)
        if not wanted:
            return (joins, None)
        return (joins, System.type == wanted)
    def filter_openstack_flavors(self, flavors, lab_controller):
        # A dynamic virt guest can only stand in for a 'Machine' system.
        if self._matches_machine():
            return flavors
        return []
    def virtualisable(self):
        return self._matches_machine()
    def _matches_machine(self):
        return self.get_xml_attr('value', unicode, None) == 'Machine'
class XmlSystemStatus(ElementWrapper):
    """
    Pick a system with the correct system status.
    """
    def filter(self, joins):
        wanted = self.get_xml_attr('value', unicode, None)
        if not wanted:
            return (joins, None)
        return (joins, System.status == wanted)
class XmlHostName(ElementWrapper):
    """
    Pick a system with the correct hostname.
    """
    def filter(self, joins):
        comparison = self.op_table[self.get_xml_attr('op', unicode, '==')]
        wanted = self.get_xml_attr('value', unicode, None)
        if not wanted:
            return (joins, None)
        return (joins, getattr(System.fqdn, comparison)(wanted))
class XmlLastInventoried(ElementWrapper):
    """
    Pick a system with the correct last inventoried date/status
    """
    op_table = { '=' : '__eq__',
                 '==' : '__eq__',
                 '!=' : '__ne__',
                 '>' : '__gt__',
                 '>=' : '__ge__',
                 '<' : '__lt__',
                 '<=' : '__le__'}
    def filter(self, joins):
        column = System.date_lastcheckin
        comparison = self.op_table[self.get_xml_attr('op', unicode, '==')]
        when = self.get_xml_attr('value', unicode, None)
        if when:
            clause = date_filter(column, comparison, when)
        else:
            # No value given: test whether the system has ever been
            # inventoried at all (IS NULL / IS NOT NULL per the operator).
            clause = getattr(column, comparison)(None)
        return (joins, clause)
class XmlSystemLender(ElementWrapper):
    """
    Pick a system with the correct lender.
    """
    def filter(self, joins):
        comparison = self.op_table[self.get_xml_attr('op', unicode, '==')]
        wanted = self.get_xml_attr('value', unicode, None)
        if not wanted:
            return (joins, None)
        return (joins, getattr(System.lender, comparison)(wanted))
class XmlSystemVendor(ElementWrapper):
    """
    Pick a system with the correct vendor.
    """
    def filter(self, joins):
        comparison = self.op_table[self.get_xml_attr('op', unicode, '==')]
        wanted = self.get_xml_attr('value', unicode, None)
        if not wanted:
            return (joins, None)
        return (joins, getattr(System.vendor, comparison)(wanted))
class XmlSystemLocation(ElementWrapper):
    """
    Pick a system with the correct location.
    """
    def filter(self, joins):
        comparison = self.op_table[self.get_xml_attr('op', unicode, '==')]
        wanted = self.get_xml_attr('value', unicode, None)
        if not wanted:
            return (joins, None)
        return (joins, getattr(System.location, comparison)(wanted))
class XmlSystemSerial(ElementWrapper):
    """
    Pick a system with the correct serial number.
    """
    def filter(self, joins):
        comparison = self.op_table[self.get_xml_attr('op', unicode, '==')]
        wanted = self.get_xml_attr('value', unicode, None)
        if not wanted:
            return (joins, None)
        return (joins, getattr(System.serial, comparison)(wanted))
class XmlSystemModel(ElementWrapper):
    """
    Pick a system with the correct model.
    """
    def filter(self, joins):
        comparison = self.op_table[self.get_xml_attr('op', unicode, '==')]
        wanted = self.get_xml_attr('value', unicode, None)
        if not wanted:
            return (joins, None)
        return (joins, getattr(System.model, comparison)(wanted))
class XmlMemory(ElementWrapper):
    """
    Pick a system with the correct amount of memory.
    """
    def filter(self, joins):
        comparison = self.op_table[self.get_xml_attr('op', unicode, '==')]
        wanted = self.get_xml_attr('value', int, None)
        if not wanted:
            return (joins, None)
        return (joins, getattr(System.memory, comparison)(wanted))
    def filter_openstack_flavors(self, flavors, lab_controller):
        comparison = self.op_table[self.get_xml_attr('op', unicode, '==')]
        wanted = self.get_xml_attr('value', int, None)
        if not wanted:
            return flavors
        # Compare against the flavor's RAM size using the same operator.
        return [flavor for flavor in flavors
                if getattr(operator, comparison)(flavor.ram, wanted)]
    def virtualisable(self):
        return True
class XmlSystemOwner(ElementWrapper):
    """
    Pick a system with the correct owner.
    """
    def filter(self, joins):
        comparison = self.op_table[self.get_xml_attr('op', unicode, '==')]
        wanted = self.get_xml_attr('value', unicode, None)
        if not wanted:
            return (joins, None)
        # Alias User because several filters may join the user table.
        owner_alias = aliased(User)
        joins = joins.join((owner_alias, System.owner))
        return (joins, getattr(owner_alias.user_name, comparison)(wanted))
class XmlSystemUser(ElementWrapper):
    """
    Pick a system with the correct user.
    """
    def filter(self, joins):
        comparison = self.op_table[self.get_xml_attr('op', unicode, '==')]
        wanted = self.get_xml_attr('value', unicode, None)
        if not wanted:
            return (joins, None)
        # Alias User because several filters may join the user table.
        user_alias = aliased(User)
        joins = joins.join((user_alias, System.user))
        return (joins, getattr(user_alias.user_name, comparison)(wanted))
class XmlSystemLoaned(ElementWrapper):
    """
    Pick a system that has been loaned to this user.
    """
    def filter(self, joins):
        comparison = self.op_table[self.get_xml_attr('op', unicode, '==')]
        wanted = self.get_xml_attr('value', unicode, None)
        if not wanted:
            return (joins, None)
        # Alias User because several filters may join the user table.
        loaned_alias = aliased(User)
        joins = joins.join((loaned_alias, System.loaned))
        return (joins, getattr(loaned_alias.user_name, comparison)(wanted))
class XmlSystemAdded(ElementWrapper):
    """
    Pick a system based on when it was added
    """
    op_table = { '=' : '__eq__',
                 '==' : '__eq__',
                 '!=' : '__ne__',
                 '>' : '__gt__',
                 '>=' : '__ge__',
                 '<' : '__lt__',
                 '<=' : '__le__'}
    def filter(self, joins):
        comparison = self.op_table[self.get_xml_attr('op', unicode, '==')]
        when = self.get_xml_attr('value', unicode, None)
        if not when:
            return (joins, None)
        # date_filter handles date-only values sensibly for each operator.
        return (joins, date_filter(System.date_added, comparison, when))
class XmlSystemPowertype(ElementWrapper):
    """
    Pick a system with the given power type.
    """
    def filter(self, joins):
        comparison = self.op_table[self.get_xml_attr('op', unicode, '==')]
        wanted = self.get_xml_attr('value', unicode, None)
        if not wanted:
            return (joins, None)
        joins = joins.join(System.power, Power.power_type)
        return (joins, getattr(PowerType.name, comparison)(wanted))
class XmlCpuProcessors(ElementWrapper):
    """
    Pick a system with the correct amount of cpu processors.
    """
    def filter(self, joins):
        comparison = self.op_table[self.get_xml_attr('op', unicode, '==')]
        wanted = self.get_xml_attr('value', int, None)
        if not wanted:
            return (joins, None)
        joins = joins.join(System.cpu)
        return (joins, getattr(Cpu.processors, comparison)(wanted))
    def filter_openstack_flavors(self, flavors, lab_controller):
        # We treat an OpenStack flavor with N vcpus as having N single-core
        # processors. Not sure how realistic that is but we have to pick
        # something...
        comparison = self.op_table[self.get_xml_attr('op', unicode, '==')]
        wanted = self.get_xml_attr('value', int, None)
        if not wanted:
            return flavors
        return [flavor for flavor in flavors
                if getattr(operator, comparison)(flavor.vcpus, wanted)]
    def virtualisable(self):
        return True
class XmlCpuCores(ElementWrapper):
    """
    Pick a system with the correct amount of cpu cores.
    """
    def filter(self, joins):
        comparison = self.op_table[self.get_xml_attr('op', unicode, '==')]
        wanted = self.get_xml_attr('value', int, None)
        if not wanted:
            return (joins, None)
        joins = joins.join(System.cpu)
        return (joins, getattr(Cpu.cores, comparison)(wanted))
    def filter_openstack_flavors(self, flavors, lab_controller):
        comparison = self.op_table[self.get_xml_attr('op', unicode, '==')]
        wanted = self.get_xml_attr('value', int, None)
        if not wanted:
            return flavors
        # Flavors expose only vcpus, so compare cores against that.
        return [flavor for flavor in flavors
                if getattr(operator, comparison)(flavor.vcpus, wanted)]
    def virtualisable(self):
        return True
class XmlCpuFamily(ElementWrapper):
    """
    Pick a system with the correct cpu family.
    """
    def filter(self, joins):
        comparison = self.op_table[self.get_xml_attr('op', unicode, '==')]
        wanted = self.get_xml_attr('value', int, None)
        if not wanted:
            return (joins, None)
        joins = joins.join(System.cpu)
        return (joins, getattr(Cpu.family, comparison)(wanted))
class XmlCpuModel(ElementWrapper):
    """
    Pick a system with the correct cpu model.
    """
    def filter(self, joins):
        comparison = self.op_table[self.get_xml_attr('op', unicode, '==')]
        wanted = self.get_xml_attr('value', int, None)
        if not wanted:
            return (joins, None)
        joins = joins.join(System.cpu)
        return (joins, getattr(Cpu.model, comparison)(wanted))
class XmlCpuModelName(ElementWrapper):
    """
    Pick a system with the correct cpu model_name.
    """
    def filter(self, joins):
        comparison = self.op_table[self.get_xml_attr('op', unicode, '==')]
        wanted = self.get_xml_attr('value', unicode, None)
        if not wanted:
            return (joins, None)
        joins = joins.join(System.cpu)
        return (joins, getattr(Cpu.model_name, comparison)(wanted))
class XmlCpuSockets(ElementWrapper):
    """
    Pick a system with the correct number of cpu sockets.
    """
    def filter(self, joins):
        comparison = self.op_table[self.get_xml_attr('op', unicode, '==')]
        wanted = self.get_xml_attr('value', int, None)
        if not wanted:
            return (joins, None)
        joins = joins.join(System.cpu)
        return (joins, getattr(Cpu.sockets, comparison)(wanted))
class XmlCpuSpeed(ElementWrapper):
    """
    Pick a system with the correct cpu speed.
    """
    def filter(self, joins):
        comparison = self.op_table[self.get_xml_attr('op', unicode, '==')]
        wanted = self.get_xml_attr('value', float, None)
        if not wanted:
            return (joins, None)
        joins = joins.join(System.cpu)
        return (joins, getattr(Cpu.speed, comparison)(wanted))
class XmlCpuStepping(ElementWrapper):
    """
    Pick a system with the correct cpu stepping.
    """
    def filter(self, joins):
        comparison = self.op_table[self.get_xml_attr('op', unicode, '==')]
        wanted = self.get_xml_attr('value', int, None)
        if not wanted:
            return (joins, None)
        joins = joins.join(System.cpu)
        return (joins, getattr(Cpu.stepping, comparison)(wanted))
class XmlCpuVendor(ElementWrapper):
    """
    Pick a system with the correct cpu vendor.
    """
    def filter(self, joins):
        comparison = self.op_table[self.get_xml_attr('op', unicode, '==')]
        wanted = self.get_xml_attr('value', unicode, None)
        if not wanted:
            return (joins, None)
        joins = joins.join(System.cpu)
        return (joins, getattr(Cpu.vendor, comparison)(wanted))
class XmlCpuHyper(ElementWrapper):
    """
    Pick a system with cpu's that have hyperthreading enabled.
    """
    def filter(self, joins):
        op = '__eq__'
        # Bug fix: the default used to be the bare boolean False, so a
        # <hyper/> element with no 'value' attribute crashed with
        # AttributeError on False.lower(). Defaulting to the string
        # u'false' preserves the semantics (missing/false -> no filter)
        # without crashing.
        uvalue = self.get_xml_attr('value', unicode, u'false').lower()
        # Only 'true'/'1' switch the filter on; any other value is False.
        value = uvalue in ('true', '1')
        query = None
        if value:
            joins = joins.join(System.cpu)
            query = getattr(Cpu.hyper, op)(value)
        return (joins, query)
class XmlCpuFlag(ElementWrapper):
    """
    Filter systems based on System.cpu.flags
    """
    # Flags support equality and substring ('like') tests.
    op_table = { '=' : '__eq__',
                 '==' : '__eq__',
                 'like' : 'like',
                 '!=' : '__ne__'}
    def filter(self, joins):
        op = self.op_table[self.get_xml_attr('op', unicode, '==')]
        # '!=' is implemented as NOT EXISTS of an equality match, so it
        # means "has no such flag", not "has some flag that differs".
        equal = op == '__ne__' and '__eq__' or op
        value = self.get_xml_attr('value', unicode, None)
        query = None
        if value:
            joins = joins.join(System.cpu)
            query = getattr(CpuFlag.flag, equal)(value)
            if op == '__ne__':
                query = not_(Cpu.flags.any(query))
            else:
                query = Cpu.flags.any(query)
        return (joins, query)
class XmlArch(ElementWrapper):
    """
    Pick a system with the correct arch
    """
    # Arches only support equality tests.
    op_table = { '=' : '__eq__',
                 '==' : '__eq__',
                 '!=' : '__ne__'}
    def filter(self, joins):
        op = self.op_table[self.get_xml_attr('op', unicode, '==')]
        value = self.get_xml_attr('value', unicode, None)
        query = None
        if value:
            # As per XmlPool above,
            # - '==' - search for system which has given arch
            # - '!=' - search for system which does not have given arch
            try:
                arch = Arch.by_name(value)
            except ValueError:
                # Unknown arch name: impose no constraint at all.
                return (joins, None)
            if op == '__eq__':
                query = System.arch.any(Arch.id == arch.id)
            else:
                query = not_(System.arch.any(Arch.id == arch.id))
        return (joins, query)
    def filter_openstack_flavors(self, flavors, lab_controllers):
        # NOTE(review): parameter is named lab_controllers here but
        # lab_controller in the sibling classes; harmless since it is
        # passed positionally and unused, but worth unifying.
        if self._matches_x86():
            return flavors
        else:
            return []
    def virtualisable(self):
        return self._matches_x86()
    def _matches_x86(self):
        # OpenStack guests are only offered for x86 arches.
        op = self.op_table[self.get_xml_attr('op', unicode, '==')]
        value = self.get_xml_attr('value', unicode, None)
        return (getattr(operator, op)('x86_64', value) or
                getattr(operator, op)('i386', value))
class XmlNumaNodeCount(ElementWrapper):
    """
    Pick a system with the correct number of NUMA nodes.
    """
    def filter(self, joins):
        comparison = self.op_table[self.get_xml_attr('op', unicode, '==')]
        wanted = self.get_xml_attr('value', int, None)
        if not wanted:
            return (joins, None)
        joins = joins.join(System.numa)
        return (joins, getattr(Numa.nodes, comparison)(wanted))
class XmlDevice(ElementWrapper):
    """
    Pick a system with a matching device.
    """
    op_table = { '=' : '__eq__',
                 '==' : '__eq__',
                 'like' : 'like',
                 '!=' : '__ne__'}
    def filter(self, joins):
        op = self.op_table[self.get_xml_attr('op', unicode, '==')]
        # '!=' is implemented as NOT EXISTS of an equality match:
        # "has no such device", not "has some other device".
        equal = op == '__ne__' and '__eq__' or op
        query = None
        filter_clauses = []
        # Each recognised attribute contributes one AND-ed clause.
        for attr in ['bus', 'driver', 'vendor_id', 'device_id',
                'subsys_vendor_id', 'subsys_device_id', 'description']:
            value = self.get_xml_attr(attr, unicode, None)
            if value:
                filter_clauses.append(getattr(getattr(Device, attr),equal)(value))
        if self.get_xml_attr('type', unicode, None):
            # 'type' matches against the device class, always by equality.
            filter_clauses.append(Device.device_class.has(
                    DeviceClass.device_class ==
                    self.get_xml_attr('type', unicode, None)))
        if filter_clauses:
            if op == '__ne__':
                query = not_(System.devices.any(and_(*filter_clauses)))
            else:
                query = System.devices.any(and_(*filter_clauses))
        return (joins, query)
# N.B. these XmlDisk* filters do not work outside of a <disk/> element!
class XmlDiskModel(ElementWrapper):
    # Matches the model of an individual disk; only meaningful inside <disk/>.
    op_table = { '=' : '__eq__',
                 '==' : '__eq__',
                 'like' : 'like',
                 '!=' : '__ne__'}
    def filter_disk(self):
        comparison = self.op_table[self.get_xml_attr('op', unicode, '==')]
        wanted = self.get_xml_attr('value', unicode, None)
        if not wanted:
            return None
        return getattr(Disk.model, comparison)(wanted)
class XmlDiskSize(ElementWrapper):
    """
    Matches the size of an individual disk; only meaningful inside <disk/>.
    """
    def _bytes_value(self):
        # Convert the value/units attribute pair into plain bytes.
        value = self.get_xml_attr('value', int, None)
        units = self.get_xml_attr('units', unicode, 'bytes')
        if value:
            return value * bytes_multiplier(units)
    def filter_disk(self):
        op = self.op_table[self.get_xml_attr('op', unicode, '==')]
        value = self._bytes_value()
        if value:
            return getattr(Disk.size, op)(value)
        return None
    def filter_openstack_flavors(self, flavors, lab_controller):
        op = self.op_table[self.get_xml_attr('op', unicode, '==')]
        value = self._bytes_value()
        if value:
            # NOTE(review): flavor.disk is compared against a byte count
            # here; confirm the flavor attribute's unit matches.
            flavors = [flavor for flavor in flavors
                    if getattr(operator, op)(flavor.disk, value)]
        return flavors
    def virtualisable(self):
        return True
class XmlDiskSectorSize(ElementWrapper):
    """
    Matches a disk's logical sector size; only meaningful inside <disk/>.
    """
    def filter_disk(self):
        op = self.op_table[self.get_xml_attr('op', unicode, '==')]
        value = self.get_xml_attr('value', int, None)
        units = self.get_xml_attr('units', unicode, 'bytes')
        if value:
            # Bug fix: this previously compared against
            # Disk.phys_sector_size (copy-paste from XmlDiskPhysSectorSize),
            # which made <sector_size/> and <phys_sector_size/> behave
            # identically. Logical sector size lives in Disk.sector_size.
            return getattr(Disk.sector_size, op)(
                    value * bytes_multiplier(units))
        return None
class XmlDiskPhysSectorSize(ElementWrapper):
    # Matches a disk's physical sector size; only meaningful inside <disk/>.
    def filter_disk(self):
        comparison = self.op_table[self.get_xml_attr('op', unicode, '==')]
        wanted = self.get_xml_attr('value', int, None)
        unit_name = self.get_xml_attr('units', unicode, 'bytes')
        if not wanted:
            return None
        return getattr(Disk.phys_sector_size, comparison)(
                wanted * bytes_multiplier(unit_name))
class XmlDisk(XmlAnd):
    """
    Container for per-disk criteria: matches systems having at least one
    disk satisfying all child clauses. An empty <disk/> simply requires
    that the system has at least one disk.
    """
    subclassDict = {
        'and': XmlAnd,
        'or': XmlOr,
        'not': XmlNot,
        'model': XmlDiskModel,
        'size': XmlDiskSize,
        'sector_size': XmlDiskSectorSize,
        'phys_sector_size': XmlDiskPhysSectorSize,
    }
    def filter(self, joins):
        clauses = []
        for child in self:
            if callable(getattr(child, 'filter_disk', None)):
                clause = child.filter_disk()
                if clause is not None:
                    clauses.append(clause)
        if not clauses:
            # No criteria: just require some disk to exist.
            return (joins, System.disks.any())
        # All criteria must hold for the *same* disk, hence one any().
        return (joins, System.disks.any(and_(*clauses)))
class XmlDiskSpace(ElementWrapper):
    """
    Filter systems by total disk space
    """
    def _bytes_value(self):
        # Convert the value/units attribute pair into plain bytes.
        raw = self.get_xml_attr('value', int, None)
        unit_name = self.get_xml_attr('units', unicode, 'bytes')
        if raw:
            return raw * bytes_multiplier(unit_name)
    def filter(self, joins):
        comparison = self.op_table[self.get_xml_attr('op', unicode, '==')]
        wanted = self._bytes_value()
        if not wanted:
            return (joins, None)
        return (joins, getattr(System.diskspace, comparison)(wanted))
class XmlDiskCount(ElementWrapper):
    """
    Filter systems by total number of disks
    """
    def filter(self, joins):
        comparison = self.op_table[self.get_xml_attr('op', unicode, '==')]
        wanted = self.get_xml_attr('value', int, None)
        if not wanted:
            return (joins, None)
        return (joins, getattr(System.diskcount, comparison)(wanted))
class XmlCpu(XmlAnd):
    """
    Container element mapping <cpu/> child tags to their filter classes;
    children are AND-ed together (inherited from XmlAnd).
    """
    subclassDict = {
        'and': XmlAnd,
        'or': XmlOr,
        'not': XmlNot,
        'processors': XmlCpuProcessors,
        'cores': XmlCpuCores,
        'family': XmlCpuFamily,
        'hyper': XmlCpuHyper,
        'model': XmlCpuModel,
        'model_name': XmlCpuModelName,
        'sockets': XmlCpuSockets,
        'speed': XmlCpuSpeed,
        'stepping': XmlCpuStepping,
        'vendor': XmlCpuVendor,
        'flag': XmlCpuFlag,
    }
class XmlSystem(XmlAnd):
    """
    Container element mapping <system/> child tags to their filter classes;
    children are AND-ed together (inherited from XmlAnd).
    """
    subclassDict = {
        'and': XmlAnd,
        'or': XmlOr,
        'not': XmlNot,
        'name': XmlHostName,
        'type': XmlSystemType,
        'status': XmlSystemStatus,
        'lender': XmlSystemLender,
        'vendor': XmlSystemVendor,
        'model': XmlSystemModel,
        'owner': XmlSystemOwner,
        'user': XmlSystemUser,
        'loaned': XmlSystemLoaned,
        'location': XmlSystemLocation,
        'powertype': XmlSystemPowertype, #Should this be here?
        'serial': XmlSystemSerial,
        'memory': XmlMemory,
        'arch': XmlArch,
        'numanodes': XmlNumaNodeCount,
        'hypervisor': XmlHypervisor,
        'added': XmlSystemAdded,
        'last_inventoried':XmlLastInventoried
    }
class XmlHost(XmlAnd):
    """
    Root element for <hostRequires/>. Maps child tags to filter classes
    (children are AND-ed together, inherited from XmlAnd) and adds the
    'force' escape hatch plus OpenStack flavor handling.
    """
    subclassDict = {
        'and': XmlAnd,
        'or': XmlOr,
        'not': XmlNot,
        'labcontroller': XmlHostLabController,
        'system': XmlSystem,
        'cpu': XmlCpu,
        'device': XmlDevice,
        'disk': XmlDisk,
        'diskspace': XmlDiskSpace,
        'diskcount': XmlDiskCount,
        'pool': XmlPool,
        # for backward compatibility
        'group': XmlPool,
        'key_value': XmlKeyValue,
        'auto_prov': XmlAutoProv,
        'hostlabcontroller': XmlHostLabController, #deprecated
        'system_type': XmlSystemType, #deprecated
        'memory': XmlMemory, #deprecated
        'cpu_count': XmlCpuProcessors, #deprecated
        'hostname': XmlHostName, #deprecated
        'arch': XmlArch, #deprecated
        'numa_node_count': XmlNumaNodeCount, #deprecated
        'hypervisor': XmlHypervisor, #deprecated
    }
    @classmethod
    def from_string(cls, xml_string):
        """Parse *xml_string* into an XmlHost, raising ValueError on bad XML."""
        try:
            return cls(etree.fromstring(xml_string))
        except etree.XMLSyntaxError as e:
            raise ValueError('Invalid XML syntax for host filter: %s' % e)
    @property
    def force(self):
        """
        <hostRequires force="$FQDN"/> means to skip all normal host filtering
        and always use the named system.
        """
        return self.get_xml_attr('force', unicode, None)
    def virtualisable(self):
        # A forced FQDN can never be satisfied by a dynamic virt guest.
        if self.force:
            return False
        return super(XmlHost, self).virtualisable()
    # Physical Beaker systems are expected to have at least one disk of a sane
    # size, so recipes will often not bother including a requirement on disk
    # size. But OpenStack flavors can have no disk at all, so we filter those
    # out here.
    def filter_openstack_flavors(self, flavors, lab_controller):
        result = super(XmlHost, self).filter_openstack_flavors(flavors, lab_controller)
        return [flavor for flavor in result if flavor.disk > 0]
class XmlDistro(XmlAnd):
    """
    Root element for <distroRequires/>. Maps child tags to filter classes;
    children are AND-ed together (inherited from XmlAnd).
    """
    subclassDict = {
        'and': XmlAnd,
        'or': XmlOr,
        'not': XmlNot,
        'arch': XmlDistroArch,
        'family': XmlDistroFamily,
        'variant': XmlDistroVariant,
        'name': XmlDistroName,
        'tag': XmlDistroTag,
        'virt': XmlDistroVirt,
        'labcontroller': XmlDistroLabController,
        'distro_arch': XmlDistroArch, #deprecated
        'distro_family': XmlDistroFamily, #deprecated
        'distro_variant': XmlDistroVariant, #deprecated
        'distro_name': XmlDistroName, #deprecated
        'distro_tag': XmlDistroTag, #deprecated
        'distro_virt': XmlDistroVirt, #deprecated
        'distrolabcontroller': XmlDistroLabController, #deprecated
    }
def apply_distro_filter(filter, query):
    """
    Apply a <distroRequires/> filter (XmlDistro instance or raw XML string)
    to a distro-tree *query* and return the filtered query.
    """
    if isinstance(filter, basestring):
        filter = XmlDistro(etree.fromstring(filter))
    clauses = []
    # Thread the query through each child so joins accumulate.
    for child in filter:
        if callable(getattr(child, 'filter', None)):
            (query, clause) = child.filter(query)
            if clause is not None:
                clauses.append(clause)
    if clauses:
        query = query.filter(and_(*clauses))
    return query
|
andyzsf/django
|
refs/heads/master
|
tests/defaultfilters/tests.py
|
15
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import datetime
import decimal
import unittest
import warnings
from django.template.defaultfilters import (
add, addslashes, capfirst, center, cut, date, default, default_if_none,
dictsort, dictsortreversed, divisibleby, escape, escapejs_filter,
filesizeformat, first, floatformat, force_escape,
get_digit, iriencode, join, length, length_is, linebreaksbr,
linebreaks_filter, linenumbers, ljust, lower, make_list,
phone2numeric_filter, pluralize, removetags, rjust, slice_filter, slugify,
stringformat, striptags, time, timesince_filter, timeuntil_filter, title,
truncatechars_html, truncatewords, truncatewords_html, unordered_list,
upper, urlencode, urlize, urlizetrunc, wordcount, wordwrap, yesno,
)
from django.test import TestCase
from django.utils import six
from django.utils import translation
from django.utils.encoding import python_2_unicode_compatible
from django.utils.safestring import mark_safe, SafeData
class DefaultFiltersTests(TestCase):
    def test_floatformat(self):
        """Exercise floatformat: default rounding, fixed/negative precision,
        Decimal inputs, non-numeric fallbacks, infinities/NaN, __float__
        duck-typing, and independence from the active decimal context."""
        self.assertEqual(floatformat(7.7), '7.7')
        self.assertEqual(floatformat(7.0), '7')
        self.assertEqual(floatformat(0.7), '0.7')
        self.assertEqual(floatformat(0.07), '0.1')
        self.assertEqual(floatformat(0.007), '0.0')
        self.assertEqual(floatformat(0.0), '0')
        self.assertEqual(floatformat(7.7, 3), '7.700')
        self.assertEqual(floatformat(6.000000, 3), '6.000')
        self.assertEqual(floatformat(6.200000, 3), '6.200')
        # Negative precision trims a trailing all-zero fraction entirely.
        self.assertEqual(floatformat(6.200000, -3), '6.200')
        self.assertEqual(floatformat(13.1031, -3), '13.103')
        self.assertEqual(floatformat(11.1197, -2), '11.12')
        self.assertEqual(floatformat(11.0000, -2), '11')
        self.assertEqual(floatformat(11.000001, -2), '11.00')
        self.assertEqual(floatformat(8.2798, 3), '8.280')
        self.assertEqual(floatformat(5555.555, 2), '5555.56')
        self.assertEqual(floatformat(001.3000, 2), '1.30')
        self.assertEqual(floatformat(0.12345, 2), '0.12')
        self.assertEqual(floatformat(decimal.Decimal('555.555'), 2), '555.56')
        self.assertEqual(floatformat(decimal.Decimal('09.000')), '9')
        # Non-numeric input yields the empty string; a non-numeric arg
        # returns the input unchanged.
        self.assertEqual(floatformat('foo'), '')
        self.assertEqual(floatformat(13.1031, 'bar'), '13.1031')
        self.assertEqual(floatformat(18.125, 2), '18.13')
        self.assertEqual(floatformat('foo', 'bar'), '')
        self.assertEqual(floatformat('¿Cómo esta usted?'), '')
        self.assertEqual(floatformat(None), '')
        # Check that we're not converting to scientific notation.
        self.assertEqual(floatformat(0, 6), '0.000000')
        self.assertEqual(floatformat(0, 7), '0.0000000')
        self.assertEqual(floatformat(0, 10), '0.0000000000')
        self.assertEqual(floatformat(0.000000000000000000015, 20),
                                     '0.00000000000000000002')
        pos_inf = float(1e30000)
        self.assertEqual(floatformat(pos_inf), six.text_type(pos_inf))
        neg_inf = float(-1e30000)
        self.assertEqual(floatformat(neg_inf), six.text_type(neg_inf))
        nan = pos_inf / pos_inf
        self.assertEqual(floatformat(nan), six.text_type(nan))
        # Objects defining __float__ are accepted.
        class FloatWrapper(object):
            def __init__(self, value):
                self.value = value
            def __float__(self):
                return self.value
        self.assertEqual(floatformat(FloatWrapper(11.000001), -2), '11.00')
        # Regression for #15789
        decimal_ctx = decimal.getcontext()
        old_prec, decimal_ctx.prec = decimal_ctx.prec, 2
        try:
            self.assertEqual(floatformat(1.2345, 2), '1.23')
            self.assertEqual(floatformat(15.2042, -3), '15.204')
            self.assertEqual(floatformat(1.2345, '2'), '1.23')
            self.assertEqual(floatformat(15.2042, '-3'), '15.204')
            self.assertEqual(floatformat(decimal.Decimal('1.2345'), 2), '1.23')
            self.assertEqual(floatformat(decimal.Decimal('15.2042'), -3), '15.204')
        finally:
            decimal_ctx.prec = old_prec
    def test_floatformat_py2_fail(self):
        """Pin exact rounding of a float with many post-decimal digits."""
        self.assertEqual(floatformat(1.00000000000000015, 16), '1.0000000000000002')
    # The test above fails because of Python 2's float handling. Floats with
    # many zeroes after the decimal point should be passed in as another type
    # such as unicode or Decimal.
    if six.PY2:
        # Mark as an expected failure on Python 2 (see the comment above).
        test_floatformat_py2_fail = unittest.expectedFailure(test_floatformat_py2_fail)
def test_addslashes(self):
self.assertEqual(addslashes('"double quotes" and \'single quotes\''),
'\\"double quotes\\" and \\\'single quotes\\\'')
self.assertEqual(addslashes(r'\ : backslashes, too'),
'\\\\ : backslashes, too')
def test_capfirst(self):
self.assertEqual(capfirst('hello world'), 'Hello world')
    def test_escapejs(self):
        """escapejs replaces JS-significant characters with \\uXXXX escapes."""
        self.assertEqual(escapejs_filter('"double quotes" and \'single quotes\''),
            '\\u0022double quotes\\u0022 and \\u0027single quotes\\u0027')
        self.assertEqual(escapejs_filter(r'\ : backslashes, too'),
            '\\u005C : backslashes, too')
        self.assertEqual(escapejs_filter('and lots of whitespace: \r\n\t\v\f\b'),
            'and lots of whitespace: \\u000D\\u000A\\u0009\\u000B\\u000C\\u0008')
        self.assertEqual(escapejs_filter(r'<script>and this</script>'),
            '\\u003Cscript\\u003Eand this\\u003C/script\\u003E')
        # U+2028/U+2029 are line/paragraph separators that break inline JS.
        self.assertEqual(
            escapejs_filter('paragraph separator:\u2029and line separator:\u2028'),
            'paragraph separator:\\u2029and line separator:\\u2028')
    def test_linenumbers(self):
        """linenumbers prefixes each line, zero-padding once lines reach 10+."""
        self.assertEqual(linenumbers('line 1\nline 2'),
            '1. line 1\n2. line 2')
        self.assertEqual(linenumbers('\n'.join(['x'] * 10)),
            '01. x\n02. x\n03. x\n04. x\n05. x\n06. x\n07. '
            'x\n08. x\n09. x\n10. x')
def test_lower(self):
self.assertEqual(lower('TEST'), 'test')
# uppercase E umlaut
self.assertEqual(lower('\xcb'), '\xeb')
def test_make_list(self):
self.assertEqual(make_list('abc'), ['a', 'b', 'c'])
self.assertEqual(make_list(1234), ['1', '2', '3', '4'])
    def test_slugify(self):
        """slugify strips punctuation, ASCII-folds accents, hyphenates spaces."""
        self.assertEqual(slugify(' Jack & Jill like numbers 1,2,3 and 4 and'
            ' silly characters ?%.$!/'),
            'jack-jill-like-numbers-123-and-4-and-silly-characters')
        self.assertEqual(slugify("Un \xe9l\xe9phant \xe0 l'or\xe9e du bois"),
            'un-elephant-a-loree-du-bois')
def test_stringformat(self):
self.assertEqual(stringformat(1, '03d'), '001')
self.assertEqual(stringformat(1, 'z'), '')
    def test_title(self):
        """title capitalizes each word, leaving apostrophes and accents intact."""
        self.assertEqual(title('a nice title, isn\'t it?'),
            "A Nice Title, Isn't It?")
        self.assertEqual(title('discoth\xe8que'), 'Discoth\xe8que')
    def test_truncatewords(self):
        """truncatewords keeps N words, appending '...' when it truncates."""
        self.assertEqual(
            truncatewords('A sentence with a few words in it', 1), 'A ...')
        self.assertEqual(
            truncatewords('A sentence with a few words in it', 5),
            'A sentence with a few ...')
        self.assertEqual(
            truncatewords('A sentence with a few words in it', 100),
            'A sentence with a few words in it')
        # A non-numeric count leaves the input untouched.
        self.assertEqual(
            truncatewords('A sentence with a few words in it',
            'not a number'), 'A sentence with a few words in it')
    def test_truncatewords_html(self):
        """truncatewords_html truncates by words while closing open HTML tags."""
        self.assertEqual(truncatewords_html(
            '<p>one <a href="#">two - three <br>four</a> five</p>', 0), '')
        self.assertEqual(truncatewords_html('<p>one <a href="#">two - '
            'three <br>four</a> five</p>', 2),
            '<p>one <a href="#">two ...</a></p>')
        self.assertEqual(truncatewords_html(
            '<p>one <a href="#">two - three <br>four</a> five</p>', 4),
            '<p>one <a href="#">two - three <br>four ...</a></p>')
        self.assertEqual(truncatewords_html(
            '<p>one <a href="#">two - three <br>four</a> five</p>', 5),
            '<p>one <a href="#">two - three <br>four</a> five</p>')
        self.assertEqual(truncatewords_html(
            '<p>one <a href="#">two - three <br>four</a> five</p>', 100),
            '<p>one <a href="#">two - three <br>four</a> five</p>')
        # Non-ASCII words count as words too.
        self.assertEqual(truncatewords_html(
            '\xc5ngstr\xf6m was here', 1), '\xc5ngstr\xf6m ...')
        self.assertEqual(truncatewords_html('<i>Buenos días! '
            '¿Cómo está?</i>', 3),
            '<i>Buenos días! ¿Cómo ...</i>')
    def test_truncatechars_html(self):
        """truncatechars_html truncates by characters while closing open tags."""
        self.assertEqual(truncatechars_html(
            '<p>one <a href="#">two - three <br>four</a> five</p>', 0), '...')
        self.assertEqual(truncatechars_html('<p>one <a href="#">two - '
            'three <br>four</a> five</p>', 6),
            '<p>one...</p>')
        self.assertEqual(truncatechars_html(
            '<p>one <a href="#">two - three <br>four</a> five</p>', 11),
            '<p>one <a href="#">two ...</a></p>')
        self.assertEqual(truncatechars_html(
            '<p>one <a href="#">two - three <br>four</a> five</p>', 100),
            '<p>one <a href="#">two - three <br>four</a> five</p>')
        self.assertEqual(truncatechars_html(
            '<b>\xc5ngstr\xf6m</b> was here', 5), '<b>\xc5n...</b>')
        # Exactly at the limit: nothing is truncated.
        self.assertEqual(truncatechars_html(
            'a<b>b</b>c', 3), 'a<b>b</b>c')
def test_upper(self):
self.assertEqual(upper('Mixed case input'), 'MIXED CASE INPUT')
# lowercase e umlaut
self.assertEqual(upper('\xeb'), '\xcb')
    def test_urlencode(self):
        """urlencode percent-encodes non-URL-safe characters; ints are stringified."""
        self.assertEqual(urlencode('fran\xe7ois & jill'),
            'fran%C3%A7ois%20%26%20jill')
        self.assertEqual(urlencode(1), '1')
    def test_iriencode(self):
        """iriencode encodes an IRI; already percent-encoded input is unchanged."""
        self.assertEqual(iriencode('S\xf8r-Tr\xf8ndelag'),
            'S%C3%B8r-Tr%C3%B8ndelag')
        self.assertEqual(iriencode(urlencode('fran\xe7ois & jill')),
            'fran%C3%A7ois%20%26%20jill')
    def test_urlizetrunc(self):
        """urlizetrunc links URLs, truncating the display text beyond a limit."""
        self.assertEqual(urlizetrunc('http://short.com/', 20), '<a href='
            '"http://short.com/" rel="nofollow">http://short.com/</a>')
        self.assertEqual(urlizetrunc('http://www.google.co.uk/search?hl=en'
            '&q=some+long+url&btnG=Search&meta=', 20), '<a href="http://'
            'www.google.co.uk/search?hl=en&q=some+long+url&btnG=Search&'
            'meta=" rel="nofollow">http://www.google...</a>')
        self.assertEqual(urlizetrunc('http://www.google.co.uk/search?hl=en'
            '&q=some+long+url&btnG=Search&meta=', 20), '<a href="http://'
            'www.google.co.uk/search?hl=en&q=some+long+url&btnG=Search'
            '&meta=" rel="nofollow">http://www.google...</a>')
        # Check truncating of URIs which are the exact length
        uri = 'http://31characteruri.com/test/'
        self.assertEqual(len(uri), 31)
        self.assertEqual(urlizetrunc(uri, 31),
            '<a href="http://31characteruri.com/test/" rel="nofollow">'
            'http://31characteruri.com/test/</a>')
        self.assertEqual(urlizetrunc(uri, 30),
            '<a href="http://31characteruri.com/test/" rel="nofollow">'
            'http://31characteruri.com/t...</a>')
        self.assertEqual(urlizetrunc(uri, 2),
            '<a href="http://31characteruri.com/test/"'
            ' rel="nofollow">...</a>')
def test_urlize(self):
# Check normal urlize
self.assertEqual(urlize('http://google.com'),
'<a href="http://google.com" rel="nofollow">http://google.com</a>')
self.assertEqual(urlize('http://google.com/'),
'<a href="http://google.com/" rel="nofollow">http://google.com/</a>')
self.assertEqual(urlize('www.google.com'),
'<a href="http://www.google.com" rel="nofollow">www.google.com</a>')
self.assertEqual(urlize('djangoproject.org'),
'<a href="http://djangoproject.org" rel="nofollow">djangoproject.org</a>')
self.assertEqual(urlize('djangoproject.org/'),
'<a href="http://djangoproject.org/" rel="nofollow">djangoproject.org/</a>')
self.assertEqual(urlize('info@djangoproject.org'),
'<a href="mailto:info@djangoproject.org">info@djangoproject.org</a>')
self.assertEqual(urlize('some.organization'),
'some.organization'),
# Check urlize with https addresses
self.assertEqual(urlize('https://google.com'),
'<a href="https://google.com" rel="nofollow">https://google.com</a>')
# Check urlize doesn't overquote already quoted urls - see #9655
# The teststring is the urlquoted version of 'http://hi.baidu.com/重新开始'
self.assertEqual(urlize('http://hi.baidu.com/%E9%87%8D%E6%96%B0%E5%BC%80%E5%A7%8B'),
'<a href="http://hi.baidu.com/%E9%87%8D%E6%96%B0%E5%BC%80%E5%A7%8B" rel="nofollow">'
'http://hi.baidu.com/%E9%87%8D%E6%96%B0%E5%BC%80%E5%A7%8B</a>')
self.assertEqual(urlize('www.mystore.com/30%OffCoupons!'),
'<a href="http://www.mystore.com/30%25OffCoupons" rel="nofollow">'
'www.mystore.com/30%OffCoupons</a>!')
self.assertEqual(urlize('http://en.wikipedia.org/wiki/Caf%C3%A9'),
'<a href="http://en.wikipedia.org/wiki/Caf%C3%A9" rel="nofollow">'
'http://en.wikipedia.org/wiki/Caf%C3%A9</a>')
self.assertEqual(urlize('http://en.wikipedia.org/wiki/Café'),
'<a href="http://en.wikipedia.org/wiki/Caf%C3%A9" rel="nofollow">'
'http://en.wikipedia.org/wiki/Café</a>')
# Check urlize keeps balanced parentheses - see #11911
self.assertEqual(urlize('http://en.wikipedia.org/wiki/Django_(web_framework)'),
'<a href="http://en.wikipedia.org/wiki/Django_(web_framework)" rel="nofollow">'
'http://en.wikipedia.org/wiki/Django_(web_framework)</a>')
self.assertEqual(urlize('(see http://en.wikipedia.org/wiki/Django_(web_framework))'),
'(see <a href="http://en.wikipedia.org/wiki/Django_(web_framework)" rel="nofollow">'
'http://en.wikipedia.org/wiki/Django_(web_framework)</a>)')
# Check urlize adds nofollow properly - see #12183
self.assertEqual(urlize('foo@bar.com or www.bar.com'),
'<a href="mailto:foo@bar.com">foo@bar.com</a> or '
'<a href="http://www.bar.com" rel="nofollow">www.bar.com</a>')
# Check urlize handles IDN correctly - see #13704
self.assertEqual(urlize('http://c✶.ws'),
'<a href="http://xn--c-lgq.ws" rel="nofollow">http://c✶.ws</a>')
self.assertEqual(urlize('www.c✶.ws'),
'<a href="http://www.xn--c-lgq.ws" rel="nofollow">www.c✶.ws</a>')
self.assertEqual(urlize('c✶.org'),
'<a href="http://xn--c-lgq.org" rel="nofollow">c✶.org</a>')
self.assertEqual(urlize('info@c✶.org'),
'<a href="mailto:info@xn--c-lgq.org">info@c✶.org</a>')
# Check urlize doesn't highlight malformed URIs - see #16395
self.assertEqual(urlize('http:///www.google.com'),
'http:///www.google.com')
self.assertEqual(urlize('http://.google.com'),
'http://.google.com')
self.assertEqual(urlize('http://@foo.com'),
'http://@foo.com')
# Check urlize accepts more TLDs - see #16656
self.assertEqual(urlize('usa.gov'),
'<a href="http://usa.gov" rel="nofollow">usa.gov</a>')
# Check urlize don't crash on invalid email with dot-starting domain - see #17592
self.assertEqual(urlize('email@.stream.ru'),
'email@.stream.ru')
# Check urlize accepts uppercased URL schemes - see #18071
self.assertEqual(urlize('HTTPS://github.com/'),
'<a href="https://github.com/" rel="nofollow">HTTPS://github.com/</a>')
# Check urlize trims trailing period when followed by parenthesis - see #18644
self.assertEqual(urlize('(Go to http://www.example.com/foo.)'),
'(Go to <a href="http://www.example.com/foo" rel="nofollow">http://www.example.com/foo</a>.)')
# Check urlize handles brackets properly (#19070)
self.assertEqual(urlize('[see www.example.com]'),
'[see <a href="http://www.example.com" rel="nofollow">www.example.com</a>]')
self.assertEqual(urlize('see test[at[example.com'),
'see <a href="http://test[at[example.com" rel="nofollow">test[at[example.com</a>')
self.assertEqual(urlize('[http://168.192.0.1](http://168.192.0.1)'),
'[<a href="http://168.192.0.1](http://168.192.0.1)" rel="nofollow">http://168.192.0.1](http://168.192.0.1)</a>')
# Check urlize works with IPv4/IPv6 addresses
self.assertEqual(urlize('http://192.168.0.15/api/9'),
'<a href="http://192.168.0.15/api/9" rel="nofollow">http://192.168.0.15/api/9</a>')
self.assertEqual(urlize('http://[2001:db8:cafe::2]/api/9'),
'<a href="http://[2001:db8:cafe::2]/api/9" rel="nofollow">http://[2001:db8:cafe::2]/api/9</a>')
# Check urlize correctly include quotation marks in links - #20364
self.assertEqual(urlize('before "hi@example.com" afterwards'),
'before "<a href="mailto:hi@example.com">hi@example.com</a>" afterwards')
self.assertEqual(urlize('before hi@example.com" afterwards'),
'before <a href="mailto:hi@example.com">hi@example.com</a>" afterwards')
self.assertEqual(urlize('before "hi@example.com afterwards'),
'before "<a href="mailto:hi@example.com">hi@example.com</a> afterwards')
self.assertEqual(urlize('before \'hi@example.com\' afterwards'),
'before \'<a href="mailto:hi@example.com">hi@example.com</a>\' afterwards')
self.assertEqual(urlize('before hi@example.com\' afterwards'),
'before <a href="mailto:hi@example.com">hi@example.com</a>\' afterwards')
self.assertEqual(urlize('before \'hi@example.com afterwards'),
'before \'<a href="mailto:hi@example.com">hi@example.com</a> afterwards')
# Check urlize copes with commas following URLs in quotes - see #20364
self.assertEqual(urlize('Email us at "hi@example.com", or phone us at +xx.yy'),
'Email us at "<a href="mailto:hi@example.com">hi@example.com</a>", or phone us at +xx.yy')
# Check urlize correctly handles exclamation marks after TLDs or query string - see #23715
self.assertEqual(urlize('Go to djangoproject.com! and enjoy.'),
'Go to <a href="http://djangoproject.com" rel="nofollow">djangoproject.com</a>! and enjoy.')
self.assertEqual(urlize('Search for google.com/?q=! and see.'),
'Search for <a href="http://google.com/?q=" rel="nofollow">google.com/?q=</a>! and see.')
self.assertEqual(urlize('Search for google.com/?q=dj!`? and see.'),
'Search for <a href="http://google.com/?q=dj%21%60%3F" rel="nofollow">google.com/?q=dj!`?</a> and see.')
self.assertEqual(urlize('Search for google.com/?q=dj!`?! and see.'),
'Search for <a href="http://google.com/?q=dj%21%60%3F" rel="nofollow">google.com/?q=dj!`?</a>! and see.')
def test_wordcount(self):
self.assertEqual(wordcount(''), 0)
self.assertEqual(wordcount('oneword'), 1)
self.assertEqual(wordcount('lots of words'), 3)
    def test_wordwrap(self):
        """wordwrap wraps at a column limit, preserving explicit newlines."""
        self.assertEqual(wordwrap('this is a long paragraph of text that '
            "really needs to be wrapped I'm afraid", 14),
            'this is a long\nparagraph of\ntext that\nreally needs\nto be '
            "wrapped\nI'm afraid")
        self.assertEqual(wordwrap('this is a short paragraph of text.\n  '
            'But this line should be indented', 14),
            'this is a\nshort\nparagraph of\ntext.\n  But this\nline '
            'should be\nindented')
        self.assertEqual(wordwrap('this is a short paragraph of text.\n  '
            'But this line should be indented', 15), 'this is a short\n'
            'paragraph of\ntext.\n  But this line\nshould be\nindented')
    def test_rjust(self):
        """ljust/rjust pad to the given width; too-small widths are no-ops."""
        self.assertEqual(ljust('test', 10), 'test      ')
        self.assertEqual(ljust('test', 3), 'test')
        self.assertEqual(rjust('test', 10), '      test')
        self.assertEqual(rjust('test', 3), 'test')
def test_center(self):
self.assertEqual(center('test', 6), ' test ')
    def test_cut(self):
        """cut removes every occurrence of the given substring."""
        self.assertEqual(cut('a string to be mangled', 'a'),
            ' string to be mngled')
        self.assertEqual(cut('a string to be mangled', 'ng'),
            'a stri to be maled')
        # A substring that never occurs leaves the input unchanged.
        self.assertEqual(cut('a string to be mangled', 'strings'),
            'a string to be mangled')
    def test_force_escape(self):
        """force_escape HTML-escapes and returns a SafeData instance."""
        escaped = force_escape('<some html & special characters > here')
        self.assertEqual(
            escaped, '&lt;some html &amp; special characters &gt; here')
        self.assertIsInstance(escaped, SafeData)
        # Non-ASCII characters pass through unescaped.
        self.assertEqual(
            force_escape('<some html & special characters > here ĐÅ€£'),
            '&lt;some html &amp; special characters &gt; here'
            ' \u0110\xc5\u20ac\xa3')
    def test_linebreaks(self):
        """linebreaks wraps text in <p> and turns \\n, \\r, \\r\\n into <br />."""
        self.assertEqual(linebreaks_filter('line 1'), '<p>line 1</p>')
        self.assertEqual(linebreaks_filter('line 1\nline 2'),
            '<p>line 1<br />line 2</p>')
        self.assertEqual(linebreaks_filter('line 1\rline 2'),
            '<p>line 1<br />line 2</p>')
        self.assertEqual(linebreaks_filter('line 1\r\nline 2'),
            '<p>line 1<br />line 2</p>')
    def test_linebreaksbr(self):
        """linebreaksbr turns all newline styles into <br /> without <p> wrapping."""
        self.assertEqual(linebreaksbr('line 1\nline 2'),
            'line 1<br />line 2')
        self.assertEqual(linebreaksbr('line 1\rline 2'),
            'line 1<br />line 2')
        self.assertEqual(linebreaksbr('line 1\r\nline 2'),
            'line 1<br />line 2')
    def test_removetags(self):
        """removetags strips only the listed tags; it warns (deprecated filter)."""
        with warnings.catch_warnings(record=True):
            warnings.simplefilter("always")
            self.assertEqual(removetags('some <b>html</b> with <script>alert'
                '("You smell")</script> disallowed <img /> tags', 'script img'),
                'some <b>html</b> with alert("You smell") disallowed  tags')
    def test_striptags(self):
        """striptags removes all HTML tags, keeping their text content."""
        self.assertEqual(striptags('some <b>html</b> with <script>alert'
            '("You smell")</script> disallowed <img /> tags'),
            'some html with alert("You smell") disallowed  tags')
def test_dictsort(self):
sorted_dicts = dictsort([{'age': 23, 'name': 'Barbara-Ann'},
{'age': 63, 'name': 'Ra Ra Rasputin'},
{'name': 'Jonny B Goode', 'age': 18}], 'age')
self.assertEqual([sorted(dict.items()) for dict in sorted_dicts],
[[('age', 18), ('name', 'Jonny B Goode')],
[('age', 23), ('name', 'Barbara-Ann')],
[('age', 63), ('name', 'Ra Ra Rasputin')]])
# If it gets passed a list of something else different from
# dictionaries it should fail silently
self.assertEqual(dictsort([1, 2, 3], 'age'), '')
self.assertEqual(dictsort('Hello!', 'age'), '')
self.assertEqual(dictsort({'a': 1}, 'age'), '')
self.assertEqual(dictsort(1, 'age'), '')
    def test_dictsort_complex_sorting_key(self):
        """
        Since dictsort uses template.Variable under the hood, it can sort
        on keys like 'foo.bar'.
        """
        data = [
            {'foo': {'bar': 1, 'baz': 'c'}},
            {'foo': {'bar': 2, 'baz': 'b'}},
            {'foo': {'bar': 3, 'baz': 'a'}},
        ]
        sorted_data = dictsort(data, 'foo.baz')
        # Sorting by 'baz' ('a','b','c') reverses the original 'bar' order.
        self.assertEqual([d['foo']['bar'] for d in sorted_data], [3, 2, 1])
def test_dictsortreversed(self):
sorted_dicts = dictsortreversed([{'age': 23, 'name': 'Barbara-Ann'},
{'age': 63, 'name': 'Ra Ra Rasputin'},
{'name': 'Jonny B Goode', 'age': 18}],
'age')
self.assertEqual([sorted(dict.items()) for dict in sorted_dicts],
[[('age', 63), ('name', 'Ra Ra Rasputin')],
[('age', 23), ('name', 'Barbara-Ann')],
[('age', 18), ('name', 'Jonny B Goode')]])
# If it gets passed a list of something else different from
# dictionaries it should fail silently
self.assertEqual(dictsortreversed([1, 2, 3], 'age'), '')
self.assertEqual(dictsortreversed('Hello!', 'age'), '')
self.assertEqual(dictsortreversed({'a': 1}, 'age'), '')
self.assertEqual(dictsortreversed(1, 'age'), '')
def test_first(self):
self.assertEqual(first([0, 1, 2]), 0)
self.assertEqual(first(''), '')
self.assertEqual(first('test'), 't')
def test_join(self):
self.assertEqual(join([0, 1, 2], 'glue'), '0glue1glue2')
    def test_length(self):
        """length returns len(); length_is compares length to an expected value."""
        self.assertEqual(length('1234'), 4)
        self.assertEqual(length(mark_safe('1234')), 4)
        self.assertEqual(length([1, 2, 3, 4]), 4)
        self.assertEqual(length_is([], 0), True)
        self.assertEqual(length_is([], 1), False)
        self.assertEqual(length_is('a', 1), True)
        self.assertEqual(length_is('a', 10), False)
    def test_slice(self):
        """slice accepts Python slice syntax as a string ('start:stop:step')."""
        self.assertEqual(slice_filter('abcdefg', '0'), '')
        self.assertEqual(slice_filter('abcdefg', '1'), 'a')
        self.assertEqual(slice_filter('abcdefg', '-1'), 'abcdef')
        self.assertEqual(slice_filter('abcdefg', '1:2'), 'b')
        self.assertEqual(slice_filter('abcdefg', '1:3'), 'bc')
        self.assertEqual(slice_filter('abcdefg', '0::2'), 'aceg')
    def test_unordered_list(self):
        """unordered_list renders nested lists as <li>/<ul> markup.

        Items may be strings, arbitrary str()-able objects, or generators;
        the legacy [item, [children]] pair format is still accepted (with a
        deprecation warning).
        """
        self.assertEqual(unordered_list(['item 1', 'item 2']),
            '\t<li>item 1</li>\n\t<li>item 2</li>')
        self.assertEqual(unordered_list(['item 1', ['item 1.1']]),
            '\t<li>item 1\n\t<ul>\n\t\t<li>item 1.1</li>\n\t</ul>\n\t</li>')
        self.assertEqual(
            unordered_list(['item 1', ['item 1.1', 'item1.2'], 'item 2']),
            '\t<li>item 1\n\t<ul>\n\t\t<li>item 1.1</li>\n\t\t<li>item1.2'
            '</li>\n\t</ul>\n\t</li>\n\t<li>item 2</li>')
        self.assertEqual(
            unordered_list(['item 1', ['item 1.1', ['item 1.1.1', ['item 1.1.1.1']]]]),
            '\t<li>item 1\n\t<ul>\n\t\t<li>item 1.1\n\t\t<ul>\n\t\t\t<li>'
            'item 1.1.1\n\t\t\t<ul>\n\t\t\t\t<li>item 1.1.1.1</li>\n\t\t\t'
            '</ul>\n\t\t\t</li>\n\t\t</ul>\n\t\t</li>\n\t</ul>\n\t</li>')
        self.assertEqual(unordered_list(
            ['States', ['Kansas', ['Lawrence', 'Topeka'], 'Illinois']]),
            '\t<li>States\n\t<ul>\n\t\t<li>Kansas\n\t\t<ul>\n\t\t\t<li>'
            'Lawrence</li>\n\t\t\t<li>Topeka</li>\n\t\t</ul>\n\t\t</li>'
            '\n\t\t<li>Illinois</li>\n\t</ul>\n\t</li>')

        # Non-string items are rendered via str().
        @python_2_unicode_compatible
        class ULItem(object):
            def __init__(self, title):
                self.title = title

            def __str__(self):
                return 'ulitem-%s' % str(self.title)

        a = ULItem('a')
        b = ULItem('b')
        self.assertEqual(unordered_list([a, b]), '\t<li>ulitem-a</li>\n\t<li>ulitem-b</li>')

        # Generators are consumed like lists.
        def item_generator():
            yield a
            yield b

        self.assertEqual(unordered_list(item_generator()), '\t<li>ulitem-a</li>\n\t<li>ulitem-b</li>')

        # Old format for unordered lists should still work
        with warnings.catch_warnings(record=True):
            warnings.simplefilter("always")
            self.assertEqual(unordered_list(['item 1', []]), '\t<li>item 1</li>')
            self.assertEqual(unordered_list(['item 1', [['item 1.1', []]]]),
                '\t<li>item 1\n\t<ul>\n\t\t<li>item 1.1</li>\n\t</ul>\n\t</li>')
            self.assertEqual(unordered_list(['item 1', [['item 1.1', []],
                ['item 1.2', []]]]), '\t<li>item 1\n\t<ul>\n\t\t<li>item 1.1'
                '</li>\n\t\t<li>item 1.2</li>\n\t</ul>\n\t</li>')
            self.assertEqual(unordered_list(['States', [['Kansas', [['Lawrence',
                []], ['Topeka', []]]], ['Illinois', []]]]), '\t<li>States\n\t'
                '<ul>\n\t\t<li>Kansas\n\t\t<ul>\n\t\t\t<li>Lawrence</li>'
                '\n\t\t\t<li>Topeka</li>\n\t\t</ul>\n\t\t</li>\n\t\t<li>'
                'Illinois</li>\n\t</ul>\n\t</li>')
def test_add(self):
self.assertEqual(add('1', '2'), 3)
    def test_get_digit(self):
        """get_digit returns the Nth digit counted from the right (1-based)."""
        self.assertEqual(get_digit(123, 1), 3)
        self.assertEqual(get_digit(123, 2), 2)
        self.assertEqual(get_digit(123, 3), 1)
        # Out-of-range position yields 0; position 0 returns the value itself.
        self.assertEqual(get_digit(123, 4), 0)
        self.assertEqual(get_digit(123, 0), 123)
        self.assertEqual(get_digit('xyz', 0), 'xyz')
    def test_date(self):
        """Smoke test for the date filter."""
        # real testing of date() is in dateformat.py
        self.assertEqual(date(datetime.datetime(2005, 12, 29), "d F Y"),
            '29 December 2005')
        self.assertEqual(date(datetime.datetime(2005, 12, 29), r'jS \o\f F'),
            '29th of December')
    def test_time(self):
        """Smoke test for the time filter (12-hour 'h' format)."""
        # real testing of time() is done in dateformat.py
        self.assertEqual(time(datetime.time(13), "h"), '01')
        self.assertEqual(time(datetime.time(0), "h"), '12')
    def test_timesince(self):
        """Smoke test for timesince, with and without an explicit 'now'."""
        # real testing is done in timesince.py, where we can provide our own 'now'
        # NOTE: \xa0 avoids wrapping between value and unit
        self.assertEqual(
            timesince_filter(datetime.datetime.now() - datetime.timedelta(1)),
            '1\xa0day')
        self.assertEqual(
            timesince_filter(datetime.datetime(2005, 12, 29),
                             datetime.datetime(2005, 12, 30)),
            '1\xa0day')
    def test_timeuntil(self):
        """Smoke test for timeuntil, with and without an explicit 'now'."""
        # NOTE: \xa0 avoids wrapping between value and unit
        self.assertEqual(
            timeuntil_filter(datetime.datetime.now() + datetime.timedelta(1, 1)),
            '1\xa0day')
        self.assertEqual(
            timeuntil_filter(datetime.datetime(2005, 12, 30),
                             datetime.datetime(2005, 12, 29)),
            '1\xa0day')
def test_default(self):
self.assertEqual(default("val", "default"), 'val')
self.assertEqual(default(None, "default"), 'default')
self.assertEqual(default('', "default"), 'default')
def test_if_none(self):
self.assertEqual(default_if_none("val", "default"), 'val')
self.assertEqual(default_if_none(None, "default"), 'default')
self.assertEqual(default_if_none('', "default"), '')
def test_divisibleby(self):
self.assertEqual(divisibleby(4, 2), True)
self.assertEqual(divisibleby(4, 3), False)
    def test_yesno(self):
        """yesno maps True/False/None to the given (or default) word triple."""
        self.assertEqual(yesno(True), 'yes')
        self.assertEqual(yesno(False), 'no')
        self.assertEqual(yesno(None), 'maybe')
        self.assertEqual(yesno(True, 'certainly,get out of town,perhaps'),
            'certainly')
        self.assertEqual(yesno(False, 'certainly,get out of town,perhaps'),
            'get out of town')
        self.assertEqual(yesno(None, 'certainly,get out of town,perhaps'),
            'perhaps')
        # With only two words, None falls back to the 'no' word.
        self.assertEqual(yesno(None, 'certainly,get out of town'),
            'get out of town')
    def test_filesizeformat(self):
        """filesizeformat renders byte counts with binary (1024) unit steps."""
        # NOTE: \xa0 avoids wrapping between value and unit
        self.assertEqual(filesizeformat(1023), '1023\xa0bytes')
        self.assertEqual(filesizeformat(1024), '1.0\xa0KB')
        self.assertEqual(filesizeformat(10 * 1024), '10.0\xa0KB')
        self.assertEqual(filesizeformat(1024 * 1024 - 1), '1024.0\xa0KB')
        self.assertEqual(filesizeformat(1024 * 1024), '1.0\xa0MB')
        self.assertEqual(filesizeformat(1024 * 1024 * 50), '50.0\xa0MB')
        self.assertEqual(filesizeformat(1024 * 1024 * 1024 - 1), '1024.0\xa0MB')
        self.assertEqual(filesizeformat(1024 * 1024 * 1024), '1.0\xa0GB')
        self.assertEqual(filesizeformat(1024 * 1024 * 1024 * 1024), '1.0\xa0TB')
        self.assertEqual(filesizeformat(1024 * 1024 * 1024 * 1024 * 1024), '1.0\xa0PB')
        self.assertEqual(filesizeformat(1024 * 1024 * 1024 * 1024 * 1024 * 2000), '2000.0\xa0PB')
        # Non-numeric input degrades to '0 bytes' rather than raising.
        self.assertEqual(filesizeformat(complex(1, -1)), '0\xa0bytes')
        self.assertEqual(filesizeformat(""), '0\xa0bytes')
        self.assertEqual(filesizeformat("\N{GREEK SMALL LETTER ALPHA}"), '0\xa0bytes')
    def test_pluralize(self):
        """pluralize returns a suffix ('s' by default) for counts != 1.

        Accepts ints, floats, Decimals, and sized containers; a custom
        'singular,plural' suffix pair may be supplied.
        """
        self.assertEqual(pluralize(1), '')
        self.assertEqual(pluralize(0), 's')
        self.assertEqual(pluralize(2), 's')
        # Ticket #22798
        self.assertEqual(pluralize(0.5), 's')
        self.assertEqual(pluralize(1.5), 's')
        self.assertEqual(pluralize(decimal.Decimal(1)), '')
        self.assertEqual(pluralize(decimal.Decimal(0)), 's')
        self.assertEqual(pluralize(decimal.Decimal(2)), 's')
        self.assertEqual(pluralize([1]), '')
        self.assertEqual(pluralize([]), 's')
        self.assertEqual(pluralize([1, 2, 3]), 's')
        self.assertEqual(pluralize(1, 'es'), '')
        self.assertEqual(pluralize(0, 'es'), 'es')
        self.assertEqual(pluralize(2, 'es'), 'es')
        self.assertEqual(pluralize(1, 'y,ies'), 'y')
        self.assertEqual(pluralize(0, 'y,ies'), 'ies')
        self.assertEqual(pluralize(2, 'y,ies'), 'ies')
        # More than two suffixes is invalid and yields ''.
        self.assertEqual(pluralize(0, 'y,ies,error'), '')
def test_phone2numeric(self):
self.assertEqual(phone2numeric_filter('0800 flowers'), '0800 3569377')
    def test_non_string_input(self):
        """String filters must coerce non-string input instead of raising."""
        # Filters shouldn't break if passed non-strings
        self.assertEqual(addslashes(123), '123')
        self.assertEqual(linenumbers(123), '1. 123')
        self.assertEqual(lower(123), '123')
        self.assertEqual(make_list(123), ['1', '2', '3'])
        self.assertEqual(slugify(123), '123')
        self.assertEqual(title(123), '123')
        self.assertEqual(truncatewords(123, 2), '123')
        self.assertEqual(upper(123), '123')
        self.assertEqual(urlencode(123), '123')
        self.assertEqual(urlize(123), '123')
        self.assertEqual(urlizetrunc(123, 1), '123')
        self.assertEqual(wordcount(123), 1)
        self.assertEqual(wordwrap(123, 2), '123')
        self.assertEqual(ljust('123', 4), '123 ')
        self.assertEqual(rjust('123', 4), ' 123')
        self.assertEqual(center('123', 5), ' 123 ')
        self.assertEqual(center('123', 6), ' 123  ')
        self.assertEqual(cut(123, '2'), '13')
        self.assertEqual(escape(123), '123')
        self.assertEqual(linebreaks_filter(123), '<p>123</p>')
        self.assertEqual(linebreaksbr(123), '123')
        # removetags is deprecated, so silence its warning here.
        with warnings.catch_warnings(record=True):
            warnings.simplefilter("always")
            self.assertEqual(removetags(123, 'a'), '123')
        self.assertEqual(striptags(123), '123')
class DefaultFiltersI18NTests(TestCase):
    """Locale-aware behavior of the default filters."""

    def test_localized_filesizeformat(self):
        """Under L10N + German locale, decimal commas and 'Bytes' are used."""
        # NOTE: \xa0 avoids wrapping between value and unit
        with self.settings(USE_L10N=True), translation.override('de'):
            self.assertEqual(filesizeformat(1023), '1023\xa0Bytes')
            self.assertEqual(filesizeformat(1024), '1,0\xa0KB')
            self.assertEqual(filesizeformat(10 * 1024), '10,0\xa0KB')
            self.assertEqual(filesizeformat(1024 * 1024 - 1), '1024,0\xa0KB')
            self.assertEqual(filesizeformat(1024 * 1024), '1,0\xa0MB')
            self.assertEqual(filesizeformat(1024 * 1024 * 50), '50,0\xa0MB')
            self.assertEqual(filesizeformat(1024 * 1024 * 1024 - 1), '1024,0\xa0MB')
            self.assertEqual(filesizeformat(1024 * 1024 * 1024), '1,0\xa0GB')
            self.assertEqual(filesizeformat(1024 * 1024 * 1024 * 1024), '1,0\xa0TB')
            self.assertEqual(filesizeformat(1024 * 1024 * 1024 * 1024 * 1024), '1,0\xa0PB')
            self.assertEqual(filesizeformat(1024 * 1024 * 1024 * 1024 * 1024 * 2000), '2000,0\xa0PB')
            # Non-numeric input degrades to '0 Bytes' rather than raising.
            self.assertEqual(filesizeformat(complex(1, -1)), '0\xa0Bytes')
            self.assertEqual(filesizeformat(""), '0\xa0Bytes')
            self.assertEqual(filesizeformat("\N{GREEK SMALL LETTER ALPHA}"), '0\xa0Bytes')
|
lucafavatella/otp
|
refs/heads/maint
|
lib/asn1/test/asn1_SUITE_data/LargeConstraints.py
|
95
|
-- Regression module: an INTEGER whose upper bound is the largest value
-- encodable as a PER constrained whole number, i.e. 1 bsl (255*8).
LargeConstraints DEFINITIONS ::=
BEGIN
-- Maximum number that can be encoded as a constrained whole number: 1 bsl (255*8)
-- The number of octets needed to represent a number cannot be more than 255
-- As the length field is encoded as a 8-bit bitfield.
RangeMax ::= INTEGER (1..126238304966058622268417487065116999845484776053576109500509161826268184136202698801551568013761380717534054534851164138648904527931605160527688095259563605939964364716019515983399209962459578542172100149937763938581219604072733422507180056009672540900709554109516816573779593326332288314873251559077853068444977864803391962580800682760017849589281937637993445539366428356761821065267423102149447628375691862210717202025241630303118559188678304314076943801692528246980959705901641444238894928620825482303431806955690226308773426829503900930529395181208739591967195841536053143145775307050594328881077553168201547776)
END
|
ostrokach/bioconda-recipes
|
refs/heads/master
|
recipes/deepvariant/dv_postprocess_variants.py
|
9
|
#!/opt/anaconda1anaconda2anaconda3/bin/python
#
# Wrapper script for DeepVariant postprocess_variants
BINARY_DIR="/opt/anaconda1anaconda2anaconda3/BINARYSUB"
import argparse
import os
import subprocess
import sys
def real_dirname(path):
    """Return *path* with symlinks resolved to a canonical absolute path.

    NOTE(review): despite the name, this does not take the directory
    portion — it canonicalizes the path it is given (callers pass a
    directory already).
    """
    resolved = os.path.realpath(path)
    return resolved
class DVHelp(argparse._HelpAction):
    """Custom --help action: print the bundled tool's own --help first.

    NOTE: the shell command is built with cmd.format(**locals()), so the
    local variable names below (lib_path, py_exe, bin_dir) are part of the
    behavior — do not rename them.
    """
    def __call__(self, parser, namespace, values, option_string=None):
        print("Baseline DeepVariant arguments")
        bin_dir = real_dirname(BINARY_DIR)
        # Prepend the conda env's lib/ so the bundled binary finds its shared libs.
        conda_path = os.path.dirname(os.path.realpath(sys.executable))
        lib_path = os.path.join(os.path.dirname(conda_path), "lib")
        py_exe = sys.executable
        cmd = ("export LD_LIBRARY_PATH={lib_path}:\"$LD_LIBRARY_PATH\" && "
               "{py_exe} {bin_dir}/postprocess_variants.zip --help")
        print(subprocess.check_output(cmd.format(**locals()), shell=True))
        print()
        print("Wrapper arguments")
        parser.print_help()
def main():
    """Parse wrapper args and exec the bundled postprocess_variants tool.

    Exits with the subprocess's return code. NOTE: the shell command is
    built with cmd.format(**locals()), so the local variable names
    (lib_path, py_exe, bin_dir, args) are part of the behavior.
    """
    parser = argparse.ArgumentParser(description="DeepVariant postprocess_variants wrapper", add_help=False)
    parser.add_argument("--ref", required=True, help="Reference genome")
    parser.add_argument("--infile", required=True, help="Input tfrecord file from call_variants")
    parser.add_argument("--outfile", required=True)
    # -h/--help is handled by DVHelp so the bundled tool's help is shown too.
    parser.add_argument("-h", "--help", action=DVHelp)
    args = parser.parse_args()
    bin_dir = real_dirname(BINARY_DIR)
    # Prepend the conda env's lib/ so the bundled binary finds its shared libs.
    conda_path = os.path.dirname(os.path.realpath(sys.executable))
    lib_path = os.path.join(os.path.dirname(conda_path), "lib")
    py_exe = sys.executable
    cmd = ("export LD_LIBRARY_PATH={lib_path}:\"$LD_LIBRARY_PATH\" && "
           "{py_exe} {bin_dir}/postprocess_variants.zip "
           "--ref {args.ref} --infile {args.infile} --outfile {args.outfile}")
    sys.exit(subprocess.call(cmd.format(**locals()), shell=True))
# Script entry point.
if __name__ == '__main__':
    main()
|
amith01994/intellij-community
|
refs/heads/master
|
python/helpers/pycharm_generator_utils/constants.py
|
11
|
import os
import re
import types
import sys
import string
import time
# !!! Don't forget to update VERSION and required_gen_version if necessary !!!
VERSION = "1.137"

# Encoding used for everything this generator writes out.
OUT_ENCODING = 'utf-8'

# (major, minor) of the running interpreter, extracted from sys.hexversion.
version = (
    (sys.hexversion & (0xff << 24)) >> 24,
    (sys.hexversion & (0xff << 16)) >> 16
)
# Python 2/3 compatibility shims: builtins module, string/number type
# tuples, and a version-appropriate exec wrapper.
if version[0] >= 3:
    #noinspection PyUnresolvedReferences
    import builtins as the_builtins

    string = "".__class__

    STR_TYPES = (getattr(the_builtins, "bytes"), str)

    NUM_TYPES = (int, float)
    SIMPLEST_TYPES = NUM_TYPES + STR_TYPES + (None.__class__,)
    EASY_TYPES = NUM_TYPES + STR_TYPES + (None.__class__, dict, tuple, list)

    def the_exec(source, context):
        # py3: exec is a function.
        exec (source, context)

else: # < 3.0
    import __builtin__ as the_builtins

    STR_TYPES = (getattr(the_builtins, "unicode"), str)

    NUM_TYPES = (int, long, float)
    SIMPLEST_TYPES = NUM_TYPES + STR_TYPES + (types.NoneType,)
    EASY_TYPES = NUM_TYPES + STR_TYPES + (types.NoneType, dict, tuple, list)

    def the_exec(source, context):
        # py2: exec is a statement with 'in' syntax.
        #noinspection PyRedundantParentheses
        exec (source) in context
# Pre-2.4 interpreters lack decorators and str.lstrip with an argument,
# so provide a minimal character-stripping fallback.
if version[0] == 2 and version[1] < 4:
    HAS_DECORATORS = False

    def lstrip(s, prefix):
        # Strips leading occurrences of the single character `prefix`.
        # NOTE(review): would IndexError if s consists solely of that
        # character — presumably never happens for its callers; verify.
        i = 0
        while s[i] == prefix:
            i += 1
        return s[i:]

else:
    HAS_DECORATORS = True
    lstrip = string.lstrip
# return type inference helper table
# Literal snippets used as inferred default return values in generated stubs.
INT_LIT = '0'
FLOAT_LIT = '0.0'
DICT_LIT = '{}'
LIST_LIT = '[]'
TUPLE_LIT = '()'
BOOL_LIT = 'False'

RET_TYPE = {# {'type_name': 'value_string'} lookup table
    # chaining
    "self": "self",
    "self.": "self",
    # int
    "int": INT_LIT,
    "Int": INT_LIT,
    "integer": INT_LIT,
    "Integer": INT_LIT,
    "short": INT_LIT,
    "long": INT_LIT,
    "number": INT_LIT,
    "Number": INT_LIT,
    # float
    "float": FLOAT_LIT,
    "Float": FLOAT_LIT,
    "double": FLOAT_LIT,
    "Double": FLOAT_LIT,
    "floating": FLOAT_LIT,
    # boolean
    "bool": BOOL_LIT,
    "boolean": BOOL_LIT,
    "Bool": BOOL_LIT,
    "Boolean": BOOL_LIT,
    "True": BOOL_LIT,
    "true": BOOL_LIT,
    "False": BOOL_LIT,
    "false": BOOL_LIT,
    # list
    'list': LIST_LIT,
    'List': LIST_LIT,
    '[]': LIST_LIT,
    # tuple
    "tuple": TUPLE_LIT,
    "sequence": TUPLE_LIT,
    "Sequence": TUPLE_LIT,
    # dict
    "dict": DICT_LIT,
    "Dict": DICT_LIT,
    "dictionary": DICT_LIT,
    "Dictionary": DICT_LIT,
    "map": DICT_LIT,
    "Map": DICT_LIT,
    "hashtable": DICT_LIT,
    "Hashtable": DICT_LIT,
    "{}": DICT_LIT,
    # "objects"
    "object": "object()",
}
# Version-specific string literals for RET_TYPE, plus ensureUnicode:
# on py2 'str' maps to a bytes literal; on py3 it maps to a text literal.
if version[0] < 3:
    UNICODE_LIT = 'u""'
    BYTES_LIT = '""'
    RET_TYPE.update({
        'string': BYTES_LIT,
        'String': BYTES_LIT,
        'str': BYTES_LIT,
        'Str': BYTES_LIT,
        'character': BYTES_LIT,
        'char': BYTES_LIT,
        'unicode': UNICODE_LIT,
        'Unicode': UNICODE_LIT,
        'bytes': BYTES_LIT,
        'byte': BYTES_LIT,
        'Bytes': BYTES_LIT,
        'Byte': BYTES_LIT,
    })
    DEFAULT_STR_LIT = BYTES_LIT
    # also, files:
    RET_TYPE.update({
        'file': "file('/dev/null')",
    })

    def ensureUnicode(data):
        # Decode byte strings; coerce everything else via unicode().
        if type(data) == str:
            return data.decode(OUT_ENCODING, 'replace')
        return unicode(data)
else:
    UNICODE_LIT = '""'
    BYTES_LIT = 'b""'
    RET_TYPE.update({
        'string': UNICODE_LIT,
        'String': UNICODE_LIT,
        'str': UNICODE_LIT,
        'Str': UNICODE_LIT,
        'character': UNICODE_LIT,
        'char': UNICODE_LIT,
        'unicode': UNICODE_LIT,
        'Unicode': UNICODE_LIT,
        'bytes': BYTES_LIT,
        'byte': BYTES_LIT,
        'Bytes': BYTES_LIT,
        'Byte': BYTES_LIT,
    })
    DEFAULT_STR_LIT = UNICODE_LIT
    # also, files: we can't provide an easy expression on py3k
    RET_TYPE.update({
        'file': None,
    })

    def ensureUnicode(data):
        # Decode byte strings; coerce everything else via str().
        if type(data) == bytes:
            return data.decode(OUT_ENCODING, 'replace')
        return str(data)
if version[0] > 2:
    import io  # in 3.0

    # PEP 8 (E731): use a def rather than assigning a lambda to a name;
    # behavior is unchanged.
    def fopen(name, mode):
        """Open *name* for text I/O using the generator's output encoding."""
        return io.open(name, mode, encoding=OUT_ENCODING)
else:
    fopen = open
if sys.platform == 'cli':
    #noinspection PyUnresolvedReferences
    from System import DateTime

    class Timer(object):
        """Millisecond stopwatch backed by the .NET clock (IronPython)."""

        def __init__(self):
            self.started = DateTime.Now

        def elapsed(self):
            """Return milliseconds elapsed since construction."""
            return (DateTime.Now - self.started).TotalMilliseconds
else:
    class Timer(object):
        """Millisecond stopwatch backed by time.time()."""

        def __init__(self):
            self.started = time.time()

        def elapsed(self):
            """Return whole milliseconds elapsed since construction."""
            delta = time.time() - self.started
            return int(delta * 1000)
# True when running under Jython (its os module gains a "java" attribute).
IS_JAVA = hasattr(os, "java")
BUILTIN_MOD_NAME = the_builtins.__name__
# Regexes used to scavenge identifiers, signatures and default values from
# docstrings and reprs.
IDENT_PATTERN = "[A-Za-z_][0-9A-Za-z_]*" # re pattern for identifier
NUM_IDENT_PATTERN = re.compile("([A-Za-z_]+)[0-9]?[A-Za-z_]*") # 'foo_123' -> $1 = 'foo_'
STR_CHAR_PATTERN = "[0-9A-Za-z_.,\+\-&\*% ]"
DOC_FUNC_RE = re.compile("(?:.*\.)?(\w+)\(([^\)]*)\).*") # $1 = function name, $2 = arglist
SANE_REPR_RE = re.compile(IDENT_PATTERN + "(?:\(.*\))?") # identifier with possible (...), go catches
IDENT_RE = re.compile("(" + IDENT_PATTERN + ")") # $1 = identifier
STARS_IDENT_RE = re.compile("(\*?\*?" + IDENT_PATTERN + ")") # $1 = identifier, maybe with a * or **
IDENT_EQ_RE = re.compile("(" + IDENT_PATTERN + "\s*=)") # $1 = identifier with a following '='
SIMPLE_VALUE_RE = re.compile(
    "(\([+-]?[0-9](?:\s*,\s*[+-]?[0-9])*\))|" + # a numeric tuple, e.g. in pygame
    "([+-]?[0-9]+\.?[0-9]*(?:[Ee]?[+-]?[0-9]+\.?[0-9]*)?)|" + # number
    "('" + STR_CHAR_PATTERN + "*')|" + # single-quoted string
    '("' + STR_CHAR_PATTERN + '*")|' + # double-quoted string
    "(\[\])|" +
    "(\{\})|" +
    "(\(\))|" +
    "(True|False|None)"
) # $? = sane default value
########################### parsing ###########################################################
if version[0] < 3:
    from pycharm_generator_utils.pyparsing import *
else:
    #noinspection PyUnresolvedReferences
    from pycharm_generator_utils.pyparsing_py3 import *
# grammar to parse parameter lists
# // snatched from parsePythonValue.py, from pyparsing samples, copyright 2006 by Paul McGuire but under BSD license.
# we don't suppress lots of punctuation because we want it back when we reconstruct the lists
lparen, rparen, lbrack, rbrack, lbrace, rbrace, colon = map(Literal, "()[]{}:")
integer = Combine(Optional(oneOf("+ -")) + Word(nums)).setName("integer")
real = Combine(Optional(oneOf("+ -")) + Word(nums) + "." +
               Optional(Word(nums)) +
               Optional(oneOf("e E") + Optional(oneOf("+ -")) + Word(nums))).setName("real")
# Forward declarations: tuple/list/dict literals nest recursively.
tupleStr = Forward()
listStr = Forward()
dictStr = Forward()
boolLiteral = oneOf("True False")
noneLiteral = Literal("None")
listItem = real | integer | quotedString | unicodeString | boolLiteral | noneLiteral | \
           Group(listStr) | tupleStr | dictStr
tupleStr << ( Suppress("(") + Optional(delimitedList(listItem)) +
              Optional(Literal(",")) + Suppress(")") ).setResultsName("tuple")
listStr << (lbrack + Optional(delimitedList(listItem) +
                              Optional(Literal(","))) + rbrack).setResultsName("list")
dictEntry = Group(listItem + colon + listItem)
dictStr << (lbrace + Optional(delimitedList(dictEntry) + Optional(Literal(","))) + rbrace).setResultsName("dict")
# \\ end of the snatched part
# our output format is s-expressions:
# (simple name optional_value) is name or name=value
# (nested (simple ...) (simple ...)) is (name, name,...)
# (opt ...) is [, ...] or suchlike.
T_SIMPLE = 'Simple'
T_NESTED = 'Nested'
T_OPTIONAL = 'Opt'
T_RETURN = "Ret"
TRIPLE_DOT = '...'
COMMA = Suppress(",")
APOS = Suppress("'")
QUOTE = Suppress('"')
SP = Suppress(Optional(White()))
ident = Word(alphas + "_", alphanums + "_-.").setName("ident") # we accept things like "foo-or-bar"
decorated_ident = ident + Optional(Suppress(SP + Literal(":") + SP + ident)) # accept "foo: bar", ignore "bar"
spaced_ident = Combine(decorated_ident + ZeroOrMore(Literal(' ') + decorated_ident)) # we accept 'list or tuple' or 'C struct'
# allow quoted names, because __setattr__, etc docs use it
paramname = spaced_ident | \
            APOS + spaced_ident + APOS | \
            QUOTE + spaced_ident + QUOTE
parenthesized_tuple = ( Literal("(") + Optional(delimitedList(listItem, combine=True)) +
                        Optional(Literal(",")) + Literal(")") ).setResultsName("(tuple)")
initializer = (SP + Suppress("=") + SP + Combine(parenthesized_tuple | listItem | ident)).setName("=init") # accept foo=defaultfoo
param = Group(Empty().setParseAction(replaceWith(T_SIMPLE)) + Combine(Optional(oneOf("* **")) + paramname) + Optional(initializer))
ellipsis = Group(
    Empty().setParseAction(replaceWith(T_SIMPLE)) + \
    (Literal("..") +
     ZeroOrMore(Literal('.'))).setParseAction(replaceWith(TRIPLE_DOT)) # we want to accept both 'foo,..' and 'foo, ...'
)
paramSlot = Forward()
simpleParamSeq = ZeroOrMore(paramSlot + COMMA) + Optional(paramSlot + Optional(COMMA))
nestedParamSeq = Group(
    Suppress('(').setParseAction(replaceWith(T_NESTED)) + \
    simpleParamSeq + Optional(ellipsis + Optional(COMMA) + Optional(simpleParamSeq)) + \
    Suppress(')')
) # we accept "(a1, ... an)"
paramSlot << (param | nestedParamSeq)
optionalPart = Forward()
paramSeq = simpleParamSeq + Optional(optionalPart) # this is our approximate target
optionalPart << (
    Group(
        Suppress('[').setParseAction(replaceWith(T_OPTIONAL)) + Optional(COMMA) +
        paramSeq + Optional(ellipsis) +
        Suppress(']')
    )
    | ellipsis
)
# "-> type" or ": return type" trailing annotation in a docstring signature.
return_type = Group(
    Empty().setParseAction(replaceWith(T_RETURN)) +
    Suppress(SP + (Literal("->") | (Literal(":") + SP + Literal("return"))) + SP) +
    ident
)
# this is our ideal target, with balancing paren and a multiline rest of doc.
paramSeqAndRest = paramSeq + Suppress(')') + Optional(return_type) + Suppress(Optional(Regex(".*(?s)")))
############################################################################################
# Some values are known to be of no use in source and needs to be suppressed.
# Dict is keyed by module names, with "*" meaning "any module";
# values are lists of names of members whose value must be pruned.
SKIP_VALUE_IN_MODULE = {
    "sys": (
        "modules", "path_importer_cache", "argv", "builtins",
        "last_traceback", "last_type", "last_value", "builtin_module_names",
    ),
    "posix": (
        "environ",
    ),
    "zipimport": (
        "_zip_directory_cache",
    ),
    "*": (BUILTIN_MOD_NAME,)
}
# {"module": ("name",..)}: omit the names from the skeleton at all.
OMIT_NAME_IN_MODULE = {}
if version[0] >= 3:
    # py3k: these became keywords and may not be redefined in a skeleton.
    v = OMIT_NAME_IN_MODULE.get(BUILTIN_MOD_NAME, []) + ["True", "False", "None", "__debug__"]
    OMIT_NAME_IN_MODULE[BUILTIN_MOD_NAME] = v
if IS_JAVA and version > (2, 4): # in 2.5.1 things are way weird!
    OMIT_NAME_IN_MODULE['_codecs'] = ['EncodingMap']
    OMIT_NAME_IN_MODULE['_hashlib'] = ['Hash']
# {"module": ("assignment_statement",..)}: extra lines appended to the skeleton.
ADD_VALUE_IN_MODULE = {
    "sys": ("exc_value = Exception()", "exc_traceback=None"), # only present after an exception in current thread
}
# Some values are special and are better represented by hand-crafted constructs.
# Dict is keyed by (module name, member name) and value is the replacement.
REPLACE_MODULE_VALUES = {
    ("numpy.core.multiarray", "typeinfo"): "{}",
    ("psycopg2._psycopg", "string_types"): "{}", # badly mangled __eq__ breaks fmtValue
    ("PyQt5.QtWidgets", "qApp") : "QApplication()", # instead of None
}
if version[0] <= 2:
    REPLACE_MODULE_VALUES[(BUILTIN_MOD_NAME, "None")] = "object()"
    for std_file in ("stdin", "stdout", "stderr"):
        REPLACE_MODULE_VALUES[("sys", std_file)] = "open('')" #
# Some functions and methods of some builtin classes have special signatures.
# {("class", "method"): ("signature_string")}
PREDEFINED_BUILTIN_SIGS = { #TODO: user-skeleton
("type", "__init__"): "(cls, what, bases=None, dict=None)", # two sigs squeezed into one
("object", "__init__"): "(self)",
("object", "__new__"): "(cls, *more)", # only for the sake of parameter names readability
("object", "__subclasshook__"): "(cls, subclass)", # trusting PY-1818 on sig
("int", "__init__"): "(self, x, base=10)", # overrides a fake
("list", "__init__"): "(self, seq=())",
("tuple", "__init__"): "(self, seq=())", # overrides a fake
("set", "__init__"): "(self, seq=())",
("dict", "__init__"): "(self, seq=None, **kwargs)",
("property", "__init__"): "(self, fget=None, fset=None, fdel=None, doc=None)",
# TODO: infer, doc comments have it
("dict", "update"): "(self, E=None, **F)", # docstring nearly lies
(None, "zip"): "(seq1, seq2, *more_seqs)",
(None, "range"): "(start=None, stop=None, step=None)", # suboptimal: allows empty arglist
(None, "filter"): "(function_or_none, sequence)",
(None, "iter"): "(source, sentinel=None)",
(None, "getattr"): "(object, name, default=None)",
('frozenset', "__init__"): "(self, seq=())",
("bytearray", "__init__"): "(self, source=None, encoding=None, errors='strict')",
}
if version[0] < 3:
PREDEFINED_BUILTIN_SIGS[
("unicode", "__init__")] = "(self, string=u'', encoding=None, errors='strict')" # overrides a fake
PREDEFINED_BUILTIN_SIGS[("super", "__init__")] = "(self, type1, type2=None)"
PREDEFINED_BUILTIN_SIGS[
(None, "min")] = "(*args, **kwargs)" # too permissive, but py2.x won't allow a better sig
PREDEFINED_BUILTIN_SIGS[(None, "max")] = "(*args, **kwargs)"
PREDEFINED_BUILTIN_SIGS[("str", "__init__")] = "(self, string='')" # overrides a fake
PREDEFINED_BUILTIN_SIGS[(None, "print")] = "(*args, **kwargs)" # can't do better in 2.x
else:
PREDEFINED_BUILTIN_SIGS[("super", "__init__")] = "(self, type1=None, type2=None)"
PREDEFINED_BUILTIN_SIGS[(None, "min")] = "(*args, key=None)"
PREDEFINED_BUILTIN_SIGS[(None, "max")] = "(*args, key=None)"
PREDEFINED_BUILTIN_SIGS[
(None, "open")] = "(file, mode='r', buffering=None, encoding=None, errors=None, newline=None, closefd=True)"
PREDEFINED_BUILTIN_SIGS[
("str", "__init__")] = "(self, value='', encoding=None, errors='strict')" # overrides a fake
PREDEFINED_BUILTIN_SIGS[("str", "format")] = "(*args, **kwargs)"
PREDEFINED_BUILTIN_SIGS[
("bytes", "__init__")] = "(self, value=b'', encoding=None, errors='strict')" # overrides a fake
PREDEFINED_BUILTIN_SIGS[("bytes", "format")] = "(*args, **kwargs)"
PREDEFINED_BUILTIN_SIGS[(None, "print")] = "(*args, sep=' ', end='\\n', file=None)" # proper signature
if (2, 6) <= version < (3, 0):
PREDEFINED_BUILTIN_SIGS[("unicode", "format")] = "(*args, **kwargs)"
PREDEFINED_BUILTIN_SIGS[("str", "format")] = "(*args, **kwargs)"
if version == (2, 5):
PREDEFINED_BUILTIN_SIGS[("unicode", "splitlines")] = "(keepends=None)" # a typo in docstring there
if version >= (2, 7):
PREDEFINED_BUILTIN_SIGS[
("enumerate", "__init__")] = "(self, iterable, start=0)" # dosctring omits this completely.
if version < (3, 3):
datetime_mod = "datetime"
else:
datetime_mod = "_datetime"
# NOTE: per-module signature data may be lazily imported
# keyed by (module_name, class_name, method_name). PREDEFINED_BUILTIN_SIGS might be a layer of it.
# value is ("signature", "return_literal")
PREDEFINED_MOD_CLASS_SIGS = { #TODO: user-skeleton
(BUILTIN_MOD_NAME, None, 'divmod'): ("(x, y)", "(0, 0)"),
("binascii", None, "hexlify"): ("(data)", BYTES_LIT),
("binascii", None, "unhexlify"): ("(hexstr)", BYTES_LIT),
("time", None, "ctime"): ("(seconds=None)", DEFAULT_STR_LIT),
("_struct", None, "pack"): ("(fmt, *args)", BYTES_LIT),
("_struct", None, "pack_into"): ("(fmt, buffer, offset, *args)", None),
("_struct", None, "unpack"): ("(fmt, string)", None),
("_struct", None, "unpack_from"): ("(fmt, buffer, offset=0)", None),
("_struct", None, "calcsize"): ("(fmt)", INT_LIT),
("_struct", "Struct", "__init__"): ("(self, fmt)", None),
("_struct", "Struct", "pack"): ("(self, *args)", BYTES_LIT),
("_struct", "Struct", "pack_into"): ("(self, buffer, offset, *args)", None),
("_struct", "Struct", "unpack"): ("(self, string)", None),
("_struct", "Struct", "unpack_from"): ("(self, buffer, offset=0)", None),
(datetime_mod, "date", "__new__"): ("(cls, year=None, month=None, day=None)", None),
(datetime_mod, "date", "fromordinal"): ("(cls, ordinal)", "date(1,1,1)"),
(datetime_mod, "date", "fromtimestamp"): ("(cls, timestamp)", "date(1,1,1)"),
(datetime_mod, "date", "isocalendar"): ("(self)", "(1, 1, 1)"),
(datetime_mod, "date", "isoformat"): ("(self)", DEFAULT_STR_LIT),
(datetime_mod, "date", "isoweekday"): ("(self)", INT_LIT),
(datetime_mod, "date", "replace"): ("(self, year=None, month=None, day=None)", "date(1,1,1)"),
(datetime_mod, "date", "strftime"): ("(self, format)", DEFAULT_STR_LIT),
(datetime_mod, "date", "timetuple"): ("(self)", "(0, 0, 0, 0, 0, 0, 0, 0, 0)"),
(datetime_mod, "date", "today"): ("(self)", "date(1, 1, 1)"),
(datetime_mod, "date", "toordinal"): ("(self)", INT_LIT),
(datetime_mod, "date", "weekday"): ("(self)", INT_LIT),
(datetime_mod, "timedelta", "__new__"
): (
"(cls, days=None, seconds=None, microseconds=None, milliseconds=None, minutes=None, hours=None, weeks=None)",
None),
(datetime_mod, "datetime", "__new__"
): (
"(cls, year=None, month=None, day=None, hour=None, minute=None, second=None, microsecond=None, tzinfo=None)",
None),
(datetime_mod, "datetime", "astimezone"): ("(self, tz)", "datetime(1, 1, 1)"),
(datetime_mod, "datetime", "combine"): ("(cls, date, time)", "datetime(1, 1, 1)"),
(datetime_mod, "datetime", "date"): ("(self)", "datetime(1, 1, 1)"),
(datetime_mod, "datetime", "fromtimestamp"): ("(cls, timestamp, tz=None)", "datetime(1, 1, 1)"),
(datetime_mod, "datetime", "isoformat"): ("(self, sep='T')", DEFAULT_STR_LIT),
(datetime_mod, "datetime", "now"): ("(cls, tz=None)", "datetime(1, 1, 1)"),
(datetime_mod, "datetime", "strptime"): ("(cls, date_string, format)", DEFAULT_STR_LIT),
(datetime_mod, "datetime", "replace" ):
(
"(self, year=None, month=None, day=None, hour=None, minute=None, second=None, microsecond=None, tzinfo=None)",
"datetime(1, 1, 1)"),
(datetime_mod, "datetime", "time"): ("(self)", "time(0, 0)"),
(datetime_mod, "datetime", "timetuple"): ("(self)", "(0, 0, 0, 0, 0, 0, 0, 0, 0)"),
(datetime_mod, "datetime", "timetz"): ("(self)", "time(0, 0)"),
(datetime_mod, "datetime", "utcfromtimestamp"): ("(self, timestamp)", "datetime(1, 1, 1)"),
(datetime_mod, "datetime", "utcnow"): ("(cls)", "datetime(1, 1, 1)"),
(datetime_mod, "datetime", "utctimetuple"): ("(self)", "(0, 0, 0, 0, 0, 0, 0, 0, 0)"),
(datetime_mod, "time", "__new__"): (
"(cls, hour=None, minute=None, second=None, microsecond=None, tzinfo=None)", None),
(datetime_mod, "time", "isoformat"): ("(self)", DEFAULT_STR_LIT),
(datetime_mod, "time", "replace"): (
"(self, hour=None, minute=None, second=None, microsecond=None, tzinfo=None)", "time(0, 0)"),
(datetime_mod, "time", "strftime"): ("(self, format)", DEFAULT_STR_LIT),
(datetime_mod, "tzinfo", "dst"): ("(self, date_time)", INT_LIT),
(datetime_mod, "tzinfo", "fromutc"): ("(self, date_time)", "datetime(1, 1, 1)"),
(datetime_mod, "tzinfo", "tzname"): ("(self, date_time)", DEFAULT_STR_LIT),
(datetime_mod, "tzinfo", "utcoffset"): ("(self, date_time)", INT_LIT),
("_io", None, "open"): ("(name, mode=None, buffering=None)", "file('/dev/null')"),
("_io", "FileIO", "read"): ("(self, size=-1)", DEFAULT_STR_LIT),
("_fileio", "_FileIO", "read"): ("(self, size=-1)", DEFAULT_STR_LIT),
("thread", None, "start_new"): ("(function, args, kwargs=None)", INT_LIT),
("_thread", None, "start_new"): ("(function, args, kwargs=None)", INT_LIT),
("itertools", "groupby", "__init__"): ("(self, iterable, key=None)", None),
("itertools", None, "groupby"): ("(iterable, key=None)", LIST_LIT),
("cStringIO", "OutputType", "seek"): ("(self, position, mode=0)", None),
("cStringIO", "InputType", "seek"): ("(self, position, mode=0)", None),
# NOTE: here we stand on shaky ground providing sigs for 3rd-party modules, though well-known
("numpy.core.multiarray", "ndarray", "__array__"): ("(self, dtype=None)", None),
("numpy.core.multiarray", None, "arange"): ("(start=None, stop=None, step=None, dtype=None)", None),
# same as range()
("numpy.core.multiarray", None, "set_numeric_ops"): ("(**ops)", None),
}
bin_collections_names = ['collections', '_collections']
for name in bin_collections_names:
PREDEFINED_MOD_CLASS_SIGS[(name, "deque", "__init__")] = ("(self, iterable=(), maxlen=None)", None)
PREDEFINED_MOD_CLASS_SIGS[(name, "defaultdict", "__init__")] = ("(self, default_factory=None, **kwargs)", None)
if version[0] < 3:
PREDEFINED_MOD_CLASS_SIGS[("exceptions", "BaseException", "__unicode__")] = ("(self)", UNICODE_LIT)
PREDEFINED_MOD_CLASS_SIGS[("itertools", "product", "__init__")] = ("(self, *iterables, **kwargs)", LIST_LIT)
else:
PREDEFINED_MOD_CLASS_SIGS[("itertools", "product", "__init__")] = ("(self, *iterables, repeat=1)", LIST_LIT)
if version[0] < 3:
PREDEFINED_MOD_CLASS_SIGS[("PyQt4.QtCore", None, "pyqtSlot")] = (
"(*types, **keywords)", None) # doc assumes py3k syntax
# known properties of modules
# {{"module": {"class", "property" : ("letters", ("getter", "type"))}},
# where letters is any set of r,w,d (read, write, del) and "getter" is a source of typed getter.
# if value is None, the property should be omitted.
# read-only properties that return an object are not listed.
G_OBJECT = ("lambda self: object()", None)
G_TYPE = ("lambda self: type(object)", "type")
G_DICT = ("lambda self: {}", "dict")
G_STR = ("lambda self: ''", "string")
G_TUPLE = ("lambda self: tuple()", "tuple")
G_FLOAT = ("lambda self: 0.0", "float")
G_INT = ("lambda self: 0", "int")
G_BOOL = ("lambda self: True", "bool")
KNOWN_PROPS = {
BUILTIN_MOD_NAME: {
("object", '__class__'): ('r', G_TYPE),
('complex', 'real'): ('r', G_FLOAT),
('complex', 'imag'): ('r', G_FLOAT),
("file", 'softspace'): ('r', G_BOOL),
("file", 'name'): ('r', G_STR),
("file", 'encoding'): ('r', G_STR),
("file", 'mode'): ('r', G_STR),
("file", 'closed'): ('r', G_BOOL),
("file", 'newlines'): ('r', G_STR),
("slice", 'start'): ('r', G_INT),
("slice", 'step'): ('r', G_INT),
("slice", 'stop'): ('r', G_INT),
("super", '__thisclass__'): ('r', G_TYPE),
("super", '__self__'): ('r', G_TYPE),
("super", '__self_class__'): ('r', G_TYPE),
("type", '__basicsize__'): ('r', G_INT),
("type", '__itemsize__'): ('r', G_INT),
("type", '__base__'): ('r', G_TYPE),
("type", '__flags__'): ('r', G_INT),
("type", '__mro__'): ('r', G_TUPLE),
("type", '__bases__'): ('r', G_TUPLE),
("type", '__dictoffset__'): ('r', G_INT),
("type", '__dict__'): ('r', G_DICT),
("type", '__name__'): ('r', G_STR),
("type", '__weakrefoffset__'): ('r', G_INT),
},
"exceptions": {
("BaseException", '__dict__'): ('r', G_DICT),
("BaseException", 'message'): ('rwd', G_STR),
("BaseException", 'args'): ('r', G_TUPLE),
("EnvironmentError", 'errno'): ('rwd', G_INT),
("EnvironmentError", 'message'): ('rwd', G_STR),
("EnvironmentError", 'strerror'): ('rwd', G_INT),
("EnvironmentError", 'filename'): ('rwd', G_STR),
("SyntaxError", 'text'): ('rwd', G_STR),
("SyntaxError", 'print_file_and_line'): ('rwd', G_BOOL),
("SyntaxError", 'filename'): ('rwd', G_STR),
("SyntaxError", 'lineno'): ('rwd', G_INT),
("SyntaxError", 'offset'): ('rwd', G_INT),
("SyntaxError", 'msg'): ('rwd', G_STR),
("SyntaxError", 'message'): ('rwd', G_STR),
("SystemExit", 'message'): ('rwd', G_STR),
("SystemExit", 'code'): ('rwd', G_OBJECT),
("UnicodeDecodeError", '__basicsize__'): None,
("UnicodeDecodeError", '__itemsize__'): None,
("UnicodeDecodeError", '__base__'): None,
("UnicodeDecodeError", '__flags__'): ('rwd', G_INT),
("UnicodeDecodeError", '__mro__'): None,
("UnicodeDecodeError", '__bases__'): None,
("UnicodeDecodeError", '__dictoffset__'): None,
("UnicodeDecodeError", '__dict__'): None,
("UnicodeDecodeError", '__name__'): None,
("UnicodeDecodeError", '__weakrefoffset__'): None,
("UnicodeEncodeError", 'end'): ('rwd', G_INT),
("UnicodeEncodeError", 'encoding'): ('rwd', G_STR),
("UnicodeEncodeError", 'object'): ('rwd', G_OBJECT),
("UnicodeEncodeError", 'start'): ('rwd', G_INT),
("UnicodeEncodeError", 'reason'): ('rwd', G_STR),
("UnicodeEncodeError", 'message'): ('rwd', G_STR),
("UnicodeTranslateError", 'end'): ('rwd', G_INT),
("UnicodeTranslateError", 'encoding'): ('rwd', G_STR),
("UnicodeTranslateError", 'object'): ('rwd', G_OBJECT),
("UnicodeTranslateError", 'start'): ('rwd', G_INT),
("UnicodeTranslateError", 'reason'): ('rwd', G_STR),
("UnicodeTranslateError", 'message'): ('rwd', G_STR),
},
'_ast': {
("AST", '__dict__'): ('rd', G_DICT),
},
'posix': {
("statvfs_result", 'f_flag'): ('r', G_INT),
("statvfs_result", 'f_bavail'): ('r', G_INT),
("statvfs_result", 'f_favail'): ('r', G_INT),
("statvfs_result", 'f_files'): ('r', G_INT),
("statvfs_result", 'f_frsize'): ('r', G_INT),
("statvfs_result", 'f_blocks'): ('r', G_INT),
("statvfs_result", 'f_ffree'): ('r', G_INT),
("statvfs_result", 'f_bfree'): ('r', G_INT),
("statvfs_result", 'f_namemax'): ('r', G_INT),
("statvfs_result", 'f_bsize'): ('r', G_INT),
("stat_result", 'st_ctime'): ('r', G_INT),
("stat_result", 'st_rdev'): ('r', G_INT),
("stat_result", 'st_mtime'): ('r', G_INT),
("stat_result", 'st_blocks'): ('r', G_INT),
("stat_result", 'st_gid'): ('r', G_INT),
("stat_result", 'st_nlink'): ('r', G_INT),
("stat_result", 'st_ino'): ('r', G_INT),
("stat_result", 'st_blksize'): ('r', G_INT),
("stat_result", 'st_dev'): ('r', G_INT),
("stat_result", 'st_size'): ('r', G_INT),
("stat_result", 'st_mode'): ('r', G_INT),
("stat_result", 'st_uid'): ('r', G_INT),
("stat_result", 'st_atime'): ('r', G_INT),
},
"pwd": {
("struct_pwent", 'pw_dir'): ('r', G_STR),
("struct_pwent", 'pw_gid'): ('r', G_INT),
("struct_pwent", 'pw_passwd'): ('r', G_STR),
("struct_pwent", 'pw_gecos'): ('r', G_STR),
("struct_pwent", 'pw_shell'): ('r', G_STR),
("struct_pwent", 'pw_name'): ('r', G_STR),
("struct_pwent", 'pw_uid'): ('r', G_INT),
("struct_passwd", 'pw_dir'): ('r', G_STR),
("struct_passwd", 'pw_gid'): ('r', G_INT),
("struct_passwd", 'pw_passwd'): ('r', G_STR),
("struct_passwd", 'pw_gecos'): ('r', G_STR),
("struct_passwd", 'pw_shell'): ('r', G_STR),
("struct_passwd", 'pw_name'): ('r', G_STR),
("struct_passwd", 'pw_uid'): ('r', G_INT),
},
"thread": {
("_local", '__dict__'): None
},
"xxsubtype": {
("spamdict", 'state'): ('r', G_INT),
("spamlist", 'state'): ('r', G_INT),
},
"zipimport": {
("zipimporter", 'prefix'): ('r', G_STR),
("zipimporter", 'archive'): ('r', G_STR),
("zipimporter", '_files'): ('r', G_DICT),
},
"_struct": {
("Struct", "size"): ('r', G_INT),
("Struct", "format"): ('r', G_STR),
},
datetime_mod: {
("datetime", "hour"): ('r', G_INT),
("datetime", "minute"): ('r', G_INT),
("datetime", "second"): ('r', G_INT),
("datetime", "microsecond"): ('r', G_INT),
("date", "day"): ('r', G_INT),
("date", "month"): ('r', G_INT),
("date", "year"): ('r', G_INT),
("time", "hour"): ('r', G_INT),
("time", "minute"): ('r', G_INT),
("time", "second"): ('r', G_INT),
("time", "microsecond"): ('r', G_INT),
("timedelta", "days"): ('r', G_INT),
("timedelta", "seconds"): ('r', G_INT),
("timedelta", "microseconds"): ('r', G_INT),
},
}
# Sometimes module X defines item foo but foo.__module__ == 'Y' instead of 'X';
# module Y just re-exports foo, and foo fakes being defined in Y.
# We list all such Ys keyed by X, all fully-qualified names:
# {"real_definer_module": ("fake_reexporter_module",..)}
KNOWN_FAKE_REEXPORTERS = {
"_collections": ('collections',),
"_functools": ('functools',),
"_socket": ('socket',), # .error, etc
"pyexpat": ('xml.parsers.expat',),
"_bsddb": ('bsddb.db',),
"pysqlite2._sqlite": ('pysqlite2.dbapi2',), # errors
"numpy.core.multiarray": ('numpy', 'numpy.core'),
"numpy.core._dotblas": ('numpy', 'numpy.core'),
"numpy.core.umath": ('numpy', 'numpy.core'),
"gtk._gtk": ('gtk', 'gtk.gdk',),
"gobject._gobject": ('gobject',),
"gnomecanvas": ("gnome.canvas",),
}
KNOWN_FAKE_BASES = []
# list of classes that pretend to be base classes but are mere wrappers, and their defining modules
# [(class, module),...] -- real objects, not names
#noinspection PyBroadException
try:
#noinspection PyUnresolvedReferences
import sip as sip_module # Qt specifically likes it
if hasattr(sip_module, 'wrapper'):
KNOWN_FAKE_BASES.append((sip_module.wrapper, sip_module))
if hasattr(sip_module, 'simplewrapper'):
KNOWN_FAKE_BASES.append((sip_module.simplewrapper, sip_module))
del sip_module
except:
pass
# This is a list of builtin classes to use fake init
FAKE_BUILTIN_INITS = (tuple, type, int, str)
if version[0] < 3:
    # py2 only: unicode exists solely there; fetched via getattr so a py3
    # parser does not choke on the name.
    FAKE_BUILTIN_INITS = FAKE_BUILTIN_INITS + (getattr(the_builtins, "unicode"),)
else:
    # py3: look up str/bytes through the builtins module for symmetry.
    FAKE_BUILTIN_INITS = FAKE_BUILTIN_INITS + (getattr(the_builtins, "str"), getattr(the_builtins, "bytes"))
# Some builtin methods are decorated, but this is hard to detect.
# {("class_name", "method_name"): "decorator"}
KNOWN_DECORATORS = {
    ("dict", "fromkeys"): "staticmethod",
    ("object", "__subclasshook__"): "classmethod",
    ("bytearray", "fromhex"): "classmethod",
    ("bytes", "fromhex"): "classmethod",
    ("bytearray", "maketrans"): "staticmethod",
    ("bytes", "maketrans"): "staticmethod",
    ("int", "from_bytes"): "classmethod",
}
classobj_txt = ( #TODO: user-skeleton
"class ___Classobj:" "\n"
" '''A mock class representing the old style class base.'''" "\n"
" __module__ = ''" "\n"
" __class__ = None" "\n"
"\n"
" def __init__(self):" "\n"
" pass" "\n"
" __dict__ = {}" "\n"
" __doc__ = ''" "\n"
)
MAC_STDLIB_PATTERN = re.compile("/System/Library/Frameworks/Python\\.framework/Versions/(.+)/lib/python\\1/(.+)")
MAC_SKIP_MODULES = ["test", "ctypes/test", "distutils/tests", "email/test",
"importlib/test", "json/tests", "lib2to3/tests",
"bsddb/test",
"sqlite3/test", "tkinter/test", "idlelib", "antigravity"]
POSIX_SKIP_MODULES = ["vtemodule", "PAMmodule", "_snackmodule", "/quodlibet/_mmkeys"]
BIN_MODULE_FNAME_PAT = re.compile('([a-zA-Z_]+[0-9a-zA-Z]*)\\.(?:pyc|pyo|(?:[a-zA-Z_]+-\\d\\d[a-zA-Z]*\\.|.+-linux-gnu\\.)?(?:so|pyd))')
# possible binary module filename: letter, alphanum architecture per PEP-3149
TYPELIB_MODULE_FNAME_PAT = re.compile("([a-zA-Z_]+[0-9a-zA-Z]*)[0-9a-zA-Z-.]*\\.typelib")
MODULES_INSPECT_DIR = ['gi.repository']
|
JesseLivezey/pylearn2
|
refs/heads/master
|
pylearn2/utils/python26.py
|
49
|
"""
Compatibility helpers for Python 2.6: provides functools.cmp_to_key on
interpreters older than 2.7.
"""
import functools
import sys
if sys.version_info[:2] < (2, 7):
    def cmp_to_key(mycmp):
        """Convert a cmp= function into a key= function"""
        class _CmpKey(object):
            # One wrapped value per key object; no dict needed.
            __slots__ = ['obj']

            def __init__(self, obj, *args):
                self.obj = obj

            # Each rich comparison defers to the wrapped cmp function.
            def __lt__(self, other):
                return mycmp(self.obj, other.obj) < 0

            def __le__(self, other):
                return mycmp(self.obj, other.obj) <= 0

            def __eq__(self, other):
                return mycmp(self.obj, other.obj) == 0

            def __ne__(self, other):
                return mycmp(self.obj, other.obj) != 0

            def __gt__(self, other):
                return mycmp(self.obj, other.obj) > 0

            def __ge__(self, other):
                return mycmp(self.obj, other.obj) >= 0

            def __hash__(self):
                # Keys are order-only; hashing them would be meaningless.
                raise TypeError('hash not implemented')
        return _CmpKey
else:
    from functools import cmp_to_key
|
abenzbiria/clients_odoo
|
refs/heads/master
|
addons/base_action_rule/base_action_rule.py
|
12
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from datetime import datetime, timedelta
import time
import logging
import openerp
from openerp import SUPERUSER_ID
from openerp.osv import fields, osv
from openerp.tools import DEFAULT_SERVER_DATETIME_FORMAT
_logger = logging.getLogger(__name__)
# Maps a 'trg_date_range_type' value (or False when unset) to a function
# computing the corresponding delay as a timedelta.
DATE_RANGE_FUNCTION = {
    'minutes': lambda interval: timedelta(minutes=interval),
    'hour': lambda interval: timedelta(hours=interval),
    'day': lambda interval: timedelta(days=interval),
    # BUG FIX: timedelta() has no 'months' keyword, so the original entry
    # raised TypeError whenever a month-based delay was evaluated.
    # Approximate a month as 30 days so the trigger date is computable.
    'month': lambda interval: timedelta(days=30 * interval),
    False: lambda interval: timedelta(0),
}
def get_datetime(date_str):
    '''Return a datetime from a date string or a datetime string'''
    # A bare date carries no time part; normalize it to midnight so a
    # single server format string handles both kinds of input.
    normalized = date_str if ' ' in date_str else date_str + " 00:00:00"
    return datetime.strptime(normalized, DEFAULT_SERVER_DATETIME_FORMAT)
class base_action_rule(osv.osv):
""" Base Action Rules """
_name = 'base.action.rule'
_description = 'Action Rules'
_order = 'sequence'
_columns = {
'name': fields.char('Rule Name', required=True),
'model_id': fields.many2one('ir.model', 'Related Document Model',
required=True, domain=[('osv_memory', '=', False)]),
'model': fields.related('model_id', 'model', type="char", string='Model'),
'create_date': fields.datetime('Create Date', readonly=1),
'active': fields.boolean('Active',
help="When unchecked, the rule is hidden and will not be executed."),
'sequence': fields.integer('Sequence',
help="Gives the sequence order when displaying a list of rules."),
'kind': fields.selection(
[('on_create', 'On Creation'),
('on_write', 'On Update'),
('on_create_or_write', 'On Creation & Update'),
('on_time', 'Based on Timed Condition')],
string='When to Run'),
'trg_date_id': fields.many2one('ir.model.fields', string='Trigger Date',
help="When should the condition be triggered. If present, will be checked by the scheduler. If empty, will be checked at creation and update.",
domain="[('model_id', '=', model_id), ('ttype', 'in', ('date', 'datetime'))]"),
'trg_date_range': fields.integer('Delay after trigger date',
help="Delay after the trigger date." \
"You can put a negative number if you need a delay before the" \
"trigger date, like sending a reminder 15 minutes before a meeting."),
'trg_date_range_type': fields.selection([('minutes', 'Minutes'), ('hour', 'Hours'),
('day', 'Days'), ('month', 'Months')], 'Delay type'),
'trg_date_calendar_id': fields.many2one(
'resource.calendar', 'Use Calendar',
help='When calculating a day-based timed condition, it is possible to use a calendar to compute the date based on working days.',
ondelete='set null',
),
'act_user_id': fields.many2one('res.users', 'Set Responsible'),
'act_followers': fields.many2many("res.partner", string="Add Followers"),
'server_action_ids': fields.many2many('ir.actions.server', string='Server Actions',
domain="[('model_id', '=', model_id)]",
help="Examples: email reminders, call object service, etc."),
'filter_pre_id': fields.many2one('ir.filters', string='Before Update Filter',
ondelete='restrict',
domain="[('model_id', '=', model_id.model)]",
help="If present, this condition must be satisfied before the update of the record."),
'filter_id': fields.many2one('ir.filters', string='Filter',
ondelete='restrict',
domain="[('model_id', '=', model_id.model)]",
help="If present, this condition must be satisfied before executing the action rule."),
'last_run': fields.datetime('Last Run', readonly=1, copy=False),
}
_defaults = {
'active': True,
'trg_date_range_type': 'day',
}
def onchange_kind(self, cr, uid, ids, kind, context=None):
clear_fields = []
if kind in ['on_create', 'on_create_or_write']:
clear_fields = ['filter_pre_id', 'trg_date_id', 'trg_date_range', 'trg_date_range_type']
elif kind in ['on_write', 'on_create_or_write']:
clear_fields = ['trg_date_id', 'trg_date_range', 'trg_date_range_type']
elif kind == 'on_time':
clear_fields = ['filter_pre_id']
return {'value': dict.fromkeys(clear_fields, False)}
def _filter(self, cr, uid, action, action_filter, record_ids, context=None):
""" filter the list record_ids that satisfy the action filter """
if record_ids and action_filter:
assert action.model == action_filter.model_id, "Filter model different from action rule model"
model = self.pool[action_filter.model_id]
domain = [('id', 'in', record_ids)] + eval(action_filter.domain)
ctx = dict(context or {})
ctx.update(eval(action_filter.context))
record_ids = model.search(cr, uid, domain, context=ctx)
return record_ids
def _process(self, cr, uid, action, record_ids, context=None):
""" process the given action on the records """
model = self.pool[action.model_id.model]
# modify records
values = {}
if 'date_action_last' in model._all_columns:
values['date_action_last'] = time.strftime(DEFAULT_SERVER_DATETIME_FORMAT)
if action.act_user_id and 'user_id' in model._all_columns:
values['user_id'] = action.act_user_id.id
if values:
model.write(cr, uid, record_ids, values, context=context)
if action.act_followers and hasattr(model, 'message_subscribe'):
follower_ids = map(int, action.act_followers)
model.message_subscribe(cr, uid, record_ids, follower_ids, context=context)
# execute server actions
if action.server_action_ids:
server_action_ids = map(int, action.server_action_ids)
for record in model.browse(cr, uid, record_ids, context):
action_server_obj = self.pool.get('ir.actions.server')
ctx = dict(context, active_model=model._name, active_ids=[record.id], active_id=record.id)
action_server_obj.run(cr, uid, server_action_ids, context=ctx)
return True
    def _register_hook(self, cr, ids=None):
        """ Wrap the methods `create` and `write` of the models specified by
            the rules given by `ids` (or all existing rules if `ids` is `None`.)
        """
        #
        # Note: the patched methods create and write must be defined inside
        # another function, otherwise their closure may be wrong. For instance,
        # the function create refers to the outer variable 'create', which you
        # expect to be bound to create itself. But that expectation is wrong if
        # create is defined inside a loop; in that case, the variable 'create'
        # is bound to the last function defined by the loop.
        #
        def make_create():
            """ instanciate a create method that processes action rules """
            def create(self, cr, uid, vals, context=None, **kwargs):
                # avoid loops or cascading actions
                if context and context.get('action'):
                    # NOTE(review): **kwargs are not forwarded on this
                    # early-return path — confirm this is intentional.
                    return create.origin(self, cr, uid, vals, context=context)
                # call original method with a modified context
                # ('action' flag marks re-entrant calls so rules run only once)
                context = dict(context or {}, action=True)
                new_id = create.origin(self, cr, uid, vals, context=context, **kwargs)
                # as it is a new record, we do not consider the actions that have a prefilter
                action_model = self.pool.get('base.action.rule')
                action_dom = [('model', '=', self._name),
                              ('kind', 'in', ['on_create', 'on_create_or_write'])]
                action_ids = action_model.search(cr, uid, action_dom, context=context)
                # check postconditions, and execute actions on the records that satisfy them
                for action in action_model.browse(cr, uid, action_ids, context=context):
                    if action_model._filter(cr, uid, action, action.filter_id, [new_id], context=context):
                        action_model._process(cr, uid, action, [new_id], context=context)
                return new_id
            return create
        def make_write():
            """ instanciate a write method that processes action rules """
            def write(self, cr, uid, ids, vals, context=None, **kwargs):
                # avoid loops or cascading actions
                if context and context.get('action'):
                    return write.origin(self, cr, uid, ids, vals, context=context)
                # modify context
                context = dict(context or {}, action=True)
                ids = [ids] if isinstance(ids, (int, long, str)) else ids
                # retrieve the action rules to possibly execute
                action_model = self.pool.get('base.action.rule')
                action_dom = [('model', '=', self._name),
                              ('kind', 'in', ['on_write', 'on_create_or_write'])]
                action_ids = action_model.search(cr, uid, action_dom, context=context)
                actions = action_model.browse(cr, uid, action_ids, context=context)
                # check preconditions
                # (records matching the pre-filter BEFORE the write is applied)
                pre_ids = {}
                for action in actions:
                    pre_ids[action] = action_model._filter(cr, uid, action, action.filter_pre_id, ids, context=context)
                # call original method
                write.origin(self, cr, uid, ids, vals, context=context, **kwargs)
                # check postconditions, and execute actions on the records that satisfy them
                for action in actions:
                    post_ids = action_model._filter(cr, uid, action, action.filter_id, pre_ids[action], context=context)
                    if post_ids:
                        action_model._process(cr, uid, action, post_ids, context=context)
                return True
            return write
        updated = False
        if ids is None:
            ids = self.search(cr, SUPERUSER_ID, [])
        for action_rule in self.browse(cr, SUPERUSER_ID, ids):
            model = action_rule.model_id.model
            model_obj = self.pool[model]
            # 'base_action_ruled' marks a model as already patched so the
            # wrappers are installed at most once per model.
            if not hasattr(model_obj, 'base_action_ruled'):
                # monkey-patch methods create and write
                model_obj._patch_method('create', make_create())
                model_obj._patch_method('write', make_write())
                model_obj.base_action_ruled = True
                updated = True
        return updated
def create(self, cr, uid, vals, context=None):
res_id = super(base_action_rule, self).create(cr, uid, vals, context=context)
if self._register_hook(cr, [res_id]):
openerp.modules.registry.RegistryManager.signal_registry_change(cr.dbname)
return res_id
def write(self, cr, uid, ids, vals, context=None):
if isinstance(ids, (int, long)):
ids = [ids]
super(base_action_rule, self).write(cr, uid, ids, vals, context=context)
if self._register_hook(cr, ids):
openerp.modules.registry.RegistryManager.signal_registry_change(cr.dbname)
return True
def onchange_model_id(self, cr, uid, ids, model_id, context=None):
data = {'model': False, 'filter_pre_id': False, 'filter_id': False}
if model_id:
model = self.pool.get('ir.model').browse(cr, uid, model_id, context=context)
data.update({'model': model.model})
return {'value': data}
def _check_delay(self, cr, uid, action, record, record_dt, context=None):
if action.trg_date_calendar_id and action.trg_date_range_type == 'day':
start_dt = get_datetime(record_dt)
action_dt = self.pool['resource.calendar'].schedule_days_get_date(
cr, uid, action.trg_date_calendar_id.id, action.trg_date_range,
day_date=start_dt, compute_leaves=True, context=context
)
else:
delay = DATE_RANGE_FUNCTION[action.trg_date_range_type](action.trg_date_range)
action_dt = get_datetime(record_dt) + delay
return action_dt
    def _check(self, cr, uid, automatic=False, use_new_cursor=False, context=None):
        """ This Function is called by scheduler.

        For every 'on_time' rule, finds the records matching the rule's
        filter whose trigger date (plus delay) falls in the window
        (last_run, now], processes them, and stamps last_run.
        """
        context = context or {}
        # retrieve all the action rules to run based on a timed condition
        action_dom = [('kind', '=', 'on_time')]
        action_ids = self.search(cr, uid, action_dom, context=context)
        for action in self.browse(cr, uid, action_ids, context=context):
            # NOTE(review): naive datetime.now() is compared below against
            # utcfromtimestamp(0) and parsed record datetimes — confirm the
            # server clock/record datetimes share one timezone convention.
            now = datetime.now()
            if action.last_run:
                last_run = get_datetime(action.last_run)
            else:
                last_run = datetime.utcfromtimestamp(0)
            # retrieve all the records that satisfy the action's condition
            model = self.pool[action.model_id.model]
            domain = []
            ctx = dict(context)
            if action.filter_id:
                domain = eval(action.filter_id.domain)
                ctx.update(eval(action.filter_id.context))
                if 'lang' not in ctx:
                    # Filters might be language-sensitive, attempt to reuse creator lang
                    # as we are usually running this as super-user in background
                    [filter_meta] = action.filter_id.get_metadata()
                    user_id = filter_meta['write_uid'] and filter_meta['write_uid'][0] or \
                        filter_meta['create_uid'][0]
                    ctx['lang'] = self.pool['res.users'].browse(cr, uid, user_id).lang
            record_ids = model.search(cr, uid, domain, context=ctx)
            # determine when action should occur for the records
            date_field = action.trg_date_id.name
            if date_field == 'date_action_last' and 'create_date' in model._all_columns:
                # fall back to creation date when the record was never acted on
                get_record_dt = lambda record: record[date_field] or record.create_date
            else:
                get_record_dt = lambda record: record[date_field]
            # process action on the records that should be executed
            for record in model.browse(cr, uid, record_ids, context=context):
                record_dt = get_record_dt(record)
                if not record_dt:
                    continue
                action_dt = self._check_delay(cr, uid, action, record, record_dt, context=context)
                # fire only once: the due date must lie in (last_run, now]
                if last_run <= action_dt < now:
                    try:
                        # NOTE(review): this rebinds the outer `context`
                        # inside the loop, so later iterations (and the next
                        # rule's search) inherit action=True — confirm intended.
                        context = dict(context or {}, action=True)
                        self._process(cr, uid, action, [record.id], context=context)
                    except Exception:
                        import traceback
                        _logger.error(traceback.format_exc())
            action.write({'last_run': now.strftime(DEFAULT_SERVER_DATETIME_FORMAT)})
            if automatic:
                # auto-commit for batch processing
                cr.commit()
|
madarivi/QuantumDynamics
|
refs/heads/master
|
QD_Crank_Nicolson.py
|
1
|
import scipy.sparse as sparse
import scipy.sparse.linalg as linalg
import matplotlib.pyplot as plt
import numpy as np
import matplotlib.animation as animation
import sys
from time import gmtime, strftime
# Crank-Nicolson method QD time evolution simulation of a gaussian
# wave packet incident on a rectangular potential barrier
def main():
    """Sweep epsilon = E/V0, run the Crank-Nicolson evolution for each value,
    and (optionally) animate the packet and plot/save the transmission curve."""
    ## simulation constants ##
    # epsilon is E/v0
    epsilon = np.linspace(0.8, 3.0, 301)
    v0 = 6.125 # barrier height,
    a = 2. # barrier width
    sigma = 22. # gaussian wave packet spread
    dt = .1 # time step
    dx = .1 # spatial grid distance
    L = 300. # simulation space length
    animate_and_print = False # animate the time evolution
    plotTransmission = True # plot the transmission
    saveTransmission = True # save the transmission
    shape = "block" # pick from 'triangle' and 'block'
    T = np.zeros(len(epsilon))
    # NOTE(review): np.floor returns a float; recent numpy requires an int
    # for linspace's num argument — confirm the target numpy accepts this.
    x = np.linspace(0, L, np.floor(L/dx))
    v = potential(x, v0, a, L, shape)
    # loop over epsilon
    # k = sqrt(2 E) is the wave number of the incident packet (E = epsilon*v0)
    for i,k in enumerate(np.sqrt(2*epsilon*v0)):
        print "---- epsilon={eps} ({i}/{n}) ----".format(eps=round(epsilon[i],3),i=i+1,n=len(epsilon))
        # start the packet 3 sigma to the left of the barrier edge
        psi = init_psi(x, (L-a)/2 - 3*sigma, sigma, k, dx)
        A, B = calculate_AB(dx, dt, v)
        if animate_and_print:
            fig, ax ,ims = init_animation(psi, x, v, v0)
            psi, T[i] = run(A, B, x, psi, a, L, dx, ims)
            print_norms(psi, a, L, dx)
            im_ani = animation.ArtistAnimation(fig, ims, interval=10, repeat_delay=500, blit=True)
            plt.show()
        else:
            psi, T[i] = run(A, B, x, psi, a, L, dx)
    if plotTransmission: plot_transmission(epsilon, T, shape, saveTransmission)
###############
## functions ##
###############
# calculate the matrices for the Crank-Nicolson method
# A = Identity + constant*Hamiltonian
# B = Identity - constant*Hamiltonian
def calculate_AB(dx, dt, v):
    """Build the Crank-Nicolson matrices for the step A*psi(t+dt) = B*psi(t).

    A = I + i(dt/2)H and B = I - i(dt/2)H for the discretised Hamiltonian,
    including the wrap-around coupling terms for periodic boundaries.
    """
    n = len(v)
    # two constants to simplify the expressions
    alpha = 1.j * dt / (4. * dx**2)   # nearest-neighbour coupling
    beta = 1.j * dt / 2.              # potential term
    neighbours = sparse.eye(n, n, 1) + sparse.eye(n, n, -1)
    # periodic boundary terms linking the two ends of the grid
    wrap = sparse.diags([[1., 0.], [0., 1.]], [n - 2, 2 - n])
    coupling = neighbours + wrap
    A = sparse.diags(1. + 2.*alpha + beta*v, 0) - alpha * coupling
    B = sparse.diags(1. - 2.*alpha - beta*v, 0) + alpha * coupling
    return A, B
# run the time evolution and return the transmission by solving the
# matrix equation A*psi(t+dt) = B*psi(t) using the bicgstab method.
# The function stops time stepping and calculates the transmission
# when the norm in the barrier has reduced below 10e-6
def run(A, B, x, psi, a, L, dx, ims = None):
    """Time-step A*psi(t+dt) = B*psi(t) with bicgstab until the norm inside
    the barrier has peaked and dropped below the threshold.

    Returns (psi, transmission) where transmission is the norm fraction to
    the right of the barrier.  If ``ims`` is given, a frame is recorded
    every 4th step for later animation.
    """
    y = 2*np.max(abs(psi))**2
    c = cn = i = 0
    # NOTE(review): 10e-6 equals 1e-5, while the original header comment
    # says 10^-6 — confirm which threshold is intended.
    while cn>=c or c>10e-6:
        c = cn
        if ims is not None and i%4==0:   # idiomatic identity test (was `ims!=None`)
            plt.axis((0,L,0,y))
            im, = plt.plot(x, np.abs(psi)**2, 'b')
            ims.append([im])
        # use the sparse stabilized biconjugate gradient method to solve the matrix eq
        [psi, error] = linalg.bicgstab(A, B*psi, x0=psi)
        if error != 0: sys.exit("bicgstab did not converge")
        i = i+1
        # calculate the new norm in the barrier
        cn = sum(abs(psi[int(round((L - a)/(2*dx))):int(round((L + a)/(2*dx)))])**2)*dx
    return psi, sum(abs(psi[int(round((L - a)/(2*dx))):])**2)*dx/(sum(abs(psi)**2)*dx)
# initialize the wave function
def init_psi(x, x0, sigma, k, dx):
    """Return a normalized gaussian wave packet centred at ``x0`` moving in +x
    with wave number ``k`` and spread ``sigma``."""
    envelope = np.exp(-(x - x0)**2 / (2*sigma**2))
    plane_wave = np.exp(1.j * k * x)
    packet = envelope * plane_wave
    # normalize so that sum |psi|^2 dx == 1
    return packet / np.sqrt(sum(abs(packet)**2 * dx))
# initialize a rectangular potential barrier at position s
def potential(x, v0, a, L, shape):
    """Return the potential array: a triangular ramp or a rectangular block
    of width ``a`` and height ``v0`` centred at L/2."""
    inside = abs(x - L/2) < a/2
    if shape.lower() == 'triangle':
        # ramp rising linearly from 0 to v0 across the barrier
        return (v0/a) * (x + (a - L)/2) * inside
    return v0 * inside
# initialize the animation
def init_animation(psi, x, v, v0):
    """Create the figure/axes, draw the (rescaled) potential as a red line,
    and return (fig, ax, ims) where ims collects per-frame artists."""
    fig, ax = plt.subplots()
    # potential scaled so it is visible against |psi|^2
    plt.plot(x, 1.5*max(abs(psi)**2)*v/v0, 'r')
    ims = []
    return fig, ax, ims
# prints norms after every time evolution
def print_norms(psi, a, L, dx):
    """Print the total norm, the norms left/right of the barrier, and the
    transmission estimate (right-side norm fraction).  Python 2 syntax."""
    print "norm wave function : ",
    print sum(abs(psi)**2)*dx
    print "norm left of barrier: ",
    print sum(abs(psi[0:int(round((L - a)/(2*dx)))])**2)*dx
    print "norm right of barrier: ",
    print sum(abs(psi[int(round((L - a)/(2*dx))):])**2)*dx
    print "approximation Transmission: ",
    print sum(abs(psi[int(round((L - a)/(2*dx))):])**2)*dx/(sum(abs(psi)**2)*dx)
# plots the transmission after the run
def plot_transmission(epsilon, T, shape, saveTransmission):
    """Plot transmission vs epsilon = E/V0 (red line at T=1, green dashed
    line at epsilon=1) and optionally save a timestamped PNG."""
    plt.figure()
    plt.title("Transmission of a gaussian wave packet \n through a {s} potential barrier".format(s=shape))
    plt.xlabel('epsilon = E/$V_0$')
    plt.ylabel('Transmission')
    plt.axis((0, np.max(epsilon), 0, 1.1))
    plt.axhline(y=1, linewidth=2, color='r')
    plt.vlines(1, 0, 1, color='g', linestyle='--')
    plt.plot(epsilon, T)
    if saveTransmission:
        # file name is the current UTC time, e.g. 01-02-2015_13-45.png
        plt.savefig("{s}.png".format(s=strftime("%d-%m-%Y_%H-%M", gmtime())))
    plt.show()
##############
##   main   ##
##############
# Guard the entry point so importing this module does not start a simulation.
if __name__ == "__main__":
    main()
|
suninsky/ReceiptOCR
|
refs/heads/master
|
Python/server/lib/python2.7/site-packages/click/testing.py
|
136
|
import os
import sys
import shutil
import tempfile
import contextlib
from ._compat import iteritems, PY2
# If someone wants to vendor click, we want to ensure the
# correct package is discovered. Ideally we could use a
# relative import here but unfortunately Python does not
# support that.
clickpkg = sys.modules[__name__.rsplit('.', 1)[0]]
if PY2:
from cStringIO import StringIO
else:
import io
from ._compat import _find_binary_reader
class EchoingStdin(object):
    """Wrap an input stream so everything read from it is echoed to an
    output stream (used to make stdin visible in captured CLI output)."""

    def __init__(self, input, output):
        self._input = input
        self._output = output

    def __getattr__(self, x):
        # any attribute we don't define ourselves is delegated to the input
        return getattr(self._input, x)

    def _echo(self, rv):
        # write-through to the output stream, then hand the data back
        self._output.write(rv)
        return rv

    def read(self, n=-1):
        return self._echo(self._input.read(n))

    def readline(self, n=-1):
        return self._echo(self._input.readline(n))

    def readlines(self):
        return [self._echo(line) for line in self._input.readlines()]

    def __iter__(self):
        return iter(self._echo(line) for line in self._input)

    def __repr__(self):
        return repr(self._input)
def make_input_stream(input, charset):
    """Coerce ``input`` (None, text, bytes or a stream) into a binary
    input stream suitable for use as stdin."""
    if hasattr(input, 'read'):
        # Already a stream: use as-is on Python 2, otherwise dig out the
        # underlying binary reader.
        if PY2:
            return input
        rv = _find_binary_reader(input)
        if rv is not None:
            return rv
        raise TypeError('Could not find binary reader for input stream.')
    if input is None:
        input = b''
    elif not isinstance(input, bytes):
        input = input.encode(charset)
    return StringIO(input) if PY2 else io.BytesIO(input)
class Result(object):
    """Holds the captured result of an invoked CLI script."""

    def __init__(self, runner, output_bytes, exit_code, exception,
                 exc_info=None):
        #: The runner that created the result
        self.runner = runner
        #: The output as bytes.
        self.output_bytes = output_bytes
        #: The exit code as integer.
        self.exit_code = exit_code
        #: The exception that happened if one did.
        self.exception = exception
        #: The traceback
        self.exc_info = exc_info

    @property
    def output(self):
        """The output as unicode string."""
        decoded = self.output_bytes.decode(self.runner.charset, 'replace')
        return decoded.replace('\r\n', '\n')

    def __repr__(self):
        status = repr(self.exception) if self.exception else 'okay'
        return '<Result %s>' % (status,)
class CliRunner(object):
    """The CLI runner provides functionality to invoke a Click command line
    script for unittesting purposes in a isolated environment.  This only
    works in single-threaded systems without any concurrency as it changes the
    global interpreter state.

    :param charset: the character set for the input and output data.  This is
                    UTF-8 by default and should not be changed currently as
                    the reporting to Click only works in Python 2 properly.
    :param env: a dictionary with environment variables for overriding.
    :param echo_stdin: if this is set to `True`, then reading from stdin writes
                       to stdout.  This is useful for showing examples in
                       some circumstances.  Note that regular prompts
                       will automatically echo the input.
    """
    def __init__(self, charset=None, env=None, echo_stdin=False):
        if charset is None:
            charset = 'utf-8'
        self.charset = charset
        self.env = env or {}
        self.echo_stdin = echo_stdin
    def get_default_prog_name(self, cli):
        """Given a command object it will return the default program name
        for it.  The default is the `name` attribute or ``"root"`` if not
        set.
        """
        return cli.name or 'root'
    def make_env(self, overrides=None):
        """Returns the environment overrides for invoking a script."""
        rv = dict(self.env)
        if overrides:
            rv.update(overrides)
        return rv
    @contextlib.contextmanager
    def isolation(self, input=None, env=None, color=False):
        """A context manager that sets up the isolation for invoking of a
        command line tool.  This sets up stdin with the given input data
        and `os.environ` with the overrides from the given dictionary.
        This also rebinds some internals in Click to be mocked (like the
        prompt functionality).

        This is automatically done in the :meth:`invoke` method.

        Yields the in-memory byte buffer that captures stdout/stderr.

        .. versionadded:: 4.0
           The ``color`` parameter was added.

        :param input: the input stream to put into sys.stdin.
        :param env: the environment overrides as dictionary.
        :param color: whether the output should contain color codes.  The
                      application can still override this explicitly.
        """
        input = make_input_stream(input, self.charset)
        # save all global state that will be patched, so it can be restored
        # in the finally block below
        old_stdin = sys.stdin
        old_stdout = sys.stdout
        old_stderr = sys.stderr
        old_forced_width = clickpkg.formatting.FORCED_WIDTH
        clickpkg.formatting.FORCED_WIDTH = 80
        env = self.make_env(env)
        if PY2:
            sys.stdout = sys.stderr = bytes_output = StringIO()
            if self.echo_stdin:
                input = EchoingStdin(input, bytes_output)
        else:
            # on Python 3, capture bytes underneath and expose text wrappers
            bytes_output = io.BytesIO()
            if self.echo_stdin:
                input = EchoingStdin(input, bytes_output)
            input = io.TextIOWrapper(input, encoding=self.charset)
            sys.stdout = sys.stderr = io.TextIOWrapper(
                bytes_output, encoding=self.charset)
        sys.stdin = input
        # replacement prompt helpers that read from the patched stdin
        def visible_input(prompt=None):
            sys.stdout.write(prompt or '')
            val = input.readline().rstrip('\r\n')
            sys.stdout.write(val + '\n')
            sys.stdout.flush()
            return val
        def hidden_input(prompt=None):
            sys.stdout.write((prompt or '') + '\n')
            sys.stdout.flush()
            return input.readline().rstrip('\r\n')
        def _getchar(echo):
            char = sys.stdin.read(1)
            if echo:
                sys.stdout.write(char)
                sys.stdout.flush()
            return char
        default_color = color
        def should_strip_ansi(stream=None, color=None):
            if color is None:
                return not default_color
            return not color
        # monkey-patch click's prompt/ANSI internals for the duration
        old_visible_prompt_func = clickpkg.termui.visible_prompt_func
        old_hidden_prompt_func = clickpkg.termui.hidden_prompt_func
        old__getchar_func = clickpkg.termui._getchar
        old_should_strip_ansi = clickpkg.utils.should_strip_ansi
        clickpkg.termui.visible_prompt_func = visible_input
        clickpkg.termui.hidden_prompt_func = hidden_input
        clickpkg.termui._getchar = _getchar
        clickpkg.utils.should_strip_ansi = should_strip_ansi
        old_env = {}
        try:
            # apply environment overrides, remembering the previous values
            # (None marks a variable that was absent before)
            for key, value in iteritems(env):
                old_env[key] = os.environ.get(key)
                if value is None:
                    try:
                        del os.environ[key]
                    except Exception:
                        pass
                else:
                    os.environ[key] = value
            yield bytes_output
        finally:
            # restore the environment and all patched globals
            for key, value in iteritems(old_env):
                if value is None:
                    try:
                        del os.environ[key]
                    except Exception:
                        pass
                else:
                    os.environ[key] = value
            sys.stdout = old_stdout
            sys.stderr = old_stderr
            sys.stdin = old_stdin
            clickpkg.termui.visible_prompt_func = old_visible_prompt_func
            clickpkg.termui.hidden_prompt_func = old_hidden_prompt_func
            clickpkg.termui._getchar = old__getchar_func
            clickpkg.utils.should_strip_ansi = old_should_strip_ansi
            clickpkg.formatting.FORCED_WIDTH = old_forced_width
    def invoke(self, cli, args=None, input=None, env=None,
               catch_exceptions=True, color=False, **extra):
        """Invokes a command in an isolated environment.  The arguments are
        forwarded directly to the command line script, the `extra` keyword
        arguments are passed to the :meth:`~clickpkg.Command.main` function of
        the command.

        This returns a :class:`Result` object.

        .. versionadded:: 3.0
           The ``catch_exceptions`` parameter was added.

        .. versionchanged:: 3.0
           The result object now has an `exc_info` attribute with the
           traceback if available.

        .. versionadded:: 4.0
           The ``color`` parameter was added.

        :param cli: the command to invoke
        :param args: the arguments to invoke
        :param input: the input data for `sys.stdin`.
        :param env: the environment overrides.
        :param catch_exceptions: Whether to catch any other exceptions than
                                 ``SystemExit``.
        :param extra: the keyword arguments to pass to :meth:`main`.
        :param color: whether the output should contain color codes.  The
                      application can still override this explicitly.
        """
        exc_info = None
        with self.isolation(input=input, env=env, color=color) as out:
            exception = None
            exit_code = 0
            try:
                cli.main(args=args or (),
                         prog_name=self.get_default_prog_name(cli), **extra)
            except SystemExit as e:
                # SystemExit with code 0 is a normal exit; anything else is
                # recorded as a failure.  Non-integer codes are printed and
                # normalized to 1, mirroring interpreter behaviour.
                if e.code != 0:
                    exception = e
                    exc_info = sys.exc_info()
                exit_code = e.code
                if not isinstance(exit_code, int):
                    sys.stdout.write(str(exit_code))
                    sys.stdout.write('\n')
                    exit_code = 1
            except Exception as e:
                if not catch_exceptions:
                    raise
                exception = e
                exit_code = -1
                exc_info = sys.exc_info()
            finally:
                sys.stdout.flush()
                output = out.getvalue()
        return Result(runner=self,
                      output_bytes=output,
                      exit_code=exit_code,
                      exception=exception,
                      exc_info=exc_info)
    @contextlib.contextmanager
    def isolated_filesystem(self):
        """A context manager that creates a temporary folder and changes
        the current working directory to it for isolated filesystem tests.
        """
        cwd = os.getcwd()
        t = tempfile.mkdtemp()
        os.chdir(t)
        try:
            yield t
        finally:
            os.chdir(cwd)
            # best-effort cleanup; the temp dir may be locked on some systems
            try:
                shutil.rmtree(t)
            except (OSError, IOError):
                pass
|
Crypto-Expert/Electrum-obsolete
|
refs/heads/master
|
gui/qt/main_window.py
|
2
|
#!/usr/bin/env python
#
# Electrum - lightweight Bitcoin client
# Copyright (C) 2012 thomasv@gitorious
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import sys, time, datetime, re, threading
from electrum.i18n import _, set_language
from electrum.util import print_error, print_msg
import os.path, json, ast, traceback
import shutil
import StringIO
import PyQt4
from PyQt4.QtGui import *
from PyQt4.QtCore import *
import PyQt4.QtCore as QtCore
from electrum.bitcoin import MIN_RELAY_TX_FEE, is_valid
from electrum.plugins import run_hook
import icons_rc
from electrum.wallet import format_satoshis
from electrum import Transaction
from electrum import mnemonic
from electrum import util, bitcoin, commands, Interface, Wallet
from electrum import SimpleConfig, Wallet, WalletStorage
from electrum import bmp, pyqrnative
from amountedit import AmountEdit
from network_dialog import NetworkDialog
from qrcodewidget import QRCodeWidget
from decimal import Decimal
import platform
import httplib
import socket
import webbrowser
import csv
# Pick a platform-appropriate monospace font for amount/address displays.
if platform.system() == 'Windows':
    MONOSPACE_FONT = 'Lucida Console'
elif platform.system() == 'Darwin':
    MONOSPACE_FONT = 'Monaco'
else:
    MONOSPACE_FONT = 'monospace'
from electrum import ELECTRUM_VERSION
import re
from util import *
class StatusBarButton(QPushButton):
    """Small flat 25x25 icon button for the status bar; fires ``func`` on
    click or on the Return key."""
    def __init__(self, icon, tooltip, func):
        QPushButton.__init__(self, icon, '')
        self.setToolTip(tooltip)
        self.setFlat(True)
        self.setMaximumWidth(25)
        self.clicked.connect(func)
        self.func = func
        self.setIconSize(QSize(25,25))
    def keyPressEvent(self, e):
        # make Return behave like a click (apply() is the Python 2 builtin)
        if e.key() == QtCore.Qt.Key_Return:
            apply(self.func,())
default_column_widths = { "history":[40,140,350,140], "contacts":[350,330], "receive": [370,200,130] }
class ElectrumWindow(QMainWindow):
    def changeEvent(self, event):
        """Rebuild the tray menu when the window is minimized/restored so the
        Show/Hide entry stays in sync with the window state."""
        flags = self.windowFlags();
        if event and event.type() == QtCore.QEvent.WindowStateChange:
            if self.windowState() & QtCore.Qt.WindowMinimized:
                self.build_menu(True)
                # The only way to toggle the icon in the window managers taskbar is to use the Qt.Tooltip flag
                # The problem is that it somehow creates an (in)visible window that will stay active and prevent
                # Electrum from closing.
                # As for now I have no clue how to implement a proper 'hide to tray' functionality.
                # self.setWindowFlags(flags & ~Qt.ToolTip)
            elif event.oldState() & QtCore.Qt.WindowMinimized:
                self.build_menu(False)
                #self.setWindowFlags(flags | Qt.ToolTip)
def build_menu(self, is_hidden = False):
m = QMenu()
if self.isMinimized():
m.addAction(_("Show"), self.showNormal)
else:
m.addAction(_("Hide"), self.showMinimized)
m.addSeparator()
m.addAction(_("Exit Electrum"), self.close)
self.tray.setContextMenu(m)
def tray_activated(self, reason):
if reason == QSystemTrayIcon.DoubleClick:
self.showNormal()
    def showNormal(self):
        """Clear the minimized bit and activate the window to bring it back."""
        self.setWindowState(self.windowState() & ~QtCore.Qt.WindowMinimized | QtCore.Qt.WindowActive)
    def __init__(self, config, network):
        """Build the main window: tray icon, tabs, shortcuts and network hooks.

        :param config: SimpleConfig-like object with get/set_key.
        :param network: network interface, may be None when offline.
        """
        QMainWindow.__init__(self)
        self.config = config
        self.network = network
        self._close_electrum = False
        self.lite = None
        # platform-specific tray icon variant
        if sys.platform == 'darwin':
            self.icon = QIcon(":icons/electrum_dark_icon.png")
            #self.icon = QIcon(":icons/lock.png")
        else:
            self.icon = QIcon(':icons/electrum_light_icon.png')
        self.tray = QSystemTrayIcon(self.icon, self)
        self.tray.setToolTip('Electrum')
        self.tray.activated.connect(self.tray_activated)
        self.build_menu()
        self.tray.show()
        self.create_status_bar()
        self.need_update = threading.Event()
        self.decimal_point = config.get('decimal_point', 8)
        self.num_zeros = int(config.get('num_zeros',0))
        set_language(config.get('language'))
        self.funds_error = False
        self.completions = QStringListModel()
        self.tabs = tabs = QTabWidget(self)
        self.column_widths = self.config.get("column_widths_2", default_column_widths )
        tabs.addTab(self.create_history_tab(), _('History') )
        tabs.addTab(self.create_send_tab(), _('Send') )
        tabs.addTab(self.create_receive_tab(), _('Receive') )
        tabs.addTab(self.create_contacts_tab(), _('Contacts') )
        tabs.addTab(self.create_console_tab(), _('Console') )
        tabs.setMinimumSize(600, 400)
        tabs.setSizePolicy(QSizePolicy.Expanding, QSizePolicy.Expanding)
        self.setCentralWidget(tabs)
        # restore last window geometry
        g = self.config.get("winpos-qt",[100, 100, 840, 400])
        self.setGeometry(g[0], g[1], g[2], g[3])
        self.setWindowIcon(QIcon(":icons/electrum.png"))
        self.init_menubar()
        # global keyboard shortcuts (PgUp/PgDown cycle through the tabs)
        QShortcut(QKeySequence("Ctrl+W"), self, self.close)
        QShortcut(QKeySequence("Ctrl+R"), self, self.update_wallet)
        QShortcut(QKeySequence("Ctrl+Q"), self, self.close)
        QShortcut(QKeySequence("Ctrl+PgUp"), self, lambda: tabs.setCurrentIndex( (tabs.currentIndex() - 1 )%tabs.count() ))
        QShortcut(QKeySequence("Ctrl+PgDown"), self, lambda: tabs.setCurrentIndex( (tabs.currentIndex() + 1 )%tabs.count() ))
        # Qt signals used to marshal network-thread events onto the GUI thread
        self.connect(self, QtCore.SIGNAL('update_status'), self.update_status)
        self.connect(self, QtCore.SIGNAL('banner_signal'), lambda: self.console.showMessage(self.network.banner) )
        self.connect(self, QtCore.SIGNAL('transaction_signal'), lambda: self.notify_transactions() )
        self.history_list.setFocus(True)
        # network callbacks
        if self.network:
            self.network.register_callback('updated', lambda: self.need_update.set())
            self.network.register_callback('banner', lambda: self.emit(QtCore.SIGNAL('banner_signal')))
            self.network.register_callback('disconnected', lambda: self.emit(QtCore.SIGNAL('update_status')))
            self.network.register_callback('disconnecting', lambda: self.emit(QtCore.SIGNAL('update_status')))
            self.network.register_callback('new_transaction', lambda: self.emit(QtCore.SIGNAL('transaction_signal')))
            # set initial message
            self.console.showMessage(self.network.banner)
        self.wallet = None
        self.init_lite()
    def go_full(self):
        """Switch to the full ('Classic') GUI and persist the preference."""
        self.config.set_key('lite_mode', False, True)
        self.mini.hide()
        self.show()
    def go_lite(self):
        """Switch to the Lite GUI and persist the preference."""
        self.config.set_key('lite_mode', True, True)
        self.hide()
        self.mini.show()
    def init_lite(self):
        """Set up the Lite GUI (requires Qt >= 4.7) and show whichever GUI
        mode the configuration asks for."""
        import lite_window
        if not self.check_qt_version():
            # Lite GUI unavailable: fall back to the Classic GUI and make the
            # config consistent so we don't warn again next start.
            if self.config.get('lite_mode') is True:
                msg = "Electrum was unable to load the 'Lite GUI' because it needs Qt version >= 4.7.\nChanging your config to use the 'Classic' GUI"
                QMessageBox.warning(None, "Could not start Lite GUI.", msg)
                self.config.set_key('lite_mode', False, True)
                sys.exit(0)
            self.mini = None
            self.show()
            return
        actuator = lite_window.MiniActuator(self)
        # Should probably not modify the current path but instead
        # change the behaviour of rsrc(...)
        old_path = QDir.currentPath()
        actuator.load_theme()
        self.mini = lite_window.MiniWindow(actuator, self.go_full, self.config)
        driver = lite_window.MiniDriver(self, self.mini)
        # Reset path back to original value now that loading the GUI
        # is completed.
        QDir.setCurrent(old_path)
        if self.config.get('lite_mode') is True:
            self.go_lite()
        else:
            self.go_full()
def check_qt_version(self):
qtVersion = qVersion()
return int(qtVersion[0]) >= 4 and int(qtVersion[2]) >= 7
def update_account_selector(self):
# account selector
accounts = self.wallet.get_account_names()
self.account_selector.clear()
if len(accounts) > 1:
self.account_selector.addItems([_("All accounts")] + accounts.values())
self.account_selector.setCurrentIndex(0)
self.account_selector.show()
else:
self.account_selector.hide()
    def load_wallet(self, wallet):
        """Attach ``wallet`` to the window and refresh every wallet-dependent
        part of the UI (title, tabs, account selector, menus, console)."""
        # NOTE(review): this import appears unused here — confirm before removal.
        import electrum
        self.wallet = wallet
        self.accounts_expanded = self.wallet.storage.get('accounts_expanded',{})
        self.current_account = self.wallet.storage.get("current_account", None)
        title = 'Electrum ' + self.wallet.electrum_version + ' - ' + self.wallet.storage.path
        if self.wallet.is_watching_only(): title += ' [%s]' % (_('watching only'))
        self.setWindowTitle( title )
        self.update_wallet()
        # Once GUI has been initialized check if we want to announce something since the callback has been called before the GUI was initialized
        self.notify_transactions()
        self.update_account_selector()
        # 'New account' only makes sense for newer seed versions
        self.new_account.setEnabled(self.wallet.seed_version>4)
        self.update_lock_icon()
        self.update_buttons_on_seed()
        self.update_console()
        run_hook('load_wallet', wallet)
def open_wallet(self):
wallet_folder = self.wallet.storage.path
filename = unicode( QFileDialog.getOpenFileName(self, "Select your wallet file", wallet_folder) )
if not filename:
return
storage = WalletStorage({'wallet_path': filename})
if not storage.file_exists:
self.show_message("file not found "+ filename)
return
self.wallet.stop_threads()
# create new wallet
wallet = Wallet(storage)
wallet.start_threads(self.network)
self.load_wallet(wallet)
    def backup_wallet(self):
        """Ask for a destination and copy the wallet file there, reporting
        success or failure in a message box.  (Python 2 except syntax.)"""
        import shutil
        path = self.wallet.storage.path
        wallet_folder = os.path.dirname(path)
        filename = unicode( QFileDialog.getSaveFileName(self, _('Enter a filename for the copy of your wallet'), wallet_folder) )
        if not filename:
            return
        new_path = os.path.join(wallet_folder, filename)
        # avoid copying the file onto itself
        if new_path != path:
            try:
                shutil.copy2(path, new_path)
                QMessageBox.information(None,"Wallet backup created", _("A copy of your wallet file was created in")+" '%s'" % str(new_path))
            except (IOError, os.error), reason:
                QMessageBox.critical(None,"Unable to create backup", _("Electrum was unable to copy your wallet file to the specified location.")+"\n" + str(reason))
def new_wallet(self):
import installwizard
wallet_folder = os.path.dirname(self.wallet.storage.path)
filename = unicode( QFileDialog.getSaveFileName(self, _('Enter a new file name'), wallet_folder) )
if not filename:
return
filename = os.path.join(wallet_folder, filename)
storage = WalletStorage({'wallet_path': filename})
if storage.file_exists:
QMessageBox.critical(None, "Error", _("File exists"))
return
wizard = installwizard.InstallWizard(self.config, self.network, storage)
wallet = wizard.run()
if wallet:
self.load_wallet(wallet)
def init_menubar(self):
    """Build the main-window menu bar (File/Wallet/Tools/Help) and wire every action."""
    menubar = QMenuBar()
    # --- File menu ---
    file_menu = menubar.addMenu(_("&File"))
    open_wallet_action = file_menu.addAction(_("&Open"))
    open_wallet_action.triggered.connect(self.open_wallet)
    new_wallet_action = file_menu.addAction(_("&Create/Restore"))
    new_wallet_action.triggered.connect(self.new_wallet)
    wallet_backup = file_menu.addAction(_("&Copy"))
    wallet_backup.triggered.connect(self.backup_wallet)
    quit_item = file_menu.addAction(_("&Close"))
    quit_item.triggered.connect(self.close)
    # --- Wallet menu ---
    wallet_menu = menubar.addMenu(_("&Wallet"))
    new_contact = wallet_menu.addAction(_("&New contact"))
    new_contact.triggered.connect(self.new_contact_dialog)
    # kept on self so its enabled state can be toggled elsewhere (seed-version check)
    self.new_account = wallet_menu.addAction(_("&New account"))
    self.new_account.triggered.connect(self.new_account_dialog)
    wallet_menu.addSeparator()
    pw = wallet_menu.addAction(_("&Password"))
    pw.triggered.connect(self.change_password_dialog)
    show_seed = wallet_menu.addAction(_("&Seed"))
    show_seed.triggered.connect(self.show_seed_dialog)
    show_mpk = wallet_menu.addAction(_("&Master Public Key"))
    show_mpk.triggered.connect(self.show_master_public_key)
    wallet_menu.addSeparator()
    labels_menu = wallet_menu.addMenu(_("&Labels"))
    import_labels = labels_menu.addAction(_("&Import"))
    import_labels.triggered.connect(self.do_import_labels)
    export_labels = labels_menu.addAction(_("&Export"))
    export_labels.triggered.connect(self.do_export_labels)
    keys_menu = wallet_menu.addMenu(_("&Private keys"))
    import_keys = keys_menu.addAction(_("&Import"))
    import_keys.triggered.connect(self.do_import_privkey)
    export_keys = keys_menu.addAction(_("&Export"))
    export_keys.triggered.connect(self.do_export_privkeys)
    ex_history = wallet_menu.addAction(_("&Export History"))
    ex_history.triggered.connect(self.do_export_history)
    # --- Tools menu ---
    tools_menu = menubar.addMenu(_("&Tools"))
    # Settings / Preferences are all reserved keywords in OSX using this as work around
    preferences_name = _("Electrum preferences") if sys.platform == 'darwin' else _("Preferences")
    preferences_menu = tools_menu.addAction(preferences_name)
    preferences_menu.triggered.connect(self.settings_dialog)
    network = tools_menu.addAction(_("&Network"))
    network.triggered.connect(self.run_network_dialog)
    plugins_labels = tools_menu.addAction(_("&Plugins"))
    plugins_labels.triggered.connect(self.plugins_dialog)
    tools_menu.addSeparator()
    csv_transaction_menu = tools_menu.addMenu(_("&Create transaction"))
    csv_transaction_file = csv_transaction_menu.addAction(_("&From CSV file"))
    csv_transaction_file.triggered.connect(self.do_process_from_csv_file)
    csv_transaction_text = csv_transaction_menu.addAction(_("&From CSV text"))
    csv_transaction_text.triggered.connect(self.do_process_from_csv_text)
    raw_transaction_menu = tools_menu.addMenu(_("&Load transaction"))
    raw_transaction_file = raw_transaction_menu.addAction(_("&From file"))
    raw_transaction_file.triggered.connect(self.do_process_from_file)
    raw_transaction_text = raw_transaction_menu.addAction(_("&From text"))
    raw_transaction_text.triggered.connect(self.do_process_from_text)
    # --- Help menu ---
    help_menu = menubar.addMenu(_("&Help"))
    show_about = help_menu.addAction(_("&About"))
    show_about.triggered.connect(self.show_about)
    web_open = help_menu.addAction(_("&Official website"))
    web_open.triggered.connect(lambda: webbrowser.open("http://electrum.org"))
    help_menu.addSeparator()
    doc_open = help_menu.addAction(_("&Documentation"))
    doc_open.triggered.connect(lambda: webbrowser.open("http://electrum.org/documentation.html"))
    report_bug = help_menu.addAction(_("&Report Bug"))
    report_bug.triggered.connect(self.show_report_bug)
    self.setMenuBar(menubar)
def show_about(self):
    """Show the About dialog with the running Electrum version."""
    QMessageBox.about(self, "Electrum",
                      _("Version")+" %s" % (self.wallet.electrum_version) + "\n\n" + _("Electrum's focus is speed, with low resource usage and simplifying Bitcoin. You do not need to perform regular backups, because your wallet can be recovered from a secret phrase that you can memorize or write on paper. Startup times are instant because it operates in conjunction with high-performance servers that handle the most complicated parts of the Bitcoin system."))
def show_report_bug(self):
    """Point the user at the GitHub issue tracker."""
    QMessageBox.information(self, "Electrum - " + _("Reporting Bugs"),
                            _("Please report any bugs as issues on github:")+" <a href=\"https://github.com/spesmilo/electrum/issues\">https://github.com/spesmilo/electrum/issues</a>")
def notify_transactions(self):
    """Show tray notifications for incoming transactions queued by the network layer.

    Three or more pending transactions are combined into a single summary
    notification; fewer are announced one by one.
    """
    if not self.network or not self.network.is_connected():
        return
    print_error("Notifying GUI")
    if len(self.network.interface.pending_transactions_for_notifications) > 0:
        # Combine the transactions if there are at least three
        tx_amount = len(self.network.interface.pending_transactions_for_notifications)
        if(tx_amount >= 3):
            total_amount = 0
            for tx in self.network.interface.pending_transactions_for_notifications:
                is_relevant, is_mine, v, fee = self.wallet.get_tx_value(tx)
                if(v > 0):
                    total_amount += v
            self.notify(_("%(txs)s new transactions received. Total amount received in the new transactions %(amount)s %(unit)s") \
                        % { 'txs' : tx_amount, 'amount' : self.format_amount(total_amount), 'unit' : self.base_unit()})
            self.network.interface.pending_transactions_for_notifications = []
        else:
            # bug fix: iterate over a snapshot — the original removed entries from
            # the same list it was iterating, which skips every other transaction
            for tx in list(self.network.interface.pending_transactions_for_notifications):
                if tx:
                    self.network.interface.pending_transactions_for_notifications.remove(tx)
                    is_relevant, is_mine, v, fee = self.wallet.get_tx_value(tx)
                    if(v > 0):
                        self.notify(_("New transaction received. %(amount)s %(unit)s") % { 'amount' : self.format_amount(v), 'unit' : self.base_unit()})
def notify(self, message):
    """Show a system-tray balloon message for 20 seconds."""
    self.tray.showMessage("Electrum", message, QSystemTrayIcon.Information, 20000)
# custom wrappers for getOpenFileName and getSaveFileName, that remember the path selected by the user
def getOpenFileName(self, title, filter = ""):
    """Open-file dialog that starts in, and persists, the last-used directory ('io_dir')."""
    directory = self.config.get('io_dir', os.path.expanduser('~'))
    fileName = unicode( QFileDialog.getOpenFileName(self, title, directory, filter) )
    if fileName and directory != os.path.dirname(fileName):
        self.config.set_key('io_dir', os.path.dirname(fileName), True)
    return fileName
def getSaveFileName(self, title, filename, filter = ""):
    """Save-file dialog that starts in, and persists, the last-used directory ('io_dir')."""
    directory = self.config.get('io_dir', os.path.expanduser('~'))
    path = os.path.join( directory, filename )
    fileName = unicode( QFileDialog.getSaveFileName(self, title, path, filter) )
    if fileName and directory != os.path.dirname(fileName):
        self.config.set_key('io_dir', os.path.dirname(fileName), True)
    return fileName
def close(self):
    """Close the main window, giving plugins a chance to react via the hook."""
    QMainWindow.close(self)
    run_hook('close_main_window')
def connect_slots(self, sender):
    """Hook the periodic 'timersignal' to timer_actions and reset payto memo."""
    self.connect(sender, QtCore.SIGNAL('timersignal'), self.timer_actions)
    self.previous_payto_e=''
def timer_actions(self):
    """Periodic tick: refresh the wallet view if flagged, then run plugin timers."""
    if self.need_update.is_set():
        self.update_wallet()
        self.need_update.clear()
    run_hook('timer_actions')
def format_amount(self, x, is_diff=False, whitespaces=False):
    """Render a satoshi amount using the window's configured precision settings."""
    zeros = self.num_zeros
    decimals = self.decimal_point
    return format_satoshis(x, is_diff, zeros, decimals, whitespaces)
def read_amount(self, x):
    """Parse a user-entered amount string into satoshis; None for blank input."""
    if x == '' or x == '.':
        return None
    scale = pow(10, self.decimal_point)
    return int(Decimal(x) * scale)
def base_unit(self):
    """Name of the display unit implied by the decimal precision (8 -> BTC, 5 -> mBTC)."""
    assert self.decimal_point in [5,8]
    if self.decimal_point == 8:
        return "BTC"
    return "mBTC"
def update_status(self):
    """Refresh the status-bar balance text, tray tooltip, and connection icon."""
    if self.network is None:
        text = _("Offline")
        icon = QIcon(":icons/status_disconnected.png")
    elif self.network.is_connected():
        if not self.wallet.up_to_date:
            text = _("Synchronizing...")
            icon = QIcon(":icons/status_waiting.png")
        elif self.network.server_lag > 1:
            text = _("Server is lagging (%d blocks)"%self.network.server_lag)
            icon = QIcon(":icons/status_lagging.png")
        else:
            c, u = self.wallet.get_account_balance(self.current_account)
            text = _( "Balance" ) + ": %s "%( self.format_amount(c) ) + self.base_unit()
            if u: text += " [%s unconfirmed]"%( self.format_amount(u,True).strip() )
            # let plugins (e.g. exchange rates) append a fiat quote
            r = {}
            run_hook('set_quote_text', c+u, r)
            quote = r.get(0)
            if quote:
                text += " (%s)"%quote
            self.tray.setToolTip(text)
            icon = QIcon(":icons/status_connected.png")
    else:
        text = _("Not connected")
        icon = QIcon(":icons/status_disconnected.png")
    self.balance_label.setText(text)
    self.status_button.setIcon( icon )
def update_wallet(self):
    """Refresh the status bar and, when synced (or offline), all data tabs."""
    self.update_status()
    if self.wallet.up_to_date or not self.network or not self.network.is_connected():
        self.update_history_tab()
        self.update_receive_tab()
        self.update_contacts_tab()
        self.update_completions()
def create_history_tab(self):
    """Create the 5-column transaction-history tree widget and wire its signals."""
    self.history_list = l = MyTreeWidget(self)
    l.setColumnCount(5)
    for i,width in enumerate(self.column_widths['history']):
        l.setColumnWidth(i, width)
    l.setHeaderLabels( [ '', _('Date'), _('Description') , _('Amount'), _('Balance')] )
    self.connect(l, SIGNAL('itemDoubleClicked(QTreeWidgetItem*, int)'), self.tx_label_clicked)
    self.connect(l, SIGNAL('itemChanged(QTreeWidgetItem*, int)'), self.tx_label_changed)
    l.customContextMenuRequested.connect(self.create_history_menu)
    return l
def create_history_menu(self, position):
    """Build and pop up the context menu for the currently selected history row."""
    self.history_list.selectedIndexes()
    item = self.history_list.currentItem()
    if not item: return
    tx_hash = str(item.data(0, Qt.UserRole).toString())
    if not tx_hash: return
    menu = QMenu()
    menu.addAction(_("Copy ID to Clipboard"), lambda: self.app.clipboard().setText(tx_hash))
    menu.addAction(_("Details"), lambda: self.show_transaction(self.wallet.transactions.get(tx_hash)))
    menu.addAction(_("Edit description"), lambda: self.tx_label_clicked(item,2))
    # bug fix: map the click position through the history list's viewport (the
    # widget that emitted the signal), not the contacts list, so the menu opens
    # at the cursor instead of at an unrelated widget's coordinates
    menu.exec_(self.history_list.viewport().mapToGlobal(position))
def show_transaction(self, tx):
    """Open the modal transaction-details dialog for tx."""
    import transaction_dialog
    d = transaction_dialog.TxDialog(tx, self)
    d.exec_()
def tx_label_clicked(self, item, column):
    """Start inline editing of a history row's description (column 2)."""
    if column==2 and item.isSelected():
        # is_edit guards tx_label_changed against the programmatic flag changes below
        self.is_edit=True
        item.setFlags(Qt.ItemIsEditable|Qt.ItemIsSelectable | Qt.ItemIsUserCheckable | Qt.ItemIsEnabled | Qt.ItemIsDragEnabled)
        self.history_list.editItem( item, column )
        item.setFlags(Qt.ItemIsSelectable | Qt.ItemIsUserCheckable | Qt.ItemIsEnabled | Qt.ItemIsDragEnabled)
        self.is_edit=False
def tx_label_changed(self, item, column):
    """Persist an edited history description; restore the default label when cleared."""
    # re-entrancy guard: itemChanged also fires for our own setText below
    if self.is_edit:
        return
    self.is_edit=True
    tx_hash = str(item.data(0, Qt.UserRole).toString())
    tx = self.wallet.transactions.get(tx_hash)
    text = unicode( item.text(2) )
    self.wallet.set_label(tx_hash, text)
    if text:
        item.setForeground(2, QBrush(QColor('black')))
    else:
        # empty label: fall back to the wallet's default label, shown in gray
        text = self.wallet.get_default_label(tx_hash)
        item.setText(2, text)
        item.setForeground(2, QBrush(QColor('gray')))
    self.is_edit=False
def edit_label(self, is_recv):
    """Start inline editing of the label column in the receive or contacts list."""
    l = self.receive_list if is_recv else self.contacts_list
    item = l.currentItem()
    # temporarily mark the item editable, edit, then restore the flags
    item.setFlags(Qt.ItemIsEditable|Qt.ItemIsSelectable | Qt.ItemIsUserCheckable | Qt.ItemIsEnabled | Qt.ItemIsDragEnabled)
    l.editItem( item, 1 )
    item.setFlags(Qt.ItemIsSelectable | Qt.ItemIsUserCheckable | Qt.ItemIsEnabled | Qt.ItemIsDragEnabled)
def address_label_clicked(self, item, column, l, column_addr, column_label):
    """Start inline editing of an address row's label column in list l.

    Only rows flagged editable via data role 32 may be edited.
    """
    if column == column_label and item.isSelected():
        is_editable = item.data(0, 32).toBool()
        if not is_editable:
            return
        # cleanup: the original computed unused `addr`/`label` locals here
        # temporarily mark the item editable, edit, then restore the flags
        item.setFlags(Qt.ItemIsEditable|Qt.ItemIsSelectable | Qt.ItemIsUserCheckable | Qt.ItemIsEnabled | Qt.ItemIsDragEnabled)
        l.editItem( item, column )
        item.setFlags(Qt.ItemIsSelectable | Qt.ItemIsUserCheckable | Qt.ItemIsEnabled | Qt.ItemIsDragEnabled)
def address_label_changed(self, item, column, l, column_addr, column_label):
    """Persist an edited address label and propagate the change to other views."""
    if column == column_label:
        addr = unicode( item.text(column_addr) )
        text = unicode( item.text(column_label) )
        # only rows flagged editable (data role 32) may be edited
        is_editable = item.data(0, 32).toBool()
        if not is_editable:
            return
        changed = self.wallet.set_label(addr, text)
        if changed:
            self.update_history_tab()
            self.update_completions()
        self.current_item_changed(item)
    run_hook('item_changed', item, column)
def current_item_changed(self, a):
    """Notify plugins of a selection change and reset the 'pay from' selection."""
    run_hook('current_item_changed', a)
    self.pay_from = []
    # re-emit the tab-change signal so the send tab's from-list refreshes
    self.tabs.emit(SIGNAL('currentChanged(int)'), 1)
def update_history_tab(self):
    """Rebuild the history list from the wallet's transaction history."""
    self.history_list.clear()
    for item in self.wallet.get_tx_history(self.current_account):
        tx_hash, conf, is_mine, value, fee, balance, timestamp = item
        time_str = _("unknown")
        if conf > 0:
            try:
                time_str = datetime.datetime.fromtimestamp( timestamp).isoformat(' ')[:-3]
            except Exception:
                time_str = _("error")
        # icon reflects confirmation progress: unverified/pending/clock1-5/confirmed
        if conf == -1:
            time_str = 'unverified'
            icon = QIcon(":icons/unconfirmed.png")
        elif conf == 0:
            time_str = 'pending'
            icon = QIcon(":icons/unconfirmed.png")
        elif conf < 6:
            icon = QIcon(":icons/clock%d.png"%conf)
        else:
            icon = QIcon(":icons/confirmed.png")
        if value is not None:
            v_str = self.format_amount(value, True, whitespaces=True)
        else:
            v_str = '--'
        balance_str = self.format_amount(balance, whitespaces=True)
        if tx_hash:
            label, is_default_label = self.wallet.get_label(tx_hash)
        else:
            label = _('Pruned transaction outputs')
            is_default_label = False
        item = QTreeWidgetItem( [ '', time_str, label, v_str, balance_str] )
        item.setFont(2, QFont(MONOSPACE_FONT))
        item.setFont(3, QFont(MONOSPACE_FONT))
        item.setFont(4, QFont(MONOSPACE_FONT))
        # outgoing amounts are shown in red
        if value < 0:
            item.setForeground(3, QBrush(QColor("#BC1E1E")))
        if tx_hash:
            item.setData(0, Qt.UserRole, tx_hash)
            item.setToolTip(0, "%d %s\nTxId:%s" % (conf, _('Confirmations'), tx_hash) )
        if is_default_label:
            item.setForeground(2, QBrush(QColor('grey')))
        item.setIcon(0, icon)
        # insert at the top so the newest transaction is first
        self.history_list.insertTopLevelItem(0,item)
    self.history_list.setCurrentItem(self.history_list.topLevelItem(0))
def create_send_tab(self):
    """Build the Send tab: payto/description/amount/fee fields with live validation."""
    w = QWidget()
    grid = QGridLayout()
    grid.setSpacing(8)
    grid.setColumnMinimumWidth(3,300)
    grid.setColumnStretch(5,1)
    self.payto_e = QLineEdit()
    grid.addWidget(QLabel(_('Pay to')), 1, 0)
    grid.addWidget(self.payto_e, 1, 1, 1, 3)
    grid.addWidget(HelpButton(_('Recipient of the funds.') + '\n\n' + _('You may enter a Bitcoin address, a label from your list of contacts (a list of completions will be proposed), or an alias (email-like address that forwards to a Bitcoin address)')), 1, 4)
    # auto-complete recipients from the contacts/labels model
    completer = QCompleter()
    completer.setCaseSensitivity(False)
    self.payto_e.setCompleter(completer)
    completer.setModel(self.completions)
    self.message_e = QLineEdit()
    grid.addWidget(QLabel(_('Description')), 2, 0)
    grid.addWidget(self.message_e, 2, 1, 1, 3)
    grid.addWidget(HelpButton(_('Description of the transaction (not mandatory).') + '\n\n' + _('The description is not sent to the recipient of the funds. It is stored in your wallet file, and displayed in the \'History\' tab.')), 2, 4)
    # optional 'pay from' source list, shown only when addresses are preselected
    self.pay_from = []
    self.from_label = QLabel(_('From'))
    grid.addWidget(self.from_label, 3, 0)
    self.from_list = QTreeWidget(self)
    self.from_list.setColumnCount(2)
    self.from_list.setColumnWidth(0, 350)
    self.from_list.setColumnWidth(1, 50)
    self.from_list.setHeaderHidden (True)
    self.from_list.setMaximumHeight(80)
    grid.addWidget(self.from_list, 3, 1, 1, 3)
    self.connect(self.tabs, SIGNAL('currentChanged(int)'), lambda: self.update_pay_from_list(grid))
    self.amount_e = AmountEdit(self.base_unit)
    grid.addWidget(QLabel(_('Amount')), 4, 0)
    grid.addWidget(self.amount_e, 4, 1, 1, 2)
    grid.addWidget(HelpButton(
            _('Amount to be sent.') + '\n\n' \
                + _('The amount will be displayed in red if you do not have enough funds in your wallet. Note that if you have frozen some of your addresses, the available funds will be lower than your total balance.') \
                + '\n\n' + _('Keyboard shortcut: type "!" to send all your coins.')), 4, 3)
    self.fee_e = AmountEdit(self.base_unit)
    grid.addWidget(QLabel(_('Fee')), 5, 0)
    grid.addWidget(self.fee_e, 5, 1, 1, 2)
    grid.addWidget(HelpButton(
            _('Bitcoin transactions are in general not free. A transaction fee is paid by the sender of the funds.') + '\n\n'\
                + _('The amount of fee can be decided freely by the sender. However, transactions with low fees take more time to be processed.') + '\n\n'\
                + _('A suggested fee is automatically added to this field. You may override it. The suggested fee increases with the size of the transaction.')), 5, 3)
    self.send_button = EnterButton(_("Send"), self.do_send)
    grid.addWidget(self.send_button, 6, 1)
    b = EnterButton(_("Clear"),self.do_clear)
    grid.addWidget(b, 6, 2)
    self.payto_sig = QLabel('')
    grid.addWidget(self.payto_sig, 7, 0, 1, 4)
    QShortcut(QKeySequence("Up"), w, w.focusPreviousChild)
    QShortcut(QKeySequence("Down"), w, w.focusNextChild)
    w.setLayout(grid)
    w2 = QWidget()
    vbox = QVBoxLayout()
    vbox.addWidget(w)
    vbox.addStretch(1)
    w2.setLayout(vbox)
    def entry_changed( is_fee ):
        # Live validation: recompute inputs/fee on every amount or fee keystroke
        # and color the fields red when funds are insufficient.
        self.funds_error = False
        if self.amount_e.is_shortcut:
            # "!" shortcut: send the whole sendable balance minus the estimated fee
            self.amount_e.is_shortcut = False
            sendable = self.get_sendable_balance()
            inputs, total, fee = self.wallet.choose_tx_inputs( sendable, 0, self.get_payment_sources())
            fee = self.wallet.estimated_fee(inputs)
            amount = total - fee
            self.amount_e.setText( self.format_amount(amount) )
            self.fee_e.setText( self.format_amount( fee ) )
            return
        amount = self.read_amount(str(self.amount_e.text()))
        fee = self.read_amount(str(self.fee_e.text()))
        # when the amount changed (not the fee), let the wallet suggest a fee
        if not is_fee: fee = None
        if amount is None:
            return
        inputs, total, fee = self.wallet.choose_tx_inputs(amount, fee, self.get_payment_sources())
        if not is_fee:
            self.fee_e.setText( self.format_amount( fee ) )
        if inputs:
            palette = QPalette()
            palette.setColor(self.amount_e.foregroundRole(), QColor('black'))
            text = ""
        else:
            palette = QPalette()
            palette.setColor(self.amount_e.foregroundRole(), QColor('red'))
            self.funds_error = True
            text = _( "Not enough funds" )
            c, u = self.wallet.get_frozen_balance()
            if c+u: text += ' (' + self.format_amount(c+u).strip() + self.base_unit() + ' ' +_("are frozen") + ')'
        self.statusBar().showMessage(text)
        self.amount_e.setPalette(palette)
        self.fee_e.setPalette(palette)
    self.amount_e.textChanged.connect(lambda: entry_changed(False) )
    self.fee_e.textChanged.connect(lambda: entry_changed(True) )
    run_hook('create_send_tab', grid)
    return w2
def update_pay_from_list(self, grid):
    """Refresh the 'From' list on the send tab; hidden when no addresses are preselected."""
    self.from_list.clear()
    self.from_label.setHidden(len(self.pay_from) == 0)
    self.from_list.setHidden(len(self.pay_from) == 0)
    for addr in self.pay_from:
        c, u = self.wallet.get_addr_balance(addr)
        balance = self.format_amount(c + u)
        self.from_list.addTopLevelItem(QTreeWidgetItem( [addr, balance] ))
def update_completions(self):
    """Rebuild the payto auto-completion list from labelled addressbook entries."""
    l = []
    for addr,label in self.wallet.labels.items():
        if addr in self.wallet.addressbook:
            # completion format: label <address>
            l.append( label + ' <' + addr + '>')
    run_hook('update_completions', l)
    self.completions.setStringList(l)
def protected(func):
    """Decorator: route the call through self.do_protect so a password is requested first."""
    def wrapper(s, *args):
        return s.do_protect(func, args)
    return wrapper
def do_send(self):
    """Validate the send-tab fields and hand off to send_tx (password-protected)."""
    label = unicode( self.message_e.text() )
    r = unicode( self.payto_e.text() )
    r = r.strip()
    # label or alias, with address in brackets
    m = re.match('(.*?)\s*\<([1-9A-HJ-NP-Za-km-z]{26,})\>', r)
    to_address = m.group(2) if m else r
    if not is_valid(to_address):
        QMessageBox.warning(self, _('Error'), _('Invalid Bitcoin Address') + ':\n' + to_address, _('OK'))
        return
    try:
        amount = self.read_amount(unicode( self.amount_e.text()))
    except Exception:
        QMessageBox.warning(self, _('Error'), _('Invalid Amount'), _('OK'))
        return
    try:
        fee = self.read_amount(unicode( self.fee_e.text()))
    except Exception:
        QMessageBox.warning(self, _('Error'), _('Invalid Fee'), _('OK'))
        return
    # ask for confirmation above the configured threshold (default 1 BTC in satoshis)
    confirm_amount = self.config.get('confirm_amount', 100000000)
    if amount >= confirm_amount:
        if not self.question(_("send %(amount)s to %(address)s?")%{ 'amount' : self.format_amount(amount) + ' '+ self.base_unit(), 'address' : to_address}):
            return
    self.send_tx(to_address, amount, fee, label)
@protected
def send_tx(self, to_address, amount, fee, label, password):
    """Build, sign and broadcast a transaction (password supplied by @protected)."""
    try:
        tx = self.wallet.mktx( [(to_address, amount)], password, fee,
                               domain=self.get_payment_sources())
    except Exception as e:
        traceback.print_exc(file=sys.stdout)
        self.show_message(str(e))
        return
    if tx.requires_fee(self.wallet.verifier) and fee < MIN_RELAY_TX_FEE:
        QMessageBox.warning(self, _('Error'), _("This transaction requires a higher fee, or it will not be propagated by the network."), _('OK'))
        return
    if label:
        self.wallet.set_label(tx.hash(), label)
    if tx.is_complete:
        # fully signed: broadcast and report the result
        h = self.wallet.send_tx(tx)
        waiting_dialog(lambda: False if self.wallet.tx_event.isSet() else _("Please wait..."))
        status, msg = self.wallet.receive_tx( h )
        if status:
            QMessageBox.information(self, '', _('Payment sent.')+'\n'+msg, _('OK'))
            self.do_clear()
            self.update_contacts_tab()
        else:
            QMessageBox.warning(self, _('Error'), msg, _('OK'))
    else:
        # partially signed (e.g. watching-only): show it for further signing
        self.show_transaction(tx)
    # add recipient to addressbook
    if to_address not in self.wallet.addressbook and not self.wallet.is_mine(to_address):
        self.wallet.addressbook.append(to_address)
def set_url(self, url):
    """Populate the send tab from a bitcoin: URI (address, amount, label, signature)."""
    address, amount, label, message, signature, identity, url = util.parse_url(url)
    try:
        # amount in the URI is in BTC; convert to the display unit
        if amount and self.base_unit() == 'mBTC': amount = str( 1000* Decimal(amount))
        elif amount: amount = str(Decimal(amount))
    except Exception:
        amount = "0.0"
        QMessageBox.warning(self, _('Error'), _('Invalid Amount'), _('OK'))
    if self.mini:
        self.mini.set_payment_fields(address, amount)
    if label and self.wallet.labels.get(address) != label:
        if self.question('Give label "%s" to address %s ?'%(label,address)):
            if address not in self.wallet.addressbook and not self.wallet.is_mine(address):
                self.wallet.addressbook.append(address)
            self.wallet.set_label(address, label)
    run_hook('set_url', url, self.show_message, self.question)
    self.tabs.setCurrentIndex(1)
    label = self.wallet.labels.get(address)
    m_addr = label + ' <'+ address +'>' if label else address
    self.payto_e.setText(m_addr)
    self.message_e.setText(message)
    if amount:
        self.amount_e.setText(amount)
    if identity:
        # signed URI: freeze the fields so the signed payment request cannot be edited
        self.set_frozen(self.payto_e,True)
        self.set_frozen(self.amount_e,True)
        self.set_frozen(self.message_e,True)
        self.payto_sig.setText( ' '+_('The bitcoin URI was signed by')+' ' + identity )
    else:
        self.payto_sig.setVisible(False)
def do_clear(self):
    """Reset the send tab: clear and unfreeze all fields, drop the 'pay from' list."""
    self.payto_sig.setVisible(False)
    for e in [self.payto_e, self.message_e, self.amount_e, self.fee_e]:
        e.setText('')
        self.set_frozen(e,False)
    self.pay_from = []
    # re-emit the tab-change signal so the from-list refreshes
    self.tabs.emit(SIGNAL('currentChanged(int)'), 1)
    self.update_status()
def set_frozen(self,entry,frozen):
    """Make a line-edit read-only with gray background (frozen) or editable again."""
    if frozen:
        entry.setReadOnly(True)
        entry.setFrame(False)
        palette = QPalette()
        palette.setColor(entry.backgroundRole(), QColor('lightgray'))
        entry.setPalette(palette)
    else:
        entry.setReadOnly(False)
        entry.setFrame(True)
        palette = QPalette()
        palette.setColor(entry.backgroundRole(), QColor('white'))
        entry.setPalette(palette)
def set_addrs_frozen(self,addrs,freeze):
    """Freeze or unfreeze the given addresses in the wallet, then refresh the view."""
    for addr in addrs:
        if not addr: continue
        # only toggle addresses whose state actually changes
        if addr in self.wallet.frozen_addresses and not freeze:
            self.wallet.unfreeze(addr)
        elif addr not in self.wallet.frozen_addresses and freeze:
            self.wallet.freeze(addr)
    self.update_receive_tab()
def create_list_tab(self, headers):
    "generic tab creation method"
    # Returns (tree widget, container widget, button hbox) so callers can add buttons.
    l = MyTreeWidget(self)
    l.setColumnCount( len(headers) )
    l.setHeaderLabels( headers )
    w = QWidget()
    vbox = QVBoxLayout()
    w.setLayout(vbox)
    vbox.setMargin(0)
    vbox.setSpacing(0)
    vbox.addWidget(l)
    buttons = QWidget()
    vbox.addWidget(buttons)
    hbox = QHBoxLayout()
    hbox.setMargin(0)
    hbox.setSpacing(0)
    buttons.setLayout(hbox)
    return l,w,hbox
def create_receive_tab(self):
    """Build the Receive tab (address tree with context menu and inline label editing)."""
    l,w,hbox = self.create_list_tab([ _('Address'), _('Label'), _('Balance'), _('Tx')])
    l.setContextMenuPolicy(Qt.CustomContextMenu)
    l.customContextMenuRequested.connect(self.create_receive_menu)
    l.setSelectionMode(QAbstractItemView.ExtendedSelection)
    self.connect(l, SIGNAL('itemDoubleClicked(QTreeWidgetItem*, int)'), lambda a, b: self.address_label_clicked(a,b,l,0,1))
    self.connect(l, SIGNAL('itemChanged(QTreeWidgetItem*, int)'), lambda a,b: self.address_label_changed(a,b,l,0,1))
    self.connect(l, SIGNAL('currentItemChanged(QTreeWidgetItem*, QTreeWidgetItem*)'), lambda a,b: self.current_item_changed(a))
    self.receive_list = l
    self.receive_buttons_hbox = hbox
    hbox.addStretch(1)
    return w
def save_column_widths(self):
    """Persist column widths of the three list tabs to the config (last column excluded)."""
    self.column_widths["receive"] = []
    for i in range(self.receive_list.columnCount() -1):
        self.column_widths["receive"].append(self.receive_list.columnWidth(i))
    self.column_widths["history"] = []
    for i in range(self.history_list.columnCount() - 1):
        self.column_widths["history"].append(self.history_list.columnWidth(i))
    self.column_widths["contacts"] = []
    for i in range(self.contacts_list.columnCount() - 1):
        self.column_widths["contacts"].append(self.contacts_list.columnWidth(i))
    self.config.set_key("column_widths_2", self.column_widths, True)
def create_contacts_tab(self):
    """Build the Contacts tab (address/label/tx-count tree with context menu)."""
    l,w,hbox = self.create_list_tab([_('Address'), _('Label'), _('Tx')])
    l.setContextMenuPolicy(Qt.CustomContextMenu)
    l.customContextMenuRequested.connect(self.create_contact_menu)
    for i,width in enumerate(self.column_widths['contacts']):
        l.setColumnWidth(i, width)
    self.connect(l, SIGNAL('itemDoubleClicked(QTreeWidgetItem*, int)'), lambda a, b: self.address_label_clicked(a,b,l,0,1))
    self.connect(l, SIGNAL('itemChanged(QTreeWidgetItem*, int)'), lambda a,b: self.address_label_changed(a,b,l,0,1))
    self.contacts_list = l
    self.contacts_buttons_hbox = hbox
    hbox.addStretch(1)
    return w
def delete_imported_key(self, addr):
    """Remove an imported private key from the wallet after confirmation."""
    if self.question(_("Do you want to remove")+" %s "%addr +_("from your wallet?")):
        self.wallet.delete_imported_key(addr)
        self.update_receive_tab()
        self.update_history_tab()
def edit_account_label(self, k):
    """Prompt for a new name for account k and store it as its label."""
    text, ok = QInputDialog.getText(self, _('Rename account'), _('Name') + ':', text = self.wallet.labels.get(k,''))
    if ok:
        label = unicode(text)
        self.wallet.set_label(k,label)
        self.update_receive_tab()
def account_set_expanded(self, item, k, b):
    """Expand/collapse an account row and remember the choice for rebuilds."""
    self.accounts_expanded[k] = b
    item.setExpanded(b)
def create_account_menu(self, position, k, item):
    """Context menu for an account header row in the receive list."""
    menu = QMenu()
    if item.isExpanded():
        menu.addAction(_("Minimize"), lambda: self.account_set_expanded(item, k, False))
    else:
        menu.addAction(_("Maximize"), lambda: self.account_set_expanded(item, k, True))
    menu.addAction(_("Rename"), lambda: self.edit_account_label(k))
    menu.addAction(_("View details"), lambda: self.show_account_details(k))
    # pending accounts (no transactions yet) can still be deleted
    if self.wallet.account_is_pending(k):
        menu.addAction(_("Delete"), lambda: self.delete_pending_account(k))
    menu.exec_(self.receive_list.viewport().mapToGlobal(position))
def delete_pending_account(self, k):
    """Delete a pending (not yet used) account and refresh the receive tab."""
    self.wallet.delete_pending_account(k)
    self.update_receive_tab()
def create_receive_menu(self, position):
    """Context menu for the receive list; handles single and multi-selection."""
    # fixme: this function apparently has a side effect.
    # if it is not called the menu pops up several times
    #self.receive_list.selectedIndexes()
    selected = self.receive_list.selectedItems()
    multi_select = len(selected) > 1
    addrs = [unicode(item.text(0)) for item in selected]
    if not multi_select:
        item = self.receive_list.itemAt(position)
        if not item: return
        addr = addrs[0]
        if not is_valid(addr):
            # not an address row: either an account header (data role 32 set) or a group row
            k = str(item.data(0,32).toString())
            if k:
                self.create_account_menu(position, k, item)
            else:
                item.setExpanded(not item.isExpanded())
            return
    menu = QMenu()
    if not multi_select:
        menu.addAction(_("Copy to clipboard"), lambda: self.app.clipboard().setText(addr))
        menu.addAction(_("QR code"), lambda: self.show_qrcode("bitcoin:" + addr, _("Address")) )
        menu.addAction(_("Edit label"), lambda: self.edit_label(True))
        if self.wallet.seed:
            menu.addAction(_("Private key"), lambda: self.show_private_key(addr))
            menu.addAction(_("Sign message"), lambda: self.sign_message(addr))
        if addr in self.wallet.imported_keys:
            menu.addAction(_("Remove from wallet"), lambda: self.delete_imported_key(addr))
    # freeze/unfreeze offered only when at least one address would change state
    if any(addr not in self.wallet.frozen_addresses for addr in addrs):
        menu.addAction(_("Freeze"), lambda: self.set_addrs_frozen(addrs, True))
    if any(addr in self.wallet.frozen_addresses for addr in addrs):
        menu.addAction(_("Unfreeze"), lambda: self.set_addrs_frozen(addrs, False))
    menu.addAction(_("Send From"), lambda: self.send_from_addresses(addrs))
    run_hook('receive_menu', menu, addrs)
    menu.exec_(self.receive_list.viewport().mapToGlobal(position))
def get_sendable_balance(self):
    """Total (confirmed + unconfirmed) balance across the current payment sources."""
    total = 0
    for address in self.get_payment_sources():
        confirmed, unconfirmed = self.wallet.get_addr_balance(address)
        total += confirmed + unconfirmed
    return total
def get_payment_sources(self):
    """Addresses to fund payments from: the explicit 'pay from' list if set, else the current account's addresses."""
    if not self.pay_from:
        return self.wallet.get_account_addresses(self.current_account)
    return self.pay_from
def send_from_addresses(self, addrs):
    """Preselect the given addresses as coin sources and switch to the Send tab."""
    self.pay_from = list(addrs)
    self.tabs.setCurrentIndex(1)
def payto(self, addr):
    """Prefill the Send tab's recipient field with addr (labelled if known)."""
    if not addr: return
    label = self.wallet.labels.get(addr)
    m_addr = label + ' <' + addr + '>' if label else addr
    self.tabs.setCurrentIndex(1)
    self.payto_e.setText(m_addr)
    self.amount_e.setFocus()
def delete_contact(self, x):
    """Remove a contact (and its label) after confirmation, then refresh views."""
    if self.question(_("Do you want to remove")+" %s "%x +_("from your list of contacts?")):
        self.wallet.delete_contact(x)
        self.wallet.set_label(x, None)
        self.update_history_tab()
        self.update_contacts_tab()
        self.update_completions()
def create_contact_menu(self, position):
    """Context menu for a contacts-list row."""
    item = self.contacts_list.itemAt(position)
    if not item: return
    addr = unicode(item.text(0))
    label = unicode(item.text(1))
    # data role 32: row editable flag; role 33: payto target string
    is_editable = item.data(0,32).toBool()
    payto_addr = item.data(0,33).toString()
    menu = QMenu()
    menu.addAction(_("Copy to Clipboard"), lambda: self.app.clipboard().setText(addr))
    menu.addAction(_("Pay to"), lambda: self.payto(payto_addr))
    menu.addAction(_("QR code"), lambda: self.show_qrcode("bitcoin:" + addr, _("Address")))
    if is_editable:
        menu.addAction(_("Edit label"), lambda: self.edit_label(False))
        menu.addAction(_("Delete"), lambda: self.delete_contact(addr))
    run_hook('create_contact_menu', menu, item)
    menu.exec_(self.contacts_list.viewport().mapToGlobal(position))
def update_receive_item(self, item):
    """Fill in label/balance/styling for one address row in the receive list."""
    item.setFont(0, QFont(MONOSPACE_FONT))
    address = str(item.data(0,0).toString())
    label = self.wallet.labels.get(address,'')
    item.setData(1,0,label)
    item.setData(0,32, True) # is editable
    run_hook('update_receive_item', address, item)
    # balance/freeze styling only applies to the wallet's own addresses
    if not self.wallet.is_mine(address): return
    c, u = self.wallet.get_addr_balance(address)
    balance = self.format_amount(c + u)
    item.setData(2,0,balance)
    if address in self.wallet.frozen_addresses:
        item.setBackgroundColor(0, QColor('lightblue'))
def update_receive_tab(self):
    """Rebuild the receive-address tree.

    One top-level row per account (plus pending accounts and imported keys),
    with Receiving/Change/Used sub-groups under each account.
    """
    l = self.receive_list
    l.clear()
    l.setColumnHidden(2, False)
    l.setColumnHidden(3, False)
    for i,width in enumerate(self.column_widths['receive']):
        l.setColumnWidth(i, width)
    # which accounts to show: all (None), one, or none (-1 = imported-only view)
    if self.current_account is None:
        account_items = self.wallet.accounts.items()
    elif self.current_account != -1:
        account_items = [(self.current_account, self.wallet.accounts.get(self.current_account))]
    else:
        account_items = []
    for k, account in account_items:
        name = self.wallet.get_account_name(k)
        c,u = self.wallet.get_account_balance(k)
        account_item = QTreeWidgetItem( [ name, '', self.format_amount(c+u), ''] )
        l.addTopLevelItem(account_item)
        account_item.setExpanded(self.accounts_expanded.get(k, True))
        account_item.setData(0, 32, k)
        if not self.wallet.is_seeded(k):
            icon = QIcon(":icons/key.png")
            account_item.setIcon(0, icon)
        for is_change in ([0,1]):
            name = _("Receiving") if not is_change else _("Change")
            seq_item = QTreeWidgetItem( [ name, '', '', '', ''] )
            account_item.addChild(seq_item)
            used_item = QTreeWidgetItem( [ _("Used"), '', '', '', ''] )
            used_flag = False
            if not is_change: seq_item.setExpanded(True)
            # addresses past the gap limit with no history are highlighted red
            is_red = False
            gap = 0
            for address in account.get_addresses(is_change):
                h = self.wallet.history.get(address,[])
                if h == []:
                    gap += 1
                    if gap > self.wallet.gap_limit:
                        is_red = True
                else:
                    gap = 0
                c, u = self.wallet.get_addr_balance(address)
                num_tx = '*' if h == ['*'] else "%d"%len(h)
                item = QTreeWidgetItem( [ address, '', '', num_tx] )
                self.update_receive_item(item)
                if is_red:
                    item.setBackgroundColor(1, QColor('red'))
                # fully spent addresses (balance back to zero) go under "Used"
                if len(h) > 0 and c == -u:
                    if not used_flag:
                        seq_item.addChild(used_item)
                        used_flag = True
                    used_item.addChild(item)
                else:
                    seq_item.addChild(item)
    for k, addr in self.wallet.get_pending_accounts():
        name = self.wallet.labels.get(k,'')
        account_item = QTreeWidgetItem( [ name + " [ "+_('pending account')+" ]", '', '', ''] )
        # bug fix: removed a stray update_receive_item(item) call here that used
        # the stale `item` from the previous loop (NameError when no prior rows);
        # the row's real update happens below after `item` is created
        l.addTopLevelItem(account_item)
        account_item.setExpanded(True)
        account_item.setData(0, 32, k)
        item = QTreeWidgetItem( [ addr, '', '', '', ''] )
        account_item.addChild(item)
        self.update_receive_item(item)
    if self.wallet.imported_keys and (self.current_account is None or self.current_account == -1):
        c,u = self.wallet.get_imported_balance()
        account_item = QTreeWidgetItem( [ _('Imported'), '', self.format_amount(c+u), ''] )
        l.addTopLevelItem(account_item)
        account_item.setExpanded(True)
        for address in self.wallet.imported_keys.keys():
            item = QTreeWidgetItem( [ address, '', '', ''] )
            self.update_receive_item(item)
            account_item.addChild(item)
    # we use column 1 because column 0 may be hidden
    l.setCurrentItem(l.topLevelItem(0),1)
    def update_contacts_tab(self):
        """Rebuild the Contacts tab list from the wallet's address book.

        For each saved address, shows the address, its user label and the
        number of transactions seen for it, then lets plugins extend the
        list via the 'update_contacts_tab' hook.
        """
        l = self.contacts_list
        l.clear()
        for address in self.wallet.addressbook:
            label = self.wallet.labels.get(address,'')
            n = self.wallet.get_num_tx(address)
            item = QTreeWidgetItem( [ address, label, "%d"%n] )
            item.setFont(0, QFont(MONOSPACE_FONT))
            # 32 = label can be edited (bool)
            item.setData(0,32, True)
            # 33 = payto string
            item.setData(0,33, address)
            l.addTopLevelItem(item)
        run_hook('update_contacts_tab', l)
        l.setCurrentItem(l.topLevelItem(0))
    def create_console_tab(self):
        """Create the interactive Python console widget and return it."""
        # Local import: the console module pulls in Qt widgets and is only
        # needed once the tab is actually built.
        from console import Console
        self.console = console = Console()
        return console
    def update_console(self):
        """Populate the console namespace with wallet, network and commands.

        Exposes every public method of commands.Commands as a console-level
        function; each is wrapped so that it runs through c._run with the
        password dialog available for protected commands.
        """
        console = self.console
        console.history = self.config.get("console-history",[])
        console.history_index = len(console.history)
        console.updateNamespace({'wallet' : self.wallet, 'network' : self.network, 'gui':self})
        console.updateNamespace({'util' : util, 'bitcoin':bitcoin})
        c = commands.Commands(self.wallet, self.network, lambda: self.console.set_json(True))
        methods = {}
        def mkfunc(f, method):
            # Bind 'method' via a closure factory so each generated lambda
            # keeps its own command name (avoids the late-binding pitfall).
            return lambda *args: apply( f, (method, args, self.password_dialog ))
        for m in dir(c):
            # Skip private attributes and the wallet/network objects themselves.
            if m[0]=='_' or m in ['network','wallet']: continue
            methods[m] = mkfunc(c._run, m)
        console.updateNamespace(methods)
def change_account(self,s):
if s == _("All accounts"):
self.current_account = None
else:
accounts = self.wallet.get_account_names()
for k, v in accounts.items():
if v == s:
self.current_account = k
self.update_history_tab()
self.update_status()
self.update_receive_tab()
    def create_status_bar(self):
        """Build the main-window status bar: balance label, update notifier,
        account selector and the permanent tool buttons (lite-mode switch,
        password, preferences, seed, network), then let plugins add theirs.
        """
        sb = QStatusBar()
        sb.setFixedHeight(35)
        qtVersion = qVersion()
        self.balance_label = QLabel("")
        sb.addWidget(self.balance_label)
        from version_getter import UpdateLabel
        self.updatelabel = UpdateLabel(self.config, sb)
        self.account_selector = QComboBox()
        self.connect(self.account_selector,SIGNAL("activated(QString)"),self.change_account)
        sb.addPermanentWidget(self.account_selector)
        # NOTE(review): compares single characters of the "x.y" version string,
        # so a two-digit minor version like "4.10" reads as 1 and hides the
        # lite-mode button -- confirm whether that is intentional.
        if (int(qtVersion[0]) >= 4 and int(qtVersion[2]) >= 7):
            sb.addPermanentWidget( StatusBarButton( QIcon(":icons/switchgui.png"), _("Switch to Lite Mode"), self.go_lite ) )
        # The actual lock/unlock icon is set later by update_lock_icon().
        self.lock_icon = QIcon()
        self.password_button = StatusBarButton( self.lock_icon, _("Password"), self.change_password_dialog )
        sb.addPermanentWidget( self.password_button )
        sb.addPermanentWidget( StatusBarButton( QIcon(":icons/preferences.png"), _("Preferences"), self.settings_dialog ) )
        self.seed_button = StatusBarButton( QIcon(":icons/seed.png"), _("Seed"), self.show_seed_dialog )
        sb.addPermanentWidget( self.seed_button )
        self.status_button = StatusBarButton( QIcon(":icons/status_disconnected.png"), _("Network"), self.run_network_dialog )
        sb.addPermanentWidget( self.status_button )
        run_hook('create_status_bar', (sb,))
        self.setStatusBar(sb)
def update_lock_icon(self):
icon = QIcon(":icons/lock.png") if self.wallet.use_encryption else QIcon(":icons/unlock.png")
self.password_button.setIcon( icon )
def update_buttons_on_seed(self):
if not self.wallet.is_watching_only():
self.seed_button.show()
self.password_button.show()
self.send_button.setText(_("Send"))
else:
self.password_button.hide()
self.seed_button.hide()
self.send_button.setText(_("Create unsigned transaction"))
    def change_password_dialog(self):
        """Open the change-password dialog and refresh the lock icon after."""
        from password_dialog import PasswordDialog
        d = PasswordDialog(self.wallet, self)
        d.run()
        self.update_lock_icon()
    def new_contact_dialog(self):
        """Prompt for an address and optional name and add it to contacts.

        Validates the address, stores it in the wallet address book, updates
        the contacts/history tabs and completions, then switches to the
        Contacts tab (index 3).
        """
        d = QDialog(self)
        vbox = QVBoxLayout(d)
        vbox.addWidget(QLabel(_('New Contact')+':'))
        grid = QGridLayout()
        line1 = QLineEdit()
        line2 = QLineEdit()
        grid.addWidget(QLabel(_("Address")), 1, 0)
        grid.addWidget(line1, 1, 1)
        grid.addWidget(QLabel(_("Name")), 2, 0)
        grid.addWidget(line2, 2, 1)
        vbox.addLayout(grid)
        vbox.addLayout(ok_cancel_buttons(d))
        if not d.exec_():
            return
        address = str(line1.text())
        label = unicode(line2.text())
        if not is_valid(address):
            QMessageBox.warning(self, _('Error'), _('Invalid Address'), _('OK'))
            return
        self.wallet.add_contact(address)
        if label:
            self.wallet.set_label(address, label)
        self.update_contacts_tab()
        self.update_history_tab()
        self.update_completions()
        self.tabs.setCurrentIndex(3)
    def new_account_dialog(self):
        """Ask for a name and create a 'pending' account on derivation '1'.

        The account stays pending until it receives coins (see the note
        shown in the dialog); afterwards the Receive tab (index 2) is shown.
        """
        dialog = QDialog(self)
        dialog.setModal(1)
        dialog.setWindowTitle(_("New Account"))
        vbox = QVBoxLayout()
        vbox.addWidget(QLabel(_('Account name')+':'))
        e = QLineEdit()
        vbox.addWidget(e)
        msg = _("Note: Newly created accounts are 'pending' until they receive bitcoins.") + " " \
            + _("You will need to wait for 2 confirmations until the correct balance is displayed and more addresses are created for that account.")
        l = QLabel(msg)
        l.setWordWrap(True)
        vbox.addWidget(l)
        vbox.addLayout(ok_cancel_buttons(dialog))
        dialog.setLayout(vbox)
        r = dialog.exec_()
        if not r: return
        name = str(e.text())
        if not name: return
        self.wallet.create_pending_account('1', name)
        self.update_receive_tab()
        self.tabs.setCurrentIndex(2)
def show_master_public_key_old(self):
dialog = QDialog(self)
dialog.setModal(1)
dialog.setWindowTitle(_("Master Public Key"))
main_text = QTextEdit()
main_text.setText(self.wallet.get_master_public_key())
main_text.setReadOnly(True)
main_text.setMaximumHeight(170)
qrw = QRCodeWidget(self.wallet.get_master_public_key())
ok_button = QPushButton(_("OK"))
ok_button.setDefault(True)
ok_button.clicked.connect(dialog.accept)
main_layout = QGridLayout()
main_layout.addWidget(QLabel(_('Your Master Public Key is:')), 0, 0, 1, 2)
main_layout.addWidget(main_text, 1, 0)
main_layout.addWidget(qrw, 1, 1 )
vbox = QVBoxLayout()
vbox.addLayout(main_layout)
vbox.addLayout(close_button(dialog))
dialog.setLayout(vbox)
dialog.exec_()
    def show_master_public_key(self):
        """Show master public keys (key + chain code) per derivation.

        Legacy seed-version-4 wallets are delegated to
        show_master_public_key_old().  A combo box selects the derivation;
        the update() closure refreshes both text fields and QR codes.
        """
        if self.wallet.seed_version == 4:
            self.show_master_public_key_old()
            return
        dialog = QDialog(self)
        dialog.setModal(1)
        dialog.setWindowTitle(_("Master Public Keys"))
        chain_text = QTextEdit()
        chain_text.setReadOnly(True)
        chain_text.setMaximumHeight(170)
        chain_qrw = QRCodeWidget()
        mpk_text = QTextEdit()
        mpk_text.setReadOnly(True)
        mpk_text.setMaximumHeight(170)
        mpk_qrw = QRCodeWidget()
        main_layout = QGridLayout()
        main_layout.addWidget(QLabel(_('Key')), 1, 0)
        main_layout.addWidget(mpk_text, 1, 1)
        main_layout.addWidget(mpk_qrw, 1, 2)
        main_layout.addWidget(QLabel(_('Chain')), 2, 0)
        main_layout.addWidget(chain_text, 2, 1)
        main_layout.addWidget(chain_qrw, 2, 2)
        def update(key):
            # master_public_keys maps derivation -> (chain, K, cK).
            c, K, cK = self.wallet.master_public_keys[str(key)]
            chain_text.setText(c)
            chain_qrw.set_addr(c)
            chain_qrw.update_qr()
            mpk_text.setText(K)
            mpk_qrw.set_addr(K)
            mpk_qrw.update_qr()
        key_selector = QComboBox()
        keys = sorted(self.wallet.master_public_keys.keys())
        key_selector.addItems(keys)
        main_layout.addWidget(QLabel(_('Derivation:')), 0, 0)
        main_layout.addWidget(key_selector, 0, 1)
        dialog.connect(key_selector,SIGNAL("activated(QString)"),update)
        # Pre-select and display the first derivation.
        update(keys[0])
        vbox = QVBoxLayout()
        vbox.addLayout(main_layout)
        vbox.addLayout(close_button(dialog))
        dialog.setLayout(vbox)
        dialog.exec_()
    @protected
    def show_seed_dialog(self, password):
        """Show the wallet seed (or master private keys for seedless wallets).

        The @protected decorator prompts for the password and appends it as
        the final argument.  Watching-only wallets have nothing to show.
        """
        if self.wallet.is_watching_only():
            QMessageBox.information(self, _('Message'), _('This is a watching-only wallet'), _('OK'))
            return
        if self.wallet.seed:
            try:
                mnemonic = self.wallet.get_mnemonic(password)
            except Exception:
                QMessageBox.warning(self, _('Error'), _('Incorrect Password'), _('OK'))
                return
            from seed_dialog import SeedDialog
            d = SeedDialog(self, mnemonic, self.wallet.imported_keys)
            d.exec_()
        else:
            # No seed: decrypt and show each master private key instead.
            l = {}
            for k in self.wallet.master_private_keys.keys():
                pk = self.wallet.get_master_private_key(k, password)
                l[k] = pk
            from seed_dialog import PrivateKeysDialog
            d = PrivateKeysDialog(self,l)
            d.exec_()
    def show_qrcode(self, data, title = _("QR code")):
        """Display *data* as a QR code with Copy / Save / Close buttons.

        Copy renders the QR to a temporary BMP under the config directory
        and puts the image on the clipboard; Save keeps it on disk.
        """
        if not data: return
        d = QDialog(self)
        d.setModal(1)
        d.setWindowTitle(title)
        d.setMinimumSize(270, 300)
        vbox = QVBoxLayout()
        qrw = QRCodeWidget(data)
        vbox.addWidget(qrw, 1)
        vbox.addWidget(QLabel(data), 0, Qt.AlignHCenter)
        hbox = QHBoxLayout()
        hbox.addStretch(1)
        filename = os.path.join(self.config.path, "qrcode.bmp")
        def print_qr():
            bmp.save_qrcode(qrw.qr, filename)
            QMessageBox.information(None, _('Message'), _("QR code saved to file") + " " + filename, _('OK'))
        def copy_to_clipboard():
            # The clipboard path still goes through the BMP file on disk.
            bmp.save_qrcode(qrw.qr, filename)
            self.app.clipboard().setImage(QImage(filename))
            QMessageBox.information(None, _('Message'), _("QR code saved to clipboard"), _('OK'))
        b = QPushButton(_("Copy"))
        hbox.addWidget(b)
        b.clicked.connect(copy_to_clipboard)
        b = QPushButton(_("Save"))
        hbox.addWidget(b)
        b.clicked.connect(print_qr)
        b = QPushButton(_("Close"))
        hbox.addWidget(b)
        b.clicked.connect(d.accept)
        b.setDefault(True)
        vbox.addLayout(hbox)
        d.setLayout(vbox)
        d.exec_()
    def do_protect(self, func, args):
        """Run *func* with the wallet password appended (used by @protected).

        Prompts for the password only if the wallet is encrypted; aborts
        silently when the user cancels the dialog.
        """
        if self.wallet.use_encryption:
            password = self.password_dialog()
            if not password:
                return
        else:
            password = None
        # NOTE(review): (False,) appears to be the 'checked' boolean a Qt
        # signal passes when the method is used as a slot with no real
        # arguments -- confirm against the connect() call sites.
        if args != (False,):
            args = (self,) + args + (password,)
        else:
            args = (self,password)
        apply( func, args)
    @protected
    def show_private_key(self, address, password):
        """Decrypt and display the private key(s) for *address*.

        The @protected decorator supplies *password*.  Decryption errors
        (e.g. wrong password) are shown to the user instead of raised.
        """
        if not address: return
        try:
            pk_list = self.wallet.get_private_key(address, password)
        except Exception as e:
            self.show_message(str(e))
            return
        QMessageBox.information(self, _('Private key'), _('Address')+ ': ' + address + '\n\n' + _('Private key') + ': ' + '\n'.join(pk_list), _('OK'))
    @protected
    def do_sign(self, address, message, signature, password):
        """Sign the text of *message* with *address*'s key into *signature*.

        *address* is a QLineEdit, *message* and *signature* are QTextEdits;
        the @protected decorator supplies *password*.  The message is signed
        as UTF-8 bytes; signing errors are shown to the user.
        """
        message = unicode(message.toPlainText())
        message = message.encode('utf-8')
        try:
            sig = self.wallet.sign_message(str(address.text()), message, password)
            signature.setText(sig)
        except Exception as e:
            self.show_message(str(e))
    def sign_message(self, address):
        """Open the two-tab Sign/Verify message dialog, pre-filled with
        *address* on the Sign tab.

        The Sign tab delegates to do_sign() (password-protected); the
        Verify tab checks a signature locally via bitcoin.verify_message.
        """
        if not address: return
        d = QDialog(self)
        d.setModal(1)
        d.setWindowTitle(_('Sign Message'))
        d.setMinimumSize(410, 290)
        tab_widget = QTabWidget()
        # --- Sign tab ---
        tab = QWidget()
        layout = QGridLayout(tab)
        sign_address = QLineEdit()
        sign_address.setText(address)
        layout.addWidget(QLabel(_('Address')), 1, 0)
        layout.addWidget(sign_address, 1, 1)
        sign_message = QTextEdit()
        layout.addWidget(QLabel(_('Message')), 2, 0)
        layout.addWidget(sign_message, 2, 1)
        layout.setRowStretch(2,3)
        sign_signature = QTextEdit()
        layout.addWidget(QLabel(_('Signature')), 3, 0)
        layout.addWidget(sign_signature, 3, 1)
        layout.setRowStretch(3,1)
        hbox = QHBoxLayout()
        b = QPushButton(_("Sign"))
        hbox.addWidget(b)
        b.clicked.connect(lambda: self.do_sign(sign_address, sign_message, sign_signature))
        b = QPushButton(_("Close"))
        b.clicked.connect(d.accept)
        hbox.addWidget(b)
        layout.addLayout(hbox, 4, 1)
        tab_widget.addTab(tab, _("Sign"))
        # --- Verify tab ---
        tab = QWidget()
        layout = QGridLayout(tab)
        verify_address = QLineEdit()
        layout.addWidget(QLabel(_('Address')), 1, 0)
        layout.addWidget(verify_address, 1, 1)
        verify_message = QTextEdit()
        layout.addWidget(QLabel(_('Message')), 2, 0)
        layout.addWidget(verify_message, 2, 1)
        layout.setRowStretch(2,3)
        verify_signature = QTextEdit()
        layout.addWidget(QLabel(_('Signature')), 3, 0)
        layout.addWidget(verify_signature, 3, 1)
        layout.setRowStretch(3,1)
        def do_verify():
            # Verification is local; the message is checked as UTF-8 bytes.
            message = unicode(verify_message.toPlainText())
            message = message.encode('utf-8')
            if bitcoin.verify_message(verify_address.text(), str(verify_signature.toPlainText()), message):
                self.show_message(_("Signature verified"))
            else:
                self.show_message(_("Error: wrong signature"))
        hbox = QHBoxLayout()
        b = QPushButton(_("Verify"))
        b.clicked.connect(do_verify)
        hbox.addWidget(b)
        b = QPushButton(_("Close"))
        b.clicked.connect(d.accept)
        hbox.addWidget(b)
        layout.addLayout(hbox, 4, 1)
        tab_widget.addTab(tab, _("Verify"))
        vbox = QVBoxLayout()
        vbox.addWidget(tab_widget)
        d.setLayout(vbox)
        d.exec_()
def question(self, msg):
return QMessageBox.question(self, _('Message'), msg, QMessageBox.Yes | QMessageBox.No, QMessageBox.No) == QMessageBox.Yes
    def show_message(self, msg):
        """Display *msg* in a modal information box with an OK button."""
        QMessageBox.information(self, _('Message'), msg, _('OK'))
    def password_dialog(self ):
        """Prompt for the wallet password.

        Returns the entered password as unicode, or None when the dialog is
        cancelled.  Plugins may extend the grid via the 'password_dialog'
        hook.
        """
        d = QDialog(self)
        d.setModal(1)
        pw = QLineEdit()
        # Echo mode 2 = QLineEdit.Password (characters are masked).
        pw.setEchoMode(2)
        vbox = QVBoxLayout()
        msg = _('Please enter your password')
        vbox.addWidget(QLabel(msg))
        grid = QGridLayout()
        grid.setSpacing(8)
        grid.addWidget(QLabel(_('Password')), 1, 0)
        grid.addWidget(pw, 1, 1)
        vbox.addLayout(grid)
        vbox.addLayout(ok_cancel_buttons(d))
        d.setLayout(vbox)
        run_hook('password_dialog', pw, grid, 1)
        if not d.exec_(): return
        return unicode(pw.text())
    def tx_from_text(self, txt):
        """Parse *txt* as a transaction and return it, or None on failure.

        Accepts either raw hexadecimal or the JSON envelope produced by
        Electrum ({"hex": ..., "complete": ..., ["input_info": ...]}).
        Incomplete transactions get their input_info attached so they can
        be signed later.  Shows an error box when neither format matches.
        """
        try:
            # Cheap validity probe: raises if txt is not valid hex.
            txt.decode('hex')
            tx = Transaction(txt)
            return tx
        except Exception:
            pass
        try:
            tx_dict = json.loads(str(txt))
            assert "hex" in tx_dict.keys()
            assert "complete" in tx_dict.keys()
            tx = Transaction(tx_dict["hex"], tx_dict["complete"])
            if not tx_dict["complete"]:
                assert "input_info" in tx_dict.keys()
                input_info = json.loads(tx_dict['input_info'])
                tx.add_input_info(input_info)
            return tx
        except Exception:
            pass
        QMessageBox.critical(None, _("Unable to parse transaction"), _("Electrum was unable to parse your transaction"))
    def read_tx_from_file(self):
        """Let the user pick a .txn file and parse it via tx_from_text().

        Returns the Transaction, or None when the dialog is cancelled, the
        file cannot be read, or parsing fails.
        """
        fileName = self.getOpenFileName(_("Select your transaction file"), "*.txn")
        if not fileName:
            return
        try:
            with open(fileName, "r") as f:
                file_content = f.read()
        except (ValueError, IOError, os.error), reason:
            QMessageBox.critical(None, _("Unable to read file or no transaction found"), _("Electrum was unable to open your transaction file") + "\n" + str(reason))
        return self.tx_from_text(file_content)
    @protected
    def sign_raw_transaction(self, tx, input_info, password):
        """Sign *tx* in place; the @protected decorator supplies *password*."""
        self.wallet.signrawtransaction(tx, input_info, [], password)
def do_process_from_text(self):
text = text_dialog(self, _('Input raw transaction'), _("Transaction:"), _("Load transaction"))
if not text:
return
tx = self.tx_from_text(text)
if tx:
self.show_transaction(tx)
def do_process_from_file(self):
tx = self.read_tx_from_file()
if tx:
self.show_transaction(tx)
    def do_process_from_csvReader(self, csvReader):
        """Build and show an unsigned transaction from CSV rows.

        Each row is (address, amount_in_BTC); amounts are converted to
        satoshis (1 BTC = 100,000,000) via Decimal to avoid float error.
        Read/parse errors and wallet errors are reported to the user.
        """
        outputs = []
        try:
            for row in csvReader:
                address = row[0]
                amount = Decimal(row[1])
                amount = int(100000000*amount)
                outputs.append((address, amount))
        except (ValueError, IOError, os.error), reason:
            QMessageBox.critical(None, _("Unable to read file or no transaction found"), _("Electrum was unable to open your transaction file") + "\n" + str(reason))
            return
        try:
            tx = self.wallet.make_unsigned_transaction(outputs, None, None)
        except Exception as e:
            self.show_message(str(e))
            return
        self.show_transaction(tx)
    def do_process_from_csv_file(self):
        """Pick a CSV file of outputs and hand it to do_process_from_csvReader."""
        fileName = self.getOpenFileName(_("Select your transaction CSV"), "*.csv")
        if not fileName:
            return
        try:
            with open(fileName, "r") as f:
                csvReader = csv.reader(f)
                self.do_process_from_csvReader(csvReader)
        except (ValueError, IOError, os.error), reason:
            QMessageBox.critical(None, _("Unable to read file or no transaction found"), _("Electrum was unable to open your transaction file") + "\n" + str(reason))
            return
    def do_process_from_csv_text(self):
        """Prompt for CSV text ("address, amount" per line) and process it."""
        text = text_dialog(self, _('Input CSV'), _("Please enter a list of outputs.") + '\n' \
                               + _("Format: address, amount. One output per line"), _("Load CSV"))
        if not text:
            return
        f = StringIO.StringIO(text)
        csvReader = csv.reader(f)
        self.do_process_from_csvReader(csvReader)
    @protected
    def do_export_privkeys(self, password):
        """Export every private key of a seeded wallet to a CSV file.

        The @protected decorator supplies *password*.  The user is warned
        first, then asked for a destination; rows are (address, key).
        """
        if not self.wallet.seed:
            self.show_message(_("This wallet has no seed"))
            return
        self.show_message("%s\n%s\n%s" % (_("WARNING: ALL your private keys are secret."), _("Exposing a single private key can compromise your entire wallet!"), _("In particular, DO NOT use 'redeem private key' services proposed by third parties.")))
        try:
            select_export = _('Select file to export your private keys to')
            fileName = self.getSaveFileName(select_export, 'electrum-private-keys.csv', "*.csv")
            if fileName:
                with open(fileName, "w+") as csvfile:
                    transaction = csv.writer(csvfile)
                    transaction.writerow(["address", "private_key"])
                    # True -> include change addresses as well.
                    addresses = self.wallet.addresses(True)
                    for addr in addresses:
                        pk = "".join(self.wallet.get_private_key(addr, password))
                        transaction.writerow(["%34s"%addr,pk])
                    self.show_message(_("Private keys exported."))
        except (IOError, os.error), reason:
            export_error_label = _("Electrum was unable to produce a private key-export.")
            QMessageBox.critical(None, _("Unable to create csv"), export_error_label + "\n" + str(reason))
        except Exception as e:
            self.show_message(str(e))
            return
    def do_import_labels(self):
        """Import labels from a JSON .dat file into the wallet."""
        labelsFile = self.getOpenFileName(_("Open labels file"), "*.dat")
        if not labelsFile: return
        try:
            f = open(labelsFile, 'r')
            data = f.read()
            f.close()
            # The file is a flat JSON object: {key: label}.
            for key, value in json.loads(data).items():
                self.wallet.set_label(key, value)
            QMessageBox.information(None, _("Labels imported"), _("Your labels were imported from")+" '%s'" % str(labelsFile))
        except (IOError, os.error), reason:
            QMessageBox.critical(None, _("Unable to import labels"), _("Electrum was unable to import your labels.")+"\n" + str(reason))
def do_export_labels(self):
labels = self.wallet.labels
try:
fileName = self.getSaveFileName(_("Select file to save your labels"), 'electrum_labels.dat', "*.dat")
if fileName:
with open(fileName, 'w+') as f:
json.dump(labels, f)
QMessageBox.information(None, _("Labels exported"), _("Your labels where exported to")+" '%s'" % str(fileName))
except (IOError, os.error), reason:
QMessageBox.critical(None, _("Unable to export labels"), _("Electrum was unable to export your labels.")+"\n" + str(reason))
    def do_export_history(self):
        """Export the wallet transaction history via the lite-window CSV helper."""
        from lite_window import csv_transaction
        csv_transaction(self.wallet)
    @protected
    def do_import_privkey(self, password):
        """Import one or more private keys pasted by the user.

        The @protected decorator supplies *password*.  On first import the
        user is warned that imported keys are not covered by the seed.
        Successes and failures are reported separately, then the receive
        and history tabs are refreshed.
        """
        if not self.wallet.imported_keys:
            # 3/4 are the legacy Yes/No button codes of this QMessageBox API.
            r = QMessageBox.question(None, _('Warning'), '<b>'+_('Warning') +':\n</b><br/>'+ _('Imported keys are not recoverable from seed.') + ' ' \
                                         + _('If you ever need to restore your wallet from its seed, these keys will be lost.') + '<p>' \
                                         + _('Are you sure you understand what you are doing?'), 3, 4)
            if r == 4: return
        text = text_dialog(self, _('Import private keys'), _("Enter private keys")+':', _("Import"))
        if not text: return
        text = str(text).split()
        badkeys = []
        addrlist = []
        for key in text:
            try:
                addr = self.wallet.import_key(key, password)
            except Exception as e:
                badkeys.append(key)
                continue
            if not addr:
                badkeys.append(key)
            else:
                addrlist.append(addr)
        if addrlist:
            QMessageBox.information(self, _('Information'), _("The following addresses were added") + ':\n' + '\n'.join(addrlist))
        if badkeys:
            QMessageBox.critical(self, _('Error'), _("The following inputs could not be imported") + ':\n'+ '\n'.join(badkeys))
        self.update_receive_tab()
        self.update_history_tab()
    def settings_dialog(self):
        """Open the preferences dialog and apply the chosen settings.

        Settings handled: displayed zeros, GUI language (needs restart),
        per-kb transaction fee, base unit (BTC/mBTC) and use of change
        addresses.  Options locked by the config are shown disabled.
        """
        d = QDialog(self)
        d.setWindowTitle(_('Electrum Settings'))
        d.setModal(1)
        vbox = QVBoxLayout()
        grid = QGridLayout()
        grid.setColumnStretch(0,1)
        # Row 0: number of trailing zeros shown after the decimal point.
        nz_label = QLabel(_('Display zeros') + ':')
        grid.addWidget(nz_label, 0, 0)
        nz_e = AmountEdit(None,True)
        nz_e.setText("%d"% self.num_zeros)
        grid.addWidget(nz_e, 0, 1)
        msg = _('Number of zeros displayed after the decimal point. For example, if this is set to 2, "1." will be displayed as "1.00"')
        grid.addWidget(HelpButton(msg), 0, 2)
        if not self.config.is_modifiable('num_zeros'):
            for w in [nz_e, nz_label]: w.setEnabled(False)
        # Row 1: GUI language.
        lang_label=QLabel(_('Language') + ':')
        grid.addWidget(lang_label, 1, 0)
        lang_combo = QComboBox()
        from electrum.i18n import languages
        lang_combo.addItems(languages.values())
        try:
            index = languages.keys().index(self.config.get("language",''))
        except Exception:
            index = 0
        lang_combo.setCurrentIndex(index)
        grid.addWidget(lang_combo, 1, 1)
        grid.addWidget(HelpButton(_('Select which language is used in the GUI (after restart).')+' '), 1, 2)
        if not self.config.is_modifiable('language'):
            for w in [lang_combo, lang_label]: w.setEnabled(False)
        # Row 2: per-kilobyte transaction fee.
        fee_label = QLabel(_('Transaction fee') + ':')
        grid.addWidget(fee_label, 2, 0)
        fee_e = AmountEdit(self.base_unit)
        fee_e.setText(self.format_amount(self.wallet.fee).strip())
        grid.addWidget(fee_e, 2, 1)
        msg = _('Fee per kilobyte of transaction.') + ' ' \
            + _('Recommended value') + ': ' + self.format_amount(20000)
        grid.addWidget(HelpButton(msg), 2, 2)
        if not self.config.is_modifiable('fee_per_kb'):
            for w in [fee_e, fee_label]: w.setEnabled(False)
        # Row 3: base display unit.
        units = ['BTC', 'mBTC']
        unit_label = QLabel(_('Base unit') + ':')
        grid.addWidget(unit_label, 3, 0)
        unit_combo = QComboBox()
        unit_combo.addItems(units)
        unit_combo.setCurrentIndex(units.index(self.base_unit()))
        grid.addWidget(unit_combo, 3, 1)
        grid.addWidget(HelpButton(_('Base unit of your wallet.')\
            + '\n1BTC=1000mBTC.\n' \
            + _(' This settings affects the fields in the Send tab')+' '), 3, 2)
        # Row 4: whether spends send change to dedicated change addresses.
        usechange_cb = QCheckBox(_('Use change addresses'))
        usechange_cb.setChecked(self.wallet.use_change)
        grid.addWidget(usechange_cb, 4, 0)
        grid.addWidget(HelpButton(_('Using change addresses makes it more difficult for other people to track your transactions.')+' '), 4, 2)
        if not self.config.is_modifiable('use_change'): usechange_cb.setEnabled(False)
        grid.setRowStretch(5,1)
        vbox.addLayout(grid)
        vbox.addLayout(ok_cancel_buttons(d))
        d.setLayout(vbox)
        # run the dialog
        if not d.exec_(): return
        # Validate and apply the fee.
        fee = unicode(fee_e.text())
        try:
            fee = self.read_amount(fee)
        except Exception:
            QMessageBox.warning(self, _('Error'), _('Invalid value') +': %s'%fee, _('OK'))
            return
        self.wallet.set_fee(fee)
        # Validate and apply the zeros count (clamped to 8).
        nz = unicode(nz_e.text())
        try:
            nz = int( nz )
            if nz>8: nz=8
        except Exception:
            QMessageBox.warning(self, _('Error'), _('Invalid value')+':%s'%nz, _('OK'))
            return
        if self.num_zeros != nz:
            self.num_zeros = nz
            self.config.set_key('num_zeros', nz, True)
            self.update_history_tab()
            self.update_receive_tab()
        usechange_result = usechange_cb.isChecked()
        if self.wallet.use_change != usechange_result:
            self.wallet.use_change = usechange_result
            self.wallet.storage.put('use_change', self.wallet.use_change)
        # Base unit change: 8 decimals for BTC, 5 for mBTC.
        unit_result = units[unit_combo.currentIndex()]
        if self.base_unit() != unit_result:
            self.decimal_point = 8 if unit_result == 'BTC' else 5
            self.config.set_key('decimal_point', self.decimal_point, True)
            self.update_history_tab()
            self.update_status()
        need_restart = False
        lang_request = languages.keys()[lang_combo.currentIndex()]
        if lang_request != self.config.get('language'):
            self.config.set_key("language", lang_request, True)
            need_restart = True
        run_hook('close_settings_dialog')
        if need_restart:
            QMessageBox.warning(self, _('Success'), _('Please restart Electrum to activate the new GUI settings'), _('OK'))
    def run_network_dialog(self):
        """Open the network settings dialog (no-op when offline)."""
        if not self.network:
            return
        # NOTE(review): guards on self.network but passes self.wallet.network
        # -- confirm these are the same object.
        NetworkDialog(self.wallet.network, self.config, self).do_exec()
    def closeEvent(self, event):
        """Persist window geometry, column widths, console history (last 50
        entries) and account expansion state, then accept the close."""
        self.tray.hide()
        g = self.geometry()
        self.config.set_key("winpos-qt", [g.left(),g.top(),g.width(),g.height()], True)
        self.save_column_widths()
        self.config.set_key("console-history", self.console.history[-50:], True)
        self.wallet.storage.put('accounts_expanded', self.accounts_expanded)
        event.accept()
    def plugins_dialog(self):
        """Show a scrollable list of plugins with enable checkboxes.

        Each row has a checkbox (disabled when the plugin is unavailable),
        an optional settings widget, and a help button with the plugin
        description.  Toggling a plugin also en/disables its settings
        widget.  Plugins that fail to render are logged and skipped.
        """
        from electrum.plugins import plugins
        d = QDialog(self)
        d.setWindowTitle(_('Electrum Plugins'))
        d.setModal(1)
        vbox = QVBoxLayout(d)
        # plugins
        scroll = QScrollArea()
        scroll.setEnabled(True)
        scroll.setWidgetResizable(True)
        scroll.setMinimumSize(400,250)
        vbox.addWidget(scroll)
        w = QWidget()
        scroll.setWidget(w)
        w.setMinimumHeight(len(plugins)*35)
        grid = QGridLayout()
        grid.setColumnStretch(0,1)
        w.setLayout(grid)
        def do_toggle(cb, p, w):
            r = p.toggle()
            cb.setChecked(r)
            if w: w.setEnabled(r)
        def mk_toggle(cb, p, w):
            # Closure factory: bind each row's widgets (late-binding pitfall).
            return lambda: do_toggle(cb,p,w)
        for i, p in enumerate(plugins):
            try:
                cb = QCheckBox(p.fullname())
                cb.setDisabled(not p.is_available())
                cb.setChecked(p.is_enabled())
                grid.addWidget(cb, i, 0)
                if p.requires_settings():
                    w = p.settings_widget(self)
                    w.setEnabled( p.is_enabled() )
                    grid.addWidget(w, i, 1)
                else:
                    w = None
                cb.clicked.connect(mk_toggle(cb,p,w))
                grid.addWidget(HelpButton(p.description()), i, 2)
            except Exception:
                print_msg(_("Error: cannot display plugin"), p)
                traceback.print_exc(file=sys.stdout)
        grid.setRowStretch(i+1,1)
        vbox.addLayout(close_button(d))
        d.exec_()
    def show_account_details(self, k):
        """Show name, multisig type and derivation path for account *k*.

        The type is inferred from the number of key roots: 2 -> "2 of 2",
        3 -> "2 of 3", otherwise a single-key account.
        """
        d = QDialog(self)
        d.setWindowTitle(_('Account Details'))
        d.setModal(1)
        vbox = QVBoxLayout(d)
        roots = self.wallet.get_roots(k)
        name = self.wallet.get_account_name(k)
        label = QLabel('Name: ' + name)
        vbox.addWidget(label)
        acctype = '2 of 2' if len(roots) == 2 else '2 of 3' if len(roots) == 3 else 'Single key'
        vbox.addWidget(QLabel('Type: ' + acctype))
        label = QLabel('Derivation: ' + k)
        vbox.addWidget(label)
        #for root in roots:
        #    mpk = self.wallet.master_public_keys[root]
        #    text = QTextEdit()
        #    text.setReadOnly(True)
        #    text.setMaximumHeight(120)
        #    text.setText(repr(mpk))
        #    vbox.addWidget(text)
        vbox.addLayout(close_button(d))
        d.exec_()
|
pballand/congress
|
refs/heads/master
|
thirdparty/antlr3-antlr-3.5/runtime/Python/tests/t031emptyAlt.py
|
22
|
import antlr3
import testbase
import unittest
class t031emptyAlt(testbase.ANTLRTest):
    """Tests a grammar containing an empty alternative (t031emptyAlt.g)."""
    def setUp(self):
        # Compile the grammar named after this test class before each test.
        self.compileGrammar()

    def testValid1(self):
        """Parsing 'foo' through rule r should succeed without errors."""
        cStream = antlr3.StringStream('foo')
        lexer = self.getLexer(cStream)
        tStream = antlr3.CommonTokenStream(lexer)
        parser = self.getParser(tStream)
        events = parser.r()

if __name__ == '__main__':
    unittest.main()
|
vijayanandnandam/youtube-dl
|
refs/heads/master
|
youtube_dl/extractor/bet.py
|
64
|
from __future__ import unicode_literals
from .mtv import MTVServicesInfoExtractor
from ..utils import unified_strdate
class BetIE(MTVServicesInfoExtractor):
    """Extractor for bet.com pages, backed by the MTV services feed."""
    _VALID_URL = r'https?://(?:www\.)?bet\.com/(?:[^/]+/)+(?P<id>.+?)\.html'
    _TESTS = [
        {
            'url': 'http://www.bet.com/news/politics/2014/12/08/in-bet-exclusive-obama-talks-race-and-racism.html',
            'info_dict': {
                'id': '07e96bd3-8850-3051-b856-271b457f0ab8',
                'display_id': 'in-bet-exclusive-obama-talks-race-and-racism',
                'ext': 'flv',
                'title': 'A Conversation With President Obama',
                'description': 'President Obama urges persistence in confronting racism and bias.',
                'duration': 1534,
                'upload_date': '20141208',
                'thumbnail': r're:(?i)^https?://.*\.jpg$',
                'subtitles': {
                    'en': 'mincount:2',
                }
            },
            'params': {
                # rtmp download
                'skip_download': True,
            },
        },
        {
            'url': 'http://www.bet.com/video/news/national/2014/justice-for-ferguson-a-community-reacts.html',
            'info_dict': {
                'id': '9f516bf1-7543-39c4-8076-dd441b459ba9',
                'display_id': 'justice-for-ferguson-a-community-reacts',
                'ext': 'flv',
                'title': 'Justice for Ferguson: A Community Reacts',
                'description': 'A BET News special.',
                'duration': 1696,
                'upload_date': '20141125',
                'thumbnail': r're:(?i)^https?://.*\.jpg$',
                'subtitles': {
                    'en': 'mincount:2',
                }
            },
            'params': {
                # rtmp download
                'skip_download': True,
            },
        }
    ]

    _FEED_URL = "http://feeds.mtvnservices.com/od/feed/bet-mrss-player"

    def _get_feed_query(self, uri):
        # The feed is queried by the media uuid extracted from the page.
        return {
            'uuid': uri,
        }

    def _extract_mgid(self, webpage):
        # The media id lives in a data-uri="..." attribute on the page.
        return self._search_regex(r'data-uri="([^"]+)', webpage, 'mgid')

    def _real_extract(self, url):
        """Resolve the page's mgid to feed entries; enrich the first entry
        with display id, description and upload date from page metadata."""
        display_id = self._match_id(url)
        webpage = self._download_webpage(url, display_id)
        mgid = self._extract_mgid(webpage)
        videos_info = self._get_videos_info(mgid)
        info_dict = videos_info['entries'][0]
        upload_date = unified_strdate(self._html_search_meta('date', webpage))
        description = self._html_search_meta('description', webpage)
        info_dict.update({
            'display_id': display_id,
            'description': description,
            'upload_date': upload_date,
        })
        return info_dict
|
vine/luigi
|
refs/heads/master
|
examples/foo_complex.py
|
32
|
# -*- coding: utf-8 -*-
#
# Copyright 2012-2015 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import shutil
import time
import random
import luigi
max_depth = 10
max_total_nodes = 50
current_nodes = 0
class Foo(luigi.Task):
    """Root task that fans out to a first layer of Bar dependencies."""
    task_namespace = 'examples'

    def run(self):
        print("Running Foo")

    def requires(self):
        """Yield the first layer of Bar tasks (30 // max_depth of them)."""
        global current_nodes

        # Bug fix: use floor division -- plain '/' yields a float on
        # Python 3 and range() then raises TypeError.
        for i in range(30 // max_depth):
            current_nodes += 1
            yield Bar(i)
class Bar(luigi.Task):
    """Task that recursively spawns more Bar tasks until the global node
    budget (max_total_nodes) is exhausted."""
    task_namespace = 'examples'
    num = luigi.IntParameter()

    def run(self):
        time.sleep(1)
        self.output().open('w').close()

    def requires(self):
        """Spawn a random number of child Bar tasks while budget remains."""
        global current_nodes

        if max_total_nodes > current_nodes:
            valor = int(random.uniform(1, 30))
            # Bug fix: floor division; '/' produces a float on Python 3
            # and range() then raises TypeError.
            for i in range(valor // max_depth):
                current_nodes += 1
                yield Bar(current_nodes)

    def output(self):
        """
        Returns the target output for this task.

        :return: the target output for this task.
        :rtype: object (:py:class:`~luigi.target.Target`)
        """
        time.sleep(1)
        return luigi.LocalTarget('/tmp/bar/%d' % self.num)
if __name__ == "__main__":
    # Start from a clean output directory so every Bar task re-runs.
    if os.path.exists('/tmp/bar'):
        shutil.rmtree('/tmp/bar')
    luigi.run(['--task', 'Foo', '--workers', '2'])
|
nop33/indico-plugins
|
refs/heads/master
|
search_invenio/indico_search_invenio/forms.py
|
1
|
# This file is part of Indico.
# Copyright (C) 2002 - 2017 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from wtforms.fields.core import SelectField
from indico_search import SearchForm
from indico_search_invenio import _
COLLECTION_CHOICES = [('', _('Both (Events + Contributions)')),
('events', _('Events')),
('contributions', _('Contributions'))]
SORT_ORDER_CHOICES = [('a', _('Oldest first')),
('d', _('Newest first'))]
class InvenioSearchForm(SearchForm):
    """Search form with Invenio-specific collection and sort-order filters."""
    # '' searches both events and contributions (see COLLECTION_CHOICES).
    collection = SelectField(_('Search for'), choices=COLLECTION_CHOICES, default='')
    # 'd' = newest first (see SORT_ORDER_CHOICES).
    sort_order = SelectField(_('Sort order'), choices=SORT_ORDER_CHOICES, default='d')
|
kjc88/sl4a
|
refs/heads/master
|
python/gdata/tests/module_test_runner.py
|
132
|
#!/usr/bin/python
#
# Copyright (C) 2006 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__author__ = 'api.jscudder@gmail.com (Jeff Scudder)'
import unittest
class ModuleTestRunner(object):
  """Runs every unittest case found in a list of modules (Python 2)."""

  def __init__(self, module_list=None, module_settings=None):
    """Constructor for a runner to run tests in the modules listed.

    Args:
      module_list: list (optional) The modules whose test cases will be run.
      module_settings: dict (optional) A dictionary of module level varables
          which should be set in the modules if they are present. An
          example is the username and password which is a module variable
          in most service_test modules.
    """
    self.modules = module_list or []
    self.settings = module_settings or {}

  def RunAllTests(self):
    """Executes all tests in this objects modules list.

    It also sets any module variables which match the settings keys to the
    corresponding values in the settings member.
    """
    runner = unittest.TextTestRunner()
    for module in self.modules:
      # Set any module variables according to the contents in the settings
      for setting, value in self.settings.iteritems():
        try:
          setattr(module, setting, value)
        except AttributeError:
          # This module did not have a variable for the current setting, so
          # we skip it and try the next setting.
          pass
      # We have set all of the applicable settings for the module, now
      # run the tests.
      print '\nRunning all tests in module', module.__name__
      runner.run(unittest.defaultTestLoader.loadTestsFromModule(module))
|
UWPCE-PythonCert/IntroPython2016
|
refs/heads/master
|
students/weidnem/session2/series.py
|
3
|
# -*- coding: utf-8 -*-
"""
Created on Thu Oct 6 17:09:17 2016
@author: weidner, matthew
"""
fib = 0
def fibonacci(n):
series = [0,1]
get_next(series,n)
def get_next(series, n):
    """Recursively extend *series* with Fibonacci numbers until it holds
    n terms, then print and return it.

    Fix: the original compared ``len(series) != n``, which never terminates
    for n < 2 (the seed already has 2 elements, so the length can only grow
    past n).  Using ``<`` terminates immediately in that case.  The series
    is also returned so callers can use the result programmatically.
    """
    if len(series) < n:
        series.append(series[-2] + series[-1])
        return get_next(series, n)
    print(series)
    return series
# Demo: print the first 9 Fibonacci numbers when the module is executed.
fibonacci(9)
|
amyvmiwei/kbengine
|
refs/heads/master
|
kbe/res/scripts/common/Lib/test/test_sys_setprofile.py
|
177
|
import gc
import pprint
import sys
import unittest
from test import support
class TestGetProfile(unittest.TestCase):
    """Sanity checks for the sys.getprofile()/sys.setprofile() round-trip."""

    def setUp(self):
        sys.setprofile(None)

    def tearDown(self):
        sys.setprofile(None)

    def test_empty(self):
        # No profile hook is installed by default.
        self.assertIsNone(sys.getprofile())

    def test_setget(self):
        def hook(*args):
            pass

        sys.setprofile(hook)
        self.assertIs(sys.getprofile(), hook)
class HookWatcher:
    """Profile hook that records (frame index, event, ident) triples for
    call/return/exception events, for later comparison with expectations."""

    def __init__(self):
        self.frames = []   # frames seen, in order of first appearance
        self.events = []   # (frame index, event name, ident(frame)) triples

    def callback(self, frame, event, arg):
        # Only the Python-level events are interesting; c_* events are dropped.
        if (event == "call"
            or event == "return"
            or event == "exception"):
            self.add_event(event, frame)

    def add_event(self, event, frame=None):
        """Add an event to the log."""
        if frame is None:
            # Default to the caller's frame.  Depth-sensitive: this only works
            # when add_event() is invoked directly by the recorded function.
            frame = sys._getframe(1)

        try:
            frameno = self.frames.index(frame)
        except ValueError:
            # First time we see this frame: assign it the next index.
            frameno = len(self.frames)
            self.frames.append(frame)

        self.events.append((frameno, event, ident(frame)))

    def get_events(self):
        """Remove calls to add_event()."""
        disallowed = [ident(self.add_event.__func__), ident(ident)]
        self.frames = None  # drop frame references so they can be collected

        return [item for item in self.events if item[2] not in disallowed]
class ProfileSimulator(HookWatcher):
    """HookWatcher that dispatches events the way a pure-Python profiler
    would: it maintains a simulated call stack and treats an 'exception'
    event as a test failure (profilers must never receive one)."""

    def __init__(self, testcase):
        self.testcase = testcase
        self.stack = []  # simulated stack of active frames
        HookWatcher.__init__(self)

    def callback(self, frame, event, arg):
        # Callback registered with sys.setprofile()/sys.settrace()
        self.dispatch[event](self, frame)

    def trace_call(self, frame):
        self.add_event('call', frame)
        self.stack.append(frame)

    def trace_return(self, frame):
        self.add_event('return', frame)
        self.stack.pop()

    def trace_exception(self, frame):
        # Profilers (unlike tracers) must not see 'exception' events.
        self.testcase.fail(
            "the profiler should never receive exception events")

    def trace_pass(self, frame):
        # C-level events are ignored entirely.
        pass

    # Event-name -> unbound-handler dispatch table used by callback().
    dispatch = {
        'call': trace_call,
        'exception': trace_exception,
        'return': trace_return,
        'c_call': trace_pass,
        'c_return': trace_pass,
        'c_exception': trace_pass,
        }
class TestCaseBase(unittest.TestCase):
    """Shared helper: run *callable* under a fresh watcher and diff the
    captured event log against the expected one."""

    def check_events(self, callable, expected):
        events = capture_events(callable, self.new_watcher())
        if events == expected:
            return
        self.fail("Expected events:\n%s\nReceived events:\n%s"
                  % (pprint.pformat(expected), pprint.pformat(events)))
class ProfileHookTestCase(TestCaseBase):
    """Checks the exact event streams a raw profile hook (HookWatcher)
    receives for various call/exception/generator shapes.  Note that a
    profiler sees only call/return pairs -- exceptions that propagate out
    of a frame still produce a 'return' event, not an 'exception' event."""

    def new_watcher(self):
        return HookWatcher()

    def test_simple(self):
        def f(p):
            pass
        f_ident = ident(f)
        self.check_events(f, [(1, 'call', f_ident),
                              (1, 'return', f_ident),
                              ])

    def test_exception(self):
        # The ZeroDivisionError escapes f() but is swallowed by protect();
        # the profiler still only sees the call/return pair.
        def f(p):
            1/0
        f_ident = ident(f)
        self.check_events(f, [(1, 'call', f_ident),
                              (1, 'return', f_ident),
                              ])

    def test_caught_exception(self):
        def f(p):
            try: 1/0
            except: pass
        f_ident = ident(f)
        self.check_events(f, [(1, 'call', f_ident),
                              (1, 'return', f_ident),
                              ])

    def test_caught_nested_exception(self):
        def f(p):
            try: 1/0
            except: pass
        f_ident = ident(f)
        self.check_events(f, [(1, 'call', f_ident),
                              (1, 'return', f_ident),
                              ])

    def test_nested_exception(self):
        def f(p):
            1/0
        f_ident = ident(f)
        self.check_events(f, [(1, 'call', f_ident),
                              # This isn't what I expected:
                              # (0, 'exception', protect_ident),
                              # I expected this again:
                              (1, 'return', f_ident),
                              ])

    def test_exception_in_except_clause(self):
        def f(p):
            1/0
        def g(p):
            try:
                f(p)
            except:
                try: f(p)
                except: pass
        f_ident = ident(f)
        g_ident = ident(g)
        self.check_events(g, [(1, 'call', g_ident),
                              (2, 'call', f_ident),
                              (2, 'return', f_ident),
                              (3, 'call', f_ident),
                              (3, 'return', f_ident),
                              (1, 'return', g_ident),
                              ])

    def test_exception_propogation(self):
        def f(p):
            1/0
        def g(p):
            try: f(p)
            finally: p.add_event("falling through")
        f_ident = ident(f)
        g_ident = ident(g)
        self.check_events(g, [(1, 'call', g_ident),
                              (2, 'call', f_ident),
                              (2, 'return', f_ident),
                              (1, 'falling through', g_ident),
                              (1, 'return', g_ident),
                              ])

    def test_raise_twice(self):
        def f(p):
            try: 1/0
            except: 1/0
        f_ident = ident(f)
        self.check_events(f, [(1, 'call', f_ident),
                              (1, 'return', f_ident),
                              ])

    def test_raise_reraise(self):
        def f(p):
            try: 1/0
            except: raise
        f_ident = ident(f)
        self.check_events(f, [(1, 'call', f_ident),
                              (1, 'return', f_ident),
                              ])

    def test_raise(self):
        def f(p):
            raise Exception()
        f_ident = ident(f)
        self.check_events(f, [(1, 'call', f_ident),
                              (1, 'return', f_ident),
                              ])

    def test_distant_exception(self):
        # An exception propagating through a deep call chain still yields a
        # clean return event for every frame it unwinds.
        def f():
            1/0
        def g():
            f()
        def h():
            g()
        def i():
            h()
        def j(p):
            i()
        f_ident = ident(f)
        g_ident = ident(g)
        h_ident = ident(h)
        i_ident = ident(i)
        j_ident = ident(j)
        self.check_events(j, [(1, 'call', j_ident),
                              (2, 'call', i_ident),
                              (3, 'call', h_ident),
                              (4, 'call', g_ident),
                              (5, 'call', f_ident),
                              (5, 'return', f_ident),
                              (4, 'return', g_ident),
                              (3, 'return', h_ident),
                              (2, 'return', i_ident),
                              (1, 'return', j_ident),
                              ])

    def test_generator(self):
        def f():
            for i in range(2):
                yield i
        def g(p):
            for i in f():
                pass
        f_ident = ident(f)
        g_ident = ident(g)
        self.check_events(g, [(1, 'call', g_ident),
                              # call the iterator twice to generate values
                              (2, 'call', f_ident),
                              (2, 'return', f_ident),
                              (2, 'call', f_ident),
                              (2, 'return', f_ident),
                              # once more; returns end-of-iteration with
                              # actually raising an exception
                              (2, 'call', f_ident),
                              (2, 'return', f_ident),
                              (1, 'return', g_ident),
                              ])

    def test_stop_iteration(self):
        def f():
            for i in range(2):
                yield i
            raise StopIteration
        def g(p):
            for i in f():
                pass
        f_ident = ident(f)
        g_ident = ident(g)
        self.check_events(g, [(1, 'call', g_ident),
                              # call the iterator twice to generate values
                              (2, 'call', f_ident),
                              (2, 'return', f_ident),
                              (2, 'call', f_ident),
                              (2, 'return', f_ident),
                              # once more to hit the raise:
                              (2, 'call', f_ident),
                              (2, 'return', f_ident),
                              (1, 'return', g_ident),
                              ])
class ProfileSimulatorTestCase(TestCaseBase):
    """Repeats the key checks through the dispatch-table ProfileSimulator,
    which additionally fails the test if an 'exception' event is seen."""

    def new_watcher(self):
        return ProfileSimulator(self)

    def test_simple(self):
        def f(p):
            pass
        f_ident = ident(f)
        self.check_events(f, [(1, 'call', f_ident),
                              (1, 'return', f_ident),
                              ])

    def test_basic_exception(self):
        def f(p):
            1/0
        f_ident = ident(f)
        self.check_events(f, [(1, 'call', f_ident),
                              (1, 'return', f_ident),
                              ])

    def test_caught_exception(self):
        def f(p):
            try: 1/0
            except: pass
        f_ident = ident(f)
        self.check_events(f, [(1, 'call', f_ident),
                              (1, 'return', f_ident),
                              ])

    def test_distant_exception(self):
        def f():
            1/0
        def g():
            f()
        def h():
            g()
        def i():
            h()
        def j(p):
            i()
        f_ident = ident(f)
        g_ident = ident(g)
        h_ident = ident(h)
        i_ident = ident(i)
        j_ident = ident(j)
        self.check_events(j, [(1, 'call', j_ident),
                              (2, 'call', i_ident),
                              (3, 'call', h_ident),
                              (4, 'call', g_ident),
                              (5, 'call', f_ident),
                              (5, 'return', f_ident),
                              (4, 'return', g_ident),
                              (3, 'return', h_ident),
                              (2, 'return', i_ident),
                              (1, 'return', j_ident),
                              ])
def ident(function):
    """Return (first line number, code name) identifying *function*, which
    may be either a plain function or a frame object."""
    try:
        code = function.f_code        # frame object
    except AttributeError:
        code = function.__code__      # regular function
    return code.co_firstlineno, code.co_name
def protect(f, p):
    """Invoke f(p), swallowing anything it raises."""
    try:
        f(p)
    except:  # noqa: E722 -- deliberately catches everything
        pass
# Identity of the protect() wrapper itself, so its events can be filtered out.
protect_ident = ident(protect)
def capture_events(callable, p=None):
    """Run *callable* under profiler *p* (default: a fresh HookWatcher) and
    return its event log, minus the outermost protect() call/return pair."""
    if p is None:
        p = HookWatcher()

    # Disable the garbage collector. This prevents __del__s from showing up in
    # traces.
    old_gc = gc.isenabled()
    gc.disable()
    try:
        sys.setprofile(p.callback)
        protect(callable, p)
        sys.setprofile(None)
    finally:
        if old_gc:
            gc.enable()

    # Slice off the events for protect() itself (first and last entries).
    return p.get_events()[1:-1]
def show_events(callable):
    """Debug helper: pretty-print the events captured while profiling
    *callable*.  (The redundant function-local ``import pprint`` was
    removed -- pprint is already imported at module level.)"""
    pprint.pprint(capture_events(callable))
def test_main():
    """Entry point used by the regression-test driver."""
    cases = (TestGetProfile, ProfileHookTestCase, ProfileSimulatorTestCase)
    support.run_unittest(*cases)


if __name__ == "__main__":
    test_main()
|
nvoron23/arangodb
|
refs/heads/devel
|
3rdParty/V8-4.3.61/third_party/python_26/Lib/json/tests/test_separators.py
|
55
|
import textwrap
from unittest import TestCase
import json
class TestSeparators(TestCase):
    """Check that custom (item, key) separators are honoured by json.dumps,
    including when combined with indent and sort_keys."""

    def test_separators(self):
        h = [['blorpie'], ['whoops'], [], 'd-shtaeou', 'd-nthiouh', 'i-vhbjkhnth',
             {'nifty': 87}, {'field': 'yes', 'morefield': False}]

        # Explicit separators are used verbatim, so item lines end in " ,".
        expect = textwrap.dedent("""\
        [
          [
            "blorpie"
          ] ,
          [
            "whoops"
          ] ,
          [] ,
          "d-shtaeou" ,
          "d-nthiouh" ,
          "i-vhbjkhnth" ,
          {
            "nifty" : 87
          } ,
          {
            "field" : "yes" ,
            "morefield" : false
          }
        ]""")

        d1 = json.dumps(h)
        d2 = json.dumps(h, indent=2, sort_keys=True, separators=(' ,', ' : '))

        h1 = json.loads(d1)
        h2 = json.loads(d2)

        # assertEqual replaces the deprecated (and since-removed) assertEquals.
        self.assertEqual(h1, h)
        self.assertEqual(h2, h)
        self.assertEqual(d2, expect)
|
karlw00t/flask
|
refs/heads/master
|
tests/test_appctx.py
|
7
|
# -*- coding: utf-8 -*-
"""
tests.appctx
~~~~~~~~~~~~
Tests the application context.
:copyright: (c) 2015 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import pytest
import flask
def test_basic_url_generation():
    """url_for works inside a bare app context once SERVER_NAME is set."""
    app = flask.Flask(__name__)
    app.config.update(SERVER_NAME='localhost',
                      PREFERRED_URL_SCHEME='https')

    @app.route('/')
    def index():
        pass

    with app.app_context():
        assert flask.url_for('index') == 'https://localhost/'
def test_url_generation_requires_server_name():
    """Without SERVER_NAME, url_for in a plain app context raises."""
    app = flask.Flask(__name__)
    with app.app_context():
        with pytest.raises(RuntimeError):
            flask.url_for('index')
def test_url_generation_without_context_fails():
    """url_for outside any application context raises RuntimeError."""
    with pytest.raises(RuntimeError):
        flask.url_for('index')
def test_request_context_means_app_context():
    """Pushing a request context implicitly pushes an app context."""
    app = flask.Flask(__name__)
    with app.test_request_context():
        assert flask.current_app._get_current_object() == app
    # Both contexts are gone once the request context is popped.
    assert flask._app_ctx_stack.top is None
def test_app_context_provides_current_app():
    """Inside app_context(), current_app resolves to the pushed app."""
    app = flask.Flask(__name__)
    with app.app_context():
        assert flask.current_app._get_current_object() == app
    # The context is fully popped afterwards.
    assert flask._app_ctx_stack.top is None
def test_app_tearing_down():
    """teardown_appcontext functions run with exc=None on a clean pop."""
    recorded = []
    app = flask.Flask(__name__)

    @app.teardown_appcontext
    def cleanup(exception):
        recorded.append(exception)

    with app.app_context():
        pass

    assert recorded == [None]
def test_app_tearing_down_with_previous_exception():
    """An exception handled *before* the context must not leak into teardown."""
    recorded = []
    app = flask.Flask(__name__)

    @app.teardown_appcontext
    def cleanup(exception):
        recorded.append(exception)

    try:
        raise Exception('dummy')
    except Exception:
        pass

    with app.app_context():
        pass

    assert recorded == [None]
def test_app_tearing_down_with_handled_exception():
    """An exception handled *inside* the context must not reach teardown."""
    recorded = []
    app = flask.Flask(__name__)

    @app.teardown_appcontext
    def cleanup(exception):
        recorded.append(exception)

    with app.app_context():
        try:
            raise Exception('dummy')
        except Exception:
            pass

    assert recorded == [None]
def test_app_ctx_globals_methods():
    """flask.g supports get/__contains__/setdefault/pop/__iter__ like a
    mapping; each step below builds on the previous one, so order matters."""
    app = flask.Flask(__name__)
    with app.app_context():
        # get
        assert flask.g.get('foo') is None
        assert flask.g.get('foo', 'bar') == 'bar'
        # __contains__
        assert 'foo' not in flask.g
        flask.g.foo = 'bar'
        assert 'foo' in flask.g
        # setdefault
        flask.g.setdefault('bar', 'the cake is a lie')
        flask.g.setdefault('bar', 'hello world')  # no-op: 'bar' already set
        assert flask.g.bar == 'the cake is a lie'
        # pop
        assert flask.g.pop('bar') == 'the cake is a lie'
        with pytest.raises(KeyError):
            flask.g.pop('bar')
        assert flask.g.pop('bar', 'more cake') == 'more cake'
        # __iter__
        assert list(flask.g) == ['foo']
def test_custom_app_ctx_globals_class():
    """app_ctx_globals_class replaces the type backing flask.g."""
    class SpamGlobals(object):
        def __init__(self):
            self.spam = 'eggs'

    app = flask.Flask(__name__)
    app.app_ctx_globals_class = SpamGlobals
    with app.app_context():
        assert flask.render_template_string('{{ g.spam }}') == 'eggs'
def test_context_refcounts():
    """Re-entering already-pushed contexts must not trigger early teardown."""
    called = []
    app = flask.Flask(__name__)

    @app.teardown_request
    def teardown_req(error=None):
        called.append('request')

    @app.teardown_appcontext
    def teardown_app(error=None):
        called.append('app')

    @app.route('/')
    def index():
        # Re-enter the live contexts; popping these nested entries must not
        # tear anything down while the request is still being handled.
        with flask._app_ctx_stack.top:
            with flask._request_ctx_stack.top:
                pass
        env = flask._request_ctx_stack.top.request.environ
        assert env['werkzeug.request'] is not None
        return u''

    c = app.test_client()
    res = c.get('/')
    assert res.status_code == 200
    assert res.data == b''
    # Request teardown runs before app teardown, each exactly once.
    assert called == ['request', 'app']
def test_clean_pop():
    """App-context teardown still runs even when request teardown raises."""
    called = []
    app = flask.Flask(__name__)

    @app.teardown_request
    def teardown_req(error=None):
        # Deliberately broken request teardown.
        1 / 0

    @app.teardown_appcontext
    def teardown_app(error=None):
        called.append('TEARDOWN')

    try:
        with app.test_request_context():
            called.append(flask.current_app.name)
    except ZeroDivisionError:
        pass

    assert called == ['test_appctx', 'TEARDOWN']
    # All contexts were popped despite the error.
    assert not flask.current_app
|
nkming2/sc-studio
|
refs/heads/master
|
src/sc_studio/config.py
|
2
|
'''
sc_studio.config

Configuration constants for SC Studio: logging, UI colors/font, and the
serial message protocol byte codes.

Author: Ming Tsang
Copyright (c) 2014-2015 HKUST SmartCar Team
Refer to LICENSE for details
'''

# Comment out to disable logging
LOGFILE = "scstudio.log"

# Remote debugging switch; PYSRC presumably points at debugger sources -- TODO confirm
REMOTE_DEBUG = False
PYSRC = ""

# Grey palette used by the UI (Material-design-style hex values).
COL_GREY_100 = "#F5F5F5"
COL_GREY_200 = "#EEEEEE"
COL_GREY_700 = "#616161"
COL_GREY_900 = "#212121"

FONT = "Courier"

# Message framing bytes and type codes -- assumed wire protocol; verify
# against the reader/writer implementation.
MSG_BEGIN = 0xDC
MSG_END = 0xCD
MSG_NOP = 0
MSG_STRING = 1
MSG_CCD_DATA = 2
MSG_CAMERA = 3
MSG_GRAPH = 4
# Message types that carry a payload (NOP excluded).
MSG_TOKENS = [MSG_STRING, MSG_CCD_DATA, MSG_CAMERA, MSG_GRAPH]
CZ-NIC/foris
|
refs/heads/master
|
foris/config_handlers/backups.py
|
1
|
# Foris - web administration interface for OpenWrt based on NETCONF
# Copyright (C) 2017 CZ.NIC, z.s.p.o. <http://www.nic.cz>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import base64
from foris import fapi
from foris.form import File
from foris.state import current_state
from foris.utils.translators import gettext_dummy as gettext, _
from .base import BaseConfigHandler
class MaintenanceHandler(BaseConfigHandler):
    """Config handler offering restoration of a configuration backup."""

    userfriendly_title = gettext("Maintenance")

    def get_form(self):
        """Build the backup-restore form and attach its submit callback."""
        form = fapi.ForisForm("maintenance", self.data)
        section = form.add_section(
            name="restore_backup", title=_(self.userfriendly_title)
        )
        section.add_field(File, name="backup_file", label=_("Backup file"), required=True)

        def maintenance_form_cb(data):
            # The backend expects the backup as base64-encoded text.
            encoded = base64.b64encode(data["backup_file"].file.read()).decode("utf-8")
            result = current_state.backend.perform(
                "maintain", "restore_backup", {"backup": encoded}
            )
            return "save_result", {"result": result["result"]}

        form.add_callback(maintenance_form_cb)
        return form
|
xNovax/SickRage
|
refs/heads/master
|
lib/github/AuthorizationApplication.py
|
74
|
# -*- coding: utf-8 -*-
# ########################## Copyrights and license ############################
# #
# Copyright 2012 Vincent Jacques <vincent@vincent-jacques.net> #
# Copyright 2012 Zearin <zearin@gonk.net> #
# Copyright 2013 AKFish <akfish@gmail.com> #
# Copyright 2013 Vincent Jacques <vincent@vincent-jacques.net> #
# #
# This file is part of PyGithub. http://jacquev6.github.com/PyGithub/ #
# #
# PyGithub is free software: you can redistribute it and/or modify it under #
# the terms of the GNU Lesser General Public License as published by the Free #
# Software Foundation, either version 3 of the License, or (at your option) #
# any later version. #
# #
# PyGithub is distributed in the hope that it will be useful, but WITHOUT ANY #
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS #
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more #
# details. #
# #
# You should have received a copy of the GNU Lesser General Public License #
# along with PyGithub. If not, see <http://www.gnu.org/licenses/>. #
# #
# ##############################################################################
import github.GithubObject
class AuthorizationApplication(github.GithubObject.CompletableGithubObject):
    """
    This class represents AuthorizationApplications as returned for example by http://developer.github.com/v3/todo
    """

    @property
    def name(self):
        """
        :type: string
        """
        # Lazily fetch the full object from the API if the attribute is unset.
        self._completeIfNotSet(self._name)
        return self._name.value

    @property
    def url(self):
        """
        :type: string
        """
        # Lazily fetch the full object from the API if the attribute is unset.
        self._completeIfNotSet(self._url)
        return self._url.value

    def _initAttributes(self):
        # All attributes start as the NotSet sentinel until _useAttributes()
        # populates them from raw API data.
        self._name = github.GithubObject.NotSet
        self._url = github.GithubObject.NotSet

    def _useAttributes(self, attributes):
        """Populate attributes from a raw API response dict."""
        if "name" in attributes:  # pragma no branch
            self._name = self._makeStringAttribute(attributes["name"])
        if "url" in attributes:  # pragma no branch
            self._url = self._makeStringAttribute(attributes["url"])
|
direvus/ansible
|
refs/heads/devel
|
lib/ansible/modules/cloud/scaleway/scaleway_sshkey.py
|
75
|
#!/usr/bin/python
#
# Scaleway SSH keys management module
#
# Copyright (C) 2018 Online SAS.
# https://www.scaleway.com
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'
}
DOCUMENTATION = '''
---
module: scaleway_sshkey
short_description: Scaleway SSH keys management module
version_added: "2.6"
author: Remy Leone (@sieben)
description:
- This module manages SSH keys on Scaleway account
U(https://developer.scaleway.com)
extends_documentation_fragment: scaleway
options:
state:
description:
- Indicate desired state of the SSH key.
default: present
choices:
- present
- absent
ssh_pub_key:
description:
- The public SSH key as a string to add.
required: true
api_url:
description:
- Scaleway API URL
default: 'https://account.scaleway.com'
aliases: ['base_url']
'''
EXAMPLES = '''
- name: "Add SSH key"
scaleway_sshkey:
ssh_pub_key: "ssh-rsa AAAA..."
state: "present"
- name: "Delete SSH key"
scaleway_sshkey:
ssh_pub_key: "ssh-rsa AAAA..."
state: "absent"
- name: "Add SSH key with explicit token"
scaleway_sshkey:
ssh_pub_key: "ssh-rsa AAAA..."
state: "present"
oauth_token: "6ecd2c9b-6f4f-44d4-a187-61a92078d08c"
'''
RETURN = '''
data:
description: This is only present when C(state=present)
returned: when C(state=present)
type: dict
sample: {
"ssh_public_keys": [
{"key": "ssh-rsa AAAA...."}
]
}
'''
from ansible.module_utils.basic import AnsibleModule, env_fallback
from ansible.module_utils.scaleway import scaleway_argument_spec, Scaleway
def extract_present_sshkeys(raw_organization_dict):
    """Return the SSH public-key strings of the first user of the first
    organization in the API response."""
    first_user = raw_organization_dict["organizations"][0]["users"][0]
    return [entry["key"] for entry in first_user["ssh_public_keys"]]
def extract_user_id(raw_organization_dict):
    """Return the id of the first user of the first organization."""
    first_org = raw_organization_dict["organizations"][0]
    first_user = first_org["users"][0]
    return first_user["id"]
def sshkey_user_patch(ssh_lookup):
    """Wrap a list of key strings into the PATCH payload the API expects."""
    wrapped = [{"key": value} for value in ssh_lookup]
    return {"ssh_public_keys": wrapped}
def core(module):
    """Reconcile presence/absence of the given SSH public key on the
    Scaleway account (honouring check mode) and exit the module.

    Fix: the bound-but-unused exception variable ``e`` was dropped from the
    except clause.
    """
    ssh_pub_key = module.params['ssh_pub_key']
    state = module.params["state"]
    account_api = Scaleway(module)
    response = account_api.get('organizations')

    status_code = response.status_code
    organization_json = response.json

    if not response.ok:
        module.fail_json(msg='Error getting ssh key [{0}: {1}]'.format(
            status_code, response.json['message']))

    user_id = extract_user_id(organization_json)
    present_sshkeys = []
    try:
        present_sshkeys = extract_present_sshkeys(organization_json)
    except (KeyError, IndexError):
        # NOTE(review): `data=` (not `msg=`) looks unusual for fail_json --
        # kept as-is to preserve the module's output contract; confirm intent.
        module.fail_json(changed=False, data="Error while extracting present SSH keys from API")

    if state in ('present',):
        if ssh_pub_key in present_sshkeys:
            module.exit_json(changed=False)

        # If key not found create it!
        if module.check_mode:
            module.exit_json(changed=True)

        present_sshkeys.append(ssh_pub_key)
        payload = sshkey_user_patch(present_sshkeys)

        response = account_api.patch('/users/%s' % user_id, data=payload)

        if response.ok:
            module.exit_json(changed=True, data=response.json)

        module.fail_json(msg='Error creating ssh key [{0}: {1}]'.format(
            response.status_code, response.json))

    elif state in ('absent',):
        if ssh_pub_key not in present_sshkeys:
            module.exit_json(changed=False)

        if module.check_mode:
            module.exit_json(changed=True)

        present_sshkeys.remove(ssh_pub_key)
        payload = sshkey_user_patch(present_sshkeys)

        response = account_api.patch('/users/%s' % user_id, data=payload)

        if response.ok:
            module.exit_json(changed=True, data=response.json)

        module.fail_json(msg='Error deleting ssh key [{0}: {1}]'.format(
            response.status_code, response.json))
def main():
    """Module entry point: assemble the argument spec and run core()."""
    spec = scaleway_argument_spec()
    spec.update(dict(
        state=dict(default='present', choices=['absent', 'present']),
        ssh_pub_key=dict(required=True),
        api_url=dict(fallback=(env_fallback, ['SCW_API_URL']), default='https://account.scaleway.com', aliases=['base_url']),
    ))

    ansible_module = AnsibleModule(
        argument_spec=spec,
        supports_check_mode=True,
    )
    core(ansible_module)


if __name__ == '__main__':
    main()
|
azureplus/hue
|
refs/heads/master
|
desktop/core/ext-py/Babel-0.9.6/babel/messages/checkers.py
|
47
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2007 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://babel.edgewall.org/wiki/License.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://babel.edgewall.org/log/.
"""Various routines that help with validation of translations.
:since: version 0.9
"""
from itertools import izip
from babel.messages.catalog import TranslationError, PYTHON_FORMAT
from babel.util import set
#: list of format chars that are compatible to each other
# (e.g. a %i placeholder may be translated as %d, but not as %f).
_string_format_compatibilities = [
    set(['i', 'd', 'u']),
    set(['x', 'X']),
    set(['f', 'F', 'g', 'G'])
]
def num_plurals(catalog, message):
    """Verify the number of plurals in the translation.

    :raises TranslationError: if a non-pluralizable message carries plural
        forms, or the number of forms differs from the catalog's.
    """
    if not message.pluralizable:
        # basestring: this module targets Python 2.
        if not isinstance(message.string, basestring):
            raise TranslationError("Found plural forms for non-pluralizable "
                                   "message")
        return

    # skip further tests if no catalog is provided.
    elif catalog is None:
        return

    msgstrs = message.string
    if not isinstance(msgstrs, (list, tuple)):
        msgstrs = (msgstrs,)
    if len(msgstrs) != catalog.num_plurals:
        raise TranslationError("Wrong number of plural forms (expected %d)" %
                               catalog.num_plurals)
def python_format(catalog, message):
    """Verify the format string placeholders in the translation."""
    # Only applies to messages explicitly flagged as python-format.
    if 'python-format' not in message.flags:
        return

    msgids = message.id
    if not isinstance(msgids, (list, tuple)):
        msgids = (msgids,)
    msgstrs = message.string
    if not isinstance(msgstrs, (list, tuple)):
        msgstrs = (msgstrs,)

    # izip: Python 2 itertools; pair each msgid with its translation.
    for msgid, msgstr in izip(msgids, msgstrs):
        if msgstr:
            _validate_format(msgid, msgstr)
def _validate_format(format, alternative):
    """Test format string `alternative` against `format`.  `format` can be the
    msgid of a message and `alternative` one of the `msgstr`\s.  The two
    arguments are not interchangeable as `alternative` may contain less
    placeholders if `format` uses named placeholders.

    The behavior of this function is undefined if the string does not use
    string formattings.

    If the string formatting of `alternative` is compatible to `format` the
    function returns `None`, otherwise a `TranslationError` is raised.

    Examples for compatible format strings:

    >>> _validate_format('Hello %s!', 'Hallo %s!')
    >>> _validate_format('Hello %i!', 'Hallo %d!')

    Example for an incompatible format strings:

    >>> _validate_format('Hello %(name)s!', 'Hallo %s!')
    Traceback (most recent call last):
      ...
    TranslationError: the format strings are of different kinds

    This function is used by the `python_format` checker.

    :param format: The original format string
    :param alternative: The alternative format string that should be checked
                        against format
    :return: None on success
    :raises TranslationError: on formatting errors
    """
    def _parse(string):
        # Extract (name, type-char) pairs for every %-placeholder; literal
        # '%%' escapes are skipped.
        result = []
        for match in PYTHON_FORMAT.finditer(string):
            name, format, typechar = match.groups()
            if typechar == '%' and name is None:
                continue
            result.append((name, str(typechar)))
        return result

    def _compatible(a, b):
        # Two type chars are compatible if equal or in the same family set.
        if a == b:
            return True
        for set in _string_format_compatibilities:
            if a in set and b in set:
                return True
        return False

    def _check_positional(results):
        # A format string must be all-positional or all-named, never mixed.
        positional = None
        for name, char in results:
            if positional is None:
                positional = name is None
            else:
                if (name is None) != positional:
                    raise TranslationError('format string mixes positional '
                                           'and named placeholders')
        return bool(positional)

    a, b = map(_parse, (format, alternative))

    # now check if both strings are positional or named
    a_positional, b_positional = map(_check_positional, (a, b))
    if a_positional and not b_positional and not b:
        raise TranslationError('placeholders are incompatible')
    elif a_positional != b_positional:
        raise TranslationError('the format strings are of different kinds')

    # if we are operating on positional strings both must have the
    # same number of format chars and those must be compatible
    if a_positional:
        if len(a) != len(b):
            raise TranslationError('positional format placeholders are '
                                   'unbalanced')
        for idx, ((_, first), (_, second)) in enumerate(izip(a, b)):
            if not _compatible(first, second):
                raise TranslationError('incompatible format for placeholder '
                                       '%d: %r and %r are not compatible' %
                                       (idx + 1, first, second))

    # otherwise the second string must not have names the first one
    # doesn't have and the types of those included must be compatible
    else:
        type_map = dict(a)
        for name, typechar in b:
            if name not in type_map:
                raise TranslationError('unknown named placeholder %r' % name)
            elif not _compatible(typechar, type_map[name]):
                raise TranslationError('incompatible format for '
                                       'placeholder %r: '
                                       '%r and %r are not compatible' %
                                       (name, typechar, type_map[name]))
def _find_checkers():
    """Return all registered checker callables, falling back to the builtin
    checkers when setuptools' entry-point machinery is unavailable."""
    try:
        from pkg_resources import working_set
    except ImportError:
        return [num_plurals, python_format]
    return [entry_point.load()
            for entry_point in working_set.iter_entry_points('babel.checkers')]


checkers = _find_checkers()
|
laperry1/android_external_chromium_org
|
refs/heads/cm-12.1
|
tools/usb_gadget/server.py
|
91
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""WSGI application to manage a USB gadget.
"""
import datetime
import hashlib
import re
import subprocess
import sys
import time
import urllib2
from tornado import httpserver
from tornado import ioloop
from tornado import web
import default_gadget
# Matches packaged zip paths like ".../usb_gadget-<md5>.zip"; group 1 is the
# 32-char MD5 used as the version identifier.
VERSION_PATTERN = re.compile(r'.*usb_gadget-([a-z0-9]{32})\.zip')

# Module-level server state, populated by the launcher before serving.
address = None      # presumably this server's reachable address -- TODO confirm
chip = None         # gadget controller interface (Create/Destroy/IsConfigured)
claimed_by = None   # session id currently holding the device, or None
default = default_gadget.DefaultGadget()
gadget = None       # currently active gadget configuration
hardware = None
interface = None
port = None
def SwitchGadget(new_gadget):
    """Tear down the currently configured gadget (if any) and activate
    *new_gadget* on the controller chip."""
    global gadget
    if chip.IsConfigured():
        chip.Destroy()

    gadget = new_gadget
    gadget.AddStringDescriptor(3, address)
    chip.Create(gadget)
class VersionHandler(web.RequestHandler):
    """Reports the running package version (MD5 from the zip path on
    sys.path) or 'unpackaged' when not running from a package."""

    def get(self):
        version = 'unpackaged'
        for entry in sys.path:
            found = VERSION_PATTERN.match(entry)
            if found:
                version = found.group(1)
                break
        self.write(version)
class UpdateHandler(web.RequestHandler):
    """Accepts an uploaded usb_gadget package, verifies its MD5 against the
    filename, and hands the HTTP port and gadget state over to the new
    package; restarts the old server if the new one dies."""

    def post(self):
        fileinfo = self.request.files['file'][0]

        match = VERSION_PATTERN.match(fileinfo['filename'])
        if match is None:
            self.write('Filename must contain MD5 hash.')
            self.set_status(400)
            return

        # The filename-embedded hash must match the actual body digest.
        content = fileinfo['body']
        md5sum = hashlib.md5(content).hexdigest()
        if md5sum != match.group(1):
            self.write('File hash does not match.')
            self.set_status(400)
            return

        filename = 'usb_gadget-{}.zip'.format(md5sum)
        with open(filename, 'wb') as f:
            f.write(content)

        args = ['/usr/bin/python', filename,
                '--interface', interface,
                '--port', str(port),
                '--hardware', hardware]
        if claimed_by is not None:
            args.extend(['--start-claimed', claimed_by])

        print 'Reloading with version {}...'.format(md5sum)

        # Release the port and the gadget before launching the replacement.
        global http_server
        if chip.IsConfigured():
            chip.Destroy()
        http_server.stop()

        child = subprocess.Popen(args, close_fds=True)

        while True:
            child.poll()
            if child.returncode is not None:
                # Replacement died: report failure and resume serving ourselves.
                self.write('New package exited with error {}.'
                           .format(child.returncode))
                self.set_status(500)
                http_server = httpserver.HTTPServer(app)
                http_server.listen(port)
                SwitchGadget(gadget)
                return

            try:
                # Poll the replacement server until it answers on /version.
                f = urllib2.urlopen('http://{}/version'.format(address))
                if f.getcode() == 200:
                    # Update complete, wait 1 second to make sure buffers are flushed.
                    io_loop = ioloop.IOLoop.instance()
                    io_loop.add_timeout(datetime.timedelta(seconds=1), io_loop.stop)
                    return
            except urllib2.URLError:
                pass

            time.sleep(0.1)
class ClaimHandler(web.RequestHandler):
    """Claims the device for a session; responds 403 if already claimed."""

    def post(self):
        global claimed_by
        if claimed_by is not None:
            self.write('Device is already claimed by "{}".'.format(claimed_by))
            self.set_status(403)
            return
        claimed_by = self.get_argument('session_id')
class UnclaimHandler(web.RequestHandler):
    """Releases any claim and restores the default gadget if needed."""

    def post(self):
        global claimed_by
        claimed_by = None
        if gadget != default:
            SwitchGadget(default)
class UnconfigureHandler(web.RequestHandler):
    """Reverts the device to the default gadget configuration."""

    def post(self):
        SwitchGadget(default)
class DisconnectHandler(web.RequestHandler):
    """Detaches the gadget from the bus if it is currently configured."""

    def post(self):
        if chip.IsConfigured():
            chip.Destroy()
class ReconnectHandler(web.RequestHandler):
    """Re-attaches the current gadget if it is not configured."""

    def post(self):
        if not chip.IsConfigured():
            chip.Create(gadget)
# URL routing table for the gadget-management HTTP API.
app = web.Application([
    (r'/version', VersionHandler),
    (r'/update', UpdateHandler),
    (r'/claim', ClaimHandler),
    (r'/unclaim', UnclaimHandler),
    (r'/unconfigure', UnconfigureHandler),
    (r'/disconnect', DisconnectHandler),
    (r'/reconnect', ReconnectHandler),
])

# Created at import time; (re)listened on by the launcher / UpdateHandler.
http_server = httpserver.HTTPServer(app)
|
mm1ke/portage
|
refs/heads/master
|
pym/portage/_emirrordist/main.py
|
6
|
# Copyright 2013-2015 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
import argparse
import logging
import sys
import portage
from portage import os
from portage.util import normalize_path, writemsg_level, _recursive_file_list
from portage.util._async.run_main_scheduler import run_main_scheduler
from portage.util._async.SchedulerInterface import SchedulerInterface
from portage.util._eventloop.global_event_loop import global_event_loop
from .Config import Config
from .MirrorDistTask import MirrorDistTask
if sys.hexversion >= 0x3000000:
    # pylint: disable=W0622
    # Python 3 has no `long`; alias it for code still referencing it.
    long = int

# Number of seconds in one day, used for deletion-delay defaults.
seconds_per_day = 24 * 60 * 60

# Option descriptors shared by parse_args(): each dict supplies the flag
# names and add_argument() keyword arguments for one common option.
# Fix: user-facing typo "occurences" -> "occurrences" in the --verbose help.
common_options = (
    {
        "longopt" : "--dry-run",
        "help" : "perform a trial run with no changes made (usually combined "
            "with --verbose)",
        "action" : "store_true"
    },
    {
        "longopt" : "--verbose",
        "shortopt" : "-v",
        "help" : "display extra information on stderr "
            "(multiple occurrences increase verbosity)",
        "action" : "count",
        "default" : 0,
    },
    {
        "longopt" : "--ignore-default-opts",
        "help" : "do not use the EMIRRORDIST_DEFAULT_OPTS environment variable",
        "action" : "store_true"
    },
    {
        "longopt" : "--distfiles",
        "help" : "distfiles directory to use (required)",
        "metavar" : "DIR"
    },
    {
        "longopt" : "--jobs",
        "shortopt" : "-j",
        "help" : "number of concurrent jobs to run",
        "type" : int
    },
    {
        "longopt" : "--load-average",
        "shortopt" : "-l",
        "help" : "load average limit for spawning of new concurrent jobs",
        "metavar" : "LOAD",
        "type" : float
    },
    {
        "longopt" : "--tries",
        "help" : "maximum number of tries per file, 0 means unlimited (default is 10)",
        "default" : 10,
        "type" : int
    },
    {
        "longopt" : "--repo",
        "help" : "name of repo to operate on"
    },
    {
        "longopt" : "--config-root",
        "help" : "location of portage config files",
        "metavar" : "DIR"
    },
    {
        "longopt" : "--repositories-configuration",
        "help" : "override configuration of repositories (in format of repos.conf)"
    },
    {
        "longopt" : "--strict-manifests",
        "help" : "manually override \"strict\" FEATURES setting",
        "choices" : ("y", "n"),
        "metavar" : "<y|n>",
    },
    {
        "longopt" : "--failure-log",
        "help" : "log file for fetch failures, with tab-delimited "
            "output, for reporting purposes",
        "metavar" : "FILE"
    },
    {
        "longopt" : "--success-log",
        "help" : "log file for fetch successes, with tab-delimited "
            "output, for reporting purposes",
        "metavar" : "FILE"
    },
    {
        "longopt" : "--scheduled-deletion-log",
        "help" : "log file for scheduled deletions, with tab-delimited "
            "output, for reporting purposes",
        "metavar" : "FILE"
    },
    {
        "longopt" : "--delete",
        "help" : "enable deletion of unused distfiles",
        "action" : "store_true"
    },
    {
        "longopt" : "--deletion-db",
        "help" : "database file used to track lifetime of files "
            "scheduled for delayed deletion",
        "metavar" : "FILE"
    },
    {
        "longopt" : "--deletion-delay",
        "help" : "delay time for deletion, measured in seconds",
        "metavar" : "SECONDS"
    },
    {
        "longopt" : "--temp-dir",
        "help" : "temporary directory for downloads",
        "metavar" : "DIR"
    },
    {
        "longopt" : "--mirror-overrides",
        "help" : "file holding a list of mirror overrides",
        "metavar" : "FILE"
    },
    {
        "longopt" : "--mirror-skip",
        "help" : "comma delimited list of mirror targets to skip "
            "when fetching"
    },
    {
        "longopt" : "--restrict-mirror-exemptions",
        "help" : "comma delimited list of mirror targets for which to "
            "ignore RESTRICT=\"mirror\""
    },
    {
        "longopt" : "--verify-existing-digest",
        "help" : "use digest as a verification of whether existing "
            "distfiles are valid",
        "action" : "store_true"
    },
    {
        "longopt" : "--distfiles-local",
        "help" : "distfiles-local directory to use",
        "metavar" : "DIR"
    },
    {
        "longopt" : "--distfiles-db",
        "help" : "database file used to track which ebuilds a "
            "distfile belongs to",
        "metavar" : "FILE"
    },
    {
        "longopt" : "--recycle-dir",
        "help" : "directory for extended retention of files that "
            "are removed from distdir with the --delete option",
        "metavar" : "DIR"
    },
    {
        "longopt" : "--recycle-db",
        "help" : "database file used to track lifetime of files "
            "in recycle dir",
        "metavar" : "FILE"
    },
    {
        "longopt" : "--recycle-deletion-delay",
        "help" : "delay time for deletion of unused files from "
            "recycle dir, measured in seconds (defaults to "
            "the equivalent of 60 days)",
        "default" : 60 * seconds_per_day,
        "metavar" : "SECONDS",
        "type" : int
    },
    {
        "longopt" : "--fetch-log-dir",
        "help" : "directory for individual fetch logs",
        "metavar" : "DIR"
    },
    {
        "longopt" : "--whitelist-from",
        "help" : "specifies a file containing a list of files to "
            "whitelist, one per line, # prefixed lines ignored",
        "action" : "append",
        "metavar" : "FILE"
    },
)
def parse_args(args):
	"""Build the emirrordist argument parser and parse *args*.

	Unknown arguments are tolerated (parse_known_args) so that
	EMIRRORDIST_DEFAULT_OPTS can be re-parsed later together with the
	original command line.

	Returns a (parser, options, remaining_args) tuple.
	"""
	parser = argparse.ArgumentParser(
		description="emirrordist - a fetch tool for mirroring "
		"of package distfiles",
		usage="emirrordist [options] <action>")

	action_group = parser.add_argument_group('Actions')
	action_group.add_argument("--version",
		action="store_true",
		help="display portage version and exit")
	action_group.add_argument("--mirror",
		action="store_true",
		help="mirror distfiles for the selected repository")

	common_group = parser.add_argument_group('Common options')
	for info in common_options:
		pargs = [info["longopt"]]
		shortopt = info.get("shortopt")
		if shortopt:
			pargs.append(shortopt)
		# Forward only the argparse keywords that this option declares;
		# "help" is always present in the option tables.
		kwargs = {key: info[key]
			for key in ("help", "action", "choices", "default", "metavar", "type")
			if key in info}
		common_group.add_argument(*pargs, **kwargs)

	options, args = parser.parse_known_args(args)
	return (parser, options, args)
def emirrordist_main(args):
	"""Main entry point for emirrordist.

	Parses and validates the command line, builds a locked portage
	configuration restricted to a single repository, and runs the
	mirror scheduler.  Returns an exit status (os.EX_OK on success);
	may also exit via parser.error() or sys.exit().
	"""
	# The calling environment is ignored, so the program is
	# completely controlled by commandline arguments.
	env = {}

	if not sys.stdout.isatty():
		portage.output.nocolor()
		env['NOCOLOR'] = 'true'

	parser, options, args = parse_args(args)

	if options.version:
		sys.stdout.write("Portage %s\n" % portage.VERSION)
		return os.EX_OK

	config_root = options.config_root

	if options.repositories_configuration is not None:
		env['PORTAGE_REPOSITORIES'] = options.repositories_configuration

	settings = portage.config(config_root=config_root,
		local_config=False, env=env)

	default_opts = None
	if not options.ignore_default_opts:
		default_opts = settings.get('EMIRRORDIST_DEFAULT_OPTS', '').split()

	if default_opts:
		# Re-parse with EMIRRORDIST_DEFAULT_OPTS prepended, then rebuild
		# settings since the defaults may change configuration options.
		parser, options, args = parse_args(default_opts + args)

		settings = portage.config(config_root=config_root,
			local_config=False, env=env)

	if options.repo is None:
		# With exactly one real repo configured (prepos holds it plus
		# the synthetic DEFAULT section), select it automatically.
		if len(settings.repositories.prepos) == 2:
			for repo in settings.repositories:
				if repo.name != "DEFAULT":
					options.repo = repo.name
					break

		if options.repo is None:
			parser.error("--repo option is required")

	repo_path = settings.repositories.treemap.get(options.repo)
	if repo_path is None:
		parser.error("Unable to locate repository named '%s'" % (options.repo,))

	if options.jobs is not None:
		options.jobs = int(options.jobs)

	if options.load_average is not None:
		options.load_average = float(options.load_average)

	# For each log option: normalize to an absolute path and verify the
	# parent directory is writable before anything is scheduled.
	if options.failure_log is not None:
		options.failure_log = normalize_path(
			os.path.abspath(options.failure_log))
		parent_dir = os.path.dirname(options.failure_log)
		if not (os.path.isdir(parent_dir) and
			os.access(parent_dir, os.W_OK|os.X_OK)):
			parser.error(("--failure-log '%s' parent is not a "
				"writable directory") % options.failure_log)

	if options.success_log is not None:
		options.success_log = normalize_path(
			os.path.abspath(options.success_log))
		parent_dir = os.path.dirname(options.success_log)
		if not (os.path.isdir(parent_dir) and
			os.access(parent_dir, os.W_OK|os.X_OK)):
			parser.error(("--success-log '%s' parent is not a "
				"writable directory") % options.success_log)

	if options.scheduled_deletion_log is not None:
		options.scheduled_deletion_log = normalize_path(
			os.path.abspath(options.scheduled_deletion_log))
		parent_dir = os.path.dirname(options.scheduled_deletion_log)
		if not (os.path.isdir(parent_dir) and
			os.access(parent_dir, os.W_OK|os.X_OK)):
			parser.error(("--scheduled-deletion-log '%s' parent is not a "
				"writable directory") % options.scheduled_deletion_log)

		if options.deletion_db is None:
			parser.error("--scheduled-deletion-log requires --deletion-db")

	# The deletion options come in interdependent pairs.
	if options.deletion_delay is not None:
		options.deletion_delay = long(options.deletion_delay)
		if options.deletion_db is None:
			parser.error("--deletion-delay requires --deletion-db")

	if options.deletion_db is not None:
		if options.deletion_delay is None:
			parser.error("--deletion-db requires --deletion-delay")
		options.deletion_db = normalize_path(
			os.path.abspath(options.deletion_db))

	if options.temp_dir is not None:
		options.temp_dir = normalize_path(
			os.path.abspath(options.temp_dir))

		if not (os.path.isdir(options.temp_dir) and
			os.access(options.temp_dir, os.W_OK|os.X_OK)):
			parser.error(("--temp-dir '%s' is not a "
				"writable directory") % options.temp_dir)

	if options.distfiles is not None:
		options.distfiles = normalize_path(
			os.path.abspath(options.distfiles))

		if not (os.path.isdir(options.distfiles) and
			os.access(options.distfiles, os.W_OK|os.X_OK)):
			parser.error(("--distfiles '%s' is not a "
				"writable directory") % options.distfiles)
	else:
		parser.error("missing required --distfiles parameter")

	if options.mirror_overrides is not None:
		options.mirror_overrides = normalize_path(
			os.path.abspath(options.mirror_overrides))

		if not (os.access(options.mirror_overrides, os.R_OK) and
			os.path.isfile(options.mirror_overrides)):
			parser.error(
				"--mirror-overrides-file '%s' is not a readable file" %
				options.mirror_overrides)

	if options.distfiles_local is not None:
		options.distfiles_local = normalize_path(
			os.path.abspath(options.distfiles_local))

		if not (os.path.isdir(options.distfiles_local) and
			os.access(options.distfiles_local, os.W_OK|os.X_OK)):
			parser.error(("--distfiles-local '%s' is not a "
				"writable directory") % options.distfiles_local)

	if options.distfiles_db is not None:
		options.distfiles_db = normalize_path(
			os.path.abspath(options.distfiles_db))

	if options.tries is not None:
		options.tries = int(options.tries)

	if options.recycle_dir is not None:
		options.recycle_dir = normalize_path(
			os.path.abspath(options.recycle_dir))
		if not (os.path.isdir(options.recycle_dir) and
			os.access(options.recycle_dir, os.W_OK|os.X_OK)):
			parser.error(("--recycle-dir '%s' is not a "
				"writable directory") % options.recycle_dir)

	if options.recycle_db is not None:
		if options.recycle_dir is None:
			parser.error("--recycle-db requires "
				"--recycle-dir to be specified")
		options.recycle_db = normalize_path(
			os.path.abspath(options.recycle_db))

	if options.recycle_deletion_delay is not None:
		options.recycle_deletion_delay = \
			long(options.recycle_deletion_delay)

	if options.fetch_log_dir is not None:
		options.fetch_log_dir = normalize_path(
			os.path.abspath(options.fetch_log_dir))

		if not (os.path.isdir(options.fetch_log_dir) and
			os.access(options.fetch_log_dir, os.W_OK|os.X_OK)):
			parser.error(("--fetch-log-dir '%s' is not a "
				"writable directory") % options.fetch_log_dir)

	if options.whitelist_from:
		# Expand every --whitelist-from argument: plain files are taken
		# as-is, directories are walked recursively; everything must be
		# readable.
		normalized_paths = []
		for x in options.whitelist_from:
			path = normalize_path(os.path.abspath(x))
			if not os.access(path, os.R_OK):
				parser.error("--whitelist-from '%s' is not readable" % x)
			if os.path.isfile(path):
				normalized_paths.append(path)
			elif os.path.isdir(path):
				for file in _recursive_file_list(path):
					if not os.access(file, os.R_OK):
						parser.error("--whitelist-from '%s' directory contains not readable file '%s'" % (x, file))
					normalized_paths.append(file)
			else:
				parser.error("--whitelist-from '%s' is not a regular file or a directory" % x)
		options.whitelist_from = normalized_paths

	if options.strict_manifests is not None:
		if options.strict_manifests == "y":
			settings.features.add("strict")
		else:
			settings.features.discard("strict")

	settings.lock()

	portdb = portage.portdbapi(mysettings=settings)

	# Limit ebuilds to the specified repository.
	portdb.porttrees = [repo_path]

	portage.util.initialize_logger()

	# Every -v on the command line lowers the log threshold by one level.
	if options.verbose > 0:
		l = logging.getLogger()
		l.setLevel(l.getEffectiveLevel() - 10 * options.verbose)

	with Config(options, portdb,
		SchedulerInterface(global_event_loop())) as config:

		if not options.mirror:
			parser.error('No action specified')

		returncode = os.EX_OK

		if options.mirror:
			signum = run_main_scheduler(MirrorDistTask(config))
			if signum is not None:
				sys.exit(128 + signum)

	return returncode
|
Qwertycal/19520-Eye-Tracker
|
refs/heads/master
|
Accuracy/imgThreshold.py
|
1
|
#author: Nadezhda Shivarova
#date created: 19/03/16
#Description: Combining thresholding and math morphology into
#a single function to use within main()
import numpy as np
import math
import cv2
#import removeOutliersThresh as outliers
import bi_level_img_threshold as thresh
from matplotlib import pyplot as plt
def imgThreshold(frame):
    """Threshold a BGR camera frame to isolate the pupil and the glint.

    Returns a (threshPupil, threshGlint) pair of binary images:
    threshPupil from an adaptive bi-level threshold on the morphologically
    opened greyscale frame, threshGlint from a fixed inverse threshold at
    level 200 on the raw greyscale frame.
    """
    # Convert to greyscale frame
    frame_gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
    cv2.imshow('original', frame_gray)

    # Morphological opening with a 25x25 elliptical (disk) structuring
    # element to remove the glint before pupil thresholding.
    struct_el = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (25, 25))
    frame_open = cv2.morphologyEx(frame_gray, cv2.MORPH_OPEN, struct_el)
    cv2.imshow('open', frame_open)

    # Histogram via bincount: more efficient than calcHist and
    # eliminates the memory error seen with it.
    hist_img = np.bincount(frame_gray.flatten())
    print('len_hist', len(hist_img))

    # Drop only the first and last bins (bin 0 and the top bin, i.e.
    # 255 for a full-range frame).  Fix: the previous slice
    # [1:len(hist_img) - 2] also discarded the second-to-last bin,
    # contrary to the stated intent of removing just bin 0 and bin 255.
    hist_img = hist_img[1:-1]

    # Adaptive bi-level threshold level from the truncated histogram;
    # fall back to 45 when the estimate is implausibly high.
    threshLevel = thresh.bi_level_img_threshold(hist_img)
    if threshLevel > 100:
        threshLevel = 45
    print("thresh level %d " % threshLevel)

    # Pupil mask: binary threshold of the opened frame at the adaptive level.
    ret, threshPupil = cv2.threshold(frame_open, threshLevel, 255, cv2.THRESH_BINARY)
    cv2.imshow('thresh pupil', threshPupil)

    # Glint mask: inverse binary threshold of the raw greyscale frame at 200.
    ret, threshGlint = cv2.threshold(frame_gray, 200, 255, cv2.THRESH_BINARY_INV)
    cv2.imshow('thresh glint', threshGlint)

    return threshPupil, threshGlint
|
uranusjr/django
|
refs/heads/master
|
tests/custom_columns/tests.py
|
40
|
from django.core.exceptions import FieldError
from django.test import TestCase
from .models import Article, Author
class CustomColumnsTests(TestCase):
    """Tests exercising Author/Article lookups, related managers and
    field-error messages.

    NOTE(review): several of the later tests duplicate earlier ones
    using the historical repr-string form of assertQuerysetEqual (no
    transform argument); both variants are preserved intact.
    """

    def setUp(self):
        # Two authors, one article written by a1 with both as m2m authors.
        self.a1 = Author.objects.create(first_name="John", last_name="Smith")
        self.a2 = Author.objects.create(first_name="Peter", last_name="Jones")
        self.authors = [self.a1, self.a2]

        self.article = Article.objects.create(headline="Django lets you build Web apps easily", primary_author=self.a1)
        self.article.authors.set(self.authors)

    def test_query_all_available_authors(self):
        self.assertQuerysetEqual(
            Author.objects.all(), [
                "Peter Jones", "John Smith",
            ],
            str
        )

    def test_get_first_name(self):
        self.assertEqual(
            Author.objects.get(first_name__exact="John"),
            self.a1,
        )

    def test_filter_first_name(self):
        self.assertQuerysetEqual(
            Author.objects.filter(first_name__exact="John"), [
                "John Smith",
            ],
            str
        )

    def test_field_error(self):
        # Lookups must use the Python field name; the error lists the
        # valid choices (note the custom "Author_ID" column/field name).
        msg = (
            "Cannot resolve keyword 'firstname' into field. Choices are: "
            "Author_ID, article, first_name, last_name, primary_set"
        )
        with self.assertRaisesMessage(FieldError, msg):
            Author.objects.filter(firstname__exact="John")

    def test_attribute_error(self):
        with self.assertRaises(AttributeError):
            self.a1.firstname

        with self.assertRaises(AttributeError):
            self.a1.last

    def test_get_all_authors_for_an_article(self):
        self.assertQuerysetEqual(
            self.article.authors.all(), [
                "Peter Jones",
                "John Smith",
            ],
            str
        )

    def test_get_all_articles_for_an_author(self):
        self.assertQuerysetEqual(
            self.a1.article_set.all(), [
                "Django lets you build Web apps easily",
            ],
            lambda a: a.headline
        )

    def test_get_author_m2m_relation(self):
        self.assertQuerysetEqual(
            self.article.authors.filter(last_name='Jones'), [
                "Peter Jones"
            ],
            str
        )

    def test_author_querying(self):
        self.assertQuerysetEqual(
            Author.objects.all().order_by('last_name'),
            ['<Author: Peter Jones>', '<Author: John Smith>']
        )

    def test_author_filtering(self):
        self.assertQuerysetEqual(
            Author.objects.filter(first_name__exact='John'),
            ['<Author: John Smith>']
        )

    def test_author_get(self):
        self.assertEqual(self.a1, Author.objects.get(first_name__exact='John'))

    def test_filter_on_nonexistent_field(self):
        msg = (
            "Cannot resolve keyword 'firstname' into field. Choices are: "
            "Author_ID, article, first_name, last_name, primary_set"
        )
        with self.assertRaisesMessage(FieldError, msg):
            Author.objects.filter(firstname__exact='John')

    def test_author_get_attributes(self):
        a = Author.objects.get(last_name__exact='Smith')
        self.assertEqual('John', a.first_name)
        self.assertEqual('Smith', a.last_name)
        with self.assertRaisesMessage(AttributeError, "'Author' object has no attribute 'firstname'"):
            getattr(a, 'firstname')

        with self.assertRaisesMessage(AttributeError, "'Author' object has no attribute 'last'"):
            getattr(a, 'last')

    def test_m2m_table(self):
        self.assertQuerysetEqual(
            self.article.authors.all().order_by('last_name'),
            ['<Author: Peter Jones>', '<Author: John Smith>']
        )
        self.assertQuerysetEqual(
            self.a1.article_set.all(),
            ['<Article: Django lets you build Web apps easily>']
        )
        self.assertQuerysetEqual(
            self.article.authors.filter(last_name='Jones'),
            ['<Author: Peter Jones>']
        )
|
sdeleeuw/mediocris
|
refs/heads/master
|
project/settings_docker.py
|
1
|
from .settings_shared import *

# NOTE(review): hard-coded development secret with DEBUG enabled --
# presumably this settings module is for local container use only;
# confirm it is never deployed as-is.
SECRET_KEY = '1234567890'

DEBUG = True

ALLOWED_HOSTS = []

# Application definition

INSTALLED_APPS += [
    'corsheaders',
]

# CorsMiddleware is prepended so it runs before the shared middleware.
MIDDLEWARE = [
    'corsheaders.middleware.CorsMiddleware',
] + MIDDLEWARE

# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql',
        'NAME': 'mediocris',
        'USER': 'docker',
        'PASSWORD': 'docker',
        'HOST': 'db',  # presumably the database container's hostname
    }
}

# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/

STATIC_ROOT = '/var/www/mediocris/static'
STATIC_URL = '/static/'

MEDIA_ROOT = '/var/www/mediocris/media'
MEDIA_URL = '/media/'

# django-cors-headers: accept cross-origin requests from any origin.
CORS_ORIGIN_ALLOW_ALL = True
|
supergis/QGIS
|
refs/heads/master
|
python/plugins/processing/gui/SilentProgress.py
|
17
|
# -*- coding: utf-8 -*-
"""
***************************************************************************
SilentProgress.py
---------------------
Date : April 2013
Copyright : (C) 2013 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya'
__date__ = 'April 2013'
__copyright__ = '(C) 2013, Victor Olaya'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
class SilentProgress(object):
    """Progress reporter that swallows all feedback except errors.

    Provides the same method set as the interactive progress objects so
    callers can report unconditionally; every setter is a no-op and only
    error() produces output (printed to stdout).
    """

    def error(self, msg):
        # Errors are the one kind of message that is not silenced.
        print msg

    def setText(self, text):
        pass

    def setPercentage(self, i):
        pass

    def setInfo(self, _):
        pass

    def setCommand(self, _):
        pass

    def setDebugInfo(self, _):
        pass

    def setConsoleInfo(self, _):
        pass

    def close(self):
        pass
|
UdK-VPT/Open_eQuarter
|
refs/heads/master
|
mole3x/extensions/calc_geometry/oeq_geometry.py
|
2
|
# -*- coding: utf-8 -*-
import os
from qgis.core import NULL
from mole3 import oeq_global
from mole3.project import config
from mole3.extensions import OeQExtension
from mole3.stat_corr import common_walls_by_population_density_corr,window_wall_ratio_AVG_by_building_age_lookup
def calculation(self=None, parameters=None, feature=None):
    """Derive missing building-geometry values from the supplied subset.

    Fills in area, perimeter, side lengths, height, floors and the
    derived wall/window/roof/base/living areas, leaving unknown values
    as NULL.  Returns a dict mapping each field name to
    {'type': QVariant.Double (QVariant.Int for FLOORS), 'value': ...}.

    NOTE(review): reads parameters['PDENS'] and parameters['YOC']
    directly, which raises KeyError when the caller omits them --
    confirm callers always pass the full par_in set.
    """
    from scipy.constants import golden
    from math import floor, ceil
    from qgis.PyQt.QtCore import QVariant
    # Fix: the signature previously used a shared mutable default
    # (parameters={}).  parameters is only read here, so an explicit
    # None sentinel is behaviorally identical and backward compatible.
    if parameters is None:
        parameters = {}
    # All derivable fields start as NULL and are overridden by any
    # caller-supplied values.
    dataset = {'AREA': NULL, 'PERIMETER': NULL, 'LENGTH': NULL, 'WIDTH': NULL, 'HEIGHT': NULL, 'FLOORS': NULL,
               'WN_RAT': NULL, 'WL_COM': NULL, 'BS_AR': NULL, 'WL_AR': NULL, 'WN_AR': NULL, 'RF_AR': NULL, 'LIV_AR': NULL}
    dataset.update(parameters)
    if (not oeq_global.isnull(dataset['AREA'])):
        if (not oeq_global.isnull(dataset['PERIMETER'])):
            if oeq_global.isnull(dataset['LENGTH']):
                # Side length solves x^2 + p*x + q = 0 with p = -P/2,
                # q = A; fall back to P/8 when the discriminant is negative.
                p = -float(dataset['PERIMETER'] / 2.0)
                q = float(dataset['AREA'])
                if ((p / 2) ** 2) > q:
                    dataset['LENGTH'] = -p / 2 + ((((p / 2) ** 2) - q) ** 0.5)
                else:
                    dataset['LENGTH'] = -p / 4
                dataset['WIDTH'] = float(dataset['AREA']) / float(dataset['LENGTH'])
                # Normalize so LENGTH is always the longer side.
                l_max = max(dataset['WIDTH'], dataset['LENGTH'])
                l_min = min(dataset['WIDTH'], dataset['LENGTH'])
                dataset['WIDTH'] = l_min
                dataset['LENGTH'] = l_max
        else:
            # Area known, perimeter unknown: assume a golden-ratio footprint.
            if oeq_global.isnull(dataset['WIDTH']):
                if oeq_global.isnull(dataset['LENGTH']):
                    dataset['LENGTH'] = (float(dataset['AREA']) / golden) ** 0.5
                    dataset['WIDTH'] = float(dataset['AREA']) / dataset['LENGTH']
            else:
                dataset['LENGTH'] = float(dataset['AREA']) / dataset['WIDTH']
            l_max = max(dataset['WIDTH'], dataset['LENGTH'])
            l_min = min(dataset['WIDTH'], dataset['LENGTH'])
            dataset['WIDTH'] = l_min
            dataset['LENGTH'] = l_max
            dataset['PERIMETER'] = 2 * (dataset['WIDTH'] + dataset['LENGTH'])
    else:
        if (not oeq_global.isnull(dataset['PERIMETER'])):
            # Perimeter known, area unknown: golden-ratio assumption again.
            if oeq_global.isnull(dataset['WIDTH']):
                if oeq_global.isnull(dataset['LENGTH']):
                    dataset['LENGTH'] = float(dataset['PERIMETER']) / (2 + 2 * golden)
                    dataset['WIDTH'] = float(dataset['AREA']) / dataset['LENGTH']
            else:
                dataset['LENGTH'] = float(dataset['AREA']) / dataset['WIDTH']
            l_max = max(dataset['WIDTH'], dataset['LENGTH'])
            l_min = min(dataset['WIDTH'], dataset['LENGTH'])
            dataset['WIDTH'] = l_min
            dataset['LENGTH'] = l_max
            dataset['AREA'] = dataset['WIDTH'] * dataset['LENGTH']
    if oeq_global.isnull(dataset['FLOORS']):
        if (not oeq_global.isnull(dataset['HEIGHT'])):
            # Assume roughly 3.3 m per storey.
            dataset['FLOORS'] = floor(dataset['HEIGHT'] / 3.3)
        else:
            if (not oeq_global.isnull(parameters['PDENS'])):
                dataset['FLOORS'] = ceil(float(parameters['PDENS'] / 4000))
                dataset['HEIGHT'] = dataset['FLOORS'] * 3.3
    else:
        if (oeq_global.isnull(dataset['HEIGHT'])):
            dataset['HEIGHT'] = dataset['FLOORS'] * 3.3
    # Window/wall ratio and shared-wall count come from the statistical
    # lookup/correlation tables imported at module level.
    if oeq_global.isnull(dataset['WN_RAT']) & (not oeq_global.isnull(parameters['YOC'])):
        dataset['WN_RAT'] = window_wall_ratio_AVG_by_building_age_lookup.get(parameters['YOC'])
    if oeq_global.isnull(dataset['WL_COM']) & (not oeq_global.isnull(parameters['PDENS'])):
        dataset['WL_COM'] = common_walls_by_population_density_corr.get(parameters['PDENS'])
    if oeq_global.isnull(dataset['BS_AR']) & (not oeq_global.isnull(dataset['AREA'])):
        dataset['BS_AR'] = dataset['AREA']
    if oeq_global.isnull(dataset['WL_AR']) & (not oeq_global.isnull(dataset['PERIMETER'])) & (not oeq_global.isnull(dataset['WL_COM'])) & (not oeq_global.isnull(dataset['WIDTH'])) & (not oeq_global.isnull(dataset['WN_RAT'])):
        dataset['WL_AR'] = (dataset['PERIMETER'] - dataset['WL_COM'] * dataset['WIDTH']) * dataset['HEIGHT'] * (1 - dataset['WN_RAT'])
    if oeq_global.isnull(dataset['RF_AR']):
        dataset['RF_AR'] = dataset['AREA']
    if oeq_global.isnull(dataset['WN_AR']) & (not oeq_global.isnull(dataset['PERIMETER'])) & (not oeq_global.isnull(dataset['WL_COM'])) & (not oeq_global.isnull(dataset['WIDTH'])) & (not oeq_global.isnull(dataset['WN_RAT'])):
        dataset['WN_AR'] = (dataset['PERIMETER'] - dataset['WL_COM'] * dataset['WIDTH']) * dataset['HEIGHT'] * dataset['WN_RAT']
    if not oeq_global.isnull([dataset['AREA'], dataset['FLOORS']]):
        # 80% of gross floor area is counted as living area.
        dataset['LIV_AR'] = float(dataset['AREA']) * float(dataset['FLOORS']) * 0.8
    # Package every field for the QGIS attribute table; FLOORS is the
    # only integer-typed field.
    result = {}
    for i in list(dataset.keys()):
        result.update({i: {'type': QVariant.Double,
                           'value': dataset[i]}})
    result['FLOORS']['type'] = QVariant.Int
    return result
# Register this module as an Open eQuarter evaluation extension; the
# geometry fields are computed per feature by calculation() above,
# reading from and writing to the configured data layer.
extension = OeQExtension(
    extension_id=__name__,
    category='Evaluation',
    subcategory='Geometry',
    extension_name='Building Dimensions',
    layer_name= 'Dimensions',
    field_id='DIM',
    source_type='none',
    par_in=['AREA', 'PERIMETER', 'LENGTH', 'WIDTH', 'HEIGHT', 'FLOORS', 'PDENS','YOC',config.building_id_key],
    sourcelayer_name=config.data_layer_name,
    targetlayer_name=config.data_layer_name,
    active=False,
    description='Calculate the Building dimensions from scratch',
    extension_filepath=os.path.join(__file__),
    colortable = os.path.join(os.path.splitext(__file__)[0] + '.qml'),
    evaluation_method=calculation)

extension.registerExtension(default=True)
|
vmthunder/packages
|
refs/heads/master
|
volt/volt/__init__.py
|
3
|
# -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pbr.version

# Package version string is derived from packaging metadata via pbr.
__version__ = pbr.version.VersionInfo('volt').version_string()
|
jeffchau1979/pyFormUI
|
refs/heads/master
|
Demo/CustomControl.py
|
1
|
#!/usr/bin/env python
## Copyright 2012, En7788.com, Inc. All rights reserved.
##
## FormUI is a easy used GUI framwork for python, which is based on wxpython.
## FormUI is a free software: you can redistribute it and/or modify it under
## the terms of version 3 of the GNU Lesser General Public License as
## published by the Free Software Foundation.
##
## You should have received a copy of the GNU Lesser General Public License
## along with AndBug. If not, see <http://www.gnu.org/licenses/>.
import sys
sys.path.append('../')
from FormUI import *
# Load the form layout from its XML definition.
builder = Builder()
builder.loadLayout('customcontrol.xml')
#Setup Handler
def OkButtonHandler(windowHandler, handlerPara):
    """OK-button handler: dump the form values, then exercise the custom
    control (highlight, disable, and exchange messages with it)."""
    print handlerPara.valueList
    windowHandler.highlightItem('id_custom_ctrl')
    windowHandler.enableCtrl('id_custom_ctrl',False)
    # Round-trip messages through the custom control's onMessage().
    print windowHandler.sendMessage('id_custom_ctrl', 'get_message', '')
    windowHandler.sendMessage('id_custom_ctrl', 'set_message', 'message_para')
    #windowHandler.closeWindow()
builder.setCtrlHandler('id_ok', OkButtonHandler)
class CustomCtrl(FormControlBase,wx.BoxSizer):
    """Example custom form control: a static 'Label:' next to a text
    field, laid out in a horizontal box sizer.

    Registered under the 'custom_ctrl' type name below; FormUI drives
    it through the FormControlBase interface (value access, enabling,
    focus, and onMessage for custom messages).
    """

    def __init__(self, item, parent, windowControl):
        wx.BoxSizer.__init__(self, wx.HORIZONTAL)
        FormControlBase.__init__(self, item, parent)
        self.staticText = wx.StaticText(id=wx.NewId(),
            label='Label:', name='', parent=parent,
            pos=wx.Point(0, 0), size=wx.Size(-1, -1),
            style=0)
        self.Add(self.staticText, 0, wx.ALL)
        # Build the wx.TextCtrl keyword set from the layout item's
        # attributes; optional flags come from the XML description.
        para = FormControlUtil.makeCommonPara(item,parent)
        if 'multi_line' in item.keys() and getItemValue(item, 'multi_line') == 'true':
            para['style'] = para['style'] | wx.TE_MULTILINE
        if 'password' in item.keys() and getItemValue(item, 'password') == 'true':
            para['style'] = para['style'] | wx.TE_PASSWORD
        para['value'] = BuilderUtil.getItemValue(item, 'value', '')
        self.m_textCtrl = wx.TextCtrl(**para)
        self.Add(self.m_textCtrl, 1, wx.ALL | wx.EXPAND)

    def GetValue(self):
        # The control's value is the text field's content.
        return self.m_textCtrl.GetValue()

    def SetValue(self,value):
        self.m_textCtrl.SetValue(value)

    def Enable(self,bEnable):
        self.m_textCtrl.Enable(bEnable)

    def SetFocus(self):
        self.m_textCtrl.SetFocus()

    def onMessage(self, messageId, messagePara):
        # Custom message hook used via windowHandler.sendMessage().
        # NOTE(review): reads self.item['control'] here rather than
        # self.m_textCtrl -- presumably the framework stores the control
        # instance back into the item dict; confirm in FormControlBase.
        if messageId == 'get_message':
            return "message:" + self.item['control'].GetValue()
        elif messageId == "set_message":
            self.item['control'].SetValue(messageId + ":" + messagePara)
        return None
# Make the 'custom_ctrl' layout type resolve to CustomCtrl, then run the form.
builder.registControlType('custom_ctrl', CustomCtrl)

formUI = FormUI(builder)
formUI.show()
|
edvardlindelof/ERICA-prediction
|
refs/heads/master
|
models/QLasso/train_and_export_model.py
|
1
|
# -*- coding: utf-8 -*-
import tensorflow as tf
from tensorflow.contrib import learn
from tensorflow.contrib.learn import LinearRegressor
from tensorflow.contrib import layers
from tensorflow.contrib.learn.python.learn.utils import input_fn_utils
tf.logging.set_verbosity(tf.logging.INFO)
import pandas as pd
import numpy as np
from time_of_week_feature import to_time_of_week_feature, to_time_of_week_feature_np
# TODO plain model stuff into own file
def model(features, targets, mode):
    """Linear-regression model_fn for tf.contrib.learn.Estimator.

    Stacks all feature columns into one matrix, predicts y = W*X + b,
    and trains with plain SGD on MSE.  The L1 (lasso) penalty is
    currently disabled (kept below as commented-out experiments).
    """
    W = tf.get_variable("W", [1, len(features)])
    b = tf.get_variable("b", [1])
    X = [features[key] for key in features]
    y = tf.reshape(tf.matmul(W, X) + b, [-1])

    #un_penaltied_loss = tf.reduce_mean(tf.square(y - targets))
    un_penaltied_loss = tf.losses.mean_squared_error(targets, y)
    #loss = un_penaltied_loss + 100.0 * tf.norm(W, ord=1) # TODO penalty hyperparameter
    loss = un_penaltied_loss

    global_step = tf.train.get_global_step()
    optimizer = tf.train.GradientDescentOptimizer(0.01)
    # Bump the global step manually alongside the gradient update.
    train = tf.group(optimizer.minimize(loss), tf.assign_add(global_step, 1))

    # Targets are in seconds; dividing MSE by 3600 gives a minutes-scale
    # summary value for monitoring.
    mse_minutes = tf.div(un_penaltied_loss, 3600, name="mse_minutes")
    tf.summary.scalar("mse_in_minutes", mse_minutes)

    # Count weights whose magnitude exceeds 0.3 as "non-zero" for the
    # sparsity summary (an exact != 0 test is kept commented out).
    zero = tf.constant(0, dtype=tf.float32)
    #non_zero_weights = tf.not_equal(W, zero)
    non_zero_weights = tf.greater(tf.abs(W), zero + 0.3)
    n_non_zero_weights = tf.reduce_sum(tf.cast(non_zero_weights, tf.float32), name="n_non_zero_weights")
    tf.summary.scalar("non-zero_weights", n_non_zero_weights)

    #for i in range(len(features)):
    #    tf.summary.scalar("W_element" + str(i), W[0, i])

    return tf.contrib.learn.ModelFnOps(
        mode=mode,
        predictions=y,
        loss=loss,
        train_op=train
    )
# Feature column groups read from the CSV below.
WAIT_TIME_FEATURES = ["ttt30", "ttl30", "ttk30", "ttt60", "ttl60", "ttk60", "ttt120", "ttl120", "ttk120"]
# to be picky one should calculate e.g. untriaged = all - triaged, but regression weights can be negative so leaving it for now
WORKLOAD_FEATURES = ["UntreatedLowPrio", "all", "MEP", "triaged", "metdoctor", "done", "PRIO1", "PRIO2", "PRIO3", "PRIO4", "PRIO5"]
CAPACITY_FEATURES = ["doctors60", "teams60"]
FEATURES = WAIT_TIME_FEATURES + WORKLOAD_FEATURES + CAPACITY_FEATURES

pdframe = pd.read_csv("QLasso2017-08-04T16:02:58.809+02:00.csv")

# doing this outside of input_fn_train bc if placed in there it will be called maaaaaaany times
epoch_seconds = np.array(pdframe["epochseconds"].get_values(), dtype=np.int32)
time_of_week_feature = to_time_of_week_feature_np(epoch_seconds)
# Scale to [0, 1] by the maximum observed value.
time_of_week_feature = time_of_week_feature / np.max(time_of_week_feature)
def generate_Q_features(frame, workload_features, capacity_features):
    """Build queue-style ratio features: one column per
    (workload, capacity) pair, keyed "workload/capacity".

    Capacities below 0.99 are lifted by 0.5 so the division can never
    hit zero.
    """
    Q_features = {}
    for load_name in workload_features:
        for cap_name in capacity_features:
            load_col = tf.constant(frame[load_name].get_values(), dtype=tf.float32)
            cap_col = tf.constant(frame[cap_name].get_values(), dtype=tf.float32)
            # 1.0 wherever the capacity is (near) zero, else 0.0.
            near_zero = tf.cast(tf.less(cap_col, 0.99), tf.float32)
            safe_cap = cap_col + 0.5 * near_zero * tf.ones_like(cap_col)
            Q_features[load_name + "/" + cap_name] = load_col / safe_cap
    return Q_features
def input_fn_train():
    """Training input_fn: build the (feature_cols, outputs) pair.

    Every feature column is max-normalized; the target is the raw
    time-to-doctor of the next low-priority patient (seconds).
    """
    feature_cols = {}

    ttl_next_low_prio_patient = tf.constant(pdframe["TTLOfNextPatient"].get_values(), dtype=tf.float32)
    # NOTE(review): epoch_seconds is currently unused -- the in-graph
    # time-of-week computation below is disabled in favor of the
    # precomputed module-level time_of_week_feature.
    epoch_seconds = tf.constant(pdframe["epochseconds"].get_values(), dtype=tf.int32)

    # slows down step time with a factor of about 5 despite only needing to be called once..
    #time_of_week_feature = to_time_of_week_feature(epoch_seconds)
    #feature_cols["TimeOfWeekFeature"] = time_of_week_feature / tf.reduce_max(time_of_week_feature)
    feature_cols["TimeOfWeekFeature"] = tf.constant(time_of_week_feature)

    untreated_low_prio_col = tf.constant(pdframe["UntreatedLowPrio"].get_values(), dtype=tf.float32)
    feature_cols["UntreatedLowPrio"] = untreated_low_prio_col / tf.reduce_max(untreated_low_prio_col) # normalization

    for workload in WORKLOAD_FEATURES:
        load = tf.constant(pdframe[workload].get_values(), dtype=tf.float32)
        feature_cols[workload] = load / tf.reduce_max(load)

    for feature in WAIT_TIME_FEATURES:
        col = tf.constant(pdframe[feature].get_values(), dtype=tf.float32)
        feature_cols[feature] = col / tf.reduce_max(col) # normalization

    # Workload/capacity ratio features, also max-normalized.
    Q_features = generate_Q_features(pdframe, WORKLOAD_FEATURES, CAPACITY_FEATURES)
    for key in Q_features:
        col = Q_features[key]
        feature_cols[key] = col / tf.reduce_max(col) # normalization

    outputs = ttl_next_low_prio_patient
    #outputs = outputs / tf.reduce_max(outputs)

    return feature_cols, outputs
# Train the custom linear model; the PrintTensor monitor logs the
# sparsity count, minutes-scale MSE and the weight vector during fit.
regressor = learn.Estimator(model_fn=model, model_dir="./modeldir")
print_tensor = learn.monitors.PrintTensor(["n_non_zero_weights", "mse_minutes", "W"])
regressor.fit(input_fn=input_fn_train, steps=50000, monitors=[print_tensor])
# Disabled SavedModel-export path, kept for reference as an inert string.
# NOTE(review): it references an undefined `feature_cols` at module
# scope; that would need fixing before re-enabling.
'''
def serving_input_fn():
    default_inputs = {col.name: tf.placeholder(col.dtype, [None]) for col in feature_cols}
    features = {key: tf.expand_dims(tensor, -1) for key, tensor in default_inputs.items()}
    return input_fn_utils.InputFnOps(
        features=features,
        labels=None,
        default_inputs=default_inputs
    )

regressor.export_savedmodel(
    "exportedmodel",
    serving_input_fn
)
'''
|
coxley/ansible
|
refs/heads/devel
|
lib/ansible/vars/__init__.py
|
15
|
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
from collections import defaultdict
from collections import MutableMapping
from jinja2.exceptions import UndefinedError
try:
from hashlib import sha1
except ImportError:
from sha import sha as sha1
from ansible import constants as C
from ansible.cli import CLI
from ansible.errors import AnsibleError
from ansible.parsing import DataLoader
from ansible.plugins.cache import FactCache
from ansible.template import Templar
from ansible.utils.debug import debug
from ansible.utils.vars import combine_vars
from ansible.vars.hostvars import HostVars
from ansible.vars.unsafe_proxy import UnsafeProxy
CACHED_VARS = dict()
def preprocess_vars(a):
    '''
    Ensures that vars contained in the parameter passed in are
    returned as a list of dictionaries, to ensure for instance
    that vars loaded from a file conform to an expected state.
    '''

    if a is None:
        return None

    # Normalize a single mapping into a one-element list.
    data = a if isinstance(a, list) else [ a ]

    # Every entry must be dict-like, whatever container it arrived in.
    for entry in data:
        if not isinstance(entry, MutableMapping):
            raise AnsibleError("variable files must contain either a dictionary of variables, or a list of dictionaries. Got: %s (%s)" % (a, type(a)))

    return data
class VariableManager:
    def __init__(self):
        # Facts gathered from hosts, persisted via the fact cache plugin.
        self._fact_cache = FactCache()
        # Per-host facts that must not be persisted across runs.
        self._nonpersistent_fact_cache = defaultdict(dict)
        # Per-host variables registered/set during execution.
        self._vars_cache = defaultdict(dict)
        # Variables passed on the command line (-e / --extra-vars).
        self._extra_vars = defaultdict(dict)
        # Loaded host_vars/group_vars file contents, keyed per host/group.
        self._host_vars_files = defaultdict(dict)
        self._group_vars_files = defaultdict(dict)
        # Inventory is attached later via set_inventory().
        self._inventory = None

        # Random per-run token substituted for omitted variables.
        self._omit_token = '__omit_place_holder__%s' % sha1(os.urandom(64)).hexdigest()
def _get_cache_entry(self, play=None, host=None, task=None):
play_id = "NONE"
if play:
play_id = play._uuid
host_id = "NONE"
if host:
host_id = host.get_name()
task_id = "NONE"
if task:
task_id = task._uuid
return "PLAY:%s;HOST:%s;TASK:%s" % (play_id, host_id, task_id)
    @property
    def extra_vars(self):
        ''' ensures a clean copy of the extra_vars are made '''
        # Shallow copy so callers cannot mutate the stored mapping.
        return self._extra_vars.copy()
    @extra_vars.setter
    def extra_vars(self, value):
        ''' ensures a clean copy of the extra_vars are used to set the value '''
        # NOTE(review): `assert` is stripped under `python -O`; raise a
        # TypeError instead if this validation must always run.
        assert isinstance(value, MutableMapping)
        self._extra_vars = value.copy()
    def set_inventory(self, inventory):
        # Attach the inventory this manager reads host/group data from.
        self._inventory = inventory
def _preprocess_vars(self, a):
'''
Ensures that vars contained in the parameter passed in are
returned as a list of dictionaries, to ensure for instance
that vars loaded from a file conform to an expected state.
'''
if a is None:
return None
elif not isinstance(a, list):
data = [ a ]
else:
data = a
for item in data:
if not isinstance(item, MutableMapping):
raise AnsibleError("variable files must contain either a dictionary of variables, or a list of dictionaries. Got: %s (%s)" % (a, type(a)))
return data
def get_vars(self, loader, play=None, host=None, task=None, include_hostvars=True, use_cache=True):
'''
Returns the variables, with optional "context" given via the parameters
for the play, host, and task (which could possibly result in different
sets of variables being returned due to the additional context).
The order of precedence is:
- play->roles->get_default_vars (if there is a play context)
- group_vars_files[host] (if there is a host context)
- host_vars_files[host] (if there is a host context)
- host->get_vars (if there is a host context)
- fact_cache[host] (if there is a host context)
- play vars (if there is a play context)
- play vars_files (if there's no host context, ignore
file names that cannot be templated)
- task->get_vars (if there is a task context)
- vars_cache[host] (if there is a host context)
- extra vars
'''
debug("in VariableManager get_vars()")
cache_entry = self._get_cache_entry(play=play, host=host, task=task)
if cache_entry in CACHED_VARS and use_cache:
debug("vars are cached, returning them now")
return CACHED_VARS[cache_entry]
all_vars = defaultdict(dict)
if play:
# first we compile any vars specified in defaults/main.yml
# for all roles within the specified play
for role in play.get_roles():
all_vars = combine_vars(all_vars, role.get_default_vars())
# if we have a task in this context, and that task has a role, make
# sure it sees its defaults above any other roles, as we previously
# (v1) made sure each task had a copy of its roles default vars
if task and task._role is not None:
all_vars = combine_vars(all_vars, task._role.get_default_vars())
if host:
# next, if a host is specified, we load any vars from group_vars
# files and then any vars from host_vars files which may apply to
# this host or the groups it belongs to
# we merge in vars from groups specified in the inventory (INI or script)
all_vars = combine_vars(all_vars, host.get_group_vars())
# then we merge in the special 'all' group_vars first, if they exist
if 'all' in self._group_vars_files:
data = preprocess_vars(self._group_vars_files['all'])
for item in data:
all_vars = combine_vars(all_vars, item)
for group in host.get_groups():
if group.name in self._group_vars_files and group.name != 'all':
for data in self._group_vars_files[group.name]:
data = preprocess_vars(data)
for item in data:
all_vars = combine_vars(all_vars, item)
# then we merge in vars from the host specified in the inventory (INI or script)
all_vars = combine_vars(all_vars, host.get_vars())
# then we merge in the host_vars/<hostname> file, if it exists
host_name = host.get_name()
if host_name in self._host_vars_files:
for data in self._host_vars_files[host_name]:
data = preprocess_vars(data)
for item in data:
all_vars = combine_vars(all_vars, item)
# finally, the facts caches for this host, if it exists
try:
host_facts = self._fact_cache.get(host.name, dict())
for k in host_facts.keys():
if host_facts[k] is not None and not isinstance(host_facts[k], UnsafeProxy):
host_facts[k] = UnsafeProxy(host_facts[k])
all_vars = combine_vars(all_vars, host_facts)
except KeyError:
pass
if play:
all_vars = combine_vars(all_vars, play.get_vars())
for vars_file_item in play.get_vars_files():
try:
# create a set of temporary vars here, which incorporate the
# extra vars so we can properly template the vars_files entries
temp_vars = combine_vars(all_vars, self._extra_vars)
templar = Templar(loader=loader, variables=temp_vars)
# we assume each item in the list is itself a list, as we
# support "conditional includes" for vars_files, which mimics
# the with_first_found mechanism.
vars_file_list = templar.template(vars_file_item)
if not isinstance(vars_file_list, list):
vars_file_list = [ vars_file_list ]
# now we iterate through the (potential) files, and break out
# as soon as we read one from the list. If none are found, we
# raise an error, which is silently ignored at this point.
for vars_file in vars_file_list:
data = preprocess_vars(loader.load_from_file(vars_file))
if data is not None:
for item in data:
all_vars = combine_vars(all_vars, item)
break
else:
raise AnsibleError("vars file %s was not found" % vars_file_item)
except UndefinedError:
continue
if not C.DEFAULT_PRIVATE_ROLE_VARS:
for role in play.get_roles():
all_vars = combine_vars(all_vars, role.get_vars())
if task:
if task._role:
all_vars = combine_vars(all_vars, task._role.get_vars())
all_vars = combine_vars(all_vars, task.get_vars())
if host:
all_vars = combine_vars(all_vars, self._vars_cache.get(host.get_name(), dict()))
all_vars = combine_vars(all_vars, self._nonpersistent_fact_cache.get(host.name, dict()))
all_vars = combine_vars(all_vars, self._extra_vars)
# FIXME: make sure all special vars are here
# Finally, we create special vars
all_vars['playbook_dir'] = loader.get_basedir()
if host:
all_vars['groups'] = [group.name for group in host.get_groups()]
if self._inventory is not None:
all_vars['groups'] = self._inventory.groups_list()
if include_hostvars:
hostvars = HostVars(vars_manager=self, play=play, inventory=self._inventory, loader=loader)
all_vars['hostvars'] = hostvars
if task:
if task._role:
all_vars['role_path'] = task._role._role_path
if self._inventory is not None:
all_vars['inventory_dir'] = self._inventory.basedir()
if play:
# add the list of hosts in the play, as adjusted for limit/filters
# DEPRECATED: play_hosts should be deprecated in favor of ansible_play_hosts,
# however this would take work in the templating engine, so for now
# we'll add both so we can give users something transitional to use
host_list = [x.name for x in self._inventory.get_hosts()]
all_vars['play_hosts'] = host_list
all_vars['ansible_play_hosts'] = host_list
# the 'omit' value alows params to be left out if the variable they are based on is undefined
all_vars['omit'] = self._omit_token
all_vars['ansible_version'] = CLI.version_info(gitinfo=False)
if 'hostvars' in all_vars and host:
all_vars['vars'] = all_vars['hostvars'][host.get_name()]
#CACHED_VARS[cache_entry] = all_vars
debug("done with get_vars()")
return all_vars
def _get_inventory_basename(self, path):
'''
Returns the basename minus the extension of the given path, so the
bare filename can be matched against host/group names later
'''
(name, ext) = os.path.splitext(os.path.basename(path))
if ext not in ('.yml', '.yaml'):
return os.path.basename(path)
else:
return name
def _load_inventory_file(self, path, loader):
'''
helper function, which loads the file and gets the
basename of the file without the extension
'''
if loader.is_directory(path):
data = dict()
try:
names = loader.list_directory(path)
except os.error as err:
raise AnsibleError("This folder cannot be listed: %s: %s." % (path, err.strerror))
# evaluate files in a stable order rather than whatever
# order the filesystem lists them.
names.sort()
# do not parse hidden files or dirs, e.g. .svn/
paths = [os.path.join(path, name) for name in names if not name.startswith('.')]
for p in paths:
_found, results = self._load_inventory_file(path=p, loader=loader)
if results is not None:
data = combine_vars(data, results)
else:
file_name, ext = os.path.splitext(path)
data = None
if not ext or ext not in C.YAML_FILENAME_EXTENSIONS:
for test_ext in C.YAML_FILENAME_EXTENSIONS:
new_path = path + test_ext
if loader.path_exists(new_path):
data = loader.load_from_file(new_path)
break
else:
if loader.path_exists(path):
data = loader.load_from_file(path)
name = self._get_inventory_basename(path)
return (name, data)
def add_host_vars_file(self, path, loader):
'''
Loads and caches a host_vars file in the _host_vars_files dict,
where the key to that dictionary is the basename of the file, minus
the extension, for matching against a given inventory host name
'''
(name, data) = self._load_inventory_file(path, loader)
if data:
if name not in self._host_vars_files:
self._host_vars_files[name] = []
self._host_vars_files[name].append(data)
return data
else:
return dict()
def add_group_vars_file(self, path, loader):
'''
Loads and caches a host_vars file in the _host_vars_files dict,
where the key to that dictionary is the basename of the file, minus
the extension, for matching against a given inventory host name
'''
(name, data) = self._load_inventory_file(path, loader)
if data:
if name not in self._group_vars_files:
self._group_vars_files[name] = []
self._group_vars_files[name].append(data)
return data
else:
return dict()
def set_host_facts(self, host, facts):
'''
Sets or updates the given facts for a host in the fact cache.
'''
assert isinstance(facts, dict)
if host.name not in self._fact_cache:
self._fact_cache[host.name] = facts
else:
try:
self._fact_cache[host.name].update(facts)
except KeyError:
self._fact_cache[host.name] = facts
def set_nonpersistent_facts(self, host, facts):
'''
Sets or updates the given facts for a host in the fact cache.
'''
assert isinstance(facts, dict)
if host.name not in self._nonpersistent_fact_cache:
self._nonpersistent_fact_cache[host.name] = facts
else:
try:
self._nonpersistent_fact_cache[host.name].update(facts)
except KeyError:
self._nonpersistent_fact_cache[host.name] = facts
def set_host_variable(self, host, varname, value):
'''
Sets a value in the vars_cache for a host.
'''
host_name = host.get_name()
if host_name not in self._vars_cache:
self._vars_cache[host_name] = dict()
self._vars_cache[host_name][varname] = value
|
pongem/python-bot-project
|
refs/heads/master
|
appengine/standard/background/main.py
|
8
|
# Copyright 2016 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Sample application that demonstrates how to use the App Engine background
threads.
app.yaml scaling must be set to manual or basic.
"""
# [START background-imp]
from google.appengine.api import background_thread
# [END background-imp]
import webapp2
val = 'Dog'
class MainHandler(webapp2.RequestHandler):
    """Serves the current value of the module-global ``val`` as plain text."""
    def get(self):
        response = self.response
        response.headers['Content-Type'] = 'text/plain'
        response.write(str(val))
class SetDogHandler(webapp2.RequestHandler):
    """ Resets the global val to 'Dog'"""
    def get(self):
        global val
        val = 'Dog'
        response = self.response
        response.headers['Content-Type'] = 'text/plain'
        response.write('Done')
class SetCatBackgroundHandler(webapp2.RequestHandler):
    """ Demonstrates two ways to start new background threads
    """
    def get(self):
        """
        Demonstrates using a background thread to change the global
        val from 'Dog' to 'Cat'

        The auto GET parameter determines whether to start the thread
        automatically or manually
        """
        # NOTE: any non-empty 'auto' query value counts as true here.
        auto = self.request.get('auto')
        # [START background-start]
        # sample function to run in a background thread
        def change_val(arg):
            global val
            val = arg
        if auto:
            # Start the new thread in one command
            background_thread.start_new_background_thread(change_val, ['Cat'])
        else:
            # create a new thread and start it
            t = background_thread.BackgroundThread(
                target=change_val, args=['Cat'])
            t.start()
        # [END background-start]
        self.response.headers['Content-Type'] = 'text/plain'
        self.response.write('Done')
# Route table for the sample application.
app = webapp2.WSGIApplication([
    ('/', MainHandler),
    ('/dog', SetDogHandler),
    ('/cat', SetCatBackgroundHandler),
], debug=True)
# [END all]
|
laumann/servo
|
refs/heads/master
|
tests/wpt/web-platform-tests/tools/manifest/update.py
|
230
|
#!/usr/bin/env python
import argparse
import imp
import os
import sys
import manifest
import vcs
from log import get_logger
from tree import GitTree, NoVCSTree
here = os.path.dirname(__file__)
localpaths = imp.load_source("localpaths", os.path.abspath(os.path.join(here, os.pardir, "localpaths.py")))
def update(tests_root, url_base, manifest, ignore_local=False):
    """Refresh *manifest* in place from the test tree rooted at *tests_root*.

    Uses git history when the tree is a git repo; otherwise falls back to a
    plain filesystem walk and (unless *ignore_local*) prunes missing entries.
    """
    if vcs.is_git_repo(tests_root):
        tests_tree = GitTree(tests_root, url_base)
        remove_missing_local = False
    else:
        tests_tree = NoVCSTree(tests_root, url_base)
        remove_missing_local = not ignore_local

    local_changes = None if ignore_local else tests_tree.local_changes()

    manifest.update(tests_root,
                    url_base,
                    tests_tree.current_rev(),
                    tests_tree.committed_changes(manifest.rev),
                    local_changes,
                    remove_missing_local=remove_missing_local)
def update_from_cli(**kwargs):
    """Drive a manifest update from parsed command-line keyword options."""
    tests_root = kwargs["tests_root"]
    path = kwargs["path"]
    assert tests_root is not None

    logger = get_logger()
    m = None
    if kwargs.get("rebuild", False):
        logger.info("Updating manifest")
    else:
        # Reuse the existing manifest unless its format version changed.
        try:
            m = manifest.load(tests_root, path)
        except manifest.ManifestVersionMismatch:
            logger.info("Manifest version changed, rebuilding")
            m = None

    if m is None:
        m = manifest.Manifest(None)

    update(tests_root,
           kwargs["url_base"],
           m,
           ignore_local=kwargs.get("ignore_local", False))
    manifest.write(m, path)
def abs_path(path):
    """Expand a leading ``~`` and return the absolute, normalized path."""
    expanded = os.path.expanduser(path)
    return os.path.abspath(expanded)
def create_parser():
    """Construct the command-line argument parser for manifest updates."""
    parser = argparse.ArgumentParser()
    parser.add_argument("-p", "--path", type=abs_path,
                        help="Path to manifest file.")
    parser.add_argument("--tests-root", type=abs_path,
                        help="Path to root of tests.")
    parser.add_argument("-r", "--rebuild", default=False, action="store_true",
                        help="Force a full rebuild of the manifest.")
    parser.add_argument("--ignore-local", default=False, action="store_true",
                        help="Don't include uncommitted local changes in the manifest.")
    parser.add_argument("--url-base", default="/", action="store",
                        help="Base url to use as the mount point for tests in this manifest.")
    return parser
def find_top_repo():
    """Walk upwards from this file's directory and return the topmost
    ancestor that is a git repository, or None if there is none.

    The original loop tested ``path != "/"``, which never terminates on
    Windows (paths there never become "/") and also skipped checking the
    root directory itself. Stopping when the parent equals the path is
    portable and covers the root.
    """
    path = here
    rv = None
    while True:
        if vcs.is_git_repo(path):
            rv = path
        parent = os.path.abspath(os.path.join(path, os.pardir))
        if parent == path:
            # Reached the filesystem root.
            break
        path = parent
    return rv
def main(default_tests_root=None):
    """Entry point: parse CLI options, fill in defaults, update the manifest.

    :param default_tests_root: optional tests root used when --tests-root is
        not given; otherwise we search upwards for a git repository.
    """
    opts = create_parser().parse_args()

    if opts.tests_root is None:
        tests_root = None
        if default_tests_root is not None:
            tests_root = default_tests_root
        else:
            tests_root = find_top_repo()

        if tests_root is None:
            # Write directly to stderr so this works on both Python 2 and 3
            # (the original used the Python-2-only ``print >>`` statement,
            # which is a syntax error under Python 3).
            sys.stderr.write("No git repo found; could not determine test root.\n"
                             "Run again with --test-root\n")
            sys.exit(1)

        opts.tests_root = tests_root

    if opts.path is None:
        opts.path = os.path.join(opts.tests_root, "MANIFEST.json")

    update_from_cli(**vars(opts))
# Script entry point.
if __name__ == "__main__":
    main()
|
jbuchbinder/youtube-dl
|
refs/heads/master
|
youtube_dl/extractor/googlesearch.py
|
168
|
from __future__ import unicode_literals
import itertools
import re
from .common import SearchInfoExtractor
from ..compat import (
compat_urllib_parse,
)
class GoogleSearchIE(SearchInfoExtractor):
    IE_DESC = 'Google Video search'
    _MAX_RESULTS = 1000
    IE_NAME = 'video.google:search'
    _SEARCH_KEY = 'gvsearch'
    _TEST = {
        'url': 'gvsearch15:python language',
        'info_dict': {
            'id': 'python language',
            'title': 'python language',
        },
        'playlist_count': 15,
    }

    def _get_n_results(self, query, n):
        """Get a specified number of results for a query"""
        collected = []
        playlist = {
            '_type': 'playlist',
            'id': query,
            'title': query,
        }

        for page_idx in itertools.count():
            page_url = (
                'http://www.google.com/search?tbm=vid&q=%s&start=%s&hl=en'
                % (compat_urllib_parse.quote_plus(query), page_idx * 10))
            page = self._download_webpage(
                page_url, 'gvsearch:' + query,
                note='Downloading result page ' + str(page_idx + 1))

            anchors = re.finditer(r'<h3 class="r"><a href="([^"]+)"', page)
            for idx, match in enumerate(anchors):
                # Results without a matching video thumbnail are playlists;
                # skip those.
                if re.search(r'id="vidthumb%d"' % (idx + 1), page) is None:
                    continue
                collected.append({
                    '_type': 'url',
                    'url': match.group(1)
                })

            have_enough = len(collected) >= n
            no_next_page = re.search(r'id="pnnext"', page) is None
            if have_enough or no_next_page:
                playlist['entries'] = collected[:n]
                return playlist
|
david30907d/feedback_django
|
refs/heads/master
|
spirit/topic/notification/__init__.py
|
12
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
# AppConfig path picked up by Django's app registry for this package.
default_app_config = 'spirit.topic.notification.apps.SpiritTopicNotificationConfig'
|
LunarLanding/moviepy
|
refs/heads/master
|
moviepy/video/fx/painting.py
|
19
|
#------- CHECKING DEPENDENCIES -----------------------------------------
# Locate a Sobel filter implementation: prefer scikit-image, fall back to
# scipy. If neither is importable, painting is disabled and the fx is
# replaced with an error stub further below.
# The original used bare ``except:`` clauses, which would also swallow
# unrelated errors (KeyboardInterrupt, typos inside the modules, ...);
# only import failures should trigger the fallback.
painting_possible = True
try:
    from skimage.filter import sobel
except ImportError:
    try:
        from scipy.ndimage.filters import sobel
    except ImportError:
        painting_possible = False
#-----------------------------------------------------------------------
import numpy as np
def to_painting(image, saturation=1.4, black=0.006):
    """ transforms any photo into some kind of painting """
    # Edge magnitude of the grayscale (channel-mean) image.
    outlines = sobel(image.mean(axis=2))
    # Replicate the edge map over the three channels and scale it into a
    # darkening term; then boost the colors and subtract the dark edges.
    shading = black * (255 * np.dstack(3 * [outlines]))
    boosted = saturation * image - shading
    # Clamp to the valid 8-bit range.
    return np.maximum(0, np.minimum(255, boosted)).astype('uint8')
def painting(clip, saturation = 1.4,black = 0.006):
    """
    Transforms any photo into some kind of painting. Saturation
    tells at which point the colors of the result should be
    flashy. ``black`` gives the anount of black lines wanted.
    Requires Scikit-image or Scipy installed.
    """
    def apply_effect(frame):
        return to_painting(frame, saturation, black)
    return clip.fl_image(apply_effect)
#------- OVERWRITE IF REQUIREMENTS NOT MET -----------------------------
# Replace the fx with a stub that raises, keeping the original docstring.
if not painting_possible:
    doc = painting.__doc__
    # NOTE(review): this stub's signature (newsize/height/width) differs
    # from the real painting(clip, saturation, black) above -- confirm
    # whether that is intended.
    def painting(clip, newsize=None, height=None, width=None):
        raise IOError("fx painting needs scikit-image or scipy")
    painting.__doc__ = doc
#-----------------------------------------------------------------------
|
erdosmiller/pybase
|
refs/heads/master
|
pybase/objectify.py
|
1
|
class Objectify(object):
    """Wraps an XML element tree in objects with pythonic attribute access.

    Child elements become attributes on their parent wrapper; leaf values
    are converted according to the element's ``type`` attribute.

    NOTE(review): relies on module-level ``fromstring``,
    ``pythonic_objectify`` and ``datetime`` being defined/imported
    elsewhere in this module -- none are visible in this chunk.
    """

    def __init__(self, tree, parent=None):
        self._parent = parent
        if isinstance(tree, str):
            self._tree = fromstring(tree)
        else:
            self._tree = tree
        # this is required to call on all the children
        self._children = [pythonic_objectify(child, self) for child in self._tree]
        # assigning attributes to the parent
        if parent is not None:
            # making the tags more pythonic - don't hate me!
            tag = self._tree.tag
            tag = tag.replace('-', '_')
            # getting the tags value
            value = self._tree.text
            # known type conversion
            if 'type' in self._tree.attrib and value is not None:
                kind = self._tree.attrib['type']
                if kind == 'integer':
                    value = int(value)
                elif kind == 'float':
                    value = float(value)
                elif kind == 'boolean':
                    # NOTE(review): bool(value) is True for ANY non-empty
                    # string, including "false" -- likely unintended, but
                    # preserved; confirm before changing.
                    value = bool(value)
                elif kind == 'date':
                    year, month, day = value.split('-')
                    value = datetime.datetime(int(year), int(month), int(day))
            # apply it to it's parent
            setattr(self._parent, tag, value)

    def __repr__(self):
        return self._tree.tag

    def __iter__(self):
        return self._children.__iter__()

    def __getitem__(self, index):
        """Support integer indexing into children AND string access to
        attributes (``obj[0]`` or ``obj['name']``)."""
        try:
            return self._children[index]
        except TypeError:
            # A non-integer key raises TypeError on list indexing; fall
            # back to attribute lookup. The original caught AttributeError,
            # which list indexing never raises, so string keys crashed
            # instead of reaching getattr().
            return getattr(self, index)

    def get_children(self):
        return self._children

    children = property(get_children)
    data = property(get_children)
|
liuzheng712/jumpserver
|
refs/heads/master
|
apps/assets/views/system_user.py
|
1
|
# ~*~ coding: utf-8 ~*~
from django.utils.translation import ugettext as _
from django.views.generic import TemplateView
from django.views.generic.edit import CreateView, DeleteView, UpdateView
from django.urls import reverse_lazy
from django.contrib.messages.views import SuccessMessageMixin
from django.views.generic.detail import DetailView
from common.const import create_success_msg, update_success_msg
from ..forms import SystemUserForm
from ..models import SystemUser, Node, CommandFilter
from common.permissions import AdminUserRequiredMixin
__all__ = [
'SystemUserCreateView', 'SystemUserUpdateView',
'SystemUserDetailView', 'SystemUserDeleteView',
'SystemUserAssetView', 'SystemUserListView',
]
class SystemUserListView(AdminUserRequiredMixin, TemplateView):
    """Renders the system-user list page."""
    template_name = 'assets/system_user_list.html'

    def get_context_data(self, **kwargs):
        kwargs.update({
            'app': _('Assets'),
            'action': _('System user list'),
        })
        return super().get_context_data(**kwargs)
class SystemUserCreateView(AdminUserRequiredMixin, SuccessMessageMixin, CreateView):
    """Create form for a new system user."""
    model = SystemUser
    form_class = SystemUserForm
    template_name = 'assets/system_user_create.html'
    success_url = reverse_lazy('assets:system-user-list')
    success_message = create_success_msg

    def get_context_data(self, **kwargs):
        kwargs.update({
            'app': _('Assets'),
            'action': _('Create system user'),
        })
        return super().get_context_data(**kwargs)
class SystemUserUpdateView(AdminUserRequiredMixin, SuccessMessageMixin, UpdateView):
    """Edit form for an existing system user."""
    model = SystemUser
    form_class = SystemUserForm
    template_name = 'assets/system_user_update.html'
    success_url = reverse_lazy('assets:system-user-list')
    success_message = update_success_msg

    def get_context_data(self, **kwargs):
        kwargs.update({
            'app': _('Assets'),
            'action': _('Update system user')
        })
        return super().get_context_data(**kwargs)
class SystemUserDetailView(AdminUserRequiredMixin, DetailView):
    """Detail page for a single system user."""
    template_name = 'assets/system_user_detail.html'
    context_object_name = 'system_user'
    model = SystemUser

    def get_context_data(self, **kwargs):
        # Command filters not yet attached to this system user.
        remaining_filters = CommandFilter.objects.exclude(system_users=self.object)
        kwargs.update({
            'app': _('Assets'),
            'action': _('System user detail'),
            'cmd_filters_remain': remaining_filters
        })
        return super().get_context_data(**kwargs)
class SystemUserDeleteView(AdminUserRequiredMixin, DeleteView):
    # Generic delete-confirmation flow; redirects to the list on success.
    model = SystemUser
    template_name = 'delete_confirm.html'
    success_url = reverse_lazy('assets:system-user-list')
class SystemUserAssetView(AdminUserRequiredMixin, DetailView):
    """Shows the assets/nodes attached to a system user."""
    model = SystemUser
    template_name = 'assets/system_user_asset.html'
    context_object_name = 'system_user'

    def get_context_data(self, **kwargs):
        # Nodes this system user is not yet attached to.
        nodes_remain = sorted(Node.objects.exclude(systemuser=self.object), reverse=True)
        context = {
            # Capitalized to match every sibling system-user view, which
            # all use _('Assets'); the lowercase _('assets') here broke
            # label and translation-lookup consistency.
            'app': _('Assets'),
            'action': _('System user asset'),
            'nodes_remain': nodes_remain
        }
        kwargs.update(context)
        return super().get_context_data(**kwargs)
|
gtaylor/django-athumb
|
refs/heads/master
|
athumb/templatetags/athumb.py
|
4
|
from django.template import Library
from thumbnail import thumbnail
# Template tag registry for this library; exposes the `thumbnail` tag.
register = Library()
register.tag(thumbnail)
|
glwu/python-for-android
|
refs/heads/master
|
python-modules/zope/zope/interface/exceptions.py
|
50
|
##############################################################################
#
# Copyright (c) 2002 Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""Interface-specific exceptions
$Id: exceptions.py 110536 2010-04-06 02:59:44Z tseaver $
"""
class Invalid(Exception):
    """A specification is violated.

    Base class for the interface-violation exceptions in this module.
    """
class DoesNotImplement(Invalid):
    """Raised when an object does not implement a required interface."""

    def __init__(self, interface):
        self.interface = interface

    def __str__(self):
        template = """An object does not implement interface %(interface)s
        """
        return template % self.__dict__
class BrokenImplementation(Invalid):
    """Raised when a required interface attribute is not provided."""

    def __init__(self, interface, name):
        self.interface = interface
        self.name = name

    def __str__(self):
        template = """An object has failed to implement interface %(interface)s
        The %(name)s attribute was not provided.
        """
        return template % self.__dict__
class BrokenMethodImplementation(Invalid):
    """Raised when a method's implementation violates its contract."""

    def __init__(self, method, mess):
        self.method = method
        self.mess = mess

    def __str__(self):
        template = """The implementation of %(method)s violates its contract
        because %(mess)s.
        """
        return template % self.__dict__
class InvalidInterface(Exception):
    """The interface has invalid contents.
    """
class BadImplements(TypeError):
    """An implementation assertion is invalid

    because it doesn't contain an interface or a sequence of valid
    implementation assertions.
    """
|
IllusionRom-deprecated/android_platform_external_chromium_org
|
refs/heads/illusion-4.4
|
tools/telemetry/telemetry/core/bitmap_unittest.py
|
23
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import tempfile
import os
import unittest
from telemetry.core import bitmap
from telemetry.core import util
# This is a simple base64 encoded 2x2 PNG which contains, in order, a single
# Red, Yellow, Blue, and Green pixel.
test_png = """
iVBORw0KGgoAAAANSUhEUgAAAAIAAAACCAIAAAD91
JpzAAAAAXNSR0IArs4c6QAAAAlwSFlzAAALEwAACx
MBAJqcGAAAABZJREFUCNdj/M/AwPCfgYGB4T/DfwY
AHAAD/iOWZXsAAAAASUVORK5CYII=
"""
test_png_path = os.path.join(util.GetUnittestDataDir(), 'test_png.png')
test_png_2_path = os.path.join(util.GetUnittestDataDir(), 'test_png_2.png')
class BitmapTest(unittest.TestCase):
  """Exercises telemetry.core.bitmap: decoding, encoding, crop, diff."""
  # pylint: disable=C0324

  def testReadFromBase64Png(self):
    # Decode the 2x2 base64 PNG and check each corner pixel's color.
    bmp = bitmap.Bitmap.FromBase64Png(test_png)
    self.assertEquals(2, bmp.width)
    self.assertEquals(2, bmp.height)
    bmp.GetPixelColor(0, 0).AssertIsRGB(255, 0, 0)
    bmp.GetPixelColor(1, 1).AssertIsRGB(0, 255, 0)
    bmp.GetPixelColor(0, 1).AssertIsRGB(0, 0, 255)
    bmp.GetPixelColor(1, 0).AssertIsRGB(255, 255, 0)

  def testReadFromPngFile(self):
    # Same image loaded from a file on disk instead of base64.
    file_bmp = bitmap.Bitmap.FromPngFile(test_png_path)
    self.assertEquals(2, file_bmp.width)
    self.assertEquals(2, file_bmp.height)
    file_bmp.GetPixelColor(0, 0).AssertIsRGB(255, 0, 0)
    file_bmp.GetPixelColor(1, 1).AssertIsRGB(0, 255, 0)
    file_bmp.GetPixelColor(0, 1).AssertIsRGB(0, 0, 255)
    file_bmp.GetPixelColor(1, 0).AssertIsRGB(255, 255, 0)

  def testWritePngToPngFile(self):
    # Round-trip: write to a temp file, re-read, compare.
    orig = bitmap.Bitmap.FromPngFile(test_png_path)
    temp_file = tempfile.NamedTemporaryFile().name
    orig.WritePngFile(temp_file)
    new_file = bitmap.Bitmap.FromPngFile(temp_file)
    self.assertTrue(orig.IsEqual(new_file))

  def testWriteCroppedBmpToPngFile(self):
    # Crop a 3x2 raw-pixel bitmap to 2x2 before the round-trip.
    pixels = [255,0,0, 255,255,0, 0,0,0,
              255,255,0, 0,255,0, 0,0,0]
    orig = bitmap.Bitmap(3, 3, 2, pixels)
    orig.Crop(0, 0, 2, 2)
    temp_file = tempfile.NamedTemporaryFile().name
    orig.WritePngFile(temp_file)
    new_file = bitmap.Bitmap.FromPngFile(temp_file)
    self.assertTrue(orig.IsEqual(new_file))

  def testIsEqual(self):
    # Base64 and file variants of the same image compare equal.
    bmp = bitmap.Bitmap.FromBase64Png(test_png)
    file_bmp = bitmap.Bitmap.FromPngFile(test_png_path)
    self.assertTrue(bmp.IsEqual(file_bmp))

  def testDiff(self):
    file_bmp = bitmap.Bitmap.FromPngFile(test_png_path)
    file_bmp_2 = bitmap.Bitmap.FromPngFile(test_png_2_path)

    # Diff against itself is all black.
    diff_bmp = file_bmp.Diff(file_bmp)
    self.assertEquals(2, diff_bmp.width)
    self.assertEquals(2, diff_bmp.height)
    diff_bmp.GetPixelColor(0, 0).AssertIsRGB(0, 0, 0)
    diff_bmp.GetPixelColor(1, 1).AssertIsRGB(0, 0, 0)
    diff_bmp.GetPixelColor(0, 1).AssertIsRGB(0, 0, 0)
    diff_bmp.GetPixelColor(1, 0).AssertIsRGB(0, 0, 0)

    # 2x2 vs 3x3: the diff takes the larger size; pixels outside the
    # smaller image come out white.
    diff_bmp = file_bmp.Diff(file_bmp_2)
    self.assertEquals(3, diff_bmp.width)
    self.assertEquals(3, diff_bmp.height)
    diff_bmp.GetPixelColor(0, 0).AssertIsRGB(0, 255, 255)
    diff_bmp.GetPixelColor(1, 1).AssertIsRGB(255, 0, 255)
    diff_bmp.GetPixelColor(0, 1).AssertIsRGB(255, 255, 0)
    diff_bmp.GetPixelColor(1, 0).AssertIsRGB(0, 0, 255)
    diff_bmp.GetPixelColor(0, 2).AssertIsRGB(255, 255, 255)
    diff_bmp.GetPixelColor(1, 2).AssertIsRGB(255, 255, 255)
    diff_bmp.GetPixelColor(2, 0).AssertIsRGB(255, 255, 255)
    diff_bmp.GetPixelColor(2, 1).AssertIsRGB(255, 255, 255)
    diff_bmp.GetPixelColor(2, 2).AssertIsRGB(255, 255, 255)

  def testCrop(self):
    # Crop the middle 2x1 strip of red pixels out of a 3x4 bitmap.
    pixels = [0,0,0, 0,0,0, 0,0,0, 0,0,0,
              0,0,0, 1,0,0, 1,0,0, 0,0,0,
              0,0,0, 0,0,0, 0,0,0, 0,0,0]
    bmp = bitmap.Bitmap(3, 4, 3, pixels)
    bmp.Crop(1, 1, 2, 1)
    self.assertEquals(bmp.width, 2)
    self.assertEquals(bmp.height, 1)
    bmp.GetPixelColor(0, 0).AssertIsRGB(1, 0, 0)
    bmp.GetPixelColor(1, 0).AssertIsRGB(1, 0, 0)
    self.assertEquals(bmp.pixels, bytearray([1,0,0, 1,0,0]))
|
kurtgeebelen/rawesome
|
refs/heads/master
|
rawekite/kiteutils.py
|
1
|
# Copyright 2012-2013 Greg Horn
#
# This file is part of rawesome.
#
# rawesome is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# rawesome is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with rawesome. If not, see <http://www.gnu.org/licenses/>.
import casadi as C
def getEuler(ocp, k):
    """Return (yaw, pitch, roll) Euler angles from the DCM entries of the
    ocp at timestep k."""
    e11 = ocp.lookup('e11',timestep=k)
    e12 = ocp.lookup('e12',timestep=k)
    minus_e13 = -ocp.lookup('e13',timestep=k)
    # mr13 -- nan protect
    #   | mr13' >  1 =  1
    #   | mr13' < -1 = -1
    #   | otherwise  = mr13'
    # NOTE(review): the clamping described above is not implemented.
    e23 = ocp.lookup('e23',timestep=k)
    e33 = ocp.lookup('e33',timestep=k)
    return (C.arctan2(e12, e11), C.arcsin(minus_e13), C.arctan2(e23, e33))
# euler angle periodic constraints
def periodicEulers(ocp):
    """Constrain the Euler angles at the first and last timestep to match."""
    start_angles = getEuler(ocp, 0)
    end_angles = getEuler(ocp, -1)
    for angle0, angleF in zip(start_angles, end_angles):
        ocp.constrain(angle0, '==', angleF)
def getDcm(ocp, k, prefix='e'):
    """Assemble the 3x3 DCM at timestep k from the entries named
    '<prefix>11' ... '<prefix>33' (row-major)."""
    rows = []
    for r in ('1', '2', '3'):
        row = [ocp.lookup(prefix + r + c, timestep=k) for c in ('1', '2', '3')]
        rows.append(C.horzcat(row))
    return C.vertcat(rows)
def getOrthonormalizedDcm(ocp, k):
    """Gram-Schmidt-orthonormalized DCM at timestep k."""
    return orthonormalizeDcm(getDcm(ocp, k))
def get_orthonormal_constraints(R):
    """Return (expression, description) pairs that are all zero iff R is
    orthonormal (unit rows, orthogonal rows, right-handed)."""
    ret = []
    ret.append( (C.mul(R[0,:],R[0,:].T) - 1, 'R1[0]: e1^T * e1 - 1 == 0') )
    ret.append( (C.mul(R[1,:],R[0,:].T), 'R1[0]: e2^T * e1 == 0') )
    ret.append( (C.mul(R[1,:],R[1,:].T) - 1, 'R1[0]: e2^T * e2 - 1 == 0') )
    rhon = C.cross(R[0,:],R[1,:]) - R[2,:]
    # NOTE(review): components are appended as rhon[0], rhon[2], rhon[1]
    # while the labels read [0], [1], [2] -- confirm the ordering is intended.
    ret.append( (rhon[0], 'R1[0]: ( e1^T X e2 - e3 )[0] == 0') )
    ret.append( (rhon[2], 'R1[0]: ( e1^T X e2 - e3 )[1] == 0') )
    ret.append( (rhon[1], 'R1[0]: ( e1^T X e2 - e3 )[2] == 0') )
    return ret
def makeOrthonormal(ocp_, R):
    """Add equality constraints forcing R to be orthonormal."""
    for expr, why in get_orthonormal_constraints(R):
        ocp_.constrain(expr, '==', 0, tag=(why, None))
def matchDcms(ocp, R0, Rf, tag=None):
    """Constrain two DCMs to represent the same rotation.

    Off-diagonal entries of R0^T * Rf must vanish and the diagonal must be
    positive (>= 0.5) so the identity is selected rather than a flip.
    """
    err = C.mul(R0.T, Rf)
    suffix = '' if tag is None else ' ' + tag
    ocp.constrain(err[0,1], '==', 0, tag=('dcm matching 01' + suffix, None))
    ocp.constrain(err[0,2], '==', 0, tag=('dcm matching 02' + suffix, None))
    ocp.constrain(err[1,2], '==', 0, tag=('dcm matching 12' + suffix, None))
    ocp.constrain(err[0,0], '>=', 0.5, tag=('dcm matching 00' + suffix, None))
    ocp.constrain(err[1,1], '>=', 0.5, tag=('dcm matching 11' + suffix, None))
    ocp.constrain(err[2,2], '>=', 0.5, tag=('dcm matching 22' + suffix, None))
def periodicDcm(ocp):
    """Constrain the raw DCM at the last timestep to match the first."""
    matchDcms(ocp, getDcm(ocp, 0), getDcm(ocp, -1))
# dcm periodic constraints
def periodicOrthonormalizedDcm(ocp):
    """Periodicity constraint on the orthonormalized DCM."""
    matchDcms(ocp, getOrthonormalizedDcm(ocp, 0), getOrthonormalizedDcm(ocp, -1))
def orthonormalizeDcm(m):
    """Re-orthonormalize a (numerically drifted) 3x3 DCM via Gram-Schmidt.

    Works entrywise on the (symbolic) elements of ``m`` and returns a new
    matrix; ``m`` itself is not modified. Ported from OGRE (license below).
    """
    ## OGRE (www.ogre3d.org) is made available under the MIT License.
    ##
    ## Copyright (c) 2000-2009 Torus Knot Software Ltd
    ##
    ## Permission is hereby granted, free of charge, to any person obtaining a copy
    ## of this software and associated documentation files (the "Software"), to deal
    ## in the Software without restriction, including without limitation the rights
    ## to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
    ## copies of the Software, and to permit persons to whom the Software is
    ## furnished to do so, subject to the following conditions:
    ##
    ## The above copyright notice and this permission notice shall be included in
    ## all copies or substantial portions of the Software.
    ##
    ## THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    ## IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    ## FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    ## AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    ## LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    ## OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
    ## THE SOFTWARE.
    # Algorithm uses Gram-Schmidt orthogonalization.  If 'this' matrix is
    # M = [m0|m1|m2], then orthonormal output matrix is Q = [q0|q1|q2],
    #
    #   q0 = m0/|m0|
    #   q1 = (m1-(q0*m1)q0)/|m1-(q0*m1)q0|
    #   q2 = (m2-(q0*m2)q0-(q1*m2)q1)/|m2-(q0*m2)q0-(q1*m2)q1|
    #
    # where |V| indicates length of vector V and A*B indicates dot
    # product of vectors A and B.
    # Unpack entries into scalars; orthonormalization operates on COLUMNS.
    m00 = m[0,0]
    m01 = m[0,1]
    m02 = m[0,2]
    m10 = m[1,0]
    m11 = m[1,1]
    m12 = m[1,2]
    m20 = m[2,0]
    m21 = m[2,1]
    m22 = m[2,2]
    # compute q0: normalize the first column
    fInvLength = 1.0/C.sqrt(m00*m00 + m10*m10 + m20*m20)
    m00 *= fInvLength
    m10 *= fInvLength
    m20 *= fInvLength
    # compute q1: subtract the q0 component from column 1, then normalize
    fDot0 = m00*m01 + m10*m11 + m20*m21
    m01 -= fDot0*m00
    m11 -= fDot0*m10
    m21 -= fDot0*m20
    fInvLength = 1.0/C.sqrt(m01*m01 + m11*m11 + m21*m21)
    m01 *= fInvLength
    m11 *= fInvLength
    m21 *= fInvLength
    # compute q2: subtract q0 and q1 components from column 2, then normalize
    fDot1 = m01*m02 + m11*m12 + m21*m22
    fDot0 = m00*m02 + m10*m12 + m20*m22
    m02 -= fDot0*m00 + fDot1*m01
    m12 -= fDot0*m10 + fDot1*m11
    m22 -= fDot0*m20 + fDot1*m21
    fInvLength = 1.0/C.sqrt(m02*m02 + m12*m12 + m22*m22)
    m02 *= fInvLength
    m12 *= fInvLength
    m22 *= fInvLength
    # Reassemble the orthonormalized matrix row by row.
    return C.vertcat([C.horzcat([m00,m01,m02]),
                      C.horzcat([m10,m11,m12]),
                      C.horzcat([m20,m21,m22])])
|
pkruskal/scikit-learn
|
refs/heads/master
|
sklearn/datasets/tests/test_base.py
|
205
|
import os
import shutil
import tempfile
import warnings
import nose
import numpy
from pickle import loads
from pickle import dumps
from sklearn.datasets import get_data_home
from sklearn.datasets import clear_data_home
from sklearn.datasets import load_files
from sklearn.datasets import load_sample_images
from sklearn.datasets import load_sample_image
from sklearn.datasets import load_digits
from sklearn.datasets import load_diabetes
from sklearn.datasets import load_linnerud
from sklearn.datasets import load_iris
from sklearn.datasets import load_boston
from sklearn.datasets.base import Bunch
from sklearn.externals.six import b, u
from sklearn.utils.testing import assert_false
from sklearn.utils.testing import assert_true
from sklearn.utils.testing import assert_equal
from sklearn.utils.testing import assert_raises
DATA_HOME = tempfile.mkdtemp(prefix="scikit_learn_data_home_test_")
LOAD_FILES_ROOT = tempfile.mkdtemp(prefix="scikit_learn_load_files_test_")
TEST_CATEGORY_DIR1 = ""
TEST_CATEGORY_DIR2 = ""
def _remove_dir(path):
if os.path.isdir(path):
shutil.rmtree(path)
def teardown_module():
    """Test fixture (clean up) run once after all tests of this module"""
    # Remove the temp directories created at module import time.
    for path in [DATA_HOME, LOAD_FILES_ROOT]:
        _remove_dir(path)
def setup_load_files():
    """Create two category dirs under LOAD_FILES_ROOT with one sample file.

    Paths are stored in module globals so teardown_load_files can remove
    them after each decorated test.
    """
    global TEST_CATEGORY_DIR1
    global TEST_CATEGORY_DIR2
    TEST_CATEGORY_DIR1 = tempfile.mkdtemp(dir=LOAD_FILES_ROOT)
    TEST_CATEGORY_DIR2 = tempfile.mkdtemp(dir=LOAD_FILES_ROOT)
    # delete=False: the file must survive close() so load_files can read it.
    sample_file = tempfile.NamedTemporaryFile(dir=TEST_CATEGORY_DIR1,
                                              delete=False)
    sample_file.write(b("Hello World!\n"))
    sample_file.close()
def teardown_load_files():
    """Remove the per-test category directories created by setup_load_files."""
    _remove_dir(TEST_CATEGORY_DIR1)
    _remove_dir(TEST_CATEGORY_DIR2)
def test_data_home():
    # get_data_home will point to a pre-existing folder
    data_home = get_data_home(data_home=DATA_HOME)
    assert_equal(data_home, DATA_HOME)
    assert_true(os.path.exists(data_home))
    # clear_data_home will delete both the content and the folder it-self
    clear_data_home(data_home=data_home)
    assert_false(os.path.exists(data_home))
    # if the folder is missing it will be created again
    data_home = get_data_home(data_home=DATA_HOME)
    assert_true(os.path.exists(data_home))
def test_default_empty_load_files():
    # No setup fixture: LOAD_FILES_ROOT is empty here.
    res = load_files(LOAD_FILES_ROOT)
    assert_equal(len(res.filenames), 0)
    assert_equal(len(res.target_names), 0)
    assert_equal(res.DESCR, None)
@nose.tools.with_setup(setup_load_files, teardown_load_files)
def test_default_load_files():
    res = load_files(LOAD_FILES_ROOT)
    assert_equal(len(res.filenames), 1)
    assert_equal(len(res.target_names), 2)
    assert_equal(res.DESCR, None)
    assert_equal(res.data, [b("Hello World!\n")])
@nose.tools.with_setup(setup_load_files, teardown_load_files)
def test_load_files_w_categories_desc_and_encoding():
    # NOTE(review): splitting on '/' assumes POSIX paths — would misbehave
    # on Windows; os.path.basename would be portable. Left unchanged here.
    category = os.path.abspath(TEST_CATEGORY_DIR1).split('/').pop()
    res = load_files(LOAD_FILES_ROOT, description="test",
                     categories=category, encoding="utf-8")
    assert_equal(len(res.filenames), 1)
    assert_equal(len(res.target_names), 1)
    assert_equal(res.DESCR, "test")
    assert_equal(res.data, [u("Hello World!\n")])
@nose.tools.with_setup(setup_load_files, teardown_load_files)
def test_load_files_wo_load_content():
    res = load_files(LOAD_FILES_ROOT, load_content=False)
    assert_equal(len(res.filenames), 1)
    assert_equal(len(res.target_names), 2)
    assert_equal(res.DESCR, None)
    assert_equal(res.get('data'), None)
def test_load_sample_images():
    # PIL is optional: skip softly (warn) when the image backend is missing.
    try:
        res = load_sample_images()
        assert_equal(len(res.images), 2)
        assert_equal(len(res.filenames), 2)
        assert_true(res.DESCR)
    except ImportError:
        warnings.warn("Could not load sample images, PIL is not available.")
def test_load_digits():
    digits = load_digits()
    assert_equal(digits.data.shape, (1797, 64))
    assert_equal(numpy.unique(digits.target).size, 10)
def test_load_digits_n_class_lt_10():
    digits = load_digits(9)
    assert_equal(digits.data.shape, (1617, 64))
    assert_equal(numpy.unique(digits.target).size, 9)
def test_load_sample_image():
    try:
        china = load_sample_image('china.jpg')
        assert_equal(china.dtype, 'uint8')
        assert_equal(china.shape, (427, 640, 3))
    except ImportError:
        warnings.warn("Could not load sample images, PIL is not available.")
def test_load_missing_sample_image_error():
    # Detect PIL availability via scipy's imread import paths.
    have_PIL = True
    try:
        try:
            from scipy.misc import imread
        except ImportError:
            from scipy.misc.pilutil import imread
    except ImportError:
        have_PIL = False
    if have_PIL:
        assert_raises(AttributeError, load_sample_image,
                      'blop.jpg')
    else:
        warnings.warn("Could not load sample images, PIL is not available.")
def test_load_diabetes():
    res = load_diabetes()
    assert_equal(res.data.shape, (442, 10))
    # NOTE(review): assert_true ignores its second argument, so the 442 is
    # never checked — this was probably meant to be assert_equal. Left
    # unchanged here.
    assert_true(res.target.size, 442)
def test_load_linnerud():
    res = load_linnerud()
    assert_equal(res.data.shape, (20, 3))
    assert_equal(res.target.shape, (20, 3))
    assert_equal(len(res.target_names), 3)
    assert_true(res.DESCR)
def test_load_iris():
    res = load_iris()
    assert_equal(res.data.shape, (150, 4))
    assert_equal(res.target.size, 150)
    assert_equal(res.target_names.size, 3)
    assert_true(res.DESCR)
def test_load_boston():
    res = load_boston()
    assert_equal(res.data.shape, (506, 13))
    assert_equal(res.target.size, 506)
    assert_equal(res.feature_names.size, 13)
    assert_true(res.DESCR)
def test_loads_dumps_bunch():
    # A Bunch keeps attribute and item access in sync across pickling.
    bunch = Bunch(x="x")
    bunch_from_pkl = loads(dumps(bunch))
    bunch_from_pkl.x = "y"
    assert_equal(bunch_from_pkl['x'], bunch_from_pkl.x)
|
arborh/tensorflow
|
refs/heads/master
|
tensorflow/python/autograph/pyct/static_analysis/activity_test.py
|
4
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for activity module."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import gast
import six
from tensorflow.python.autograph.pyct import anno
from tensorflow.python.autograph.pyct import parser
from tensorflow.python.autograph.pyct import qual_names
from tensorflow.python.autograph.pyct import transformer
from tensorflow.python.autograph.pyct.static_analysis import activity
from tensorflow.python.autograph.pyct.static_analysis import annos
from tensorflow.python.platform import test
QN = qual_names.QN
NodeAnno = annos.NodeAnno
global_a = 7
global_b = 17
class ScopeTest(test.TestCase):
  """Unit tests for activity.Scope read/modified/referenced bookkeeping."""
  def assertMissing(self, qn, scope):
    # qn appears in neither the read nor the modified set.
    self.assertNotIn(qn, scope.read)
    self.assertNotIn(qn, scope.modified)
  def assertReadOnly(self, qn, scope):
    self.assertIn(qn, scope.read)
    self.assertNotIn(qn, scope.modified)
  def assertWriteOnly(self, qn, scope):
    self.assertNotIn(qn, scope.read)
    self.assertIn(qn, scope.modified)
  def assertReadWrite(self, qn, scope):
    self.assertIn(qn, scope.read)
    self.assertIn(qn, scope.modified)
  def test_copy_from(self):
    # copy_from overwrites the destination; merge_from unions into it.
    scope = activity.Scope(None)
    scope.modified.add(QN('foo'))
    other = activity.Scope(None)
    other.copy_from(scope)
    self.assertWriteOnly(QN('foo'), other)
    scope.modified.add(QN('bar'))
    scope.copy_from(other)
    self.assertMissing(QN('bar'), scope)
    scope.modified.add(QN('bar'))
    scope.merge_from(other)
    self.assertWriteOnly(QN('bar'), scope)
    self.assertMissing(QN('bar'), other)
  def test_copy_of(self):
    scope = activity.Scope(None)
    scope.read.add(QN('foo'))
    other = activity.Scope.copy_of(scope)
    self.assertReadOnly(QN('foo'), other)
    child_scope = activity.Scope(scope)
    child_scope.read.add(QN('bar'))
    other = activity.Scope.copy_of(child_scope)
    self.assertReadOnly(QN('bar'), other)
  def test_referenced(self):
    # After finalize(), `referenced` accumulates reads from the whole
    # parent chain (the non-isolated child includes its parent's reads).
    scope = activity.Scope(None)
    scope.read.add(QN('a'))
    child = activity.Scope(scope)
    child.read.add(QN('b'))
    child2 = activity.Scope(child, isolated=False)
    child2.read.add(QN('c'))
    child2.finalize()
    child.finalize()
    scope.finalize()
    self.assertIn(QN('c'), child2.referenced)
    self.assertIn(QN('b'), child2.referenced)
    self.assertIn(QN('a'), child2.referenced)
    self.assertIn(QN('c'), child.referenced)
    self.assertIn(QN('b'), child.referenced)
    self.assertIn(QN('a'), child.referenced)
class ActivityAnalyzerTestBase(test.TestCase):
  """Shared helpers: parse + analyze a function, then assert on its scopes."""
  def _parse_and_analyze(self, test_fn):
    # Parse test_fn into an AST, resolve qualified names, then run the
    # activity analysis; returns the annotated AST and its entity info.
    node, source = parser.parse_entity(test_fn, future_features=())
    entity_info = transformer.EntityInfo(
        source_code=source, source_file=None, future_features=(), namespace={})
    node = qual_names.resolve(node)
    ctx = transformer.Context(entity_info)
    node = activity.resolve(node, ctx)
    return node, entity_info
  def assertSymbolSetsAre(self, expected, actual, name):
    # Compare as sets of strings so QN objects and plain strings unify.
    expected = set(expected)
    actual = set(str(s) for s in actual)
    self.assertSetEqual(
        expected, actual, 'for symbol set: %s\n'
        ' Expected: %s\n'
        ' Got: %s\n'
        ' Missing: %s\n'
        ' Extra: %s\n' % (name.upper(), expected, actual,
                          expected - actual, actual - expected))
  def assertScopeIs(self, scope, used, modified):
    """Assert the scope contains specific used, modified & created variables."""
    self.assertSymbolSetsAre(used, scope.read, 'read')
    self.assertSymbolSetsAre(modified, scope.modified, 'modified')
class ActivityAnalyzerTest(ActivityAnalyzerTestBase):
  """End-to-end checks of the activity analysis on small sample functions."""
  def test_print_statement(self):
    def test_fn(a):
      b = 0
      c = 1
      print(a, b)
      return c
    node, _ = self._parse_and_analyze(test_fn)
    print_node = node.body[2]
    if isinstance(print_node, gast.Print):
      # Python 2
      print_args_scope = anno.getanno(print_node, NodeAnno.ARGS_SCOPE)
    else:
      # Python 3
      assert isinstance(print_node, gast.Expr)
      # The call node should be the one being annotated.
      print_node = print_node.value
      print_args_scope = anno.getanno(print_node, NodeAnno.ARGS_SCOPE)
    # We basically need to detect which variables are captured by the call
    # arguments.
    self.assertScopeIs(print_args_scope, ('a', 'b'), ())
  def test_call_args(self):
    def test_fn(a):
      b = 0
      c = 1
      foo(a, b)  # pylint:disable=undefined-variable
      return c
    node, _ = self._parse_and_analyze(test_fn)
    call_node = node.body[2].value
    # We basically need to detect which variables are captured by the call
    # arguments.
    self.assertScopeIs(
        anno.getanno(call_node, NodeAnno.ARGS_SCOPE), ('a', 'b'), ())
  def test_call_args_attributes(self):
    def foo(*_):
      pass
    def test_fn(a):
      a.c = 0
      foo(a.b, a.c)
      return a.d
    node, _ = self._parse_and_analyze(test_fn)
    call_node = node.body[1].value
    self.assertScopeIs(
        anno.getanno(call_node, NodeAnno.ARGS_SCOPE), ('a', 'a.b', 'a.c'), ())
  def test_call_args_subscripts(self):
    def foo(*_):
      pass
    def test_fn(a):
      b = 1
      c = 2
      foo(a[0], a[b])
      return a[c]
    node, _ = self._parse_and_analyze(test_fn)
    call_node = node.body[2].value
    self.assertScopeIs(
        anno.getanno(call_node, NodeAnno.ARGS_SCOPE),
        ('a', 'a[0]', 'a[b]', 'b'), ())
  def test_while(self):
    def test_fn(a):
      b = a
      while b > 0:
        c = b
        b -= 1
      return b, c
    node, _ = self._parse_and_analyze(test_fn)
    while_node = node.body[1]
    self.assertScopeIs(
        anno.getanno(while_node, NodeAnno.BODY_SCOPE), ('b',), ('b', 'c'))
    self.assertScopeIs(
        anno.getanno(while_node, NodeAnno.BODY_SCOPE).parent, ('a', 'b', 'c'),
        ('b', 'c'))
    self.assertScopeIs(
        anno.getanno(while_node, NodeAnno.COND_SCOPE), ('b',), ())
  def test_for(self):
    def test_fn(a):
      b = a
      for _ in a:
        c = b
        b -= 1
      return b, c
    node, _ = self._parse_and_analyze(test_fn)
    for_node = node.body[1]
    # NOTE(review): ('_') is a plain string, not a tuple; set('_') == {'_'}
    # so the assertion still behaves as intended.
    self.assertScopeIs(
        anno.getanno(for_node, NodeAnno.ITERATE_SCOPE), (), ('_'))
    self.assertScopeIs(
        anno.getanno(for_node, NodeAnno.BODY_SCOPE), ('b',), ('b', 'c'))
    self.assertScopeIs(
        anno.getanno(for_node, NodeAnno.BODY_SCOPE).parent, ('a', 'b', 'c'),
        ('b', 'c', '_'))
  def test_if(self):
    def test_fn(x):
      if x > 0:
        x = -x
        y = 2 * x
        z = -y
      else:
        x = 2 * x
        y = -x
        u = -y
      return z, u
    node, _ = self._parse_and_analyze(test_fn)
    if_node = node.body[0]
    self.assertScopeIs(
        anno.getanno(if_node, NodeAnno.BODY_SCOPE), ('x', 'y'), ('x', 'y', 'z'))
    self.assertScopeIs(
        anno.getanno(if_node, NodeAnno.BODY_SCOPE).parent, ('x', 'y', 'z', 'u'),
        ('x', 'y', 'z', 'u'))
    self.assertScopeIs(
        anno.getanno(if_node, NodeAnno.ORELSE_SCOPE), ('x', 'y'),
        ('x', 'y', 'u'))
    self.assertScopeIs(
        anno.getanno(if_node, NodeAnno.ORELSE_SCOPE).parent,
        ('x', 'y', 'z', 'u'), ('x', 'y', 'z', 'u'))
  def test_if_attributes(self):
    def test_fn(a):
      if a > 0:
        a.b = -a.c
        d = 2 * a
      else:
        a.b = a.c
        d = 1
      return d
    node, _ = self._parse_and_analyze(test_fn)
    if_node = node.body[0]
    self.assertScopeIs(
        anno.getanno(if_node, NodeAnno.BODY_SCOPE), ('a', 'a.c'), ('a.b', 'd'))
    self.assertScopeIs(
        anno.getanno(if_node, NodeAnno.ORELSE_SCOPE), ('a', 'a.c'),
        ('a.b', 'd'))
    self.assertScopeIs(
        anno.getanno(if_node, NodeAnno.BODY_SCOPE).parent, ('a', 'a.c', 'd'),
        ('a.b', 'd'))
  def test_if_subscripts(self):
    def test_fn(a, b, c, e):
      if a > 0:
        a[b] = -a[c]
        d = 2 * a
      else:
        a[0] = e
        d = 1
      return d
    node, _ = self._parse_and_analyze(test_fn)
    if_node = node.body[0]
    self.assertScopeIs(
        anno.getanno(if_node, NodeAnno.BODY_SCOPE), ('a', 'b', 'c', 'a[c]'),
        ('a[b]', 'd'))
    # TODO(mdan): Should subscript writes (a[0] = 1) be considered to read "a"?
    self.assertScopeIs(
        anno.getanno(if_node, NodeAnno.ORELSE_SCOPE), ('a', 'e'), ('a[0]', 'd'))
    self.assertScopeIs(
        anno.getanno(if_node, NodeAnno.ORELSE_SCOPE).parent,
        ('a', 'b', 'c', 'd', 'e', 'a[c]'), ('d', 'a[b]', 'a[0]'))
  def test_nested_if(self):
    def test_fn(b):
      if b > 0:
        if b < 5:
          a = b
        else:
          a = b * b
      return a
    node, _ = self._parse_and_analyze(test_fn)
    inner_if_node = node.body[0].body[0]
    self.assertScopeIs(
        anno.getanno(inner_if_node, NodeAnno.BODY_SCOPE), ('b',), ('a',))
    self.assertScopeIs(
        anno.getanno(inner_if_node, NodeAnno.ORELSE_SCOPE), ('b',), ('a',))
  def test_nested_function(self):
    def test_fn(a):
      def f(x):
        y = x * x
        return y
      b = a
      for i in a:
        c = b
        b -= f(i)
      return b, c
    node, _ = self._parse_and_analyze(test_fn)
    fn_def_node = node.body[0]
    self.assertScopeIs(
        anno.getanno(fn_def_node, NodeAnno.BODY_SCOPE), ('x', 'y'), ('y',))
  def test_constructor_attributes(self):
    class TestClass(object):
      def __init__(self, a):
        self.b = a
        self.b.c = 1
    node, _ = self._parse_and_analyze(TestClass)
    init_node = node.body[0]
    self.assertScopeIs(
        anno.getanno(init_node, NodeAnno.BODY_SCOPE), ('self', 'a', 'self.b'),
        ('self', 'self.b', 'self.b.c'))
  def test_aug_assign_subscripts(self):
    def test_fn(a):
      a[0] += 1
    node, _ = self._parse_and_analyze(test_fn)
    fn_node = node
    self.assertScopeIs(
        anno.getanno(fn_node, NodeAnno.BODY_SCOPE), ('a', 'a[0]'), ('a[0]',))
  def test_return_vars_are_read(self):
    def test_fn(a, b, c):  # pylint: disable=unused-argument
      return c
    node, _ = self._parse_and_analyze(test_fn)
    fn_node = node
    self.assertScopeIs(anno.getanno(fn_node, NodeAnno.BODY_SCOPE), ('c',), ())
  def test_aug_assign(self):
    def test_fn(a, b):
      a += b
    node, _ = self._parse_and_analyze(test_fn)
    fn_node = node
    # NOTE(review): ('a') is a plain string here, not a tuple; set('a') is
    # {'a'}, so the check is still correct.
    self.assertScopeIs(
        anno.getanno(fn_node, NodeAnno.BODY_SCOPE), ('a', 'b'), ('a'))
  def test_aug_assign_rvalues(self):
    a = dict(bar=3)
    def foo():
      return a
    def test_fn(x):
      foo()['bar'] += x
    node, _ = self._parse_and_analyze(test_fn)
    fn_node = node
    self.assertScopeIs(
        anno.getanno(fn_node, NodeAnno.BODY_SCOPE), ('foo', 'x'), ())
  def test_params(self):
    def test_fn(a, b):  # pylint: disable=unused-argument
      return b
    node, _ = self._parse_and_analyze(test_fn)
    fn_node = node
    body_scope = anno.getanno(fn_node, NodeAnno.BODY_SCOPE)
    self.assertScopeIs(body_scope, ('b',), ())
    self.assertScopeIs(body_scope.parent, ('b',), ())
    args_scope = anno.getanno(fn_node.args, anno.Static.SCOPE)
    self.assertSymbolSetsAre(('a', 'b'), args_scope.params.keys(), 'params')
  def test_lambda_captures_reads(self):
    def test_fn(a, b):
      return lambda: a + b
    node, _ = self._parse_and_analyze(test_fn)
    fn_node = node
    body_scope = anno.getanno(fn_node, NodeAnno.BODY_SCOPE)
    self.assertScopeIs(body_scope, ('a', 'b'), ())
    # Nothing local to the lambda is tracked.
    self.assertSymbolSetsAre((), body_scope.params.keys(), 'params')
  def test_lambda_params_are_isolated(self):
    def test_fn(a, b):  # pylint: disable=unused-argument
      return lambda a: a + b
    node, _ = self._parse_and_analyze(test_fn)
    fn_node = node
    body_scope = anno.getanno(fn_node, NodeAnno.BODY_SCOPE)
    self.assertScopeIs(body_scope, ('b',), ())
    self.assertSymbolSetsAre((), body_scope.params.keys(), 'params')
  def test_lambda_complex(self):
    def test_fn(a, b, c, d):  # pylint: disable=unused-argument
      a = (lambda a, b, c: a + b + c)(d, 1, 2) + b
    node, _ = self._parse_and_analyze(test_fn)
    fn_node = node
    body_scope = anno.getanno(fn_node, NodeAnno.BODY_SCOPE)
    self.assertScopeIs(body_scope, ('b', 'd'), ('a',))
    self.assertSymbolSetsAre((), body_scope.params.keys(), 'params')
  def test_lambda_nested(self):
    def test_fn(a, b, c, d, e):  # pylint: disable=unused-argument
      a = lambda a, b: d(lambda b: a + b + c)  # pylint: disable=undefined-variable
    node, _ = self._parse_and_analyze(test_fn)
    fn_node = node
    body_scope = anno.getanno(fn_node, NodeAnno.BODY_SCOPE)
    self.assertScopeIs(body_scope, ('c', 'd'), ('a',))
    self.assertSymbolSetsAre((), body_scope.params.keys(), 'params')
  def test_comprehension_targets_are_isolated(self):
    def test_fn(a):
      b = {c for c in a}  # pylint:disable=unused-variable
    node, _ = self._parse_and_analyze(test_fn)
    fn_node = node
    body_scope = anno.getanno(fn_node, NodeAnno.BODY_SCOPE)
    self.assertScopeIs(body_scope, ('a',), ('b',))
  def test_comprehension_targets_are_isolated_list_function_w_generator(self):
    def test_fn(a):
      b = list(c for c in a)  # pylint:disable=unused-variable
    node, _ = self._parse_and_analyze(test_fn)
    fn_node = node
    body_scope = anno.getanno(fn_node, NodeAnno.BODY_SCOPE)
    self.assertScopeIs(body_scope, ('a', 'list'), ('b',))
  def test_list_comprehension_targets_are_sometimes_isolated(self):
    def test_fn(a):
      b = [c for c in a]  # pylint:disable=unused-variable
    node, _ = self._parse_and_analyze(test_fn)
    fn_node = node
    body_scope = anno.getanno(fn_node, NodeAnno.BODY_SCOPE)
    # Python 2 leaks list-comprehension targets into the enclosing scope.
    if six.PY2:
      self.assertScopeIs(body_scope, ('a',), ('b', 'c'))
    else:
      self.assertScopeIs(body_scope, ('a',), ('b',))
  def test_comprehension_targets_are_isolated_in_augassign(self):
    def test_fn(a, b):
      b += [c for c in a]  # pylint:disable=unused-variable
    node, _ = self._parse_and_analyze(test_fn)
    fn_node = node
    body_scope = anno.getanno(fn_node, NodeAnno.BODY_SCOPE)
    if six.PY2:
      self.assertScopeIs(body_scope, ('a', 'b'), ('b', 'c'))
    else:
      self.assertScopeIs(body_scope, ('a', 'b'), ('b',))
  def test_comprehension_generator_order(self):
    def test_fn(a, b, c):  # pylint:disable=unused-argument
      e = {d: (a, b) for (a, b) in c for d in b}  # pylint:disable=unused-variable,g-complex-comprehension
    node, _ = self._parse_and_analyze(test_fn)
    fn_node = node
    body_scope = anno.getanno(fn_node, NodeAnno.BODY_SCOPE)
    self.assertScopeIs(body_scope, ('c',), ('e',))
  def test_global_symbol(self):
    def test_fn(c):
      global global_a
      global global_b
      global_a = global_b + c
    node, _ = self._parse_and_analyze(test_fn)
    fn_node = node
    body_scope = anno.getanno(fn_node, NodeAnno.BODY_SCOPE)
    self.assertScopeIs(body_scope, ('global_b', 'c'), ('global_a',))
  def test_class_definition_basic(self):
    def test_fn(a, b):
      class C(a(b)):
        d = 1
      return C
    node, _ = self._parse_and_analyze(test_fn)
    fn_node = node
    body_scope = anno.getanno(fn_node, NodeAnno.BODY_SCOPE)
    self.assertScopeIs(body_scope, ('a', 'b', 'C'), ('C',))
  def test_class_definition_isolates_method_writes(self):
    def test_fn(a, b, c):
      class C(a(b)):
        d = 1
        def e(self):
          f = c + 1
          return f
      return C
    node, _ = self._parse_and_analyze(test_fn)
    fn_node = node
    body_scope = anno.getanno(fn_node, NodeAnno.BODY_SCOPE)
    self.assertScopeIs(body_scope, ('a', 'b', 'C', 'c'), ('C',))
# Standard TensorFlow test entry point.
if __name__ == '__main__':
  test.main()
|
gsehub/edx-platform
|
refs/heads/gsehub-release
|
common/djangoapps/xblock_django/management/commands/ensure_indexes.py
|
129
|
"""
Creates Indexes on contentstore and modulestore databases.
"""
from django.core.management.base import BaseCommand
from xmodule.contentstore.django import contentstore
from xmodule.modulestore.django import modulestore
class Command(BaseCommand):
    """
    This command will create indexes on the stores used for both contentstore and modulestore.
    """
    args = ''
    help = 'Creates the indexes for ContentStore and ModuleStore databases'

    def handle(self, *args, **options):
        """Ensure the MongoDB indexes exist for both stores."""
        contentstore().ensure_indexes()
        modulestore().ensure_indexes()
        # BUG FIX: the original used a Python-2-only `print` statement.
        # Django management commands should write through self.stdout,
        # which works on both Python 2 and 3 and respects output
        # redirection in tests.
        self.stdout.write('contentstore and modulestore indexes created!\n')
|
sanmiguel/home-assistant
|
refs/heads/master
|
homeassistant/components/frontend/__init__.py
|
1
|
"""
homeassistant.components.frontend
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Provides a frontend for Home Assistant.
"""
import re
import os
import logging
from . import version
import homeassistant.util as util
from homeassistant.const import URL_ROOT, HTTP_OK
DOMAIN = 'frontend'
DEPENDENCIES = ['api']
INDEX_PATH = os.path.join(os.path.dirname(__file__), 'index.html.template')
_LOGGER = logging.getLogger(__name__)
FRONTEND_URLS = [
URL_ROOT, '/logbook', '/history', '/devService', '/devState', '/devEvent']
STATES_URL = re.compile(r'/states(/([a-zA-Z\._\-0-9/]+)|)')
def setup(hass, config):
    """ Setup serving the frontend. """
    # All routes below are registered on hass.http, so the http component
    # must already be loaded.
    if 'http' not in hass.config.components:
        _LOGGER.error('Dependency http is not loaded')
        return False
    # The same root handler serves every frontend page (single-page app).
    for url in FRONTEND_URLS:
        hass.http.register_path('GET', url, _handle_get_root, False)
    hass.http.register_path('GET', STATES_URL, _handle_get_root, False)
    # Static files
    hass.http.register_path(
        'GET', re.compile(r'/static/(?P<file>[a-zA-Z\._\-0-9/]+)'),
        _handle_get_static, False)
    hass.http.register_path(
        'HEAD', re.compile(r'/static/(?P<file>[a-zA-Z\._\-0-9/]+)'),
        _handle_get_static, False)
    return True
def _handle_get_root(handler, path_match, data):
    """ Renders the debug interface. """
    handler.send_response(HTTP_OK)
    handler.send_header('Content-type', 'text/html; charset=utf-8')
    handler.end_headers()
    # In development mode serve the unbundled polymer sources; otherwise
    # serve the versioned, built frontend bundle.
    if handler.server.development:
        app_url = "polymer/home-assistant.html"
    else:
        app_url = "frontend-{}.html".format(version.VERSION)
    # auto login if no password was set, else check api_password param
    auth = ('no_password_set' if handler.server.no_password_set
            else data.get('api_password', ''))
    # Fill the two placeholders in index.html.template and stream it out.
    with open(INDEX_PATH) as template_file:
        template_html = template_file.read()
        template_html = template_html.replace('{{ app_url }}', app_url)
        template_html = template_html.replace('{{ auth }}', auth)
        handler.wfile.write(template_html.encode("UTF-8"))
def _handle_get_static(handler, path_match, data):
    """ Returns a static file for the frontend. """
    # sanitize_path guards against directory traversal in the request path.
    req_file = util.sanitize_path(path_match.group('file'))
    # Strip md5 hash out of frontend filename
    if re.match(r'^frontend-[A-Za-z0-9]{32}\.html$', req_file):
        req_file = "frontend.html"
    path = os.path.join(os.path.dirname(__file__), 'www_static', req_file)
    handler.write_file(path)
|
michellemorales/OpenMM
|
refs/heads/master
|
models/ptn/nets/im2vox_factory.py
|
10
|
# Copyright 2017 The TensorFlow Authors All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Factory module for getting the complete image to voxel generation network."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
from nets import perspective_projector
from nets import ptn_encoder
from nets import ptn_vox_decoder
_NAME_TO_NETS = {
'ptn_encoder': ptn_encoder,
'ptn_vox_decoder': ptn_vox_decoder,
'perspective_projector': perspective_projector,
}
def _get_network(name):
  """Gets a single encoder/decoder network model."""
  network_module = _NAME_TO_NETS.get(name)
  if network_module is None:
    raise ValueError('Network name [%s] not recognized.' % name)
  return network_module.model
def get(params, is_training=False, reuse=False, run_projection=True):
  """Factory function to get the training/pretraining im->vox model (NIPS16).
  Args:
    params: Different parameters used throughout ptn, typically FLAGS (dict).
    is_training: Set to True while training (boolean).
    reuse: Set as True if sharing variables with a model that has already
      been built (boolean).
    run_projection: Set as False if not interested in mask and projection
      images. Useful in evaluation routine (boolean).
  Returns:
    Model function for network (inputs to outputs).
  """
  def model(inputs):
    """Model function corresponding to a specific network architecture."""
    outputs = {}
    # First, build the encoder
    encoder_fn = _get_network(params.encoder_name)
    with tf.variable_scope('encoder', reuse=reuse):
      # Produces id/pose units
      enc_outputs = encoder_fn(inputs['images_1'], params, is_training)
      outputs['ids_1'] = enc_outputs['ids']
    # Second, build the decoder and projector
    decoder_fn = _get_network(params.decoder_name)
    with tf.variable_scope('decoder', reuse=reuse):
      outputs['voxels_1'] = decoder_fn(outputs['ids_1'], params, is_training)
    if run_projection:
      projector_fn = _get_network(params.projector_name)
      with tf.variable_scope('projector', reuse=reuse):
        outputs['projs_1'] = projector_fn(
            outputs['voxels_1'], inputs['matrix_1'], params, is_training)
      # Infer the ground-truth mask
      with tf.variable_scope('oracle', reuse=reuse):
        outputs['masks_1'] = projector_fn(inputs['voxels'], inputs['matrix_1'],
                                          params, False)
      # Third, build the entire graph (bundled strategy described in PTN paper)
      # Views 2..step_size reuse the projector/oracle variables built above.
      for k in range(1, params.step_size):
        with tf.variable_scope('projector', reuse=True):
          outputs['projs_%d' % (k + 1)] = projector_fn(
              outputs['voxels_1'], inputs['matrix_%d' %
                                          (k + 1)], params, is_training)
        with tf.variable_scope('oracle', reuse=True):
          outputs['masks_%d' % (k + 1)] = projector_fn(
              inputs['voxels'], inputs['matrix_%d' % (k + 1)], params, False)
    return outputs
  return model
|
pjryan126/solid-start-careers
|
refs/heads/master
|
store/api/zillow/venv/lib/python2.7/site-packages/requests/packages/chardet/compat.py
|
2942
|
######################## BEGIN LICENSE BLOCK ########################
# Contributor(s):
# Ian Cordasco - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
import sys

# On Python 2 character data may arrive as `str` or `unicode`; the
# equivalent pair on Python 3 is `bytes` / `str`.
if sys.version_info < (3, 0):
    base_str = (str, unicode)
else:
    base_str = (bytes, str)


def wrap_ord(a):
    """Return ord(a) for one-character strings on Python 2; pass through otherwise."""
    if sys.version_info >= (3, 0) or not isinstance(a, base_str):
        return a
    return ord(a)
|
systers/mailman
|
refs/heads/master
|
src/mailman/interfaces/database.py
|
7
|
# Copyright (C) 2007-2015 by the Free Software Foundation, Inc.
#
# This file is part of GNU Mailman.
#
# GNU Mailman is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option)
# any later version.
#
# GNU Mailman is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
#
# You should have received a copy of the GNU General Public License along with
# GNU Mailman. If not, see <http://www.gnu.org/licenses/>.
"""Interfaces for database interaction."""
__all__ = [
'DatabaseError',
'IDatabase',
'IDatabaseFactory',
]
from mailman.interfaces.errors import MailmanError
from zope.interface import Attribute, Interface
# Raised by database-layer implementations for storage failures.
class DatabaseError(MailmanError):
    """A problem with the database occurred."""
class IDatabase(Interface):
    """Database layer interface."""
    def initialize(debug=None):
        """Initialize the database layer, using whatever means necessary.
        :param debug: When None (the default), the configuration file
            determines whether the database layer should have increased
            debugging or not.  When True or False, this overrides the
            configuration file setting.
        """
    def begin():
        """Begin the current transaction."""
    def commit():
        """Commit the current transaction."""
    def abort():
        """Abort the current transaction."""
    # zope.interface attribute declaration, not a class variable.
    store = Attribute(
        """The underlying database object on which you can do queries.""")
class IDatabaseFactory(Interface):
    """Interface for creating new databases."""
    def create():
        """Return a new `IDatabase`.
        The database will be initialized and all migrations will be loaded.
        :return: A new database.
        :rtype: IDatabase
        """
|
sajuptpm/magnum
|
refs/heads/master
|
magnum/api/app.py
|
6
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_config import cfg
import pecan
from magnum.api import auth
from magnum.api import config as api_config
from magnum.api import middleware
# Register options for the service
# Configuration knobs for the magnum-api server, grouped under [api] in
# the oslo.config configuration file.
API_SERVICE_OPTS = [
    cfg.IntOpt('port',
               default=9511,
               help='The port for the Magnum API server.'),
    cfg.StrOpt('host',
               default='127.0.0.1',
               help='The listen IP for the Magnum API server.'),
    cfg.IntOpt('max_limit',
               default=1000,
               help='The maximum number of items returned in a single '
                    'response from a collection resource.')
]
CONF = cfg.CONF
# The group must be registered before the options are attached to it.
opt_group = cfg.OptGroup(name='api',
                         title='Options for the magnum-api service')
CONF.register_group(opt_group)
CONF.register_opts(API_SERVICE_OPTS, opt_group)
def get_pecan_config():
    """Load the pecan configuration shipped as magnum.api.config."""
    # __file__ may point at the compiled .pyc; pecan wants the .py source.
    source_path = api_config.__file__.replace('.pyc', '.py')
    return pecan.configuration.conf_from_file(source_path)
def setup_app(config=None):
    """Build and return the magnum-api WSGI application.

    :param config: Optional pecan configuration object; when omitted the
        configuration from magnum.api.config is loaded.
    """
    if not config:
        config = get_pecan_config()
    app_conf = dict(config.app)
    app = pecan.make_app(
        # 'root' is consumed here; every remaining key is forwarded to pecan.
        app_conf.pop('root'),
        logging=getattr(config, 'logging', {}),
        wrap_app=middleware.ParsableErrorMiddleware,
        **app_conf
    )
    # Wrap with the auth middleware; acl_public_routes is presumably the
    # list of paths exempt from authentication -- see magnum.api.auth.
    return auth.install(app, CONF, config.app.acl_public_routes)
|
s20121035/rk3288_android5.1_repo
|
refs/heads/master
|
external/chromium_org/tools/perf/page_sets/key_silk_cases.py
|
25
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page import page as page_module
from telemetry.page import page_set as page_set_module
# Shared base for all Silk pages: mobile user agent, common archive and
# credentials files, a 2 s settle after navigation, and a default
# full-page scroll as the smoothness measurement.
class KeySilkCasesPage(page_module.Page):
  def __init__(self, url, page_set):
    super(KeySilkCasesPage, self).__init__(url=url, page_set=page_set)
    self.credentials_path = 'data/credentials.json'
    self.user_agent_type = 'mobile'
    self.archive_data_file = 'data/key_silk_cases.json'
  def RunNavigateSteps(self, action_runner):
    action_runner.NavigateToPage(self)
    action_runner.Wait(2)
  def RunSmoothness(self, action_runner):
    interaction = action_runner.BeginGestureInteraction(
        'ScrollAction', is_smooth=True)
    action_runner.ScrollPage()
    interaction.End()
class Page1(KeySilkCasesPage):
  """ Why: Infinite scroll. Brings out all of our perf issues. """
  def __init__(self, page_set):
    super(Page1, self).__init__(
      url='http://groupcloned.com/test/plain/list-recycle-transform.html',
      page_set=page_set)
  def RunSmoothness(self, action_runner):
    # Scroll the inner list container instead of the whole page.
    interaction = action_runner.BeginGestureInteraction(
        'ScrollAction', is_smooth=True)
    action_runner.ScrollElement(selector='#scrollable')
    interaction.End()
class Page2(KeySilkCasesPage):
  """ Why: Brings out layer management bottlenecks. """
  def __init__(self, page_set):
    super(Page2, self).__init__(
      url='http://groupcloned.com/test/plain/list-animation-simple.html',
      page_set=page_set)
  def RunSmoothness(self, action_runner):
    # Passive measurement: the page animates on its own, so just wait.
    action_runner.Wait(2)
class Page3(KeySilkCasesPage):
  """
  Why: Best-known method for fake sticky. Janks sometimes. Interacts badly with
  compositor scrolls.
  """
  def __init__(self, page_set):
    super(Page3, self).__init__(
      # pylint: disable=C0301
      url='http://groupcloned.com/test/plain/sticky-using-webkit-backface-visibility.html',
      page_set=page_set)
  def RunSmoothness(self, action_runner):
    # Scroll the sticky-header container, not the document.
    interaction = action_runner.BeginGestureInteraction(
        'ScrollAction', is_smooth=True)
    action_runner.ScrollElement(selector='#container')
    interaction.End()
class Page4(KeySilkCasesPage):
  """
  Why: Card expansion: only the card should repaint, but in reality lots of
  storms happen.
  """
  def __init__(self, page_set):
    super(Page4, self).__init__(
      url='http://jsfiddle.net/3yDKh/15/show/',
      page_set=page_set)
  def RunSmoothness(self, action_runner):
    # Passive measurement while the card-expansion animation plays.
    action_runner.Wait(3)
class Page5(KeySilkCasesPage):
  """
  Why: Card expansion with animated contents, using will-change on the card
  """
  def __init__(self, page_set):
    super(Page5, self).__init__(
      url='http://jsfiddle.net/jx5De/14/show/',
      page_set=page_set)
    # Run this page with GPU rasterization enabled.
    self.gpu_raster = True
  def RunSmoothness(self, action_runner):
    action_runner.Wait(4)
class Page6(KeySilkCasesPage):
  """
  Why: Card fly-in: It should be fast to animate in a bunch of cards using
  margin-top and letting layout do the rest.
  """
  def __init__(self, page_set):
    super(Page6, self).__init__(
      url='http://jsfiddle.net/3yDKh/16/show/',
      page_set=page_set)
  def RunSmoothness(self, action_runner):
    # Passive measurement while cards fly in.
    action_runner.Wait(3)
class Page7(KeySilkCasesPage):
  """
  Why: Image search expands a spacer div when you click an image to accomplish
  a zoomin effect. Each image has a layer. Even so, this triggers a lot of
  unnecessary repainting.
  """
  def __init__(self, page_set):
    super(Page7, self).__init__(
      url='http://jsfiddle.net/R8DX9/4/show/',
      page_set=page_set)
  def RunSmoothness(self, action_runner):
    # Passive measurement while the zoom-in effect runs.
    action_runner.Wait(3)
class Page8(KeySilkCasesPage):
  """
  Why: Swipe to dismiss of an element that has a fixed-position child that is
  its pseudo-sticky header. Brings out issues with layer creation and
  repainting.
  """
  def __init__(self, page_set):
    super(Page8, self).__init__(
      url='http://jsfiddle.net/rF9Gh/7/show/',
      page_set=page_set)
  def RunSmoothness(self, action_runner):
    # Passive measurement while the dismiss animation runs.
    action_runner.Wait(3)
class Page9(KeySilkCasesPage):
  """
  Why: Horizontal and vertical expansion of a card that is cheap to layout but
  costly to rasterize.
  """
  def __init__(self, page_set):
    super(Page9, self).__init__(
      url='http://jsfiddle.net/TLXLu/3/show/',
      page_set=page_set)
    # Run this page with GPU rasterization enabled.
    self.gpu_raster = True
  def RunSmoothness(self, action_runner):
    action_runner.Wait(4)
class Page10(KeySilkCasesPage):
  """
  Why: Vertical Expansion of a card that is cheap to layout but costly to
  rasterize.
  """
  def __init__(self, page_set):
    super(Page10, self).__init__(
      url='http://jsfiddle.net/cKB9D/7/show/',
      page_set=page_set)
    # Run this page with GPU rasterization enabled.
    self.gpu_raster = True
  def RunSmoothness(self, action_runner):
    action_runner.Wait(4)
class Page11(KeySilkCasesPage):
  """
  Why: Parallax effect is common on photo-viewer-like applications, overloading
  software rasterization
  """
  def __init__(self, page_set):
    super(Page11, self).__init__(
      url='http://jsfiddle.net/vBQHH/11/show/',
      page_set=page_set)
    # Run this page with GPU rasterization enabled.
    self.gpu_raster = True
  def RunSmoothness(self, action_runner):
    action_runner.Wait(4)
class Page12(KeySilkCasesPage):
  """ Why: Addressing paint storms during coordinated animations. """
  def __init__(self, page_set):
    super(Page12, self).__init__(
      url='http://jsfiddle.net/ugkd4/10/show/',
      page_set=page_set)
  def RunSmoothness(self, action_runner):
    # Passive measurement while the coordinated animations run.
    action_runner.Wait(5)
class Page13(KeySilkCasesPage):
  """ Why: Mask transitions are common mobile use cases. """
  def __init__(self, page_set):
    super(Page13, self).__init__(
      url='http://jsfiddle.net/xLuvC/1/show/',
      page_set=page_set)
    # Run this page with GPU rasterization enabled.
    self.gpu_raster = True
  def RunSmoothness(self, action_runner):
    action_runner.Wait(4)
class Page14(KeySilkCasesPage):
  """ Why: Card expansions with images and text are pretty and common. """
  def __init__(self, page_set):
    super(Page14, self).__init__(
      url='http://jsfiddle.net/bNp2h/3/show/',
      page_set=page_set)
    # Run this page with GPU rasterization enabled.
    self.gpu_raster = True
  def RunSmoothness(self, action_runner):
    action_runner.Wait(4)
class Page15(KeySilkCasesPage):
  """ Why: Coordinated animations for expanding elements. """
  def __init__(self, page_set):
    super(Page15, self).__init__(
      # Local test file, recorded into the shared WPR archive.
      url='file://key_silk_cases/font_wipe.html',
      page_set=page_set)
  def RunSmoothness(self, action_runner):
    action_runner.Wait(5)
class Page16(KeySilkCasesPage):
  # Inbox-app demo: measures swiping a message off screen.
  def __init__(self, page_set):
    super(Page16, self).__init__(
      url='file://key_silk_cases/inbox_app.html?swipe_to_dismiss',
      page_set=page_set)
  def RunNavigateSteps(self, action_runner):
    action_runner.NavigateToPage(self)
    action_runner.Wait(2)
  def SwipeToDismiss(self, action_runner):
    interaction = action_runner.BeginGestureInteraction(
        'SwipeAction', is_smooth=True)
    action_runner.SwipeElement(
        left_start_ratio=0.8, top_start_ratio=0.2,
        direction='left', distance=200, speed_in_pixels_per_second=5000,
        element_function='document.getElementsByClassName("message")[2]')
    interaction.End()
    # Wait until the dismissed message has actually been removed.
    interaction = action_runner.BeginInteraction('Wait', is_smooth=True)
    action_runner.WaitForJavaScriptCondition(
        'document.getElementsByClassName("message").length < 18')
    interaction.End()
  def RunSmoothness(self, action_runner):
    self.SwipeToDismiss(action_runner)
class Page17(KeySilkCasesPage):
  # Inbox-app demo: stresses the hiding/showing top and bottom bars by
  # scrolling down, up, then down again.
  def __init__(self, page_set):
    super(Page17, self).__init__(
      url='file://key_silk_cases/inbox_app.html?stress_hidey_bars',
      page_set=page_set)
  def RunNavigateSteps(self, action_runner):
    action_runner.NavigateToPage(self)
    action_runner.Wait(2)
  def RunSmoothness(self, action_runner):
    self.StressHideyBars(action_runner)
  def StressHideyBars(self, action_runner):
    interaction = action_runner.BeginGestureInteraction(
        'ScrollAction', is_smooth=True)
    action_runner.ScrollElement(
        selector='#messages', direction='down', speed_in_pixels_per_second=200)
    interaction.End()
    interaction = action_runner.BeginGestureInteraction(
        'ScrollAction', is_smooth=True)
    action_runner.ScrollElement(
        selector='#messages', direction='up', speed_in_pixels_per_second=200)
    interaction.End()
    interaction = action_runner.BeginGestureInteraction(
        'ScrollAction', is_smooth=True)
    action_runner.ScrollElement(
        selector='#messages', direction='down', speed_in_pixels_per_second=200)
    interaction.End()
class Page18(KeySilkCasesPage):
  # Inbox-app demo: toggles the navigation drawer six times.
  def __init__(self, page_set):
    super(Page18, self).__init__(
      url='file://key_silk_cases/inbox_app.html?toggle_drawer',
      page_set=page_set)
  def RunNavigateSteps(self, action_runner):
    action_runner.NavigateToPage(self)
    action_runner.Wait(2)
  def RunSmoothness(self, action_runner):
    # range instead of Python-2-only xrange: identical iteration here and
    # keeps the file importable under Python 3.
    for _ in range(6):
      self.ToggleDrawer(action_runner)
  def ToggleDrawer(self, action_runner):
    interaction = action_runner.BeginInteraction(
        'Action_TapAction', is_smooth=True)
    action_runner.TapElement('#menu-button')
    action_runner.Wait(1)
    interaction.End()
class Page19(KeySilkCasesPage):
  # Inbox-app demo: opens the drawer during navigation, then measures
  # sliding it closed with a swipe.
  def __init__(self, page_set):
    super(Page19, self).__init__(
      url='file://key_silk_cases/inbox_app.html?slide_drawer',
      page_set=page_set)
  def ToggleDrawer(self, action_runner):
    interaction = action_runner.BeginGestureInteraction(
        'TapAction', is_smooth=True)
    action_runner.TapElement('#menu-button')
    interaction.End()
    # Wait until the drawer is fully open (flush with the left edge).
    interaction = action_runner.BeginInteraction('Wait', is_smooth=True)
    action_runner.WaitForJavaScriptCondition('''
        document.getElementById("nav-drawer").active &&
        document.getElementById("nav-drawer").children[0]
            .getBoundingClientRect().left == 0''')
    interaction.End()
  def RunNavigateSteps(self, action_runner):
    action_runner.NavigateToPage(self)
    action_runner.Wait(2)
    self.ToggleDrawer(action_runner)
  def RunSmoothness(self, action_runner):
    self.SlideDrawer(action_runner)
  def SlideDrawer(self, action_runner):
    interaction = action_runner.BeginInteraction(
        'Action_SwipeAction', is_smooth=True)
    action_runner.SwipeElement(
        left_start_ratio=0.8, top_start_ratio=0.2,
        direction='left', distance=200,
        element_function='document.getElementById("nav-drawer").children[0]')
    action_runner.WaitForJavaScriptCondition(
        '!document.getElementById("nav-drawer").active')
    interaction.End()
class Page20(KeySilkCasesPage):
  """ Why: Shadow DOM infinite scrolling. """
  def __init__(self, page_set):
    super(Page20, self).__init__(
      url='file://key_silk_cases/infinite_scrolling.html',
      page_set=page_set)
  def RunSmoothness(self, action_runner):
    # Fast scroll of the shadow-DOM container.
    interaction = action_runner.BeginGestureInteraction(
        'ScrollAction', is_smooth=True)
    action_runner.ScrollElement(
        selector='#container', speed_in_pixels_per_second=5000)
    interaction.End()
class GwsExpansionPage(KeySilkCasesPage):
  """Abstract base class for pages that expand Google knowledge panels."""
  def NavigateWait(self, action_runner):
    # Navigate and give the results page time to settle.
    action_runner.NavigateToPage(self)
    action_runner.Wait(3)
  def ExpandKnowledgeCard(self, action_runner):
    # expand card
    interaction = action_runner.BeginInteraction(
        'Action_TapAction', is_smooth=True)
    action_runner.TapElement(
        element_function='document.getElementsByClassName("vk_arc")[0]')
    action_runner.Wait(2)
    interaction.End()
  def ScrollKnowledgeCardToTop(self, action_runner, card_id):
    # scroll until the knowledge card is at the top
    action_runner.ExecuteJavaScript(
        "document.getElementById('%s').scrollIntoView()" % card_id)
  def RunSmoothness(self, action_runner):
    self.ExpandKnowledgeCard(action_runner)
class GwsGoogleExpansion(GwsExpansionPage):
  """ Why: Animating height of a complex content card is common. """
  def __init__(self, page_set):
    super(GwsGoogleExpansion, self).__init__(
      url='http://www.google.com/#q=google',
      page_set=page_set)
  def RunNavigateSteps(self, action_runner):
    self.NavigateWait(action_runner)
    # 'kno-result' is the knowledge-panel element on this results page.
    self.ScrollKnowledgeCardToTop(action_runner, 'kno-result')
class GwsBoogieExpansion(GwsExpansionPage):
  """ Why: Same case as Google expansion but text-heavy rather than image. """
  def __init__(self, page_set):
    super(GwsBoogieExpansion, self).__init__(
      url='https://www.google.com/search?hl=en&q=define%3Aboogie',
      page_set=page_set)
  def RunNavigateSteps(self, action_runner):
    self.NavigateWait(action_runner)
    # 'rso' is the search-results container on this page.
    self.ScrollKnowledgeCardToTop(action_runner, 'rso')
class Page22(KeySilkCasesPage):
  # Google+ basic stream; requires login and is currently disabled.
  def __init__(self, page_set):
    super(Page22, self).__init__(
      url='http://plus.google.com/app/basic/stream',
      page_set=page_set)
    self.disabled = 'Times out on Windows; crbug.com/338838'
    self.credentials = 'google'
  def RunNavigateSteps(self, action_runner):
    action_runner.NavigateToPage(self)
    # Wait for stream items ("fHa" elements) to appear before settling.
    action_runner.WaitForJavaScriptCondition(
        'document.getElementsByClassName("fHa").length > 0')
    action_runner.Wait(2)
  def RunSmoothness(self, action_runner):
    interaction = action_runner.BeginGestureInteraction(
        'ScrollAction', is_smooth=True)
    action_runner.ScrollElement(selector='#mainContent')
    interaction.End()
class Page23(KeySilkCasesPage):
  """
  Why: Physical simulation demo that does a lot of element.style mutation
  triggering JS and recalc slowness
  """
  def __init__(self, page_set):
    super(Page23, self).__init__(
      url='http://jsbin.com/UVIgUTa/38/quiet',
      page_set=page_set)
  def RunSmoothness(self, action_runner):
    # Half-viewport touch scroll, then a 1 s settle, both recorded.
    interaction = action_runner.BeginGestureInteraction(
        'ScrollAction', is_smooth=True)
    action_runner.ScrollPage(
        distance_expr='window.innerHeight / 2',
        direction='down',
        use_touch=True)
    interaction.End()
    interaction = action_runner.BeginInteraction('Wait', is_smooth=True)
    action_runner.Wait(1)
    interaction.End()
class Page24(KeySilkCasesPage):
  """
  Why: Google News: this iOS version is slower than accelerated scrolling
  """
  def __init__(self, page_set):
    super(Page24, self).__init__(
      url='http://mobile-news.sandbox.google.com/news/pt0?scroll',
      page_set=page_set)
  def RunNavigateSteps(self, action_runner):
    action_runner.NavigateToPage(self)
    # The app is ready once the ":h" element exists.
    action_runner.WaitForJavaScriptCondition(
        'document.getElementById(":h") != null')
    action_runner.Wait(1)
  def RunSmoothness(self, action_runner):
    interaction = action_runner.BeginGestureInteraction(
        'ScrollAction', is_smooth=True)
    action_runner.ScrollElement(
        element_function='document.getElementById(":5")',
        distance=2500,
        use_touch=True)
    interaction.End()
class Page25(KeySilkCasesPage):
  # Google News mobile demo: horizontal swipe between sections.
  def __init__(self, page_set):
    super(Page25, self).__init__(
      url='http://mobile-news.sandbox.google.com/news/pt0?swipe',
      page_set=page_set)
  def RunNavigateSteps(self, action_runner):
    action_runner.NavigateToPage(self)
    action_runner.WaitForJavaScriptCondition(
        'document.getElementById(":h") != null')
    action_runner.Wait(1)
  def RunSmoothness(self, action_runner):
    interaction = action_runner.BeginGestureInteraction(
        'SwipeAction', is_smooth=True)
    action_runner.SwipeElement(
        direction='left', distance=100,
        element_function='document.getElementById(":f")')
    interaction.End()
    interaction = action_runner.BeginInteraction('Wait', is_smooth=True)
    action_runner.Wait(1)
    interaction.End()
class Page26(KeySilkCasesPage):
  """ Why: famo.us twitter demo """
  def __init__(self, page_set):
    super(Page26, self).__init__(
      url='http://s.codepen.io/befamous/fullpage/pFsqb?scroll',
      page_set=page_set)
  def RunNavigateSteps(self, action_runner):
    action_runner.NavigateToPage(self)
    # Wait until tweets have rendered.
    action_runner.WaitForJavaScriptCondition(
        'document.getElementsByClassName("tweet").length > 0')
    action_runner.Wait(1)
  def RunSmoothness(self, action_runner):
    interaction = action_runner.BeginGestureInteraction(
        'ScrollAction', is_smooth=True)
    action_runner.ScrollPage(distance=5000)
    interaction.End()
class SVGIconRaster(KeySilkCasesPage):
  """ Why: Mutating SVG icons; these paint storm and paint slowly. """
  def __init__(self, page_set):
    super(SVGIconRaster, self).__init__(
      url='http://wiltzius.github.io/shape-shifter/',
      page_set=page_set)
  def RunNavigateSteps(self, action_runner):
    action_runner.NavigateToPage(self)
    # NOTE(review): 'loaded = true' is a JS *assignment*, so this condition
    # is always truthy; presumably 'loaded == true' was intended. Left
    # unchanged to preserve behavior -- confirm against the page's JS.
    action_runner.WaitForJavaScriptCondition(
        'loaded = true')
    action_runner.Wait(1)
  def RunSmoothness(self, action_runner):
    # range instead of Python-2-only xrange: identical iteration here and
    # keeps the file importable under Python 3.
    for i in range(9):
      button_func = ('document.getElementById("demo").$.'
                     'buttons.children[%d]') % i
      interaction = action_runner.BeginInteraction(
          'Action_TapAction', is_smooth=True)
      action_runner.TapElement(element_function=button_func)
      action_runner.Wait(1)
      interaction.End()
class UpdateHistoryState(KeySilkCasesPage):
  """ Why: Modern apps often update history state, which currently is janky."""
  def __init__(self, page_set):
    super(UpdateHistoryState, self).__init__(
      url='file://key_silk_cases/pushState.html',
      page_set=page_set)
  def RunNavigateSteps(self, action_runner):
    action_runner.NavigateToPage(self)
    # Set a flag on the next animation frame so we know rendering started.
    action_runner.ExecuteJavaScript('''
        window.requestAnimationFrame(function() {
            window.__history_state_loaded = true;
          });
        ''')
    action_runner.WaitForJavaScriptCondition(
        'window.__history_state_loaded == true;')
  def RunSmoothness(self, action_runner):
    interaction = action_runner.BeginInteraction('animation_interaction',
                                                 is_smooth=True)
    action_runner.Wait(5) # JS runs the animation continuously on the page
    interaction.End()
class TextSizeAnimation(KeySilkCasesPage):
  """ Why: Scale animation with text. """
  def __init__(self, page_set):
    super(TextSizeAnimation, self).__init__(
      url='http://jsbin.com/gikex/2/quiet',
      page_set=page_set)
    # Run this page with GPU rasterization enabled.
    self.gpu_raster = True
  def RunSmoothness(self, action_runner):
    action_runner.Wait(4)
class KeySilkCasesPageSet(page_set_module.PageSet):
  """ Pages hand-picked for project Silk. """
  def __init__(self):
    super(KeySilkCasesPageSet, self).__init__(
      credentials_path='data/credentials.json',
      user_agent_type='mobile',
      archive_data_file='data/key_silk_cases.json',
      bucket=page_set_module.PARTNER_BUCKET)
    # Pages are added in numeric order; Page19 and Page21 are absent
    # (Page19 disabled below, no Page21 class exists in this file).
    self.AddPage(Page1(self))
    self.AddPage(Page2(self))
    self.AddPage(Page3(self))
    self.AddPage(Page4(self))
    self.AddPage(Page5(self))
    self.AddPage(Page6(self))
    self.AddPage(Page7(self))
    self.AddPage(Page8(self))
    self.AddPage(Page9(self))
    self.AddPage(Page10(self))
    self.AddPage(Page11(self))
    self.AddPage(Page12(self))
    self.AddPage(Page13(self))
    self.AddPage(Page14(self))
    self.AddPage(Page15(self))
    self.AddPage(Page16(self))
    self.AddPage(Page17(self))
    self.AddPage(Page18(self))
    # crbug.com/404317
    # self.AddPage(Page19(self))
    self.AddPage(Page20(self))
    self.AddPage(GwsGoogleExpansion(self))
    self.AddPage(GwsBoogieExpansion(self))
    self.AddPage(Page22(self))
    self.AddPage(Page23(self))
    self.AddPage(Page24(self))
    self.AddPage(Page25(self))
    self.AddPage(Page26(self))
    self.AddPage(SVGIconRaster(self))
    self.AddPage(UpdateHistoryState(self))
    self.AddPage(TextSizeAnimation(self))
|
telwertowski/Books-Mac-OS-X
|
refs/heads/master
|
Export Plugins/WhatsOnMyBookShelf Exporter/SOAPpy/wstools/test/test_wstools.py
|
308
|
#!/usr/bin/env python
############################################################################
# Joshua R. Boverhof, David W. Robertson, LBNL
# See LBNLCopyright for copyright notice!
###########################################################################
import unittest, tarfile, os, ConfigParser
import test_wsdl
# Config-file location of the test archives: section [files] of config.txt.
SECTION='files'
CONFIG_FILE = 'config.txt'
def extractFiles(section, option):
    """Extract the tar archives listed under [section]option in CONFIG_FILE.

    The option value is a Python list literal of archive paths.  Each
    archive is extracted only if its first member is not already readable
    on disk (i.e. it was not extracted before).
    """
    # Local import keeps the file-level import list untouched.
    import ast
    config = ConfigParser.ConfigParser()
    config.read(CONFIG_FILE)
    # literal_eval instead of eval: the value is data, not code, and
    # eval of config text would execute arbitrary expressions.
    archives = ast.literal_eval(config.get(section, option))
    for name in archives:
        tar = tarfile.open(name)
        try:
            # getnames() is the documented tarfile API (the original used
            # the undocumented `membernames` attribute).
            members = tar.getnames()
            if members and not os.access(members[0], os.R_OK):
                for i in members:
                    tar.extract(i)
        finally:
            # Always release the archive handle.
            tar.close()
def makeTestSuite():
    """Build the suite of WSDL tests driven by local service files."""
    suite = unittest.TestSuite()
    suite.addTest(test_wsdl.makeTestSuite("services_by_file"))
    return suite
def main():
    """Unpack the test archives, then run the suite via unittest."""
    extractFiles(SECTION, 'archives')
    unittest.main(defaultTest="makeTestSuite")
if __name__ == "__main__" : main()
|
haoxli/crosswalk-test-suite
|
refs/heads/master
|
webapi/tct-csp-w3c-tests/csp-py/csp_ro_img-src_self_allowed_ext-manual.py
|
30
|
def main(request, response):
    """Serve the CSP report-only img-src 'self' test page.

    Reads the test-server host/ports from config.json, sets the three
    vendor-prefixed Report-Only CSP headers, and returns HTML embedding an
    image from a *different* port (url1), which img-src 'self' would block
    were the policy enforcing.
    """
    import simplejson as json
    # Fix: use a context-managed open() -- the original used the
    # Python-2-only file() builtin and never closed the handle.
    with open('config.json') as f:
        source = f.read()
    s = json.JSONDecoder().decode(source)
    url1 = "http://" + s['host'] + ":" + str(s['ports']['http'][1])
    url2 = "http://" + s['host'] + ":" + str(s['ports']['http'][0])
    _CSP = "img-src 'self'"
    response.headers.set("Content-Security-Policy-Report-Only", _CSP)
    response.headers.set("X-Content-Security-Policy-Report-Only", _CSP)
    response.headers.set("X-WebKit-CSP-Report-Only", _CSP)
    return """<!DOCTYPE html>
<!--
Copyright (c) 2013 Intel Corporation.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
* Redistributions of works must retain the original copyright notice, this list
  of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the original copyright notice,
  this list of conditions and the following disclaimer in the documentation
  and/or other materials provided with the distribution.
* Neither the name of Intel Corporation nor the names of its contributors
  may be used to endorse or promote products derived from this work without
  specific prior written permission.
THIS SOFTWARE IS PROVIDED BY INTEL CORPORATION "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL INTEL CORPORATION BE LIABLE FOR ANY DIRECT,
INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
Authors:
        Hao, Yunfei <yunfeix.hao@intel.com>
-->
<html>
  <head>
    <title>CSP Test: csp_ro_img-src_self_allowed_ext</title>
    <link rel="author" title="Intel" href="http://www.intel.com"/>
    <link rel="help" href="http://www.w3.org/TR/2012/CR-CSP-20121115/#img-src"/>
    <meta name="flags" content=""/>
    <meta name="assert" content="img-src 'self'"/>
    <meta charset="utf-8"/>
  </head>
  <body>
    <p>Test passes if there is a filled green square.</p>
    <img src='""" + url1 + """/tests/csp/support/green-100x100.png'/>
  </body>
</html> """
|
chrisseto/osf.io
|
refs/heads/feature/reviews
|
api_tests/institutions/views/test_institution_nodes_list.py
|
3
|
import pytest
from api.base.settings.defaults import API_BASE
from api_tests.nodes.filters.test_filters import NodesListFilteringMixin, NodesListDateFilteringMixin
from osf_tests.factories import (
InstitutionFactory,
AuthUserFactory,
ProjectFactory,
NodeFactory,
RegistrationFactory,
)
@pytest.mark.django_db
@pytest.mark.django_db
class TestInstitutionNodeList:
    """Tests for /institutions/<id>/nodes/: visibility of public/private
    nodes and the <2.2 vs >=2.2 component-affiliation behavior."""

    @pytest.fixture()
    def user(self):
        return AuthUserFactory()
    @pytest.fixture()
    def institution(self):
        return InstitutionFactory()
    @pytest.fixture()
    def public_node(self, institution):
        public_node = ProjectFactory(is_public=True)
        public_node.affiliated_institutions.add(institution)
        public_node.save()
        return public_node
    @pytest.fixture()
    def user_private_node(self, user, institution):
        user_private_node = ProjectFactory(creator=user, is_public=False)
        user_private_node.affiliated_institutions.add(institution)
        user_private_node.save()
        return user_private_node
    @pytest.fixture()
    def private_node(self, institution):
        private_node = ProjectFactory(is_public=False)
        private_node.affiliated_institutions.add(institution)
        private_node.save()
        return private_node
    @pytest.fixture()
    def institution_node_url(self, institution):
        return '/{0}institutions/{1}/nodes/'.format(API_BASE, institution._id)
    def test_return_all_public_nodes(self, app, public_node, user_private_node, private_node, institution_node_url):
        res = app.get(institution_node_url)
        assert res.status_code == 200
        ids = [each['id'] for each in res.json['data']]
        assert public_node._id in ids
        assert user_private_node._id not in ids
        assert private_node._id not in ids
    def test_does_not_return_private_nodes_with_auth(self, app, user, public_node, user_private_node, private_node, institution_node_url):
        res = app.get(institution_node_url, auth=user.auth)
        assert res.status_code == 200
        ids = [each['id'] for each in res.json['data']]
        assert public_node._id in ids
        assert user_private_node._id not in ids
        assert private_node._id not in ids
    def test_registration_not_returned(self, app, institution, public_node, institution_node_url):
        registration = RegistrationFactory(project=public_node, is_public=True)
        res = app.get(institution_node_url)
        assert res.status_code == 200
        ids = [each['id'] for each in res.json['data']]
        assert public_node._id in ids
        assert registration._id not in ids
    def test_affiliated_component_with_affiliated_parent_not_returned(self, app, user, institution, public_node, institution_node_url):
        # version < 2.2
        component = NodeFactory(parent=public_node, is_public=True)
        component.affiliated_institutions.add(institution)
        component.save()
        res = app.get(institution_node_url, auth=user.auth)
        affiliated_node_ids = [item['id'] for item in res.json['data']]
        assert res.status_code == 200
        assert public_node._id in affiliated_node_ids
        assert component._id not in affiliated_node_ids
    def test_affiliated_component_without_affiliated_parent_not_returned(self, app, user, institution, institution_node_url):
        # version < 2.2
        node = ProjectFactory(is_public=True)
        component = NodeFactory(parent=node, is_public=True)
        component.affiliated_institutions.add(institution)
        component.save()
        res = app.get(institution_node_url, auth=user.auth)
        # Fix: the comprehension previously iterated over a variable named
        # `node`, shadowing the project above; under Python 2 the leaked
        # binding would make the `node._id` assert below operate on a
        # response dict.  Renamed to `item`, matching the last test.
        affiliated_node_ids = [item['id'] for item in res.json['data']]
        assert res.status_code == 200
        assert node._id not in affiliated_node_ids
        assert component._id not in affiliated_node_ids
    def test_affiliated_component_with_affiliated_parent_returned(self, app, user, institution, public_node, institution_node_url):
        # version 2.2
        component = NodeFactory(parent=public_node, is_public=True)
        component.affiliated_institutions.add(institution)
        component.save()
        url = '{}?version=2.2'.format(institution_node_url)
        res = app.get(url, auth=user.auth)
        affiliated_node_ids = [item['id'] for item in res.json['data']]
        assert res.status_code == 200
        assert public_node._id in affiliated_node_ids
        assert component._id in affiliated_node_ids
    def test_affiliated_component_without_affiliated_parent_returned(self, app, user, institution, public_node, institution_node_url):
        # version 2.2
        node = ProjectFactory(is_public=True)
        component = NodeFactory(parent=node, is_public=True)
        component.affiliated_institutions.add(institution)
        component.save()
        url = '{}?version=2.2'.format(institution_node_url)
        res = app.get(url, auth=user.auth)
        affiliated_node_ids = [item['id'] for item in res.json['data']]
        assert res.status_code == 200
        assert node._id not in affiliated_node_ids
        assert component._id in affiliated_node_ids
@pytest.mark.django_db
@pytest.mark.django_db
class TestNodeListFiltering(NodesListFilteringMixin):
    # Reuses the shared filtering mixin against the institution nodes
    # endpoint; every fixture node is affiliated with the institution so
    # the mixin's filters have data to match.
    @pytest.fixture()
    def institution(self):
        return InstitutionFactory()
    @pytest.fixture()
    def url(self, institution):
        return '/{}institutions/{}/nodes/?version=2.2&'.format(API_BASE, institution._id)
    @pytest.fixture()
    def parent_project(self, user, contrib, institution):
        parent_project = ProjectFactory(creator=user, is_public=True)
        parent_project.add_contributor(contrib, save=False)
        parent_project.affiliated_institutions.add(institution)
        parent_project.save()
        return parent_project
    @pytest.fixture()
    def child_node_one(self, user, parent_project, institution):
        child_node_one = NodeFactory(parent=parent_project, creator=user, is_public = True)
        child_node_one.affiliated_institutions.add(institution)
        child_node_one.save()
        return child_node_one
    @pytest.fixture()
    def child_node_two(self, user, parent_project, institution):
        child_node_two = NodeFactory(parent=parent_project, creator=user, is_public = True)
        child_node_two.affiliated_institutions.add(institution)
        child_node_two.save()
        return child_node_two
    @pytest.fixture()
    def grandchild_node_one(self, user, child_node_one, institution):
        grandchild_node_one = NodeFactory(parent=child_node_one, creator=user, is_public = True)
        grandchild_node_one.affiliated_institutions.add(institution)
        grandchild_node_one.save()
        return grandchild_node_one
    @pytest.fixture()
    def grandchild_node_two(self, user, child_node_two, institution):
        grandchild_node_two = NodeFactory(parent=child_node_two, creator=user, is_public = True)
        grandchild_node_two.affiliated_institutions.add(institution)
        grandchild_node_two.save()
        return grandchild_node_two
    @pytest.fixture()
    def great_grandchild_node_two(self, user, grandchild_node_two, institution):
        great_grandchild_node_two = NodeFactory(parent=grandchild_node_two, creator=user, is_public = True)
        great_grandchild_node_two.affiliated_institutions.add(institution)
        great_grandchild_node_two.save()
        return great_grandchild_node_two
@pytest.mark.django_db
@pytest.mark.django_db
class TestNodeListDateFiltering(NodesListDateFilteringMixin):
    # Reuses the shared date-filter mixin; one affiliated node per month
    # so date_created filters can discriminate between them.
    @pytest.fixture()
    def institution(self):
        return InstitutionFactory()
    @pytest.fixture()
    def url(self, institution):
        return '/{}institutions/{}/nodes/?'.format(API_BASE, institution._id)
    @pytest.fixture()
    def node_may(self, user, institution):
        node_may = ProjectFactory(creator=user, is_public = True)
        node_may.date_created = '2016-05-01 00:00:00.000000+00:00'
        node_may.affiliated_institutions.add(institution)
        node_may.save()
        return node_may
    @pytest.fixture()
    def node_june(self, user, institution):
        node_june = ProjectFactory(creator=user, is_public = True)
        node_june.date_created = '2016-06-01 00:00:00.000000+00:00'
        node_june.affiliated_institutions.add(institution)
        node_june.save()
        return node_june
    @pytest.fixture()
    def node_july(self, user, institution):
        node_july = ProjectFactory(creator=user, is_public = True)
        node_july.date_created = '2016-07-01 00:00:00.000000+00:00'
        node_july.affiliated_institutions.add(institution)
        node_july.save()
        return node_july
|
mediawiki-utilities/python-mwcites
|
refs/heads/master
|
mwcites/extractors/doi.py
|
3
|
import re
from collections import defaultdict
from more_itertools import peekable
from ..identifier import Identifier
DOI_START_RE = re.compile(r'10\.[0-9]{4,}/')
HTML_TAGS = ['ref', 'span', 'div', 'table', 'h1', 'h2', 'h3', 'h4', 'h5', 'h6',
'b', 'u', 'i', 's', 'ins', 'del', 'code', 'tt', 'blockquote',
'pre']
TAGS_RE = re.compile(r'<(/\s*)?(' + '|'.join(HTML_TAGS) + ')(\s[^>\n\r]+)?>', re.I)
'''
DOI_RE = re.compile(r'\b(10\.\d+/[^\s\|\]\}\?\,]+)')
def extract_regex(text):
for match in DOI_RE.finditer(text):
id = re.sub(TAGS_RE, "", match.group(1)).rstrip(".")
yield Identifier("doi", id)
import mwparserfromhell as mwp
def extract_mwp(text):
no_tags = mwp.parse(text).strip_code()
for match in DOI_RE.finditer(no_tags):
id = re.sub(TAGS_RE, "", match.group(1)).rstrip(".")
yield Identifier("doi", id)
'''
LEXICON = [
(DOI_START_RE.pattern, 'doi_start'),
(r'\(', 'open_paren'),
(r'\)', 'close_paren'),
(r'\[', 'open_bracket'),
(r'\]', 'close_bracket'),
(r'<!--', 'comment_start'),
(r'-->', 'comment_end'),
(TAGS_RE.pattern, 'tag'),
(r'<', 'open_angle'),
(r'>', 'close_angle'),
(r'\{', 'open_curly'),
(r'\}', 'close_curly'),
(r'\|', 'pipe'),
(r'[,\.;!]', 'punct'),
(r'[\?#]', 'url_end'),
(r'[\n\r]+', 'break'),
(r'\s+', 'whitespace'),
(r'\w+', 'word'),
(r'.', 'etc')
]
def extract_island(text):
    """Yield ('doi', doi_string) tuples for every DOI found in `text`.

    Tokenizes the full text; when the stream is positioned at a
    'doi_start' token, read_doi() consumes the DOI, then one more token
    (the terminator) is discarded before continuing.
    """
    tokens = tokenize_finditer(text, LEXICON)
    tokens = peekable(tokens)
    while tokens.peek(None) is not None:
        if tokens.peek()[0] == 'doi_start':
            yield ('doi', read_doi(tokens))
        # Fix: read_doi() may exhaust the stream (text ending exactly at a
        # DOI); an unconditional next() then raised StopIteration, which
        # PEP 479 turns into RuntimeError inside this generator.
        if tokens.peek(None) is None:
            break
        next(tokens)
def tokenize_finditer(text, lexicon=LEXICON):
    """Yield (token_name, matched_text) pairs for ``text`` using ``lexicon``."""
    named_groups = ["(?P<{0}>{1})".format(label, regex)
                    for regex, label in lexicon]
    compiled = re.compile('|'.join(named_groups), re.I | re.U | re.M)
    for m in compiled.finditer(text):
        yield m.lastgroup, m.group(0)
"""
def tokenize_scanner(text, lexicon=LEXICON):
scanner = re.Scanner(lexicon)
tokens, remainder = scanner.scan(text)
return tokens
"""
#from mwcites.extractors.doi import tokenize_scan
#list(tokenize_scan("foo bar baz.{}"))
def read_doi(tokens):
    """Consume tokens from a peekable stream and return the DOI string.

    ``tokens`` must be positioned on a 'doi_start' token.  Tokens are
    consumed until a hard terminator (whitespace, markup, etc.) or an
    unbalanced closing bracket/brace is reached.

    :param tokens: ``more_itertools.peekable`` over (name, text) pairs
    :returns: the accumulated DOI with trailing punctuation stripped
    """
    assert tokens.peek()[0] == 'doi_start'

    # Track nesting of brackets/braces that open *inside* the DOI so a
    # balanced pair is kept, while an unmatched closer ends the identifier.
    depth = defaultdict(lambda: 0)

    doi_buffer = [next(tokens)[1]]

    while tokens.peek(None) is not None:
        name, match = tokens.peek()
        if name in ('url_end', 'break', 'whitespace', 'tag', 'pipe',
                    'comment_start', 'comment_end'):
            # Hard terminators: these can never be part of a DOI.
            break
        elif name == 'open_bracket':
            depth['bracket'] += 1
            doi_buffer.append(next(tokens)[1])
        elif name == 'open_curly':
            depth['curly'] += 1
            doi_buffer.append(next(tokens)[1])
        elif name == 'close_bracket':
            if depth['bracket'] > 0:
                depth['bracket'] -= 1
                doi_buffer.append(next(tokens)[1])
            else:
                # Unmatched ']' belongs to the surrounding wiki markup.
                break
        elif name == 'close_curly':
            if depth['curly'] > 0:
                depth['curly'] -= 1
                doi_buffer.append(next(tokens)[1])
            else:
                # Unmatched '}' belongs to the surrounding wiki markup.
                break
        else:
            doi_buffer.append(next(tokens)[1])

    # Do not return a doi with punctuation at the end
    return re.sub(r'[\.,!]+$', '', ''.join(doi_buffer))
def tokenize_search(text, start, lexicon=LEXICON):
    """Yield (token_name, matched_text) pairs scanning ``text`` from ``start``."""
    combined = '|'.join("(?P<{0}>{1})".format(label, regex)
                        for regex, label in lexicon)
    compiled = re.compile(combined, re.I | re.U)
    position = start
    while True:
        m = compiled.search(text, position)
        if m is None:
            return
        yield m.lastgroup, m.group(0)
        position = m.span()[1]
def extract_search(text, lexicon=LEXICON):
    """Yield Identifier('doi', ...) for every DOI found in ``text``.

    Scans for DOI prefixes with DOI_START_RE and, for each hit that does
    not fall inside an already-extracted DOI, tokenizes from that offset
    and lets read_doi() consume the full identifier.
    """
    last_end = 0
    for match in DOI_START_RE.finditer(text):
        # Only start a new extraction past the end of the previous DOI;
        # prefixes inside an extracted DOI are skipped.
        if match.span()[0] > last_end:
            tokens = tokenize_search(text, match.span()[0], lexicon=lexicon)
            tokens = peekable(tokens)
            doi = read_doi(tokens)
            last_end = match.span()[0] + len(doi)
            yield Identifier('doi', doi)
        else:
            last_end = max(match.span()[1], last_end)
extract = extract_search # Setting the default to the best method
|
janhahne/nest-simulator
|
refs/heads/master
|
pynest/examples/brunel_alpha_evolution_strategies.py
|
2
|
# -*- coding: utf-8 -*-
#
# brunel_alpha_evolution_strategies.py
#
# This file is part of NEST.
#
# Copyright (C) 2004 The NEST Initiative
#
# NEST is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# NEST is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NEST. If not, see <http://www.gnu.org/licenses/>.
"""Use evolution strategies to find parameters for a random balanced network (alpha synapses)
-----------------------------------------------------------------------------------------------------
This script uses an optimization algorithm to find the appropriate
parameter values for the external drive "eta" and the relative ratio
of excitation and inhibition "g" for a balanced random network that
lead to particular population-averaged rates, coefficients of
variation and correlations.
From an initial Gaussian search distribution parameterized with mean
and standard deviation network parameters are sampled. Network
realizations of these parameters are simulated and evaluated according
to an objective function that measures how close the activity
statistics are to their desired values (~fitness). From these fitness
values the approximate natural gradient of the fitness landscape is
computed and used to update the parameters of the search
distribution. This procedure is repeated until the maximal number of
function evaluations is reached or the width of the search
distribution becomes extremely small. We use the following fitness
function:
.. math::
f = - alpha(r - r*)^2 - beta(cv - cv*)^2 - gamma(corr - corr*)^2
where `alpha`, `beta` and `gamma` are weighting factors, and stars indicate
target values.
The network contains an excitatory and an inhibitory population on
the basis of the network used in [1]_.
The optimization algorithm (evolution strategies) is described in
Wierstra et al. [2]_.
References
~~~~~~~~~~~~
.. [1] Brunel N (2000). Dynamics of Sparsely Connected Networks of
Excitatory and Inhibitory Spiking Neurons. Journal of Computational
Neuroscience 8, 183-208.
.. [2] Wierstra et al. (2014). Natural evolution strategies. Journal of
Machine Learning Research, 15(1), 949-980.
See Also
~~~~~~~~~~
:doc:`brunel_alpha_nest`
Authors
~~~~~~~
Jakob Jordan
"""
from __future__ import print_function
import matplotlib.pyplot as plt
from matplotlib.patches import Ellipse
import numpy as np
import scipy.special as sp
import nest
###############################################################################
# Analysis
def cut_warmup_time(spikes, warmup_time):
    # Discard (in place) all spikes recorded during the warmup period
    # and return the modified dict.
    keep = spikes['times'] > warmup_time
    spikes['senders'] = spikes['senders'][keep]
    spikes['times'] = spikes['times'][keep]
    return spikes
def compute_rate(spikes, N_rec, sim_time):
    # Population-averaged firing rate in spikes/s
    # (sim_time is in ms, hence the factor 1e3).
    n_spikes = len(spikes['times'])
    return 1. * n_spikes / N_rec / sim_time * 1e3
def sort_spikes(spikes):
    # Group recorded spike times by sender; returns the sorted node ids
    # and one spiketrain (array of times) per node id, in the same order.
    senders = spikes['senders']
    times = spikes['times']
    unique_node_ids = sorted(np.unique(senders))
    spiketrains = [times[senders == node_id] for node_id in unique_node_ids]
    return unique_node_ids, spiketrains
def compute_cv(spiketrains):
    # Coefficient of variation of the pooled inter-spike-interval
    # distribution; 0. when there are too few intervals to define it.
    if not spiketrains:
        return 0.
    isis = np.hstack([np.diff(train) for train in spiketrains])
    if len(isis) <= 1:
        return 0.
    return np.std(isis) / np.mean(isis)
def bin_spiketrains(spiketrains, t_min, t_max, t_bin):
    # Histogram each spiketrain on a common set of time bin edges.
    edges = np.arange(t_min, t_max, t_bin)
    counts = [np.histogram(train, bins=edges)[0] for train in spiketrains]
    return edges, counts
def compute_correlations(binned_spiketrains):
    # Mean pairwise Pearson correlation across binned spiketrains;
    # 0. when fewer than two trains are given.
    n = len(binned_spiketrains)
    if n <= 1:
        return 0.
    cc = np.corrcoef(binned_spiketrains)
    # Average the off-diagonal entries of the correlation matrix
    # (the diagonal contributes exactly n).
    return 1. / (n * (n - 1.)) * (np.sum(cc) - n)
def compute_statistics(parameters, espikes, ispikes):
    """Return (mean rate, mean cv, mean correlation) over both populations.

    Computes population-averaged firing rates, coefficients of variation
    and pairwise correlations from the recorded spikes of the excitatory
    and inhibitory populations, after discarding the warmup period.
    """
    espikes = cut_warmup_time(espikes, parameters['warmup_time'])
    ispikes = cut_warmup_time(ispikes, parameters['warmup_time'])
    erate = compute_rate(espikes, parameters['N_rec'], parameters['sim_time'])
    # Bug fix: the inhibitory rate was previously computed from the
    # excitatory recordings (espikes); use the inhibitory spikes instead.
    irate = compute_rate(ispikes, parameters['N_rec'], parameters['sim_time'])
    enode_ids, espiketrains = sort_spikes(espikes)
    inode_ids, ispiketrains = sort_spikes(ispikes)
    ecv = compute_cv(espiketrains)
    icv = compute_cv(ispiketrains)
    # Correlations are computed on 1 ms binned spiketrains.
    ecorr = compute_correlations(
        bin_spiketrains(espiketrains, 0., parameters['sim_time'], 1.)[1])
    icorr = compute_correlations(
        bin_spiketrains(ispiketrains, 0., parameters['sim_time'], 1.)[1])
    return (np.mean([erate, irate]),
            np.mean([ecv, icv]),
            np.mean([ecorr, icorr]))
###############################################################################
# Network simulation
def simulate(parameters):
    """Simulate the balanced random network and return recorded spikes.

    Builds an excitatory and an inhibitory iaf_psc_alpha population driven
    by Poisson noise (code taken from brunel_alpha_nest.py), runs the
    simulation, and returns the event dictionaries of the excitatory and
    inhibitory spike detectors.

    :param parameters: dict with keys 'N', 'gamma', 'epsilon', 'g', 'eta',
        'delay', 'dt', 'seed', 'sim_time', 'N_rec'
    :returns: tuple (excitatory spike events, inhibitory spike events)
    :raises ValueError: if 'N_rec' exceeds either population size
    """

    def LambertWm1(x):
        # Using scipy to mimic the gsl_sf_lambert_Wm1 function.
        return sp.lambertw(x, k=-1 if x < 0 else 0).real

    def ComputePSPnorm(tauMem, CMem, tauSyn):
        # Peak value of the PSP evoked by a unit-amplitude current; used to
        # convert the desired PSP amplitude (mV) into a current amplitude.
        a = (tauMem / tauSyn)
        b = (1.0 / tauSyn - 1.0 / tauMem)

        # time of maximum
        t_max = 1.0 / b * (-LambertWm1(-np.exp(-1.0 / a) / a) - 1.0 / a)

        # maximum of PSP for current of unit amplitude
        return (np.exp(1.0) / (tauSyn * CMem * b) *
                ((np.exp(-t_max / tauMem) - np.exp(-t_max / tauSyn)) / b -
                 t_max * np.exp(-t_max / tauSyn)))

    # number of excitatory neurons
    NE = int(parameters['gamma'] * parameters['N'])
    # number of inhibitory neurons
    NI = parameters['N'] - NE

    # number of excitatory synapses per neuron
    CE = int(parameters['epsilon'] * NE)
    # number of inhibitory synapses per neuron
    CI = int(parameters['epsilon'] * NI)

    tauSyn = 0.5  # synaptic time constant in ms
    tauMem = 20.0  # time constant of membrane potential in ms
    CMem = 250.0  # capacitance of membrane in in pF
    theta = 20.0  # membrane threshold potential in mV
    neuron_parameters = {
        'C_m': CMem,
        'tau_m': tauMem,
        'tau_syn_ex': tauSyn,
        'tau_syn_in': tauSyn,
        't_ref': 2.0,
        'E_L': 0.0,
        'V_reset': 0.0,
        'V_m': 0.0,
        'V_th': theta
    }
    J = 0.1  # postsynaptic amplitude in mV
    J_unit = ComputePSPnorm(tauMem, CMem, tauSyn)
    J_ex = J / J_unit  # amplitude of excitatory postsynaptic current
    # amplitude of inhibitory postsynaptic current
    J_in = -parameters['g'] * J_ex

    # external drive: Poisson rate expressed relative to the threshold
    # rate nu_th via the parameter 'eta'
    nu_th = (theta * CMem) / (J_ex * CE * np.exp(1) * tauMem * tauSyn)
    nu_ex = parameters['eta'] * nu_th
    p_rate = 1000.0 * nu_ex * CE

    # reset and configure the NEST kernel before (re)building the network
    nest.ResetKernel()
    nest.set_verbosity('M_FATAL')

    nest.SetKernelStatus({'rng_seeds': [parameters['seed']],
                          'resolution': parameters['dt']})

    nest.SetDefaults('iaf_psc_alpha', neuron_parameters)
    nest.SetDefaults('poisson_generator', {'rate': p_rate})

    nodes_ex = nest.Create('iaf_psc_alpha', NE)
    nodes_in = nest.Create('iaf_psc_alpha', NI)
    noise = nest.Create('poisson_generator')
    espikes = nest.Create('spike_detector', params={'label': 'brunel-py-ex'})
    ispikes = nest.Create('spike_detector', params={'label': 'brunel-py-in'})

    nest.CopyModel('static_synapse', 'excitatory',
                   {'weight': J_ex, 'delay': parameters['delay']})
    nest.CopyModel('static_synapse', 'inhibitory',
                   {'weight': J_in, 'delay': parameters['delay']})

    # both populations receive the same excitatory Poisson drive
    nest.Connect(noise, nodes_ex, syn_spec='excitatory')
    nest.Connect(noise, nodes_in, syn_spec='excitatory')

    if parameters['N_rec'] > NE:
        raise ValueError(
            'Requested recording from {} neurons, \
            but only {} in excitatory population'.format(
                parameters['N_rec'], NE))
    if parameters['N_rec'] > NI:
        raise ValueError(
            'Requested recording from {} neurons, \
            but only {} in inhibitory population'.format(
                parameters['N_rec'], NI))
    # record only from the first N_rec neurons of each population
    nest.Connect(nodes_ex[:parameters['N_rec']], espikes)
    nest.Connect(nodes_in[:parameters['N_rec']], ispikes)

    # recurrent connectivity: fixed in-degree CE/CI onto every neuron
    conn_parameters_ex = {'rule': 'fixed_indegree', 'indegree': CE}
    nest.Connect(nodes_ex, nodes_ex + nodes_in, conn_parameters_ex, 'excitatory')

    conn_parameters_in = {'rule': 'fixed_indegree', 'indegree': CI}
    nest.Connect(nodes_in, nodes_ex + nodes_in, conn_parameters_in, 'inhibitory')

    nest.Simulate(parameters['sim_time'])

    return (espikes.events,
            ispikes.events)
###############################################################################
# Optimization
def default_population_size(dimensions):
    """Population-size heuristic for ``dimensions`` (Wierstra et al., 2014)."""
    return 4 + int(np.floor(3 * np.log(dimensions)))
def default_learning_rate_mu():
    """Default learning rate for the distribution mean (Wierstra et al., 2014)."""
    return 1
def default_learning_rate_sigma(dimensions):
    """Default sigma learning rate for ``dimensions`` (Wierstra et al., 2014)."""
    numerator = 3 + np.log(dimensions)
    denominator = 12. * np.sqrt(dimensions)
    return numerator / denominator
def compute_utility(fitness):
    """Rank-based utilities for fitness shaping (Wierstra et al., 2014).

    Returns ``(order, utility)``: the indices that sort ``fitness`` in
    descending order, and the normalized utility value assigned to each
    rank (summing to zero).
    """
    n = len(fitness)
    order = np.argsort(fitness)[::-1]
    # Raw log-rank weights, clipped at zero for low ranks.
    threshold = np.max([0, np.log((n / 2) + 1)])
    raw = [threshold - np.log(rank + 1) for rank in range(n)]
    utility = raw / np.sum(raw) - 1. / n
    return order, utility
def optimize(func, mu, sigma, learning_rate_mu=None, learning_rate_sigma=None,
             population_size=None, fitness_shaping=True,
             mirrored_sampling=True, record_history=False,
             max_generations=2000, min_sigma=1e-8, verbosity=0):
    """Maximize ``func`` via separable natural evolution strategies.

    Optimizes an objective function via evolution strategies using the
    natural gradient of multinormal search distributions in natural
    coordinates.  Does not consider covariances between parameters
    ("separable natural evolution strategies").  See Wierstra et al. (2014).

    Parameters
    ----------
    func : function
        The function to be maximized.
    mu : np.ndarray
        Initial mean of the search distribution (updated in place).
    sigma : np.ndarray
        Initial standard deviation of the search distribution (updated
        in place).
    learning_rate_mu : float
        Learning rate of mu.
    learning_rate_sigma : float
        Learning rate of sigma.
    population_size : int
        Number of individuals sampled in each generation.
    fitness_shaping : bool
        Whether to use fitness shaping, compensating for large
        deviations in fitness, see Wierstra et al. (2014).
    mirrored_sampling : bool
        Whether to use mirrored sampling, i.e., evaluating a mirrored
        sample for each sample, see Wierstra et al. (2014).
    record_history : bool
        Whether to record history of search distribution parameters,
        fitness values and individuals.
    max_generations : int
        Maximal number of generations.
    min_sigma : float
        Lower bound for the width of the search distribution; the search
        stops once *all* dimensions of sigma have fallen below this value.
    verbosity : bool
        Whether to continuously print progress information.

    Returns
    -------
    dict
        Dictionary of final parameters of search distribution and
        history.
    """
    if not isinstance(mu, np.ndarray):
        raise TypeError('mu needs to be of type np.ndarray')
    if not isinstance(sigma, np.ndarray):
        raise TypeError('sigma needs to be of type np.ndarray')

    if learning_rate_mu is None:
        learning_rate_mu = default_learning_rate_mu()
    if learning_rate_sigma is None:
        learning_rate_sigma = default_learning_rate_sigma(mu.size)
    if population_size is None:
        population_size = default_population_size(mu.size)

    generation = 0
    mu_history = []
    sigma_history = []
    pop_history = []
    fitness_history = []

    while True:
        # create new population using the search distribution
        s = np.random.normal(0, 1, size=(population_size,) + np.shape(mu))
        z = mu + sigma * s

        # add mirrored perturbations if enabled
        if mirrored_sampling:
            z = np.vstack([z, mu - sigma * s])
            s = np.vstack([s, -s])

        # evaluate fitness for every individual in population
        # (bug fix: the `np.float` alias was deprecated in NumPy 1.20 and
        # removed in 1.24; the builtin `float` is the documented
        # replacement and yields the same float64 dtype)
        fitness = np.fromiter((func(*zi) for zi in z), float)

        # print status if enabled
        if verbosity > 0:
            print(
                '# Generation {:d} | fitness {:.3f} | mu {} | sigma {}'.format(
                    generation, np.mean(fitness),
                    ', '.join(str(np.round(mu_i, 3)) for mu_i in mu),
                    ', '.join(str(np.round(sigma_i, 3)) for sigma_i in sigma)
                ))

        # apply fitness shaping if enabled
        if fitness_shaping:
            order, utility = compute_utility(fitness)
            s = s[order]
            z = z[order]
        else:
            utility = fitness

        # bookkeeping
        if record_history:
            mu_history.append(mu.copy())
            sigma_history.append(sigma.copy())
            pop_history.append(z.copy())
            fitness_history.append(fitness)

        # exit if max generations reached or search distributions are
        # very narrow
        if generation == max_generations or np.all(sigma < min_sigma):
            break

        # update parameter of search distribution via natural gradient
        # descent in natural coordinates
        mu += learning_rate_mu * sigma * np.dot(utility, s)
        sigma *= np.exp(learning_rate_sigma / 2. * np.dot(utility, s**2 - 1))

        generation += 1

    return {
        'mu': mu,
        'sigma': sigma,
        'fitness_history': np.array(fitness_history),
        'mu_history': np.array(mu_history),
        'sigma_history': np.array(sigma_history),
        'pop_history': np.array(pop_history)
    }
def optimize_network(optimization_parameters, simulation_parameters):
    """Search for network parameters (g, eta) that satisfy the constraints.

    Runs the evolution-strategies optimizer over the fitness defined by
    the weighted squared deviations of rate, cv and correlation from
    their targets (see module docstring).

    :param optimization_parameters: dict with optimizer settings, targets
        and fitness weights
    :param simulation_parameters: dict passed through to ``simulate``;
        'g' and 'eta' are overridden per evaluation
    :returns: result dict from ``optimize`` (final mu/sigma + histories)
    """
    np.random.seed(simulation_parameters['seed'])

    def objective_function(g, eta):
        # Returns the fitness of a specific network parametrization.
        # Work on a local copy so the caller's dict is never mutated by
        # the optimizer's sampled parameters.
        simulation_parameters_local = simulation_parameters.copy()
        simulation_parameters_local['g'] = g
        simulation_parameters_local['eta'] = eta

        # perform the network simulation
        espikes, ispikes = simulate(simulation_parameters_local)

        # Bug fix: analyse with the *local* parameters actually used for
        # this simulation (previously the unmodified caller dict was
        # passed; harmless today since compute_statistics only reads
        # warmup_time/N_rec/sim_time, but fragile if those ever become
        # search parameters).
        rate, cv, corr = compute_statistics(
            simulation_parameters_local, espikes, ispikes)
        fitness = \
            - optimization_parameters['fitness_weight_rate'] * (
                rate - optimization_parameters['target_rate']) ** 2 \
            - optimization_parameters['fitness_weight_cv'] * (
                cv - optimization_parameters['target_cv']) ** 2 \
            - optimization_parameters['fitness_weight_corr'] * (
                corr - optimization_parameters['target_corr']) ** 2

        return fitness

    return optimize(
        objective_function,
        np.array(optimization_parameters['mu']),
        np.array(optimization_parameters['sigma']),
        max_generations=optimization_parameters['max_generations'],
        record_history=True,
        verbosity=optimization_parameters['verbosity']
    )
###############################################################################
# Main
if __name__ == '__main__':
    # Network/simulation parameters; 'g' and 'eta' are the two quantities
    # the optimizer searches for and are filled in below.
    simulation_parameters = {
        'seed': 123,
        'dt': 0.1,            # (ms) simulation resolution
        'sim_time': 1000.,    # (ms) simulation duration
        'warmup_time': 300.,  # (ms) duration ignored during analysis
        'delay': 1.5,         # (ms) synaptic delay
        'g': None,            # relative ratio of excitation and inhibition
        'eta': None,          # relative strength of external drive
        'epsilon': 0.1,       # average connectivity of network
        'N': 400,             # number of neurons in network
        'gamma': 0.8,         # relative size of excitatory and
                              # inhibitory population
        'N_rec': 40,          # number of neurons to record activity from
    }

    optimization_parameters = {
        'verbosity': 1,          # print progress over generations
        'max_generations': 20,   # maximal number of generations
        'target_rate': 1.89,     # (spikes/s) target rate
        'target_corr': 0.0,      # target correlation
        'target_cv': 1.,         # target coefficient of variation
        'mu': [1., 3.],          # initial mean for search distribution
                                 # (mu(g), mu(eta))
        'sigma': [0.15, 0.05],   # initial sigma for search
                                 # distribution (sigma(g), sigma(eta))

        # hyperparameters of the fitness function; these are used to
        # compensate for the different typical scales of the
        # individual measures, rate ~ O(1), cv ~ (0.1), corr ~ O(0.01)
        'fitness_weight_rate': 1.,    # relative weight of rate deviation
        'fitness_weight_cv': 10.,     # relative weight of cv deviation
        'fitness_weight_corr': 100.,  # relative weight of corr deviation
    }

    # optimize network parameters
    optimization_result = optimize_network(optimization_parameters,
                                           simulation_parameters)

    # re-simulate once with the optimized parameters (the mean of the
    # final search distribution) and report the resulting statistics
    simulation_parameters['g'] = optimization_result['mu'][0]
    simulation_parameters['eta'] = optimization_result['mu'][1]

    espikes, ispikes = simulate(simulation_parameters)

    rate, cv, corr = compute_statistics(
        simulation_parameters, espikes, ispikes)
    print('Statistics after optimization:', end=' ')
    print('Rate: {:.3f}, cv: {:.3f}, correlation: {:.3f}'.format(
        rate, cv, corr))

    # plot results
    fig = plt.figure(figsize=(10, 4))
    ax1 = fig.add_axes([0.06, 0.12, 0.25, 0.8])
    ax2 = fig.add_axes([0.4, 0.12, 0.25, 0.8])
    ax3 = fig.add_axes([0.74, 0.12, 0.25, 0.8])

    ax1.set_xlabel('Time (ms)')
    ax1.set_ylabel('Neuron id')

    ax2.set_xlabel(r'Relative strength of inhibition $g$')
    ax2.set_ylabel(r'Relative strength of external drive $\eta$')

    ax3.set_xlabel('Generation')
    ax3.set_ylabel('Fitness')

    # raster plot of the recorded excitatory spikes
    ax1.plot(espikes['times'], espikes['senders'], ls='', marker='.')

    # search distributions (2-sigma ellipses) and sampled individuals
    for mu, sigma in zip(optimization_result['mu_history'],
                         optimization_result['sigma_history']):
        ellipse = Ellipse(
            xy=mu, width=2 * sigma[0], height=2 * sigma[1], alpha=0.5, fc='k')
        ellipse.set_clip_box(ax2.bbox)
        ax2.add_artist(ellipse)
    ax2.plot(optimization_result['mu_history'][:, 0],
             optimization_result['mu_history'][:, 1],
             marker='.', color='k', alpha=0.5)
    for generation in optimization_result['pop_history']:
        ax2.scatter(generation[:, 0], generation[:, 1])

    # fitness over generations (mean +/- std across the population)
    ax3.errorbar(np.arange(len(optimization_result['fitness_history'])),
                 np.mean(optimization_result['fitness_history'], axis=1),
                 yerr=np.std(optimization_result['fitness_history'], axis=1))

    fig.savefig('brunel_alpha_evolution_strategies.pdf')
|
orangeduck/PyAutoC
|
refs/heads/master
|
Python27/Lib/encodings/gb18030.py
|
816
|
#
# gb18030.py: Python Unicode Codec for GB18030
#
# Written by Hye-Shik Chang <perky@FreeBSD.org>
#
import _codecs_cn, codecs
import _multibytecodec as mbc
codec = _codecs_cn.getcodec('gb18030')
class Codec(codecs.Codec):
    # Stateless encode/decode taken directly from the C gb18030 codec.
    encode = codec.encode
    decode = codec.decode
class IncrementalEncoder(mbc.MultibyteIncrementalEncoder,
                         codecs.IncrementalEncoder):
    # Chunk-by-chunk encoder backed by the C gb18030 codec.
    codec = codec
class IncrementalDecoder(mbc.MultibyteIncrementalDecoder,
                         codecs.IncrementalDecoder):
    # Chunk-by-chunk decoder backed by the C gb18030 codec.
    codec = codec
class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader):
    # Stream-oriented reader backed by the C gb18030 codec.
    codec = codec
class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter):
    # Stream-oriented writer backed by the C gb18030 codec.
    codec = codec
def getregentry():
    """Return the CodecInfo used to register 'gb18030' with the codecs registry."""
    return codecs.CodecInfo(
        name='gb18030',
        encode=Codec().encode,
        decode=Codec().decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
|
MichaelNedzelsky/intellij-community
|
refs/heads/master
|
python/testData/resolve/ClassPrivateOutsideClass.py
|
83
|
class A(object):
__X = 1
A._<ref>_X
|
vFense/vFenseAgent-nix
|
refs/heads/development
|
agent/deps/rpm6-32/Python-2.7.5/lib/python2.7/test/test_threaded_import.py
|
137
|
# This is a variant of the very old (early 90's) file
# Demo/threads/bug.py. It simply provokes a number of threads into
# trying to import the same module "at the same time".
# There are no pleasant failure modes -- most likely is that Python
# complains several times about module random having no attribute
# randrange, and then Python hangs.
import unittest
from test.test_support import verbose, TestFailed, import_module
thread = import_module('thread')
critical_section = thread.allocate_lock()
done = thread.allocate_lock()
def task():
    # Worker body: import `random` concurrently with the other threads,
    # touch an attribute of the freshly imported module, then decrement
    # the shared countdown N under the lock.  The last worker to finish
    # releases `done` so the main thread can proceed.
    global N, critical_section, done
    import random
    # Exercise an attribute of the module imported under contention; the
    # historical failure mode was `random` missing `randrange`.
    x = random.randrange(1, 3)
    critical_section.acquire()
    N -= 1
    # Must release critical_section before releasing done, else the main
    # thread can exit and set critical_section to None as part of global
    # teardown; then critical_section.release() raises AttributeError.
    finished = N == 0
    critical_section.release()
    if finished:
        done.release()
def test_import_hangers():
    # Import the helper module (which runs its hang-provoking scenarios at
    # import time) and report any errors it collected.
    import sys
    if verbose:
        print "testing import hangers ...",
    import test.threaded_import_hangers
    try:
        if test.threaded_import_hangers.errors:
            raise TestFailed(test.threaded_import_hangers.errors)
        elif verbose:
            print "OK."
    finally:
        # In case this test is run again, make sure the helper module
        # gets loaded from scratch again.
        del sys.modules['test.threaded_import_hangers']
# Tricky: When regrtest imports this module, the thread running regrtest
# grabs the import lock and won't let go of it until this module returns.
# All other threads attempting an import hang for the duration. Since
# this test spawns threads that do little *but* import, we can't do that
# successfully until after this module finishes importing and regrtest
# regains control. To make this work, a special case was added to
# regrtest to invoke a module's "test_main" function (if any) after
# importing it.
def test_main():  # magic name! see above
    # Entry point invoked by regrtest after this module has finished
    # importing (see comment above): spawns batches of threads that all
    # import `random` concurrently via task().
    global N, done
    import imp
    if imp.lock_held():
        # This triggers on, e.g., from test import autotest.
        raise unittest.SkipTest("can't run when import lock is held")

    done.acquire()
    for N in (20, 50) * 3:
        if verbose:
            print "Trying", N, "threads ...",
        for i in range(N):
            thread.start_new_thread(task, ())
        # Block until the last worker releases `done` (see task()).
        done.acquire()
        if verbose:
            print "OK."
    done.release()

    test_import_hangers()
if __name__ == "__main__":
test_main()
|
40023256/2015cdag1man
|
refs/heads/master
|
static/Brython3.1.3-20150514-095342/Lib/site-packages/pygame/event.py
|
603
|
#!/usr/bin/env python
'''Pygame module for interacting with events and queues.
Pygame handles all it's event messaging through an event queue. The routines
in this module help you manage that event queue. The input queue is heavily
dependent on the pygame display module. If the display has not been
initialized and a video mode not set, the event queue will not really work.
The queue is a regular queue of Event objects, there are a variety of ways
to access the events it contains. From simply checking for the existance of
events, to grabbing them directly off the stack.
All events have a type identifier. This event type is in between the values
of NOEVENT and NUMEVENTS. All user defined events can have the value of
USEREVENT or higher. It is recommended make sure your event id's follow this
system.
To get the state of various input devices, you can forego the event queue
and access the input devices directly with their appropriate modules; mouse,
key, and joystick. If you use this method, remember that pygame requires some
form of communication with the system window manager and other parts of the
platform. To keep pygame in synch with the system, you will need to call
pygame.event.pump() to keep everything current. You'll want to call this
function usually once per game loop.
The event queue offers some simple filtering. This can help performance
slightly by blocking certain event types from the queue, use the
pygame.event.set_allowed() and pygame.event.set_blocked() to work with
this filtering. All events default to allowed.
Joysticks will not send any events until the device has been initialized.
An Event object contains an event type and a readonly set of member data.
The Event object contains no method functions, just member data. Event
objects are retrieved from the pygame event queue. You can create your
own new events with the pygame.event.Event() function.
Your program must take steps to keep the event queue from overflowing. If the
program is not clearing or getting all events off the queue at regular
intervals, it can overflow. When the queue overflows an exception is thrown.
All Event objects contain an event type identifier in the Event.type member.
You may also get full access to the Event's member data through the Event.dict
method. All other member lookups will be passed through to the Event's
dictionary values.
While debugging and experimenting, you can print the Event objects for a
quick display of its type and members. Events that come from the system
will have a guaranteed set of member items based on the type. Here is a
list of the Event members that are defined with each type.
QUIT
(none)
ACTIVEEVENT
gain, state
KEYDOWN
unicode, key, mod
KEYUP
key, mod
MOUSEMOTION
pos, rel, buttons
MOUSEBUTTONUP
pos, button
MOUSEBUTTONDOWN
pos, button
JOYAXISMOTION
joy, axis, value
JOYBALLMOTION
joy, ball, rel
JOYHATMOTION
joy, hat, value
JOYBUTTONUP
joy, button
JOYBUTTONDOWN
joy, button
VIDEORESIZE
size, w, h
VIDEOEXPOSE
(none)
USEREVENT
code
'''
__docformat__ = 'restructuredtext'
__version__ = '$Id$'
from copy import copy
#from ctypes import * #brython
from SDL import *
import pygame.base
import pygame.locals
import pygame.display
def pump():
    '''Internally process pygame event handlers.

    For each frame of your game, you will need to make some sort of call to
    the event queue. This ensures your program can internally interact with
    the rest of the operating system. If you are not using other event
    functions in your game, you should call pygame.event.pump() to allow
    pygame to handle internal actions.

    This function is not necessary if your program is consistently processing
    events on the queue through the other pygame.event functions.

    There are important things that must be dealt with internally in the event
    queue. The main window may need to be repainted or respond to the system.
    If you fail to make a call to the event queue for too long, the system may
    decide your program has locked up.
    '''
    # Requires an initialized video subsystem; raises otherwise.
    pygame.display._video_init_check()
    SDL_PumpEvents()
def get(typelist=None):
    '''Get events from the queue.

    pygame.event.get(): return Eventlist
    pygame.event.get(type): return Eventlist
    pygame.event.get(typelist): return Eventlist

    This will get all the messages and remove them from the queue. If a type
    or sequence of types is given only those messages will be removed from the
    queue.

    If you are only taking specific events from the queue, be aware that the
    queue could eventually fill up with the events you are not interested.

    :Parameters:
        `typelist` : int or sequence of int
            Event type or list of event types that can be returned.

    :rtype: list of `Event`
    '''
    pygame.display._video_init_check()

    if typelist is None:
        mask = SDL_ALLEVENTS
    else:
        if hasattr(typelist, '__len__'):
            # Fold the sequence of event types into a single SDL event mask.
            # NOTE(review): relies on the Python 2 builtin `reduce`; under
            # Python 3 this would need functools.reduce -- confirm runtime.
            mask = reduce(lambda a,b: a | SDL_EVENTMASK(b), typelist, 0)
        else:
            # NOTE(review): unlike peek(), the scalar case is not wrapped in
            # SDL_EVENTMASK here -- confirm whether that is intended.
            mask = int(typelist)

    SDL_PumpEvents()

    # Drain matching events one at a time until the queue has no more.
    events = []
    new_events = SDL_PeepEvents(1, SDL_GETEVENT, mask)
    while new_events:
        events.append(Event(0, sdl_event=new_events[0]))
        new_events = SDL_PeepEvents(1, SDL_GETEVENT, mask)
    return events
def poll():
    '''Get a single event from the queue.

    Returns a single event from the queue. If the event queue is empty an event
    of type pygame.NOEVENT will be returned immediately. The returned event is
    removed from the queue.

    :rtype: Event
    '''
    pygame.display._video_init_check()

    event = SDL_PollEventAndReturn()
    if event:
        return Event(0, sdl_event=event, keep_userdata=True)
    else:
        # Empty queue: report a NOEVENT instead of blocking (cf. wait()).
        return Event(pygame.locals.NOEVENT)
def wait():
    '''Wait for a single event from the queue.

    Returns a single event from the queue. If the queue is empty this function
    will wait until one is created. While the program is waiting it will sleep
    in an idle state. This is important for programs that want to share the
    system with other applications.

    :rtype: Event
    '''
    pygame.display._video_init_check()

    # Blocks inside SDL until an event arrives.
    return Event(0, sdl_event=SDL_WaitEventAndReturn())
def peek(typelist=None):
    '''Test if event types are waiting on the queue.

    Returns true if there are any events of the given type waiting on the
    queue. If a sequence of event types is passed, this will return True if
    any of those events are on the queue.

    When called without arguments, returns the next Event on the queue
    without removing it (a NOEVENT Event if the queue is empty) -- see the
    deviation note in the body.

    :Parameters:
        `typelist` : int or sequence of int
            Event type or list of event types to look for.

    :rtype: bool
    '''
    pygame.display._video_init_check()

    if typelist is None:
        mask = SDL_ALLEVENTS
    else:
        if hasattr(typelist, '__len__'):
            # NOTE(review): relies on the Python 2 builtin `reduce`; under
            # Python 3 this would need functools.reduce -- confirm runtime.
            mask = reduce(lambda a,b: a | SDL_EVENTMASK(b), typelist, 0)
        else:
            mask = SDL_EVENTMASK(int(typelist))

    SDL_PumpEvents()
    # PEEK leaves the event on the queue.
    events = SDL_PeepEvents(1, SDL_PEEKEVENT, mask)

    if typelist is None:
        if events:
            return Event(0, sdl_event=events[0], keep_userdata=True)
        else:
            return Event(pygame.locals.NOEVENT) # XXX deviation from pygame
    return len(events) > 0
def clear(typelist=None):
    '''Remove all events from the queue.

    Remove all events or events of a specific type from the queue. This has the
    same effect as `get` except nothing is returned. This can be slightly more
    effecient when clearing a full event queue.

    :Parameters:
        `typelist` : int or sequence of int
            Event type or list of event types to remove.
    '''
    pygame.display._video_init_check()

    if typelist is None:
        mask = SDL_ALLEVENTS
    else:
        if hasattr(typelist, '__len__'):
            # NOTE(review): relies on the Python 2 builtin `reduce`; under
            # Python 3 this would need functools.reduce -- confirm runtime.
            mask = reduce(lambda a,b: a | SDL_EVENTMASK(b), typelist, 0)
        else:
            mask = int(typelist)

    SDL_PumpEvents()

    # Drain matching events, discarding them (the local list is never used).
    events = []
    new_events = SDL_PeepEvents(1, SDL_GETEVENT, mask)
    while new_events:
        new_events = SDL_PeepEvents(1, SDL_GETEVENT, mask)
_event_names = {
SDL_ACTIVEEVENT: 'ActiveEvent',
SDL_KEYDOWN: 'KeyDown',
SDL_KEYUP: 'KeyUp',
SDL_MOUSEMOTION: 'MouseMotion',
SDL_MOUSEBUTTONDOWN:'MouseButtonDown',
SDL_MOUSEBUTTONUP: 'MouseButtonUp',
SDL_JOYAXISMOTION: 'JoyAxisMotion',
SDL_JOYBALLMOTION: 'JoyBallMotion',
SDL_JOYHATMOTION: 'JoyHatMotion',
SDL_JOYBUTTONUP: 'JoyButtonUp',
SDL_JOYBUTTONDOWN: 'JoyButtonDown',
SDL_QUIT: 'Quit',
SDL_SYSWMEVENT: 'SysWMEvent',
SDL_VIDEORESIZE: 'VideoResize',
SDL_VIDEOEXPOSE: 'VideoExpose',
SDL_NOEVENT: 'NoEvent'
}
def event_name(event_type):
    '''Get the string name from an event id.

    Pygame uses integer ids to represent the event types.  If you want to
    report these types to the user they should be converted to strings.
    This returns the CamelCase name for the given event type, 'UserEvent'
    for anything in the user-defined range, or 'Unknown' for values it
    does not recognize.

    :Parameters:
        - `event_type`: int

    :rtype: str
    '''
    if SDL_USEREVENT <= event_type < SDL_NUMEVENTS:
        return 'UserEvent'
    return _event_names.get(event_type, 'Unknown')
def set_blocked(typelist):
    '''Control which events are allowed on the queue.

    The given event types are no longer allowed to appear on the event queue.
    By default all events can be placed on the queue. It is safe to disable an
    event type multiple times.

    If None is passed, none of the event types are allowed on the queue.

    :note: events posted with `post` will not be blocked.

    :Parameters:
        `typelist` : int or sequence of int or None
            Event type or list of event types to disallow.

    '''
    pygame.display._video_init_check()
    if typelist is None:
        SDL_EventState(SDL_ALLEVENTS, SDL_IGNORE)
        return
    # Normalise a single type into a sequence, then ignore each one.
    if not hasattr(typelist, '__len__'):
        typelist = [typelist]
    for event_type in typelist:
        SDL_EventState(event_type, SDL_IGNORE)
def set_allowed(typelist):
    '''Control which events are allowed on the queue.

    The given event types are allowed to appear on the event queue. By default
    all events can be placed on the queue. It is safe to enable an event type
    multiple times.

    If None is passed, all of the event types are allowed on the queue.

    :Parameters:
        `typelist` : int or sequence of int or None
            Event type or list of event types to allow.

    '''
    pygame.display._video_init_check()
    if typelist is None:
        SDL_EventState(SDL_ALLEVENTS, SDL_ENABLE)
        return
    # Normalise a single type into a sequence, then enable each one.
    if not hasattr(typelist, '__len__'):
        typelist = [typelist]
    for event_type in typelist:
        SDL_EventState(event_type, SDL_ENABLE)
def get_blocked(typelist):
    '''Test if a type of event is blocked from the queue.

    Returns true if the given event type is blocked from the queue.

    :Parameters:
        `typelist` : int or sequence of int or None
            Event type or list of event types to query.

    :rtype: int
    '''
    pygame.display._video_init_check()
    # NOTE(review): SDL_EventState(..., SDL_QUERY) returns SDL_ENABLE when
    # the event is *processed* (i.e. not blocked), so comparing against
    # SDL_ENABLE looks inverted relative to the docstring. Behaviour is
    # deliberately kept unchanged here -- confirm against the pygame
    # reference implementation before flipping it.
    if typelist is None:  # bug fix: was `typelist == None`; identity test is correct
        return SDL_EventState(SDL_ALLEVENTS, SDL_QUERY) == SDL_ENABLE
    elif hasattr(typelist, '__len__'): # XXX undocumented behaviour
        for val in typelist:
            if SDL_EventState(val, SDL_QUERY) == SDL_ENABLE:
                return True
        return False
    else:
        return SDL_EventState(typelist, SDL_QUERY) == SDL_ENABLE
def set_grab(grab):
    '''Control the sharing of input devices with other applications.

    When your program runs in a windowed environment, it will share the mouse
    and keyboard devices with other applications that have focus. If your
    program sets the event grab to True, it will lock all input into your
    program.

    It is best to not always grab the input, since it prevents the user from
    doing other things on their system.

    :Parameters:
     - `grab`: bool

    '''
    pygame.display._video_init_check()
    # Map the boolean onto SDL's grab-mode constants in a single call.
    SDL_WM_GrabInput(SDL_GRAB_ON if grab else SDL_GRAB_OFF)
def get_grab():
    '''Test if the program is sharing input devices.

    Returns true when the input events are grabbed for this application. Use
    `set_grab` to control this state.

    :rtype: bool
    '''
    pygame.display._video_init_check()
    current_mode = SDL_WM_GrabInput(SDL_GRAB_QUERY)
    return current_mode == SDL_GRAB_ON
# Sentinel values stored in SDL_UserEvent.code/data1 so that events posted
# from Python (see `post`) can be recognised when they come back off the SDL
# queue and matched to their original Python Event object.
_USEROBJECT_CHECK1 = int(0xdeadbeef) # signed
_USEROBJECT_CHECK2 = 0xfeedf00d
# Registry of posted Event objects, keyed by the monotonically increasing id
# carried in SDL_UserEvent.data2.
_user_event_objects = {}
_user_event_nextid = 1
def post(event):
    '''Place a new event on the queue.

    This places a new event at the end of the event queue, to be retrieved
    later by the other queue functions. It is usually used for placing
    pygame.USEREVENT events on the queue; any event type can be posted, but
    for system event types the standard attributes should be supplied with
    appropriate values.

    :Parameters:
        `event` : Event
            Event to add to the queue.

    '''
    global _user_event_nextid
    pygame.display._video_init_check()
    # Reserve a ticket id and remember the Python object under it; the SDL
    # event only carries the sentinels plus this id (see Event.__init__).
    ticket = _user_event_nextid
    _user_event_nextid = ticket + 1
    wrapper = SDL_Event(event.type)
    wrapper.user.code = _USEROBJECT_CHECK1
    wrapper.user.data1 = c_void_p(_USEROBJECT_CHECK2)
    wrapper.user.data2 = c_void_p(ticket)
    _user_event_objects[ticket] = event
    SDL_PushEvent(wrapper)
class Event:
    '''A Pygame event.

    Instances carry a `type` attribute plus data attributes whose names
    depend on the event type (for example `pos` and `button` for mouse
    events).  `dict` mirrors the instance attributes for compatibility.
    '''

    def __init__(self, event_type, event_dict=None, sdl_event=None,
                 keep_userdata=False, **attributes):
        '''Create a new event object.

        Creates a new event with the given type. The event is created with the
        given attributes and values. The attributes can come from a dictionary
        argument, or as string keys from a dictionary.

        The given attributes will be readonly attributes on the new event
        object itself. These are the only attributes on the Event object,
        there are no methods attached to Event objects.

        :Parameters:
            `event_type` : int
                Event type to create
            `event_dict` : dict
                Dictionary of attributes to assign.
            `sdl_event` : `SDL_Event`
                Construct a Pygame event from the given SDL_Event; used
                internally.
            `keep_userdata` : bool
                Used internally.
            `attributes` : additional keyword arguments
                Additional attributes to assign to the event.

        '''
        if sdl_event:
            uevent = cast(pointer(sdl_event), POINTER(SDL_UserEvent)).contents
            if uevent.code == _USEROBJECT_CHECK1 and \
               uevent.data1 == _USEROBJECT_CHECK2 and \
               uevent.data2 in _user_event_objects:
                # An event that was posted; grab dict from local store.
                # Bug fix: the ticket id lives on the SDL_UserEvent view
                # (`uevent`), not on the raw SDL_Event -- `sdl_event.data2`
                # raised AttributeError. Also renamed `id` to avoid shadowing
                # the builtin.
                event_id = uevent.data2
                for key, value in _user_event_objects[event_id].__dict__.items():
                    setattr(self, key, value)
                # Free memory unless just peeking
                if not keep_userdata:
                    del _user_event_objects[event_id]
            else:
                # Standard SDL event: translate the C union fields into the
                # per-type attribute set pygame documents.
                self.type = sdl_event.type
                if self.type == SDL_QUIT:
                    pass
                elif self.type == SDL_ACTIVEEVENT:
                    self.gain = sdl_event.gain
                    self.state = sdl_event.state
                elif self.type == SDL_KEYDOWN:
                    self.unicode = sdl_event.keysym.unicode
                    self.key = sdl_event.keysym.sym
                    self.mod = sdl_event.keysym.mod
                elif self.type == SDL_KEYUP:
                    self.key = sdl_event.keysym.sym
                    self.mod = sdl_event.keysym.mod
                elif self.type == SDL_MOUSEMOTION:
                    self.pos = (sdl_event.x, sdl_event.y)
                    self.rel = (sdl_event.xrel, sdl_event.yrel)
                    self.buttons = (sdl_event.state & SDL_BUTTON(1) != 0,
                                    sdl_event.state & SDL_BUTTON(2) != 0,
                                    sdl_event.state & SDL_BUTTON(3) != 0)
                elif self.type in (SDL_MOUSEBUTTONDOWN, SDL_MOUSEBUTTONUP):
                    self.pos = (sdl_event.x, sdl_event.y)
                    self.button = sdl_event.button
                elif self.type == SDL_JOYAXISMOTION:
                    self.joy = sdl_event.which
                    self.axis = sdl_event.axis
                    # Normalise the raw signed 16-bit axis to [-1.0, 1.0].
                    self.value = sdl_event.value / 32767.0
                elif self.type == SDL_JOYBALLMOTION:
                    self.joy = sdl_event.which
                    self.ball = sdl_event.ball
                    self.rel = (sdl_event.xrel, sdl_event.yrel)
                elif self.type == SDL_JOYHATMOTION:
                    self.joy = sdl_event.which
                    self.hat = sdl_event.hat
                    # Decompose the hat bitmask into an (x, y) direction pair.
                    hx = hy = 0
                    if sdl_event.value & SDL_HAT_UP:
                        hy = 1
                    if sdl_event.value & SDL_HAT_DOWN:
                        hy = -1
                    if sdl_event.value & SDL_HAT_RIGHT:
                        hx = 1
                    if sdl_event.value & SDL_HAT_LEFT:
                        hx = -1
                    self.value = (hx, hy)
                elif self.type in (SDL_JOYBUTTONUP, SDL_JOYBUTTONDOWN):
                    self.joy = sdl_event.which
                    self.button = sdl_event.button
                elif self.type == SDL_VIDEORESIZE:
                    self.size = (sdl_event.w, sdl_event.h)
                    self.w = sdl_event.w
                    self.h = sdl_event.h
                elif self.type == SDL_VIDEOEXPOSE:
                    pass
                elif self.type == SDL_SYSWMEVENT:
                    pass ### XXX: not implemented
                elif self.type >= SDL_USEREVENT and self.type < SDL_NUMEVENTS:
                    self.code = sdl_event.code
        else:
            # Create an event (not from event queue)
            self.type = event_type
            if event_dict:
                for key, value in event_dict.items():
                    setattr(self, key, value)
            for key, value in attributes.items():
                setattr(self, key, value)
        # Bizarre undocumented but used by some people.
        self.dict = self.__dict__

    def __repr__(self):
        d = copy(self.__dict__)
        del d['type']
        return '<Event(%d-%s %r)>' % \
               (self.type, event_name(self.type), d)

    def __nonzero__(self):
        # Python 2 truth protocol: only SDL_NOEVENT events are falsy.
        return self.type != SDL_NOEVENT
EventType = Event  # Backwards-compatible alias: pygame exposes Event as EventType.
|
ulope/django
|
refs/heads/master
|
tests/migrations/migrations_test_apps/migrated_app/migrations/0001_initial.py
|
2995
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial schema for the migrated test app: Author and Tribble tables."""

    operations = [
        migrations.CreateModel(
            name="Author",
            fields=[
                ("id", models.AutoField(primary_key=True)),
                ("name", models.CharField(max_length=255)),
                ("slug", models.SlugField(null=True)),
                ("age", models.IntegerField(default=0)),
                ("silly_field", models.BooleanField(default=False)),
            ],
        ),
        migrations.CreateModel(
            name="Tribble",
            fields=[
                ("id", models.AutoField(primary_key=True)),
                ("fluffy", models.BooleanField(default=True)),
            ],
        ),
    ]
|
jkonecny12/pykickstart
|
refs/heads/master
|
tests/commands/lang.py
|
8
|
import unittest
from tests.baseclass import CommandTest
from pykickstart.errors import KickstartValueError
class FC3_TestCase(CommandTest):
    """Parser tests for the FC3 version of the "lang" kickstart command."""
    command = "lang"
    def runTest(self):
        # pass
        self.assert_parse("lang en_US", "lang en_US\n")
        # fail
        # Fail if less than or more than one argument is specified
        self.assert_parse_error("lang", KickstartValueError)
        self.assert_parse_error("lang en_US en_CA", KickstartValueError)
class F19_TestCase(FC3_TestCase):
    """Parser tests for the F19 "lang" command, which adds --addsupport."""
    def runTest(self):
        # pass
        self.assert_parse("lang en_US")
        self.assert_parse("lang en_US --addsupport=cs_CZ",
                          "lang en_US --addsupport=cs_CZ\n")
        self.assert_parse("lang en_US --addsupport=sr_RS.UTF-8@latin",
                          "lang en_US --addsupport=sr_RS.UTF-8@latin\n")
        self.assert_parse("lang en_US --addsupport=cs_CZ,fr_FR",
                          "lang en_US --addsupport=cs_CZ,fr_FR\n")
        # fail
        # Fail if less than or more than one argument is specified
        self.assert_parse_error("lang", KickstartValueError)
        self.assert_parse_error("lang en_US en_CA", KickstartValueError)
        # --addsupport still requires the positional primary language
        self.assert_parse_error("lang --addsupport=en_US", KickstartValueError)
# Allow running this test module directly.
if __name__ == "__main__":
    unittest.main()
|
Maximilian-Reuter/SickRage-1
|
refs/heads/master
|
lib/feedparser/http.py
|
43
|
from __future__ import absolute_import, unicode_literals, with_statement
import datetime
import gzip
import re
import struct
import zlib
# Python 2/3 compatibility: on Python 2 (where urllib.parse/urllib.request do
# not exist) build a stand-in `urllib` class exposing the same dotted names
# that the rest of this module uses.
try:
    import urllib.parse
    import urllib.request
except ImportError:
    from urllib import splithost, splittype, splituser
    from urllib2 import build_opener, HTTPDigestAuthHandler, HTTPRedirectHandler, HTTPDefaultErrorHandler, Request
    from urlparse import urlparse
    class urllib(object):
        class parse(object):
            splithost = staticmethod(splithost)
            splittype = staticmethod(splittype)
            splituser = staticmethod(splituser)
            urlparse = staticmethod(urlparse)
        class request(object):
            build_opener = staticmethod(build_opener)
            HTTPDigestAuthHandler = HTTPDigestAuthHandler
            HTTPRedirectHandler = HTTPRedirectHandler
            HTTPDefaultErrorHandler = HTTPDefaultErrorHandler
            Request = Request
# Binary buffer type: BytesIO on Python 3, cStringIO/StringIO on Python 2.
try:
    from io import BytesIO as _StringIO
except ImportError:
    try:
        from cStringIO import StringIO as _StringIO
    except ImportError:
        from StringIO import StringIO as _StringIO
try:
    import base64, binascii
except ImportError:
    base64 = binascii = None
else:
    # Python 3.1 deprecated decodestring in favor of decodebytes
    _base64decode = getattr(base64, 'decodebytes', base64.decodestring)
from .datetimes import _parse_date
from .urls import _convert_to_idn
# `basestring` only exists on Python 2; `bytes_` is the native bytes type on
# either major version.
try:
    basestring
except NameError:
    basestring = str
bytes_ = type(b'')
# HTTP "Accept" header to send to servers when downloading feeds. If you don't
# want to send an Accept header, set this to None.
ACCEPT_HEADER = "application/atom+xml,application/rdf+xml,application/rss+xml,application/x-netcdf,application/xml;q=0.9,text/xml;q=0.2,*/*;q=0.1"
class _FeedURLHandler(urllib.request.HTTPDigestAuthHandler, urllib.request.HTTPRedirectHandler, urllib.request.HTTPDefaultErrorHandler):
    """Opener helper that returns HTTP error responses (annotated with a
    `status` attribute) instead of raising, records redirect status/newurl,
    and retries a basic-auth 401 with digest auth."""
    def http_error_default(self, req, fp, code, msg, headers):
        # The default implementation just raises HTTPError.
        # Forget that.
        fp.status = code
        return fp
    def http_error_301(self, req, fp, code, msg, hdrs):
        # Delegate to the standard redirect handling, then annotate the
        # response with the original status code and the final URL.
        result = urllib.request.HTTPRedirectHandler.http_error_301(self, req, fp,
                                                                   code, msg, hdrs)
        result.status = code
        result.newurl = result.geturl()
        return result
    # The default implementations in urllib.request.HTTPRedirectHandler
    # are identical, so hardcoding a http_error_301 call above
    # won't affect anything
    http_error_300 = http_error_301
    http_error_302 = http_error_301
    http_error_303 = http_error_301
    http_error_307 = http_error_301
    def http_error_401(self, req, fp, code, msg, headers):
        # Check if
        # - server requires digest auth, AND
        # - we tried (unsuccessfully) with basic auth, AND
        # If all conditions hold, parse authentication information
        # out of the Authorization header we sent the first time
        # (for the username and password) and the WWW-Authenticate
        # header the server sent back (for the realm) and retry
        # the request with the appropriate digest auth headers instead.
        # This evil genius hack has been brought to you by Aaron Swartz.
        host = urllib.parse.urlparse(req.get_full_url())[1]
        if base64 is None or 'Authorization' not in req.headers \
           or 'WWW-Authenticate' not in headers:
            return self.http_error_default(req, fp, code, msg, headers)
        auth = _base64decode(req.headers['Authorization'].split(' ')[1])
        # NOTE(review): _base64decode returns bytes on Python 3, so splitting
        # on the str ':' below presumably only works on Python 2 -- verify.
        user, passw = auth.split(':')
        realm = re.findall('realm="([^"]*)"', headers['WWW-Authenticate'])[0]
        self.add_password(realm, host, user, passw)
        retry = self.http_error_auth_reqed('www-authenticate', host, req, headers)
        self.reset_retry_count()
        return retry
def _build_urllib2_request(url, agent, accept_header, etag, modified, referrer, auth, request_headers):
    """Construct the urllib request for *url*, carrying the conditional-GET
    (If-None-Match / If-Modified-Since), compression, auth and any
    caller-supplied headers feedparser sends."""
    req = urllib.request.Request(url)
    req.add_header('User-Agent', agent)
    if etag:
        req.add_header('If-None-Match', etag)
    # Normalise `modified` (string or datetime) into a struct_time tuple.
    if isinstance(modified, basestring):
        modified = _parse_date(modified)
    elif isinstance(modified, datetime.datetime):
        modified = modified.utctimetuple()
    if modified:
        # format into an RFC 1123-compliant timestamp. We can't use
        # time.strftime() since the %a and %b directives can be affected
        # by the current locale, but RFC 2616 states that dates must be
        # in English.
        short_weekdays = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun']
        months = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']
        stamp = '%s, %02d %s %04d %02d:%02d:%02d GMT' % (
            short_weekdays[modified[6]], modified[2], months[modified[1] - 1],
            modified[0], modified[3], modified[4], modified[5])
        req.add_header('If-Modified-Since', stamp)
    if referrer:
        req.add_header('Referer', referrer)
    # Advertise whichever decompression schemes are importable.
    if gzip and zlib:
        encodings = 'gzip, deflate'
    elif gzip:
        encodings = 'gzip'
    elif zlib:
        encodings = 'deflate'
    else:
        encodings = ''
    req.add_header('Accept-encoding', encodings)
    if auth:
        req.add_header('Authorization', 'Basic %s' % auth)
    if accept_header:
        req.add_header('Accept', accept_header)
    # use this for whatever -- cookies, special headers, etc
    # [('Cookie','Something'),('x-special-header','Another Value')]
    for header_name, header_value in request_headers.items():
        req.add_header(header_name, header_value)
    req.add_header('A-IM', 'feed') # RFC 3229 support
    return req
def get(url, etag=None, modified=None, agent=None, referrer=None, handlers=None, request_headers=None, result=None):
    """Fetch *url* and return the (decompressed) response body bytes.

    Response metadata -- lower-cased headers, etag, modified/modified_parsed,
    final href, status, and any bozo error -- is written into the caller's
    mutable *result* dict.
    """
    if handlers is None:
        handlers = []
    elif not isinstance(handlers, list):
        handlers = [handlers]
    if request_headers is None:
        request_headers = {}
    # Deal with the feed URI scheme
    if url.startswith('feed:http'):
        url = url[5:]
    elif url.startswith('feed:'):
        url = 'http:' + url[5:]
    if not agent:
        # NOTE(review): USER_AGENT is defined elsewhere in this module (not
        # visible in this chunk).
        agent = USER_AGENT
    # Test for inline user:password credentials for HTTP basic auth
    auth = None
    if base64 and not url.startswith('ftp:'):
        urltype, rest = urllib.parse.splittype(url)
        realhost, rest = urllib.parse.splithost(rest)
        if realhost:
            user_passwd, realhost = urllib.parse.splituser(realhost)
            if user_passwd:
                # Strip the credentials out of the URL and send them as a
                # Basic Authorization header instead.
                url = '%s://%s%s' % (urltype, realhost, rest)
                auth = base64.standard_b64encode(user_passwd).strip()
    # iri support
    if not isinstance(url, bytes_):
        url = _convert_to_idn(url)
    # try to open with urllib2 (to use optional headers)
    request = _build_urllib2_request(url, agent, ACCEPT_HEADER, etag, modified, referrer, auth, request_headers)
    opener = urllib.request.build_opener(*tuple(handlers + [_FeedURLHandler()]))
    opener.addheaders = [] # RMK - must clear so we only send our custom User-Agent
    f = opener.open(request)
    data = f.read()
    f.close()
    # lowercase all of the HTTP headers for comparisons per RFC 2616
    result['headers'] = dict((k.lower(), v) for k, v in f.headers.items())
    # if feed is gzip-compressed, decompress it
    if data and 'gzip' in result['headers'].get('content-encoding', ''):
        try:
            data = gzip.GzipFile(fileobj=_StringIO(data)).read()
        except (EOFError, IOError, struct.error) as e:
            # IOError can occur if the gzip header is bad.
            # struct.error can occur if the data is damaged.
            result['bozo'] = True
            result['bozo_exception'] = e
            if isinstance(e, struct.error):
                # A gzip header was found but the data is corrupt.
                # Ideally, we should re-request the feed without the
                # 'Accept-encoding: gzip' header, but we don't.
                data = None
    elif data and 'deflate' in result['headers'].get('content-encoding', ''):
        try:
            data = zlib.decompress(data)
        except zlib.error as e:
            try:
                # The data may have no headers and no checksum.
                data = zlib.decompress(data, -15)
            except zlib.error as e:
                result['bozo'] = True
                result['bozo_exception'] = e
    # save HTTP headers
    if 'etag' in result['headers']:
        etag = result['headers'].get('etag', '')
        if isinstance(etag, bytes_):
            etag = etag.decode('utf-8', 'ignore')
        if etag:
            result['etag'] = etag
    if 'last-modified' in result['headers']:
        modified = result['headers'].get('last-modified', '')
        if modified:
            result['modified'] = modified
            result['modified_parsed'] = _parse_date(modified)
    if isinstance(f.url, bytes_):
        result['href'] = f.url.decode('utf-8', 'ignore')
    else:
        result['href'] = f.url
    result['status'] = getattr(f, 'status', 200)
    # Stop processing if the server sent HTTP 304 Not Modified.
    if getattr(f, 'code', 0) == 304:
        result['version'] = ''
        result['debug_message'] = 'The feed has not changed since you last checked, ' + \
            'so the server sent no data. This is a feature, not a bug!'
    return data
|
pombredanne/pants
|
refs/heads/master
|
src/python/pants/util/fileutil.py
|
16
|
# coding=utf-8
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import os
import random
import shutil
from pants.util.contextutil import temporary_file
def atomic_copy(src, dst):
  """Copy the file src to dst, overwriting dst atomically.

  The copy is staged in a temporary file created in dst's directory and then
  renamed into place, so readers never observe a partially written dst.
  """
  with temporary_file(root_dir=os.path.dirname(dst)) as staged:
    staged_path = staged.name
    shutil.copyfile(src, staged_path)
    # Preserve the source's permission bits on the staged copy.
    os.chmod(staged_path, os.stat(src).st_mode)
    # A rename within the same directory (same filesystem) is atomic.
    os.rename(staged_path, dst)
def create_size_estimators():
  """Create a dict of name to a function that returns an estimated size for a given target.

  The estimated size is used to build the largest targets first (subject to dependency
  constraints). Choose 'random' to choose random sizes for each target, which may be useful
  for distributed builds.

  :returns: Dict of a name to a function that returns an estimated size.
  """
  def count_lines(path):
    # Binary mode: we only need raw line boundaries, not decoded text.
    with open(path, 'rb') as stream:
      total = 0
      for _ in stream:
        total += 1
      return total

  def total_line_count(srcs):
    return sum(count_lines(src) for src in srcs)

  def total_file_size(srcs):
    return sum(os.path.getsize(src) for src in srcs)

  return {
    'linecount': total_line_count,
    'filecount': len,
    'filesize': total_file_size,
    'nosize': lambda srcs: 0,
    'random': lambda srcs: random.randint(0, 10000),
  }
|
gururajl/deep-learning
|
refs/heads/master
|
transfer-learning/tensorflow_vgg/utils.py
|
145
|
import skimage
import skimage.io
import skimage.transform
import numpy as np
# synset = [l.strip() for l in open('synset.txt').readlines()]
# returns image of shape [224, 224, 3]
# [height, width, depth]
def load_image(path):
    """Load an image, scale pixel values to [0, 1], center-crop to a square
    and resize to shape [224, 224, 3] (height, width, depth)."""
    img = skimage.io.imread(path) / 255.0
    assert (0 <= img).all() and (img <= 1.0).all()
    # Center square crop along the shorter edge.
    edge = min(img.shape[:2])
    top = int((img.shape[0] - edge) / 2)
    left = int((img.shape[1] - edge) / 2)
    square = img[top: top + edge, left: left + edge]
    # resize to 224, 224
    return skimage.transform.resize(square, (224, 224), mode='constant')
# returns the top1 string
def print_prob(prob, file_path):
    """Print the top-1 and top-5 predictions for `prob` and return the top-1 label.

    :param prob: 1-D array-like of class probabilities, aligned with the lines
        of the synset file.
    :param file_path: path to a text file with one class label per line
        (at least 5 lines for the top-5 report).
    :returns: the label string with the highest probability.
    """
    # Bug fix: the file was previously opened without ever being closed.
    with open(file_path) as f:
        synset = [l.strip() for l in f]
    # Indices sorted by descending probability.
    pred = np.argsort(prob)[::-1]
    # Get top1 label
    top1 = synset[pred[0]]
    print(("Top1: ", top1, prob[pred[0]]))
    # Get top5 label
    top5 = [(synset[pred[i]], prob[pred[i]]) for i in range(5)]
    print(("Top5: ", top5))
    return top1
def load_image2(path, height=None, width=None):
    """Load an image scaled to [0, 1], optionally resized.

    If only one of `height`/`width` is given, the other dimension is derived
    so the aspect ratio is preserved. With neither, the original size is kept.
    """
    # load image
    img = skimage.io.imread(path)
    img = img / 255.0
    if height is not None and width is not None:
        ny = height
        nx = width
    elif height is not None:
        ny = height
        # Bug fix: wrap in int() -- under Python 3 true division yields a
        # float, which is not a valid output dimension for resize().
        nx = int(img.shape[1] * ny / img.shape[0])
    elif width is not None:
        nx = width
        ny = int(img.shape[0] * nx / img.shape[1])
    else:
        ny = img.shape[0]
        nx = img.shape[1]
    return skimage.transform.resize(img, (ny, nx), mode='constant')
def test():
    """Smoke test: resize the sample image to height 300 (keeping aspect
    ratio) and write the result out."""
    img = skimage.io.imread("./test_data/starry_night.jpg")
    ny = 300
    # Bug fix: int() avoids passing a float dimension to resize() under
    # Python 3's true division.
    nx = int(img.shape[1] * ny / img.shape[0])
    img = skimage.transform.resize(img, (ny, nx), mode='constant')
    skimage.io.imsave("./test_data/test/output.jpg", img)
# Allow running this module directly as a quick smoke test.
if __name__ == "__main__":
    test()
|
sander76/home-assistant
|
refs/heads/dev
|
homeassistant/components/webhook/trigger.py
|
5
|
"""Offer webhook triggered automation rules."""
from functools import partial
from aiohttp import hdrs
import voluptuous as vol
from homeassistant.const import CONF_PLATFORM, CONF_WEBHOOK_ID
from homeassistant.core import HassJob, callback
import homeassistant.helpers.config_validation as cv
# mypy: allow-untyped-defs
DEPENDENCIES = ("webhook",)
TRIGGER_SCHEMA = vol.Schema(
{vol.Required(CONF_PLATFORM): "webhook", vol.Required(CONF_WEBHOOK_ID): cv.string}
)
async def _handle_webhook(job, trigger_id, hass, webhook_id, request):
    """Handle incoming webhook."""
    trigger_data = {"platform": "webhook", "webhook_id": webhook_id}
    # JSON bodies are decoded; everything else is exposed as form/post data.
    content_type = request.headers.get(hdrs.CONTENT_TYPE, "")
    if "json" in content_type:
        trigger_data["json"] = await request.json()
    else:
        trigger_data["data"] = await request.post()
    trigger_data["query"] = request.query
    trigger_data["description"] = "webhook"
    trigger_data["id"] = trigger_id
    hass.async_run_hass_job(job, {"trigger": trigger_data})
async def async_attach_trigger(hass, config, action, automation_info):
    """Trigger based on incoming webhooks.

    Registers a webhook whose requests run *action*; returns a callback that
    unregisters it again.
    """
    trigger_id = automation_info.get("trigger_id") if automation_info else None
    hass_job = HassJob(action)
    webhook_id = config.get(CONF_WEBHOOK_ID)
    handler = partial(_handle_webhook, hass_job, trigger_id)
    hass.components.webhook.async_register(
        automation_info["domain"],
        automation_info["name"],
        webhook_id,
        handler,
    )

    @callback
    def unregister():
        """Unregister webhook."""
        hass.components.webhook.async_unregister(webhook_id)

    return unregister
|
scroggo/skia
|
refs/heads/master
|
platform_tools/android/tests/test_variables.py
|
132
|
#!/usr/bin/python
# Copyright 2014 Google Inc.
#
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Common variables for tests.
"""
import os
# Find this file so we can find the python files to test.
SCRIPT_DIR = os.path.dirname(__file__)
# platform_tools/android, i.e. the parent of this tests/ directory.
ANDROID_DIR = os.path.normpath(os.path.join(SCRIPT_DIR, os.pardir))
# Path to gyp_to_android.
BIN_DIR = os.path.join(ANDROID_DIR, 'bin')
# Path to generator files.
GYP_GEN_DIR = os.path.join(ANDROID_DIR, 'gyp_gen')
# Name of the makefile consumed/produced by the generators under test.
ANDROID_MK = 'Android.mk'
|
wbc2010/django1.2.5
|
refs/heads/master
|
django1.2.5/django/contrib/syndication/views.py
|
43
|
import datetime
from django.conf import settings
from django.contrib.sites.models import get_current_site
from django.core.exceptions import ImproperlyConfigured, ObjectDoesNotExist
from django.http import HttpResponse, Http404
from django.template import loader, Template, TemplateDoesNotExist, RequestContext
from django.utils import feedgenerator, tzinfo
from django.utils.encoding import force_unicode, iri_to_uri, smart_unicode
from django.utils.html import escape
def add_domain(domain, url, secure=False):
    """Prefix *url* with ``protocol://domain`` unless it is already absolute
    (http://, https:// or mailto:). Returns the possibly-qualified URL."""
    if not url.startswith(('http://', 'https://', 'mailto:')):
        # 'url' must already be ASCII and URL-quoted, so no need for encoding
        # conversions here.
        protocol = 'https' if secure else 'http'
        url = iri_to_uri(u'%s://%s%s' % (protocol, domain, url))
    return url
class FeedDoesNotExist(ObjectDoesNotExist):
    # Raised by feed implementations when the requested feed object is absent.
    pass
class Feed(object):
    """Base class for class-based syndication feed views.

    Subclasses describe a feed declaratively via attributes or dynamically
    resolved callables (``title``, ``link``, ``items``, ``item_title``, ...)
    and instances are used directly as Django views.
    """
    feed_type = feedgenerator.DefaultFeed
    title_template = None
    description_template = None
    def __call__(self, request, *args, **kwargs):
        # View entry point: resolve the subject object, build the feed and
        # serialize it into an HttpResponse with the feed's MIME type.
        try:
            obj = self.get_object(request, *args, **kwargs)
        except ObjectDoesNotExist:
            raise Http404('Feed object does not exist.')
        feedgen = self.get_feed(obj, request)
        response = HttpResponse(mimetype=feedgen.mime_type)
        feedgen.write(response, 'utf-8')
        return response
    def item_title(self, item):
        # Titles should be double escaped by default (see #6533)
        return escape(force_unicode(item))
    def item_description(self, item):
        # Default description: the unicode form of the item.
        return force_unicode(item)
    def item_link(self, item):
        # Default link: the model's get_absolute_url().
        try:
            return item.get_absolute_url()
        except AttributeError:
            raise ImproperlyConfigured('Give your %s class a get_absolute_url() method, or define an item_link() method in your Feed class.' % item.__class__.__name__)
    def __get_dynamic_attr(self, attname, obj, default=None):
        """Resolve `attname` as either a plain attribute or a callable taking
        no argument or the single argument `obj`."""
        try:
            attr = getattr(self, attname)
        except AttributeError:
            return default
        if callable(attr):
            # Check func_code.co_argcount rather than try/excepting the
            # function and catching the TypeError, because something inside
            # the function may raise the TypeError. This technique is more
            # accurate.
            if hasattr(attr, 'func_code'):
                argcount = attr.func_code.co_argcount
            else:
                argcount = attr.__call__.func_code.co_argcount
            if argcount == 2: # one argument is 'self'
                return attr(obj)
            else:
                return attr()
        return attr
    def feed_extra_kwargs(self, obj):
        """
        Returns an extra keyword arguments dictionary that is used when
        initializing the feed generator.
        """
        return {}
    def item_extra_kwargs(self, item):
        """
        Returns an extra keyword arguments dictionary that is used with
        the `add_item` call of the feed generator.
        """
        return {}
    def get_object(self, request, *args, **kwargs):
        # Hook for subclasses whose feed depends on an object derived from
        # the URL; the default feed has no subject object.
        return None
    def get_feed(self, obj, request):
        """
        Returns a feedgenerator.DefaultFeed object, fully populated, for
        this feed. Raises FeedDoesNotExist for invalid parameters.
        """
        current_site = get_current_site(request)
        link = self.__get_dynamic_attr('link', obj)
        link = add_domain(current_site.domain, link, request.is_secure())
        feed = self.feed_type(
            title = self.__get_dynamic_attr('title', obj),
            subtitle = self.__get_dynamic_attr('subtitle', obj),
            link = link,
            description = self.__get_dynamic_attr('description', obj),
            language = settings.LANGUAGE_CODE.decode(),
            feed_url = add_domain(
                current_site.domain,
                self.__get_dynamic_attr('feed_url', obj) or request.path,
                request.is_secure(),
            ),
            author_name = self.__get_dynamic_attr('author_name', obj),
            author_link = self.__get_dynamic_attr('author_link', obj),
            author_email = self.__get_dynamic_attr('author_email', obj),
            categories = self.__get_dynamic_attr('categories', obj),
            feed_copyright = self.__get_dynamic_attr('feed_copyright', obj),
            feed_guid = self.__get_dynamic_attr('feed_guid', obj),
            ttl = self.__get_dynamic_attr('ttl', obj),
            **self.feed_extra_kwargs(obj)
        )
        # Item titles/descriptions may come from templates; when the template
        # is missing we fall back to the item_title/item_description hooks.
        title_tmp = None
        if self.title_template is not None:
            try:
                title_tmp = loader.get_template(self.title_template)
            except TemplateDoesNotExist:
                pass
        description_tmp = None
        if self.description_template is not None:
            try:
                description_tmp = loader.get_template(self.description_template)
            except TemplateDoesNotExist:
                pass
        for item in self.__get_dynamic_attr('items', obj):
            if title_tmp is not None:
                title = title_tmp.render(RequestContext(request, {'obj': item, 'site': current_site}))
            else:
                title = self.__get_dynamic_attr('item_title', item)
            if description_tmp is not None:
                description = description_tmp.render(RequestContext(request, {'obj': item, 'site': current_site}))
            else:
                description = self.__get_dynamic_attr('item_description', item)
            link = add_domain(
                current_site.domain,
                self.__get_dynamic_attr('item_link', item),
                request.is_secure(),
            )
            enc = None
            enc_url = self.__get_dynamic_attr('item_enclosure_url', item)
            if enc_url:
                enc = feedgenerator.Enclosure(
                    url = smart_unicode(enc_url),
                    length = smart_unicode(self.__get_dynamic_attr('item_enclosure_length', item)),
                    mime_type = smart_unicode(self.__get_dynamic_attr('item_enclosure_mime_type', item))
                )
            author_name = self.__get_dynamic_attr('item_author_name', item)
            if author_name is not None:
                author_email = self.__get_dynamic_attr('item_author_email', item)
                author_link = self.__get_dynamic_attr('item_author_link', item)
            else:
                author_email = author_link = None
            pubdate = self.__get_dynamic_attr('item_pubdate', item)
            if pubdate and not pubdate.tzinfo:
                # Naive datetimes are assumed to be in the server's local zone.
                ltz = tzinfo.LocalTimezone(pubdate)
                pubdate = pubdate.replace(tzinfo=ltz)
            feed.add_item(
                title = title,
                link = link,
                description = description,
                unique_id = self.__get_dynamic_attr('item_guid', item, link),
                enclosure = enc,
                pubdate = pubdate,
                author_name = author_name,
                author_email = author_email,
                author_link = author_link,
                categories = self.__get_dynamic_attr('item_categories', item),
                item_copyright = self.__get_dynamic_attr('item_copyright', item),
                **self.item_extra_kwargs(item)
            )
        return feed
def feed(request, url, feed_dict=None):
    """Provided for backwards compatibility.

    Legacy function-based view that dispatches ``url`` ("slug/param") to a
    feed class registered in *feed_dict*. New code should use the class-based
    ``Feed`` above.
    """
    from django.contrib.syndication.feeds import Feed as LegacyFeed
    import warnings
    warnings.warn('The syndication feed() view is deprecated. Please use the '
                  'new class based view API.',
                  category=PendingDeprecationWarning)
    if not feed_dict:
        raise Http404("No feeds are registered.")
    # Split "slug/param"; a bare slug means an empty param.
    try:
        slug, param = url.split('/', 1)
    except ValueError:
        slug, param = url, ''
    try:
        f = feed_dict[slug]
    except KeyError:
        raise Http404("Slug %r isn't registered." % slug)
    # Backwards compatibility within the backwards compatibility;
    # Feeds can be updated to be class-based, but still be deployed
    # using the legacy feed view. This only works if the feed takes
    # no arguments (i.e., get_object returns None). Refs #14176.
    if not issubclass(f, LegacyFeed):
        instance = f()
        instance.feed_url = getattr(f, 'feed_url', None) or request.path
        instance.title_template = f.title_template or ('feeds/%s_title.html' % slug)
        instance.description_template = f.description_template or ('feeds/%s_description.html' % slug)
        return instance(request)
    try:
        feedgen = f(slug, request).get_feed(param)
    except FeedDoesNotExist:
        raise Http404("Invalid feed parameters. Slug %r is valid, but other parameters, or lack thereof, are not." % slug)
    response = HttpResponse(mimetype=feedgen.mime_type)
    feedgen.write(response, 'utf-8')
    return response
|
linearregression/socorro
|
refs/heads/master
|
socorro/unittest/external/postgresql/test_adi.py
|
1
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import datetime
from nose.plugins.attrib import attr
from nose.tools import eq_, assert_raises
from socorro.external.postgresql.adi import ADI
from socorro.external import MissingArgumentError
from unittestbase import PostgreSQLTestCase
@attr(integration='postgres') # for nosetests
class IntegrationTestADI(PostgreSQLTestCase):
    """Integration tests for the ADI (active daily installs) service,
    backed by a real PostgreSQL ``raw_adi`` table."""
    def setUp(self):
        """Set up this test class by populating the raw_adi table with fake
        data. """
        super(IntegrationTestADI, self).setUp()
        self._truncate()
        cursor = self.connection.cursor()
        self.date = datetime.datetime(2015, 7, 1)
        yesterday = self.date - datetime.timedelta(hours=24)
        products = ('Firefox', 'Thunderbird')
        versions = ('39', '40')
        platforms = ('Linux', 'Darwin')
        channels = ('release', 'beta')
        # One row per (product, version, platform, channel) combination with
        # a distinct power-of-two adi_count, so each row is identifiable in
        # the assertions below.
        adi_count = 1
        for product in products:
            for version in versions:
                for platform in platforms:
                    for channel in channels:
                        cursor.execute("""
                            INSERT INTO raw_adi (
                                adi_count,
                                date,
                                product_name,
                                product_os_platform,
                                product_os_version,
                                product_version,
                                build,
                                product_guid,
                                update_channel,
                                received_at
                            )
                            VALUES (
                                %s, %s, %s, %s, %s, %s, %s, %s, %s, now()
                            )
                        """, (
                            adi_count,
                            yesterday,
                            product,
                            platform,
                            '1.0',
                            version,
                            '20140903141017',
                            '{abc}',
                            channel,
                        ))
                        adi_count *= 2
        cursor.execute('select count(*) from raw_adi')
        count, = cursor.fetchone()
        # We expect there to be 2 channels per every 2 platforms,
        # per every 2 versions per every 2 products.
        assert count == 2 * 2 * 2 * 2, count
        self.connection.commit()
    def tearDown(self):
        # Remove the fixture rows so later tests start from a clean table.
        self._truncate()
        super(IntegrationTestADI, self).tearDown()
    def _truncate(self):
        # Helper: empty raw_adi (and dependent tables) between tests.
        cursor = self.connection.cursor()
        cursor.execute("""
            TRUNCATE raw_adi CASCADE
        """)
        self.connection.commit()
    def test_get(self):
        impl = ADI(config=self.config)
        # The service requires its date/product/version arguments.
        assert_raises(
            MissingArgumentError,
            impl.get
        )
        start = self.date - datetime.timedelta(days=1)
        end = self.date
        stats = impl.get(
            start_date=start,
            end_date=end,
            product='Firefox',
            version='42'
        )
        # No fixture rows exist for version 42.
        eq_(stats['hits'], [])
        stats = impl.get(
            start_date=start,
            end_date=end,
            product='Firefox',
            version='40'
        )
        start_formatted = start.strftime('%Y-%m-%d')
        hits = stats['hits']
        # Because the results come back in no particular order,
        # to make it easier to compare, sort by something predictable.
        hits.sort(key=lambda x: x['adi_count'])
        eq_(stats['hits'][0], {
            'adi_count': 16L,
            'date': start_formatted,
            'product': 'Firefox',
            'version': '40',
            'platform': 'Linux',
            'release_channel': 'release'
        })
        eq_(stats['hits'][1], {
            'adi_count': 32L,
            'date': start_formatted,
            'product': 'Firefox',
            'version': '40',
            'platform': 'Linux',
            'release_channel': 'beta'
        })
        eq_(stats['hits'][2], {
            'adi_count': 64L,
            'date': start_formatted,
            'product': 'Firefox',
            'version': '40',
            'platform': 'Darwin',
            'release_channel': 'release'
        })
        eq_(stats['hits'][3], {
            'adi_count': 128L,
            'date': start_formatted,
            'product': 'Firefox',
            'version': '40',
            'platform': 'Darwin',
            'release_channel': 'beta'
        })
|
umitproject/packet-manipulator
|
refs/heads/master
|
umit/pm/backend/abstract/basecontext/__init__.py
|
2
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2008 Adriano Monteiro Marques
#
# Author: Francesco Piccinno <stack.box@gmail.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""
This module is for writing other contexts for
different backends that could work with PM.
The class hierarchy is the following:
+ BaseStaticContext
\-- BaseTimedContext
|-- BaseSendContext
|-- BaseSendReceiveContext
|-- BaseSniffContext
\-- BaseSequenceContext
"""
|
gangadharkadam/v5_erp
|
refs/heads/v5.0
|
erpnext/manufacturing/doctype/bom/bom.py
|
3
|
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from frappe.utils import cint, cstr, flt
from frappe import _
from frappe.model.document import Document
from operator import itemgetter
class BOM(Document):
    """Bill of Materials document controller.

    Handles naming, validation, costing (operations + raw materials) and
    maintenance of the exploded (flattened) item list of a BOM.
    """

    def autoname(self):
        """Name the BOM as ``BOM/<item>/<NNN>`` with a 3-digit serial."""
        # NOTE(review): the item code is interpolated into the LIKE pattern
        # with quotes escaped; consider a parameterized query instead.
        last_name = frappe.db.sql("""select max(name) from `tabBOM`
            where name like "BOM/%s/%%" """ % cstr(self.item).replace('"', '\\"'))
        if last_name:
            idx = cint(cstr(last_name[0][0]).split('/')[-1].split('-')[0]) + 1
        else:
            idx = 1
        self.name = 'BOM/' + self.item + ('/%.3i' % idx)

    def validate(self):
        """Run all BOM validations and recompute material details/costs."""
        self.clear_operations()
        self.validate_main_item()

        from erpnext.utilities.transaction_base import validate_uom_is_integer
        validate_uom_is_integer(self, "stock_uom", "qty")

        self.validate_materials()
        self.set_bom_material_details()
        self.calculate_cost()

    def on_update(self):
        self.check_recursion()
        self.update_exploded_items()

    def on_submit(self):
        self.manage_default_bom()

    def on_cancel(self):
        frappe.db.set(self, "is_active", 0)
        frappe.db.set(self, "is_default", 0)

        # check if used in any other bom
        self.validate_bom_links()
        self.manage_default_bom()

    def on_update_after_submit(self):
        self.validate_bom_links()
        self.manage_default_bom()

    def get_item_det(self, item_code):
        """Return Item master details for item_code; throw if missing."""
        item = frappe.db.sql("""select name, is_asset_item, is_purchase_item,
            docstatus, description, is_sub_contracted_item, stock_uom, default_bom,
            last_purchase_rate
            from `tabItem` where name=%s""", item_code, as_dict = 1)

        if not item:
            frappe.throw(_("Item: {0} does not exist in the system").format(item_code))

        return item

    def validate_rm_item(self, item):
        """A raw material row must not reference the BOM's own item."""
        if item[0]['name'] == self.item:
            frappe.throw(_("Raw material cannot be same as main Item"))

    def set_bom_material_details(self):
        """Fill missing uom/description/rate details on each item row."""
        for item in self.get("items"):
            ret = self.get_bom_material_detail({"item_code": item.item_code, "bom_no": item.bom_no,
                "qty": item.qty})

            for r in ret:
                if not item.get(r):
                    item.set(r, ret[r])

    def get_bom_material_detail(self, args=None):
        """ Get raw material details like uom, desc and rate"""
        if not args:
            args = frappe.form_dict.get('args')

        if isinstance(args, basestring):
            import json
            args = json.loads(args)

        item = self.get_item_det(args['item_code'])
        self.validate_rm_item(item)

        args['bom_no'] = args['bom_no'] or item and cstr(item[0]['default_bom']) or ''
        args.update(item[0])

        rate = self.get_rm_rate(args)
        ret_item = {
             'description'  : item and args['description'] or '',
             'stock_uom'    : item and args['stock_uom'] or '',
             'bom_no'       : args['bom_no'],
             'rate'         : rate
        }
        return ret_item

    def get_rm_rate(self, arg):
        """ Get raw material rate as per selected method, if bom exists takes bom cost """
        rate = 0
        if arg['bom_no']:
            rate = self.get_bom_unitcost(arg['bom_no'])
        elif arg and (arg['is_purchase_item'] == 'Yes' or arg['is_sub_contracted_item'] == 'Yes'):
            if self.rm_cost_as_per == 'Valuation Rate':
                rate = self.get_valuation_rate(arg)
            elif self.rm_cost_as_per == 'Last Purchase Rate':
                rate = arg['last_purchase_rate']
            elif self.rm_cost_as_per == "Price List":
                if not self.buying_price_list:
                    frappe.throw(_("Please select Price List"))
                rate = frappe.db.get_value("Item Price", {"price_list": self.buying_price_list,
                    "item_code": arg["item_code"]}, "price_list_rate") or 0

        return rate

    def update_cost(self):
        """Refresh per-row rates and totals; saves even a submitted BOM."""
        if self.docstatus == 2:
            return

        for d in self.get("items"):
            rate = self.get_bom_material_detail({'item_code': d.item_code, 'bom_no': d.bom_no,
                'qty': d.qty})["rate"]
            if rate:
                d.rate = rate

        if self.docstatus == 1:
            self.ignore_validate_update_after_submit = True
            self.calculate_cost()
        self.save()

    def get_bom_unitcost(self, bom_no):
        """Return total_cost/quantity of an active BOM, else 0."""
        bom = frappe.db.sql("""select name, total_cost/quantity as unit_cost from `tabBOM`
            where is_active = 1 and name = %s""", bom_no, as_dict=1)
        return bom and bom[0]['unit_cost'] or 0

    def get_valuation_rate(self, args):
        """ Get weighted average of valuation rate from all warehouses """
        total_qty, total_value, valuation_rate = 0.0, 0.0, 0.0
        for d in frappe.db.sql("""select actual_qty, stock_value from `tabBin`
            where item_code=%s""", args['item_code'], as_dict=1):
                total_qty += flt(d.actual_qty)
                total_value += flt(d.stock_value)

        if total_qty:
            valuation_rate = total_value / total_qty

        if valuation_rate <= 0:
            # fall back to the most recent positive rate in the ledger
            last_valuation_rate = frappe.db.sql("""select valuation_rate
                from `tabStock Ledger Entry`
                where item_code = %s and ifnull(valuation_rate, 0) > 0
                order by posting_date desc, posting_time desc, name desc limit 1""", args['item_code'])

            valuation_rate = flt(last_valuation_rate[0][0]) if last_valuation_rate else 0

        return valuation_rate

    def manage_default_bom(self):
        """ Uncheck others if current one is selected as default,
            update default bom in item master
        """
        if self.is_default and self.is_active:
            from frappe.model.utils import set_default
            set_default(self, "item")
            item = frappe.get_doc("Item", self.item)
            if item.default_bom != self.name:
                item.default_bom = self.name
                item.save()
        else:
            if not self.is_active:
                frappe.db.set(self, "is_default", 0)
            item = frappe.get_doc("Item", self.item)
            if item.default_bom == self.name:
                item.default_bom = None
                item.save()

    def clear_operations(self):
        """Drop operations (and row links to them) when not using operations."""
        if not self.with_operations:
            self.set('operations', [])
            for d in self.get("items"):
                d.operation = None

    def validate_main_item(self):
        """ Validate main FG item"""
        item = self.get_item_det(self.item)
        if not item:
            frappe.throw(_("Item {0} does not exist in the system or has expired").format(self.item))
        else:
            ret = frappe.db.get_value("Item", self.item, ["description", "stock_uom"])
            self.description = ret[0]
            self.uom = ret[1]

    def validate_materials(self):
        """ Validate raw material entries """
        check_list = []
        for m in self.get('items'):
            if m.bom_no:
                validate_bom_no(m.item_code, m.bom_no)
            if flt(m.qty) <= 0:
                frappe.throw(_("Quantity required for Item {0} in row {1}").format(m.item_code, m.idx))
            self.check_if_item_repeated(m.item_code, m.operation, check_list)

    def check_if_item_repeated(self, item, op, check_list):
        """Throw if (item, operation) already appeared; else record it."""
        if [cstr(item), cstr(op)] in check_list:
            frappe.throw(_("Item {0} has been entered multiple times against same operation").format(item))
        else:
            check_list.append([cstr(item), cstr(op)])

    def check_recursion(self):
        """ Check whether recursion occurs in any bom"""
        check_list = [['parent', 'bom_no', 'parent'], ['bom_no', 'parent', 'child']]
        for d in check_list:
            bom_list, count = [self.name], 0
            while (len(bom_list) > count ):
                boms = frappe.db.sql(" select %s from `tabBOM Item` where %s = %s " %
                    (d[0], d[1], '%s'), cstr(bom_list[count]))
                count = count + 1
                for b in boms:
                    if b[0] == self.name:
                        # FIX: the message previously used index {2} with only
                        # two format arguments, raising IndexError instead of
                        # showing the recursion error.
                        frappe.throw(_("BOM recursion: {0} cannot be parent or child of {1}").format(b[0], self.name))
                    if b[0]:
                        bom_list.append(b[0])

    def update_cost_and_exploded_items(self, bom_list=None):
        """Re-run on_update for this BOM and every BOM below it.

        FIX: default was a shared mutable list ([]); use None so each call
        starts with a fresh list (traverse_tree handles the None).
        """
        bom_list = self.traverse_tree(bom_list)
        for bom in bom_list:
            bom_obj = frappe.get_doc("BOM", bom)
            bom_obj.on_update()

        return bom_list

    def traverse_tree(self, bom_list=None):
        """Breadth-first collect this BOM and all child BOMs, deepest first.

        FIX: default was a shared mutable list ([]).
        """
        def _get_children(bom_no):
            return [cstr(d[0]) for d in frappe.db.sql("""select bom_no from `tabBOM Item`
                where parent = %s and ifnull(bom_no, '') != ''""", bom_no)]

        if bom_list is None:
            bom_list = []
        count = 0
        if self.name not in bom_list:
            bom_list.append(self.name)

        while(count < len(bom_list)):
            for child_bom in _get_children(bom_list[count]):
                if child_bom not in bom_list:
                    bom_list.append(child_bom)
            count += 1
        bom_list.reverse()
        return bom_list

    def calculate_cost(self):
        """Calculate bom totals"""
        self.calculate_op_cost()
        self.calculate_rm_cost()
        self.total_cost = self.operating_cost + self.raw_material_cost

    def calculate_op_cost(self):
        """Update workstation rate and calculates totals"""
        self.operating_cost = 0
        for d in self.get('operations'):
            if d.workstation:
                if not d.hour_rate:
                    d.hour_rate = flt(frappe.db.get_value("Workstation", d.workstation, "hour_rate"))
                if d.hour_rate and d.time_in_mins:
                    d.operating_cost = flt(d.hour_rate) * flt(d.time_in_mins) / 60.0

            self.operating_cost += flt(d.operating_cost)

    def calculate_rm_cost(self):
        """Fetch RM rate as per today's valuation rate and calculate totals"""
        total_rm_cost = 0
        for d in self.get('items'):
            if d.bom_no:
                d.rate = self.get_bom_unitcost(d.bom_no)
            d.amount = flt(d.rate, self.precision("rate", d)) * flt(d.qty, self.precision("qty", d))
            d.qty_consumed_per_unit = flt(d.qty, self.precision("qty", d)) / flt(self.quantity, self.precision("quantity"))
            total_rm_cost += d.amount

        self.raw_material_cost = total_rm_cost

    def update_exploded_items(self):
        """ Update Flat BOM, following will be correct data"""
        self.get_exploded_items()
        self.add_exploded_items()

    def get_exploded_items(self):
        """ Get all raw materials including items from child bom"""
        self.cur_exploded_items = {}
        for d in self.get('items'):
            if d.bom_no:
                self.get_child_exploded_items(d.bom_no, d.qty)
            else:
                self.add_to_cur_exploded_items(frappe._dict({
                    'item_code'     : d.item_code,
                    'description'   : d.description,
                    'stock_uom'     : d.stock_uom,
                    'qty'           : flt(d.qty),
                    'rate'          : flt(d.rate),
                }))

    def add_to_cur_exploded_items(self, args):
        """Accumulate qty per item_code in self.cur_exploded_items."""
        if self.cur_exploded_items.get(args.item_code):
            self.cur_exploded_items[args.item_code]["qty"] += args.qty
        else:
            self.cur_exploded_items[args.item_code] = args

    def get_child_exploded_items(self, bom_no, qty):
        """ Add all items from Flat BOM of child BOM"""
        # Did not use qty_consumed_per_unit in the query, as it leads to rounding loss
        child_fb_items = frappe.db.sql("""select bom_item.item_code, bom_item.description,
            bom_item.stock_uom, bom_item.qty, bom_item.rate,
            ifnull(bom_item.qty, 0 ) / ifnull(bom.quantity, 1) as qty_consumed_per_unit
            from `tabBOM Explosion Item` bom_item, tabBOM bom
            where bom_item.parent = bom.name and bom.name = %s and bom.docstatus = 1""", bom_no, as_dict = 1)

        for d in child_fb_items:
            self.add_to_cur_exploded_items(frappe._dict({
                'item_code'             : d['item_code'],
                'description'           : d['description'],
                'stock_uom'             : d['stock_uom'],
                'qty'                   : d['qty_consumed_per_unit']*qty,
                'rate'                  : flt(d['rate']),
            }))

    def add_exploded_items(self):
        "Add items to Flat BOM table"
        frappe.db.sql("""delete from `tabBOM Explosion Item` where parent=%s""", self.name)
        self.set('exploded_items', [])
        for d in sorted(self.cur_exploded_items, key=itemgetter(0)):
            ch = self.append('exploded_items', {})
            for i in self.cur_exploded_items[d].keys():
                ch.set(i, self.cur_exploded_items[d][i])
            ch.amount = flt(ch.qty) * flt(ch.rate)
            ch.qty_consumed_per_unit = flt(ch.qty) / flt(self.quantity)
            ch.docstatus = self.docstatus
            ch.db_insert()

    def validate_bom_links(self):
        """Block deactivation/cancellation while other active BOMs use this one."""
        if not self.is_active:
            act_pbom = frappe.db.sql("""select distinct bom_item.parent from `tabBOM Item` bom_item
                where bom_item.bom_no = %s and bom_item.docstatus = 1
                and exists (select * from `tabBOM` where name = bom_item.parent
                    and docstatus = 1 and is_active = 1)""", self.name)

            if act_pbom and act_pbom[0][0]:
                frappe.throw(_("Cannot deactivate or cancel BOM as it is linked with other BOMs"))
def get_bom_items_as_dict(bom, qty=1, fetch_exploded=1):
    """Return {item_code: row} for a BOM's items, quantities scaled by qty.

    When fetch_exploded is truthy, reads the flattened `BOM Explosion Item`
    table (excluding sub-contracted / production-applicable items);
    otherwise reads the direct `BOM Item` rows.
    """
    item_dict = {}

    # Did not use qty_consumed_per_unit in the query, as it leads to rounding loss.
    # Only the table name and extra conditions are spliced in via %-formatting;
    # `qty` and `bom` are passed as query parameters (%%(...)s survives the
    # format step as %(...)s) so user-supplied values never reach the SQL text.
    query = """select
                bom_item.item_code,
                item.item_name,
                sum(ifnull(bom_item.qty, 0)/ifnull(bom.quantity, 1)) * %%(qty)s as qty,
                item.description,
                item.stock_uom,
                item.default_warehouse,
                item.expense_account as expense_account,
                item.buying_cost_center as cost_center
            from
                `tab%(table)s` bom_item, `tabBOM` bom, `tabItem` item
            where
                bom_item.parent = bom.name
                and bom_item.docstatus < 2
                and bom_item.parent = %%(bom)s
                and item.name = bom_item.item_code
                %(conditions)s
                group by item_code, stock_uom"""

    if fetch_exploded:
        query = query % {
            "table": "BOM Explosion Item",
            "conditions": """and ifnull(item.is_pro_applicable, 'No') = 'No'
                and ifnull(item.is_sub_contracted_item, 'No') = 'No' """
        }
    else:
        query = query % {
            "table": "BOM Item",
            "conditions": ""
        }

    items = frappe.db.sql(query, {"qty": qty, "bom": bom}, as_dict=True)

    # make unique (has_key is Python-2-only; `in` works on both)
    for item in items:
        if item.item_code in item_dict:
            item_dict[item.item_code]["qty"] += flt(item.qty)
        else:
            item_dict[item.item_code] = item

    return item_dict
@frappe.whitelist()
def get_bom_items(bom, qty=1, fetch_exploded=1):
    """Return the BOM's item rows as a list sorted by item_code."""
    items = get_bom_items_as_dict(bom, qty, fetch_exploded).values()
    # key-based sort replaces the Python-2-only cmp-function form; item
    # codes are unique (dict keys), so the ordering is identical.
    items.sort(key=lambda item: item.item_code)
    return items
def validate_bom_no(item, bom_no):
    """Validate BOM No of sub-contracted items"""
    bom = frappe.get_doc("BOM", bom_no)

    if not bom.is_active:
        frappe.throw(_("BOM {0} must be active").format(bom_no))

    # Draft/cancelled BOMs are only tolerated inside the test runner.
    if bom.docstatus != 1 and not getattr(frappe.flags, "in_test", False):
        frappe.throw(_("BOM {0} must be submitted").format(bom_no))

    if item:
        # Short-circuit keeps the variant lookup off the common path.
        belongs = (bom.item == item
            or bom.item == frappe.db.get_value("Item", item, "variant_of"))
        if not belongs:
            frappe.throw(_("BOM {0} does not belong to Item {1}").format(bom_no, item))
|
anandpdoshi/frappe
|
refs/heads/develop
|
frappe/patches/v6_6/fix_file_url.py
|
41
|
from __future__ import unicode_literals
import frappe
from frappe.model.meta import is_single
def execute():
	"""Fix old style file urls that start with files/"""
	# Two passes: File documents first, then every Attach/Attach Image
	# field value stored on other doctypes.
	fix_file_urls()
	fix_attach_field_urls()
def fix_file_urls():
	"""Prefix "/" to File.file_url values stored as ``files/...``.

	Re-validates each file and backfills a missing content hash; files
	whose content cannot be read from disk are skipped silently.
	"""
	for name in frappe.db.sql_list("""select name from `tabFile` where file_url like 'files/%'"""):
		# renamed from `file`, which shadowed the builtin
		file_doc = frappe.get_doc("File", name)
		file_doc.db_set("file_url", "/" + file_doc.file_url, update_modified=False)
		try:
			file_doc.validate_file()
			file_doc.db_set("file_name", file_doc.file_name, update_modified=False)

			if not file_doc.content_hash:
				file_doc.generate_content_hash()
				file_doc.db_set("content_hash", file_doc.content_hash, update_modified=False)
		except IOError:
			# file content missing on disk; leave the record best-effort fixed
			pass
def fix_attach_field_urls():
	"""Prefix "/" to Attach / Attach Image field values stored as files/..."""
	# taken from an old patch
	standard_fields = frappe.db.sql("""select parent, fieldname from `tabDocField` where fieldtype in ('Attach', 'Attach Image')""")
	custom_fields = frappe.db.sql("""select dt, fieldname from `tabCustom Field` where fieldtype in ('Attach', 'Attach Image')""")

	for doctype, fieldname in standard_fields + custom_fields:
		if is_single(doctype):
			# Single doctypes keep their values in tabSingles
			frappe.db.sql("""update `tabSingles` set value=concat("/", `value`)
				where doctype=%(doctype)s and field=%(fieldname)s
				and value like 'files/%%'""", {"doctype": doctype, "fieldname": fieldname})
			continue

		frappe.db.sql("""update `tab{doctype}` set `{fieldname}`=concat("/", `{fieldname}`)
			where `{fieldname}` like 'files/%'""".format(doctype=doctype, fieldname=fieldname))
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.